1use crate::{
38 DefinedGlobalIndex, DefinedMemoryIndex, DefinedTableIndex, FuncIndex, FuncRefIndex,
39 GlobalIndex, MemoryIndex, Module, OwnedMemoryIndex, TableIndex,
40};
41use cranelift_entity::packed_option::ReservedValue;
42
/// Convert a `usize` into a `u32`.
///
/// On 32-bit targets `usize` and `u32` have the same width, so the
/// conversion is infallible in practice.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
    let converted: Result<u32, _> = u32::try_from(sz);
    converted.unwrap()
}
/// Convert a `usize` into a `u32`, panicking if the value does not fit.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    let converted: Result<u32, _> = u32::try_from(sz);
    converted.expect("overflow in cast from usize to u32")
}
51
/// Round `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero. The intermediate `offset + (width - 1)` can
/// overflow-panic in debug builds for offsets near `u32::MAX`.
#[inline]
fn align(offset: u32, width: u32) -> u32 {
    let bumped = offset + (width - 1);
    (bumped / width) * width
}
57
/// Offsets of fields within the runtime `VMContext` and related structures,
/// parameterized over a pointer-size representation `P`.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsets<P> {
    /// The pointer size (in bytes) for the target; see the `PtrSize` trait.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of defined (non-imported) tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined (non-imported) memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance: defined and
    /// non-shared (see the filter in `VMOffsets::new`).
    pub num_owned_memories: u32,
    /// The number of defined (non-imported) globals in the module.
    pub num_defined_globals: u32,
    /// The number of escaped functions (those that need a func-ref slot).
    pub num_escaped_funcs: u32,

    // Precomputed start offsets of the variable-length regions of the
    // vmctx; filled in by the `From<VMOffsetsFields>` impl below.
    imported_functions: u32,
    imported_tables: u32,
    imported_memories: u32,
    imported_globals: u32,
    defined_tables: u32,
    defined_memories: u32,
    owned_memories: u32,
    defined_globals: u32,
    defined_func_refs: u32,
    // Total size of the vmctx, in bytes.
    size: u32,
}
96
/// Abstraction over the target's pointer size so offsets can be computed
/// either for the compilation host (`HostPtr`) or an explicit width (`u8`).
///
/// All default methods derive fixed offsets from `size()`; these values are
/// ABI-critical since JIT code reads them directly.
pub trait PtrSize {
    /// The size, in bytes, of a target pointer.
    fn size(&self) -> u8;

    /// Offset of the runtime-limits field: the 4-byte leading magic rounded
    /// up to pointer alignment.
    fn vmcontext_runtime_limits(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    /// Offset of the builtin-functions field: one pointer past the
    /// runtime limits.
    fn vmcontext_builtin_functions(&self) -> u8 {
        self.vmcontext_runtime_limits() + self.size()
    }

    /// Offset of the `array_call` slot in a func ref (first pointer).
    #[inline]
    fn vm_func_ref_array_call(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `wasm_call` slot in a func ref (second pointer).
    #[inline]
    fn vm_func_ref_wasm_call(&self) -> u8 {
        1 * self.size()
    }

    /// Offset of the `type_index` slot in a func ref (third pointer slot).
    #[inline]
    fn vm_func_ref_type_index(&self) -> u8 {
        2 * self.size()
    }

    /// Offset of the `vmctx` slot in a func ref (fourth pointer).
    #[inline]
    fn vm_func_ref_vmctx(&self) -> u8 {
        3 * self.size()
    }

    /// Total size of a func ref: four pointer-sized slots.
    #[inline]
    fn size_of_vm_func_ref(&self) -> u8 {
        4 * self.size()
    }

    /// Size of a global definition: fixed at 16 bytes independent of
    /// pointer width.
    #[inline]
    fn size_of_vmglobal_definition(&self) -> u8 {
        16
    }

    // Offsets within the runtime-limits structure: two 8-byte fields
    // followed by pointer-sized fields, laid out sequentially from 0.

    /// Offset of the fuel-consumed counter (8-byte field at the start).
    #[inline]
    fn vmruntime_limits_fuel_consumed(&self) -> u8 {
        0
    }

    /// Offset of the epoch deadline: 8 bytes past fuel-consumed.
    #[inline]
    fn vmruntime_limits_epoch_deadline(&self) -> u8 {
        self.vmruntime_limits_fuel_consumed() + 8
    }

    /// Offset of the stack limit: 8 bytes past the epoch deadline.
    #[inline]
    fn vmruntime_limits_stack_limit(&self) -> u8 {
        self.vmruntime_limits_epoch_deadline() + 8
    }

    /// Offset of the last wasm exit frame pointer.
    fn vmruntime_limits_last_wasm_exit_fp(&self) -> u8 {
        self.vmruntime_limits_stack_limit() + self.size()
    }

    /// Offset of the last wasm exit program counter.
    fn vmruntime_limits_last_wasm_exit_pc(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_fp() + self.size()
    }

    /// Offset of the last wasm entry frame pointer.
    fn vmruntime_limits_last_wasm_entry_fp(&self) -> u8 {
        self.vmruntime_limits_last_wasm_exit_pc() + self.size()
    }

    /// Offset of the `base` pointer in a memory definition (first pointer).
    #[inline]
    fn vmmemory_definition_base(&self) -> u8 {
        0 * self.size()
    }

    /// Offset of the `current_length` field in a memory definition
    /// (second pointer-sized slot).
    #[inline]
    fn vmmemory_definition_current_length(&self) -> u8 {
        1 * self.size()
    }

    /// Total size of a memory definition: two pointer-sized fields.
    #[inline]
    fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.size()
    }

    /// Size of a pointer to a memory definition.
    #[inline]
    fn size_of_vmmemory_pointer(&self) -> u8 {
        self.size()
    }

    /// Offset of the func-ref field in an array-call host function context:
    /// the 4-byte magic rounded up to pointer alignment.
    fn vmarray_call_host_func_context_func_ref(&self) -> u8 {
        u8::try_from(align(
            u32::try_from(core::mem::size_of::<u32>()).unwrap(),
            u32::from(self.size()),
        ))
        .unwrap()
    }

    // Static (fixed-offset) portion of the vmctx: a 32-bit magic followed
    // by a chain of pointer-sized slots.

    /// Offset of the 32-bit vmctx magic; always at the very start.
    #[inline]
    fn vmctx_magic(&self) -> u8 {
        0
    }

    /// Offset of the runtime-limits pointer.
    // NOTE(review): advances by a full pointer even though the magic is a
    // `u32` — presumably deliberate padding for pointer alignment; confirm.
    #[inline]
    fn vmctx_runtime_limits(&self) -> u8 {
        self.vmctx_magic() + self.size()
    }

    /// Offset of the builtin-functions pointer.
    #[inline]
    fn vmctx_builtin_functions(&self) -> u8 {
        self.vmctx_runtime_limits() + self.size()
    }

    /// Offset of the callee slot.
    #[inline]
    fn vmctx_callee(&self) -> u8 {
        self.vmctx_builtin_functions() + self.size()
    }

    /// Offset of the epoch pointer.
    #[inline]
    fn vmctx_epoch_ptr(&self) -> u8 {
        self.vmctx_callee() + self.size()
    }

    /// Offset of the GC heap base.
    #[inline]
    fn vmctx_gc_heap_base(&self) -> u8 {
        self.vmctx_epoch_ptr() + self.size()
    }

    /// Offset of the GC heap bound.
    #[inline]
    fn vmctx_gc_heap_bound(&self) -> u8 {
        self.vmctx_gc_heap_base() + self.size()
    }

    /// Offset of the GC heap data pointer.
    #[inline]
    fn vmctx_gc_heap_data(&self) -> u8 {
        self.vmctx_gc_heap_bound() + self.size()
    }

    /// Offset of the type-ids array pointer.
    #[inline]
    fn vmctx_type_ids_array(&self) -> u8 {
        self.vmctx_gc_heap_data() + self.size()
    }

    /// Offset at which the variable-length, per-module vmctx data begins
    /// (one pointer past the last static slot).
    #[inline]
    fn vmctx_dynamic_data_start(&self) -> u8 {
        self.vmctx_type_ids_array() + self.size()
    }
}
294
295#[derive(Clone, Copy)]
297pub struct HostPtr;
298
299impl PtrSize for HostPtr {
300 #[inline]
301 fn size(&self) -> u8 {
302 core::mem::size_of::<usize>() as u8
303 }
304}
305
306impl PtrSize for u8 {
307 #[inline]
308 fn size(&self) -> u8 {
309 *self
310 }
311}
312
/// The input counts used to build a `VMOffsets`; see the
/// `From<VMOffsetsFields>` impl which computes the actual offsets.
#[derive(Debug, Clone, Copy)]
pub struct VMOffsetsFields<P> {
    /// The pointer size (in bytes) for the target; see the `PtrSize` trait.
    pub ptr: P,
    /// The number of imported functions in the module.
    pub num_imported_functions: u32,
    /// The number of imported tables in the module.
    pub num_imported_tables: u32,
    /// The number of imported memories in the module.
    pub num_imported_memories: u32,
    /// The number of imported globals in the module.
    pub num_imported_globals: u32,
    /// The number of defined (non-imported) tables in the module.
    pub num_defined_tables: u32,
    /// The number of defined (non-imported) memories in the module.
    pub num_defined_memories: u32,
    /// The number of memories owned by this instance (defined, non-shared).
    pub num_owned_memories: u32,
    /// The number of defined (non-imported) globals in the module.
    pub num_defined_globals: u32,
    /// The number of escaped functions (those that need a func-ref slot).
    pub num_escaped_funcs: u32,
}
338
impl<P: PtrSize> VMOffsets<P> {
    /// Compute the vmctx offsets for `module` with the given pointer size.
    pub fn new(ptr: P, module: &Module) -> Self {
        // Owned memories are those defined by the module (imports skipped)
        // that are not shared.
        let num_owned_memories = module
            .memories
            .iter()
            .skip(module.num_imported_memories)
            .filter(|p| !p.1.shared)
            .count()
            .try_into()
            .unwrap();
        VMOffsets::from(VMOffsetsFields {
            ptr,
            num_imported_functions: cast_to_u32(module.num_imported_funcs),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_defined_tables: cast_to_u32(module.num_defined_tables()),
            num_defined_memories: cast_to_u32(module.num_defined_memories()),
            num_owned_memories,
            num_defined_globals: cast_to_u32(module.globals.len() - module.num_imported_globals),
            num_escaped_funcs: cast_to_u32(module.num_escaped_funcs),
        })
    }

    /// Returns the pointer size, in bytes, of the target.
    #[inline]
    pub fn pointer_size(&self) -> u8 {
        self.ptr.size()
    }

    /// Returns an iterator of `(description, size-in-bytes)` for each region
    /// of the vmctx, listed from the highest-offset region down, with the
    /// fixed-layout prefix ("static vmctx data") last.
    pub fn region_sizes(&self) -> impl Iterator<Item = (&str, u32)> {
        // The exhaustive destructuring below forces a compile error here
        // whenever a field is added to `VMOffsets`, so this accounting
        // cannot silently go stale.
        macro_rules! calculate_sizes {
            ($($name:ident: $desc:tt,)*) => {{
                let VMOffsets {
                    ptr: _,
                    num_imported_functions: _,
                    num_imported_tables: _,
                    num_imported_memories: _,
                    num_imported_globals: _,
                    num_defined_tables: _,
                    num_defined_globals: _,
                    num_defined_memories: _,
                    num_owned_memories: _,
                    num_escaped_funcs: _,

                    size,

                    $($name,)*
                } = *self;

                // Walk downwards from the total size: each listed field is a
                // region's start offset, so `last - $name` is its length.
                let mut last = size;
                $(
                    assert!($name <= last);
                    let tmp = $name;
                    let $name = last - $name;
                    last = tmp;
                )*
                // Whatever remains below the lowest region is the static
                // (fixed-offset) portion of the vmctx.
                assert_ne!(last, 0);
                IntoIterator::into_iter([
                    $(($desc, $name),)*
                    ("static vmctx data", last),
                ])
            }};
        }

        // Entries must be in reverse offset order (highest region first) for
        // the size subtraction above to be valid.
        calculate_sizes! {
            defined_func_refs: "module functions",
            defined_globals: "defined globals",
            defined_tables: "defined tables",
            imported_globals: "imported globals",
            imported_tables: "imported tables",
            imported_functions: "imported functions",
            owned_memories: "owned memories",
            defined_memories: "defined memories",
            imported_memories: "imported memories",
        }
    }
}
430
431impl<P: PtrSize> From<VMOffsetsFields<P>> for VMOffsets<P> {
432 fn from(fields: VMOffsetsFields<P>) -> VMOffsets<P> {
433 let mut ret = Self {
434 ptr: fields.ptr,
435 num_imported_functions: fields.num_imported_functions,
436 num_imported_tables: fields.num_imported_tables,
437 num_imported_memories: fields.num_imported_memories,
438 num_imported_globals: fields.num_imported_globals,
439 num_defined_tables: fields.num_defined_tables,
440 num_defined_memories: fields.num_defined_memories,
441 num_owned_memories: fields.num_owned_memories,
442 num_defined_globals: fields.num_defined_globals,
443 num_escaped_funcs: fields.num_escaped_funcs,
444 imported_functions: 0,
445 imported_tables: 0,
446 imported_memories: 0,
447 imported_globals: 0,
448 defined_tables: 0,
449 defined_memories: 0,
450 owned_memories: 0,
451 defined_globals: 0,
452 defined_func_refs: 0,
453 size: 0,
454 };
455
456 #[inline]
461 fn cadd(count: u32, size: u32) -> u32 {
462 count.checked_add(size).unwrap()
463 }
464
465 #[inline]
466 fn cmul(count: u32, size: u8) -> u32 {
467 count.checked_mul(u32::from(size)).unwrap()
468 }
469
470 let mut next_field_offset = u32::from(ret.ptr.vmctx_dynamic_data_start());
471
472 macro_rules! fields {
473 (size($field:ident) = $size:expr, $($rest:tt)*) => {
474 ret.$field = next_field_offset;
475 next_field_offset = cadd(next_field_offset, u32::from($size));
476 fields!($($rest)*);
477 };
478 (align($align:expr), $($rest:tt)*) => {
479 next_field_offset = align(next_field_offset, $align);
480 fields!($($rest)*);
481 };
482 () => {};
483 }
484
485 fields! {
486 size(imported_memories)
487 = cmul(ret.num_imported_memories, ret.size_of_vmmemory_import()),
488 size(defined_memories)
489 = cmul(ret.num_defined_memories, ret.ptr.size_of_vmmemory_pointer()),
490 size(owned_memories)
491 = cmul(ret.num_owned_memories, ret.ptr.size_of_vmmemory_definition()),
492 size(imported_functions)
493 = cmul(ret.num_imported_functions, ret.size_of_vmfunction_import()),
494 size(imported_tables)
495 = cmul(ret.num_imported_tables, ret.size_of_vmtable_import()),
496 size(imported_globals)
497 = cmul(ret.num_imported_globals, ret.size_of_vmglobal_import()),
498 size(defined_tables)
499 = cmul(ret.num_defined_tables, ret.size_of_vmtable_definition()),
500 align(16),
501 size(defined_globals)
502 = cmul(ret.num_defined_globals, ret.ptr.size_of_vmglobal_definition()),
503 size(defined_func_refs) = cmul(
504 ret.num_escaped_funcs,
505 ret.ptr.size_of_vm_func_ref(),
506 ),
507 }
508
509 ret.size = next_field_offset;
510
511 return ret;
512 }
513}
514
515impl<P: PtrSize> VMOffsets<P> {
516 #[inline]
518 pub fn vmfunction_import_wasm_call(&self) -> u8 {
519 0 * self.pointer_size()
520 }
521
522 #[inline]
524 pub fn vmfunction_import_array_call(&self) -> u8 {
525 1 * self.pointer_size()
526 }
527
528 #[inline]
530 pub fn vmfunction_import_vmctx(&self) -> u8 {
531 2 * self.pointer_size()
532 }
533
534 #[inline]
536 pub fn size_of_vmfunction_import(&self) -> u8 {
537 3 * self.pointer_size()
538 }
539}
540
541impl<P: PtrSize> VMOffsets<P> {
543 pub fn size_of_vmfunction_body_ptr(&self) -> u8 {
545 1 * self.pointer_size()
546 }
547}
548
549impl<P: PtrSize> VMOffsets<P> {
551 #[inline]
553 pub fn vmtable_import_from(&self) -> u8 {
554 0 * self.pointer_size()
555 }
556
557 #[inline]
559 pub fn vmtable_import_vmctx(&self) -> u8 {
560 1 * self.pointer_size()
561 }
562
563 #[inline]
565 pub fn size_of_vmtable_import(&self) -> u8 {
566 2 * self.pointer_size()
567 }
568}
569
570impl<P: PtrSize> VMOffsets<P> {
572 #[inline]
574 pub fn vmtable_definition_base(&self) -> u8 {
575 0 * self.pointer_size()
576 }
577
578 pub fn vmtable_definition_current_elements(&self) -> u8 {
580 1 * self.pointer_size()
581 }
582
583 #[inline]
585 pub fn size_of_vmtable_definition_current_elements(&self) -> u8 {
586 self.pointer_size()
587 }
588
589 #[inline]
591 pub fn size_of_vmtable_definition(&self) -> u8 {
592 2 * self.pointer_size()
593 }
594}
595
596impl<P: PtrSize> VMOffsets<P> {
598 #[inline]
600 pub fn vmmemory_import_from(&self) -> u8 {
601 0 * self.pointer_size()
602 }
603
604 #[inline]
606 pub fn vmmemory_import_vmctx(&self) -> u8 {
607 1 * self.pointer_size()
608 }
609
610 #[inline]
612 pub fn size_of_vmmemory_import(&self) -> u8 {
613 3 * self.pointer_size()
614 }
615}
616
617impl<P: PtrSize> VMOffsets<P> {
619 #[inline]
621 pub fn vmglobal_import_from(&self) -> u8 {
622 0 * self.pointer_size()
623 }
624
625 #[inline]
627 pub fn size_of_vmglobal_import(&self) -> u8 {
628 1 * self.pointer_size()
629 }
630}
631
impl<P: PtrSize> VMOffsets<P> {
    /// Return the size of a shared type index: a 32-bit value, independent
    /// of target pointer width.
    #[inline]
    pub fn size_of_vmshared_type_index(&self) -> u8 {
        4
    }
}
640
impl<P: PtrSize> VMOffsets<P> {
    /// The offset of the imported-functions region in the vmctx.
    #[inline]
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.imported_functions
    }

    /// The offset of the imported-tables region in the vmctx.
    #[inline]
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.imported_tables
    }

    /// The offset of the imported-memories region in the vmctx.
    #[inline]
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.imported_memories
    }

    /// The offset of the imported-globals region in the vmctx.
    #[inline]
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.imported_globals
    }

    /// The offset of the defined-tables region in the vmctx.
    #[inline]
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.defined_tables
    }

    /// The offset of the defined-memories (pointer array) region.
    #[inline]
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.defined_memories
    }

    /// The offset of the owned-memories (definition array) region.
    #[inline]
    pub fn vmctx_owned_memories_begin(&self) -> u32 {
        self.owned_memories
    }

    /// The offset of the defined-globals region in the vmctx.
    #[inline]
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.defined_globals
    }

    /// The offset of the func-refs region in the vmctx.
    #[inline]
    pub fn vmctx_func_refs_begin(&self) -> u32 {
        self.defined_func_refs
    }

    /// Return the total size of the vmctx, in bytes.
    #[inline]
    pub fn size_of_vmctx(&self) -> u32 {
        self.size
    }

    // Each `vmctx_*` accessor below computes `region_begin + index *
    // element_size` after asserting the index is in bounds for this module.

    /// The offset of the function import record for `index`.
    #[inline]
    pub fn vmctx_vmfunction_import(&self, index: FuncIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_functions);
        self.vmctx_imported_functions_begin()
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the table import record for `index`.
    #[inline]
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_tables);
        self.vmctx_imported_tables_begin()
            + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the memory import record for `index`.
    #[inline]
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_memories);
        self.vmctx_imported_memories_begin()
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the global import record for `index`.
    #[inline]
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_imported_globals);
        self.vmctx_imported_globals_begin()
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the table definition for defined table `index`.
    #[inline]
    pub fn vmctx_vmtable_definition(&self, index: DefinedTableIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_tables);
        self.vmctx_tables_begin() + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the memory-definition pointer for defined memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_pointer(&self, index: DefinedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_memories);
        self.vmctx_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_pointer())
    }

    /// The offset of the memory definition for owned memory `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition(&self, index: OwnedMemoryIndex) -> u32 {
        assert!(index.as_u32() < self.num_owned_memories);
        self.vmctx_owned_memories_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmmemory_definition())
    }

    /// The offset of the global definition for defined global `index`.
    #[inline]
    pub fn vmctx_vmglobal_definition(&self, index: DefinedGlobalIndex) -> u32 {
        assert!(index.as_u32() < self.num_defined_globals);
        self.vmctx_globals_begin()
            + index.as_u32() * u32::from(self.ptr.size_of_vmglobal_definition())
    }

    /// The offset of the func ref for escaped function `index`.
    /// Panics on the reserved (sentinel) index.
    #[inline]
    pub fn vmctx_func_ref(&self, index: FuncRefIndex) -> u32 {
        assert!(!index.is_reserved_value());
        assert!(index.as_u32() < self.num_escaped_funcs);
        self.vmctx_func_refs_begin() + index.as_u32() * u32::from(self.ptr.size_of_vm_func_ref())
    }

    /// The offset of the `wasm_call` field within function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_wasm_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_wasm_call())
    }

    /// The offset of the `array_call` field within function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_array_call(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_array_call())
    }

    /// The offset of the `vmctx` field within function import `index`.
    #[inline]
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FuncIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `from` field within table import `index`.
    #[inline]
    pub fn vmctx_vmtable_import_from(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_from())
    }

    /// The offset of the `base` field within table definition `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_base(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field within table definition `index`.
    #[inline]
    pub fn vmctx_vmtable_definition_current_elements(&self, index: DefinedTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `from` field within memory import `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_from(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_from())
    }

    /// The offset of the `vmctx` field within memory import `index`.
    #[inline]
    pub fn vmctx_vmmemory_import_vmctx(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_vmctx())
    }

    /// The offset of the `base` field within owned memory definition `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_base(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.ptr.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field within owned memory definition `index`.
    #[inline]
    pub fn vmctx_vmmemory_definition_current_length(&self, index: OwnedMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index)
            + u32::from(self.ptr.vmmemory_definition_current_length())
    }

    /// The offset of the `from` field within global import `index`.
    #[inline]
    pub fn vmctx_vmglobal_import_from(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_from())
    }
}
842
impl<P: PtrSize> VMOffsets<P> {
    /// Offset of the reference count within a DRC GC object header.
    // NOTE(review): hard-coded 8 — presumably an 8-byte header word
    // precedes the count; confirm against the GC header layout.
    #[inline]
    pub fn vm_drc_header_ref_count(&self) -> u32 {
        8
    }
}
853
854impl<P: PtrSize> VMOffsets<P> {
858 #[inline]
860 pub fn vm_gc_ref_activation_table_next(&self) -> u32 {
861 0
862 }
863
864 #[inline]
866 pub fn vm_gc_ref_activation_table_end(&self) -> u32 {
867 self.pointer_size().into()
868 }
869}
870
/// Magic value stored at the start of a `VMContext`: the bytes `b"core"`
/// interpreted as a little-endian `u32`, used to sanity-check that a raw
/// pointer really refers to a vmctx.
pub const VMCONTEXT_MAGIC: u32 = u32::from_le_bytes(*b"core");

/// Magic value identifying an array-call host function context: the bytes
/// `b"ACHF"` interpreted as a little-endian `u32`.
pub const VM_ARRAY_CALL_HOST_FUNC_MAGIC: u32 = u32::from_le_bytes(*b"ACHF");
881
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));
        // Divisibility alone would accept a bogus `align` that always
        // returns 0, so also pin the exact rounded-up values.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(32, 16), 32);
        assert_eq!(align(33, 16), 48);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(5, 1), 5);
    }
}