use crate::global::VMGlobal;
use crate::instance::Instance;
use crate::memory::VMMemory;
use crate::store::InternalStoreHandle;
use crate::trap::{Trap, TrapCode};
use crate::VMTable;
use crate::{VMBuiltinFunctionIndex, VMFunction};
use crate::{VMFunctionBody, VMTag};
use std::convert::TryFrom;
use std::ptr::{self, NonNull};
use std::sync::atomic::{AtomicPtr, Ordering};
use wasmer_types::RawValue;

/// Union representing the first parameter passed when calling a function.
///
/// It may either be a `*mut VMContext` (the environment of a Wasm
/// function) or a `*mut c_void` (the environment of a host function).
#[derive(Copy, Clone, Eq)]
#[repr(C)]
pub union VMFunctionContext {
    /// Wasm functions take a `*mut VMContext` as their first argument.
    pub vmctx: *mut VMContext,
    /// Host functions take a `*mut c_void` (their environment) instead.
    pub host_env: *mut std::ffi::c_void,
}

impl VMFunctionContext {
    /// Check whether the stored pointer is null. Both union variants share
    /// the same storage, so checking `host_env` covers `vmctx` as well.
    pub fn is_null(&self) -> bool {
        unsafe { self.host_env.is_null() }
    }
}

impl std::fmt::Debug for VMFunctionContext {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("VMFunctionContext")
            .field("vmctx_or_hostenv", unsafe { &self.host_env })
            .finish()
    }
}

impl std::cmp::PartialEq for VMFunctionContext {
    fn eq(&self, rhs: &Self) -> bool {
        unsafe { self.host_env as usize == rhs.host_env as usize }
    }
}

impl std::hash::Hash for VMFunctionContext {
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        unsafe {
            self.vmctx.hash(state);
        }
    }
}
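
#[cfg(test)]
mod example_vmfunction_context {
    use super::VMFunctionContext;

    // A minimal sketch of how the union's accessors behave: both variants
    // share the same storage, so a null `host_env` means a null `vmctx`
    // too, and equality compares the stored pointer bits.
    #[test]
    fn null_context_compares_equal() {
        let a = VMFunctionContext {
            host_env: std::ptr::null_mut(),
        };
        let b = VMFunctionContext {
            host_env: std::ptr::null_mut(),
        };
        assert!(a.is_null());
        assert_eq!(a, b);
    }
}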

/// The fields compiled code needs to access to call a function imported
/// from another instance or from the host.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// The context (either a `VMContext` or a host environment) passed as
    /// the first argument when calling the function.
    pub environment: VMFunctionContext,

    /// Handle to the `VMFunction` in the store.
    pub handle: InternalStoreHandle<VMFunction>,
}

#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;
    use wasmer_types::VMOffsets;

    #[test]
    fn check_vmfunction_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, environment),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}

/// The context passed to dynamic functions when they are called: the
/// address of the function that actually runs, plus the host-defined
/// context value it receives.
#[repr(C)]
pub struct VMDynamicFunctionContext<T> {
    /// A pointer to the body of the function that is invoked when this
    /// dynamic function is called.
    pub address: *const VMFunctionBody,

    /// The context value handed through to the dynamic function.
    pub ctx: T,
}

// Safety: the only non-`Send`/`Sync` field is the raw `address` pointer,
// which is never mutated through shared references, and `T` itself is
// required to be `Send + Sync`.
unsafe impl<T: Sized + Send + Sync> Send for VMDynamicFunctionContext<T> {}
unsafe impl<T: Sized + Send + Sync> Sync for VMDynamicFunctionContext<T> {}
129
130impl<T: Sized + Clone + Send + Sync> Clone for VMDynamicFunctionContext<T> {
131 fn clone(&self) -> Self {
132 Self {
133 address: self.address,
134 ctx: self.ctx.clone(),
135 }
136 }
137}

#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMDynamicFunctionContext<usize>>(),
            usize::from(offsets.size_of_vmdynamicfunction_import_context())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, address),
            usize::from(offsets.vmdynamicfunction_import_context_address())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, ctx),
            usize::from(offsets.vmdynamicfunction_import_context_ctx())
        );
    }
}

/// The kind of a function: the calling convention used to enter and
/// leave it.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A static function passes its arguments and results directly via the
    /// native calling convention, e.g.
    /// `extern "C" fn(vmctx, arg1, arg2, ...) -> (result1, ...)`.
    Static,

    /// A dynamic function exchanges its arguments and results through a
    /// buffer of values, so a single entry point can serve any signature.
    Dynamic,
}
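
#[cfg(test)]
mod example_vmfunction_kind {
    use super::VMFunctionKind;

    // A minimal sketch: code that dispatches on the calling convention
    // must treat the two kinds differently, so they never compare equal.
    #[test]
    fn kinds_are_distinct() {
        assert_ne!(VMFunctionKind::Static, VMFunctionKind::Dynamic);
    }
}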

/// The fields compiled code needs to access to utilize a WebAssembly table
/// imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// A pointer to the imported table description.
    pub definition: NonNull<VMTableDefinition>,

    /// Handle to the `VMTable` in the store.
    pub handle: InternalStoreHandle<VMTable>,
}

#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableImport>(),
            usize::from(offsets.size_of_vmtable_import())
        );
        assert_eq!(
            offset_of!(VMTableImport, definition),
            usize::from(offsets.vmtable_import_definition())
        );
    }
}

/// The fields compiled code needs to access to utilize a WebAssembly
/// linear memory imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// A pointer to the imported memory description.
    pub definition: NonNull<VMMemoryDefinition>,

    /// Handle to the `VMMemory` in the store.
    pub handle: InternalStoreHandle<VMMemory>,
}

#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryImport>(),
            usize::from(offsets.size_of_vmmemory_import())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, definition),
            usize::from(offsets.vmmemory_import_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, handle),
            usize::from(offsets.vmmemory_import_handle())
        );
    }
}

/// The fields compiled code needs to access to utilize a WebAssembly tag
/// imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMTagImport {
    /// Handle to the `VMTag` in the store.
    pub handle: InternalStoreHandle<VMTag>,
}

// Safety: the store handle is plain data and is the only field.
unsafe impl Send for VMTagImport {}
unsafe impl Sync for VMTagImport {}

/// The fields compiled code needs to access to utilize a WebAssembly
/// global variable imported from another instance.
#[derive(Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// A pointer to the imported global variable description.
    pub definition: NonNull<VMGlobalDefinition>,

    /// Handle to the `VMGlobal` in the store.
    pub handle: InternalStoreHandle<VMGlobal>,
}

// Safety: the `definition` pointer is only dereferenced while the store
// (and thus the global it points into) is kept alive by the handle; any
// synchronization of the value itself is the user's responsibility.
unsafe impl Send for VMGlobalImport {}
unsafe impl Sync for VMGlobalImport {}

#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalImport>(),
            usize::from(offsets.size_of_vmglobal_import())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, definition),
            usize::from(offsets.vmglobal_import_definition())
        );
    }
}

/// Perform the `memory.copy` operation for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the source or destination ranges are out
/// of bounds.
///
/// # Safety
///
/// The memory is not copied atomically and is not synchronized: it is the
/// caller's responsibility to synchronize.
pub(crate) unsafe fn memory_copy(
    mem: &VMMemoryDefinition,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    if src
        .checked_add(len)
        .map_or(true, |n| usize::try_from(n).unwrap() > mem.current_length)
        || dst
            .checked_add(len)
            .map_or(true, |m| usize::try_from(m).unwrap() > mem.current_length)
    {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = usize::try_from(dst).unwrap();
    let src = usize::try_from(src).unwrap();

    // Bounds and casts are checked above; by this point the accesses are
    // known to be in range.
    let dst = mem.base.add(dst);
    let src = mem.base.add(src);
    ptr::copy(src, dst, len as usize);

    Ok(())
}
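
#[cfg(test)]
mod example_memory_copy {
    use super::{memory_copy, VMMemoryDefinition};

    // A minimal sketch of `memory_copy` over a plain byte buffer standing
    // in for a real Wasm linear memory: an in-bounds copy moves the bytes,
    // and an out-of-bounds range traps without touching the buffer.
    #[test]
    fn copy_checks_bounds() {
        let mut buf: Vec<u8> = (0u8..16).collect();
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };
        unsafe {
            // Copy bytes 0..4 over bytes 8..12.
            memory_copy(&mem, 8, 0, 4).unwrap();
            // `src + len` runs past the end of the memory: this must trap.
            assert!(memory_copy(&mem, 0, 12, 8).is_err());
        }
        assert_eq!(buf[8..12], [0, 1, 2, 3]);
    }
}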

/// Perform the `memory.fill` operation for the memory.
///
/// # Errors
///
/// Returns a `Trap` error when the destination range is out of bounds.
///
/// # Safety
///
/// The memory is not filled atomically and is not synchronized: it is the
/// caller's responsibility to synchronize.
pub(crate) unsafe fn memory_fill(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
    len: u32,
) -> Result<(), Trap> {
    if dst
        .checked_add(len)
        .map_or(true, |m| usize::try_from(m).unwrap() > mem.current_length)
    {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = isize::try_from(dst).unwrap();
    let val = val as u8;

    // Bounds and casts are checked above; by this point the access is
    // known to be in range.
    let dst = mem.base.offset(dst);
    ptr::write_bytes(dst, val, len as usize);

    Ok(())
}
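
#[cfg(test)]
mod example_memory_fill {
    use super::{memory_fill, VMMemoryDefinition};

    // A minimal sketch of `memory_fill`: the `val` argument is truncated
    // to a byte before being splatted over the destination range,
    // mirroring the semantics of the Wasm `memory.fill` instruction.
    #[test]
    fn fill_truncates_value_to_byte() {
        let mut buf = vec![0u8; 8];
        let mem = VMMemoryDefinition {
            base: buf.as_mut_ptr(),
            current_length: buf.len(),
        };
        unsafe {
            // 0x1FF is truncated to 0xFF before the fill.
            memory_fill(&mem, 2, 0x1FF, 4).unwrap();
            // Filling past the end of the memory must trap.
            assert!(memory_fill(&mem, 6, 0, 4).is_err());
        }
        assert_eq!(buf, [0, 0, 0xFF, 0xFF, 0xFF, 0xFF, 0, 0]);
    }
}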

/// Perform the 32-bit check used by `memory.atomic.wait32`: atomically
/// load the value at `dst` and return 0 if it equals `val`, 1 otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when `dst` is out of bounds or misaligned.
///
/// # Safety
///
/// The caller must ensure `mem` describes a valid, live memory.
pub(crate) unsafe fn memory32_atomic_check32(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
) -> Result<u32, Trap> {
    if usize::try_from(dst).unwrap() > mem.current_length {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = isize::try_from(dst).unwrap();
    if dst & 0b11 != 0 {
        return Err(Trap::lib(TrapCode::UnalignedAtomic));
    }

    // Bounds and casts are checked above; by this point the access is
    // known to be in range and aligned.
    let dst = mem.base.offset(dst) as *mut u32;
    let atomic_dst = AtomicPtr::new(dst);
    let read_val = *atomic_dst.load(Ordering::Acquire);
    let ret = if read_val == val { 0 } else { 1 };
    Ok(ret)
}
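
#[cfg(test)]
mod example_memory32_atomic_check32 {
    use super::{memory32_atomic_check32, VMMemoryDefinition};

    // A minimal sketch of the 32-bit check: 0 means "the memory still
    // holds the expected value", 1 means it does not, and a misaligned
    // offset traps before any load happens.
    #[test]
    fn check_compares_and_validates_alignment() {
        let mut word: u32 = 42;
        let mem = VMMemoryDefinition {
            base: (&mut word as *mut u32).cast(),
            current_length: 4,
        };
        unsafe {
            assert_eq!(memory32_atomic_check32(&mem, 0, 42).unwrap(), 0);
            assert_eq!(memory32_atomic_check32(&mem, 0, 7).unwrap(), 1);
            // Offset 2 is not 4-byte aligned.
            assert!(memory32_atomic_check32(&mem, 2, 42).is_err());
        }
    }
}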

/// Perform the 64-bit check used by `memory.atomic.wait64`: atomically
/// load the value at `dst` and return 0 if it equals `val`, 1 otherwise.
///
/// # Errors
///
/// Returns a `Trap` error when `dst` is out of bounds or misaligned.
///
/// # Safety
///
/// The caller must ensure `mem` describes a valid, live memory.
pub(crate) unsafe fn memory32_atomic_check64(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u64,
) -> Result<u32, Trap> {
    if usize::try_from(dst).unwrap() > mem.current_length {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = isize::try_from(dst).unwrap();
    if dst & 0b111 != 0 {
        return Err(Trap::lib(TrapCode::UnalignedAtomic));
    }

    // Bounds and casts are checked above; by this point the access is
    // known to be in range and aligned.
    let dst = mem.base.offset(dst) as *mut u64;
    let atomic_dst = AtomicPtr::new(dst);
    let read_val = *atomic_dst.load(Ordering::Acquire);
    let ret = if read_val == val { 0 } else { 1 };
    Ok(ret)
}
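
#[cfg(test)]
mod example_memory32_atomic_check64 {
    use super::{memory32_atomic_check64, VMMemoryDefinition};

    // The 64-bit variant of the sketch above: identical semantics, but
    // the offset must be 8-byte aligned.
    #[test]
    fn check64_requires_eight_byte_alignment() {
        let mut word: u64 = 7;
        let mem = VMMemoryDefinition {
            base: (&mut word as *mut u64).cast(),
            current_length: 8,
        };
        unsafe {
            assert_eq!(memory32_atomic_check64(&mem, 0, 7).unwrap(), 0);
            assert_eq!(memory32_atomic_check64(&mem, 0, 8).unwrap(), 1);
            // Offset 4 is 4-byte but not 8-byte aligned.
            assert!(memory32_atomic_check64(&mem, 4, 7).is_err());
        }
    }
}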

/// The fields compiled code needs to access to utilize a WebAssembly
/// table defined within the instance.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table data.
    pub base: *mut u8,

    /// The current number of elements in the table.
    pub current_elements: u32,
}

#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmtable_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableDefinition>(),
            usize::from(offsets.size_of_vmtable_definition())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, base),
            usize::from(offsets.vmtable_definition_base())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(offsets.vmtable_definition_current_elements())
        );
    }
}

/// The storage for a WebAssembly global defined within the instance.
///
/// The 16-byte alignment lets the `RawValue` payload hold any value type,
/// including 128-bit values.
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    /// The current value of the global.
    pub val: RawValue,
}

#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{VMFuncRef, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmglobal_definition_alignment() {
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<VMFuncRef>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
    }

    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<*const VMGlobalDefinition>(),
            usize::from(offsets.size_of_vmglobal_local())
        );
    }

    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }
}

impl VMGlobalDefinition {
    /// Construct a `VMGlobalDefinition` with a zeroed value.
    pub fn new() -> Self {
        Self {
            val: Default::default(),
        }
    }
}
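
#[cfg(test)]
mod example_vmglobal_definition {
    use super::VMGlobalDefinition;
    use wasmer_types::RawValue;

    // A minimal sketch of reading and writing the untyped payload via the
    // `u32` union field of `RawValue`. Reading it back is `unsafe` because
    // nothing here records which variant was last written; in the VM that
    // information comes from the global's declared type.
    #[test]
    fn store_and_read_raw_value() {
        let mut global = VMGlobalDefinition::new();
        global.val = RawValue { u32: 5 };
        assert_eq!(unsafe { global.val.u32 }, 5);
    }
}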

/// An index into the shared signature registry, usable for checking
/// signatures at indirect calls.
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);

#[cfg(test)]
mod test_vmshared_signature_index {
    use super::VMSharedSignatureIndex;
    use std::mem::size_of;
    use wasmer_types::{ModuleInfo, TargetSharedSignatureIndex, VMOffsets};

    #[test]
    fn check_vmshared_signature_index() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            usize::from(offsets.size_of_vmshared_signature_index())
        );
    }

    #[test]
    fn check_target_shared_signature_index() {
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            size_of::<TargetSharedSignatureIndex>()
        );
    }
}

impl VMSharedSignatureIndex {
    /// Create a new `VMSharedSignatureIndex`.
    pub fn new(value: u32) -> Self {
        Self(value)
    }
}

impl Default for VMSharedSignatureIndex {
    fn default() -> Self {
        Self::new(u32::MAX)
    }
}
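
#[cfg(test)]
mod example_vmshared_signature_index {
    use super::VMSharedSignatureIndex;

    // A minimal sketch: the default index is the `u32::MAX` sentinel,
    // which is not expected to match any registered signature, so an
    // uninitialized slot fails the indirect-call signature check.
    #[test]
    fn default_is_sentinel() {
        assert_eq!(
            VMSharedSignatureIndex::default(),
            VMSharedSignatureIndex::new(u32::MAX)
        );
    }
}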

/// The VM caller-checked "anyfunc" record, used for caller-side signature
/// checking at indirect calls: a function pointer, a type id to be checked
/// by the caller, the context pointer, and a call trampoline.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Pointer to the function body.
    pub func_ptr: *const VMFunctionBody,
    /// The function signature's id in the shared signature registry.
    pub type_index: VMSharedSignatureIndex,
    /// The function's `VMContext` or host environment.
    pub vmctx: VMFunctionContext,
    /// Address of the function call trampoline, so the function can be
    /// called through a uniform interface.
    pub call_trampoline: VMTrampoline,
}

#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMCallerCheckedAnyfunc>(),
            usize::from(offsets.size_of_vmcaller_checked_anyfunc())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
            usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, type_index),
            usize::from(offsets.vmcaller_checked_anyfunc_type_index())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, vmctx),
            usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
        );
    }
}

/// An array holding the addresses of the VM's builtin (libcall)
/// functions, indexed by `VMBuiltinFunctionIndex`.
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    ptrs: [usize; Self::len()],
}

impl VMBuiltinFunctionsArray {
    /// The total number of builtin functions.
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Build the array, filling every slot with the address of the
    /// corresponding libcall.
    pub fn initialized() -> Self {
        use crate::libcalls::*;

        let mut ptrs = [0; Self::len()];

        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_vm_memory32_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_vm_imported_memory32_grow as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_vm_memory32_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_vm_imported_memory32_size as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_vm_table_copy as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_vm_table_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_vm_elem_drop as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
            wasmer_vm_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_vm_imported_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_vm_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_vm_imported_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_vm_memory32_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_vm_data_drop as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_vm_raise_trap as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_size_index().index() as usize] =
            wasmer_vm_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_size_index().index() as usize] =
            wasmer_vm_imported_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_grow_index().index() as usize] =
            wasmer_vm_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_grow_index().index() as usize] =
            wasmer_vm_imported_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_get_index().index() as usize] =
            wasmer_vm_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_get_index().index() as usize] =
            wasmer_vm_imported_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_set_index().index() as usize] =
            wasmer_vm_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_set_index().index() as usize] =
            wasmer_vm_imported_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_func_ref_index().index() as usize] =
            wasmer_vm_func_ref as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
            wasmer_vm_table_fill as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_memory32_atomic_notify as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_notify as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_throw_index().index() as usize] =
            wasmer_vm_throw as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_rethrow_index().index() as usize] =
            wasmer_vm_rethrow as usize;

        ptrs[VMBuiltinFunctionIndex::get_imported_alloc_exception_index().index() as usize] =
            wasmer_vm_alloc_exception as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_delete_exception_index().index() as usize] =
            wasmer_vm_delete_exception as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_read_exception_index().index() as usize] =
            wasmer_vm_read_exception as usize;

        ptrs[VMBuiltinFunctionIndex::get_imported_debug_usize_index().index() as usize] =
            wasmer_vm_dbg_usize as usize;

        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));

        Self { ptrs }
    }
}
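
#[cfg(test)]
mod example_vmbuiltin_functions_array {
    use super::VMBuiltinFunctionsArray;

    // A minimal sketch: after `initialized()`, every slot must hold a
    // real libcall address, so compiled code can index into the table
    // without further checks (this mirrors the `debug_assert!` above).
    #[test]
    fn all_builtins_are_populated() {
        let builtins = VMBuiltinFunctionsArray::initialized();
        assert!(builtins.ptrs.iter().all(|&p| p != 0));
    }
}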

/// The VM "context", which is pointed to by the `vmctx` argument that
/// compiled code receives. It carries the per-instance state (globals,
/// memories, tables, imports) laid out according to `VMOffsets`.
///
/// The struct itself is empty: the sizes of those fields are dynamic and
/// cannot be described in Rust's type system, so sufficient memory is
/// allocated at runtime and a `VMContext` pointer must never be
/// dereferenced directly.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}

impl VMContext {
    /// Return a reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    ///
    /// This doesn't work on just any `VMContext`; it must be a `VMContext`
    /// allocated as part of an `Instance`.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *const Instance)
    }

    /// Return a mutable reference to the `Instance` this context belongs to.
    ///
    /// # Safety
    ///
    /// See [`VMContext::instance`].
    #[inline]
    pub(crate) unsafe fn instance_mut(&mut self) -> &mut Instance {
        &mut *((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance)
    }
}

/// A trampoline calls a function of any signature through a uniform
/// interface: it receives the callee's context, the address of the
/// function body, and a buffer holding the arguments, which it overwrites
/// with the results.
pub type VMTrampoline = unsafe extern "C" fn(
    *mut VMContext,        // callee vmctx
    *const VMFunctionBody, // function we're actually calling
    *mut RawValue,         // space for arguments and return values
);
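
#[cfg(test)]
mod example_vmtrampoline {
    use super::{VMContext, VMTrampoline};
    use crate::VMFunctionBody;
    use wasmer_types::RawValue;

    // A hypothetical no-op trampoline, written only to illustrate the
    // expected shape: (vmctx, function body pointer, argument/result
    // buffer). A real trampoline would decode the buffer, call the body,
    // and write the results back into the same buffer.
    unsafe extern "C" fn noop_trampoline(
        _vmctx: *mut VMContext,
        _body: *const VMFunctionBody,
        _args: *mut RawValue,
    ) {
    }

    #[test]
    fn trampoline_signature_typechecks() {
        let _t: VMTrampoline = noop_trampoline;
    }
}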

/// The fields compiled code needs to access to utilize a WebAssembly
/// linear memory defined within the instance: the start address and the
/// current size in bytes.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// The start address of the linear memory.
    pub base: *mut u8,

    /// The current logical size of this linear memory in bytes.
    pub current_length: usize,
}

// Safety: `VMMemoryDefinition` is plain data; synchronizing accesses to
// the memory it describes is the user's responsibility.
unsafe impl Send for VMMemoryDefinition {}
unsafe impl Sync for VMMemoryDefinition {}

#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryDefinition>(),
            usize::from(offsets.size_of_vmmemory_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, base),
            usize::from(offsets.vmmemory_definition_base())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(offsets.vmmemory_definition_current_length())
        );
    }
}