1use crate::global::VMGlobal;
8use crate::instance::Instance;
9use crate::memory::VMMemory;
10use crate::store::InternalStoreHandle;
11use crate::trap::{Trap, TrapCode};
12use crate::VMFunctionBody;
13use crate::VMTable;
14use crate::{VMBuiltinFunctionIndex, VMFunction};
15use std::convert::TryFrom;
16use std::ptr::{self, NonNull};
17use std::sync::atomic::{AtomicPtr, Ordering};
18use wasmer_types::RawValue;
19
/// Union representing the first parameter passed when calling a function.
///
/// It is either a pointer to the [`VMContext`] (for a Wasm function) or an
/// opaque host-environment pointer (for a host function). Both variants are
/// thin raw pointers with identical layout, which is what makes the manual
/// `PartialEq`/`Hash` impls below sound. `Eq` can be derived because the
/// manual equality is a plain reflexive address comparison.
#[derive(Copy, Clone, Eq)]
#[repr(C)]
pub union VMFunctionContext {
    /// Wasm side: raw pointer to the `VMContext` of the calling instance.
    pub vmctx: *mut VMContext,
    /// Host side: opaque pointer to a host-controlled environment.
    pub host_env: *mut std::ffi::c_void,
}
32
impl VMFunctionContext {
    /// Check whether the stored pointer is null.
    ///
    /// Both union fields share one pointer-sized representation, so reading
    /// `host_env` answers the question regardless of which variant was written.
    pub fn is_null(&self) -> bool {
        unsafe { self.host_env.is_null() }
    }
}
39
40impl std::fmt::Debug for VMFunctionContext {
41 fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
42 f.debug_struct("VMFunctionContext")
43 .field("vmctx_or_hostenv", unsafe { &self.host_env })
44 .finish()
45 }
46}
47
48impl std::cmp::PartialEq for VMFunctionContext {
49 fn eq(&self, rhs: &Self) -> bool {
50 unsafe { self.host_env as usize == rhs.host_env as usize }
51 }
52}
53
54impl std::hash::Hash for VMFunctionContext {
55 fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
56 unsafe {
57 self.vmctx.hash(state);
58 }
59 }
60}
61
/// An imported function's resolved data.
///
/// `#[repr(C)]`: field offsets are pinned by `VMOffsets` (see the test module
/// below), so the field order must not change.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// A pointer to the imported function body.
    pub body: *const VMFunctionBody,

    /// The context (wasm `vmctx` or host env) to pass as the function's
    /// first argument.
    pub environment: VMFunctionContext,

    /// Handle to the `VMFunction` in the context's store.
    pub handle: InternalStoreHandle<VMFunction>,
}
75
#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;
    use wasmer_types::VMOffsets;

    /// `VMOffsets` must agree with Rust's actual layout of
    /// `VMFunctionImport`: generated code reads it by raw offset.
    #[test]
    fn check_vmfunction_import_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmfunction_import()),
            size_of::<VMFunctionImport>()
        );
        assert_eq!(
            usize::from(offs.vmfunction_import_body()),
            offset_of!(VMFunctionImport, body)
        );
        assert_eq!(
            usize::from(offs.vmfunction_import_vmctx()),
            offset_of!(VMFunctionImport, environment)
        );
    }
}
102
/// The context passed to dynamic functions when they are called.
///
/// `#[repr(C)]`: field offsets are pinned by `VMOffsets` (see the test module
/// below), so the field order must not change.
#[repr(C)]
pub struct VMDynamicFunctionContext<T> {
    /// Pointer to the function body invoked for this dynamic function.
    pub address: *const VMFunctionBody,

    /// Host-defined context value attached to the function.
    pub ctx: T,
}
122
// SAFETY: on top of `T`, this struct only adds a raw code pointer that is
// read, never mutated through a shared reference, so thread-safety is
// inherited from `T: Send + Sync`.
// NOTE(review): soundness also assumes `address` points at immutable function
// code for the life of the context — confirm at the construction sites.
unsafe impl<T: Sized + Send + Sync> Send for VMDynamicFunctionContext<T> {}
unsafe impl<T: Sized + Send + Sync> Sync for VMDynamicFunctionContext<T> {}
129
130impl<T: Sized + Clone + Send + Sync> Clone for VMDynamicFunctionContext<T> {
131 fn clone(&self) -> Self {
132 Self {
133 address: self.address,
134 ctx: self.ctx.clone(),
135 }
136 }
137}
138
#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// The computed offsets must match Rust's layout of
    /// `VMDynamicFunctionContext`, which generated code accesses directly.
    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmdynamicfunction_import_context()),
            size_of::<VMDynamicFunctionContext<usize>>()
        );
        assert_eq!(
            usize::from(offs.vmdynamicfunction_import_context_address()),
            offset_of!(VMDynamicFunctionContext<usize>, address)
        );
        assert_eq!(
            usize::from(offs.vmdynamicfunction_import_context_ctx()),
            offset_of!(VMDynamicFunctionContext<usize>, ctx)
        );
    }
}
165
/// The kind of a function in the VM.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A function whose native signature is fixed at compile time and which
    /// is called directly with typed arguments.
    Static,

    /// A function that receives its arguments indirectly via a
    /// `VMDynamicFunctionContext` and handles types at run time.
    Dynamic,
}
185
/// An imported table's resolved data.
///
/// `#[repr(C)]`: the `definition` offset is pinned by `VMOffsets` (see the
/// test module below).
#[derive(Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// Pointer to the imported table's definition (base/element count).
    pub definition: NonNull<VMTableDefinition>,

    /// Handle to the `VMTable` in the context's store.
    pub handle: InternalStoreHandle<VMTable>,
}
197
#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// `VMOffsets` must agree with Rust's layout of `VMTableImport`.
    #[test]
    fn check_vmtable_import_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmtable_import()),
            size_of::<VMTableImport>()
        );
        assert_eq!(
            usize::from(offs.vmtable_import_definition()),
            offset_of!(VMTableImport, definition)
        );
    }
}
220
/// An imported linear memory's resolved data.
///
/// `#[repr(C)]`: field offsets are pinned by `VMOffsets` (see the test module
/// below).
#[derive(Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// Pointer to the imported memory's definition (base/current length).
    pub definition: NonNull<VMMemoryDefinition>,

    /// Handle to the `VMMemory` in the context's store.
    pub handle: InternalStoreHandle<VMMemory>,
}
232
#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// `VMOffsets` must agree with Rust's layout of `VMMemoryImport`.
    #[test]
    fn check_vmmemory_import_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmmemory_import()),
            size_of::<VMMemoryImport>()
        );
        assert_eq!(
            usize::from(offs.vmmemory_import_definition()),
            offset_of!(VMMemoryImport, definition)
        );
        assert_eq!(
            usize::from(offs.vmmemory_import_handle()),
            offset_of!(VMMemoryImport, handle)
        );
    }
}
259
/// An imported global's resolved data.
///
/// `#[repr(C)]`: the `definition` offset is pinned by `VMOffsets` (see the
/// test module below).
#[derive(Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// Pointer to the imported global's value storage.
    pub definition: NonNull<VMGlobalDefinition>,

    /// Handle to the `VMGlobal` in the context's store.
    pub handle: InternalStoreHandle<VMGlobal>,
}
271
// SAFETY: `VMGlobalImport` is a pointer plus a store handle; neither is
// mutated through a shared reference by this type itself.
// NOTE(review): soundness relies on the store synchronizing all access to the
// pointed-to `VMGlobalDefinition` — confirm that invariant at the use sites.
unsafe impl Send for VMGlobalImport {}
unsafe impl Sync for VMGlobalImport {}
283
#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// `VMOffsets` must agree with Rust's layout of `VMGlobalImport`.
    #[test]
    fn check_vmglobal_import_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmglobal_import()),
            size_of::<VMGlobalImport>()
        );
        assert_eq!(
            usize::from(offs.vmglobal_import_definition()),
            offset_of!(VMGlobalImport, definition)
        );
    }
}
306
/// Perform the `memory.copy` operation: copy `len` bytes within the linear
/// memory `mem`, from offset `src` to offset `dst`.
///
/// # Errors
///
/// Returns a `Trap` with `HeapAccessOutOfBounds` when either the source or
/// the destination range (offset + len) extends past `mem.current_length`,
/// or when `offset + len` overflows a `u32`.
///
/// # Safety
///
/// `mem.base` must point to a valid allocation of at least
/// `mem.current_length` bytes, and no other thread may concurrently mutate
/// the copied ranges.
pub(crate) unsafe fn memory_copy(
    mem: &VMMemoryDefinition,
    dst: u32,
    src: u32,
    len: u32,
) -> Result<(), Trap> {
    // `checked_add` guards the u32 overflow case; `map_or(true, ...)` treats
    // overflow itself as out of bounds.
    if src
        .checked_add(len)
        .map_or(true, |n| usize::try_from(n).unwrap() > mem.current_length)
        || dst
            .checked_add(len)
            .map_or(true, |m| usize::try_from(m).unwrap() > mem.current_length)
    {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    let dst = usize::try_from(dst).unwrap();
    let src = usize::try_from(src).unwrap();

    // Bounds verified above, so the pointer arithmetic stays inside the
    // linear memory. `ptr::copy` has memmove semantics: the source and
    // destination ranges are allowed to overlap.
    let dst = mem.base.add(dst);
    let src = mem.base.add(src);
    ptr::copy(src, dst, len as usize);

    Ok(())
}
345
/// Perform the `memory.fill` operation: write `len` copies of the low byte
/// of `val` to the linear memory `mem` starting at offset `dst`.
///
/// # Errors
///
/// Returns a `Trap` with `HeapAccessOutOfBounds` when `dst + len` extends
/// past `mem.current_length` or overflows a `u32`.
///
/// # Safety
///
/// `mem.base` must point to a valid allocation of at least
/// `mem.current_length` bytes, and no other thread may concurrently access
/// the filled range.
pub(crate) unsafe fn memory_fill(
    mem: &VMMemoryDefinition,
    dst: u32,
    val: u32,
    len: u32,
) -> Result<(), Trap> {
    // Overflow of `dst + len` is folded into the out-of-bounds case.
    if dst
        .checked_add(len)
        .map_or(true, |m| usize::try_from(m).unwrap() > mem.current_length)
    {
        return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
    }

    // NOTE(review): on 16/32-bit hosts `isize::try_from(dst)` can panic for
    // offsets above `isize::MAX` instead of trapping — this assumes a 64-bit
    // host; confirm the supported targets.
    let dst = isize::try_from(dst).unwrap();
    // Only the low byte of `val` is used, matching the Wasm semantics.
    let val = val as u8;

    // Bounds verified above, so the write stays inside the linear memory.
    let dst = mem.base.offset(dst);
    ptr::write_bytes(dst, val, len as usize);

    Ok(())
}
379
380pub(crate) unsafe fn memory32_atomic_check32(
389 mem: &VMMemoryDefinition,
390 dst: u32,
391 val: u32,
392) -> Result<u32, Trap> {
393 if usize::try_from(dst).unwrap() > mem.current_length {
394 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
395 }
396
397 let dst = isize::try_from(dst).unwrap();
398 if dst & 0b11 != 0 {
399 return Err(Trap::lib(TrapCode::UnalignedAtomic));
400 }
401
402 let dst = mem.base.offset(dst) as *mut u32;
405 let atomic_dst = AtomicPtr::new(dst);
406 let read_val = *atomic_dst.load(Ordering::Acquire);
407 let ret = if read_val == val { 0 } else { 1 };
408 Ok(ret)
409}
410
411pub(crate) unsafe fn memory32_atomic_check64(
420 mem: &VMMemoryDefinition,
421 dst: u32,
422 val: u64,
423) -> Result<u32, Trap> {
424 if usize::try_from(dst).unwrap() > mem.current_length {
425 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
426 }
427
428 let dst = isize::try_from(dst).unwrap();
429 if dst & 0b111 != 0 {
430 return Err(Trap::lib(TrapCode::UnalignedAtomic));
431 }
432
433 let dst = mem.base.offset(dst) as *mut u64;
436 let atomic_dst = AtomicPtr::new(dst);
437 let read_val = *atomic_dst.load(Ordering::Acquire);
438 let ret = if read_val == val { 0 } else { 1 };
439 Ok(ret)
440}
441
/// The fields compiled code needs to access a table defined within the
/// instance.
///
/// `#[repr(C)]`: field offsets are pinned by `VMOffsets` (see the test module
/// below), so the field order must not change.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Pointer to the table's element storage.
    pub base: *mut u8,

    /// The current number of elements in the table.
    pub current_elements: u32,
}
453
#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// `VMOffsets` must agree with Rust's layout of `VMTableDefinition`.
    #[test]
    fn check_vmtable_definition_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmtable_definition()),
            size_of::<VMTableDefinition>()
        );
        assert_eq!(
            usize::from(offs.vmtable_definition_base()),
            offset_of!(VMTableDefinition, base)
        );
        assert_eq!(
            usize::from(offs.vmtable_definition_current_elements()),
            offset_of!(VMTableDefinition, current_elements)
        );
    }
}
480
/// The storage for a WebAssembly global defined within the instance.
///
/// 16-byte alignment so that any value representable in a `RawValue`
/// (including 128-bit values) can be stored here; the alignment is asserted
/// by the test module below.
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    /// The raw value of the global.
    pub val: RawValue,
}
491
#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{VMFuncRef, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};
    use wasmer_types::ModuleInfo;

    /// The global slot must be aligned enough to hold any storable value
    /// type, up to 16-byte (v128) payloads.
    #[test]
    fn check_vmglobal_definition_alignment() {
        let slot_align = align_of::<VMGlobalDefinition>();
        assert_ge!(slot_align, align_of::<i32>());
        assert_ge!(slot_align, align_of::<i64>());
        assert_ge!(slot_align, align_of::<f32>());
        assert_ge!(slot_align, align_of::<f64>());
        assert_ge!(slot_align, align_of::<VMFuncRef>());
        assert_ge!(slot_align, align_of::<[u8; 16]>());
    }

    /// Locally-defined globals are stored behind a pointer in the vmctx.
    #[test]
    fn check_vmglobal_definition_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmglobal_local()),
            size_of::<*const VMGlobalDefinition>()
        );
    }

    /// The globals area inside the vmctx must start 16-byte aligned.
    #[test]
    fn check_vmglobal_begins_aligned() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(offs.vmctx_globals_begin() % 16, 0);
    }
}
527
528impl VMGlobalDefinition {
529 pub fn new() -> Self {
531 Self {
532 val: Default::default(),
533 }
534 }
535}
536
/// An index into the shared signature registry, usable for checking
/// signatures at indirect calls.
///
/// `#[repr(C)]`: read by generated code; its size is asserted against
/// `VMOffsets` in the test module below.
#[repr(C)]
#[cfg_attr(feature = "artifact-size", derive(loupe::MemoryUsage))]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);
543
#[cfg(test)]
mod test_vmshared_signature_index {
    use super::VMSharedSignatureIndex;
    use std::mem::size_of;
    use wasmer_types::{ModuleInfo, TargetSharedSignatureIndex, VMOffsets};

    /// The size recorded in `VMOffsets` must match the Rust type.
    #[test]
    fn check_vmshared_signature_index() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmshared_signature_index()),
            size_of::<VMSharedSignatureIndex>()
        );
    }

    /// Host- and target-side signature indices must share one size.
    #[test]
    fn check_target_shared_signature_index() {
        assert_eq!(
            size_of::<TargetSharedSignatureIndex>(),
            size_of::<VMSharedSignatureIndex>()
        );
    }
}
568
569impl VMSharedSignatureIndex {
570 pub fn new(value: u32) -> Self {
572 Self(value)
573 }
574}
575
576impl Default for VMSharedSignatureIndex {
577 fn default() -> Self {
578 Self::new(u32::MAX)
579 }
580}
581
582#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq)]
586#[repr(C)]
587pub struct VMCallerCheckedAnyfunc {
588 pub func_ptr: *const VMFunctionBody,
590 pub type_index: VMSharedSignatureIndex,
592 pub vmctx: VMFunctionContext,
594 pub call_trampoline: VMTrampoline,
597 }
599
#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// `VMOffsets` must agree with Rust's layout of `VMCallerCheckedAnyfunc`.
    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmcaller_checked_anyfunc()),
            size_of::<VMCallerCheckedAnyfunc>()
        );
        assert_eq!(
            usize::from(offs.vmcaller_checked_anyfunc_func_ptr()),
            offset_of!(VMCallerCheckedAnyfunc, func_ptr)
        );
        assert_eq!(
            usize::from(offs.vmcaller_checked_anyfunc_type_index()),
            offset_of!(VMCallerCheckedAnyfunc, type_index)
        );
        assert_eq!(
            usize::from(offs.vmcaller_checked_anyfunc_vmctx()),
            offset_of!(VMCallerCheckedAnyfunc, vmctx)
        );
    }
}
630
/// An array holding the addresses of the VM's builtin (libcall) functions,
/// indexed by `VMBuiltinFunctionIndex`.
///
/// `#[repr(C)]` — presumably read directly by generated code; confirm
/// against the compiler's vmctx layout.
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    // One entry per builtin; each is a function address stored as `usize`.
    ptrs: [usize; Self::len()],
}
637
impl VMBuiltinFunctionsArray {
    /// Total number of builtin-function slots.
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Build the array with every slot filled with the address of the
    /// corresponding libcall from `crate::libcalls`.
    pub fn initialized() -> Self {
        use crate::libcalls::*;

        // Start zeroed; the debug assertion at the end verifies that every
        // slot was assigned (no builtin left at address 0).
        let mut ptrs = [0; Self::len()];

        // Memory grow/size builtins (local and imported variants).
        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_vm_memory32_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_vm_imported_memory32_grow as usize;

        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_vm_memory32_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_vm_imported_memory32_size as usize;

        // Table and element-segment builtins.
        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_vm_table_copy as usize;

        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_vm_table_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_vm_elem_drop as usize;

        // Bulk-memory builtins and trap raising.
        ptrs[VMBuiltinFunctionIndex::get_memory_copy_index().index() as usize] =
            wasmer_vm_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_vm_imported_memory32_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_vm_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_vm_imported_memory32_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_vm_memory32_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_vm_data_drop as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_vm_raise_trap as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_size_index().index() as usize] =
            wasmer_vm_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_size_index().index() as usize] =
            wasmer_vm_imported_table_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_grow_index().index() as usize] =
            wasmer_vm_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_grow_index().index() as usize] =
            wasmer_vm_imported_table_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_get_index().index() as usize] =
            wasmer_vm_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_get_index().index() as usize] =
            wasmer_vm_imported_table_get as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_set_index().index() as usize] =
            wasmer_vm_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_table_set_index().index() as usize] =
            wasmer_vm_imported_table_set as usize;
        ptrs[VMBuiltinFunctionIndex::get_func_ref_index().index() as usize] =
            wasmer_vm_func_ref as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_fill_index().index() as usize] =
            wasmer_vm_table_fill as usize;

        // Threads-proposal atomic wait/notify builtins.
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait32_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait32 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_wait64_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_wait64 as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_memory32_atomic_notify as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_atomic_notify_index().index() as usize] =
            wasmer_vm_imported_memory32_atomic_notify as usize;

        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));

        Self { ptrs }
    }
}
719
/// The VM "context": the opaque value passed as `vmctx` to compiled code.
///
/// The struct itself is empty — it is a zero-sized marker placed at a fixed
/// offset (`Instance::vmctx_offset()`) inside the `Instance` allocation, and
/// the runtime data that follows it has a dynamic, compiler-defined layout
/// that cannot be described in Rust's type system. The 16-byte alignment
/// matches the alignment asserted for the trailing globals area.
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}
732
impl VMContext {
    /// Return a reference to the `Instance` this context is embedded in.
    ///
    /// # Safety
    /// `self` must actually live at `Instance::vmctx_offset()` bytes inside a
    /// live `Instance` allocation; the negative offset walks back to the
    /// start of that `Instance`.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *const Instance)
    }

    /// Mutable variant of [`VMContext::instance`]; same safety requirements,
    /// plus the usual exclusivity guarantee of `&mut self`.
    #[inline]
    pub(crate) unsafe fn instance_mut(&mut self) -> &mut Instance {
        &mut *((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *mut Instance)
    }
}
750
751pub type VMTrampoline = unsafe extern "C" fn(
753 *mut VMContext, *const VMFunctionBody, *mut RawValue, );
757
/// The fields compiled code needs to access a linear memory defined within
/// the instance.
///
/// `#[repr(C)]`: field offsets are pinned by `VMOffsets` (see the test module
/// below), so the field order must not change.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// Pointer to the start of the linear memory's bytes.
    pub base: *mut u8,

    /// The current length of the linear memory in bytes; used as the bound
    /// in `memory_copy`/`memory_fill` above.
    pub current_length: usize,
}
770
// SAFETY: `VMMemoryDefinition` is plain-old-data (a pointer and a length);
// the struct itself performs no interior mutation.
// NOTE(review): soundness relies on the memory object synchronizing growth
// and access to the pointed-to bytes — confirm at the `VMMemory` use sites.
unsafe impl Send for VMMemoryDefinition {}
unsafe impl Sync for VMMemoryDefinition {}
781
#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::VMOffsets;
    use memoffset::offset_of;
    use std::mem::size_of;
    use wasmer_types::ModuleInfo;

    /// `VMOffsets` must agree with Rust's layout of `VMMemoryDefinition`.
    #[test]
    fn check_vmmemory_definition_offsets() {
        let module_info = ModuleInfo::new();
        let offs = VMOffsets::new(size_of::<*mut u8>() as u8, &module_info);
        assert_eq!(
            usize::from(offs.size_of_vmmemory_definition()),
            size_of::<VMMemoryDefinition>()
        );
        assert_eq!(
            usize::from(offs.vmmemory_definition_base()),
            offset_of!(VMMemoryDefinition, base)
        );
        assert_eq!(
            usize::from(offs.vmmemory_definition_current_length()),
            offset_of!(VMMemoryDefinition, current_length)
        );
    }
}