use crate::export::Export;
use crate::memory::{Memory, RuntimeMemoryCreator};
use crate::table::{Table, TableElement, TableElementType};
use crate::vmcontext::{
    VMBuiltinFunctionsArray, VMContext, VMFuncRef, VMFunctionImport, VMGlobalDefinition,
    VMGlobalImport, VMMemoryDefinition, VMMemoryImport, VMOpaqueContext, VMRuntimeLimits,
    VMTableDefinition, VMTableImport,
};
use crate::{
    ExportFunction, ExportGlobal, ExportMemory, ExportTable, GcStore, Imports, ModuleRuntimeInfo,
    SendSyncPtr, Store, VMFunctionBody, VMGcRef, VMSharedTypeIndex, WasmFault, I31,
};
use anyhow::Error;
use anyhow::Result;
use sptr::Strict;
use std::alloc::{self, Layout};
use std::any::Any;
use std::ops::Range;
use std::ptr::NonNull;
use std::sync::atomic::AtomicU64;
use std::sync::Arc;
use std::{mem, ptr};
use wasmtime_environ::ModuleInternedTypeIndex;
use wasmtime_environ::{
    packed_option::ReservedValue, DataIndex, DefinedGlobalIndex, DefinedMemoryIndex,
    DefinedTableIndex, ElemIndex, EntityIndex, EntityRef, EntitySet, FuncIndex, GlobalIndex,
    GlobalInit, HostPtr, MemoryIndex, MemoryPlan, Module, PrimaryMap, TableElementExpression,
    TableIndex, TableInitialValue, TableSegmentElements, Trap, VMOffsets, WasmRefType, WasmValType,
    VMCONTEXT_MAGIC,
};
#[cfg(feature = "wmemcheck")]
use wasmtime_wmemcheck::Wmemcheck;

mod allocator;

pub use allocator::*;

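/// A WebAssembly instance as managed by this runtime.
///
/// `Instance` is never allocated on its own: `Instance::new` makes a single
/// allocation that holds this struct immediately followed by the raw
/// `VMContext` data described by the module's `VMOffsets` (see
/// `alloc_layout`). That is why the struct is `#[repr(C)]` and why `vmctx`
/// must stay the last field: compiled code is handed a `*mut VMContext`
/// pointing just past this struct, and `Instance::from_vmctx` recovers the
/// `Instance` by subtracting `size_of::<Instance>()` again.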
#[repr(C)]
pub struct Instance {
    /// The runtime information (module, offsets, trampolines, type ids) this
    /// instance was created from.
    runtime_info: Arc<dyn ModuleRuntimeInfo>,

    /// Memories defined (not imported) by this instance.
    memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,

    /// Tables defined (not imported) by this instance.
    tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,

    /// Passive element segments that have been dropped via `elem.drop`.
    dropped_elements: EntitySet<ElemIndex>,

    /// Passive data segments that have been dropped via `data.drop`.
    dropped_data: EntitySet<DataIndex>,

    /// Arbitrary host state associated with this instance.
    host_state: Box<dyn Any + Send + Sync>,

    /// A pointer to this instance's own vmctx, used by `Instance::vmctx` to
    /// rebuild a `*mut VMContext` with provenance over the whole allocation.
    vmctx_self_reference: SendSyncPtr<VMContext>,

    #[cfg(feature = "wmemcheck")]
    pub(crate) wmemcheck_state: Option<Wmemcheck>,

    /// The raw `VMContext` data follows this field; it must remain last.
    vmctx: VMContext,
}

impl Instance {
    /// Allocates an `Instance` plus its trailing `VMContext` in one block
    /// sized by `req.runtime_info.offsets()`, then initializes the vmctx.
    unsafe fn new(
        req: InstanceAllocationRequest,
        memories: PrimaryMap<DefinedMemoryIndex, (MemoryAllocationIndex, Memory)>,
        tables: PrimaryMap<DefinedTableIndex, (TableAllocationIndex, Table)>,
        memory_plans: &PrimaryMap<MemoryIndex, MemoryPlan>,
    ) -> InstanceHandle {
        let layout = Self::alloc_layout(req.runtime_info.offsets());
        let ptr = alloc::alloc(layout);
        if ptr.is_null() {
            alloc::handle_alloc_error(layout);
        }
        let ptr = ptr.cast::<Instance>();

        let module = req.runtime_info.module();
        let dropped_elements = EntitySet::with_capacity(module.passive_elements.len());
        let dropped_data = EntitySet::with_capacity(module.passive_data_map.len());

        #[cfg(not(feature = "wmemcheck"))]
        let _ = memory_plans;

        ptr::write(
            ptr,
            Instance {
                runtime_info: req.runtime_info.clone(),
                memories,
                tables,
                dropped_elements,
                dropped_data,
                host_state: req.host_state,
                vmctx_self_reference: SendSyncPtr::new(NonNull::new(ptr.add(1).cast()).unwrap()),
                vmctx: VMContext {
                    _marker: std::marker::PhantomPinned,
                },
                #[cfg(feature = "wmemcheck")]
                wmemcheck_state: {
                    if req.wmemcheck {
                        // Size the checker to the first memory's minimum size,
                        // converted from wasm pages to bytes.
                        let size = memory_plans
                            .iter()
                            .next()
                            .map(|plan| plan.1.memory.minimum)
                            .unwrap_or(0)
                            * 64
                            * 1024;
                        Some(Wmemcheck::new(size as usize))
                    } else {
                        None
                    }
                },
            },
        );

        (*ptr).initialize_vmctx(module, req.runtime_info.offsets(), req.store, req.imports);
        InstanceHandle {
            instance: Some(SendSyncPtr::new(NonNull::new(ptr).unwrap())),
        }
    }

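    /// Converts a raw `*mut VMContext` back into a reference to its owning
    /// `Instance` and invokes `f` on it.
    ///
    /// # Safety
    ///
    /// `vmctx` must be a pointer previously produced by `Instance::vmctx`
    /// (or handed to compiled code by this runtime) and the instance must
    /// still be live. A minimal sketch of a caller, assuming a valid `vmctx`
    /// obtained from this runtime:
    ///
    /// ```ignore
    /// unsafe {
    ///     Instance::from_vmctx(vmctx, |instance| {
    ///         // e.g. inspect the module this vmctx belongs to
    ///         let _num_funcs = instance.module().functions.len();
    ///     });
    /// }
    /// ```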
    #[inline]
    pub unsafe fn from_vmctx<R>(vmctx: *mut VMContext, f: impl FnOnce(&mut Instance) -> R) -> R {
        assert!(!vmctx.is_null());
        let ptr = vmctx
            .byte_sub(mem::size_of::<Instance>())
            .cast::<Instance>();
        f(&mut *ptr)
    }

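    /// Returns a pointer `offset` bytes into this instance's `VMContext`.
    /// Offsets are expected to come from this module's `VMOffsets`, so the
    /// result stays within the vmctx allocation.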
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *const T {
        self.vmctx()
            .byte_add(usize::try_from(offset).unwrap())
            .cast()
    }

    unsafe fn vmctx_plus_offset_mut<T>(&mut self, offset: u32) -> *mut T {
        self.vmctx()
            .byte_add(usize::try_from(offset).unwrap())
            .cast()
    }

    pub(crate) fn module(&self) -> &Arc<Module> {
        self.runtime_info.module()
    }

    pub fn engine_type_index(&self, module_index: ModuleInternedTypeIndex) -> VMSharedTypeIndex {
        self.runtime_info.engine_type_index(module_index)
    }

    #[inline]
    fn offsets(&self) -> &VMOffsets<HostPtr> {
        self.runtime_info.offsets()
    }

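    // Accessors for the `VM*Import` structs that instantiation copied into
    // this vmctx (see `initialize_vmctx`). Each returns a reference into the
    // vmctx allocation at the offset recorded in `VMOffsets`.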
    fn imported_function(&self, index: FuncIndex) -> &VMFunctionImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmfunction_import(index)) }
    }

    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmtable_import(index)) }
    }

    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmmemory_import(index)) }
    }

    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        unsafe { &*self.vmctx_plus_offset(self.offsets().vmctx_vmglobal_import(index)) }
    }

    #[allow(dead_code)]
    fn table(&mut self, index: DefinedTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index) }
    }

    fn set_table(&mut self, index: DefinedTableIndex, table: VMTableDefinition) {
        unsafe {
            *self.table_ptr(index) = table;
        }
    }

    fn table_ptr(&mut self, index: DefinedTableIndex) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_vmtable_definition(index)) }
    }

    pub(crate) fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(defined_index) = self.module().defined_memory_index(index) {
            self.memory(defined_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { VMMemoryDefinition::load(import.from) }
        }
    }

    #[cfg(feature = "threads")]
    pub(crate) fn get_runtime_memory(&mut self, index: MemoryIndex) -> &mut Memory {
        if let Some(defined_index) = self.module().defined_memory_index(index) {
            unsafe { &mut *self.get_defined_memory(defined_index) }
        } else {
            let import = self.imported_memory(index);
            unsafe {
                let ptr =
                    Instance::from_vmctx(import.vmctx, |i| i.get_defined_memory(import.index));
                &mut *ptr
            }
        }
    }

    fn memory(&self, index: DefinedMemoryIndex) -> VMMemoryDefinition {
        unsafe { VMMemoryDefinition::load(self.memory_ptr(index)) }
    }

    fn set_memory(&self, index: DefinedMemoryIndex, mem: VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index) = mem;
        }
    }

    fn memory_ptr(&self, index: DefinedMemoryIndex) -> *mut VMMemoryDefinition {
        unsafe { *self.vmctx_plus_offset(self.offsets().vmctx_vmmemory_pointer(index)) }
    }

    pub fn defined_memories<'a>(
        &'a self,
    ) -> impl ExactSizeIterator<Item = (DefinedMemoryIndex, &'a Memory)> + 'a {
        self.memories
            .iter()
            .map(|(index, (_alloc_index, memory))| (index, memory))
    }

    fn global(&mut self, index: DefinedGlobalIndex) -> &VMGlobalDefinition {
        unsafe { &*self.global_ptr(index) }
    }

    fn global_ptr(&mut self, index: DefinedGlobalIndex) -> *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_vmglobal_definition(index)) }
    }

    pub(crate) fn defined_or_imported_global_ptr(
        &mut self,
        index: GlobalIndex,
    ) -> *mut VMGlobalDefinition {
        if let Some(index) = self.module().defined_global_index(index) {
            self.global_ptr(index)
        } else {
            self.imported_global(index).from
        }
    }

    pub fn all_globals<'a>(
        &'a mut self,
    ) -> impl ExactSizeIterator<Item = (GlobalIndex, ExportGlobal)> + 'a {
        let module = self.module().clone();
        module.globals.keys().map(move |idx| {
            (
                idx,
                ExportGlobal {
                    definition: self.defined_or_imported_global_ptr(idx),
                    vmctx: self.vmctx(),
                    global: self.module().globals[idx],
                },
            )
        })
    }

    pub fn defined_globals<'a>(
        &'a mut self,
    ) -> impl ExactSizeIterator<Item = (DefinedGlobalIndex, ExportGlobal)> + 'a {
        let module = self.module().clone();
        module
            .globals
            .keys()
            .skip(module.num_imported_globals)
            .map(move |global_idx| {
                let def_idx = module.defined_global_index(global_idx).unwrap();
                let global = ExportGlobal {
                    definition: self.global_ptr(def_idx),
                    vmctx: self.vmctx(),
                    global: self.module().globals[global_idx],
                };
                (def_idx, global)
            })
    }

    #[inline]
    pub fn runtime_limits(&mut self) -> *mut *const VMRuntimeLimits {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_runtime_limits()) }
    }

    pub fn epoch_ptr(&mut self) -> *mut *const AtomicU64 {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_epoch_ptr()) }
    }

    pub fn gc_heap_base(&mut self) -> *mut *mut u8 {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_gc_heap_base()) }
    }

    pub fn gc_heap_bound(&mut self) -> *mut usize {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_gc_heap_bound()) }
    }

    pub fn gc_heap_data(&mut self) -> *mut *mut u8 {
        unsafe { self.vmctx_plus_offset_mut(self.offsets().vmctx_gc_heap_data()) }
    }

    #[inline]
    pub fn store(&self) -> *mut dyn Store {
        let ptr =
            unsafe { *self.vmctx_plus_offset::<*mut dyn Store>(self.offsets().vmctx_store()) };
        assert!(!ptr.is_null());
        ptr
    }

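    /// Installs (or clears) the `Store` pointer in the vmctx together with
    /// the store-derived pointers that compiled code reads directly: the
    /// runtime limits, the epoch counter, and the GC heap fields.
    ///
    /// # Safety
    ///
    /// The store must own this instance and outlive its use, since raw
    /// pointers into the store are written into the vmctx.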
    pub(crate) unsafe fn set_store(&mut self, store: Option<*mut dyn Store>) {
        if let Some(store) = store {
            *self.vmctx_plus_offset_mut(self.offsets().vmctx_store()) = store;
            *self.runtime_limits() = (*store).vmruntime_limits();
            *self.epoch_ptr() = (*store).epoch_ptr();
            self.set_gc_heap((*store).maybe_gc_store());
        } else {
            // The null fat pointer is written out by hand as two null words,
            // so double-check the layout assumption first.
            assert_eq!(
                mem::size_of::<*mut dyn Store>(),
                mem::size_of::<[*mut (); 2]>()
            );
            *self.vmctx_plus_offset_mut::<[*mut (); 2]>(self.offsets().vmctx_store()) =
                [ptr::null_mut(), ptr::null_mut()];
            *self.runtime_limits() = ptr::null_mut();
            *self.epoch_ptr() = ptr::null_mut();
            self.set_gc_heap(None);
        }
    }

    unsafe fn set_gc_heap(&mut self, gc_store: Option<&mut GcStore>) {
        if let Some(gc_store) = gc_store {
            *self.gc_heap_base() = gc_store.gc_heap.vmctx_gc_heap_base();
            *self.gc_heap_bound() = gc_store.gc_heap.vmctx_gc_heap_bound();
            *self.gc_heap_data() = gc_store.gc_heap.vmctx_gc_heap_data();
        } else {
            *self.gc_heap_base() = ptr::null_mut();
            *self.gc_heap_bound() = 0;
            *self.gc_heap_data() = ptr::null_mut();
        }
    }

    pub(crate) unsafe fn set_callee(&mut self, callee: Option<NonNull<VMFunctionBody>>) {
        *self.vmctx_plus_offset_mut(self.offsets().vmctx_callee()) =
            callee.map_or(ptr::null_mut(), |c| c.as_ptr());
    }

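    /// Returns the `*mut VMContext` for this instance.
    ///
    /// The pointer is rebuilt by combining the provenance of
    /// `vmctx_self_reference` with the current address of the trailing
    /// `vmctx` field, so the returned pointer is valid for the entire vmctx
    /// allocation rather than just the zero-sized field it points at.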
    #[inline]
    pub fn vmctx(&self) -> *mut VMContext {
        let addr = std::ptr::addr_of!(self.vmctx);
        Strict::with_addr(self.vmctx_self_reference.as_ptr(), Strict::addr(addr))
    }

    fn get_exported_func(&mut self, index: FuncIndex) -> ExportFunction {
        let func_ref = self.get_func_ref(index).unwrap();
        let func_ref = NonNull::new(func_ref as *const VMFuncRef as *mut _).unwrap();
        ExportFunction { func_ref }
    }

    fn get_exported_table(&mut self, index: TableIndex) -> ExportTable {
        let (definition, vmctx) = if let Some(def_index) = self.module().defined_table_index(index)
        {
            (self.table_ptr(def_index), self.vmctx())
        } else {
            let import = self.imported_table(index);
            (import.from, import.vmctx)
        };
        ExportTable {
            definition,
            vmctx,
            table: self.module().table_plans[index].clone(),
        }
    }

    fn get_exported_memory(&mut self, index: MemoryIndex) -> ExportMemory {
        let (definition, vmctx, def_index) =
            if let Some(def_index) = self.module().defined_memory_index(index) {
                (self.memory_ptr(def_index), self.vmctx(), def_index)
            } else {
                let import = self.imported_memory(index);
                (import.from, import.vmctx, import.index)
            };
        ExportMemory {
            definition,
            vmctx,
            memory: self.module().memory_plans[index].clone(),
            index: def_index,
        }
    }

    fn get_exported_global(&mut self, index: GlobalIndex) -> ExportGlobal {
        ExportGlobal {
            definition: if let Some(def_index) = self.module().defined_global_index(index) {
                self.global_ptr(def_index)
            } else {
                self.imported_global(index).from
            },
            vmctx: self.vmctx(),
            global: self.module().globals[index],
        }
    }

    pub fn exports(&self) -> indexmap::map::Iter<String, EntityIndex> {
        self.module().exports.iter()
    }

    #[inline]
    pub fn host_state(&self) -> &dyn Any {
        &*self.host_state
    }

    pub unsafe fn table_index(&mut self, table: &VMTableDefinition) -> DefinedTableIndex {
        let index = DefinedTableIndex::new(
            usize::try_from(
                (table as *const VMTableDefinition)
                    .offset_from(self.table_ptr(DefinedTableIndex::new(0))),
            )
            .unwrap(),
        );
        assert!(index.index() < self.tables.len());
        index
    }

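    /// Grows the memory at `index` by `delta` pages.
    ///
    /// Returns `Ok(Some(old_size))` on success, `Ok(None)` if growth was
    /// refused by a limiter or the memory's maximum, and `Err` if the
    /// underlying allocation failed. Imported memories are grown by
    /// delegating to the instance that defines them so that its cached
    /// `VMMemoryDefinition` is the one that gets updated.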
    pub(crate) fn memory_grow(
        &mut self,
        index: MemoryIndex,
        delta: u64,
    ) -> Result<Option<usize>, Error> {
        match self.module().defined_memory_index(index) {
            Some(idx) => self.defined_memory_grow(idx, delta),
            None => {
                let import = self.imported_memory(index);
                unsafe {
                    Instance::from_vmctx(import.vmctx, |i| {
                        i.defined_memory_grow(import.index, delta)
                    })
                }
            }
        }
    }

    fn defined_memory_grow(
        &mut self,
        idx: DefinedMemoryIndex,
        delta: u64,
    ) -> Result<Option<usize>, Error> {
        let store = unsafe { &mut *self.store() };
        let memory = &mut self.memories[idx].1;

        let result = unsafe { memory.grow(delta, Some(store)) };

        // Non-shared memories keep a copy of their `VMMemoryDefinition` in
        // the vmctx which must be refreshed after a grow; shared memories
        // own their definition behind a pointer, so no update is needed.
        if memory.as_shared_memory().is_none() {
            let vmmemory = memory.vmmemory();
            self.set_memory(idx, vmmemory);
        }

        result
    }

    pub(crate) fn table_element_type(&mut self, table_index: TableIndex) -> TableElementType {
        unsafe { (*self.get_table(table_index)).element_type() }
    }

    pub(crate) fn table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Result<Option<u32>, Error> {
        self.with_defined_table_index_and_instance(table_index, |i, instance| {
            instance.defined_table_grow(i, delta, init_value)
        })
    }

    fn defined_table_grow(
        &mut self,
        table_index: DefinedTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Result<Option<u32>, Error> {
        let store = unsafe { &mut *self.store() };
        let table = &mut self
            .tables
            .get_mut(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()))
            .1;

        let result = unsafe { table.grow(delta, init_value, store) };

        // Keep the cached `VMTableDefinition` in the vmctx in sync with the
        // table's (possibly reallocated) storage.
        let element = self.tables[table_index].1.vmtable();
        self.set_table(table_index, element);

        result
    }

    fn alloc_layout(offsets: &VMOffsets<HostPtr>) -> Layout {
        let size = mem::size_of::<Self>()
            .checked_add(usize::try_from(offsets.size_of_vmctx()).unwrap())
            .unwrap();
        let align = mem::align_of::<Self>();
        Layout::from_size_align(size, align).unwrap()
    }

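    /// Fills `into` with a `VMFuncRef` describing function `index`.
    ///
    /// For locally defined functions the trampolines and wasm entry point
    /// come from this instance's `ModuleRuntimeInfo`; for imported functions
    /// they are copied from the corresponding `VMFunctionImport`. The
    /// module-local signature `sig` is translated to the engine-wide
    /// `VMSharedTypeIndex` through the type-ids array stored in the vmctx.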
    fn construct_func_ref(
        &mut self,
        index: FuncIndex,
        sig: ModuleInternedTypeIndex,
        into: *mut VMFuncRef,
    ) {
        let type_index = unsafe {
            let base: *const VMSharedTypeIndex =
                *self.vmctx_plus_offset_mut(self.offsets().vmctx_type_ids_array());
            *base.add(sig.index())
        };

        let func_ref = if let Some(def_index) = self.module().defined_func_index(index) {
            VMFuncRef {
                native_call: self
                    .runtime_info
                    .native_to_wasm_trampoline(def_index)
                    .expect("should have native-to-Wasm trampoline for escaping function"),
                array_call: self
                    .runtime_info
                    .array_to_wasm_trampoline(def_index)
                    .expect("should have array-to-Wasm trampoline for escaping function"),
                wasm_call: Some(self.runtime_info.function(def_index)),
                vmctx: VMOpaqueContext::from_vmcontext(self.vmctx()),
                type_index,
            }
        } else {
            let import = self.imported_function(index);
            VMFuncRef {
                native_call: import.native_call,
                array_call: import.array_call,
                wasm_call: Some(import.wasm_call),
                vmctx: import.vmctx,
                type_index,
            }
        };

        unsafe {
            std::ptr::write(into, func_ref);
        }
    }

    /// Returns a pointer to the `VMFuncRef` for function `index`, writing a
    /// freshly constructed value into its vmctx slot, or `None` for the
    /// reserved index.
    pub(crate) fn get_func_ref(&mut self, index: FuncIndex) -> Option<*mut VMFuncRef> {
        if index == FuncIndex::reserved_value() {
            return None;
        }

        unsafe {
            let func = &self.module().functions[index];
            let sig = func.signature;
            let func_ref: *mut VMFuncRef = self
                .vmctx_plus_offset_mut::<VMFuncRef>(self.offsets().vmctx_func_ref(func.func_ref));
            self.construct_func_ref(index, sig, func_ref);

            Some(func_ref)
        }
    }

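    /// Implements the `table.init` instruction: copies `len` elements of
    /// passive element segment `elem_index`, starting at `src`, into table
    /// `table_index` starting at `dst`.
    ///
    /// A segment that was dropped (or never existed) behaves as an empty
    /// segment, so only a zero-length access can succeed against it.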
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let module = self.module().clone();

        let empty = TableSegmentElements::Expressions(Box::new([]));

        let elements = match module.passive_elements_map.get(&elem_index) {
            Some(index) if !self.dropped_elements.contains(elem_index) => {
                &module.passive_elements[*index]
            }
            _ => &empty,
        };
        self.table_init_segment(table_index, elements, dst, src, len)
    }

    pub(crate) fn table_init_segment(
        &mut self,
        table_index: TableIndex,
        elements: &TableSegmentElements,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = unsafe { &mut *self.get_table(table_index) };
        let src = usize::try_from(src).map_err(|_| Trap::TableOutOfBounds)?;
        let len = usize::try_from(len).map_err(|_| Trap::TableOutOfBounds)?;

        match elements {
            TableSegmentElements::Functions(funcs) => {
                let elements = funcs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or(Trap::TableOutOfBounds)?;
                table.init_func(
                    dst,
                    elements
                        .iter()
                        .map(|idx| self.get_func_ref(*idx).unwrap_or(std::ptr::null_mut())),
                )?;
            }
            TableSegmentElements::Expressions(exprs) => {
                let ty = table.element_type();
                let exprs = exprs
                    .get(src..)
                    .and_then(|s| s.get(..len))
                    .ok_or(Trap::TableOutOfBounds)?;
                match ty {
                    TableElementType::Func => {
                        table.init_func(
                            dst,
                            exprs.iter().map(|expr| match expr {
                                TableElementExpression::Null => std::ptr::null_mut(),
                                TableElementExpression::Function(idx) => {
                                    self.get_func_ref(*idx).unwrap()
                                }
                                TableElementExpression::GlobalGet(idx) => {
                                    let global = self.defined_or_imported_global_ptr(*idx);
                                    unsafe { (*global).as_func_ref() }
                                }
                            }),
                        )?;
                    }
                    TableElementType::GcRef => {
                        table.init_gc_refs(
                            dst,
                            exprs.iter().map(|expr| match expr {
                                TableElementExpression::Null => None,
                                TableElementExpression::Function(_) => unreachable!(),
                                TableElementExpression::GlobalGet(idx) => {
                                    let global = self.defined_or_imported_global_ptr(*idx);
                                    let gc_ref = unsafe { (*global).as_gc_ref() };
                                    gc_ref.map(|r| {
                                        let store = unsafe { &mut *self.store() };
                                        store.gc_store().clone_gc_ref(r)
                                    })
                                }
                            }),
                        )?;
                    }
                }
            }
        }

        Ok(())
    }

    /// Implements the `elem.drop` instruction: marks the passive element
    /// segment as dropped so later `table.init` uses see an empty segment.
    pub(crate) fn elem_drop(&mut self, elem_index: ElemIndex) {
        self.dropped_elements.insert(elem_index);
    }

    pub fn get_defined_memory(&mut self, index: DefinedMemoryIndex) -> *mut Memory {
        ptr::addr_of_mut!(self.memories[index].1)
    }

    /// Implements the `memory.copy` instruction for possibly-overlapping
    /// source and destination ranges.
    pub(crate) fn memory_copy(
        &mut self,
        dst_index: MemoryIndex,
        dst: u64,
        src_index: MemoryIndex,
        src: u64,
        len: u64,
    ) -> Result<(), Trap> {
        let src_mem = self.get_memory(src_index);
        let dst_mem = self.get_memory(dst_index);

        let src = self.validate_inbounds(src_mem.current_length(), src, len)?;
        let dst = self.validate_inbounds(dst_mem.current_length(), dst, len)?;

        unsafe {
            let dst = dst_mem.base.add(dst);
            let src = src_mem.base.add(src);
            // `ptr::copy` is overlap-safe, matching `memory.copy` semantics.
            ptr::copy(src, dst, len as usize);
        }

        Ok(())
    }

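    /// Checks that the half-open range `[ptr, ptr + len)` lies within a
    /// linear region of size `max`, returning `ptr` as a `usize` offset.
    ///
    /// For example, with `max = 65536`, `ptr = 65532` and `len = 8` the
    /// computed end `65540` exceeds `max` and the access traps with
    /// `MemoryOutOfBounds`; `len = 4` would be accepted.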
    fn validate_inbounds(&self, max: usize, ptr: u64, len: u64) -> Result<usize, Trap> {
        let oob = || Trap::MemoryOutOfBounds;
        let end = ptr
            .checked_add(len)
            .and_then(|i| usize::try_from(i).ok())
            .ok_or_else(oob)?;
        if end > max {
            Err(oob())
        } else {
            Ok(ptr as usize)
        }
    }

    /// Implements the `memory.fill` instruction.
    pub(crate) fn memory_fill(
        &mut self,
        memory_index: MemoryIndex,
        dst: u64,
        val: u8,
        len: u64,
    ) -> Result<(), Trap> {
        let memory = self.get_memory(memory_index);
        let dst = self.validate_inbounds(memory.current_length(), dst, len)?;

        unsafe {
            let dst = memory.base.add(dst);
            ptr::write_bytes(dst, val, len as usize);
        }

        Ok(())
    }

    /// Implements the `memory.init` instruction: copies bytes from passive
    /// data segment `data_index` into memory `memory_index`. A dropped (or
    /// missing) segment is treated as empty.
    pub(crate) fn memory_init(
        &mut self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let range = match self.module().passive_data_map.get(&data_index).cloned() {
            Some(range) if !self.dropped_data.contains(data_index) => range,
            _ => 0..0,
        };
        self.memory_init_segment(memory_index, range, dst, src, len)
    }

    pub(crate) fn wasm_data(&self, range: Range<u32>) -> &[u8] {
        &self.runtime_info.wasm_data()[range.start as usize..range.end as usize]
    }

    pub(crate) fn memory_init_segment(
        &mut self,
        memory_index: MemoryIndex,
        range: Range<u32>,
        dst: u64,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_memory(memory_index);
        let data = self.wasm_data(range);
        let dst = self.validate_inbounds(memory.current_length(), dst, len.into())?;
        let src = self.validate_inbounds(data.len(), src.into(), len.into())?;
        let len = len as usize;

        unsafe {
            let src_start = data.as_ptr().add(src);
            let dst_start = memory.base.add(dst);
            ptr::copy_nonoverlapping(src_start, dst_start, len);
        }

        Ok(())
    }

    /// Implements the `data.drop` instruction: marks the passive data segment
    /// as dropped so later `memory.init` uses see an empty segment.
    pub(crate) fn data_drop(&mut self, data_index: DataIndex) {
        self.dropped_data.insert(data_index);
    }

    pub(crate) fn get_table_with_lazy_init(
        &mut self,
        table_index: TableIndex,
        range: impl Iterator<Item = u32>,
    ) -> *mut Table {
        self.with_defined_table_index_and_instance(table_index, |idx, instance| {
            instance.get_defined_table_with_lazy_init(idx, range)
        })
    }

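    /// Returns the table at `idx`, first making sure every funcref slot in
    /// `range` has been initialized.
    ///
    /// Function tables may be left uninitialized at instantiation and filled
    /// in lazily from the module's precomputed initializers on first access;
    /// slots with no precomputed initializer become null funcrefs.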
    pub fn get_defined_table_with_lazy_init(
        &mut self,
        idx: DefinedTableIndex,
        range: impl Iterator<Item = u32>,
    ) -> *mut Table {
        let elt_ty = self.tables[idx].1.element_type();

        if elt_ty == TableElementType::Func {
            for i in range {
                let gc_store = unsafe { (*self.store()).gc_store() };
                let value = match self.tables[idx].1.get(gc_store, i) {
                    Some(value) => value,
                    None => {
                        // Indices in `range` beyond the table's length are
                        // simply ignored.
                        break;
                    }
                };

                if !value.is_uninit() {
                    continue;
                }

                // Uninitialized funcref slots only arise for tables whose
                // initial value is `Null` with a precomputed list of
                // initializers, hence the `unreachable!()` arms below.
                let module = self.module();
                let precomputed = match &module.table_initialization.initial_values[idx] {
                    TableInitialValue::Null { precomputed } => precomputed,
                    TableInitialValue::FuncRef(_)
                    | TableInitialValue::GlobalGet(_)
                    | TableInitialValue::I31Ref(_) => {
                        unreachable!()
                    }
                };
                let func_index = precomputed.get(i as usize).cloned();
                let func_ref = func_index
                    .and_then(|func_index| self.get_func_ref(func_index))
                    .unwrap_or(std::ptr::null_mut());
                self.tables[idx]
                    .1
                    .set(i, TableElement::FuncRef(func_ref))
                    .expect("Table type should match and index should be in-bounds");
            }
        }

        ptr::addr_of_mut!(self.tables[idx].1)
    }

    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> *mut Table {
        self.with_defined_table_index_and_instance(table_index, |idx, instance| {
            ptr::addr_of_mut!(instance.tables[idx].1)
        })
    }

    pub(crate) fn get_defined_table(&mut self, index: DefinedTableIndex) -> *mut Table {
        ptr::addr_of_mut!(self.tables[index].1)
    }

    /// Resolves `index` to a defined table, following a table import to the
    /// instance that defines it if necessary, and invokes `f` with that
    /// defined index and instance.
    pub(crate) fn with_defined_table_index_and_instance<R>(
        &mut self,
        index: TableIndex,
        f: impl FnOnce(DefinedTableIndex, &mut Instance) -> R,
    ) -> R {
        if let Some(defined_table_index) = self.module().defined_table_index(index) {
            f(defined_table_index, self)
        } else {
            let import = self.imported_table(index);
            unsafe {
                Instance::from_vmctx(import.vmctx, |foreign_instance| {
                    let foreign_table_def = import.from;
                    let foreign_table_index = foreign_instance.table_index(&*foreign_table_def);
                    f(foreign_table_index, foreign_instance)
                })
            }
        }
    }

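    /// Initializes the raw `VMContext` memory that trails this `Instance`:
    /// the magic value, store and callee pointers, the type-ids and builtin
    /// function arrays, copies of the imports, definitions for defined
    /// tables and memories, and finally the global values.
    ///
    /// # Safety
    ///
    /// Must be called exactly once, right after the instance's allocation and
    /// before the vmctx is handed to compiled code.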
    unsafe fn initialize_vmctx(
        &mut self,
        module: &Module,
        offsets: &VMOffsets<HostPtr>,
        store: StorePtr,
        imports: Imports,
    ) {
        assert!(std::ptr::eq(module, self.module().as_ref()));

        *self.vmctx_plus_offset_mut(offsets.vmctx_magic()) = VMCONTEXT_MAGIC;
        self.set_callee(None);
        self.set_store(store.as_raw());

        let types = self.runtime_info.type_ids();
        *self.vmctx_plus_offset_mut(offsets.vmctx_type_ids_array()) = types.as_ptr();

        *self.vmctx_plus_offset_mut(offsets.vmctx_builtin_functions()) =
            &VMBuiltinFunctionsArray::INIT;

        // Copy the provided imports into their fixed locations in the vmctx.
        debug_assert_eq!(imports.functions.len(), module.num_imported_funcs);
        ptr::copy_nonoverlapping(
            imports.functions.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_functions_begin()),
            imports.functions.len(),
        );
        debug_assert_eq!(imports.tables.len(), module.num_imported_tables);
        ptr::copy_nonoverlapping(
            imports.tables.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_tables_begin()),
            imports.tables.len(),
        );
        debug_assert_eq!(imports.memories.len(), module.num_imported_memories);
        ptr::copy_nonoverlapping(
            imports.memories.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_memories_begin()),
            imports.memories.len(),
        );
        debug_assert_eq!(imports.globals.len(), module.num_imported_globals);
        ptr::copy_nonoverlapping(
            imports.globals.as_ptr(),
            self.vmctx_plus_offset_mut(offsets.vmctx_imported_globals_begin()),
            imports.globals.len(),
        );

        // Write a `VMTableDefinition` for each defined table.
        let mut ptr = self.vmctx_plus_offset_mut(offsets.vmctx_tables_begin());
        for i in 0..module.table_plans.len() - module.num_imported_tables {
            ptr::write(ptr, self.tables[DefinedTableIndex::new(i)].1.vmtable());
            ptr = ptr.add(1);
        }

        // Write a pointer to a `VMMemoryDefinition` for each defined memory.
        // Owned (non-shared) memories have their definition written into the
        // "owned memories" area of the vmctx; shared memories point at the
        // definition owned by the shared memory itself.
        let mut ptr = self.vmctx_plus_offset_mut(offsets.vmctx_memories_begin());
        let mut owned_ptr = self.vmctx_plus_offset_mut(offsets.vmctx_owned_memories_begin());
        for i in 0..module.memory_plans.len() - module.num_imported_memories {
            let defined_memory_index = DefinedMemoryIndex::new(i);
            let memory_index = module.memory_index(defined_memory_index);
            if module.memory_plans[memory_index].memory.shared {
                let def_ptr = self.memories[defined_memory_index]
                    .1
                    .as_shared_memory()
                    .unwrap()
                    .vmmemory_ptr();
                ptr::write(ptr, def_ptr.cast_mut());
            } else {
                ptr::write(owned_ptr, self.memories[defined_memory_index].1.vmmemory());
                ptr::write(ptr, owned_ptr);
                owned_ptr = owned_ptr.add(1);
            }
            ptr = ptr.add(1);
        }

        self.initialize_vmctx_globals(module);
    }

    unsafe fn initialize_vmctx_globals(&mut self, module: &Module) {
        for (index, init) in module.global_initializers.iter() {
            let to = self.global_ptr(index);
            let wasm_ty = module.globals[module.global_index(index)].wasm_ty;

            // Start from a zeroed definition before applying the initializer.
            ptr::write(to, VMGlobalDefinition::new());

            match *init {
                GlobalInit::I32Const(x) => {
                    let index = module.global_index(index);
                    if index.index() == 0 {
                        // wmemcheck treats the first global's initial value
                        // as the stack size.
                        #[cfg(feature = "wmemcheck")]
                        {
                            if let Some(wmemcheck) = &mut self.wmemcheck_state {
                                wmemcheck.set_stack_size(x as usize);
                            }
                        }
                    }
                    *(*to).as_i32_mut() = x;
                }
                GlobalInit::I64Const(x) => *(*to).as_i64_mut() = x,
                GlobalInit::F32Const(x) => *(*to).as_f32_bits_mut() = x,
                GlobalInit::F64Const(x) => *(*to).as_f64_bits_mut() = x,
                GlobalInit::V128Const(x) => *(*to).as_u128_mut() = x,
                GlobalInit::GetGlobal(x) => {
                    let from = if let Some(def_x) = module.defined_global_index(x) {
                        self.global(def_x)
                    } else {
                        &*self.imported_global(x).from
                    };

                    // GC-managed values need a proper clone so the GC heap's
                    // bookkeeping stays correct; plain values can be copied
                    // bit-for-bit.
                    if wasm_ty.is_gc_heap_type() {
                        let gc_ref = (*from)
                            .as_gc_ref()
                            .map(|r| r.unchecked_copy())
                            .map(|r| (*self.store()).gc_store().clone_gc_ref(&r));
                        (*to).init_gc_ref(gc_ref);
                    } else {
                        ptr::copy_nonoverlapping(from, to, 1);
                    }
                }
                GlobalInit::RefFunc(f) => {
                    *(*to).as_func_ref_mut() = self.get_func_ref(f).unwrap();
                }
                GlobalInit::RefNullConst => match wasm_ty {
                    // The freshly zeroed definition already represents null.
                    WasmValType::Ref(WasmRefType { nullable: true, .. }) => {}
                    ty => panic!("unsupported reference type for global: {:?}", ty),
                },
                GlobalInit::RefI31Const(x) => {
                    let gc_ref = VMGcRef::from_i31(I31::wrapping_i32(x));
                    (*to).init_gc_ref(Some(gc_ref));
                }
            }
        }
    }

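    /// Attempts to attribute a faulting host address to one of this
    /// instance's linear memories, returning the wasm-relative address and
    /// memory size if the address falls within a memory's accessible range.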
    fn wasm_fault(&self, addr: usize) -> Option<WasmFault> {
        let mut fault = None;
        for (_, (_, memory)) in self.memories.iter() {
            let accessible = memory.wasm_accessible();
            if accessible.start <= addr && addr < accessible.end {
                // A host address should be attributable to at most one memory.
                assert!(fault.is_none());
                fault = Some(WasmFault {
                    memory_size: memory.byte_size(),
                    wasm_address: u64::try_from(addr - accessible.start).unwrap(),
                });
            }
        }
        fault
    }
}

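/// A handle holding a raw pointer to an `Instance` allocated by one of this
/// crate's instance allocators.
///
/// The handle does not implement `Drop`; deallocation goes through the
/// allocator that created the instance. A sketch of typical use, assuming a
/// `mut handle: InstanceHandle` obtained from an allocator in this crate
/// (names here are illustrative):
///
/// ```ignore
/// // Find the entity index of an export named "memory", then look it up.
/// let idx = handle
///     .exports()
///     .find(|(name, _)| name.as_str() == "memory")
///     .map(|(_, idx)| *idx);
/// if let Some(idx) = idx {
///     let _export = handle.get_export_by_index(idx);
/// }
/// ```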
#[derive(Debug)]
pub struct InstanceHandle {
    instance: Option<SendSyncPtr<Instance>>,
}

impl InstanceHandle {
    /// Creates a handle that points at no instance.
    pub fn null() -> InstanceHandle {
        InstanceHandle { instance: None }
    }

    #[inline]
    pub fn vmctx(&self) -> *mut VMContext {
        self.instance().vmctx()
    }

    pub fn module(&self) -> &Arc<Module> {
        self.instance().module()
    }

    pub fn get_exported_func(&mut self, export: FuncIndex) -> ExportFunction {
        self.instance_mut().get_exported_func(export)
    }

    pub fn get_exported_global(&mut self, export: GlobalIndex) -> ExportGlobal {
        self.instance_mut().get_exported_global(export)
    }

    pub fn get_exported_memory(&mut self, export: MemoryIndex) -> ExportMemory {
        self.instance_mut().get_exported_memory(export)
    }

    pub fn get_exported_table(&mut self, export: TableIndex) -> ExportTable {
        self.instance_mut().get_exported_table(export)
    }

    pub fn get_export_by_index(&mut self, export: EntityIndex) -> Export {
        match export {
            EntityIndex::Function(i) => Export::Function(self.get_exported_func(i)),
            EntityIndex::Global(i) => Export::Global(self.get_exported_global(i)),
            EntityIndex::Table(i) => Export::Table(self.get_exported_table(i)),
            EntityIndex::Memory(i) => Export::Memory(self.get_exported_memory(i)),
        }
    }

    pub fn exports(&self) -> indexmap::map::Iter<String, EntityIndex> {
        self.instance().exports()
    }

    pub fn host_state(&self) -> &dyn Any {
        self.instance().host_state()
    }

    pub fn get_defined_table(&mut self, index: DefinedTableIndex) -> *mut Table {
        self.instance_mut().get_defined_table(index)
    }

    pub fn get_defined_table_with_lazy_init(
        &mut self,
        index: DefinedTableIndex,
        range: impl Iterator<Item = u32>,
    ) -> *mut Table {
        let index = self.instance().module().table_index(index);
        self.instance_mut().get_table_with_lazy_init(index, range)
    }

    pub fn all_tables<'a>(
        &'a mut self,
    ) -> impl ExactSizeIterator<Item = (TableIndex, ExportTable)> + 'a {
        let indices = (0..self.module().table_plans.len())
            .map(|i| TableIndex::new(i))
            .collect::<Vec<_>>();
        indices.into_iter().map(|i| (i, self.get_exported_table(i)))
    }

    pub fn defined_tables<'a>(&'a mut self) -> impl ExactSizeIterator<Item = ExportTable> + 'a {
        let num_imported = self.module().num_imported_tables;
        self.all_tables()
            .skip(num_imported)
            .map(|(_i, table)| table)
    }

    pub fn all_memories<'a>(
        &'a mut self,
    ) -> impl ExactSizeIterator<Item = (MemoryIndex, ExportMemory)> + 'a {
        let indices = (0..self.module().memory_plans.len())
            .map(|i| MemoryIndex::new(i))
            .collect::<Vec<_>>();
        indices
            .into_iter()
            .map(|i| (i, self.get_exported_memory(i)))
    }

    pub fn defined_memories<'a>(&'a mut self) -> impl ExactSizeIterator<Item = ExportMemory> + 'a {
        let num_imported = self.module().num_imported_memories;
        self.all_memories()
            .skip(num_imported)
            .map(|(_i, memory)| memory)
    }

    pub fn all_globals<'a>(
        &'a mut self,
    ) -> impl ExactSizeIterator<Item = (GlobalIndex, ExportGlobal)> + 'a {
        self.instance_mut().all_globals()
    }

    pub fn defined_globals<'a>(
        &'a mut self,
    ) -> impl ExactSizeIterator<Item = (DefinedGlobalIndex, ExportGlobal)> + 'a {
        self.instance_mut().defined_globals()
    }

    #[inline]
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { &*self.instance.unwrap().as_ptr() }
    }

    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { &mut *self.instance.unwrap().as_ptr() }
    }

    #[inline]
    pub fn store(&self) -> *mut dyn Store {
        self.instance().store()
    }

    pub unsafe fn set_store(&mut self, store: *mut dyn Store) {
        self.instance_mut().set_store(Some(store));
    }

    /// Duplicates this handle without any lifetime tracking; the caller must
    /// ensure the underlying instance outlives both copies.
    #[inline]
    pub unsafe fn clone(&self) -> InstanceHandle {
        InstanceHandle {
            instance: self.instance,
        }
    }

    /// Runs the module's table and memory initializers against this instance.
    pub fn initialize(&mut self, module: &Module, is_bulk_memory: bool) -> Result<()> {
        allocator::initialize_instance(self.instance_mut(), module, is_bulk_memory)
    }

    pub fn wasm_fault(&self, addr: usize) -> Option<WasmFault> {
        self.instance().wasm_fault(addr)
    }
}