wasmer_vm/instance/
mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{catch_traps, Trap, TrapCode};
16use crate::vmcontext::{
17    memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
18    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
19    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
20    VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTrampoline,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMFunctionBody};
23use crate::{LinearMemory, NotifyLocation};
24use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
25pub use allocator::InstanceAllocator;
26use memoffset::offset_of;
27use more_asserts::assert_lt;
28use std::alloc::Layout;
29use std::cell::RefCell;
30use std::collections::HashMap;
31use std::convert::TryFrom;
32use std::fmt;
33use std::mem;
34use std::ptr::{self, NonNull};
35use std::slice;
36use std::sync::Arc;
37use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
38use wasmer_types::{
39    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
40    LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
41    MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, VMOffsets,
42};
43
/// A WebAssembly instance.
///
/// The type is dynamically-sized. Indeed, the `vmctx` field can
/// contain various data. That's why the type has a C representation
/// to ensure that the `vmctx` field is last. See the documentation of
/// the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    /// NOTE(review): raw pointer — the owning store must outlive this
    /// instance; confirm against `VMInstance`'s allocation/deallocation.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member).
    vmctx: VMContext,
}
99
100impl fmt::Debug for Instance {
101    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
102        formatter.debug_struct("Instance").finish()
103    }
104}
105
106#[allow(clippy::cast_ptr_alignment)]
107impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must lie within the `vmctx` region described by
    /// `self.offsets`, and the returned pointer may only be used as a `T`
    /// if a valid `T` actually lives at that offset.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }
115
    /// Return the shared `ModuleInfo` this instance was instantiated from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }
119
    /// Return a plain reference to the underlying `ModuleInfo`
    /// (dereferences the `Arc` held by this instance).
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }
123
    /// Return a shared reference to the store objects owning this instance.
    fn context(&self) -> &StoreObjects {
        // SAFETY: `self.context` is presumed non-null and valid for the
        // instance's lifetime — confirm against the instance allocator.
        unsafe { &*self.context }
    }
127
    /// Return a mutable reference to the store objects owning this instance.
    fn context_mut(&mut self) -> &mut StoreObjects {
        // SAFETY: `self.context` is presumed non-null and valid for the
        // instance's lifetime; `&mut self` guarantees exclusive access here.
        unsafe { &mut *self.context }
    }
131
    /// Offsets in the `vmctx` region.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
136
    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
141
    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY: callers must pass an index that is in range for this
        // module's function imports; no bounds check is performed here.
        unsafe { &*self.imported_functions_ptr().add(index) }
    }
147
    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }
152
    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY: callers must pass an index that is in range for this
        // module's table imports; no bounds check is performed here.
        unsafe { &*self.imported_tables_ptr().add(index) }
    }
158
    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }
163
    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY: callers must pass an index that is in range for this
        // module's memory imports; no bounds check is performed here.
        unsafe { &*self.imported_memories_ptr().add(index) }
    }
169
    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }
174
    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY: callers must pass an index that is in range for this
        // module's global imports; no bounds check is performed here.
        unsafe { &*self.imported_globals_ptr().add(index) }
    }
180
    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }
185
    /// Return a copy of the indexed `VMTableDefinition`.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        // SAFETY: `table_ptr` yields a valid, aligned definition inside the
        // `vmctx` region; copying it out by value is sound.
        unsafe { *self.table_ptr(index).as_ref() }
    }
191
    #[allow(dead_code)]
    /// Updates the value for a defined table to `VMTableDefinition`.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        // SAFETY: `table_ptr` yields a valid, aligned slot inside the
        // `vmctx` region. Note this writes through `&self`; compiled wasm
        // code reads the same slot.
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }
199
    /// Return a pointer to the indexed `VMTableDefinition`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY: offsetting within the table-definitions array of `vmctx`;
        // the index must be in range for this module's local tables.
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }
205
    /// Return a pointer to the `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
210
211    #[allow(dead_code)]
212    /// Get a locally defined or imported memory.
213    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
214        if let Some(local_index) = self.module.local_memory_index(index) {
215            self.memory(local_index)
216        } else {
217            let import = self.imported_memory(index);
218            unsafe { *import.definition.as_ref() }
219        }
220    }
221
    /// Return a copy of the indexed `VMMemoryDefinition`.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        // SAFETY: `memory_ptr` yields a valid, aligned definition inside the
        // `vmctx` region; copying it out by value is sound.
        unsafe { *self.memory_ptr(index).as_ref() }
    }
226
    #[allow(dead_code)]
    /// Set the indexed memory to `VMMemoryDefinition`.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        // SAFETY: `memory_ptr` yields a valid, aligned slot inside the
        // `vmctx` region. Note this writes through `&self`; compiled wasm
        // code reads the same slot.
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }
234
    /// Return a pointer to the indexed `VMMemoryDefinition`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY: offsetting within the memory-definitions array of `vmctx`;
        // the index must be in range for this module's local memories.
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }
240
    /// Return a pointer to the `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
245
    /// Get a locally defined or imported memory.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // SAFETY: `self.context` is presumed non-null and valid for the
            // instance's lifetime; `as_ref().unwrap()` asserts non-null.
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            // SAFETY: same context-validity assumption as above.
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }
260
    /// Get a locally defined or imported memory, mutably.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // SAFETY: `self.context` is presumed non-null and valid;
            // `&mut self` guarantees exclusive access to the store here.
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            // SAFETY: same context-validity assumption as above.
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }
275
    /// Get a locally defined memory as mutable.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        // SAFETY: `self.context` is presumed non-null and valid; `&mut self`
        // guarantees exclusive access to the store here.
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
285
    /// Return a clone of the indexed `VMGlobalDefinition`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        // SAFETY: `global_ptr` yields a valid, aligned definition.
        unsafe { self.global_ptr(index).as_ref().clone() }
    }
290
    /// Set the indexed global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        // SAFETY: `global_ptr` yields a valid, aligned slot. Note this
        // writes through `&self`; compiled wasm code reads the same slot.
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }
298
    /// Return a pointer to the indexed `VMGlobalDefinition`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // Globals are stored indirectly: each `vmctx` slot holds a pointer
        // to the definition (`globals_ptr` is `*mut *mut`), so one extra
        // dereference happens here.
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }
305
    /// Return a pointer to the array of `*mut VMGlobalDefinition` slots.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }
310
    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        // SAFETY: the offset comes from `self.offsets`, which describes this
        // instance's own `vmctx` layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
315
    /// Return a reference to the vmctx used by compiled wasm code.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }
320
    /// Return a raw pointer to the vmctx used by compiled wasm code.
    ///
    /// The const-to-mut cast mirrors how compiled code accesses the
    /// trailing `vmctx` region; callers must uphold aliasing rules.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
325
    /// Invoke the WebAssembly start function of the instance, if one is present.
    ///
    /// Returns `Ok(())` immediately when the module declares no start
    /// function; otherwise runs it under `catch_traps` so wasm traps are
    /// surfaced as a `Trap` error.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: a locally defined function runs with our own
        // vmctx, an imported one runs with the import's saved environment.
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        // Make the call.
        // SAFETY: a wasm start function takes no parameters and returns
        // nothing, so calling the body as
        // `unsafe extern "C" fn(VMFunctionContext)` with its matching
        // context is sound; traps are caught by `catch_traps`.
        unsafe {
            catch_traps(trap_handler, config, move || {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionContext)>(
                    callee_address,
                )(callee_vmctx)
            })
        }
    }
367
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    ///
    /// Valid because `Instance` is `#[repr(C)]` with `vmctx` as its last
    /// field, giving the field a stable offset.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
373
374    /// Return the table index for the given `VMTableDefinition`.
375    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
376        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
377        let end: *const VMTableDefinition = table;
378        // TODO: Use `offset_from` once it stablizes.
379        let index = LocalTableIndex::new(
380            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
381        );
382        assert_lt!(index.index(), self.tables.len());
383        index
384    }
385
386    /// Return the memory index for the given `VMMemoryDefinition`.
387    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
388        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
389        let end: *const VMMemoryDefinition = memory;
390        // TODO: Use `offset_from` once it stablizes.
391        let index = LocalMemoryIndex::new(
392            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
393        );
394        assert_lt!(index.index(), self.memories.len());
395        index
396    }
397
    /// Grow memory by the specified amount of pages.
    ///
    /// Returns an error if memory can't be grown by the specified amount
    /// of pages.
    ///
    /// # Panics
    /// Panics if `memory_index` does not refer to a local memory of this
    /// instance.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        // Copy the handle out first so `self` can be reborrowed mutably
        // for `context_mut` below.
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
416
    /// Grow imported memory by the specified amount of pages.
    ///
    /// Returns an error if memory can't be grown by the specified amount
    /// of pages.
    ///
    /// # Safety
    /// This and `imported_memory_size` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        // Copy the handle so the borrow of `self` ends before `context_mut`.
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
437
    /// Returns the number of allocated wasm pages.
    ///
    /// # Panics
    /// Panics if `memory_index` does not refer to a local memory of this
    /// instance.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }
446
    /// Returns the number of allocated wasm pages in an imported memory.
    ///
    /// # Safety
    /// This and `imported_memory_grow` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }
457
    /// Returns the number of elements in a given table.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table of this
    /// instance.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }
466
    /// Returns the number of elements in a given imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
476
    /// Grow table by the specified amount of elements.
    ///
    /// Returns `None` if table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table of this
    /// instance.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        // Copy the handle out first so `self` can be reborrowed mutably
        // for `context_mut` below.
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
493
    /// Grow an imported table by the specified amount of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        // Copy the handle so the borrow of `self` ends before `context_mut`.
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
508
    /// Get table element by index.
    ///
    /// Returns `None` when `index` is out of bounds for the table.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table of this
    /// instance.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }
521
    /// Returns the element at the given index in an imported table.
    ///
    /// Returns `None` when `index` is out of bounds for the table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }
535
    /// Set table element by index.
    ///
    /// # Panics
    /// Panics if `table_index` does not refer to a local table of this
    /// instance.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        // Copy the handle out first so `self` can be reborrowed mutably
        // for `context_mut` below.
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }
549
    /// Set table element by index for an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        // Copy the handle so the borrow of `self` ends before `context_mut`.
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
564
565    /// Get a `VMFuncRef` for the given `FunctionIndex`.
566    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
567        if function_index == FunctionIndex::reserved_value() {
568            None
569        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
570            Some(VMFuncRef(NonNull::from(
571                &self.funcrefs[local_function_index],
572            )))
573        } else {
574            Some(VMFuncRef(self.imported_funcrefs[function_index]))
575        }
576    }
577
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        // SAFETY: `self.context` is presumed non-null and valid; `&mut self`
        // guarantees exclusive access to the store here.
        let table = unsafe { table.get_mut(&mut *self.context) };
        // A dropped (or never-present) passive segment behaves as empty.
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Bounds-check both ranges up front; `checked_add` also rejects
        // `src + len` / `dst + len` overflowing u32.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
618
    /// The `table.fill` operation: fills a portion of a table with a given value.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        // `checked_add` also rejects `start_index + len` overflowing u32.
        if start_index
            .checked_add(len)
            .map_or(true, |n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
651
652    /// Drop an element.
653    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
654        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop
655
656        let mut passive_elements = self.passive_elements.borrow_mut();
657        passive_elements.remove(&elem_index);
658        // Note that we don't check that we actually removed an element because
659        // dropping a non-passive element is a no-op (not a trap).
660    }
661
    /// Do a `memory.copy` for a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        let memory = self.memory(memory_index);
        // The following memory copy is not synchronized and is not atomic:
        // SAFETY: `memory` is a valid definition for this instance;
        // `memory_copy` performs its own bounds checking.
        unsafe { memory_copy(&memory, dst, src, len) }
    }
681
    /// Perform a `memory.copy` on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // SAFETY: the import's `definition` pointer is set up at
        // instantiation time and points at a valid definition.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(memory, dst, src, len) }
    }
695
    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // The following memory fill is not synchronized and is not atomic:
        // SAFETY: `memory` is a valid definition for this instance;
        // `memory_fill` performs its own bounds checking.
        unsafe { memory_fill(&memory, dst, val, len) }
    }
712
    /// Perform the `memory.fill` operation on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // SAFETY: the import's `definition` pointer is set up at
        // instantiation time and points at a valid definition.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(memory, dst, val, len) }
    }
730
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        // A dropped (or never-present) passive segment behaves as empty.
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        // SAFETY: `vmmemory` yields this memory's live definition pointer.
        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Bounds-check both ranges up front; `checked_add` also rejects
        // `src + len` / `dst + len` overflowing u32.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        // SAFETY: the destination range was bounds-checked above.
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
765
766    /// Drop the given data segment, truncating its length to zero.
767    pub(crate) fn data_drop(&self, data_index: DataIndex) {
768        let mut passive_data = self.passive_data.borrow_mut();
769        passive_data.remove(&data_index);
770    }
771
772    /// Get a table by index regardless of whether it is locally-defined or an
773    /// imported, foreign table.
774    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
775        if let Some(local_table_index) = self.module.local_table_index(table_index) {
776            self.get_local_table(local_table_index)
777        } else {
778            self.get_foreign_table(table_index)
779        }
780    }
781
    /// Get a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        // Copy the handle out first so `self` can be reborrowed mutably
        // for `context_mut` below.
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }
787
788    /// Get an imported, foreign table.
789    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
790        let import = self.imported_table(index);
791        let table = import.handle;
792        table.get_mut(self.context_mut())
793    }
794
795    /// Get a table handle by index regardless of whether it is locally-defined
796    /// or an imported, foreign table.
797    pub(crate) fn get_table_handle(
798        &mut self,
799        table_index: TableIndex,
800    ) -> InternalStoreHandle<VMTable> {
801        if let Some(local_table_index) = self.module.local_table_index(table_index) {
802            self.tables[local_table_index]
803        } else {
804            self.imported_table(table_index).handle
805        }
806    }
807
808    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
809        let location = NotifyLocation { address: dst };
810        let timeout = if timeout < 0 {
811            None
812        } else {
813            Some(std::time::Duration::from_nanos(timeout as u64))
814        };
815        match memory.do_wait(location, timeout) {
816            Ok(count) => Ok(count),
817            Err(_err) => {
818                // ret is None if there is more than 2^32 waiter in queue or some other error
819                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
820            }
821        }
822    }
823
824    /// Perform an Atomic.Wait32
825    pub(crate) fn local_memory_wait32(
826        &mut self,
827        memory_index: LocalMemoryIndex,
828        dst: u32,
829        val: u32,
830        timeout: i64,
831    ) -> Result<u32, Trap> {
832        let memory = self.memory(memory_index);
833        //if ! memory.shared {
834        // We should trap according to spec, but official test rely on not trapping...
835        //}
836
837        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
838
839        if let Ok(mut ret) = ret {
840            if ret == 0 {
841                let memory = self.get_local_vmmemory_mut(memory_index);
842                ret = Self::memory_wait(memory, dst, timeout)?;
843            }
844            Ok(ret)
845        } else {
846            ret
847        }
848    }
849
850    /// Perform an Atomic.Wait32
851    pub(crate) fn imported_memory_wait32(
852        &mut self,
853        memory_index: MemoryIndex,
854        dst: u32,
855        val: u32,
856        timeout: i64,
857    ) -> Result<u32, Trap> {
858        let import = self.imported_memory(memory_index);
859        let memory = unsafe { import.definition.as_ref() };
860        //if ! memory.shared {
861        // We should trap according to spec, but official test rely on not trapping...
862        //}
863
864        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
865        if let Ok(mut ret) = ret {
866            if ret == 0 {
867                let memory = self.get_vmmemory_mut(memory_index);
868                ret = Self::memory_wait(memory, dst, timeout)?;
869            }
870            Ok(ret)
871        } else {
872            ret
873        }
874    }
875
876    /// Perform an Atomic.Wait64
877    pub(crate) fn local_memory_wait64(
878        &mut self,
879        memory_index: LocalMemoryIndex,
880        dst: u32,
881        val: u64,
882        timeout: i64,
883    ) -> Result<u32, Trap> {
884        let memory = self.memory(memory_index);
885        //if ! memory.shared {
886        // We should trap according to spec, but official test rely on not trapping...
887        //}
888
889        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
890
891        if let Ok(mut ret) = ret {
892            if ret == 0 {
893                let memory = self.get_local_vmmemory_mut(memory_index);
894                ret = Self::memory_wait(memory, dst, timeout)?;
895            }
896            Ok(ret)
897        } else {
898            ret
899        }
900    }
901
902    /// Perform an Atomic.Wait64
903    pub(crate) fn imported_memory_wait64(
904        &mut self,
905        memory_index: MemoryIndex,
906        dst: u32,
907        val: u64,
908        timeout: i64,
909    ) -> Result<u32, Trap> {
910        let import = self.imported_memory(memory_index);
911        let memory = unsafe { import.definition.as_ref() };
912        //if ! memory.shared {
913        // We should trap according to spec, but official test rely on not trapping...
914        //}
915
916        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
917
918        if let Ok(mut ret) = ret {
919            if ret == 0 {
920                let memory = self.get_vmmemory_mut(memory_index);
921                ret = Self::memory_wait(memory, dst, timeout)?;
922            }
923            Ok(ret)
924        } else {
925            ret
926        }
927    }
928
929    /// Perform an Atomic.Notify
930    pub(crate) fn local_memory_notify(
931        &mut self,
932        memory_index: LocalMemoryIndex,
933        dst: u32,
934        count: u32,
935    ) -> Result<u32, Trap> {
936        let memory = self.get_local_vmmemory_mut(memory_index);
937        // fetch the notifier
938        let location = NotifyLocation { address: dst };
939        Ok(memory.do_notify(location, count))
940    }
941
942    /// Perform an Atomic.Notify
943    pub(crate) fn imported_memory_notify(
944        &mut self,
945        memory_index: MemoryIndex,
946        dst: u32,
947        count: u32,
948    ) -> Result<u32, Trap> {
949        let memory = self.get_vmmemory_mut(memory_index);
950        // fetch the notifier
951        let location = NotifyLocation { address: dst };
952        Ok(memory.do_notify(location, count))
953    }
954}
955
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// Recorded so that `Drop` can call `dealloc` with exactly the layout
    /// used at allocation time.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
976
/// VMInstance are created with an InstanceAllocator
/// and it will "consume" the memory
/// So the Drop here actually free it (else it would be leaked)
impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // SAFETY: `self.instance` is the sole pointer to an `Instance`
            // allocated with `self.instance_layout` (see field docs), so it
            // is valid to drop in place and then free with the same layout.
            // Need to drop all the actual Instance members
            instance_ptr.drop_in_place();
            // And then free the memory allocated for the Instance itself
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
992
impl VMInstance {
    /// Create a new `VMInstance` pointing at a new [`Instance`].
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        // Collect the raw `VMGlobalDefinition` pointers up front, before
        // `context` is handed to the `Instance` below.
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.get(context).vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        // Passive data segments are shared out via `Arc` so `data.drop` can
        // cheaply discard them later.
        let passive_data = RefCell::new(
            module
                .passive_data
                .clone()
                .into_iter()
                .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                .collect::<HashMap<_, _>>(),
        );

        let handle = {
            let offsets = allocator.offsets().clone();
            // use dummy value to create an instance so we can get the vmctx pointer
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
            // Create the `Instance`. The unique, the One.
            let instance = Instance {
                module,
                context,
                offsets,
                memories: finished_memories,
                tables: finished_tables,
                globals: finished_globals,
                functions: finished_functions,
                function_call_trampolines: finished_function_call_trampolines,
                passive_elements: Default::default(),
                passive_data,
                funcrefs,
                imported_funcrefs,
                vmctx: VMContext {},
            };

            // Move the `Instance` into its manually-allocated home; the
            // allocator consumes itself here.
            let mut instance_handle = allocator.into_vminstance(instance);

            // Set the funcrefs after we've built the instance
            // (they embed the now-stable vmctx pointer).
            {
                let instance = instance_handle.instance_mut();
                let vmctx_ptr = instance.vmctx_ptr();
                (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                    &instance.module,
                    context,
                    &imports,
                    &instance.functions,
                    &vmshared_signatures,
                    &instance.function_call_trampolines,
                    vmctx_ptr,
                );
            }

            instance_handle
        };
        let instance = handle.instance();

        // Fill the vmctx region (laid out per `VMOffsets`) with the import and
        // signature arrays. `ptr::copy` is a memcpy into memory the allocator
        // reserved; source and destination never overlap here.
        ptr::copy(
            vmshared_signatures.values().as_slice().as_ptr(),
            instance.signature_ids_ptr(),
            vmshared_signatures.len(),
        );
        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr(),
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr(),
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr(),
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr(),
            imports.globals.len(),
        );
        // these should already be set, add asserts here? for:
        // - instance.tables_ptr() as *mut VMTableDefinition
        // - instance.memories_ptr() as *mut VMMemoryDefinition
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        // `ptr::write` (not copy) because the slot is uninitialized and the
        // array must not be dropped-in-place first.
        ptr::write(
            instance.builtin_functions_ptr(),
            VMBuiltinFunctionsArray::initialized(),
        );

        // Perform infallible initialization in this constructor, while fallible
        // initialization is deferred to the `initialize` method.
        initialize_passive_elements(instance);
        initialize_globals(instance);

        Ok(handle)
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        // SAFETY: `self.instance` is non-null and uniquely owned by `self`
        // (see field docs), so dereferencing through `&self` is sound.
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        // SAFETY: as above; `&mut self` guarantees exclusive access.
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `Instance::new`.
    ///
    /// Applies table/memory initializers and runs the start function, if any.
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    ///
    /// Returns `None` if the module has no export by that name.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        // NOTE(review): this pointer into `instance.funcrefs`
                        // presumably stays valid because the `Instance`
                        // allocation is stable — confirm against allocator.
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified amount of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified amount
    /// of elements.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1331
1332/// Compute the offset for a memory data initializer.
1333fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1334    let mut start = init.location.offset;
1335
1336    if let Some(base) = init.location.base {
1337        let val = unsafe {
1338            if let Some(def_index) = instance.module.local_global_index(base) {
1339                instance.global(def_index).val.u32
1340            } else {
1341                instance.imported_global(base).definition.as_ref().val.u32
1342            }
1343        };
1344        start += usize::try_from(val).unwrap();
1345    }
1346
1347    start
1348}
1349
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
/// Return a byte-slice view of a memory's data.
///
/// # Safety
///
/// `memory.base` and `memory.current_length` (taken from the resolved
/// `VMMemoryDefinition`) must describe a live allocation, and the caller must
/// ensure no conflicting references to that data exist for the returned
/// slice's lifetime.
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    // Resolve the segment's target memory: local definition or imported one.
    let memory = if let Some(local_memory_index) = instance
        .module
        .local_memory_index(init.location.memory_index)
    {
        instance.memory(local_memory_index)
    } else {
        let import = instance.imported_memory(init.location.memory_index);
        *import.definition.as_ref()
    };
    slice::from_raw_parts_mut(memory.base, memory.current_length)
}
1368
1369/// Compute the offset for a table element initializer.
1370fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1371    let mut start = init.offset;
1372
1373    if let Some(base) = init.base {
1374        let val = unsafe {
1375            if let Some(def_index) = instance.module.local_global_index(base) {
1376                instance.global(def_index).val.u32
1377            } else {
1378                instance.imported_global(base).definition.as_ref().val.u32
1379            }
1380        };
1381        start += usize::try_from(val).unwrap();
1382    }
1383
1384    start
1385}
1386
/// Initialize the table memory from the provided initializers.
///
/// Returns a `TableAccessOutOfBounds` trap if any active element segment does
/// not fit entirely inside its target table.
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    // Clone the `Arc` so the module can be iterated while `instance` is
    // mutated inside the loop.
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table_handle(init.table_index);
        // `instance.context` is a raw `*mut StoreObjects`; presumably it stays
        // valid for the instance's lifetime — this deref relies on that.
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // Bounds-check the whole segment (with overflow check) before writing
        // any element.
        if start
            .checked_add(init.elements.len())
            .map_or(true, |end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            // Funcref table: resolve each function index to its anyfunc.
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                    )
                    .unwrap();
            }
        } else {
            // Non-funcref table: fill the covered range with null externrefs,
            // ignoring the segment's function indices.
            for i in 0..init.elements.len() {
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                    )
                    .unwrap();
            }
        }
    }

    Ok(())
}
1426
1427/// Initialize the `Instance::passive_elements` map by resolving the
1428/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1429/// this instance.
1430fn initialize_passive_elements(instance: &Instance) {
1431    let mut passive_elements = instance.passive_elements.borrow_mut();
1432    debug_assert!(
1433        passive_elements.is_empty(),
1434        "should only be called once, at initialization time"
1435    );
1436
1437    passive_elements.extend(
1438        instance
1439            .module
1440            .passive_elements
1441            .iter()
1442            .filter(|(_, segments)| !segments.is_empty())
1443            .map(|(idx, segments)| {
1444                (
1445                    *idx,
1446                    segments.iter().map(|s| instance.func_ref(*s)).collect(),
1447                )
1448            }),
1449    );
1450}
1451
/// Initialize the memories from the provided data initializers.
///
/// Returns a `HeapAccessOutOfBounds` trap if any data segment does not fit
/// entirely inside its target memory's current length.
fn initialize_memories(
    instance: &mut Instance,
    data_initializers: &[DataInitializer<'_>],
) -> Result<(), Trap> {
    for init in data_initializers {
        let memory = instance.get_vmmemory(init.location.memory_index);

        let start = get_memory_init_start(init, instance);
        unsafe {
            // Bounds-check the segment (with overflow check) against the
            // memory's current length before copying any bytes.
            let current_length = memory.vmmemory().as_ref().current_length;
            if start
                .checked_add(init.data.len())
                .map_or(true, |end| end > current_length)
            {
                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
            }
            memory.initialize_with_data(start, init.data)?;
        }
    }

    Ok(())
}
1475
/// Initialize the locally-defined globals from the module's global
/// initializer expressions, writing each value directly into its
/// `VMGlobalDefinition` slot.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // `global_ptr` yields the raw `VMGlobalDefinition` slot; the union
            // field written below is selected by the initializer's type.
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Copy the current value of another (local or imported)
                    // global's whole definition.
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // Null funcref is represented as 0.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    // `unwrap` is expected to hold: every ref.func'd function
                    // has a funcref built by `build_funcrefs`.
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}
1505
1506/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1507/// future funcref operations are just looking up this data.
1508fn build_funcrefs(
1509    module_info: &ModuleInfo,
1510    ctx: &StoreObjects,
1511    imports: &Imports,
1512    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1513    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1514    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1515    vmctx_ptr: *mut VMContext,
1516) -> (
1517    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1518    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1519) {
1520    let mut func_refs =
1521        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1522    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1523
1524    // do imported functions
1525    for import in imports.functions.values() {
1526        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1527    }
1528
1529    // do local functions
1530    for (local_index, func_ptr) in finished_functions.iter() {
1531        let index = module_info.func_index(local_index);
1532        let sig_index = module_info.functions[index];
1533        let type_index = vmshared_signatures[sig_index];
1534        let call_trampoline = function_call_trampolines[sig_index];
1535        let anyfunc = VMCallerCheckedAnyfunc {
1536            func_ptr: func_ptr.0,
1537            type_index,
1538            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1539            call_trampoline,
1540        };
1541        func_refs.push(anyfunc);
1542    }
1543    (
1544        func_refs.into_boxed_slice(),
1545        imported_func_refs.into_boxed_slice(),
1546    )
1547}