wasmer_vm/instance/
mod.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! An `Instance` contains all the runtime state used by execution of
5//! a WebAssembly module (except its callstack and register state). An
6//! `VMInstance` is a wrapper around `Instance` that manages
7//! how it is allocated and deallocated.
8
9mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{catch_traps, Trap, TrapCode};
16use crate::vmcontext::{
17    memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
18    VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
19    VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
20    VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTagImport,
21    VMTrampoline,
22};
23use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMFunctionBody, VMTag};
24use crate::{LinearMemory, NotifyLocation};
25use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
26pub use allocator::InstanceAllocator;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
39use wasmer_types::{
40    DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41    LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, LocalTagIndex,
42    MemoryError, MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer,
43    TagIndex, VMOffsets,
44};
45
/// A WebAssembly instance.
///
/// The type is dynamically-sized. Indeed, the `vmctx` field can
/// contain various data. That's why the type has a C representation
/// to ensure that the `vmctx` field is last. See the documentation of
/// the `vmctx` field to learn more.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this `Instance` was instantiated from.
    module: Arc<ModuleInfo>,

    /// Pointer to the object store of the context owning this instance.
    context: *mut StoreObjects,

    /// Offsets in the `vmctx` region.
    offsets: VMOffsets,

    /// WebAssembly linear memory data.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// WebAssembly table data.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// WebAssembly global data.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// WebAssembly tag data.
    tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to functions in executable memory.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// Pointers to function call trampolines in executable memory.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive elements in this instantiation. As `elem.drop`s happen, these
    /// entries get removed.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments from our module. As `data.drop`s happen, entries
    /// get removed. A missing entry is considered equivalent to an empty slice.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions defined by this instance.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Mapping of function indices to their func ref backing data. `VMFuncRef`s
    /// will point to elements here for functions imported by this instance.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Additional context used by compiled WebAssembly code. This
    /// field is last, and represents a dynamically-sized array that
    /// extends beyond the nominal end of the struct (similar to a
    /// flexible array member).
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107        formatter.debug_struct("Instance").finish()
108    }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Helper function to access various locations offset from our `*mut
    /// VMContext` object.
    ///
    /// # Safety
    /// `offset` must stay within the `vmctx` region described by
    /// `self.offsets`, and the resulting pointer must actually address a `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }
120
    /// Return the `ModuleInfo` this instance was created from, as a refcounted handle.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }
124
    /// Return a plain reference to the `ModuleInfo` this instance was created from.
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }
128
    /// Return the store objects of the owning context.
    fn context(&self) -> &StoreObjects {
        // SAFETY(review): assumes `self.context` points to a live
        // `StoreObjects` for as long as this instance exists — upheld by the
        // owning store, not checked here.
        unsafe { &*self.context }
    }
132
    /// Return the store objects of the owning context, mutably.
    fn context_mut(&mut self) -> &mut StoreObjects {
        // SAFETY(review): assumes `self.context` is live and not aliased by
        // another mutable borrow — upheld by the owning store.
        unsafe { &mut *self.context }
    }
136
    /// Offsets in the `vmctx` region.
    ///
    /// These are computed per-module and drive all `vmctx_plus_offset` calls below.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
141
    /// Return a pointer to the `VMSharedSignatureIndex`s.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }
146
    /// Return the indexed `VMFunctionImport`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's function
        // imports — no bounds check is performed here.
        unsafe { &*self.imported_functions_ptr().add(index) }
    }
152
    /// Return a pointer to the `VMFunctionImport`s.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }
157
    /// Return the indexed `VMTableImport`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's table imports.
        unsafe { &*self.imported_tables_ptr().add(index) }
    }
163
    /// Return a pointer to the `VMTableImport`s.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }
168
    /// Return the indexed `VMMemoryImport`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's memory imports.
        unsafe { &*self.imported_memories_ptr().add(index) }
    }
174
    /// Return a pointer to the `VMMemoryImport`s.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }
179
    /// Return the indexed `VMGlobalImport`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's global imports.
        unsafe { &*self.imported_globals_ptr().add(index) }
    }
185
    /// Return a pointer to the `VMGlobalImport`s.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }
190
    /// Return the indexed `VMTagImport`.
    fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's tag imports.
        unsafe { &*self.imported_tags_ptr().add(index) }
    }
196
    /// Return a pointer to the `VMTagImport`s.
    fn imported_tags_ptr(&self) -> *mut VMTagImport {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tags_begin()) }
    }
201
    /// Return the indexed `VMTableDefinition` by value (it is `Copy`-read out of vmctx).
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        // SAFETY: `table_ptr` yields a valid, aligned pointer into the vmctx table array.
        unsafe { *self.table_ptr(index).as_ref() }
    }
207
    #[allow(dead_code)]
    /// Updates the value for a defined table to `VMTableDefinition`.
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        // SAFETY: `table_ptr` yields a valid, aligned pointer into the vmctx table array.
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }
215
    /// Return a pointer to the indexed `VMTableDefinition` inside `vmctx`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's local tables.
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }
221
    /// Return a pointer to the `VMTableDefinition`s.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
226
227    #[allow(dead_code)]
228    /// Get a locally defined or imported memory.
229    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
230        if let Some(local_index) = self.module.local_memory_index(index) {
231            self.memory(local_index)
232        } else {
233            let import = self.imported_memory(index);
234            unsafe { *import.definition.as_ref() }
235        }
236    }
237
    /// Return the indexed `VMMemoryDefinition` by value.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        // SAFETY: `memory_ptr` yields a valid, aligned pointer into the vmctx memory array.
        unsafe { *self.memory_ptr(index).as_ref() }
    }
242
    #[allow(dead_code)]
    /// Set the indexed memory to `VMMemoryDefinition`.
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        // SAFETY: `memory_ptr` yields a valid, aligned pointer into the vmctx memory array.
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }
250
    /// Return a pointer to the indexed `VMMemoryDefinition` inside `vmctx`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's local memories.
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }
256
    /// Return a pointer to the `VMMemoryDefinition`s.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
261
    /// Get a locally defined or imported memory as a `VMMemory` store handle.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // SAFETY(review): assumes `self.context` is non-null and points to
            // the live store that owns these memory handles.
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            // SAFETY(review): same assumption on `self.context` as the branch above.
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }
276
    /// Get a locally defined or imported memory, mutably.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // SAFETY(review): assumes `self.context` is non-null, live, and not
            // otherwise mutably borrowed.
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            // SAFETY(review): same assumption on `self.context` as the branch above.
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }
291
    /// Get a locally defined memory as mutable.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        // SAFETY(review): assumes `self.context` is non-null, live, and not
        // otherwise mutably borrowed.
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
301
    /// Return a clone of the indexed `VMGlobalDefinition`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        // SAFETY: `global_ptr` yields a valid, aligned pointer to the global's definition.
        unsafe { self.global_ptr(index).as_ref().clone() }
    }
306
    /// Set the indexed global to `VMGlobalDefinition`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        // SAFETY: `global_ptr` yields a valid, aligned pointer to the global's definition.
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }
314
    /// Return a pointer to the indexed `VMGlobalDefinition`.
    ///
    /// Unlike tables/memories, the vmctx stores *pointers* to global
    /// definitions, so this dereferences one level of indirection.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        // SAFETY(review): assumes `index` is within this module's local
        // globals and the stored pointer is non-null (checked by `NonNull::new`).
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }
321
    /// Return a pointer to the array of `*mut VMGlobalDefinition` in `vmctx`.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }
326
    /// Return a pointer to the `VMBuiltinFunctionsArray`.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        // SAFETY: the offset comes from `VMOffsets` computed for this module's vmctx layout.
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
331
    /// Return a reference to the vmctx used by compiled wasm code.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }
336
    /// Return a raw pointer to the vmctx used by compiled wasm code.
    ///
    /// Note the const-to-mut cast: compiled code writes through this pointer
    /// even though we only hold `&self` here.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
341
    /// Invoke the WebAssembly start function of the instance, if one is present.
    ///
    /// Returns `Ok(())` immediately when the module declares no start
    /// function. Traps raised while running it are caught by `catch_traps`
    /// and surfaced as `Err`.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: a locally defined start function runs with this
        // instance's vmctx; an imported one carries its own function context.
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        // Make the call.
        // SAFETY(review): assumes `callee_address` points to compiled code
        // with the `unsafe extern "C" fn(VMFunctionContext)` ABI expected of
        // a wasm start function (no params, no results).
        unsafe {
            catch_traps(trap_handler, config, move || {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionContext)>(
                    callee_address,
                )(callee_vmctx)
            })
        }
    }
383
    /// Return the offset from the vmctx pointer to its containing `Instance`.
    ///
    /// Used to recover the `Instance` from a raw `*mut VMContext` handed to
    /// compiled code (the vmctx is the last field of the `#[repr(C)]` struct).
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
389
390    /// Return the table index for the given `VMTableDefinition`.
391    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
392        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
393        let end: *const VMTableDefinition = table;
394        // TODO: Use `offset_from` once it stablizes.
395        let index = LocalTableIndex::new(
396            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
397        );
398        assert_lt!(index.index(), self.tables.len());
399        index
400    }
401
402    /// Return the memory index for the given `VMMemoryDefinition`.
403    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
404        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
405        let end: *const VMMemoryDefinition = memory;
406        // TODO: Use `offset_from` once it stablizes.
407        let index = LocalMemoryIndex::new(
408            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
409        );
410        assert_lt!(index.index(), self.memories.len());
411        index
412    }
413
    /// Grow memory by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    ///
    /// # Panics
    /// Panics if `memory_index` is not a valid local memory index.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        // Copy the handle out first so the store can be borrowed mutably below.
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
432
    /// Grow imported memory by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    ///
    /// # Safety
    /// This and `imported_memory_size` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        // Copy the handle out first so the store can be borrowed mutably below.
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
453
    /// Returns the number of allocated wasm pages.
    ///
    /// # Panics
    /// Panics if `memory_index` is not a valid local memory index.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }
462
    /// Returns the number of allocated wasm pages in an imported memory.
    ///
    /// # Safety
    /// This and `imported_memory_grow` are currently unsafe because they
    /// dereference the memory import's pointers.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }
473
    /// Returns the number of elements in a given table.
    ///
    /// # Panics
    /// Panics if `table_index` is not a valid local table index.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }
482
    /// Returns the number of elements in a given imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
492
    /// Grow table by the specified amount of elements.
    ///
    /// Returns `None` if table can't be grown by the specified amount
    /// of elements.
    ///
    /// # Panics
    /// Panics if `table_index` is not a valid local table index.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        // Copy the handle out first so the store can be borrowed mutably below.
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
509
    /// Grow an imported table by the specified amount of elements.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
524
    /// Get table element by index.
    ///
    /// Returns `None` when `index` is out of bounds for the table.
    ///
    /// # Panics
    /// Panics if `table_index` is not a valid local table index.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }
537
    /// Returns the element at the given index in an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }
551
    /// Set table element by index.
    ///
    /// # Panics
    /// Panics if `table_index` is not a valid local table index.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        // Copy the handle out first so the store can be borrowed mutably below.
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }
565
    /// Set table element by index for an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid, imported table index.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
580
581    /// Get a `VMFuncRef` for the given `FunctionIndex`.
582    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
583        if function_index == FunctionIndex::reserved_value() {
584            None
585        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
586            Some(VMFuncRef(NonNull::from(
587                &self.funcrefs[local_function_index],
588            )))
589        } else {
590            Some(VMFuncRef(self.imported_funcrefs[function_index]))
591        }
592    }
593
    /// The `table.init` operation: initializes a portion of a table with a
    /// passive element.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    /// or the range within the passive element is out of bounds.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table_handle(table_index);
        // SAFETY(review): assumes `self.context` is live; mirrors `context_mut`.
        let table = unsafe { table.get_mut(&mut *self.context) };
        // A dropped (or never-passive) element segment behaves as empty.
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // Full bounds check up front (including u32 overflow of src+len /
        // dst+len) so the copy loop below cannot fail partway through.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
634
    /// The `table.fill` operation: fills a portion of a table with a given value.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the range within the table is out of bounds
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-table-init

        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        // Bounds check up front (checked_add also catches u32 overflow) so
        // the fill loop below cannot fail partway through.
        if start_index
            .checked_add(len)
            .map_or(true, |n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
667
    /// Drop an element: the `elem.drop` operation, removing the passive
    /// element segment so later `table.init`s see it as empty.
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-elem-drop

        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
        // Note that we don't check that we actually removed an element because
        // dropping a non-passive element is a no-op (not a trap).
    }
677
    /// Do a `memory.copy` for a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/reference-types/core/exec/instructions.html#exec-memory-copy

        let memory = self.memory(memory_index);
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(&memory, dst, src, len) }
    }
697
    /// Perform a `memory.copy` on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error when the source or destination ranges are out of
    /// bounds.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // SAFETY(review): assumes the import's definition pointer is valid.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory copy is not synchronized and is not atomic:
        unsafe { memory_copy(memory, dst, src, len) }
    }
711
    /// Perform the `memory.fill` operation on a locally defined memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(&memory, dst, val, len) }
    }
728
    /// Perform the `memory.fill` operation on an imported memory.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the memory range is out of bounds.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        // SAFETY(review): assumes the import's definition pointer is valid.
        let memory = unsafe { import.definition.as_ref() };
        // The following memory fill is not synchronized and is not atomic:
        unsafe { memory_fill(memory, dst, val, len) }
    }
746
    /// Performs the `memory.init` operation.
    ///
    /// # Errors
    ///
    /// Returns a `Trap` error if the destination range is out of this module's
    /// memory's bounds or if the source range is outside the data segment's
    /// bounds.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        // https://webassembly.github.io/bulk-memory-operations/core/exec/instructions.html#exec-memory-init

        let memory = self.get_vmmemory(memory_index);
        // A dropped (or never-passive) data segment behaves as empty.
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        // SAFETY(review): assumes the memory's definition pointer is valid.
        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // Full bounds check up front (checked_add also catches u32 overflow)
        // before touching memory.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
781
    /// Drop the given data segment, truncating its length to zero.
    ///
    /// Dropping an unknown/non-passive segment is a no-op, matching `data.drop` semantics.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }
787
788    /// Get a table by index regardless of whether it is locally-defined or an
789    /// imported, foreign table.
790    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
791        if let Some(local_table_index) = self.module.local_table_index(table_index) {
792            self.get_local_table(local_table_index)
793        } else {
794            self.get_foreign_table(table_index)
795        }
796    }
797
    /// Get a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        // Copy the handle out first so the store can be borrowed mutably below.
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }
803
    /// Get an imported, foreign table.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        // Copy the handle out first so the store can be borrowed mutably below.
        let table = import.handle;
        table.get_mut(self.context_mut())
    }
810
811    /// Get a table handle by index regardless of whether it is locally-defined
812    /// or an imported, foreign table.
813    pub(crate) fn get_table_handle(
814        &mut self,
815        table_index: TableIndex,
816    ) -> InternalStoreHandle<VMTable> {
817        if let Some(local_table_index) = self.module.local_table_index(table_index) {
818            self.tables[local_table_index]
819        } else {
820            self.imported_table(table_index).handle
821        }
822    }
823
824    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
825        let location = NotifyLocation { address: dst };
826        let timeout = if timeout < 0 {
827            None
828        } else {
829            Some(std::time::Duration::from_nanos(timeout as u64))
830        };
831        match memory.do_wait(location, timeout) {
832            Ok(count) => Ok(count),
833            Err(_err) => {
834                // ret is None if there is more than 2^32 waiter in queue or some other error
835                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
836            }
837        }
838    }
839
840    /// Perform an Atomic.Wait32
841    pub(crate) fn local_memory_wait32(
842        &mut self,
843        memory_index: LocalMemoryIndex,
844        dst: u32,
845        val: u32,
846        timeout: i64,
847    ) -> Result<u32, Trap> {
848        let memory = self.memory(memory_index);
849        //if ! memory.shared {
850        // We should trap according to spec, but official test rely on not trapping...
851        //}
852
853        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };
854
855        if let Ok(mut ret) = ret {
856            if ret == 0 {
857                let memory = self.get_local_vmmemory_mut(memory_index);
858                ret = Self::memory_wait(memory, dst, timeout)?;
859            }
860            Ok(ret)
861        } else {
862            ret
863        }
864    }
865
866    /// Perform an Atomic.Wait32
867    pub(crate) fn imported_memory_wait32(
868        &mut self,
869        memory_index: MemoryIndex,
870        dst: u32,
871        val: u32,
872        timeout: i64,
873    ) -> Result<u32, Trap> {
874        let import = self.imported_memory(memory_index);
875        let memory = unsafe { import.definition.as_ref() };
876        //if ! memory.shared {
877        // We should trap according to spec, but official test rely on not trapping...
878        //}
879
880        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
881        if let Ok(mut ret) = ret {
882            if ret == 0 {
883                let memory = self.get_vmmemory_mut(memory_index);
884                ret = Self::memory_wait(memory, dst, timeout)?;
885            }
886            Ok(ret)
887        } else {
888            ret
889        }
890    }
891
892    /// Perform an Atomic.Wait64
893    pub(crate) fn local_memory_wait64(
894        &mut self,
895        memory_index: LocalMemoryIndex,
896        dst: u32,
897        val: u64,
898        timeout: i64,
899    ) -> Result<u32, Trap> {
900        let memory = self.memory(memory_index);
901        //if ! memory.shared {
902        // We should trap according to spec, but official test rely on not trapping...
903        //}
904
905        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };
906
907        if let Ok(mut ret) = ret {
908            if ret == 0 {
909                let memory = self.get_local_vmmemory_mut(memory_index);
910                ret = Self::memory_wait(memory, dst, timeout)?;
911            }
912            Ok(ret)
913        } else {
914            ret
915        }
916    }
917
918    /// Perform an Atomic.Wait64
919    pub(crate) fn imported_memory_wait64(
920        &mut self,
921        memory_index: MemoryIndex,
922        dst: u32,
923        val: u64,
924        timeout: i64,
925    ) -> Result<u32, Trap> {
926        let import = self.imported_memory(memory_index);
927        let memory = unsafe { import.definition.as_ref() };
928        //if ! memory.shared {
929        // We should trap according to spec, but official test rely on not trapping...
930        //}
931
932        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };
933
934        if let Ok(mut ret) = ret {
935            if ret == 0 {
936                let memory = self.get_vmmemory_mut(memory_index);
937                ret = Self::memory_wait(memory, dst, timeout)?;
938            }
939            Ok(ret)
940        } else {
941            ret
942        }
943    }
944
945    /// Perform an Atomic.Notify
946    pub(crate) fn local_memory_notify(
947        &mut self,
948        memory_index: LocalMemoryIndex,
949        dst: u32,
950        count: u32,
951    ) -> Result<u32, Trap> {
952        let memory = self.get_local_vmmemory_mut(memory_index);
953        // fetch the notifier
954        let location = NotifyLocation { address: dst };
955        Ok(memory.do_notify(location, count))
956    }
957
958    /// Perform an Atomic.Notify
959    pub(crate) fn imported_memory_notify(
960        &mut self,
961        memory_index: MemoryIndex,
962        dst: u32,
963        count: u32,
964    ) -> Result<u32, Trap> {
965        let memory = self.get_vmmemory_mut(memory_index);
966        // fetch the notifier
967        let location = NotifyLocation { address: dst };
968        Ok(memory.do_notify(location, count))
969    }
970}
971
/// A handle holding an `Instance` of a WebAssembly module.
///
/// This is more or less a public facade of the private `Instance`,
/// providing useful higher-level API.
///
/// Note: the derived `PartialEq`/`Eq` compare the raw `instance` pointer,
/// so two handles are equal only if they refer to the same allocation.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// The layout of `Instance` (which can vary).
    ///
    /// Stored so `Drop` can hand the exact layout back to
    /// `std::alloc::dealloc`.
    instance_layout: Layout,

    /// The `Instance` itself.
    ///
    /// `Instance` must not be dropped manually by Rust, because it's
    /// allocated manually with `alloc` and a specific layout (Rust
    /// would be able to drop `Instance` itself but it will imply a
    /// memory leak because of `alloc`).
    ///
    /// No one in the code has a copy of the `Instance`'s
    /// pointer. `Self` is the only one.
    instance: NonNull<Instance>,
}
992
/// `VMInstance`s are created with an `InstanceAllocator`
/// and it will "consume" the memory.
/// So the `Drop` here actually frees it (else it would be leaked).
impl Drop for VMInstance {
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // SAFETY: `self.instance` is the only pointer to this manually
            // allocated `Instance` (see the field docs above), and
            // `instance_layout` is the layout it was allocated with, so
            // dropping in place and deallocating happens exactly once.
            // Need to drop all the actual Instance members
            instance_ptr.drop_in_place();
            // And then free the memory allocated for the Instance itself
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1008
impl VMInstance {
    /// Create a new `VMInstance` pointing at a new [`Instance`].
    ///
    /// # Safety
    ///
    /// This method is not necessarily inherently unsafe to call, but in general
    /// the APIs of an `Instance` are quite unsafe and have not been really
    /// audited for safety that much. As a result the unsafety here on this
    /// method is a low-overhead way of saying “this is an extremely unsafe type
    /// to work with”.
    ///
    /// Extreme care must be taken when working with `VMInstance` and it's
    /// recommended to have relatively intimate knowledge of how it works
    /// internally if you'd like to do so. If possible it's recommended to use
    /// the `wasmer` crate API rather than this type since that is vetted for
    /// safety.
    ///
    /// However the following must be taken care of before calling this function:
    /// - The memory at `instance.tables_ptr()` must be initialized with data for
    ///   all the local tables.
    /// - The memory at `instance.memories_ptr()` must be initialized with data for
    ///   all the local memories.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        // Pointers to the `VMGlobalDefinition`s backing each local global;
        // these are copied into the vmctx-adjacent region further below.
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.get(context).vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        // Passive data segments are stored behind `Arc` so they can be shared
        // without copying the bytes again.
        let passive_data = RefCell::new(
            module
                .passive_data
                .clone()
                .into_iter()
                .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                .collect::<HashMap<_, _>>(),
        );

        let handle = {
            let offsets = allocator.offsets().clone();
            // use dummy value to create an instance so we can get the vmctx pointer
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
            // Create the `Instance`. The unique, the One.
            let instance = Instance {
                module,
                context,
                offsets,
                memories: finished_memories,
                tables: finished_tables,
                tags: finished_tags,
                globals: finished_globals,
                functions: finished_functions,
                function_call_trampolines: finished_function_call_trampolines,
                passive_elements: Default::default(),
                passive_data,
                funcrefs,
                imported_funcrefs,
                vmctx: VMContext {},
            };

            let mut instance_handle = allocator.into_vminstance(instance);

            // Set the funcrefs after we've built the instance, since they need
            // the final vmctx pointer.
            {
                let instance = instance_handle.instance_mut();
                let vmctx_ptr = instance.vmctx_ptr();
                (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                    &instance.module,
                    context,
                    &imports,
                    &instance.functions,
                    &vmshared_signatures,
                    &instance.function_call_trampolines,
                    vmctx_ptr,
                );
            }

            instance_handle
        };
        let instance = handle.instance();

        // Copy the signature and import tables into the memory laid out after
        // the `VMContext` (offsets come from `VMOffsets`).
        // NOTE(review): sources and destinations never overlap here, so
        // `ptr::copy_nonoverlapping` would also be correct; kept as
        // `ptr::copy` to preserve the original code exactly.
        ptr::copy(
            vmshared_signatures.values().as_slice().as_ptr(),
            instance.signature_ids_ptr(),
            vmshared_signatures.len(),
        );
        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr(),
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr(),
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr(),
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr(),
            imports.globals.len(),
        );
        // these should already be set, add asserts here? for:
        // - instance.tables_ptr() as *mut VMTableDefinition
        // - instance.memories_ptr() as *mut VMMemoryDefinition
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr(),
            VMBuiltinFunctionsArray::initialized(),
        );

        // Perform infallible initialization in this constructor, while fallible
        // initialization (table/memory initializers, start function) is
        // deferred to `finish_instantiation`.
        initialize_passive_elements(instance);
        initialize_globals(instance);

        Ok(handle)
    }

    /// Return a reference to the contained `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Return a mutable reference to the contained `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

    /// Finishes the instantiation process started by `VMInstance::new`:
    /// applies the table and memory initializers, then invokes the module's
    /// start function as required by the WebAssembly spec.
    ///
    /// # Safety
    ///
    /// Only safe to call immediately after instantiation.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        // Apply the initializers.
        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        // The WebAssembly spec specifies that the start function is
        // invoked automatically at instantiation time.
        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Return a reference to the vmctx used by compiled wasm code.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Return a raw pointer to the vmctx used by compiled wasm code.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Return a reference to the `VMOffsets` to get offsets in the
    /// `Self::vmctx_ptr` region. Be careful when doing pointer
    /// arithmetic!
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// Return a reference-counting pointer to a module.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Return a reference to a module.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Lookup an export with the given name.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Lookup an export with the given export declaration.
    ///
    /// NOTE(review): for a locally-defined exported function this creates a
    /// fresh `VMFunction` store handle on every call (see
    /// `InternalStoreHandle::new` below), so repeated lookups of the same
    /// function accumulate handles in the store.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // A VMFunction is lazily created only for functions that are
                    // exported.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        // Any function received is already static at this point as:
                        // 1. All locally defined functions in the Wasm have a static signature.
                        // 2. All the imported functions are already static (because
                        //    they point to the trampolines rather than the dynamic addresses).
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = if let Some(def_index) = instance.module.local_tag_index(index) {
                    instance.tags[def_index]
                } else {
                    let import = instance.imported_tag(index);
                    import.handle
                };
                VMExtern::Tag(handle)
            }
        }
    }

    /// Return an iterator over the exports of this instance.
    ///
    /// Specifically, it provides access to the key-value pairs, where the keys
    /// are export names, and the values are export declarations which can be
    /// resolved `lookup_by_declaration`.
    pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
        self.module().exports.iter()
    }

    /// Return the memory index for the given `VMMemoryDefinition` in this instance.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Grow memory in this instance by the specified amount of pages.
    ///
    /// Returns `None` if memory can't be grown by the specified amount
    /// of pages.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Return the table index for the given `VMTableDefinition` in this instance.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Grow table in this instance by the specified number of elements.
    ///
    /// Returns `None` if the table can't be grown by the specified number
    /// of elements.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Get table element reference.
    ///
    /// Returns `None` if index is out of bounds.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Set table element reference.
    ///
    /// Returns an error if the index is out of bounds
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Get a table defined locally within this module.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1359
1360/// Compute the offset for a memory data initializer.
1361fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1362    let mut start = init.location.offset;
1363
1364    if let Some(base) = init.location.base {
1365        let val = unsafe {
1366            if let Some(def_index) = instance.module.local_global_index(base) {
1367                instance.global(def_index).val.u32
1368            } else {
1369                instance.imported_global(base).definition.as_ref().val.u32
1370            }
1371        };
1372        start += usize::try_from(val).unwrap();
1373    }
1374
1375    start
1376}
1377
1378#[allow(clippy::mut_from_ref)]
1379#[allow(dead_code)]
1380/// Return a byte-slice view of a memory's data.
1381unsafe fn get_memory_slice<'instance>(
1382    init: &DataInitializer<'_>,
1383    instance: &'instance Instance,
1384) -> &'instance mut [u8] {
1385    let memory = if let Some(local_memory_index) = instance
1386        .module
1387        .local_memory_index(init.location.memory_index)
1388    {
1389        instance.memory(local_memory_index)
1390    } else {
1391        let import = instance.imported_memory(init.location.memory_index);
1392        *import.definition.as_ref()
1393    };
1394    slice::from_raw_parts_mut(memory.base, memory.current_length)
1395}
1396
1397/// Compute the offset for a table element initializer.
1398fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1399    let mut start = init.offset;
1400
1401    if let Some(base) = init.base {
1402        let val = unsafe {
1403            if let Some(def_index) = instance.module.local_global_index(base) {
1404                instance.global(def_index).val.u32
1405            } else {
1406                instance.imported_global(base).definition.as_ref().val.u32
1407            }
1408        };
1409        start += usize::try_from(val).unwrap();
1410    }
1411
1412    start
1413}
1414
1415/// Initialize the table memory from the provided initializers.
1416fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
1417    let module = Arc::clone(&instance.module);
1418    for init in &module.table_initializers {
1419        let start = get_table_init_start(init, instance);
1420        let table = instance.get_table_handle(init.table_index);
1421        let table = unsafe { table.get_mut(&mut *instance.context) };
1422
1423        if start
1424            .checked_add(init.elements.len())
1425            .map_or(true, |end| end > table.size() as usize)
1426        {
1427            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1428        }
1429
1430        if let wasmer_types::Type::FuncRef = table.ty().ty {
1431            for (i, func_idx) in init.elements.iter().enumerate() {
1432                let anyfunc = instance.func_ref(*func_idx);
1433                table
1434                    .set(
1435                        u32::try_from(start + i).unwrap(),
1436                        TableElement::FuncRef(anyfunc),
1437                    )
1438                    .unwrap();
1439            }
1440        } else {
1441            for i in 0..init.elements.len() {
1442                table
1443                    .set(
1444                        u32::try_from(start + i).unwrap(),
1445                        TableElement::ExternRef(None),
1446                    )
1447                    .unwrap();
1448            }
1449        }
1450    }
1451
1452    Ok(())
1453}
1454
1455/// Initialize the `Instance::passive_elements` map by resolving the
1456/// `ModuleInfo::passive_elements`'s `FunctionIndex`s into `VMCallerCheckedAnyfunc`s for
1457/// this instance.
1458fn initialize_passive_elements(instance: &Instance) {
1459    let mut passive_elements = instance.passive_elements.borrow_mut();
1460    debug_assert!(
1461        passive_elements.is_empty(),
1462        "should only be called once, at initialization time"
1463    );
1464
1465    passive_elements.extend(
1466        instance
1467            .module
1468            .passive_elements
1469            .iter()
1470            .filter(|(_, segments)| !segments.is_empty())
1471            .map(|(idx, segments)| {
1472                (
1473                    *idx,
1474                    segments.iter().map(|s| instance.func_ref(*s)).collect(),
1475                )
1476            }),
1477    );
1478}
1479
1480/// Initialize the table memory from the provided initializers.
1481fn initialize_memories(
1482    instance: &mut Instance,
1483    data_initializers: &[DataInitializer<'_>],
1484) -> Result<(), Trap> {
1485    for init in data_initializers {
1486        let memory = instance.get_vmmemory(init.location.memory_index);
1487
1488        let start = get_memory_init_start(init, instance);
1489        unsafe {
1490            let current_length = memory.vmmemory().as_ref().current_length;
1491            if start
1492                .checked_add(init.data.len())
1493                .map_or(true, |end| end > current_length)
1494            {
1495                return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1496            }
1497            memory.initialize_with_data(start, init.data)?;
1498        }
1499    }
1500
1501    Ok(())
1502}
1503
/// Write the initial value of every local global into its
/// `VMGlobalDefinition` in the vmctx region.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        unsafe {
            // Raw pointer to this global's definition; written through a
            // union (`val`), so each arm picks the matching field.
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Copy the whole definition of the referenced global,
                    // whether it lives locally or in an import.
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // Null reference is represented as a zero funcref.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}
1533
1534/// Eagerly builds all the `VMFuncRef`s for imported and local functions so that all
1535/// future funcref operations are just looking up this data.
1536fn build_funcrefs(
1537    module_info: &ModuleInfo,
1538    ctx: &StoreObjects,
1539    imports: &Imports,
1540    finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1541    vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1542    function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1543    vmctx_ptr: *mut VMContext,
1544) -> (
1545    BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1546    BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1547) {
1548    let mut func_refs =
1549        PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1550    let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1551
1552    // do imported functions
1553    for import in imports.functions.values() {
1554        imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1555    }
1556
1557    // do local functions
1558    for (local_index, func_ptr) in finished_functions.iter() {
1559        let index = module_info.func_index(local_index);
1560        let sig_index = module_info.functions[index];
1561        let type_index = vmshared_signatures[sig_index];
1562        let call_trampoline = function_call_trampolines[sig_index];
1563        let anyfunc = VMCallerCheckedAnyfunc {
1564            func_ptr: func_ptr.0,
1565            type_index,
1566            vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1567            call_trampoline,
1568        };
1569        func_refs.push(anyfunc);
1570    }
1571    (
1572        func_refs.into_boxed_slice(),
1573        imported_func_refs.into_boxed_slice(),
1574    )
1575}