1mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{catch_traps, Trap, TrapCode};
16use crate::vmcontext::{
17 memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
18 VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
19 VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
20 VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTagImport,
21 VMTrampoline,
22};
23use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMFunctionBody, VMTag};
24use crate::{LinearMemory, NotifyLocation};
25use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
26pub use allocator::InstanceAllocator;
27use memoffset::offset_of;
28use more_asserts::assert_lt;
29use std::alloc::Layout;
30use std::cell::RefCell;
31use std::collections::HashMap;
32use std::convert::TryFrom;
33use std::fmt;
34use std::mem;
35use std::ptr::{self, NonNull};
36use std::slice;
37use std::sync::Arc;
38use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
39use wasmer_types::{
40 DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
41 LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, LocalTagIndex,
42 MemoryError, MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer,
43 TagIndex, VMOffsets,
44};
45
/// A WebAssembly instance.
///
/// `repr(C)` because `vmctx` must stay the *last* field: the variable-size
/// `VMContext` payload (signatures, imports, definitions, builtins) is laid
/// out immediately after this header in the same allocation, at the offsets
/// described by `offsets`.
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The module this instance was instantiated from.
    module: Arc<ModuleInfo>,

    /// Raw pointer to the store's objects; the store must outlive this instance.
    context: *mut StoreObjects,

    /// Layout offsets of the vmctx payload that follows this struct.
    offsets: VMOffsets,

    /// Store handles for the locally-defined memories.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// Store handles for the locally-defined tables.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// Store handles for the locally-defined globals.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Store handles for the locally-defined tags.
    tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,

    /// Pointers to the compiled bodies of locally-defined functions.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// One call trampoline per signature in the module.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive element segments, consumed by `table.init` / `elem.drop`.
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments, consumed by `memory.init` / `data.drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// `VMCallerCheckedAnyfunc` records for locally-defined functions.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Pointers to the anyfunc records of imported functions.
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// The `VMContext` header — MUST remain the trailing field (see above).
    vmctx: VMContext,
}
104
105impl fmt::Debug for Instance {
106 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
107 formatter.debug_struct("Instance").finish()
108 }
109}
110
111#[allow(clippy::cast_ptr_alignment)]
112impl Instance {
    /// Returns a typed pointer `offset` bytes past the start of this
    /// instance's `VMContext`.
    ///
    /// # Safety
    /// `offset` must lie within the vmctx payload and yield a pointer
    /// correctly aligned for `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }
120
    /// Returns the shared `ModuleInfo` this instance was built from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    /// Borrows the module metadata directly.
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }
128
    /// Shared view of the store objects backing this instance.
    fn context(&self) -> &StoreObjects {
        // SAFETY: `context` was set at construction from a live
        // `&mut StoreObjects`; the store is required to outlive the instance.
        unsafe { &*self.context }
    }

    /// Exclusive view of the store objects backing this instance.
    fn context_mut(&mut self) -> &mut StoreObjects {
        // SAFETY: same lifetime argument as `context`; `&mut self` prevents
        // creating overlapping shared views through this instance.
        unsafe { &mut *self.context }
    }

    /// Layout offsets of the vmctx payload.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }
141
    /// Base pointer of the shared-signature-id array inside the vmctx.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }

    /// Borrows the `VMFunctionImport` for an imported function index.
    /// Note: `index` is not bounds-checked against the import count.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Base pointer of the imported-functions array inside the vmctx.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// Borrows the `VMTableImport` for an imported table index (unchecked).
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Base pointer of the imported-tables array inside the vmctx.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// Borrows the `VMMemoryImport` for an imported memory index (unchecked).
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Base pointer of the imported-memories array inside the vmctx.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// Borrows the `VMGlobalImport` for an imported global index (unchecked).
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Base pointer of the imported-globals array inside the vmctx.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// Borrows the `VMTagImport` for an imported tag index (unchecked).
    fn imported_tag(&self, index: TagIndex) -> &VMTagImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tags_ptr().add(index) }
    }

    /// Base pointer of the imported-tags array inside the vmctx.
    fn imported_tags_ptr(&self) -> *mut VMTagImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tags_begin()) }
    }
201
    /// Copies out the `VMTableDefinition` of a local table.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Overwrites the `VMTableDefinition` of a local table.
    #[allow(dead_code)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Pointer to a local table's definition inside the vmctx (unchecked index).
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the table-definitions array inside the vmctx.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }
226
    /// Copies out the `VMMemoryDefinition` for any memory index, resolving
    /// imported memories through their import record.
    #[allow(dead_code)]
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Copies out the `VMMemoryDefinition` of a local memory.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    /// Overwrites the `VMMemoryDefinition` of a local memory.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Pointer to a local memory's definition inside the vmctx (unchecked index).
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the memory-definitions array inside the vmctx.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }
261
    /// Borrows the `VMMemory` for any memory index, local or imported.
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // SAFETY: `context` points at the live store owning these handles.
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Mutably borrows the `VMMemory` for any memory index, local or imported.
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            // SAFETY: as in `get_vmmemory`; `&mut self` guarantees exclusivity.
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Mutably borrows a locally-defined `VMMemory` directly by local index.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }
301
    /// Copies out the definition of a local global.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Overwrites the definition of a local global.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Pointer to a local global's definition.
    ///
    /// Unlike tables/memories, the vmctx stores *pointers* to global
    /// definitions — hence the extra dereference here.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Base pointer of the global-definition-pointer array inside the vmctx.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Pointer to the built-in functions table inside the vmctx.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }
331
    /// Borrows the `VMContext` header that trails this struct.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Raw (mutable) pointer to the `VMContext`, as handed to compiled code.
    /// The const→mut cast is deliberate: generated code mutates the payload.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }
341
    /// Invokes the module's start function, if one is declared.
    ///
    /// Returns `Ok(())` immediately when there is no start function; traps
    /// raised during the call are caught by `catch_traps` and returned as `Err`.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        // Resolve the callee: a locally-compiled body using our own vmctx,
        // or an imported function carrying its own environment.
        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        // SAFETY: `callee_address` points at compiled code with the ABI
        // `unsafe extern "C" fn(VMFunctionContext)`; traps unwind into
        // `catch_traps` rather than crossing this frame.
        unsafe {
            catch_traps(trap_handler, config, move || {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionContext)>(
                    callee_address,
                )(callee_vmctx)
            })
        }
    }
383
    /// Byte offset of the `vmctx` field within `Instance` (used by codegen
    /// to locate the context from an instance pointer).
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }
389
    /// Recovers the local table index from a pointer into the vmctx
    /// table-definitions array, via pointer distance from the array base.
    ///
    /// # Panics
    /// Asserts that the computed index is within the local table count, so a
    /// pointer not derived from this instance's vmctx will panic (or worse).
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recovers the local memory index from a pointer into the vmctx
    /// memory-definitions array, via pointer distance from the array base.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }
413
    /// Grows a locally-defined memory by `delta` pages.
    ///
    /// Returns the previous size in pages on success.
    ///
    /// # Panics
    /// Panics if `memory_index` has no local memory.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Grows an imported memory by `delta` pages.
    ///
    /// # Safety
    /// `memory_index` must be a valid imported-memory index of this module;
    /// the import record is read without bounds checking.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }
453
    /// Current size, in pages, of a locally-defined memory.
    ///
    /// # Panics
    /// Panics if `memory_index` has no local memory.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Current size, in pages, of an imported memory.
    ///
    /// # Safety
    /// `memory_index` must be a valid imported-memory index (unchecked read).
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Current element count of a locally-defined table.
    ///
    /// # Panics
    /// Panics if `table_index` has no local table.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Current element count of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index (unchecked read).
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }
492
    /// Grows a locally-defined table by `delta` elements filled with
    /// `init_value`; returns the previous size, or `None` on failure.
    ///
    /// # Panics
    /// Panics if `table_index` has no local table.
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grows an imported table by `delta` elements filled with `init_value`.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index (unchecked read).
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }
524
    /// Reads element `index` of a locally-defined table; `None` if out of range.
    ///
    /// # Panics
    /// Panics if `table_index` has no local table.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Reads element `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index (unchecked read).
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

    /// Writes `val` at element `index` of a locally-defined table.
    ///
    /// # Panics
    /// Panics if `table_index` has no local table.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Writes `val` at element `index` of an imported table.
    ///
    /// # Safety
    /// `table_index` must be a valid imported-table index (unchecked read).
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }
580
581 pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
583 if function_index == FunctionIndex::reserved_value() {
584 None
585 } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
586 Some(VMFuncRef(NonNull::from(
587 &self.funcrefs[local_function_index],
588 )))
589 } else {
590 Some(VMFuncRef(self.imported_funcrefs[function_index]))
591 }
592 }
593
    /// Implements the `table.init` instruction: copies `len` funcrefs from
    /// passive element segment `elem_index` (starting at `src`) into
    /// `table_index` (starting at `dst`).
    ///
    /// A dropped/absent segment behaves as an empty one; any out-of-bounds
    /// or overflowing range traps before anything is written.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table_handle(table_index);
        // SAFETY: `self.context` points at the live store for this instance.
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // checked_add also rejects u32 overflow of `src+len` / `dst+len`.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
634
    /// Implements the `table.fill` instruction: writes `len` copies of `item`
    /// starting at `start_index`, trapping first if the range is out of bounds.
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        // checked_add also rejects u32 overflow of `start_index + len`.
        if start_index
            .checked_add(len)
            .map_or(true, |n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }
667
668 pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
670 let mut passive_elements = self.passive_elements.borrow_mut();
673 passive_elements.remove(&elem_index);
674 }
677
    /// Implements `memory.copy` within a locally-defined memory.
    /// Bounds checking (and the resulting trap) happens inside `memory_copy`.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// Implements `memory.copy` within an imported memory, resolved through
    /// its import record.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }
711
    /// Implements `memory.fill` within a locally-defined memory.
    /// Bounds checking (and the resulting trap) happens inside `memory_fill`.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// Implements `memory.fill` within an imported memory, resolved through
    /// its import record.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }
746
    /// Implements `memory.init`: copies `len` bytes from passive data segment
    /// `data_index` (starting at `src`) into `memory_index` (starting at `dst`).
    ///
    /// A dropped/absent segment behaves as empty; any out-of-bounds or
    /// overflowing range traps before anything is written.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        // checked_add also rejects u32 overflow of `src+len` / `dst+len`.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }
781
782 pub(crate) fn data_drop(&self, data_index: DataIndex) {
784 let mut passive_data = self.passive_data.borrow_mut();
785 passive_data.remove(&data_index);
786 }
787
788 pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
791 if let Some(local_table_index) = self.module.local_table_index(table_index) {
792 self.get_local_table(local_table_index)
793 } else {
794 self.get_foreign_table(table_index)
795 }
796 }
797
    /// Mutable access to a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Mutable access to an imported table, resolved through its import record.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }
810
811 pub(crate) fn get_table_handle(
814 &mut self,
815 table_index: TableIndex,
816 ) -> InternalStoreHandle<VMTable> {
817 if let Some(local_table_index) = self.module.local_table_index(table_index) {
818 self.tables[local_table_index]
819 } else {
820 self.imported_table(table_index).handle
821 }
822 }
823
824 fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
825 let location = NotifyLocation { address: dst };
826 let timeout = if timeout < 0 {
827 None
828 } else {
829 Some(std::time::Duration::from_nanos(timeout as u64))
830 };
831 match memory.do_wait(location, timeout) {
832 Ok(count) => Ok(count),
833 Err(_err) => {
834 Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
836 }
837 }
838 }
839
    /// Implements `memory.atomic.wait32` on a locally-defined memory:
    /// performs the atomic value check, then waits only when the check
    /// reports 0; nonzero check results are returned without waiting.
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value matched: actually block on the memory.
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implements `memory.atomic.wait32` on an imported memory; same
    /// check-then-wait protocol as the local variant.
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }
891
    /// Implements `memory.atomic.wait64` on a locally-defined memory:
    /// atomic 64-bit value check, then wait only when the check reports 0.
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                // Value matched: actually block on the memory.
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// Implements `memory.atomic.wait64` on an imported memory; same
    /// check-then-wait protocol as the local variant.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }
944
    /// Implements `memory.atomic.notify` on a locally-defined memory,
    /// waking at most `count` waiters at address `dst`; returns the number
    /// reported by `do_notify`.
    pub(crate) fn local_memory_notify(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_local_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }

    /// Implements `memory.atomic.notify` on an imported memory.
    pub(crate) fn imported_memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }
970}
971
/// An owning handle to a heap-allocated [`Instance`] plus its trailing
/// vmctx payload. Equality compares the raw pointer/layout, i.e. identity.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// Layout of the allocation (Instance header + vmctx payload); needed to
    /// deallocate correctly in `Drop`.
    instance_layout: Layout,

    /// Pointer to the owned `Instance`.
    instance: NonNull<Instance>,
}
992
impl Drop for VMInstance {
    /// Drops the owned `Instance` in place, then frees its allocation.
    fn drop(&mut self) {
        let instance_ptr = self.instance.as_ptr();

        unsafe {
            // SAFETY: `instance` is exclusively owned by this handle and was
            // allocated with `instance_layout`; drop the value first, then
            // release the backing memory with the same layout.
            instance_ptr.drop_in_place();
            std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
        }
    }
}
1008
1009impl VMInstance {
    /// Creates a `VMInstance` from the pre-built parts of a module: the
    /// `Instance` header is moved into the allocator's allocation, then the
    /// vmctx payload (signatures, imports, global pointers, builtins) is
    /// populated with raw copies, and passive elements/globals initialized.
    ///
    /// # Safety
    /// `allocator` must have been created for this module's `VMOffsets`, and
    /// every `finished_*` map, `imports` and `vmshared_signatures` must be
    /// consistent with `module`: the `ptr::copy` calls below write exactly
    /// the lengths announced by those inputs into the vmctx arrays.
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_tags: BoxedSlice<LocalTagIndex, InternalStoreHandle<VMTag>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        // Collect the definition pointers of the local globals up front.
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.get(context).vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        let passive_data = RefCell::new(
            module
                .passive_data
                .clone()
                .into_iter()
                .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                .collect::<HashMap<_, _>>(),
        );

        let handle = {
            let offsets = allocator.offsets().clone();
            // Funcref tables start empty; they are rebuilt below once the
            // instance has its final address (they embed the vmctx pointer).
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
            let instance = Instance {
                module,
                context,
                offsets,
                memories: finished_memories,
                tables: finished_tables,
                tags: finished_tags,
                globals: finished_globals,
                functions: finished_functions,
                function_call_trampolines: finished_function_call_trampolines,
                passive_elements: Default::default(),
                passive_data,
                funcrefs,
                imported_funcrefs,
                vmctx: VMContext {},
            };

            let mut instance_handle = allocator.into_vminstance(instance);

            {
                // Now that the Instance has its final location, build the
                // funcref tables against the real vmctx pointer.
                let instance = instance_handle.instance_mut();
                let vmctx_ptr = instance.vmctx_ptr();
                (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                    &instance.module,
                    context,
                    &imports,
                    &instance.functions,
                    &vmshared_signatures,
                    &instance.function_call_trampolines,
                    vmctx_ptr,
                );
            }

            instance_handle
        };
        let instance = handle.instance();

        // Populate the vmctx arrays. Each copy writes exactly `len` records
        // at the offset the accessor derives from `VMOffsets`.
        ptr::copy(
            vmshared_signatures.values().as_slice().as_ptr(),
            instance.signature_ids_ptr(),
            vmshared_signatures.len(),
        );
        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr(),
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr(),
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr(),
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr(),
            imports.globals.len(),
        );
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr(),
            VMBuiltinFunctionsArray::initialized(),
        );

        initialize_passive_elements(instance);
        initialize_globals(instance);

        Ok(handle)
    }
1148
    /// Shared reference to the owned `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Exclusive reference to the owned `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }
1158
    /// Finishes instantiation: applies table and memory (data) initializers,
    /// then runs the module's start function (if any).
    ///
    /// # Safety
    /// `data_initializers` must belong to this instance's module, and this
    /// should be called once, right after `new` — the initializers assume a
    /// freshly-created instance (NOTE(review): repeat-call behavior is not
    /// guarded here; confirm callers enforce single use).
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }
1181
    /// Borrows the instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Raw pointer to the instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// Layout offsets of the instance's vmctx payload.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// The shared `ModuleInfo` this instance was built from.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Borrows the module metadata directly.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }
1208
1209 pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
1211 let export = *self.module_ref().exports.get(field)?;
1212
1213 Some(self.lookup_by_declaration(export))
1214 }
1215
    /// Resolves an `ExportIndex` to a `VMExtern` store handle.
    ///
    /// Local functions get a fresh `VMFunction` registered in the store on
    /// every call; tables, memories, globals and tags reuse their existing
    /// handles (imported items resolve through their import records).
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    // Wrap the local function's anyfunc record in a new
                    // store-owned VMFunction.
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }

            ExportIndex::Tag(index) => {
                let handle = if let Some(def_index) = instance.module.local_tag_index(index) {
                    instance.tags[def_index]
                } else {
                    let import = instance.imported_tag(index);
                    import.handle
                };
                VMExtern::Tag(handle)
            }
        }
    }
1286
    /// Iterates over the module's exports (name → export index).
    pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
        self.module().exports.iter()
    }
1295
    /// Delegates to [`Instance::memory_index`].
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// Delegates to [`Instance::memory_grow`].
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// Delegates to [`Instance::table_index`].
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// Delegates to [`Instance::table_grow`].
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// Delegates to [`Instance::table_get`].
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// Delegates to [`Instance::table_set`].
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// Delegates to [`Instance::get_local_table`].
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
1358}
1359
1360fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1362 let mut start = init.location.offset;
1363
1364 if let Some(base) = init.location.base {
1365 let val = unsafe {
1366 if let Some(def_index) = instance.module.local_global_index(base) {
1367 instance.global(def_index).val.u32
1368 } else {
1369 instance.imported_global(base).definition.as_ref().val.u32
1370 }
1371 };
1372 start += usize::try_from(val).unwrap();
1373 }
1374
1375 start
1376}
1377
/// Returns the memory targeted by a data initializer as a raw mutable byte
/// slice, resolving imported memories through their import record.
///
/// # Safety
/// The returned slice aliases the memory's backing storage, so the caller
/// must ensure exclusive access for its lifetime. NOTE(review): the slice is
/// built from a snapshot of `base`/`current_length`, so it presumably must
/// not be held across a memory grow — confirm.
#[allow(clippy::mut_from_ref)]
#[allow(dead_code)]
unsafe fn get_memory_slice<'instance>(
    init: &DataInitializer<'_>,
    instance: &'instance Instance,
) -> &'instance mut [u8] {
    let memory = if let Some(local_memory_index) = instance
        .module
        .local_memory_index(init.location.memory_index)
    {
        instance.memory(local_memory_index)
    } else {
        let import = instance.imported_memory(init.location.memory_index);
        *import.definition.as_ref()
    };
    slice::from_raw_parts_mut(memory.base, memory.current_length)
}
1396
1397fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1399 let mut start = init.offset;
1400
1401 if let Some(base) = init.base {
1402 let val = unsafe {
1403 if let Some(def_index) = instance.module.local_global_index(base) {
1404 instance.global(def_index).val.u32
1405 } else {
1406 instance.imported_global(base).definition.as_ref().val.u32
1407 }
1408 };
1409 start += usize::try_from(val).unwrap();
1410 }
1411
1412 start
1413}
1414
1415fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
1417 let module = Arc::clone(&instance.module);
1418 for init in &module.table_initializers {
1419 let start = get_table_init_start(init, instance);
1420 let table = instance.get_table_handle(init.table_index);
1421 let table = unsafe { table.get_mut(&mut *instance.context) };
1422
1423 if start
1424 .checked_add(init.elements.len())
1425 .map_or(true, |end| end > table.size() as usize)
1426 {
1427 return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
1428 }
1429
1430 if let wasmer_types::Type::FuncRef = table.ty().ty {
1431 for (i, func_idx) in init.elements.iter().enumerate() {
1432 let anyfunc = instance.func_ref(*func_idx);
1433 table
1434 .set(
1435 u32::try_from(start + i).unwrap(),
1436 TableElement::FuncRef(anyfunc),
1437 )
1438 .unwrap();
1439 }
1440 } else {
1441 for i in 0..init.elements.len() {
1442 table
1443 .set(
1444 u32::try_from(start + i).unwrap(),
1445 TableElement::ExternRef(None),
1446 )
1447 .unwrap();
1448 }
1449 }
1450 }
1451
1452 Ok(())
1453}
1454
1455fn initialize_passive_elements(instance: &Instance) {
1459 let mut passive_elements = instance.passive_elements.borrow_mut();
1460 debug_assert!(
1461 passive_elements.is_empty(),
1462 "should only be called once, at initialization time"
1463 );
1464
1465 passive_elements.extend(
1466 instance
1467 .module
1468 .passive_elements
1469 .iter()
1470 .filter(|(_, segments)| !segments.is_empty())
1471 .map(|(idx, segments)| {
1472 (
1473 *idx,
1474 segments.iter().map(|s| instance.func_ref(*s)).collect(),
1475 )
1476 }),
1477 );
1478}
1479
1480fn initialize_memories(
1482 instance: &mut Instance,
1483 data_initializers: &[DataInitializer<'_>],
1484) -> Result<(), Trap> {
1485 for init in data_initializers {
1486 let memory = instance.get_vmmemory(init.location.memory_index);
1487
1488 let start = get_memory_init_start(init, instance);
1489 unsafe {
1490 let current_length = memory.vmmemory().as_ref().current_length;
1491 if start
1492 .checked_add(init.data.len())
1493 .map_or(true, |end| end > current_length)
1494 {
1495 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1496 }
1497 memory.initialize_with_data(start, init.data)?;
1498 }
1499 }
1500
1501 Ok(())
1502}
1503
1504fn initialize_globals(instance: &Instance) {
1505 let module = Arc::clone(&instance.module);
1506 for (index, initializer) in module.global_initializers.iter() {
1507 unsafe {
1508 let to = instance.global_ptr(index).as_ptr();
1509 match initializer {
1510 GlobalInit::I32Const(x) => (*to).val.i32 = *x,
1511 GlobalInit::I64Const(x) => (*to).val.i64 = *x,
1512 GlobalInit::F32Const(x) => (*to).val.f32 = *x,
1513 GlobalInit::F64Const(x) => (*to).val.f64 = *x,
1514 GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
1515 GlobalInit::GetGlobal(x) => {
1516 let from: VMGlobalDefinition =
1517 if let Some(def_x) = module.local_global_index(*x) {
1518 instance.global(def_x)
1519 } else {
1520 instance.imported_global(*x).definition.as_ref().clone()
1521 };
1522 *to = from;
1523 }
1524 GlobalInit::RefNullConst => (*to).val.funcref = 0,
1525 GlobalInit::RefFunc(func_idx) => {
1526 let funcref = instance.func_ref(*func_idx).unwrap();
1527 (*to).val = funcref.into_raw();
1528 }
1529 }
1530 }
1531 }
1532}
1533
1534fn build_funcrefs(
1537 module_info: &ModuleInfo,
1538 ctx: &StoreObjects,
1539 imports: &Imports,
1540 finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1541 vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1542 function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1543 vmctx_ptr: *mut VMContext,
1544) -> (
1545 BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1546 BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1547) {
1548 let mut func_refs =
1549 PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1550 let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1551
1552 for import in imports.functions.values() {
1554 imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1555 }
1556
1557 for (local_index, func_ptr) in finished_functions.iter() {
1559 let index = module_info.func_index(local_index);
1560 let sig_index = module_info.functions[index];
1561 let type_index = vmshared_signatures[sig_index];
1562 let call_trampoline = function_call_trampolines[sig_index];
1563 let anyfunc = VMCallerCheckedAnyfunc {
1564 func_ptr: func_ptr.0,
1565 type_index,
1566 vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1567 call_trampoline,
1568 };
1569 func_refs.push(anyfunc);
1570 }
1571 (
1572 func_refs.into_boxed_slice(),
1573 imported_func_refs.into_boxed_slice(),
1574 )
1575}