1mod allocator;
10
11use crate::export::VMExtern;
12use crate::imports::Imports;
13use crate::store::{InternalStoreHandle, StoreObjects};
14use crate::table::TableElement;
15use crate::trap::{catch_traps, Trap, TrapCode};
16use crate::vmcontext::{
17 memory32_atomic_check32, memory32_atomic_check64, memory_copy, memory_fill,
18 VMBuiltinFunctionsArray, VMCallerCheckedAnyfunc, VMContext, VMFunctionContext,
19 VMFunctionImport, VMFunctionKind, VMGlobalDefinition, VMGlobalImport, VMMemoryDefinition,
20 VMMemoryImport, VMSharedSignatureIndex, VMTableDefinition, VMTableImport, VMTrampoline,
21};
22use crate::{FunctionBodyPtr, MaybeInstanceOwned, TrapHandlerFn, VMFunctionBody};
23use crate::{LinearMemory, NotifyLocation};
24use crate::{VMConfig, VMFuncRef, VMFunction, VMGlobal, VMMemory, VMTable};
25pub use allocator::InstanceAllocator;
26use memoffset::offset_of;
27use more_asserts::assert_lt;
28use std::alloc::Layout;
29use std::cell::RefCell;
30use std::collections::HashMap;
31use std::convert::TryFrom;
32use std::fmt;
33use std::mem;
34use std::ptr::{self, NonNull};
35use std::slice;
36use std::sync::Arc;
37use wasmer_types::entity::{packed_option::ReservedValue, BoxedSlice, EntityRef, PrimaryMap};
38use wasmer_types::{
39 DataIndex, DataInitializer, ElemIndex, ExportIndex, FunctionIndex, GlobalIndex, GlobalInit,
40 LocalFunctionIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryError,
41 MemoryIndex, ModuleInfo, Pages, SignatureIndex, TableIndex, TableInitializer, VMOffsets,
42};
43
/// A WebAssembly instance.
///
/// `#[repr(C)]` because `vmctx` must be the *last* field: generated code
/// derives pointers into the `VMContext` trailing data by offsetting from
/// the end of this struct (see `vmctx_plus_offset` / `vmctx_offset`).
#[repr(C)]
#[allow(clippy::type_complexity)]
pub(crate) struct Instance {
    /// The `ModuleInfo` this instance was instantiated from.
    module: Arc<ModuleInfo>,

    /// Raw pointer to the store's objects; the store must outlive this
    /// instance (dereferenced unchecked in `context`/`context_mut`).
    context: *mut StoreObjects,

    /// Offsets describing the layout of the trailing `VMContext` data.
    offsets: VMOffsets,

    /// Handles to this instance's locally-defined memories.
    memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,

    /// Handles to this instance's locally-defined tables.
    tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,

    /// Handles to this instance's locally-defined globals.
    globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,

    /// Pointers to the bodies of locally-defined functions.
    functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,

    /// One call trampoline per signature in the module.
    function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,

    /// Passive element segments, consumable by `table.init` and removed by
    /// `elem.drop` (interior mutability because drops take `&self`).
    passive_elements: RefCell<HashMap<ElemIndex, Box<[Option<VMFuncRef>]>>>,

    /// Passive data segments, consumable by `memory.init` and removed by
    /// `data.drop`.
    passive_data: RefCell<HashMap<DataIndex, Arc<[u8]>>>,

    /// `VMCallerCheckedAnyfunc`s for local functions; `func_ref` hands out
    /// `NonNull` pointers into this storage.
    funcrefs: BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,

    /// Pointers to the anyfuncs of imported functions (owned elsewhere).
    imported_funcrefs: BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,

    /// Context header visible to generated code. Additional data (signature
    /// ids, import records, definitions, builtins) is laid out immediately
    /// after this field, per `offsets`. MUST remain the last field.
    vmctx: VMContext,
}
99
100impl fmt::Debug for Instance {
101 fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
102 formatter.debug_struct("Instance").finish()
103 }
104}
105
#[allow(clippy::cast_ptr_alignment)]
impl Instance {
    /// Pointer to the location `offset` bytes past the start of the
    /// `VMContext`, cast to `*mut T`.
    ///
    /// # Safety
    /// `offset` must lie within the trailing data described by
    /// `self.offsets`, and that location must actually hold a `T`.
    unsafe fn vmctx_plus_offset<T>(&self, offset: u32) -> *mut T {
        (self.vmctx_ptr() as *mut u8)
            .add(usize::try_from(offset).unwrap())
            .cast()
    }

    /// The shared `ModuleInfo` this instance was created from.
    fn module(&self) -> &Arc<ModuleInfo> {
        &self.module
    }

    /// Borrow the `ModuleInfo` directly.
    pub(crate) fn module_ref(&self) -> &ModuleInfo {
        &self.module
    }

    /// Shared borrow of the store's objects.
    // `self.context` is a raw pointer; the store is assumed to outlive the
    // instance (upheld by the embedder).
    fn context(&self) -> &StoreObjects {
        unsafe { &*self.context }
    }

    /// Exclusive borrow of the store's objects.
    fn context_mut(&mut self) -> &mut StoreObjects {
        unsafe { &mut *self.context }
    }

    /// Offsets describing the `VMContext` trailing-data layout.
    fn offsets(&self) -> &VMOffsets {
        &self.offsets
    }

    /// Pointer to the start of the shared-signature-id array in the vmctx.
    fn signature_ids_ptr(&self) -> *mut VMSharedSignatureIndex {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_signature_ids_begin()) }
    }

    /// The `VMFunctionImport` for the imported function at `index`.
    fn imported_function(&self, index: FunctionIndex) -> &VMFunctionImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_functions_ptr().add(index) }
    }

    /// Pointer to the start of the imported-function records in the vmctx.
    fn imported_functions_ptr(&self) -> *mut VMFunctionImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_functions_begin()) }
    }

    /// The `VMTableImport` for the imported table at `index`.
    fn imported_table(&self, index: TableIndex) -> &VMTableImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_tables_ptr().add(index) }
    }

    /// Pointer to the start of the imported-table records in the vmctx.
    fn imported_tables_ptr(&self) -> *mut VMTableImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_tables_begin()) }
    }

    /// The `VMMemoryImport` for the imported memory at `index`.
    fn imported_memory(&self, index: MemoryIndex) -> &VMMemoryImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_memories_ptr().add(index) }
    }

    /// Pointer to the start of the imported-memory records in the vmctx.
    fn imported_memories_ptr(&self) -> *mut VMMemoryImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_memories_begin()) }
    }

    /// The `VMGlobalImport` for the imported global at `index`.
    fn imported_global(&self, index: GlobalIndex) -> &VMGlobalImport {
        let index = usize::try_from(index.as_u32()).unwrap();
        unsafe { &*self.imported_globals_ptr().add(index) }
    }

    /// Pointer to the start of the imported-global records in the vmctx.
    fn imported_globals_ptr(&self) -> *mut VMGlobalImport {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_imported_globals_begin()) }
    }

    /// Copy of the `VMTableDefinition` for the local table at `index`.
    #[allow(dead_code)]
    fn table(&self, index: LocalTableIndex) -> VMTableDefinition {
        unsafe { *self.table_ptr(index).as_ref() }
    }

    /// Overwrite the in-vmctx `VMTableDefinition` for the local table.
    #[allow(dead_code)]
    fn set_table(&self, index: LocalTableIndex, table: &VMTableDefinition) {
        unsafe {
            *self.table_ptr(index).as_ptr() = *table;
        }
    }

    /// Pointer to the `VMTableDefinition` for the local table at `index`.
    fn table_ptr(&self, index: LocalTableIndex) -> NonNull<VMTableDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.tables_ptr().add(index) }).unwrap()
    }

    /// Pointer to the start of the local-table definitions in the vmctx.
    fn tables_ptr(&self) -> *mut VMTableDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_tables_begin()) }
    }

    /// The `VMMemoryDefinition` for `index`, resolving through the import
    /// record when the memory is not locally defined.
    #[allow(dead_code)]
    fn get_memory(&self, index: MemoryIndex) -> VMMemoryDefinition {
        if let Some(local_index) = self.module.local_memory_index(index) {
            self.memory(local_index)
        } else {
            let import = self.imported_memory(index);
            unsafe { *import.definition.as_ref() }
        }
    }

    /// Copy of the `VMMemoryDefinition` for the local memory at `index`.
    fn memory(&self, index: LocalMemoryIndex) -> VMMemoryDefinition {
        unsafe { *self.memory_ptr(index).as_ref() }
    }

    /// Overwrite the in-vmctx `VMMemoryDefinition` for the local memory.
    #[allow(dead_code)]
    fn set_memory(&self, index: LocalMemoryIndex, mem: &VMMemoryDefinition) {
        unsafe {
            *self.memory_ptr(index).as_ptr() = *mem;
        }
    }

    /// Pointer to the `VMMemoryDefinition` for the local memory at `index`.
    fn memory_ptr(&self, index: LocalMemoryIndex) -> NonNull<VMMemoryDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { self.memories_ptr().add(index) }).unwrap()
    }

    /// Pointer to the start of the local-memory definitions in the vmctx.
    fn memories_ptr(&self) -> *mut VMMemoryDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_memories_begin()) }
    }

    /// The store-owned `VMMemory` for `index` (local or imported).
    fn get_vmmemory(&self, index: MemoryIndex) -> &VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get(local_index)
                    .unwrap()
                    .get(self.context.as_ref().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get(self.context.as_ref().unwrap()) }
        }
    }

    /// Mutable access to the store-owned `VMMemory` (local or imported).
    fn get_vmmemory_mut(&mut self, index: MemoryIndex) -> &mut VMMemory {
        if let Some(local_index) = self.module.local_memory_index(index) {
            unsafe {
                self.memories
                    .get_mut(local_index)
                    .unwrap()
                    .get_mut(self.context.as_mut().unwrap())
            }
        } else {
            let import = self.imported_memory(index);
            unsafe { import.handle.get_mut(self.context.as_mut().unwrap()) }
        }
    }

    /// Mutable access to a locally-defined `VMMemory`.
    fn get_local_vmmemory_mut(&mut self, local_index: LocalMemoryIndex) -> &mut VMMemory {
        unsafe {
            self.memories
                .get_mut(local_index)
                .unwrap()
                .get_mut(self.context.as_mut().unwrap())
        }
    }

    /// Copy of the `VMGlobalDefinition` for the local global at `index`.
    fn global(&self, index: LocalGlobalIndex) -> VMGlobalDefinition {
        unsafe { self.global_ptr(index).as_ref().clone() }
    }

    /// Overwrite the `VMGlobalDefinition` for the local global at `index`.
    #[allow(dead_code)]
    fn set_global(&self, index: LocalGlobalIndex, global: &VMGlobalDefinition) {
        unsafe {
            *self.global_ptr(index).as_ptr() = global.clone();
        }
    }

    /// Pointer to the `VMGlobalDefinition` for the local global at `index`.
    // Note the extra indirection: the vmctx stores *pointers to* global
    // definitions, hence the deref before `NonNull::new`.
    fn global_ptr(&self, index: LocalGlobalIndex) -> NonNull<VMGlobalDefinition> {
        let index = usize::try_from(index.as_u32()).unwrap();
        NonNull::new(unsafe { *self.globals_ptr().add(index) }).unwrap()
    }

    /// Pointer to the start of the global-definition pointer array.
    fn globals_ptr(&self) -> *mut *mut VMGlobalDefinition {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_globals_begin()) }
    }

    /// Pointer to the builtin-functions array inside the vmctx.
    fn builtin_functions_ptr(&self) -> *mut VMBuiltinFunctionsArray {
        unsafe { self.vmctx_plus_offset(self.offsets.vmctx_builtin_functions_begin()) }
    }

    /// Reference to this instance's `VMContext` header.
    fn vmctx(&self) -> &VMContext {
        &self.vmctx
    }

    /// Raw mutable pointer to the `VMContext`, as handed to generated code.
    fn vmctx_ptr(&self) -> *mut VMContext {
        self.vmctx() as *const VMContext as *mut VMContext
    }

    /// Invoke the module's start function, if any, catching traps.
    ///
    /// Returns `Ok(())` immediately when the module declares no start
    /// function; otherwise resolves the callee (local body pointer or
    /// import record) and calls it through `catch_traps`.
    fn invoke_start_function(
        &self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
    ) -> Result<(), Trap> {
        let start_index = match self.module.start_function {
            Some(idx) => idx,
            None => return Ok(()),
        };

        let (callee_address, callee_vmctx) = match self.module.local_func_index(start_index) {
            Some(local_index) => {
                let body = self
                    .functions
                    .get(local_index)
                    .expect("function index is out of bounds")
                    .0;
                (
                    body as *const _,
                    VMFunctionContext {
                        vmctx: self.vmctx_ptr(),
                    },
                )
            }
            None => {
                assert_lt!(start_index.index(), self.module.num_imported_functions);
                let import = self.imported_function(start_index);
                (import.body, import.environment)
            }
        };

        // SAFETY: start functions take no arguments and return nothing, so
        // the body pointer is reinterpreted as `fn(VMFunctionContext)`.
        unsafe {
            catch_traps(trap_handler, config, move || {
                mem::transmute::<*const VMFunctionBody, unsafe extern "C" fn(VMFunctionContext)>(
                    callee_address,
                )(callee_vmctx)
            })
        }
    }

    /// Byte offset of the `vmctx` field within `Instance`.
    #[inline]
    pub(crate) fn vmctx_offset() -> isize {
        offset_of!(Self, vmctx) as isize
    }

    /// Recover the local table index from a pointer into the vmctx's
    /// table-definition array (pointer arithmetic, then bounds-checked).
    pub(crate) fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        let begin: *const VMTableDefinition = self.tables_ptr() as *const _;
        let end: *const VMTableDefinition = table;
        let index = LocalTableIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMTableDefinition>(),
        );
        assert_lt!(index.index(), self.tables.len());
        index
    }

    /// Recover the local memory index from a pointer into the vmctx's
    /// memory-definition array.
    pub(crate) fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        let begin: *const VMMemoryDefinition = self.memories_ptr() as *const _;
        let end: *const VMMemoryDefinition = memory;
        let index = LocalMemoryIndex::new(
            (end as usize - begin as usize) / mem::size_of::<VMMemoryDefinition>(),
        );
        assert_lt!(index.index(), self.memories.len());
        index
    }

    /// Grow the local memory by `delta` pages; returns the previous size in
    /// pages, or a `MemoryError` from the memory implementation.
    pub(crate) fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Grow an imported memory by `delta` pages.
    ///
    /// # Safety
    /// `memory_index` must refer to a valid imported memory of this module.
    pub(crate) unsafe fn imported_memory_grow<IntoPages>(
        &mut self,
        memory_index: MemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get_mut(self.context_mut()).grow(delta.into())
    }

    /// Current size, in pages, of the local memory at `memory_index`.
    pub(crate) fn memory_size(&self, memory_index: LocalMemoryIndex) -> Pages {
        let mem = *self
            .memories
            .get(memory_index)
            .unwrap_or_else(|| panic!("no memory for index {}", memory_index.index()));
        mem.get(self.context()).size()
    }

    /// Current size, in pages, of an imported memory.
    ///
    /// # Safety
    /// `memory_index` must refer to a valid imported memory of this module.
    pub(crate) unsafe fn imported_memory_size(&self, memory_index: MemoryIndex) -> Pages {
        let import = self.imported_memory(memory_index);
        let mem = import.handle;
        mem.get(self.context()).size()
    }

    /// Current element count of the local table at `table_index`.
    pub(crate) fn table_size(&self, table_index: LocalTableIndex) -> u32 {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).size()
    }

    /// Current element count of an imported table.
    ///
    /// # Safety
    /// `table_index` must refer to a valid imported table of this module.
    pub(crate) unsafe fn imported_table_size(&self, table_index: TableIndex) -> u32 {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).size()
    }

    /// Grow the local table by `delta` entries initialized to `init_value`;
    /// `None` signals the grow failed (wasm `table.grow` returns -1).
    pub(crate) fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Grow an imported table by `delta` entries.
    ///
    /// # Safety
    /// `table_index` must refer to a valid imported table of this module.
    pub(crate) unsafe fn imported_table_grow(
        &mut self,
        table_index: TableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).grow(delta, init_value)
    }

    /// Element at `index` in the local table, or `None` when out of bounds.
    pub(crate) fn table_get(
        &self,
        table_index: LocalTableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let table = self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get(self.context()).get(index)
    }

    /// Element at `index` in an imported table.
    ///
    /// # Safety
    /// `table_index` must refer to a valid imported table of this module.
    pub(crate) unsafe fn imported_table_get(
        &self,
        table_index: TableIndex,
        index: u32,
    ) -> Option<TableElement> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get(self.context()).get(index)
    }

    /// Store `val` at `index` in the local table; traps when out of bounds.
    pub(crate) fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let table = *self
            .tables
            .get(table_index)
            .unwrap_or_else(|| panic!("no table for index {}", table_index.index()));
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// Store `val` at `index` in an imported table.
    ///
    /// # Safety
    /// `table_index` must refer to a valid imported table of this module.
    pub(crate) unsafe fn imported_table_set(
        &mut self,
        table_index: TableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        let import = self.imported_table(table_index);
        let table = import.handle;
        table.get_mut(self.context_mut()).set(index, val)
    }

    /// `VMFuncRef` for `function_index`: `None` for the reserved (null)
    /// index, a pointer into `self.funcrefs` for local functions, or the
    /// imported anyfunc pointer otherwise.
    pub(crate) fn func_ref(&self, function_index: FunctionIndex) -> Option<VMFuncRef> {
        if function_index == FunctionIndex::reserved_value() {
            None
        } else if let Some(local_function_index) = self.module.local_func_index(function_index) {
            Some(VMFuncRef(NonNull::from(
                &self.funcrefs[local_function_index],
            )))
        } else {
            Some(VMFuncRef(self.imported_funcrefs[function_index]))
        }
    }

    /// Implementation of wasm `table.init`: copy `len` funcrefs from
    /// passive element segment `elem_index` (starting at `src`) into the
    /// table at `dst`. Bounds-checks both sides first; a dropped segment
    /// behaves as an empty one.
    pub(crate) fn table_init(
        &mut self,
        table_index: TableIndex,
        elem_index: ElemIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table_handle(table_index);
        let table = unsafe { table.get_mut(&mut *self.context) };
        let passive_elements = self.passive_elements.borrow();
        let elem = passive_elements
            .get(&elem_index)
            .map_or::<&[Option<VMFuncRef>], _>(&[], |e| &**e);

        // `checked_add` guards against u32 overflow in the range ends.
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > elem.len())
            || dst.checked_add(len).map_or(true, |m| m > table.size())
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for (dst, src) in (dst..dst + len).zip(src..src + len) {
            table
                .set(dst, TableElement::FuncRef(elem[src as usize]))
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// Implementation of wasm `table.fill`: write `item` into `len` slots
    /// starting at `start_index`, after a single bounds check.
    pub(crate) fn table_fill(
        &mut self,
        table_index: TableIndex,
        start_index: u32,
        item: TableElement,
        len: u32,
    ) -> Result<(), Trap> {
        let table = self.get_table(table_index);
        let table_size = table.size() as usize;

        if start_index
            .checked_add(len)
            .map_or(true, |n| n as usize > table_size)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        for i in start_index..(start_index + len) {
            table
                .set(i, item.clone())
                .expect("should never panic because we already did the bounds check above");
        }

        Ok(())
    }

    /// Implementation of wasm `elem.drop`: discard a passive element
    /// segment (no-op if already dropped or never present).
    pub(crate) fn elem_drop(&self, elem_index: ElemIndex) {
        let mut passive_elements = self.passive_elements.borrow_mut();
        passive_elements.remove(&elem_index);
    }

    /// `memory.copy` within a locally-defined memory; bounds checking is
    /// delegated to `memory_copy`.
    pub(crate) fn local_memory_copy(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_copy(&memory, dst, src, len) }
    }

    /// `memory.copy` within an imported memory.
    pub(crate) fn imported_memory_copy(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_copy(memory, dst, src, len) }
    }

    /// `memory.fill` for a locally-defined memory.
    pub(crate) fn local_memory_fill(
        &self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.memory(memory_index);
        unsafe { memory_fill(&memory, dst, val, len) }
    }

    /// `memory.fill` for an imported memory.
    pub(crate) fn imported_memory_fill(
        &self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        unsafe { memory_fill(memory, dst, val, len) }
    }

    /// Implementation of wasm `memory.init`: copy `len` bytes of passive
    /// data segment `data_index` (from `src`) into memory at `dst`.
    /// A dropped segment behaves as empty; both ranges are bounds-checked.
    pub(crate) fn memory_init(
        &self,
        memory_index: MemoryIndex,
        data_index: DataIndex,
        dst: u32,
        src: u32,
        len: u32,
    ) -> Result<(), Trap> {
        let memory = self.get_vmmemory(memory_index);
        let passive_data = self.passive_data.borrow();
        let data = passive_data.get(&data_index).map_or(&[][..], |d| &**d);

        let current_length = unsafe { memory.vmmemory().as_ref().current_length };
        if src
            .checked_add(len)
            .map_or(true, |n| n as usize > data.len())
            || dst
                .checked_add(len)
                .map_or(true, |m| usize::try_from(m).unwrap() > current_length)
        {
            return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
        }
        let src_slice = &data[src as usize..(src + len) as usize];
        unsafe { memory.initialize_with_data(dst as usize, src_slice) }
    }

    /// Implementation of wasm `data.drop`: discard a passive data segment.
    pub(crate) fn data_drop(&self, data_index: DataIndex) {
        let mut passive_data = self.passive_data.borrow_mut();
        passive_data.remove(&data_index);
    }

    /// Mutable `VMTable` for `table_index`, local or imported.
    pub(crate) fn get_table(&mut self, table_index: TableIndex) -> &mut VMTable {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.get_local_table(local_table_index)
        } else {
            self.get_foreign_table(table_index)
        }
    }

    /// Mutable `VMTable` for a locally-defined table.
    pub(crate) fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        let table = self.tables[index];
        table.get_mut(self.context_mut())
    }

    /// Mutable `VMTable` for an imported table.
    pub(crate) fn get_foreign_table(&mut self, index: TableIndex) -> &mut VMTable {
        let import = self.imported_table(index);
        let table = import.handle;
        table.get_mut(self.context_mut())
    }

    /// Store handle for `table_index`, resolving through the import record
    /// for non-local tables.
    pub(crate) fn get_table_handle(
        &mut self,
        table_index: TableIndex,
    ) -> InternalStoreHandle<VMTable> {
        if let Some(local_table_index) = self.module.local_table_index(table_index) {
            self.tables[local_table_index]
        } else {
            self.imported_table(table_index).handle
        }
    }

    /// Shared helper for the `memory.atomic.wait*` paths: block on `dst`
    /// with an optional timeout (negative `timeout` means wait forever).
    // NOTE(review): wait failures are surfaced as
    // `TrapCode::TableAccessOutOfBounds`, which reads odd for a *memory*
    // operation — confirm whether a heap/atomics trap code was intended.
    fn memory_wait(memory: &mut VMMemory, dst: u32, timeout: i64) -> Result<u32, Trap> {
        let location = NotifyLocation { address: dst };
        let timeout = if timeout < 0 {
            None
        } else {
            Some(std::time::Duration::from_nanos(timeout as u64))
        };
        match memory.do_wait(location, timeout) {
            Ok(count) => Ok(count),
            Err(_err) => {
                Err(Trap::lib(TrapCode::TableAccessOutOfBounds))
            }
        }
    }

    /// `memory.atomic.wait32` on a local memory: first compare the value at
    /// `dst` against `val`; only when they match (check returns 0) actually
    /// block via `memory_wait`.
    pub(crate) fn local_memory_wait32(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check32(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.wait32` on an imported memory.
    pub(crate) fn imported_memory_wait32(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u32,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check32(memory, dst, val) };
        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.wait64` on a local memory (64-bit compare).
    pub(crate) fn local_memory_wait64(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let memory = self.memory(memory_index);
        let ret = unsafe { memory32_atomic_check64(&memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_local_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.wait64` on an imported memory.
    pub(crate) fn imported_memory_wait64(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        val: u64,
        timeout: i64,
    ) -> Result<u32, Trap> {
        let import = self.imported_memory(memory_index);
        let memory = unsafe { import.definition.as_ref() };
        let ret = unsafe { memory32_atomic_check64(memory, dst, val) };

        if let Ok(mut ret) = ret {
            if ret == 0 {
                let memory = self.get_vmmemory_mut(memory_index);
                ret = Self::memory_wait(memory, dst, timeout)?;
            }
            Ok(ret)
        } else {
            ret
        }
    }

    /// `memory.atomic.notify` on a local memory: wake up to `count`
    /// waiters at `dst`, returning how many were woken.
    pub(crate) fn local_memory_notify(
        &mut self,
        memory_index: LocalMemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_local_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }

    /// `memory.atomic.notify` on an imported memory.
    pub(crate) fn imported_memory_notify(
        &mut self,
        memory_index: MemoryIndex,
        dst: u32,
        count: u32,
    ) -> Result<u32, Trap> {
        let memory = self.get_vmmemory_mut(memory_index);
        let location = NotifyLocation { address: dst };
        Ok(memory.do_notify(location, count))
    }
}
955
/// An owning handle to an `Instance`.
///
/// The `Instance` lives in a raw allocation produced by
/// `InstanceAllocator` (sized for the trailing `VMContext` data), so this
/// handle records the allocation's `Layout` and frees it on `Drop`.
#[derive(Debug, Eq, PartialEq)]
pub struct VMInstance {
    /// Layout of the allocation backing `instance`; needed by `dealloc`.
    instance_layout: Layout,

    /// Pointer to the heap-allocated `Instance`.
    instance: NonNull<Instance>,
}
976
977impl Drop for VMInstance {
981 fn drop(&mut self) {
982 let instance_ptr = self.instance.as_ptr();
983
984 unsafe {
985 instance_ptr.drop_in_place();
987 std::alloc::dealloc(instance_ptr as *mut u8, self.instance_layout);
989 }
990 }
991}
992
impl VMInstance {
    /// Create a new `VMInstance` from the compiled artifacts and resolved
    /// imports, writing all import records, signature ids, global pointers
    /// and builtin functions into the `VMContext` trailing data.
    ///
    /// # Safety
    /// The allocator's offsets must match `module`, every finished-* slice
    /// must be consistent with `module`, and `context` must outlive the
    /// returned instance (it is stored as a raw pointer).
    #[allow(clippy::too_many_arguments)]
    pub unsafe fn new(
        allocator: InstanceAllocator,
        module: Arc<ModuleInfo>,
        context: &mut StoreObjects,
        finished_functions: BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
        finished_function_call_trampolines: BoxedSlice<SignatureIndex, VMTrampoline>,
        finished_memories: BoxedSlice<LocalMemoryIndex, InternalStoreHandle<VMMemory>>,
        finished_tables: BoxedSlice<LocalTableIndex, InternalStoreHandle<VMTable>>,
        finished_globals: BoxedSlice<LocalGlobalIndex, InternalStoreHandle<VMGlobal>>,
        imports: Imports,
        vmshared_signatures: BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
    ) -> Result<Self, Trap> {
        // Collect pointers to the store-owned global definitions; these are
        // copied into the vmctx's global-pointer array below.
        let vmctx_globals = finished_globals
            .values()
            .map(|m| m.get(context).vmglobal())
            .collect::<PrimaryMap<LocalGlobalIndex, _>>()
            .into_boxed_slice();
        // Seed the passive data segments from the module.
        let passive_data = RefCell::new(
            module
                .passive_data
                .clone()
                .into_iter()
                .map(|(idx, bytes)| (idx, Arc::from(bytes)))
                .collect::<HashMap<_, _>>(),
        );

        let handle = {
            let offsets = allocator.offsets().clone();
            // funcrefs are empty until the instance is placed at its final
            // address — they contain pointers into the instance itself.
            let funcrefs = PrimaryMap::new().into_boxed_slice();
            let imported_funcrefs = PrimaryMap::new().into_boxed_slice();
            let instance = Instance {
                module,
                context,
                offsets,
                memories: finished_memories,
                tables: finished_tables,
                globals: finished_globals,
                functions: finished_functions,
                function_call_trampolines: finished_function_call_trampolines,
                passive_elements: Default::default(),
                passive_data,
                funcrefs,
                imported_funcrefs,
                vmctx: VMContext {},
            };

            let mut instance_handle = allocator.into_vminstance(instance);

            // Now the instance has its final address: build the funcrefs,
            // which embed the vmctx pointer.
            {
                let instance = instance_handle.instance_mut();
                let vmctx_ptr = instance.vmctx_ptr();
                (instance.funcrefs, instance.imported_funcrefs) = build_funcrefs(
                    &instance.module,
                    context,
                    &imports,
                    &instance.functions,
                    &vmshared_signatures,
                    &instance.function_call_trampolines,
                    vmctx_ptr,
                );
            }

            instance_handle
        };
        let instance = handle.instance();

        // Populate the vmctx trailing data: signature ids, then each kind
        // of import record, then global-definition pointers and builtins.
        ptr::copy(
            vmshared_signatures.values().as_slice().as_ptr(),
            instance.signature_ids_ptr(),
            vmshared_signatures.len(),
        );
        ptr::copy(
            imports.functions.values().as_slice().as_ptr(),
            instance.imported_functions_ptr(),
            imports.functions.len(),
        );
        ptr::copy(
            imports.tables.values().as_slice().as_ptr(),
            instance.imported_tables_ptr(),
            imports.tables.len(),
        );
        ptr::copy(
            imports.memories.values().as_slice().as_ptr(),
            instance.imported_memories_ptr(),
            imports.memories.len(),
        );
        ptr::copy(
            imports.globals.values().as_slice().as_ptr(),
            instance.imported_globals_ptr(),
            imports.globals.len(),
        );
        ptr::copy(
            vmctx_globals.values().as_slice().as_ptr(),
            instance.globals_ptr() as *mut NonNull<VMGlobalDefinition>,
            vmctx_globals.len(),
        );
        ptr::write(
            instance.builtin_functions_ptr(),
            VMBuiltinFunctionsArray::initialized(),
        );

        initialize_passive_elements(instance);
        initialize_globals(instance);

        Ok(handle)
    }

    /// Shared access to the owned `Instance`.
    pub(crate) fn instance(&self) -> &Instance {
        unsafe { self.instance.as_ref() }
    }

    /// Exclusive access to the owned `Instance`.
    pub(crate) fn instance_mut(&mut self) -> &mut Instance {
        unsafe { self.instance.as_mut() }
    }

    /// Finish instantiation: initialize tables and memories from the
    /// module's initializers, then run the start function (if any).
    ///
    /// # Safety
    /// `data_initializers` must match the instantiated module, and the
    /// trap handler (if provided) must remain valid for the call.
    pub unsafe fn finish_instantiation(
        &mut self,
        config: &VMConfig,
        trap_handler: Option<*const TrapHandlerFn<'static>>,
        data_initializers: &[DataInitializer<'_>],
    ) -> Result<(), Trap> {
        let instance = self.instance_mut();

        initialize_tables(instance)?;
        initialize_memories(instance, data_initializers)?;

        instance.invoke_start_function(config, trap_handler)?;
        Ok(())
    }

    /// Reference to the instance's `VMContext`.
    pub fn vmctx(&self) -> &VMContext {
        self.instance().vmctx()
    }

    /// Raw pointer to the instance's `VMContext`.
    pub fn vmctx_ptr(&self) -> *mut VMContext {
        self.instance().vmctx_ptr()
    }

    /// The `VMOffsets` describing the vmctx layout.
    pub fn vmoffsets(&self) -> &VMOffsets {
        self.instance().offsets()
    }

    /// The shared `ModuleInfo` of this instance.
    pub fn module(&self) -> &Arc<ModuleInfo> {
        self.instance().module()
    }

    /// Borrow the `ModuleInfo` directly.
    pub fn module_ref(&self) -> &ModuleInfo {
        self.instance().module_ref()
    }

    /// Look up an export by name; `None` if the module exports no such
    /// field.
    pub fn lookup(&mut self, field: &str) -> Option<VMExtern> {
        let export = *self.module_ref().exports.get(field)?;

        Some(self.lookup_by_declaration(export))
    }

    /// Resolve an `ExportIndex` to a `VMExtern` store handle. Local
    /// function exports allocate a fresh `VMFunction` in the store;
    /// tables/memories/globals reuse existing handles.
    pub fn lookup_by_declaration(&mut self, export: ExportIndex) -> VMExtern {
        let instance = self.instance();

        match export {
            ExportIndex::Function(index) => {
                let sig_index = &instance.module.functions[index];
                let handle = if let Some(def_index) = instance.module.local_func_index(index) {
                    let signature = instance.module.signatures[*sig_index].clone();
                    let vm_function = VMFunction {
                        anyfunc: MaybeInstanceOwned::Instance(NonNull::from(
                            &instance.funcrefs[def_index],
                        )),
                        signature,
                        kind: VMFunctionKind::Static,
                        host_data: Box::new(()),
                    };
                    InternalStoreHandle::new(self.instance_mut().context_mut(), vm_function)
                } else {
                    let import = instance.imported_function(index);
                    import.handle
                };

                VMExtern::Function(handle)
            }
            ExportIndex::Table(index) => {
                let handle = if let Some(def_index) = instance.module.local_table_index(index) {
                    instance.tables[def_index]
                } else {
                    let import = instance.imported_table(index);
                    import.handle
                };
                VMExtern::Table(handle)
            }
            ExportIndex::Memory(index) => {
                let handle = if let Some(def_index) = instance.module.local_memory_index(index) {
                    instance.memories[def_index]
                } else {
                    let import = instance.imported_memory(index);
                    import.handle
                };
                VMExtern::Memory(handle)
            }
            ExportIndex::Global(index) => {
                let handle = if let Some(def_index) = instance.module.local_global_index(index) {
                    instance.globals[def_index]
                } else {
                    let import = instance.imported_global(index);
                    import.handle
                };
                VMExtern::Global(handle)
            }
        }
    }

    /// Iterator over the module's exports (name, index pairs).
    pub fn exports(&self) -> indexmap::map::Iter<String, ExportIndex> {
        self.module().exports.iter()
    }

    /// See `Instance::memory_index`.
    pub fn memory_index(&self, memory: &VMMemoryDefinition) -> LocalMemoryIndex {
        self.instance().memory_index(memory)
    }

    /// See `Instance::memory_grow`.
    pub fn memory_grow<IntoPages>(
        &mut self,
        memory_index: LocalMemoryIndex,
        delta: IntoPages,
    ) -> Result<Pages, MemoryError>
    where
        IntoPages: Into<Pages>,
    {
        self.instance_mut().memory_grow(memory_index, delta)
    }

    /// See `Instance::table_index`.
    pub fn table_index(&self, table: &VMTableDefinition) -> LocalTableIndex {
        self.instance().table_index(table)
    }

    /// See `Instance::table_grow`.
    pub fn table_grow(
        &mut self,
        table_index: LocalTableIndex,
        delta: u32,
        init_value: TableElement,
    ) -> Option<u32> {
        self.instance_mut()
            .table_grow(table_index, delta, init_value)
    }

    /// See `Instance::table_get`.
    pub fn table_get(&self, table_index: LocalTableIndex, index: u32) -> Option<TableElement> {
        self.instance().table_get(table_index, index)
    }

    /// See `Instance::table_set`.
    pub fn table_set(
        &mut self,
        table_index: LocalTableIndex,
        index: u32,
        val: TableElement,
    ) -> Result<(), Trap> {
        self.instance_mut().table_set(table_index, index, val)
    }

    /// See `Instance::get_local_table`.
    pub fn get_local_table(&mut self, index: LocalTableIndex) -> &mut VMTable {
        self.instance_mut().get_local_table(index)
    }
}
1331
1332fn get_memory_init_start(init: &DataInitializer<'_>, instance: &Instance) -> usize {
1334 let mut start = init.location.offset;
1335
1336 if let Some(base) = init.location.base {
1337 let val = unsafe {
1338 if let Some(def_index) = instance.module.local_global_index(base) {
1339 instance.global(def_index).val.u32
1340 } else {
1341 instance.imported_global(base).definition.as_ref().val.u32
1342 }
1343 };
1344 start += usize::try_from(val).unwrap();
1345 }
1346
1347 start
1348}
1349
1350#[allow(clippy::mut_from_ref)]
1351#[allow(dead_code)]
1352unsafe fn get_memory_slice<'instance>(
1354 init: &DataInitializer<'_>,
1355 instance: &'instance Instance,
1356) -> &'instance mut [u8] {
1357 let memory = if let Some(local_memory_index) = instance
1358 .module
1359 .local_memory_index(init.location.memory_index)
1360 {
1361 instance.memory(local_memory_index)
1362 } else {
1363 let import = instance.imported_memory(init.location.memory_index);
1364 *import.definition.as_ref()
1365 };
1366 slice::from_raw_parts_mut(memory.base, memory.current_length)
1367}
1368
1369fn get_table_init_start(init: &TableInitializer, instance: &Instance) -> usize {
1371 let mut start = init.offset;
1372
1373 if let Some(base) = init.base {
1374 let val = unsafe {
1375 if let Some(def_index) = instance.module.local_global_index(base) {
1376 instance.global(def_index).val.u32
1377 } else {
1378 instance.imported_global(base).definition.as_ref().val.u32
1379 }
1380 };
1381 start += usize::try_from(val).unwrap();
1382 }
1383
1384 start
1385}
1386
/// Apply the module's active table initializers: bounds-check each segment
/// against the target table, then write funcrefs (for funcref tables) or
/// null externrefs (for other element types).
fn initialize_tables(instance: &mut Instance) -> Result<(), Trap> {
    // Clone the Arc so we can iterate initializers while mutating `instance`.
    let module = Arc::clone(&instance.module);
    for init in &module.table_initializers {
        let start = get_table_init_start(init, instance);
        let table = instance.get_table_handle(init.table_index);
        let table = unsafe { table.get_mut(&mut *instance.context) };

        // `checked_add` guards against usize overflow of the segment end.
        if start
            .checked_add(init.elements.len())
            .map_or(true, |end| end > table.size() as usize)
        {
            return Err(Trap::lib(TrapCode::TableAccessOutOfBounds));
        }

        if let wasmer_types::Type::FuncRef = table.ty().ty {
            for (i, func_idx) in init.elements.iter().enumerate() {
                let anyfunc = instance.func_ref(*func_idx);
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::FuncRef(anyfunc),
                    )
                    .unwrap();
            }
        } else {
            // Non-funcref tables are filled with null externrefs.
            for i in 0..init.elements.len() {
                table
                    .set(
                        u32::try_from(start + i).unwrap(),
                        TableElement::ExternRef(None),
                    )
                    .unwrap();
            }
        }
    }

    Ok(())
}
1426
1427fn initialize_passive_elements(instance: &Instance) {
1431 let mut passive_elements = instance.passive_elements.borrow_mut();
1432 debug_assert!(
1433 passive_elements.is_empty(),
1434 "should only be called once, at initialization time"
1435 );
1436
1437 passive_elements.extend(
1438 instance
1439 .module
1440 .passive_elements
1441 .iter()
1442 .filter(|(_, segments)| !segments.is_empty())
1443 .map(|(idx, segments)| {
1444 (
1445 *idx,
1446 segments.iter().map(|s| instance.func_ref(*s)).collect(),
1447 )
1448 }),
1449 );
1450}
1451
1452fn initialize_memories(
1454 instance: &mut Instance,
1455 data_initializers: &[DataInitializer<'_>],
1456) -> Result<(), Trap> {
1457 for init in data_initializers {
1458 let memory = instance.get_vmmemory(init.location.memory_index);
1459
1460 let start = get_memory_init_start(init, instance);
1461 unsafe {
1462 let current_length = memory.vmmemory().as_ref().current_length;
1463 if start
1464 .checked_add(init.data.len())
1465 .map_or(true, |end| end > current_length)
1466 {
1467 return Err(Trap::lib(TrapCode::HeapAccessOutOfBounds));
1468 }
1469 memory.initialize_with_data(start, init.data)?;
1470 }
1471 }
1472
1473 Ok(())
1474}
1475
/// Evaluate the module's global initializer expressions and write the
/// resulting values into this instance's global definitions.
fn initialize_globals(instance: &Instance) {
    let module = Arc::clone(&instance.module);
    for (index, initializer) in module.global_initializers.iter() {
        // SAFETY: `global_ptr` yields a valid, properly-aligned pointer to
        // this instance's definition for `index`; the write goes through the
        // union field matching the initializer's type.
        unsafe {
            let to = instance.global_ptr(index).as_ptr();
            match initializer {
                GlobalInit::I32Const(x) => (*to).val.i32 = *x,
                GlobalInit::I64Const(x) => (*to).val.i64 = *x,
                GlobalInit::F32Const(x) => (*to).val.f32 = *x,
                GlobalInit::F64Const(x) => (*to).val.f64 = *x,
                GlobalInit::V128Const(x) => (*to).val.bytes = *x.bytes(),
                GlobalInit::GetGlobal(x) => {
                    // Copy the current value of another global, which may be
                    // local to this instance or imported.
                    let from: VMGlobalDefinition =
                        if let Some(def_x) = module.local_global_index(*x) {
                            instance.global(def_x)
                        } else {
                            instance.imported_global(*x).definition.as_ref().clone()
                        };
                    *to = from;
                }
                // Null reference is stored as funcref value 0.
                GlobalInit::RefNullConst => (*to).val.funcref = 0,
                GlobalInit::RefFunc(func_idx) => {
                    let funcref = instance.func_ref(*func_idx).unwrap();
                    (*to).val = funcref.into_raw();
                }
            }
        }
    }
}
1505
1506fn build_funcrefs(
1509 module_info: &ModuleInfo,
1510 ctx: &StoreObjects,
1511 imports: &Imports,
1512 finished_functions: &BoxedSlice<LocalFunctionIndex, FunctionBodyPtr>,
1513 vmshared_signatures: &BoxedSlice<SignatureIndex, VMSharedSignatureIndex>,
1514 function_call_trampolines: &BoxedSlice<SignatureIndex, VMTrampoline>,
1515 vmctx_ptr: *mut VMContext,
1516) -> (
1517 BoxedSlice<LocalFunctionIndex, VMCallerCheckedAnyfunc>,
1518 BoxedSlice<FunctionIndex, NonNull<VMCallerCheckedAnyfunc>>,
1519) {
1520 let mut func_refs =
1521 PrimaryMap::with_capacity(module_info.functions.len() - module_info.num_imported_functions);
1522 let mut imported_func_refs = PrimaryMap::with_capacity(module_info.num_imported_functions);
1523
1524 for import in imports.functions.values() {
1526 imported_func_refs.push(import.handle.get(ctx).anyfunc.as_ptr());
1527 }
1528
1529 for (local_index, func_ptr) in finished_functions.iter() {
1531 let index = module_info.func_index(local_index);
1532 let sig_index = module_info.functions[index];
1533 let type_index = vmshared_signatures[sig_index];
1534 let call_trampoline = function_call_trampolines[sig_index];
1535 let anyfunc = VMCallerCheckedAnyfunc {
1536 func_ptr: func_ptr.0,
1537 type_index,
1538 vmctx: VMFunctionContext { vmctx: vmctx_ptr },
1539 call_trampoline,
1540 };
1541 func_refs.push(anyfunc);
1542 }
1543 (
1544 func_refs.into_boxed_slice(),
1545 imported_func_refs.into_boxed_slice(),
1546 )
1547}