use crate::global::Global;
use crate::instance::Instance;
use crate::memory::Memory;
use crate::table::Table;
use crate::trap::{Trap, TrapCode};
use std::any::Any;
use std::convert::TryFrom;
use std::ptr::{self, NonNull};
use std::sync::Arc;
use std::u32;
/// An imported function's resolved address plus the context to call it with.
///
/// `#[repr(C)]` with field order matching `VMOffsets` (see the layout test
/// below): generated code loads `body` and `vmctx` at fixed offsets.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMFunctionImport {
    /// Pointer to the imported function's compiled body.
    pub body: *const VMFunctionBody,
    /// The `VMContext` that owns the imported function; passed as the
    /// callee's first (implicit) argument.
    pub vmctx: *mut VMContext,
}
#[cfg(test)]
mod test_vmfunction_import {
    use super::VMFunctionImport;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Generated code accesses `VMFunctionImport` fields at offsets computed
    /// by `VMOffsets`; verify the Rust layout agrees with those offsets.
    #[test]
    fn check_vmfunction_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMFunctionImport>(),
            usize::from(offsets.size_of_vmfunction_import())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, body),
            usize::from(offsets.vmfunction_import_body())
        );
        assert_eq!(
            offset_of!(VMFunctionImport, vmctx),
            usize::from(offsets.vmfunction_import_vmctx())
        );
    }
}
/// A function pointer bundled with an arbitrary caller-supplied context `T`,
/// laid out `#[repr(C)]` so generated code can read `address` at a fixed
/// offset (checked against `VMOffsets` in the test below).
#[repr(C)]
pub struct VMDynamicFunctionContext<T: Sized> {
    /// Pointer to the function body to invoke.
    pub address: *const VMFunctionBody,
    /// Caller-defined payload stored inline after `address`.
    pub ctx: T,
}
#[cfg(test)]
mod test_vmdynamicfunction_import_context {
    use super::VMDynamicFunctionContext;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Check `VMDynamicFunctionContext`'s layout (instantiated with `usize`)
    /// against the offsets `VMOffsets` computes for it.
    #[test]
    fn check_vmdynamicfunction_import_context_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMDynamicFunctionContext<usize>>(),
            usize::from(offsets.size_of_vmdynamicfunction_import_context())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, address),
            usize::from(offsets.vmdynamicfunction_import_context_address())
        );
        assert_eq!(
            offset_of!(VMDynamicFunctionContext<usize>, ctx),
            usize::from(offsets.vmdynamicfunction_import_context_ctx())
        );
    }
}
/// A placeholder byte-sized type pointed to by function-body pointers
/// (`*const VMFunctionBody`); never instantiated directly — it only gives
/// those raw pointers a distinct pointee type.
#[repr(C)]
pub struct VMFunctionBody(u8);
#[cfg(test)]
mod test_vmfunction_body {
    use super::VMFunctionBody;
    use std::mem::size_of;

    /// `VMFunctionBody` must stay exactly one byte so pointer arithmetic on
    /// `*const VMFunctionBody` is byte-granular.
    #[test]
    fn check_vmfunction_body_offsets() {
        assert_eq!(size_of::<VMFunctionBody>(), 1);
    }
}
/// Discriminates the two calling styles a VM function can have.
#[derive(Debug, Copy, Clone, PartialEq)]
#[repr(C)]
pub enum VMFunctionKind {
    /// A function with a fixed, statically-known signature.
    Static,
    /// A function whose arguments are handled dynamically at call time
    /// (see `VMDynamicFunctionContext`).
    Dynamic,
}
/// An imported table: the shared definition generated code reads/writes,
/// plus a handle keeping the owning `Table` alive.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMTableImport {
    /// Pointer to the table's `VMTableDefinition` (base/current_elements).
    pub definition: NonNull<VMTableDefinition>,
    /// The table object this import refers to; the `Arc` keeps it alive for
    /// as long as the import exists.
    pub from: Arc<dyn Table>,
}
#[cfg(test)]
mod test_vmtable_import {
    use super::VMTableImport;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Verify `VMTableImport`'s Rust layout matches the offsets `VMOffsets`
    /// hands to the compiler.
    #[test]
    fn check_vmtable_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableImport>(),
            usize::from(offsets.size_of_vmtable_import())
        );
        assert_eq!(
            offset_of!(VMTableImport, definition),
            usize::from(offsets.vmtable_import_definition())
        );
        assert_eq!(
            offset_of!(VMTableImport, from),
            usize::from(offsets.vmtable_import_from())
        );
    }
}
/// An imported linear memory: the shared definition generated code uses for
/// bounds checks and accesses, plus a handle keeping the owning `Memory`
/// alive.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMMemoryImport {
    /// Pointer to the memory's `VMMemoryDefinition` (base/current_length).
    pub definition: NonNull<VMMemoryDefinition>,
    /// The memory object this import refers to; the `Arc` keeps it alive.
    pub from: Arc<dyn Memory>,
}
#[cfg(test)]
mod test_vmmemory_import {
    use super::VMMemoryImport;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Verify `VMMemoryImport`'s Rust layout matches the offsets `VMOffsets`
    /// hands to the compiler.
    #[test]
    fn check_vmmemory_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryImport>(),
            usize::from(offsets.size_of_vmmemory_import())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, definition),
            usize::from(offsets.vmmemory_import_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryImport, from),
            usize::from(offsets.vmmemory_import_from())
        );
    }
}
/// An imported global: a pointer to its definition storage plus a handle
/// keeping the owning `Global` alive.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMGlobalImport {
    /// Pointer to the global's 16-byte `VMGlobalDefinition` storage.
    pub definition: NonNull<VMGlobalDefinition>,
    /// The global object this import refers to; the `Arc` keeps it alive.
    pub from: Arc<Global>,
}
// SAFETY: `NonNull` is not `Send`/`Sync` by default. These impls assert the
// pointed-to definition is safe to share across threads — presumably
// guaranteed by `Global`'s own synchronization; TODO(review): confirm
// against the `Global` implementation.
unsafe impl Send for VMGlobalImport {}
unsafe impl Sync for VMGlobalImport {}
#[cfg(test)]
mod test_vmglobal_import {
    use super::VMGlobalImport;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Verify `VMGlobalImport`'s Rust layout matches the offsets `VMOffsets`
    /// hands to the compiler.
    #[test]
    fn check_vmglobal_import_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMGlobalImport>(),
            usize::from(offsets.size_of_vmglobal_import())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, definition),
            usize::from(offsets.vmglobal_import_definition())
        );
        assert_eq!(
            offset_of!(VMGlobalImport, from),
            usize::from(offsets.vmglobal_import_from())
        );
    }
}
/// The shared description of a linear memory that generated code reads
/// directly: the base address and the current byte length used for bounds
/// checks. `#[repr(C)]`; layout is pinned by the test module below.
#[derive(Debug, Copy, Clone)]
#[repr(C)]
pub struct VMMemoryDefinition {
    /// Base address of the memory's backing storage.
    pub base: *mut u8,
    /// Current size of the memory in bytes (note: `u32`, so a wasm32-style
    /// address space).
    pub current_length: u32,
}
// SAFETY: raw pointers make this `!Send`/`!Sync` by default. These impls
// assert the pointed-to memory may be accessed from multiple threads —
// presumably upheld by the owning `Memory`'s synchronization;
// TODO(review): confirm.
unsafe impl Send for VMMemoryDefinition {}
unsafe impl Sync for VMMemoryDefinition {}
impl VMMemoryDefinition {
    /// Perform the `memory.copy` operation: copy `len` bytes from offset
    /// `src` to offset `dst` within this linear memory. Overlapping ranges
    /// are handled (`ptr::copy` has `memmove` semantics).
    ///
    /// # Errors
    ///
    /// Returns a `HeapAccessOutOfBounds` trap if `src + len` or `dst + len`
    /// overflows `u32` or exceeds `current_length`. Nothing is copied in
    /// that case.
    ///
    /// # Safety
    ///
    /// The caller must ensure `base` points to at least `current_length`
    /// valid bytes and that no other code is concurrently mutating the
    /// affected ranges (this copy is unsynchronized and non-atomic).
    pub(crate) unsafe fn memory_copy(&self, dst: u32, src: u32, len: u32) -> Result<(), Trap> {
        // Bounds-check both ranges up front; `checked_add` also rejects
        // end offsets that overflow `u32`.
        if src
            .checked_add(len)
            .map_or(true, |n| n > self.current_length)
            || dst
                .checked_add(len)
                .map_or(true, |m| m > self.current_length)
        {
            return Err(Trap::new_from_runtime(TrapCode::HeapAccessOutOfBounds));
        }

        let dst = usize::try_from(dst).unwrap();
        let src = usize::try_from(src).unwrap();
        let dst = self.base.add(dst);
        let src = self.base.add(src);
        // `ptr::copy` (not `copy_nonoverlapping`): wasm `memory.copy`
        // permits the ranges to overlap.
        ptr::copy(src, dst, len as usize);
        Ok(())
    }

    /// Perform the `memory.fill` operation: write the low byte of `val` to
    /// `len` bytes starting at offset `dst`.
    ///
    /// # Errors
    ///
    /// Returns a `HeapAccessOutOfBounds` trap if `dst + len` overflows `u32`
    /// or exceeds `current_length`. Nothing is written in that case.
    ///
    /// # Safety
    ///
    /// Same contract as [`VMMemoryDefinition::memory_copy`].
    pub(crate) unsafe fn memory_fill(&self, dst: u32, val: u32, len: u32) -> Result<(), Trap> {
        if dst
            .checked_add(len)
            .map_or(true, |m| m > self.current_length)
        {
            return Err(Trap::new_from_runtime(TrapCode::HeapAccessOutOfBounds));
        }

        // Use `usize` + `add` to match `memory_copy` above (the previous
        // `isize` + `offset` form was equivalent for these in-bounds
        // offsets; this is purely for consistency).
        let dst = usize::try_from(dst).unwrap();
        // Only the low byte of `val` is meaningful, per `memory.fill`.
        let val = val as u8;
        let dst = self.base.add(dst);
        ptr::write_bytes(dst, val, len as usize);
        Ok(())
    }
}
#[cfg(test)]
mod test_vmmemory_definition {
    use super::VMMemoryDefinition;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Generated code loads `base`/`current_length` at `VMOffsets`-computed
    /// offsets; verify the Rust layout matches.
    #[test]
    fn check_vmmemory_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMMemoryDefinition>(),
            usize::from(offsets.size_of_vmmemory_definition())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, base),
            usize::from(offsets.vmmemory_definition_base())
        );
        assert_eq!(
            offset_of!(VMMemoryDefinition, current_length),
            usize::from(offsets.vmmemory_definition_current_length())
        );
    }
}
/// The shared description of a table that generated code reads directly:
/// the base of the element storage and the current element count used for
/// bounds checks. `#[repr(C)]`; layout is pinned by the test module below.
#[derive(Debug, Clone, Copy)]
#[repr(C)]
pub struct VMTableDefinition {
    /// Base address of the table's element storage.
    pub base: *mut u8,
    /// Current number of elements in the table.
    pub current_elements: u32,
}
#[cfg(test)]
mod test_vmtable_definition {
    use super::VMTableDefinition;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Verify `VMTableDefinition`'s Rust layout matches the offsets
    /// `VMOffsets` hands to the compiler.
    #[test]
    fn check_vmtable_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMTableDefinition>(),
            usize::from(offsets.size_of_vmtable_definition())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, base),
            usize::from(offsets.vmtable_definition_base())
        );
        assert_eq!(
            offset_of!(VMTableDefinition, current_elements),
            usize::from(offsets.vmtable_definition_current_elements())
        );
    }
}
/// Type-erased storage for one global's value: 16 bytes, 16-byte aligned,
/// large enough for any wasm value type up to v128. The accessors in the
/// `impl` below reinterpret this storage as the concrete type.
#[derive(Debug, Clone)]
#[repr(C, align(16))]
pub struct VMGlobalDefinition {
    // Raw value bytes; zero-initialized by `new()`.
    storage: [u8; 16],
}
#[cfg(test)]
mod test_vmglobal_definition {
    use super::VMGlobalDefinition;
    use crate::{ModuleInfo, VMOffsets};
    use more_asserts::assert_ge;
    use std::mem::{align_of, size_of};

    /// The storage must be aligned for every type the accessors
    /// reinterpret it as (i32/i64/f32/f64/16-byte vectors).
    #[test]
    fn check_vmglobal_definition_alignment() {
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<i64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f32>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<f64>());
        assert_ge!(align_of::<VMGlobalDefinition>(), align_of::<[u8; 16]>());
    }

    /// Locally-defined globals are stored as pointers to definitions;
    /// check `VMOffsets` agrees on that slot size.
    #[test]
    fn check_vmglobal_definition_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<*const VMGlobalDefinition>(),
            usize::from(offsets.size_of_vmglobal_local())
        );
    }

    /// The globals region inside the vmctx must start 16-byte aligned so
    /// each `align(16)` definition is properly placed.
    #[test]
    fn check_vmglobal_begins_aligned() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(offsets.vmctx_globals_begin() % 16, 0);
    }
}
impl VMGlobalDefinition {
    /// Construct a zero-initialized global definition.
    pub fn new() -> Self {
        Self { storage: [0; 16] }
    }

    // All accessors below reinterpret the first bytes of `storage` as the
    // requested type. They are `unsafe` because nothing here tracks the
    // global's actual wasm type: the caller must ensure the global really
    // holds a value of the accessed type. Alignment is guaranteed by
    // `#[repr(C, align(16))]` (hence the clippy allow).

    /// Read the value as an `i32`. Caller must ensure the global is i32-typed.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_i32(&self) -> &i32 {
        &*(self.storage.as_ref().as_ptr() as *const i32)
    }
    /// Mutable access to the value as an `i32`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_i32_mut(&mut self) -> &mut i32 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut i32)
    }
    /// Read the value as a `u32` (same bytes as `as_i32`, unsigned view).
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u32(&self) -> &u32 {
        &*(self.storage.as_ref().as_ptr() as *const u32)
    }
    /// Mutable access to the value as a `u32`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u32_mut(&mut self) -> &mut u32 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut u32)
    }
    /// Read the value as an `i64`. Caller must ensure the global is i64-typed.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_i64(&self) -> &i64 {
        &*(self.storage.as_ref().as_ptr() as *const i64)
    }
    /// Mutable access to the value as an `i64`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_i64_mut(&mut self) -> &mut i64 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut i64)
    }
    /// Read the value as a `u64` (unsigned view of the i64 bytes).
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u64(&self) -> &u64 {
        &*(self.storage.as_ref().as_ptr() as *const u64)
    }
    /// Mutable access to the value as a `u64`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u64_mut(&mut self) -> &mut u64 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut u64)
    }
    /// Read the value as an `f32`. Caller must ensure the global is f32-typed.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f32(&self) -> &f32 {
        &*(self.storage.as_ref().as_ptr() as *const f32)
    }
    /// Mutable access to the value as an `f32`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f32_mut(&mut self) -> &mut f32 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut f32)
    }
    /// Read the raw bit pattern of the f32 value as a `u32`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f32_bits(&self) -> &u32 {
        &*(self.storage.as_ref().as_ptr() as *const u32)
    }
    /// Mutable access to the f32 bit pattern as a `u32`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f32_bits_mut(&mut self) -> &mut u32 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut u32)
    }
    /// Read the value as an `f64`. Caller must ensure the global is f64-typed.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f64(&self) -> &f64 {
        &*(self.storage.as_ref().as_ptr() as *const f64)
    }
    /// Mutable access to the value as an `f64`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f64_mut(&mut self) -> &mut f64 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut f64)
    }
    /// Read the raw bit pattern of the f64 value as a `u64`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f64_bits(&self) -> &u64 {
        &*(self.storage.as_ref().as_ptr() as *const u64)
    }
    /// Mutable access to the f64 bit pattern as a `u64`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_f64_bits_mut(&mut self) -> &mut u64 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut u64)
    }
    /// Read the value as a `u128` (full 16-byte storage).
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u128(&self) -> &u128 {
        &*(self.storage.as_ref().as_ptr() as *const u128)
    }
    /// Mutable access to the value as a `u128`.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u128_mut(&mut self) -> &mut u128 {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut u128)
    }
    /// Read the full 16 bytes of storage as a byte array.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u128_bits(&self) -> &[u8; 16] {
        &*(self.storage.as_ref().as_ptr() as *const [u8; 16])
    }
    /// Mutable access to the full 16 bytes of storage.
    #[allow(clippy::cast_ptr_alignment)]
    pub unsafe fn as_u128_bits_mut(&mut self) -> &mut [u8; 16] {
        &mut *(self.storage.as_mut().as_mut_ptr() as *mut [u8; 16])
    }
}
/// An index into a runtime-wide table of deduplicated function signatures;
/// used by generated code for `call_indirect` signature checks, so its size
/// is pinned by `VMOffsets` (see test below).
#[repr(C)]
#[derive(Debug, Eq, PartialEq, Clone, Copy, Hash)]
pub struct VMSharedSignatureIndex(u32);
#[cfg(test)]
mod test_vmshared_signature_index {
    use super::VMSharedSignatureIndex;
    use crate::module::ModuleInfo;
    use crate::vmoffsets::{TargetSharedSignatureIndex, VMOffsets};
    use std::mem::size_of;

    /// Verify the runtime's index size matches what `VMOffsets` reports.
    #[test]
    fn check_vmshared_signature_index() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            usize::from(offsets.size_of_vmshared_signature_index())
        );
    }

    /// The runtime-side and target-side index types must be the same size,
    /// since generated code compares them directly.
    #[test]
    fn check_target_shared_signature_index() {
        assert_eq!(
            size_of::<VMSharedSignatureIndex>(),
            size_of::<TargetSharedSignatureIndex>()
        );
    }
}
impl VMSharedSignatureIndex {
    /// Create a new `VMSharedSignatureIndex` wrapping `value`.
    ///
    /// `const` so indices can be formed in constant contexts, matching the
    /// `const fn` style used elsewhere in this file (e.g.
    /// `VMBuiltinFunctionIndex`); this is backward compatible for all
    /// existing callers.
    pub const fn new(value: u32) -> Self {
        Self(value)
    }
}
impl Default for VMSharedSignatureIndex {
fn default() -> Self {
Self::new(u32::MAX)
}
}
/// The runtime representation of a `funcref` table element: the function's
/// body pointer, its signature index (checked by `call_indirect`), and the
/// vmctx to call it with. `#[repr(C)]`; layout pinned by the test below.
#[derive(Debug, Clone)]
#[repr(C)]
pub struct VMCallerCheckedAnyfunc {
    /// Pointer to the function's compiled body.
    pub func_ptr: *const VMFunctionBody,
    /// The function's signature, compared against the caller's expected
    /// signature before an indirect call.
    pub type_index: VMSharedSignatureIndex,
    /// The context to pass as the callee's first (implicit) argument.
    pub vmctx: *mut VMContext,
}
#[cfg(test)]
mod test_vmcaller_checked_anyfunc {
    use super::VMCallerCheckedAnyfunc;
    use crate::{ModuleInfo, VMOffsets};
    use memoffset::offset_of;
    use std::mem::size_of;

    /// Verify `VMCallerCheckedAnyfunc`'s Rust layout matches the offsets
    /// `VMOffsets` hands to the compiler.
    #[test]
    fn check_vmcaller_checked_anyfunc_offsets() {
        let module = ModuleInfo::new();
        let offsets = VMOffsets::new(size_of::<*mut u8>() as u8, &module);
        assert_eq!(
            size_of::<VMCallerCheckedAnyfunc>(),
            usize::from(offsets.size_of_vmcaller_checked_anyfunc())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, func_ptr),
            usize::from(offsets.vmcaller_checked_anyfunc_func_ptr())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, type_index),
            usize::from(offsets.vmcaller_checked_anyfunc_type_index())
        );
        assert_eq!(
            offset_of!(VMCallerCheckedAnyfunc, vmctx),
            usize::from(offsets.vmcaller_checked_anyfunc_vmctx())
        );
    }
}
impl Default for VMCallerCheckedAnyfunc {
    /// A "null funcref" entry: null function pointer, sentinel signature
    /// index, null vmctx.
    fn default() -> Self {
        Self {
            // `func_ptr` is `*const`, so use `ptr::null()`; the original
            // `ptr::null_mut()` only compiled via `*mut` -> `*const`
            // coercion. Same value, clearer intent.
            func_ptr: ptr::null(),
            type_index: Default::default(),
            vmctx: ptr::null_mut(),
        }
    }
}
/// An index into the `VMBuiltinFunctionsArray` table of runtime builtins
/// (libcalls) that generated code invokes indirectly.
#[derive(Copy, Clone, Debug)]
pub struct VMBuiltinFunctionIndex(u32);
impl VMBuiltinFunctionIndex {
    // NOTE: these indices must stay dense (0..total, no gaps), must match
    // the slots filled in `VMBuiltinFunctionsArray::initialized`, and
    // `builtin_functions_total_number` must be kept in sync when adding one.

    /// Index of `memory.grow` for locally-defined memories.
    pub const fn get_memory32_grow_index() -> Self {
        Self(0)
    }
    /// Index of `memory.grow` for imported memories.
    pub const fn get_imported_memory32_grow_index() -> Self {
        Self(1)
    }
    /// Index of `memory.size` for locally-defined memories.
    pub const fn get_memory32_size_index() -> Self {
        Self(2)
    }
    /// Index of `memory.size` for imported memories.
    pub const fn get_imported_memory32_size_index() -> Self {
        Self(3)
    }
    /// Index of `table.copy`.
    pub const fn get_table_copy_index() -> Self {
        Self(4)
    }
    /// Index of `table.init`.
    pub const fn get_table_init_index() -> Self {
        Self(5)
    }
    /// Index of `elem.drop`.
    pub const fn get_elem_drop_index() -> Self {
        Self(6)
    }
    /// Index of `memory.copy` for locally-defined memories.
    pub const fn get_local_memory_copy_index() -> Self {
        Self(7)
    }
    /// Index of `memory.copy` for imported memories.
    pub const fn get_imported_memory_copy_index() -> Self {
        Self(8)
    }
    /// Index of `memory.fill` for locally-defined memories.
    pub const fn get_memory_fill_index() -> Self {
        Self(9)
    }
    /// Index of `memory.fill` for imported memories.
    pub const fn get_imported_memory_fill_index() -> Self {
        Self(10)
    }
    /// Index of `memory.init`.
    pub const fn get_memory_init_index() -> Self {
        Self(11)
    }
    /// Index of `data.drop`.
    pub const fn get_data_drop_index() -> Self {
        Self(12)
    }
    /// Index of the raise-trap builtin.
    pub const fn get_raise_trap_index() -> Self {
        Self(13)
    }
    /// Total number of builtins; must equal the highest index above + 1.
    pub const fn builtin_functions_total_number() -> u32 {
        14
    }
    /// Return the raw index value.
    pub const fn index(self) -> u32 {
        self.0
    }
}
/// A fixed-size table of builtin (libcall) function addresses, indexed by
/// `VMBuiltinFunctionIndex`; generated code loads entries from it at
/// runtime. `#[repr(C)]` so the entries sit at predictable offsets.
#[repr(C)]
pub struct VMBuiltinFunctionsArray {
    // Each entry is a function address stored as `usize`.
    ptrs: [usize; Self::len()],
}
impl VMBuiltinFunctionsArray {
    /// Number of entries in the array (one per builtin).
    pub const fn len() -> usize {
        VMBuiltinFunctionIndex::builtin_functions_total_number() as usize
    }

    /// Build the array with every slot pointing at the corresponding
    /// libcall from `crate::libcalls`. The debug assertion at the end
    /// catches a slot left unfilled after adding a new builtin index.
    pub fn initialized() -> Self {
        use crate::libcalls::*;
        let mut ptrs = [0; Self::len()];
        ptrs[VMBuiltinFunctionIndex::get_memory32_grow_index().index() as usize] =
            wasmer_memory32_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_grow_index().index() as usize] =
            wasmer_imported_memory32_grow as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory32_size_index().index() as usize] =
            wasmer_memory32_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory32_size_index().index() as usize] =
            wasmer_imported_memory32_size as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_copy_index().index() as usize] =
            wasmer_table_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_table_init_index().index() as usize] =
            wasmer_table_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_elem_drop_index().index() as usize] =
            wasmer_elem_drop as usize;
        ptrs[VMBuiltinFunctionIndex::get_local_memory_copy_index().index() as usize] =
            wasmer_local_memory_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_copy_index().index() as usize] =
            wasmer_imported_memory_copy as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_fill_index().index() as usize] =
            wasmer_memory_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_imported_memory_fill_index().index() as usize] =
            wasmer_imported_memory_fill as usize;
        ptrs[VMBuiltinFunctionIndex::get_memory_init_index().index() as usize] =
            wasmer_memory_init as usize;
        ptrs[VMBuiltinFunctionIndex::get_data_drop_index().index() as usize] =
            wasmer_data_drop as usize;
        ptrs[VMBuiltinFunctionIndex::get_raise_trap_index().index() as usize] =
            wasmer_raise_trap as usize;
        // No libcall lives at address 0, so a zero entry means we missed one.
        debug_assert!(ptrs.iter().cloned().all(|p| p != 0));
        Self { ptrs }
    }
}
/// The VM "context" marker type: a zero-sized, 16-byte-aligned struct whose
/// *address* is what matters. Generated code receives a `*mut VMContext`
/// and indexes relative to it; `instance()` below recovers the owning
/// `Instance` from that address. Never constructed as a standalone value —
/// presumably it marks a position inside an `Instance` allocation
/// (TODO(review): confirm against `Instance`'s layout).
#[derive(Debug)]
#[repr(C, align(16))]
pub struct VMContext {}
impl VMContext {
    /// Recover the `Instance` this context lives inside by walking back
    /// `Instance::vmctx_offset()` bytes from `self`'s address.
    ///
    /// SAFETY: sound only when `self` actually sits at `vmctx_offset`
    /// within a live `Instance` allocation; a `VMContext` obtained any
    /// other way makes this undefined behavior.
    #[allow(clippy::cast_ptr_alignment)]
    #[inline]
    pub(crate) unsafe fn instance(&self) -> &Instance {
        &*((self as *const Self as *mut u8).offset(-Instance::vmctx_offset()) as *const Instance)
    }

    /// Return the host state stored on the owning `Instance`.
    ///
    /// SAFETY: same requirement as [`VMContext::instance`].
    #[inline]
    pub unsafe fn host_state(&self) -> &dyn Any {
        self.instance().host_state()
    }
}
/// Signature of a trampoline used to enter compiled wasm code:
/// takes the callee's vmctx, the function body to invoke, and a pointer to
/// a `u128` buffer — presumably holding the call's arguments and results
/// (each value in one 16-byte slot); TODO(review): confirm the buffer
/// convention against the trampoline generator.
pub type VMTrampoline = unsafe extern "C" fn(
    *mut VMContext,         // callee vmctx
    *const VMFunctionBody,  // function body to call
    *mut u128,              // values buffer (args in, results out)
);