#![deny(broken_intra_doc_links)]
use crate::{
FunctionIndex, GlobalIndex, LocalGlobalIndex, LocalMemoryIndex, LocalTableIndex, MemoryIndex,
ModuleInfo, SignatureIndex, TableIndex,
};
use more_asserts::assert_lt;
use std::convert::TryFrom;
/// An index identifying one of the VM's builtin functions inside the
/// `VMContext` builtin-functions array.
#[derive(Copy, Clone, Debug)]
pub struct VMBuiltinFunctionIndex(u32);

impl VMBuiltinFunctionIndex {
    /// Returns the index of the `memory.grow` builtin for local memories.
    pub const fn get_memory32_grow_index() -> Self {
        VMBuiltinFunctionIndex(0)
    }
    /// Returns the index of the `memory.grow` builtin for imported memories.
    pub const fn get_imported_memory32_grow_index() -> Self {
        VMBuiltinFunctionIndex(1)
    }
    /// Returns the index of the `memory.size` builtin for local memories.
    pub const fn get_memory32_size_index() -> Self {
        VMBuiltinFunctionIndex(2)
    }
    /// Returns the index of the `memory.size` builtin for imported memories.
    pub const fn get_imported_memory32_size_index() -> Self {
        VMBuiltinFunctionIndex(3)
    }
    /// Returns the index of the `table.copy` builtin.
    pub const fn get_table_copy_index() -> Self {
        VMBuiltinFunctionIndex(4)
    }
    /// Returns the index of the `table.init` builtin.
    pub const fn get_table_init_index() -> Self {
        VMBuiltinFunctionIndex(5)
    }
    /// Returns the index of the `elem.drop` builtin.
    pub const fn get_elem_drop_index() -> Self {
        VMBuiltinFunctionIndex(6)
    }
    /// Returns the index of the `memory.copy` builtin for local memories.
    pub const fn get_memory_copy_index() -> Self {
        VMBuiltinFunctionIndex(7)
    }
    /// Returns the index of the `memory.copy` builtin for imported memories.
    pub const fn get_imported_memory_copy_index() -> Self {
        VMBuiltinFunctionIndex(8)
    }
    /// Returns the index of the `memory.fill` builtin for local memories.
    pub const fn get_memory_fill_index() -> Self {
        VMBuiltinFunctionIndex(9)
    }
    /// Returns the index of the `memory.fill` builtin for imported memories.
    pub const fn get_imported_memory_fill_index() -> Self {
        VMBuiltinFunctionIndex(10)
    }
    /// Returns the index of the `memory.init` builtin.
    pub const fn get_memory_init_index() -> Self {
        VMBuiltinFunctionIndex(11)
    }
    /// Returns the index of the `data.drop` builtin.
    pub const fn get_data_drop_index() -> Self {
        VMBuiltinFunctionIndex(12)
    }
    /// Returns the index of the trap-raising builtin.
    pub const fn get_raise_trap_index() -> Self {
        VMBuiltinFunctionIndex(13)
    }
    /// Returns the index of the `table.size` builtin for local tables.
    pub const fn get_table_size_index() -> Self {
        VMBuiltinFunctionIndex(14)
    }
    /// Returns the index of the `table.size` builtin for imported tables.
    pub const fn get_imported_table_size_index() -> Self {
        VMBuiltinFunctionIndex(15)
    }
    /// Returns the index of the `table.grow` builtin for local tables.
    pub const fn get_table_grow_index() -> Self {
        VMBuiltinFunctionIndex(16)
    }
    /// Returns the index of the `table.grow` builtin for imported tables.
    pub const fn get_imported_table_grow_index() -> Self {
        VMBuiltinFunctionIndex(17)
    }
    /// Returns the index of the `table.get` builtin for local tables.
    pub const fn get_table_get_index() -> Self {
        VMBuiltinFunctionIndex(18)
    }
    /// Returns the index of the `table.get` builtin for imported tables.
    pub const fn get_imported_table_get_index() -> Self {
        VMBuiltinFunctionIndex(19)
    }
    /// Returns the index of the `table.set` builtin for local tables.
    pub const fn get_table_set_index() -> Self {
        VMBuiltinFunctionIndex(20)
    }
    /// Returns the index of the `table.set` builtin for imported tables.
    pub const fn get_imported_table_set_index() -> Self {
        VMBuiltinFunctionIndex(21)
    }
    /// Returns the index of the `func.ref` builtin.
    pub const fn get_func_ref_index() -> Self {
        VMBuiltinFunctionIndex(22)
    }
    /// Returns the index of the `table.fill` builtin.
    pub const fn get_table_fill_index() -> Self {
        VMBuiltinFunctionIndex(23)
    }
    /// Returns the index of the `memory.atomic.wait32` builtin for local memories.
    pub const fn get_memory_atomic_wait32_index() -> Self {
        VMBuiltinFunctionIndex(24)
    }
    /// Returns the index of the `memory.atomic.wait32` builtin for imported memories.
    pub const fn get_imported_memory_atomic_wait32_index() -> Self {
        VMBuiltinFunctionIndex(25)
    }
    /// Returns the index of the `memory.atomic.wait64` builtin for local memories.
    pub const fn get_memory_atomic_wait64_index() -> Self {
        VMBuiltinFunctionIndex(26)
    }
    /// Returns the index of the `memory.atomic.wait64` builtin for imported memories.
    pub const fn get_imported_memory_atomic_wait64_index() -> Self {
        VMBuiltinFunctionIndex(27)
    }
    /// Returns the index of the `memory.atomic.notify` builtin for local memories.
    pub const fn get_memory_atomic_notify_index() -> Self {
        VMBuiltinFunctionIndex(28)
    }
    /// Returns the index of the `memory.atomic.notify` builtin for imported memories.
    pub const fn get_imported_memory_atomic_notify_index() -> Self {
        VMBuiltinFunctionIndex(29)
    }
    /// Returns the total number of builtin functions (one past the last index).
    pub const fn builtin_functions_total_number() -> u32 {
        30
    }
    /// Returns the raw `u32` value of this index.
    pub const fn index(self) -> u32 {
        self.0
    }
}
/// Casts a `usize` to `u32`.
///
/// On a 32-bit target `usize` and `u32` have the same width, so this
/// conversion can never actually fail; the `unwrap` is unreachable.
#[cfg(target_pointer_width = "32")]
fn cast_to_u32(sz: usize) -> u32 {
u32::try_from(sz).unwrap()
}
/// Casts a `usize` to `u32`.
///
/// On a 64-bit target `usize` is wider than `u32`, so the conversion can
/// fail; a value that does not fit panics with a descriptive message.
#[cfg(target_pointer_width = "64")]
fn cast_to_u32(sz: usize) -> u32 {
    let narrowed: Result<u32, _> = u32::try_from(sz);
    narrowed.expect("overflow in cast from usize to u32")
}
/// Rounds `offset` up to the nearest multiple of `width`.
///
/// `width` must be non-zero (a zero width would divide by zero).
///
/// Written with the remainder form rather than the classic
/// `(offset + (width - 1)) / width * width`: the classic form overflows while
/// computing the intermediate `offset + (width - 1)` even for some inputs
/// whose rounded result fits in `u32`. This form only overflows when the
/// rounded result itself is unrepresentable.
#[inline]
const fn align(offset: u32, width: u32) -> u32 {
    let misalignment = offset % width;
    if misalignment == 0 {
        // Already a multiple of `width`; leave untouched.
        offset
    } else {
        // Round up by the distance to the next multiple.
        offset + (width - misalignment)
    }
}
/// Byte offsets of the regions and fields inside a `VMContext`, plus the
/// per-record offsets JIT code needs to address them directly.
#[derive(Clone, Debug)]
pub struct VMOffsets {
// Size of a target pointer in bytes.
pointer_size: u8,
// Number of signature declarations in the module.
num_signature_ids: u32,
// Number of imported functions in the module.
num_imported_functions: u32,
// Number of imported tables in the module.
num_imported_tables: u32,
// Number of imported memories in the module.
num_imported_memories: u32,
// Number of imported globals in the module.
num_imported_globals: u32,
// Number of locally-defined tables in the module.
num_local_tables: u32,
// Number of locally-defined memories in the module.
num_local_memories: u32,
// Number of locally-defined globals in the module.
num_local_globals: u32,
// Offsets of the successive regions inside `VMContext`, filled in by
// `precompute` (all zero for the trampoline-only constructor).
vmctx_signature_ids_begin: u32,
vmctx_imported_functions_begin: u32,
vmctx_imported_tables_begin: u32,
vmctx_imported_memories_begin: u32,
vmctx_imported_globals_begin: u32,
vmctx_tables_begin: u32,
vmctx_memories_begin: u32,
vmctx_globals_begin: u32,
vmctx_builtin_functions_begin: u32,
// Pointer-sized slot following the builtin-function table; the name says
// trap handler — presumably a host trap-handler pointer, confirm at use site.
vmctx_trap_handler_begin: u32,
// Pointer-sized slot; presumably points at gas-metering state — confirm
// against the code that writes it.
vmctx_gas_limiter_pointer: u32,
// Two consecutive 4-byte slots for the current and initial stack limits.
vmctx_stack_limit_begin: u32,
vmctx_stack_limit_initial_begin: u32,
// Total size in bytes of the laid-out `VMContext`.
size_of_vmctx: u32,
}
impl VMOffsets {
    /// Builds a fully-computed `VMOffsets` for `module`, given the target's
    /// pointer size in bytes.
    pub fn new(pointer_size: u8, module: &ModuleInfo) -> Self {
        let mut ret = Self {
            pointer_size,
            num_signature_ids: cast_to_u32(module.signatures.len()),
            num_imported_functions: cast_to_u32(module.num_imported_functions),
            num_imported_tables: cast_to_u32(module.num_imported_tables),
            num_imported_memories: cast_to_u32(module.num_imported_memories),
            num_imported_globals: cast_to_u32(module.num_imported_globals),
            num_local_tables: cast_to_u32(module.tables.len()),
            num_local_memories: cast_to_u32(module.memories.len()),
            num_local_globals: cast_to_u32(module.globals.len()),
            vmctx_signature_ids_begin: 0,
            vmctx_imported_functions_begin: 0,
            vmctx_imported_tables_begin: 0,
            vmctx_imported_memories_begin: 0,
            vmctx_imported_globals_begin: 0,
            vmctx_tables_begin: 0,
            vmctx_memories_begin: 0,
            vmctx_globals_begin: 0,
            vmctx_builtin_functions_begin: 0,
            vmctx_trap_handler_begin: 0,
            vmctx_gas_limiter_pointer: 0,
            vmctx_stack_limit_begin: 0,
            vmctx_stack_limit_initial_begin: 0,
            size_of_vmctx: 0,
        };
        ret.precompute();
        ret
    }

    /// Builds a `VMOffsets` that knows only the pointer size, for generating
    /// trampolines. Only the per-record offset/size accessors are meaningful
    /// on the result: all counts and `vmctx_*` offsets stay zero because
    /// `precompute` is deliberately not run (there is no module to lay out).
    pub fn new_for_trampolines(pointer_size: u8) -> Self {
        Self {
            pointer_size,
            num_signature_ids: 0,
            num_imported_functions: 0,
            num_imported_tables: 0,
            num_imported_memories: 0,
            num_imported_globals: 0,
            num_local_tables: 0,
            num_local_memories: 0,
            num_local_globals: 0,
            vmctx_signature_ids_begin: 0,
            vmctx_imported_functions_begin: 0,
            vmctx_imported_tables_begin: 0,
            vmctx_imported_memories_begin: 0,
            vmctx_imported_globals_begin: 0,
            vmctx_tables_begin: 0,
            vmctx_memories_begin: 0,
            vmctx_globals_begin: 0,
            vmctx_builtin_functions_begin: 0,
            vmctx_trap_handler_begin: 0,
            vmctx_gas_limiter_pointer: 0,
            vmctx_stack_limit_begin: 0,
            vmctx_stack_limit_initial_begin: 0,
            size_of_vmctx: 0,
        }
    }

    /// Number of locally-defined (non-imported) tables.
    pub fn num_local_tables(&self) -> u32 {
        self.num_local_tables
    }

    /// Number of locally-defined (non-imported) memories.
    pub fn num_local_memories(&self) -> u32 {
        self.num_local_memories
    }

    /// Lays out the `VMContext` regions in order, computing each region's
    /// starting offset and finally the total size. All arithmetic is
    /// checked so an oversized module panics instead of silently wrapping.
    fn precompute(&mut self) {
        // base + num_items * item_size, panicking on u32 overflow.
        fn offset_by(base: u32, num_items: u32, item_size: u32) -> u32 {
            base.checked_add(num_items.checked_mul(item_size).unwrap())
                .unwrap()
        }

        self.vmctx_signature_ids_begin = 0;
        self.vmctx_imported_functions_begin = offset_by(
            self.vmctx_signature_ids_begin,
            self.num_signature_ids,
            u32::from(self.size_of_vmshared_signature_index()),
        );
        self.vmctx_imported_tables_begin = offset_by(
            self.vmctx_imported_functions_begin,
            self.num_imported_functions,
            u32::from(self.size_of_vmfunction_import()),
        );
        self.vmctx_imported_memories_begin = offset_by(
            self.vmctx_imported_tables_begin,
            self.num_imported_tables,
            u32::from(self.size_of_vmtable_import()),
        );
        self.vmctx_imported_globals_begin = offset_by(
            self.vmctx_imported_memories_begin,
            self.num_imported_memories,
            u32::from(self.size_of_vmmemory_import()),
        );
        self.vmctx_tables_begin = offset_by(
            self.vmctx_imported_globals_begin,
            self.num_imported_globals,
            u32::from(self.size_of_vmglobal_import()),
        );
        self.vmctx_memories_begin = offset_by(
            self.vmctx_tables_begin,
            self.num_local_tables,
            u32::from(self.size_of_vmtable_definition()),
        );
        // The globals region is 16-byte aligned — presumably so 128-bit
        // (v128) globals can be accessed at their natural alignment; confirm
        // against the code that stores globals. TODO(review): verify.
        self.vmctx_globals_begin = align(
            offset_by(
                self.vmctx_memories_begin,
                self.num_local_memories,
                u32::from(self.size_of_vmmemory_definition()),
            ),
            16,
        );
        self.vmctx_builtin_functions_begin = offset_by(
            self.vmctx_globals_begin,
            self.num_local_globals,
            u32::from(self.size_of_vmglobal_local()),
        );
        // One pointer-sized slot per builtin function, then one pointer-sized
        // slot each for the trap handler and the gas limiter pointer.
        self.vmctx_trap_handler_begin = offset_by(
            self.vmctx_builtin_functions_begin,
            VMBuiltinFunctionIndex::builtin_functions_total_number(),
            u32::from(self.pointer_size),
        );
        self.vmctx_gas_limiter_pointer = offset_by(
            self.vmctx_trap_handler_begin,
            1,
            u32::from(self.pointer_size),
        );
        self.vmctx_stack_limit_begin = offset_by(
            self.vmctx_gas_limiter_pointer,
            1,
            u32::from(self.pointer_size),
        );
        self.vmctx_stack_limit_initial_begin =
            self.vmctx_stack_limit_begin.checked_add(4).unwrap();
        // BUG FIX: the total size must extend past the *last* 4-byte field
        // (`stack_limit_initial`), so it is measured from
        // `vmctx_stack_limit_initial_begin`. The previous code repeated the
        // line above (`vmctx_stack_limit_begin + 4`), which made
        // `size_of_vmctx` equal to `vmctx_stack_limit_initial_begin` and left
        // the final field outside the reported context size.
        self.size_of_vmctx = self
            .vmctx_stack_limit_initial_begin
            .checked_add(4)
            .unwrap();
    }
}
/// Offsets into a function-import record.
impl VMOffsets {
    /// The offset of the `body` field — the first field, at offset zero.
    pub const fn vmfunction_import_body(&self) -> u8 {
        0
    }

    /// The offset of the `vmctx` field: one pointer past the start.
    pub const fn vmfunction_import_vmctx(&self) -> u8 {
        self.pointer_size
    }

    /// The offset of the `handle` field: two pointers past the start.
    pub const fn vmfunction_import_handle(&self) -> u8 {
        2 * self.pointer_size
    }

    /// The total record size: three pointer-sized fields.
    pub const fn size_of_vmfunction_import(&self) -> u8 {
        3 * self.pointer_size
    }
}
/// Offsets into a dynamic-function import context record.
impl VMOffsets {
    /// The offset of the `address` field — the first field, at offset zero.
    pub const fn vmdynamicfunction_import_context_address(&self) -> u8 {
        0
    }

    /// The offset of the `ctx` field: one pointer past the start.
    pub const fn vmdynamicfunction_import_context_ctx(&self) -> u8 {
        self.pointer_size
    }

    /// The total record size: two pointer-sized fields.
    pub const fn size_of_vmdynamicfunction_import_context(&self) -> u8 {
        2 * self.pointer_size
    }
}
impl VMOffsets {
    /// The size of a raw function-body pointer: one pointer width.
    pub const fn size_of_vmfunction_body_ptr(&self) -> u8 {
        self.pointer_size
    }
}
/// Offsets into a table-import record.
impl VMOffsets {
    /// The offset of the `definition` field — the first field, at offset zero.
    pub const fn vmtable_import_definition(&self) -> u8 {
        0
    }

    /// The offset of the `handle` field: one pointer past the start.
    pub const fn vmtable_import_handle(&self) -> u8 {
        self.pointer_size
    }

    /// The total record size: two pointer-sized fields.
    pub const fn size_of_vmtable_import(&self) -> u8 {
        2 * self.pointer_size
    }
}
/// Offsets into a table-definition record.
impl VMOffsets {
    /// The offset of the `base` field — the first field, at offset zero.
    pub const fn vmtable_definition_base(&self) -> u8 {
        0
    }

    /// The offset of the `current_elements` field: one pointer past `base`.
    pub const fn vmtable_definition_current_elements(&self) -> u8 {
        self.pointer_size
    }

    /// The size in bytes of the `current_elements` field (a 32-bit count).
    pub const fn size_of_vmtable_definition_current_elements(&self) -> u8 {
        4
    }

    /// The total record size: two pointer widths. NOTE(review): on 64-bit
    /// targets this leaves 4 bytes past the 4-byte `current_elements` field —
    /// presumably padding; confirm against the record's declaration.
    pub const fn size_of_vmtable_definition(&self) -> u8 {
        2 * self.pointer_size
    }
}
/// Offsets into a memory-import record.
impl VMOffsets {
    /// The offset of the `definition` field — the first field, at offset zero.
    pub const fn vmmemory_import_definition(&self) -> u8 {
        0
    }

    /// The offset of the `handle` field: one pointer past the start.
    pub const fn vmmemory_import_handle(&self) -> u8 {
        self.pointer_size
    }

    /// The total record size: two pointer-sized fields.
    pub const fn size_of_vmmemory_import(&self) -> u8 {
        2 * self.pointer_size
    }
}
/// Offsets into a memory-definition record.
impl VMOffsets {
    /// The offset of the `base` field — the first field, at offset zero.
    pub const fn vmmemory_definition_base(&self) -> u8 {
        0
    }

    /// The offset of the `current_length` field: one pointer past `base`.
    pub const fn vmmemory_definition_current_length(&self) -> u8 {
        self.pointer_size
    }

    /// The size in bytes of the `current_length` field (a 32-bit length).
    pub const fn size_of_vmmemory_definition_current_length(&self) -> u8 {
        4
    }

    /// The total record size: two pointer widths. NOTE(review): on 64-bit
    /// targets this leaves 4 bytes past the 4-byte `current_length` field —
    /// presumably padding; confirm against the record's declaration.
    pub const fn size_of_vmmemory_definition(&self) -> u8 {
        2 * self.pointer_size
    }
}
/// Offsets into a global-import record.
impl VMOffsets {
    /// The offset of the `definition` field — the first field, at offset zero.
    pub const fn vmglobal_import_definition(&self) -> u8 {
        0
    }

    /// The offset of the `handle` field: one pointer past the start.
    pub const fn vmglobal_import_handle(&self) -> u8 {
        self.pointer_size
    }

    /// The total record size: two pointer-sized fields.
    pub const fn size_of_vmglobal_import(&self) -> u8 {
        2 * self.pointer_size
    }
}
impl VMOffsets {
/// The size of a locally-defined global slot in `VMContext`: one pointer
/// width (the slot holds a pointer to the global's storage — presumably;
/// confirm at the use site).
pub const fn size_of_vmglobal_local(&self) -> u8 {
self.pointer_size
}
}
impl VMOffsets {
/// The size of a shared signature id: 4 bytes (a 32-bit index).
pub const fn size_of_vmshared_signature_index(&self) -> u8 {
4
}
}
/// Offsets into a caller-checked anyfunc record.
impl VMOffsets {
    /// The offset of the `func_ptr` field — the first field, at offset zero.
    pub const fn vmcaller_checked_anyfunc_func_ptr(&self) -> u8 {
        0
    }

    /// The offset of the `type_index` field: one pointer past the start.
    pub const fn vmcaller_checked_anyfunc_type_index(&self) -> u8 {
        self.pointer_size
    }

    /// The offset of the `vmctx` field: two pointers past the start.
    pub const fn vmcaller_checked_anyfunc_vmctx(&self) -> u8 {
        2 * self.pointer_size
    }

    /// The offset of the `call_trampoline` field: three pointers past the start.
    pub const fn vmcaller_checked_anyfunc_call_trampoline(&self) -> u8 {
        3 * self.pointer_size
    }

    /// The total record size: four pointer-sized fields.
    pub const fn size_of_vmcaller_checked_anyfunc(&self) -> u8 {
        4 * self.pointer_size
    }
}
/// Offsets into a funcref record.
impl VMOffsets {
    /// The offset of the `anyfunc` pointer — the first field, at offset zero.
    pub const fn vm_funcref_anyfunc_ptr(&self) -> u8 {
        0
    }

    /// The total record size: a single pointer.
    pub const fn size_of_vm_funcref(&self) -> u8 {
        self.pointer_size
    }
}
/// Offsets of the regions and individual entries inside `VMContext`.
///
/// The stray `#[allow(clippy::erasing_op)]` / `#[allow(clippy::identity_op)]`
/// attributes that sat on two of the plain field getters were vestigial
/// (no arithmetic remains in them) and have been removed.
impl VMOffsets {
    /// The offset of the shared-signature-ids region.
    pub fn vmctx_signature_ids_begin(&self) -> u32 {
        self.vmctx_signature_ids_begin
    }

    /// The offset of the imported-functions region.
    pub fn vmctx_imported_functions_begin(&self) -> u32 {
        self.vmctx_imported_functions_begin
    }

    /// The offset of the imported-tables region.
    pub fn vmctx_imported_tables_begin(&self) -> u32 {
        self.vmctx_imported_tables_begin
    }

    /// The offset of the imported-memories region.
    pub fn vmctx_imported_memories_begin(&self) -> u32 {
        self.vmctx_imported_memories_begin
    }

    /// The offset of the imported-globals region.
    pub fn vmctx_imported_globals_begin(&self) -> u32 {
        self.vmctx_imported_globals_begin
    }

    /// The offset of the local-table-definitions region.
    pub fn vmctx_tables_begin(&self) -> u32 {
        self.vmctx_tables_begin
    }

    /// The offset of the local-memory-definitions region.
    pub fn vmctx_memories_begin(&self) -> u32 {
        self.vmctx_memories_begin
    }

    /// The offset of the local-globals region (16-byte aligned, see `precompute`).
    pub fn vmctx_globals_begin(&self) -> u32 {
        self.vmctx_globals_begin
    }

    /// The offset of the builtin-functions pointer table.
    pub fn vmctx_builtin_functions_begin(&self) -> u32 {
        self.vmctx_builtin_functions_begin
    }

    /// The total size in bytes of the laid-out `VMContext`.
    pub fn size_of_vmctx(&self) -> u32 {
        self.size_of_vmctx
    }

    /// The offset of the shared signature id for signature `index`.
    ///
    /// Panics if `index` is out of range for this module.
    pub fn vmctx_vmshared_signature_id(&self, index: SignatureIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_signature_ids);
        self.vmctx_signature_ids_begin
            + index.as_u32() * u32::from(self.size_of_vmshared_signature_index())
    }

    /// The offset of the import record for function `index`.
    ///
    /// Panics if `index` is not an imported function.
    pub fn vmctx_vmfunction_import(&self, index: FunctionIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_imported_functions);
        self.vmctx_imported_functions_begin
            + index.as_u32() * u32::from(self.size_of_vmfunction_import())
    }

    /// The offset of the import record for table `index`.
    ///
    /// Panics if `index` is not an imported table.
    pub fn vmctx_vmtable_import(&self, index: TableIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_imported_tables);
        self.vmctx_imported_tables_begin + index.as_u32() * u32::from(self.size_of_vmtable_import())
    }

    /// The offset of the import record for memory `index`.
    ///
    /// Panics if `index` is not an imported memory.
    pub fn vmctx_vmmemory_import(&self, index: MemoryIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_imported_memories);
        self.vmctx_imported_memories_begin
            + index.as_u32() * u32::from(self.size_of_vmmemory_import())
    }

    /// The offset of the import record for global `index`.
    ///
    /// Panics if `index` is not an imported global.
    pub fn vmctx_vmglobal_import(&self, index: GlobalIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_imported_globals);
        self.vmctx_imported_globals_begin
            + index.as_u32() * u32::from(self.size_of_vmglobal_import())
    }

    /// The offset of the definition record for local table `index`.
    ///
    /// Panics if `index` is out of range.
    pub fn vmctx_vmtable_definition(&self, index: LocalTableIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_local_tables);
        self.vmctx_tables_begin + index.as_u32() * u32::from(self.size_of_vmtable_definition())
    }

    /// The offset of the definition record for local memory `index`.
    ///
    /// Panics if `index` is out of range.
    pub fn vmctx_vmmemory_definition(&self, index: LocalMemoryIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_local_memories);
        self.vmctx_memories_begin + index.as_u32() * u32::from(self.size_of_vmmemory_definition())
    }

    /// The offset of the definition slot for local global `index`.
    ///
    /// Panics if `index` is out of range.
    pub fn vmctx_vmglobal_definition(&self, index: LocalGlobalIndex) -> u32 {
        assert_lt!(index.as_u32(), self.num_local_globals);
        self.vmctx_globals_begin + index.as_u32() * u32::from(self.size_of_vmglobal_local())
    }

    /// The offset of the `body` field inside imported function `index`'s record.
    pub fn vmctx_vmfunction_import_body(&self, index: FunctionIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_body())
    }

    /// The offset of the `vmctx` field inside imported function `index`'s record.
    pub fn vmctx_vmfunction_import_vmctx(&self, index: FunctionIndex) -> u32 {
        self.vmctx_vmfunction_import(index) + u32::from(self.vmfunction_import_vmctx())
    }

    /// The offset of the `definition` field inside imported table `index`'s record.
    pub fn vmctx_vmtable_import_definition(&self, index: TableIndex) -> u32 {
        self.vmctx_vmtable_import(index) + u32::from(self.vmtable_import_definition())
    }

    /// The offset of the `base` field inside local table `index`'s definition.
    pub fn vmctx_vmtable_definition_base(&self, index: LocalTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_base())
    }

    /// The offset of the `current_elements` field inside local table `index`'s definition.
    pub fn vmctx_vmtable_definition_current_elements(&self, index: LocalTableIndex) -> u32 {
        self.vmctx_vmtable_definition(index) + u32::from(self.vmtable_definition_current_elements())
    }

    /// The offset of the `definition` field inside imported memory `index`'s record.
    pub fn vmctx_vmmemory_import_definition(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_definition())
    }

    /// The offset of the `handle` field inside imported memory `index`'s record.
    pub fn vmctx_vmmemory_import_handle(&self, index: MemoryIndex) -> u32 {
        self.vmctx_vmmemory_import(index) + u32::from(self.vmmemory_import_handle())
    }

    /// The offset of the `base` field inside local memory `index`'s definition.
    pub fn vmctx_vmmemory_definition_base(&self, index: LocalMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.vmmemory_definition_base())
    }

    /// The offset of the `current_length` field inside local memory `index`'s definition.
    pub fn vmctx_vmmemory_definition_current_length(&self, index: LocalMemoryIndex) -> u32 {
        self.vmctx_vmmemory_definition(index) + u32::from(self.vmmemory_definition_current_length())
    }

    /// The offset of the `definition` field inside imported global `index`'s record.
    pub fn vmctx_vmglobal_import_definition(&self, index: GlobalIndex) -> u32 {
        self.vmctx_vmglobal_import(index) + u32::from(self.vmglobal_import_definition())
    }

    /// The offset of builtin function `index`'s pointer-sized slot.
    pub fn vmctx_builtin_function(&self, index: VMBuiltinFunctionIndex) -> u32 {
        self.vmctx_builtin_functions_begin + index.index() * u32::from(self.pointer_size)
    }
}
/// A target-specific shared signature index — an opaque wrapper around a raw
/// `u32`.
#[derive(Debug, Copy, Clone)]
pub struct TargetSharedSignatureIndex(u32);

impl TargetSharedSignatureIndex {
    /// Wraps `value` as a `TargetSharedSignatureIndex`.
    pub const fn new(value: u32) -> Self {
        TargetSharedSignatureIndex(value)
    }

    /// Returns the raw `u32` value.
    pub const fn index(self) -> u32 {
        self.0
    }
}
#[cfg(test)]
mod tests {
    use crate::vmoffsets::align;

    /// `align` must round up to the nearest multiple and leave exact
    /// multiples untouched.
    #[test]
    fn alignment() {
        fn is_aligned(x: u32) -> bool {
            x % 16 == 0
        }
        assert!(is_aligned(align(0, 16)));
        assert!(is_aligned(align(32, 16)));
        assert!(is_aligned(align(33, 16)));
        assert!(is_aligned(align(31, 16)));
        // Divisibility alone would accept rounding *down*; pin exact values.
        assert_eq!(align(0, 16), 0);
        assert_eq!(align(16, 16), 16);
        assert_eq!(align(17, 16), 32);
        assert_eq!(align(31, 16), 32);
        assert_eq!(align(32, 16), 32);
        assert_eq!(align(33, 16), 48);
    }
}