// wasmer_engine_universal/code_memory.rs

use crate::unwind::UnwindRegistry;
use loupe::MemoryUsage;
use wasmer_compiler::{CompiledFunctionUnwindInfo, CustomSection, FunctionBody};
use wasmer_vm::{Mmap, VMFunctionBody};
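/// Alignment applied to function bodies and executable custom sections;
/// 16 bytes is a conservative choice for function entry alignment on the
/// supported architectures.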
const ARCH_FUNCTION_ALIGNMENT: usize = 16;
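/// Alignment applied to read-only data sections; 64 bytes matches a typical
/// cache-line size.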
const DATA_SECTION_ALIGNMENT: usize = 64;
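/// Memory manager for the executable code and read-only data of compiled
/// functions. It owns a single `Mmap` (code at the front, data sections on
/// the trailing pages) together with the unwind information registered for
/// that code.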
#[derive(MemoryUsage)]
pub struct CodeMemory {
unwind_registry: UnwindRegistry,
mmap: Mmap,
start_of_nonexecutable_pages: usize,
}
impl CodeMemory {
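/// Create a new `CodeMemory` instance backed by an empty mapping.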
pub fn new() -> Self {
Self {
unwind_registry: UnwindRegistry::new(),
mmap: Mmap::new(),
start_of_nonexecutable_pages: 0,
}
}
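/// Mutable access to the unwind registry backing this code memory.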
pub fn unwind_registry_mut(&mut self) -> &mut UnwindRegistry {
&mut self.unwind_registry
}
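/// Allocate a single contiguous mapping large enough for the given function
/// bodies, executable custom sections and data sections, copy them into it
/// in that order (data sections start on a fresh page), and return mutable
/// views into the mapping, one `Vec` per input kind and in input order.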
pub fn allocate(
&mut self,
functions: &[&FunctionBody],
executable_sections: &[&CustomSection],
data_sections: &[&CustomSection],
) -> Result<(Vec<&mut [VMFunctionBody]>, Vec<&mut [u8]>, Vec<&mut [u8]>), String> {
let mut function_result = vec![];
let mut data_section_result = vec![];
let mut executable_section_result = vec![];
let page_size = region::page::size();
let total_len = round_up(
functions.iter().fold(0, |acc, func| {
round_up(
acc + Self::function_allocation_size(func),
ARCH_FUNCTION_ALIGNMENT,
)
}) + executable_sections.iter().fold(0, |acc, exec| {
round_up(acc + exec.bytes.len(), ARCH_FUNCTION_ALIGNMENT)
}),
page_size,
) + data_sections.iter().fold(0, |acc, data| {
round_up(acc + data.bytes.len(), DATA_SECTION_ALIGNMENT)
});
self.mmap = Mmap::with_at_least(total_len)?;
let mut bytes = 0;
let mut buf = self.mmap.as_mut_slice();
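// Copy the function bodies first; each one is padded so that the next body
// starts on a 16-byte boundary.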
for func in functions {
let len = round_up(
Self::function_allocation_size(func),
ARCH_FUNCTION_ALIGNMENT,
);
let (func_buf, next_buf) = buf.split_at_mut(len);
buf = next_buf;
bytes += len;
let vmfunc = Self::copy_function(&mut self.unwind_registry, func, func_buf);
assert_eq!(vmfunc.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
function_result.push(vmfunc);
}
for section in executable_sections {
let section = &section.bytes;
assert_eq!(buf.as_mut_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
let len = round_up(section.len(), ARCH_FUNCTION_ALIGNMENT);
let (s, next_buf) = buf.split_at_mut(len);
buf = next_buf;
bytes += len;
s[..section.len()].copy_from_slice(section.as_slice());
executable_section_result.push(s);
}
self.start_of_nonexecutable_pages = bytes;
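// Data sections start on the next page boundary so that `publish` can make
// the code pages read+execute without touching the writable data.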
if !data_sections.is_empty() {
let padding = round_up(bytes, page_size) - bytes;
buf = buf.split_at_mut(padding).1;
for section in data_sections {
let section = &section.bytes;
assert_eq!(buf.as_mut_ptr() as usize % DATA_SECTION_ALIGNMENT, 0);
let len = round_up(section.len(), DATA_SECTION_ALIGNMENT);
let (s, next_buf) = buf.split_at_mut(len);
buf = next_buf;
s[..section.len()].copy_from_slice(section.as_slice());
data_section_result.push(s);
}
}
Ok((
function_result,
executable_section_result,
data_section_result,
))
}
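/// Apply read+execute protection to the pages holding the copied code.
/// Call this once all functions and executable sections have been copied
/// and relocated; the data sections at the tail stay read+write.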
pub fn publish(&mut self) {
if self.mmap.is_empty() || self.start_of_nonexecutable_pages == 0 {
return;
}
assert!(self.mmap.len() >= self.start_of_nonexecutable_pages);
unsafe {
region::protect(
self.mmap.as_mut_ptr(),
self.start_of_nonexecutable_pages,
region::Protection::READ_EXECUTE,
)
}
.expect("unable to make memory readonly and executable");
}
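/// Number of bytes needed to store a function body, including, on Windows
/// x64, its unwind info placed after the body at a 4-byte boundary.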
fn function_allocation_size(func: &FunctionBody) -> usize {
match &func.unwind_info {
Some(CompiledFunctionUnwindInfo::WindowsX64(info)) => {
((func.body.len() + 3) & !3) + info.len()
}
_ => func.body.len(),
}
}
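/// Copy a compiled function (and its trailing Windows x64 unwind info, if
/// any) into `buf`, register its unwind information, and return the body as
/// a `VMFunctionBody` slice.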
fn copy_function<'a>(
registry: &mut UnwindRegistry,
func: &FunctionBody,
buf: &'a mut [u8],
) -> &'a mut [VMFunctionBody] {
assert_eq!(buf.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
let func_len = func.body.len();
let (body, remainder) = buf.split_at_mut(func_len);
body.copy_from_slice(&func.body);
let vmfunc = Self::view_as_mut_vmfunc_slice(body);
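// Windows x64 unwind info is written directly after the body, padded so
// that it starts on a 4-byte boundary.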
if let Some(CompiledFunctionUnwindInfo::WindowsX64(info)) = &func.unwind_info {
let unwind_start = (func_len + 3) & !3;
let unwind_size = info.len();
let padding = unwind_start - func_len;
assert_eq!((func_len + padding) % 4, 0);
let slice = remainder.split_at_mut(padding + unwind_size).0;
slice[padding..].copy_from_slice(&info);
}
if let Some(info) = &func.unwind_info {
registry
.register(vmfunc.as_ptr() as usize, 0, func_len as u32, info)
.expect("failed to register unwind information");
}
vmfunc
}
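/// Reinterpret a mutable byte slice as a `VMFunctionBody` slice without
/// copying.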
fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
let byte_ptr: *mut [u8] = slice;
let body_ptr = byte_ptr as *mut [VMFunctionBody];
unsafe { &mut *body_ptr }
}
}
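/// Round `size` up to the nearest multiple of `multiple`, which must be a
/// power of two.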
fn round_up(size: usize, multiple: usize) -> usize {
debug_assert!(multiple.is_power_of_two());
(size + (multiple - 1)) & !(multiple - 1)
}
#[cfg(test)]
mod tests {
use super::CodeMemory;
fn _assert() {
fn _assert_send_sync<T: Send + Sync>() {}
_assert_send_sync::<CodeMemory>();
}
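// Illustrative sanity check for the private `round_up` helper, added as an
// example of the alignment arithmetic above; not part of the original suite.
#[test]
fn round_up_matches_alignment() {
use super::round_up;
assert_eq!(round_up(0, 16), 0);
assert_eq!(round_up(1, 16), 16);
assert_eq!(round_up(16, 16), 16);
assert_eq!(round_up(17, 64), 64);
}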
}