// wasmer_compiler/engine/code_memory.rs

1// This file contains code from external sources.
2// Attributions: https://github.com/wasmerio/wasmer/blob/main/docs/ATTRIBUTIONS.md
3
4//! Memory management for executable code.
5use super::unwind::UnwindRegistry;
6use crate::{
7    types::{
8        function::FunctionBodyLike,
9        section::CustomSectionLike,
10        unwind::{CompiledFunctionUnwindInfoLike, CompiledFunctionUnwindInfoReference},
11    },
12    GlobalFrameInfoRegistration,
13};
14use wasmer_vm::{Mmap, VMFunctionBody};
15
/// The optimal alignment for functions.
///
/// On x86-64, this is 16 since it's what the optimizations assume.
/// When we add support for other architectures, we should also figure out their
/// optimal alignment values.
const ARCH_FUNCTION_ALIGNMENT: usize = 16;

/// The optimal alignment for data.
///
/// Data sections are padded to this boundary in `CodeMemory::allocate`.
/// NOTE(review): 64 presumably matches a typical cache-line size — confirm.
const DATA_SECTION_ALIGNMENT: usize = 64;
26
/// Memory manager for executable code.
pub struct CodeMemory {
    // frame info is placed first, to ensure it's dropped before the mmap
    frame_info_registration: Option<GlobalFrameInfoRegistration>,
    // Unwind information registered for the functions copied into `mmap`.
    unwind_registry: UnwindRegistry,
    // Backing memory mapping that holds function bodies, executable
    // sections and data sections.
    mmap: Mmap,
    // Byte offset into `mmap` where executable content ends; `publish`
    // marks everything before this offset read+execute.
    start_of_nonexecutable_pages: usize,
}
35
36impl CodeMemory {
37    /// Create a new `CodeMemory` instance.
38    pub fn new() -> Self {
39        Self {
40            unwind_registry: UnwindRegistry::new(),
41            mmap: Mmap::new(),
42            start_of_nonexecutable_pages: 0,
43            frame_info_registration: None,
44        }
45    }
46
47    /// Mutably get the UnwindRegistry.
48    pub fn unwind_registry_mut(&mut self) -> &mut UnwindRegistry {
49        &mut self.unwind_registry
50    }
51
52    /// Allocate a single contiguous block of memory at a fixed virtual address for the functions and custom sections, and copy the data in place.
53    #[allow(clippy::type_complexity)]
54    pub fn allocate<'module, 'memory, FunctionBody, CustomSection>(
55        &'memory mut self,
56        functions: &'memory [&'module FunctionBody],
57        executable_sections: &'memory [&'module CustomSection],
58        data_sections: &'memory [&'module CustomSection],
59    ) -> Result<
60        (
61            Vec<&'memory mut [VMFunctionBody]>,
62            Vec<&'memory mut [u8]>,
63            Vec<&'memory mut [u8]>,
64        ),
65        String,
66    >
67    where
68        FunctionBody: FunctionBodyLike<'module> + 'module,
69        CustomSection: CustomSectionLike<'module> + 'module,
70    {
71        let mut function_result = vec![];
72        let mut data_section_result = vec![];
73        let mut executable_section_result = vec![];
74
75        let page_size = region::page::size();
76
77        // 1. Calculate the total size, that is:
78        // - function body size, including all trampolines
79        // -- windows unwind info
80        // -- padding between functions
81        // - executable section body
82        // -- padding between executable sections
83        // - padding until a new page to change page permissions
84        // - data section body size
85        // -- padding between data sections
86
87        let total_len = round_up(
88            functions.iter().fold(0, |acc, func| {
89                round_up(
90                    acc + Self::function_allocation_size(*func),
91                    ARCH_FUNCTION_ALIGNMENT,
92                )
93            }) + executable_sections.iter().fold(0, |acc, exec| {
94                round_up(acc + exec.bytes().len(), ARCH_FUNCTION_ALIGNMENT)
95            }),
96            page_size,
97        ) + data_sections.iter().fold(0, |acc, data| {
98            round_up(acc + data.bytes().len(), DATA_SECTION_ALIGNMENT)
99        });
100
101        // 2. Allocate the pages. Mark them all read-write.
102
103        self.mmap = Mmap::with_at_least(total_len)?;
104
105        // 3. Determine where the pointers to each function, executable section
106        // or data section are. Copy the functions. Collect the addresses of each and return them.
107
108        let mut bytes = 0;
109        let mut buf = self.mmap.as_mut_slice();
110        for func in functions {
111            let len = round_up(
112                Self::function_allocation_size(*func),
113                ARCH_FUNCTION_ALIGNMENT,
114            );
115            let (func_buf, next_buf) = buf.split_at_mut(len);
116            buf = next_buf;
117            bytes += len;
118
119            let vmfunc = Self::copy_function(&mut self.unwind_registry, *func, func_buf);
120            assert_eq!(vmfunc.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
121            function_result.push(vmfunc);
122        }
123        for section in executable_sections {
124            let section = section.bytes();
125            assert_eq!(buf.as_mut_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
126            let len = round_up(section.len(), ARCH_FUNCTION_ALIGNMENT);
127            let (s, next_buf) = buf.split_at_mut(len);
128            buf = next_buf;
129            bytes += len;
130            s[..section.len()].copy_from_slice(section);
131            executable_section_result.push(s);
132        }
133
134        self.start_of_nonexecutable_pages = bytes;
135
136        if !data_sections.is_empty() {
137            // Data sections have different page permissions from the executable
138            // code that came before it, so they need to be on different pages.
139            let padding = round_up(bytes, page_size) - bytes;
140            buf = buf.split_at_mut(padding).1;
141
142            for section in data_sections {
143                let section = section.bytes();
144                assert_eq!(buf.as_mut_ptr() as usize % DATA_SECTION_ALIGNMENT, 0);
145                let len = round_up(section.len(), DATA_SECTION_ALIGNMENT);
146                let (s, next_buf) = buf.split_at_mut(len);
147                buf = next_buf;
148                s[..section.len()].copy_from_slice(section);
149                data_section_result.push(s);
150            }
151        }
152
153        Ok((
154            function_result,
155            executable_section_result,
156            data_section_result,
157        ))
158    }
159
160    /// Apply the page permissions.
161    pub fn publish(&mut self) {
162        if self.mmap.is_empty() || self.start_of_nonexecutable_pages == 0 {
163            return;
164        }
165        assert!(self.mmap.len() >= self.start_of_nonexecutable_pages);
166        unsafe {
167            region::protect(
168                self.mmap.as_mut_ptr(),
169                self.start_of_nonexecutable_pages,
170                region::Protection::READ_EXECUTE,
171            )
172        }
173        .expect("unable to make memory readonly and executable");
174    }
175
176    /// Calculates the allocation size of the given compiled function.
177    fn function_allocation_size<'a>(func: &'a impl FunctionBodyLike<'a>) -> usize {
178        match &func.unwind_info().map(|o| o.get()) {
179            Some(CompiledFunctionUnwindInfoReference::WindowsX64(info)) => {
180                // Windows unwind information is required to be emitted into code memory
181                // This is because it must be a positive relative offset from the start of the memory
182                // Account for necessary unwind information alignment padding (32-bit alignment)
183                ((func.body().len() + 3) & !3) + info.len()
184            }
185            _ => func.body().len(),
186        }
187    }
188
189    /// Copies the data of the compiled function to the given buffer.
190    ///
191    /// This will also add the function to the current function table.
192    fn copy_function<'module, 'memory>(
193        registry: &mut UnwindRegistry,
194        func: &'module impl FunctionBodyLike<'module>,
195        buf: &'memory mut [u8],
196    ) -> &'memory mut [VMFunctionBody] {
197        assert_eq!(buf.as_ptr() as usize % ARCH_FUNCTION_ALIGNMENT, 0);
198
199        let func_len = func.body().len();
200
201        let (body, remainder) = buf.split_at_mut(func_len);
202        body.copy_from_slice(func.body());
203        let vmfunc = Self::view_as_mut_vmfunc_slice(body);
204
205        let unwind_info = func.unwind_info().map(|o| o.get());
206        if let Some(CompiledFunctionUnwindInfoReference::WindowsX64(info)) = unwind_info {
207            // Windows unwind information is written following the function body
208            // Keep unwind information 32-bit aligned (round up to the nearest 4 byte boundary)
209            let unwind_start = (func_len + 3) & !3;
210            let unwind_size = info.len();
211            let padding = unwind_start - func_len;
212            assert_eq!((func_len + padding) % 4, 0);
213            let slice = remainder.split_at_mut(padding + unwind_size).0;
214            slice[padding..].copy_from_slice(info);
215        }
216
217        if let Some(ref info) = unwind_info {
218            registry
219                .register(vmfunc.as_ptr() as usize, 0, func_len as u32, info)
220                .expect("failed to register unwind information");
221        }
222
223        vmfunc
224    }
225
226    /// Convert mut a slice from u8 to VMFunctionBody.
227    fn view_as_mut_vmfunc_slice(slice: &mut [u8]) -> &mut [VMFunctionBody] {
228        let byte_ptr: *mut [u8] = slice;
229        let body_ptr = byte_ptr as *mut [VMFunctionBody];
230        unsafe { &mut *body_ptr }
231    }
232
233    /// Register the frame info, so it's free when the mememory gets freed
234    pub fn register_frame_info(&mut self, frame_info: GlobalFrameInfoRegistration) {
235        self.frame_info_registration = Some(frame_info);
236    }
237}
238
/// Rounds `size` up to the nearest multiple of `multiple`.
///
/// `multiple` must be a power of two; this is checked in debug builds only.
fn round_up(size: usize, multiple: usize) -> usize {
    debug_assert!(multiple.is_power_of_two());
    let mask = multiple - 1;
    (size + mask) & !mask
}
243
#[cfg(test)]
mod tests {
    use super::CodeMemory;

    /// Compile-time assertion that `CodeMemory` stays `Send + Sync`.
    fn _static_assertions() {
        fn require_send_sync<T: Send + Sync>() {}
        require_send_sync::<CodeMemory>();
    }
}
251}