wasmtime_runtime/
gc.rs

1use std::{any::Any, num::NonZeroUsize};
2
3#[cfg(feature = "gc")]
4mod enabled;
5#[cfg(feature = "gc")]
6pub use enabled::*;
7
8#[cfg(not(feature = "gc"))]
9mod disabled;
10#[cfg(not(feature = "gc"))]
11pub use disabled::*;
12
13mod gc_ref;
14mod gc_runtime;
15mod host_data;
16mod i31;
17
18pub use gc_ref::*;
19pub use gc_runtime::*;
20pub use host_data::*;
21pub use i31::*;
22
23use crate::GcHeapAllocationIndex;
24use anyhow::{bail, Result};
25use wasmtime_environ::StackMap;
26
/// Used by the runtime to lookup information about a module given a
/// program counter value.
pub trait ModuleInfoLookup {
    /// Lookup the module information from a program counter value.
    ///
    /// Returns `None` when `pc` does not fall within any module known to
    /// this lookup.
    fn lookup(&self, pc: usize) -> Option<&dyn ModuleInfo>;
}
33
/// Used by the runtime to query module information.
pub trait ModuleInfo {
    /// Lookup the stack map at a program counter value.
    ///
    /// Returns `None` when no stack map is recorded for `pc`.
    fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap>;
}
39
/// GC-related data that is one-to-one with a `wasmtime::Store`.
///
/// Contains everything we need to do collections, invoke barriers, etc...
///
/// In general, exposes a very similar interface to `GcHeap`, but fills in some
/// of the context arguments for callers (such as the `ExternRefHostDataTable`)
/// since they are all stored together inside `GcStore`.
pub struct GcStore {
    /// This GC heap's allocation index (primarily used for integrating with the
    /// pooling allocator).
    pub allocation_index: GcHeapAllocationIndex,

    /// The actual GC heap.
    pub gc_heap: Box<dyn GcHeap>,

    /// The `externref` host data table for this GC heap.
    ///
    /// Associates each `externref` allocated in `gc_heap` with its host data
    /// (a `Box<dyn Any + Send + Sync>`), keyed by `ExternRefHostDataId`.
    pub host_data_table: ExternRefHostDataTable,
}
58
59impl GcStore {
60    /// Create a new `GcStore`.
61    pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
62        let host_data_table = ExternRefHostDataTable::default();
63        Self {
64            allocation_index,
65            gc_heap,
66            host_data_table,
67        }
68    }
69
70    /// Perform garbage collection within this heap.
71    pub fn gc(&mut self, roots: GcRootsIter<'_>) {
72        let mut collection = self.gc_heap.gc(roots, &mut self.host_data_table);
73        collection.collect();
74    }
75
76    /// Asynchronously perform garbage collection within this heap.
77    #[cfg(feature = "async")]
78    pub async fn gc_async(&mut self, roots: GcRootsIter<'_>) {
79        let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
80        collect_async(collection).await;
81    }
82
83    /// Clone a GC reference, calling GC write barriers as necessary.
84    pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
85        if gc_ref.is_i31() {
86            gc_ref.unchecked_copy()
87        } else {
88            self.gc_heap.clone_gc_ref(gc_ref)
89        }
90    }
91
92    /// Write the `source` GC reference into the `destination` slot, performing
93    /// write barriers as necessary.
94    pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
95        // If neither the source nor destination actually point to a GC object
96        // (that is, they are both either null or `i31ref`s) then we can skip
97        // the GC barrier.
98        if destination.as_ref().map_or(true, |d| d.is_i31())
99            && source.as_ref().map_or(true, |s| s.is_i31())
100        {
101            *destination = source.map(|s| s.unchecked_copy());
102            return;
103        }
104
105        self.gc_heap
106            .write_gc_ref(&mut self.host_data_table, destination, source);
107    }
108
109    /// Drop the given GC reference, performing drop barriers as necessary.
110    pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
111        if !gc_ref.is_i31() {
112            self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
113        }
114    }
115
116    /// Hook to call whenever a GC reference is about to be exposed to Wasm.
117    pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) {
118        if !gc_ref.is_i31() {
119            self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
120        }
121    }
122
123    /// Allocate a new `externref`.
124    ///
125    /// Returns:
126    ///
127    /// * `Ok(Ok(_))`: Successfully allocated the `externref`.
128    ///
129    /// * `Ok(Err(value))`: Failed to allocate the `externref`, but doing a GC
130    ///   and then trying again may succeed. Returns the given `value` as the
131    ///   error payload.
132    ///
133    /// * `Err(_)`: Unrecoverable allocation failure.
134    pub fn alloc_externref(
135        &mut self,
136        value: Box<dyn Any + Send + Sync>,
137    ) -> Result<Result<VMExternRef, Box<dyn Any + Send + Sync>>> {
138        let host_data_id = self.host_data_table.alloc(value);
139        match self.gc_heap.alloc_externref(host_data_id)? {
140            Some(x) => Ok(Ok(x)),
141            None => Ok(Err(self.host_data_table.dealloc(host_data_id))),
142        }
143    }
144
145    /// Get a shared borrow of the given `externref`'s host data.
146    ///
147    /// Passing invalid `VMExternRef`s (eg garbage values or `externref`s
148    /// associated with a different heap is memory safe but will lead to general
149    /// incorrectness such as panics and wrong results.
150    pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
151        let host_data_id = self.gc_heap.externref_host_data(externref);
152        self.host_data_table.get(host_data_id)
153    }
154
155    /// Get a mutable borrow of the given `externref`'s host data.
156    ///
157    /// Passing invalid `VMExternRef`s (eg garbage values or `externref`s
158    /// associated with a different heap is memory safe but will lead to general
159    /// incorrectness such as panics and wrong results.
160    pub fn externref_host_data_mut(
161        &mut self,
162        externref: &VMExternRef,
163    ) -> &mut (dyn Any + Send + Sync) {
164        let host_data_id = self.gc_heap.externref_host_data(externref);
165        self.host_data_table.get_mut(host_data_id)
166    }
167}
168
169/// Get a no-op GC heap for when GC is disabled (either statically at compile
170/// time or dynamically due to it being turned off in the `wasmtime::Config`).
171pub fn disabled_gc_heap() -> Box<dyn GcHeap> {
172    return Box::new(DisabledGcHeap);
173
174    struct DisabledGcHeap;
175
176    unsafe impl GcHeap for DisabledGcHeap {
177        fn as_any(&self) -> &dyn Any {
178            self
179        }
180        fn as_any_mut(&mut self) -> &mut dyn Any {
181            self
182        }
183        fn enter_no_gc_scope(&mut self) {}
184        fn exit_no_gc_scope(&mut self) {}
185        fn header(&self, _gc_ref: &VMGcRef) -> &VMGcHeader {
186            unreachable!()
187        }
188        fn clone_gc_ref(&mut self, _gc_ref: &VMGcRef) -> VMGcRef {
189            unreachable!()
190        }
191        fn write_gc_ref(
192            &mut self,
193            _host_data_table: &mut ExternRefHostDataTable,
194            _destination: &mut Option<VMGcRef>,
195            _source: Option<&VMGcRef>,
196        ) {
197            unreachable!()
198        }
199        fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
200            unreachable!()
201        }
202        fn need_gc_before_entering_wasm(&self, _num_gc_refs: NonZeroUsize) -> bool {
203            unreachable!()
204        }
205        fn alloc_externref(
206            &mut self,
207            _host_data: ExternRefHostDataId,
208        ) -> Result<Option<VMExternRef>> {
209            bail!(
210                "GC support disabled either in the `Config` or at compile time \
211                 because the `gc` cargo feature was not enabled"
212            )
213        }
214        fn externref_host_data(&self, _externref: &VMExternRef) -> ExternRefHostDataId {
215            unreachable!()
216        }
217        fn gc<'a>(
218            &'a mut self,
219            _roots: GcRootsIter<'a>,
220            _host_data_table: &'a mut ExternRefHostDataTable,
221        ) -> Box<dyn GarbageCollection<'a> + 'a> {
222            return Box::new(NoGc);
223
224            struct NoGc;
225
226            impl<'a> GarbageCollection<'a> for NoGc {
227                fn collect_increment(&mut self) -> GcProgress {
228                    GcProgress::Complete
229                }
230            }
231        }
232        unsafe fn vmctx_gc_heap_base(&self) -> *mut u8 {
233            std::ptr::null_mut()
234        }
235        unsafe fn vmctx_gc_heap_bound(&self) -> usize {
236            0
237        }
238        unsafe fn vmctx_gc_heap_data(&self) -> *mut u8 {
239            std::ptr::null_mut()
240        }
241        #[cfg(feature = "pooling-allocator")]
242        fn reset(&mut self) {}
243    }
244}