1use std::{any::Any, num::NonZeroUsize};
2
3#[cfg(feature = "gc")]
4mod enabled;
5#[cfg(feature = "gc")]
6pub use enabled::*;
7
8#[cfg(not(feature = "gc"))]
9mod disabled;
10#[cfg(not(feature = "gc"))]
11pub use disabled::*;
12
13mod gc_ref;
14mod gc_runtime;
15mod host_data;
16mod i31;
17
18pub use gc_ref::*;
19pub use gc_runtime::*;
20pub use host_data::*;
21pub use i31::*;
22
23use crate::GcHeapAllocationIndex;
24use anyhow::{bail, Result};
25use wasmtime_environ::StackMap;
26
/// Lookup of per-module compilation metadata by program counter.
pub trait ModuleInfoLookup {
    /// Return the [`ModuleInfo`] whose compiled code contains `pc`, or
    /// `None` if `pc` does not fall within any known module.
    fn lookup(&self, pc: usize) -> Option<&dyn ModuleInfo>;
}
33
/// Per-module metadata needed by the collector.
pub trait ModuleInfo {
    /// Return the stack map (GC root locations) for the code at `pc`, if
    /// there is one.
    fn lookup_stack_map(&self, pc: usize) -> Option<&StackMap>;
}
39
/// A store's bundle of GC state: its heap plus the side table for
/// `externref` host data.
pub struct GcStore {
    // Index identifying this heap's allocation (handed back when the heap
    // is deallocated — presumably by a pooling allocator; confirm at caller).
    pub allocation_index: GcHeapAllocationIndex,

    // The underlying GC heap implementation.
    pub gc_heap: Box<dyn GcHeap>,

    // Maps `ExternRefHostDataId`s to the boxed host values created via
    // `alloc_externref`.
    pub host_data_table: ExternRefHostDataTable,
}
58
59impl GcStore {
60 pub fn new(allocation_index: GcHeapAllocationIndex, gc_heap: Box<dyn GcHeap>) -> Self {
62 let host_data_table = ExternRefHostDataTable::default();
63 Self {
64 allocation_index,
65 gc_heap,
66 host_data_table,
67 }
68 }
69
70 pub fn gc(&mut self, roots: GcRootsIter<'_>) {
72 let mut collection = self.gc_heap.gc(roots, &mut self.host_data_table);
73 collection.collect();
74 }
75
76 #[cfg(feature = "async")]
78 pub async fn gc_async(&mut self, roots: GcRootsIter<'_>) {
79 let collection = self.gc_heap.gc(roots, &mut self.host_data_table);
80 collect_async(collection).await;
81 }
82
83 pub fn clone_gc_ref(&mut self, gc_ref: &VMGcRef) -> VMGcRef {
85 if gc_ref.is_i31() {
86 gc_ref.unchecked_copy()
87 } else {
88 self.gc_heap.clone_gc_ref(gc_ref)
89 }
90 }
91
92 pub fn write_gc_ref(&mut self, destination: &mut Option<VMGcRef>, source: Option<&VMGcRef>) {
95 if destination.as_ref().map_or(true, |d| d.is_i31())
99 && source.as_ref().map_or(true, |s| s.is_i31())
100 {
101 *destination = source.map(|s| s.unchecked_copy());
102 return;
103 }
104
105 self.gc_heap
106 .write_gc_ref(&mut self.host_data_table, destination, source);
107 }
108
109 pub fn drop_gc_ref(&mut self, gc_ref: VMGcRef) {
111 if !gc_ref.is_i31() {
112 self.gc_heap.drop_gc_ref(&mut self.host_data_table, gc_ref);
113 }
114 }
115
116 pub fn expose_gc_ref_to_wasm(&mut self, gc_ref: VMGcRef) {
118 if !gc_ref.is_i31() {
119 self.gc_heap.expose_gc_ref_to_wasm(gc_ref);
120 }
121 }
122
123 pub fn alloc_externref(
135 &mut self,
136 value: Box<dyn Any + Send + Sync>,
137 ) -> Result<Result<VMExternRef, Box<dyn Any + Send + Sync>>> {
138 let host_data_id = self.host_data_table.alloc(value);
139 match self.gc_heap.alloc_externref(host_data_id)? {
140 Some(x) => Ok(Ok(x)),
141 None => Ok(Err(self.host_data_table.dealloc(host_data_id))),
142 }
143 }
144
145 pub fn externref_host_data(&self, externref: &VMExternRef) -> &(dyn Any + Send + Sync) {
151 let host_data_id = self.gc_heap.externref_host_data(externref);
152 self.host_data_table.get(host_data_id)
153 }
154
155 pub fn externref_host_data_mut(
161 &mut self,
162 externref: &VMExternRef,
163 ) -> &mut (dyn Any + Send + Sync) {
164 let host_data_id = self.gc_heap.externref_host_data(externref);
165 self.host_data_table.get_mut(host_data_id)
166 }
167}
168
169pub fn disabled_gc_heap() -> Box<dyn GcHeap> {
172 return Box::new(DisabledGcHeap);
173
174 struct DisabledGcHeap;
175
176 unsafe impl GcHeap for DisabledGcHeap {
177 fn as_any(&self) -> &dyn Any {
178 self
179 }
180 fn as_any_mut(&mut self) -> &mut dyn Any {
181 self
182 }
183 fn enter_no_gc_scope(&mut self) {}
184 fn exit_no_gc_scope(&mut self) {}
185 fn header(&self, _gc_ref: &VMGcRef) -> &VMGcHeader {
186 unreachable!()
187 }
188 fn clone_gc_ref(&mut self, _gc_ref: &VMGcRef) -> VMGcRef {
189 unreachable!()
190 }
191 fn write_gc_ref(
192 &mut self,
193 _host_data_table: &mut ExternRefHostDataTable,
194 _destination: &mut Option<VMGcRef>,
195 _source: Option<&VMGcRef>,
196 ) {
197 unreachable!()
198 }
199 fn expose_gc_ref_to_wasm(&mut self, _gc_ref: VMGcRef) {
200 unreachable!()
201 }
202 fn need_gc_before_entering_wasm(&self, _num_gc_refs: NonZeroUsize) -> bool {
203 unreachable!()
204 }
205 fn alloc_externref(
206 &mut self,
207 _host_data: ExternRefHostDataId,
208 ) -> Result<Option<VMExternRef>> {
209 bail!(
210 "GC support disabled either in the `Config` or at compile time \
211 because the `gc` cargo feature was not enabled"
212 )
213 }
214 fn externref_host_data(&self, _externref: &VMExternRef) -> ExternRefHostDataId {
215 unreachable!()
216 }
217 fn gc<'a>(
218 &'a mut self,
219 _roots: GcRootsIter<'a>,
220 _host_data_table: &'a mut ExternRefHostDataTable,
221 ) -> Box<dyn GarbageCollection<'a> + 'a> {
222 return Box::new(NoGc);
223
224 struct NoGc;
225
226 impl<'a> GarbageCollection<'a> for NoGc {
227 fn collect_increment(&mut self) -> GcProgress {
228 GcProgress::Complete
229 }
230 }
231 }
232 unsafe fn vmctx_gc_heap_base(&self) -> *mut u8 {
233 std::ptr::null_mut()
234 }
235 unsafe fn vmctx_gc_heap_bound(&self) -> usize {
236 0
237 }
238 unsafe fn vmctx_gc_heap_data(&self) -> *mut u8 {
239 std::ptr::null_mut()
240 }
241 #[cfg(feature = "pooling-allocator")]
242 fn reset(&mut self) {}
243 }
244}