// rkyv_test/ser/serializers/alloc.rs
use crate::{
2 ser::{serializers::BufferScratch, ScratchSpace, Serializer, SharedSerializeRegistry},
3 AlignedBytes, AlignedVec, Archive, ArchiveUnsized, Fallible, RelPtr,
4};
5#[cfg(not(feature = "std"))]
6use ::alloc::{alloc, boxed::Box, vec::Vec};
7#[cfg(feature = "std")]
8use ::std::alloc;
9use core::{
10 alloc::Layout,
11 borrow::{Borrow, BorrowMut},
12 convert::Infallible,
13 fmt, mem,
14 ptr::NonNull,
15};
16#[cfg(not(feature = "std"))]
17use hashbrown::hash_map;
18#[cfg(feature = "std")]
19use std::collections::hash_map;
20
/// A serializer that writes archived values into an [`AlignedVec`]-backed buffer.
///
/// `A` is any type that can borrow an `AlignedVec` (e.g. an owned `AlignedVec`
/// or a mutable reference to one); the required `Borrow`/`BorrowMut` bounds
/// appear on the impls below rather than on the struct itself.
#[derive(Debug)]
pub struct AlignedSerializer<A> {
    // Backing buffer; all writes append to the borrowed `AlignedVec`.
    inner: A,
}
29
30impl<A: Borrow<AlignedVec>> AlignedSerializer<A> {
31 #[inline]
33 pub fn new(inner: A) -> Self {
34 Self { inner }
35 }
36
37 #[inline]
39 pub fn into_inner(self) -> A {
40 self.inner
41 }
42}
43
44impl<A: Default> Default for AlignedSerializer<A> {
45 #[inline]
46 fn default() -> Self {
47 Self {
48 inner: A::default(),
49 }
50 }
51}
52
// Writing into an in-memory `AlignedVec` cannot fail, so the error type is
// uninhabited.
impl<A> Fallible for AlignedSerializer<A> {
    type Error = Infallible;
}
56
impl<A: Borrow<AlignedVec> + BorrowMut<AlignedVec>> Serializer for AlignedSerializer<A> {
    /// Returns the current write position (bytes written so far).
    #[inline]
    fn pos(&self) -> usize {
        self.inner.borrow().len()
    }

    /// Appends `bytes` to the backing buffer; infallible.
    #[inline]
    fn write(&mut self, bytes: &[u8]) -> Result<(), Self::Error> {
        self.inner.borrow_mut().extend_from_slice(bytes);
        Ok(())
    }

    /// Reserves space for `T::Archived` at the current position, zeroes it, and
    /// resolves `value` into it. Returns the position of the resolved value.
    ///
    /// The caller must ensure the current position is already aligned for
    /// `T::Archived` (checked in debug builds only).
    #[inline]
    unsafe fn resolve_aligned<T: Archive + ?Sized>(
        &mut self,
        value: &T,
        resolver: T::Resolver,
    ) -> Result<usize, Self::Error> {
        let pos = self.pos();
        // Alignment precondition: `pos` must be a multiple of the archived
        // type's alignment for the pointer cast below to be sound.
        debug_assert_eq!(pos & (mem::align_of::<T::Archived>() - 1), 0);
        let vec = self.inner.borrow_mut();
        let additional = mem::size_of::<T::Archived>();
        vec.reserve(additional);
        // SAFETY: `reserve` guarantees capacity for the new length; the slot is
        // zero-filled below before being treated as `T::Archived`.
        vec.set_len(vec.len() + additional);

        let ptr = vec.as_mut_ptr().add(pos).cast::<T::Archived>();
        // Zero the slot so padding and untouched fields hold defined bytes.
        ptr.write_bytes(0, 1);
        value.resolve(pos, resolver, ptr);

        Ok(pos)
    }

    /// Reserves space for a `RelPtr<T::Archived>` at the current position,
    /// zeroes it, and resolves it to point at the value serialized at `to`.
    /// Returns the position of the relative pointer.
    #[inline]
    unsafe fn resolve_unsized_aligned<T: ArchiveUnsized + ?Sized>(
        &mut self,
        value: &T,
        to: usize,
        metadata_resolver: T::MetadataResolver,
    ) -> Result<usize, Self::Error> {
        let from = self.pos();
        // Alignment precondition for the `RelPtr` cast below.
        debug_assert_eq!(from & (mem::align_of::<RelPtr<T::Archived>>() - 1), 0);
        let vec = self.inner.borrow_mut();
        let additional = mem::size_of::<RelPtr<T::Archived>>();
        vec.reserve(additional);
        // SAFETY: capacity was just reserved; bytes are zeroed before use.
        vec.set_len(vec.len() + additional);

        let ptr = vec.as_mut_ptr().add(from).cast::<RelPtr<T::Archived>>();
        ptr.write_bytes(0, 1);

        value.resolve_unsized(from, to, metadata_resolver, ptr);
        Ok(from)
    }
}
110
/// Scratch space backed by a fixed-size (`N` bytes), heap-allocated, aligned
/// buffer.
#[derive(Debug)]
pub struct HeapScratch<const N: usize> {
    // Bump-style scratch allocator over a boxed `AlignedBytes<N>` buffer.
    inner: BufferScratch<Box<AlignedBytes<N>>>,
}
116
117impl<const N: usize> HeapScratch<N> {
118 pub fn new() -> Self {
120 if N != 0 {
121 unsafe {
122 let layout = Layout::new::<AlignedBytes<N>>();
123 let ptr = alloc::alloc(layout).cast::<AlignedBytes<N>>();
124 assert!(!ptr.is_null());
125 let buf = Box::from_raw(ptr);
126 Self {
127 inner: BufferScratch::new(buf),
128 }
129 }
130 } else {
131 Self {
132 inner: BufferScratch::new(Box::new(AlignedBytes::default())),
133 }
134 }
135 }
136
137 pub fn layout() -> Layout {
139 unsafe { Layout::from_size_align_unchecked(N, 1) }
140 }
141}
142
impl<const N: usize> Default for HeapScratch<N> {
    /// Equivalent to [`HeapScratch::new`].
    fn default() -> Self {
        Self::new()
    }
}
148
impl<const N: usize> Fallible for HeapScratch<N> {
    // `BufferScratch`'s error type is the same for every buffer parameter, so
    // naming it via `Box<[u8]>` is equivalent to naming it via
    // `Box<AlignedBytes<N>>`.
    type Error = <BufferScratch<Box<[u8]>> as Fallible>::Error;
}
152
impl<const N: usize> ScratchSpace for HeapScratch<N> {
    /// Delegates to the inner [`BufferScratch`].
    ///
    /// # Safety
    /// Same contract as the inner scratch space: every push must be matched by
    /// a pop in reverse order.
    #[inline]
    unsafe fn push_scratch(&mut self, layout: Layout) -> Result<NonNull<[u8]>, Self::Error> {
        self.inner.push_scratch(layout)
    }

    /// Delegates to the inner [`BufferScratch`].
    ///
    /// # Safety
    /// `ptr` and `layout` must describe the most recent un-popped push.
    #[inline]
    unsafe fn pop_scratch(&mut self, ptr: NonNull<u8>, layout: Layout) -> Result<(), Self::Error> {
        self.inner.pop_scratch(ptr, layout)
    }
}
164
/// Errors that can occur while allocating with [`AllocScratch`].
#[derive(Debug)]
pub enum AllocScratchError {
    /// A request would exceed the configured scratch-space limit.
    ExceededLimit {
        /// The number of bytes requested.
        requested: usize,
        /// The number of bytes still available.
        remaining: usize,
    },
    /// Scratch allocations must be popped in the reverse order they were
    /// pushed; this pop did not match the most recent push.
    NotPoppedInReverseOrder {
        /// The pointer expected to be popped next (most recent allocation).
        expected: *mut u8,
        /// The layout of the expected allocation.
        expected_layout: Layout,
        /// The pointer actually passed to `pop_scratch`.
        actual: *mut u8,
        /// The layout actually passed to `pop_scratch`.
        actual_layout: Layout,
    },
    /// `pop_scratch` was called with no outstanding allocations.
    NoAllocationsToPop,
}
189
// SAFETY: the raw pointers carried by this error are used only as diagnostic
// data (identity and formatting); they are never dereferenced.
unsafe impl Send for AllocScratchError {}

// SAFETY: as above — the pointers are inert, so shared access cannot race.
unsafe impl Sync for AllocScratchError {}
197
198impl fmt::Display for AllocScratchError {
199 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
200 match self {
201 Self::ExceededLimit { requested, remaining } => write!(
202 f,
203 "exceeded the maxmium limit of scratch space: requested {}, remaining {}",
204 requested, remaining
205 ),
206 Self::NotPoppedInReverseOrder {
207 expected,
208 expected_layout,
209 actual,
210 actual_layout,
211 } => write!(
212 f,
213 "scratch space was not popped in reverse order: expected {:p} with size {} and align {}, found {:p} with size {} and align {}",
214 expected, expected_layout.size(), expected_layout.align(), actual, actual_layout.size(), actual_layout.align()
215 ),
216 Self::NoAllocationsToPop => write!(
217 f, "attempted to pop scratch space but there were no allocations to pop"
218 ),
219 }
220 }
221}
222
// `std::error::Error` is only available when the `std` feature is enabled; the
// anonymous const scopes the `use` to this impl.
#[cfg(feature = "std")]
const _: () = {
    use std::error::Error;

    impl Error for AllocScratchError {}
};
229
/// Scratch space that requests memory from the global allocator.
#[derive(Debug)]
pub struct AllocScratch {
    // Remaining byte budget when a limit was set via `with_limit`; `None`
    // means unlimited.
    remaining: Option<usize>,
    // Live allocations in push order; must be popped strictly in reverse.
    allocations: Vec<(*mut u8, Layout)>,
}
239
// SAFETY: `AllocScratch` exclusively owns the allocations behind its raw
// pointers and frees each exactly once (on pop or drop), so moving the whole
// value to another thread is sound.
unsafe impl Send for AllocScratch {}

// SAFETY: all pointer manipulation goes through `&mut self`, so shared
// references cannot race.
unsafe impl Sync for AllocScratch {}
247
248impl AllocScratch {
249 pub fn new() -> Self {
251 Self {
252 remaining: None,
253 allocations: Vec::new(),
254 }
255 }
256
257 pub fn with_limit(limit: usize) -> Self {
259 Self {
260 remaining: Some(limit),
261 allocations: Vec::new(),
262 }
263 }
264}
265
impl Drop for AllocScratch {
    /// Frees any scratch blocks that were pushed but never popped.
    fn drop(&mut self) {
        // Deallocate in reverse (LIFO) order, mirroring the pop contract.
        for (ptr, layout) in self.allocations.drain(..).rev() {
            unsafe {
                // SAFETY: each (ptr, layout) pair came from a matching
                // `alloc::alloc(layout)` in `push_scratch` and has not been
                // freed yet.
                alloc::dealloc(ptr, layout);
            }
        }
    }
}
275
impl Default for AllocScratch {
    /// Equivalent to [`AllocScratch::new`] (no allocation limit).
    fn default() -> Self {
        Self::new()
    }
}
281
impl Fallible for AllocScratch {
    type Error = AllocScratchError;
}
285
286impl ScratchSpace for AllocScratch {
287 #[inline]
288 unsafe fn push_scratch(&mut self, layout: Layout) -> Result<NonNull<[u8]>, Self::Error> {
289 if let Some(remaining) = self.remaining {
290 if remaining < layout.size() {
291 return Err(AllocScratchError::ExceededLimit {
292 requested: layout.size(),
293 remaining,
294 });
295 }
296 }
297 let result_ptr = alloc::alloc(layout);
298 assert!(!result_ptr.is_null());
299 self.allocations.push((result_ptr, layout));
300 let result_slice = ptr_meta::from_raw_parts_mut(result_ptr.cast(), layout.size());
301 let result = NonNull::new_unchecked(result_slice);
302 Ok(result)
303 }
304
305 #[inline]
306 unsafe fn pop_scratch(&mut self, ptr: NonNull<u8>, layout: Layout) -> Result<(), Self::Error> {
307 if let Some(&(last_ptr, last_layout)) = self.allocations.last() {
308 if ptr.as_ptr() == last_ptr && layout == last_layout {
309 alloc::dealloc(ptr.as_ptr(), layout);
310 self.allocations.pop();
311 Ok(())
312 } else {
313 Err(AllocScratchError::NotPoppedInReverseOrder {
314 expected: last_ptr,
315 expected_layout: last_layout,
316 actual: ptr.as_ptr(),
317 actual_layout: layout,
318 })
319 }
320 } else {
321 Err(AllocScratchError::NoAllocationsToPop)
322 }
323 }
324}
325
/// Errors that can occur when registering shared pointers with
/// [`SharedSerializeMap`].
#[derive(Debug)]
pub enum SharedSerializeMapError {
    /// The given pointer was already registered.
    DuplicateSharedPointer(*const u8),
}
332
// SAFETY: the raw pointer in this error is diagnostic data only (identity and
// formatting); it is never dereferenced.
unsafe impl Send for SharedSerializeMapError {}

// SAFETY: as above — the pointer is inert, so shared access cannot race.
unsafe impl Sync for SharedSerializeMapError {}
340
341impl fmt::Display for SharedSerializeMapError {
342 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
343 match self {
344 Self::DuplicateSharedPointer(p) => write!(f, "duplicate shared pointer: {:p}", p),
345 }
346 }
347}
348
// `std::error::Error` is only available when the `std` feature is enabled; the
// anonymous const scopes the `use` to this impl.
#[cfg(feature = "std")]
const _: () = {
    use std::error::Error;

    impl Error for SharedSerializeMapError {}
};
355
/// A registry mapping shared pointers to the positions where their values were
/// serialized, so each shared value is archived only once.
#[derive(Debug)]
pub struct SharedSerializeMap {
    // Maps a value's address to the archive position of its serialization.
    shared_resolvers: hash_map::HashMap<*const u8, usize>,
}
361
// SAFETY: the map stores raw pointers purely as keys (identities); they are
// never dereferenced.
unsafe impl Send for SharedSerializeMap {}

// SAFETY: as above — keys are inert, and mutation requires `&mut self`.
unsafe impl Sync for SharedSerializeMap {}
369
370impl SharedSerializeMap {
371 #[inline]
373 pub fn new() -> Self {
374 Self {
375 shared_resolvers: hash_map::HashMap::new(),
376 }
377 }
378}
379
impl Default for SharedSerializeMap {
    /// Equivalent to [`SharedSerializeMap::new`].
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
386
impl Fallible for SharedSerializeMap {
    type Error = SharedSerializeMapError;
}
390
391impl SharedSerializeRegistry for SharedSerializeMap {
392 fn get_shared_ptr(&self, value: *const u8) -> Option<usize> {
393 self.shared_resolvers.get(&value).copied()
394 }
395
396 fn add_shared_ptr(&mut self, value: *const u8, pos: usize) -> Result<(), Self::Error> {
397 match self.shared_resolvers.entry(value) {
398 hash_map::Entry::Occupied(_) => {
399 Err(SharedSerializeMapError::DuplicateSharedPointer(value))
400 }
401 hash_map::Entry::Vacant(e) => {
402 e.insert(pos);
403 Ok(())
404 }
405 }
406 }
407}