#[cfg(unix)]
use core::ffi::c_int;
use core::{
    alloc::Layout,
    ffi::{c_uint, c_void},
    marker::PhantomData,
    ptr::NonNull,
};

#[cfg(feature = "rust-allocator")]
use alloc::alloc::GlobalAlloc;

#[allow(non_camel_case_types)]
type size_t = usize;

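/// Allocation via `posix_memalign`, requesting 64-byte alignment (presumably
/// so that buffers are cache-line aligned for SIMD-heavy code paths).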
#[cfg(unix)]
unsafe extern "C" fn zalloc_c(opaque: *mut c_void, items: c_uint, size: c_uint) -> *mut c_void {
    let _ = opaque;

    extern "C" {
        fn posix_memalign(memptr: *mut *mut c_void, align: size_t, size: size_t) -> c_int;
    }

    let mut ptr = core::ptr::null_mut();
    match unsafe { posix_memalign(&mut ptr, 64, items as size_t * size as size_t) } {
        0 => ptr,
        _ => core::ptr::null_mut(),
    }
}

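/// Fallback allocation via `malloc` on non-unix targets. `malloc` makes no
/// alignment promise beyond `max_align_t`, so `allocate_layout` fixes up
/// alignment manually.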
#[cfg(not(unix))]
unsafe extern "C" fn zalloc_c(opaque: *mut c_void, items: c_uint, size: c_uint) -> *mut c_void {
    let _ = opaque;

    extern "C" {
        fn malloc(size: size_t) -> *mut c_void;
    }

    unsafe { malloc(items as size_t * size as size_t) }
}

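/// Like `zalloc_c`, but backed by `calloc` so the allocation is
/// zero-initialized; used by `allocate_zeroed`.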
unsafe extern "C" fn zalloc_c_calloc(
    opaque: *mut c_void,
    items: c_uint,
    size: c_uint,
) -> *mut c_void {
    let _ = opaque;

    extern "C" {
        fn calloc(nitems: size_t, size: size_t) -> *mut c_void;
    }

    unsafe { calloc(items as size_t, size as size_t) }
}

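/// Deallocation via `free`, matching `zalloc_c` and `zalloc_c_calloc`.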
unsafe extern "C" fn zfree_c(opaque: *mut c_void, ptr: *mut c_void) {
    let _ = opaque;

    extern "C" {
        fn free(p: *mut c_void);
    }

    unsafe { free(ptr) }
}

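/// Allocation through Rust's `System` allocator, with the same 64-byte
/// alignment as the C path.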
#[cfg(feature = "rust-allocator")]
unsafe extern "C" fn zalloc_rust(_opaque: *mut c_void, count: c_uint, size: c_uint) -> *mut c_void {
    let align = 64;
    let size = count as usize * size as usize;

    let layout = Layout::from_size_align(size, align).unwrap();

    let ptr = unsafe { std::alloc::System.alloc(layout) };

    ptr as *mut c_void
}

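/// Deallocation through Rust's `System` allocator. Rust requires the original
/// allocation size to rebuild the `Layout`, so the caller passes it in
/// through `opaque` (see `deallocate`).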
#[cfg(feature = "rust-allocator")]
unsafe extern "C" fn zfree_rust(opaque: *mut c_void, ptr: *mut c_void) {
    if ptr.is_null() {
        return;
    }

    // Deallocating with an incorrect layout would be UB; without a size we
    // can do nothing but leak the allocation.
    debug_assert!(!opaque.is_null());
    if opaque.is_null() {
        return;
    }

    let size = unsafe { *(opaque as *mut usize) };
    let align = 64;

    let layout = Layout::from_size_align(size, align).unwrap();

    unsafe { std::alloc::System.dealloc(ptr.cast(), layout) };
}

#[cfg(test)]
unsafe extern "C" fn zalloc_fail(_: *mut c_void, _: c_uint, _: c_uint) -> *mut c_void {
    core::ptr::null_mut()
}

#[cfg(test)]
unsafe extern "C" fn zfree_fail(_: *mut c_void, _: *mut c_void) {
    // intentionally does nothing
}

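/// A zlib-style allocator: a `zalloc`/`zfree` pair of C function pointers
/// plus the `opaque` value that is passed to both. The lifetime parameter
/// presumably ties the allocator to data borrowed by `opaque`.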
#[derive(Clone, Copy)]
#[repr(C)]
pub struct Allocator<'a> {
    pub zalloc: crate::c_api::alloc_func,
    pub zfree: crate::c_api::free_func,
    pub opaque: crate::c_api::voidpf,
    pub _marker: PhantomData<&'a ()>,
}

impl Allocator<'static> {
    #[cfg(feature = "rust-allocator")]
    pub const RUST: Self = Self {
        zalloc: zalloc_rust,
        zfree: zfree_rust,
        opaque: core::ptr::null_mut(),
        _marker: PhantomData,
    };

    #[cfg(feature = "c-allocator")]
    pub const C: Self = Self {
        zalloc: zalloc_c,
        zfree: zfree_c,
        opaque: core::ptr::null_mut(),
        _marker: PhantomData,
    };

    #[cfg(test)]
    const FAIL: Self = Self {
        zalloc: zalloc_fail,
        zfree: zfree_fail,
        opaque: core::ptr::null_mut(),
        _marker: PhantomData,
    };
}

impl Allocator<'_> {
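    /// Allocates memory for `layout` through the configured `zalloc`,
    /// guaranteeing the requested alignment even when `zalloc` itself makes
    /// no alignment promises. Returns a null pointer on failure.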
    pub fn allocate_layout(&self, layout: Layout) -> *mut c_void {
        // Special case: `zalloc_rust` already aligns to 64 bytes, which the
        // debug assertion below checks is sufficient for this layout.
        #[cfg(feature = "rust-allocator")]
        if self.zalloc == Allocator::RUST.zalloc {
            let ptr = unsafe { (Allocator::RUST.zalloc)(self.opaque, layout.size() as _, 1) };

            debug_assert_eq!(ptr as usize % layout.align(), 0);

            return ptr;
        }

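        // General case: we cannot rely on `zalloc` returning properly aligned
        // memory, so over-allocate by a pointer's worth of bytes plus the
        // required alignment. The caller gets an aligned address inside the
        // allocation, and the pointer that `zalloc` actually returned is
        // stored just below that address so `deallocate` can recover it.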
        let extra_space = core::mem::size_of::<*mut c_void>() + layout.align();

        let ptr = unsafe { (self.zalloc)(self.opaque, (layout.size() + extra_space) as _, 1) };

        if ptr.is_null() {
            return ptr;
        }

        // Round the address up to the requested alignment.
        let align_diff = (ptr as usize).next_multiple_of(layout.align()) - (ptr as usize);

        let mut return_ptr = unsafe { ptr.cast::<u8>().add(align_diff) };

        // If the gap before the aligned address is too small to hold the
        // original pointer, move up by a whole alignment step (or a pointer's
        // width, whichever is larger, so alignment is preserved).
        if align_diff < core::mem::size_of::<*mut c_void>() {
            let offset = Ord::max(core::mem::size_of::<*mut c_void>(), layout.align());
            return_ptr = unsafe { return_ptr.add(offset) };
        }

        // Stash the pointer that `zalloc` returned just below `return_ptr`,
        // where `deallocate` can read it back with `read_unaligned`.
        unsafe {
            let original_ptr = return_ptr.sub(core::mem::size_of::<*mut c_void>());
            core::ptr::write_unaligned(original_ptr.cast::<*mut c_void>(), ptr);
        };

        let ptr = return_ptr.cast::<c_void>();

        debug_assert_eq!(ptr as usize % layout.align(), 0);

        ptr
    }

    pub fn allocate_raw<T>(&self) -> Option<NonNull<T>> {
        NonNull::new(self.allocate_layout(Layout::new::<T>()).cast())
    }

    pub fn allocate_slice_raw<T>(&self, len: usize) -> Option<NonNull<T>> {
        NonNull::new(self.allocate_layout(Layout::array::<T>(len).ok()?).cast())
    }

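    /// Allocates `len` bytes of zero-initialized memory, using the cheapest
    /// zeroing strategy available for the configured allocator.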
    pub fn allocate_zeroed(&self, len: usize) -> Option<NonNull<u8>> {
        // The Rust allocator can zero as part of the allocation.
        #[cfg(feature = "rust-allocator")]
        if self.zalloc == Allocator::RUST.zalloc {
            let layout = Layout::from_size_align(len, 64).unwrap();

            return NonNull::new(unsafe { std::alloc::System.alloc_zeroed(layout) });
        }

        // The C allocator can use `calloc`, which zeroes the allocation.
        #[cfg(feature = "c-allocator")]
        if self.zalloc == Allocator::C.zalloc {
            let alloc = Allocator {
                zalloc: zalloc_c_calloc,
                zfree: zfree_c,
                opaque: core::ptr::null_mut(),
                _marker: PhantomData,
            };

            let ptr = alloc.allocate_layout(Layout::array::<u8>(len).ok()?);

            return NonNull::new(ptr.cast());
        }

        // Unknown allocator: allocate, then zero the buffer manually.
        let ptr = self.allocate_layout(Layout::array::<u8>(len).ok()?);

        let ptr = NonNull::new(ptr)?;

        unsafe { core::ptr::write_bytes(ptr.as_ptr(), 0, len) };

        Some(ptr.cast())
    }

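    /// # Safety
    ///
    /// `ptr` must have been allocated with this allocator, and `len` must be
    /// the number of `T` values in that allocation.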
    #[allow(unused)] // `len` is only needed by the Rust-allocator path
    pub unsafe fn deallocate<T>(&self, ptr: *mut T, len: usize) {
        if !ptr.is_null() {
            // The Rust allocator needs the allocation's size to reconstruct
            // the `Layout`; smuggle it to `zfree_rust` through the `opaque`
            // argument.
            #[cfg(feature = "rust-allocator")]
            if self.zfree == Allocator::RUST.zfree {
                assert_ne!(len, 0, "invalid size for {:?}", ptr);
                let mut size = core::mem::size_of::<T>() * len;
                return unsafe {
                    (Allocator::RUST.zfree)(&mut size as *mut usize as *mut c_void, ptr.cast())
                };
            }

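            // General case: recover the pointer that `zalloc` actually
            // returned (stored by `allocate_layout` just below `ptr`) and
            // hand that back to `zfree`.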
            let original_ptr =
                unsafe { (ptr as *mut u8).sub(core::mem::size_of::<*const c_void>()) };
            let free_ptr = unsafe { core::ptr::read_unaligned(original_ptr as *mut *mut c_void) };

            unsafe { (self.zfree)(self.opaque, free_ptr) }
        }
    }
}

#[cfg(test)]
mod tests {
    use core::sync::atomic::{AtomicPtr, Ordering};
    use std::sync::Mutex;

    use super::*;

    static PTR: AtomicPtr<c_void> = AtomicPtr::new(core::ptr::null_mut());
    static MUTEX: Mutex<()> = Mutex::new(());

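    // The stub allocator below hands out whatever pointer is stored in the
    // global `PTR`; the mutex serializes the tests so that concurrently
    // running tests do not clobber each other's value.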
    unsafe extern "C" fn unaligned_alloc(
        _opaque: *mut c_void,
        _items: c_uint,
        _size: c_uint,
    ) -> *mut c_void {
        PTR.load(Ordering::Relaxed)
    }

    unsafe extern "C" fn unaligned_free(_opaque: *mut c_void, ptr: *mut c_void) {
        let expected = PTR.load(Ordering::Relaxed);
        assert_eq!(expected, ptr)
    }

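    // Feeds the allocator deliberately misaligned pointers (every offset from
    // 0 to 63 into a byte buffer) and checks that the caller still observes
    // properly aligned allocations, and that `deallocate` recovers the
    // original pointer.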
    fn unaligned_allocator_help<T>() {
        let mut buf = [0u8; 1024];

        let _guard = MUTEX.lock().unwrap();

        for i in 0..64 {
            let ptr = unsafe { buf.as_mut_ptr().add(i).cast() };
            PTR.store(ptr, Ordering::Relaxed);

            let allocator = Allocator {
                zalloc: unaligned_alloc,
                zfree: unaligned_free,
                opaque: core::ptr::null_mut(),
                _marker: PhantomData,
            };

            let ptr = allocator.allocate_raw::<T>().unwrap().as_ptr();
            assert_eq!(ptr as usize % core::mem::align_of::<T>(), 0);
            unsafe { allocator.deallocate(ptr, 1) }

            let ptr = allocator.allocate_slice_raw::<T>(10).unwrap().as_ptr();
            assert_eq!(ptr as usize % core::mem::align_of::<T>(), 0);
            unsafe { allocator.deallocate(ptr, 10) }
        }
    }

    #[test]
    fn unaligned_allocator_0() {
        unaligned_allocator_help::<()>()
    }

    #[test]
    fn unaligned_allocator_1() {
        unaligned_allocator_help::<u8>()
    }

    #[test]
    fn unaligned_allocator_2() {
        unaligned_allocator_help::<u16>()
    }

    #[test]
    fn unaligned_allocator_4() {
        unaligned_allocator_help::<u32>()
    }

    #[test]
    fn unaligned_allocator_8() {
        unaligned_allocator_help::<u64>()
    }

    #[test]
    fn unaligned_allocator_16() {
        unaligned_allocator_help::<u128>()
    }

    #[test]
    fn unaligned_allocator_32() {
        #[repr(C, align(32))]
        struct Align32(u8);

        unaligned_allocator_help::<Align32>()
    }

    #[test]
    fn unaligned_allocator_64() {
        #[repr(C, align(64))]
        struct Align64(u8);

        unaligned_allocator_help::<Align64>()
    }

    fn test_allocate_zeroed_help(allocator: Allocator) {
        let len = 42;
        let Some(buf) = allocator.allocate_zeroed(len) else {
            return;
        };

        let slice = unsafe { core::slice::from_raw_parts_mut(buf.as_ptr(), len) };

        assert_eq!(slice.iter().sum::<u8>(), 0);

        unsafe { allocator.deallocate(buf.as_ptr(), len) };
    }

    #[test]
    fn test_allocate_zeroed() {
        #[cfg(feature = "rust-allocator")]
        test_allocate_zeroed_help(Allocator::RUST);

        #[cfg(feature = "c-allocator")]
        test_allocate_zeroed_help(Allocator::C);

        test_allocate_zeroed_help(Allocator::FAIL);
    }

    #[test]
    fn test_deallocate_null() {
        unsafe {
            #[cfg(feature = "rust-allocator")]
            (Allocator::RUST.zfree)(core::ptr::null_mut(), core::ptr::null_mut());

            #[cfg(feature = "c-allocator")]
            (Allocator::C.zfree)(core::ptr::null_mut(), core::ptr::null_mut());

            (Allocator::FAIL.zfree)(core::ptr::null_mut(), core::ptr::null_mut());
        }
    }
}