1#![no_std]
2#![cfg_attr(docsrs, feature(doc_cfg))]
3#![deny(elided_lifetimes_in_paths)]
4#![allow(clippy::missing_transmute_annotations, clippy::type_complexity)]
5
6#[cfg(feature = "std")]
65extern crate std;
66
67#[cfg(feature = "std")]
68pub use std::error::Error;
69
70#[cfg(all(feature = "core-error", not(feature = "std")))]
71pub use core::error::Error;
72
73pub mod alloc;
74
75pub mod mem;
76
/// Alias for [`MemStack`] (presumably kept for backwards compatibility with
/// earlier versions of this crate — TODO confirm).
pub type DynStack = MemStack;
78
79use bytemuck::Pod;
80
81#[cfg(feature = "alloc")]
82#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
83pub use mem::MemBuffer;
84#[cfg(feature = "alloc")]
85#[cfg_attr(docsrs, doc(cfg(feature = "alloc")))]
86pub use mem::PodBuffer;
87
88mod stack_req;
89pub use stack_req::StackReq;
90
91use core::fmt;
92use core::fmt::Debug;
93use core::marker::PhantomData;
94use core::mem::MaybeUninit;
95use core::ops::Deref;
96use core::ops::DerefMut;
97use core::ptr::NonNull;
98use core::slice;
99
/// Stack allocator handing out memory that may be uninitialized.
///
/// This is a dynamically sized type wrapping the remaining buffer; it is only
/// ever used behind a (mutable) reference.
#[repr(transparent)]
pub struct MemStack {
    buffer: [MaybeUninit<u8>],
}
/// Stack allocator whose backing memory is always initialized bytes, so it is
/// suitable for plain-old-data ([`Pod`]) values.
#[repr(transparent)]
pub struct PodStack {
    buffer: [u8],
}
110
/// Owning array view into memory borrowed from a [`MemStack`]; drops its
/// elements when it goes out of scope.
pub struct DynArray<'a, T> {
    ptr: NonNull<T>,
    len: usize,
    // `&'a T` ties the array to the stack borrow it came from; the extra `T`
    // marks logical ownership of the elements (drop check / variance).
    __marker: PhantomData<(&'a T, T)>,
}
117
impl<T> DynArray<'_, T> {
    /// Decomposes the array into its raw pointer and length without dropping
    /// the elements; ownership of the elements passes to the caller.
    #[inline]
    #[doc(hidden)]
    pub fn into_raw_parts(self) -> (*mut T, usize) {
        // `ManuallyDrop` suppresses our `Drop` impl so the elements survive.
        let this = core::mem::ManuallyDrop::new(self);
        (this.ptr.as_ptr(), this.len)
    }

    /// Rebuilds an array from parts previously produced by
    /// [`Self::into_raw_parts`].
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null and, together with `len`, must describe `len`
    /// initialized values of `T` that this instance is allowed to drop.
    #[inline]
    #[doc(hidden)]
    pub unsafe fn from_raw_parts(ptr: *mut T, len: usize) -> Self {
        Self {
            ptr: NonNull::new_unchecked(ptr),
            len,
            __marker: PhantomData,
        }
    }
}
136
/// Exclusive borrow of a region of a [`PodStack`] that may temporarily hold
/// non-POD / uninitialized data; on drop the region is "laundered" (see
/// [`launder`]) before the parent `PodStack` regains access to it.
pub struct UnpodStack<'a> {
    ptr: NonNull<u8>,
    len: usize,
    __marker: PhantomData<&'a ()>,
}
143
144impl<T: Debug> Debug for DynArray<'_, T> {
145 fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
146 (**self).fmt(fmt)
147 }
148}
149
// SAFETY: `DynArray` logically owns its elements and only exposes them
// through the usual borrow rules, so thread-safety reduces to that of `T`.
unsafe impl<T> Send for DynArray<'_, T> where T: Send {}
unsafe impl<T> Sync for DynArray<'_, T> where T: Sync {}
152
// SAFETY: `UnpodStack` is an exclusive borrow of raw bytes (no interior
// pointers to thread-local state), so it may be sent/shared across threads.
unsafe impl Send for UnpodStack<'_> {}
unsafe impl Sync for UnpodStack<'_> {}
155
impl<T> Drop for DynArray<'_, T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: `ptr`/`len` describe `len` initialized elements owned by
        // this array. Only the elements are dropped here; the backing bytes
        // belong to the `MemStack` and are reclaimed by it.
        unsafe {
            core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(
                self.ptr.as_ptr(),
                self.len,
            ))
        };
    }
}
167
// Compile-time `if`/`else` over a `cfg` predicate: exactly one of the two
// blocks is compiled, depending on whether `$cfg` is active.
macro_rules! if_cfg {
    (if $cfg: meta $if_true: block else $if_false: block $(,)?) => {
        #[cfg($cfg)]
        {
            $if_true
        }
        #[cfg(not($cfg))]
        {
            $if_false
        }
    };
}
180
/// Makes the contents of `[ptr, ptr + len)` opaque to the optimizer, so code
/// that later reads those bytes through the parent `PodStack` cannot rely on
/// values written while the region was borrowed as an [`UnpodStack`].
///
/// In release builds on the listed architectures this is a zero-cost empty
/// `asm` block that merely hides `ptr` from the optimizer; otherwise (debug
/// builds, miri, other targets) the bytes are physically overwritten with a
/// fixed pattern so stale reads are at least deterministic.
#[inline(always)]
unsafe fn launder(ptr: *mut u8, len: usize) {
    unsafe {
        if_cfg!(if all(
            not(debug_assertions),
            not(miri),
            any(
                target_arch = "x86",
                target_arch = "x86_64",
                target_arch = "arm",
                target_arch = "aarch64",
                target_arch = "loongarch64",
                target_arch = "riscv32",
                target_arch = "riscv64",
            )
        ) {
            _ = len;
            // Optimization barrier: the asm block claims to observe `ptr`.
            core::arch::asm! { "/* {0} */", in(reg) ptr, options(nostack) }
        } else {
            // Fallback: actually scrub the buffer with an arbitrary byte.
            const ARBITRARY_BYTE: u8 = 0xCD;
            core::ptr::write_bytes(ptr, ARBITRARY_BYTE, len)
        });
    }
}
209
impl Drop for UnpodStack<'_> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: we hold an exclusive borrow of this byte range. Laundering
        // it prevents the parent `PodStack` from assuming anything about
        // values written while the region was borrowed.
        unsafe { launder(self.ptr.as_ptr(), self.len) };
    }
}
216
impl<T> Deref for DynArray<'_, T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &'_ Self::Target {
        // SAFETY: `ptr`/`len` always describe `len` initialized elements.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
    }
}
225
impl<T> DerefMut for DynArray<'_, T> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same invariant as `Deref`; `&mut self` guarantees exclusivity.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
    }
}
232
233impl<T> AsRef<[T]> for DynArray<'_, T> {
234 #[inline]
235 fn as_ref(&self) -> &'_ [T] {
236 unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.len) }
237 }
238}
239
240impl<T> AsMut<[T]> for DynArray<'_, T> {
241 #[inline]
242 fn as_mut(&mut self) -> &'_ mut [T] {
243 unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.len) }
244 }
245}
246
impl Deref for UnpodStack<'_> {
    type Target = MemStack;

    #[inline]
    fn deref(&self) -> &'_ Self::Target {
        // SAFETY: `MemStack` is `#[repr(transparent)]` over `[MaybeUninit<u8>]`,
        // so a raw slice pointer over our bytes can be cast to `*const MemStack`.
        unsafe {
            &*(core::ptr::slice_from_raw_parts(self.ptr.as_ptr(), self.len) as *const MemStack)
        }
    }
}
257
impl DerefMut for UnpodStack<'_> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        // SAFETY: same layout argument as `Deref`; `&mut self` guarantees
        // exclusive access to the byte range.
        unsafe {
            &mut *(core::ptr::slice_from_raw_parts_mut(self.ptr.as_ptr(), self.len)
                as *mut MemStack)
        }
    }
}
267
// Reinterprets an uninitialized byte buffer as `size` *elements* (not bytes)
// of `T`. Caller must guarantee the buffer is aligned for `T` and at least
// `size * size_of::<T>()` bytes long.
#[inline]
unsafe fn transmute_slice<T>(slice: &mut [MaybeUninit<u8>], size: usize) -> &mut [T] {
    slice::from_raw_parts_mut(slice.as_mut_ptr() as *mut T, size)
}
// Reinterprets an initialized byte buffer as `size` elements of a POD type.
// Same alignment/length contract as `transmute_slice`; `T: Pod` makes any
// byte pattern a valid `T`.
#[inline]
unsafe fn transmute_pod_slice<T: Pod>(slice: &mut [u8], size: usize) -> &mut [T] {
    slice::from_raw_parts_mut(slice.as_mut_ptr() as *mut T, size)
}
276
// Panic guard used while initializing arrays element-by-element: if dropped
// (i.e. on unwind), it drops the first `len` elements behind `ptr`.
struct DropGuard<T> {
    ptr: *mut T,
    len: usize,
}
281
impl<T> Drop for DropGuard<T> {
    #[inline]
    fn drop(&mut self) {
        // SAFETY: the guard's invariant is that `ptr..ptr + len` holds
        // initialized elements that nothing else will drop.
        unsafe {
            core::ptr::drop_in_place(core::ptr::slice_from_raw_parts_mut(self.ptr, self.len))
        };
    }
}
290
// Initializes `array` in place with `f(index)` and returns it as a slice of
// initialized values. If `f` panics, the elements written so far are dropped
// by the guard.
#[inline]
fn init_array_with<T>(mut f: impl FnMut(usize) -> T, array: &mut [MaybeUninit<T>]) -> &mut [T] {
    let len = array.len();
    let ptr = array.as_mut_ptr() as *mut T;

    // On unwind, drops the `guard.len` elements initialized so far.
    let mut guard = DropGuard { ptr, len: 0 };

    for i in 0..len {
        // Set the count *before* calling `f` (which may panic): element `i`
        // itself is not yet initialized at that point.
        guard.len = i;
        unsafe { ptr.add(i).write(f(i)) };
    }
    // Every element is now initialized; defuse the guard.
    core::mem::forget(guard);

    // SAFETY: all `len` elements were initialized above.
    unsafe { slice::from_raw_parts_mut(ptr, len) }
}
306
307#[inline]
308fn init_pod_array_with<T: Pod>(mut f: impl FnMut(usize) -> T, array: &mut [T]) -> &mut [T] {
309 for (i, x) in array.iter_mut().enumerate() {
310 *x = f(i);
311 }
312 array
313}
314
// Fills `ptr` with items from `iter`, stopping at whichever runs out first,
// and returns the number of elements initialized. If the iterator panics, the
// elements written so far are dropped by the guard.
#[inline]
unsafe fn init_array_with_iter<T, I: Iterator<Item = T>>(
    iter: I,
    ptr: &mut [MaybeUninit<T>],
) -> usize {
    let max_len = ptr.len();
    let ptr = ptr.as_mut_ptr();
    let mut guard = DropGuard { ptr, len: 0 };

    iter.take(max_len).enumerate().for_each(|(i, item)| {
        // Assigning a `MaybeUninit` never drops a previous value.
        *ptr.add(i) = MaybeUninit::new(item);
        // Count is bumped only after the write succeeds.
        guard.len += 1;
    });

    let len = guard.len;
    // Success: ownership of the initialized prefix passes to the caller.
    core::mem::forget(guard);

    len
}
334
335#[inline]
336fn init_pod_array_with_iter<T: Pod, I: Iterator<Item = T>>(iter: I, ptr: &mut [T]) -> usize {
337 let mut len = 0;
338 iter.zip(ptr).for_each(|(item, dst)| {
339 *dst = item;
340 len += 1;
341 });
342 len
343}
344
/// Asserts that `align` is a usable alignment for a type whose minimum
/// alignment is `alignof_val`.
///
/// # Panics
///
/// Panics if `align` is not a power of two, or if it is smaller than
/// `alignof_val`. `type_name` only appears in the panic message.
#[track_caller]
#[inline]
fn check_alignment(align: usize, alignof_val: usize, type_name: &'static str) {
    // Power-of-two test via the classic bit trick. `align == 0` also passes
    // this test, but is then rejected below since `alignof_val >= 1`.
    let is_power_of_two = align & align.wrapping_sub(1) == 0;
    assert!(
        is_power_of_two,
        r#"
requested alignment is not a power of two:
 - requested alignment: {}
"#,
        align
    );
    assert!(
        alignof_val <= align,
        r#"
requested alignment is less than the minimum valid alignment for `{}`:
 - requested alignment: {}
 - minimum alignment: {}
"#,
        type_name,
        align,
        alignof_val,
    );
}
368
/// Asserts that a buffer of `len` bytes is large enough to skip the
/// `align_offset` padding bytes needed to reach an `align`-aligned address.
///
/// # Panics
///
/// Panics if `len < align_offset`. `align` only appears in the panic message.
#[track_caller]
#[inline]
fn check_enough_space_for_align_offset(len: usize, align: usize, align_offset: usize) {
    assert!(
        len >= align_offset,
        // Fixed typo in user-facing message: "accomodate" -> "accommodate".
        r#"
buffer is not large enough to accommodate the requested alignment
 - buffer length: {}
 - requested alignment: {}
 - byte offset for alignment: {}
"#,
        len,
        align,
        align_offset,
    );
}
385
/// Asserts that `remaining_len` bytes can hold `array_len` elements of
/// `sizeof_val` bytes each. Zero-sized elements always fit.
///
/// # Panics
///
/// Panics if the array does not fit. `type_name` only appears in the message.
#[track_caller]
#[inline]
fn check_enough_space_for_array(
    remaining_len: usize,
    sizeof_val: usize,
    array_len: usize,
    type_name: &'static str,
) {
    // Zero-sized elements occupy no space: nothing to check.
    if sizeof_val == 0 {
        return;
    }
    // Compare via division so the check itself cannot overflow.
    let fits = remaining_len / sizeof_val >= array_len;
    assert!(
        fits,
        r#"
buffer is not large enough to allocate an array of type `{}` of the requested length:
 - remaining buffer length (after adjusting for alignment): {},
 - requested array length: {} ({} bytes),
"#,
        type_name,
        remaining_len,
        array_len,
        array_len * sizeof_val,
    );
}
410
/// Bump-allocator adapter over a [`MemStack`], implementing the crate's
/// [`alloc::Allocator`] trait so the stack can back allocator-aware buffers.
#[repr(transparent)]
pub struct Bump<'stack> {
    // `UnsafeCell` lets `allocate(&self)` advance the stack through a shared
    // reference.
    ptr: core::cell::UnsafeCell<&'stack mut MemStack>,
}
415
// SAFETY: handed-out memory lives as long as the `'stack` buffer and is never
// reused while the `Bump` borrow is alive; `deallocate` is a no-op, as is
// typical for bump allocators.
unsafe impl alloc::Allocator for Bump<'_> {
    fn allocate(&self, layout: core::alloc::Layout) -> Result<NonNull<[u8]>, alloc::AllocError> {
        // SAFETY: exclusive access through the `UnsafeCell`.
        // NOTE(review): assumes `allocate` is never re-entered while this
        // mutable borrow is live — confirm against the crate's usage.
        let ptr = unsafe { &mut *self.ptr.get() };
        // Swap in an empty stack so `old` can be consumed by value.
        let old = core::mem::replace(ptr, MemStack::new(&mut []));

        if old.can_hold(StackReq::new_aligned::<u8>(layout.size(), layout.align())) {
            let (alloc, new) = old.make_aligned_uninit::<u8>(layout.size(), layout.align());
            *ptr = new;

            let len = alloc.len();
            let ptr = alloc.as_mut_ptr() as *mut u8;
            // SAFETY: slice pointers are never null.
            Ok(unsafe { NonNull::new_unchecked(core::ptr::slice_from_raw_parts_mut(ptr, len)) })
        } else {
            Err(alloc::AllocError)
        }
    }

    #[inline]
    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: core::alloc::Layout) {
        // Bump allocation: individual deallocations are intentionally ignored.
        let _ = (ptr, layout);
    }
}
438
impl MemStack {
    /// Returns a new stack allocator backed by the provided uninitialized
    /// byte buffer.
    #[inline]
    pub fn new(buffer: &mut [MaybeUninit<u8>]) -> &mut Self {
        // SAFETY: `MemStack` is `#[repr(transparent)]` over `[MaybeUninit<u8>]`.
        unsafe { &mut *(buffer as *mut [MaybeUninit<u8>] as *mut Self) }
    }

    /// Returns a new stack allocator backed by `buffer`, reinterpreted as
    /// uninitialized bytes.
    #[inline]
    pub fn new_any<T>(buffer: &mut [MaybeUninit<T>]) -> &mut Self {
        // Size in bytes, not in elements.
        let len = core::mem::size_of_val(buffer);
        // SAFETY: any memory can be viewed as `MaybeUninit<u8>`.
        Self::new(unsafe { slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut _, len) })
    }

    /// Returns `true` if the stack can satisfy an allocation with the given
    /// size/alignment requirement.
    #[inline]
    #[must_use]
    pub fn can_hold(&self, alloc_req: StackReq) -> bool {
        let align = alloc_req.align_bytes();
        let size = alloc_req.size_bytes();
        // Padding needed so the allocation starts at an aligned address.
        let align_offset = self.buffer.as_ptr().align_offset(align);
        let self_size = self.buffer.len();
        (self_size >= align_offset) && (self_size - align_offset >= size)
    }

    /// Returns the remaining capacity of the stack, in bytes.
    #[inline]
    pub fn len_bytes(&self) -> usize {
        self.buffer.len()
    }

    /// Returns a pointer to the (possibly uninitialized) stack memory.
    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.buffer.as_ptr() as _
    }

    // Splits `buffer` into an aligned region holding `size` elements of
    // `sizeof_val` bytes each, and the remainder. Panics (naming `type_name`)
    // if alignment is invalid or the buffer is too small.
    #[track_caller]
    #[inline]
    fn split_buffer<'out>(
        buffer: &'out mut [MaybeUninit<u8>],
        size: usize,
        align: usize,
        sizeof_val: usize,
        alignof_val: usize,
        type_name: &'static str,
    ) -> (&'out mut [MaybeUninit<u8>], &'out mut [MaybeUninit<u8>]) {
        let len = buffer.len();
        let align_offset = buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, type_name);
        check_enough_space_for_align_offset(len, align, align_offset);
        check_enough_space_for_array(len - align_offset, sizeof_val, size, type_name);

        // SAFETY: `align_offset <= len` was checked just above.
        let buffer = unsafe { buffer.get_unchecked_mut(align_offset..) };
        let len = len - align_offset;

        let begin = buffer.as_mut_ptr();
        let begin_len = size * sizeof_val;
        // SAFETY: `begin_len <= len` was checked by `check_enough_space_for_array`.
        let mid = unsafe { begin.add(begin_len) };
        let mid_len = len - begin_len;
        // SAFETY: the two ranges are disjoint sub-ranges of `buffer`.
        unsafe {
            (
                slice::from_raw_parts_mut(begin, begin_len),
                slice::from_raw_parts_mut(mid, mid_len),
            )
        }
    }

    /// Returns a slice of `size` uninitialized `T` values aligned to `align`,
    /// plus the remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if `align` is invalid for `T` or the stack is too small.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_uninit<T>(
        &mut self,
        size: usize,
        align: usize,
    ) -> (&mut [MaybeUninit<T>], &mut Self) {
        let (taken, remaining) = Self::split_buffer(
            &mut self.buffer,
            size,
            align,
            core::mem::size_of::<T>(),
            core::mem::align_of::<T>(),
            core::any::type_name::<T>(),
        );

        (
            // SAFETY: `taken` is aligned for `T` and sized for `size` elements;
            // `MaybeUninit<T>` needs no initialization.
            unsafe { transmute_slice::<MaybeUninit<T>>(taken, size) },
            MemStack::new(remaining),
        )
    }

    /// Returns an owned array of `size` values aligned to `align`, each
    /// initialized with `f(index)`, plus the remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if `align` is invalid for `T`, the stack is too small, or `f`
    /// panics (already-initialized elements are dropped).
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_with<T>(
        &mut self,
        size: usize,
        align: usize,
        f: impl FnMut(usize) -> T,
    ) -> (DynArray<'_, T>, &mut Self) {
        let (taken, remaining) = self.make_aligned_uninit(size, align);
        let (len, ptr) = {
            let taken = init_array_with(f, taken);
            (taken.len(), taken.as_mut_ptr())
        };
        (
            DynArray {
                // SAFETY: slice pointers are never null.
                ptr: unsafe { NonNull::<T>::new_unchecked(ptr) },
                len,
                __marker: PhantomData,
            },
            remaining,
        )
    }

    /// Returns a typed but *uninitialized* POD slice with `T`'s natural
    /// alignment, plus the remaining stack.
    ///
    /// # Safety
    ///
    /// The returned slice's contents are left uninitialized even though the
    /// type is `T`; the caller must not read meaningful values before writing
    /// them (presumably why this is `unsafe` and hidden — TODO confirm).
    #[track_caller]
    #[inline]
    #[must_use]
    #[doc(hidden)]
    pub unsafe fn make_raw<T: Pod>(&mut self, size: usize) -> (&mut [T], &mut Self) {
        self.make_aligned_raw(size, core::mem::align_of::<T>())
    }

    /// Like [`Self::make_raw`], with an explicit alignment.
    ///
    /// # Safety
    ///
    /// Same contract as [`Self::make_raw`].
    #[track_caller]
    #[inline]
    #[must_use]
    #[doc(hidden)]
    pub unsafe fn make_aligned_raw<T: Pod>(
        &mut self,
        size: usize,
        align: usize,
    ) -> (&mut [T], &mut Self) {
        let (mem, stack) = self.make_aligned_uninit::<T>(size, align);
        // SAFETY: `T: Pod`, so the layouts of `[MaybeUninit<T>]` and `[T]`
        // match; validity of the (uninitialized) contents is the caller's
        // responsibility per the `# Safety` contract.
        unsafe { (&mut *(mem as *mut [MaybeUninit<T>] as *mut [T]), stack) }
    }

    /// Returns a slice of `size` uninitialized `T` values with `T`'s natural
    /// alignment, plus the remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if the stack is too small.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_uninit<T>(&mut self, size: usize) -> (&mut [MaybeUninit<T>], &mut Self) {
        self.make_aligned_uninit(size, core::mem::align_of::<T>())
    }

    /// Returns an owned array of `size` values with `T`'s natural alignment,
    /// each initialized with `f(index)`, plus the remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if the stack is too small or `f` panics.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_with<T>(
        &mut self,
        size: usize,
        f: impl FnMut(usize) -> T,
    ) -> (DynArray<'_, T>, &mut Self) {
        self.make_aligned_with(size, core::mem::align_of::<T>(), f)
    }

    /// Collects the iterator into an owned array aligned to `align`, stopping
    /// early (without panicking) if the stack runs out of room, plus the
    /// remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if `align` is invalid for the item type.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect_aligned<I>(
        &mut self,
        align: usize,
        iter: impl IntoIterator<Item = I>,
    ) -> (DynArray<'_, I>, &mut Self) {
        self.collect_aligned_impl(align, iter.into_iter())
    }

    /// Collects the iterator into an owned array with the item type's natural
    /// alignment, stopping early if the stack runs out of room, plus the
    /// remaining stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect<I>(
        &mut self,
        iter: impl IntoIterator<Item = I>,
    ) -> (DynArray<'_, I>, &mut Self) {
        self.collect_aligned_impl(core::mem::align_of::<I>(), iter.into_iter())
    }

    // Shared implementation for `collect`/`collect_aligned`.
    #[track_caller]
    #[inline]
    fn collect_aligned_impl<I: Iterator>(
        &mut self,
        align: usize,
        iter: I,
    ) -> (DynArray<'_, I::Item>, &mut Self) {
        let sizeof_val = core::mem::size_of::<I::Item>();
        let alignof_val = core::mem::align_of::<I::Item>();
        let align_offset = self.buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, core::any::type_name::<I::Item>());
        check_enough_space_for_align_offset(self.buffer.len(), align, align_offset);

        // SAFETY: `align_offset <= len` was checked just above.
        let buffer = unsafe { self.buffer.get_unchecked_mut(align_offset..) };
        let buffer_len = buffer.len();
        let buffer_ptr = buffer.as_mut_ptr();
        unsafe {
            let len = init_array_with_iter(
                iter,
                slice::from_raw_parts_mut(
                    buffer_ptr as *mut MaybeUninit<I::Item>,
                    // For zero-sized items any count fits; otherwise cap by
                    // how many elements the remaining bytes can hold.
                    if sizeof_val == 0 {
                        usize::MAX
                    } else {
                        buffer_len / sizeof_val
                    },
                ),
            );

            let remaining_slice = slice::from_raw_parts_mut(
                buffer_ptr.add(len * sizeof_val),
                buffer.len() - len * sizeof_val,
            );
            (
                DynArray {
                    ptr: NonNull::new_unchecked(buffer_ptr as *mut I::Item),
                    len,
                    __marker: PhantomData,
                },
                Self::new(remaining_slice),
            )
        }
    }

    /// Reborrows the stack as a [`Bump`] allocator.
    #[inline]
    pub fn bump<'bump, 'stack>(self: &'bump mut &'stack mut Self) -> &'bump mut Bump<'stack> {
        // SAFETY: `Bump` is `#[repr(transparent)]` over
        // `UnsafeCell<&mut MemStack>`, which has the layout of `&mut MemStack`.
        unsafe { &mut *(self as *mut &mut Self as *mut Bump<'stack>) }
    }
}
707
impl PodStack {
    /// Returns a new stack allocator backed by the provided byte buffer.
    #[inline]
    pub fn new(buffer: &mut [u8]) -> &mut Self {
        // SAFETY: `PodStack` is `#[repr(transparent)]` over `[u8]`.
        unsafe { &mut *(buffer as *mut [u8] as *mut Self) }
    }

    /// Returns a new stack allocator backed by `buffer`, reinterpreted as
    /// bytes.
    #[inline]
    pub fn new_any<T: Pod>(buffer: &mut [T]) -> &mut Self {
        // Size in bytes, not in elements.
        let len = core::mem::size_of_val(buffer);
        // SAFETY: `T: Pod` guarantees every byte of the buffer is initialized
        // and may be viewed as `u8`.
        Self::new(unsafe { slice::from_raw_parts_mut(buffer.as_mut_ptr() as *mut _, len) })
    }

    /// Returns `true` if the stack can satisfy an allocation with the given
    /// size/alignment requirement.
    #[inline]
    #[must_use]
    pub fn can_hold(&self, alloc_req: StackReq) -> bool {
        let align = alloc_req.align_bytes();
        let size = alloc_req.size_bytes();
        // Padding needed so the allocation starts at an aligned address.
        let align_offset = self.buffer.as_ptr().align_offset(align);
        let self_size = self.buffer.len();
        (self_size >= align_offset) && (self_size - align_offset >= size)
    }

    /// Returns the remaining capacity of the stack, in bytes.
    #[inline]
    pub fn len_bytes(&self) -> usize {
        self.buffer.len()
    }

    /// Returns a pointer to the stack memory.
    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.buffer.as_ptr() as _
    }

    // Splits `buffer` into an aligned region holding `size` elements of
    // `sizeof_val` bytes each, and the remainder. Panics (naming `type_name`)
    // if alignment is invalid or the buffer is too small.
    #[track_caller]
    #[inline]
    fn split_buffer<'out>(
        buffer: &'out mut [u8],
        size: usize,
        align: usize,
        sizeof_val: usize,
        alignof_val: usize,
        type_name: &'static str,
    ) -> (&'out mut [u8], &'out mut [u8]) {
        let len = buffer.len();
        let align_offset = buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, type_name);
        check_enough_space_for_align_offset(len, align, align_offset);
        check_enough_space_for_array(len - align_offset, sizeof_val, size, type_name);

        // SAFETY: `align_offset <= len` was checked just above.
        let buffer = unsafe { buffer.get_unchecked_mut(align_offset..) };
        let len = len - align_offset;

        let begin = buffer.as_mut_ptr();
        let begin_len = size * sizeof_val;
        // SAFETY: `begin_len <= len` was checked by `check_enough_space_for_array`.
        let mid = unsafe { begin.add(begin_len) };
        let mid_len = len - begin_len;
        // SAFETY: the two ranges are disjoint sub-ranges of `buffer`.
        unsafe {
            (
                slice::from_raw_parts_mut(begin, begin_len),
                slice::from_raw_parts_mut(mid, mid_len),
            )
        }
    }

    /// Returns a slice of `size` POD values aligned to `align` (contents are
    /// whatever bytes were previously in the buffer), plus the remaining
    /// stack.
    ///
    /// # Panics
    ///
    /// Panics if `align` is invalid for `T` or the stack is too small.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_raw<T: Pod>(&mut self, size: usize, align: usize) -> (&mut [T], &mut Self) {
        let (taken, remaining) = Self::split_buffer(
            &mut self.buffer,
            size,
            align,
            core::mem::size_of::<T>(),
            core::mem::align_of::<T>(),
            core::any::type_name::<T>(),
        );

        // SAFETY: `taken` is aligned and sized for `size` elements of `T`;
        // `T: Pod` makes any byte pattern valid.
        let taken = unsafe { transmute_pod_slice::<T>(taken, size) };
        (taken, Self::new(remaining))
    }

    /// Borrows an aligned region of the stack as an [`UnpodStack`], which may
    /// temporarily hold non-POD/uninitialized data, plus the remaining stack.
    ///
    /// # Safety
    ///
    /// The region is laundered when the `UnpodStack` is dropped; the caller
    /// must uphold whatever additional invariants the crate requires for the
    /// borrowed region (NOTE(review): exact contract not visible here — it is
    /// presumably documented on the public API; confirm before relying on it).
    pub unsafe fn make_aligned_unpod(
        &mut self,
        size: usize,
        align: usize,
    ) -> (UnpodStack<'_>, &mut Self) {
        let (taken, remaining) = Self::split_buffer(&mut self.buffer, size, align, 1, 1, "[Bytes]");
        (
            UnpodStack {
                // SAFETY: slice pointers are never null.
                ptr: NonNull::new_unchecked(taken.as_mut_ptr()),
                len: size,
                __marker: PhantomData,
            },
            Self::new(remaining),
        )
    }

    /// Returns a slice of `size` POD values aligned to `align`, each
    /// initialized with `f(index)`, plus the remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if `align` is invalid for `T`, the stack is too small, or `f`
    /// panics.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_aligned_with<T: Pod>(
        &mut self,
        size: usize,
        align: usize,
        f: impl FnMut(usize) -> T,
    ) -> (&mut [T], &mut Self) {
        let (taken, remaining) = self.make_aligned_raw(size, align);
        let taken = init_pod_array_with(f, taken);
        (taken, remaining)
    }

    /// Returns a slice of `size` POD values with `T`'s natural alignment
    /// (contents are whatever bytes were previously in the buffer), plus the
    /// remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if the stack is too small.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_raw<T: Pod>(&mut self, size: usize) -> (&mut [T], &mut Self) {
        self.make_aligned_raw(size, core::mem::align_of::<T>())
    }

    /// Returns a slice of `size` POD values with `T`'s natural alignment,
    /// each initialized with `f(index)`, plus the remaining stack.
    ///
    /// # Panics
    ///
    /// Panics if the stack is too small or `f` panics.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn make_with<T: Pod>(
        &mut self,
        size: usize,
        f: impl FnMut(usize) -> T,
    ) -> (&mut [T], &mut Self) {
        self.make_aligned_with(size, core::mem::align_of::<T>(), f)
    }

    /// Collects the iterator into a slice aligned to `align`, stopping early
    /// (without panicking) if the stack runs out of room, plus the remaining
    /// stack.
    ///
    /// # Panics
    ///
    /// Panics if `align` is invalid for the item type.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect_aligned<I: Pod>(
        &mut self,
        align: usize,
        iter: impl IntoIterator<Item = I>,
    ) -> (&mut [I], &mut Self) {
        self.collect_aligned_impl(align, iter.into_iter())
    }

    /// Collects the iterator into a slice with the item type's natural
    /// alignment, stopping early if the stack runs out of room, plus the
    /// remaining stack.
    #[track_caller]
    #[inline]
    #[must_use]
    pub fn collect<I: Pod>(&mut self, iter: impl IntoIterator<Item = I>) -> (&mut [I], &mut Self) {
        self.collect_aligned_impl(core::mem::align_of::<I>(), iter.into_iter())
    }

    // Shared implementation for `collect`/`collect_aligned`.
    #[track_caller]
    #[inline]
    fn collect_aligned_impl<I: Iterator>(
        &mut self,
        align: usize,
        iter: I,
    ) -> (&mut [I::Item], &mut Self)
    where
        I::Item: Pod,
    {
        let sizeof_val = core::mem::size_of::<I::Item>();
        let alignof_val = core::mem::align_of::<I::Item>();
        let align_offset = self.buffer.as_mut_ptr().align_offset(align);

        check_alignment(align, alignof_val, core::any::type_name::<I::Item>());
        check_enough_space_for_align_offset(self.buffer.len(), align, align_offset);

        // SAFETY: `align_offset <= len` was checked just above.
        let buffer = unsafe { self.buffer.get_unchecked_mut(align_offset..) };
        let buffer_len = buffer.len();
        let buffer_ptr = buffer.as_mut_ptr();
        unsafe {
            let len = init_pod_array_with_iter(
                iter,
                slice::from_raw_parts_mut(
                    buffer_ptr as *mut I::Item,
                    // For zero-sized items any count fits; otherwise cap by
                    // how many elements the remaining bytes can hold.
                    if sizeof_val == 0 {
                        usize::MAX
                    } else {
                        buffer_len / sizeof_val
                    },
                ),
            );

            let taken = slice::from_raw_parts_mut(buffer_ptr as *mut I::Item, len);
            let remaining_slice = slice::from_raw_parts_mut(
                buffer_ptr.add(len * sizeof_val),
                buffer_len - len * sizeof_val,
            );
            (taken, Self::new(remaining_slice))
        }
    }
}
954
#[cfg(all(test, feature = "alloc"))]
mod dyn_stack_tests {
    // Tests for `MemStack`: empty/overflow allocation, `collect` truncation
    // when the buffer is too small, nested/disjoint borrows, and drop counts
    // of `DynArray` elements.
    use super::*;
    use alloc::Global;

    #[test]
    fn empty_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(0, |i| i as i32);
    }

    #[test]
    #[should_panic]
    fn empty_overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(1, |i| i as i32);
    }

    #[test]
    fn empty_collect_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.collect(0..0);
    }

    #[test]
    fn empty_collect_overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(0), Global);
        let stack = MemStack::new(&mut buf);
        // `collect` truncates instead of panicking when out of room.
        let (arr0, _stack) = stack.collect(0..1);
        assert!(arr0.is_empty());
    }

    #[test]
    #[should_panic]
    fn overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(1), Global);
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(2, |i| i as i32);
    }

    #[test]
    fn collect_overflow_in() {
        let mut buf = MemBuffer::new_in(StackReq::new::<i32>(1), Global);
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(1..3);
        assert_eq!(arr0.len(), 1);
        assert_eq!(arr0[0], 1)
    }

    #[test]
    fn empty() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(0, |i| i as i32);
    }

    #[test]
    #[should_panic]
    fn empty_overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(1, |i| i as i32);
    }

    #[test]
    fn empty_collect() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.collect(0..0);
    }

    #[test]
    fn empty_collect_overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(0));
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(0..1);
        assert!(arr0.is_empty());
    }

    #[test]
    #[should_panic]
    fn overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(1));
        let stack = MemStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(2, |i| i as i32);
    }

    #[test]
    fn collect_overflow() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(1));
        let stack = MemStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(1..3);
        assert_eq!(arr0.len(), 1);
        assert_eq!(arr0[0], 1)
    }

    #[test]
    fn basic_nested() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(6));

        let stack = MemStack::new(&mut buf);
        assert!(stack.can_hold(StackReq::new::<i32>(6)));
        assert!(!stack.can_hold(StackReq::new::<i32>(7)));

        let (arr0, stack) = stack.make_with::<i32>(3, |i| i as i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        // The second allocation must not clobber the first.
        let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(3));

        let stack = MemStack::new(&mut buf);

        // Sequential borrows may reuse the same memory.
        {
            let (arr0, _) = stack.make_with::<i32>(3, |i| i as i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn basic_nested_collect() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(6));
        let stack = MemStack::new(&mut buf);

        let (arr0, stack) = stack.collect(0..3_i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        let (arr1, _) = stack.collect(3..6_i32);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint_collect() {
        let mut buf = MemBuffer::new(StackReq::new::<i32>(3));

        let stack = MemStack::new(&mut buf);

        {
            let (arr0, _) = stack.collect(0..3_i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.collect(3..6_i32);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn drop_nested() {
        use core::sync::atomic::{AtomicI32, Ordering};
        static DROP_COUNT: AtomicI32 = AtomicI32::new(0);

        struct CountedDrop;
        impl Drop for CountedDrop {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let mut buf = MemBuffer::new(StackReq::new::<CountedDrop>(6));
        let stack = MemStack::new(&mut buf);

        // `_arr` is dropped at the end of each block; its 3 (then 4) elements
        // must each have their destructor run.
        let stack = {
            let (_arr, stack) = stack.make_with(3, |_| CountedDrop);
            stack
        };
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 3);
        let _stack = {
            let (_arr, stack) = stack.make_with(4, |_| CountedDrop);
            stack
        };
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 7);
    }

    #[test]
    fn drop_disjoint() {
        use core::sync::atomic::{AtomicI32, Ordering};
        static DROP_COUNT: AtomicI32 = AtomicI32::new(0);

        struct CountedDrop;
        impl Drop for CountedDrop {
            fn drop(&mut self) {
                DROP_COUNT.fetch_add(1, Ordering::SeqCst);
            }
        }

        let mut buf = MemBuffer::new(StackReq::new::<CountedDrop>(6));
        let stack = MemStack::new(&mut buf);

        {
            let _ = stack.make_with(3, |_| CountedDrop);
            assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 3);
        }

        {
            let _ = stack.make_with(4, |_| CountedDrop);
            assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 7);
        }
    }
}
1196
#[cfg(all(test, feature = "alloc"))]
mod pod_stack_tests {
    // Tests for `PodStack`: empty/overflow allocation, `collect` truncation,
    // nested/disjoint borrows, raw (uninitialized-content) allocation, and
    // the `make_aligned_unpod` laundering round-trip.
    use super::*;

    #[test]
    fn empty() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(0, |i| i as i32);
    }

    #[test]
    #[should_panic]
    fn empty_overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(1, |i| i as i32);
    }

    #[test]
    fn empty_collect() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.collect(0..0);
    }

    #[test]
    fn empty_collect_overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(0));
        let stack = PodStack::new(&mut buf);
        // `collect` truncates instead of panicking when out of room.
        let (arr0, _stack) = stack.collect(0..1);
        assert!(arr0.is_empty());
    }

    #[test]
    #[should_panic]
    fn overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(1));
        let stack = PodStack::new(&mut buf);
        let (_arr0, _stack) = stack.make_with::<i32>(2, |i| i as i32);
    }

    #[test]
    fn collect_overflow() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(1));
        let stack = PodStack::new(&mut buf);
        let (arr0, _stack) = stack.collect(1..3);
        assert_eq!(arr0.len(), 1);
        assert_eq!(arr0[0], 1)
    }

    #[test]
    fn basic_nested() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(6));

        let stack = PodStack::new(&mut buf);
        assert!(stack.can_hold(StackReq::new::<i32>(6)));
        assert!(!stack.can_hold(StackReq::new::<i32>(7)));

        let (arr0, stack) = stack.make_with::<i32>(3, |i| i as i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        // The second allocation must not clobber the first.
        let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));

        let stack = PodStack::new(&mut buf);

        // Sequential borrows may reuse the same memory.
        {
            let (arr0, _) = stack.make_with::<i32>(3, |i| i as i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.make_with::<i32>(3, |i| i as i32 + 3);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn basic_nested_collect() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(6));
        let stack = PodStack::new(&mut buf);

        let (arr0, stack) = stack.collect(0..3_i32);
        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        let (arr1, _) = stack.collect(3..6_i32);

        assert_eq!(arr0[0], 0);
        assert_eq!(arr0[1], 1);
        assert_eq!(arr0[2], 2);

        assert_eq!(arr1[0], 3);
        assert_eq!(arr1[1], 4);
        assert_eq!(arr1[2], 5);
    }

    #[test]
    fn basic_disjoint_collect() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));

        let stack = PodStack::new(&mut buf);

        {
            let (arr0, _) = stack.collect(0..3_i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            let (arr1, _) = stack.collect(3..6_i32);

            assert_eq!(arr1[0], 3);
            assert_eq!(arr1[1], 4);
            assert_eq!(arr1[2], 5);
        }
    }

    #[test]
    fn make_raw() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));
        buf.fill(0);

        let stack = PodStack::new(&mut buf);

        {
            // `make_raw` exposes whatever bytes were already there (zeroed above).
            let (arr0, _) = stack.make_raw::<i32>(3);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 0);
            assert_eq!(arr0[2], 0);
        }
        {
            let (arr0, _) = stack.collect(0..3_i32);
            assert_eq!(arr0[0], 0);
            assert_eq!(arr0[1], 1);
            assert_eq!(arr0[2], 2);
        }
        {
            // The values written by `collect` above are still visible.
            let (arr1, _) = stack.make_raw::<i32>(3);

            assert_eq!(arr1[0], 0);
            assert_eq!(arr1[1], 1);
            assert_eq!(arr1[2], 2);
        }
    }

    #[test]
    fn make_unpod() {
        let mut buf = PodBuffer::new(StackReq::new::<i32>(3));
        let stack = PodStack::new(&mut buf);

        {
            // Temporarily treat the region as a `MemStack` that may hold
            // uninitialized data.
            let (mut stack, _) = unsafe { stack.make_aligned_unpod(12, 4) };

            let stack = &mut *stack;
            let (mem, _) = stack.make_uninit::<u32>(3);
            mem.fill(MaybeUninit::uninit());

            let mut stack = stack;
            let mut buf = MemBuffer::new_in(
                StackReq::new::<u32>(3),
                alloc::DynAlloc::from_mut(stack.bump()),
            );
            let stack = MemStack::new(&mut buf);
            let _ = stack.make_uninit::<u32>(3);
        }

        // After the `UnpodStack` is dropped (and the memory laundered), every
        // byte must again be readable as an initialized POD value.
        let (mem, _) = stack.make_raw::<u32>(3);
        for x in mem {
            *x = *x;
        }
    }
}