#![no_std]
#![cfg_attr(docsrs, feature(doc_cfg))]

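//! Vector and box types whose allocations are aligned to a caller-chosen boundary,
//! with the alignment tracked either at runtime ([`RuntimeAlign`]) or at compile time
//! ([`ConstAlign`]).
//!
//! A minimal usage sketch (illustrative; it assumes this crate is built under the
//! name `aligned_vec`):
//!
//! ```
//! use aligned_vec::{avec, AVec, CACHELINE_ALIGN};
//!
//! let mut v = AVec::<f32>::with_capacity(CACHELINE_ALIGN, 4);
//! v.extend_from_slice(&[1.0, 2.0, 3.0, 4.0]);
//! assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);
//!
//! let w = avec![[64]| 0u8; 32];
//! assert_eq!(w.as_ptr().align_offset(64), 0);
//! ```
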
use core::{
    alloc::Layout,
    fmt::Debug,
    marker::PhantomData,
    mem::{align_of, size_of, ManuallyDrop},
    ops::{Deref, DerefMut},
    ptr::{null_mut, NonNull},
};
use equator::assert;
use raw::ARawVec;

mod raw;
extern crate alloc;

/// Default alignment (in bytes) used by [`AVec`] and [`ABox`], selected per target
/// architecture.
pub const CACHELINE_ALIGN: usize = {
    #[cfg(any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
    ))]
    {
        128
    }
    #[cfg(any(
        target_arch = "arm",
        target_arch = "mips",
        target_arch = "mips64",
        target_arch = "riscv64",
    ))]
    {
        32
    }
    #[cfg(target_arch = "s390x")]
    {
        256
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        target_arch = "aarch64",
        target_arch = "powerpc64",
        target_arch = "arm",
        target_arch = "mips",
        target_arch = "mips64",
        target_arch = "riscv64",
        target_arch = "s390x",
    )))]
    {
        64
    }
};

mod private {
    pub trait Seal {}
}

/// Trait for types that specify the alignment of an allocation, either at runtime
/// ([`RuntimeAlign`]) or at compile time ([`ConstAlign`]).
pub trait Alignment: Copy + private::Seal {
    /// Creates an alignment object from the requested alignment and the minimum
    /// alignment required by the element type.
    #[must_use]
    fn new(align: usize, minimum_align: usize) -> Self;
    /// Returns the alignment in bytes.
    #[must_use]
    fn alignment(self, minimum_align: usize) -> usize;
}

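/// Alignment stored as a runtime value.
///
/// A short usage sketch (illustrative; it assumes this crate is built under the name
/// `aligned_vec`):
///
/// ```
/// use aligned_vec::{AVec, RuntimeAlign};
///
/// let v = AVec::<f32, RuntimeAlign>::with_capacity(4096, 8);
/// assert_eq!(v.alignment(), 4096);
/// assert_eq!(v.as_ptr().align_offset(4096), 0);
/// ```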
#[derive(Copy, Clone)]
pub struct RuntimeAlign {
    align: usize,
}

/// Alignment fixed at compile time by the const parameter `ALIGN`.
#[derive(Copy, Clone)]
pub struct ConstAlign<const ALIGN: usize>;

impl private::Seal for RuntimeAlign {}
impl<const ALIGN: usize> private::Seal for ConstAlign<ALIGN> {}

impl<T, A: Alignment> core::convert::From<ABox<[T], A>> for AVec<T, A> {
    #[inline]
    fn from(value: ABox<[T], A>) -> Self {
        let len = (*value).len();
        let (ptr, align) = ABox::into_raw_parts(value);
        unsafe { AVec::<T, A>::from_raw_parts(ptr as *mut T, align, len, len) }
    }
}

impl Alignment for RuntimeAlign {
    #[inline]
    #[track_caller]
    fn new(align: usize, minimum_align: usize) -> Self {
        if align != 0 {
            assert!(
                align.is_power_of_two(),
                "alignment ({align}) is not a power of two.",
            );
        }
        RuntimeAlign {
            align: fix_alignment(align, minimum_align),
        }
    }

    #[inline]
    fn alignment(self, minimum_align: usize) -> usize {
        let _ = minimum_align;
        self.align
    }
}
impl<const ALIGN: usize> Alignment for ConstAlign<ALIGN> {
    #[inline]
    #[track_caller]
    fn new(align: usize, minimum_align: usize) -> Self {
        let _ = minimum_align;
        let max = Ord::max;
        if align != 0 {
            assert!(
                align.is_power_of_two(),
                "alignment ({align}) is not a power of two.",
            );
        }
        assert!(
            ALIGN.is_power_of_two(),
            "alignment ({ALIGN}) is not a power of two.",
        );
        assert!(
            align <= max(ALIGN, minimum_align),
            "provided alignment ({align}) is greater than the specified constant value ({ALIGN})",
        );
        ConstAlign::<ALIGN>
    }

    #[inline]
    fn alignment(self, minimum_align: usize) -> usize {
        fix_alignment(ALIGN, minimum_align)
    }
}

/// A growable vector type, analogous to `alloc::vec::Vec`, whose allocation is
/// aligned to a caller-chosen boundary.
pub struct AVec<T, A: Alignment = ConstAlign<CACHELINE_ALIGN>> {
    buf: ARawVec<T, A>,
    len: usize,
}

/// An owned, heap-allocated value, analogous to `alloc::boxed::Box`, whose allocation
/// is aligned to a caller-chosen boundary.
pub struct ABox<T: ?Sized, A: Alignment = ConstAlign<CACHELINE_ALIGN>> {
    ptr: NonNull<T>,
    align: A,
    _marker: PhantomData<T>,
}

impl<T: ?Sized, A: Alignment> Deref for ABox<T, A> {
    type Target = T;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { &*self.ptr.as_ptr() }
    }
}

impl<T: ?Sized, A: Alignment> DerefMut for ABox<T, A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { &mut *self.ptr.as_ptr() }
    }
}

impl<T: ?Sized, A: Alignment> AsRef<T> for ABox<T, A> {
    #[inline]
    fn as_ref(&self) -> &T {
        &**self
    }
}

impl<T: ?Sized, A: Alignment> AsMut<T> for ABox<T, A> {
    #[inline]
    fn as_mut(&mut self) -> &mut T {
        &mut **self
    }
}

/// Drop guard that deallocates the buffer; constructing it before `drop_in_place`
/// ensures the memory is freed even if the value's `Drop` implementation panics.
struct AllocDrop {
    ptr: *mut u8,
    size_bytes: usize,
    align: usize,
}
impl Drop for AllocDrop {
    #[inline]
    fn drop(&mut self) {
        if self.size_bytes > 0 {
            unsafe {
                alloc::alloc::dealloc(
                    self.ptr,
                    alloc::alloc::Layout::from_size_align_unchecked(self.size_bytes, self.align),
                )
            }
        }
    }
}

impl<T: ?Sized, A: Alignment> Drop for ABox<T, A> {
    #[inline]
    fn drop(&mut self) {
        let size_bytes = core::mem::size_of_val(self.deref_mut());
        let align_bytes = core::mem::align_of_val(self.deref_mut());
        let ptr = self.deref_mut() as *mut T;
        let _alloc_drop = AllocDrop {
            ptr: ptr as *mut u8,
            size_bytes,
            align: self.align.alignment(align_bytes),
        };
        unsafe { ptr.drop_in_place() };
    }
}

impl<T, A: Alignment> Deref for AVec<T, A> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}
impl<T, A: Alignment> DerefMut for AVec<T, A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut_slice()
    }
}

impl<T, A: Alignment> AsRef<[T]> for AVec<T, A> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        &**self
    }
}

impl<T, A: Alignment> AsMut<[T]> for AVec<T, A> {
    #[inline]
    fn as_mut(&mut self) -> &mut [T] {
        &mut **self
    }
}

impl<T, A: Alignment> ABox<T, A> {
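    /// Creates a new [`ABox`] holding `value`, with an allocation aligned to at least
    /// `align` bytes (and at least the alignment of `T`).
    ///
    /// A short usage sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::ABox;
    ///
    /// let b = ABox::<i32>::new(64, 42);
    /// assert_eq!(*b, 42);
    /// assert_eq!(b.alignment() % 64, 0);
    /// ```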
    #[inline]
    #[track_caller]
    pub fn new(align: usize, value: T) -> Self {
        let align = A::new(align, align_of::<T>()).alignment(align_of::<T>());
        let ptr = if size_of::<T>() == 0 {
            null_mut::<u8>().wrapping_add(align) as *mut T
        } else {
            unsafe { raw::with_capacity_unchecked(1, align, size_of::<T>()) as *mut T }
        };
        unsafe { ptr.write(value) };
        unsafe { Self::from_raw_parts(align, ptr) }
    }

    /// Returns the alignment of the allocation, in bytes.
    #[inline]
    pub fn alignment(&self) -> usize {
        self.align.alignment(align_of::<T>())
    }
}

impl<T: ?Sized, A: Alignment> ABox<T, A> {
    /// Creates an [`ABox`] from a pointer to its contents and the alignment of its
    /// allocation.
    ///
    /// # Safety
    ///
    /// `ptr` must be non-null, properly aligned, and point to a valid allocation with
    /// the given alignment, such as one obtained from [`ABox::into_raw_parts`].
    #[inline]
    #[track_caller]
    pub unsafe fn from_raw_parts(align: usize, ptr: *mut T) -> Self {
        Self {
            ptr: NonNull::<T>::new_unchecked(ptr),
            align: A::new(align, core::mem::align_of_val(&*ptr)),
            _marker: PhantomData,
        }
    }

    /// Decomposes the box into a pointer to its contents and the alignment of its
    /// allocation, without dropping or freeing anything.
    #[inline]
    pub fn into_raw_parts(this: Self) -> (*mut T, usize) {
        let this = ManuallyDrop::new(this);
        let align = core::mem::align_of_val(unsafe { &*this.ptr.as_ptr() });
        (this.ptr.as_ptr(), this.align.alignment(align))
    }
}

impl<T, A: Alignment> Drop for AVec<T, A> {
    #[inline]
    fn drop(&mut self) {
        // The elements are dropped here; the buffer itself is freed by `ARawVec`'s `Drop`.
        unsafe { (self.as_mut_slice() as *mut [T]).drop_in_place() }
    }
}

#[inline]
fn fix_alignment(align: usize, base_align: usize) -> usize {
    align.max(base_align)
}

/// Error returned by the `try_reserve` family of methods when the requested capacity
/// overflows or the allocator fails.
#[derive(Copy, Clone, Debug)]
pub enum TryReserveError {
    CapacityOverflow,
    AllocError { layout: Layout },
}

impl<T, A: Alignment> AVec<T, A> {
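    /// Creates a new, empty [`AVec`] with an alignment of at least `align` bytes.
    ///
    /// A short usage sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::AVec;
    ///
    /// let mut v = AVec::<i32>::new(64);
    /// assert!(v.is_empty());
    /// v.push(1);
    /// v.push(2);
    /// assert_eq!(&*v, &[1, 2]);
    /// assert_eq!(v.as_ptr().align_offset(64), 0);
    /// ```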
    #[inline]
    #[must_use]
    #[track_caller]
    pub fn new(align: usize) -> Self {
        unsafe {
            Self {
                buf: ARawVec::new_unchecked(
                    A::new(align, align_of::<T>()).alignment(align_of::<T>()),
                ),
                len: 0,
            }
        }
    }

    /// Creates a new, empty [`AVec`] with an alignment of at least `align` bytes and
    /// room for at least `capacity` elements.
    #[inline]
    #[must_use]
    #[track_caller]
    pub fn with_capacity(align: usize, capacity: usize) -> Self {
        unsafe {
            Self {
                buf: ARawVec::with_capacity_unchecked(
                    capacity,
                    A::new(align, align_of::<T>()).alignment(align_of::<T>()),
                ),
                len: 0,
            }
        }
    }

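    /// Rebuilds an [`AVec`] from raw parts previously produced by
    /// [`AVec::into_raw_parts`].
    ///
    /// # Safety
    ///
    /// The arguments must describe a valid allocation, e.g. they must come from a
    /// previous call to [`AVec::into_raw_parts`], and the first `len` elements must be
    /// initialized.
    ///
    /// A round-trip sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::AVec;
    ///
    /// let v = AVec::<u64>::from_iter(64, 0..4);
    /// let (ptr, align, len, cap) = v.into_raw_parts();
    /// // SAFETY: the parts come directly from `into_raw_parts`.
    /// let v = unsafe { AVec::<u64>::from_raw_parts(ptr, align, len, cap) };
    /// assert_eq!(&*v, &[0, 1, 2, 3]);
    /// ```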
    #[inline]
    #[must_use]
    pub unsafe fn from_raw_parts(ptr: *mut T, align: usize, len: usize, capacity: usize) -> Self {
        Self {
            buf: ARawVec::from_raw_parts(ptr, capacity, align),
            len,
        }
    }

    /// Decomposes the vector into its raw parts: `(pointer, alignment, length, capacity)`.
    #[inline]
    pub fn into_raw_parts(self) -> (*mut T, usize, usize, usize) {
        let mut this = ManuallyDrop::new(self);
        let len = this.len();
        let cap = this.capacity();
        let align = this.alignment();
        let ptr = this.as_mut_ptr();
        (ptr, align, len, cap)
    }

    /// Returns the number of elements in the vector.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.len
    }

    /// Returns `true` if the vector contains no elements.
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the number of elements the vector can hold without reallocating.
    #[inline]
    #[must_use]
    pub fn capacity(&self) -> usize {
        self.buf.capacity()
    }

    /// Reserves capacity for at least `additional` more elements.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        if additional > self.capacity().wrapping_sub(self.len) {
            unsafe { self.buf.grow_amortized(self.len, additional) };
        }
    }

    /// Fallible version of [`AVec::reserve`].
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
        if additional > self.capacity().wrapping_sub(self.len) {
            unsafe { self.buf.try_grow_amortized(self.len, additional) }
        } else {
            Ok(())
        }
    }

    /// Reserves the minimum capacity for `additional` more elements, without amortized
    /// over-allocation.
    #[inline]
    pub fn reserve_exact(&mut self, additional: usize) {
        if additional > self.capacity().wrapping_sub(self.len) {
            unsafe { self.buf.grow_exact(self.len, additional) };
        }
    }

    /// Fallible version of [`AVec::reserve_exact`].
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), TryReserveError> {
        if additional > self.capacity().wrapping_sub(self.len) {
            unsafe { self.buf.try_grow_exact(self.len, additional) }
        } else {
            Ok(())
        }
    }

    /// Returns the alignment of the vector's allocation, in bytes.
    #[inline]
    #[must_use]
    pub fn alignment(&self) -> usize {
        self.buf.align()
    }

    /// Returns a raw pointer to the vector's buffer.
    #[inline]
    #[must_use]
    pub fn as_ptr(&self) -> *const T {
        self.buf.as_ptr()
    }

    /// Returns a mutable raw pointer to the vector's buffer.
    #[inline]
    #[must_use]
    pub fn as_mut_ptr(&mut self) -> *mut T {
        self.buf.as_mut_ptr()
    }

    /// Returns a slice view of the vector's contents.
    #[inline]
    #[must_use]
    pub fn as_slice(&self) -> &[T] {
        let len = self.len();
        let ptr = self.as_ptr();

        unsafe { core::slice::from_raw_parts(ptr, len) }
    }

    /// Returns a mutable slice view of the vector's contents.
    #[inline]
    #[must_use]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        let len = self.len();
        let ptr = self.as_mut_ptr();

        unsafe { core::slice::from_raw_parts_mut(ptr, len) }
    }

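    /// Appends `value` to the end of the vector, reallocating if it is at capacity.
    ///
    /// A short usage sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::AVec;
    ///
    /// let mut v = AVec::<i32>::new(0);
    /// v.push(1);
    /// v.push(2);
    /// v.push(3);
    /// assert_eq!(v.pop(), Some(3));
    /// assert_eq!(&*v, &[1, 2]);
    /// ```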
    #[inline]
    pub fn push(&mut self, value: T) {
        if self.len == self.capacity() {
            unsafe { self.buf.grow_amortized(self.len, 1) };
        }

        unsafe {
            let past_the_end = self.as_mut_ptr().add(self.len);
            past_the_end.write(value);
            self.len += 1;
        }
    }

    /// Removes the last element from the vector and returns it, or `None` if the
    /// vector is empty.
    #[inline]
    pub fn pop(&mut self) -> Option<T> {
        if self.len == 0 {
            None
        } else {
            self.len -= 1;
            Some(unsafe { self.as_mut_ptr().add(self.len()).read() })
        }
    }

    /// Shrinks the capacity of the vector to `min_capacity`, or to its length if that
    /// is larger.
    #[inline]
    pub fn shrink_to(&mut self, min_capacity: usize) {
        let min_capacity = min_capacity.max(self.len());
        if self.capacity() > min_capacity {
            unsafe { self.buf.shrink_to(min_capacity) };
        }
    }

    /// Shrinks the capacity of the vector to its length.
    #[inline]
    pub fn shrink_to_fit(&mut self) {
        if self.capacity() > self.len {
            unsafe { self.buf.shrink_to(self.len) };
        }
    }

    /// Shortens the vector, keeping the first `len` elements and dropping the rest.
    /// Has no effect if `len` is greater than or equal to the current length.
    #[inline]
    pub fn truncate(&mut self, len: usize) {
        if len < self.len {
            let old_len = self.len;
            self.len = len;
            unsafe {
                let ptr = self.as_mut_ptr();
                core::ptr::slice_from_raw_parts_mut(ptr.add(len), old_len - len).drop_in_place()
            }
        }
    }

    /// Removes and drops all elements, keeping the allocated capacity.
    #[inline]
    pub fn clear(&mut self) {
        let old_len = self.len;
        self.len = 0;
        unsafe {
            let ptr = self.as_mut_ptr();
            core::ptr::slice_from_raw_parts_mut(ptr, old_len).drop_in_place()
        }
    }

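    /// Converts the vector into a boxed slice ([`ABox`]), dropping any excess
    /// capacity.
    ///
    /// A short usage sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::{ABox, AVec};
    ///
    /// let v = AVec::<i32>::from_slice(64, &[1, 2, 3]);
    /// let b: ABox<[i32]> = v.into_boxed_slice();
    /// assert_eq!(&*b, &[1, 2, 3]);
    /// ```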
    #[inline]
    pub fn into_boxed_slice(self) -> ABox<[T], A> {
        let mut this = self;
        this.shrink_to_fit();
        let (ptr, align, len, _) = this.into_raw_parts();
        unsafe {
            ABox::<[T], A>::from_raw_parts(align, core::ptr::slice_from_raw_parts_mut(ptr, len))
        }
    }

    /// Inserts `element` at position `index`, shifting every element after it to the
    /// right.
    ///
    /// # Panics
    ///
    /// Panics if `index > len`.
    #[track_caller]
    pub fn insert(&mut self, index: usize, element: T) {
        #[cold]
        #[inline(never)]
        #[track_caller]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("insertion index (is {index}) should be <= len (is {len})");
        }

        let len = self.len();

        self.reserve(1);

        unsafe {
            let p = self.as_mut_ptr().add(index);
            if index < len {
                core::ptr::copy(p, p.add(1), len - index);
            } else if index == len {
            } else {
                assert_failed(index, len);
            }
            core::ptr::write(p, element);

            self.len += 1;
        }
    }

    /// Removes and returns the element at position `index`, shifting every element
    /// after it to the left.
    ///
    /// # Panics
    ///
    /// Panics if `index >= len`.
    #[track_caller]
    pub fn remove(&mut self, index: usize) -> T {
        #[cold]
        #[inline(never)]
        #[track_caller]
        fn assert_failed(index: usize, len: usize) -> ! {
            panic!("removal index (is {index}) should be < len (is {len})");
        }

        let len = self.len();
        if index >= len {
            assert_failed(index, len);
        }

        unsafe {
            let ptr = self.as_mut_ptr().add(index);
            let ret = core::ptr::read(ptr);

            core::ptr::copy(ptr.add(1), ptr, len - index - 1);

            self.len -= 1;

            ret
        }
    }

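    /// Creates an [`AVec`] with the given alignment from an iterator.
    ///
    /// A short usage sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::AVec;
    ///
    /// let v = AVec::<i32>::from_iter(32, (0..4).map(|i| i * i));
    /// assert_eq!(&*v, &[0, 1, 4, 9]);
    /// assert_eq!(v.as_ptr().align_offset(32), 0);
    /// ```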
    #[inline]
    pub fn from_iter<I: IntoIterator<Item = T>>(align: usize, iter: I) -> Self {
        Self::from_iter_impl(iter.into_iter(), align)
    }

    /// Creates an [`AVec`] with the given alignment by cloning the elements of `slice`.
    #[inline]
    pub fn from_slice(align: usize, slice: &[T]) -> Self
    where
        T: Clone,
    {
        let len = slice.len();
        let mut vec = AVec::with_capacity(align, len);
        {
            let len = &mut vec.len;
            let ptr: *mut T = vec.buf.ptr.as_ptr();

            for (i, item) in slice.iter().enumerate() {
                unsafe { ptr.add(i).write(item.clone()) };
                *len += 1;
            }
        }
        vec
    }

    fn from_iter_impl<I: Iterator<Item = T>>(mut iter: I, align: usize) -> Self {
        let (lower_bound, upper_bound) = iter.size_hint();
        let mut this = Self::with_capacity(align, lower_bound);

        if upper_bound == Some(lower_bound) {
            let len = &mut this.len;
            let ptr = this.buf.ptr.as_ptr();

            let first_chunk = iter.take(lower_bound);
            first_chunk.enumerate().for_each(|(i, item)| {
                unsafe { ptr.add(i).write(item) };
                *len += 1;
            });
        } else {
            let len = &mut this.len;
            let ptr = this.buf.ptr.as_ptr();

            let first_chunk = (&mut iter).take(lower_bound);
            first_chunk.enumerate().for_each(|(i, item)| {
                unsafe { ptr.add(i).write(item) };
                *len += 1;
            });
            iter.for_each(|item| {
                this.push(item);
            });
        }

        this
    }

    /// Sets the length of the vector without dropping or initializing any elements.
    ///
    /// # Safety
    ///
    /// `new_len` must not exceed the capacity, and the first `new_len` elements must
    /// be initialized.
    #[inline]
    pub unsafe fn set_len(&mut self, new_len: usize) {
        self.len = new_len;
    }

    /// Moves all elements of `other` into `self`, leaving `other` empty.
    pub fn append<OtherA: Alignment>(&mut self, other: &mut AVec<T, OtherA>) {
        unsafe {
            let len = self.len();
            let count = other.len();
            self.reserve(count);
            core::ptr::copy_nonoverlapping(other.as_ptr(), self.as_mut_ptr().add(len), count);
            self.len += count;
            other.len = 0;
        }
    }

    #[inline(always)]
    #[doc(hidden)]
    pub fn __from_elem(align: usize, elem: T, count: usize) -> Self
    where
        T: Clone,
    {
        Self::from_iter(align, core::iter::repeat(elem).take(count))
    }

    #[inline(always)]
    #[doc(hidden)]
    pub fn __copy_from_ptr(align: usize, src: *const T, len: usize) -> Self {
        let mut v = Self::with_capacity(align, len);
        let dst = v.as_mut_ptr();
        unsafe { core::ptr::copy_nonoverlapping(src, dst, len) };
        v.len = len;
        v
    }
}

impl<T: Clone, A: Alignment> AVec<T, A> {
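    /// Resizes the vector in place so that its length equals `new_len`, filling new
    /// slots with clones of `value` or truncating as needed.
    ///
    /// A short usage sketch (illustrative; it assumes this crate is built under the
    /// name `aligned_vec`):
    ///
    /// ```
    /// use aligned_vec::AVec;
    ///
    /// let mut v = AVec::<i32>::new(0);
    /// v.resize(3, 7);
    /// assert_eq!(&*v, &[7, 7, 7]);
    /// v.resize(1, 0);
    /// assert_eq!(&*v, &[7]);
    /// ```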
    pub fn resize(&mut self, new_len: usize, value: T) {
        let len = self.len();

        if new_len > len {
            self.extend_with(new_len - len, value)
        } else {
            self.truncate(new_len);
        }
    }

    fn extend_with(&mut self, n: usize, value: T) {
        self.reserve(n);

        unsafe {
            let mut ptr = self.as_mut_ptr().add(self.len());

            // Write `n - 1` clones, then move `value` itself into the last slot.
            for _ in 1..n {
                core::ptr::write(ptr, value.clone());
                ptr = ptr.add(1);
                self.len += 1;
            }

            if n > 0 {
                core::ptr::write(ptr, value);
                self.len += 1;
            }
        }
    }

    /// Clones and appends every element of `other` to the vector.
    pub fn extend_from_slice(&mut self, other: &[T]) {
        let count = other.len();
        self.reserve(count);
        let len = self.len();
        let ptr = unsafe { self.as_mut_ptr().add(len) };
        for (i, item) in other.iter().enumerate() {
            // Clone each element instead of doing a bitwise copy, so that non-`Copy`
            // types are not duplicated without their `Clone` implementation running.
            unsafe { ptr.add(i).write(item.clone()) };
            self.len += 1;
        }
    }
}

impl<T: Debug, A: Alignment> Debug for AVec<T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<T: Debug + ?Sized, A: Alignment> Debug for ABox<T, A> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        (&**self).fmt(f)
    }
}

impl<T: Clone, A: Alignment> Clone for AVec<T, A> {
    fn clone(&self) -> Self {
        Self::from_slice(self.alignment(), self.deref())
    }
}

impl<T: Clone, A: Alignment> Clone for ABox<T, A> {
    fn clone(&self) -> Self {
        ABox::new(self.align.alignment(align_of::<T>()), self.deref().clone())
    }
}

impl<T: Clone, A: Alignment> Clone for ABox<[T], A> {
    fn clone(&self) -> Self {
        AVec::from_slice(self.align.alignment(align_of::<T>()), self.deref()).into_boxed_slice()
    }
}

impl<T: PartialEq, A: Alignment> PartialEq for AVec<T, A> {
    fn eq(&self, other: &Self) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}
impl<T: Eq, A: Alignment> Eq for AVec<T, A> {}
impl<T: PartialOrd, A: Alignment> PartialOrd for AVec<T, A> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}
impl<T: Ord, A: Alignment> Ord for AVec<T, A> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T: PartialEq + ?Sized, A: Alignment> PartialEq for ABox<T, A> {
    fn eq(&self, other: &Self) -> bool {
        (&**self).eq(&**other)
    }
}
impl<T: Eq + ?Sized, A: Alignment> Eq for ABox<T, A> {}
impl<T: PartialOrd + ?Sized, A: Alignment> PartialOrd for ABox<T, A> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        (&**self).partial_cmp(&**other)
    }
}
impl<T: Ord + ?Sized, A: Alignment> Ord for ABox<T, A> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        (&**self).cmp(&**other)
    }
}
unsafe impl<T: Sync, A: Alignment + Sync> Sync for AVec<T, A> {}
unsafe impl<T: Send, A: Alignment + Send> Send for AVec<T, A> {}
unsafe impl<T: ?Sized + Sync, A: Alignment + Sync> Sync for ABox<T, A> {}
unsafe impl<T: ?Sized + Send, A: Alignment + Send> Send for ABox<T, A> {}

#[cfg(feature = "serde")]
mod serde {
    use super::*;
    use ::serde::{Deserialize, Serialize};

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<T: ?Sized + Serialize, A: Alignment> Serialize for ABox<T, A> {
        #[inline]
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: ::serde::Serializer,
        {
            (&**self).serialize(serializer)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<T: Serialize, A: Alignment> Serialize for AVec<T, A> {
        #[inline]
        fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
        where
            S: ::serde::Serializer,
        {
            (&**self).serialize(serializer)
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<'de, T: Deserialize<'de>, const N: usize> Deserialize<'de> for ABox<T, ConstAlign<N>> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: ::serde::Deserializer<'de>,
        {
            Ok(ABox::<T, ConstAlign<N>>::new(
                N,
                T::deserialize(deserializer)?,
            ))
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<'de, T: Deserialize<'de>, const N: usize> Deserialize<'de> for ABox<[T], ConstAlign<N>> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: ::serde::Deserializer<'de>,
        {
            Ok(AVec::<T, ConstAlign<N>>::deserialize(deserializer)?.into_boxed_slice())
        }
    }

    #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
    impl<'de, T: Deserialize<'de>, const N: usize> Deserialize<'de> for AVec<T, ConstAlign<N>> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: ::serde::Deserializer<'de>,
        {
            struct AVecVisitor<T, const N: usize> {
                _marker: PhantomData<fn() -> AVec<T, ConstAlign<N>>>,
            }

            impl<'de, T: Deserialize<'de>, const N: usize> ::serde::de::Visitor<'de> for AVecVisitor<T, N> {
                type Value = AVec<T, ConstAlign<N>>;

                fn expecting(&self, formatter: &mut core::fmt::Formatter) -> core::fmt::Result {
                    formatter.write_str("a sequence")
                }

                fn visit_seq<S>(self, mut seq: S) -> Result<Self::Value, S::Error>
                where
                    S: ::serde::de::SeqAccess<'de>,
                {
                    let mut vec =
                        AVec::<T, ConstAlign<N>>::with_capacity(N, cautious::<T>(seq.size_hint()));

                    while let Some(elem) = seq.next_element::<T>()? {
                        vec.push(elem)
                    }

                    Ok(vec)
                }
            }

            deserializer.deserialize_seq(AVecVisitor {
                _marker: PhantomData,
            })
        }
    }

    pub fn cautious<Element>(hint: Option<usize>) -> usize {
        use core::{cmp, mem};

        const MAX_PREALLOC_BYTES: usize = 1024 * 1024;

        if mem::size_of::<Element>() == 0 {
            0
        } else {
            cmp::min(
                hint.unwrap_or(0),
                MAX_PREALLOC_BYTES / mem::size_of::<Element>(),
            )
        }
    }
}

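/// Creates an [`AVec`] containing the given elements, with a compile-time alignment
/// ([`ConstAlign`]) written as `avec![[ALIGN]| ...]`, or the default cacheline
/// alignment when the prefix is omitted.
///
/// A short usage sketch (illustrative; it assumes this crate is built under the name
/// `aligned_vec`):
///
/// ```
/// use aligned_vec::avec;
///
/// let v = avec![1, 2, 3];
/// assert_eq!(&*v, &[1, 2, 3]);
///
/// let w = avec![[4096]| 0u8; 16];
/// assert_eq!(w.alignment(), 4096);
/// assert_eq!(w.as_ptr().align_offset(4096), 0);
/// ```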
#[macro_export]
macro_rules! avec {
    () => {
        $crate::AVec::<_>::new(0)
    };
    ([$align: expr]| ) => {
        $crate::AVec::<_, $crate::ConstAlign::<$align>>::new(0)
    };
    ([$align: expr]| $elem: expr; $count: expr) => {
        $crate::AVec::<_, $crate::ConstAlign::<$align>>::__from_elem(0, $elem, $count)
    };
    ([$align: expr]| $($elem: expr),*) => {
        {
            let __data = &::core::mem::ManuallyDrop::new([$($elem,)*]);
            let __len = __data.len();
            let __ptr = __data.as_ptr();
            let mut __aligned_vec =
                $crate::AVec::<_, $crate::ConstAlign::<$align>>::__copy_from_ptr(0, __ptr, __len);
            __aligned_vec
        }
    };
    ($elem: expr; $count: expr) => {
        $crate::AVec::<_>::__from_elem(0, $elem, $count)
    };
    ($($elem: expr),*) => {
        {
            let __data = &::core::mem::ManuallyDrop::new([$($elem,)*]);
            let __len = __data.len();
            let __ptr = __data.as_ptr();
            let mut __aligned_vec = $crate::AVec::<_>::__copy_from_ptr(0, __ptr, __len);
            __aligned_vec
        }
    };
}

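/// Creates an [`AVec`] with a runtime alignment ([`RuntimeAlign`]), written as
/// `avec_rt![[align]| ...]`.
///
/// A short usage sketch (illustrative; it assumes this crate is built under the name
/// `aligned_vec`):
///
/// ```
/// use aligned_vec::avec_rt;
///
/// let v = avec_rt![[64]| 1.0f64; 8];
/// assert_eq!(v.alignment(), 64);
/// assert_eq!(v.as_ptr().align_offset(64), 0);
/// ```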
#[macro_export]
macro_rules! avec_rt {
    ([$align: expr]$(|)?) => {
        $crate::AVec::<_, $crate::RuntimeAlign>::new($align)
    };
    ([$align: expr]| $elem: expr; $count: expr) => {
        $crate::AVec::<_, $crate::RuntimeAlign>::__from_elem($align, $elem, $count)
    };
    ([$align: expr]| $($elem: expr),*) => {
        {
            let __data = &::core::mem::ManuallyDrop::new([$($elem,)*]);
            let __len = __data.len();
            let __ptr = __data.as_ptr();
            let mut __aligned_vec =
                $crate::AVec::<_, $crate::RuntimeAlign>::__copy_from_ptr($align, __ptr, __len);
            __aligned_vec
        }
    };
}

#[cfg(test)]
mod tests {
    use super::*;
    use alloc::vec;
    use core::iter::repeat;
    use equator::assert;

    #[test]
    fn new() {
        let v = AVec::<i32>::new(32);
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), 0);
        assert_eq!(v.alignment(), CACHELINE_ALIGN);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);
        let v = AVec::<()>::new(32);
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), usize::MAX);
        assert_eq!(v.alignment(), CACHELINE_ALIGN);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);

        #[repr(align(4096))]
        struct OverAligned;
        let v = AVec::<OverAligned>::new(32);
        assert_eq!(v.len(), 0);
        assert_eq!(v.capacity(), usize::MAX);
        assert_eq!(v.alignment(), 4096);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);
        assert_eq!(v.as_ptr().align_offset(4096), 0);
    }

    #[test]
    fn collect() {
        let v = AVec::<_>::from_iter(64, 0..4);
        assert_eq!(&*v, &[0, 1, 2, 3]);
        let v = AVec::<_>::from_iter(64, repeat(()).take(4));
        assert_eq!(&*v, &[(), (), (), ()]);
    }

    #[test]
    fn push() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(&*v, &[0, 1, 2, 3]);

        let mut v = AVec::<_>::from_iter(64, 0..4);
        v.push(4);
        v.push(5);
        v.push(6);
        v.push(7);
        assert_eq!(&*v, &[0, 1, 2, 3, 4, 5, 6, 7]);

        let mut v = AVec::<_>::from_iter(64, repeat(()).take(4));
        v.push(());
        v.push(());
        v.push(());
        v.push(());
        assert_eq!(&*v, &[(), (), (), (), (), (), (), ()]);
    }

    #[test]
    fn insert() {
        let mut v = AVec::<i32>::new(16);
        v.insert(0, 1);
        v.insert(1, 3);
        v.insert(1, 2);
        v.insert(0, 0);
        assert_eq!(&*v, &[0, 1, 2, 3]);

        let mut v = AVec::<_>::from_iter(64, 0..4);
        v.insert(0, -1);
        v.insert(5, 5);
        v.insert(5, 4);
        v.insert(1, 0);
        v.insert(2, 0);
        assert_eq!(&*v, &[-1, 0, 0, 0, 1, 2, 3, 4, 5]);

        let mut v = AVec::<_>::from_iter(64, repeat(()).take(4));
        v.insert(3, ());
        v.insert(0, ());
        v.insert(2, ());
        v.insert(7, ());
        assert_eq!(&*v, &[(), (), (), (), (), (), (), ()]);
    }

    #[test]
    fn pop() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.pop(), Some(3));
        assert_eq!(v.pop(), Some(2));
        assert_eq!(v.pop(), Some(1));
        assert_eq!(v.pop(), Some(0));
        assert_eq!(v.pop(), None);
        assert_eq!(v.pop(), None);
        assert_eq!(&*v, &[]);
        assert!(v.is_empty());

        let mut v = AVec::<()>::new(16);
        v.push(());
        v.push(());
        v.push(());
        v.push(());
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), Some(()));
        assert_eq!(v.pop(), None);
        assert_eq!(v.pop(), None);
        assert_eq!(&*v, &[]);
        assert!(v.is_empty());
    }

    #[test]
    fn remove() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);
        v.push(3);
        assert_eq!(v.remove(2), 2);
        assert_eq!(v.remove(2), 3);
        assert_eq!(v.remove(0), 0);
        assert_eq!(v.remove(0), 1);
        assert_eq!(&*v, &[]);
        assert!(v.is_empty());

        let mut v = AVec::<()>::new(16);
        v.push(());
        v.push(());
        v.push(());
        v.push(());
        assert_eq!(v.remove(0), ());
        assert_eq!(v.remove(0), ());
        assert_eq!(v.remove(0), ());
        assert_eq!(v.remove(0), ());
        assert_eq!(&*v, &[]);
        assert!(v.is_empty());
    }

    #[test]
    fn shrink() {
        let mut v = AVec::<i32>::with_capacity(16, 10);
        v.push(0);
        v.push(1);
        v.push(2);

        assert_eq!(v.capacity(), 10);
        v.shrink_to_fit();
        assert_eq!(v.len(), 3);
        assert_eq!(v.capacity(), 3);

        let mut v = AVec::<i32>::with_capacity(16, 10);
        v.push(0);
        v.push(1);
        v.push(2);

        assert_eq!(v.capacity(), 10);
        v.shrink_to(0);
        assert_eq!(v.len(), 3);
        assert_eq!(v.capacity(), 3);
    }

    #[test]
    fn truncate() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);

        v.truncate(1);
        assert_eq!(v.len(), 1);
        assert_eq!(&*v, &[0]);

        v.clear();
        assert_eq!(v.len(), 0);
        assert_eq!(&*v, &[]);

        let mut v = AVec::<()>::new(16);
        v.push(());
        v.push(());
        v.push(());

        v.truncate(1);
        assert_eq!(v.len(), 1);
        assert_eq!(&*v, &[()]);

        v.clear();
        assert_eq!(v.len(), 0);
        assert_eq!(&*v, &[]);
    }

    #[test]
    fn extend_from_slice() {
        let mut v = AVec::<i32>::new(16);
        v.extend_from_slice(&[0, 1, 2, 3]);
        v.extend_from_slice(&[4, 5, 6, 7, 8]);
        assert_eq!(&*v, &[0, 1, 2, 3, 4, 5, 6, 7, 8]);

        let mut v = AVec::<()>::new(16);
        v.extend_from_slice(&[(), (), (), ()]);
        v.extend_from_slice(&[(), (), ()]);
        assert_eq!(&*v, &[(), (), (), (), (), (), ()]);
    }

    #[test]
    fn resize() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);

        v.resize(1, 10);
        assert_eq!(v.len(), 1);
        assert_eq!(&*v, &[0]);

        v.resize(3, 20);
        assert_eq!(v.len(), 3);
        assert_eq!(&*v, &[0, 20, 20]);

        let mut v = AVec::<()>::new(16);
        v.push(());
        v.push(());
        v.push(());

        v.resize(2, ());
        assert_eq!(v.len(), 2);
        assert_eq!(&*v, &[(), ()]);

        v.resize(3, ());
        assert_eq!(v.len(), 3);
        assert_eq!(&*v, &[(), (), ()]);
    }

    #[test]
    fn into_boxed_slice() {
        let mut v = AVec::<i32>::new(16);
        v.push(0);
        v.push(1);
        v.push(2);

        let boxed = v.into_boxed_slice();
        assert_eq!(&*boxed, &[0, 1, 2]);
    }

    #[test]
    fn box_new() {
        let boxed = ABox::<_>::new(64, 3);
        assert_eq!(&*boxed, &3);
    }

    #[test]
    fn box_clone() {
        let boxed = ABox::<_>::new(64, 3);
        assert_eq!(boxed, boxed.clone());
    }

    #[test]
    fn box_slice_clone() {
        let boxed = AVec::<_>::from_iter(64, 0..123).into_boxed_slice();
        assert_eq!(boxed, boxed.clone());
    }

    #[test]
    fn macros() {
        let u: AVec<()> = avec![];
        assert_eq!(u.len(), 0);
        assert_eq!(u.as_ptr().align_offset(CACHELINE_ALIGN), 0);

        let v = avec![0; 4];
        assert_eq!(v.len(), 4);
        assert_eq!(v.as_ptr().align_offset(CACHELINE_ALIGN), 0);

        let mut w = avec![vec![0, 1], vec![3, 4], vec![5, 6], vec![7, 8]];
        w[0].push(2);
        w[3].pop();
        assert_eq!(w.len(), 4);
        assert_eq!(w.as_ptr().align_offset(CACHELINE_ALIGN), 0);
        assert_eq!(w[0], vec![0, 1, 2]);
        assert_eq!(w[1], vec![3, 4]);
        assert_eq!(w[2], vec![5, 6]);
        assert_eq!(w[3], vec![7]);
    }

    #[test]
    fn macros_2() {
        let u: AVec<(), _> = avec![[4096]| ];
        assert_eq!(u.len(), 0);
        assert_eq!(u.as_ptr().align_offset(4096), 0);

        let v = avec![[4096]| 0; 4];
        assert_eq!(v.len(), 4);
        assert_eq!(v.as_ptr().align_offset(4096), 0);

        let mut w = avec![[4096] | vec![0, 1], vec![3, 4], vec![5, 6], vec![7, 8]];
        w[0].push(2);
        w[3].pop();
        assert_eq!(w.len(), 4);
        assert_eq!(w.as_ptr().align_offset(4096), 0);
        assert_eq!(w[0], vec![0, 1, 2]);
        assert_eq!(w[1], vec![3, 4]);
        assert_eq!(w[2], vec![5, 6]);
        assert_eq!(w[3], vec![7]);
    }

    #[test]
    fn macros_rt() {
        let u: AVec<(), _> = avec_rt![[32]];
        assert_eq!(u.len(), 0);
        assert_eq!(u.as_ptr().align_offset(32), 0);

        let v = avec_rt![[32]| 0; 4];
        assert_eq!(v.len(), 4);
        assert_eq!(v.as_ptr().align_offset(32), 0);

        let mut w = avec_rt![[64] | vec![0, 1], vec![3, 4], vec![5, 6], vec![7, 8]];
        w[0].push(2);
        w[3].pop();
        assert_eq!(w.len(), 4);
        assert_eq!(w.as_ptr().align_offset(64), 0);
        assert_eq!(w[0], vec![0, 1, 2]);
        assert_eq!(w[1], vec![3, 4]);
        assert_eq!(w[2], vec![5, 6]);
        assert_eq!(w[3], vec![7]);
    }
}

#[cfg(all(test, feature = "serde"))]
mod serde_tests {
    use super::*;

    use ::serde::Deserialize;
    use alloc::vec;
    use bincode::{DefaultOptions, Deserializer, Options};

    #[test]
    fn can_limit_deserialization_size() {
        let ser = vec![
            253, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 1, 1, 1, 1, 1, 1, 1, 1,
        ];

        let options = DefaultOptions::new().with_limit(12);

        let mut deserializer = Deserializer::from_slice(&ser, options);
        let result = <AVec<u32> as Deserialize>::deserialize(&mut deserializer);

        let err = match result {
            Ok(_) => panic!("Expected a failure"),
            Err(e) => e,
        };

        match *err {
            bincode::ErrorKind::SizeLimit => {}
            _ => panic!("Expected ErrorKind::SizeLimit, got {err:#?}"),
        };
    }
}