use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering;
use core::fmt::{Debug, Error, Formatter};
use core::hash::{Hash, Hasher};
use core::iter::FromIterator;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::ptr::NonNull;
use core::slice::{from_raw_parts, from_raw_parts_mut, Iter as SliceIter, IterMut as SliceIterMut};

mod iter;
pub use self::iter::{Drain, Iter};
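
// `InlineArray<A, T>` is a fixed capacity array of `A`, laid out inside the
// same size and alignment as the type `T`. The backing storage holds a
// `usize` length header plus the elements: the header is placed before the
// elements when `usize`'s alignment is at least `A`'s (see `HEADER_FIRST`
// below), and after them otherwise. The zero sized `_header_align` field
// bumps the struct's alignment up to that of `(u64, usize)` so the header is
// always properly aligned, and `#[repr(C)]` keeps `data` at offset zero
// (both properties are checked by the assertions in `new()`).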
#[repr(C)]
pub struct InlineArray<A, T> {
    _header_align: [(u64, usize); 0],
    _phantom: PhantomData<A>,
    data: MaybeUninit<T>,
}

const fn capacity(
    host_size: usize,
    header_size: usize,
    element_size: usize,
    element_align: usize,
    container_align: usize,
) -> usize {
    if element_size == 0 {
        usize::MAX
    } else if element_align <= container_align && host_size > header_size {
        (host_size - header_size) / element_size
    } else {
        0
    }
}

impl<A, T> InlineArray<A, T> {
    const HOST_SIZE: usize = mem::size_of::<T>();
    const ELEMENT_SIZE: usize = mem::size_of::<A>();
    const HEADER_SIZE: usize = mem::size_of::<usize>();
    // Do we place the length header before or after the elements?
    const HEADER_FIRST: bool = mem::align_of::<usize>() >= mem::align_of::<A>();
    // Note: exactly one of the following two is always zero.
    // How many `usize`s to skip before the first element?
    const ELEMENT_SKIP: usize = Self::HEADER_FIRST as usize;
    // How many elements to skip before the header?
    const HEADER_SKIP: usize = Self::CAPACITY * (1 - Self::ELEMENT_SKIP);

    pub const CAPACITY: usize = capacity(
        Self::HOST_SIZE,
        Self::HEADER_SIZE,
        Self::ELEMENT_SIZE,
        mem::align_of::<A>(),
        mem::align_of::<Self>(),
    );
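
    // Worked example, assuming a 64-bit target (illustrative, not a
    // compile-time check): for `InlineArray<u32, [usize; 4]>`, HOST_SIZE is
    // 32 bytes, HEADER_SIZE is 8 and ELEMENT_SIZE is 4, so CAPACITY is
    // (32 - 8) / 4 = 6 elements alongside the length header.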

    #[inline]
    #[must_use]
    unsafe fn len_const(&self) -> *const usize {
        let ptr = self
            .data
            .as_ptr()
            .cast::<A>()
            .add(Self::HEADER_SKIP)
            .cast::<usize>();
        debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    pub(crate) unsafe fn len_mut(&mut self) -> *mut usize {
        let ptr = self
            .data
            .as_mut_ptr()
            .cast::<A>()
            .add(Self::HEADER_SKIP)
            .cast::<usize>();
        debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    pub(crate) unsafe fn data(&self) -> *const A {
        // A zero capacity array has no element storage, so hand out a
        // well-aligned dangling pointer instead.
        if Self::CAPACITY == 0 {
            return NonNull::<A>::dangling().as_ptr();
        }
        let ptr = self
            .data
            .as_ptr()
            .cast::<usize>()
            .add(Self::ELEMENT_SKIP)
            .cast::<A>();
        debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    unsafe fn data_mut(&mut self) -> *mut A {
        if Self::CAPACITY == 0 {
            return NonNull::<A>::dangling().as_ptr();
        }
        let ptr = self
            .data
            .as_mut_ptr()
            .cast::<usize>()
            .add(Self::ELEMENT_SKIP)
            .cast::<A>();
        debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
        ptr
    }

    #[inline]
    #[must_use]
    unsafe fn ptr_at(&self, index: usize) -> *const A {
        debug_assert!(index < Self::CAPACITY);
        self.data().add(index)
    }

    #[inline]
    #[must_use]
    unsafe fn ptr_at_mut(&mut self, index: usize) -> *mut A {
        debug_assert!(index < Self::CAPACITY);
        self.data_mut().add(index)
    }

    #[inline]
    unsafe fn read_at(&self, index: usize) -> A {
        ptr::read(self.ptr_at(index))
    }

    #[inline]
    unsafe fn write_at(&mut self, index: usize, value: A) {
        ptr::write(self.ptr_at_mut(index), value);
    }

    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        unsafe { *self.len_const() }
    }

    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    #[must_use]
    pub fn is_full(&self) -> bool {
        self.len() >= Self::CAPACITY
    }

    #[inline]
    #[must_use]
    pub fn new() -> Self {
        assert!(Self::HOST_SIZE > Self::HEADER_SIZE);
        assert!(
            (Self::CAPACITY == 0) || (mem::align_of::<Self>() % mem::align_of::<A>() == 0),
            "InlineArray can't satisfy alignment of {}",
            core::any::type_name::<A>()
        );

        let mut self_ = Self {
            _header_align: [],
            _phantom: PhantomData,
            data: MaybeUninit::uninit(),
        };
        // Sanity check our layout assumptions before touching raw pointers.
        assert_eq!(
            &self_ as *const _ as usize,
            self_.data.as_ptr() as usize,
            "Padding at the start of struct",
        );
        assert_eq!(
            self_.data.as_ptr() as usize % mem::align_of::<usize>(),
            0,
            "Unaligned header"
        );
        assert!(
            mem::size_of::<Self>() == mem::size_of::<T>()
                || mem::align_of::<T>() < mem::align_of::<Self>()
        );
        assert_eq!(0, unsafe { self_.data() } as usize % mem::align_of::<A>());
        assert_eq!(0, unsafe { self_.data_mut() } as usize % mem::align_of::<A>());
        assert!(Self::ELEMENT_SKIP == 0 || Self::HEADER_SKIP == 0);
        unsafe { ptr::write(self_.len_mut(), 0usize) };
        self_
    }
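
    // A minimal usage sketch for the stack-like operations below
    // (hypothetical types; the exact capacity depends on pointer width):
    //
    //     let mut arr: InlineArray<u64, [u64; 8]> = InlineArray::new();
    //     arr.push(1);
    //     arr.push(2);
    //     assert_eq!(Some(2), arr.pop());
    //     assert_eq!(1, arr.len());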

    pub fn push(&mut self, value: A) {
        if self.is_full() {
            panic!("InlineArray::push: chunk size overflow");
        }
        unsafe {
            self.write_at(self.len(), value);
            *self.len_mut() += 1;
        }
    }

    pub fn pop(&mut self) -> Option<A> {
        if self.is_empty() {
            None
        } else {
            unsafe {
                *self.len_mut() -= 1;
            }
            Some(unsafe { self.read_at(self.len()) })
        }
    }
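
    // Sketch of the positional operations below (hypothetical values): both
    // `insert` and `remove` shift the tail of the array by one element with
    // `ptr::copy`, so they cost O(n) in the distance from `index` to the end.
    //
    //     let mut arr: InlineArray<u64, [u64; 8]> = InlineArray::new();
    //     arr.push(1);
    //     arr.push(3);
    //     arr.insert(1, 2); // arr is now [1, 2, 3]
    //     assert_eq!(Some(2), arr.remove(1)); // back to [1, 3]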

    pub fn insert(&mut self, index: usize, value: A) {
        if self.is_full() {
            panic!("InlineArray::insert: chunk size overflow");
        }
        if index > self.len() {
            panic!("InlineArray::insert: index out of bounds");
        }
        unsafe {
            let src = self.ptr_at_mut(index);
            // Shift the tail right by one, then write the new element in place.
            ptr::copy(src, src.add(1), self.len() - index);
            ptr::write(src, value);
            *self.len_mut() += 1;
        }
    }

    pub fn remove(&mut self, index: usize) -> Option<A> {
        if index >= self.len() {
            None
        } else {
            unsafe {
                let src = self.ptr_at_mut(index);
                let value = ptr::read(src);
                *self.len_mut() -= 1;
                // Shift the tail left by one to close the gap.
                ptr::copy(src.add(1), src, self.len() - index);
                Some(value)
            }
        }
    }
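
    // Sketch (hypothetical values): `split_off(index)` moves everything from
    // `index` onwards into a fresh array, leaving `self` truncated:
    //
    //     let mut arr: InlineArray<u64, [u64; 8]> = InlineArray::new();
    //     arr.extend(0..4);
    //     let tail = arr.split_off(2);
    //     // arr is now [0, 1] and tail is [2, 3]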

    pub fn split_off(&mut self, index: usize) -> Self {
        if index > self.len() {
            panic!("InlineArray::split_off: index out of bounds");
        }
        let mut out = Self::new();
        if index < self.len() {
            unsafe {
                ptr::copy(self.ptr_at(index), out.data_mut(), self.len() - index);
                *out.len_mut() = self.len() - index;
                *self.len_mut() = index;
            }
        }
        out
    }

    #[inline]
    unsafe fn drop_contents(&mut self) {
        // Drop the elements in place through the slice view; the length
        // header is left untouched.
        ptr::drop_in_place::<[A]>(&mut **self)
    }

    pub fn clear(&mut self) {
        unsafe {
            self.drop_contents();
            *self.len_mut() = 0;
        }
    }

    pub fn drain(&mut self) -> Drain<'_, A, T> {
        Drain { array: self }
    }
}

impl<A, T> Drop for InlineArray<A, T> {
    fn drop(&mut self) {
        unsafe { self.drop_contents() }
    }
}

impl<A, T> Default for InlineArray<A, T> {
    fn default() -> Self {
        Self::new()
    }
}

impl<A, T> Clone for InlineArray<A, T>
where
    A: Clone,
{
    fn clone(&self) -> Self {
        let mut copy = Self::new();
        for i in 0..self.len() {
            unsafe {
                copy.write_at(i, self.get_unchecked(i).clone());
            }
        }
        unsafe {
            *copy.len_mut() = self.len();
        }
        copy
    }
}

impl<A, T> Deref for InlineArray<A, T> {
    type Target = [A];
    fn deref(&self) -> &Self::Target {
        unsafe { from_raw_parts(self.data(), self.len()) }
    }
}

impl<A, T> DerefMut for InlineArray<A, T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        unsafe { from_raw_parts_mut(self.data_mut(), self.len()) }
    }
}
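
// Because `InlineArray` derefs to `[A]`, the whole slice API is available on
// it directly; a brief sketch (hypothetical values):
//
//     let mut arr: InlineArray<u64, [u64; 8]> = InlineArray::new();
//     arr.extend(0..4);
//     assert_eq!(6, arr.iter().sum::<u64>());
//     arr[..].reverse(); // arr is now [3, 2, 1, 0]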

impl<A, T> Borrow<[A]> for InlineArray<A, T> {
    fn borrow(&self) -> &[A] {
        self.deref()
    }
}

impl<A, T> BorrowMut<[A]> for InlineArray<A, T> {
    fn borrow_mut(&mut self) -> &mut [A] {
        self.deref_mut()
    }
}

impl<A, T> AsRef<[A]> for InlineArray<A, T> {
    fn as_ref(&self) -> &[A] {
        self.deref()
    }
}

impl<A, T> AsMut<[A]> for InlineArray<A, T> {
    fn as_mut(&mut self) -> &mut [A] {
        self.deref_mut()
    }
}

impl<A, T, Slice> PartialEq<Slice> for InlineArray<A, T>
where
    Slice: Borrow<[A]>,
    A: PartialEq,
{
    fn eq(&self, other: &Slice) -> bool {
        self.deref() == other.borrow()
    }
}

impl<A, T> Eq for InlineArray<A, T> where A: Eq {}

impl<A, T> PartialOrd for InlineArray<A, T>
where
    A: PartialOrd,
{
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}

impl<A, T> Ord for InlineArray<A, T>
where
    A: Ord,
{
    fn cmp(&self, other: &Self) -> Ordering {
        self.iter().cmp(other.iter())
    }
}

impl<A, T> Debug for InlineArray<A, T>
where
    A: Debug,
{
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        f.write_str("InlineArray")?;
        f.debug_list().entries(self.iter()).finish()
    }
}

impl<A, T> Hash for InlineArray<A, T>
where
    A: Hash,
{
    fn hash<H>(&self, hasher: &mut H)
    where
        H: Hasher,
    {
        for item in self {
            item.hash(hasher)
        }
    }
}

impl<A, T> IntoIterator for InlineArray<A, T> {
    type Item = A;
    type IntoIter = Iter<A, T>;
    fn into_iter(self) -> Self::IntoIter {
        Iter { array: self }
    }
}

impl<A, T> FromIterator<A> for InlineArray<A, T> {
    fn from_iter<I>(it: I) -> Self
    where
        I: IntoIterator<Item = A>,
    {
        let mut chunk = Self::new();
        for item in it {
            chunk.push(item);
        }
        chunk
    }
}
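
// Sketch of `FromIterator` in use (hypothetical values): `collect` succeeds
// as long as the source iterator fits within `CAPACITY`, since `push` panics
// on overflow.
//
//     let chunk: InlineArray<u64, [u64; 8]> = (0..4).collect();
//     assert_eq!(4, chunk.len());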

impl<'a, A, T> IntoIterator for &'a InlineArray<A, T> {
    type Item = &'a A;
    type IntoIter = SliceIter<'a, A>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<'a, A, T> IntoIterator for &'a mut InlineArray<A, T> {
    type Item = &'a mut A;
    type IntoIter = SliceIterMut<'a, A>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter_mut()
    }
}

impl<A, T> Extend<A> for InlineArray<A, T> {
    fn extend<I>(&mut self, it: I)
    where
        I: IntoIterator<Item = A>,
    {
        for item in it {
            self.push(item);
        }
    }
}

impl<'a, A, T> Extend<&'a A> for InlineArray<A, T>
where
    A: 'a + Copy,
{
    fn extend<I>(&mut self, it: I)
    where
        I: IntoIterator<Item = &'a A>,
    {
        for item in it {
            self.push(*item);
        }
    }
}
603
604#[cfg(test)]
605mod test {
606 use super::*;
607 use crate::tests::DropTest;
608 use std::sync::atomic::{AtomicUsize, Ordering};
609
610 #[test]
611 fn dropping() {
612 let counter = AtomicUsize::new(0);
613 {
614 let mut chunk: InlineArray<DropTest<'_>, [usize; 32]> = InlineArray::new();
615 for _i in 0..16 {
616 chunk.push(DropTest::new(&counter));
617 }
618 assert_eq!(16, counter.load(Ordering::Relaxed));
619 for _i in 0..8 {
620 chunk.pop();
621 }
622 assert_eq!(8, counter.load(Ordering::Relaxed));
623 }
624 assert_eq!(0, counter.load(Ordering::Relaxed));
625 }

    #[test]
    fn zero_sized_values() {
        let mut chunk: InlineArray<(), [usize; 32]> = InlineArray::new();
        for _i in 0..65536 {
            chunk.push(());
        }
        assert_eq!(65536, chunk.len());
        assert_eq!(Some(()), chunk.pop());
    }

    #[test]
    fn low_align_base() {
        let mut chunk: InlineArray<String, [u8; 512]> = InlineArray::new();
        chunk.push("Hello".to_owned());
        assert_eq!(chunk[0], "Hello");

        let mut chunk: InlineArray<String, [u16; 512]> = InlineArray::new();
        chunk.push("Hello".to_owned());
        assert_eq!(chunk[0], "Hello");
    }

    #[test]
    fn float_align() {
        let mut chunk: InlineArray<f64, [u8; 16]> = InlineArray::new();
        chunk.push(1234.);
        assert_eq!(chunk[0], 1234.);

        let mut chunk: InlineArray<f64, [u8; 17]> = InlineArray::new();
        chunk.push(1234.);
        assert_eq!(chunk[0], 1234.);
    }

    #[test]
    fn recursive_types_compile() {
        #[allow(dead_code)]
        enum Recursive {
            A(InlineArray<Recursive, u64>),
            B,
        }
    }

    #[test]
    fn insufficient_alignment1() {
        #[repr(align(256))]
        struct BigAlign(u8);
        #[repr(align(32))]
        struct MediumAlign(u8);

        assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [u64; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [f64; 256]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [MediumAlign; 256]>::CAPACITY);
    }

    #[test]
    fn insufficient_alignment2() {
        #[repr(align(256))]
        struct BigAlign(usize);

        let mut bad: InlineArray<BigAlign, [usize; 256]> = InlineArray::new();
        assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
        assert_eq!(0, bad.len());
        assert_eq!(0, bad[..].len());
        assert!(bad.is_full());
        assert_eq!(0, bad.drain().count());
        assert!(bad.pop().is_none());
        assert!(bad.remove(0).is_none());
        assert!(bad.split_off(0).is_full());
        bad.clear();
    }

    #[test]
    fn sufficient_alignment1() {
        #[repr(align(256))]
        struct BigAlign(u8);

        assert_eq!(13, InlineArray::<BigAlign, [BigAlign; 14]>::CAPACITY);
        assert_eq!(1, InlineArray::<BigAlign, [BigAlign; 2]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [BigAlign; 1]>::CAPACITY);

        let mut chunk: InlineArray<BigAlign, [BigAlign; 2]> = InlineArray::new();
        chunk.push(BigAlign(42));
        assert_eq!(
            chunk.get(0).unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
            0
        );
    }

    #[test]
    fn sufficient_alignment2() {
        #[repr(align(128))]
        struct BigAlign([u8; 64]);
        #[repr(align(256))]
        struct BiggerAlign(u8);
        assert_eq!(128, mem::align_of::<BigAlign>());
        assert_eq!(256, mem::align_of::<BiggerAlign>());

        assert_eq!(199, InlineArray::<BigAlign, [BiggerAlign; 100]>::CAPACITY);
        assert_eq!(3, InlineArray::<BigAlign, [BiggerAlign; 2]>::CAPACITY);
        assert_eq!(1, InlineArray::<BigAlign, [BiggerAlign; 1]>::CAPACITY);
        assert_eq!(0, InlineArray::<BigAlign, [BiggerAlign; 0]>::CAPACITY);

        let mut chunk: InlineArray<BigAlign, [BiggerAlign; 1]> = InlineArray::new();
        chunk.push(BigAlign([0; 64]));
        assert_eq!(
            chunk.get(0).unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
            0
        );
    }
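
    // A supplementary test (not part of the original suite) exercising
    // insert, remove and split_off round trips.
    #[test]
    fn insert_remove_split_off() {
        let mut chunk: InlineArray<usize, [usize; 32]> = InlineArray::new();
        for i in 0..8 {
            chunk.push(i);
        }
        chunk.insert(0, 100);
        assert_eq!(9, chunk.len());
        assert_eq!(Some(100), chunk.remove(0));
        let tail = chunk.split_off(4);
        assert_eq!(&*chunk, &[0, 1, 2, 3]);
        assert_eq!(&*tail, &[4, 5, 6, 7]);
    }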
}