use core::borrow::{Borrow, BorrowMut};
use core::cmp::Ordering;
use core::fmt::{Debug, Error, Formatter};
use core::hash::{Hash, Hasher};
use core::iter::FromIterator;
use core::marker::PhantomData;
use core::mem::{self, MaybeUninit};
use core::ops::{Deref, DerefMut};
use core::ptr;
use core::ptr::NonNull;
use core::slice::{from_raw_parts, from_raw_parts_mut, Iter as SliceIter, IterMut as SliceIterMut};
mod iter;
pub use self::iter::{Drain, Iter};
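/// A fixed-capacity array of `A` elements laid out inside the memory
/// footprint of some type `T`, preceded by a `usize` length header.
///
/// `T` is never constructed; it only determines the size, and contributes to
/// the alignment, of the backing storage. The number of elements that fit is
/// exposed as [`InlineArray::CAPACITY`].
///
/// # Examples
///
/// A minimal usage sketch (the import is assumed; bring `InlineArray` into
/// scope from wherever this module is exported in your crate):
///
/// ```ignore
/// let mut array = InlineArray::<usize, [usize; 32]>::new();
/// array.push(123);
/// assert_eq!(Some(&123), array.get(0));
/// assert_eq!(Some(123), array.pop());
/// assert!(array.is_empty());
/// ```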
#[repr(C)]
pub struct InlineArray<A, T> {
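// Zero-sized field that raises the struct's alignment to at least that of
// `(u64, usize)`, so the length header can always be stored aligned even when
// `T` has a smaller alignment. Element types with a stricter alignment than
// the container are handled by giving them a `CAPACITY` of zero instead.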
_header_align: [(u64, usize); 0],
_phantom: PhantomData<A>,
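// The length header and the element slots both live inside this `T`-sized
// blob; a `T` value is never actually constructed.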
data: MaybeUninit<T>,
}
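// How many `element_size`-byte elements fit into `host_size` bytes once
// `header_size` bytes are reserved for the length header. Zero-sized elements
// fit in unlimited numbers, and elements whose alignment the container cannot
// satisfy do not fit at all.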
const fn capacity(
host_size: usize,
header_size: usize,
element_size: usize,
element_align: usize,
container_align: usize,
) -> usize {
if element_size == 0 {
usize::MAX
} else if element_align <= container_align && host_size > header_size {
(host_size - header_size) / element_size
} else {
0
}
}
impl<A, T> InlineArray<A, T> {
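// Layout: the `usize` length header and the element slots share the space of
// `T`. If `usize` has the stricter (or equal) alignment, the header goes
// first and the elements follow it (`ELEMENT_SKIP == 1`, `HEADER_SKIP == 0`);
// otherwise the elements go first and the header sits after the last element
// slot (`ELEMENT_SKIP == 0`, `HEADER_SKIP == CAPACITY`).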
const HOST_SIZE: usize = mem::size_of::<T>();
const ELEMENT_SIZE: usize = mem::size_of::<A>();
const HEADER_SIZE: usize = mem::size_of::<usize>();
const HEADER_FIRST: bool = mem::align_of::<usize>() >= mem::align_of::<A>();
const ELEMENT_SKIP: usize = Self::HEADER_FIRST as usize;
const HEADER_SKIP: usize = Self::CAPACITY * (1 - Self::ELEMENT_SKIP);
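/// The maximum number of elements the array can hold.
///
/// This is `usize::MAX` for zero-sized element types, and zero when the
/// container is too small or cannot satisfy the element type's alignment.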
pub const CAPACITY: usize = capacity(
Self::HOST_SIZE,
Self::HEADER_SIZE,
Self::ELEMENT_SIZE,
mem::align_of::<A>(),
mem::align_of::<Self>(),
);
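// Raw pointer to the `usize` length header, found by skipping `HEADER_SKIP`
// element slots from the start of the data blob.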
#[inline]
#[must_use]
unsafe fn len_const(&self) -> *const usize {
let ptr = self
.data
.as_ptr()
.cast::<A>()
.add(Self::HEADER_SKIP)
.cast::<usize>();
debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
ptr
}
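/// Mutable raw pointer to the `usize` length header.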
#[inline]
#[must_use]
pub(crate) unsafe fn len_mut(&mut self) -> *mut usize {
let ptr = self
.data
.as_mut_ptr()
.cast::<A>()
.add(Self::HEADER_SKIP)
.cast::<usize>();
debug_assert!(ptr as usize % mem::align_of::<usize>() == 0);
ptr
}
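/// Raw pointer to the first element slot, found by skipping `ELEMENT_SKIP`
/// `usize`s (the header, when it is stored first) from the start of the data
/// blob. When `CAPACITY` is zero, a well-aligned dangling pointer is returned
/// instead, which is all that is needed to form empty slices.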
#[inline]
#[must_use]
pub(crate) unsafe fn data(&self) -> *const A {
if Self::CAPACITY == 0 {
return NonNull::<A>::dangling().as_ptr();
}
let ptr = self
.data
.as_ptr()
.cast::<usize>()
.add(Self::ELEMENT_SKIP)
.cast::<A>();
debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
ptr
}
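// Mutable counterpart of `data()`.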
#[inline]
#[must_use]
unsafe fn data_mut(&mut self) -> *mut A {
if Self::CAPACITY == 0 {
return NonNull::<A>::dangling().as_ptr();
}
let ptr = self
.data
.as_mut_ptr()
.cast::<usize>()
.add(Self::ELEMENT_SKIP)
.cast::<A>();
debug_assert!(ptr as usize % mem::align_of::<A>() == 0);
ptr
}
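// Unchecked element access helpers; callers must keep `index` in bounds.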
#[inline]
#[must_use]
unsafe fn ptr_at(&self, index: usize) -> *const A {
debug_assert!(index < Self::CAPACITY);
self.data().add(index)
}
#[inline]
#[must_use]
unsafe fn ptr_at_mut(&mut self, index: usize) -> *mut A {
debug_assert!(index < Self::CAPACITY);
self.data_mut().add(index)
}
#[inline]
unsafe fn read_at(&self, index: usize) -> A {
ptr::read(self.ptr_at(index))
}
#[inline]
unsafe fn write_at(&mut self, index: usize, value: A) {
ptr::write(self.ptr_at_mut(index), value);
}
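/// Returns the number of elements in the array.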
#[inline]
#[must_use]
pub fn len(&self) -> usize {
unsafe { *self.len_const() }
}
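/// Tests if the array is empty.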
#[inline]
#[must_use]
pub fn is_empty(&self) -> bool {
self.len() == 0
}
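/// Tests if the array is at capacity.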
#[inline]
#[must_use]
pub fn is_full(&self) -> bool {
self.len() >= Self::CAPACITY
}
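/// Constructs a new, empty array.
///
/// Panics if the type parameters violate the layout assumptions the unsafe
/// code relies on; in particular, `T` must be strictly larger than the
/// `usize` length header.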
#[inline]
#[must_use]
pub fn new() -> Self {
assert!(Self::HOST_SIZE > Self::HEADER_SIZE);
assert!(
(Self::CAPACITY == 0) || (mem::align_of::<Self>() % mem::align_of::<A>() == 0),
"InlineArray can't satisfy alignment of {}",
core::any::type_name::<A>()
);
let mut self_ = Self {
_header_align: [],
_phantom: PhantomData,
data: MaybeUninit::uninit(),
};
assert_eq!(
&self_ as *const _ as usize,
self_.data.as_ptr() as usize,
"Padding at the start of struct",
);
assert_eq!(
self_.data.as_ptr() as usize % mem::align_of::<usize>(),
0,
"Unaligned header"
);
assert!(mem::size_of::<Self>() == mem::size_of::<T>() || mem::align_of::<T>() < mem::align_of::<Self>());
assert_eq!(0, unsafe { self_.data() } as usize % mem::align_of::<A>());
assert_eq!(0, unsafe { self_.data_mut() } as usize % mem::align_of::<A>());
assert!(Self::ELEMENT_SKIP == 0 || Self::HEADER_SKIP == 0);
unsafe { ptr::write(self_.len_mut(), 0usize) };
self_
}
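/// Pushes a value onto the back of the array.
///
/// Panics if the array is already at capacity.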
pub fn push(&mut self, value: A) {
if self.is_full() {
panic!("InlineArray::push: chunk size overflow");
}
unsafe {
self.write_at(self.len(), value);
*self.len_mut() += 1;
}
}
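/// Removes and returns the last element, or `None` if the array is empty.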
pub fn pop(&mut self) -> Option<A> {
if self.is_empty() {
None
} else {
unsafe {
*self.len_mut() -= 1;
}
Some(unsafe { self.read_at(self.len()) })
}
}
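/// Inserts a value at `index`, shifting all later elements one slot to the
/// right.
///
/// Panics if the array is at capacity or `index` is out of bounds.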
pub fn insert(&mut self, index: usize, value: A) {
if self.is_full() {
panic!("InlineArray::push: chunk size overflow");
}
if index > self.len() {
panic!("InlineArray::insert: index out of bounds");
}
unsafe {
let src = self.ptr_at_mut(index);
ptr::copy(src, src.add(1), self.len() - index);
ptr::write(src, value);
*self.len_mut() += 1;
}
}
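/// Removes and returns the element at `index`, shifting all later elements
/// one slot to the left, or returns `None` if `index` is out of bounds.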
pub fn remove(&mut self, index: usize) -> Option<A> {
if index >= self.len() {
None
} else {
unsafe {
let src = self.ptr_at_mut(index);
let value = ptr::read(src);
*self.len_mut() -= 1;
ptr::copy(src.add(1), src, self.len() - index);
Some(value)
}
}
}
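/// Splits the array in two at `index`, leaving the first `index` elements in
/// place and returning a new array containing the rest.
///
/// Panics if `index` is out of bounds.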
pub fn split_off(&mut self, index: usize) -> Self {
if index > self.len() {
panic!("InlineArray::split_off: index out of bounds");
}
let mut out = Self::new();
if index < self.len() {
unsafe {
ptr::copy(self.ptr_at(index), out.data_mut(), self.len() - index);
*out.len_mut() = self.len() - index;
*self.len_mut() = index;
}
}
out
}
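// Drops the initialized elements in place without touching the length.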
#[inline]
unsafe fn drop_contents(&mut self) {
ptr::drop_in_place::<[A]>(&mut **self)
}
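/// Drops every element and resets the length to zero.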
pub fn clear(&mut self) {
unsafe {
self.drop_contents();
*self.len_mut() = 0;
}
}
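/// Returns a draining iterator that removes and yields elements from the
/// array as it is consumed.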
pub fn drain(&mut self) -> Drain<'_, A, T> {
Drain { array: self }
}
}
impl<A, T> Drop for InlineArray<A, T> {
fn drop(&mut self) {
unsafe { self.drop_contents() }
}
}
impl<A, T> Default for InlineArray<A, T> {
fn default() -> Self {
Self::new()
}
}
impl<A, T> Clone for InlineArray<A, T>
where
A: Clone,
{
fn clone(&self) -> Self {
let mut copy = Self::new();
for i in 0..self.len() {
unsafe {
copy.write_at(i, self.get_unchecked(i).clone());
}
}
unsafe {
*copy.len_mut() = self.len();
}
copy
}
}
impl<A, T> Deref for InlineArray<A, T> {
type Target = [A];
fn deref(&self) -> &Self::Target {
unsafe { from_raw_parts(self.data(), self.len()) }
}
}
impl<A, T> DerefMut for InlineArray<A, T> {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { from_raw_parts_mut(self.data_mut(), self.len()) }
}
}
impl<A, T> Borrow<[A]> for InlineArray<A, T> {
fn borrow(&self) -> &[A] {
self.deref()
}
}
impl<A, T> BorrowMut<[A]> for InlineArray<A, T> {
fn borrow_mut(&mut self) -> &mut [A] {
self.deref_mut()
}
}
impl<A, T> AsRef<[A]> for InlineArray<A, T> {
fn as_ref(&self) -> &[A] {
self.deref()
}
}
impl<A, T> AsMut<[A]> for InlineArray<A, T> {
fn as_mut(&mut self) -> &mut [A] {
self.deref_mut()
}
}
impl<A, T, Slice> PartialEq<Slice> for InlineArray<A, T>
where
Slice: Borrow<[A]>,
A: PartialEq,
{
fn eq(&self, other: &Slice) -> bool {
self.deref() == other.borrow()
}
}
impl<A, T> Eq for InlineArray<A, T> where A: Eq {}
impl<A, T> PartialOrd for InlineArray<A, T>
where
A: PartialOrd,
{
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
self.iter().partial_cmp(other.iter())
}
}
impl<A, T> Ord for InlineArray<A, T>
where
A: Ord,
{
fn cmp(&self, other: &Self) -> Ordering {
self.iter().cmp(other.iter())
}
}
impl<A, T> Debug for InlineArray<A, T>
where
A: Debug,
{
fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
f.write_str("Chunk")?;
f.debug_list().entries(self.iter()).finish()
}
}
impl<A, T> Hash for InlineArray<A, T>
where
A: Hash,
{
fn hash<H>(&self, hasher: &mut H)
where
H: Hasher,
{
for item in self {
item.hash(hasher)
}
}
}
impl<A, T> IntoIterator for InlineArray<A, T> {
type Item = A;
type IntoIter = Iter<A, T>;
fn into_iter(self) -> Self::IntoIter {
Iter { array: self }
}
}
impl<A, T> FromIterator<A> for InlineArray<A, T> {
fn from_iter<I>(it: I) -> Self
where
I: IntoIterator<Item = A>,
{
let mut chunk = Self::new();
for item in it {
chunk.push(item);
}
chunk
}
}
impl<'a, A, T> IntoIterator for &'a InlineArray<A, T> {
type Item = &'a A;
type IntoIter = SliceIter<'a, A>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
impl<'a, A, T> IntoIterator for &'a mut InlineArray<A, T> {
type Item = &'a mut A;
type IntoIter = SliceIterMut<'a, A>;
fn into_iter(self) -> Self::IntoIter {
self.iter_mut()
}
}
impl<A, T> Extend<A> for InlineArray<A, T> {
fn extend<I>(&mut self, it: I)
where
I: IntoIterator<Item = A>,
{
for item in it {
self.push(item);
}
}
}
impl<'a, A, T> Extend<&'a A> for InlineArray<A, T>
where
A: 'a + Copy,
{
fn extend<I>(&mut self, it: I)
where
I: IntoIterator<Item = &'a A>,
{
for item in it {
self.push(*item);
}
}
}
#[cfg(test)]
mod test {
use super::*;
use crate::tests::DropTest;
use std::sync::atomic::{AtomicUsize, Ordering};
#[test]
fn dropping() {
let counter = AtomicUsize::new(0);
{
let mut chunk: InlineArray<DropTest<'_>, [usize; 32]> = InlineArray::new();
for _i in 0..16 {
chunk.push(DropTest::new(&counter));
}
assert_eq!(16, counter.load(Ordering::Relaxed));
for _i in 0..8 {
chunk.pop();
}
assert_eq!(8, counter.load(Ordering::Relaxed));
}
assert_eq!(0, counter.load(Ordering::Relaxed));
}
#[test]
fn zero_sized_values() {
let mut chunk: InlineArray<(), [usize; 32]> = InlineArray::new();
for _i in 0..65536 {
chunk.push(());
}
assert_eq!(65536, chunk.len());
assert_eq!(Some(()), chunk.pop());
}
#[test]
fn low_align_base() {
let mut chunk: InlineArray<String, [u8; 512]> = InlineArray::new();
chunk.push("Hello".to_owned());
assert_eq!(chunk[0], "Hello");
let mut chunk: InlineArray<String, [u16; 512]> = InlineArray::new();
chunk.push("Hello".to_owned());
assert_eq!(chunk[0], "Hello");
}
#[test]
fn float_align() {
let mut chunk: InlineArray<f64, [u8; 16]> = InlineArray::new();
chunk.push(1234.);
assert_eq!(chunk[0], 1234.);
let mut chunk: InlineArray<f64, [u8; 17]> = InlineArray::new();
chunk.push(1234.);
assert_eq!(chunk[0], 1234.);
}
#[test]
fn recursive_types_compile() {
#[allow(dead_code)]
enum Recursive {
A(InlineArray<Recursive, u64>),
B,
}
}
#[test]
fn insufficient_alignment1() {
#[repr(align(256))]
struct BigAlign(u8);
#[repr(align(32))]
struct MediumAlign(u8);
assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
assert_eq!(0, InlineArray::<BigAlign, [u64; 256]>::CAPACITY);
assert_eq!(0, InlineArray::<BigAlign, [f64; 256]>::CAPACITY);
assert_eq!(0, InlineArray::<BigAlign, [MediumAlign; 256]>::CAPACITY);
}
#[test]
fn insufficient_alignment2() {
#[repr(align(256))]
struct BigAlign(usize);
let mut bad: InlineArray<BigAlign, [usize; 256]> = InlineArray::new();
assert_eq!(0, InlineArray::<BigAlign, [usize; 256]>::CAPACITY);
assert_eq!(0, bad.len());
assert_eq!(0, bad[..].len());
assert_eq!(true, bad.is_full());
assert_eq!(0, bad.drain().count());
assert!(bad.pop().is_none());
assert!(bad.remove(0).is_none());
assert!(bad.split_off(0).is_full());
bad.clear();
}
#[test]
fn sufficient_alignment1() {
#[repr(align(256))]
struct BigAlign(u8);
assert_eq!(13, InlineArray::<BigAlign, [BigAlign; 14]>::CAPACITY);
assert_eq!(1, InlineArray::<BigAlign, [BigAlign; 2]>::CAPACITY);
assert_eq!(0, InlineArray::<BigAlign, [BigAlign; 1]>::CAPACITY);
let mut chunk: InlineArray<BigAlign, [BigAlign; 2]> = InlineArray::new();
chunk.push(BigAlign(42));
assert_eq!(
chunk.get(0).unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
0
);
}
#[test]
fn sufficient_alignment2() {
#[repr(align(128))]
struct BigAlign([u8; 64]);
#[repr(align(256))]
struct BiggerAlign(u8);
assert_eq!(128, mem::align_of::<BigAlign>());
assert_eq!(256, mem::align_of::<BiggerAlign>());
assert_eq!(199, InlineArray::<BigAlign, [BiggerAlign; 100]>::CAPACITY);
assert_eq!(3, InlineArray::<BigAlign, [BiggerAlign; 2]>::CAPACITY);
assert_eq!(1, InlineArray::<BigAlign, [BiggerAlign; 1]>::CAPACITY);
assert_eq!(0, InlineArray::<BigAlign, [BiggerAlign; 0]>::CAPACITY);
let mut chunk: InlineArray<BigAlign, [BiggerAlign; 1]> = InlineArray::new();
chunk.push(BigAlign([0; 64]));
assert_eq!(
chunk.get(0).unwrap() as *const _ as usize % mem::align_of::<BigAlign>(),
0
);
}
}