#![doc(
html_root_url = "https://doc.rust-lang.org/nightly/",
test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(rustc_private, core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
#![allow(deprecated)]
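//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.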
extern crate alloc;
use rustc_data_structures::cold_path;
use smallvec::SmallVec;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;
use alloc::raw_vec::RawVec;
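/// An arena that can hold objects of only one type.
///
/// A minimal usage sketch, assuming a nightly toolchain with this crate's
/// unstable features available (kept out of the doctests for that reason):
///
/// ```ignore (relies on unstable features)
/// let arena = TypedArena::default();
/// let x = arena.alloc(1u32);
/// *x += 1;
/// let xs = arena.alloc_slice(&[1u32, 2, 3]); // requires `T: Copy`
/// assert_eq!(*x, 2);
/// assert_eq!(xs, &[1, 2, 3]);
/// // All objects are dropped together when `arena` goes out of scope.
/// ```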
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,
    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,
    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,
    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}
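/// A single growable chunk of backing storage for a `TypedArena<T>`.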
struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
    /// The number of valid entries in the chunk.
    entries: usize,
}
impl<T> TypedArenaChunk<T> {
#[inline]
unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
TypedArenaChunk { storage: RawVec::with_capacity(capacity), entries: 0 }
}
    /// Destroys this arena chunk by dropping its first `len` entries.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
let mut start = self.start();
for _ in 0..len {
ptr::drop_in_place(start);
start = start.offset(1);
}
}
}
    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
self.storage.ptr()
}
    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
} else {
self.start().add(self.storage.capacity())
}
}
}
}
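// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.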
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
impl<T> Default for TypedArena<T> {
fn default() -> TypedArena<T> {
TypedArena {
ptr: Cell::new(ptr::null_mut()),
end: Cell::new(ptr::null_mut()),
chunks: RefCell::new(vec![]),
_own: PhantomData,
}
}
}
impl<T> TypedArena<T> {
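    /// Allocates an object in the `TypedArena`, returning a reference to it.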
#[inline]
pub fn alloc(&self, object: T) -> &mut T {
        // Grow first if the current chunk is exhausted.
        if self.ptr == self.end {
self.grow(1)
}
unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                // Any well-aligned dangling pointer is valid for zero-sized `T`.
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
&mut *ptr
} else {
let ptr = self.ptr.get();
self.ptr.set(self.ptr.get().offset(1));
ptr::write(ptr, object);
&mut *ptr
}
}
}
#[inline]
fn can_allocate(&self, len: usize) -> bool {
let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
let at_least_bytes = len.checked_mul(mem::size_of::<T>()).unwrap();
available_capacity_bytes >= at_least_bytes
}
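    /// Ensures there's enough space in the current chunk to fit `len` objects.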
#[inline]
fn ensure_capacity(&self, len: usize) {
if !self.can_allocate(len) {
self.grow(len);
debug_assert!(self.can_allocate(len));
}
}
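    /// Bump-allocates room for `len` objects, returning a pointer to the start
    /// of the uninitialized region. The caller must initialize every slot;
    /// zero-sized `T` and zero `len` are rejected by the assertions below.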
#[inline]
unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
assert!(mem::size_of::<T>() != 0);
assert!(len != 0);
self.ensure_capacity(len);
let start_ptr = self.ptr.get();
self.ptr.set(start_ptr.add(len));
start_ptr
}
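    /// Allocates a slice of objects that are copied into the `TypedArena`,
    /// returning a mutable reference to it.
    ///
    /// Panics if `T` is zero-sized or the slice is empty.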
#[inline]
pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
where
T: Copy,
{
unsafe {
let len = slice.len();
let start_ptr = self.alloc_raw_slice(len);
slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
slice::from_raw_parts_mut(start_ptr, len)
}
}
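    /// Allocates the contents of an iterator, returning them as a mutable
    /// slice. The elements are collected into a `SmallVec` first, then copied
    /// into the arena with a single bump of the pointer.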
#[inline]
pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
assert!(mem::size_of::<T>() != 0);
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
if vec.is_empty() {
return &mut [];
}
unsafe {
let len = vec.len();
let start_ptr = self.alloc_raw_slice(len);
vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
vec.set_len(0);
slice::from_raw_parts_mut(start_ptr, len)
}
}
    // Grows the arena.
    #[inline(never)]
#[cold]
fn grow(&self, n: usize) {
unsafe {
            // The element size converts chunk sizes (PAGE..HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
let mut chunks = self.chunks.borrow_mut();
let (chunk, mut new_capacity);
if let Some(last_chunk) = chunks.last_mut() {
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
let currently_used_cap = used_bytes / mem::size_of::<T>();
last_chunk.entries = currently_used_cap;
                // Try to extend the current chunk in place first.
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    // If the previous chunk's capacity is less than HUGE_PAGE
                    // bytes, then this chunk will be at least double the
                    // previous chunk's size.
                    new_capacity = last_chunk.storage.capacity();
                    if new_capacity < HUGE_PAGE / elem_size {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                    }
}
} else {
new_capacity = PAGE / elem_size;
}
            // Also ensure that this chunk can fit `n`.
            new_capacity = cmp::max(n, new_capacity);
chunk = TypedArenaChunk::<T>::new(new_capacity);
self.ptr.set(chunk.start());
self.end.set(chunk.end());
chunks.push(chunk);
}
}
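    /// Clears the arena. Deallocates all but the longest chunk, which may be
    /// reused.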
pub fn clear(&mut self) {
unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is a ZST, the code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
chunk.destroy(chunk.entries);
}
}
}
}
    // Drops the contents of the last chunk. The last chunk is partially empty,
    // unlike all other chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled: `self.ptr` points at the first
        // uninitialized element.
        let start = last_chunk.start() as usize;
        let end = self.ptr.get() as usize;
        let diff = if mem::size_of::<T>() == 0 {
            // Since the item size is zero, every allocation bumped the pointer
            // by one byte, so the byte difference is the object count.
            end - start
} else {
(end - start) / mem::size_of::<T>()
};
unsafe {
last_chunk.destroy(diff);
}
        // Reset the chunk so it can be reused.
        self.ptr.set(last_chunk.start());
}
}
unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
fn drop(&mut self) {
unsafe {
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped; destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
}
}
}
}
}
unsafe impl<T: Send> Send for TypedArena<T> {}
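/// An arena that can hold objects of multiple different types that impl
/// `Copy` and/or satisfy `!mem::needs_drop`.
///
/// A minimal usage sketch, assuming a nightly toolchain with this crate's
/// unstable features available (kept out of the doctests for that reason):
///
/// ```ignore (relies on unstable features)
/// let arena = DroplessArena::default();
/// let n = arena.alloc(7usize);              // fine: `usize` never needs drop
/// let bytes = arena.alloc_slice(&[1u8, 2]); // fine: `u8` is `Copy`
/// // arena.alloc(String::from("nope"));     // would panic: `String` needs drop
/// ```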
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,
    /// A pointer to the end of the allocated area; reaching it triggers a new chunk.
    end: Cell<*mut u8>,
    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}
unsafe impl Send for DroplessArena {}
impl Default for DroplessArena {
#[inline]
fn default() -> DroplessArena {
DroplessArena {
ptr: Cell::new(ptr::null_mut()),
end: Cell::new(ptr::null_mut()),
chunks: Default::default(),
}
}
}
impl DroplessArena {
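    /// Rounds the bump pointer up to `align` bytes; `align` must be a power
    /// of two.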
#[inline]
fn align(&self, align: usize) {
let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
self.ptr.set(final_address as *mut u8);
assert!(self.ptr <= self.end);
}
#[inline(never)]
#[cold]
fn grow(&self, needed_bytes: usize) {
unsafe {
let mut chunks = self.chunks.borrow_mut();
let (chunk, mut new_capacity);
if let Some(last_chunk) = chunks.last_mut() {
let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                // Try to extend the current chunk in place first.
                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    // If the previous chunk's capacity is less than HUGE_PAGE
                    // bytes, then this chunk will be at least double the
                    // previous chunk's size.
                    new_capacity = last_chunk.storage.capacity();
                    if new_capacity < HUGE_PAGE {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
}
}
} else {
new_capacity = PAGE;
}
            // Also ensure that this chunk can fit `needed_bytes`.
            new_capacity = cmp::max(needed_bytes, new_capacity);
chunk = TypedArenaChunk::<u8>::new(new_capacity);
self.ptr.set(chunk.start());
self.end.set(chunk.end());
chunks.push(chunk);
}
}
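    /// Reserves `bytes` bytes at alignment `align` and returns them as a
    /// mutable byte slice. The contents are uninitialized; `bytes` must be
    /// nonzero.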
#[inline]
pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
unsafe {
assert!(bytes != 0);
self.align(align);
let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
if (future_end as *mut u8) >= self.end.get() {
self.grow(bytes);
}
let ptr = self.ptr.get();
self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
slice::from_raw_parts_mut(ptr, bytes)
}
}
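    /// Allocates an object that does not need to be dropped, returning a
    /// reference to it. Panics if `T` needs drop or is zero-sized.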
#[inline]
pub fn alloc<T>(&self, object: T) -> &mut T {
assert!(!mem::needs_drop::<T>());
let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
unsafe {
ptr::write(mem, object);
&mut *mem
}
}
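    /// Allocates a slice of `Copy` objects that are copied into the arena,
    /// returning a mutable reference to it.
    ///
    /// Panics if `T` needs drop, `T` is zero-sized, or the slice is empty.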
#[inline]
pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
where
T: Copy,
{
assert!(!mem::needs_drop::<T>());
assert!(mem::size_of::<T>() != 0);
assert!(!slice.is_empty());
let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
as *mut T;
unsafe {
let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
arena_slice.copy_from_slice(slice);
arena_slice
}
}
#[inline]
unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
&self,
mut iter: I,
len: usize,
mem: *mut T,
) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop, since LLVM manages to optimize it better for
        // slice iterators.
        loop {
let value = iter.next();
if i >= len || value.is_none() {
return slice::from_raw_parts_mut(mem, i);
}
ptr::write(mem.add(i), value.unwrap());
i += 1;
}
}
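    /// Allocates the contents of an iterator, returning them as a mutable
    /// slice. When the size hint is exact, the elements are written straight
    /// into the arena; otherwise they are collected into a `SmallVec` on the
    /// cold path and copied over afterwards.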
#[inline]
pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
let iter = iter.into_iter();
assert!(mem::size_of::<T>() != 0);
assert!(!mem::needs_drop::<T>());
let size_hint = iter.size_hint();
match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will
                // produce, so the slice can be written in place.
                let len = min;
if len == 0 {
return &mut [];
}
let size = len.checked_mul(mem::size_of::<T>()).unwrap();
let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
unsafe { self.write_from_iter(iter, len, mem) }
}
(_, _) => {
                cold_path(move || -> &mut [T] {
                    // Inexact size hint: collect into a buffer first, then
                    // copy the elements into the arena in one step.
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
if vec.is_empty() {
return &mut [];
}
unsafe {
let len = vec.len();
let start_ptr = self
.alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
as *mut _ as *mut T;
vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
vec.set_len(0);
slice::from_raw_parts_mut(start_ptr, len)
}
})
}
}
}
}
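/// A type-erased destructor entry: when the `DropType` is dropped, `drop_fn`
/// runs the destructor of the object behind `obj`.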
struct DropType {
drop_fn: unsafe fn(*mut u8),
obj: *mut u8,
}
// Runs `T`'s destructor on a type-erased pointer.
unsafe fn drop_for_type<T>(to_drop: *mut u8) {
std::ptr::drop_in_place(to_drop as *mut T)
}
impl Drop for DropType {
fn drop(&mut self) {
unsafe { (self.drop_fn)(self.obj) }
}
}
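/// An arena which can be used to allocate any type. Allocating in this arena
/// is unsafe since the type system doesn't know which types it contains. In
/// order to allocate safely, you must store a `PhantomData<T>` alongside this
/// arena for each type `T` you allocate.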
#[derive(Default)]
pub struct DropArena {
    /// Destructors to run on drop; declared before `arena` so they run while
    /// the arena's memory is still alive.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}
impl DropArena {
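    /// Allocates an object and registers its destructor to run when the arena
    /// is dropped. Unsafe for the reason given on `DropArena` itself: the type
    /// system does not track which `T`s live in the arena.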
#[inline]
pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
let mem =
self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation, as the allocation
        // may panic; recording it first would run `object`'s destructor twice.
        self.destructors
.borrow_mut()
.push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
result
}
#[inline]
pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
if vec.is_empty() {
return &mut [];
}
let len = vec.len();
let start_ptr = self
.arena
.alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
as *mut _ as *mut T;
        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors up front so the pushes below
        // cannot panic.
        destructors.reserve(len);
        // Move the contents to the arena by copying them and then forgetting
        // the contents of the `SmallVec`.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));
        // Record the destructors after doing the allocation, as the allocation
        // may panic; recording them first would run the objects' destructors
        // twice.
        for i in 0..len {
            destructors.push(DropType {
                drop_fn: drop_for_type::<T>,
                obj: start_ptr.add(i) as *mut u8,
});
}
slice::from_raw_parts_mut(start_ptr, len)
}
}
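/// Expands to the storage type for `$ty` in the generated `Arena`: a dedicated
/// `TypedArena<$ty>` by default, or a bare `PhantomData<$ty>` when the `few`
/// modifier routes the type through the shared `DropArena` instead.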
#[macro_export]
macro_rules! arena_for_type {
([][$ty:ty]) => {
$crate::TypedArena<$ty>
};
([few $(, $attrs:ident)*][$ty:ty]) => {
::std::marker::PhantomData<$ty>
};
([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
$crate::arena_for_type!([$($attrs),*]$args)
};
}
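/// Companion to `arena_for_type!`: yields `Some($arena)` for types with a
/// dedicated `TypedArena`, or `None` for `few` types handled by the
/// `DropArena`.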
#[macro_export]
macro_rules! which_arena_for_type {
([][$arena:expr]) => {
::std::option::Option::Some($arena)
};
([few$(, $attrs:ident)*][$arena:expr]) => {
::std::option::Option::None
};
([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
$crate::which_arena_for_type!([$($attrs),*]$args)
};
}
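/// Declares an `Arena` struct with one `TypedArena` field per listed type,
/// plus a shared `DroplessArena` and `DropArena`, and `alloc`/`alloc_slice`/
/// `alloc_from_iter` entry points that dispatch on `needs_drop::<T>()` and the
/// per-type modifiers. See the invocation sketch after the macro definition.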
#[macro_export]
macro_rules! declare_arena {
([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
#[derive(Default)]
pub struct Arena<$tcx> {
pub dropless: $crate::DroplessArena,
drop: $crate::DropArena,
$($name: $crate::arena_for_type!($a[$ty]),)*
}
#[marker]
pub trait ArenaAllocatable {}
impl<T: Copy> ArenaAllocatable for T {}
unsafe trait ArenaField<'tcx>: Sized {
fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>>;
}
unsafe impl<'tcx, T> ArenaField<'tcx> for T {
#[inline]
default fn arena<'a>(_: &'a Arena<'tcx>) -> Option<&'a $crate::TypedArena<Self>> {
panic!()
}
}
$(
#[allow(unused_lifetimes)]
impl<$tcx> ArenaAllocatable for $ty {}
unsafe impl<$tcx> ArenaField<$tcx> for $ty {
#[inline]
fn arena<'a>(_arena: &'a Arena<$tcx>) -> Option<&'a $crate::TypedArena<Self>> {
$crate::which_arena_for_type!($a[&_arena.$name])
}
}
)*
impl<'tcx> Arena<'tcx> {
#[inline]
pub fn alloc<T: ArenaAllocatable>(&self, value: T) -> &mut T {
if !::std::mem::needs_drop::<T>() {
return self.dropless.alloc(value);
}
match <T as ArenaField<'tcx>>::arena(self) {
::std::option::Option::Some(arena) => arena.alloc(value),
::std::option::Option::None => unsafe { self.drop.alloc(value) },
}
}
#[inline]
pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
if value.is_empty() {
return &mut [];
}
self.dropless.alloc_slice(value)
}
pub fn alloc_from_iter<'a, T: ArenaAllocatable>(
&'a self,
iter: impl ::std::iter::IntoIterator<Item = T>,
) -> &'a mut [T] {
if !::std::mem::needs_drop::<T>() {
return self.dropless.alloc_from_iter(iter);
}
match <T as ArenaField<'tcx>>::arena(self) {
::std::option::Option::Some(arena) => arena.alloc_from_iter(iter),
::std::option::Option::None => unsafe { self.drop.alloc_from_iter(iter) },
}
}
}
}
}
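// A hypothetical `declare_arena!` invocation sketch (the type names below are
// illustrative, not part of this crate, and the expansion relies on the
// `marker_trait_attr` and `specialization` features being enabled in the
// invoking crate):
//
//     declare_arena!([], [
//         [] layout: MyLayout,
//         [few] rare: RareThing,
//     ], 'tcx);
//
// `[]` gives `MyLayout` a dedicated `TypedArena` field named `layout`, while
// `[few]` routes `RareThing` allocations through the shared `DropArena`.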
#[cfg(test)]
mod tests;