//! Typed and dropless arena allocation, paraphrased from [the Rust Compiler's `rustc_arena`](https://github.com/rust-lang/rust/blob/master/compiler/rustc_arena/src/lib.rs). See [LICENSE][1].
//!
//! An arena allocator is an allocator that provides stable locations for allocations within
//! itself for the entire duration of its lifetime.
//!
//! [1]: https://raw.githubusercontent.com/rust-lang/rust/master/LICENSE-MIT
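//!
//! # Example
//!
//! A minimal usage sketch; the crate name `arena` is assumed here for illustration:
//!
//! ```ignore
//! use arena::{dropless_arena::DroplessArena, typed_arena::TypedArena};
//!
//! let typed: TypedArena<String> = TypedArena::new();
//! let s = typed.alloc(String::from("hello")); // dropped when `typed` is
//! s.push_str(", world!");
//!
//! let dropless = DroplessArena::new();
//! let nums = dropless.alloc_slice(&[1, 2, 3]); // Copy types only
//! assert_eq!(nums, [1, 2, 3]);
//! ```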

#![feature(dropck_eyepatch, new_uninit, strict_provenance)]
#![no_std]

extern crate alloc;

pub(crate) mod constants {
    //! Size constants for arena chunk growth
    pub(crate) const MIN_CHUNK: usize = 4096;
    pub(crate) const MAX_CHUNK: usize = 2 * 1024 * 1024;
}

mod chunk {
    //! An [ArenaChunk] contains a block of raw memory for use in arena allocators.
    use alloc::boxed::Box;
    use core::{
        mem::{self, MaybeUninit},
        ptr::{self, NonNull},
    };

    pub struct ArenaChunk<T> {
        pub(crate) mem: NonNull<[MaybeUninit<T>]>,
        pub(crate) filled: usize,
    }

    impl<T: Sized> ArenaChunk<T> {
        pub fn new(cap: usize) -> Self {
            let slice = Box::new_uninit_slice(cap);
            Self { mem: NonNull::from(Box::leak(slice)), filled: 0 }
        }

        /// Drops all elements inside self, and resets the filled count to 0
        ///
        /// # Safety
        ///
        /// The caller must ensure that `self.filled` elements of self are currently initialized
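        ///
        /// A sketch of the intended call pattern (illustrative only; this module is private):
        ///
        /// ```ignore
        /// let mut chunk = ArenaChunk::<u32>::new(4);
        /// unsafe {
        ///     chunk.mem.as_mut()[0].write(42); // initialize element 0
        ///     chunk.filled = 1;                // record how many are initialized
        ///     chunk.drop_elements();           // drops exactly `filled` elements
        /// }
        /// ```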
        pub unsafe fn drop_elements(&mut self) {
            if mem::needs_drop::<T>() {
                // Safety: the caller has ensured that `filled` elements are initialized
                unsafe {
                    let slice = self.mem.as_mut();
                    for t in slice[..self.filled].iter_mut() {
                        t.assume_init_drop();
                    }
                }
                self.filled = 0;
            }
        }

        /// Gets a pointer to the start of the arena
        pub fn start(&mut self) -> *mut T {
            self.mem.as_ptr() as _
        }

        /// Gets a pointer to the end of the arena
        pub fn end(&mut self) -> *mut T {
            if mem::size_of::<T>() == 0 {
                ptr::without_provenance_mut(usize::MAX) // pointers to ZSTs must be unique
            } else {
                unsafe { self.start().add(self.mem.len()) }
            }
        }
    }

    impl<T> Drop for ArenaChunk<T> {
        fn drop(&mut self) {
            // Reconstitute the leaked `Box` so the chunk's memory is freed
            let _ = unsafe { Box::from_raw(self.mem.as_ptr()) };
        }
    }
}

pub mod typed_arena {
    //! A [TypedArena] can hold many instances of a single type, and will properly [Drop] them.
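    //!
    //! A sketch of the drop guarantee (the `Rc` counter is only for illustration):
    //!
    //! ```ignore
    //! use alloc::rc::Rc;
    //!
    //! let counter = Rc::new(());
    //! {
    //!     let arena = TypedArena::new();
    //!     arena.alloc(Rc::clone(&counter));
    //!     assert_eq!(Rc::strong_count(&counter), 2);
    //! } // arena dropped here: the allocated `Rc` is dropped too
    //! assert_eq!(Rc::strong_count(&counter), 1);
    //! ```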
    #![allow(clippy::mut_from_ref)]

    use crate::{chunk::ArenaChunk, constants::*};
    use alloc::vec::Vec;
    use core::{
        cell::{Cell, RefCell},
        marker::PhantomData,
        mem, ptr, slice,
    };

    /// A [TypedArena] can hold many instances of a single type, and will properly [Drop] them when
    /// it falls out of scope.
    pub struct TypedArena<'arena, T> {
        _lives: PhantomData<&'arena T>,
        _drops: PhantomData<T>,
        chunks: RefCell<Vec<ArenaChunk<T>>>,
        head: Cell<*mut T>,
        tail: Cell<*mut T>,
    }

    impl<'arena, T> Default for TypedArena<'arena, T> {
        fn default() -> Self {
            Self::new()
        }
    }

    impl<'arena, T> TypedArena<'arena, T> {
        pub const fn new() -> Self {
            Self {
                _lives: PhantomData,
                _drops: PhantomData,
                chunks: RefCell::new(Vec::new()),
                head: Cell::new(ptr::null_mut()),
                tail: Cell::new(ptr::null_mut()),
            }
        }
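
        /// Allocates `value` in the arena, returning a stable mutable reference to it.
        ///
        /// A usage sketch (illustrative, not from the original source):
        ///
        /// ```ignore
        /// let arena = TypedArena::new();
        /// let one = arena.alloc(1i32);
        /// let two = arena.alloc(2i32);
        /// *one += 10; // earlier references stay valid across later allocations
        /// assert_eq!(*one + *two, 13);
        /// ```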
        pub fn alloc(&'arena self, value: T) -> &'arena mut T {
            if self.head == self.tail {
                self.grow(1);
            }

            let out = if mem::size_of::<T>() == 0 {
                self.head
                    .set(ptr::without_provenance_mut(self.head.get().addr() + 1));
                ptr::NonNull::<T>::dangling().as_ptr()
            } else {
                let out = self.head.get();
                self.head.set(unsafe { out.add(1) });
                out
            };

            unsafe {
                ptr::write(out, value);
                &mut *out
            }
        }

        fn can_allocate(&self, len: usize) -> bool {
            len <= unsafe { self.tail.get().offset_from(self.head.get()) as usize }
        }

        /// # Panics
        /// Panics if `size_of::<T>() == 0 || len == 0`
        #[inline]
        fn alloc_raw_slice(&self, len: usize) -> *mut T {
            assert!(mem::size_of::<T>() != 0);
            assert!(len != 0);

            if !self.can_allocate(len) {
                self.grow(len)
            }

            let out = self.head.get();

            unsafe { self.head.set(out.add(len)) };
            out
        }
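
        /// Allocates the iterator's items contiguously in the arena, returning them as a slice.
        ///
        /// A usage sketch (illustrative, not from the original source):
        ///
        /// ```ignore
        /// let arena = TypedArena::new();
        /// let evens = arena.alloc_from_iter((0..5).map(|n| n * 2));
        /// assert_eq!(evens, [0, 2, 4, 6, 8]);
        /// ```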
        pub fn alloc_from_iter<I>(&'arena self, iter: I) -> &'arena mut [T]
        where I: IntoIterator<Item = T> {
            // Collect them all into a buffer so they're allocated contiguously
            let mut buf = iter.into_iter().collect::<Vec<_>>();
            if buf.is_empty() {
                return &mut [];
            }

            let len = buf.len();
            // If T is a ZST, calling alloc_raw_slice will panic
            let slice = if mem::size_of::<T>() == 0 {
                self.head
                    .set(ptr::without_provenance_mut(self.head.get().addr() + len));
                ptr::NonNull::dangling().as_ptr()
            } else {
                self.alloc_raw_slice(len)
            };

            unsafe {
                buf.as_ptr().copy_to_nonoverlapping(slice, len);
                buf.set_len(0);
                slice::from_raw_parts_mut(slice, len)
            }
        }
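
        // Capacity doubles per chunk until it reaches `MAX_CHUNK` bytes. An illustrative
        // schedule for `size_of::<T>() == 8`: 512, 1024, 2048, ..., 262144 elements
        // (2 MiB); every later chunk stays at that cap.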
        #[cold]
        #[inline(never)]
        fn grow(&self, len: usize) {
            let size = mem::size_of::<T>().max(1);

            let mut chunks = self.chunks.borrow_mut();

            let capacity = if let Some(last) = chunks.last_mut() {
                last.filled = self.get_filled_of_chunk(last);
                // Double the last capacity, capping the chunk at MAX_CHUNK bytes
                // (mirrors the dropless `grow` below and the rustc_arena original)
                last.mem.len().min(MAX_CHUNK / size / 2) * 2
            } else {
                MIN_CHUNK / size
            }
            .max(len);

            let mut chunk = ArenaChunk::<T>::new(capacity);

            self.head.set(chunk.start());
            self.tail.set(chunk.end());
            chunks.push(chunk);
        }

        fn get_filled_of_chunk(&self, chunk: &mut ArenaChunk<T>) -> usize {
            // `self.head` points just past the last element written in the current chunk
            let Self { head: tail, .. } = self;
            let head = chunk.start();
            if mem::size_of::<T>() == 0 {
                tail.get().addr() - head.addr()
            } else {
                unsafe { tail.get().offset_from(head) as usize }
            }
        }
    }

    unsafe impl<'arena, T: Send> Send for TypedArena<'arena, T> {}

    unsafe impl<'arena, #[may_dangle] T> Drop for TypedArena<'arena, T> {
        fn drop(&mut self) {
            let mut chunks = self.chunks.borrow_mut();

            if let Some(last) = chunks.last_mut() {
                last.filled = self.get_filled_of_chunk(last);
                self.tail.set(self.head.get());
            }

            for chunk in chunks.iter_mut() {
                // Safety: `filled` elements of each chunk were initialized by `alloc`
                unsafe { chunk.drop_elements() }
            }
        }
    }

    #[cfg(test)]
    mod tests;
}

pub mod dropless_arena {
    //! A [DroplessArena] can hold *any* combination of types as long as they don't implement
    //! [Drop].
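    //!
    //! A usage sketch mixing types (illustrative, not from the original source):
    //!
    //! ```ignore
    //! let arena = DroplessArena::new();
    //! let n = arena.alloc(42u64);
    //! let s = arena.alloc_str("hello");
    //! let xs = arena.alloc_slice(&[1u8, 2, 3]);
    //! assert_eq!(*n, 42);
    //! assert_eq!(s, "hello");
    //! assert!(arena.contains_slice(xs));
    //! ```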
    use crate::{chunk::ArenaChunk, constants::*};
    use alloc::vec::Vec;
    use core::{
        alloc::Layout,
        cell::{Cell, RefCell},
        marker::PhantomData,
        mem, ptr, slice,
    };

    pub struct DroplessArena<'arena> {
        _lives: PhantomData<&'arena u8>,
        chunks: RefCell<Vec<ArenaChunk<u8>>>,
        head: Cell<*mut u8>,
        tail: Cell<*mut u8>,
    }

    impl Default for DroplessArena<'_> {
        fn default() -> Self {
            Self::new()
        }
    }

    impl<'arena> DroplessArena<'arena> {
        pub const fn new() -> Self {
            Self {
                _lives: PhantomData,
                chunks: RefCell::new(Vec::new()),
                head: Cell::new(ptr::null_mut()),
                tail: Cell::new(ptr::null_mut()),
            }
        }

        /// Allocates a `T` in the [DroplessArena], and returns a mutable reference to it.
        ///
        /// # Panics
        /// - Panics if T implements [Drop]
        /// - Panics if T is zero-sized
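        ///
        /// A usage sketch (illustrative, not from the original source):
        ///
        /// ```ignore
        /// let arena = DroplessArena::new();
        /// let point: &mut (i32, i32) = arena.alloc((3, 4));
        /// point.0 += 1;
        /// assert_eq!(*point, (4, 4));
        /// ```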
        #[allow(clippy::mut_from_ref)]
        pub fn alloc<T>(&'arena self, value: T) -> &'arena mut T {
            assert!(!mem::needs_drop::<T>());
            assert!(mem::size_of::<T>() != 0);

            let out = self.alloc_raw(Layout::new::<T>()) as *mut T;

            unsafe {
                ptr::write(out, value);
                &mut *out
            }
        }

        /// Allocates a slice of `T`s, copied from the given slice, returning a mutable reference
        /// to it.
        ///
        /// # Panics
        /// - Panics if T implements [Drop]
        /// - Panics if T is zero-sized
        /// - Panics if the slice is empty
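        ///
        /// A usage sketch (illustrative, not from the original source):
        ///
        /// ```ignore
        /// let arena = DroplessArena::new();
        /// let primes = arena.alloc_slice(&[2, 3, 5, 7]);
        /// assert_eq!(primes, [2, 3, 5, 7]);
        /// ```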
        #[allow(clippy::mut_from_ref)]
        pub fn alloc_slice<T: Copy>(&'arena self, slice: &[T]) -> &'arena mut [T] {
            assert!(!mem::needs_drop::<T>());
            assert!(mem::size_of::<T>() != 0);
            assert!(!slice.is_empty());

            let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

            unsafe {
                mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
                slice::from_raw_parts_mut(mem, slice.len())
            }
        }

        /// Allocates a copy of the given [`&str`](str), returning a reference to the allocation.
        ///
        /// # Panics
        /// Panics if the string is empty.
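        ///
        /// A usage sketch (illustrative, not from the original source):
        ///
        /// ```ignore
        /// let arena = DroplessArena::new();
        /// let greeting = arena.alloc_str("hello, world");
        /// assert_eq!(greeting, "hello, world");
        /// ```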
        pub fn alloc_str(&'arena self, string: &str) -> &'arena str {
            let slice = self.alloc_slice(string.as_bytes());

            // Safety: This is a clone of the input string, which was valid
            unsafe { core::str::from_utf8_unchecked(slice) }
        }

        /// Allocates some [bytes](u8) based on the given [Layout].
        ///
        /// # Panics
        /// Panics if the provided [Layout] has size 0
        pub fn alloc_raw(&'arena self, layout: Layout) -> *mut u8 {
            /// Rounds the given size (or pointer value) *up* to the given alignment
            fn align_up(size: usize, align: usize) -> usize {
                (size + align - 1) & !(align - 1)
            }
            /// Rounds the given size (or pointer value) *down* to the given alignment
            fn align_down(size: usize, align: usize) -> usize {
                size & !(align - 1)
            }
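
            // Worked example of the bit tricks (align must be a power of two):
            // align_up(13, 8)   == (13 + 7) & !7 == 16
            // align_down(13, 8) == 13 & !7 == 8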

            assert!(layout.size() != 0);
            loop {
                let Self { head, tail, .. } = self;
                let start = head.get().addr();
                let end = tail.get().addr();

                let align = 8.max(layout.align());

                let bytes = align_up(layout.size(), align);

                // Bump downward from the tail: move `tail` back by `bytes`, then realign;
                // the allocation fits if it still lies at or above `head`.
                if let Some(end) = end.checked_sub(bytes) {
                    let end = align_down(end, layout.align());

                    if start <= end {
                        tail.set(tail.get().with_addr(end));
                        return tail.get();
                    }
                }

                self.grow(layout.size());
            }
        }

        /// Grows the allocator, doubling the chunk size until it reaches [MAX_CHUNK].
        #[cold]
        #[inline(never)]
        fn grow(&self, len: usize) {
            let mut chunks = self.chunks.borrow_mut();

            let capacity = if let Some(last) = chunks.last_mut() {
                last.mem.len().min(MAX_CHUNK / 2) * 2
            } else {
                MIN_CHUNK
            }
            .max(len);

            let mut chunk = ArenaChunk::<u8>::new(capacity);

            self.head.set(chunk.start());
            self.tail.set(chunk.end());
            chunks.push(chunk);
        }

        /// Checks whether the given slice is allocated in this arena
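        ///
        /// A usage sketch (illustrative, not from the original source):
        ///
        /// ```ignore
        /// let arena = DroplessArena::new();
        /// let inside = arena.alloc_slice(&[1u8, 2, 3]);
        /// let outside = [4u8, 5, 6];
        /// assert!(arena.contains_slice(inside));
        /// assert!(!arena.contains_slice(&outside));
        /// ```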
        pub fn contains_slice<T>(&self, slice: &[T]) -> bool {
            let ptr = slice.as_ptr().cast::<u8>().cast_mut();
            for chunk in self.chunks.borrow_mut().iter_mut() {
                if chunk.start() <= ptr && ptr <= chunk.end() {
                    return true;
                }
            }
            false
        }
    }

    unsafe impl<'arena> Send for DroplessArena<'arena> {}

    #[cfg(test)]
    mod tests;
}