//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
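//!
//! A minimal usage sketch (illustrative only; the crate name below is assumed,
//! and the example is not compiled as a doctest):
//!
//! ```ignore
//! use rustc_arena::TypedArena;
//!
//! let arena: TypedArena<u32> = TypedArena::default();
//! let x: &mut u32 = arena.alloc(41);
//! *x += 1;
//! assert_eq!(*x, 42);
//! // Everything allocated here is freed in one go when `arena` is dropped.
//! ```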
#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(dropck_eyepatch)]
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]
#![feature(min_specialization)]
#![cfg_attr(test, feature(test))]
use smallvec::SmallVec;

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::marker::{PhantomData, Send};
use std::mem::{self, MaybeUninit};
use std::ptr;
use std::slice;
fn cold_path<F: FnOnce() -> R, R>(f: F) -> R {
    f()
}
/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}
struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: Box<[MaybeUninit<T>]>,
    /// The number of valid entries in the chunk.
    entries: usize,
}
impl<T> TypedArenaChunk<T> {
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
        }
    }
    // Returns a pointer to the first allocated object.
    fn start(&mut self) -> *mut T {
        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
    }

    // Returns a pointer to the end of the allocated space.
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}
// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
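
// For example (illustrative figures only): a `TypedArena<u8>` that keeps
// growing requests chunks of 4096, 8192, 16384, ... bytes, and once a chunk
// reaches HUGE_PAGE (2 MiB) every later chunk stays at 2 MiB, unless a single
// oversized allocation forces an even larger chunk.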
impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to null so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}
trait IterExt<T> {
    fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T];
}

impl<I, T> IterExt<T> for I
where
    I: IntoIterator<Item = T>,
{
    default fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
        let vec: SmallVec<[_; 8]> = self.into_iter().collect();
        vec.alloc_from_iter(arena)
    }
}
impl<T, const N: usize> IterExt<T> for std::array::IntoIter<T, N> {
    fn alloc_from_iter(self, arena: &TypedArena<T>) -> &mut [T] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it.
        unsafe {
            let start_ptr = arena.alloc_raw_slice(len);
            self.as_slice().as_ptr().copy_to_nonoverlapping(start_ptr, len);
            mem::forget(self);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}
impl<T> IterExt<T> for Vec<T> {
    fn alloc_from_iter(mut self, arena: &TypedArena<T>) -> &mut [T] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it.
        unsafe {
            let start_ptr = arena.alloc_raw_slice(len);
            self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            self.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}
impl<A: smallvec::Array> IterExt<A::Item> for SmallVec<A> {
    fn alloc_from_iter(mut self, arena: &TypedArena<A::Item>) -> &mut [A::Item] {
        let len = self.len();
        if len == 0 {
            return &mut [];
        }
        // Move the content to the arena by copying and then forgetting it.
        unsafe {
            let start_ptr = arena.alloc_raw_slice(len);
            self.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            self.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }
}
impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set((self.ptr.get() as *mut u8).wrapping_offset(1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }
    fn can_allocate(&self, additional: usize) -> bool {
        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `additional` objects.
    fn ensure_capacity(&self, additional: usize) {
        if !self.can_allocate(additional) {
            self.grow(additional);
            debug_assert!(self.can_allocate(additional));
        }
    }

    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }
    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        iter.alloc_from_iter(self)
    }
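
    // An illustrative (not compiled) usage sketch of the two methods above:
    //
    //     let arena: TypedArena<u32> = TypedArena::default();
    //     let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    //     let ys: &mut [u32] = arena.alloc_from_iter(0..10);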
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // If a type is `!needs_drop`, we don't need to keep track of how many elements
                // the chunk stores - the field will be ignored anyway.
                if mem::needs_drop::<T>() {
                    let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                    last_chunk.entries = used_bytes / mem::size_of::<T>();
                }

                // If the previous chunk's len is less than HUGE_PAGE bytes, then this
                // chunk will be at least double the previous chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is a ZST, the code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }
    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is a ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}
unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // Box handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}
unsafe impl<T: Send> Send for TypedArena<T> {}
pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds from the end of the chunk towards the start.
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}
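
// An illustrative (not compiled) usage sketch: a `DroplessArena` can hold
// values of many different types at once, as long as none of them need `Drop`:
//
//     let arena = DroplessArena::default();
//     let n: &mut u32 = arena.alloc(1u32);
//     let bytes: &mut [u8] = arena.alloc_slice(b"bytes");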
unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    fn default() -> DroplessArena {
        DroplessArena {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}
impl DroplessArena {
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's len is less than HUGE_PAGE bytes, then this
                // chunk will be at least double the previous chunk's size.
                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
                new_cap *= 2;
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.start.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
    /// Allocates a byte slice with specified layout from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get() as usize;
        let end = self.end.get() as usize;

        let align = layout.align();
        let bytes = layout.size();

        // Bump downwards: subtract the size from `end`, then round the result
        // down to `align` (valid because `align` is a power of two).
        let new_end = end.checked_sub(bytes)? & !(align - 1);
        if start <= new_end {
            let new_end = new_end as *mut u8;
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }
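
    // Worked example of the rounding above (illustrative numbers): with
    // `end = 0x1000`, `bytes = 20` and `align = 8`, `0x1000 - 20 = 0xFEC` and
    // `0xFEC & !7 = 0xFE8`, i.e. an 8-aligned address 24 bytes below `end`.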
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout.size());
        }
    }
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());
        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;
        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }
    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self, mut iter: I, len: usize, mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slice iterators.
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`.
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();
        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here.
                let len = min;
                if len == 0 {
                    return &mut [];
                }
                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => cold_path(move || -> &mut [T] {
                let mut vec: SmallVec<[_; 8]> = iter.collect();
                if vec.is_empty() {
                    return &mut [];
                }
                // Move the content to the arena by copying it and then forgetting
                // the content of the SmallVec.
                unsafe {
                    let len = vec.len();
                    let start_ptr =
                        self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                    vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                    vec.set_len(0);
                    slice::from_raw_parts_mut(start_ptr, len)
                }
            }),
        }
    }
}
/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}
/// An arena which can be used to allocate any type.
///
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a `PhantomData<T>`
/// alongside this arena for each type `T` you allocate.
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}
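
// An illustrative (not compiled) sketch of the contract described above, with
// a hypothetical `MyType`: a wrapper owns the `PhantomData<MyType>` so that
// the unsafe `alloc` call is confined to one place.
//
//     struct MyTypeArena {
//         arena: DropArena,
//         _marker: PhantomData<MyType>,
//     }
//
//     impl MyTypeArena {
//         fn alloc(&self, value: MyType) -> &mut MyType {
//             unsafe { self.arena.alloc(value) }
//         }
//     }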
impl DropArena {
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem = self.arena.alloc_raw(Layout::new::<T>()) as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self.arena.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them.
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec.
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before.
        for i in 0..len {
            destructors
                .push(DropType { drop_fn: drop_for_type::<T>, obj: start_ptr.add(i) as *mut u8 });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}
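
// For example (illustrative only): `arena_for_type!([][Ty])` expands to
// `$crate::TypedArena<Ty>`, while `arena_for_type!([few][Ty])` expands to
// `::std::marker::PhantomData<Ty>`. `which_arena_for_type!` mirrors this,
// returning `None` for `few` types so that `declare_arena!` below routes them
// to the `DropArena` instead of a dedicated `TypedArena`.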
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }

        pub trait ArenaAllocatable<'tcx, T = Self>: Sized {
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self];
        }
        impl<'tcx, T: Copy> ArenaAllocatable<'tcx, ()> for T {
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
                arena.dropless.alloc(self)
            }
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self] {
                arena.dropless.alloc_from_iter(iter)
            }
        }
        $(
            impl<$tcx> ArenaAllocatable<$tcx, $ty> for $ty {
                fn allocate_on<'a>(self, arena: &'a Arena<$tcx>) -> &'a mut Self {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc(self);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc(self)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc(self) },
                    }
                }
                fn allocate_from_iter<'a>(
                    arena: &'a Arena<$tcx>,
                    iter: impl ::std::iter::IntoIterator<Item = Self>,
                ) -> &'a mut [Self] {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc_from_iter(iter);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc_from_iter(iter)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc_from_iter(iter) },
                    }
                }
            }
        )*
        impl<'tcx> Arena<'tcx> {
            pub fn alloc<T: ArenaAllocatable<'tcx, U>, U>(&self, value: T) -> &mut T {
                value.allocate_on(self)
            }

            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, U>, U>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                T::allocate_from_iter(self, iter)
            }
        }
    }
}
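
// A hypothetical invocation sketch (the field names and types are made up;
// real invocations live in the compiler crates that define their own arenas):
//
//     declare_arena!([], [
//         [] layout: MyLayout<'tcx>,
//         [few] metadata: MyMetadata,
//     ], 'tcx);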