//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements several kinds of arena.
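//!
//! # Example
//!
//! A minimal usage sketch (the `rustc_arena` crate name is an assumption;
//! adjust to however this crate is imported):
//!
//! ```ignore (illustrative sketch, not a doctest)
//! use rustc_arena::TypedArena;
//!
//! let arena: TypedArena<Vec<u32>> = TypedArena::default();
//! // Allocation is a pointer bump; the arena owns the value.
//! let v = arena.alloc(vec![1, 2, 3]);
//! v.push(4);
//! // Every allocated `Vec` is dropped when `arena` goes out of scope.
//! ```
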
#![doc(
    html_root_url = "https://doc.rust-lang.org/nightly/",
    test(no_crate_inject, attr(deny(warnings)))
)]
#![feature(dropck_eyepatch)]
#![feature(new_uninit)]
#![feature(maybe_uninit_slice)]
#![cfg_attr(test, feature(test))]

use rustc_data_structures::cold_path;
use smallvec::SmallVec;

use std::alloc::Layout;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::marker::{PhantomData, Send};
use std::mem::{self, MaybeUninit};
use std::ptr;
use std::slice;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: Box<[MaybeUninit<T>]>,
    /// The number of valid entries in the chunk.
    entries: usize,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&mut self) -> *mut T {
        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&mut self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().add(self.storage.len())
            }
        }
    }
}

// The arenas start with PAGE-sized chunks, and then each new chunk is twice as
// big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
// we stop growing. This scales well, from arenas that are barely used up to
// arenas that are used for 100s of MiBs. Note also that the chosen sizes match
// the usual sizes of pages and huge pages on Linux.
const PAGE: usize = 4096;
const HUGE_PAGE: usize = 2 * 1024 * 1024;
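// Worked example of the growth policy above: a `TypedArena<u64>` (8-byte
// elements) starts with a chunk of PAGE / 8 = 512 elements, then doubles
// through 1024, 2048, ... elements per chunk until it reaches
// HUGE_PAGE / 8 = 262144 elements (2 MiB), after which every further chunk
// stays at 2 MiB.
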
impl<T> Default for TypedArena<T> {
    /// Creates a new `TypedArena`.
    fn default() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
}

impl<T> TypedArena<T> {
    /// Allocates an object in the `TypedArena`, returning a reference to it.
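    ///
    /// A minimal usage sketch (assumes this crate is in scope as
    /// `rustc_arena`; illustrative only, not run as a doctest):
    ///
    /// ```ignore (illustrative sketch)
    /// use rustc_arena::TypedArena;
    ///
    /// let arena: TypedArena<u32> = TypedArena::default();
    /// let x = arena.alloc(41);
    /// *x += 1;
    /// assert_eq!(*x, 42);
    /// ```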
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set((self.ptr.get() as *mut u8).wrapping_offset(1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    #[inline]
    fn can_allocate(&self, additional: usize) -> bool {
        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
        available_bytes >= additional_bytes
    }

    /// Ensures there's enough space in the current chunk to fit `additional` objects.
    #[inline]
    fn ensure_capacity(&self, additional: usize) {
        if !self.can_allocate(additional) {
            self.grow(additional);
            debug_assert!(self.can_allocate(additional));
        }
    }

    #[inline]
    unsafe fn alloc_raw_slice(&self, len: usize) -> *mut T {
        assert!(mem::size_of::<T>() != 0);
        assert!(len != 0);

        self.ensure_capacity(len);

        let start_ptr = self.ptr.get();
        self.ptr.set(start_ptr.add(len));
        start_ptr
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        unsafe {
            let len = slice.len();
            let start_ptr = self.alloc_raw_slice(len);
            slice.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    #[inline]
    pub fn alloc_from_iter<I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        assert!(mem::size_of::<T>() != 0);
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec
        unsafe {
            let len = vec.len();
            let start_ptr = self.alloc_raw_slice(len);
            vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
            vec.set_len(0);
            slice::from_raw_parts_mut(start_ptr, len)
        }
    }

    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            // We need the element size to convert chunk sizes (ranging from
            // PAGE to HUGE_PAGE bytes) to element counts.
            let elem_size = cmp::max(1, mem::size_of::<T>());
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                last_chunk.entries = used_bytes / mem::size_of::<T>();

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len();
                if new_cap < HUGE_PAGE / elem_size {
                    new_cap = new_cap.checked_mul(2).unwrap();
                }
            } else {
                new_cap = PAGE / elem_size;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<T>::new(new_cap);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
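    ///
    /// A minimal usage sketch (illustrative only, not run as a doctest):
    ///
    /// ```ignore (illustrative sketch)
    /// let mut arena: TypedArena<String> = TypedArena::default();
    /// arena.alloc(String::from("transient"));
    /// // Drops the `String` but keeps the largest chunk for reuse.
    /// arena.clear();
    /// ```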
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.last_mut() {
                self.clear_last_chunk(&mut last_chunk);
                let len = chunks_borrow.len();
                // If `T` is a ZST, the code below has no effect.
                for mut chunk in chunks_borrow.drain(..len - 1) {
                    chunk.destroy(chunk.entries);
                }
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is a ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    chunk.destroy(chunk.entries);
                }
            }
            // Box handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

pub struct DroplessArena {
    /// A pointer to the start of the free space.
    start: Cell<*mut u8>,

    /// A pointer to the end of free space.
    ///
    /// The allocation proceeds from the end of the chunk towards the start.
    /// When this pointer crosses the start pointer, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl Default for DroplessArena {
    #[inline]
    fn default() -> DroplessArena {
        DroplessArena {
            start: Cell::new(ptr::null_mut()),
            end: Cell::new(ptr::null_mut()),
            chunks: Default::default(),
        }
    }
}

impl DroplessArena {
    #[inline(never)]
    #[cold]
    fn grow(&self, additional: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let mut new_cap;
            if let Some(last_chunk) = chunks.last_mut() {
                // There is no need to update `last_chunk.entries` because that
                // field isn't used by `DroplessArena`.

                // If the previous chunk's len is less than HUGE_PAGE
                // bytes, then this chunk will be at least double the previous
                // chunk's size.
                new_cap = last_chunk.storage.len();
                if new_cap < HUGE_PAGE {
                    new_cap = new_cap.checked_mul(2).unwrap();
                }
            } else {
                new_cap = PAGE;
            }
            // Also ensure that this chunk can fit `additional`.
            new_cap = cmp::max(additional, new_cap);

            let mut chunk = TypedArenaChunk::<u8>::new(new_cap);
            self.start.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Allocates a byte slice with specified layout from the current memory
    /// chunk. Returns `None` if there is no free space left to satisfy the
    /// request.
    #[inline]
    fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
        let start = self.start.get() as usize;
        let end = self.end.get() as usize;

        let align = layout.align();
        let bytes = layout.size();

        let new_end = end.checked_sub(bytes)? & !(align - 1);
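        // Worked example (hypothetical addresses): with `end = 0x1008`,
        // `bytes = 8`, and `align = 16`, `checked_sub` yields 0x1000 and the
        // mask `& !15` leaves it at 0x1000. Allocating downwards lets one
        // subtraction and one mask handle both the size and the alignment of
        // the request.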
        if start <= new_end {
            let new_end = new_end as *mut u8;
            self.end.set(new_end);
            Some(new_end)
        } else {
            None
        }
    }

    #[inline]
    pub fn alloc_raw(&self, layout: Layout) -> *mut u8 {
        assert!(layout.size() != 0);
        loop {
            if let Some(a) = self.alloc_raw_without_grow(layout) {
                break a;
            }
            // No free space left. Allocate a new chunk to satisfy the request.
            // On failure the grow will panic or abort.
            self.grow(layout.size());
        }
    }

    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(Layout::for_value::<T>(&object)) as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
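    ///
    /// A minimal usage sketch (illustrative only, not run as a doctest):
    ///
    /// ```ignore (illustrative sketch)
    /// let arena = DroplessArena::default();
    /// let nums: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    /// nums[0] = 10;
    /// assert_eq!(nums, &[10, 2, 3]);
    /// ```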
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(!slice.is_empty());

        let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;

        unsafe {
            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
            slice::from_raw_parts_mut(mem, slice.len())
        }
    }

    #[inline]
    unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
        &self,
        mut iter: I,
        len: usize,
        mem: *mut T,
    ) -> &mut [T] {
        let mut i = 0;
        // Use a manual loop since LLVM manages to optimize it better for
        // slices of primitives
        loop {
            let value = iter.next();
            if i >= len || value.is_none() {
                // We only return as many items as the iterator gave us, even
                // though it was supposed to give us `len`
                return slice::from_raw_parts_mut(mem, i);
            }
            ptr::write(mem.add(i), value.unwrap());
            i += 1;
        }
    }

    #[inline]
    pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let iter = iter.into_iter();
        assert!(mem::size_of::<T>() != 0);
        assert!(!mem::needs_drop::<T>());

        let size_hint = iter.size_hint();

        match size_hint {
            (min, Some(max)) if min == max => {
                // We know the exact number of elements the iterator will produce here
                let len = min;

                if len == 0 {
                    return &mut [];
                }

                let mem = self.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;
                unsafe { self.write_from_iter(iter, len, mem) }
            }
            (_, _) => {
                cold_path(move || -> &mut [T] {
                    let mut vec: SmallVec<[_; 8]> = iter.collect();
                    if vec.is_empty() {
                        return &mut [];
                    }
                    // Move the content to the arena by copying it and then forgetting
                    // the content of the SmallVec
                    unsafe {
                        let len = vec.len();
                        let start_ptr =
                            self.alloc_raw(Layout::for_value::<[T]>(vec.as_slice())) as *mut T;
                        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                        vec.set_len(0);
                        slice::from_raw_parts_mut(start_ptr, len)
                    }
                })
            }
        }
    }
}

/// Calls the destructor for an object when dropped.
struct DropType {
    drop_fn: unsafe fn(*mut u8),
    obj: *mut u8,
}

unsafe fn drop_for_type<T>(to_drop: *mut u8) {
    std::ptr::drop_in_place(to_drop as *mut T)
}

impl Drop for DropType {
    fn drop(&mut self) {
        unsafe { (self.drop_fn)(self.obj) }
    }
}

/// An arena which can be used to allocate any type.
/// Allocating in this arena is unsafe since the type system
/// doesn't know which types it contains. In order to
/// allocate safely, you must store a `PhantomData<T>`
/// alongside this arena for each type `T` you allocate.
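///
/// A minimal sketch of the intended pattern (illustrative only, not run as a
/// doctest; the `PhantomData` field is what records the owned type):
///
/// ```ignore (illustrative sketch)
/// struct MyArena {
///     arena: DropArena,
///     _strings: PhantomData<String>,
/// }
///
/// let my_arena = MyArena { arena: DropArena::default(), _strings: PhantomData };
/// // SAFETY: `MyArena` records via `PhantomData` that it may own `String`s.
/// let s = unsafe { my_arena.arena.alloc(String::from("arena-owned")) };
/// assert_eq!(s, "arena-owned");
/// ```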
#[derive(Default)]
pub struct DropArena {
    /// A list of destructors to run when the arena drops.
    /// Ordered so `destructors` gets dropped before the arena
    /// since its destructor can reference memory in the arena.
    destructors: RefCell<Vec<DropType>>,
    arena: DroplessArena,
}

impl DropArena {
    #[inline]
    pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
        let mem = self.arena.alloc_raw(Layout::new::<T>()) as *mut T;
        // Write into uninitialized memory.
        ptr::write(mem, object);
        let result = &mut *mem;
        // Record the destructor after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before
        self.destructors
            .borrow_mut()
            .push(DropType { drop_fn: drop_for_type::<T>, obj: result as *mut T as *mut u8 });
        result
    }

    #[inline]
    pub unsafe fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
        let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
        if vec.is_empty() {
            return &mut [];
        }
        let len = vec.len();

        let start_ptr = self.arena.alloc_raw(Layout::array::<T>(len).unwrap()) as *mut T;

        let mut destructors = self.destructors.borrow_mut();
        // Reserve space for the destructors so we can't panic while adding them
        destructors.reserve(len);

        // Move the content to the arena by copying it and then forgetting
        // the content of the SmallVec
        vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
        mem::forget(vec.drain(..));

        // Record the destructors after doing the allocation as that may panic
        // and would cause `object`'s destructor to run twice if it was recorded before
        for i in 0..len {
            destructors.push(DropType {
                drop_fn: drop_for_type::<T>,
                obj: start_ptr.offset(i as isize) as *mut u8,
            });
        }

        slice::from_raw_parts_mut(start_ptr, len)
    }
}

#[macro_export]
macro_rules! arena_for_type {
    ([][$ty:ty]) => {
        $crate::TypedArena<$ty>
    };
    ([few $(, $attrs:ident)*][$ty:ty]) => {
        ::std::marker::PhantomData<$ty>
    };
    ([$ignore:ident $(, $attrs:ident)*]$args:tt) => {
        $crate::arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! which_arena_for_type {
    ([][$arena:expr]) => {
        ::std::option::Option::Some($arena)
    };
    ([few$(, $attrs:ident)*][$arena:expr]) => {
        ::std::option::Option::None
    };
    ([$ignore:ident$(, $attrs:ident)*]$args:tt) => {
        $crate::which_arena_for_type!([$($attrs),*]$args)
    };
}

#[macro_export]
macro_rules! declare_arena {
    ([], [$($a:tt $name:ident: $ty:ty,)*], $tcx:lifetime) => {
        #[derive(Default)]
        pub struct Arena<$tcx> {
            pub dropless: $crate::DroplessArena,
            drop: $crate::DropArena,
            $($name: $crate::arena_for_type!($a[$ty]),)*
        }

        pub trait ArenaAllocatable<'tcx, T = Self>: Sized {
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self;
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self];
        }

        impl<'tcx, T: Copy> ArenaAllocatable<'tcx, ()> for T {
            #[inline]
            fn allocate_on<'a>(self, arena: &'a Arena<'tcx>) -> &'a mut Self {
                arena.dropless.alloc(self)
            }
            #[inline]
            fn allocate_from_iter<'a>(
                arena: &'a Arena<'tcx>,
                iter: impl ::std::iter::IntoIterator<Item = Self>,
            ) -> &'a mut [Self] {
                arena.dropless.alloc_from_iter(iter)
            }
        }

        $(
            impl<$tcx> ArenaAllocatable<$tcx, $ty> for $ty {
                #[inline]
                fn allocate_on<'a>(self, arena: &'a Arena<$tcx>) -> &'a mut Self {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc(self);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc(self)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc(self) },
                    }
                }

                #[inline]
                fn allocate_from_iter<'a>(
                    arena: &'a Arena<$tcx>,
                    iter: impl ::std::iter::IntoIterator<Item = Self>,
                ) -> &'a mut [Self] {
                    if !::std::mem::needs_drop::<Self>() {
                        return arena.dropless.alloc_from_iter(iter);
                    }
                    match $crate::which_arena_for_type!($a[&arena.$name]) {
                        ::std::option::Option::<&$crate::TypedArena<Self>>::Some(ty_arena) => {
                            ty_arena.alloc_from_iter(iter)
                        }
                        ::std::option::Option::None => unsafe { arena.drop.alloc_from_iter(iter) },
                    }
                }
            }
        )*

        impl<'tcx> Arena<'tcx> {
            #[inline]
            pub fn alloc<T: ArenaAllocatable<'tcx, U>, U>(&self, value: T) -> &mut T {
                value.allocate_on(self)
            }

            #[inline]
            pub fn alloc_slice<T: ::std::marker::Copy>(&self, value: &[T]) -> &mut [T] {
                if value.is_empty() {
                    return &mut [];
                }
                self.dropless.alloc_slice(value)
            }

            pub fn alloc_from_iter<'a, T: ArenaAllocatable<'tcx, U>, U>(
                &'a self,
                iter: impl ::std::iter::IntoIterator<Item = T>,
            ) -> &'a mut [T] {
                T::allocate_from_iter(self, iter)
            }
        }
    }
}
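
// A hypothetical invocation sketch of `declare_arena!` (field names and types
// are made up for illustration; the real invocation lives in the compiler).
// `[]` routes a type to a dedicated `TypedArena` field, while `[few]` marks a
// type allocated too rarely to deserve one, sending it through `DropArena`:
//
//     declare_arena!([], [
//         [] layout: MyLayoutType,
//         [few] rare_data: MyRareType,
//     ], 'tcx);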