1 //! A minimal arena allocator inspired by `rustc_arena::DroplessArena`.
3 //! This is unfortunately a minimal re-implementation rather than a dependency
4 //! as it is difficult to depend on crates from within `proc_macro`, due to it
5 //! being built at the same time as `std`.
use std::cell::{Cell, RefCell};
use std::mem::MaybeUninit;
use std::ops::Range;
use std::{cmp, ptr, slice, str};
15 // The arenas start with PAGE-sized chunks, and then each new chunk is twice as
16 // big as its predecessor, up until we reach HUGE_PAGE-sized chunks, whereupon
17 // we stop growing. This scales well, from arenas that are barely used up to
18 // arenas that are used for 100s of MiBs. Note also that the chosen sizes match
19 // the usual sizes of pages and huge pages on Linux.
// One normal 4 KiB page: the size of the arena's first chunk.
const PAGE: usize = 4096;
// The usual Linux huge-page size (2 MiB): the cap on chunk growth.
const HUGE_PAGE: usize = 2 * 1024 * 1024;
/// A minimal arena allocator inspired by `rustc_arena::DroplessArena`.
///
/// This is unfortunately a complete re-implementation rather than a dependency
/// as it is difficult to depend on crates from within `proc_macro`, due to it
/// being built at the same time as `std`.
///
/// This arena doesn't have support for allocating anything other than byte
/// slices, as that is all that is necessary.
pub(crate) struct Arena {
    // Start (lowest address) of the free region of the current chunk.
    start: Cell<*mut MaybeUninit<u8>>,
    // One past the last free byte of the current chunk; allocations bump
    // `end` downwards towards `start`.
    end: Cell<*mut MaybeUninit<u8>>,
    // Every chunk allocated so far. Chunks are never freed while the arena
    // is alive, which is what keeps handed-out references valid.
    chunks: RefCell<Vec<Box<[MaybeUninit<u8>]>>>,
}
38 pub(crate) fn new() -> Self {
40 start: Cell::new(ptr::null_mut()),
41 end: Cell::new(ptr::null_mut()),
42 chunks: RefCell::new(Vec::new()),
46 /// Add a new chunk with at least `additional` free bytes.
49 fn grow(&self, additional: usize) {
50 let mut chunks = self.chunks.borrow_mut();
52 if let Some(last_chunk) = chunks.last_mut() {
53 // If the previous chunk's len is less than HUGE_PAGE
54 // bytes, then this chunk will be least double the previous
56 new_cap = last_chunk.len().min(HUGE_PAGE / 2);
61 // Also ensure that this chunk can fit `additional`.
62 new_cap = cmp::max(additional, new_cap);
64 let mut chunk = Box::new_uninit_slice(new_cap);
65 let Range { start, end } = chunk.as_mut_ptr_range();
66 self.start.set(start);
71 /// Allocates a byte slice with specified size from the current memory
72 /// chunk. Returns `None` if there is no free space left to satisfy the
74 fn alloc_raw_without_grow(&self, bytes: usize) -> Option<&mut [MaybeUninit<u8>]> {
75 let start = self.start.get().addr();
76 let old_end = self.end.get();
77 let end = old_end.addr();
79 let new_end = end.checked_sub(bytes)?;
81 let new_end = old_end.with_addr(new_end);
82 self.end.set(new_end);
83 // SAFETY: `bytes` bytes starting at `new_end` were just reserved.
84 Some(unsafe { slice::from_raw_parts_mut(new_end, bytes) })
90 fn alloc_raw(&self, bytes: usize) -> &mut [MaybeUninit<u8>] {
96 if let Some(a) = self.alloc_raw_without_grow(bytes) {
99 // No free space left. Allocate a new chunk to satisfy the request.
100 // On failure the grow will panic or abort.
105 pub(crate) fn alloc_str<'a>(&'a self, string: &str) -> &'a mut str {
106 let alloc = self.alloc_raw(string.len());
107 let bytes = MaybeUninit::write_slice(alloc, string.as_bytes());
109 // SAFETY: we convert from `&str` to `&[u8]`, clone it into the arena,
110 // and immediately convert the clone back to `&str`.
111 unsafe { str::from_utf8_unchecked_mut(bytes) }