// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
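//!
//! A minimal usage sketch of both arenas (illustrative only; since this
//! crate is unstable, a `rustc_private` feature gate is assumed on the
//! user's side):
//!
//! ```
//! #![feature(rustc_private)]
//!
//! extern crate arena;
//!
//! use arena::{Arena, TypedArena};
//!
//! fn main() {
//!     // A typed arena holds values of a single type, `u32` here.
//!     let typed = TypedArena::new();
//!     let x: &mut u32 = typed.alloc(7);
//!     *x += 1;
//!     assert_eq!(*x, 8);
//!
//!     // The untyped arena takes an initializer closure and can mix types.
//!     let untyped = Arena::new();
//!     let s: &mut String = untyped.alloc(|| "hello".to_string());
//!     s.push_str(" world");
//!     assert_eq!(*s, "hello world");
//! }
//! ```
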
#![crate_name = "arena"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(drop_in_place)]
#![feature(heap_api)]
#![feature(raw)]
#![feature(staged_api)]
#![feature(dropck_parametricity)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;

use alloc::heap;
use alloc::raw_vec::RawVec;

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
struct Chunk {
    data: RawVec<u8>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn new(size: usize, is_copy: bool) -> Chunk {
        Chunk {
            data: RawVec::with_capacity(size),
            fill: Cell::new(0),
            is_copy: Cell::new(is_copy),
        }
    }

    fn capacity(&self) -> usize {
        self.data.cap()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user-provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
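///
/// # Examples
///
/// A minimal sketch of intended use; the closure passed to `alloc`
/// constructs the value in place (the feature gate is assumed, since this
/// crate is unstable):
///
/// ```
/// #![feature(rustc_private)]
///
/// extern crate arena;
///
/// let arena = arena::Arena::new();
/// // A type with a destructor goes to the noncopy chunks...
/// let v: &mut Vec<u8> = arena.alloc(|| vec![1, 2, 3]);
/// v.push(4);
/// // ...while plain-old-data goes to the copy chunks.
/// let n: &mut u64 = arena.alloc(|| 42);
/// assert_eq!(*n + v.len() as u64, 46);
/// ```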
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // microoptimization, to avoid needing to case on the list to access
    // the head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: PhantomData<*mut &'longer_than_self ()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(Chunk::new(initial_size, false)),
            copy_head: RefCell::new(Chunk::new(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: PhantomData,
        }
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
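
// For example (illustrative values only): round_up(13, 8) == 16 and
// round_up(16, 8) == 16; the result is the smallest multiple of `align`
// at or above `base`, assuming `align` is a power of two.
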
// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data = buf.offset(idx as isize) as *const usize;
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}
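
// For illustration, the layout destroy_chunk walks (as written by
// alloc_noncopy_inner; widths not to scale) is:
//
//   [tydesc | done bit][pad][object][pad][tydesc | done bit][pad][object]...
//
// Each iteration decodes one tydesc word, runs the drop glue for the object
// it describes if the done bit is set, then rounds up past the object to
// the next tydesc position.
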
// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}

#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
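
// For example, for a (hypothetical) tydesc at address 0x1000:
// bitpack_tydesc_ptr(tydesc, true) yields 0x1001, and
// un_bitpack_tydesc_ptr(0x1001) yields (0x1000, true). The scheme works
// because TyDesc is aligned, so the low bit of a genuine tydesc pointer is
// always zero and is free to carry the flag.
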
// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize,
}

trait AllTypes {
    fn dummy(&self) {}
}

impl<T: ?Sized> AllTypes for T {}

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}

impl<'longer_than_self> Arena<'longer_than_self> {
    #[inline]
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), true);
        let mut copy_head = self.copy_head.borrow_mut();
        let old_chunk = mem::replace(&mut *copy_head, new_chunk);
        self.chunks.borrow_mut().push(old_chunk);

        self.alloc_copy_inner(n_bytes, align)
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);
        let chunk_size = self.chunk_size();

        let end = start + n_bytes;
        if end > chunk_size {
            if !self.copy_head.borrow_mut().data.reserve_in_place(start, n_bytes) {
                return self.alloc_copy_grow(n_bytes, align);
            }
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe { copy_head.as_ptr().offset(start as isize) }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T
        where F: FnOnce() -> T
    {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(), mem::align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            &mut *ptr
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
        let mut head = self.head.borrow_mut();
        let old_chunk = mem::replace(&mut *head, new_chunk);
        self.chunks.borrow_mut().push(old_chunk);

        self.alloc_noncopy_inner(n_bytes, align)
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
        // Be careful not to keep any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            (buf.offset(tydesc_start as isize),
             buf.offset(start as isize))
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T
        where F: FnOnce() -> T
    {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) = self.alloc_noncopy_inner(mem::size_of::<T>(), mem::align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, false);
            // Actually initialize it
            ptr::write(&mut (*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            &mut *ptr
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
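    ///
    /// # Examples
    ///
    /// A minimal sketch of intended use (the feature gate is assumed,
    /// since this crate is unstable):
    ///
    /// ```
    /// #![feature(rustc_private)]
    ///
    /// extern crate arena;
    ///
    /// let arena = arena::Arena::new();
    /// let v: &mut [i32; 3] = arena.alloc(|| [1, 2, 3]);
    /// v[0] = 10;
    /// assert_eq!(v[0] + v[1] + v[2], 15);
    /// ```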
    #[inline]
    pub fn alloc<T: 'longer_than_self, F>(&self, op: F) -> &mut T
        where F: FnOnce() -> T
    {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    use std::rc::Rc;

    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    use std::rc::Rc;

    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1, 2]);
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
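///
/// # Examples
///
/// A minimal sketch of intended use; every value lives until the arena
/// itself is dropped (the feature gate is assumed, since this crate is
/// unstable):
///
/// ```
/// #![feature(rustc_private)]
///
/// extern crate arena;
///
/// let arena = arena::TypedArena::new();
/// let x: &mut u32 = arena.alloc(7);
/// *x += 1;
/// assert_eq!(*x, 8);
/// ```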
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena segments.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for this arena segment.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for many objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        // Reserve at least one page.
        let elem_size = cmp::max(1, mem::size_of::<T>());
        TypedArena::with_capacity(PAGE / elem_size)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
            TypedArena {
                ptr: Cell::new(chunk.start()),
                end: Cell::new(chunk.end()),
                chunks: RefCell::new(vec![chunk]),
                _own: PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
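    ///
    /// # Examples
    ///
    /// A small sketch: `alloc` takes `&self`, so references handed out
    /// earlier remain usable while further objects are allocated:
    ///
    /// ```
    /// #![feature(rustc_private)]
    ///
    /// extern crate arena;
    ///
    /// let arena = arena::TypedArena::new();
    /// let mut squares = Vec::new();
    /// for i in 0..4 {
    ///     squares.push(&*arena.alloc(i * i));
    /// }
    /// assert_eq!(squares.iter().map(|r| **r).collect::<Vec<_>>(), [0, 1, 4, 9]);
    /// ```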
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = heap::EMPTY as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let prev_capacity = chunks.last().unwrap().storage.cap();
            let new_capacity = prev_capacity.checked_mul(2).unwrap();
            if chunks.last_mut().unwrap().storage.double_in_place() {
                self.end.set(chunks.last().unwrap().end());
            } else {
                let chunk = TypedArenaChunk::<T>::new(new_capacity);
                self.ptr.set(chunk.start());
                self.end.set(chunk.end());
                chunks.push(chunk);
            }
        }
    }
}

impl<T> Drop for TypedArena<T> {
    #[unsafe_destructor_blind_to_params]
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            let mut last_chunk = chunks_borrow.pop().unwrap();
            let start = last_chunk.start() as usize;
            let end = self.ptr.get() as usize;
            let diff = if mem::size_of::<T>() == 0 {
                // Avoid division by zero.
                end - start
            } else {
                (end - start) / mem::size_of::<T>()
            };

            // Pass that to the `destroy` method.
            last_chunk.destroy(diff);
            // Destroy this chunk.
            let _: RawVec<T> = mem::transmute(last_chunk);

            for chunk in chunks_borrow.iter_mut() {
                let cap = chunk.storage.cap();
                chunk.destroy(cap);
            }
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| {
            Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) }
        });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| arena.alloc(|| Point { x: 1, y: 2, z: 3 }))
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[test]
    pub fn test_zero_sized() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }
}