// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
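//!
//! A minimal usage sketch (this crate is unstable, so a nightly toolchain
//! and `feature(rustc_private)` are assumed):
//!
//! ```
//! # #![feature(rustc_private)]
//! # extern crate arena;
//! use arena::{Arena, TypedArena};
//!
//! // `TypedArena` hands out references that live as long as the arena.
//! let typed = TypedArena::new();
//! let x: &mut i32 = typed.alloc(5);
//! *x += 1;
//!
//! // `Arena` takes an initializer closure and can mix object types.
//! let arena = Arena::new();
//! let s: &mut String = arena.alloc(|| "hello".to_string());
//! s.push_str(" world");
//! assert_eq!(*x, 6);
//! ```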
// Do not remove on snapshot creation. Needed for bootstrap. (Issue #22364)
#![cfg_attr(stage0, feature(custom_attribute))]
#![crate_name = "arena"]
#![unstable(feature = "rustc_private")]
#![staged_api]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/nightly/")]
#![feature(alloc)]
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(ptr_as_ref)]
#![feature(raw)]
#![feature(staged_api)]
#![feature(unboxed_closures)]
#![cfg_attr(test, feature(test))]
extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker;
use std::mem;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};
// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<usize>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> usize {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}
/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
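///
/// An illustrative sketch of one entry in a non-`Copy` chunk, following the
/// layout described above (widths not to scale):
///
/// ```text
/// ... | tydesc ptr | pad | object | pad | tydesc ptr | pad | object | ...
///       ^ low bit is set once the object is fully initialized
/// ```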
pub struct Arena<'longer_than_self> {
    // The head is separated out from the list as an unbenchmarked
    // micro-optimization, to avoid needing to case on the list to access
    // the head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: marker::PhantomData<*mut &'longer_than_self ()>,
}
impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: marker::PhantomData,
        }
    }
}
fn chunk(size: usize, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0),
        is_copy: Cell::new(is_copy),
    }
}
impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}
#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
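// For example, round_up(13, 8) computes (13 + 7) & !7 == 16, while an
// already-aligned round_up(16, 8) stays 16. The mask trick assumes `align`
// is a power of two, which holds for all Rust alignments. (Illustrative
// values, not from the original source.)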
// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const usize = mem::transmute(buf.offset(idx as isize));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            // Only run the destructor if the initializer finished.
            ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}
// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}

#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
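// Round-trip sketch (illustrative addresses, not from the original source):
// packing a tydesc at 0x1000 with is_done == true stores 0x1001;
// un_bitpack_tydesc_ptr masks the low bit back off and returns
// (0x1000 as *const TyDesc, true). This works because TyDesc is at least
// 2-byte aligned, so a real tydesc pointer always has a zero low bit.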
// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize,
}

trait AllTypes { fn dummy(&self) { } }

impl<T: ?Sized> AllTypes for T { }
unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(1 as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    // The first words of a vtable are (drop_glue, size, align), which is
    // exactly the layout of `TyDesc` above.
    obj.vtable as *const TyDesc
}
impl<'longer_than_self> Arena<'longer_than_self> {
    fn chunk_size(&self) -> usize {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: usize, align: usize) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), true);

        return self.alloc_copy_inner(n_bytes, align);
    }
    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as isize)
        }
    }
    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            &mut *ptr
        }
    }
    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: usize,
                          align: usize) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1).next_power_of_two(), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }
    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize,
                           align: usize) -> (*const u8, *const u8) {
        // Be careful to not maintain any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as isize), buf.offset(start as isize));
        }
    }
    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut (*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            &mut *ptr
        }
    }
    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
    #[inline]
    pub fn alloc<T: 'longer_than_self, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}
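// Dispatch sketch for `alloc` above (illustrative, not from the original
// source): `needs_drop::<u64>()` is false, so `arena.alloc(|| 42u64)` takes
// the `alloc_copy` fast path with no per-object header, while
// `arena.alloc(|| "hi".to_string())` has drop glue and goes through
// `alloc_noncopy`, paying for a tydesc word per object.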
#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}
#[test]
#[should_panic]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in 0..10 {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1, 2] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<i32>, _>(|| {
        panic!();
    });
}
/// A faster arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: marker::PhantomData<T>,
}
struct TypedArenaChunk<T> {
    marker: marker::PhantomData<T>,

    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: usize,

    // Objects follow here, suitably aligned.
}
fn calculate_size<T>(capacity: usize) -> usize {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::min_align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}
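// Worked example (assuming a typical 64-bit layout; illustrative only): the
// header of `TypedArenaChunk<u64>` is 16 bytes (`next` pointer + `capacity`,
// the marker is zero-sized), already 8-byte aligned, so a capacity-8 chunk
// asks the allocator for 16 + 8 * 8 = 80 bytes.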
impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
                        as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }
    /// Destroys this arena chunk, running the drop glue for the first `len`
    /// elements when `T` needs it, then freeing the chunk itself and
    /// recursing into the next chunk in the list.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in 0..len {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as isize)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        let self_ptr: *mut TypedArenaChunk<T> = self;
        deallocate(self_ptr as *mut u8, size,
                   mem::min_align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }
    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as usize,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as isize)
        }
    }
}
impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given
    /// number of objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
                _own: marker::PhantomData,
            }
        }
    }
    /// Allocates an object in the `TypedArena`, returning a reference to it.
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }
    /// Grows the arena by linking in a new chunk with twice the capacity
    /// of the previous one.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}
impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let start = self.first.borrow().as_ref().unwrap().start() as usize;
            let end = self.ptr.get() as usize;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}
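// Fast-path sketch (illustrative, not from the original source): on a fresh
// `TypedArena::<u32>::new()`, each `alloc` writes at `ptr` and bumps it by
// one element (4 bytes); the ninth allocation finds `ptr == end` of the
// 8-element first chunk and calls `grow`, which links in a 16-element chunk.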
#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }
    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }

            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }
    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }
    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point { x: 1, y: 2, z: 3 })
        })
    }
    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Point { x: 1, y: 2, z: 3 };
        })
    }
    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Point { x: 1, y: 2, z: 3 })
        })
    }
    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            });
        }
    }
    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            })
        })
    }
    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = box Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            };
        })
    }
    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            })
        })
    }
}