// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within them all at
//! once, when the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation: just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
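//!
//! A minimal usage sketch of both arenas (illustrative only; the APIs are
//! the ones defined in this file):
//!
//! ```rust
//! use arena::{Arena, TypedArena};
//!
//! let typed = TypedArena::new();
//! let a: &mut uint = typed.alloc(5u);        // allocate by value
//!
//! let untyped = Arena::new();
//! let b: &mut uint = untyped.alloc(|| 7u);   // allocate via initializer
//! assert_eq!(*a + *b, 12);
//! ```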

#![crate_name = "arena"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "http://www.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/nightly/")]

#![feature(unsafe_destructor)]
#![feature(unboxed_closures)]
#![allow(missing_docs)]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics::{TyDesc, get_tydesc};
use std::intrinsics;
use std::mem;
use std::num::{Int, UnsignedInt};
use std::ptr;
use std::rc::Rc;
use std::rt::heap::{allocate, deallocate};

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[derive(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<uint>,
    is_copy: Cell<bool>,
}

impl Chunk {
    fn capacity(&self) -> uint {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `Vec<u8>` as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user-provided
/// initializer function. It is important not to run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low-order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
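///
/// Illustrative layout of one entry in a non-`Copy` chunk (widths depend on
/// the platform and on `T`):
/// `... | tydesc ptr (low bit: initialized?) | pad to T's align | object T | ...`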
pub struct Arena {
    // The head is separated out from the list as an unbenchmarked
    // micro-optimization, to avoid needing to match on the list to access
    // the head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
}

impl Arena {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena {
        Arena::new_with_size(32u)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: uint) -> Arena {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
        }
    }
}

fn chunk(size: uint, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0u),
        is_copy: Cell::new(is_copy),
    }
}

#[unsafe_destructor]
impl Drop for Arena {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

#[inline]
fn round_up(base: uint, align: uint) -> uint {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}
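
// Worked example of `round_up` above (`align` must be a power of two for the
// mask to work): round_up(13, 8) == (13 + 7) & !7 == 20 & !7 == 16, and
// round_up(16, 8) == 23 & !7 == 16.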

// Walk down a chunk, running the destructors for any objects stored
// in it.
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const uint = mem::transmute(buf.offset(idx as int));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as int) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> uint {
    p as uint | (is_done as uint)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: uint) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
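
// Round trip, for illustration: tydesc pointers are at least word-aligned, so
// the low bit is always free. If p == 0x1000, then
// bitpack_tydesc_ptr(p, true) == 0x1001, and
// un_bitpack_tydesc_ptr(0x1001) == (0x1000 as *const TyDesc, true).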

impl Arena {
    fn chunk_size(&self) -> uint {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk((new_min_chunk_size + 1u).next_power_of_two(), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as int)
        }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            &mut *ptr
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: uint,
                          align: uint) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk((new_min_chunk_size + 1u).next_power_of_two(), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: uint,
                           align: uint) -> (*const u8, *const u8) {
        // Be careful not to keep any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as int), buf.offset(start as int));
        }
    }

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut uint;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut (*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            &mut *ptr
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
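    ///
    /// # Example
    ///
    /// A minimal sketch of the closure-based interface:
    ///
    /// ```
    /// # use arena::Arena;
    /// let arena = Arena::new();
    /// let val = arena.alloc(|| 10u);
    /// assert_eq!(*val, 10);
    /// ```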
    #[inline]
    pub fn alloc<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in range(0u, 10) {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
fn test_arena_alloc_nested() {
    struct Inner { value: uint }
    struct Outer<'a> { inner: &'a Inner }

    let arena = Arena::new();

    let result = arena.alloc(|| Outer {
        inner: arena.alloc(|| Inner { value: 10 })
    });

    assert_eq!(result.inner.value, 10);
}

#[test]
#[should_fail]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in range(0u, 10) {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1u8, 2u8] });
    }
    // Now, panic while allocating
    arena.alloc::<Rc<int>, _>(|| {
        panic!();
    });
}

/// A faster arena that can hold objects of only one type.
///
/// Safety note: Modifying objects in the arena that have already had their
/// `drop` destructors run can cause leaks, because the destructor will not
/// run again for these objects.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<*mut TypedArenaChunk<T>>,
}

struct TypedArenaChunk<T> {
    /// Pointer to the next arena segment.
    next: *mut TypedArenaChunk<T>,

    /// The number of elements that this chunk can hold.
    capacity: uint,

    // Objects follow here, suitably aligned.
}

fn calculate_size<T>(capacity: uint) -> uint {
    let mut size = mem::size_of::<TypedArenaChunk<T>>();
    size = round_up(size, mem::min_align_of::<T>());
    let elem_size = mem::size_of::<T>();
    let elems_size = elem_size.checked_mul(capacity).unwrap();
    size = size.checked_add(elems_size).unwrap();
    size
}
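
// Worked example of `calculate_size` above (assuming a 64-bit target, where
// the two-word chunk header is 16 bytes): for T = u64 and capacity = 8, the
// header rounds up to 16 (already a multiple of u64's alignment) and the
// elements add 8 * 8 = 64 bytes, so the function returns 80.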

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: uint)
                  -> *mut TypedArenaChunk<T> {
        let size = calculate_size::<T>(capacity);
        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
                        as *mut TypedArenaChunk<T>;
        if chunk.is_null() { alloc::oom() }
        (*chunk).next = next;
        (*chunk).capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk: runs the destructors of the first `len`
    /// objects if `T` has drop glue, frees this chunk's memory, and then
    /// recursively destroys the next chunk in the list.
    #[inline]
    unsafe fn destroy(&mut self, len: uint) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in range(0, len) {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as int)
            }
        }

        // Destroy the next chunk.
        let next = self.next;
        let size = calculate_size::<T>(self.capacity);
        deallocate(self as *mut TypedArenaChunk<T> as *mut u8, size,
                   mem::min_align_of::<TypedArenaChunk<T>>());
        if !next.is_null() {
            let capacity = (*next).capacity;
            (*next).destroy(capacity);
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as uint,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(self.capacity).unwrap();
            self.start().offset(size as int)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for eight objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number
    /// of objects.
    #[inline]
    pub fn with_capacity(capacity: uint) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(ptr::null_mut(), capacity);
            TypedArena {
                ptr: Cell::new((*chunk).start() as *const T),
                end: Cell::new((*chunk).end() as *const T),
                first: RefCell::new(chunk),
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
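    ///
    /// # Example
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// # use arena::TypedArena;
    /// let arena = TypedArena::new();
    /// let val = arena.alloc(3u);
    /// assert_eq!(*val, 3);
    /// ```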
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &mut T = unsafe {
            let ptr: &mut T = mem::transmute(self.ptr.clone());
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    /// Grows the arena.
    #[inline(never)]
    fn grow(&self) {
        unsafe {
            let chunk = *self.first.borrow_mut();
            let new_capacity = (*chunk).capacity.checked_mul(2).unwrap();
            let chunk = TypedArenaChunk::<T>::new(chunk, new_capacity);
            self.ptr.set((*chunk).start() as *const T);
            self.end.set((*chunk).end() as *const T);
            *self.first.borrow_mut() = chunk
        }
    }
}

#[unsafe_destructor]
impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let start = self.first.borrow().as_ref().unwrap().start() as uint;
            let end = self.ptr.get() as uint;
            let diff = (end - start) / mem::size_of::<T>();

            // Pass that to the `destroy` method.
            (**self.first.borrow_mut()).destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    #[allow(dead_code)]
    struct Point {
        x: int,
        y: int,
        z: int,
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in range(0u, 100000) {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point { x: 1, y: 2, z: 3 })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            box Point { x: 1, y: 2, z: 3 }
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Point { x: 1, y: 2, z: 3 })
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<int>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in range(0u, 100000) {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            box Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            }
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!(1, 2, 3, 4, 5),
            })
        })
    }
}