// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within them, all
//! at once, when the arena itself is destroyed. They do not support
//! deallocation of individual objects while the arena itself is still alive.
//! The benefit of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: TypedArena, which is a simpler
//! arena that can only hold objects of a single type, and Arena, which is a
//! more complex, slower arena that can hold objects of any type.
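//!
//! A quick usage sketch (illustrative example added for orientation; it uses
//! only the public `alloc` entry points defined below):
//!
//! ```
//! use arena::{Arena, TypedArena};
//!
//! // The reflection-based arena can hold objects of any type.
//! let arena = Arena::new();
//! let x: &uint = arena.alloc(|| 5u);
//!
//! // The typed arena is faster, but holds objects of a single type.
//! let typed: TypedArena<uint> = TypedArena::new();
//! let y: &uint = typed.alloc(7u);
//!
//! assert_eq!(*x + *y, 12);
//! ```
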
#![crate_id = "arena#0.11.0-pre"]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![license = "MIT/ASL2"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "http://www.rust-lang.org/favicon.ico",
       html_root_url = "http://doc.rust-lang.org/")]

#![feature(unsafe_destructor)]
#![allow(missing_doc)]

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics::{TyDesc, get_tydesc};
use std::intrinsics;
use std::mem;
use std::num;
use std::ptr;
use std::rc::Rc;
use std::rt::heap::allocate;

// The way arena uses arrays is really deeply awful. The arrays are
// allocated, and have capacities reserved, but the fill for the array
// will always stay at 0.
#[deriving(Clone, PartialEq)]
struct Chunk {
    data: Rc<RefCell<Vec<u8>>>,
    fill: Cell<uint>,
    is_copy: Cell<bool>,
}
impl Chunk {
    fn capacity(&self) -> uint {
        self.data.borrow().capacity()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.borrow().as_ptr()
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses Vec<u8> as a backing store to allocate objects from. For
/// each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object. (Potentially with alignment padding after each
/// element.) When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle failures while running the user-provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// alloc() can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low-order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in
/// different chunks than objects without destructors. This reduces
/// overhead when initializing plain-old-data and means we don't need
/// to waste time running the destructors of POD.
pub struct Arena {
    // The head is separated out from the list as an unbenchmarked
    // microoptimization, to avoid needing to match on the list to access
    // the head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
}

impl Arena {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena {
        Arena::new_with_size(32u)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: uint) -> Arena {
        Arena {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
        }
    }
}

fn chunk(size: uint, is_copy: bool) -> Chunk {
    Chunk {
        data: Rc::new(RefCell::new(Vec::with_capacity(size))),
        fill: Cell::new(0u),
        is_copy: Cell::new(is_copy),
    }
}

#[unsafe_destructor]
impl Drop for Arena {
    fn drop(&mut self) {
        unsafe {
            destroy_chunk(&*self.head.borrow());
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    destroy_chunk(chunk);
                }
            }
        }
    }
}

#[inline]
fn round_up(base: uint, align: uint) -> uint {
    (base.checked_add(&(align - 1))).unwrap() & !(align - 1)
}
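
// A couple of concrete cases, as a sanity check (an illustrative test added
// here; it is not part of the original suite). `align` must be a power of
// two: already-aligned values are left alone, everything else is bumped up
// to the next multiple of `align`.
#[test]
fn test_round_up() {
    assert_eq!(round_up(0u, 8u), 0u);
    assert_eq!(round_up(13u, 8u), 16u);
    assert_eq!(round_up(16u, 8u), 16u);
}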

// Walk down a chunk, running the destructors for any objects stored
// in it.
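// Each entry in a noncopy chunk is laid out as the bitpacked tydesc word,
// padding up to the object's alignment, the object itself, and padding up
// to the alignment of the next tydesc word (sketch added for clarity; see
// alloc_noncopy_inner below):
//
//     [tydesc|done][pad][object][pad][tydesc|done][pad][object]...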
unsafe fn destroy_chunk(chunk: &Chunk) {
    let mut idx = 0;
    let buf = chunk.as_ptr();
    let fill = chunk.fill.get();

    while idx < fill {
        let tydesc_data: *const uint = mem::transmute(buf.offset(idx as int));
        let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
        let (size, align) = ((*tydesc).size, (*tydesc).align);

        let after_tydesc = idx + mem::size_of::<*const TyDesc>();

        let start = round_up(after_tydesc, align);

        //debug!("freeing object: idx = {}, size = {}, align = {}, done = {}",
        //       start, size, align, is_done);
        if is_done {
            ((*tydesc).drop_glue)(buf.offset(start as int) as *const i8);
        }

        // Find where the next tydesc lives
        idx = round_up(start + size, mem::align_of::<*const TyDesc>());
    }
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a failure occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> uint {
    p as uint | (is_done as uint)
}
#[inline]
fn un_bitpack_tydesc_ptr(p: uint) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}
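
// A round-trip sketch (illustrative test added here, not from the original
// suite): TyDesc values are at least word-aligned, so the low bit of the
// pointer is always free to carry the "fully initialized" flag.
#[test]
fn test_bitpack_roundtrip() {
    unsafe {
        let tydesc = get_tydesc::<uint>();
        let (unpacked, is_done) = un_bitpack_tydesc_ptr(bitpack_tydesc_ptr(tydesc, true));
        assert_eq!(unpacked as uint, tydesc as uint);
        assert!(is_done);
    }
}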

impl Arena {
    fn chunk_size(&self) -> uint {
        self.copy_head.borrow().capacity()
    }

    // Functions for the POD part of the arena
    fn alloc_copy_grow(&self, n_bytes: uint, align: uint) -> *const u8 {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.copy_head.borrow().clone());

        *self.copy_head.borrow_mut() =
            chunk(num::next_power_of_two(new_min_chunk_size + 1u), true);

        return self.alloc_copy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: uint, align: uint) -> *const u8 {
        let start = round_up(self.copy_head.borrow().fill.get(), align);

        let end = start + n_bytes;
        if end > self.chunk_size() {
            return self.alloc_copy_grow(n_bytes, align);
        }

        let copy_head = self.copy_head.borrow();
        copy_head.fill.set(end);

        unsafe {
            copy_head.as_ptr().offset(start as int)
        }
    }

    #[inline]
    fn alloc_copy<'a, T>(&'a self, op: || -> T) -> &'a T {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
                                            mem::min_align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            return &*ptr;
        }
    }

    // Functions for the non-POD part of the arena
    fn alloc_noncopy_grow(&self, n_bytes: uint,
                          align: uint) -> (*const u8, *const u8) {
        // Allocate a new chunk.
        let new_min_chunk_size = cmp::max(n_bytes, self.chunk_size());
        self.chunks.borrow_mut().push(self.head.borrow().clone());

        *self.head.borrow_mut() =
            chunk(num::next_power_of_two(new_min_chunk_size + 1u), false);

        return self.alloc_noncopy_inner(n_bytes, align);
    }

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: uint,
                           align: uint) -> (*const u8, *const u8) {
        // Be careful to not maintain any `head` borrows active, because
        // `alloc_noncopy_grow` borrows it mutably.
        let (start, end, tydesc_start, head_capacity) = {
            let head = self.head.borrow();
            let fill = head.fill.get();

            let tydesc_start = fill;
            let after_tydesc = fill + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);
            let end = start + n_bytes;

            (start, end, tydesc_start, head.capacity())
        };

        if end > head_capacity {
            return self.alloc_noncopy_grow(n_bytes, align);
        }

        let head = self.head.borrow();
        head.fill.set(round_up(end, mem::align_of::<*const TyDesc>()));

        unsafe {
            let buf = head.as_ptr();
            return (buf.offset(tydesc_start as int), buf.offset(start as int));
        }
    }

    #[inline]
    fn alloc_noncopy<'a, T>(&'a self, op: || -> T) -> &'a T {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) =
                self.alloc_noncopy_inner(mem::size_of::<T>(),
                                         mem::min_align_of::<T>());
            let ty_ptr = ty_ptr as *mut uint;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = mem::transmute(tydesc);
            // Actually initialize it
            ptr::write(&mut (*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            return &*ptr;
        }
    }

    /// Allocate a new item in the arena, using `op` to initialize the value
    /// and returning a reference to it.
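    ///
    /// A minimal example (illustrative sketch of calling this method):
    ///
    /// ```
    /// use arena::Arena;
    ///
    /// let arena = Arena::new();
    /// let five = arena.alloc(|| 5u);
    /// assert_eq!(*five, 5);
    /// ```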
    #[inline]
    pub fn alloc<'a, T>(&'a self, op: || -> T) -> &'a T {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }
}

#[test]
fn test_arena_destructors() {
    let arena = Arena::new();
    for i in range(0u, 10) {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| Rc::new(i));
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| [0u8, 1u8, 2u8]);
    }
}

#[test]
fn test_arena_alloc_nested() {
    struct Inner { value: uint }
    struct Outer<'a> { inner: &'a Inner }

    let arena = Arena::new();

    let result = arena.alloc(|| Outer {
        inner: arena.alloc(|| Inner { value: 10 })
    });

    assert_eq!(result.inner.value, 10);
}

#[test]
#[should_fail]
fn test_arena_destructors_fail() {
    let arena = Arena::new();
    // Put some stuff in the arena.
    for i in range(0u, 10) {
        // Arena allocate something with drop glue to make sure it
        // doesn't leak.
        arena.alloc(|| { Rc::new(i) });
        // Allocate something with funny size and alignment, to keep
        // things interesting.
        arena.alloc(|| { [0u8, 1u8, 2u8] });
    }
    // Now, fail while allocating
    arena.alloc::<Rc<int>>(|| {
        // Now fail.
        fail!();
    });
}

/// A faster arena that can hold objects of only one type.
///
/// Safety note: Modifying objects in the arena that have already had their
/// `drop` destructors run can cause leaks, because the destructor will not
/// run again for these objects.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*const T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*const T>,

    /// A pointer to the first arena segment.
    first: RefCell<TypedArenaChunkRef<T>>,
}
type TypedArenaChunkRef<T> = Option<Box<TypedArenaChunk<T>>>;

struct TypedArenaChunk<T> {
    /// Pointer to the next arena segment.
    next: TypedArenaChunkRef<T>,

    /// The number of elements that this chunk can hold.
    capacity: uint,

    // Objects follow here, suitably aligned.
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    fn new(next: Option<Box<TypedArenaChunk<T>>>, capacity: uint)
           -> Box<TypedArenaChunk<T>> {
        let mut size = mem::size_of::<TypedArenaChunk<T>>();
        size = round_up(size, mem::min_align_of::<T>());
        let elem_size = mem::size_of::<T>();
        let elems_size = elem_size.checked_mul(&capacity).unwrap();
        size = size.checked_add(&elems_size).unwrap();
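        // Worked example (comment added for clarity; exact sizes are
        // target-dependent): with a 16-byte header on a 64-bit target,
        // T = u64 and capacity = 8 give round_up(16, 8) + 8 * 8 = 80 bytes.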

        let mut chunk = unsafe {
            let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>());
            let mut chunk: Box<TypedArenaChunk<T>> = mem::transmute(chunk);
            ptr::write(&mut chunk.next, next);
            chunk
        };

        chunk.capacity = capacity;
        chunk
    }

    /// Destroys this arena chunk, running the destructors of the first `len`
    /// objects stored in it if `T` has drop glue, and then destroying any
    /// following chunks (which are assumed to be completely filled).
    #[inline]
    unsafe fn destroy(&mut self, len: uint) {
        // Destroy all the allocated objects.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            for _ in range(0, len) {
                ptr::read(start as *const T); // run the destructor on the pointer
                start = start.offset(mem::size_of::<T>() as int)
            }
        }

        // Destroy the next chunk.
        let next_opt = mem::replace(&mut self.next, None);
        match next_opt {
            None => {}
            Some(mut next) => {
                // We assume that the next chunk is completely filled.
                let capacity = next.capacity;
                next.destroy(capacity)
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *const u8 {
        let this: *const TypedArenaChunk<T> = self;
        unsafe {
            mem::transmute(round_up(this.offset(1) as uint,
                                    mem::min_align_of::<T>()))
        }
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *const u8 {
        unsafe {
            let size = mem::size_of::<T>().checked_mul(&self.capacity).unwrap();
            self.start().offset(size as int)
        }
    }
}

impl<T> TypedArena<T> {
    /// Creates a new TypedArena with preallocated space for 8 objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena::with_capacity(8)
    }

    /// Creates a new TypedArena with preallocated space for the given number
    /// of objects.
    #[inline]
    pub fn with_capacity(capacity: uint) -> TypedArena<T> {
        let chunk = TypedArenaChunk::<T>::new(None, capacity);
        TypedArena {
            ptr: Cell::new(chunk.start() as *const T),
            end: Cell::new(chunk.end() as *const T),
            first: RefCell::new(Some(chunk)),
        }
    }

    /// Allocates an object in the TypedArena, returning a reference to it.
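    ///
    /// A minimal example (illustrative sketch of calling this method):
    ///
    /// ```
    /// use arena::TypedArena;
    ///
    /// let arena: TypedArena<uint> = TypedArena::new();
    /// let ten = arena.alloc(10u);
    /// assert_eq!(*ten, 10);
    /// ```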
    #[inline]
    pub fn alloc<'a>(&'a self, object: T) -> &'a T {
        if self.ptr == self.end {
            self.grow()
        }

        let ptr: &'a T = unsafe {
            let ptr: &'a mut T = mem::transmute(self.ptr);
            ptr::write(ptr, object);
            self.ptr.set(self.ptr.get().offset(1));
            ptr
        };

        ptr
    }

    /// Grows the arena.
    #[inline(never)]
    fn grow(&self) {
        let chunk = self.first.borrow_mut().take_unwrap();
        let new_capacity = chunk.capacity.checked_mul(&2).unwrap();
        let chunk = TypedArenaChunk::<T>::new(Some(chunk), new_capacity);
        self.ptr.set(chunk.start() as *const T);
        self.end.set(chunk.end() as *const T);
        *self.first.borrow_mut() = Some(chunk)
    }
}

#[unsafe_destructor]
impl<T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        // Determine how much was filled.
        let start = self.first.borrow().get_ref().start() as uint;
        let end = self.ptr.get() as uint;
        let diff = (end - start) / mem::size_of::<T>();

        // Pass that to the `destroy` method.
        unsafe {
            self.first.borrow_mut().get_mut_ref().destroy(diff)
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};

    struct Point {
        x: int,
        y: int,
        z: int,
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in range(0u, 100000) {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Point {
                x: 1,
                y: 2,
                z: 3,
            })
        })
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            box Point {
                x: 1,
                y: 2,
                z: 3,
            }
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Point {
                    x: 1,
                    y: 2,
                    z: 3,
                }
            })
        })
    }

    struct Noncopy {
        string: String,
        array: Vec<int>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in range(0u, 100000) {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            });
        }
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            box Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            }
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| Noncopy {
                string: "hello world".to_string(),
                array: vec!( 1, 2, 3, 4, 5 ),
            })
        })
    }
}