// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate has two arenas implemented: `TypedArena`, which is a simpler
//! arena but can only hold objects of a single type, and `Arena`, which is a
//! more complex, slower arena which can hold objects of any type.
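//!
//! A rough sketch of typical use (`Point` stands in for any caller-defined
//! type, so this example is not compiled as a doctest):
//!
//! ```ignore
//! let arena = TypedArena::new();
//! let point = arena.alloc(Point { x: 1, y: 2, z: 3 });
//! point.x += 1; // `alloc` hands back a mutable reference
//! // Everything in `arena` is dropped together when it goes out of scope.
//! ```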

#![crate_name = "arena"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]
#![cfg_attr(not(stage0), deny(warnings))]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(drop_in_place)]
#![feature(heap_api)]
#![feature(raw)]
#![feature(staged_api)]
#![feature(dropck_parametricity)]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::heap;
use alloc::raw_vec::RawVec;

struct Chunk {
    data: RawVec<u8>,
    /// Index of the first unused byte.
    fill: Cell<usize>,
    /// Indicates whether this chunk holds only objects without destructors
    /// ("copy" data), so it can be skipped when running destructors.
    is_copy: Cell<bool>,
}

impl Chunk {
    fn new(size: usize, is_copy: bool) -> Chunk {
        Chunk {
            data: RawVec::with_capacity(size),
            fill: Cell::new(0),
            is_copy: Cell::new(is_copy),
        }
    }

    fn capacity(&self) -> usize {
        self.data.cap()
    }

    unsafe fn as_ptr(&self) -> *const u8 {
        self.data.ptr()
    }

    // Walk down a chunk, running the destructors for any objects stored
    // in it.
    unsafe fn destroy(&self) {
        let mut idx = 0;
        let buf = self.as_ptr();
        let fill = self.fill.get();

        while idx < fill {
            let tydesc_data = buf.offset(idx as isize) as *const usize;
            let (tydesc, is_done) = un_bitpack_tydesc_ptr(*tydesc_data);
            let (size, align) = ((*tydesc).size, (*tydesc).align);

            let after_tydesc = idx + mem::size_of::<*const TyDesc>();
            let start = round_up(after_tydesc, align);

            // Only fully initialized objects get their destructor run.
            if is_done {
                ((*tydesc).drop_glue)(buf.offset(start as isize) as *const i8);
            }

            // Find where the next tydesc lives
            idx = round_up(start + size, mem::align_of::<*const TyDesc>());
        }
    }
}

/// A slower reflection-based arena that can allocate objects of any type.
///
/// This arena uses `RawVec<u8>` as a backing store to allocate objects from.
/// For each allocated object, the arena stores a pointer to the type descriptor
/// followed by the object (potentially with alignment padding after each
/// element). When the arena is destroyed, it iterates through all of its
/// chunks, and uses the tydesc information to trace through the objects,
/// calling the destructors on them. One subtle point that needs to be
/// addressed is how to handle panics while running the user-provided
/// initializer function. It is important to not run the destructor on
/// uninitialized objects, but how to detect them is somewhat subtle. Since
/// `alloc()` can be invoked recursively, it is not sufficient to simply exclude
/// the most recent object. To solve this without requiring extra space, we
/// use the low order bit of the tydesc pointer to encode whether the object
/// it describes has been fully initialized.
///
/// As an optimization, objects with destructors are stored in different chunks
/// than objects without destructors. This reduces overhead when initializing
/// plain-old-data (`Copy` types) and means we don't need to waste time running
/// their destructors.
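///
/// A minimal usage sketch (the `Rc` allocation exercises the destructor path,
/// the byte array the copyable path; not compiled as a doctest):
///
/// ```ignore
/// use std::rc::Rc;
/// let arena = Arena::new();
/// arena.alloc(|| Rc::new(5));       // has drop glue: goes in noncopy chunks
/// arena.alloc(|| [0u8, 1u8, 2u8]);  // plain data: goes in copy chunks
/// // The `Rc`'s destructor runs when `arena` is dropped.
/// ```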
#[unstable(feature = "rustc_private",
           reason = "Private to rustc", issue = "0")]
#[rustc_deprecated(since = "1.6.0-dev", reason =
    "The reflection-based arena is superseded by the any-arena crate")]
pub struct Arena<'longer_than_self> {
    // The heads are separated out from the list as an unbenchmarked
    // microoptimization, to avoid needing to case on the list to access a head.
    head: RefCell<Chunk>,
    copy_head: RefCell<Chunk>,
    chunks: RefCell<Vec<Chunk>>,
    _marker: PhantomData<*mut &'longer_than_self ()>,
}

impl<'a> Arena<'a> {
    /// Allocates a new Arena with 32 bytes preallocated.
    pub fn new() -> Arena<'a> {
        Arena::new_with_size(32)
    }

    /// Allocates a new Arena with `initial_size` bytes preallocated.
    pub fn new_with_size(initial_size: usize) -> Arena<'a> {
        Arena {
            head: RefCell::new(Chunk::new(initial_size, false)),
            copy_head: RefCell::new(Chunk::new(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
            _marker: PhantomData,
        }
    }
}

impl<'longer_than_self> Drop for Arena<'longer_than_self> {
    fn drop(&mut self) {
        unsafe {
            self.head.borrow().destroy();
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    chunk.destroy();
                }
            }
        }
    }
}
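
// `round_up` rounds `base` up to the next multiple of `align`, which is always
// a power of two here; e.g. round_up(13, 8) == 16 and round_up(16, 8) == 16.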
#[inline]
fn round_up(base: usize, align: usize) -> usize {
    (base.checked_add(align - 1)).unwrap() & !(align - 1)
}

// We encode whether the object a tydesc describes has been
// initialized in the arena in the low bit of the tydesc pointer. This
// is necessary in order to properly do cleanup if a panic occurs
// during an initializer.
#[inline]
fn bitpack_tydesc_ptr(p: *const TyDesc, is_done: bool) -> usize {
    p as usize | (is_done as usize)
}

#[inline]
fn un_bitpack_tydesc_ptr(p: usize) -> (*const TyDesc, bool) {
    ((p & !1) as *const TyDesc, p & 1 == 1)
}

// HACK(eddyb) TyDesc replacement using a trait object vtable.
// This could be replaced in the future with a custom DST layout,
// or `&'static (drop_glue, size, align)` created by a `const fn`.
// Requirements:
// * rvalue promotion (issue #1056)
// * mem::{size_of, align_of} must be const fns
struct TyDesc {
    drop_glue: fn(*const i8),
    size: usize,
    align: usize,
}

trait AllTypes {
    fn dummy(&self) {}
}

impl<T: ?Sized> AllTypes for T {}

unsafe fn get_tydesc<T>() -> *const TyDesc {
    use std::raw::TraitObject;

    let ptr = &*(heap::EMPTY as *const T);

    // Can use any trait that is implemented for all types.
    let obj = mem::transmute::<&AllTypes, TraitObject>(ptr);
    obj.vtable as *const TyDesc
}
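
// Note: the cast above leans on the compiler's trait object vtable layout of
// this era: the first three words of a vtable are the drop glue function, the
// size, and the alignment, matching the `TyDesc` fields in that order.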

impl<'longer_than_self> Arena<'longer_than_self> {
    // Grows a given chunk and returns `false`, or replaces it with a bigger
    // chunk and returns `true`.
    // This method is shared by both parts of the arena.
    #[cold]
    fn alloc_grow(&self, head: &mut Chunk, used_cap: usize, n_bytes: usize) -> bool {
        if head.data.reserve_in_place(used_cap, n_bytes) {
            // In-place reallocation succeeded.
            false
        } else {
            // Allocate a new chunk.
            let new_min_chunk_size = cmp::max(n_bytes, head.capacity());
            let new_chunk = Chunk::new((new_min_chunk_size + 1).next_power_of_two(), false);
            let old_chunk = mem::replace(head, new_chunk);
            if old_chunk.fill.get() != 0 {
                self.chunks.borrow_mut().push(old_chunk);
            }
            true
        }
    }
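
    // For example: a full 32-byte head that cannot be grown in place and needs
    // 8 more bytes is replaced by a fresh chunk of
    // (max(8, 32) + 1).next_power_of_two() == 64 bytes, and the old chunk is
    // pushed onto `chunks`.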

    // Functions for the copyable part of the arena.

    #[inline]
    fn alloc_copy_inner(&self, n_bytes: usize, align: usize) -> *const u8 {
        let mut copy_head = self.copy_head.borrow_mut();
        let fill = copy_head.fill.get();
        let mut start = round_up(fill, align);
        let mut end = start + n_bytes;

        if end > copy_head.capacity() {
            if self.alloc_grow(&mut *copy_head, fill, end - fill) {
                // Continuing with a newly allocated chunk
                start = 0;
                end = n_bytes;
                copy_head.is_copy.set(true);
            }
        }

        copy_head.fill.set(end);

        unsafe { copy_head.as_ptr().offset(start as isize) }
    }

    #[inline]
    fn alloc_copy<T, F>(&self, op: F) -> &mut T
        where F: FnOnce() -> T
    {
        unsafe {
            let ptr = self.alloc_copy_inner(mem::size_of::<T>(), mem::align_of::<T>());
            let ptr = ptr as *mut T;
            ptr::write(&mut (*ptr), op());
            &mut *ptr
        }
    }

    // Functions for the non-copyable part of the arena.

    #[inline]
    fn alloc_noncopy_inner(&self, n_bytes: usize, align: usize) -> (*const u8, *const u8) {
        let mut head = self.head.borrow_mut();
        let fill = head.fill.get();

        let mut tydesc_start = fill;
        let after_tydesc = fill + mem::size_of::<*const TyDesc>();
        let mut start = round_up(after_tydesc, align);
        let mut end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());

        if end > head.capacity() {
            if self.alloc_grow(&mut *head, tydesc_start, end - tydesc_start) {
                // Continuing with a newly allocated chunk
                tydesc_start = 0;
                start = round_up(mem::size_of::<*const TyDesc>(), align);
                end = round_up(start + n_bytes, mem::align_of::<*const TyDesc>());
            }
        }

        head.fill.set(end);

        unsafe {
            let buf = head.as_ptr();
            (buf.offset(tydesc_start as isize), buf.offset(start as isize))
        }
    }
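
    // The layout this produces inside a noncopy chunk is, conceptually:
    //
    //   [ tydesc ptr ][ pad to `align` ][ object ][ pad to tydesc align ] ...
    //     ^ first returned pointer        ^ second returned pointer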

    #[inline]
    fn alloc_noncopy<T, F>(&self, op: F) -> &mut T
        where F: FnOnce() -> T
    {
        unsafe {
            let tydesc = get_tydesc::<T>();
            let (ty_ptr, ptr) = self.alloc_noncopy_inner(mem::size_of::<T>(), mem::align_of::<T>());
            let ty_ptr = ty_ptr as *mut usize;
            let ptr = ptr as *mut T;
            // Write in our tydesc along with a bit indicating that it
            // has *not* been initialized yet.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, false);
            // Actually initialize it
            ptr::write(&mut (*ptr), op());
            // Now that we are done, update the tydesc to indicate that
            // the object is there.
            *ty_ptr = bitpack_tydesc_ptr(tydesc, true);

            &mut *ptr
        }
    }

    /// Allocates a new item in the arena, using `op` to initialize the value,
    /// and returns a reference to it.
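    ///
    /// A short sketch of use (not compiled as a doctest):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let v = arena.alloc(|| vec![1, 2, 3]); // Vec has drop glue
    /// v.push(4);
    /// ```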
    #[inline]
    pub fn alloc<T: 'longer_than_self, F>(&self, op: F) -> &mut T
        where F: FnOnce() -> T
    {
        unsafe {
            if intrinsics::needs_drop::<T>() {
                self.alloc_noncopy(op)
            } else {
                self.alloc_copy(op)
            }
        }
    }

    /// Allocates a slice of bytes of requested length. The bytes are not guaranteed to be zero
    /// if the arena has previously been cleared.
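    ///
    /// For example (a sketch, not compiled as a doctest):
    ///
    /// ```ignore
    /// let arena = Arena::new();
    /// let bytes = arena.alloc_bytes(8);
    /// bytes[0] = 0xff; // contents start out unspecified
    /// ```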
    ///
    /// # Panics
    ///
    /// Panics if the requested length is too large and causes overflow.
    pub fn alloc_bytes(&self, len: usize) -> &mut [u8] {
        unsafe {
            // Check for overflow.
            self.copy_head.borrow().fill.get().checked_add(len).expect("length overflow");
            let ptr = self.alloc_copy_inner(len, 1);
            intrinsics::assume(!ptr.is_null());
            slice::from_raw_parts_mut(ptr as *mut _, len)
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk, which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            self.head.borrow().destroy();
            self.head.borrow().fill.set(0);
            self.copy_head.borrow().fill.set(0);
            for chunk in self.chunks.borrow().iter() {
                if !chunk.is_copy.get() {
                    chunk.destroy();
                }
            }
            self.chunks.borrow_mut().clear();
        }
    }
}

/// A faster arena that can hold objects of only one type.
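///
/// A short sketch of use (the element type is inferred from the first `alloc`;
/// not compiled as a doctest):
///
/// ```ignore
/// let arena = TypedArena::new();
/// let x = arena.alloc(5u32);
/// assert_eq!(*x, 5);
/// ```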
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena segments.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
    }

    /// Destroys this arena chunk, running destructors for the first `len` objects.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena` with preallocated space for many objects.
    #[inline]
    pub fn new() -> TypedArena<T> {
        // Reserve at least one page.
        let elem_size = cmp::max(1, mem::size_of::<T>());
        TypedArena::with_capacity(PAGE / elem_size)
    }

    /// Creates a new `TypedArena` with preallocated space for the given number of
    /// objects.
    #[inline]
    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
        unsafe {
            let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
            TypedArena {
                ptr: Cell::new(chunk.start()),
                end: Cell::new(chunk.end()),
                chunks: RefCell::new(vec![chunk]),
                _own: PhantomData,
            }
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
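    ///
    /// For example (a sketch, not compiled as a doctest):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let s = arena.alloc("hello".to_string());
    /// s.push_str(" world");
    /// ```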
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow()
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = heap::EMPTY as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let prev_capacity = chunks.last().unwrap().storage.cap();
            let new_capacity = prev_capacity.checked_mul(2).unwrap();
            if chunks.last_mut().unwrap().storage.double_in_place() {
                self.end.set(chunks.last().unwrap().end());
            } else {
                let chunk = TypedArenaChunk::<T>::new(new_capacity);
                self.ptr.set(chunk.start());
                self.end.set(chunk.end());
                chunks.push(chunk);
            }
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk, which may be reused.
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            let last_idx = chunks_borrow.len() - 1;
            self.clear_last_chunk(&mut chunks_borrow[last_idx]);
            // If `T` is ZST, code below has no effect.
            for mut chunk in chunks_borrow.drain(..last_idx) {
                let cap = chunk.storage.cap();
                chunk.destroy(cap);
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

impl<T> Drop for TypedArena<T> {
    #[unsafe_destructor_blind_to_params]
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            let mut last_chunk = chunks_borrow.pop().unwrap();
            // Drop the contents of the last chunk.
            self.clear_last_chunk(&mut last_chunk);
            // The last chunk will be dropped. Destroy all other chunks.
            for chunk in chunks_borrow.iter_mut() {
                let cap = chunk.storage.cap();
                chunk.destroy(cap);
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::{Arena, TypedArena};
    use std::cell::Cell;
    use std::rc::Rc;

    #[allow(dead_code)]
    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| {
            Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) }
        });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[bench]
    pub fn bench_copy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| arena.alloc(|| Point { x: 1, y: 2, z: 3 }))
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }

    #[test]
    pub fn test_arena_zero_sized() {
        let arena = Arena::new();
        let mut points = vec![];
        for _ in 0..1000 {
            for _ in 0..100 {
                arena.alloc(|| ());
            }
            let point = arena.alloc(|| Point { x: 1, y: 2, z: 3 });
            points.push(point);
        }
        for point in &points {
            assert_eq!(**point, Point { x: 1, y: 2, z: 3 });
        }
    }

    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::new();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }

    #[test]
    pub fn test_arena_clear() {
        let mut arena = Arena::new();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(|| Point { x: 1, y: 2, z: 3 });
                arena.alloc(|| {
                    Noncopy {
                        string: "hello world".to_string(),
                        array: vec![],
                    }
                });
            }
        }
    }

    #[test]
    pub fn test_arena_alloc_bytes() {
        let arena = Arena::new();
        for i in 0..10000 {
            arena.alloc(|| Point { x: 1, y: 2, z: 3 });
            for byte in arena.alloc_bytes(i % 42).iter_mut() {
                *byte = i as u8;
            }
        }
    }

    #[test]
    fn test_arena_destructors() {
        let arena = Arena::new();
        for i in 0..10 {
            // Arena allocate something with drop glue to make sure it
            // doesn't leak.
            arena.alloc(|| Rc::new(i));
            // Allocate something with funny size and alignment, to keep
            // things interesting.
            arena.alloc(|| [0u8, 1u8, 2u8]);
        }
    }

    #[test]
    #[should_panic]
    fn test_arena_destructors_fail() {
        let arena = Arena::new();
        // Put some stuff in the arena.
        for i in 0..10 {
            // Arena allocate something with drop glue to make sure it
            // doesn't leak.
            arena.alloc(|| Rc::new(i));
            // Allocate something with funny size and alignment, to keep
            // things interesting.
            arena.alloc(|| [0u8, 1, 2]);
        }
        // Now, panic while allocating
        arena.alloc::<Rc<i32>, _>(|| {
            panic!();
        });
    }

    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }

    #[test]
    fn test_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena = Arena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(|| DropCounter { count: &counter });
                // Allocate something with funny size and alignment, to keep
                // things interesting.
                arena.alloc(|| [0u8, 1u8, 2u8]);
            }
            // dropping
        };
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_arena_drop_on_clear() {
        let counter = Cell::new(0);
        for i in 0..10 {
            let mut arena = Arena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(|| DropCounter { count: &counter });
                // Allocate something with funny size and alignment, to keep
                // things interesting.
                arena.alloc(|| [0u8, 1u8, 2u8]);
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        };
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::new();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }

    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }

    #[test]
    fn test_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena = Arena::new();
            for _ in 0..10 {
                for _ in 0..10 {
                    // Allocate something with drop glue to make sure it doesn't leak.
                    arena.alloc(|| SmallDroppable);
                }
                // Allocate something with funny size and alignment, to keep
                // things interesting.
                arena.alloc(|| [0u8, 1u8, 2u8]);
            }
            // dropping
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
            // dropping
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }

    #[bench]
    pub fn bench_noncopy_old_arena(b: &mut Bencher) {
        let arena = Arena::new();
        b.iter(|| {
            arena.alloc(|| {
                Noncopy {
                    string: "hello world".to_string(),
                    array: vec![1, 2, 3, 4, 5],
                }
            })
        })
    }
}