// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
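//!
//! A minimal usage sketch (illustrative only; `Point` is a made-up example
//! type, not part of this crate):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! struct Point { x: i32, y: i32 }
//!
//! let arena = TypedArena::new();
//! let p = arena.alloc(Point { x: 1, y: 2 });
//! assert_eq!(p.x, 1);
//! // Every `Point` in the arena is dropped at once when `arena` goes out of scope.
//! ```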
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]
#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![cfg_attr(not(stage0), feature(nll))]
#![feature(raw_vec_internals)]
#![cfg_attr(test, feature(test))]
extern crate alloc;
extern crate rustc_data_structures;

use rustc_data_structures::sync::MTLock;
use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;
/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}
struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk {
            storage: RawVec::with_capacity(capacity),
        }
    }
    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if mem::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }
    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }
    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}
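// The byte size at which fresh chunks start; 4096 is a common OS page size,
// though the value here is a fixed constant, not queried from the OS.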
const PAGE: usize = 4096;
impl<T> TypedArena<T> {
    /// Creates a new `TypedArena`.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(0 as *mut T),
            end: Cell::new(0 as *mut T),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }
    /// Allocates an object in the `TypedArena`, returning a reference to it.
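    ///
    /// For illustration, a hedged sketch of typical use (the `u32` value is
    /// arbitrary):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let x: &mut u32 = arena.alloc(41);
    /// *x += 1;
    /// assert_eq!(*x, 42);
    /// ```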
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1)
                    as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }
    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
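    ///
    /// For illustration, a hedged sketch (the `u32` values are arbitrary):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let xs: &mut [u32] = arena.alloc_slice(&[1, 2, 3]);
    /// xs[0] = 10;
    /// assert_eq!(xs[0], 10);
    /// ```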
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = slice.len() * mem::size_of::<T>();
        if available_capacity_bytes < at_least_bytes {
            self.grow(slice.len());
        }

        unsafe {
            let start_ptr = self.ptr.get();
            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
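    ///
    /// A hedged usage sketch (the `u32` payloads are arbitrary):
    ///
    /// ```ignore
    /// let mut arena: TypedArena<u32> = TypedArena::new();
    /// arena.alloc(1);
    /// arena.clear(); // drops the `1`, keeps the largest chunk for reuse
    /// arena.alloc(2);
    /// ```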
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                self.clear_last_chunk(&mut last_chunk);
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..) {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
                chunks_borrow.push(last_chunk);
            }
        }
    }
    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}
unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}
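/// An arena that can hold objects of multiple different types, so long as
/// they do not need drop glue; `alloc` below asserts `!mem::needs_drop::<T>()`.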
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

unsafe impl Send for DroplessArena {}

impl DroplessArena {
    pub fn new() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(0 as *mut u8),
            end: Cell::new(0 as *mut u8),
            chunks: RefCell::new(vec![]),
        }
    }

    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const u8 as *mut u8;
        for chunk in &*self.chunks.borrow() {
            if chunk.start() <= ptr && ptr < chunk.end() {
                return true;
            }
        }

        false
    }
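    // Rounds `self.ptr` up to the next multiple of `align`, which must be a
    // power of two for the mask arithmetic below to be correct. Worked example
    // (illustrative values): with ptr = 0x1003 and align = 8,
    // (0x1003 + 7) & !7 == 0x1008.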
    fn align(&self, align: usize) {
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }
    #[inline(never)]
    #[cold]
    fn grow(&self, needed_bytes: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk
                    .storage
                    .reserve_in_place(used_bytes, needed_bytes)
                {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
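    /// For illustration, a hedged sketch of raw allocation (the byte count and
    /// alignment are arbitrary example values):
    ///
    /// ```ignore
    /// let arena = DroplessArena::new();
    /// let bytes: &mut [u8] = arena.alloc_raw(8, 1);
    /// assert_eq!(bytes.len(), 8);
    /// ```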
    #[inline]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        unsafe {
            assert!(bytes != 0);

            self.align(align);

            let future_end = intrinsics::arith_offset(self.ptr.get(), bytes as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow(bytes);
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves
            self.ptr.set(intrinsics::arith_offset(self.ptr.get(), bytes as isize) as *mut u8);
            slice::from_raw_parts_mut(ptr, bytes)
        }
    }
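    /// For illustration, a hedged sketch of use (the `u64` value is arbitrary;
    /// `T` must not need drop, which the assertion below enforces):
    ///
    /// ```ignore
    /// let arena = DroplessArena::new();
    /// let x = arena.alloc(7u64);
    /// assert_eq!(*x, 7);
    /// ```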
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        assert!(!mem::needs_drop::<T>());

        let mem = self.alloc_raw(
            mem::size_of::<T>(),
            mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            // Write into uninitialized memory.
            ptr::write(mem, object);
            &mut *mem
        }
    }
    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///
    ///  - Zero-sized types
    ///  - Zero-length slices
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        assert!(!mem::needs_drop::<T>());
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let mem = self.alloc_raw(
            slice.len() * mem::size_of::<T>(),
            mem::align_of::<T>()) as *mut _ as *mut T;

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
}
pub struct SyncTypedArena<T> {
    lock: MTLock<TypedArena<T>>,
}
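// Note: the `unsafe` lifetime extensions below rely on arena allocations being
// address-stable for as long as the arena itself is alive; the lock only
// protects the arena's internal bookkeeping.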
impl<T> SyncTypedArena<T> {
    #[inline(always)]
    pub fn new() -> SyncTypedArena<T> {
        SyncTypedArena {
            lock: MTLock::new(TypedArena::new())
        }
    }

    #[inline(always)]
    pub fn alloc(&self, object: T) -> &mut T {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
    }

    #[inline(always)]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
    }

    #[inline(always)]
    pub fn clear(&mut self) {
        self.lock.get_mut().clear();
    }
}
pub struct SyncDroplessArena {
    lock: MTLock<DroplessArena>,
}

impl SyncDroplessArena {
    #[inline(always)]
    pub fn new() -> SyncDroplessArena {
        SyncDroplessArena {
            lock: MTLock::new(DroplessArena::new())
        }
    }

    #[inline(always)]
    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        self.lock.lock().in_arena(ptr)
    }

    #[inline(always)]
    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc_raw(bytes, align) as *mut [u8]) }
    }

    #[inline(always)]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc(object) as *mut T) }
    }

    #[inline(always)]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
    where
        T: Copy,
    {
        // Extend the lifetime of the result since it's limited to the lock guard
        unsafe { &mut *(self.lock.lock().alloc_slice(slice) as *mut [T]) }
    }
}
#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::TypedArena;
    use std::cell::Cell;
    #[derive(Debug, Eq, PartialEq)]
    struct Point { x: i32, y: i32, z: i32 }
    #[test]
    pub fn test_unused() {
        let arena: TypedArena<Point> = TypedArena::new();
        assert!(arena.chunks.borrow().is_empty());
    }
    #[test]
    fn test_arena_alloc_nested() {
        struct Inner { value: u8 }
        struct Outer<'a> { inner: &'a Inner }
        enum EI<'e> { I(Inner), O(Outer<'e>) }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r { i } else { panic!("mismatch") }
            }
            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r { o } else { panic!("mismatch") }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result = arena.alloc_outer(|| Outer {
            inner: arena.alloc_inner(|| Inner { value: 10 }),
        });

        assert_eq!(result.inner.value, 10);
    }
    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }
    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }
    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }
    struct Noncopy { string: String, array: Vec<i32> }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }
    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }
    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::new();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }
    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }
    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }
    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        };
        assert_eq!(counter.get(), 100);
    }
    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::new();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }
    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }
    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }
    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }
    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }
    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }
}