// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The arena, a fast but limited type of allocator.
//!
//! Arenas are a type of allocator that destroy the objects within, all at
//! once, once the arena itself is destroyed. They do not support deallocation
//! of individual objects while the arena itself is still alive. The benefit
//! of an arena is very fast allocation; just a pointer bump.
//!
//! This crate implements `TypedArena`, a simple arena that can only hold
//! objects of a single type.
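//!
//! A minimal usage sketch (illustrative only; `rustc_private` crates like
//! this one are not available on stable Rust):
//!
//! ```ignore
//! use arena::TypedArena;
//!
//! let arena: TypedArena<u32> = TypedArena::new();
//! let x = arena.alloc(42);   // bump-allocates and returns `&mut u32`
//! *x += 1;
//! assert_eq!(*x, 43);
//! // Every value allocated here is dropped when `arena` is dropped.
//! ```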

#![crate_name = "arena"]
#![cfg_attr(stage0, unstable(feature = "rustc_private", issue = "27812"))]
#![crate_type = "rlib"]
#![crate_type = "dylib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
       html_root_url = "https://doc.rust-lang.org/nightly/",
       test(no_crate_inject, attr(deny(warnings))))]

#![feature(alloc)]
#![feature(core_intrinsics)]
#![feature(dropck_eyepatch)]
#![feature(generic_param_attrs)]
#![cfg_attr(stage0, feature(staged_api))]
#![cfg_attr(test, feature(test))]

extern crate alloc;

use std::cell::{Cell, RefCell};
use std::cmp;
use std::intrinsics;
use std::marker::{PhantomData, Send};
use std::mem;
use std::ptr;
use std::slice;

use alloc::raw_vec::RawVec;

/// An arena that can hold objects of only one type.
pub struct TypedArena<T> {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut T>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut T>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<T>>>,

    /// Marker indicating that dropping the arena causes its owned
    /// instances of `T` to be dropped.
    _own: PhantomData<T>,
}

struct TypedArenaChunk<T> {
    /// The raw storage for the arena chunk.
    storage: RawVec<T>,
}

impl<T> TypedArenaChunk<T> {
    #[inline]
    unsafe fn new(capacity: usize) -> TypedArenaChunk<T> {
        TypedArenaChunk { storage: RawVec::with_capacity(capacity) }
    }

    /// Destroys this arena chunk.
    #[inline]
    unsafe fn destroy(&mut self, len: usize) {
        // The branch on needs_drop() is an -O1 performance optimization.
        // Without the branch, dropping TypedArena<u8> takes linear time.
        if intrinsics::needs_drop::<T>() {
            let mut start = self.start();
            // Destroy all allocated objects.
            for _ in 0..len {
                ptr::drop_in_place(start);
                start = start.offset(1);
            }
        }
    }

    // Returns a pointer to the first allocated object.
    #[inline]
    fn start(&self) -> *mut T {
        self.storage.ptr()
    }

    // Returns a pointer to the end of the allocated space.
    #[inline]
    fn end(&self) -> *mut T {
        unsafe {
            if mem::size_of::<T>() == 0 {
                // A pointer as large as possible for zero-sized elements.
                !0 as *mut T
            } else {
                self.start().offset(self.storage.cap() as isize)
            }
        }
    }
}

const PAGE: usize = 4096;

impl<T> TypedArena<T> {
    /// Creates a new `TypedArena`.
    #[inline]
    pub fn new() -> TypedArena<T> {
        TypedArena {
            // We set both `ptr` and `end` to 0 so that the first call to
            // alloc() will trigger a grow().
            ptr: Cell::new(0 as *mut T),
            end: Cell::new(0 as *mut T),
            chunks: RefCell::new(vec![]),
            _own: PhantomData,
        }
    }

    /// Allocates an object in the `TypedArena`, returning a reference to it.
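    ///
    /// An illustrative sketch (not part of the original docs):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let v = arena.alloc(vec![1, 2, 3]);
    /// v.push(4);   // the returned reference is mutable
    /// assert_eq!(v.len(), 4);
    /// ```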
    #[inline]
    pub fn alloc(&self, object: T) -> &mut T {
        if self.ptr == self.end {
            self.grow(1)
        }

        unsafe {
            if mem::size_of::<T>() == 0 {
                self.ptr.set(intrinsics::arith_offset(self.ptr.get() as *mut u8, 1) as *mut T);
                let ptr = mem::align_of::<T>() as *mut T;
                // Don't drop the object. This `write` is equivalent to `forget`.
                ptr::write(ptr, object);
                &mut *ptr
            } else {
                let ptr = self.ptr.get();
                // Advance the pointer.
                self.ptr.set(self.ptr.get().offset(1));
                // Write into uninitialized memory.
                ptr::write(ptr, object);
                &mut *ptr
            }
        }
    }

    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///  - Zero-sized types
    ///  - Zero-length slices
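    ///
    /// An illustrative sketch (not part of the original docs; the `T: Copy`
    /// bound below is required):
    ///
    /// ```ignore
    /// let arena = TypedArena::new();
    /// let s = arena.alloc_slice(&[1, 2, 3]);
    /// s[0] = 10;
    /// assert_eq!(s[0], 10);
    /// ```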
    #[inline]
    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
        where T: Copy
    {
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);

        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
        let at_least_bytes = slice.len() * mem::size_of::<T>();
        if available_capacity_bytes < at_least_bytes {
            self.grow(slice.len());
        }

        unsafe {
            let start_ptr = self.ptr.get();
            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }

    /// Grows the arena.
    #[inline(never)]
    #[cold]
    fn grow(&self, n: usize) {
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                let currently_used_cap = used_bytes / mem::size_of::<T>();
                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= currently_used_cap + n {
                            break;
                        }
                    }
                }
            } else {
                let elem_size = cmp::max(1, mem::size_of::<T>());
                new_capacity = cmp::max(n, PAGE / elem_size);
            }
            chunk = TypedArenaChunk::<T>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }

    /// Clears the arena. Deallocates all but the longest chunk which may be reused.
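    ///
    /// An illustrative sketch (not part of the original docs):
    ///
    /// ```ignore
    /// let mut arena = TypedArena::new();
    /// arena.alloc(String::from("a"));
    /// arena.clear();                  // drops the `String`, keeps the chunk
    /// arena.alloc(String::from("b")); // reuses the retained chunk
    /// ```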
    pub fn clear(&mut self) {
        unsafe {
            // Clear the last chunk, which is partially filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                self.clear_last_chunk(&mut last_chunk);
                // If `T` is ZST, code below has no effect.
                for mut chunk in chunks_borrow.drain(..) {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
                chunks_borrow.push(last_chunk);
            }
        }
    }

    // Drops the contents of the last chunk. The last chunk is partially empty, unlike all other
    // chunks.
    fn clear_last_chunk(&self, last_chunk: &mut TypedArenaChunk<T>) {
        // Determine how much was filled.
        let start = last_chunk.start() as usize;
        // We obtain the value of the pointer to the first uninitialized element.
        let end = self.ptr.get() as usize;
        // We then calculate the number of elements to be dropped in the last chunk,
        // which is the filled area's length.
        let diff = if mem::size_of::<T>() == 0 {
            // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
            // the number of zero-sized values in the last and only chunk, just out of caution.
            // Recall that `end` was incremented for each allocated value.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        };
        // Pass that to the `destroy` method.
        unsafe {
            last_chunk.destroy(diff);
        }
        // Reset the chunk.
        self.ptr.set(last_chunk.start());
    }
}

unsafe impl<#[may_dangle] T> Drop for TypedArena<T> {
    fn drop(&mut self) {
        unsafe {
            // Determine how much was filled.
            let mut chunks_borrow = self.chunks.borrow_mut();
            if let Some(mut last_chunk) = chunks_borrow.pop() {
                // Drop the contents of the last chunk.
                self.clear_last_chunk(&mut last_chunk);
                // The last chunk will be dropped. Destroy all other chunks.
                for chunk in chunks_borrow.iter_mut() {
                    let cap = chunk.storage.cap();
                    chunk.destroy(cap);
                }
            }
            // RawVec handles deallocation of `last_chunk` and `self.chunks`.
        }
    }
}

unsafe impl<T: Send> Send for TypedArena<T> {}
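
/// An arena that can hold objects of multiple different types. Unlike
/// `TypedArena`, it never runs destructors: `alloc` asserts that `T` needs
/// no drop glue, so dropping the arena only frees memory. (This doc comment
/// is an added description; the behavior it states follows from the asserts
/// in `alloc` and `alloc_slice` below.)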
pub struct DroplessArena {
    /// A pointer to the next object to be allocated.
    ptr: Cell<*mut u8>,

    /// A pointer to the end of the allocated area. When this pointer is
    /// reached, a new chunk is allocated.
    end: Cell<*mut u8>,

    /// A vector of arena chunks.
    chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
}

impl DroplessArena {
    pub fn new() -> DroplessArena {
        DroplessArena {
            ptr: Cell::new(0 as *mut u8),
            end: Cell::new(0 as *mut u8),
            chunks: RefCell::new(vec![]),
        }
    }

    pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
        let ptr = ptr as *const u8 as *mut u8;
        for chunk in &*self.chunks.borrow() {
            if chunk.start() <= ptr && ptr < chunk.end() {
                return true;
            }
        }

        false
    }

    #[inline]
    fn align_for<T>(&self) {
        let align = mem::align_of::<T>();
        // Round the bump pointer up to the next multiple of `align`.
        let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
        self.ptr.set(final_address as *mut u8);
        assert!(self.ptr <= self.end);
    }

    #[inline(never)]
    #[cold]
    fn grow<T>(&self, n: usize) {
        let needed_bytes = n * mem::size_of::<T>();
        unsafe {
            let mut chunks = self.chunks.borrow_mut();
            let (chunk, mut new_capacity);
            if let Some(last_chunk) = chunks.last_mut() {
                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
                if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
                    self.end.set(last_chunk.end());
                    return;
                } else {
                    new_capacity = last_chunk.storage.cap();
                    loop {
                        new_capacity = new_capacity.checked_mul(2).unwrap();
                        if new_capacity >= used_bytes + needed_bytes {
                            break;
                        }
                    }
                }
            } else {
                new_capacity = cmp::max(needed_bytes, PAGE);
            }
            chunk = TypedArenaChunk::<u8>::new(new_capacity);
            self.ptr.set(chunk.start());
            self.end.set(chunk.end());
            chunks.push(chunk);
        }
    }
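
    /// Allocates an object in the `DroplessArena`, returning a reference to it.
    ///
    /// An illustrative sketch (not part of the original docs); `T` must not
    /// need drop glue, which `alloc` asserts:
    ///
    /// ```ignore
    /// let arena = DroplessArena::new();
    /// let n = arena.alloc(7u32);
    /// assert_eq!(*n, 7);
    /// ```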
    #[inline]
    pub fn alloc<T>(&self, object: T) -> &mut T {
        unsafe {
            assert!(!intrinsics::needs_drop::<T>());
            assert!(mem::size_of::<T>() != 0);

            self.align_for::<T>();
            let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
            if (future_end as *mut u8) >= self.end.get() {
                self.grow::<T>(1)
            }

            let ptr = self.ptr.get();
            // Set the pointer past ourselves
            self.ptr.set(intrinsics::arith_offset(
                self.ptr.get(), mem::size_of::<T>() as isize
            ) as *mut u8);
            // Write into uninitialized memory.
            ptr::write(ptr as *mut T, object);
            &mut *(ptr as *mut T)
        }
    }

    /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
    /// reference to it. Will panic if passed a zero-sized type.
    ///
    /// Panics:
    ///  - Zero-sized types
    ///  - Zero-length slices
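    ///
    /// An illustrative sketch (not part of the original docs; the `T: Copy`
    /// bound below is required):
    ///
    /// ```ignore
    /// let arena = DroplessArena::new();
    /// let xs = arena.alloc_slice(&[1u8, 2, 3]);
    /// assert_eq!(xs.len(), 3);
    /// ```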
    #[inline]
    pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
        where T: Copy
    {
        unsafe {
            assert!(!intrinsics::needs_drop::<T>());
        }
        assert!(mem::size_of::<T>() != 0);
        assert!(slice.len() != 0);
        self.align_for::<T>();
        let future_end = unsafe {
            intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
        };
        if (future_end as *mut u8) >= self.end.get() {
            self.grow::<T>(slice.len());
        }

        unsafe {
            let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
            self.ptr.set(intrinsics::arith_offset(
                self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
            ) as *mut u8);
            arena_slice.copy_from_slice(slice);
            arena_slice
        }
    }
}

#[cfg(test)]
mod tests {
    extern crate test;
    use self::test::Bencher;
    use super::TypedArena;
    use std::cell::Cell;

    #[allow(dead_code)]
    #[derive(Debug, Eq, PartialEq)]
    struct Point {
        x: i32,
        y: i32,
        z: i32,
    }

    #[test]
    pub fn test_unused() {
        let arena: TypedArena<Point> = TypedArena::new();
        assert!(arena.chunks.borrow().is_empty());
    }

    #[test]
    fn test_arena_alloc_nested() {
        struct Inner {
            value: u8,
        }
        struct Outer<'a> {
            inner: &'a Inner,
        }
        enum EI<'e> {
            I(Inner),
            O(Outer<'e>),
        }

        struct Wrap<'a>(TypedArena<EI<'a>>);

        impl<'a> Wrap<'a> {
            fn alloc_inner<F: Fn() -> Inner>(&self, f: F) -> &Inner {
                let r: &EI = self.0.alloc(EI::I(f()));
                if let &EI::I(ref i) = r {
                    i
                } else {
                    panic!("mismatch");
                }
            }

            fn alloc_outer<F: Fn() -> Outer<'a>>(&self, f: F) -> &Outer {
                let r: &EI = self.0.alloc(EI::O(f()));
                if let &EI::O(ref o) = r {
                    o
                } else {
                    panic!("mismatch");
                }
            }
        }

        let arena = Wrap(TypedArena::new());

        let result =
            arena.alloc_outer(|| Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) });

        assert_eq!(result.inner.value, 10);
    }

    #[test]
    pub fn test_copy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Point { x: 1, y: 2, z: 3 });
        }
    }

    #[bench]
    pub fn bench_copy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| arena.alloc(Point { x: 1, y: 2, z: 3 }))
    }

    #[bench]
    pub fn bench_copy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Point { x: 1, y: 2, z: 3 });
        })
    }

    #[allow(dead_code)]
    struct Noncopy {
        string: String,
        array: Vec<i32>,
    }

    #[test]
    pub fn test_noncopy() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        }
    }

    #[test]
    pub fn test_typed_arena_zero_sized() {
        let arena = TypedArena::new();
        for _ in 0..100000 {
            arena.alloc(());
        }
    }

    #[test]
    pub fn test_typed_arena_clear() {
        let mut arena = TypedArena::new();
        for _ in 0..10 {
            arena.clear();
            for _ in 0..10000 {
                arena.alloc(Point { x: 1, y: 2, z: 3 });
            }
        }
    }

    struct DropCounter<'a> {
        count: &'a Cell<u32>,
    }

    impl<'a> Drop for DropCounter<'a> {
        fn drop(&mut self) {
            self.count.set(self.count.get() + 1);
        }
    }

    #[test]
    fn test_typed_arena_drop_count() {
        let counter = Cell::new(0);
        {
            let arena: TypedArena<DropCounter> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
        };
        assert_eq!(counter.get(), 100);
    }

    #[test]
    fn test_typed_arena_drop_on_clear() {
        let counter = Cell::new(0);
        let mut arena: TypedArena<DropCounter> = TypedArena::new();
        for i in 0..10 {
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(DropCounter { count: &counter });
            }
            arena.clear();
            assert_eq!(counter.get(), i * 100 + 100);
        }
    }

    thread_local! {
        static DROP_COUNTER: Cell<u32> = Cell::new(0)
    }

    struct SmallDroppable;

    impl Drop for SmallDroppable {
        fn drop(&mut self) {
            DROP_COUNTER.with(|c| c.set(c.get() + 1));
        }
    }

    #[test]
    fn test_typed_arena_drop_small_count() {
        DROP_COUNTER.with(|c| c.set(0));
        {
            let arena: TypedArena<SmallDroppable> = TypedArena::new();
            for _ in 0..100 {
                // Allocate something with drop glue to make sure it doesn't leak.
                arena.alloc(SmallDroppable);
            }
        };
        assert_eq!(DROP_COUNTER.with(|c| c.get()), 100);
    }

    #[bench]
    pub fn bench_noncopy(b: &mut Bencher) {
        let arena = TypedArena::new();
        b.iter(|| {
            arena.alloc(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            })
        })
    }

    #[bench]
    pub fn bench_noncopy_nonarena(b: &mut Bencher) {
        b.iter(|| {
            let _: Box<_> = Box::new(Noncopy {
                string: "hello world".to_string(),
                array: vec![1, 2, 3, 4, 5],
            });
        })
    }
}