// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The local, garbage collected heap
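//!
//! Each task owns one `LocalHeap`. Every allocation is prefixed with an
//! `AllocHeader` (used only for sanity checking and allocation tracking in
//! debug builds), and every live box is threaded onto an intrusive
//! doubly-linked list through its `prev`/`next` fields, with `live_allocs`
//! as the head, so the runtime can still reach every managed allocation.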

use cast;
use iter::Iterator;
use mem;
use ops::Drop;
use option::{Option, None, Some};
use ptr;
use ptr::RawPtr;
use raw;
use rt::global_heap;
use rt::local::Local;
use rt::task::Task;
use slice::{ImmutableVector, Vector};
use vec::Vec;

// This has no meaning without rtdebug also turned on.
static TRACK_ALLOCATIONS: int = 0;

static MAGIC: u32 = 0xbadc0ffe;

pub type Box = raw::Box<()>;
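
/// Thin wrapper around the global allocator that, when allocation tracking
/// is enabled, also records every outstanding `AllocHeader`.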
pub struct MemoryRegion {
    priv allocations: Vec<*AllocHeader>,
    priv live_allocations: uint,
}
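
/// The per-task heap for `@` boxes. `live_allocs` is the head of the
/// intrusive doubly-linked list of all boxes that are still alive.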
pub struct LocalHeap {
    priv memory_region: MemoryRegion,
    priv live_allocs: *mut raw::Box<()>,
}

impl LocalHeap {
    pub fn new() -> LocalHeap {
        let region = MemoryRegion {
            allocations: Vec::new(),
            live_allocations: 0,
        };
        LocalHeap {
            memory_region: region,
            live_allocs: ptr::mut_null(),
        }
    }
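
    /// Allocates a box big enough for `size` bytes of data with the given
    /// alignment, stores its drop glue, and links it onto the front of the
    /// `live_allocs` list.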
    pub fn alloc(&mut self, drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut Box {
        let total_size = global_heap::get_box_size(size, align);
        let alloc = self.memory_region.malloc(total_size);
        {
            // Make sure that we can't use `mybox` outside of this scope
            let mybox: &mut Box = unsafe { cast::transmute(alloc) };

            // Clear out this box, and move it to the front of the live
            // allocations list
            mybox.drop_glue = drop_glue;
            mybox.ref_count = 1;
            mybox.prev = ptr::mut_null();
            mybox.next = self.live_allocs;
            if !self.live_allocs.is_null() {
                unsafe { (*self.live_allocs).prev = alloc; }
            }
            self.live_allocs = alloc;
        }
        return alloc;
    }

    pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
        // Make sure that we can't use `mybox` outside of this scope
        let total_size = size + mem::size_of::<Box>();
        let new_box = self.memory_region.realloc(ptr, total_size);

        // Fix links because we could have moved around
        let mybox: &mut Box = unsafe { cast::transmute(new_box) };
        if !mybox.prev.is_null() {
            unsafe { (*mybox.prev).next = new_box; }
        }
        if !mybox.next.is_null() {
            unsafe { (*mybox.next).prev = new_box; }
        }

        if self.live_allocs == ptr {
            self.live_allocs = new_box;
        }
        return new_box;
    }

    pub fn free(&mut self, alloc: *mut Box) {
        {
            // Make sure that we can't use `mybox` outside of this scope
            let mybox: &mut Box = unsafe { cast::transmute(alloc) };

            // Unlink it from the linked list
            if !mybox.prev.is_null() {
                unsafe { (*mybox.prev).next = mybox.next; }
            }
            if !mybox.next.is_null() {
                unsafe { (*mybox.next).prev = mybox.prev; }
            }
            if self.live_allocs == alloc {
                self.live_allocs = mybox.next;
            }
        }

        self.memory_region.free(alloc);
    }
}

impl Drop for LocalHeap {
    fn drop(&mut self) {
        assert!(self.live_allocs.is_null());
    }
}

#[cfg(rtdebug)]
struct AllocHeader {
    magic: u32,
    index: i32,
    size: u32,
}

#[cfg(not(rtdebug))]
struct AllocHeader;

impl AllocHeader {
    #[cfg(rtdebug)]
    fn init(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.magic = MAGIC;
            self.index = -1;
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn init(&mut self, _size: u32) {}

    #[cfg(rtdebug)]
    fn assert_sane(&self) {
        if TRACK_ALLOCATIONS > 0 {
            rtassert!(self.magic == MAGIC);
        }
    }
    #[cfg(not(rtdebug))]
    fn assert_sane(&self) {}

    #[cfg(rtdebug)]
    fn update_size(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn update_size(&mut self, _size: u32) {}

    fn as_box(&mut self) -> *mut Box {
        let myaddr: uint = unsafe { cast::transmute(self) };
        (myaddr + AllocHeader::size()) as *mut Box
    }

    fn size() -> uint {
        // For some platforms, 16 byte alignment is required.
        let ptr_size = 16;
        let header_size = mem::size_of::<AllocHeader>();
        return (header_size + ptr_size - 1) / ptr_size * ptr_size;
    }
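
    // With the rounding above, the 12-byte debug header is padded out to 16
    // bytes, while the zero-size release header adds no padding at all.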

    fn from(a_box: *mut Box) -> *mut AllocHeader {
        (a_box as uint - AllocHeader::size()) as *mut AllocHeader
    }
}

impl MemoryRegion {
    fn malloc(&mut self, size: uint) -> *mut Box {
        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::malloc_raw(total_size) as *AllocHeader
        };

        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.init(size as u32);
        self.claim(alloc);
        self.live_allocations += 1;

        return alloc.as_box();
    }

    fn realloc(&mut self, alloc: *mut Box, size: uint) -> *mut Box {
        rtassert!(!alloc.is_null());
        let orig_alloc = AllocHeader::from(alloc);
        unsafe { (*orig_alloc).assert_sane(); }

        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::realloc_raw(orig_alloc as *mut u8,
                                     total_size) as *AllocHeader
        };

        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.update_size(size as u32);
        self.update(alloc, orig_alloc as *AllocHeader);
        return alloc.as_box();
    }

    fn free(&mut self, alloc: *mut Box) {
        rtassert!(!alloc.is_null());
        let alloc = AllocHeader::from(alloc);
        unsafe {
            (*alloc).assert_sane();
            self.release(cast::transmute(alloc));
            rtassert!(self.live_allocations > 0);
            self.live_allocations -= 1;
            global_heap::exchange_free(alloc as *u8)
        }
    }

    #[cfg(rtdebug)]
    fn claim(&mut self, alloc: &mut AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            alloc.index = self.allocations.len() as i32;
            self.allocations.push(&*alloc as *AllocHeader);
        }
    }
    #[cfg(not(rtdebug))]
    fn claim(&mut self, _alloc: &mut AllocHeader) {}

    #[cfg(rtdebug)]
    fn release(&mut self, alloc: &AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations.as_slice()[alloc.index] == alloc as *AllocHeader);
            self.allocations.as_mut_slice()[alloc.index] = ptr::null();
        }
    }
    #[cfg(not(rtdebug))]
    fn release(&mut self, _alloc: &AllocHeader) {}

    #[cfg(rtdebug)]
    fn update(&mut self, alloc: &mut AllocHeader, orig: *AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations.as_slice()[alloc.index] == orig);
            self.allocations.as_mut_slice()[alloc.index] = &*alloc as *AllocHeader;
        }
    }
    #[cfg(not(rtdebug))]
    fn update(&mut self, _alloc: &mut AllocHeader, _orig: *AllocHeader) {}
}

impl Drop for MemoryRegion {
    fn drop(&mut self) {
        if self.live_allocations != 0 {
            rtabort!("leaked managed memory ({} objects)", self.live_allocations);
        }
        rtassert!(self.allocations.as_slice().iter().all(|s| s.is_null()));
    }
}

#[cfg(not(test))]
#[lang="malloc"]
pub unsafe fn local_malloc_(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
    local_malloc(drop_glue, size, align)
}

pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
    // FIXME: Unsafe borrow for speed. Lame.
    let task: Option<*mut Task> = Local::try_unsafe_borrow();
    match task {
        Some(task) => {
            (*task).heap.alloc(drop_glue, size, align) as *u8
        }
        None => rtabort!("local malloc outside of task")
    }
}

#[cfg(not(test))]
#[lang="free"]
pub unsafe fn local_free_(ptr: *u8) {
    local_free(ptr)
}

// NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
// inside a landing pad may corrupt the state of the exception handler. If a
// problem occurs, call exit instead.
pub unsafe fn local_free(ptr: *u8) {
    // FIXME: Unsafe borrow for speed. Lame.
    let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();
    match task_ptr {
        Some(task) => {
            (*task).heap.free(ptr as *mut Box)
        }
        None => rtabort!("local free outside of task")
    }
}
pub fn live_allocs() -> *mut Box {
    let mut task = Local::borrow(None::<Task>);
    task.get().heap.live_allocs
}
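
// Illustrative sketch (not part of the original module): one way a runtime
// component could walk the list returned by `live_allocs()`. The helper name
// `walk_live_allocs` and its closure-based interface are assumptions made for
// this example only.
unsafe fn walk_live_allocs(f: |*mut Box| -> bool) -> bool {
    // Start at the head of the task's intrusive list and follow `next`
    // pointers until the end, stopping early if the callback returns false.
    let mut alloc = live_allocs();
    while !alloc.is_null() {
        if !f(alloc) {
            return false;
        }
        alloc = (*alloc).next;
    }
    true
}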

#[cfg(test)]
mod bench {
    extern crate test;
    use self::test::BenchHarness;
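
    // These benchmarks allocate `@` boxes, which exercise the local heap
    // allocation and free paths defined above.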
    #[bench]
    fn alloc_managed_small(bh: &mut BenchHarness) {
        bh.iter(|| { @10; });
    }

    #[bench]
    fn alloc_managed_big(bh: &mut BenchHarness) {
        bh.iter(|| { @([10, ..1000]); });
    }
}