1 // Copyright 2013 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 //! The local, garbage collected heap
17 use libc::{c_void, free};
// Sentinel reference count used to pin a box during heap teardown: with the
// count held at this value, destructor-driven decrements can never reach zero
// and free a box mid-annihilation (see `LocalHeap::annihilate`, pass 1).
25 static RC_IMMORTAL : uint = 0x77777777;

// Type-erased view of a managed box header; the payload type is unknown here.
27 pub type Box = raw::Box<()>;
// Thin wrapper over the libc heap that counts outstanding allocations so a
// leak can be detected when the region is dropped (see `Drop for MemoryRegion`).
29 pub struct MemoryRegion {
// Number of boxes currently allocated through this region and not yet freed.
30 live_allocations: uint,
// The per-task heap of garbage-collected boxes, maintained as an intrusive
// doubly-linked list threaded through each box's `prev`/`next` header fields.
33 pub struct LocalHeap {
34 memory_region: MemoryRegion,
// Head of the intrusive list of live boxes; null when the heap is empty.
35 live_allocs: *mut raw::Box<()>,
// Constructs an empty heap: zero outstanding allocations, empty live list.
// NOTE(review): the struct-literal wrapper lines are missing from this listing.
39 pub fn new() -> LocalHeap {
41 memory_region: MemoryRegion { live_allocations: 0 },
42 live_allocs: ptr::mut_null(),
// Allocates a new box with the given payload size/alignment, records
// `drop_glue` in the box header, and pushes the box onto the front of the
// `live_allocs` intrusive list. Returns a pointer to the box header.
// NOTE(review): the `size: uint,` parameter line is missing from this listing.
48 pub fn alloc(&mut self,
49 drop_glue: fn(*mut u8),
51 align: uint) -> *mut Box {
// Total bytes = box header plus payload, padded for `align`.
52 let total_size = util::get_box_size(size, align);
53 let alloc = self.memory_region.malloc(total_size);
55 // Make sure that we can't use `mybox` outside of this scope
56 let mybox: &mut Box = unsafe { mem::transmute(alloc) };
57 // Clear out this box, and move it to the front of the live
59 mybox.drop_glue = drop_glue;
// Standard front-of-list insertion into the intrusive doubly-linked list.
61 mybox.prev = ptr::mut_null();
62 mybox.next = self.live_allocs;
63 if !self.live_allocs.is_null() {
64 unsafe { (*self.live_allocs).prev = alloc; }
66 self.live_allocs = alloc;
// Reallocates an existing box to hold `size` payload bytes, then repairs the
// intrusive-list links (both neighbours and, if needed, the list head),
// because the underlying realloc may have moved the box in memory.
// NOTE(review): unlike `alloc`, the size computation here ignores alignment
// (`size + size_of::<Box>()` vs `util::get_box_size`) — confirm intended.
72 pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
73 // Make sure that we can't use `mybox` outside of this scope
74 let total_size = size + mem::size_of::<Box>();
75 let new_box = self.memory_region.realloc(ptr, total_size);
77 // Fix links because we could have moved around
78 let mybox: &mut Box = unsafe { mem::transmute(new_box) };
79 if !mybox.prev.is_null() {
80 unsafe { (*mybox.prev).next = new_box; }
82 if !mybox.next.is_null() {
83 unsafe { (*mybox.next).prev = new_box; }
// If the reallocated box was the list head, the head must follow the move.
86 if self.live_allocs == ptr {
87 self.live_allocs = new_box;
// Unlinks `alloc` from the live-allocations list and returns its memory to
// the underlying region. Does not run drop glue — that happens elsewhere.
93 pub fn free(&mut self, alloc: *mut Box) {
95 // Make sure that we can't use `mybox` outside of this scope
96 let mybox: &mut Box = unsafe { mem::transmute(alloc) };
98 // Unlink it from the linked list
99 if !mybox.prev.is_null() {
100 unsafe { (*mybox.prev).next = mybox.next; }
102 if !mybox.next.is_null() {
103 unsafe { (*mybox.next).prev = mybox.prev; }
// Advance the list head if the box being freed is the first element.
105 if self.live_allocs == alloc {
106 self.live_allocs = mybox.next;
110 self.memory_region.free(alloc);
// Tears down every live managed box at task exit in three passes over the
// live list, then logs how many boxes were destroyed. Each pass's choice of
// `read_next_before` reflects whether the callback may free the current box.
113 pub unsafe fn annihilate(&mut self) {
114 let mut n_total_boxes = 0u;
116 // Pass 1: Make all boxes immortal.
118 // In this pass, nothing gets freed, so it does not matter whether
119 // we read the next field before or after the callback.
120 self.each_live_alloc(true, |_, alloc| {
// Pinning the count at RC_IMMORTAL means decrements performed while drop
// glue runs in pass 2 can never reach zero and free a box prematurely.
122 (*alloc).ref_count = RC_IMMORTAL;
125 // Pass 2: Drop all boxes.
127 // In this pass, unique-managed boxes may get freed, but not
128 // managed boxes, so we must read the `next` field *after* the
129 // callback, as the original value may have been freed.
130 self.each_live_alloc(false, |_, alloc| {
131 let drop_glue = (*alloc).drop_glue;
132 let data = &mut (*alloc).data as *mut ();
133 drop_glue(data as *mut u8);
136 // Pass 3: Free all boxes.
138 // In this pass, managed boxes may get freed (but not
139 // unique-managed boxes, though I think that none of those are
140 // left), so we must read the `next` field before, since it will
141 // not be valid after.
142 self.each_live_alloc(true, |me, alloc| {
// NOTE(review): the pass-3 closure body (incrementing `n_total_boxes` and
// freeing each box) is on lines missing from this listing.
147 // We do logging here w/o allocation.
148 rterrln!("total boxes annihilated: {}", n_total_boxes);
// Iterates over every box in the intrusive live list, invoking `f` on each.
// `read_next_before` chooses whether the successor pointer is captured before
// `f` runs (so `f` may safely free the current box) or re-read afterwards
// (for callbacks that may free the *successor* instead — see `annihilate`).
152 unsafe fn each_live_alloc(&mut self, read_next_before: bool,
153 f: |&mut LocalHeap, alloc: *mut raw::Box<()>|) {
154 //! Walks the internal list of allocations
156 let mut alloc = self.live_allocs;
157 while alloc != ptr::mut_null() {
// Capture the successor up front; used when `read_next_before` is true.
158 let next_before = (*alloc).next;
// NOTE(review): the invocation of `f` and the `alloc = next_before` branch
// are on lines missing from this listing.
162 if read_next_before {
165 alloc = (*alloc).next;
// A LocalHeap must be fully drained (every live box freed, e.g. via
// `annihilate`) before it is dropped; an empty live list is asserted here.
171 impl Drop for LocalHeap {
173 assert!(self.live_allocs.is_null());
// Header bookkeeping hooks; all no-ops in this build configuration (their
// debug counterparts presumably live behind a cfg on missing lines — verify).
180 fn init(&mut self, _size: u32) {}
181 fn assert_sane(&self) {}
182 fn update_size(&mut self, _size: u32) {}
// Returns a pointer to the Box that immediately follows this header within
// the same allocation: header address plus the padded header size.
184 fn as_box(&mut self) -> *mut Box {
185 let myaddr: uint = unsafe { mem::transmute(self) };
186 (myaddr + AllocHeader::size()) as *mut Box
190 // For some platforms, 16 byte alignment is required.
// Rounds the header size up to the next multiple of `ptr_size`.
// NOTE(review): the enclosing `fn size()` header and the `ptr_size` binding
// are on lines missing from this listing.
192 let header_size = mem::size_of::<AllocHeader>();
193 return (header_size + ptr_size - 1) / ptr_size * ptr_size;
// Recovers the AllocHeader sitting immediately before a Box pointer — the
// inverse of `as_box`.
196 fn from(a_box: *mut Box) -> *mut AllocHeader {
197 (a_box as uint - AllocHeader::size()) as *mut AllocHeader
// Two cfg-gated variants of `debug_mem` (the attributes and both bodies are
// on lines missing from this listing); presumably one consults a runtime
// debug flag and the other unconditionally returns false — verify upstream.
202 fn debug_mem() -> bool {
203 // FIXME: Need to port the environment struct to newsched
208 fn debug_mem() -> bool {
// Allocates `size` payload bytes preceded by an AllocHeader, initializes the
// header, bumps the live-allocation count, and returns the Box pointer that
// sits just past the header.
214 fn malloc(&mut self, size: uint) -> *mut Box {
215 let total_size = size + AllocHeader::size();
216 let alloc: *mut AllocHeader = unsafe {
// malloc_raw aborts on allocation failure, so no null check is needed here.
217 libc_heap::malloc_raw(total_size) as *mut AllocHeader
220 let alloc: &mut AllocHeader = unsafe { mem::transmute(alloc) };
221 alloc.init(size as u32);
223 self.live_allocations += 1;
225 return alloc.as_box();
// Resizes an existing allocation (header travels with the payload) and
// returns the possibly-moved Box pointer. The live-allocation count is
// unchanged: the same logical allocation survives.
229 fn realloc(&mut self, alloc: *mut Box, size: uint) -> *mut Box {
230 rtassert!(!alloc.is_null());
// Step back from the Box to the true start of the allocation (the header).
231 let orig_alloc = AllocHeader::from(alloc);
232 unsafe { (*orig_alloc).assert_sane(); }
234 let total_size = size + AllocHeader::size();
235 let alloc: *mut AllocHeader = unsafe {
236 libc_heap::realloc_raw(orig_alloc as *mut u8, total_size) as *mut AllocHeader
239 let alloc: &mut AllocHeader = unsafe { mem::transmute(alloc) };
241 alloc.update_size(size as u32);
// Bookkeeping hook informing the region the header may have moved (no-op here).
242 self.update(alloc, orig_alloc as *mut AllocHeader);
243 return alloc.as_box();
// Releases a Box's backing allocation (header included) via libc `free` and
// decrements the live-allocation count, asserting it cannot underflow.
247 fn free(&mut self, alloc: *mut Box) {
248 rtassert!(!alloc.is_null());
// Shadowing: `alloc` now points at the AllocHeader, the real allocation start.
249 let alloc = AllocHeader::from(alloc);
251 (*alloc).assert_sane();
252 self.release(mem::transmute(alloc));
253 rtassert!(self.live_allocations > 0);
254 self.live_allocations -= 1;
255 free(alloc as *mut c_void)
// Region-side bookkeeping hooks; all no-ops in this build configuration
// (debug counterparts presumably live behind a cfg on missing lines — verify).
260 fn claim(&mut self, _alloc: &mut AllocHeader) {}
262 fn release(&mut self, _alloc: &AllocHeader) {}
264 fn update(&mut self, _alloc: &mut AllocHeader, _orig: *mut AllocHeader) {}
// Leak check at region teardown: aborts the runtime (cannot unwind from a
// destructor safely) if any managed allocation was never freed.
267 impl Drop for MemoryRegion {
269 if self.live_allocations != 0 {
270 rtabort!("leaked managed memory ({} objects)", self.live_allocations);
// Trailing-underscore shim that simply forwards to `local_malloc` with the
// same arguments (presumably an ABI/linkage entry point — verify callers).
278 pub unsafe fn local_malloc_(drop_glue: fn(*mut u8), size: uint,
279 align: uint) -> *mut u8 {
280 local_malloc(drop_glue, size, align)
// Allocates a managed box on the current task's local heap, looked up via
// task-local storage; aborts the runtime when called outside of a task.
284 pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint,
285 align: uint) -> *mut u8 {
286 // FIXME: Unsafe borrow for speed. Lame.
287 let task: Option<*mut Task> = Local::try_unsafe_borrow();
// NOTE(review): the `match`/`Some` wrapper lines are missing from this listing.
290 (*task).heap.alloc(drop_glue, size, align) as *mut u8
292 None => rtabort!("local malloc outside of task")
// Forwarding shim for `local_free` (counterpart of `local_malloc_`).
// NOTE(review): the body is on lines missing from this listing.
299 pub unsafe fn local_free_(ptr: *mut u8) {
303 // NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
304 // inside a landing pad may corrupt the state of the exception handler. If a
305 // problem occurs, call exit instead.
// Frees a managed box via the current task's local heap, looked up through
// task-local storage; aborts the runtime when called outside of a task.
307 pub unsafe fn local_free(ptr: *mut u8) {
308 // FIXME: Unsafe borrow for speed. Lame.
309 let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();
// NOTE(review): the `match task_ptr { Some(task) => ... }` wrapper lines are
// missing from this listing; `task` is bound on one of them.
312 (*task).heap.free(ptr as *mut Box)
314 None => rtabort!("local free outside of task")
321 use self::test::Bencher;
// Benchmarks allocating a small managed box (a single int) per iteration.
// NOTE(review): the `#[bench]` attributes are on lines missing from this
// listing — presumably each fn carries one; verify upstream.
325 fn alloc_managed_small(b: &mut Bencher) {
326 b.iter(|| { box(GC) 10 });
// Benchmarks allocating a large managed box (a 1000-element array).
330 fn alloc_managed_big(b: &mut Bencher) {
331 b.iter(|| { box(GC) ([10, ..1000]) });