1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
15 use unstable::intrinsics::TyDesc;
// Signature of a type's "drop glue": the compiler-generated destructor
// routine, called with a pointer to the type descriptor pointer and an
// opaque pointer to the value's data.
18 type DropGlue<'a> = 'a |**TyDesc, *c_void|;
23 * This runs at task death to free all boxes.
// Counters accumulated across the annihilation passes and reported via
// `debug!` at the end of `annihilate()`. Fields used below include
// `n_total_boxes`, `n_unique_boxes`, and `n_bytes_freed`.
// NOTE(review): field declarations are elided in this view of the file.
26 struct AnnihilateStats {
// Walks the task-local heap's intrusive list of live allocations, invoking
// `f(alloc, uniq)` for each box until `f` returns false or the list ends.
//
// `read_next_before`: when true, the `next` pointer is captured *before*
// calling `f`, so the walk survives `f` freeing the current box.
// NOTE(review): the branch that selects between `next_before` and the
// re-read `(*alloc).next` is elided here — confirm against the full file.
//
// `uniq` tells the callback whether the box is unique-managed
// (ref_count == RC_MANAGED_UNIQUE) rather than a shared @-box.
32 unsafe fn each_live_alloc(read_next_before: bool,
33 f: |alloc: *mut raw::Box<()>, uniq: bool| -> bool)
35 //! Walks the internal list of allocations
40 let mut alloc = local_heap::live_allocs();
41 while alloc != ptr::mut_null() {
// Snapshot the link now, in case `f` frees `alloc` out from under us.
42 let next_before = (*alloc).next;
43 let uniq = (*alloc).ref_count == managed::RC_MANAGED_UNIQUE;
// A false return from the callback aborts the walk early.
45 if !f(alloc as *mut raw::Box<()>, uniq) {
// Re-read the link after the callback (valid only when `f` did not
// free the current box — the `read_next_before` path handles that case).
52 alloc = (*alloc).next;
// Whether to emit extra memory-debugging output.
// NOTE(review): two variants appear here, presumably selected by elided
// #[cfg(...)] attributes — confirm against the full file. This first one
// is stubbed pending a port of the environment struct to the new scheduler.
59 fn debug_mem() -> bool {
60 // XXX: Need to port the environment struct to newsched
// Second cfg-gated variant (body elided in this view).
65 fn debug_mem() -> bool {
69 /// Destroys all managed memory (i.e. @ boxes) held by the current task.
///
/// Runs at task death. Works in three passes over the task-local heap:
/// 1. pin every box immortal so drops in pass 2 cannot free shared boxes,
/// 2. run each box's destructor (drop glue),
/// 3. free the memory of every remaining box.
///
/// Unsafe: manipulates raw box headers and calls `local_free` directly.
70 pub unsafe fn annihilate() {
71 use rt::local_heap::local_free;
// Start all counters at zero (initializer fields elided in this view).
75 let mut stats = AnnihilateStats {
81 // Pass 1: Make all boxes immortal.
83 // In this pass, nothing gets freed, so it does not matter whether
84 // we read the next field before or after the callback.
85 each_live_alloc(true, |alloc, uniq| {
86 stats.n_total_boxes += 1;
// Counted when `uniq` is true (the guarding condition is elided here).
88 stats.n_unique_boxes += 1;
// Forcing RC_IMMORTAL prevents pass 2's destructors from freeing
// shared boxes while we are still iterating over them.
90 (*alloc).ref_count = managed::RC_IMMORTAL;
95 // Pass 2: Drop all boxes.
97 // In this pass, unique-managed boxes may get freed, but not
98 // managed boxes, so we must read the `next` field *after* the
99 // callback, as the original value may have been freed.
100 each_live_alloc(false, |alloc, uniq| {
// Invoke the value's destructor through its type descriptor.
102 let tydesc = (*alloc).type_desc;
103 let data = &(*alloc).data as *();
104 ((*tydesc).drop_glue)(data as *i8);
109 // Pass 3: Free all boxes.
111 // In this pass, managed boxes may get freed (but not
112 // unique-managed boxes, though I think that none of those are
113 // left), so we must read the `next` field before, since it will
114 // not be valid after.
115 each_live_alloc(true, |alloc, uniq| {
// Account for the payload plus the box header overhead.
117 stats.n_bytes_freed +=
118 (*((*alloc).type_desc)).size
119 + mem::size_of::<raw::Box<()>>();
120 local_free(alloc as *i8);
126 // We do logging here w/o allocation.
127 debug!("annihilator stats:\n \
131 stats.n_total_boxes, stats.n_unique_boxes, stats.n_bytes_freed);