rust/src/libstd/cleanup.rs

// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[doc(hidden)];

use libc::c_void;
use ptr::{mut_null};
use repr::BoxRepr;
use cast::transmute;
use unstable::intrinsics::TyDesc;
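
// Signature of a piece of drop glue: glue receives its type descriptor
// (by double indirection) and an untyped pointer to the value to drop.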
type DropGlue<'self> = &'self fn(**TyDesc, *c_void);

/*
 * Box annihilation
 *
 * This runs at task death to free all boxes.
 */

struct AnnihilateStats {
    n_total_boxes: uint,
    n_unique_boxes: uint,
    n_bytes_freed: uint
}

unsafe fn each_live_alloc(read_next_before: bool,
                          f: &fn(box: *mut BoxRepr, uniq: bool) -> bool) -> bool {
    //! Walks the internal list of allocations
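    //!
    //! When `read_next_before` is true, the `next` pointer is loaded
    //! before `f` runs, so the walk survives `f` freeing the current
    //! box; otherwise `next` is reloaded after the callback.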

    use managed;
    use rt::local_heap;

    let box = local_heap::live_allocs();
    let mut box: *mut BoxRepr = transmute(box);
    while box != mut_null() {
        let next_before = transmute((*box).header.next);
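        // `RC_MANAGED_UNIQUE` is a sentinel reference count marking a
        // uniquely-owned (~) box that lives on the local heap because
        // it contains managed data; such boxes are freed by their
        // owners, not by the annihilator.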
        let uniq =
            (*box).header.ref_count == managed::raw::RC_MANAGED_UNIQUE;

        if !f(box, uniq) {
            return false;
        }

        if read_next_before {
            box = next_before;
        } else {
            box = transmute((*box).header.next);
        }
    }
    return true;
}

#[cfg(unix)]
fn debug_mem() -> bool {
    use rt;
    use rt::OldTaskContext;

    // XXX: Need to port the environment struct to newsched
    match rt::context() {
        OldTaskContext => ::rt::env::get().debug_mem,
        _ => false
    }
}

#[cfg(windows)]
fn debug_mem() -> bool {
    false
}

/// Destroys all managed memory (i.e. @ boxes) held by the current task.
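///
/// The annihilator makes three passes over the task's live
/// allocations: it first marks every managed box immortal, then runs
/// each box's drop glue, and finally frees the boxes themselves.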
pub unsafe fn annihilate() {
    use rt::local_heap::local_free;
    use io::WriterUtil;
    use io;
    use libc;
    use rt::borrowck;
    use sys;
    use managed;

    let mut stats = AnnihilateStats {
        n_total_boxes: 0,
        n_unique_boxes: 0,
        n_bytes_freed: 0
    };

    // Quick hack: we need to free this list upon task exit, and this
    // is a convenient place to do it.
    borrowck::clear_task_borrow_list();

    // Pass 1: Make all boxes immortal.
    //
    // In this pass, nothing gets freed, so it does not matter whether
    // we read the next field before or after the callback.
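    //
    // Pinning the ref count at `RC_IMMORTAL` ensures that ref-count
    // decrements performed by destructors during pass 2 can never drop
    // a box to zero and free it while we still hold pointers to it.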
    for each_live_alloc(true) |box, uniq| {
        stats.n_total_boxes += 1;
        if uniq {
            stats.n_unique_boxes += 1;
        } else {
            (*box).header.ref_count = managed::raw::RC_IMMORTAL;
        }
    }

    // Pass 2: Drop all boxes.
    //
    // In this pass, unique-managed boxes may get freed, but not
    // managed boxes, so we must read the `next` field *after* the
    // callback, as the original value may have been freed.
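    //
    // Drop glue runs the value's destructor and releases its interior
    // references; the managed box's own memory is not reclaimed until
    // pass 3.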
    for each_live_alloc(false) |box, uniq| {
        if !uniq {
            let tydesc: *TyDesc = transmute((*box).header.type_desc);
            let data = transmute(&(*box).data);
            ((*tydesc).drop_glue)(data);
        }
    }

    // Pass 3: Free all boxes.
    //
    // In this pass, managed boxes may get freed (but not
    // unique-managed boxes, though I think that none of those are
    // left), so we must read the `next` field before, since it will
    // not be valid after.
    for each_live_alloc(true) |box, uniq| {
        if !uniq {
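            // Account for the value's size plus the box header that
            // precedes it in the allocation.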
            stats.n_bytes_freed +=
                (*((*box).header.type_desc)).size
                + sys::size_of::<BoxRepr>();
            local_free(transmute(box));
        }
    }

    if debug_mem() {
        // We do logging here w/o allocation.
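        // Writing raw bytes to the stderr file descriptor avoids
        // touching the task's heap, which we have just torn down.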
        let dbg = libc::STDERR_FILENO as io::fd_t;
        dbg.write_str("annihilator stats:");
        dbg.write_str("\n total_boxes: ");
        dbg.write_uint(stats.n_total_boxes);
        dbg.write_str("\n unique_boxes: ");
        dbg.write_uint(stats.n_unique_boxes);
        dbg.write_str("\n bytes_freed: ");
        dbg.write_uint(stats.n_bytes_freed);
        dbg.write_str("\n");
    }
}

/// Bindings to the runtime
pub mod rustrt {
    use libc::c_void;

    #[link_name = "rustrt"]
    extern {
        #[rust_stack]
        // FIXME (#4386): Unable to make following method private.
        pub unsafe fn rust_get_task() -> *c_void;
    }
}