// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The local, garbage collected heap
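//!
//! Each task owns a `LocalHeap` from which its managed (`@`) boxes are
//! allocated. Live boxes are threaded onto an intrusive doubly-linked
//! list headed by `live_allocs`, so the runtime can reach every live
//! allocation, while the backing `MemoryRegion` counts outstanding
//! allocations (and, under rtdebug, records them in a side table) so
//! that leaks abort at teardown.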

use cast;
use iter::Iterator;
use mem;
use ops::Drop;
use option::{Option, None, Some};
use ptr;
use ptr::RawPtr;
use rt::global_heap;
use rt::local::Local;
use rt::task::Task;
use raw;
use slice::{ImmutableVector, Vector};
use vec::Vec;

// TRACK_ALLOCATIONS has no meaning without rtdebug also turned on: 0
// disables tracking, any positive value checks the header magic on every
// heap operation, and values greater than 1 additionally record each
// allocation in the MemoryRegion's side table.
#[cfg(rtdebug)]
static TRACK_ALLOCATIONS: int = 0;
#[cfg(rtdebug)]
static MAGIC: u32 = 0xbadc0ffe;

pub type Box = raw::Box<()>;
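// The `Box` header fronts every managed allocation: it carries the drop
// glue, the reference count, and the `prev`/`next` links that thread the
// box onto `LocalHeap::live_allocs` below (see `alloc`).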

pub struct MemoryRegion {
    // Under rtdebug with TRACK_ALLOCATIONS > 1, a table holding every
    // outstanding allocation's header; unused otherwise.
    allocations: Vec<*AllocHeader>,
    // Count of allocations that have been malloc'd but not yet freed.
    live_allocations: uint,
}

pub struct LocalHeap {
    memory_region: MemoryRegion,
    // Head of the intrusive doubly-linked list of live boxes.
    live_allocs: *mut raw::Box<()>,
}

impl LocalHeap {
    #[inline]
    pub fn new() -> LocalHeap {
        let region = MemoryRegion {
            allocations: Vec::new(),
            live_allocations: 0,
        };
        LocalHeap {
            memory_region: region,
            live_allocs: ptr::mut_null(),
        }
    }

    #[inline]
    pub fn alloc(&mut self, drop_glue: fn(*mut u8), size: uint, align: uint) -> *mut Box {
        let total_size = global_heap::get_box_size(size, align);
        let alloc = self.memory_region.malloc(total_size);
        {
            // Make sure that we can't use `mybox` outside of this scope
            let mybox: &mut Box = unsafe { cast::transmute(alloc) };

            // Clear out this box, and move it to the front of the live
            // allocations list
            mybox.drop_glue = drop_glue;
            mybox.ref_count = 1;
            mybox.prev = ptr::mut_null();
            mybox.next = self.live_allocs;
            if !self.live_allocs.is_null() {
                unsafe { (*self.live_allocs).prev = alloc; }
            }
            self.live_allocs = alloc;
        }
        return alloc;
    }

    #[inline]
    pub fn realloc(&mut self, ptr: *mut Box, size: uint) -> *mut Box {
        let total_size = size + mem::size_of::<Box>();
        let new_box = self.memory_region.realloc(ptr, total_size);
        {
            // Make sure that we can't use `mybox` outside of this scope.
            // Fix up the links, because the box could have moved.
            let mybox: &mut Box = unsafe { cast::transmute(new_box) };
            if !mybox.prev.is_null() {
                unsafe { (*mybox.prev).next = new_box; }
            }
            if !mybox.next.is_null() {
                unsafe { (*mybox.next).prev = new_box; }
            }
        }
        if self.live_allocs == ptr {
            self.live_allocs = new_box;
        }
        return new_box;
    }

    #[inline]
    pub fn free(&mut self, alloc: *mut Box) {
        {
            // Make sure that we can't use `mybox` outside of this scope
            let mybox: &mut Box = unsafe { cast::transmute(alloc) };

            // Unlink it from the linked list
            if !mybox.prev.is_null() {
                unsafe { (*mybox.prev).next = mybox.next; }
            }
            if !mybox.next.is_null() {
                unsafe { (*mybox.next).prev = mybox.prev; }
            }
            if self.live_allocs == alloc {
                self.live_allocs = mybox.next;
            }
        }

        self.memory_region.free(alloc);
    }
}

impl Drop for LocalHeap {
    fn drop(&mut self) {
        // Every box must have been freed before the heap goes away.
        assert!(self.live_allocs.is_null());
    }
}

#[cfg(rtdebug)]
struct AllocHeader {
    magic: u32,
    index: i32,
    size: u32,
}
#[cfg(not(rtdebug))]
struct AllocHeader;

impl AllocHeader {
    #[cfg(rtdebug)]
    fn init(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.magic = MAGIC;
            self.index = -1;
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn init(&mut self, _size: u32) {}

    #[cfg(rtdebug)]
    fn assert_sane(&self) {
        if TRACK_ALLOCATIONS > 0 {
            rtassert!(self.magic == MAGIC);
        }
    }
    #[cfg(not(rtdebug))]
    fn assert_sane(&self) {}

    #[cfg(rtdebug)]
    fn update_size(&mut self, size: u32) {
        if TRACK_ALLOCATIONS > 0 {
            self.size = size;
        }
    }
    #[cfg(not(rtdebug))]
    fn update_size(&mut self, _size: u32) {}

    fn as_box(&mut self) -> *mut Box {
        let myaddr: uint = unsafe { cast::transmute(self) };
        (myaddr + AllocHeader::size()) as *mut Box
    }

    fn size() -> uint {
        // For some platforms, 16 byte alignment is required.
        let ptr_size = 16;
        let header_size = mem::size_of::<AllocHeader>();
        return (header_size + ptr_size - 1) / ptr_size * ptr_size;
    }
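
    // Worked example for the rounding above: the 12-byte rtdebug header
    // rounds up to (12 + 15) / 16 * 16 = 16, while the zero-sized
    // not(rtdebug) header stays at (0 + 15) / 16 * 16 = 0, i.e. the box
    // carries no header space at all in non-debug builds.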

    fn from(a_box: *mut Box) -> *mut AllocHeader {
        (a_box as uint - AllocHeader::size()) as *mut AllocHeader
    }
}

impl MemoryRegion {
    #[inline]
    fn malloc(&mut self, size: uint) -> *mut Box {
        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::malloc_raw(total_size) as *AllocHeader
        };

        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.init(size as u32);
        self.claim(alloc);
        self.live_allocations += 1;

        return alloc.as_box();
    }

    #[inline]
    fn realloc(&mut self, alloc: *mut Box, size: uint) -> *mut Box {
        rtassert!(!alloc.is_null());
        let orig_alloc = AllocHeader::from(alloc);
        unsafe { (*orig_alloc).assert_sane(); }

        let total_size = size + AllocHeader::size();
        let alloc: *AllocHeader = unsafe {
            global_heap::realloc_raw(orig_alloc as *mut u8,
                                     total_size) as *AllocHeader
        };

        let alloc: &mut AllocHeader = unsafe { cast::transmute(alloc) };
        alloc.assert_sane();
        alloc.update_size(size as u32);
        self.update(alloc, orig_alloc as *AllocHeader);
        return alloc.as_box();
    }

    #[inline]
    fn free(&mut self, alloc: *mut Box) {
        rtassert!(!alloc.is_null());
        let alloc = AllocHeader::from(alloc);
        unsafe {
            (*alloc).assert_sane();
            self.release(cast::transmute(alloc));
            rtassert!(self.live_allocations > 0);
            self.live_allocations -= 1;
            global_heap::exchange_free(alloc as *u8)
        }
    }

    #[cfg(rtdebug)]
    fn claim(&mut self, alloc: &mut AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            alloc.index = self.allocations.len() as i32;
            self.allocations.push(&*alloc as *AllocHeader);
        }
    }
    #[cfg(not(rtdebug))]
    #[inline]
    fn claim(&mut self, _alloc: &mut AllocHeader) {}

    #[cfg(rtdebug)]
    fn release(&mut self, alloc: &AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            // `index` is stored as an i32; slices are indexed by uint.
            rtassert!(self.allocations.as_slice()[alloc.index as uint] ==
                      alloc as *AllocHeader);
            self.allocations.as_mut_slice()[alloc.index as uint] = ptr::null();
        }
    }
    #[cfg(not(rtdebug))]
    #[inline]
    fn release(&mut self, _alloc: &AllocHeader) {}

    #[cfg(rtdebug)]
    fn update(&mut self, alloc: &mut AllocHeader, orig: *AllocHeader) {
        alloc.assert_sane();
        if TRACK_ALLOCATIONS > 1 {
            rtassert!(self.allocations.as_slice()[alloc.index as uint] == orig);
            self.allocations.as_mut_slice()[alloc.index as uint] =
                &*alloc as *AllocHeader;
        }
    }
    #[cfg(not(rtdebug))]
    #[inline]
    fn update(&mut self, _alloc: &mut AllocHeader, _orig: *AllocHeader) {}
}

impl Drop for MemoryRegion {
    fn drop(&mut self) {
        if self.live_allocations != 0 {
            rtabort!("leaked managed memory ({} objects)", self.live_allocations);
        }
        rtassert!(self.allocations.as_slice().iter().all(|s| s.is_null()));
    }
}
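
// The compiler routes managed-box expressions through the `malloc` and
// `free` lang items defined below. Roughly (a sketch, not actual
// generated code; `@10` is the managed-box syntax benchmarked at the
// bottom of this file):
//
//     let x = @10;   // calls local_malloc_ with int's drop glue,
//                    // size, and alignment
//     // dropping the last reference ends up in local_free_ with the
//     // box pointer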

#[cfg(not(test))]
#[lang="malloc"]
#[inline]
pub unsafe fn local_malloc_(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
    local_malloc(drop_glue, size, align)
}

#[inline]
pub unsafe fn local_malloc(drop_glue: fn(*mut u8), size: uint, align: uint) -> *u8 {
    // FIXME: Unsafe borrow for speed. Lame.
    let task: Option<*mut Task> = Local::try_unsafe_borrow();
    match task {
        Some(task) => {
            (*task).heap.alloc(drop_glue, size, align) as *u8
        }
        None => rtabort!("local malloc outside of task")
    }
}

#[cfg(not(test))]
#[lang="free"]
#[inline]
pub unsafe fn local_free_(ptr: *u8) {
    local_free(ptr)
}

// NB: Calls to free CANNOT be allowed to fail, as throwing an exception from
// inside a landing pad may corrupt the state of the exception handler. If a
// problem occurs, call exit instead.
#[inline]
pub unsafe fn local_free(ptr: *u8) {
    // FIXME: Unsafe borrow for speed. Lame.
    let task_ptr: Option<*mut Task> = Local::try_unsafe_borrow();
    match task_ptr {
        Some(task) => {
            (*task).heap.free(ptr as *mut Box)
        }
        None => rtabort!("local free outside of task")
    }
}

pub fn live_allocs() -> *mut Box {
    let mut task = Local::borrow(None::<Task>);
    task.get().heap.live_allocs
}
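
// A minimal sketch of walking the returned list (hypothetical caller;
// the `ref_count`, `drop_glue`, and `next` fields come from `raw::Box`):
//
//     let mut cur = live_allocs();
//     while !cur.is_null() {
//         // inspect (*cur).ref_count, (*cur).drop_glue, ...
//         cur = unsafe { (*cur).next };
//     }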

#[cfg(test)]
mod bench {
    extern crate test;
    use self::test::BenchHarness;

    #[bench]
    fn alloc_managed_small(bh: &mut BenchHarness) {
        bh.iter(|| { @10; });
    }

    #[bench]
    fn alloc_managed_big(bh: &mut BenchHarness) {
        bh.iter(|| { @([10, ..1000]); });
    }
}