#[doc(hidden)];

// NB: transitionary, de-mode-ing.
#[forbid(deprecated_mode)];
#[forbid(deprecated_pattern)];

use libc::{c_char, c_void, intptr_t, uintptr_t};
use ptr::{mut_null, null, to_unsafe_ptr};
use repr::BoxRepr;
use sys::TypeDesc;
use cast::transmute;

/**
 * Runtime structures
 *
 * NB: These must match the representation in the C++ runtime.
 */

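// Compiler-generated glue functions receive the value's type descriptor (by
// double indirection) and an untyped pointer to the value itself.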
type DropGlue = fn(**TypeDesc, *c_void);
type FreeGlue = fn(**TypeDesc, *c_void);

type TaskID = uintptr_t;

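// Opaque stand-ins for runtime-side types; Rust code here only ever refers
// to them through raw pointers.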
struct StackSegment { priv opaque: () }
struct Scheduler { priv opaque: () }
struct SchedulerLoop { priv opaque: () }
struct Kernel { priv opaque: () }
struct Env { priv opaque: () }
struct AllocHeader { priv opaque: () }
struct MemoryRegion { priv opaque: () }

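// Saved CPU context for a task. The layouts below must line up with the
// register-save area used by the C++ runtime's context-switching code on
// each architecture.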
#[cfg(target_arch="x86")]
struct Registers {
    data: [u32 * 16]
}

#[cfg(target_arch="x86")]
struct Context {
    regs: Registers,
    next: *Context,
    pad: [u32 * 3]
}

#[cfg(target_arch="x86_64")]
struct Registers {
    data: [u64 * 22]
}

#[cfg(target_arch="x86_64")]
struct Context {
    regs: Registers,
    next: *Context,
    pad: uintptr_t
}

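// Per-task boxed (@) allocation region. `live_allocs` is the head of the
// list of live boxes that annihilate() walks below via the BoxRepr headers.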
struct BoxedRegion {
    env: *Env,
    backing_region: *MemoryRegion,
    live_allocs: *BoxRepr
}

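// Shadow of the C++ runtime's task structure. The trailing numbers on the
// x86 variant are each field's byte offset.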
#[cfg(target_arch="x86")]
struct Task {
    // Public fields
    refcount: intptr_t,             // 0
    id: TaskID,                     // 4
    pad: [u32 * 2],                 // 8
    ctx: Context,                   // 16
    stack_segment: *StackSegment,   // 96
    runtime_sp: uintptr_t,          // 100
    scheduler: *Scheduler,          // 104
    scheduler_loop: *SchedulerLoop, // 108

    // Fields known only to the runtime
    kernel: *Kernel,                // 112
    name: *c_char,                  // 116
    list_index: i32,                // 120
    rendezvous_ptr: *uintptr_t,     // 124
    boxed_region: BoxedRegion       // 128
}

#[cfg(target_arch="x86_64")]
struct Task {
    // Public fields
    refcount: intptr_t,
    id: TaskID,
    ctx: Context,
    stack_segment: *StackSegment,
    runtime_sp: uintptr_t,
    scheduler: *Scheduler,
    scheduler_loop: *SchedulerLoop,

    // Fields known only to the runtime
    kernel: *Kernel,
    name: *c_char,
    list_index: i32,
    rendezvous_ptr: *uintptr_t,
    boxed_region: BoxedRegion
}

/*
 * Box annihilation
 *
 * This runs at task death to free all boxes.
 */

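// Annihilation proceeds in three passes over the task's live_allocs list:
// make every box immortal, run each box's drop glue, then hand each box back
// to the runtime with rt_free.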
/// Destroys all managed memory (i.e. @ boxes) held by the current task.
#[cfg(notest)]
#[lang="annihilate"]
pub unsafe fn annihilate() {
    use rt::rt_free;
    use io::WriterUtil;

    let task: *Task = transmute(rustrt::rust_get_task());

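    // Setting every ref count to a sentinel value here keeps the drop glue
    // run in pass 2 from freeing boxes (and mutating the list) while it is
    // still being walked.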
    // Pass 1: Make all boxes immortal.
    let box = (*task).boxed_region.live_allocs;
    let mut box: *mut BoxRepr = transmute(copy box);
    while box != mut_null() {
        debug!("making box immortal: %x", box as uint);
        (*box).header.ref_count = 0x77777777;
        box = transmute(copy (*box).header.next);
    }

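    // Note: the descriptor's drop_glue field is stored as a bare code
    // pointer; the transmute below pairs it with a zero environment word so
    // it can be called through the DropGlue fn type.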
    // Pass 2: Drop all boxes.
    let box = (*task).boxed_region.live_allocs;
    let mut box: *mut BoxRepr = transmute(copy box);
    while box != mut_null() {
        debug!("calling drop glue for box: %x", box as uint);
        let tydesc: *TypeDesc = transmute(copy (*box).header.type_desc);
        let drop_glue: DropGlue = transmute(((*tydesc).drop_glue, 0));
        drop_glue(to_unsafe_ptr(&tydesc), transmute(&(*box).data));

        box = transmute(copy (*box).header.next);
    }

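    // Freeing the head box is expected to unlink it from live_allocs, so the
    // loop keeps re-reading the list head until the list is empty; the assert
    // checks that the head really has no predecessor.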
    // Pass 3: Free all boxes.
    loop {
        let box = (*task).boxed_region.live_allocs;
        if box == null() { break; }
        let mut box: *mut BoxRepr = transmute(copy box);
        assert (*box).header.prev == null();

        debug!("freeing box: %x", box as uint);
        rt_free(transmute(move box));
    }
}

/// Bindings to the runtime
extern mod rustrt {
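    // rust_get_task is marked #[rust_stack] so it can be called directly on
    // the Rust stack, without switching to the C stack.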
    #[legacy_exports];

    #[rust_stack]
    /*priv*/ fn rust_get_task() -> *c_void;
}