2011-09-20 18:47:59 -05:00
|
|
|
// Rust cycle collector. Temporary, but will probably stick around for some
|
|
|
|
// time until LLVM's GC infrastructure is more mature.
|
|
|
|
|
2011-09-20 19:49:14 -05:00
|
|
|
#include <map>
|
2011-09-21 20:13:46 -05:00
|
|
|
#include <set>
|
2011-09-20 19:49:14 -05:00
|
|
|
#include <vector>
|
2012-02-24 13:03:23 -06:00
|
|
|
#include <ios>
|
2011-09-20 18:47:59 -05:00
|
|
|
|
2012-04-02 22:18:01 -05:00
|
|
|
#include "rust_globals.h"
|
|
|
|
#include "rust_cc.h"
|
|
|
|
#include "rust_debug.h"
|
|
|
|
#include "rust_shape.h"
|
|
|
|
#include "rust_task.h"
|
|
|
|
|
2011-09-23 20:30:22 -05:00
|
|
|
// The number of allocations Rust code performs before performing cycle
|
|
|
|
// collection.
|
|
|
|
#define RUST_CC_FREQUENCY 5000
|
|
|
|
|
2012-02-24 13:03:23 -06:00
|
|
|
using namespace std;
|
|
|
|
|
2012-03-26 18:27:35 -05:00
|
|
|
void
|
|
|
|
annihilate_box(rust_task *task, rust_opaque_box *box);
|
|
|
|
|
2011-09-20 18:47:59 -05:00
|
|
|
namespace cc {
|
|
|
|
|
2011-09-20 19:49:14 -05:00
|
|
|
// Internal reference count computation
|
|
|
|
|
2012-02-01 20:52:08 -06:00
|
|
|
typedef std::map<rust_opaque_box*,uintptr_t> irc_map;
|
2011-09-20 19:49:14 -05:00
|
|
|
|
|
|
|
// Internal-reference-count visitor.  Walks the contents of each live box
// (driven by the shape::data CRTP walking framework) and, for every box
// pointer found *inside* another box, bumps that box's count in `ircs`.
// The resulting map tells find_roots() how many of a box's references
// come from the heap itself, as opposed to stack/task roots.
class irc : public shape::data<irc,shape::ptr> {
    friend class shape::data<irc,shape::ptr>;

    // Shared accumulator: box -> number of internal references seen so far.
    // All sub-walkers constructed from this one alias the same map.
    irc_map &ircs;

    // Sub-walker at a new data pointer, same shape position.
    irc(const irc &other, const shape::ptr &in_dp)
    : shape::data<irc,shape::ptr>(other.task, other.align, other.sp,
                                  other.params, other.tables, in_dp),
      ircs(other.ircs) {}

    // Sub-walker at a new shape position (e.g. a tag variant), same data.
    irc(const irc &other,
        const uint8_t *in_sp,
        const shape::type_param *in_params,
        const rust_shape_tables *in_tables = NULL)
    : shape::data<irc,shape::ptr>(other.task,
                                  other.align,
                                  in_sp,
                                  in_params,
                                  in_tables ? in_tables : other.tables,
                                  other.dp),
      ircs(other.ircs) {}

    // Sub-walker at both a new shape position and a new data pointer.
    irc(const irc &other,
        const uint8_t *in_sp,
        const shape::type_param *in_params,
        const rust_shape_tables *in_tables,
        shape::ptr in_dp)
    : shape::data<irc,shape::ptr>(other.task,
                                  other.align,
                                  in_sp,
                                  in_params,
                                  in_tables,
                                  in_dp),
      ircs(other.ircs) {}

    // Root walker for one box body; used by compute_ircs().
    irc(rust_task *in_task,
        bool in_align,
        const uint8_t *in_sp,
        const shape::type_param *in_params,
        const rust_shape_tables *in_tables,
        uint8_t *in_data,
        irc_map &in_ircs)
    : shape::data<irc,shape::ptr>(in_task, in_align, in_sp, in_params,
                                  in_tables, in_data),
      ircs(in_ircs) {}

    // Walk every element of a vector's data range.
    void walk_vec2(bool is_pod, std::pair<uint8_t *,uint8_t *> data_range) {
        // There can't be any outbound pointers from pod.
        if (is_pod)
            return;

        irc sub(*this, data_range.first);
        shape::ptr data_end = sub.end_dp = data_range.second;
        while (sub.dp < data_end) {
            sub.walk_reset();
            // FIXME: shouldn't this be 'sub.align = true;'?
            align = true;
        }
    }

    // Walk a heap vector; a NULL vec pointer means nothing to do.
    void walk_vec2(bool is_pod) {
        if (shape::get_dp<void *>(dp) == NULL)
            return;

        walk_vec2(is_pod, get_vec_data_range(dp));
    }

    void walk_slice2(bool is_pod, bool is_str) {
        walk_vec2(is_pod, get_slice_data_range(is_str, dp));
    }

    void walk_fixedvec2(uint16_t n_elts, size_t elt_sz, bool is_pod) {
        walk_vec2(is_pod, get_fixedvec_data_range(n_elts, elt_sz, dp));
    }

    // Tags delegate to the framework's variant dispatch, which calls back
    // into walk_variant2() below.
    void walk_tag2(shape::tag_info &tinfo, uint32_t tag_variant) {
        shape::data<irc,shape::ptr>::walk_variant1(tinfo, tag_variant);
    }

    void walk_box2() {
        // the box ptr can be NULL for env ptrs in closures and data
        // not fully initialized
        rust_opaque_box *box = *(rust_opaque_box**)dp;
        if (box)
            shape::data<irc,shape::ptr>::walk_box_contents1();
    }

    // Uniques may also be NULL (same partially-initialized cases as boxes).
    void walk_uniq2() {
        rust_opaque_box *box = *(rust_opaque_box**)dp;
        if (box)
            shape::data<irc,shape::ptr>::walk_uniq_contents1();
    }

    void walk_rptr2() {
        shape::data<irc,shape::ptr>::walk_rptr_contents1();
    }

    // Closures: only box-environment closures can participate in cycles.
    void walk_fn2(char code) {
        switch (code) {
          case shape::SHAPE_BOX_FN: {
              shape::bump_dp<void*>(dp); // skip over the code ptr
              walk_box2();               // walk over the environment ptr
              break;
          }
          case shape::SHAPE_BARE_FN:  // Does not close over data.
          case shape::SHAPE_STACK_FN: // Not reachable from heap.
          case shape::SHAPE_UNIQ_FN: break; /* Can only close over sendable
                                             * (and hence acyclic) data */
          default: abort();
        }
    }

    // An iface value's data word is a box.
    void walk_iface2() {
        walk_box2();
    }

    void walk_tydesc2(char) {
    }

    // Resources: walk each field; dtor/params/liveness don't affect the
    // reference graph, so they are ignored here.
    void walk_res2(const shape::rust_fn *dtor, unsigned n_params,
                   const shape::type_param *params, const uint8_t *end_sp,
                   bool live) {
        while (this->sp != end_sp) {
            this->walk();
            align = true;
        }
    }

    void walk_subcontext2(irc &sub) { sub.walk(); }

    void walk_uniq_contents2(irc &sub) { sub.walk(); }

    void walk_rptr_contents2(irc &sub) { sub.walk(); }

    // A box pointer found inside another allocation: record the internal
    // reference but do NOT recurse into the pointee.
    void walk_box_contents2(irc &sub) {
        maybe_record_irc();

        // Do not traverse the contents of this box; it's in the allocation
        // somewhere, so we're guaranteed to come back to it (if we haven't
        // traversed it already).
    }

    // Record one internal reference to the box pointed at by `dp`.
    // NOTE(review): does a find() followed by one or two operator[]
    // lookups — could be a single map::insert; left as-is here.
    void maybe_record_irc() {
        rust_opaque_box *box_ptr = *(rust_opaque_box **) dp;

        if (!box_ptr)
            return;

        // Bump the internal reference count of the box.
        if (ircs.find(box_ptr) == ircs.end()) {
            LOG(task, gc,
                "setting internal reference count for %p to 1",
                box_ptr);
            ircs[box_ptr] = 1;
        } else {
            uintptr_t newcount = ircs[box_ptr] + 1;
            LOG(task, gc,
                "bumping internal reference count for %p to %lu",
                box_ptr, newcount);
            ircs[box_ptr] = newcount;
        }
    }

    void walk_struct2(const uint8_t *end_sp) {
        while (this->sp != end_sp) {
            this->walk();
            align = true;
        }
    }

    // Defined out of line below.
    void walk_variant2(shape::tag_info &tinfo, uint32_t variant_id,
                       const std::pair<const uint8_t *,const uint8_t *>
                       variant_ptr_and_end);

    // Scalars contain no pointers.
    template<typename T>
    inline void walk_number2() { /* no-op */ }

public:
    // Populate `ircs` by walking every live box in `task`'s boxed region.
    static void compute_ircs(rust_task *task, irc_map &ircs);
};
|
|
|
|
|
2011-09-20 18:47:59 -05:00
|
|
|
// Walk the fields of one tag variant: a sub-walker positioned at the
// variant's shape bytes steps through each field until the variant's
// shape description is exhausted.
void
irc::walk_variant2(shape::tag_info &tinfo, uint32_t variant_id,
                   const std::pair<const uint8_t *,const uint8_t *>
                   variant_ptr_and_end) {
    irc sub(*this, variant_ptr_and_end.first, tinfo.params);

    assert(variant_id < 256); // FIXME: Temporary sanity check.

    const uint8_t *variant_end = variant_ptr_and_end.second;
    while (sub.sp < variant_end) {
        sub.walk();
        // NOTE(review): sets the OUTER walker's align flag, not sub.align —
        // same question as the FIXME in irc::walk_vec2; confirm intended.
        align = true;
    }
}
|
|
|
|
|
|
|
|
void
|
|
|
|
irc::compute_ircs(rust_task *task, irc_map &ircs) {
|
2012-02-01 20:52:08 -06:00
|
|
|
boxed_region *boxed = &task->boxed;
|
|
|
|
for (rust_opaque_box *box = boxed->first_live_alloc();
|
|
|
|
box != NULL;
|
|
|
|
box = box->next) {
|
|
|
|
type_desc *tydesc = box->td;
|
|
|
|
uint8_t *body = (uint8_t*) box_body(box);
|
|
|
|
|
2012-04-01 01:12:06 -05:00
|
|
|
LOG(task, gc,
|
2012-02-01 20:52:08 -06:00
|
|
|
"determining internal ref counts: "
|
|
|
|
"box=%p tydesc=%p body=%p",
|
|
|
|
box, tydesc, body);
|
2012-04-01 01:12:06 -05:00
|
|
|
|
2011-09-20 18:47:59 -05:00
|
|
|
shape::arena arena;
|
|
|
|
shape::type_param *params =
|
2012-02-01 20:52:08 -06:00
|
|
|
shape::type_param::from_tydesc_and_data(tydesc, body, arena);
|
2011-09-20 21:15:13 -05:00
|
|
|
|
2011-09-26 17:06:26 -05:00
|
|
|
irc irc(task, true, tydesc->shape, params, tydesc->shape_tables,
|
2012-02-01 20:52:08 -06:00
|
|
|
body, ircs);
|
2011-09-26 17:06:26 -05:00
|
|
|
irc.walk();
|
2011-09-20 18:47:59 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-09-20 19:49:14 -05:00
|
|
|
|
2011-09-21 20:13:46 -05:00
|
|
|
// Root finding
|
|
|
|
|
|
|
|
void
|
2012-02-01 20:52:08 -06:00
|
|
|
find_roots(rust_task *task, irc_map &ircs,
|
|
|
|
std::vector<rust_opaque_box *> &roots) {
|
|
|
|
boxed_region *boxed = &task->boxed;
|
|
|
|
for (rust_opaque_box *box = boxed->first_live_alloc();
|
|
|
|
box != NULL;
|
|
|
|
box = box->next) {
|
|
|
|
uintptr_t ref_count = box->ref_count;
|
2011-09-21 20:13:46 -05:00
|
|
|
|
|
|
|
uintptr_t irc;
|
2012-02-01 20:52:08 -06:00
|
|
|
if (ircs.find(box) != ircs.end())
|
|
|
|
irc = ircs[box];
|
2011-09-21 20:13:46 -05:00
|
|
|
else
|
|
|
|
irc = 0;
|
|
|
|
|
|
|
|
if (irc < ref_count) {
|
|
|
|
// This allocation must be a root, because the internal reference
|
|
|
|
// count is smaller than the total reference count.
|
2011-10-21 19:35:52 -05:00
|
|
|
LOG(task, gc,"root found: %p, irc %lu, ref count %lu",
|
2012-02-01 20:52:08 -06:00
|
|
|
box, irc, ref_count);
|
|
|
|
roots.push_back(box);
|
2011-09-21 20:13:46 -05:00
|
|
|
} else {
|
2011-10-20 18:18:23 -05:00
|
|
|
LOG(task, gc, "nonroot found: %p, irc %lu, ref count %lu",
|
2012-02-01 20:52:08 -06:00
|
|
|
box, irc, ref_count);
|
2011-10-21 15:27:44 -05:00
|
|
|
assert(irc == ref_count && "Internal reference count must be "
|
|
|
|
"less than or equal to the total reference count!");
|
2011-09-21 20:13:46 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Marking
|
|
|
|
|
|
|
|
// Mark-phase visitor.  Starting from the root set, transitively walks box
// contents and inserts every reachable box into `marked`.  Boxes absent
// from `marked` after the walk are unreachable cycle members and are
// reclaimed by do_sweep().
class mark : public shape::data<mark,shape::ptr> {
    friend class shape::data<mark,shape::ptr>;

    // Shared mark set; also serves as the visited set that stops the walk
    // from chasing cycles forever.
    std::set<rust_opaque_box *> &marked;

    // Sub-walker at a new data pointer, same shape position.
    mark(const mark &other, const shape::ptr &in_dp)
    : shape::data<mark,shape::ptr>(other.task, other.align, other.sp,
                                   other.params, other.tables, in_dp),
      marked(other.marked) {}

    // Sub-walker at a new shape position (e.g. a tag variant), same data.
    mark(const mark &other,
         const uint8_t *in_sp,
         const shape::type_param *in_params,
         const rust_shape_tables *in_tables = NULL)
    : shape::data<mark,shape::ptr>(other.task,
                                   other.align,
                                   in_sp,
                                   in_params,
                                   in_tables ? in_tables : other.tables,
                                   other.dp),
      marked(other.marked) {}

    // Sub-walker at both a new shape position and a new data pointer.
    mark(const mark &other,
         const uint8_t *in_sp,
         const shape::type_param *in_params,
         const rust_shape_tables *in_tables,
         shape::ptr in_dp)
    : shape::data<mark,shape::ptr>(other.task,
                                   other.align,
                                   in_sp,
                                   in_params,
                                   in_tables,
                                   in_dp),
      marked(other.marked) {}

    // Root walker for one box body; used by do_mark().
    mark(rust_task *in_task,
         bool in_align,
         const uint8_t *in_sp,
         const shape::type_param *in_params,
         const rust_shape_tables *in_tables,
         uint8_t *in_data,
         std::set<rust_opaque_box*> &in_marked)
    : shape::data<mark,shape::ptr>(in_task, in_align, in_sp, in_params,
                                   in_tables, in_data),
      marked(in_marked) {}

    // Walk every element of a vector's data range.
    void walk_vec2(bool is_pod, std::pair<uint8_t *,uint8_t *> data_range) {
        // There can't be any outbound pointers from pod.
        if (is_pod)
            return;

        // Guard against walking off into garbage via a corrupted length.
        if (data_range.second - data_range.first > 100000)
            abort();    // FIXME: Temporary sanity check.

        mark sub(*this, data_range.first);
        shape::ptr data_end = sub.end_dp = data_range.second;
        while (sub.dp < data_end) {
            sub.walk_reset();
            // NOTE(review): sets the OUTER walker's align, not sub.align —
            // same question as the FIXME in irc::walk_vec2.
            align = true;
        }
    }

    // Walk a heap vector; a NULL vec pointer means nothing to do.
    void walk_vec2(bool is_pod) {
        if (shape::get_dp<void *>(dp) == NULL)
            return;
        walk_vec2(is_pod, get_vec_data_range(dp));
    }

    void walk_slice2(bool is_pod, bool is_str) {
        walk_vec2(is_pod, get_slice_data_range(is_str, dp));
    }

    void walk_fixedvec2(uint16_t n_elts, size_t elt_sz, bool is_pod) {
        walk_vec2(is_pod, get_fixedvec_data_range(n_elts, elt_sz, dp));
    }

    // Tags delegate to the framework's variant dispatch, which calls back
    // into walk_variant2() below.
    void walk_tag2(shape::tag_info &tinfo, uint32_t tag_variant) {
        shape::data<mark,shape::ptr>::walk_variant1(tinfo, tag_variant);
    }

    void walk_box2() {
        // the box ptr can be NULL for env ptrs in closures and data
        // not fully initialized
        rust_opaque_box *box = *(rust_opaque_box**)dp;
        if (box)
            shape::data<mark,shape::ptr>::walk_box_contents1();
    }

    // Uniques may also be NULL (same partially-initialized cases as boxes).
    void walk_uniq2() {
        rust_opaque_box *box = *(rust_opaque_box**)dp;
        if (box)
            shape::data<mark,shape::ptr>::walk_uniq_contents1();
    }

    void walk_rptr2() {
        shape::data<mark,shape::ptr>::walk_rptr_contents1();
    }

    // Closures: only box-environment closures can participate in cycles.
    void walk_fn2(char code) {
        switch (code) {
          case shape::SHAPE_BOX_FN: {
              shape::bump_dp<void*>(dp); // skip over the code ptr
              walk_box2();               // walk over the environment ptr
              break;
          }
          case shape::SHAPE_BARE_FN:  // Does not close over data.
          case shape::SHAPE_STACK_FN: // Not reachable from heap.
          case shape::SHAPE_UNIQ_FN: break; /* Can only close over sendable
                                             * (and hence acyclic) data */
          default: abort();
        }
    }

    // Resources: walk each field; dtor/params/liveness don't affect
    // reachability, so they are ignored here.
    void walk_res2(const shape::rust_fn *dtor, unsigned n_params,
                   const shape::type_param *params, const uint8_t *end_sp,
                   bool live) {
        while (this->sp != end_sp) {
            this->walk();
            align = true;
        }
    }

    // An iface value's data word is a box.
    void walk_iface2() {
        walk_box2();
    }

    void walk_tydesc2(char) {
    }

    void walk_subcontext2(mark &sub) { sub.walk(); }

    void walk_uniq_contents2(mark &sub) { sub.walk(); }

    void walk_rptr_contents2(mark &sub) { sub.walk(); }

    // Mark the box pointed at by `dp` and recurse into its contents,
    // unless it was already marked (cycle/diamond).
    void walk_box_contents2(mark &sub) {
        rust_opaque_box *box_ptr = *(rust_opaque_box **) dp;

        if (!box_ptr)
            return;

        if (marked.find(box_ptr) != marked.end())
            return; // Skip to avoid chasing cycles.

        marked.insert(box_ptr);
        sub.walk();
    }

    void walk_struct2(const uint8_t *end_sp) {
        while (this->sp != end_sp) {
            this->walk();
            align = true;
        }
    }

    // Defined out of line below.
    void walk_variant2(shape::tag_info &tinfo, uint32_t variant_id,
                       const std::pair<const uint8_t *,const uint8_t *>
                       variant_ptr_and_end);

    // Scalars contain no pointers.
    template<typename T>
    inline void walk_number2() { /* no-op */ }

public:
    // Mark everything transitively reachable from `roots` into `marked`.
    static void do_mark(rust_task *task,
                        const std::vector<rust_opaque_box *> &roots,
                        std::set<rust_opaque_box*> &marked);
};
|
|
|
|
|
|
|
|
// Walk the fields of one tag variant: a sub-walker positioned at the
// variant's shape bytes steps through each field until the variant's
// shape description is exhausted.
void
mark::walk_variant2(shape::tag_info &tinfo, uint32_t variant_id,
                    const std::pair<const uint8_t *,const uint8_t *>
                    variant_ptr_and_end) {
    mark sub(*this, variant_ptr_and_end.first, tinfo.params);

    assert(variant_id < 256); // FIXME: Temporary sanity check.

    const uint8_t *variant_end = variant_ptr_and_end.second;
    while (sub.sp < variant_end) {
        sub.walk();
        // NOTE(review): sets the OUTER walker's align flag, not sub.align —
        // same question as the FIXME in irc::walk_vec2; confirm intended.
        align = true;
    }
}
|
|
|
|
|
|
|
|
void
|
2012-02-01 20:52:08 -06:00
|
|
|
mark::do_mark(rust_task *task,
|
|
|
|
const std::vector<rust_opaque_box *> &roots,
|
|
|
|
std::set<rust_opaque_box *> &marked) {
|
2012-04-01 01:12:06 -05:00
|
|
|
std::vector<rust_opaque_box *>::const_iterator
|
2012-02-01 20:52:08 -06:00
|
|
|
begin(roots.begin()),
|
|
|
|
end(roots.end());
|
2011-09-21 20:13:46 -05:00
|
|
|
while (begin != end) {
|
2012-02-01 20:52:08 -06:00
|
|
|
rust_opaque_box *box = *begin;
|
|
|
|
if (marked.find(box) == marked.end()) {
|
|
|
|
marked.insert(box);
|
2011-09-21 20:13:46 -05:00
|
|
|
|
2012-02-01 20:52:08 -06:00
|
|
|
const type_desc *tydesc = box->td;
|
2011-09-21 20:13:46 -05:00
|
|
|
|
2012-02-01 20:52:08 -06:00
|
|
|
LOG(task, gc, "marking: %p, tydesc=%p", box, tydesc);
|
2011-09-21 20:13:46 -05:00
|
|
|
|
2012-02-01 20:52:08 -06:00
|
|
|
uint8_t *p = (uint8_t*) box_body(box);
|
2011-09-21 20:13:46 -05:00
|
|
|
shape::arena arena;
|
|
|
|
shape::type_param *params =
|
2011-09-23 18:51:50 -05:00
|
|
|
shape::type_param::from_tydesc_and_data(tydesc, p, arena);
|
2011-09-21 20:13:46 -05:00
|
|
|
|
|
|
|
mark mark(task, true, tydesc->shape, params, tydesc->shape_tables,
|
2012-02-01 20:52:08 -06:00
|
|
|
p, marked);
|
2011-09-21 20:13:46 -05:00
|
|
|
mark.walk();
|
|
|
|
}
|
|
|
|
|
|
|
|
++begin;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
2012-03-26 18:27:35 -05:00
|
|
|
do_sweep(rust_task *task,
|
|
|
|
const std::set<rust_opaque_box*> &marked) {
|
2012-02-01 20:52:08 -06:00
|
|
|
boxed_region *boxed = &task->boxed;
|
|
|
|
rust_opaque_box *box = boxed->first_live_alloc();
|
|
|
|
while (box != NULL) {
|
|
|
|
// save next ptr as we may be freeing box
|
|
|
|
rust_opaque_box *box_next = box->next;
|
|
|
|
if (marked.find(box) == marked.end()) {
|
|
|
|
LOG(task, gc, "object is part of a cycle: %p", box);
|
2012-03-26 18:27:35 -05:00
|
|
|
annihilate_box(task, box);
|
2011-09-21 20:13:46 -05:00
|
|
|
}
|
2012-02-01 20:52:08 -06:00
|
|
|
box = box_next;
|
2011-09-21 20:13:46 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
|
2011-09-20 19:49:14 -05:00
|
|
|
void
|
|
|
|
do_cc(rust_task *task) {
|
2012-02-01 20:52:08 -06:00
|
|
|
LOG(task, gc, "cc");
|
2011-09-21 20:13:46 -05:00
|
|
|
|
2011-09-20 19:49:14 -05:00
|
|
|
irc_map ircs;
|
|
|
|
irc::compute_ircs(task, ircs);
|
2011-09-21 20:13:46 -05:00
|
|
|
|
2012-02-01 20:52:08 -06:00
|
|
|
std::vector<rust_opaque_box*> roots;
|
2011-09-21 20:13:46 -05:00
|
|
|
find_roots(task, ircs, roots);
|
|
|
|
|
2012-02-01 20:52:08 -06:00
|
|
|
std::set<rust_opaque_box*> marked;
|
2011-09-21 20:13:46 -05:00
|
|
|
mark::do_mark(task, roots, marked);
|
|
|
|
|
2012-03-26 18:27:35 -05:00
|
|
|
do_sweep(task, marked);
|
2011-09-20 19:49:14 -05:00
|
|
|
}
|
|
|
|
|
2012-02-24 13:03:23 -06:00
|
|
|
void
|
|
|
|
do_final_cc(rust_task *task) {
|
|
|
|
do_cc(task);
|
|
|
|
|
|
|
|
boxed_region *boxed = &task->boxed;
|
|
|
|
for (rust_opaque_box *box = boxed->first_live_alloc();
|
|
|
|
box != NULL;
|
|
|
|
box = box->next) {
|
|
|
|
cerr << "Unreclaimed object found at " << (void*) box << ": ";
|
|
|
|
const type_desc *td = box->td;
|
|
|
|
shape::arena arena;
|
|
|
|
shape::type_param *params = shape::type_param::from_tydesc(td, arena);
|
|
|
|
shape::log log(task, true, td->shape, params, td->shape_tables,
|
|
|
|
(uint8_t*)box_body(box), cerr);
|
|
|
|
log.walk();
|
|
|
|
cerr << "\n";
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2011-09-20 18:47:59 -05:00
|
|
|
// Possibly run a cycle collection.  With RUST_CC_ZEAL set in the
// environment, collect on every call (for debugging); otherwise the
// frequency-based path is compiled out below.
void
maybe_cc(rust_task *task) {
    // Reads RUST_CC_ZEAL once; the flag object is shared by all callers.
    static debug::flag zeal("RUST_CC_ZEAL");
    if (*zeal) {
        do_cc(task);
        return;
    }

    // FIXME: Needs a snapshot.
    // Disabled: would collect once every RUST_CC_FREQUENCY allocations.
#if 0
    if (task->cc_counter++ > RUST_CC_FREQUENCY) {
        task->cc_counter = 0;
        do_cc(task);
    }
#endif
}
|
|
|
|
|
|
|
|
} // end namespace cc
|
|
|
|
|