auto merge of #11535 : thestinger/rust/header, r=alexcrichton

Unique pointers and vectors currently include a reference-counting
header when they contain a managed pointer.

This `{ ref_count, type_desc, prev, next }` header is not necessary and
not a sensible foundation for tracing. It adds needless complexity to
library code and is responsible for breakage in places where the branch
handling it has been left out.

The `borrow_offset` field can now be removed from `TyDesc` along with
the associated handling in the compiler.

Closes #9510
Closes #11533
Merged by bors on 2014-01-14 23:01:51 -08:00 in commit 29070c3bee
24 changed files with 157 additions and 199 deletions
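For reference, a rough sketch of the allocation layouts involved follows. This is an illustration, not part of the diff: the field names come from the commit message and from the `fill`/`alloc` and `box_field_refcnt` accesses visible in the diffs below, while the exact field types are assumptions based on the era's `std::unstable::raw` definitions.

// Sketch only (assumed layouts; not part of this commit's diff).
//
// Before: a ~T or ~[T] whose contents owned a managed (@) pointer was
// allocated on the managed heap behind the same header as an @-box, with
// ref_count forced to -2 so the task annihilator would skip it:
struct Box<T> {
    ref_count: uint,
    type_desc: *TyDesc,
    prev: *mut Box<T>,
    next: *mut Box<T>,
    data: T
}

// After: every ~T is a bare exchange-heap allocation of T, and every ~[T]
// is a bare Vec<T> allocation with no box header in front:
struct Vec<T> {
    fill: uint,  // bytes of `data` in use
    alloc: uint, // bytes of `data` allocated
    data: T      // elements stored inline
}

Since borrowing the contents of a headerless ~T no longer requires a dynamic offset (even for `~Trait` objects, where the payload type is unknown), the `borrow_offset` field becomes dead weight in `TyDesc`, as the diffs below show.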

View File

@@ -45,9 +45,8 @@
pub static tydesc_field_take_glue: uint = 2u;
pub static tydesc_field_drop_glue: uint = 3u;
pub static tydesc_field_visit_glue: uint = 4u;
pub static tydesc_field_borrow_offset: uint = 5u;
pub static tydesc_field_name_offset: uint = 6u;
pub static n_tydesc_fields: uint = 7u;
pub static tydesc_field_name_offset: uint = 5u;
pub static n_tydesc_fields: uint = 6u;
// The two halves of a closure: code and environment.
pub static fn_field_code: uint = 0u;

View File

@@ -1584,14 +1584,9 @@ fn compile_submatch_continue<'r,
}
if any_uniq_pat(m, col) {
let pat_ty = node_id_type(bcx, pat_id);
let llbox = Load(bcx, val);
let unboxed = match ty::get(pat_ty).sty {
ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
_ => GEPi(bcx, llbox, [0u, abi::box_field_body])
};
compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
vec::append(~[unboxed], vals_left), chk);
vec::append(~[llbox], vals_left), chk);
return;
}
@@ -2231,13 +2226,8 @@ fn bind_irrefutable_pat<'a>(
}
}
ast::PatUniq(inner) => {
let pat_ty = node_id_type(bcx, pat.id);
let llbox = Load(bcx, val);
let unboxed = match ty::get(pat_ty).sty {
ty::ty_uniq(..) if !ty::type_contents(bcx.tcx(), pat_ty).owns_managed() => llbox,
_ => GEPi(bcx, llbox, [0u, abi::box_field_body])
};
bcx = bind_irrefutable_pat(bcx, inner, unboxed, binding_mode);
bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode);
}
ast::PatRegion(inner) => {
let loaded_val = Load(bcx, val);

View File

@@ -368,7 +368,7 @@ fn require_alloc_fn(bcx: &Block, t: ty::t, it: LangItem) -> ast::DefId {
} else {
// we treat ~fn, @fn and @[] as @ here, which isn't ideal
let langcall = match heap {
heap_managed | heap_managed_unique => {
heap_managed => {
require_alloc_fn(bcx, t, MallocFnLangItem)
}
heap_exchange_closure => {
@@ -392,9 +392,7 @@ fn require_alloc_fn(bcx: &Block, t: ty::t, it: LangItem) -> ast::DefId {
langcall,
[tydesc, size],
None);
let r = rslt(r.bcx, PointerCast(r.bcx, r.val, llty));
maybe_set_managed_unique_rc(r.bcx, r.val, heap);
r
rslt(r.bcx, PointerCast(r.bcx, r.val, llty))
}
}
@@ -441,27 +439,6 @@ pub fn malloc_general<'a>(bcx: &'a Block, t: ty::t, heap: heap)
malloc_general_dyn(bcx, t, heap, llsize_of(bcx.ccx(), ty))
}
pub fn heap_for_unique(bcx: &Block, t: ty::t) -> heap {
if ty::type_contents(bcx.tcx(), t).owns_managed() {
heap_managed_unique
} else {
heap_exchange
}
}
pub fn maybe_set_managed_unique_rc(bcx: &Block, bx: ValueRef, heap: heap) {
assert!(heap != heap_exchange);
if heap == heap_managed_unique {
// In cases where we are looking at a unique-typed allocation in the
// managed heap (thus have refcount 1 from the managed allocator),
// such as a ~(@foo) or such. These need to have their refcount forced
// to -2 so the annihilator ignores them.
let rc = GEPi(bcx, bx, [0u, abi::box_field_refcnt]);
let rc_val = C_int(bcx.ccx(), -2);
Store(bcx, rc_val, rc);
}
}
// Type descriptor and type glue stuff
pub fn get_tydesc_simple(ccx: &CrateContext, t: ty::t) -> ValueRef {

View File

@@ -150,14 +150,6 @@ pub fn mk_closure_tys(tcx: ty::ctxt,
return cdata_ty;
}
fn heap_for_unique_closure(bcx: &Block, t: ty::t) -> heap {
if ty::type_contents(bcx.tcx(), t).owns_managed() {
heap_managed_unique
} else {
heap_exchange_closure
}
}
pub fn allocate_cbox<'a>(
bcx: &'a Block<'a>,
sigil: ast::Sigil,
@@ -173,7 +165,7 @@ pub fn allocate_cbox<'a>(
tcx.sess.bug("trying to trans allocation of @fn")
}
ast::OwnedSigil => {
malloc_raw(bcx, cdata_ty, heap_for_unique_closure(bcx, cdata_ty))
malloc_raw(bcx, cdata_ty, heap_exchange_closure)
}
ast::BorrowedSigil => {
let cbox_ty = tuplify_box_ty(tcx, cdata_ty);

View File

@@ -90,7 +90,6 @@ pub struct tydesc_info {
tydesc: ValueRef,
size: ValueRef,
align: ValueRef,
borrow_offset: ValueRef,
name: ValueRef,
take_glue: Cell<Option<ValueRef>>,
drop_glue: Cell<Option<ValueRef>>,
@@ -316,7 +315,6 @@ pub fn warn_not_to_commit(ccx: &CrateContext, msg: &str) {
#[deriving(Eq)]
pub enum heap {
heap_managed,
heap_managed_unique,
heap_exchange,
heap_exchange_closure
}
@@ -498,7 +496,7 @@ pub fn add_clean_temp_mem_in_scope_(bcx: &Block, scope_id: Option<ast::NodeId>,
pub fn add_clean_free(cx: &Block, ptr: ValueRef, heap: heap) {
let free_fn = match heap {
heap_managed | heap_managed_unique => {
heap_managed => {
@GCHeapFreeingCleanupFunction {
ptr: ptr,
} as @CleanupFunction

View File

@@ -570,11 +570,6 @@ pub fn box_body(&self, bcx: &Block) -> Datum {
let (content_ty, header) = match ty::get(self.ty).sty {
ty::ty_box(typ) => (typ, true),
ty::ty_uniq(typ) => (typ, false),
ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) => {
let unit_ty = ty::sequence_element_type(bcx.tcx(), self.ty);
let unboxed_vec_ty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty);
(unboxed_vec_ty, true)
}
_ => {
bcx.tcx().sess.bug(format!(
"box_body() invoked on non-box type {}",
@@ -582,7 +577,7 @@ pub fn box_body(&self, bcx: &Block) -> Datum {
}
};
if !header && !ty::type_contents(bcx.tcx(), content_ty).owns_managed() {
if !header {
let ptr = self.to_value_llval(bcx);
let ty = type_of::type_of(bcx.ccx(), content_ty);
let body = PointerCast(bcx, ptr, ty.ptr_to());

View File

@@ -2147,10 +2147,6 @@ fn create_pointer_to_box_metadata(cx: &CrateContext,
ty::vstore_fixed(len) => {
fixed_vec_metadata(cx, mt.ty, len, usage_site_span)
}
ty::vstore_uniq if ty::type_contents(cx.tcx, mt.ty).owns_managed() => {
let boxed_vec_metadata = boxed_vec_metadata(cx, mt.ty, usage_site_span);
pointer_type_metadata(cx, t, boxed_vec_metadata)
}
ty::vstore_uniq => {
let vec_metadata = vec_metadata(cx, mt.ty, usage_site_span);
pointer_type_metadata(cx, t, vec_metadata)
@@ -2165,12 +2161,8 @@ fn create_pointer_to_box_metadata(cx: &CrateContext,
}
},
ty::ty_uniq(typ) => {
if ty::type_contents(cx.tcx, typ).owns_managed() {
create_pointer_to_box_metadata(cx, t, typ)
} else {
let pointee = type_metadata(cx, typ, usage_site_span);
pointer_type_metadata(cx, t, pointee)
}
let pointee = type_metadata(cx, typ, usage_site_span);
pointer_type_metadata(cx, t, pointee)
}
ty::ty_ptr(ref mt) | ty::ty_rptr(_, ref mt) => {
let pointee = type_metadata(cx, mt.ty, usage_site_span);

View File

@@ -398,29 +398,7 @@ fn auto_borrow_obj<'a>(
autoderefs));
derefd_datum.to_rptr(bcx).to_value_llval(bcx)
}
ty::UniqTraitStore(..) => {
// For a ~T box, there may or may not be a header,
// depending on whether the type T references managed
// boxes. However, since we do not *know* the type T
// for objects, this presents a hurdle. Our solution is
// to load the "borrow offset" from the type descriptor;
// this value will either be 0 or sizeof(BoxHeader), depending
// on the type T.
let llopaque =
PointerCast(bcx, source_data, Type::opaque().ptr_to());
let lltydesc_ptr_ptr =
PointerCast(bcx, vtable,
bcx.ccx().tydesc_type.ptr_to().ptr_to());
let lltydesc_ptr =
Load(bcx, lltydesc_ptr_ptr);
let borrow_offset_ptr =
GEPi(bcx, lltydesc_ptr,
[0, abi::tydesc_field_borrow_offset]);
let borrow_offset =
Load(bcx, borrow_offset_ptr);
InBoundsGEP(bcx, llopaque, [borrow_offset])
}
ty::RegionTraitStore(..) => {
ty::UniqTraitStore(..) | ty::RegionTraitStore(..) => {
source_data
}
};
@@ -608,8 +586,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr)
expr, contents);
}
ast::ExprVstore(contents, ast::ExprVstoreUniq) => {
let heap = heap_for_unique(bcx, expr_ty(bcx, contents));
return tvec::trans_uniq_or_managed_vstore(bcx, heap,
return tvec::trans_uniq_or_managed_vstore(bcx, heap_exchange,
expr, contents);
}
ast::ExprBox(_, contents) => {
@@ -617,7 +594,7 @@ fn trans_rvalue_datum_unadjusted<'a>(bcx: &'a Block<'a>, expr: &ast::Expr)
// `trans_rvalue_dps_unadjusted`.)
let box_ty = expr_ty(bcx, expr);
let contents_ty = expr_ty(bcx, contents);
let heap = heap_for_unique(bcx, contents_ty);
let heap = heap_exchange;
return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap)
}
ast::ExprLit(lit) => {
@@ -1461,8 +1438,7 @@ fn trans_unary_datum<'a>(
trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_managed)
}
ast::UnUniq => {
let heap = heap_for_unique(bcx, un_ty);
trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap)
trans_boxed_expr(bcx, un_ty, sub_expr, sub_ty, heap_exchange)
}
ast::UnDeref => {
bcx.sess().bug("deref expressions should have been \

View File

@@ -303,11 +303,7 @@ pub fn make_free_glue<'a>(bcx: &'a Block<'a>, v: ValueRef, t: ty::t)
with_cond(bcx, not_null, |bcx| {
let body_datum = box_datum.box_body(bcx);
let bcx = drop_ty(bcx, body_datum.to_ref_llval(bcx), body_datum.ty);
if ty::type_contents(bcx.tcx(), t).owns_managed() {
trans_free(bcx, box_datum.val)
} else {
trans_exchange_free(bcx, box_datum.val)
}
trans_exchange_free(bcx, box_datum.val)
})
}
ty::ty_vec(_, ty::vstore_uniq) | ty::ty_str(ty::vstore_uniq) |
@@ -550,18 +546,6 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
ppaux::ty_to_str(ccx.tcx, t));
}
let has_header = match ty::get(t).sty {
ty::ty_box(..) => true,
ty::ty_uniq(..) => ty::type_contents(ccx.tcx, t).owns_managed(),
_ => false
};
let borrow_offset = if has_header {
ccx.offsetof_gep(llty, [0u, abi::box_field_body])
} else {
C_uint(ccx, 0)
};
let llsize = llsize_of(ccx, llty);
let llalign = llalign_of(ccx, llty);
let name = mangle_internal_name_by_type_and_seq(ccx, t, "tydesc").to_managed();
@@ -580,7 +564,6 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
tydesc: gvar,
size: llsize,
align: llalign,
borrow_offset: borrow_offset,
name: ty_name,
take_glue: Cell::new(None),
drop_glue: Cell::new(None),
@@ -690,15 +673,12 @@ pub fn emit_tydescs(ccx: &CrateContext) {
}
};
debug!("ti.borrow_offset: {}", ccx.tn.val_to_str(ti.borrow_offset));
let tydesc = C_named_struct(ccx.tydesc_type,
[ti.size, // size
ti.align, // align
take_glue, // take_glue
drop_glue, // drop_glue
visit_glue, // visit_glue
ti.borrow_offset, // borrow_offset
ti.name]); // name
unsafe {

View File

@@ -184,11 +184,7 @@ pub fn visit_ty(&mut self, t: ty::t) {
ty::ty_vec(ref mt, vst) => {
let (name, extra) = self.vstore_name_and_extra(t, vst);
let extra = extra + self.c_mt(mt);
if "uniq" == name && ty::type_contents(bcx.tcx(), t).owns_managed() {
self.visit("evec_uniq_managed", extra)
} else {
self.visit(~"evec_" + name, extra)
}
self.visit(~"evec_" + name, extra)
}
// Should remove mt from box and uniq.
ty::ty_box(typ) => {
@@ -203,11 +199,7 @@ pub fn visit_ty(&mut self, t: ty::t) {
ty: typ,
mutbl: ast::MutImmutable,
});
if ty::type_contents(bcx.tcx(), t).owns_managed() {
self.visit("uniq_managed", extra)
} else {
self.visit("uniq", extra)
}
self.visit("uniq", extra)
}
ty::ty_ptr(ref mt) => {
let extra = self.c_mt(mt);

View File

@@ -64,7 +64,14 @@ pub fn get_alloc(bcx: &Block, vptr: ValueRef) -> ValueRef {
}
pub fn get_bodyptr(bcx: &Block, vptr: ValueRef, t: ty::t) -> ValueRef {
if ty::type_contents(bcx.tcx(), t).owns_managed() {
let vt = vec_types(bcx, t);
let managed = match ty::get(vt.vec_ty).sty {
ty::ty_str(ty::vstore_box) | ty::ty_vec(_, ty::vstore_box) => true,
_ => false
};
if managed {
GEPi(bcx, vptr, [0u, abi::box_field_body])
} else {
vptr
@@ -106,7 +113,6 @@ pub fn alloc_raw<'a>(
base::malloc_general_dyn(bcx, vecbodyty, heap, vecsize);
Store(bcx, fill, GEPi(bcx, body, [0u, abi::vec_elt_fill]));
Store(bcx, alloc, GEPi(bcx, body, [0u, abi::vec_elt_alloc]));
base::maybe_set_managed_unique_rc(bcx, bx, heap);
return rslt(bcx, bx);
}
}
@@ -117,7 +123,7 @@ pub fn alloc_uniq_raw<'a>(
fill: ValueRef,
alloc: ValueRef)
-> Result<'a> {
alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty))
alloc_raw(bcx, unit_ty, fill, alloc, heap_exchange)
}
pub fn alloc_vec<'a>(
@@ -350,7 +356,7 @@ pub fn trans_uniq_or_managed_vstore<'a>(
}
}
heap_exchange_closure => fail!("vectors use exchange_alloc"),
heap_managed | heap_managed_unique => {}
heap_managed => {}
}
let vt = vec_types_from_expr(bcx, vstore_expr);

View File

@@ -220,7 +220,6 @@ pub fn tydesc(arch: Architecture) -> Type {
glue_fn_ty, // take
glue_fn_ty, // drop
glue_fn_ty, // visit
int_ty, // borrow_offset
Type::struct_([Type::i8p(), Type::int(arch)], false)]; // name
tydesc.set_struct_body(elems, false);
@@ -269,10 +268,6 @@ pub fn opaque_box(ctx: &CrateContext) -> Type {
Type::smart_ptr(ctx, &Type::opaque())
}
pub fn unique(ctx: &CrateContext, ty: &Type) -> Type {
Type::smart_ptr(ctx, ty)
}
pub fn opaque_cbox_ptr(cx: &CrateContext) -> Type {
Type::opaque_box(cx).ptr_to()
}
@@ -281,7 +276,7 @@ pub fn opaque_trait(ctx: &CrateContext, store: ty::TraitStore) -> Type {
let tydesc_ptr = ctx.tydesc_type.ptr_to();
let box_ty = match store {
ty::BoxTraitStore => Type::opaque_box(ctx),
ty::UniqTraitStore => Type::unique(ctx, &Type::i8()),
ty::UniqTraitStore => Type::i8(),
ty::RegionTraitStore(..) => Type::i8()
};
Type::struct_([tydesc_ptr, box_ty.ptr_to()], false)

View File

@@ -245,21 +245,11 @@ pub fn type_of(cx: &CrateContext, t: ty::t) -> Type {
Type::smart_ptr(cx, &ty).ptr_to()
}
ty::ty_uniq(typ) => {
let ty = type_of(cx, typ);
if ty::type_contents(cx.tcx, typ).owns_managed() {
Type::unique(cx, &ty).ptr_to()
} else {
ty.ptr_to()
}
type_of(cx, typ).ptr_to()
}
ty::ty_vec(ref mt, ty::vstore_uniq) => {
let ty = type_of(cx, mt.ty);
let ty = Type::vec(cx.sess.targ_cfg.arch, &ty);
if ty::type_contents(cx.tcx, mt.ty).owns_managed() {
Type::unique(cx, &ty).ptr_to()
} else {
ty.ptr_to()
}
Type::vec(cx.sess.targ_cfg.arch, &ty).ptr_to()
}
ty::ty_unboxed_vec(ref mt) => {
let ty = type_of(cx, mt.ty);

View File

@@ -230,4 +230,12 @@ fn test_dead() {
drop(x);
assert!(y.upgrade().is_none());
}
#[test]
fn gc_inside() {
// see issue #11532
use gc::Gc;
let a = Rc::new(RefCell::new(Gc::new(1)));
assert!(a.borrow().try_borrow_mut().is_some());
}
}

View File

@@ -227,6 +227,7 @@ fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
true
}
#[cfg(stage0)]
fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~u8>();
if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
@@ -275,6 +276,7 @@ fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
true
}
#[cfg(stage0)]
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~[@u8]>();
if ! self.inner.visit_evec_uniq_managed(mtbl, inner) { return false; }

View File

@@ -310,6 +310,7 @@ fn visit_uniq(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
})
}
#[cfg(stage0)]
fn visit_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
self.writer.write(['~' as u8]);
self.get::<&raw::Box<()>>(|this, b| {
@@ -358,6 +359,7 @@ fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
})
}
#[cfg(stage0)]
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.get::<&raw::Box<raw::Vec<()>>>(|this, b| {
this.writer.write(['~' as u8]);

View File

@@ -98,13 +98,6 @@ pub struct TyDesc {
// Called by reflection visitor to visit a value of type `T`
visit_glue: GlueFn,
// If T represents a box pointer (`@U` or `~U`), then
// `borrow_offset` is the amount that the pointer must be adjusted
// to find the payload. This is always derivable from the type
// `U`, but in the case of `@Trait` or `~Trait` objects, the type
// `U` is unknown.
borrow_offset: uint,
// Name corresponding to the type
name: &'static str
}
@@ -146,6 +139,7 @@ pub trait TyVisitor {
fn visit_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
#[cfg(stage0)]
fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_rptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
@@ -154,6 +148,7 @@ fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_unboxed_vec(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_evec_box(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
#[cfg(stage0)]
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool;
fn visit_evec_fixed(&mut self, n: uint, sz: uint, align: uint,

View File

@@ -57,6 +57,7 @@ impl<'a, T> Repr<Slice<T>> for &'a [T] {}
impl<'a> Repr<Slice<u8>> for &'a str {}
impl<T> Repr<*Box<T>> for @T {}
impl<T> Repr<*Box<Vec<T>>> for @[T] {}
impl<T> Repr<*Vec<T>> for ~[T] {}
impl Repr<*String> for ~str {}
impl Repr<*Box<String>> for @str {}

View File

@@ -116,14 +116,18 @@
use ptr;
use ptr::RawPtr;
use rt::global_heap::{malloc_raw, realloc_raw, exchange_free};
#[cfg(stage0)]
use rt::local_heap::local_free;
use mem;
use mem::size_of;
use uint;
use unstable::finally::Finally;
use unstable::intrinsics;
#[cfg(stage0)]
use unstable::intrinsics::{get_tydesc, owns_managed};
use unstable::raw::{Box, Repr, Slice, Vec};
use unstable::raw::{Repr, Slice, Vec};
#[cfg(stage0)]
use unstable::raw::Box;
use util;
/**
@@ -178,6 +182,7 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> ~[T] {
/// Creates a new vector with a capacity of `capacity`
#[inline]
#[cfg(stage0)]
pub fn with_capacity<T>(capacity: uint) -> ~[T] {
unsafe {
if owns_managed::<T>() {
@@ -198,6 +203,23 @@ pub fn with_capacity<T>(capacity: uint) -> ~[T] {
}
}
/// Creates a new vector with a capacity of `capacity`
#[inline]
#[cfg(not(stage0))]
pub fn with_capacity<T>(capacity: uint) -> ~[T] {
unsafe {
let alloc = capacity * mem::nonzero_size_of::<T>();
let size = alloc + mem::size_of::<Vec<()>>();
if alloc / mem::nonzero_size_of::<T>() != capacity || size < alloc {
fail!("vector size is too large: {}", capacity);
}
let ptr = malloc_raw(size) as *mut Vec<()>;
(*ptr).alloc = alloc;
(*ptr).fill = 0;
cast::transmute(ptr)
}
}
/**
* Builds a vector by calling a provided function with an argument
* function that pushes an element to the back of a vector.
@@ -784,7 +806,7 @@ impl<T> Container for ~[T] {
/// Returns the length of a vector
#[inline]
fn len(&self) -> uint {
self.repr().len
self.as_slice().len()
}
}
@@ -1481,6 +1503,7 @@ fn move_rev_iter(self) -> MoveRevIterator<T> {
self.move_iter().invert()
}
#[cfg(stage0)]
fn reserve(&mut self, n: uint) {
// Only make the (slow) call into the runtime if we have to
if self.capacity() < n {
@@ -1504,6 +1527,24 @@ fn reserve(&mut self, n: uint) {
}
}
#[cfg(not(stage0))]
fn reserve(&mut self, n: uint) {
// Only make the (slow) call into the runtime if we have to
if self.capacity() < n {
unsafe {
let ptr: *mut *mut Vec<()> = cast::transmute(self);
let alloc = n * mem::nonzero_size_of::<T>();
let size = alloc + mem::size_of::<Vec<()>>();
if alloc / mem::nonzero_size_of::<T>() != n || size < alloc {
fail!("vector size is too large: {}", n);
}
*ptr = realloc_raw(*ptr as *mut c_void, size)
as *mut Vec<()>;
(**ptr).alloc = alloc;
}
}
}
#[inline]
fn reserve_at_least(&mut self, n: uint) {
self.reserve(uint::next_power_of_two_opt(n).unwrap_or(n));
@@ -1520,6 +1561,7 @@ fn reserve_additional(&mut self, n: uint) {
}
#[inline]
#[cfg(stage0)]
fn capacity(&self) -> uint {
unsafe {
if owns_managed::<T>() {
@@ -1532,6 +1574,15 @@ fn capacity(&self) -> uint {
}
}
#[inline]
#[cfg(not(stage0))]
fn capacity(&self) -> uint {
unsafe {
let repr: **Vec<()> = cast::transmute(self);
(**repr).alloc / mem::nonzero_size_of::<T>()
}
}
fn shrink_to_fit(&mut self) {
unsafe {
let ptr: *mut *mut Vec<()> = cast::transmute(self);
@@ -1543,6 +1594,7 @@ fn shrink_to_fit(&mut self) {
}
#[inline]
#[cfg(stage0)]
fn push(&mut self, t: T) {
unsafe {
if owns_managed::<T>() {
@@ -1583,7 +1635,31 @@ unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
intrinsics::move_val_init(&mut(*p), t);
}
}
}
#[inline]
#[cfg(not(stage0))]
fn push(&mut self, t: T) {
unsafe {
let repr: **Vec<()> = cast::transmute(&mut *self);
let fill = (**repr).fill;
if (**repr).alloc <= fill {
self.reserve_additional(1);
}
push_fast(self, t);
}
// This doesn't bother to make sure we have space.
#[inline] // really pretty please
unsafe fn push_fast<T>(this: &mut ~[T], t: T) {
let repr: **mut Vec<u8> = cast::transmute(this);
let fill = (**repr).fill;
(**repr).fill += mem::nonzero_size_of::<T>();
let p = to_unsafe_ptr(&((**repr).data));
let p = ptr::offset(p, fill as int) as *mut T;
intrinsics::move_val_init(&mut(*p), t);
}
}
#[inline]
@@ -1746,6 +1822,7 @@ fn grow_fn(&mut self, n: uint, op: |uint| -> T) {
}
}
#[inline]
#[cfg(stage0)]
unsafe fn set_len(&mut self, new_len: uint) {
if owns_managed::<T>() {
let repr: **mut Box<Vec<()>> = cast::transmute(self);
@@ -1755,6 +1832,13 @@ unsafe fn set_len(&mut self, new_len: uint) {
(**repr).fill = new_len * mem::nonzero_size_of::<T>();
}
}
#[inline]
#[cfg(not(stage0))]
unsafe fn set_len(&mut self, new_len: uint) {
let repr: **mut Vec<()> = cast::transmute(self);
(**repr).fill = new_len * mem::nonzero_size_of::<T>();
}
}
impl<T> Mutable for ~[T] {
@@ -2926,6 +3010,7 @@ fn next_back(&mut self) -> Option<T> {
}
#[unsafe_destructor]
#[cfg(stage0)]
impl<T> Drop for MoveIterator<T> {
fn drop(&mut self) {
// destroy the remaining elements
@@ -2940,6 +3025,18 @@ fn drop(&mut self) {
}
}
#[unsafe_destructor]
#[cfg(not(stage0))]
impl<T> Drop for MoveIterator<T> {
fn drop(&mut self) {
// destroy the remaining elements
for _x in *self {}
unsafe {
exchange_free(self.allocation as *u8 as *c_char)
}
}
}
/// An iterator that moves out of a vector in reverse order.
pub type MoveRevIterator<T> = Invert<MoveIterator<T>>;

View File

@@ -17,16 +17,16 @@
// debugger:run
// debugger:finish
// debugger:print unique->val.elements[0]->val
// debugger:print unique->elements[0]->val
// check:$1 = 10
// debugger:print unique->val.elements[1]->val
// debugger:print unique->elements[1]->val
// check:$2 = 11
// debugger:print unique->val.elements[2]->val
// debugger:print unique->elements[2]->val
// check:$3 = 12
// debugger:print unique->val.elements[3]->val
// debugger:print unique->elements[3]->val
// check:$4 = 13
#[allow(unused_variable)];

View File

@@ -21,28 +21,22 @@
// debugger:print *ordinary_unique
// check:$1 = {-1, -2}
// debugger:print managed_within_unique.val->x
// debugger:print managed_within_unique->x
// check:$2 = -3
// debugger:print managed_within_unique.val->y->val
// debugger:print managed_within_unique->y->val
// check:$3 = -4
#[allow(unused_variable)];
struct ContainsManaged
{
x: int,
y: @int
struct ContainsManaged {
x: int,
y: @int
}
fn main() {
let ordinary_unique = ~(-1, -2);
let ordinary_unique = ~(-1, -2);
// This is a special case: normally, values allocated in the exchange heap
// are not boxed unless they contain managed pointers.
// This test case verifies that both cases are handled correctly.
let managed_within_unique = ~ContainsManaged { x: -3, y: @-4 };
zzz();

View File

@@ -49,9 +49,9 @@
// debugger:print stack_managed.next.val->val.value
// check:$12 = 11
// debugger:print unique_managed->val.value
// debugger:print unique_managed->value
// check:$13 = 12
// debugger:print unique_managed->val.next.val->val.value
// debugger:print unique_managed->next.val->val.value
// check:$14 = 13
// debugger:print box_managed->val.value

View File

@@ -223,13 +223,6 @@ fn visit_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
true
}
fn visit_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~u8>();
if ! self.inner().visit_uniq_managed(mtbl, inner) { return false; }
self.bump_past::<~u8>();
true
}
fn visit_ptr(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<*u8>();
if ! self.inner().visit_ptr(mtbl, inner) { return false; }
@@ -275,13 +268,6 @@ fn visit_evec_uniq(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
true
}
fn visit_evec_uniq_managed(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<~[@u8]>();
if ! self.inner().visit_evec_uniq_managed(mtbl, inner) { return false; }
self.bump_past::<~[@u8]>();
true
}
fn visit_evec_slice(&mut self, mtbl: uint, inner: *TyDesc) -> bool {
self.align_to::<&'static [u8]>();
if ! self.inner().visit_evec_slice(mtbl, inner) { return false; }
@@ -549,7 +535,6 @@ fn visit_estr_fixed(&mut self, _n: uint, _sz: uint,
fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
@@ -557,7 +542,6 @@ fn visit_vec(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_unboxed_vec(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_evec_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_evec_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_evec_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint,
_mtbl: uint, _inner: *TyDesc) -> bool { true }

View File

@@ -70,7 +70,6 @@ fn visit_estr_fixed(&mut self,
fn visit_box(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_uniq(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_uniq_managed(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_ptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_rptr(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
@@ -83,12 +82,6 @@ fn visit_evec_uniq(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
self.types.push(~"]");
true
}
fn visit_evec_uniq_managed(&mut self, _mtbl: uint, inner: *TyDesc) -> bool {
self.types.push(~"[");
unsafe { visit_tydesc(inner, &mut *self as &mut TyVisitor) };
self.types.push(~"]");
true
}
fn visit_evec_slice(&mut self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
fn visit_evec_fixed(&mut self, _n: uint, _sz: uint, _align: uint,
_mtbl: uint, _inner: *TyDesc) -> bool { true }