auto merge of #7605 : thestinger/rust/vec, r=Aatch
This continues the work from the now-landed #7495 and #7521 pull requests. Removing the headers from unique vectors is a separate project, so I've split out the allocator for vectors.
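Concretely, unique boxes (`~T`) and unique vectors/strings (`~[T]`, `~str`) now go through separate exchange-heap lang items: `exchange_malloc` stays header-free, while the new `vector_exchange_malloc` still reserves room for a box header. For reference, here are the two entry points as they stand after this change, excerpted from the libstd hunk further down (the stage-gating `#[cfg(...)]` attributes are trimmed and the inline comments are mine; `malloc_raw` and `get_box_size` are the existing helpers that hunk already calls):

    #[lang="exchange_malloc"]
    #[inline]
    pub unsafe fn exchange_malloc(_align: u32, size: uintptr_t) -> *c_char {
        // headerless unique allocation: the alignment argument is unused here
        malloc_raw(size as uint) as *c_char
    }

    #[lang="vector_exchange_malloc"]
    #[inline]
    pub unsafe fn vector_exchange_malloc(align: u32, size: uintptr_t) -> *c_char {
        // vectors still carry a box header, so compute the full box size first
        let total_size = get_box_size(size as uint, align as uint);
        malloc_raw(total_size as uint) as *c_char
    }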
Commit 48ad726f2a
@@ -63,33 +63,34 @@ pub enum LangItem {
     FailFnLangItem, // 24
     FailBoundsCheckFnLangItem, // 25
     ExchangeMallocFnLangItem, // 26
-    ClosureExchangeMallocFnLangItem, // 27
-    ExchangeFreeFnLangItem, // 28
-    MallocFnLangItem, // 29
-    FreeFnLangItem, // 30
-    BorrowAsImmFnLangItem, // 31
-    BorrowAsMutFnLangItem, // 32
-    ReturnToMutFnLangItem, // 33
-    CheckNotBorrowedFnLangItem, // 34
-    StrDupUniqFnLangItem, // 35
-    RecordBorrowFnLangItem, // 36
-    UnrecordBorrowFnLangItem, // 37
+    VectorExchangeMallocFnLangItem, // 27
+    ClosureExchangeMallocFnLangItem, // 28
+    ExchangeFreeFnLangItem, // 29
+    MallocFnLangItem, // 30
+    FreeFnLangItem, // 31
+    BorrowAsImmFnLangItem, // 32
+    BorrowAsMutFnLangItem, // 33
+    ReturnToMutFnLangItem, // 34
+    CheckNotBorrowedFnLangItem, // 35
+    StrDupUniqFnLangItem, // 36
+    RecordBorrowFnLangItem, // 37
+    UnrecordBorrowFnLangItem, // 38
 
-    StartFnLangItem, // 38
+    StartFnLangItem, // 39
 
-    TyDescStructLangItem, // 39
-    TyVisitorTraitLangItem, // 40
-    OpaqueStructLangItem, // 41
+    TyDescStructLangItem, // 40
+    TyVisitorTraitLangItem, // 41
+    OpaqueStructLangItem, // 42
 }
 
 pub struct LanguageItems {
-    items: [Option<def_id>, ..42]
+    items: [Option<def_id>, ..43]
 }
 
 impl LanguageItems {
     pub fn new() -> LanguageItems {
         LanguageItems {
-            items: [ None, ..42 ]
+            items: [ None, ..43 ]
         }
     }
 
@@ -129,23 +130,24 @@ impl LanguageItems {
             24 => "fail_",
             25 => "fail_bounds_check",
             26 => "exchange_malloc",
-            27 => "closure_exchange_malloc",
-            28 => "exchange_free",
-            29 => "malloc",
-            30 => "free",
-            31 => "borrow_as_imm",
-            32 => "borrow_as_mut",
-            33 => "return_to_mut",
-            34 => "check_not_borrowed",
-            35 => "strdup_uniq",
-            36 => "record_borrow",
-            37 => "unrecord_borrow",
+            27 => "vector_exchange_malloc",
+            28 => "closure_exchange_malloc",
+            29 => "exchange_free",
+            30 => "malloc",
+            31 => "free",
+            32 => "borrow_as_imm",
+            33 => "borrow_as_mut",
+            34 => "return_to_mut",
+            35 => "check_not_borrowed",
+            36 => "strdup_uniq",
+            37 => "record_borrow",
+            38 => "unrecord_borrow",
 
-            38 => "start",
+            39 => "start",
 
-            39 => "ty_desc",
-            40 => "ty_visitor",
-            41 => "opaque",
+            40 => "ty_desc",
+            41 => "ty_visitor",
+            42 => "opaque",
 
             _ => "???"
         }
@@ -238,6 +240,9 @@ impl LanguageItems {
     pub fn exchange_malloc_fn(&self) -> def_id {
         self.items[ExchangeMallocFnLangItem as uint].get()
     }
+    pub fn vector_exchange_malloc_fn(&self) -> def_id {
+        self.items[VectorExchangeMallocFnLangItem as uint].get()
+    }
     pub fn closure_exchange_malloc_fn(&self) -> def_id {
         self.items[ClosureExchangeMallocFnLangItem as uint].get()
     }
@@ -331,6 +336,7 @@ impl<'self> LanguageItemCollector<'self> {
         item_refs.insert(@"fail_bounds_check",
                          FailBoundsCheckFnLangItem as uint);
         item_refs.insert(@"exchange_malloc", ExchangeMallocFnLangItem as uint);
+        item_refs.insert(@"vector_exchange_malloc", VectorExchangeMallocFnLangItem as uint);
         item_refs.insert(@"closure_exchange_malloc", ClosureExchangeMallocFnLangItem as uint);
         item_refs.insert(@"exchange_free", ExchangeFreeFnLangItem as uint);
         item_refs.insert(@"malloc", MallocFnLangItem as uint);
@@ -1395,8 +1395,12 @@ pub fn compile_submatch(bcx: block,
     }
 
     if any_uniq_pat(m, col) {
+        let pat_ty = node_id_type(bcx, pat_id);
         let llbox = Load(bcx, val);
-        let unboxed = GEPi(bcx, llbox, [0u, abi::box_field_body]);
+        let unboxed = match ty::get(pat_ty).sty {
+            ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).contains_managed() => llbox,
+            _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
+        };
         compile_submatch(bcx, enter_uniq(bcx, dm, m, col, val),
                          vec::append(~[unboxed], vals_left), chk);
         return;
@@ -1868,8 +1872,12 @@ pub fn bind_irrefutable_pat(bcx: block,
            }
         }
         ast::pat_box(inner) | ast::pat_uniq(inner) => {
+            let pat_ty = node_id_type(bcx, pat.id);
             let llbox = Load(bcx, val);
-            let unboxed = GEPi(bcx, llbox, [0u, abi::box_field_body]);
+            let unboxed = match ty::get(pat_ty).sty {
+                ty::ty_uniq(*) if !ty::type_contents(bcx.tcx(), pat_ty).contains_managed() => llbox,
+                _ => GEPi(bcx, llbox, [0u, abi::box_field_body])
+            };
             bcx = bind_irrefutable_pat(bcx,
                                        inner,
                                        unboxed,
@@ -289,21 +289,25 @@ pub fn malloc_raw_dyn(bcx: block,
     let _icx = push_ctxt("malloc_raw");
     let ccx = bcx.ccx();
 
-    let (mk_fn, langcall) = match heap {
-        heap_managed | heap_managed_unique => {
-            (ty::mk_imm_box, bcx.tcx().lang_items.malloc_fn())
-        }
-        heap_exchange => {
-            (ty::mk_imm_uniq, bcx.tcx().lang_items.exchange_malloc_fn())
-        }
-        heap_exchange_closure => {
-            (ty::mk_imm_uniq, bcx.tcx().lang_items.closure_exchange_malloc_fn())
-        }
-    };
-
     if heap == heap_exchange {
+        let llty_value = type_of::type_of(ccx, t);
+        let llalign = llalign_of_min(ccx, llty_value);
+
+        // Allocate space:
+        let rval = alloca(bcx, Type::i8p());
+        let bcx = callee::trans_lang_call(
+            bcx,
+            bcx.tcx().lang_items.exchange_malloc_fn(),
+            [C_i32(llalign as i32), size],
+            expr::SaveIn(rval));
+        rslt(bcx, PointerCast(bcx, Load(bcx, rval), llty_value.ptr_to()))
+    } else if heap == heap_exchange_vector {
         // Grab the TypeRef type of box_ptr_ty.
-        let box_ptr_ty = mk_fn(bcx.tcx(), t);
+        let element_type = match ty::get(t).sty {
+            ty::ty_unboxed_vec(e) => e,
+            _ => fail!("not a vector body")
+        };
+        let box_ptr_ty = ty::mk_evec(bcx.tcx(), element_type, ty::vstore_uniq);
         let llty = type_of(ccx, box_ptr_ty);
 
         let llty_value = type_of::type_of(ccx, t);
@@ -313,11 +317,22 @@ pub fn malloc_raw_dyn(bcx: block,
         let rval = alloca(bcx, Type::i8p());
         let bcx = callee::trans_lang_call(
             bcx,
-            langcall,
+            bcx.tcx().lang_items.vector_exchange_malloc_fn(),
             [C_i32(llalign as i32), size],
             expr::SaveIn(rval));
         rslt(bcx, PointerCast(bcx, Load(bcx, rval), llty))
     } else {
+        // we treat ~fn, @fn and @[] as @ here, which isn't ideal
+        let (mk_fn, langcall) = match heap {
+            heap_managed | heap_managed_unique => {
+                (ty::mk_imm_box, bcx.tcx().lang_items.malloc_fn())
+            }
+            heap_exchange_closure => {
+                (ty::mk_imm_box, bcx.tcx().lang_items.closure_exchange_malloc_fn())
+            }
+            _ => fail!("heap_exchange/heap_exchange_vector already handled")
+        };
+
         // Grab the TypeRef type of box_ptr_ty.
         let box_ptr_ty = mk_fn(bcx.tcx(), t);
         let llty = type_of(ccx, box_ptr_ty);
@@ -359,6 +374,7 @@ pub struct MallocResult {
 // and pulls out the body
 pub fn malloc_general_dyn(bcx: block, t: ty::t, heap: heap, size: ValueRef)
     -> MallocResult {
+    assert!(heap != heap_exchange);
     let _icx = push_ctxt("malloc_general");
     let Result {bcx: bcx, val: llbox} = malloc_raw_dyn(bcx, t, heap, size);
     let body = GEPi(bcx, llbox, [0u, abi::box_field_body]);
@@ -366,9 +382,9 @@ pub fn malloc_general_dyn(bcx: block, t: ty::t, heap: heap, size: ValueRef)
     MallocResult { bcx: bcx, box: llbox, body: body }
 }
 
-pub fn malloc_general(bcx: block, t: ty::t, heap: heap)
-                      -> MallocResult {
-    let ty = type_of(bcx.ccx(), t);
+pub fn malloc_general(bcx: block, t: ty::t, heap: heap) -> MallocResult {
+    let ty = type_of(bcx.ccx(), t);
+    assert!(heap != heap_exchange);
     malloc_general_dyn(bcx, t, heap, llsize_of(bcx.ccx(), ty))
 }
 pub fn malloc_boxed(bcx: block, t: ty::t)
@@ -385,6 +401,7 @@ pub fn heap_for_unique(bcx: block, t: ty::t) -> heap {
 }
 
 pub fn maybe_set_managed_unique_rc(bcx: block, bx: ValueRef, heap: heap) {
+    assert!(heap != heap_exchange);
     if heap == heap_managed_unique {
         // In cases where we are looking at a unique-typed allocation in the
         // managed heap (thus have refcount 1 from the managed allocator),
@@ -396,11 +413,6 @@ pub fn maybe_set_managed_unique_rc(bcx: block, bx: ValueRef, heap: heap) {
     }
 }
 
-pub fn malloc_unique(bcx: block, t: ty::t)
-    -> MallocResult {
-    malloc_general(bcx, t, heap_for_unique(bcx, t))
-}
-
 // Type descriptor and type glue stuff
 
 pub fn get_tydesc_simple(ccx: &mut CrateContext, t: ty::t) -> ValueRef {
@@ -274,6 +274,7 @@ pub enum heap {
     heap_managed,
     heap_managed_unique,
     heap_exchange,
+    heap_exchange_vector,
     heap_exchange_closure
 }
 
@@ -395,7 +396,7 @@ pub fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
             let f: @fn(block) -> block = |a| glue::trans_free(a, ptr);
             f
         }
-        heap_exchange | heap_exchange_closure => {
+        heap_exchange | heap_exchange_vector | heap_exchange_closure => {
             let f: @fn(block) -> block = |a| glue::trans_exchange_free(a, ptr);
             f
         }
@@ -100,6 +100,7 @@ use middle::trans::glue;
 use middle::trans::tvec;
 use middle::trans::type_of;
 use middle::trans::write_guard;
+use middle::trans::type_::Type;
 use middle::ty;
 use util::common::indenter;
 use util::ppaux::ty_to_str;
@@ -567,8 +568,14 @@ impl Datum {
          * This datum must represent an @T or ~T box. Returns a new
          * by-ref datum of type T, pointing at the contents. */
 
-        let content_ty = match ty::get(self.ty).sty {
-            ty::ty_box(mt) | ty::ty_uniq(mt) => mt.ty,
+        let (content_ty, header) = match ty::get(self.ty).sty {
+            ty::ty_box(mt) => (mt.ty, true),
+            ty::ty_uniq(mt) => (mt.ty, false),
+            ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) => {
+                let unit_ty = ty::sequence_element_type(bcx.tcx(), self.ty);
+                let unboxed_vec_ty = ty::mk_mut_unboxed_vec(bcx.tcx(), unit_ty);
+                (unboxed_vec_ty, true)
+            }
             _ => {
                 bcx.tcx().sess.bug(fmt!(
                     "box_body() invoked on non-box type %s",
@@ -576,9 +583,16 @@ impl Datum {
             }
         };
 
-        let ptr = self.to_value_llval(bcx);
-        let body = opaque_box_body(bcx, content_ty, ptr);
-        Datum {val: body, ty: content_ty, mode: ByRef(ZeroMem)}
+        if !header && !ty::type_contents(bcx.tcx(), content_ty).contains_managed() {
+            let ptr = self.to_value_llval(bcx);
+            let ty = type_of(bcx.ccx(), content_ty);
+            let body = PointerCast(bcx, ptr, ty.ptr_to());
+            Datum {val: body, ty: content_ty, mode: ByRef(ZeroMem)}
+        } else { // has a header
+            let ptr = self.to_value_llval(bcx);
+            let body = opaque_box_body(bcx, content_ty, ptr);
+            Datum {val: body, ty: content_ty, mode: ByRef(ZeroMem)}
+        }
     }
 
     pub fn to_rptr(&self, bcx: block) -> Datum {
@@ -150,6 +150,7 @@ use middle::ty::{AutoPtr, AutoBorrowVec, AutoBorrowVecRef, AutoBorrowFn,
 use middle::ty;
 use util::common::indenter;
 use util::ppaux::Repr;
+use middle::trans::machine::llsize_of;
 
 use middle::trans::type_::Type;
 
@@ -464,7 +465,7 @@ fn trans_rvalue_datum_unadjusted(bcx: block, expr: @ast::expr) -> DatumBlock {
                                                       expr, contents);
         }
         ast::expr_vstore(contents, ast::expr_vstore_uniq) => {
-            let heap = heap_for_unique(bcx, expr_ty(bcx, contents));
+            let heap = tvec::heap_for_unique_vector(bcx, expr_ty(bcx, contents));
             return tvec::trans_uniq_or_managed_vstore(bcx, heap,
                                                       expr, contents);
         }
@@ -1329,12 +1330,23 @@ fn trans_unary_datum(bcx: block,
                        contents_ty: ty::t,
                        heap: heap) -> DatumBlock {
         let _icx = push_ctxt("trans_boxed_expr");
-        let base::MallocResult { bcx, box: bx, body } =
-            base::malloc_general(bcx, contents_ty, heap);
-        add_clean_free(bcx, bx, heap);
-        let bcx = trans_into(bcx, contents, SaveIn(body));
-        revoke_clean(bcx, bx);
-        return immediate_rvalue_bcx(bcx, bx, box_ty);
+        if heap == heap_exchange {
+            let llty = type_of(bcx.ccx(), contents_ty);
+            let size = llsize_of(bcx.ccx(), llty);
+            let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, contents_ty,
+                                                               heap_exchange, size);
+            add_clean_free(bcx, val, heap_exchange);
+            let bcx = trans_into(bcx, contents, SaveIn(val));
+            revoke_clean(bcx, val);
+            return immediate_rvalue_bcx(bcx, val, box_ty);
+        } else {
+            let base::MallocResult { bcx, box: bx, body } =
+                base::malloc_general(bcx, contents_ty, heap);
+            add_clean_free(bcx, bx, heap);
+            let bcx = trans_into(bcx, contents, SaveIn(body));
+            revoke_clean(bcx, bx);
+            return immediate_rvalue_bcx(bcx, bx, box_ty);
+        }
     }
 }
 
@@ -386,7 +386,9 @@ pub fn make_free_glue(bcx: block, v: ValueRef, t: ty::t) {
       ty::ty_uniq(*) => {
         uniq::make_free_glue(bcx, v, t)
       }
-      ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) |
+      ty::ty_evec(_, ty::vstore_uniq) | ty::ty_estr(ty::vstore_uniq) => {
+        tvec::make_uniq_free_glue(bcx, v, t)
+      }
       ty::ty_evec(_, ty::vstore_box) | ty::ty_estr(ty::vstore_box) => {
         make_free_glue(bcx, v,
                        tvec::expand_boxed_vec_ty(bcx.tcx(), t));
@@ -548,6 +548,7 @@ pub fn trans_trait_callee_from_llval(bcx: block,
 
     let _icx = push_ctxt("impl::trans_trait_callee");
     let ccx = bcx.ccx();
+    let mut bcx = bcx;
 
     // Load the vtable from the @Trait pair
     debug!("(translating trait callee) loading vtable from pair %s",
@@ -576,6 +577,10 @@ pub fn trans_trait_callee_from_llval(bcx: block,
         }
         ast::sty_region(*) => {
             match store {
+                ty::UniqTraitStore
+                    if !ty::type_contents(bcx.tcx(), callee_ty).contains_managed() => {
+                    llself = llbox;
+                }
                 ty::BoxTraitStore |
                 ty::UniqTraitStore => {
                     llself = GEPi(bcx, llbox, [0u, abi::box_field_body]);
@@ -194,7 +194,11 @@ impl Reflector {
         }
         ty::ty_uniq(ref mt) => {
             let extra = self.c_mt(mt);
-            self.visit("uniq", extra)
+            if ty::type_contents(bcx.tcx(), t).contains_managed() {
+                self.visit("uniq_managed", extra)
+            } else {
+                self.visit("uniq", extra)
+            }
         }
         ty::ty_ptr(ref mt) => {
             let extra = self.c_mt(mt);
@@ -33,6 +33,23 @@ use std::option::None;
 use syntax::ast;
 use syntax::codemap;
 
+pub fn make_uniq_free_glue(bcx: block, vptrptr: ValueRef, box_ty: ty::t)
+    -> block {
+    let box_datum = immediate_rvalue(Load(bcx, vptrptr), box_ty);
+
+    let not_null = IsNotNull(bcx, box_datum.val);
+    do with_cond(bcx, not_null) |bcx| {
+        let body_datum = box_datum.box_body(bcx);
+        let bcx = glue::drop_ty(bcx, body_datum.to_ref_llval(bcx),
+                                body_datum.ty);
+        if ty::type_contents(bcx.tcx(), box_ty).contains_managed() {
+            glue::trans_free(bcx, box_datum.val)
+        } else {
+            glue::trans_exchange_free(bcx, box_datum.val)
+        }
+    }
+}
+
 // Boxed vector types are in some sense currently a "shorthand" for a box
 // containing an unboxed vector. This expands a boxed vector type into such an
 // expanded type. It doesn't respect mutability, but that doesn't matter at
@@ -42,7 +59,7 @@ pub fn expand_boxed_vec_ty(tcx: ty::ctxt, t: ty::t) -> ty::t {
     let unboxed_vec_ty = ty::mk_mut_unboxed_vec(tcx, unit_ty);
     match ty::get(t).sty {
         ty::ty_estr(ty::vstore_uniq) | ty::ty_evec(_, ty::vstore_uniq) => {
-            ty::mk_imm_uniq(tcx, unboxed_vec_ty)
+            fail!("cannot treat vectors/strings as exchange allocations yet");
         }
         ty::ty_estr(ty::vstore_box) | ty::ty_evec(_, ty::vstore_box) => {
             ty::mk_imm_box(tcx, unboxed_vec_ty)
@@ -95,9 +112,17 @@ pub fn alloc_raw(bcx: block, unit_ty: ty::t,
     return rslt(bcx, bx);
 }
 
+pub fn heap_for_unique_vector(bcx: block, t: ty::t) -> heap {
+    if ty::type_contents(bcx.tcx(), t).contains_managed() {
+        heap_managed_unique
+    } else {
+        heap_exchange_vector
+    }
+}
+
 pub fn alloc_uniq_raw(bcx: block, unit_ty: ty::t,
                       fill: ValueRef, alloc: ValueRef) -> Result {
-    alloc_raw(bcx, unit_ty, fill, alloc, base::heap_for_unique(bcx, unit_ty))
+    alloc_raw(bcx, unit_ty, fill, alloc, heap_for_unique_vector(bcx, unit_ty))
 }
 
 pub fn alloc_vec(bcx: block,
@@ -298,7 +323,7 @@ pub fn trans_uniq_or_managed_vstore(bcx: block, heap: heap, vstore_expr: @ast::e
 
     // Handle ~"".
     match heap {
-        heap_exchange => {
+        heap_exchange_vector => {
             match content_expr.node {
                 ast::expr_lit(@codemap::spanned {
                     node: ast::lit_str(s), _
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
heap_exchange_closure => fail!("vectors are not allocated with closure_exchange_alloc"),
|
||||
heap_exchange | heap_exchange_closure => fail!("vectors use vector_exchange_alloc"),
|
||||
heap_managed | heap_managed_unique => {}
|
||||
}
|
||||
|
||||
|
@@ -72,29 +72,6 @@ pub fn type_of_fn_from_ty(cx: &mut CrateContext, fty: ty::t) -> Type {
     }
 }
 
-pub fn type_of_non_gc_box(cx: &mut CrateContext, t: ty::t) -> Type {
-    assert!(!ty::type_needs_infer(t));
-
-    let t_norm = ty::normalize_ty(cx.tcx, t);
-    if t != t_norm {
-        type_of_non_gc_box(cx, t_norm)
-    } else {
-        match ty::get(t).sty {
-            ty::ty_box(mt) => {
-                let ty = type_of(cx, mt.ty);
-                Type::box(cx, &ty).ptr_to()
-            }
-            ty::ty_uniq(mt) => {
-                let ty = type_of(cx, mt.ty);
-                Type::unique(cx, &ty).ptr_to()
-            }
-            _ => {
-                cx.sess.bug("non-box in type_of_non_gc_box");
-            }
-        }
-    }
-}
-
 // A "sizing type" is an LLVM type, the size and alignment of which are
 // guaranteed to be equivalent to what you would get out of `type_of()`. It's
 // useful because:
@@ -231,7 +208,11 @@ pub fn type_of(cx: &mut CrateContext, t: ty::t) -> Type {
       ty::ty_opaque_box => Type::opaque_box(cx).ptr_to(),
       ty::ty_uniq(ref mt) => {
           let ty = type_of(cx, mt.ty);
-          Type::unique(cx, &ty).ptr_to()
+          if ty::type_contents(cx.tcx, mt.ty).contains_managed() {
+              Type::unique(cx, &ty).ptr_to()
+          } else {
+              ty.ptr_to()
+          }
       }
       ty::ty_evec(ref mt, ty::vstore_uniq) => {
           let ty = type_of(cx, mt.ty);
@@ -17,6 +17,9 @@ use middle::trans::datum::immediate_rvalue;
 use middle::trans::datum;
 use middle::trans::glue;
 use middle::ty;
+use middle::trans::machine::llsize_of;
+use middle::trans::type_of;
+use middle::trans::type_of::*;
 
 pub fn make_free_glue(bcx: block, vptrptr: ValueRef, box_ty: ty::t)
     -> block {
@@ -44,12 +47,21 @@ pub fn duplicate(bcx: block, src_box: ValueRef, src_ty: ty::t) -> Result {
     let body_datum = src_datum.box_body(bcx);
 
     // Malloc space in exchange heap and copy src into it
-    let MallocResult {
-        bcx: bcx,
-        box: dst_box,
-        body: dst_body
-    } = malloc_unique(bcx, body_datum.ty);
-    body_datum.copy_to(bcx, datum::INIT, dst_body);
+    if ty::type_contents(bcx.tcx(), src_ty).contains_managed() {
+        let MallocResult {
+            bcx: bcx,
+            box: dst_box,
+            body: dst_body
+        } = malloc_general(bcx, body_datum.ty, heap_managed_unique);
+        body_datum.copy_to(bcx, datum::INIT, dst_body);
 
-    rslt(bcx, dst_box)
+        rslt(bcx, dst_box)
+    } else {
+        let body_datum = body_datum.to_value_datum(bcx);
+        let llty = type_of(bcx.ccx(), body_datum.ty);
+        let size = llsize_of(bcx.ccx(), llty);
+        let Result { bcx: bcx, val: val } = malloc_raw_dyn(bcx, body_datum.ty, heap_exchange, size);
+        body_datum.copy_to(bcx, datum::INIT, val);
+        Result { bcx: bcx, val: val }
+    }
 }
@@ -248,6 +248,14 @@ impl<V:TyVisitor + MovePtr> TyVisitor for MovePtrAdaptor<V> {
         true
     }
 
+    #[cfg(not(stage0))]
+    fn visit_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool {
+        self.align_to::<~u8>();
+        if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
+        self.bump_past::<~u8>();
+        true
+    }
+
     fn visit_ptr(&self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<*u8>();
         if ! self.inner.visit_ptr(mtbl, inner) { return false; }
@@ -300,6 +300,15 @@ impl TyVisitor for ReprVisitor {
     }
 
     fn visit_uniq(&self, mtbl: uint, inner: *TyDesc) -> bool {
         self.writer.write_char('~');
         self.write_mut_qualifier(mtbl);
+        do self.get::<*c_void> |b| {
+            self.visit_ptr_inner(*b, inner);
+        }
+    }
+
+    #[cfg(not(stage0))]
+    fn visit_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool {
+        self.writer.write_char('~');
+        self.write_mut_qualifier(mtbl);
         do self.get::<&managed::raw::BoxRepr> |b| {
@@ -80,7 +80,14 @@ pub unsafe fn exchange_malloc(td: *c_char, size: uintptr_t) -> *c_char {
 #[cfg(not(stage0), not(test))]
 #[lang="exchange_malloc"]
 #[inline]
-pub unsafe fn exchange_malloc(align: u32, size: uintptr_t) -> *c_char {
+pub unsafe fn exchange_malloc(_align: u32, size: uintptr_t) -> *c_char {
     malloc_raw(size as uint) as *c_char
 }
 
+#[cfg(not(test))]
+#[lang="vector_exchange_malloc"]
+#[inline]
+pub unsafe fn vector_exchange_malloc(align: u32, size: uintptr_t) -> *c_char {
+    let total_size = get_box_size(size as uint, align as uint);
+    malloc_raw(total_size as uint) as *c_char
+}
@@ -91,6 +91,7 @@ pub trait TyVisitor {
 
     fn visit_box(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_uniq(&self, mtbl: uint, inner: *TyDesc) -> bool;
+    fn visit_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_ptr(&self, mtbl: uint, inner: *TyDesc) -> bool;
     fn visit_rptr(&self, mtbl: uint, inner: *TyDesc) -> bool;
 
@@ -33,28 +33,15 @@ use sys::size_of;
 use uint;
 use unstable::intrinsics;
 #[cfg(stage0)]
-use intrinsic::{get_tydesc};
+use intrinsic::{get_tydesc, TyDesc};
 #[cfg(not(stage0))]
-use unstable::intrinsics::{get_tydesc, contains_managed};
+use unstable::intrinsics::{get_tydesc, contains_managed, TyDesc};
 use vec;
 use util;
 
-#[doc(hidden)]
-pub mod rustrt {
-    use libc;
-    use vec::raw;
-    #[cfg(stage0)]
-    use intrinsic::{TyDesc};
-    #[cfg(not(stage0))]
-    use unstable::intrinsics::{TyDesc};
-
-    #[abi = "cdecl"]
-    pub extern {
-        #[fast_ffi]
-        unsafe fn vec_reserve_shared_actual(t: *TyDesc,
-                                            v: **raw::VecRepr,
-                                            n: libc::size_t);
-    }
+extern {
+    #[fast_ffi]
+    unsafe fn vec_reserve_shared_actual(t: *TyDesc, v: **raw::VecRepr, n: libc::size_t);
 }
 
 /// Returns true if two vectors have the same length
@@ -1152,7 +1139,7 @@ impl<T> OwnedVector<T> for ~[T] {
             let td = get_tydesc::<T>();
             if ((**ptr).box_header.ref_count ==
                 managed::raw::RC_MANAGED_UNIQUE) {
-                rustrt::vec_reserve_shared_actual(td, ptr as **raw::VecRepr, n as libc::size_t);
+                vec_reserve_shared_actual(td, ptr as **raw::VecRepr, n as libc::size_t);
             } else {
                 let alloc = n * sys::nonzero_size_of::<T>();
                 *ptr = realloc_raw(*ptr as *mut c_void, alloc + size_of::<raw::VecRepr>())
@@ -1182,7 +1169,7 @@ impl<T> OwnedVector<T> for ~[T] {
             let ptr: *mut *mut raw::VecRepr = cast::transmute(self);
             let td = get_tydesc::<T>();
             if contains_managed::<T>() {
-                rustrt::vec_reserve_shared_actual(td, ptr as **raw::VecRepr, n as libc::size_t);
+                vec_reserve_shared_actual(td, ptr as **raw::VecRepr, n as libc::size_t);
             } else {
                 let alloc = n * sys::nonzero_size_of::<T>();
                 *ptr = realloc_raw(*ptr as *mut c_void, alloc + size_of::<raw::VecRepr>())
@@ -232,6 +232,13 @@ impl<V:TyVisitor + movable_ptr> TyVisitor for ptr_visit_adaptor<V> {
         true
    }
 
+    fn visit_uniq_managed(&self, mtbl: uint, inner: *TyDesc) -> bool {
+        self.align_to::<~u8>();
+        if ! self.inner.visit_uniq_managed(mtbl, inner) { return false; }
+        self.bump_past::<~u8>();
+        true
+    }
+
     fn visit_ptr(&self, mtbl: uint, inner: *TyDesc) -> bool {
         self.align_to::<*u8>();
         if ! self.inner.visit_ptr(mtbl, inner) { return false; }
@@ -552,6 +559,7 @@ impl TyVisitor for my_visitor {
 
     fn visit_box(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_uniq(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
+    fn visit_uniq_managed(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_ptr(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_rptr(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
 
@@ -70,6 +70,7 @@ impl TyVisitor for MyVisitor {
 
     fn visit_box(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_uniq(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
+    fn visit_uniq_managed(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_ptr(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
     fn visit_rptr(&self, _mtbl: uint, _inner: *TyDesc) -> bool { true }
 