Auto merge of #31430 - nagisa:mir-dyndrop, r=nikomatsakis

Zeroing on-drop seems to work fine. Still thinking about the best way to approach zeroing on-move.

(based on top of the other drop PR; only the last 2 commits are relevant)
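For context, here is a rough sketch of the dynamic-drop problem this series deals with (illustrative Rust only, not code from the commit; the `Noisy` type and `maybe_move` function are made up for the example). Whether a local still owns its value at the end of its scope can depend on a runtime branch, so the generated code cannot decide statically whether to run the destructor; the approach in this PR fills dropped and moved-from slots with the `adt::DTOR_DONE` fill byte so that they are not dropped a second time.

// Illustrative only: whether `x` must be dropped at the end of `maybe_move`
// depends on `cond`, so the moved-from slot has to be marked as already
// dropped at the point of the move.
struct Noisy(u32);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("drop {}", self.0);
    }
}

fn maybe_move(cond: bool) {
    let x = Noisy(1);
    if cond {
        let _y = x; // `x` is moved here; its slot must not be dropped again
    }               // `_y` is dropped here when `cond` is true
}                   // `x` is dropped here only when `cond` is false

fn main() {
    maybe_move(true);  // prints "drop 1" once, inside the `if`
    maybe_move(false); // prints "drop 1" once, at the end of the function
}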
This commit is contained in:
bors 2016-03-01 23:30:49 +00:00
commit 339a409bfd
22 changed files with 405 additions and 221 deletions

View File

@ -198,8 +198,8 @@ pub fn visit_all_items_in_krate<'tcx,V,F>(tcx: &ty::ctxt<'tcx>,
fn visit_item(&mut self, i: &'tcx hir::Item) {
let item_def_id = self.tcx.map.local_def_id(i.id);
let task_id = (self.dep_node_fn)(item_def_id);
debug!("About to start task {:?}", task_id);
let _task = self.tcx.dep_graph.in_task(task_id);
debug!("Started task {:?}", task_id);
self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
self.visitor.visit_item(i)
}

View File

@ -2165,6 +2165,8 @@ extern {
NumInputs: c_uint)
-> OperandBundleDefRef;
pub fn LLVMRustFreeOperandBundleDef(Bundle: OperandBundleDefRef);
pub fn LLVMRustPositionBuilderAtStart(B: BuilderRef, BB: BasicBlockRef);
}
// LLVM requires symbols from this library, but apparently they're not printed

View File

@ -21,14 +21,64 @@ impl<'a,'tcx> Builder<'a,'tcx> {
-> BlockAnd<()> {
let Block { extent, span, stmts, expr } = self.hir.mirror(ast_block);
self.in_scope(extent, block, move |this| {
unpack!(block = this.stmts(block, stmts));
match expr {
Some(expr) => this.into(destination, block, expr),
None => {
this.cfg.push_assign_unit(block, span, destination);
block.unit()
// This convoluted structure is to avoid using recursion as we walk down a list
// of statements. Basically, the structure we get back is something like:
//
// let x = <init> in {
// expr1;
// let y = <init> in {
// expr2;
// expr3;
// ...
// }
// }
//
// The let bindings are valid until the end of the block, so all we have to do is pop all
// the let-scopes at the end.
//
// First we build all the statements in the block.
let mut let_extent_stack = Vec::with_capacity(8);
for stmt in stmts {
let Stmt { span: _, kind } = this.hir.mirror(stmt);
match kind {
StmtKind::Expr { scope, expr } => {
unpack!(block = this.in_scope(scope, block, |this| {
let expr = this.hir.mirror(expr);
let temp = this.temp(expr.ty.clone());
unpack!(block = this.into(&temp, block, expr));
unpack!(block = this.build_drop(block, temp));
block.unit()
}));
}
StmtKind::Let { remainder_scope, init_scope, pattern, initializer } => {
this.push_scope(remainder_scope);
let_extent_stack.push(remainder_scope);
unpack!(block = this.in_scope(init_scope, block, move |this| {
// FIXME #30046 ^~~~
if let Some(init) = initializer {
this.expr_into_pattern(block, remainder_scope, pattern, init)
} else {
this.declare_bindings(remainder_scope, &pattern);
block.unit()
}
}));
}
}
}
// Then, the block may have an optional trailing expression which is a “return” value
// of the block.
if let Some(expr) = expr {
unpack!(block = this.into(destination, block, expr));
} else {
// FIXME(#31472)
this.cfg.push_assign_unit(block, span, destination);
}
// Finally, we pop all the let scopes before exiting the scope of the block itself.
for extent in let_extent_stack.into_iter().rev() {
unpack!(block = this.pop_scope(extent, block));
}
block.unit()
})
}
}
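The comment in the hunk above describes the shape that `let` bindings give a block: every `let` opens a remainder scope that stays live until the end of the enclosing block, and those scopes are popped in reverse order after the trailing expression. As a rough source-level illustration (plain Rust written for this note, not code from the commit):

fn block_scopes() -> usize {
    let x = String::from("x");   // remainder scope of `x` opens here
    let _ = x.len();             // expression statement inside `x`'s scope
    let y = String::from("y");   // remainder scope of `y`, nested inside `x`'s
    x.len() + y.len()            // trailing expression: the block's value
}   // let-scopes are popped in reverse: `y` is dropped first, then `x`

fn main() {
    assert_eq!(block_scopes(), 2);
}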

View File

@ -185,4 +185,3 @@ mod into;
mod matches;
mod misc;
mod scope;
mod stmt;

View File

@ -249,6 +249,7 @@ impl<'a,'tcx> Builder<'a,'tcx> {
extent: CodeExtent,
mut block: BasicBlock,
target: BasicBlock) {
debug!("exit_scope(extent={:?}, block={:?}, target={:?})", extent, block, target);
let scope_count = 1 + self.scopes.iter().rev().position(|scope| scope.extent == extent)
.unwrap_or_else(||{
self.hir.span_bug(span, &format!("extent {:?} does not enclose", extent))

View File

@ -1,83 +0,0 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use build::{BlockAnd, BlockAndExtension, Builder};
use hair::*;
use rustc::mir::repr::*;
impl<'a,'tcx> Builder<'a,'tcx> {
pub fn stmts(&mut self, mut block: BasicBlock, stmts: Vec<StmtRef<'tcx>>) -> BlockAnd<()> {
// This convoluted structure is to avoid using recursion as we walk down a list
// of statements. Basically, the structure we get back is something like:
//
// let x = <init> in {
// let y = <init> in {
// expr1;
// expr2;
// }
// }
//
// To process this, we keep a stack of (Option<CodeExtent>,
// vec::IntoIter<Stmt>) pairs. At each point we pull off the
// top most pair and extract one statement from the
// iterator. Once it's complete, we pop the scope from the
// first half the pair.
let this = self;
let mut stmt_lists = vec![(None, stmts.into_iter())];
while !stmt_lists.is_empty() {
let stmt = {
let &mut (_, ref mut stmts) = stmt_lists.last_mut().unwrap();
stmts.next()
};
let stmt = match stmt {
Some(stmt) => stmt,
None => {
let (extent, _) = stmt_lists.pop().unwrap();
if let Some(extent) = extent {
unpack!(block = this.pop_scope(extent, block));
}
continue
}
};
let Stmt { span: _, kind } = this.hir.mirror(stmt);
match kind {
StmtKind::Let { remainder_scope, init_scope, pattern, initializer, stmts } => {
this.push_scope(remainder_scope);
stmt_lists.push((Some(remainder_scope), stmts.into_iter()));
unpack!(block = this.in_scope(init_scope, block, move |this| {
// FIXME #30046 ^~~~
match initializer {
Some(initializer) => {
this.expr_into_pattern(block, remainder_scope, pattern, initializer)
}
None => {
this.declare_bindings(remainder_scope, &pattern);
block.unit()
}
}
}));
}
StmtKind::Expr { scope, expr } => {
unpack!(block = this.in_scope(scope, block, |this| {
let expr = this.hir.mirror(expr);
let temp = this.temp(expr.ty.clone());
unpack!(block = this.into(&temp, block, expr));
unpack!(block = this.build_drop(block, temp));
block.unit()
}));
}
}
}
block.unit()
}
}

View File

@ -21,62 +21,52 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Block {
fn make_mirror<'a>(self, cx: &mut Cx<'a, 'tcx>) -> Block<'tcx> {
// We have to eagerly translate the "spine" of the statements
// in order to get the lexical scoping correctly.
let stmts = mirror_stmts(cx, self.id, self.stmts.iter().enumerate());
let stmts = mirror_stmts(cx, self.id, &*self.stmts);
Block {
extent: cx.tcx.region_maps.node_extent(self.id),
span: self.span,
stmts: stmts,
expr: self.expr.to_ref(),
expr: self.expr.to_ref()
}
}
}
fn mirror_stmts<'a,'tcx:'a,STMTS>(cx: &mut Cx<'a,'tcx>,
block_id: ast::NodeId,
mut stmts: STMTS)
-> Vec<StmtRef<'tcx>>
where STMTS: Iterator<Item=(usize, &'tcx hir::Stmt)>
fn mirror_stmts<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>,
block_id: ast::NodeId,
stmts: &'tcx [hir::Stmt])
-> Vec<StmtRef<'tcx>>
{
let mut result = vec![];
while let Some((index, stmt)) = stmts.next() {
for (index, stmt) in stmts.iter().enumerate() {
match stmt.node {
hir::StmtExpr(ref expr, id) | hir::StmtSemi(ref expr, id) =>
result.push(
StmtRef::Mirror(
Box::new(Stmt { span: stmt.span,
kind: StmtKind::Expr {
scope: cx.tcx.region_maps.node_extent(id),
expr: expr.to_ref() } }))),
hir::StmtDecl(ref decl, id) => {
match decl.node {
hir::DeclItem(..) => { /* ignore for purposes of the MIR */ }
hir::DeclLocal(ref local) => {
let remainder_extent = CodeExtentData::Remainder(BlockRemainder {
block: block_id,
first_statement_index: index as u32,
});
let remainder_extent =
cx.tcx.region_maps.lookup_code_extent(remainder_extent);
// pull in all following statements, since
// they are within the scope of this let:
let following_stmts = mirror_stmts(cx, block_id, stmts);
let pattern = cx.irrefutable_pat(&local.pat);
result.push(StmtRef::Mirror(Box::new(Stmt {
span: stmt.span,
kind: StmtKind::Let {
remainder_scope: remainder_extent,
init_scope: cx.tcx.region_maps.node_extent(id),
pattern: pattern,
initializer: local.init.to_ref(),
stmts: following_stmts,
},
})));
return result;
result.push(StmtRef::Mirror(Box::new(Stmt {
span: stmt.span,
kind: StmtKind::Expr {
scope: cx.tcx.region_maps.node_extent(id),
expr: expr.to_ref()
}
}))),
hir::StmtDecl(ref decl, id) => match decl.node {
hir::DeclItem(..) => { /* ignore for purposes of the MIR */ }
hir::DeclLocal(ref local) => {
let remainder_extent = CodeExtentData::Remainder(BlockRemainder {
block: block_id,
first_statement_index: index as u32,
});
let remainder_extent =
cx.tcx.region_maps.lookup_code_extent(remainder_extent);
let pattern = cx.irrefutable_pat(&local.pat);
result.push(StmtRef::Mirror(Box::new(Stmt {
span: stmt.span,
kind: StmtKind::Let {
remainder_scope: remainder_extent,
init_scope: cx.tcx.region_maps.node_extent(id),
pattern: pattern,
initializer: local.init.to_ref(),
},
})));
}
}
}

View File

@ -78,10 +78,7 @@ pub enum StmtKind<'tcx> {
pattern: Pattern<'tcx>,
/// let pat = <INIT> ...
initializer: Option<ExprRef<'tcx>>,
/// let pat = init; <STMTS>
stmts: Vec<StmtRef<'tcx>>,
initializer: Option<ExprRef<'tcx>>
},
}

View File

@ -1295,22 +1295,29 @@ pub fn init_zero_mem<'blk, 'tcx>(cx: Block<'blk, 'tcx>, llptr: ValueRef, t: Ty<'
fn memfill<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>, byte: u8) {
let _icx = push_ctxt("memfill");
let ccx = b.ccx;
let llty = type_of::type_of(ccx, ty);
let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let llintrinsicfn = ccx.get_intrinsic(&intrinsic_key);
let llptr = b.pointercast(llptr, Type::i8(ccx).ptr_to());
let llzeroval = C_u8(ccx, byte);
let size = machine::llsize_of(ccx, llty);
let align = C_i32(ccx, type_of::align_of(ccx, ty) as i32);
let volatile = C_bool(ccx, false);
b.call(llintrinsicfn,
&[llptr, llzeroval, size, align, volatile],
None, None);
call_memset(b, llptr, llzeroval, size, align, false);
}
pub fn call_memset<'bcx, 'tcx>(b: &Builder<'bcx, 'tcx>,
ptr: ValueRef,
fill_byte: ValueRef,
size: ValueRef,
align: ValueRef,
volatile: bool) {
let ccx = b.ccx;
let ptr_width = &ccx.sess().target.target.target_pointer_width[..];
let intrinsic_key = format!("llvm.memset.p0i8.i{}", ptr_width);
let llintrinsicfn = ccx.get_intrinsic(&intrinsic_key);
let volatile = C_bool(ccx, volatile);
b.call(llintrinsicfn, &[ptr, fill_byte, size, align, volatile], None, None);
}
/// In general, when we create an scratch value in an alloca, the
/// creator may not know if the block (that initializes the scratch
/// with the desired value) actually dominates the cleanup associated

View File

@ -104,6 +104,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
pub fn position_at_start(&self, llbb: BasicBlockRef) {
unsafe {
llvm::LLVMRustPositionBuilderAtStart(self.llbuilder, llbb);
}
}
pub fn ret_void(&self) {
self.count_insn("retvoid");
unsafe {

View File

@ -735,6 +735,15 @@ impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> {
BlockAndBuilder::new(bcx, owned_builder)
}
pub fn at_start<F, R>(&self, f: F) -> R
where F: FnOnce(&BlockAndBuilder<'blk, 'tcx>) -> R
{
self.position_at_start(self.bcx.llbb);
let r = f(self);
self.position_at_end(self.bcx.llbb);
r
}
// Methods delegated to bcx
pub fn ccx(&self) -> &'blk CrateContext<'blk, 'tcx> {

View File

@ -20,11 +20,11 @@ use trans::common::{self, Block, BlockAndBuilder};
use trans::debuginfo::DebugLoc;
use trans::Disr;
use trans::foreign;
use trans::glue;
use trans::type_of;
use trans::glue;
use trans::type_::Type;
use super::MirContext;
use super::{MirContext, drop};
use super::operand::OperandValue::{FatPtr, Immediate, Ref};
use super::operand::OperandRef;
@ -188,8 +188,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
unwind.llbb(),
cleanup_bundle.as_ref(),
None);
self.bcx(target).at_start(|bcx| drop::drop_fill(bcx, lvalue.llval, ty));
} else {
bcx.call(drop_fn, &[llvalue], cleanup_bundle.as_ref(), None);
drop::drop_fill(&bcx, lvalue.llval, ty);
funclet_br(bcx, self.llblock(target));
}
}
@ -250,59 +252,41 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
landingpad.llbb(),
cleanup_bundle.as_ref(),
Some(attrs));
landingpad.at_start(|bcx| for op in args {
self.set_operand_dropped(bcx, op);
});
},
(false, &Some(cleanup), &Some((_, success))) => {
let cleanup = self.bcx(cleanup);
let landingpad = self.make_landing_pad(cleanup);
let (target, postinvoke) = if must_copy_dest {
(self.fcx.new_block("", None).build(), Some(self.bcx(success)))
} else {
(self.bcx(success), None)
};
let invokeret = bcx.invoke(callee.immediate(),
&llargs[..],
target.llbb(),
self.llblock(success),
landingpad.llbb(),
cleanup_bundle.as_ref(),
Some(attrs));
if let Some(postinvoketarget) = postinvoke {
// We translate the copy into a temporary block. The temporary block is
// necessary because the current block has already been terminated (by
// `invoke`) and we cannot really translate into the target block
// because:
// * The target block may have more than a single precedesor;
// * Some LLVM insns cannot have a preceeding store insn (phi,
// cleanuppad), and adding/prepending the store now may render
// those other instructions invalid.
//
// NB: This approach still may break some LLVM code. For example if the
// target block starts with a `phi` (which may only match on immediate
// precedesors), it cannot know about this temporary block thus
// resulting in an invalid code:
//
// this:
// …
// %0 = …
// %1 = invoke to label %temp …
// temp:
// store ty %1, ty* %dest
// br label %actualtargetblock
// actualtargetblock: ; preds: %temp, …
// phi … [%this, …], [%0, …] ; ERROR: phi requires to match only on
// ; immediate precedesors
if must_copy_dest {
let (ret_dest, ret_ty) = ret_dest_ty
.expect("return destination and type not set");
target.with_block(|target| {
base::store_ty(target, invokeret, ret_dest.llval, ret_ty);
});
target.br(postinvoketarget.llbb());
// We translate the copy straight into the beginning of the target
// block.
self.bcx(success).at_start(|bcx| bcx.with_block( |bcx| {
base::store_ty(bcx, invokeret, ret_dest.llval, ret_ty);
}));
}
self.bcx(success).at_start(|bcx| for op in args {
self.set_operand_dropped(bcx, op);
});
landingpad.at_start(|bcx| for op in args {
self.set_operand_dropped(bcx, op);
});
},
(false, _, &None) => {
bcx.call(callee.immediate(),
&llargs[..],
cleanup_bundle.as_ref(),
Some(attrs));
// no need to drop args, because the call never returns
bcx.unreachable();
}
(false, _, &Some((_, target))) => {
@ -317,6 +301,9 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
base::store_ty(bcx, llret, ret_dest.llval, ret_ty);
});
}
for op in args {
self.set_operand_dropped(&bcx, op);
}
funclet_br(bcx, self.llblock(target));
}
// Foreign functions
@ -333,6 +320,9 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
debugloc)
});
if let Some((_, target)) = *destination {
for op in args {
self.set_operand_dropped(&bcx, op);
}
funclet_br(bcx, self.llblock(target));
}
},
@ -388,7 +378,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
let use_funclets = base::wants_msvc_seh(bcx.sess()) && data.is_cleanup;
let cleanup_pad = if use_funclets {
bcx.set_personality_fn(self.fcx.eh_personality());
Some(bcx.cleanup_pad(None, &[]))
bcx.at_start(|bcx| Some(bcx.cleanup_pad(None, &[])))
} else {
None
};
@ -416,7 +406,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
self.blocks[bb.index()].build()
}
fn llblock(&self, bb: mir::BasicBlock) -> BasicBlockRef {
pub fn llblock(&self, bb: mir::BasicBlock) -> BasicBlockRef {
self.blocks[bb.index()].llbb
}
}

View File

@ -0,0 +1,27 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use llvm::ValueRef;
use rustc::middle::ty::Ty;
use trans::adt;
use trans::base;
use trans::common::{self, BlockAndBuilder};
use trans::machine;
use trans::type_of;
use trans::type_::Type;
pub fn drop_fill<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, value: ValueRef, ty: Ty<'tcx>) {
let llty = type_of::type_of(bcx.ccx(), ty);
let llptr = bcx.pointercast(value, Type::i8(bcx.ccx()).ptr_to());
let filling = common::C_u8(bcx.ccx(), adt::DTOR_DONE);
let size = machine::llsize_of(bcx.ccx(), llty);
let align = common::C_u32(bcx.ccx(), machine::llalign_of_min(bcx.ccx(), llty));
base::call_memset(&bcx, llptr, filling, size, align, false);
}

View File

@ -17,6 +17,7 @@ use trans::base;
use trans::common::{self, BlockAndBuilder};
use trans::machine;
use trans::type_of;
use trans::mir::drop;
use llvm;
use trans::Disr;
@ -48,6 +49,7 @@ impl<'tcx> LvalueRef<'tcx> {
{
assert!(!ty.has_erasable_regions());
let lltemp = bcx.with_block(|bcx| base::alloc_ty(bcx, ty, name));
drop::drop_fill(bcx, lltemp, ty);
LvalueRef::new_sized(lltemp, LvalueTy::from_ty(ty))
}
}

View File

@ -197,6 +197,7 @@ mod analyze;
mod block;
mod constant;
mod did;
mod drop;
mod lvalue;
mod operand;
mod rvalue;

View File

@ -16,8 +16,9 @@ use trans::base;
use trans::common::{self, Block, BlockAndBuilder};
use trans::datum;
use trans::Disr;
use trans::glue;
use super::{MirContext, TempRef};
use super::{MirContext, TempRef, drop};
use super::lvalue::LvalueRef;
/// The representation of a Rust value. The enum variant is in fact
@ -158,31 +159,13 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
}
pub fn trans_operand_into(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
lldest: ValueRef,
operand: &mir::Operand<'tcx>)
{
debug!("trans_operand_into(lldest={}, operand={:?})",
bcx.val_to_string(lldest),
operand);
// FIXME: consider not copying constants through the
// stack.
let o = self.trans_operand(bcx, operand);
self.store_operand(bcx, lldest, o);
}
pub fn store_operand(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
lldest: ValueRef,
operand: OperandRef<'tcx>)
{
debug!("store_operand: operand={}", operand.repr(bcx));
bcx.with_block(|bcx| {
self.store_operand_direct(bcx, lldest, operand)
})
bcx.with_block(|bcx| self.store_operand_direct(bcx, lldest, operand))
}
pub fn store_operand_direct(&mut self,
@ -245,4 +228,29 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}), ty)
}).collect()
}
pub fn set_operand_dropped(&mut self,
bcx: &BlockAndBuilder<'bcx, 'tcx>,
operand: &mir::Operand<'tcx>) {
match *operand {
mir::Operand::Constant(_) => return,
mir::Operand::Consume(ref lvalue) => {
if let mir::Lvalue::Temp(idx) = *lvalue {
if let TempRef::Operand(..) = self.temps[idx as usize] {
// All lvalues which have an associated drop are promoted to an alloca
// beforehand. If this is an operand, it is safe to say this is never
// dropped and there's no reason for us to zero this out at all.
return
}
}
let lvalue = self.trans_lvalue(bcx, lvalue);
let ty = lvalue.ty.to_ty(bcx.tcx());
if !glue::type_needs_drop(bcx.tcx(), ty) {
return
} else {
drop::drop_fill(bcx, lvalue.llval, ty);
}
}
}
}
}

View File

@ -42,10 +42,14 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
rvalue);
match *rvalue {
mir::Rvalue::Use(ref operand) => {
self.trans_operand_into(&bcx, dest.llval, operand);
bcx
}
mir::Rvalue::Use(ref operand) => {
let tr_operand = self.trans_operand(&bcx, operand);
// FIXME: consider not copying constants through the stack. (fixable by translating
// constants into OperandValue::Ref; why don't we do that yet, if we don't?)
self.store_operand(&bcx, dest.llval, tr_operand);
self.set_operand_dropped(&bcx, operand);
bcx
}
mir::Rvalue::Cast(mir::CastKind::Unsize, ref operand, cast_ty) => {
if common::type_is_fat_ptr(bcx.tcx(), cast_ty) {
@ -89,15 +93,17 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
}
mir::Rvalue::Repeat(ref elem, ref count) => {
let elem = self.trans_operand(&bcx, elem);
let tr_elem = self.trans_operand(&bcx, elem);
let size = self.trans_constval(&bcx, &count.value, count.ty).immediate();
bcx.map_block(|block| {
let bcx = bcx.map_block(|block| {
let base = expr::get_dataptr(block, dest.llval);
tvec::iter_vec_raw(block, base, elem.ty, size, |block, llslot, _| {
self.store_operand_direct(block, llslot, elem);
tvec::iter_vec_raw(block, base, tr_elem.ty, size, |block, llslot, _| {
self.store_operand_direct(block, llslot, tr_elem);
block
})
})
});
self.set_operand_dropped(&bcx, elem);
bcx
}
mir::Rvalue::Aggregate(ref kind, ref operands) => {
@ -117,6 +123,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
adt::trans_field_ptr(bcx, &repr, val, disr, i)
});
self.store_operand(&bcx, lldest_i, op);
self.set_operand_dropped(&bcx, operand);
}
}
},
@ -130,6 +137,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
// not be structs but arrays.
let dest = bcx.gepi(dest.llval, &[0, i]);
self.store_operand(&bcx, dest, op);
self.set_operand_dropped(&bcx, operand);
}
}
}
@ -179,11 +187,6 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
assert!(rvalue_creates_operand(rvalue), "cannot trans {:?} to operand", rvalue);
match *rvalue {
mir::Rvalue::Use(ref operand) => {
let operand = self.trans_operand(&bcx, operand);
(bcx, operand)
}
mir::Rvalue::Cast(ref kind, ref operand, cast_ty) => {
let operand = self.trans_operand(&bcx, operand);
debug!("cast operand is {}", operand.repr(&bcx));
@ -426,6 +429,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
(bcx, operand)
}
mir::Rvalue::Use(..) |
mir::Rvalue::Repeat(..) |
mir::Rvalue::Aggregate(..) |
mir::Rvalue::Slice { .. } |
@ -543,7 +547,6 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
pub fn rvalue_creates_operand<'tcx>(rvalue: &mir::Rvalue<'tcx>) -> bool {
match *rvalue {
mir::Rvalue::Use(..) | // (*)
mir::Rvalue::Ref(..) |
mir::Rvalue::Len(..) |
mir::Rvalue::Cast(..) | // (*)
@ -551,6 +554,7 @@ pub fn rvalue_creates_operand<'tcx>(rvalue: &mir::Rvalue<'tcx>) -> bool {
mir::Rvalue::UnaryOp(..) |
mir::Rvalue::Box(..) =>
true,
mir::Rvalue::Use(..) | // (**)
mir::Rvalue::Repeat(..) |
mir::Rvalue::Aggregate(..) |
mir::Rvalue::Slice { .. } |
@ -559,4 +563,7 @@ pub fn rvalue_creates_operand<'tcx>(rvalue: &mir::Rvalue<'tcx>) -> bool {
}
// (*) this is only true if the type is suitable
// (**) we need to zero out the source operand after moving, so we are restricted to either
// ensuring all users of `Use` zero it out themselves, or not allowing an operand to be
// “created” for it.
}

View File

@ -1167,3 +1167,8 @@ LLVMRustBuildInvoke(LLVMBuilderRef B,
return LLVMBuildInvoke(B, Fn, Args, NumArgs, Then, Catch, Name);
}
#endif
extern "C" void LLVMRustPositionBuilderAtStart(LLVMBuilderRef B, LLVMBasicBlockRef BB) {
auto point = unwrap(BB)->getFirstInsertionPt();
unwrap(B)->SetInsertPoint(unwrap(BB), point);
}

View File

@ -0,0 +1,42 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
// error-pattern:drop 1
// error-pattern:drop 2
use std::io::{self, Write};
/// Structure which will not allow itself to be dropped twice.
struct Droppable<'a>(&'a mut bool, u32);
impl<'a> Drop for Droppable<'a> {
fn drop(&mut self) {
if *self.0 {
writeln!(io::stderr(), "{} dropped twice", self.1);
::std::process::exit(1);
}
writeln!(io::stderr(), "drop {}", self.1);
*self.0 = true;
}
}
#[rustc_mir]
fn mir(){
let (mut xv, mut yv) = (false, false);
let x = Droppable(&mut xv, 1);
let y = Droppable(&mut yv, 2);
let mut z = x;
let k = y;
z = k;
}
fn main() {
mir();
panic!();
}

View File

@ -0,0 +1,40 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
// error-pattern:drop 1
use std::io::{self, Write};
/// Structure which will not allow itself to be dropped twice.
struct Droppable<'a>(&'a mut bool, u32);
impl<'a> Drop for Droppable<'a> {
fn drop(&mut self) {
if *self.0 {
writeln!(io::stderr(), "{} dropped twice", self.1);
::std::process::exit(1);
}
writeln!(io::stderr(), "drop {}", self.1);
*self.0 = true;
}
}
#[rustc_mir]
fn mir<'a>(d: Droppable<'a>){
loop {
let x = d;
break;
}
}
fn main() {
let mut xv = false;
mir(Droppable(&mut xv, 1));
panic!();
}

View File

@ -0,0 +1,45 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
// error-pattern:unwind happens
// error-pattern:drop 3
// error-pattern:drop 2
// error-pattern:drop 1
use std::io::{self, Write};
/// Structure which will not allow itself to be dropped twice.
struct Droppable<'a>(&'a mut bool, u32);
impl<'a> Drop for Droppable<'a> {
fn drop(&mut self) {
if *self.0 {
writeln!(io::stderr(), "{} dropped twice", self.1);
::std::process::exit(1);
}
writeln!(io::stderr(), "drop {}", self.1);
*self.0 = true;
}
}
fn may_panic<'a>() -> Droppable<'a> {
panic!("unwind happens");
}
#[rustc_mir]
fn mir<'a>(d: Droppable<'a>){
let (mut a, mut b) = (false, false);
let y = Droppable(&mut a, 2);
let x = [Droppable(&mut b, 1), y, d, may_panic()];
}
fn main() {
let mut c = false;
mir(Droppable(&mut c, 3));
}

View File

@ -0,0 +1,39 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that a fat pointer can be moved and that its destructor runs exactly once afterwards.
#![feature(braced_empty_structs)]
#![feature(rustc_attrs)]
use std::sync::atomic;
use std::sync::atomic::Ordering::SeqCst;
static COUNTER: atomic::AtomicUsize = atomic::ATOMIC_USIZE_INIT;
struct DropMe {
}
impl Drop for DropMe {
fn drop(&mut self) {
COUNTER.fetch_add(1, SeqCst);
}
}
#[rustc_mir]
fn fat_ptr_move_then_drop(a: Box<[DropMe]>) {
let b = a;
}
fn main() {
let a: Box<[DropMe]> = Box::new([DropMe { }]);
fat_ptr_move_then_drop(a);
assert_eq!(COUNTER.load(SeqCst), 1);
}