Rollup merge of #30823 - pnkfelix:put-back-alloca-zeroing-for-issue-30530, r=dotdash

Put back alloca zeroing for issues #29092, #30018, #30530; inject zeroing for #30822.

----

Background context: `fn alloca_zeroed` was removed in PR #22969, so we haven't been "zero'ing" (\*) the allocas since at least that point, but the logic behind that PR seems sound, so it's not entirely obvious how *long* the underlying bug has actually been present. In other words, I have not yet done a survey to see when the `alloc_ty` and `lvalue_scratch_datum` calls that should have been "zero'ing" their allocas were introduced.
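
To illustrate what marking an alloca as "dropped" buys us, here is a toy model of the embedded drop-flag scheme (the struct, constants, and byte values below are invented for the illustration; the real marker is `adt::DTOR_DONE`, which the new `memfill` call in the diff uses): scheduled cleanup skips the destructor only when it sees the marker, so uninitialized stack garbage would otherwise look like a live value and get destructed.

```rust
// Toy model only: `Slot`, `DROPPED`, and `LIVE` are invented names.
const DROPPED: u8 = 0x1d; // stand-in for the real "dropped" marker byte
const LIVE: u8 = 0x1e;    // stand-in for "holds a live value"

struct Slot {
    flag: u8, // models the hidden drop-flag state of an alloca
}

fn scheduled_cleanup(slot: &mut Slot) {
    // Drop glue skips the destructor only when it sees the marker; any
    // other bit pattern (including leftover stack garbage) looks like a
    // live value and gets destructed.
    if slot.flag != DROPPED {
        println!("running destructor");
        slot.flag = DROPPED;
    }
}

fn main() {
    // Pre-filling the alloca with the marker makes the scheduled
    // cleanup a no-op until the populate code actually stores a value.
    let mut scratch = Slot { flag: DROPPED };
    scheduled_cleanup(&mut scratch); // nothing to do

    scratch.flag = LIVE;             // "populate" ran
    scheduled_cleanup(&mut scratch); // destructor runs exactly once
}
```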

----

I first fixed #30018, then decided to do a survey of `alloc_ty` calls to see if they needed similar treatment, which quickly led to a rediscovery of #30530.

While making the regression test for the latter, I discovered #30822, which is a slightly different bug (in terms of where the "zero'ing" needs to go), but still relevant.
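
To make the hazard concrete, here is a condensed version of CASE 1 from the run-pass test added in this PR (the asserted drop order is what that test currently expects; it may change, see RFC issue 744):

```rust
use std::cell::RefCell;

struct D<'a>(&'a RefCell<Vec<i32>>, i32);

impl<'a> Drop for D<'a> {
    fn drop(&mut self) {
        self.0.borrow_mut().push(self.1);
    }
}

fn main() {
    let log = &RefCell::new(Vec::new());
    loop {
        // The scratch slot for the array is alloca'ed up front; the
        // `break` fires before the fourth element is ever written, so
        // the scheduled cleanup must not treat that slot as live.
        // Pre-marking the alloca as "dropped" is what makes this sound.
        let _r = [D(log, 0), D(log, 1), { D(log, 2); break; }, D(log, 3)];
    }
    // The temporary D(log, 2) and the two constructed elements each
    // drop exactly once; nothing is dropped for the never-built slot.
    assert_eq!(&log.borrow()[..], &[2, 1, 0]);
}
```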

I haven't finished the aforementioned survey of `fn alloc_ty` calls, but I decided to put this up for review in its current state (mainly to see whether my attempt to force developers to include a justification for passing `Uninit` can fly, or whether I should abandon that approach).
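
For reference, a minimal standalone sketch of the API shape in question (names and the printed behavior are simplified stand-ins; the real `InitAlloca` and `alloc_ty_init` live in `trans::base` and appear in the diff below):

```rust
/// Simplified stand-in for trans::base::InitAlloca.
#[derive(Copy, Clone, Debug)]
enum InitAlloca {
    /// Mark the slot's drop state as "dropped" at allocation time.
    Dropped,
    /// Skip that pre-fill; the caller must say why that is sound
    /// (e.g. the populate code dominates the cleanup, or the type
    /// has no destructor), so every call site can be audited.
    Uninit(&'static str),
}

/// Stand-in for alloc_ty_init: the justification travels with the
/// request and could even be emitted as a comment next to the alloca.
fn alloc_scratch(init: InitAlloca) {
    match init {
        InitAlloca::Dropped => println!("alloca, then memfill with DTOR_DONE"),
        InitAlloca::Uninit(why) => println!("bare alloca ({})", why),
    }
}

fn main() {
    alloc_scratch(InitAlloca::Dropped);
    alloc_scratch(InitAlloca::Uninit("populate dominates cleanup"));
}
```

The `&'static str` payload has no runtime role; it just forces every `Uninit` call site to write down why skipping the pre-fill is safe.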

----

(*): I am putting quotation marks around "zero'ing" because we no longer use zero as our "dropped" marker value.

Fix #29092
Fix #30018
Fix #30530
Fix #30822
Commit 9036c82781 by Manish Goregaokar, 2016-01-14 04:22:19 +05:30
10 changed files with 392 additions and 44 deletions


@@ -1760,6 +1760,9 @@ fn mk_binding_alloca<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
let lvalue = Lvalue::new_with_hint(caller_name, bcx, p_id, HintKind::DontZeroJustUse);
let datum = Datum::new(llval, var_ty, lvalue);
debug!("mk_binding_alloca cleanup_scope={:?} llval={} var_ty={:?}",
cleanup_scope, bcx.ccx().tn().val_to_string(llval), var_ty);
// Subtle: be sure that we *populate* the memory *before*
// we schedule the cleanup.
call_lifetime_start(bcx, llval);


@@ -55,6 +55,7 @@ use syntax::ast;
use syntax::attr;
use syntax::attr::IntType;
use trans::_match;
use trans::base::InitAlloca;
use trans::build::*;
use trans::cleanup;
use trans::cleanup::CleanupMethods;
@@ -1279,7 +1280,12 @@ pub fn trans_drop_flag_ptr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
let custom_cleanup_scope = fcx.push_custom_cleanup_scope();
let scratch = unpack_datum!(bcx, datum::lvalue_scratch_datum(
bcx, tcx.dtor_type(), "drop_flag",
cleanup::CustomScope(custom_cleanup_scope), (), |_, bcx, _| bcx
InitAlloca::Uninit("drop flag itself has no dtor"),
cleanup::CustomScope(custom_cleanup_scope), (), |_, bcx, _| {
debug!("no-op populate call for trans_drop_flag_ptr on dtor_type={:?}",
tcx.dtor_type());
bcx
}
));
bcx = fold_variants(bcx, r, val, |variant_cx, st, value| {
let ptr = struct_field_ptr(variant_cx, st, MaybeSizedValue::sized(value),


@@ -1147,48 +1147,63 @@ pub fn with_cond<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, val: ValueRef, f: F) ->
next_cx
}
pub fn call_lifetime_start(cx: Block, ptr: ValueRef) {
if cx.sess().opts.optimize == config::No {
enum Lifetime { Start, End }
// If LLVM lifetime intrinsic support is enabled (i.e. optimizations
// on), and `ptr` is nonzero-sized, then extracts the size of `ptr`
// and the intrinsic for `lt` and passes them to `emit`, which is in
// charge of generating code to call the passed intrinsic on whatever
// block of generated code is targeted for the intrinsic.
//
// If LLVM lifetime intrinsic support is disabled (i.e. optimizations
// off) or `ptr` is zero-sized, then no-op (does not call `emit`).
fn core_lifetime_emit<'blk, 'tcx, F>(ccx: &'blk CrateContext<'blk, 'tcx>,
ptr: ValueRef,
lt: Lifetime,
emit: F)
where F: FnOnce(&'blk CrateContext<'blk, 'tcx>, machine::llsize, ValueRef)
{
if ccx.sess().opts.optimize == config::No {
return;
}
let _icx = push_ctxt("lifetime_start");
let ccx = cx.ccx();
let _icx = push_ctxt(match lt {
Lifetime::Start => "lifetime_start",
Lifetime::End => "lifetime_end"
});
let size = machine::llsize_of_alloc(ccx, val_ty(ptr).element_type());
if size == 0 {
return;
}
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
let lifetime_start = ccx.get_intrinsic(&"llvm.lifetime.start");
Call(cx,
lifetime_start,
&[C_u64(ccx, size), ptr],
None,
DebugLoc::None);
let lifetime_intrinsic = ccx.get_intrinsic(match lt {
Lifetime::Start => "llvm.lifetime.start",
Lifetime::End => "llvm.lifetime.end"
});
emit(ccx, size, lifetime_intrinsic)
}
pub fn call_lifetime_start(cx: Block, ptr: ValueRef) {
core_lifetime_emit(cx.ccx(), ptr, Lifetime::Start, |ccx, size, lifetime_start| {
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
Call(cx,
lifetime_start,
&[C_u64(ccx, size), ptr],
None,
DebugLoc::None);
})
}
pub fn call_lifetime_end(cx: Block, ptr: ValueRef) {
if cx.sess().opts.optimize == config::No {
return;
}
let _icx = push_ctxt("lifetime_end");
let ccx = cx.ccx();
let size = machine::llsize_of_alloc(ccx, val_ty(ptr).element_type());
if size == 0 {
return;
}
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
let lifetime_end = ccx.get_intrinsic(&"llvm.lifetime.end");
Call(cx,
lifetime_end,
&[C_u64(ccx, size), ptr],
None,
DebugLoc::None);
core_lifetime_emit(cx.ccx(), ptr, Lifetime::End, |ccx, size, lifetime_end| {
let ptr = PointerCast(cx, ptr, Type::i8p(ccx));
Call(cx,
lifetime_end,
&[C_u64(ccx, size), ptr],
None,
DebugLoc::None);
})
}
// Generates code for resumption of unwind at the end of a landing pad.
@@ -1285,12 +1300,81 @@ fn memfill<'a, 'tcx>(b: &Builder<'a, 'tcx>, llptr: ValueRef, ty: Ty<'tcx>, byte:
None);
}
pub fn alloc_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, name: &str) -> ValueRef {
/// In general, when we create an scratch value in an alloca, the
/// creator may not know if the block (that initializes the scratch
/// with the desired value) actually dominates the cleanup associated
/// with the scratch value.
///
/// To deal with this, when we do an alloca (at the *start* of whole
/// function body), we optionally can also set the associated
/// dropped-flag state of the alloca to "dropped."
#[derive(Copy, Clone, Debug)]
pub enum InitAlloca {
/// Indicates that the state should have its associated drop flag
/// set to "dropped" at the point of allocation.
Dropped,
/// Indicates the value of the associated drop flag is irrelevant.
/// The embedded string literal is a programmer provided argument
/// for why. This is a safeguard forcing compiler devs to
/// document; it might be a good idea to also emit this as a
/// comment with the alloca itself when emitting LLVM output.ll.
Uninit(&'static str),
}
pub fn alloc_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
name: &str) -> ValueRef {
// pnkfelix: I do not know why alloc_ty meets the assumptions for
// passing Uninit, but it was never needed (even back when we had
// the original boolean `zero` flag on `lvalue_scratch_datum`).
alloc_ty_init(bcx, t, InitAlloca::Uninit("all alloc_ty are uninit"), name)
}
/// This variant of `fn alloc_ty` does not necessarily assume that the
/// alloca should be created with no initial value. Instead the caller
/// controls that assumption via the `init` flag.
///
/// Note that if the alloca *is* initialized via `init`, then we will
/// also inject an `llvm.lifetime.start` before that initialization
/// occurs, and thus callers should not call_lifetime_start
/// themselves. But if `init` says "uninitialized", then callers are
/// in charge of choosing where to call_lifetime_start and
/// subsequently populate the alloca.
///
/// (See related discussion on PR #30823.)
pub fn alloc_ty_init<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
t: Ty<'tcx>,
init: InitAlloca,
name: &str) -> ValueRef {
let _icx = push_ctxt("alloc_ty");
let ccx = bcx.ccx();
let ty = type_of::type_of(ccx, t);
assert!(!t.has_param_types());
alloca(bcx, ty, name)
match init {
InitAlloca::Dropped => alloca_dropped(bcx, t, name),
InitAlloca::Uninit(_) => alloca(bcx, ty, name),
}
}
pub fn alloca_dropped<'blk, 'tcx>(cx: Block<'blk, 'tcx>, ty: Ty<'tcx>, name: &str) -> ValueRef {
let _icx = push_ctxt("alloca_dropped");
let llty = type_of::type_of(cx.ccx(), ty);
if cx.unreachable.get() {
unsafe { return llvm::LLVMGetUndef(llty.ptr_to().to_ref()); }
}
let p = alloca(cx, llty, name);
let b = cx.fcx.ccx.builder();
b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
// This is just like `call_lifetime_start` (but the latter expects a
// Block, which we do not have for `alloca_insert_pt`).
core_lifetime_emit(cx.ccx(), p, Lifetime::Start, |ccx, size, lifetime_start| {
let ptr = b.pointercast(p, Type::i8p(ccx));
b.call(lifetime_start, &[C_u64(ccx, size), ptr], None);
});
memfill(&b, p, ty, adt::DTOR_DONE);
p
}
pub fn alloca(cx: Block, ty: Type, name: &str) -> ValueRef {
@@ -1639,6 +1723,8 @@ pub fn create_datums_for_fn_args<'a, 'tcx>(mut bcx: Block<'a, 'tcx>,
let fcx = bcx.fcx;
let arg_scope_id = cleanup::CustomScope(arg_scope);
debug!("create_datums_for_fn_args");
// Return an array wrapping the ValueRefs that we get from `get_param` for
// each argument into datums.
//
@@ -1650,6 +1736,7 @@ pub fn create_datums_for_fn_args<'a, 'tcx>(mut bcx: Block<'a, 'tcx>,
// This alloca should be optimized away by LLVM's mem-to-reg pass in
// the event it's not truly needed.
let mut idx = fcx.arg_offset() as c_uint;
let uninit_reason = InitAlloca::Uninit("fn_arg populate dominates dtor");
for (i, &arg_ty) in arg_tys.iter().enumerate() {
let arg_datum = if !has_tupled_arg || i < arg_tys.len() - 1 {
if type_of::arg_is_indirect(bcx.ccx(), arg_ty) &&
@@ -1669,9 +1756,12 @@ pub fn create_datums_for_fn_args<'a, 'tcx>(mut bcx: Block<'a, 'tcx>,
let data = get_param(fcx.llfn, idx);
let extra = get_param(fcx.llfn, idx + 1);
idx += 2;
unpack_datum!(bcx, datum::lvalue_scratch_datum(bcx, arg_ty, "",
unpack_datum!(bcx, datum::lvalue_scratch_datum(bcx, arg_ty, "", uninit_reason,
arg_scope_id, (data, extra),
|(data, extra), bcx, dst| {
debug!("populate call for create_datum_for_fn_args \
early fat arg, on arg[{}] ty={:?}", i, arg_ty);
Store(bcx, data, expr::get_dataptr(bcx, dst));
Store(bcx, extra, expr::get_meta(bcx, dst));
bcx
@@ -1684,9 +1774,16 @@ pub fn create_datums_for_fn_args<'a, 'tcx>(mut bcx: Block<'a, 'tcx>,
datum::lvalue_scratch_datum(bcx,
arg_ty,
"",
uninit_reason,
arg_scope_id,
tmp,
|tmp, bcx, dst| tmp.store_to(bcx, dst)))
|tmp, bcx, dst| {
debug!("populate call for create_datum_for_fn_args \
early thin arg, on arg[{}] ty={:?}", i, arg_ty);
tmp.store_to(bcx, dst)
}))
}
} else {
// FIXME(pcwalton): Reduce the amount of code bloat this is responsible for.
@@ -1696,11 +1793,14 @@ pub fn create_datums_for_fn_args<'a, 'tcx>(mut bcx: Block<'a, 'tcx>,
datum::lvalue_scratch_datum(bcx,
arg_ty,
"tupled_args",
uninit_reason,
arg_scope_id,
(),
|(),
mut bcx,
llval| {
llval| {
debug!("populate call for create_datum_for_fn_args \
tupled_args, on arg[{}] ty={:?}", i, arg_ty);
for (j, &tupled_arg_ty) in
tupled_arg_tys.iter().enumerate() {
let lldest = StructGEP(bcx, llval, j);


@@ -288,20 +288,31 @@ pub fn immediate_rvalue_bcx<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
return DatumBlock::new(bcx, immediate_rvalue(val, ty))
}
/// Allocates temporary space on the stack using alloca() and returns a by-ref Datum pointing to
/// it. The memory will be dropped upon exit from `scope`. The callback `populate` should
/// initialize the memory.
///
/// The flag `zero` indicates how the temporary space itself should be
/// initialized at the outset of the function; the only time that
/// `InitAlloca::Uninit` is a valid value for `zero` is when the
/// caller can prove that either (1.) the code injected by `populate`
/// onto `bcx` always dominates the end of `scope`, or (2.) the data
/// being allocated has no associated destructor.
pub fn lvalue_scratch_datum<'blk, 'tcx, A, F>(bcx: Block<'blk, 'tcx>,
ty: Ty<'tcx>,
name: &str,
zero: InitAlloca,
scope: cleanup::ScopeId,
arg: A,
populate: F)
-> DatumBlock<'blk, 'tcx, Lvalue> where
F: FnOnce(A, Block<'blk, 'tcx>, ValueRef) -> Block<'blk, 'tcx>,
{
let scratch = alloc_ty(bcx, ty, name);
// Very subtle: potentially initialize the scratch memory at point where it is alloca'ed.
// (See discussion at Issue 30530.)
let scratch = alloc_ty_init(bcx, ty, zero, name);
debug!("lvalue_scratch_datum scope={:?} scratch={} ty={:?}",
scope, bcx.ccx().tn().val_to_string(scratch), ty);
// Subtle. Populate the scratch memory *before* scheduling cleanup.
let bcx = populate(arg, bcx, scratch);
@@ -340,6 +351,8 @@ fn add_rvalue_clean<'a, 'tcx>(mode: RvalueMode,
scope: cleanup::ScopeId,
val: ValueRef,
ty: Ty<'tcx>) {
debug!("add_rvalue_clean scope={:?} val={} ty={:?}",
scope, fcx.ccx.tn().val_to_string(val), ty);
match mode {
ByValue => { fcx.schedule_drop_immediate(scope, val, ty); }
ByRef => {
@@ -496,9 +509,13 @@ impl<'tcx> Datum<'tcx, Rvalue> {
ByValue => {
lvalue_scratch_datum(
bcx, self.ty, name, scope, self,
bcx, self.ty, name, InitAlloca::Dropped, scope, self,
|this, bcx, llval| {
call_lifetime_start(bcx, llval);
debug!("populate call for Datum::to_lvalue_datum_in_scope \
self.ty={:?}", this.ty);
// do not call_lifetime_start here; the
// `InitAlloc::Dropped` will start scratch
// value's lifetime at open of function body.
let bcx = this.store_to(bcx, llval);
bcx.fcx.schedule_lifetime_end(scope, llval);
bcx


@@ -1487,6 +1487,8 @@ pub fn trans_adt<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
};
debug!("trans_adt");
// This scope holds intermediates that must be cleaned should
// panic occur before the ADT as a whole is ready.
let custom_cleanup_scope = fcx.push_custom_cleanup_scope();


@@ -111,8 +111,15 @@ pub fn trans_slice_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Always create an alloca even if zero-sized, to preserve
// the non-null invariant of the inner slice ptr
let llfixed = base::alloc_ty(bcx, fixed_ty, "");
call_lifetime_start(bcx, llfixed);
let llfixed;
// Issue 30018: ensure state is initialized as dropped if necessary.
if fcx.type_needs_drop(vt.unit_ty) {
llfixed = base::alloc_ty_init(bcx, fixed_ty, InitAlloca::Dropped, "");
} else {
let uninit = InitAlloca::Uninit("fcx says vt.unit_ty is non-drop");
llfixed = base::alloc_ty_init(bcx, fixed_ty, uninit, "");
call_lifetime_start(bcx, llfixed);
};
if count > 0 {
// Arrange for the backing array to be cleaned up.
@@ -212,8 +219,8 @@ fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
bcx = expr::trans_into(bcx, &**element,
SaveIn(lleltptr));
let scope = cleanup::CustomScope(temp_scope);
fcx.schedule_lifetime_end(scope, lleltptr);
fcx.schedule_drop_mem(scope, lleltptr, vt.unit_ty, None);
// Issue #30822: mark memory as dropped after running destructor
fcx.schedule_drop_and_fill_mem(scope, lleltptr, vt.unit_ty, None);
}
fcx.pop_custom_cleanup_scope(temp_scope);
}


@@ -0,0 +1,35 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for Issue #29092.
//
// (Possibly redundant with regression test run-pass/issue-30530.rs)
use self::Term::*;
#[derive(Clone)]
pub enum Term {
Dummy,
A(Box<Term>),
B(Box<Term>),
}
// a small-step evaluator
pub fn small_eval(v: Term) -> Term {
match v {
A(t) => *t.clone(),
B(t) => *t.clone(),
_ => Dummy,
}
}
fn main() {
small_eval(Dummy);
}


@@ -0,0 +1,111 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// More thorough regression test for Issues #30018 and #30822. This
// attempts to explore different ways that array element construction
// (for both scratch arrays and non-scratch ones) interacts with
// breaks in the control-flow, in terms of the order of evaluation of
// the destructors (which may change; see RFC Issue 744) and the
// number of times that the destructor evaluates for each value (which
// should never exceed 1; this latter case is what #30822 is about).
use std::cell::RefCell;
struct D<'a>(&'a RefCell<Vec<i32>>, i32);
impl<'a> Drop for D<'a> {
fn drop(&mut self) {
println!("Dropping D({})", self.1);
(self.0).borrow_mut().push(self.1);
}
}
fn main() {
println!("Start");
break_during_elem();
break_after_whole();
println!("Finis");
}
fn break_during_elem() {
let log = &RefCell::new(Vec::new());
// CASE 1: Fixed-size array itself is stored in _r slot.
loop {
let _r = [D(log, 10),
D(log, 11),
{ D(log, 12); break; },
D(log, 13)];
}
assert_eq!(&log.borrow()[..], &[12, 11, 10]);
log.borrow_mut().clear();
// CASE 2: Slice (borrow of array) is stored in _r slot.
// This is the case that is actually being reported in #30018.
loop {
let _r = &[D(log, 20),
D(log, 21),
{ D(log, 22); break; },
D(log, 23)];
}
assert_eq!(&log.borrow()[..], &[22, 21, 20]);
log.borrow_mut().clear();
// CASE 3: (Borrow of) slice-index of array is stored in _r slot.
loop {
let _r = &[D(log, 30),
D(log, 31),
{ D(log, 32); break; },
D(log, 33)][..];
}
assert_eq!(&log.borrow()[..], &[32, 31, 30]);
log.borrow_mut().clear();
}
// The purpose of these functions is to test what happens when we
// panic after an array has been constructed in its entirety.
//
// It is meant to act as proof that we still need to continue
// scheduling the destruction of an array even after we've scheduled
// drop for its elements during construction; the latter is tested by
// `fn break_during_elem()`.
fn break_after_whole() {
let log = &RefCell::new(Vec::new());
// CASE 1: Fixed-size array itself is stored in _r slot.
loop {
let _r = [D(log, 10),
D(log, 11),
D(log, 12)];
break;
}
assert_eq!(&log.borrow()[..], &[10, 11, 12]);
log.borrow_mut().clear();
// CASE 2: Slice (borrow of array) is stored in _r slot.
loop {
let _r = &[D(log, 20),
D(log, 21),
D(log, 22)];
break;
}
assert_eq!(&log.borrow()[..], &[20, 21, 22]);
log.borrow_mut().clear();
// CASE 3: (Borrow of) slice-index of array is stored in _r slot.
loop {
let _r = &[D(log, 30),
D(log, 31),
D(log, 32)][..];
break;
}
assert_eq!(&log.borrow()[..], &[30, 31, 32]);
log.borrow_mut().clear();
}


@@ -0,0 +1,32 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for Issue #30018. This is very similar to the
// original reported test, except that the panic is wrapped in a
// spawned thread to isolate the expected error result from the
// SIGTRAP injected by the drop-flag consistency checking.
struct Foo;
impl Drop for Foo {
fn drop(&mut self) {}
}
fn foo() -> Foo {
panic!();
}
fn main() {
use std::thread;
let handle = thread::spawn(|| {
let _ = &[foo()];
});
let _ = handle.join();
}


@@ -0,0 +1,35 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for Issue #30530: alloca's created for storing
// intermediate scratch values during brace-less match arms need to be
// initialized with their drop-flag set to "dropped" (or else we end
// up running the destructors on garbage data at the end of the
// function).
pub enum Handler {
Default,
#[allow(dead_code)]
Custom(*mut Box<Fn()>),
}
fn main() {
take(Handler::Default, Box::new(main));
}
#[inline(never)]
pub fn take(h: Handler, f: Box<Fn()>) -> Box<Fn()> {
unsafe {
match h {
Handler::Custom(ptr) => *Box::from_raw(ptr),
Handler::Default => f,
}
}
}