Auto merge of #55716 - RalfJung:escape-to-raw, r=oli-obk

Add escape-to-raw MIR statement

Add a new MIR "ghost state statement": Escaping a ptr to permit raw accesses.

~~This includes #55549, [click here](https://github.com/RalfJung/rust/compare/miri-visitor...RalfJung:escape-to-raw) for just the new commits.~~
This commit is contained in:
bors 2018-11-15 01:12:01 +00:00
commit 4ec0ba9545
31 changed files with 267 additions and 163 deletions

View File

@ -220,6 +220,9 @@ fn hash_stable<W: StableHasherResult>(&self,
mir::StatementKind::EndRegion(ref region_scope) => {
region_scope.hash_stable(hcx, hasher);
}
mir::StatementKind::EscapeToRaw(ref place) => {
place.hash_stable(hcx, hasher);
}
mir::StatementKind::Retag { fn_entry, ref place } => {
fn_entry.hash_stable(hcx, hasher);
place.hash_stable(hcx, hasher);

View File

@ -1782,6 +1782,13 @@ pub enum StatementKind<'tcx> {
place: Place<'tcx>,
},
/// Escape the given reference to a raw pointer, so that it can be accessed
/// without precise provenance tracking. These statements are currently only interpreted
/// by miri and only generated when "-Z mir-emit-retag" is passed.
/// See <https://internals.rust-lang.org/t/stacked-borrows-an-aliasing-model-for-rust/8153/>
/// for more details.
EscapeToRaw(Operand<'tcx>),
/// Mark one terminating point of a region scope (i.e. static region).
/// (The starting point(s) arise implicitly from borrows.)
EndRegion(region::Scope),
@ -1843,6 +1850,7 @@ fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
EndRegion(ref ce) => write!(fmt, "EndRegion({})", ty::ReScope(*ce)),
Retag { fn_entry, ref place } =>
write!(fmt, "Retag({}{:?})", if fn_entry { "[fn entry] " } else { "" }, place),
EscapeToRaw(ref place) => write!(fmt, "EscapeToRaw({:?})", place),
StorageLive(ref place) => write!(fmt, "StorageLive({:?})", place),
StorageDead(ref place) => write!(fmt, "StorageDead({:?})", place),
SetDiscriminant {
@ -3019,6 +3027,7 @@ impl<'tcx> TypeFoldable<'tcx> for StatementKind<'tcx> {
(StatementKind::StorageDead)(a),
(StatementKind::InlineAsm) { asm, outputs, inputs },
(StatementKind::Retag) { fn_entry, place },
(StatementKind::EscapeToRaw)(place),
(StatementKind::EndRegion)(a),
(StatementKind::AscribeUserType)(a, v, b),
(StatementKind::Nop),

View File

@ -385,6 +385,9 @@ fn super_statement(&mut self,
location
);
}
StatementKind::EscapeToRaw(ref $($mutability)* op) => {
self.visit_operand(op, location);
}
StatementKind::StorageLive(ref $($mutability)* local) => {
self.visit_local(
local,
@ -1022,7 +1025,7 @@ pub enum MutatingUseContext<'tcx> {
/// f(&mut x.y);
///
Projection,
/// Retagging (updating the "Stacked Borrows" tag)
/// Retagging, a "Stacked Borrows" shadow state operation
Retag,
}

View File

@ -105,8 +105,9 @@ pub fn codegen_statement(&mut self,
bx
}
mir::StatementKind::FakeRead(..) |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::EndRegion(..) |
mir::StatementKind::Retag { .. } |
mir::StatementKind::EscapeToRaw { .. } |
mir::StatementKind::AscribeUserType(..) |
mir::StatementKind::Nop => bx,
}

View File

@ -599,6 +599,7 @@ fn visit_statement_entry(
StatementKind::Nop
| StatementKind::AscribeUserType(..)
| StatementKind::Retag { .. }
| StatementKind::EscapeToRaw { .. }
| StatementKind::StorageLive(..) => {
// `Nop`, `AscribeUserType`, `Retag`, `EscapeToRaw`, and `StorageLive` are irrelevant
// to borrow check.

View File

@ -137,6 +137,7 @@ fn visit_statement(&mut self,
StatementKind::Nop |
StatementKind::AscribeUserType(..) |
StatementKind::Retag { .. } |
StatementKind::EscapeToRaw { .. } |
StatementKind::StorageLive(..) => {
// `Nop`, `AscribeUserType`, `Retag`, `EscapeToRaw`, and `StorageLive` are irrelevant
// to borrow check.

View File

@ -1311,11 +1311,12 @@ fn check_stmt(&mut self, mir: &Mir<'tcx>, stmt: &Statement<'tcx>, location: Loca
}
}
StatementKind::FakeRead(..)
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::StorageLive(..)
| StatementKind::StorageDead(..)
| StatementKind::InlineAsm { .. }
| StatementKind::EndRegion(_)
| StatementKind::Retag { .. }
| StatementKind::EscapeToRaw { .. }
| StatementKind::Nop => {}
}
}

View File

@ -86,6 +86,9 @@ fn expr_as_place(
// region_scope=None so place indexes live forever. They are scalars so they
// do not need storage annotations, and they are often copied between
// places.
// Making this a *fresh* temporary also means we do not have to worry about
// the index changing later: Nothing will ever change this temporary.
// The "retagging" transformation (for Stacked Borrows) relies on this.
let idx = unpack!(block = this.as_temp(block, None, index, Mutability::Mut));
// bounds check:

View File

@ -351,7 +351,6 @@ impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx>
type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>;
const STATIC_KIND: Option<!> = None; // no copying of statics allowed
const ENABLE_PTR_TRACKING_HOOKS: bool = false; // we don't have provenance
#[inline(always)]
fn enforce_validity(_ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool {

View File

@ -339,6 +339,7 @@ fn statement_effect(&self, sets: &mut BlockSets<BorrowIndex>, location: Location
mir::StatementKind::SetDiscriminant { .. } |
mir::StatementKind::StorageLive(..) |
mir::StatementKind::Retag { .. } |
mir::StatementKind::EscapeToRaw { .. } |
mir::StatementKind::AscribeUserType(..) |
mir::StatementKind::Nop => {}

View File

@ -301,8 +301,9 @@ fn gather_statement(&mut self, stmt: &Statement<'tcx>) {
span_bug!(stmt.source_info.span,
"SetDiscriminant should not exist during borrowck");
}
StatementKind::EndRegion(_) |
StatementKind::EndRegion(..) |
StatementKind::Retag { .. } |
StatementKind::EscapeToRaw { .. } |
StatementKind::AscribeUserType(..) |
StatementKind::Nop => {}
}

View File

@ -44,28 +44,16 @@ pub fn cast(
}
Misc => {
let src_layout = src.layout;
let src = self.read_immediate(src)?;
let src = if M::ENABLE_PTR_TRACKING_HOOKS && src_layout.ty.is_region_ptr() {
// The only `Misc` casts on references are those creating raw pointers.
assert!(dest.layout.ty.is_unsafe_ptr());
// For the purpose of the "ptr tag hooks", treat this as creating
// a new, raw reference.
let place = self.ref_to_mplace(src)?;
self.create_ref(place, None)?
} else {
*src
};
if self.type_is_fat_ptr(src_layout.ty) {
match (src, self.type_is_fat_ptr(dest.layout.ty)) {
if self.type_is_fat_ptr(src.layout.ty) {
match (*src, self.type_is_fat_ptr(dest.layout.ty)) {
// pointers to extern types
(Immediate::Scalar(_),_) |
// slices and trait objects to other slices/trait objects
(Immediate::ScalarPair(..), true) => {
// No change to immediate
self.write_immediate(src, dest)?;
self.write_immediate(*src, dest)?;
}
// slices and trait objects to thin pointers (dropping the metadata)
(Immediate::ScalarPair(data, _), false) => {
@ -73,11 +61,11 @@ pub fn cast(
}
}
} else {
match src_layout.variants {
match src.layout.variants {
layout::Variants::Single { index } => {
if let Some(def) = src_layout.ty.ty_adt_def() {
if let Some(def) = src.layout.ty.ty_adt_def() {
// Cast from a univariant enum
assert!(src_layout.is_zst());
assert!(src.layout.is_zst());
let discr_val = def
.discriminant_for_variant(*self.tcx, index)
.val;
@ -90,8 +78,7 @@ pub fn cast(
layout::Variants::NicheFilling { .. } => {},
}
let src = src.to_scalar()?;
let dest_val = self.cast_scalar(src, src_layout, dest.layout)?;
let dest_val = self.cast_scalar(src.to_scalar()?, src.layout, dest.layout)?;
self.write_scalar(dest_val, dest)?;
}
}

View File

@ -201,8 +201,7 @@ pub fn hook_fn(
} else if Some(def_id) == self.tcx.lang_items().panic_fn() {
assert!(args.len() == 1);
// &(&'static str, &'static str, u32, u32)
let ptr = self.read_immediate(args[0])?;
let place = self.ref_to_mplace(ptr)?;
let place = self.deref_operand(args[0])?;
let (msg, file, line, col) = (
self.mplace_field(place, 0)?,
self.mplace_field(place, 1)?,
@ -210,9 +209,9 @@ pub fn hook_fn(
self.mplace_field(place, 3)?,
);
let msg_place = self.ref_to_mplace(self.read_immediate(msg.into())?)?;
let msg_place = self.deref_operand(msg.into())?;
let msg = Symbol::intern(self.read_str(msg_place)?);
let file_place = self.ref_to_mplace(self.read_immediate(file.into())?)?;
let file_place = self.deref_operand(file.into())?;
let file = Symbol::intern(self.read_str(file_place)?);
let line = self.read_scalar(line.into())?.to_u32()?;
let col = self.read_scalar(col.into())?.to_u32()?;
@ -221,17 +220,16 @@ pub fn hook_fn(
assert!(args.len() == 2);
// &'static str, &(&'static str, u32, u32)
let msg = args[0];
let ptr = self.read_immediate(args[1])?;
let place = self.ref_to_mplace(ptr)?;
let place = self.deref_operand(args[1])?;
let (file, line, col) = (
self.mplace_field(place, 0)?,
self.mplace_field(place, 1)?,
self.mplace_field(place, 2)?,
);
let msg_place = self.ref_to_mplace(self.read_immediate(msg.into())?)?;
let msg_place = self.deref_operand(msg.into())?;
let msg = Symbol::intern(self.read_str(msg_place)?);
let file_place = self.ref_to_mplace(self.read_immediate(file.into())?)?;
let file_place = self.deref_operand(file.into())?;
let file = Symbol::intern(self.read_str(file_place)?);
let line = self.read_scalar(line.into())?.to_u32()?;
let col = self.read_scalar(col.into())?.to_u32()?;

View File

@ -95,11 +95,6 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
/// that is added to the memory so that the work is not done twice.
const STATIC_KIND: Option<Self::MemoryKinds>;
/// As an optimization, you can prevent the pointer tracking hooks from ever being
/// called. You should only do this if you do not care about provenance tracking.
/// This controls the `tag_reference` and `tag_dereference` hooks.
const ENABLE_PTR_TRACKING_HOOKS: bool;
/// Whether to enforce the validity invariant
fn enforce_validity(ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool;
@ -181,18 +176,6 @@ fn tag_new_allocation(
kind: MemoryKind<Self::MemoryKinds>,
) -> EvalResult<'tcx, Pointer<Self::PointerTag>>;
/// Executed when evaluating the `&` operator: Creating a new reference.
/// This has the chance to adjust the tag. It should not change anything else!
/// `mutability` can be `None` in case a raw ptr is being created.
#[inline]
fn tag_reference(
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
place: MPlaceTy<'tcx, Self::PointerTag>,
_mutability: Option<hir::Mutability>,
) -> EvalResult<'tcx, Scalar<Self::PointerTag>> {
Ok(place.ptr)
}
/// Executed when evaluating the `*` operator: Following a reference.
/// This has the chance to adjust the tag. It should not change anything else!
/// `mutability` can be `None` in case a raw ptr is being dereferenced.
@ -205,7 +188,7 @@ fn tag_dereference(
Ok(place.ptr)
}
/// Execute a validation operation
/// Execute a retagging operation
#[inline]
fn retag(
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
@ -214,4 +197,13 @@ fn retag(
) -> EvalResult<'tcx> {
Ok(())
}
/// Execute an escape-to-raw operation
#[inline]
fn escape_to_raw(
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
_ptr: OpTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx> {
Ok(())
}
}

View File

@ -431,17 +431,6 @@ pub fn operand_downcast(
})
}
// Take an operand, representing a pointer, and dereference it to a place -- that
// will always be a MemPlace.
pub(super) fn deref_operand(
&self,
src: OpTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let val = self.read_immediate(src)?;
trace!("deref to {} on {:?}", val.layout.ty, *val);
Ok(self.ref_to_mplace(val)?)
}
pub fn operand_projection(
&self,
base: OpTy<'tcx, M::PointerTag>,

View File

@ -151,6 +151,16 @@ pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
// it now must be aligned.
self.to_scalar_ptr_align().0.to_ptr()
}
/// Turn a mplace into a (thin or fat) pointer, as a reference, pointing to the same space.
/// This is the inverse of `ref_to_mplace`.
#[inline(always)]
pub fn to_ref(self) -> Immediate<Tag> {
match self.meta {
None => Immediate::Scalar(self.ptr.into()),
Some(meta) => Immediate::ScalarPair(self.ptr.into(), meta.into()),
}
}
}
impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
@ -267,7 +277,9 @@ impl<'a, 'mir, 'tcx, Tag, M> EvalContext<'a, 'mir, 'tcx, M>
M::AllocExtra: AllocationExtra<Tag>,
{
/// Take a value, which represents a (thin or fat) reference, and make it a place.
/// Alignment is just based on the type. This is the inverse of `create_ref`.
/// Alignment is just based on the type. This is the inverse of `MemPlace::to_ref()`.
/// This does NOT call the "deref" machine hook, so it does NOT count as a
/// deref as far as Stacked Borrows is concerned. Use `deref_operand` for that!
pub fn ref_to_mplace(
&self,
val: ImmTy<'tcx, M::PointerTag>,
@ -275,42 +287,35 @@ pub fn ref_to_mplace(
let pointee_type = val.layout.ty.builtin_deref(true).unwrap().ty;
let layout = self.layout_of(pointee_type)?;
let align = layout.align;
let meta = val.to_meta()?;
let ptr = val.to_scalar_ptr()?;
let mplace = MemPlace { ptr, align, meta };
let mut mplace = MPlaceTy { mplace, layout };
// Pointer tag tracking might want to adjust the tag.
if M::ENABLE_PTR_TRACKING_HOOKS {
let mutbl = match val.layout.ty.sty {
// `builtin_deref` considers boxes immutable, that's useless for our purposes
ty::Ref(_, _, mutbl) => Some(mutbl),
ty::Adt(def, _) if def.is_box() => Some(hir::MutMutable),
ty::RawPtr(_) => None,
_ => bug!("Unexpected pointer type {}", val.layout.ty.sty),
};
mplace.mplace.ptr = M::tag_dereference(self, mplace, mutbl)?;
}
// Done
Ok(mplace)
let mplace = MemPlace {
ptr: val.to_scalar_ptr()?,
align: layout.align,
meta: val.to_meta()?,
};
Ok(MPlaceTy { mplace, layout })
}
/// Turn a mplace into a (thin or fat) pointer, as a reference, pointing to the same space.
/// This is the inverse of `ref_to_mplace`.
/// `mutbl` indicates whether we are create a shared or mutable ref, or a raw pointer (`None`).
pub fn create_ref(
&mut self,
mut place: MPlaceTy<'tcx, M::PointerTag>,
mutbl: Option<hir::Mutability>,
) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
// Pointer tag tracking might want to adjust the tag
if M::ENABLE_PTR_TRACKING_HOOKS {
place.mplace.ptr = M::tag_reference(self, place, mutbl)?
}
Ok(match place.meta {
None => Immediate::Scalar(place.ptr.into()),
Some(meta) => Immediate::ScalarPair(place.ptr.into(), meta.into()),
})
// Take an operand, representing a pointer, and dereference it to a place -- that
// will always be a MemPlace. Lives in `place.rs` because it creates a place.
// This calls the "deref" machine hook, and counts as a deref as far as
// Stacked Borrows is concerned.
pub fn deref_operand(
&self,
src: OpTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
let val = self.read_immediate(src)?;
trace!("deref to {} on {:?}", val.layout.ty, *val);
let mut place = self.ref_to_mplace(val)?;
// Pointer tag tracking might want to adjust the tag.
let mutbl = match val.layout.ty.sty {
// `builtin_deref` considers boxes immutable, that's useless for our purposes
ty::Ref(_, _, mutbl) => Some(mutbl),
ty::Adt(def, _) if def.is_box() => Some(hir::MutMutable),
ty::RawPtr(_) => None,
_ => bug!("Unexpected pointer type {}", val.layout.ty.sty),
};
place.mplace.ptr = M::tag_dereference(self, place, mutbl)?;
Ok(place)
}
/// Offset a pointer to project to a field. Unlike place_field, this is always

View File

@ -12,7 +12,7 @@
//!
//! The main entry point is the `step` method.
use rustc::{hir, mir};
use rustc::mir;
use rustc::ty::layout::LayoutOf;
use rustc::mir::interpret::{EvalResult, Scalar, PointerArithmetic};
@ -118,12 +118,17 @@ fn statement(&mut self, stmt: &mir::Statement<'tcx>) -> EvalResult<'tcx> {
// interpreter is solely intended for borrowck'ed code.
FakeRead(..) => {}
// Retagging.
// Stacked Borrows.
Retag { fn_entry, ref place } => {
let dest = self.eval_place(place)?;
M::retag(self, fn_entry, dest)?;
}
EscapeToRaw(ref op) => {
let op = self.eval_operand(op, None)?;
M::escape_to_raw(self, op)?;
}
// Statements we do not track.
EndRegion(..) => {}
AscribeUserType(..) => {}
@ -247,19 +252,10 @@ fn eval_rvalue_into_place(
)?;
}
Ref(_, borrow_kind, ref place) => {
Ref(_, _, ref place) => {
let src = self.eval_place(place)?;
let val = self.force_allocation(src)?;
let mutbl = match borrow_kind {
mir::BorrowKind::Mut { .. } |
mir::BorrowKind::Unique =>
hir::MutMutable,
mir::BorrowKind::Shared |
mir::BorrowKind::Shallow =>
hir::MutImmutable,
};
let val = self.create_ref(val, Some(mutbl))?;
self.write_immediate(val, dest)?;
self.write_immediate(val.to_ref(), dest)?;
}
NullaryOp(mir::NullOp::Box, _) => {

View File

@ -402,7 +402,7 @@ fn eval_fn_call(
ty::InstanceDef::Virtual(_, idx) => {
let ptr_size = self.pointer_size();
let ptr_align = self.tcx.data_layout.pointer_align;
let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
let ptr = self.deref_operand(args[0])?;
let vtable = ptr.vtable()?;
let fn_ptr = self.memory.read_ptr_sized(
vtable.offset(ptr_size * (idx as u64 + 3), self)?,
@ -447,10 +447,7 @@ fn drop_in_place(
};
let arg = OpTy {
op: Operand::Immediate(self.create_ref(
place,
None // this is a "raw reference"
)?),
op: Operand::Immediate(place.to_ref()),
layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
};

View File

@ -373,13 +373,10 @@ fn visit_primitive(&mut self, value: ImmTy<'tcx, M::PointerTag>) -> EvalResult<'
}
}
}
// Turn ptr into place.
// `ref_to_mplace` also calls the machine hook for (re)activating the tag,
// which in turn will (in full miri) check if the pointer is dereferencable.
let place = self.ecx.ref_to_mplace(value)?;
// Recursive checking
if let Some(ref mut ref_tracking) = self.ref_tracking {
assert!(self.const_mode, "We should only do recursie checking in const mode");
let place = self.ecx.ref_to_mplace(value)?;
if size != Size::ZERO {
// Non-ZST also have to be dereferencable
let ptr = try_validation!(place.ptr.to_ptr(),

View File

@ -223,6 +223,15 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
);
if let Some(..) = ty {
// The first argument (index 0), but add 1 for the return value.
let dropee_ptr = Place::Local(Local::new(1+0));
if tcx.sess.opts.debugging_opts.mir_emit_retag {
// We use raw ptr operations, better prepare the alias tracking for that
mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, Statement {
source_info,
kind: StatementKind::EscapeToRaw(Operand::Copy(dropee_ptr.clone())),
})
}
let patch = {
let param_env = tcx.param_env(def_id).with_reveal_all();
let mut elaborator = DropShimElaborator {
@ -231,7 +240,7 @@ fn build_drop_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
tcx,
param_env
};
let dropee = Place::Local(Local::new(1+0)).deref();
let dropee = dropee_ptr.deref();
let resume_block = elaborator.patch.resume_block();
elaborate_drops::elaborate_drop(
&mut elaborator,

View File

@ -20,42 +20,46 @@
pub struct AddRetag;
/// Determines whether this place is local: If it is part of a local variable.
/// We do not consider writes to pointers local, only writes that immediately assign
/// to a local variable.
/// One important property here is that evaluating the place immediately after
/// the assignment must produce the same place as what was used during the assignment.
fn is_local<'tcx>(
/// Determines whether this place is "stable": Whether, if we evaluate it again
/// after the assignment, we can be sure to obtain the same place value.
/// (Concurrent accesses by other threads are no problem as these are anyway non-atomic
/// copies. Data races are UB.)
fn is_stable<'tcx>(
place: &Place<'tcx>,
) -> bool {
use rustc::mir::Place::*;
match *place {
Local { .. } => true,
Promoted(_) |
Static(_) => false,
// Locals and statics have stable addresses, for sure
Local { .. } |
Promoted { .. } |
Static { .. } =>
true,
// Recurse for projections
Projection(ref proj) => {
match proj.elem {
ProjectionElem::Deref |
ProjectionElem::Index(_) =>
// Which place these point to depends on external circumstances
// (a local storing the array index, the current value of
// the projection base), so we stop tracking here.
// Which place this evaluates to can change with any memory write,
// so cannot assume this to be stable.
ProjectionElem::Deref =>
false,
// Array indices are interesting, but MIR building generates a *fresh*
// temporary for every array access, so the index cannot be changed as
// a side-effect.
ProjectionElem::Index { .. } |
// The rest is completely boring, they just offset by a constant.
ProjectionElem::Field { .. } |
ProjectionElem::ConstantIndex { .. } |
ProjectionElem::Subslice { .. } |
ProjectionElem::Downcast { .. } =>
// These just offset by a constant, entirely independent of everything else.
is_local(&proj.base),
is_stable(&proj.base),
}
}
}
}
/// Determine whether this type has a reference in it, recursing below compound types but
/// Determine whether this type may have a reference in it, recursing below compound types but
/// not below references.
fn has_reference<'a, 'gcx, 'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
fn may_have_reference<'a, 'gcx, 'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
match ty.sty {
// Primitive types that are not references
ty::Bool | ty::Char |
@ -68,12 +72,12 @@ fn has_reference<'a, 'gcx, 'tcx>(ty: Ty<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> b
ty::Adt(..) if ty.is_box() => true,
// Compound types
ty::Array(ty, ..) | ty::Slice(ty) =>
has_reference(ty, tcx),
may_have_reference(ty, tcx),
ty::Tuple(tys) =>
tys.iter().any(|ty| has_reference(ty, tcx)),
tys.iter().any(|ty| may_have_reference(ty, tcx)),
ty::Adt(adt, substs) =>
adt.variants.iter().any(|v| v.fields.iter().any(|f|
has_reference(f.ty(tcx, substs), tcx)
may_have_reference(f.ty(tcx, substs), tcx)
)),
// Conservative fallback
_ => true,
@ -92,7 +96,9 @@ fn run_pass<'a, 'tcx>(&self,
let (span, arg_count) = (mir.span, mir.arg_count);
let (basic_blocks, local_decls) = mir.basic_blocks_and_local_decls_mut();
let needs_retag = |place: &Place<'tcx>| {
is_local(place) && has_reference(place.ty(&*local_decls, tcx).to_ty(tcx), tcx)
// FIXME: Instead of giving up for unstable places, we should introduce
// a temporary and retag on that.
is_stable(place) && may_have_reference(place.ty(&*local_decls, tcx).to_ty(tcx), tcx)
};
// PART 1
@ -118,23 +124,29 @@ fn run_pass<'a, 'tcx>(&self,
}
// PART 2
// Retag return values of functions.
// Retag return values of functions. Also escape-to-raw the argument of `drop`.
// We collect the return destinations because we cannot mutate while iterating.
let mut returns: Vec<(SourceInfo, Place<'tcx>, BasicBlock)> = Vec::new();
for block_data in basic_blocks.iter_mut() {
match block_data.terminator {
Some(Terminator { kind: TerminatorKind::Call { ref destination, .. },
source_info }) => {
match block_data.terminator().kind {
TerminatorKind::Call { ref destination, .. } => {
// Remember the return destination for later
if let Some(ref destination) = destination {
if needs_retag(&destination.0) {
returns.push((source_info, destination.0.clone(), destination.1));
returns.push((
block_data.terminator().source_info,
destination.0.clone(),
destination.1,
));
}
}
}
TerminatorKind::Drop { .. } |
TerminatorKind::DropAndReplace { .. } => {
// `Drop` is also a call, but it doesn't return anything so we are good.
}
_ => {
// Not a block ending in a Call -> ignore.
// `Drop` is also a call, but it doesn't return anything so we are good.
}
}
}
@ -153,21 +165,43 @@ fn run_pass<'a, 'tcx>(&self,
// iterate backwards using indices.
for i in (0..block_data.statements.len()).rev() {
match block_data.statements[i].kind {
// Assignments can make values obtained elsewhere "local".
// We could try to be smart here and e.g. only retag if the assignment
// loaded from memory, but that seems risky: We might miss a subtle corner
// case.
StatementKind::Assign(ref place, box Rvalue::Use(..))
if needs_retag(place) => {
// If we are casting *from* a reference, we may have to escape-to-raw.
StatementKind::Assign(_, box Rvalue::Cast(
CastKind::Misc,
ref src,
dest_ty,
)) => {
let src_ty = src.ty(&*local_decls, tcx);
if src_ty.is_region_ptr() {
// The only `Misc` casts on references are those creating raw pointers.
assert!(dest_ty.is_unsafe_ptr());
// Insert escape-to-raw before the cast. We are not concerned
// with stability here: Our EscapeToRaw will not change the value
// that the cast will then use.
// `src` might be a "move", but we rely on this not actually moving
// but just doing a memcpy. It is crucial that we do EscapeToRaw
// on the src because we need it with its original type.
let source_info = block_data.statements[i].source_info;
block_data.statements.insert(i, Statement {
source_info,
kind: StatementKind::EscapeToRaw(src.clone()),
});
}
}
// Assignments of reference or ptr type are the ones where we may have
// to update tags. This includes `x = &[mut] ...` and hence
// we also retag after taking a reference!
StatementKind::Assign(ref place, _) if needs_retag(place) => {
// Insert a retag after the assignment.
let source_info = block_data.statements[i].source_info;
block_data.statements.insert(i+1,Statement {
block_data.statements.insert(i+1, Statement {
source_info,
kind: StatementKind::Retag { fn_entry: false, place: place.clone() },
});
}
// Do nothing for the rest
_ => {},
}
};
}
}
}

View File

@ -114,6 +114,7 @@ fn visit_statement(&mut self,
StatementKind::StorageDead(..) |
StatementKind::EndRegion(..) |
StatementKind::Retag { .. } |
StatementKind::EscapeToRaw { .. } |
StatementKind::AscribeUserType(..) |
StatementKind::Nop => {
// safe (at least as emitted during MIR construction)

View File

@ -685,6 +685,13 @@ fn create_generator_drop_shim<'a, 'tcx>(
is_block_tail: None,
is_user_variable: None,
};
if tcx.sess.opts.debugging_opts.mir_emit_retag {
// Alias tracking must know we changed the type
mir.basic_blocks_mut()[START_BLOCK].statements.insert(0, Statement {
source_info,
kind: StatementKind::EscapeToRaw(Operand::Copy(Place::Local(self_arg()))),
})
}
no_landing_pads(tcx, &mut mir);

View File

@ -1170,6 +1170,7 @@ fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>, locat
StatementKind::InlineAsm {..} |
StatementKind::EndRegion(_) |
StatementKind::Retag { .. } |
StatementKind::EscapeToRaw { .. } |
StatementKind::AscribeUserType(..) |
StatementKind::Nop => {}
}

View File

@ -242,6 +242,7 @@ fn check_statement(
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::EscapeToRaw { .. }
| StatementKind::EndRegion(_)
| StatementKind::AscribeUserType(..)
| StatementKind::Nop => Ok(()),

View File

@ -65,10 +65,11 @@ fn is_nop_landing_pad(
// turn a landing pad to a non-nop
}
StatementKind::Assign(_, _) |
StatementKind::Assign { .. } |
StatementKind::SetDiscriminant { .. } |
StatementKind::InlineAsm { .. } |
StatementKind::Retag { .. } => {
StatementKind::Retag { .. } |
StatementKind::EscapeToRaw { .. } => {
return false;
}
}

View File

@ -163,6 +163,7 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir::StatementKind::InlineAsm { .. } |
mir::StatementKind::EndRegion(_) |
mir::StatementKind::Retag { .. } |
mir::StatementKind::EscapeToRaw { .. } |
mir::StatementKind::AscribeUserType(..) |
mir::StatementKind::Nop => continue,
mir::StatementKind::SetDiscriminant{ .. } =>

View File

@ -85,6 +85,7 @@ fn visit_statement(&mut self,
StatementKind::FakeRead(..) => "StatementKind::FakeRead",
StatementKind::EndRegion(..) => "StatementKind::EndRegion",
StatementKind::Retag { .. } => "StatementKind::Retag",
StatementKind::EscapeToRaw { .. } => "StatementKind::EscapeToRaw",
StatementKind::SetDiscriminant { .. } => "StatementKind::SetDiscriminant",
StatementKind::StorageLive(..) => "StatementKind::StorageLive",
StatementKind::StorageDead(..) => "StatementKind::StorageDead",

View File

@ -0,0 +1,43 @@
// Retagging (from Stacked Borrows) relies on the array index being a fresh
// temporary, so that side-effects cannot change it.
// Test that this is indeed the case.
unsafe fn foo(z: *mut usize) -> u32 {
*z = 2;
99
}
fn main() {
let mut x = [42, 43, 44];
let mut y = 1;
let z: *mut usize = &mut y;
x[y] = unsafe { foo(z) };
}
// END RUST SOURCE
// START rustc.main.EraseRegions.after.mir
// bb0: {
// ...
// _6 = &mut _2;
// _5 = &mut (*_6);
// _4 = move _5 as *mut usize (Misc);
// _3 = move _4;
// ...
// _8 = _3;
// _7 = const foo(move _8) -> bb1;
// }
//
// bb1: {
// ...
// _9 = _2;
// _10 = Len(_1);
// _11 = Lt(_9, _10);
// assert(move _11, "index out of bounds: the len is move _10 but the index is _9") -> bb2;
// }
//
// bb2: {
// _1[_9] = move _7;
// ...
// return;
// }
// END rustc.main.EraseRegions.after.mir

View File

@ -32,6 +32,8 @@ fn bar() -> bool {
// bb0: {
// ...
// Retag(_3);
// ...
// Retag(_3);
// Retag(_6);
// StorageLive(_9);
// _9 = (*_3);

View File

@ -26,7 +26,9 @@ fn main() {
{
let v = Test(0).foo(&mut x); // just making sure we do not panic when there is a tuple struct ctor
let w = { v }; // assignment
let _w = w; // reborrow
let w = w; // reborrow
// escape-to-raw (mut)
let _w = w as *mut _;
}
// Also test closures
@ -35,6 +37,9 @@ fn main() {
// need to call `foo_shr` or it doesn't even get generated
Test(0).foo_shr(&0);
// escape-to-raw (shr)
let _w = _w as *const _;
}
// END RUST SOURCE
@ -44,6 +49,7 @@ fn main() {
// Retag([fn entry] _2);
// ...
// _0 = &mut (*_3);
// Retag(_0);
// ...
// return;
// }
@ -73,23 +79,36 @@ fn main() {
// _9 = move _3;
// Retag(_9);
// _8 = &mut (*_9);
// Retag(_8);
// StorageDead(_9);
// StorageLive(_10);
// _10 = move _8;
// Retag(_10);
// ...
// _13 = move _14(move _15) -> bb2;
// _14 = &mut (*_10);
// Retag(_14);
// EscapeToRaw(move _14);
// _13 = move _14 as *mut i32 (Misc);
// ...
// _17 = move _18(move _19) -> bb2;
// }
//
// bb2: {
// Retag(_13);
// Retag(_17);
// ...
// _21 = const Test::foo_shr(move _22, move _24) -> bb3;
// }
//
// bb3: {
// ...
// return;
// }
//
// ...
// }
// END rustc.main.EraseRegions.after.mir
// START rustc.main-{{closure}}.EraseRegions.after.mir
// fn main::{{closure}}(_1: &[closure@NodeId(117)], _2: &i32) -> &i32 {
// fn main::{{closure}}(_1: &[closure@NodeId(124)], _2: &i32) -> &i32 {
// ...
// bb0: {
// Retag([fn entry] _1);