Rollup merge of #105317 - RalfJung:retag-rework, r=oli-obk

make retagging work even with 'unstable' places

This is based on top of https://github.com/rust-lang/rust/pull/105301. Only the last two commits are new.

While investigating https://github.com/rust-lang/unsafe-code-guidelines/issues/381 I realized that we would have caught this issue much earlier if the `add_retag` pass did not bail out on assignments of the form `*ptr = ...`.

So this PR changes our retag strategy:
- When a new reference is created via `Rvalue::Ref` (or a raw pointer via `Rvalue::AddressOf`), we now do the retagging as part of executing that address-taking operation.
- For everything else, we still insert retag statements -- these serve to ensure that references stored in local variables (and their fields) are always freshly tagged, so skipping them for assignments like `*ptr = ...` is less egregious.
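
As a rough illustration of the new split, here is a minimal Rust sketch (the function and variable names are made up for this example; the comments describe the intended MIR-level behavior under this PR, not actual compiler output):

```rust
// Hypothetical illustration; `example`, `dst`, `x`, `y` are invented for this sketch.
fn example<'a>(dst: *mut &'a i32, x: &'a i32) {
    // Creating a reference (`Rvalue::Ref`): the retag now happens while the
    // borrow itself is evaluated, so `AddRetag` no longer inserts a separate
    // `Retag` statement after this assignment.
    let r: &'a i32 = &*x;

    // Copying a reference into a local ("everything else"): `AddRetag` still
    // inserts a `Retag` statement here, keeping references stored in locals
    // freshly tagged.
    let y = r;

    // Storing through a pointer (`*dst = ...`): the destination is an
    // "unstable" place, so `AddRetag` still emits no retag for it; the stored
    // reference keeps the tag `y` already carries.
    unsafe { *dst = y }
}
```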
r? `@oli-obk`
Matthias Krüger 2022-12-08 12:57:30 +01:00 committed by GitHub
commit f1f7560598
17 changed files with 302 additions and 275 deletions

View File

@ -373,9 +373,21 @@ pub trait Machine<'mir, 'tcx>: Sized {
Ok(())
}
/// Executes a retagging operation.
/// Executes a retagging operation for a single pointer.
/// Returns the possibly adjusted pointer.
#[inline]
fn retag(
fn retag_ptr_value(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_kind: mir::RetagKind,
val: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> {
Ok(val.clone())
}
/// Executes a retagging operation on a compound value.
/// Replaces all pointers stored in the given place.
#[inline]
fn retag_place_contents(
_ecx: &mut InterpCx<'mir, 'tcx, Self>,
_kind: mir::RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,

View File

@ -8,7 +8,7 @@ use rustc_middle::mir;
use rustc_middle::mir::interpret::{InterpResult, Scalar};
use rustc_middle::ty::layout::LayoutOf;
use super::{InterpCx, Machine};
use super::{ImmTy, InterpCx, Machine};
/// Classify whether an operator is "left-homogeneous", i.e., the LHS has the
/// same type as the result.
@ -108,7 +108,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Stacked Borrows.
Retag(kind, place) => {
let dest = self.eval_place(**place)?;
M::retag(self, *kind, &dest)?;
M::retag_place_contents(self, *kind, &dest)?;
}
Intrinsic(box ref intrinsic) => self.emulate_nondiverging_intrinsic(intrinsic)?,
@ -247,10 +247,41 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
self.write_scalar(Scalar::from_machine_usize(len, self), &dest)?;
}
AddressOf(_, place) | Ref(_, _, place) => {
Ref(_, borrow_kind, place) => {
let src = self.eval_place(place)?;
let place = self.force_allocation(&src)?;
self.write_immediate(place.to_ref(self), &dest)?;
let val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
// A fresh reference was created, make sure it gets retagged.
let val = M::retag_ptr_value(
self,
if borrow_kind.allows_two_phase_borrow() {
mir::RetagKind::TwoPhase
} else {
mir::RetagKind::Default
},
&val,
)?;
self.write_immediate(*val, &dest)?;
}
AddressOf(_, place) => {
// Figure out whether this is an addr_of of an already raw place.
let place_base_raw = if place.has_deref() {
let ty = self.frame().body.local_decls[place.local].ty;
ty.is_unsafe_ptr()
} else {
// Not a deref, and thus not raw.
false
};
let src = self.eval_place(place)?;
let place = self.force_allocation(&src)?;
let mut val = ImmTy::from_immediate(place.to_ref(self), dest.layout);
if !place_base_raw {
// If this was not already raw, it needs retagging.
val = M::retag_ptr_value(self, mir::RetagKind::Raw, &val)?;
}
self.write_immediate(*val, &dest)?;
}
NullaryOp(null_op, ty) => {

View File

@ -400,7 +400,7 @@ impl std::fmt::Display for NonDivergingIntrinsic<'_> {
#[derive(Copy, Clone, TyEncodable, TyDecodable, Debug, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub enum RetagKind {
/// The initial retag when entering a function.
/// The initial retag of arguments when entering a function.
FnEntry,
/// Retag preparing for a two-phase borrow.
TwoPhase,

View File

@ -10,16 +10,6 @@ use rustc_middle::ty::{self, Ty, TyCtxt};
pub struct AddRetag;
/// Determines whether this place is "stable": Whether, if we evaluate it again
/// after the assignment, we can be sure to obtain the same place value.
/// (Concurrent accesses by other threads are no problem as these are anyway non-atomic
/// copies. Data races are UB.)
fn is_stable(place: PlaceRef<'_>) -> bool {
// Which place this evaluates to can change with any memory write,
// so cannot assume deref to be stable.
!place.has_deref()
}
/// Determine whether this type may contain a reference (or box), and thus needs retagging.
/// We will only recurse `depth` times into Tuples/ADTs to bound the cost of this.
fn may_contain_reference<'tcx>(ty: Ty<'tcx>, depth: u32, tcx: TyCtxt<'tcx>) -> bool {
@ -69,22 +59,10 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
let basic_blocks = body.basic_blocks.as_mut();
let local_decls = &body.local_decls;
let needs_retag = |place: &Place<'tcx>| {
// FIXME: Instead of giving up for unstable places, we should introduce
// a temporary and retag on that.
is_stable(place.as_ref())
!place.has_deref() // we're not really interested in stores to "outside" locations, they are hard to keep track of anyway
&& may_contain_reference(place.ty(&*local_decls, tcx).ty, /*depth*/ 3, tcx)
&& !local_decls[place.local].is_deref_temp()
};
let place_base_raw = |place: &Place<'tcx>| {
// If this is a `Deref`, get the type of what we are deref'ing.
if place.has_deref() {
let ty = &local_decls[place.local].ty;
ty.is_unsafe_ptr()
} else {
// Not a deref, and thus not raw.
false
}
};
// PART 1
// Retag arguments at the beginning of the start block.
@ -108,7 +86,7 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
}
// PART 2
// Retag return values of functions. Also escape-to-raw the argument of `drop`.
// Retag return values of functions.
// We collect the return destinations because we cannot mutate while iterating.
let returns = basic_blocks
.iter_mut()
@ -140,30 +118,25 @@ impl<'tcx> MirPass<'tcx> for AddRetag {
}
// PART 3
// Add retag after assignment.
// Add retag after assignments where data "enters" this function: the RHS is behind a deref and the LHS is not.
for block_data in basic_blocks {
// We want to insert statements as we iterate. To this end, we
// iterate backwards using indices.
for i in (0..block_data.statements.len()).rev() {
let (retag_kind, place) = match block_data.statements[i].kind {
// Retag-as-raw after escaping to a raw pointer, if the referent
// is not already a raw pointer.
StatementKind::Assign(box (lplace, Rvalue::AddressOf(_, ref rplace)))
if !place_base_raw(rplace) =>
{
(RetagKind::Raw, lplace)
}
// Retag after assignments of reference type.
StatementKind::Assign(box (ref place, ref rvalue)) if needs_retag(place) => {
let kind = match rvalue {
Rvalue::Ref(_, borrow_kind, _)
if borrow_kind.allows_two_phase_borrow() =>
{
RetagKind::TwoPhase
}
_ => RetagKind::Default,
let add_retag = match rvalue {
// Ptr-creating operations already do their own internal retagging, no
// need to also add a retag statement.
Rvalue::Ref(..) | Rvalue::AddressOf(..) => false,
_ => true,
};
(kind, *place)
if add_retag {
(RetagKind::Default, *place)
} else {
continue;
}
}
// Do nothing for the rest
_ => continue,

View File

@ -985,16 +985,6 @@ fn create_generator_drop_shim<'tcx>(
tcx.mk_ptr(ty::TypeAndMut { ty: gen_ty, mutbl: hir::Mutability::Mut }),
source_info,
);
if tcx.sess.opts.unstable_opts.mir_emit_retag {
// Alias tracking must know we changed the type
body.basic_blocks_mut()[START_BLOCK].statements.insert(
0,
Statement {
source_info,
kind: StatementKind::Retag(RetagKind::Raw, Box::new(Place::from(SELF_ARG))),
},
)
}
// Make sure we remove dead blocks to remove
// unrelated code from the resume part of the function

View File

@ -177,16 +177,6 @@ fn build_drop_shim<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId, ty: Option<Ty<'tcx>>)
if ty.is_some() {
// The first argument (index 0), but add 1 for the return value.
let dropee_ptr = Place::from(Local::new(1 + 0));
if tcx.sess.opts.unstable_opts.mir_emit_retag {
// Function arguments should be retagged, and we make this one raw.
body.basic_blocks_mut()[START_BLOCK].statements.insert(
0,
Statement {
source_info,
kind: StatementKind::Retag(RetagKind::Raw, Box::new(dropee_ptr)),
},
);
}
let patch = {
let param_env = tcx.param_env_reveal_all_normalized(def_id);
let mut elaborator =

View File

@ -38,9 +38,7 @@ fn bar() -> bool {
// + literal: Const { ty: &i32, val: Unevaluated(bar, [], Some(promoted[1])) }
Retag(_10); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
_4 = &(*_10); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
Retag(_4); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
_3 = &(*_4); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
Retag(_3); // scope 1 at $DIR/inline_retag.rs:+2:7: +2:9
StorageLive(_6); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
StorageLive(_7); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
_9 = const _; // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
@ -49,9 +47,7 @@ fn bar() -> bool {
// + literal: Const { ty: &i32, val: Unevaluated(bar, [], Some(promoted[0])) }
Retag(_9); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
_7 = &(*_9); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
Retag(_7); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
_6 = &(*_7); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
Retag(_6); // scope 1 at $DIR/inline_retag.rs:+2:11: +2:14
Retag(_3); // scope 2 at $DIR/inline_retag.rs:16:8: 16:9
Retag(_6); // scope 2 at $DIR/inline_retag.rs:16:17: 16:18
StorageLive(_11); // scope 2 at $DIR/inline_retag.rs:17:5: 17:7

View File

@ -68,9 +68,7 @@ fn array_casts() -> () {
StorageLive(_3); // scope 1 at $DIR/retag.rs:+2:13: +2:19
StorageLive(_4); // scope 1 at $DIR/retag.rs:+2:13: +2:19
_4 = &mut _1; // scope 1 at $DIR/retag.rs:+2:13: +2:19
Retag(_4); // scope 1 at $DIR/retag.rs:+2:13: +2:19
_3 = &raw mut (*_4); // scope 1 at $DIR/retag.rs:+2:13: +2:19
Retag([raw] _3); // scope 1 at $DIR/retag.rs:+2:13: +2:19
_2 = move _3 as *mut usize (Pointer(ArrayToPointer)); // scope 1 at $DIR/retag.rs:+2:13: +2:33
StorageDead(_3); // scope 1 at $DIR/retag.rs:+2:32: +2:33
StorageDead(_4); // scope 1 at $DIR/retag.rs:+2:33: +2:34
@ -96,9 +94,7 @@ fn array_casts() -> () {
StorageLive(_10); // scope 4 at $DIR/retag.rs:+6:13: +6:15
StorageLive(_11); // scope 4 at $DIR/retag.rs:+6:13: +6:15
_11 = &_8; // scope 4 at $DIR/retag.rs:+6:13: +6:15
Retag(_11); // scope 4 at $DIR/retag.rs:+6:13: +6:15
_10 = &raw const (*_11); // scope 4 at $DIR/retag.rs:+6:13: +6:15
Retag([raw] _10); // scope 4 at $DIR/retag.rs:+6:13: +6:15
_9 = move _10 as *const usize (Pointer(ArrayToPointer)); // scope 4 at $DIR/retag.rs:+6:13: +6:31
StorageDead(_10); // scope 4 at $DIR/retag.rs:+6:30: +6:31
StorageDead(_11); // scope 4 at $DIR/retag.rs:+6:31: +6:32
@ -119,7 +115,6 @@ fn array_casts() -> () {
StorageDead(_17); // scope 6 at $DIR/retag.rs:+7:33: +7:34
_15 = (*_16); // scope 6 at $DIR/retag.rs:+7:25: +7:34
_14 = &_15; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Retag(_14); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_18); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_35 = const _; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
// mir::Constant
@ -127,7 +122,6 @@ fn array_casts() -> () {
// + literal: Const { ty: &usize, val: Unevaluated(array_casts, [], Some(promoted[0])) }
Retag(_35); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_18 = &(*_35); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Retag(_18); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_13); // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_13.0: &usize) = move _14; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
(_13.1: &usize) = move _18; // scope 5 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
@ -164,15 +158,11 @@ fn array_casts() -> () {
StorageLive(_30); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_31); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_31 = &(*_20); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Retag(_31); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_30 = &(*_31); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Retag(_30); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_32); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_33); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_33 = &(*_21); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Retag(_33); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
_32 = &(*_33); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Retag(_32); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
StorageLive(_34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
Deinit(_34); // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL
discriminant(_34) = 0; // scope 8 at $SRC_DIR/core/src/macros/mod.rs:LL:COL

View File

@ -6,7 +6,6 @@ fn std::ptr::drop_in_place(_1: *mut Test) -> () {
let mut _3: (); // in scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
bb0: {
Retag([raw] _1); // scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
_2 = &mut (*_1); // scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
_3 = <Test as Drop>::drop(move _2) -> bb1; // scope 0 at $SRC_DIR/core/src/ptr/mod.rs:+0:1: +0:56
// mir::Constant

View File

@ -15,7 +15,6 @@ fn main::{closure#0}(_1: &[closure@main::{closure#0}], _2: &i32) -> &i32 {
_3 = _2; // scope 0 at $DIR/retag.rs:+1:18: +1:19
Retag(_3); // scope 0 at $DIR/retag.rs:+1:18: +1:19
_0 = &(*_2); // scope 1 at $DIR/retag.rs:+2:9: +2:10
Retag(_0); // scope 1 at $DIR/retag.rs:+2:9: +2:10
StorageDead(_3); // scope 0 at $DIR/retag.rs:+3:5: +3:6
return; // scope 0 at $DIR/retag.rs:+3:6: +3:6
}

View File

@ -65,13 +65,10 @@ fn main() -> () {
Deinit(_5); // scope 1 at $DIR/retag.rs:+3:17: +3:24
(_5.0: i32) = const 0_i32; // scope 1 at $DIR/retag.rs:+3:17: +3:24
_4 = &_5; // scope 1 at $DIR/retag.rs:+3:17: +3:36
Retag(_4); // scope 1 at $DIR/retag.rs:+3:17: +3:36
StorageLive(_6); // scope 1 at $DIR/retag.rs:+3:29: +3:35
StorageLive(_7); // scope 1 at $DIR/retag.rs:+3:29: +3:35
_7 = &mut _1; // scope 1 at $DIR/retag.rs:+3:29: +3:35
Retag(_7); // scope 1 at $DIR/retag.rs:+3:29: +3:35
_6 = &mut (*_7); // scope 1 at $DIR/retag.rs:+3:29: +3:35
Retag([2phase] _6); // scope 1 at $DIR/retag.rs:+3:29: +3:35
_3 = Test::foo(move _4, move _6) -> [return: bb1, unwind: bb8]; // scope 1 at $DIR/retag.rs:+3:17: +3:36
// mir::Constant
// + span: $DIR/retag.rs:33:25: 33:28
@ -93,7 +90,6 @@ fn main() -> () {
_9 = move _3; // scope 2 at $DIR/retag.rs:+4:19: +4:20
Retag(_9); // scope 2 at $DIR/retag.rs:+4:19: +4:20
_8 = &mut (*_9); // scope 2 at $DIR/retag.rs:+4:19: +4:20
Retag(_8); // scope 2 at $DIR/retag.rs:+4:19: +4:20
StorageDead(_9); // scope 2 at $DIR/retag.rs:+4:22: +4:23
StorageLive(_10); // scope 3 at $DIR/retag.rs:+5:13: +5:14
_10 = move _8; // scope 3 at $DIR/retag.rs:+5:17: +5:18
@ -101,7 +97,6 @@ fn main() -> () {
StorageLive(_11); // scope 4 at $DIR/retag.rs:+7:13: +7:15
StorageLive(_12); // scope 4 at $DIR/retag.rs:+7:18: +7:29
_12 = &raw mut (*_10); // scope 4 at $DIR/retag.rs:+7:18: +7:19
Retag([raw] _12); // scope 4 at $DIR/retag.rs:+7:18: +7:19
_11 = _12; // scope 4 at $DIR/retag.rs:+7:18: +7:29
StorageDead(_12); // scope 4 at $DIR/retag.rs:+7:29: +7:30
_2 = const (); // scope 1 at $DIR/retag.rs:+2:5: +8:6
@ -122,9 +117,7 @@ fn main() -> () {
StorageLive(_17); // scope 6 at $DIR/retag.rs:+15:16: +15:18
StorageLive(_18); // scope 6 at $DIR/retag.rs:+15:16: +15:18
_18 = &_1; // scope 6 at $DIR/retag.rs:+15:16: +15:18
Retag(_18); // scope 6 at $DIR/retag.rs:+15:16: +15:18
_17 = &(*_18); // scope 6 at $DIR/retag.rs:+15:16: +15:18
Retag(_17); // scope 6 at $DIR/retag.rs:+15:16: +15:18
_15 = move _16(move _17) -> bb3; // scope 6 at $DIR/retag.rs:+15:14: +15:19
}
@ -139,7 +132,6 @@ fn main() -> () {
Deinit(_21); // scope 7 at $DIR/retag.rs:+18:5: +18:12
(_21.0: i32) = const 0_i32; // scope 7 at $DIR/retag.rs:+18:5: +18:12
_20 = &_21; // scope 7 at $DIR/retag.rs:+18:5: +18:24
Retag(_20); // scope 7 at $DIR/retag.rs:+18:5: +18:24
StorageLive(_22); // scope 7 at $DIR/retag.rs:+18:21: +18:23
StorageLive(_23); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_28 = const _; // scope 7 at $DIR/retag.rs:+18:21: +18:23
@ -148,9 +140,7 @@ fn main() -> () {
// + literal: Const { ty: &i32, val: Unevaluated(main, [], Some(promoted[0])) }
Retag(_28); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_23 = &(*_28); // scope 7 at $DIR/retag.rs:+18:21: +18:23
Retag(_23); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_22 = &(*_23); // scope 7 at $DIR/retag.rs:+18:21: +18:23
Retag(_22); // scope 7 at $DIR/retag.rs:+18:21: +18:23
_19 = Test::foo_shr(move _20, move _22) -> [return: bb4, unwind: bb7]; // scope 7 at $DIR/retag.rs:+18:5: +18:24
// mir::Constant
// + span: $DIR/retag.rs:48:13: 48:20
@ -171,7 +161,6 @@ fn main() -> () {
StorageLive(_25); // scope 7 at $DIR/retag.rs:+21:9: +21:11
StorageLive(_26); // scope 7 at $DIR/retag.rs:+21:14: +21:28
_26 = &raw const (*_15); // scope 7 at $DIR/retag.rs:+21:14: +21:16
Retag([raw] _26); // scope 7 at $DIR/retag.rs:+21:14: +21:16
_25 = _26; // scope 7 at $DIR/retag.rs:+21:14: +21:28
StorageDead(_26); // scope 7 at $DIR/retag.rs:+21:28: +21:29
StorageLive(_27); // scope 8 at $DIR/retag.rs:+23:5: +23:18

View File

@ -11,9 +11,7 @@ fn <impl at $DIR/retag.rs:12:1: 12:10>::foo(_1: &Test, _2: &mut i32) -> &mut i32
Retag([fn entry] _2); // scope 0 at $DIR/retag.rs:+0:23: +0:24
StorageLive(_3); // scope 0 at $DIR/retag.rs:+1:9: +1:10
_3 = &mut (*_2); // scope 0 at $DIR/retag.rs:+1:9: +1:10
Retag(_3); // scope 0 at $DIR/retag.rs:+1:9: +1:10
_0 = &mut (*_3); // scope 0 at $DIR/retag.rs:+1:9: +1:10
Retag(_0); // scope 0 at $DIR/retag.rs:+1:9: +1:10
StorageDead(_3); // scope 0 at $DIR/retag.rs:+2:5: +2:6
return; // scope 0 at $DIR/retag.rs:+2:6: +2:6
}

View File

@ -11,7 +11,6 @@ use rustc_target::abi::Size;
use crate::*;
pub mod stacked_borrows;
use stacked_borrows::diagnostics::RetagCause;
pub type CallId = NonZeroU64;
@ -265,11 +264,19 @@ impl GlobalStateInner {
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn retag(&mut self, kind: RetagKind, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
fn retag_ptr_value(&mut self, kind: RetagKind, val: &ImmTy<'tcx, Provenance>) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
let this = self.eval_context_mut();
let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
match method {
BorrowTrackerMethod::StackedBorrows => this.sb_retag(kind, place),
BorrowTrackerMethod::StackedBorrows => this.sb_retag_ptr_value(kind, val),
}
}
fn retag_place_contents(&mut self, kind: RetagKind, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
let method = this.machine.borrow_tracker.as_ref().unwrap().borrow().borrow_tracker_method;
match method {
BorrowTrackerMethod::StackedBorrows => this.sb_retag_place_contents(kind, place),
}
}

View File

@ -459,10 +459,10 @@ impl<'history, 'ecx, 'mir, 'tcx> DiagnosticCx<'history, 'ecx, 'mir, 'tcx> {
Operation::Dealloc(_) => format!(" due to deallocation"),
Operation::Access(AccessOp { kind, tag, .. }) =>
format!(" due to {kind:?} access for {tag:?}"),
Operation::Retag(RetagOp { orig_tag, permission, .. }) => {
Operation::Retag(RetagOp { orig_tag, permission, new_tag, .. }) => {
let permission = permission
.expect("start_grant should set the current permission before popping a tag");
format!(" due to {permission:?} retag from {orig_tag:?}")
format!(" due to {permission:?} retag from {orig_tag:?} (that retag created {new_tag:?})")
}
};

View File

@ -1,9 +1,13 @@
//! Implements "Stacked Borrows". See <https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md>
//! for further information.
pub mod diagnostics;
mod item;
mod stack;
use log::trace;
use std::cmp;
use std::fmt::{self, Write};
use std::fmt::Write;
use rustc_data_structures::fx::FxHashSet;
use rustc_middle::mir::{Mutability, RetagKind};
@ -15,15 +19,13 @@ use rustc_target::abi::{Abi, Size};
use crate::borrow_tracker::{
stacked_borrows::diagnostics::{AllocHistory, DiagnosticCx, DiagnosticCxBuilder, TagHistory},
AccessKind, GlobalStateInner, ProtectorKind, RetagCause, RetagFields,
AccessKind, GlobalStateInner, ProtectorKind, RetagFields,
};
use crate::*;
mod item;
use diagnostics::RetagCause;
pub use item::{Item, Permission};
mod stack;
pub use stack::Stack;
pub mod diagnostics;
pub type AllocState = Stacks;
@ -40,30 +42,104 @@ pub struct Stacks {
modified_since_last_gc: bool,
}
/// Indicates which kind of reference is being created.
/// Used by high-level `reborrow` to compute which permissions to grant to the
/// new pointer.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
enum RefKind {
/// `Box`.
Box,
/// `&mut`.
Unique { two_phase: bool },
/// `&` with or without interior mutability.
Shared,
/// `*mut`/`*const` (raw pointers).
Raw { mutable: bool },
/// Indicates which permissions to grant to the retagged pointer.
#[derive(Clone, Debug)]
enum NewPermission {
Uniform {
perm: Permission,
access: Option<AccessKind>,
protector: Option<ProtectorKind>,
},
FreezeSensitive {
freeze_perm: Permission,
freeze_access: Option<AccessKind>,
freeze_protector: Option<ProtectorKind>,
nonfreeze_perm: Permission,
nonfreeze_access: Option<AccessKind>,
// nonfreeze_protector must always be None
},
}
impl fmt::Display for RefKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
impl NewPermission {
/// A key function: determine the permissions to grant at a retag for the given kind of
/// reference/pointer.
fn from_ref_ty<'tcx>(
ty: ty::Ty<'tcx>,
kind: RetagKind,
cx: &crate::MiriInterpCx<'_, 'tcx>,
) -> Self {
let protector = (kind == RetagKind::FnEntry).then_some(ProtectorKind::StrongProtector);
match ty.kind() {
ty::Ref(_, pointee, Mutability::Mut) => {
if kind == RetagKind::TwoPhase {
// We mostly just give up on 2phase-borrows, and treat these exactly like raw pointers.
assert!(protector.is_none()); // RetagKind can't be both FnEntry and TwoPhase.
NewPermission::Uniform {
perm: Permission::SharedReadWrite,
access: None,
protector: None,
}
} else if pointee.is_unpin(*cx.tcx, cx.param_env()) {
// A regular full mutable reference.
NewPermission::Uniform {
perm: Permission::Unique,
access: Some(AccessKind::Write),
protector,
}
} else {
NewPermission::Uniform {
perm: Permission::SharedReadWrite,
// FIXME: We emit `dereferenceable` for `!Unpin` mutable references, so we
// should do fake accesses here. But then we run into
// <https://github.com/rust-lang/unsafe-code-guidelines/issues/381>, so for now
// we don't do that.
access: None,
protector,
}
}
}
ty::RawPtr(ty::TypeAndMut { mutbl: Mutability::Mut, .. }) => {
assert!(protector.is_none()); // RetagKind can't be both FnEntry and Raw.
// Mutable raw pointer. No access, not protected.
NewPermission::Uniform {
perm: Permission::SharedReadWrite,
access: None,
protector: None,
}
}
ty::Ref(_, _pointee, Mutability::Not) => {
NewPermission::FreezeSensitive {
freeze_perm: Permission::SharedReadOnly,
freeze_access: Some(AccessKind::Read),
freeze_protector: protector,
nonfreeze_perm: Permission::SharedReadWrite,
// Inside UnsafeCell, this does *not* count as an access, as there
// might actually be mutable references further up the stack that
// we have to keep alive.
nonfreeze_access: None,
// We do not protect inside UnsafeCell.
// This fixes https://github.com/rust-lang/rust/issues/55005.
}
}
ty::RawPtr(ty::TypeAndMut { mutbl: Mutability::Not, .. }) => {
assert!(protector.is_none()); // RetagKind can't be both FnEntry and Raw.
// `*const T`, when freshly created, are read-only in the frozen part.
NewPermission::FreezeSensitive {
freeze_perm: Permission::SharedReadOnly,
freeze_access: Some(AccessKind::Read),
freeze_protector: None,
nonfreeze_perm: Permission::SharedReadWrite,
nonfreeze_access: None,
}
}
_ => unreachable!(),
}
}
fn protector(&self) -> Option<ProtectorKind> {
match self {
RefKind::Box => write!(f, "Box"),
RefKind::Unique { two_phase: false } => write!(f, "unique reference"),
RefKind::Unique { two_phase: true } => write!(f, "unique reference (two-phase)"),
RefKind::Shared => write!(f, "shared reference"),
RefKind::Raw { mutable: true } => write!(f, "raw (mutable) pointer"),
RefKind::Raw { mutable: false } => write!(f, "raw (constant) pointer"),
NewPermission::Uniform { protector, .. } => *protector,
NewPermission::FreezeSensitive { freeze_protector, .. } => *freeze_protector,
}
}
}
@ -518,10 +594,9 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
&mut self,
place: &MPlaceTy<'tcx, Provenance>,
size: Size,
kind: RefKind,
retag_cause: RetagCause, // What caused this retag, for diagnostics only
new_perm: NewPermission,
new_tag: BorTag,
protect: Option<ProtectorKind>,
retag_cause: RetagCause, // What caused this retag, for diagnostics only
) -> InterpResult<'tcx, Option<AllocId>> {
let this = self.eval_context_mut();
@ -532,20 +607,16 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
let global = this.machine.borrow_tracker.as_ref().unwrap().borrow();
let ty = place.layout.ty;
if global.tracked_pointer_tags.contains(&new_tag) {
let mut kind_str = format!("{kind}");
match kind {
RefKind::Unique { two_phase: false }
if !ty.is_unpin(*this.tcx, this.param_env()) =>
{
write!(kind_str, " (!Unpin pointee type {ty})").unwrap()
},
RefKind::Shared
if !ty.is_freeze(*this.tcx, this.param_env()) =>
{
write!(kind_str, " (!Freeze pointee type {ty})").unwrap()
},
_ => write!(kind_str, " (pointee type {ty})").unwrap(),
};
let mut kind_str = String::new();
match new_perm {
NewPermission::Uniform { perm, .. } =>
write!(kind_str, "{perm:?} permission").unwrap(),
NewPermission::FreezeSensitive { freeze_perm, .. } if ty.is_freeze(*this.tcx, this.param_env()) =>
write!(kind_str, "{freeze_perm:?} permission").unwrap(),
NewPermission::FreezeSensitive { freeze_perm, nonfreeze_perm, .. } =>
write!(kind_str, "{freeze_perm:?}/{nonfreeze_perm:?} permission for frozen/non-frozen parts").unwrap(),
}
write!(kind_str, " (pointee type {ty})").unwrap();
this.emit_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(
new_tag.inner(),
Some(kind_str),
@ -579,7 +650,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
);
let mut dcx = dcx.build(&mut stacked_borrows.history, base_offset);
dcx.log_creation();
if protect.is_some() {
if new_perm.protector().is_some() {
dcx.log_protector();
}
},
@ -592,8 +663,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
if size == Size::ZERO {
trace!(
"reborrow of size 0: {} reference {:?} derived from {:?} (pointee {})",
kind,
"reborrow of size 0: reference {:?} derived from {:?} (pointee {})",
new_tag,
place.ptr,
place.layout.ty,
@ -630,8 +700,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
}
trace!(
"reborrow: {} reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
kind,
"reborrow: reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
new_tag,
orig_tag,
place.layout.ty,
@ -639,7 +708,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
size.bytes()
);
if let Some(protect) = protect {
if let Some(protect) = new_perm.protector() {
// See comment in `Stack::item_invalidated` for why we store the tag twice.
this.frame_mut().extra.borrow_tracker.as_mut().unwrap().protected_tags.push(new_tag);
this.machine
@ -651,30 +720,45 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
.insert(new_tag, protect);
}
// Update the stacks.
// Make sure that raw pointers and mutable shared references are reborrowed "weak":
// There could be existing unique pointers reborrowed from them that should remain valid!
let (perm, access) = match kind {
RefKind::Unique { two_phase } => {
// Permission is Unique only if the type is `Unpin` and this is not twophase
if !two_phase && place.layout.ty.is_unpin(*this.tcx, this.param_env()) {
(Permission::Unique, Some(AccessKind::Write))
} else {
// FIXME: We emit `dereferenceable` for `!Unpin` mutable references, so we
// should do fake accesses here. But then we run into
// <https://github.com/rust-lang/unsafe-code-guidelines/issues/381>, so for now
// we don't do that.
(Permission::SharedReadWrite, None)
// Update the stacks, according to the new permission information we are given.
match new_perm {
NewPermission::Uniform { perm, access, protector } => {
assert!(perm != Permission::SharedReadOnly);
// Here we can avoid `borrow()` calls because we have mutable references.
// Note that this asserts that the allocation is mutable -- but since we are creating a
// mutable pointer, that seems reasonable.
let (alloc_extra, machine) = this.get_alloc_extra_mut(alloc_id)?;
let stacked_borrows = alloc_extra.borrow_tracker_sb_mut().get_mut();
let item = Item::new(new_tag, perm, protector.is_some());
let range = alloc_range(base_offset, size);
let global = machine.borrow_tracker.as_ref().unwrap().borrow();
let dcx = DiagnosticCxBuilder::retag(
machine,
retag_cause,
new_tag,
orig_tag,
alloc_range(base_offset, size),
);
stacked_borrows.for_each(range, dcx, |stack, dcx, exposed_tags| {
stack.grant(orig_tag, item, access, &global, dcx, exposed_tags)
})?;
drop(global);
if let Some(access) = access {
assert_eq!(access, AccessKind::Write);
// Make sure the data race model also knows about this.
if let Some(data_race) = alloc_extra.data_race.as_mut() {
data_race.write(alloc_id, range, machine)?;
}
}
}
RefKind::Box => (Permission::Unique, Some(AccessKind::Write)),
RefKind::Raw { mutable: true } => {
// Creating a raw ptr does not count as an access
(Permission::SharedReadWrite, None)
}
RefKind::Shared | RefKind::Raw { mutable: false } => {
// Shared references and *const are a whole different kind of game, the
// permission is not uniform across the entire range!
NewPermission::FreezeSensitive {
freeze_perm,
freeze_access,
freeze_protector,
nonfreeze_perm,
nonfreeze_access,
} => {
// The permission is not uniform across the entire range!
// We need a frozen-sensitive reborrow.
// We have to use shared references to alloc/memory_extra here since
// `visit_freeze_sensitive` needs to access the global state.
@ -684,22 +768,12 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
// Adjust range.
range.start += base_offset;
// We are only ever `SharedReadOnly` inside the frozen bits.
let (perm, access) = if frozen {
(Permission::SharedReadOnly, Some(AccessKind::Read))
let (perm, access, protector) = if frozen {
(freeze_perm, freeze_access, freeze_protector)
} else {
// Inside UnsafeCell, this does *not* count as an access, as there
// might actually be mutable references further up the stack that
// we have to keep alive.
(Permission::SharedReadWrite, None)
(nonfreeze_perm, nonfreeze_access, None)
};
let protected = if frozen {
protect.is_some()
} else {
// We do not protect inside UnsafeCell.
// This fixes https://github.com/rust-lang/rust/issues/55005.
false
};
let item = Item::new(new_tag, perm, protected);
let item = Item::new(new_tag, perm, protector.is_some());
let global = this.machine.borrow_tracker.as_ref().unwrap().borrow();
let dcx = DiagnosticCxBuilder::retag(
&this.machine,
@ -721,34 +795,6 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
}
Ok(())
})?;
return Ok(Some(alloc_id));
}
};
// Here we can avoid `borrow()` calls because we have mutable references.
// Note that this asserts that the allocation is mutable -- but since we are creating a
// mutable pointer, that seems reasonable.
let (alloc_extra, machine) = this.get_alloc_extra_mut(alloc_id)?;
let stacked_borrows = alloc_extra.borrow_tracker_sb_mut().get_mut();
let item = Item::new(new_tag, perm, protect.is_some());
let range = alloc_range(base_offset, size);
let global = machine.borrow_tracker.as_ref().unwrap().borrow();
let dcx = DiagnosticCxBuilder::retag(
machine,
retag_cause,
new_tag,
orig_tag,
alloc_range(base_offset, size),
);
stacked_borrows.for_each(range, dcx, |stack, dcx, exposed_tags| {
stack.grant(orig_tag, item, access, &global, dcx, exposed_tags)
})?;
drop(global);
if let Some(access) = access {
assert_eq!(access, AccessKind::Write);
// Make sure the data race model also knows about this.
if let Some(data_race) = alloc_extra.data_race.as_mut() {
data_race.write(alloc_id, range, machine)?;
}
}
@ -760,9 +806,8 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
fn sb_retag_reference(
&mut self,
val: &ImmTy<'tcx, Provenance>,
kind: RefKind,
retag_cause: RetagCause, // What caused this retag, for diagnostics only
protect: Option<ProtectorKind>,
new_perm: NewPermission,
cause: RetagCause, // What caused this retag, for diagnostics only
) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
let this = self.eval_context_mut();
// We want a place for where the ptr *points to*, so we get one.
@ -780,7 +825,7 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
let new_tag = this.machine.borrow_tracker.as_mut().unwrap().get_mut().new_ptr();
// Reborrow.
let alloc_id = this.sb_reborrow(&place, size, kind, retag_cause, new_tag, protect)?;
let alloc_id = this.sb_reborrow(&place, size, new_perm, new_tag, cause)?;
// Adjust pointer.
let new_place = place.map_provenance(|p| {
@ -807,7 +852,22 @@ trait EvalContextPrivExt<'mir: 'ecx, 'tcx: 'mir, 'ecx>: crate::MiriInterpCxExt<'
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriInterpCx<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn sb_retag(
fn sb_retag_ptr_value(
&mut self,
kind: RetagKind,
val: &ImmTy<'tcx, Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
let this = self.eval_context_mut();
let new_perm = NewPermission::from_ref_ty(val.layout.ty, kind, this);
let retag_cause = match kind {
RetagKind::TwoPhase { .. } => RetagCause::TwoPhase,
RetagKind::FnEntry => unreachable!(),
RetagKind::Raw | RetagKind::Default => RetagCause::Normal,
};
this.sb_retag_reference(&val, new_perm, retag_cause)
}
fn sb_retag_place_contents(
&mut self,
kind: RetagKind,
place: &PlaceTy<'tcx, Provenance>,
@ -815,9 +875,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
let this = self.eval_context_mut();
let retag_fields = this.machine.borrow_tracker.as_mut().unwrap().get_mut().retag_fields;
let retag_cause = match kind {
RetagKind::TwoPhase { .. } => RetagCause::TwoPhase,
RetagKind::Raw | RetagKind::TwoPhase { .. } => unreachable!(), // these can only happen in `retag_ptr_value`
RetagKind::FnEntry => RetagCause::FnEntry,
RetagKind::Raw | RetagKind::Default => RetagCause::Normal,
RetagKind::Default => RetagCause::Normal,
};
let mut visitor = RetagVisitor { ecx: this, kind, retag_cause, retag_fields };
return visitor.visit_value(place);
@ -831,15 +891,14 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
}
impl<'ecx, 'mir, 'tcx> RetagVisitor<'ecx, 'mir, 'tcx> {
#[inline(always)] // yes this helps in our benchmarks
fn retag_place(
fn retag_ptr_inplace(
&mut self,
place: &PlaceTy<'tcx, Provenance>,
ref_kind: RefKind,
new_perm: NewPermission,
retag_cause: RetagCause,
protector: Option<ProtectorKind>,
) -> InterpResult<'tcx> {
let val = self.ecx.read_immediate(&self.ecx.place_to_op(place)?)?;
let val = self.ecx.sb_retag_reference(&val, ref_kind, retag_cause, protector)?;
let val = self.ecx.sb_retag_reference(&val, new_perm, retag_cause)?;
self.ecx.write_immediate(*val, place)?;
Ok(())
}
@ -856,13 +915,13 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
fn visit_box(&mut self, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
// Boxes get a weak protector, since they may be deallocated.
self.retag_place(
place,
RefKind::Box,
self.retag_cause,
/*protector*/
(self.kind == RetagKind::FnEntry).then_some(ProtectorKind::WeakProtector),
)
let new_perm = NewPermission::Uniform {
perm: Permission::Unique,
access: Some(AccessKind::Write),
protector: (self.kind == RetagKind::FnEntry)
.then_some(ProtectorKind::WeakProtector),
};
self.retag_ptr_inplace(place, new_perm, self.retag_cause)
}
fn visit_value(&mut self, place: &PlaceTy<'tcx, Provenance>) -> InterpResult<'tcx> {
@ -876,36 +935,16 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
// Check the type of this value to see what to do with it (retag, or recurse).
match place.layout.ty.kind() {
ty::Ref(_, _, mutbl) => {
let ref_kind = match mutbl {
Mutability::Mut =>
RefKind::Unique { two_phase: self.kind == RetagKind::TwoPhase },
Mutability::Not => RefKind::Shared,
};
self.retag_place(
place,
ref_kind,
self.retag_cause,
/*protector*/
(self.kind == RetagKind::FnEntry)
.then_some(ProtectorKind::StrongProtector),
)?;
ty::Ref(..) => {
let new_perm =
NewPermission::from_ref_ty(place.layout.ty, self.kind, self.ecx);
self.retag_ptr_inplace(place, new_perm, self.retag_cause)?;
}
ty::RawPtr(tym) => {
// We definitely do *not* want to recurse into raw pointers -- wide raw
// pointers have fields, and for dyn Trait pointees those can have reference
// type!
if self.kind == RetagKind::Raw {
// Raw pointers need to be enabled.
self.retag_place(
place,
RefKind::Raw { mutable: tym.mutbl == Mutability::Mut },
self.retag_cause,
/*protector*/ None,
)?;
}
ty::RawPtr(..) => {
// We do *not* want to recurse into raw pointers -- wide raw pointers have
// fields, and for dyn Trait pointees those can have reference type!
}
_ if place.layout.ty.ty_adt_def().is_some_and(|adt| adt.is_box()) => {
ty::Adt(adt, _) if adt.is_box() => {
// Recurse for boxes, they require some tricky handling and will end up in `visit_box` above.
// (Yes this means we technically also recursively retag the allocator itself
// even if field retagging is not enabled. *shrug*)
@ -953,12 +992,12 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> {
let ptr_layout = this.layout_of(this.tcx.mk_mut_ptr(return_place.layout.ty))?;
let val = ImmTy::from_immediate(return_place.to_ref(this), ptr_layout);
// Reborrow it. With protection! That is part of the point.
let val = this.sb_retag_reference(
&val,
RefKind::Unique { two_phase: false },
RetagCause::FnReturn,
/*protector*/ Some(ProtectorKind::StrongProtector),
)?;
let new_perm = NewPermission::Uniform {
perm: Permission::Unique,
access: Some(AccessKind::Write),
protector: Some(ProtectorKind::StrongProtector),
};
let val = this.sb_retag_reference(&val, new_perm, RetagCause::FnReturn)?;
// And use reborrowed pointer for return place.
let return_place = this.ref_to_mplace(&val)?;
this.frame_mut().return_place = return_place.into();

View File

@ -63,9 +63,9 @@ impl MachineStopType for TerminationInfo {}
/// Miri specific diagnostics
pub enum NonHaltingDiagnostic {
/// (new_tag, new_kind, (alloc_id, base_offset, orig_tag))
/// (new_tag, new_perm, (alloc_id, base_offset, orig_tag))
///
/// new_kind is `None` for base tags.
/// new_perm is `None` for base tags.
CreatedPointerTag(NonZeroU64, Option<String>, Option<(AllocId, AllocRange, ProvenanceExtra)>),
/// This `Item` was popped from the borrow stack. The string explains the reason.
PoppedPointerTag(Item, String),
@ -393,10 +393,10 @@ impl<'mir, 'tcx> MiriMachine<'mir, 'tcx> {
let msg = match &e {
CreatedPointerTag(tag, None, _) => format!("created base tag {tag:?}"),
CreatedPointerTag(tag, Some(kind), None) => format!("created {tag:?} for {kind}"),
CreatedPointerTag(tag, Some(kind), Some((alloc_id, range, orig_tag))) =>
CreatedPointerTag(tag, Some(perm), None) => format!("created {tag:?} with {perm} derived from unknown tag"),
CreatedPointerTag(tag, Some(perm), Some((alloc_id, range, orig_tag))) =>
format!(
"created tag {tag:?} for {kind} at {alloc_id:?}{range:?} derived from {orig_tag:?}"
"created tag {tag:?} with {perm} at {alloc_id:?}{range:?} derived from {orig_tag:?}"
),
PoppedPointerTag(item, cause) => format!("popped tracked tag for item {item:?}{cause}"),
CreatedCallId(id) => format!("function call with id {id}"),

View File

@ -967,8 +967,9 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> {
ptr: Pointer<Self::Provenance>,
) -> InterpResult<'tcx> {
match ptr.provenance {
Provenance::Concrete { alloc_id, tag } =>
intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, tag),
Provenance::Concrete { alloc_id, tag } => {
intptrcast::GlobalStateInner::expose_ptr(ecx, alloc_id, tag)
}
Provenance::Wildcard => {
// No need to do anything for wildcard pointers as
// their provenances have already been previously exposed.
@ -1055,13 +1056,26 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for MiriMachine<'mir, 'tcx> {
}
#[inline(always)]
fn retag(
fn retag_ptr_value(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
kind: mir::RetagKind,
val: &ImmTy<'tcx, Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Provenance>> {
if ecx.machine.borrow_tracker.is_some() {
ecx.retag_ptr_value(kind, val)
} else {
Ok(val.clone())
}
}
#[inline(always)]
fn retag_place_contents(
ecx: &mut InterpCx<'mir, 'tcx, Self>,
kind: mir::RetagKind,
place: &PlaceTy<'tcx, Provenance>,
) -> InterpResult<'tcx> {
if ecx.machine.borrow_tracker.is_some() {
ecx.retag(kind, place)?;
ecx.retag_place_contents(kind, place)?;
}
Ok(())
}