diff --git a/README.md b/README.md
index a7ee11e82e0..bfc32d04a9d 100644
--- a/README.md
+++ b/README.md
@@ -369,6 +369,9 @@ to Miri failing to detect cases of undefined behavior in a program.
   application instead of raising an error within the context of Miri (and halting
   execution). Note that code might not expect these operations to ever panic, so this
   flag can lead to strange (mis)behavior.
+* `-Zmiri-retag-fields` changes Stacked Borrows retagging to recurse into fields.
+  This means that references in fields of structs/enums/tuples/arrays/... are retagged,
+  and in particular, they are protected when passed as function arguments.
 * `-Zmiri-track-alloc-id=<id1>,<id2>,...` shows a backtrace when the given allocations are
   being allocated or freed. This helps in debugging memory leaks and
   use after free bugs. Specifying this argument multiple times does not overwrite the previous
diff --git a/rust-version b/rust-version
index 4f24d29e34b..323af716826 100644
--- a/rust-version
+++ b/rust-version
@@ -1 +1 @@
-493c960a3e6cdd2e2fbe8b6ea130fadea05f1ab0
+ddcbba036aee08f0709f98a92a342a278eae5c05
diff --git a/src/bin/miri.rs b/src/bin/miri.rs
index 1ccc54d6be1..4f00e4be18a 100644
--- a/src/bin/miri.rs
+++ b/src/bin/miri.rs
@@ -383,6 +383,8 @@ fn main() {
             miri_config.provenance_mode = ProvenanceMode::Permissive;
         } else if arg == "-Zmiri-mute-stdout-stderr" {
            miri_config.mute_stdout_stderr = true;
+        } else if arg == "-Zmiri-retag-fields" {
+            miri_config.retag_fields = true;
         } else if arg == "-Zmiri-track-raw-pointers" {
             eprintln!(
                 "WARNING: `-Zmiri-track-raw-pointers` has no effect; it is enabled by default"
diff --git a/src/eval.rs b/src/eval.rs
index 12f1f52c78a..c9fc05500a3 100644
--- a/src/eval.rs
+++ b/src/eval.rs
@@ -124,6 +124,8 @@ pub struct MiriConfig {
     pub preemption_rate: f64,
     /// Report the current instruction being executed every N basic blocks.
     pub report_progress: Option<u32>,
+    /// Whether Stacked Borrows retagging should recurse into fields of datatypes.
+    pub retag_fields: bool,
 }
 
 impl Default for MiriConfig {
@@ -154,6 +156,7 @@ fn default() -> MiriConfig {
             mute_stdout_stderr: false,
             preemption_rate: 0.01, // 1%
             report_progress: None,
+            retag_fields: false,
         }
     }
 }
diff --git a/src/machine.rs b/src/machine.rs
index 5b184754589..4aeb42d3d0f 100644
--- a/src/machine.rs
+++ b/src/machine.rs
@@ -354,6 +354,7 @@ pub(crate) fn new(config: &MiriConfig, layout_cx: LayoutCx<'tcx, TyCtxt<'tcx>>)
             Some(RefCell::new(stacked_borrows::GlobalStateInner::new(
                 config.tracked_pointer_tags.clone(),
                 config.tracked_call_ids.clone(),
+                config.retag_fields,
             )))
         } else {
             None
diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs
index 66f756d7b08..3fc0eaf10c0 100644
--- a/src/stacked_borrows.rs
+++ b/src/stacked_borrows.rs
@@ -156,6 +156,8 @@ pub struct GlobalStateInner {
     tracked_pointer_tags: HashSet<SbTag>,
     /// The call ids to trace
     tracked_call_ids: HashSet<CallId>,
+    /// Whether to recurse into datatypes when searching for pointers to retag.
+    retag_fields: bool,
 }
 
 /// We need interior mutable access to the global state.
@@ -204,7 +206,11 @@ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 
 /// Utilities for initialization and ID generation
 impl GlobalStateInner {
-    pub fn new(tracked_pointer_tags: HashSet<SbTag>, tracked_call_ids: HashSet<CallId>) -> Self {
+    pub fn new(
+        tracked_pointer_tags: HashSet<SbTag>,
+        tracked_call_ids: HashSet<CallId>,
+        retag_fields: bool,
+    ) -> Self {
         GlobalStateInner {
             next_ptr_tag: SbTag(NonZeroU64::new(1).unwrap()),
             base_ptr_tags: FxHashMap::default(),
@@ -212,6 +218,7 @@ pub fn new(tracked_pointer_tags: HashSet<SbTag>, tracked_call_ids: HashSet<CallId>) -> Self {
             tracked_pointer_tags,
             tracked_call_ids,
+            retag_fields,
         }
     }
@@ ... @@ fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> {
             }
         }
 
-        // We only reborrow "bare" references/boxes.
-        // Not traversing into fields helps with ,
-        // but might also cost us optimization and analyses. We will have to experiment more with this.
+        // We need a visitor to visit all references. However, that requires
+        // a `MPlaceTy` (or `OpTy`), so we have a fast path for reference types that
+        // avoids allocating.
+
         if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) {
             // Fast path.
             let val = this.read_immediate(&this.place_to_op(place)?)?;
             let val = this.retag_reference(&val, mutbl, protector)?;
             this.write_immediate(*val, place)?;
+            return Ok(());
         }
-        Ok(())
+        // If we don't want to recurse, we are already done.
+        if !this.machine.stacked_borrows.as_mut().unwrap().get_mut().retag_fields {
+            return Ok(());
+        }
+
+        // Skip some types that have no further structure we might care about.
+        if matches!(
+            place.layout.ty.kind(),
+            ty::RawPtr(..)
+                | ty::Ref(..)
+                | ty::Int(..)
+                | ty::Uint(..)
+                | ty::Float(..)
+                | ty::Bool
+                | ty::Char
+        ) {
+            return Ok(());
+        }
+        // Now go visit this thing.
+        let place = this.force_allocation(place)?;
+
+        let mut visitor = RetagVisitor { ecx: this, kind };
+        return visitor.visit_value(&place);
+
+        // The actual visitor.
+        struct RetagVisitor<'ecx, 'mir, 'tcx> {
+            ecx: &'ecx mut MiriEvalContext<'mir, 'tcx>,
+            kind: RetagKind,
+        }
+        impl<'ecx, 'mir, 'tcx> MutValueVisitor<'mir, 'tcx, Evaluator<'mir, 'tcx>>
+            for RetagVisitor<'ecx, 'mir, 'tcx>
+        {
+            type V = MPlaceTy<'tcx, Tag>;
+
+            #[inline(always)]
+            fn ecx(&mut self) -> &mut MiriEvalContext<'mir, 'tcx> {
+                &mut self.ecx
+            }
+
+            fn visit_value(&mut self, place: &MPlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
+                if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) {
+                    let val = self.ecx.read_immediate(&place.into())?;
+                    let val = self.ecx.retag_reference(&val, mutbl, protector)?;
+                    self.ecx.write_immediate(*val, &(*place).into())?;
+                } else {
+                    // Maybe we need to go deeper.
+                    self.walk_value(place)?;
+                }
+                Ok(())
+            }
+        }
     }
 
     /// After a stack frame got pushed, retag the return place so that we are sure
diff --git a/tests/fail/stacked_borrows/newtype_retagging.rs b/tests/fail/stacked_borrows/newtype_retagging.rs
new file mode 100644
index 00000000000..8f932f08086
--- /dev/null
+++ b/tests/fail/stacked_borrows/newtype_retagging.rs
@@ -0,0 +1,19 @@
+// compile-flags: -Zmiri-retag-fields
+// error-pattern: incompatible item is protected
+struct Newtype<'a>(&'a mut i32);
+
+fn dealloc_while_running(_n: Newtype<'_>, dealloc: impl FnOnce()) {
+    dealloc();
+}
+
+// Make sure that we protect references inside structs.
+fn main() {
+    let ptr = Box::into_raw(Box::new(0i32));
+    #[rustfmt::skip] // I like my newlines
+    unsafe {
+        dealloc_while_running(
+            Newtype(&mut *ptr),
+            || drop(Box::from_raw(ptr)),
+        )
+    };
+}
diff --git a/tests/fail/stacked_borrows/newtype_retagging.stderr b/tests/fail/stacked_borrows/newtype_retagging.stderr
new file mode 100644
index 00000000000..f65231b661f
--- /dev/null
+++ b/tests/fail/stacked_borrows/newtype_retagging.stderr
@@ -0,0 +1,50 @@
+error: Undefined Behavior: not granting access to tag <TAG> because incompatible item is protected: [Unique for <TAG> (call ID)]
+  --> RUSTLIB/alloc/src/boxed.rs:LL:CC
+   |
+LL |         Box(unsafe { Unique::new_unchecked(raw) }, alloc)
+   |         ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not granting access to tag <TAG> because incompatible item is protected: [Unique for <TAG> (call ID)]
+   |
+   = help: this indicates a potential bug in the program: it performed an invalid operation, but the Stacked Borrows rules it violated are still experimental
+   = help: see https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md for further information
+help: <TAG> was created by a retag at offsets [0x0..0x4]
+  --> $DIR/newtype_retagging.rs:LL:CC
+   |
+LL |     let ptr = Box::into_raw(Box::new(0i32));
+   |               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: <TAG> was protected due to <TAG> which was created here
+  --> $DIR/newtype_retagging.rs:LL:CC
+   |
+LL |             Newtype(&mut *ptr),
+   |             ^^^^^^^^^^^^^^^^^^
+help: this protector is live for this call
+  --> $DIR/newtype_retagging.rs:LL:CC
+   |
+LL | / fn dealloc_while_running(_n: Newtype<'_>, dealloc: impl FnOnce()) {
+LL | |     dealloc();
+LL | | }
+   | |_^
+   = note: inside `std::boxed::Box::<i32>::from_raw_in` at RUSTLIB/alloc/src/boxed.rs:LL:CC
+   = note: inside `std::boxed::Box::<i32>::from_raw` at RUSTLIB/alloc/src/boxed.rs:LL:CC
+note: inside closure at $DIR/newtype_retagging.rs:LL:CC
+  --> $DIR/newtype_retagging.rs:LL:CC
+   |
+LL |             || drop(Box::from_raw(ptr)),
+   |                     ^^^^^^^^^^^^^^^^^^
+note: inside `dealloc_while_running::<[closure@$DIR/newtype_retagging.rs:LL:CC]>` at $DIR/newtype_retagging.rs:LL:CC
+  --> $DIR/newtype_retagging.rs:LL:CC
+   |
+LL |     dealloc();
+   |     ^^^^^^^^^
+note: inside `main` at $DIR/newtype_retagging.rs:LL:CC
+  --> $DIR/newtype_retagging.rs:LL:CC
+   |
+LL | /         dealloc_while_running(
+LL | |             Newtype(&mut *ptr),
+LL | |             || drop(Box::from_raw(ptr)),
+LL | |         )
+   | |_________^
+
+note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace
+
+error: aborting due to previous error
+
diff --git a/tests/pass/stacked-borrows/interior_mutability.rs b/tests/pass/stacked-borrows/interior_mutability.rs
index 79958ab5539..96ad67505a7 100644
--- a/tests/pass/stacked-borrows/interior_mutability.rs
+++ b/tests/pass/stacked-borrows/interior_mutability.rs
@@ -1,4 +1,5 @@
-use std::cell::{Cell, RefCell, UnsafeCell};
+// compile-flags: -Zmiri-retag-fields
+use std::cell::{Cell, Ref, RefCell, RefMut, UnsafeCell};
 use std::mem::{self, MaybeUninit};
 
 fn main() {
@@ -8,6 +9,10 @@ fn main() {
     unsafe_cell_2phase();
     unsafe_cell_deallocate();
     unsafe_cell_invalidate();
+    refcell_basic();
+    ref_protector();
+    ref_mut_protector();
+    rust_issue_68303();
 }
 
 fn aliasing_mut_and_shr() {
@@ -99,3 +104,72 @@ fn f(_x: &UnsafeCell<i32>, y: *mut i32) {
     // So using raw1 invalidates raw2.
     f(unsafe { mem::transmute(raw2) }, raw1);
 }
+
+fn refcell_basic() {
+    let c = RefCell::new(42);
+    {
+        let s1 = c.borrow();
+        let _x: i32 = *s1;
+        let s2 = c.borrow();
+        let _x: i32 = *s1;
+        let _y: i32 = *s2;
+        let _x: i32 = *s1;
+        let _y: i32 = *s2;
+    }
+    {
+        let mut m = c.borrow_mut();
+        let _z: i32 = *m;
+        {
+            let s: &i32 = &*m;
+            let _x = *s;
+        }
+        *m = 23;
+        let _z: i32 = *m;
+    }
+    {
+        let s1 = c.borrow();
+        let _x: i32 = *s1;
+        let s2 = c.borrow();
+        let _x: i32 = *s1;
+        let _y: i32 = *s2;
+        let _x: i32 = *s1;
+        let _y: i32 = *s2;
+    }
+}
+
+// Adding a Stacked Borrows protector for `Ref` would break this
+fn ref_protector() {
+    fn break_it(rc: &RefCell<i32>, r: Ref<'_, i32>) {
+        // `r` has a shared reference, it is passed in as argument and hence
+        // a protector is added that marks this memory as read-only for the entire
+        // duration of this function.
+        drop(r);
+        // *oops* here we can mutate that memory.
+        *rc.borrow_mut() = 2;
+    }
+
+    let rc = RefCell::new(0);
+    break_it(&rc, rc.borrow())
+}
+
+fn ref_mut_protector() {
+    fn break_it(rc: &RefCell<i32>, r: RefMut<'_, i32>) {
+        // `r` has a shared reference, it is passed in as argument and hence
+        // a protector is added that marks this memory as inaccessible for the entire
+        // duration of this function
+        drop(r);
+        // *oops* here we can mutate that memory.
+        *rc.borrow_mut() = 2;
+    }
+
+    let rc = RefCell::new(0);
+    break_it(&rc, rc.borrow_mut())
+}
+
+/// Make sure we do not have bad enum layout optimizations.
+fn rust_issue_68303() {
+    let optional = Some(RefCell::new(false));
+    let mut handle = optional.as_ref().unwrap().borrow_mut();
+    assert!(optional.is_some());
+    *handle = true;
+}
diff --git a/tests/pass/stacked-borrows/refcell.rs b/tests/pass/stacked-borrows/refcell.rs
deleted file mode 100644
index ecb6f48d30b..00000000000
--- a/tests/pass/stacked-borrows/refcell.rs
+++ /dev/null
@@ -1,77 +0,0 @@
-use std::cell::{Ref, RefCell, RefMut};
-
-fn main() {
-    basic();
-    ref_protector();
-    ref_mut_protector();
-    rust_issue_68303();
-}
-
-fn basic() {
-    let c = RefCell::new(42);
-    {
-        let s1 = c.borrow();
-        let _x: i32 = *s1;
-        let s2 = c.borrow();
-        let _x: i32 = *s1;
-        let _y: i32 = *s2;
-        let _x: i32 = *s1;
-        let _y: i32 = *s2;
-    }
-    {
-        let mut m = c.borrow_mut();
-        let _z: i32 = *m;
-        {
-            let s: &i32 = &*m;
-            let _x = *s;
-        }
-        *m = 23;
-        let _z: i32 = *m;
-    }
-    {
-        let s1 = c.borrow();
-        let _x: i32 = *s1;
-        let s2 = c.borrow();
-        let _x: i32 = *s1;
-        let _y: i32 = *s2;
-        let _x: i32 = *s1;
-        let _y: i32 = *s2;
-    }
-}
-
-// Adding a Stacked Borrows protector for `Ref` would break this
-fn ref_protector() {
-    fn break_it(rc: &RefCell<i32>, r: Ref<'_, i32>) {
-        // `r` has a shared reference, it is passed in as argument and hence
-        // a protector is added that marks this memory as read-only for the entire
-        // duration of this function.
-        drop(r);
-        // *oops* here we can mutate that memory.
-        *rc.borrow_mut() = 2;
-    }
-
-    let rc = RefCell::new(0);
-    break_it(&rc, rc.borrow())
-}
-
-fn ref_mut_protector() {
-    fn break_it(rc: &RefCell<i32>, r: RefMut<'_, i32>) {
-        // `r` has a shared reference, it is passed in as argument and hence
-        // a protector is added that marks this memory as inaccessible for the entire
-        // duration of this function
-        drop(r);
-        // *oops* here we can mutate that memory.
-        *rc.borrow_mut() = 2;
-    }
-
-    let rc = RefCell::new(0);
-    break_it(&rc, rc.borrow_mut())
-}
-
-/// Make sure we do not have bad enum layout optimizations.
-fn rust_issue_68303() {
-    let optional = Some(RefCell::new(false));
-    let mut handle = optional.as_ref().unwrap().borrow_mut();
-    assert!(optional.is_some());
-    *handle = true;
-}
diff --git a/tests/pass/stacked-borrows/stacked-borrows.rs b/tests/pass/stacked-borrows/stacked-borrows.rs
index eb0ff167eb1..3669a08a1bc 100644
--- a/tests/pass/stacked-borrows/stacked-borrows.rs
+++ b/tests/pass/stacked-borrows/stacked-borrows.rs
@@ -1,3 +1,4 @@
+// compile-flags: -Zmiri-retag-fields
 use std::ptr;
 
 // Test various stacked-borrows-related things.
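As a usage sketch (illustrative only, not part of the diff; the `Wrapper` and `use_and_free` names are made up), the behavior the new flag enables mirrors the `newtype_retagging` test above: with `-Zmiri-retag-fields`, the reference stored in the struct field is retagged and protected for the duration of the call, so deallocating it from inside the closure is reported as undefined behavior; without the flag, retagging does not look into the field and Miri accepts the program.

// Hypothetical example; run with: MIRIFLAGS=-Zmiri-retag-fields cargo miri run
struct Wrapper<'a>(&'a mut i32); // the reference sits in a field, not passed "bare"

fn use_and_free(_w: Wrapper<'_>, free: impl FnOnce()) {
    // Under -Zmiri-retag-fields, `_w.0` received a protector at the call site,
    // so freeing its memory here violates Stacked Borrows and Miri reports UB.
    free();
}

fn main() {
    let ptr = Box::into_raw(Box::new(0i32));
    unsafe {
        use_and_free(Wrapper(&mut *ptr), || drop(Box::from_raw(ptr)));
    }
}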