Auto merge of #2071 - RalfJung:provenance, r=RalfJung
adjust for provenance cleanup

This is the Miri side of https://github.com/rust-lang/rust/pull/96165
This commit is contained in:
commit c9039de613

@@ -1 +1 @@
-c8422403f775126c40d558838d321c063554c822
+27af5175497936ea3413bef5816e7c0172514b9c
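The recurring pattern in the hunks below: call sites stop taking interpreter pointers apart by hand (`into_pointer_or_addr().unwrap()` followed by `into_parts()`) and instead ask for a plain address with `addr()`, while `ptr_get_alloc_id`/`ptr_get_alloc` now hand back the Stacked Borrows tag as a third element, so callers and the `Machine` memory hooks no longer dig it out of the pointer's provenance. The same address-versus-provenance split exists for raw pointers in std's strict-provenance API; as a rough, self-contained analogy (toy code, not part of this commit; `<*const T>::addr` was still behind the `strict_provenance` feature gate when this landed):

    fn main() {
        let data = [0u8; 16];
        let base = data.as_ptr();
        // Stay in bounds of `data`, so the offset below is well-defined.
        let field = unsafe { base.add(8) };
        // Compare plain addresses instead of decomposing the pointers
        // into (provenance, offset) pairs by hand.
        let offset = field.addr() - base.addr();
        assert_eq!(offset, 8);
    }

In the commit itself this shows up as, for example, `place.ptr.addr()` replacing `place.ptr.into_pointer_or_addr().unwrap().into_parts().1`, and `let (alloc_id, base_offset, orig_tag) = this.ptr_get_alloc_id(place.ptr)?;` replacing the two-step tag extraction.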
@@ -999,7 +999,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
         if let Some(data_race) = &this.machine.data_race {
             if data_race.multi_threaded.get() {
                 let size = place.layout.size;
-                let (alloc_id, base_offset, ptr) = this.ptr_get_alloc_id(place.ptr)?;
+                let (alloc_id, base_offset, _tag) = this.ptr_get_alloc_id(place.ptr)?;
                 // Load and log the atomic operation.
                 // Note that atomic loads are possible even from read-only allocations, so `get_alloc_extra_mut` is not an option.
                 let alloc_meta = &this.get_alloc_extra(alloc_id)?.data_race.as_ref().unwrap();
@@ -1007,7 +1007,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
                     "Atomic op({}) with ordering {:?} on {:?} (size={})",
                     description,
                     &atomic,
-                    ptr,
+                    place.ptr,
                     size.bytes()
                 );
 
@@ -1039,7 +1039,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
                 {
                     log::trace!(
                         "Updated atomic memory({:?}, size={}) to {:#?}",
-                        ptr,
+                        place.ptr,
                         size.bytes(),
                         range.atomic_ops
                     );
@@ -271,8 +271,6 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
     /// Visits the memory covered by `place`, sensitive to freezing: the 2nd parameter
     /// of `action` will be true if this is frozen, false if this is in an `UnsafeCell`.
     /// The range is relative to `place`.
-    ///
-    /// Assumes that the `place` has a proper pointer in it.
     fn visit_freeze_sensitive(
         &self,
         place: &MPlaceTy<'tcx, Tag>,
@@ -290,33 +288,30 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         // Store how far we proceeded into the place so far. Everything to the left of
         // this offset has already been handled, in the sense that the frozen parts
         // have had `action` called on them.
-        let ptr = place.ptr.into_pointer_or_addr().unwrap();
-        let start_offset = ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
-        let mut cur_offset = start_offset;
+        let start_addr = place.ptr.addr();
+        let mut cur_addr = start_addr;
         // Called when we detected an `UnsafeCell` at the given offset and size.
         // Calls `action` and advances `cur_ptr`.
-        let mut unsafe_cell_action = |unsafe_cell_ptr: Pointer<Option<Tag>>,
+        let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer<Option<Tag>>,
                                       unsafe_cell_size: Size| {
-            let unsafe_cell_ptr = unsafe_cell_ptr.into_pointer_or_addr().unwrap();
-            debug_assert_eq!(unsafe_cell_ptr.provenance, ptr.provenance);
             // We assume that we are given the fields in increasing offset order,
             // and nothing else changes.
-            let unsafe_cell_offset = unsafe_cell_ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
-            assert!(unsafe_cell_offset >= cur_offset);
-            let frozen_size = unsafe_cell_offset - cur_offset;
+            let unsafe_cell_addr = unsafe_cell_ptr.addr();
+            assert!(unsafe_cell_addr >= cur_addr);
+            let frozen_size = unsafe_cell_addr - cur_addr;
             // Everything between the cur_ptr and this `UnsafeCell` is frozen.
             if frozen_size != Size::ZERO {
-                action(alloc_range(cur_offset - start_offset, frozen_size), /*frozen*/ true)?;
+                action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
             }
-            cur_offset += frozen_size;
+            cur_addr += frozen_size;
             // This `UnsafeCell` is NOT frozen.
             if unsafe_cell_size != Size::ZERO {
                 action(
-                    alloc_range(cur_offset - start_offset, unsafe_cell_size),
+                    alloc_range(cur_addr - start_addr, unsafe_cell_size),
                     /*frozen*/ false,
                 )?;
             }
-            cur_offset += unsafe_cell_size;
+            cur_addr += unsafe_cell_size;
             // Done
             Ok(())
         };
@@ -334,7 +329,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
                         .unwrap_or_else(|| place.layout.size);
                     // Now handle this `UnsafeCell`, unless it is empty.
                     if unsafe_cell_size != Size::ZERO {
-                        unsafe_cell_action(place.ptr, unsafe_cell_size)
+                        unsafe_cell_action(&place.ptr, unsafe_cell_size)
                     } else {
                         Ok(())
                     }
@@ -344,7 +339,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         }
         // The part between the end_ptr and the end of the place is also frozen.
         // So pretend there is a 0-sized `UnsafeCell` at the end.
-        unsafe_cell_action(place.ptr.wrapping_offset(size, this), Size::ZERO)?;
+        unsafe_cell_action(&place.ptr.offset(size, this)?, Size::ZERO)?;
         // Done!
         return Ok(());
 
@@ -428,9 +423,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
                 let mut places =
                     fields.collect::<InterpResult<'tcx, Vec<MPlaceTy<'tcx, Tag>>>>()?;
                 // we just compare offsets, the abs. value never matters
-                places.sort_by_key(|place| {
-                    place.ptr.into_pointer_or_addr().unwrap().into_parts().1 as Size
-                });
+                places.sort_by_key(|place| place.ptr.addr());
                 self.walk_aggregate(place, places.into_iter().map(Ok))
             }
             FieldsShape::Union { .. } | FieldsShape::Primitive => {
@@ -777,6 +770,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
     /// Mark a machine allocation that was just created as immutable.
     fn mark_immutable(&mut self, mplace: &MemPlace<Tag>) {
         let this = self.eval_context_mut();
+        // This got just allocated, so there definitely is a pointer here.
         this.alloc_mark_immutable(mplace.ptr.into_pointer_or_addr().unwrap().provenance.alloc_id)
             .unwrap();
     }
@@ -128,7 +128,7 @@ impl<'mir, 'tcx> GlobalStateInner {
 
     /// Convert a relative (tcx) pointer to an absolute address.
     pub fn rel_ptr_to_addr(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<AllocId>) -> u64 {
-        let (alloc_id, offset) = ptr.into_parts(); // offset is relative
+        let (alloc_id, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
        let base_addr = GlobalStateInner::alloc_base_addr(ecx, alloc_id);
 
         // Add offset with the right kind of pointer-overflowing arithmetic.
@@ -137,7 +137,7 @@ impl<'mir, 'tcx> GlobalStateInner {
     }
 
     pub fn abs_ptr_to_rel(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<Tag>) -> Size {
-        let (tag, addr) = ptr.into_parts(); // addr is absolute
+        let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)
         let base_addr = GlobalStateInner::alloc_base_addr(ecx, tag.alloc_id);
 
         // Wrapping "addr - base_addr"
@@ -360,6 +360,7 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
         name: &str,
         ptr: Pointer<Option<Tag>>,
     ) {
+        // This got just allocated, so there definitely is a pointer here.
         let ptr = ptr.into_pointer_or_addr().unwrap();
         this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
     }
@@ -431,11 +432,13 @@ impl<'mir, 'tcx> MiriEvalContextExt<'mir, 'tcx> for MiriEvalContext<'mir, 'tcx>
 /// Machine hook implementations.
 impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
     type MemoryKind = MiriMemoryKind;
-    type ExtraFnVal = Dlsym;
 
     type FrameExtra = FrameData<'tcx>;
     type AllocExtra = AllocExtra;
 
     type PointerTag = Tag;
+    type ExtraFnVal = Dlsym;
+    type TagExtra = SbTag;
+
     type MemoryMap =
         MonoHashMap<AllocId, (MemoryKind<MiriMemoryKind>, Allocation<Tag, Self::AllocExtra>)>;
@@ -607,9 +610,9 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
     fn ptr_get_alloc(
         ecx: &MiriEvalContext<'mir, 'tcx>,
         ptr: Pointer<Self::PointerTag>,
-    ) -> (AllocId, Size) {
+    ) -> (AllocId, Size, Self::TagExtra) {
         let rel = intptrcast::GlobalStateInner::abs_ptr_to_rel(ecx, ptr);
-        (ptr.provenance.alloc_id, rel)
+        (ptr.provenance.alloc_id, rel, ptr.provenance.sb)
     }
 
     #[inline(always)]
@@ -617,16 +620,16 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
         _tcx: TyCtxt<'tcx>,
         machine: &Self,
         alloc_extra: &AllocExtra,
-        tag: Tag,
+        (alloc_id, tag): (AllocId, Self::TagExtra),
         range: AllocRange,
     ) -> InterpResult<'tcx> {
         if let Some(data_race) = &alloc_extra.data_race {
-            data_race.read(tag.alloc_id, range, machine.data_race.as_ref().unwrap())?;
+            data_race.read(alloc_id, range, machine.data_race.as_ref().unwrap())?;
         }
         if let Some(stacked_borrows) = &alloc_extra.stacked_borrows {
             stacked_borrows.memory_read(
-                tag.alloc_id,
-                tag.sb,
+                alloc_id,
+                tag,
                 range,
                 machine.stacked_borrows.as_ref().unwrap(),
             )
@@ -640,16 +643,16 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
         _tcx: TyCtxt<'tcx>,
         machine: &mut Self,
         alloc_extra: &mut AllocExtra,
-        tag: Tag,
+        (alloc_id, tag): (AllocId, Self::TagExtra),
         range: AllocRange,
     ) -> InterpResult<'tcx> {
         if let Some(data_race) = &mut alloc_extra.data_race {
-            data_race.write(tag.alloc_id, range, machine.data_race.as_mut().unwrap())?;
+            data_race.write(alloc_id, range, machine.data_race.as_mut().unwrap())?;
         }
         if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
             stacked_borrows.memory_written(
-                tag.alloc_id,
-                tag.sb,
+                alloc_id,
+                tag,
                 range,
                 machine.stacked_borrows.as_mut().unwrap(),
             )
@@ -663,19 +666,19 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
         _tcx: TyCtxt<'tcx>,
         machine: &mut Self,
         alloc_extra: &mut AllocExtra,
-        tag: Tag,
+        (alloc_id, tag): (AllocId, Self::TagExtra),
         range: AllocRange,
     ) -> InterpResult<'tcx> {
-        if Some(tag.alloc_id) == machine.tracked_alloc_id {
-            register_diagnostic(NonHaltingDiagnostic::FreedAlloc(tag.alloc_id));
+        if Some(alloc_id) == machine.tracked_alloc_id {
+            register_diagnostic(NonHaltingDiagnostic::FreedAlloc(alloc_id));
         }
         if let Some(data_race) = &mut alloc_extra.data_race {
-            data_race.deallocate(tag.alloc_id, range, machine.data_race.as_mut().unwrap())?;
+            data_race.deallocate(alloc_id, range, machine.data_race.as_mut().unwrap())?;
         }
         if let Some(stacked_borrows) = &mut alloc_extra.stacked_borrows {
             stacked_borrows.memory_deallocated(
-                tag.alloc_id,
-                tag.sb,
+                alloc_id,
+                tag,
                 range,
                 machine.stacked_borrows.as_mut().unwrap(),
             )
@@ -124,7 +124,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
 
         let ptr = this.read_pointer(ptr)?;
         // Take apart the pointer, we need its pieces.
-        let (alloc_id, offset, ptr) = this.ptr_get_alloc_id(ptr)?;
+        let (alloc_id, offset, _tag) = this.ptr_get_alloc_id(ptr)?;
 
         let fn_instance =
             if let Some(GlobalAlloc::Function(instance)) = this.tcx.get_global_alloc(alloc_id) {
@@ -83,10 +83,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
         }
 
         let ptr = this.read_pointer(ptr_op)?;
-        if let Ok(ptr) = ptr.into_pointer_or_addr() {
+        if let Ok((alloc_id, _offset, _)) = this.ptr_try_get_alloc_id(ptr) {
             // Only do anything if we can identify the allocation this goes to.
-            let (_, cur_align) =
-                this.get_alloc_size_and_align(ptr.provenance.alloc_id, AllocCheck::MaybeDead)?;
+            let (_, cur_align) = this.get_alloc_size_and_align(alloc_id, AllocCheck::MaybeDead)?;
             if cur_align.bytes() >= req_align {
                 // If the allocation alignment is at least the required alignment we use the
                 // real implementation.
@@ -702,8 +702,7 @@ trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
             );
             return Ok(());
         }
-        let (alloc_id, base_offset, ptr) = this.ptr_get_alloc_id(place.ptr)?;
-        let orig_tag = ptr.provenance.sb;
+        let (alloc_id, base_offset, orig_tag) = this.ptr_get_alloc_id(place.ptr)?;
 
         // Ensure we bail out if the pointer goes out-of-bounds (see miri#1050).
         let (alloc_size, _) =
tests/run-pass/issue-miri-2068.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
+#![feature(pin_macro)]
+
+use core::future::Future;
+use core::pin::Pin;
+use core::task::{Context, Poll};
+
+use std::sync::Arc;
+
+struct NopWaker;
+
+impl std::task::Wake for NopWaker {
+    fn wake(self: Arc<Self>) {}
+}
+
+pub fn fuzzing_block_on<O, F: Future<Output = O>>(fut: F) -> O {
+    let mut fut = std::pin::pin!(fut);
+    let waker = std::task::Waker::from(Arc::new(NopWaker));
+    let mut context = std::task::Context::from_waker(&waker);
+    loop {
+        match fut.as_mut().poll(&mut context) {
+            Poll::Ready(v) => return v,
+            Poll::Pending => {}
+        }
+    }
+}
+
+pub struct LastFuture<S> {
+    last: S,
+}
+
+impl<S> Future for LastFuture<S>
+where
+    Self: Unpin,
+    S: Unpin + Copy,
+{
+    type Output = S;
+
+    fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
+        return Poll::Ready(self.last);
+    }
+}
+
+fn main() {
+    fuzzing_block_on(async {
+        LastFuture { last: &0u32 }.await;
+        LastFuture { last: Option::<u32>::None }.await;
+    });
+}