avoid into_pointer_or_addr and into_parts in visit_freeze_sensitive

This commit is contained in:
Ralf Jung 2022-04-18 12:38:26 -04:00
parent ec1dc749a3
commit f1023fbdc9
4 changed files with 19 additions and 25 deletions

View File

@@ -271,8 +271,6 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
     /// Visits the memory covered by `place`, sensitive to freezing: the 2nd parameter
     /// of `action` will be true if this is frozen, false if this is in an `UnsafeCell`.
     /// The range is relative to `place`.
-    ///
-    /// Assumes that the `place` has a proper pointer in it.
     fn visit_freeze_sensitive(
         &self,
         place: &MPlaceTy<'tcx, Tag>,
@@ -290,33 +288,30 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
         // Store how far we proceeded into the place so far. Everything to the left of
         // this offset has already been handled, in the sense that the frozen parts
         // have had `action` called on them.
-        let ptr = place.ptr.into_pointer_or_addr().unwrap();
-        let start_offset = ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
-        let mut cur_offset = start_offset;
+        let start_addr = place.ptr.addr();
+        let mut cur_addr = start_addr;
         // Called when we detected an `UnsafeCell` at the given offset and size.
         // Calls `action` and advances `cur_ptr`.
-        let mut unsafe_cell_action = |unsafe_cell_ptr: Pointer<Option<Tag>>,
+        let mut unsafe_cell_action = |unsafe_cell_ptr: &Pointer<Option<Tag>>,
                                       unsafe_cell_size: Size| {
-            let unsafe_cell_ptr = unsafe_cell_ptr.into_pointer_or_addr().unwrap();
-            debug_assert_eq!(unsafe_cell_ptr.provenance, ptr.provenance);
             // We assume that we are given the fields in increasing offset order,
             // and nothing else changes.
-            let unsafe_cell_offset = unsafe_cell_ptr.into_parts().1 as Size; // we just compare offsets, the abs. value never matters
-            assert!(unsafe_cell_offset >= cur_offset);
-            let frozen_size = unsafe_cell_offset - cur_offset;
+            let unsafe_cell_addr = unsafe_cell_ptr.addr();
+            assert!(unsafe_cell_addr >= cur_addr);
+            let frozen_size = unsafe_cell_addr - cur_addr;
             // Everything between the cur_ptr and this `UnsafeCell` is frozen.
             if frozen_size != Size::ZERO {
-                action(alloc_range(cur_offset - start_offset, frozen_size), /*frozen*/ true)?;
+                action(alloc_range(cur_addr - start_addr, frozen_size), /*frozen*/ true)?;
             }
-            cur_offset += frozen_size;
+            cur_addr += frozen_size;
             // This `UnsafeCell` is NOT frozen.
             if unsafe_cell_size != Size::ZERO {
                 action(
-                    alloc_range(cur_offset - start_offset, unsafe_cell_size),
+                    alloc_range(cur_addr - start_addr, unsafe_cell_size),
                     /*frozen*/ false,
                 )?;
             }
-            cur_offset += unsafe_cell_size;
+            cur_addr += unsafe_cell_size;
             // Done
             Ok(())
         };
@@ -334,7 +329,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
                         .unwrap_or_else(|| place.layout.size);
                     // Now handle this `UnsafeCell`, unless it is empty.
                     if unsafe_cell_size != Size::ZERO {
-                        unsafe_cell_action(place.ptr, unsafe_cell_size)
+                        unsafe_cell_action(&place.ptr, unsafe_cell_size)
                     } else {
                         Ok(())
                     }
@@ -344,7 +339,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
         }
         // The part between the end_ptr and the end of the place is also frozen.
         // So pretend there is a 0-sized `UnsafeCell` at the end.
-        unsafe_cell_action(place.ptr.wrapping_offset(size, this), Size::ZERO)?;
+        unsafe_cell_action(&place.ptr.offset(size, this)?, Size::ZERO)?;
         // Done!
         return Ok(());
@@ -428,9 +423,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
                     let mut places =
                         fields.collect::<InterpResult<'tcx, Vec<MPlaceTy<'tcx, Tag>>>>()?;
                     // we just compare offsets, the abs. value never matters
-                    places.sort_by_key(|place| {
-                        place.ptr.into_pointer_or_addr().unwrap().into_parts().1 as Size
-                    });
+                    places.sort_by_key(|place| place.ptr.addr());
                     self.walk_aggregate(place, places.into_iter().map(Ok))
                 }
                 FieldsShape::Union { .. } | FieldsShape::Primitive => {
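
The rewritten logic never inspects provenance here: fields are sorted and visited in increasing address order, a cursor of absolute addresses is advanced, and `action` receives ranges relative to the start of the place, with a pretend zero-sized `UnsafeCell` at the end so the trailing bytes are reported as frozen too. Below is a minimal standalone sketch of that bookkeeping, using plain u64 addresses and an invented recording callback in place of Miri's `Pointer`, `Size`, `alloc_range`, and `action`; it illustrates the idea under those assumptions and is not the Miri code itself.

use std::ops::Range;

fn main() {
    // Hypothetical absolute addresses; only differences between them ever matter.
    let start_addr: u64 = 0x1000;
    let place_size: u64 = 0x20;
    // (absolute address, size) of each `UnsafeCell`, in increasing address order.
    let unsafe_cells = [(0x1008u64, 4u64), (0x1010, 8)];

    let mut cur_addr = start_addr;
    let mut ranges: Vec<(Range<u64>, bool)> = Vec::new();
    // Stand-in for `action`: records (range relative to the place, frozen?).
    let mut action = |range: Range<u64>, frozen: bool| ranges.push((range, frozen));

    let mut unsafe_cell_action = |cell_addr: u64, cell_size: u64| {
        // Cells must be visited in increasing address order.
        assert!(cell_addr >= cur_addr);
        let frozen_size = cell_addr - cur_addr;
        // Everything between the cursor and this `UnsafeCell` is frozen.
        if frozen_size != 0 {
            action((cur_addr - start_addr)..(cell_addr - start_addr), true);
        }
        cur_addr += frozen_size;
        // The `UnsafeCell` itself is NOT frozen.
        if cell_size != 0 {
            action((cur_addr - start_addr)..(cur_addr - start_addr + cell_size), false);
        }
        cur_addr += cell_size;
    };

    for &(addr, size) in unsafe_cells.iter() {
        unsafe_cell_action(addr, size);
    }
    // Pretend there is a 0-sized `UnsafeCell` at the end, so the tail counts as frozen.
    unsafe_cell_action(start_addr + place_size, 0);

    // Prints [(0..8, true), (8..c, false), (c..10, true), (10..18, false), (18..20, true)]
    println!("{:x?}", ranges);
}
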
@@ -777,6 +770,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
     /// Mark a machine allocation that was just created as immutable.
     fn mark_immutable(&mut self, mplace: &MemPlace<Tag>) {
         let this = self.eval_context_mut();
+        // This got just allocated, so there definitely is a pointer here.
         this.alloc_mark_immutable(mplace.ptr.into_pointer_or_addr().unwrap().provenance.alloc_id)
             .unwrap();
     }

View File

@@ -128,7 +128,7 @@ impl<'mir, 'tcx> GlobalStateInner {
     /// Convert a relative (tcx) pointer to an absolute address.
     pub fn rel_ptr_to_addr(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<AllocId>) -> u64 {
-        let (alloc_id, offset) = ptr.into_parts(); // offset is relative
+        let (alloc_id, offset) = ptr.into_parts(); // offset is relative (AllocId provenance)
         let base_addr = GlobalStateInner::alloc_base_addr(ecx, alloc_id);
         // Add offset with the right kind of pointer-overflowing arithmetic.
@@ -137,7 +137,7 @@ impl<'mir, 'tcx> GlobalStateInner {
     }
 
     pub fn abs_ptr_to_rel(ecx: &MiriEvalContext<'mir, 'tcx>, ptr: Pointer<Tag>) -> Size {
-        let (tag, addr) = ptr.into_parts(); // addr is absolute
+        let (tag, addr) = ptr.into_parts(); // addr is absolute (Tag provenance)
         let base_addr = GlobalStateInner::alloc_base_addr(ecx, tag.alloc_id);
         // Wrapping "addr - base_addr"
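
These two comments now spell out which flavor of `into_parts` each function deals with: one side takes relative pointers with `AllocId` provenance, the other absolute pointers with `Tag` provenance. The conversion itself is just base-address arithmetic in both directions. Here is a rough standalone model of that round trip, using a plain HashMap of made-up base addresses in place of `GlobalStateInner::alloc_base_addr`; the signatures are invented for illustration.

use std::collections::HashMap;

/// Relative (alloc_id, offset) -> absolute address, with wrapping arithmetic.
fn rel_ptr_to_addr(base_addrs: &HashMap<u64, u64>, alloc_id: u64, offset: u64) -> u64 {
    let base_addr = base_addrs[&alloc_id];
    // Add the relative offset with pointer-style wrapping arithmetic.
    base_addr.wrapping_add(offset)
}

/// Absolute address -> offset inside the allocation, wrapping "addr - base_addr".
fn abs_ptr_to_rel(base_addrs: &HashMap<u64, u64>, alloc_id: u64, addr: u64) -> u64 {
    let base_addr = base_addrs[&alloc_id];
    addr.wrapping_sub(base_addr)
}

fn main() {
    // Made-up base address for allocation 1.
    let base_addrs = HashMap::from([(1u64, 0x4000u64)]);
    let abs = rel_ptr_to_addr(&base_addrs, 1, 0x10);
    assert_eq!(abs, 0x4010);
    assert_eq!(abs_ptr_to_rel(&base_addrs, 1, abs), 0x10);
    println!("abs = {:#x}", abs);
}
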

View File

@@ -360,6 +360,7 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
             name: &str,
             ptr: Pointer<Option<Tag>>,
         ) {
+            // This got just allocated, so there definitely is a pointer here.
             let ptr = ptr.into_pointer_or_addr().unwrap();
             this.machine.extern_statics.try_insert(Symbol::intern(name), ptr).unwrap();
         }
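
The new comment explains why the `unwrap` is sound: the pointer was just produced by an allocation, so it necessarily carries provenance, and `try_insert(..).unwrap()` additionally insists that each extern static is registered only once. A hypothetical sketch of that register-exactly-once pattern, with a plain HashMap and u64 addresses instead of Miri's `extern_statics` map and tagged pointers:

use std::collections::HashMap;

/// Hypothetical stand-in for registering an extern static's address exactly once.
fn add_extern_static(statics: &mut HashMap<String, u64>, name: &str, ptr: u64) {
    // Registering the same name twice would be a bug, so fail loudly.
    let prev = statics.insert(name.to_string(), ptr);
    assert!(prev.is_none(), "extern static {} registered twice", name);
}

fn main() {
    let mut statics = HashMap::new();
    add_extern_static(&mut statics, "environ", 0x5000);
    println!("{:?}", statics);
}
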

View File

@@ -83,10 +83,9 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx>
         }
         let ptr = this.read_pointer(ptr_op)?;
-        if let Ok(ptr) = ptr.into_pointer_or_addr() {
+        if let Ok((alloc_id, _offset, _)) = this.ptr_try_get_alloc_id(ptr) {
             // Only do anything if we can identify the allocation this goes to.
-            let (_, cur_align) =
-                this.get_alloc_size_and_align(ptr.provenance.alloc_id, AllocCheck::MaybeDead)?;
+            let (_, cur_align) = this.get_alloc_size_and_align(alloc_id, AllocCheck::MaybeDead)?;
             if cur_align.bytes() >= req_align {
                 // If the allocation alignment is at least the required alignment we use the
                 // real implementation.
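
The replacement first asks whether the pointer resolves to an allocation at all, and only then compares that allocation's alignment with the requested one; everything else stays on the fallback path. A simplified model of that decision, with an invented `AllocInfo` type standing in for what `ptr_try_get_alloc_id` plus `get_alloc_size_and_align` provide in the real code:

/// Invented stand-in for the per-allocation data Miri looks up.
struct AllocInfo {
    align_bytes: u64,
}

/// Returns true if the real `align_offset` implementation may be used: the
/// pointer must resolve to a known allocation whose alignment already
/// satisfies the requested one.
fn can_use_real_align_offset(alloc: Option<&AllocInfo>, req_align: u64) -> bool {
    match alloc {
        // Only do anything if we can identify the allocation this goes to.
        Some(info) => info.align_bytes >= req_align,
        None => false,
    }
}

fn main() {
    let alloc = AllocInfo { align_bytes: 8 };
    assert!(can_use_real_align_offset(Some(&alloc), 4));
    assert!(!can_use_real_align_offset(Some(&alloc), 16));
    assert!(!can_use_real_align_offset(None, 1));
    println!("alignment checks behave as expected");
}
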