Auto merge of #54254 - RalfJung:miri-dangling, r=eddyb
miri engine: keep around some information for dead allocations We use it to test if a dangling ptr is aligned and non-NULL. This makes some code pass that should pass (writing a ZST to a properly aligned dangling pointer), and makes some code fail that should fail (writing a ZST to a pointer obtained via pointer arithmetic from a real location, but out-of-bounds -- that pointer could be NULL, so we cannot allow writing to it). CTFE does not allow these operations; tests are added to miri with https://github.com/solson/miri/pull/453.
This commit is contained in:
commit
0b0d2edf79
@ -51,6 +51,11 @@ pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
|
||||
/// a static creates a copy here, in the machine.
|
||||
alloc_map: FxHashMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation)>,
|
||||
|
||||
/// To be able to compare pointers with NULL, and to check alignment for accesses
|
||||
/// to ZSTs (where pointers may dangle), we keep track of the size even for allocations
|
||||
/// that do not exist any more.
|
||||
dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
|
||||
|
||||
pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
|
||||
}
|
||||
|
||||
@ -74,6 +79,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
|
||||
Memory {
|
||||
data,
|
||||
alloc_map: FxHashMap::default(),
|
||||
dead_alloc_map: FxHashMap::default(),
|
||||
tcx,
|
||||
}
|
||||
}
|
||||
@ -150,6 +156,8 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
|
||||
size_and_align: Option<(Size, Align)>,
|
||||
kind: MemoryKind<M::MemoryKinds>,
|
||||
) -> EvalResult<'tcx> {
|
||||
debug!("deallocating: {}", ptr.alloc_id);
|
||||
|
||||
if ptr.offset.bytes() != 0 {
|
||||
return err!(DeallocateNonBasePtr);
|
||||
}
|
||||
@ -189,23 +197,41 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
|
||||
}
|
||||
}
|
||||
|
||||
debug!("deallocated : {}", ptr.alloc_id);
|
||||
// Don't forget to remember size and align of this now-dead allocation
|
||||
let old = self.dead_alloc_map.insert(
|
||||
ptr.alloc_id,
|
||||
(Size::from_bytes(alloc.bytes.len() as u64), alloc.align)
|
||||
);
|
||||
if old.is_some() {
|
||||
bug!("Nothing can be deallocated twice");
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check that the pointer is aligned AND non-NULL. This supports scalars
|
||||
/// for the benefit of other parts of miri that need to check alignment even for ZST.
|
||||
/// Check that the pointer is aligned AND non-NULL. This supports ZSTs in two ways:
|
||||
/// You can pass a scalar, and a `Pointer` does not have to actually still be allocated.
|
||||
pub fn check_align(&self, ptr: Scalar, required_align: Align) -> EvalResult<'tcx> {
|
||||
// Check non-NULL/Undef, extract offset
|
||||
let (offset, alloc_align) = match ptr {
|
||||
Scalar::Ptr(ptr) => {
|
||||
let alloc = self.get(ptr.alloc_id)?;
|
||||
(ptr.offset.bytes(), alloc.align)
|
||||
let (size, align) = self.get_size_and_align(ptr.alloc_id)?;
|
||||
// check this is not NULL -- which we can ensure only if this is in-bounds
|
||||
// of some (potentially dead) allocation.
|
||||
if ptr.offset > size {
|
||||
return err!(PointerOutOfBounds {
|
||||
ptr,
|
||||
access: true,
|
||||
allocation_size: size,
|
||||
});
|
||||
};
|
||||
// keep data for alignment check
|
||||
(ptr.offset.bytes(), align)
|
||||
}
|
||||
Scalar::Bits { bits, size } => {
|
||||
assert_eq!(size as u64, self.pointer_size().bytes());
|
||||
assert!(bits < (1u128 << self.pointer_size().bits()));
|
||||
// check this is not NULL
|
||||
if bits == 0 {
|
||||
return err!(InvalidNullPointerUsage);
|
||||
}
|
||||
@ -306,6 +332,21 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Obtain the size and alignment of the allocation `id` refers to.
///
/// Unlike `get`, this also works for allocations that have already been
/// deallocated: their size/align is retained in `dead_alloc_map` (see the
/// field's doc: this is what lets us check alignment and non-NULL-ness for
/// ZST accesses through dangling pointers).
///
/// Errors other than `DanglingPointerDeref` (e.g. for a function pointer
/// allocation, which has no byte contents) are propagated to the caller.
pub fn get_size_and_align(&self, id: AllocId) -> EvalResult<'tcx, (Size, Align)> {
    Ok(match self.get(id) {
        // Live allocation: measure its byte contents directly.
        Ok(alloc) => (Size::from_bytes(alloc.bytes.len() as u64), alloc.align),
        Err(err) => match err.kind {
            EvalErrorKind::DanglingPointerDeref =>
                // This should be in the dead allocation map
                // (deallocation recorded it there; a miss is a bug, hence `expect`).
                *self.dead_alloc_map.get(&id).expect(
                    "allocation missing in dead_alloc_map"
                ),
            // E.g. a function ptr allocation
            _ => return Err(err)
        }
    })
}
|
||||
|
||||
pub fn get_mut(
|
||||
&mut self,
|
||||
id: AllocId,
|
||||
|
@ -219,7 +219,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
|
||||
}
|
||||
let (ptr, ptr_align) = mplace.to_scalar_ptr_align();
|
||||
|
||||
if mplace.layout.size.bytes() == 0 {
|
||||
if mplace.layout.is_zst() {
|
||||
// Not all ZSTs have a layout we would handle below, so just short-circuit them
|
||||
// all here.
|
||||
self.memory.check_align(ptr, ptr_align)?;
|
||||
@ -376,14 +376,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
|
||||
})
|
||||
}
|
||||
|
||||
// Take an operand, representing a pointer, and dereference it -- that
|
||||
// Take an operand, representing a pointer, and dereference it to a place -- that
|
||||
// will always be a MemPlace.
|
||||
pub(super) fn deref_operand(
|
||||
&self,
|
||||
src: OpTy<'tcx>,
|
||||
) -> EvalResult<'tcx, MPlaceTy<'tcx>> {
|
||||
let val = self.read_value(src)?;
|
||||
trace!("deref to {} on {:?}", val.layout.ty, val);
|
||||
trace!("deref to {} on {:?}", val.layout.ty, *val);
|
||||
Ok(self.ref_to_mplace(val)?)
|
||||
}
|
||||
|
||||
|
@ -612,7 +612,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
|
||||
// wrong type.
|
||||
|
||||
// Nothing to do for ZSTs, other than checking alignment
|
||||
if dest.layout.size.bytes() == 0 {
|
||||
if dest.layout.is_zst() {
|
||||
self.memory.check_align(ptr, ptr_align)?;
|
||||
return Ok(());
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user