interpret: get_alloc_info: also return mutability

Ralf Jung 2024-11-09 11:13:44 +01:00
parent 62bb2ac03e
commit 30a2ae6f05
9 changed files with 34 additions and 23 deletions
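
Call sites change mechanically: `get_alloc_info` now returns a fourth tuple element carrying the allocation's mutability, and callers that don't need it bind it to `_mutbl` or drop the trailing elements with `..`. A minimal sketch of the call-site pattern (identifiers taken from the diff below; not a compilable excerpt from the commit):

    // before: size, alignment and allocation kind only
    let (size, align, kind) = ecx.get_alloc_info(alloc_id);
    // after: mutability is returned as well; ignore it where it is not needed
    let (size, align, kind, _mutbl) = ecx.get_alloc_info(alloc_id);
    // after: or drop the trailing elements entirely
    let (size, align, ..) = ecx.get_alloc_info(alloc_id);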

View File

@@ -472,7 +472,7 @@ fn report_validation_error<'tcx>(
  backtrace.print_backtrace();
  let bytes = ecx.print_alloc_bytes_for_diagnostics(alloc_id);
- let (size, align, _) = ecx.get_alloc_info(alloc_id);
+ let (size, align, ..) = ecx.get_alloc_info(alloc_id);
  let raw_bytes = errors::RawBytesNote { size: size.bytes(), align: align.bytes(), bytes };
  crate::const_eval::report(

View File

@@ -524,7 +524,7 @@ fn is_offset_misaligned(offset: u64, align: Align) -> Option<Misalignment> {
  match self.ptr_try_get_alloc_id(ptr, 0) {
  Err(addr) => is_offset_misaligned(addr, align),
  Ok((alloc_id, offset, _prov)) => {
- let (_size, alloc_align, kind) = self.get_alloc_info(alloc_id);
+ let (_size, alloc_align, kind, _mutbl) = self.get_alloc_info(alloc_id);
  if let Some(misalign) =
  M::alignment_check(self, alloc_id, alloc_align, kind, offset, align)
  {
@@ -818,19 +818,19 @@ pub fn is_alloc_live(&self, id: AllocId) -> bool {
  /// Obtain the size and alignment of an allocation, even if that allocation has
  /// been deallocated.
- pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
+ pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind, Mutability) {
  // # Regular allocations
  // Don't use `self.get_raw` here as that will
  // a) cause cycles in case `id` refers to a static
  // b) duplicate a global's allocation in miri
  if let Some((_, alloc)) = self.memory.alloc_map.get(id) {
- return (alloc.size(), alloc.align, AllocKind::LiveData);
+ return (alloc.size(), alloc.align, AllocKind::LiveData, alloc.mutability);
  }
  // # Function pointers
  // (both global from `alloc_map` and local from `extra_fn_ptr_map`)
  if self.get_fn_alloc(id).is_some() {
- return (Size::ZERO, Align::ONE, AllocKind::Function);
+ return (Size::ZERO, Align::ONE, AllocKind::Function, Mutability::Not);
  }
  // # Statics
@@ -842,17 +842,17 @@ pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
  // `ThreadLocalRef`; we can never have a pointer to them as a regular constant value.
  assert!(!self.tcx.is_thread_local_static(def_id));
- let DefKind::Static { nested, .. } = self.tcx.def_kind(def_id) else {
+ let DefKind::Static { nested, mutability, .. } = self.tcx.def_kind(def_id) else {
  bug!("GlobalAlloc::Static is not a static")
  };
- let (size, align) = if nested {
+ let (size, align, mutability) = if nested {
  // Nested anonymous statics are untyped, so let's get their
  // size and alignment from the allocation itself. This always
  // succeeds, as the query is fed at DefId creation time, so no
  // evaluation actually occurs.
  let alloc = self.tcx.eval_static_initializer(def_id).unwrap();
- (alloc.0.size(), alloc.0.align)
+ (alloc.0.size(), alloc.0.align, alloc.0.mutability)
  } else {
  // Use size and align of the type for everything else. We need
  // to do that to
@@ -865,22 +865,33 @@ pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
  .expect("statics should not have generic parameters");
  let layout = self.tcx.layout_of(ParamEnv::empty().and(ty)).unwrap();
  assert!(layout.is_sized());
- (layout.size, layout.align.abi)
+ let mutability = match mutability {
+     Mutability::Not if !ty.is_freeze(*self.tcx, ParamEnv::empty()) => {
+         Mutability::Mut
+     }
+     mutability => mutability,
+ };
+ (layout.size, layout.align.abi, mutability)
  };
- (size, align, AllocKind::LiveData)
+ (size, align, AllocKind::LiveData, mutability)
  }
  Some(GlobalAlloc::Memory(alloc)) => {
  // Need to duplicate the logic here, because the global allocations have
  // different associated types than the interpreter-local ones.
  let alloc = alloc.inner();
- (alloc.size(), alloc.align, AllocKind::LiveData)
+ (alloc.size(), alloc.align, AllocKind::LiveData, alloc.mutability)
  }
  Some(GlobalAlloc::Function { .. }) => {
  bug!("We already checked function pointers above")
  }
  Some(GlobalAlloc::VTable(..)) => {
  // No data to be accessed here. But vtables are pointer-aligned.
- return (Size::ZERO, self.tcx.data_layout.pointer_align.abi, AllocKind::VTable);
+ return (
+     Size::ZERO,
+     self.tcx.data_layout.pointer_align.abi,
+     AllocKind::VTable,
+     Mutability::Not,
+ );
  }
  // The rest must be dead.
  None => {
@@ -891,7 +902,7 @@ pub fn get_alloc_info(&self, id: AllocId) -> (Size, Align, AllocKind) {
  .dead_alloc_map
  .get(&id)
  .expect("deallocated pointers should all be recorded in `dead_alloc_map`");
- (size, align, AllocKind::Dead)
+ (size, align, AllocKind::Dead, Mutability::Not)
  }
  }
  }
@@ -902,7 +913,7 @@ fn get_live_alloc_size_and_align(
  id: AllocId,
  msg: CheckInAllocMsg,
  ) -> InterpResult<'tcx, (Size, Align)> {
- let (size, align, kind) = self.get_alloc_info(id);
+ let (size, align, kind, _mutbl) = self.get_alloc_info(id);
  if matches!(kind, AllocKind::Dead) {
  throw_ub!(PointerUseAfterFree(id, msg))
  }
@@ -1458,7 +1469,7 @@ pub fn scalar_may_be_null(&self, scalar: Scalar<M::Provenance>) -> InterpResult<
  let ptr = scalar.to_pointer(self)?;
  match self.ptr_try_get_alloc_id(ptr, 0) {
  Ok((alloc_id, offset, _)) => {
- let (size, _align, _kind) = self.get_alloc_info(alloc_id);
+ let (size, _align, _kind, _mutbl) = self.get_alloc_info(alloc_id);
  // If the pointer is out-of-bounds, it may be null.
  // Note that one-past-the-end (offset == size) is still inbounds, and never null.
  offset > size
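
Taken together, the hunks above determine the mutability that `get_alloc_info` reports for each allocation kind: function pointers, vtables and dead allocations are always `Mutability::Not`; live interpreter-local allocations, nested statics and `GlobalAlloc::Memory` report the allocation's own `mutability`; and a non-nested `static` is reported mutable if it is declared `static mut` or its type has interior mutability (is not `Freeze`). A standalone sketch of that last rule, with a hypothetical helper and a local `Mutability` enum standing in for the compiler's types:

    // Hypothetical helper mirroring how the hunk above derives a static's mutability.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Mutability { Not, Mut }

    fn static_alloc_mutability(declared: Mutability, ty_is_freeze: bool) -> Mutability {
        match declared {
            // Declared immutable but with interior mutability: writable at runtime.
            Mutability::Not if !ty_is_freeze => Mutability::Mut,
            declared => declared,
        }
    }

    fn main() {
        assert_eq!(static_alloc_mutability(Mutability::Not, true), Mutability::Not); // plain `static X: i32`
        assert_eq!(static_alloc_mutability(Mutability::Not, false), Mutability::Mut); // e.g. `static X: AtomicU32`
        assert_eq!(static_alloc_mutability(Mutability::Mut, true), Mutability::Mut); // `static mut X: i32`
    }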

View File

@@ -594,7 +594,7 @@ fn check_safe_pointer(
  }
  // Dangling and Mutability check.
- let (size, _align, alloc_kind) = self.ecx.get_alloc_info(alloc_id);
+ let (size, _align, alloc_kind, _mutbl) = self.ecx.get_alloc_info(alloc_id);
  if alloc_kind == AllocKind::Dead {
  // This can happen for zero-sized references. We can't have *any* references to
  // non-existing allocations in const-eval though, interning rejects them all as

View File

@@ -157,7 +157,7 @@ fn addr_from_alloc_id_uncached(
  ) -> InterpResult<'tcx, u64> {
  let ecx = self.eval_context_ref();
  let mut rng = ecx.machine.rng.borrow_mut();
- let (size, align, kind) = ecx.get_alloc_info(alloc_id);
+ let (size, align, kind, _mutbl) = ecx.get_alloc_info(alloc_id);
  // This is either called immediately after allocation (and then cached), or when
  // adjusting `tcx` pointers (which never get freed). So assert that we are looking
  // at a live allocation. This also ensures that we never re-assign an address to an

View File

@@ -363,7 +363,7 @@ fn on_stack_pop(
  // If it does exist, then we have the guarantee that the
  // pointer is readable, and the implicit read access inserted
  // will never cause UB on the pointer itself.
- let (_, _, kind) = this.get_alloc_info(*alloc_id);
+ let (_, _, kind, _mutbl) = this.get_alloc_info(*alloc_id);
  if matches!(kind, AllocKind::LiveData) {
  let alloc_extra = this.get_alloc_extra(*alloc_id)?; // can still fail for `extern static`
  let alloc_borrow_tracker = &alloc_extra.borrow_tracker.as_ref().unwrap();

View File

@@ -626,7 +626,7 @@ fn sb_reborrow(
  return interp_ok(())
  };
- let (_size, _align, alloc_kind) = this.get_alloc_info(alloc_id);
+ let (_size, _align, alloc_kind, _mutbl) = this.get_alloc_info(alloc_id);
  match alloc_kind {
  AllocKind::LiveData => {
  // This should have alloc_extra data, but `get_alloc_extra` can still fail
@@ -1017,7 +1017,7 @@ fn sb_expose_tag(&mut self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx
  // Function pointers and dead objects don't have an alloc_extra so we ignore them.
  // This is okay because accessing them is UB anyway, no need for any Stacked Borrows checks.
  // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
- let (_size, _align, kind) = this.get_alloc_info(alloc_id);
+ let (_size, _align, kind, _mutbl) = this.get_alloc_info(alloc_id);
  match kind {
  AllocKind::LiveData => {
  // This should have alloc_extra data, but `get_alloc_extra` can still fail

View File

@@ -538,7 +538,7 @@ fn tb_expose_tag(&mut self, alloc_id: AllocId, tag: BorTag) -> InterpResult<'tcx
  // Function pointers and dead objects don't have an alloc_extra so we ignore them.
  // This is okay because accessing them is UB anyway, no need for any Tree Borrows checks.
  // NOT using `get_alloc_extra_mut` since this might be a read-only allocation!
- let (_size, _align, kind) = this.get_alloc_info(alloc_id);
+ let (_size, _align, kind, _mutbl) = this.get_alloc_info(alloc_id);
  match kind {
  AllocKind::LiveData => {
  // This should have alloc_extra data, but `get_alloc_extra` can still fail

View File

@@ -1125,7 +1125,7 @@ fn extern_static_pointer(
  let Provenance::Concrete { alloc_id, .. } = ptr.provenance else {
  panic!("extern_statics cannot contain wildcards")
  };
- let (shim_size, shim_align, _kind) = ecx.get_alloc_info(alloc_id);
+ let (shim_size, shim_align, _kind, _mutbl) = ecx.get_alloc_info(alloc_id);
  let def_ty = ecx.tcx.type_of(def_id).instantiate_identity();
  let extern_decl_layout = ecx.tcx.layout_of(ty::ParamEnv::empty().and(def_ty)).unwrap();
  if extern_decl_layout.size != shim_size || extern_decl_layout.align.abi != shim_align {

View File

@@ -409,7 +409,7 @@ fn emulate_foreign_item_inner(
  );
  }
  if let Ok((alloc_id, offset, ..)) = this.ptr_try_get_alloc_id(ptr, 0) {
- let (_size, alloc_align, _kind) = this.get_alloc_info(alloc_id);
+ let (_size, alloc_align, _kind, _mutbl) = this.get_alloc_info(alloc_id);
  // If the newly promised alignment is bigger than the native alignment of this
  // allocation, and bigger than the previously promised alignment, then set it.
  if align > alloc_align