Auto merge of #114483 - RalfJung:unsized-fields, r=oli-obk

interpret: fix projecting into an unsized field of a local

See the new Miri test case (condensed below), which did not pass before this change.

r? `@oli-obk`
bors · 2023-08-30 11:55:48 +00:00 · commit 26089ba0a2
17 changed files with 374 additions and 213 deletions
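For context, here is a rough, condensed sketch of the kind of program this fix is about, lifted almost verbatim from the new Miri test in the final hunk of this diff (the test there is the authoritative version; only the top-level layout differs). It projects into an unsized field of an unsized local, which the interpreter previously rejected:

```rust
#![feature(unsized_fn_params, custom_mir, core_intrinsics)]
use std::intrinsics::mir::*;

// Struct whose last field is unsized, so `S<[u8]>` is itself unsized.
pub struct S<T: ?Sized>(T);

#[custom_mir(dialect = "runtime", phase = "optimized")]
fn f(x: S<[u8]>) {
    mir! {
        {
            let idx = 0;
            // Project to an unsized field of an unsized local.
            x.0[idx] = 0;
            let _val = x.0[idx];
            Return()
        }
    }
}

fn main() {
    // `*x` is passed by value as an unsized argument, so the callee local `x` is unsized.
    let x: Box<S<[u8]>> = Box::new(S([0]));
    f(*x);
}
```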


@@ -384,7 +384,7 @@ const_eval_unreachable_unwind =
 const_eval_unsigned_offset_from_overflow =
     `ptr_offset_from_unsigned` called when first pointer has smaller offset than second: {$a_offset} < {$b_offset}
+const_eval_unsized_local = unsized locals are not supported
 const_eval_unstable_const_fn = `{$def_path}` is not yet stable as a const fn
 const_eval_unstable_in_stable =


@@ -61,6 +61,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
         &ret.clone().into(),
         StackPopCleanup::Root { cleanup: false },
     )?;
+    ecx.storage_live_for_always_live_locals()?;

     // The main interpreter loop.
     while ecx.step()? {}


@@ -795,6 +795,7 @@ fn diagnostic_message(&self) -> DiagnosticMessage {
         use crate::fluent_generated::*;
         match self {
             UnsupportedOpInfo::Unsupported(s) => s.clone().into(),
+            UnsupportedOpInfo::UnsizedLocal => const_eval_unsized_local,
             UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite,
             UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy,
             UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int,
@@ -814,7 +815,7 @@ fn add_args<G: EmissionGuarantee>(self, _: &Handler, builder: &mut DiagnosticBui
             // `ReadPointerAsInt(Some(info))` is never printed anyway, it only serves as an error to
             // be further processed by validity checking which then turns it into something nice to
             // print. So it's not worth the effort of having diagnostics that can print the `info`.
-            Unsupported(_) | ReadPointerAsInt(_) => {}
+            UnsizedLocal | Unsupported(_) | ReadPointerAsInt(_) => {}
             OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => {
                 builder.set_arg("ptr", ptr);
             }


@@ -158,7 +158,8 @@ pub enum StackPopCleanup {
 #[derive(Clone, Debug)]
 pub struct LocalState<'tcx, Prov: Provenance = AllocId> {
     pub value: LocalValue<Prov>,
-    /// Don't modify if `Some`, this is only used to prevent computing the layout twice
+    /// Don't modify if `Some`, this is only used to prevent computing the layout twice.
+    /// Avoids computing the layout of locals that are never actually initialized.
     pub layout: Cell<Option<TyAndLayout<'tcx>>>,
 }
@@ -177,7 +178,7 @@ pub enum LocalValue<Prov: Provenance = AllocId> {
 impl<'tcx, Prov: Provenance + 'static> LocalState<'tcx, Prov> {
     /// Read the local's value or error if the local is not yet live or not live anymore.
-    #[inline]
+    #[inline(always)]
     pub fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
         match &self.value {
             LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
@@ -190,7 +191,7 @@ pub fn access(&self) -> InterpResult<'tcx, &Operand<Prov>> {
     ///
     /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from
     /// anywhere else. You may be invalidating machine invariants if you do!
-    #[inline]
+    #[inline(always)]
     pub fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand<Prov>> {
         match &mut self.value {
             LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"?
@@ -483,7 +484,7 @@ pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::Provenance, M::FrameExt
     }
     #[inline(always)]
-    pub(super) fn body(&self) -> &'mir mir::Body<'tcx> {
+    pub fn body(&self) -> &'mir mir::Body<'tcx> {
         self.frame().body
     }
@@ -705,15 +706,15 @@ pub fn push_stack_frame(
         return_to_block: StackPopCleanup,
     ) -> InterpResult<'tcx> {
         trace!("body: {:#?}", body);
+        let dead_local = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
+        let locals = IndexVec::from_elem(dead_local, &body.local_decls);
         // First push a stack frame so we have access to the local args
         let pre_frame = Frame {
             body,
             loc: Right(body.span), // Span used for errors caused during preamble.
             return_to_block,
             return_place: return_place.clone(),
-            // empty local array, we fill it in below, after we are inside the stack frame and
-            // all methods actually know about the frame
-            locals: IndexVec::new(),
+            locals,
             instance,
             tracing_span: SpanGuard::new(),
             extra: (),
@@ -728,19 +729,7 @@ pub fn push_stack_frame(
             self.eval_mir_constant(&ct, Some(span), None)?;
         }

-        // Most locals are initially dead.
-        let dummy = LocalState { value: LocalValue::Dead, layout: Cell::new(None) };
-        let mut locals = IndexVec::from_elem(dummy, &body.local_decls);
-
-        // Now mark those locals as live that have no `Storage*` annotations.
-        let always_live = always_storage_live_locals(self.body());
-        for local in locals.indices() {
-            if always_live.contains(local) {
-                locals[local].value = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
-            }
-        }
         // done
-        self.frame_mut().locals = locals;
         M::after_stack_push(self)?;
         self.frame_mut().loc = Left(mir::Location::START);
@@ -907,12 +896,96 @@ pub(super) fn pop_stack_frame(&mut self, unwinding: bool) -> InterpResult<'tcx>
         }
     }
-    /// Mark a storage as live, killing the previous content.
-    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
-        assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
+    /// In the current stack frame, mark all locals as live that are not arguments and don't have
+    /// `Storage*` annotations (this includes the return place).
+    pub fn storage_live_for_always_live_locals(&mut self) -> InterpResult<'tcx> {
+        self.storage_live(mir::RETURN_PLACE)?;
+
+        let body = self.body();
+        let always_live = always_storage_live_locals(body);
+        for local in body.vars_and_temps_iter() {
+            if always_live.contains(local) {
+                self.storage_live(local)?;
+            }
+        }
+        Ok(())
+    }
+
+    pub fn storage_live_dyn(
+        &mut self,
+        local: mir::Local,
+        meta: MemPlaceMeta<M::Provenance>,
+    ) -> InterpResult<'tcx> {
         trace!("{:?} is now live", local);
-        let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
+        // We avoid `ty.is_trivially_sized` since that (a) cannot assume WF, so it recurses through
+        // all fields of a tuple, and (b) does something expensive for ADTs.
+        fn is_very_trivially_sized(ty: Ty<'_>) -> bool {
+            match ty.kind() {
+                ty::Infer(ty::IntVar(_) | ty::FloatVar(_))
+                | ty::Uint(_)
+                | ty::Int(_)
+                | ty::Bool
+                | ty::Float(_)
+                | ty::FnDef(..)
+                | ty::FnPtr(_)
+                | ty::RawPtr(..)
+                | ty::Char
+                | ty::Ref(..)
+                | ty::Generator(..)
+                | ty::GeneratorWitness(..)
+                | ty::GeneratorWitnessMIR(..)
+                | ty::Array(..)
+                | ty::Closure(..)
+                | ty::Never
+                | ty::Error(_) => true,
+
+                ty::Str | ty::Slice(_) | ty::Dynamic(..) | ty::Foreign(..) => false,
+
+                ty::Tuple(tys) => tys.last().iter().all(|ty| is_very_trivially_sized(**ty)),
+
+                // We don't want to do any queries, so there is not much we can do with ADTs.
+                ty::Adt(..) => false,
+
+                ty::Alias(..) | ty::Param(_) | ty::Placeholder(..) => false,
+
+                ty::Infer(ty::TyVar(_)) => false,
+
+                ty::Bound(..)
+                | ty::Infer(ty::FreshTy(_) | ty::FreshIntTy(_) | ty::FreshFloatTy(_)) => {
+                    bug!("`is_very_trivially_sized` applied to unexpected type: {:?}", ty)
+                }
+            }
+        }
+
+        // This is a hot function, we avoid computing the layout when possible.
+        // `unsized_` will be `None` for sized types and `Some(layout)` for unsized types.
+        let unsized_ = if is_very_trivially_sized(self.body().local_decls[local].ty) {
+            None
+        } else {
+            // We need the layout.
+            let layout = self.layout_of_local(self.frame(), local, None)?;
+            if layout.is_sized() { None } else { Some(layout) }
+        };
+
+        let local_val = LocalValue::Live(if let Some(layout) = unsized_ {
+            if !meta.has_meta() {
+                throw_unsup!(UnsizedLocal);
+            }
+            // Need to allocate some memory, since `Immediate::Uninit` cannot be unsized.
+            let dest_place = self.allocate_dyn(layout, MemoryKind::Stack, meta)?;
+            Operand::Indirect(*dest_place)
+        } else {
+            assert!(!meta.has_meta()); // we're dropping the metadata
+            // Just make this an efficient immediate.
+            // Note that not calling `layout_of` here does have one real consequence:
+            // if the type is too big, we'll only notice this when the local is actually initialized,
+            // which is a bit too late -- we should ideally notice this already here, when the memory
+            // is conceptually allocated. But given how rare that error is and that this is a hot function,
+            // we accept this downside for now.
+            Operand::Immediate(Immediate::Uninit)
+        });
+
         // StorageLive expects the local to be dead, and marks it live.
         let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val);
         if !matches!(old, LocalValue::Dead) {
@@ -921,6 +994,12 @@ pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
         Ok(())
     }
+    /// Mark a storage as live, killing the previous content.
+    #[inline(always)]
+    pub fn storage_live(&mut self, local: mir::Local) -> InterpResult<'tcx> {
+        self.storage_live_dyn(local, MemPlaceMeta::None)
+    }
+
     pub fn storage_dead(&mut self, local: mir::Local) -> InterpResult<'tcx> {
         assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
         trace!("{:?} is now dead", local);


@@ -33,7 +33,7 @@ pub enum Immediate<Prov: Provenance = AllocId> {
     /// A pair of two scalar value (must have `ScalarPair` ABI where both fields are
     /// `Scalar::Initialized`).
     ScalarPair(Scalar<Prov>, Scalar<Prov>),
-    /// A value of fully uninitialized memory. Can have arbitrary size and layout.
+    /// A value of fully uninitialized memory. Can have arbitrary size and layout, but must be sized.
     Uninit,
 }
@@ -190,16 +190,19 @@ fn from(val: ImmTy<'tcx, Prov>) -> Self {
 impl<'tcx, Prov: Provenance> ImmTy<'tcx, Prov> {
     #[inline]
     pub fn from_scalar(val: Scalar<Prov>, layout: TyAndLayout<'tcx>) -> Self {
+        debug_assert!(layout.abi.is_scalar(), "`ImmTy::from_scalar` on non-scalar layout");
         ImmTy { imm: val.into(), layout }
     }
-    #[inline]
+    #[inline(always)]
     pub fn from_immediate(imm: Immediate<Prov>, layout: TyAndLayout<'tcx>) -> Self {
+        debug_assert!(layout.is_sized(), "immediates must be sized");
         ImmTy { imm, layout }
     }
     #[inline]
     pub fn uninit(layout: TyAndLayout<'tcx>) -> Self {
+        debug_assert!(layout.is_sized(), "immediates must be sized");
         ImmTy { imm: Immediate::Uninit, layout }
     }
@@ -291,23 +294,21 @@ fn layout(&self) -> TyAndLayout<'tcx> {
         self.layout
     }
-    fn meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
-        &self,
-        _ecx: &InterpCx<'mir, 'tcx, M>,
-    ) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>> {
-        assert!(self.layout.is_sized()); // unsized ImmTy can only exist temporarily and should never reach this here
-        Ok(MemPlaceMeta::None)
+    #[inline(always)]
+    fn meta(&self) -> MemPlaceMeta<Prov> {
+        debug_assert!(self.layout.is_sized()); // unsized ImmTy can only exist temporarily and should never reach this here
+        MemPlaceMeta::None
     }
-    fn offset_with_meta(
+    fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         offset: Size,
         meta: MemPlaceMeta<Prov>,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self> {
         assert_matches!(meta, MemPlaceMeta::None); // we can't store this anywhere anyway
-        Ok(self.offset_(offset, layout, cx))
+        Ok(self.offset_(offset, layout, ecx))
     }
     fn to_op<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
@@ -318,49 +319,37 @@ fn to_op<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
     }
 }
-impl<'tcx, Prov: Provenance> OpTy<'tcx, Prov> {
-    // Provided as inherent method since it doesn't need the `ecx` of `Projectable::meta`.
-    pub fn meta(&self) -> InterpResult<'tcx, MemPlaceMeta<Prov>> {
-        Ok(if self.layout.is_unsized() {
-            if matches!(self.op, Operand::Immediate(_)) {
-                // Unsized immediate OpTy cannot occur. We create a MemPlace for all unsized locals during argument passing.
-                // However, ConstProp doesn't do that, so we can run into this nonsense situation.
-                throw_inval!(ConstPropNonsense);
-            }
-            // There are no unsized immediates.
-            self.assert_mem_place().meta
-        } else {
-            MemPlaceMeta::None
-        })
-    }
-}
 impl<'tcx, Prov: Provenance + 'static> Projectable<'tcx, Prov> for OpTy<'tcx, Prov> {
     #[inline(always)]
     fn layout(&self) -> TyAndLayout<'tcx> {
         self.layout
     }
-    fn meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
-        &self,
-        _ecx: &InterpCx<'mir, 'tcx, M>,
-    ) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>> {
-        self.meta()
+    #[inline]
+    fn meta(&self) -> MemPlaceMeta<Prov> {
+        match self.as_mplace_or_imm() {
+            Left(mplace) => mplace.meta,
+            Right(_) => {
+                debug_assert!(self.layout.is_sized(), "unsized immediates are not a thing");
+                MemPlaceMeta::None
+            }
+        }
     }
-    fn offset_with_meta(
+    fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         offset: Size,
         meta: MemPlaceMeta<Prov>,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self> {
         match self.as_mplace_or_imm() {
-            Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, cx)?.into()),
+            Left(mplace) => Ok(mplace.offset_with_meta(offset, meta, layout, ecx)?.into()),
             Right(imm) => {
-                assert!(!meta.has_meta()); // no place to store metadata here
+                debug_assert!(layout.is_sized(), "unsized immediates are not a thing");
+                assert_matches!(meta, MemPlaceMeta::None); // no place to store metadata here
                 // Every part of an uninit is uninit.
-                Ok(imm.offset(offset, layout, cx)?.into())
+                Ok(imm.offset_(offset, layout, ecx).into())
             }
         }
     }
@@ -588,6 +577,13 @@ pub fn local_to_op(
     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
         let layout = self.layout_of_local(frame, local, layout)?;
         let op = *frame.locals[local].access()?;
+        if matches!(op, Operand::Immediate(_)) {
+            if layout.is_unsized() {
+                // ConstProp marks *all* locals as `Immediate::Uninit` since it cannot
+                // efficiently check whether they are sized. We have to catch that case here.
+                throw_inval!(ConstPropNonsense);
+            }
+        }
         Ok(OpTy { op, layout, align: Some(layout.align.abi) })
     }
@@ -601,16 +597,15 @@ pub fn place_to_op(
         match place.as_mplace_or_local() {
             Left(mplace) => Ok(mplace.into()),
             Right((frame, local, offset)) => {
+                debug_assert!(place.layout.is_sized()); // only sized locals can ever be `Place::Local`.
                 let base = self.local_to_op(&self.stack()[frame], local, None)?;
-                let mut field = if let Some(offset) = offset {
-                    // This got offset. We can be sure that the field is sized.
-                    base.offset(offset, place.layout, self)?
-                } else {
-                    assert_eq!(place.layout, base.layout);
-                    // Unsized cases are possible here since an unsized local will be a base
-                    // `Place::Local` until the first projection calls `place_to_op` to extract the
-                    // underlying mplace.
-                    base
-                };
+                let mut field = match offset {
+                    Some(offset) => base.offset(offset, place.layout, self)?,
+                    None => {
+                        // In the common case this hasn't been projected.
+                        debug_assert_eq!(place.layout, base.layout);
+                        base
+                    }
+                };
                 field.align = Some(place.align);
                 Ok(field)


@@ -13,7 +13,7 @@
 use rustc_middle::ty;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_middle::ty::Ty;
-use rustc_target::abi::{self, Abi, Align, FieldIdx, HasDataLayout, Size, FIRST_VARIANT};
+use rustc_target::abi::{Abi, Align, FieldIdx, HasDataLayout, Size, FIRST_VARIANT};
 use super::{
     alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg,
@@ -41,33 +41,13 @@ pub fn unwrap_meta(self) -> Scalar<Prov> {
         }
     }
+    #[inline(always)]
     pub fn has_meta(self) -> bool {
         match self {
             Self::Meta(_) => true,
             Self::None => false,
         }
     }
-
-    pub(crate) fn len<'tcx>(
-        &self,
-        layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
-    ) -> InterpResult<'tcx, u64> {
-        if layout.is_unsized() {
-            // We need to consult `meta` metadata
-            match layout.ty.kind() {
-                ty::Slice(..) | ty::Str => self.unwrap_meta().to_target_usize(cx),
-                _ => bug!("len not supported on unsized type {:?}", layout.ty),
-            }
-        } else {
-            // Go through the layout. There are lots of types that support a length,
-            // e.g., SIMD types. (But not all repr(simd) types even have FieldsShape::Array!)
-            match layout.fields {
-                abi::FieldsShape::Array { count, .. } => Ok(count),
-                _ => bug!("len not supported on sized type {:?}", layout.ty),
-            }
-        }
-    }
 }
 #[derive(Copy, Clone, Hash, PartialEq, Eq, Debug)]
@@ -111,6 +91,8 @@ pub enum Place<Prov: Provenance = AllocId> {
     /// (Without that optimization, we'd just always be a `MemPlace`.)
     /// Note that this only stores the frame index, not the thread this frame belongs to -- that is
     /// implicit. This means a `Place` must never be moved across interpreter thread boundaries!
+    ///
+    /// This variant shall not be used for unsized types -- those must always live in memory.
     Local { frame: usize, local: mir::Local, offset: Option<Size> },
 }
@@ -157,7 +139,7 @@ pub fn map_provenance(self, f: impl FnOnce(Option<Prov>) -> Option<Prov>) -> Sel
     }
     /// Turn a mplace into a (thin or wide) pointer, as a reference, pointing to the same space.
-    #[inline(always)]
+    #[inline]
     pub fn to_ref(self, cx: &impl HasDataLayout) -> Immediate<Prov> {
         match self.meta {
             MemPlaceMeta::None => Immediate::from(Scalar::from_maybe_pointer(self.ptr, cx)),
@@ -220,22 +202,20 @@ fn layout(&self) -> TyAndLayout<'tcx> {
         self.layout
     }
-    fn meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
-        &self,
-        _ecx: &InterpCx<'mir, 'tcx, M>,
-    ) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>> {
-        Ok(self.meta)
+    #[inline(always)]
+    fn meta(&self) -> MemPlaceMeta<Prov> {
+        self.meta
     }
-    fn offset_with_meta(
+    fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         offset: Size,
         meta: MemPlaceMeta<Prov>,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self> {
         Ok(MPlaceTy {
-            mplace: self.mplace.offset_with_meta_(offset, meta, cx)?,
+            mplace: self.mplace.offset_with_meta_(offset, meta, ecx)?,
             align: self.align.restrict_for_offset(offset),
             layout,
         })
@@ -255,25 +235,30 @@ fn layout(&self) -> TyAndLayout<'tcx> {
         self.layout
     }
-    fn meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
-        &self,
-        ecx: &InterpCx<'mir, 'tcx, M>,
-    ) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>> {
-        ecx.place_meta(self)
+    #[inline]
+    fn meta(&self) -> MemPlaceMeta<Prov> {
+        match self.as_mplace_or_local() {
+            Left(mplace) => mplace.meta,
+            Right(_) => {
+                debug_assert!(self.layout.is_sized(), "unsized locals should live in memory");
+                MemPlaceMeta::None
+            }
+        }
     }
-    fn offset_with_meta(
+    fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         offset: Size,
         meta: MemPlaceMeta<Prov>,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self> {
         Ok(match self.as_mplace_or_local() {
-            Left(mplace) => mplace.offset_with_meta(offset, meta, layout, cx)?.into(),
+            Left(mplace) => mplace.offset_with_meta(offset, meta, layout, ecx)?.into(),
             Right((frame, local, old_offset)) => {
+                debug_assert!(layout.is_sized(), "unsized locals should live in memory");
                 assert_matches!(meta, MemPlaceMeta::None); // we couldn't store it anyway...
-                let new_offset = cx
+                let new_offset = ecx
                     .data_layout()
                     .offset(old_offset.unwrap_or(Size::ZERO).bytes(), offset.bytes())?;
                 PlaceTy {
@@ -323,7 +308,7 @@ pub fn assert_mem_place(&self) -> MPlaceTy<'tcx, Prov> {
 impl<'tcx, Prov: Provenance + 'static> PlaceTy<'tcx, Prov> {
     /// A place is either an mplace or some local.
-    #[inline]
+    #[inline(always)]
     pub fn as_mplace_or_local(
         &self,
     ) -> Either<MPlaceTy<'tcx, Prov>, (usize, mir::Local, Option<Size>)> {
@@ -399,20 +384,6 @@ impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
     Prov: Provenance + 'static,
     M: Machine<'mir, 'tcx, Provenance = Prov>,
 {
-    /// Get the metadata of the given place.
-    pub(super) fn place_meta(
-        &self,
-        place: &PlaceTy<'tcx, M::Provenance>,
-    ) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>> {
-        if place.layout.is_unsized() {
-            // For `Place::Local`, the metadata is stored with the local, not the place. So we have
-            // to look that up first.
-            self.place_to_op(place)?.meta()
-        } else {
-            Ok(MemPlaceMeta::None)
-        }
-    }
     /// Take a value, which represents a (thin or wide) reference, and make it a place.
     /// Alignment is just based on the type. This is the inverse of `mplace_to_ref()`.
     ///
@@ -537,8 +508,24 @@ pub fn local_to_place(
         frame: usize,
         local: mir::Local,
     ) -> InterpResult<'tcx, PlaceTy<'tcx, M::Provenance>> {
-        let layout = self.layout_of_local(&self.stack()[frame], local, None)?;
-        let place = Place::Local { frame, local, offset: None };
+        // Other parts of the system rely on `Place::Local` never being unsized.
+        // So we eagerly check here if this local has an MPlace, and if yes we use it.
+        let frame_ref = &self.stack()[frame];
+        let layout = self.layout_of_local(frame_ref, local, None)?;
+        let place = if layout.is_sized() {
+            // We can just always use the `Local` for sized values.
+            Place::Local { frame, local, offset: None }
+        } else {
+            // Unsized `Local` isn't okay (we cannot store the metadata).
+            match frame_ref.locals[local].access()? {
+                Operand::Immediate(_) => {
+                    // ConstProp marks *all* locals as `Immediate::Uninit` since it cannot
+                    // efficiently check whether they are sized. We have to catch that case here.
+                    throw_inval!(ConstPropNonsense);
+                }
+                Operand::Indirect(mplace) => Place::Ptr(*mplace),
+            }
+        };
         Ok(PlaceTy { place, layout, align: layout.align.abi })
     }
@@ -896,9 +883,7 @@ pub fn force_allocation(
                     // that has different alignment than the outer field.
                     let local_layout =
                         self.layout_of_local(&self.stack()[frame], local, None)?;
-                    if local_layout.is_unsized() {
-                        throw_unsup_format!("unsized locals are not supported");
-                    }
+                    assert!(local_layout.is_sized(), "unsized locals cannot be immediate");
                     let mplace = self.allocate(local_layout, MemoryKind::Stack)?;
                     // Preserve old value. (As an optimization, we can skip this if it was uninit.)
                     if !matches!(local_val, Immediate::Uninit) {


@@ -7,12 +7,13 @@
 //! but we still need to do bounds checking and adjust the layout. To not duplicate that with MPlaceTy, we actually
 //! implement the logic on OpTy, and MPlaceTy calls that.
+use std::marker::PhantomData;
+use std::ops::Range;
+
 use rustc_middle::mir;
 use rustc_middle::ty;
 use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
 use rustc_middle::ty::Ty;
-use rustc_middle::ty::TyCtxt;
-use rustc_target::abi::HasDataLayout;
 use rustc_target::abi::Size;
 use rustc_target::abi::{self, VariantIdx};
@@ -24,44 +25,59 @@ pub trait Projectable<'tcx, Prov: Provenance>: Sized + std::fmt::Debug {
     fn layout(&self) -> TyAndLayout<'tcx>;
     /// Get the metadata of a wide value.
-    fn meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
-        &self,
-        ecx: &InterpCx<'mir, 'tcx, M>,
-    ) -> InterpResult<'tcx, MemPlaceMeta<M::Provenance>>;
+    fn meta(&self) -> MemPlaceMeta<Prov>;
+    /// Get the length of a slice/string/array stored here.
     fn len<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, u64> {
-        self.meta(ecx)?.len(self.layout(), ecx)
+        let layout = self.layout();
+        if layout.is_unsized() {
+            // We need to consult `meta` metadata
+            match layout.ty.kind() {
+                ty::Slice(..) | ty::Str => self.meta().unwrap_meta().to_target_usize(ecx),
+                _ => bug!("len not supported on unsized type {:?}", layout.ty),
+            }
+        } else {
+            // Go through the layout. There are lots of types that support a length,
+            // e.g., SIMD types. (But not all repr(simd) types even have FieldsShape::Array!)
+            match layout.fields {
+                abi::FieldsShape::Array { count, .. } => Ok(count),
+                _ => bug!("len not supported on sized type {:?}", layout.ty),
+            }
+        }
     }
     /// Offset the value by the given amount, replacing the layout and metadata.
-    fn offset_with_meta(
+    fn offset_with_meta<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         offset: Size,
         meta: MemPlaceMeta<Prov>,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self>;
-    fn offset(
+    #[inline]
+    fn offset<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         offset: Size,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self> {
         assert!(layout.is_sized());
-        self.offset_with_meta(offset, MemPlaceMeta::None, layout, cx)
+        self.offset_with_meta(offset, MemPlaceMeta::None, layout, ecx)
     }
-    fn transmute(
+    #[inline]
+    fn transmute<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
         &self,
         layout: TyAndLayout<'tcx>,
-        cx: &impl HasDataLayout,
+        ecx: &InterpCx<'mir, 'tcx, M>,
     ) -> InterpResult<'tcx, Self> {
+        assert!(self.layout().is_sized() && layout.is_sized());
         assert_eq!(self.layout().size, layout.size);
-        self.offset_with_meta(Size::ZERO, MemPlaceMeta::None, layout, cx)
+        self.offset_with_meta(Size::ZERO, MemPlaceMeta::None, layout, ecx)
     }
     /// Convert this to an `OpTy`. This might be an irreversible transformation, but is useful for
@@ -72,6 +88,28 @@ fn to_op<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
     ) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>>;
 }
+/// A type representing iteration over the elements of an array.
+pub struct ArrayIterator<'tcx, 'a, Prov: Provenance + 'static, P: Projectable<'tcx, Prov>> {
+    base: &'a P,
+    range: Range<u64>,
+    stride: Size,
+    field_layout: TyAndLayout<'tcx>,
+    _phantom: PhantomData<Prov>, // otherwise it says `Prov` is never used...
+}
+
+impl<'tcx, 'a, Prov: Provenance + 'static, P: Projectable<'tcx, Prov>>
+    ArrayIterator<'tcx, 'a, Prov, P>
+{
+    /// Should be the same `ecx` on each call, and match the one used to create the iterator.
+    pub fn next<'mir, M: Machine<'mir, 'tcx, Provenance = Prov>>(
+        &mut self,
+        ecx: &InterpCx<'mir, 'tcx, M>,
+    ) -> InterpResult<'tcx, Option<(u64, P)>> {
+        let Some(idx) = self.range.next() else { return Ok(None) };
+        Ok(Some((idx, self.base.offset(self.stride * idx, self.field_layout, ecx)?)))
+    }
+}
+
 // FIXME: Working around https://github.com/rust-lang/rust/issues/54385
 impl<'mir, 'tcx: 'mir, Prov, M> InterpCx<'mir, 'tcx, M>
 where
@@ -104,7 +142,7 @@ pub fn project_field<P: Projectable<'tcx, M::Provenance>>(
             // But const-prop actually feeds us such nonsense MIR! (see test `const_prop/issue-86351.rs`)
             throw_inval!(ConstPropNonsense);
         }
-        let base_meta = base.meta(self)?;
+        let base_meta = base.meta();
         // Re-use parent metadata to determine dynamic field layout.
         // With custom DSTS, this *will* execute user-defined code, but the same
         // happens at run-time so that's okay.
@@ -132,7 +170,7 @@ pub fn project_downcast<P: Projectable<'tcx, M::Provenance>>(
         base: &P,
         variant: VariantIdx,
     ) -> InterpResult<'tcx, P> {
-        assert!(!base.meta(self)?.has_meta());
+        assert!(!base.meta().has_meta());
         // Downcasts only change the layout.
         // (In particular, no check about whether this is even the active variant -- that's by design,
         // see https://github.com/rust-lang/rust/issues/93688#issuecomment-1032929496.)
@@ -206,20 +244,13 @@ fn project_constant_index<P: Projectable<'tcx, M::Provenance>>(
     pub fn project_array_fields<'a, P: Projectable<'tcx, M::Provenance>>(
         &self,
         base: &'a P,
-    ) -> InterpResult<'tcx, impl Iterator<Item = InterpResult<'tcx, P>> + 'a>
-    where
-        'tcx: 'a,
-    {
+    ) -> InterpResult<'tcx, ArrayIterator<'tcx, 'a, M::Provenance, P>> {
         let abi::FieldsShape::Array { stride, .. } = base.layout().fields else {
             span_bug!(self.cur_span(), "operand_array_fields: expected an array layout");
         };
         let len = base.len(self)?;
         let field_layout = base.layout().field(self, 0);
-        let tcx: TyCtxt<'tcx> = *self.tcx;
-        // `Size` multiplication
-        Ok((0..len).map(move |i| {
-            base.offset_with_meta(stride * i, MemPlaceMeta::None, field_layout, &tcx)
-        }))
+        Ok(ArrayIterator { base, range: 0..len, stride, field_layout, _phantom: PhantomData })
     }
     /// Subslicing


@@ -2,19 +2,20 @@
 use either::Either;
 use rustc_ast::ast::InlineAsmOptions;
+use rustc_middle::mir::ProjectionElem;
 use rustc_middle::ty::layout::{FnAbiOf, LayoutOf, TyAndLayout};
 use rustc_middle::ty::Instance;
 use rustc_middle::{
     mir,
     ty::{self, Ty},
 };
-use rustc_target::abi;
 use rustc_target::abi::call::{ArgAbi, ArgAttribute, ArgAttributes, FnAbi, PassMode};
+use rustc_target::abi::{self, FieldIdx};
 use rustc_target::spec::abi::Abi;
 use super::{
-    AllocId, FnVal, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemoryKind, OpTy,
-    Operand, PlaceTy, Provenance, Scalar, StackPopCleanup,
+    AllocId, FnVal, ImmTy, InterpCx, InterpResult, MPlaceTy, Machine, OpTy, PlaceTy, Projectable,
+    Provenance, Scalar, StackPopCleanup,
 };
 use crate::fluent_generated as fluent;
@@ -358,23 +359,28 @@ fn pass_argument<'x, 'y>(
             Item = (&'x FnArg<'tcx, M::Provenance>, &'y ArgAbi<'tcx, Ty<'tcx>>),
         >,
         callee_abi: &ArgAbi<'tcx, Ty<'tcx>>,
-        callee_arg: &PlaceTy<'tcx, M::Provenance>,
+        callee_arg: &mir::Place<'tcx>,
+        callee_ty: Ty<'tcx>,
+        already_live: bool,
     ) -> InterpResult<'tcx>
     where
        'tcx: 'x,
        'tcx: 'y,
    {
        if matches!(callee_abi.mode, PassMode::Ignore) {
-            // This one is skipped.
+            // This one is skipped. Still must be made live though!
+            if !already_live {
+                self.storage_live(callee_arg.as_local().unwrap())?;
+            }
            return Ok(());
        }
        // Find next caller arg.
        let Some((caller_arg, caller_abi)) = caller_args.next() else {
            throw_ub_custom!(fluent::const_eval_not_enough_caller_args);
        };
-        // Now, check
+        // Check compatibility
        if !Self::check_argument_compat(caller_abi, callee_abi) {
-            let callee_ty = format!("{}", callee_arg.layout.ty);
+            let callee_ty = format!("{}", callee_ty);
            let caller_ty = format!("{}", caller_arg.layout().ty);
            throw_ub_custom!(
                fluent::const_eval_incompatible_types,
@@ -386,35 +392,22 @@ fn pass_argument<'x, 'y>(
        // will later protect the source it comes from. This means the callee cannot observe if we
        // did in-place of by-copy argument passing, except for pointer equality tests.
        let caller_arg_copy = self.copy_fn_arg(&caller_arg)?;
-        // Special handling for unsized parameters.
-        if caller_arg_copy.layout.is_unsized() {
-            // `check_argument_compat` ensures that both have the same type, so we know they will use the metadata the same way.
-            assert_eq!(caller_arg_copy.layout.ty, callee_arg.layout.ty);
-            // We have to properly pre-allocate the memory for the callee.
-            // So let's tear down some abstractions.
-            // This all has to be in memory, there are no immediate unsized values.
-            let src = caller_arg_copy.assert_mem_place();
-            // The destination cannot be one of these "spread args".
-            let (dest_frame, dest_local, dest_offset) = callee_arg
-                .as_mplace_or_local()
-                .right()
-                .expect("callee fn arguments must be locals");
-            // We are just initializing things, so there can't be anything here yet.
-            assert!(matches!(
-                *self.local_to_op(&self.stack()[dest_frame], dest_local, None)?,
-                Operand::Immediate(Immediate::Uninit)
-            ));
-            assert_eq!(dest_offset, None);
-            // Allocate enough memory to hold `src`.
-            let dest_place = self.allocate_dyn(src.layout, MemoryKind::Stack, src.meta)?;
-            // Update the local to be that new place.
-            *M::access_local_mut(self, dest_frame, dest_local)? = Operand::Indirect(*dest_place);
+        if !already_live {
+            let local = callee_arg.as_local().unwrap();
+            let meta = caller_arg_copy.meta();
+            // `check_argument_compat` ensures that if metadata is needed, both have the same type,
+            // so we know they will use the metadata the same way.
+            assert!(!meta.has_meta() || caller_arg_copy.layout.ty == callee_ty);
+
+            self.storage_live_dyn(local, meta)?;
        }
+        // Now we can finally actually evaluate the callee place.
+        let callee_arg = self.eval_place(*callee_arg)?;
        // We allow some transmutes here.
        // FIXME: Depending on the PassMode, this should reset some padding to uninitialized. (This
        // is true for all `copy_op`, but there are a lot of special cases for argument passing
        // specifically.)
-        self.copy_op(&caller_arg_copy, callee_arg, /*allow_transmute*/ true)?;
+        self.copy_op(&caller_arg_copy, &callee_arg, /*allow_transmute*/ true)?;
        // If this was an in-place pass, protect the place it comes from for the duration of the call.
        if let FnArg::InPlace(place) = caller_arg {
            M::protect_in_place_function_argument(self, place)?;
@@ -600,18 +593,47 @@ pub(crate) fn eval_fn_call(
                // not advance `caller_iter` for ZSTs.
                let mut callee_args_abis = callee_fn_abi.args.iter();
                for local in body.args_iter() {
-                    let dest = self.eval_place(mir::Place::from(local))?;
+                    // Construct the destination place for this argument. At this point all
+                    // locals are still dead, so we cannot construct a `PlaceTy`.
+                    let dest = mir::Place::from(local);
+                    // `layout_of_local` does more than just the substitution we need to get the
+                    // type, but the result gets cached so this avoids calling the substitution
+                    // query *again* the next time this local is accessed.
+                    let ty = self.layout_of_local(self.frame(), local, None)?.ty;
                    if Some(local) == body.spread_arg {
+                        // Make the local live once, then fill in the value field by field.
+                        self.storage_live(local)?;
                        // Must be a tuple
-                        for i in 0..dest.layout.fields.count() {
-                            let dest = self.project_field(&dest, i)?;
+                        let ty::Tuple(fields) = ty.kind() else {
+                            span_bug!(
+                                self.cur_span(),
+                                "non-tuple type for `spread_arg`: {ty:?}"
+                            )
+                        };
+                        for (i, field_ty) in fields.iter().enumerate() {
+                            let dest = dest.project_deeper(
+                                &[ProjectionElem::Field(FieldIdx::from_usize(i), field_ty)],
+                                *self.tcx,
+                            );
                            let callee_abi = callee_args_abis.next().unwrap();
-                            self.pass_argument(&mut caller_args, callee_abi, &dest)?;
+                            self.pass_argument(
+                                &mut caller_args,
+                                callee_abi,
+                                &dest,
+                                field_ty,
+                                /* already_live */ true,
+                            )?;
                        }
                    } else {
-                        // Normal argument
+                        // Normal argument. Cannot mark it as live yet, it might be unsized!
                        let callee_abi = callee_args_abis.next().unwrap();
-                        self.pass_argument(&mut caller_args, callee_abi, &dest)?;
+                        self.pass_argument(
+                            &mut caller_args,
+                            callee_abi,
+                            &dest,
+                            ty,
+                            /* already_live */ false,
+                        )?;
                    }
                }
                // If the callee needs a caller location, pretend we consume one more argument from the ABI.
@@ -644,6 +666,9 @@ pub(crate) fn eval_fn_call(
                    // Nothing to do for locals, they are always properly allocated and aligned.
                }
                M::protect_in_place_function_argument(self, destination)?;
+
+                // Don't forget to mark "initially live" locals as live.
+                self.storage_live_for_always_live_locals()?;
            };
            match res {
                Err(err) => {


@@ -170,8 +170,9 @@ fn walk_value(&mut self, v: &Self::V) -> InterpResult<'tcx> {
                 }
             }
             FieldsShape::Array { .. } => {
-                for (idx, field) in self.ecx().project_array_fields(v)?.enumerate() {
-                    self.visit_field(v, idx, &field?)?;
+                let mut iter = self.ecx().project_array_fields(v)?;
+                while let Some((idx, field)) = iter.next(self.ecx())? {
+                    self.visit_field(v, idx.try_into().unwrap(), &field)?;
                 }
             }
         }


@@ -415,6 +415,8 @@ pub enum UnsupportedOpInfo {
     /// Free-form case. Only for errors that are never caught!
     // FIXME still use translatable diagnostics
     Unsupported(String),
+    /// Unsized local variables.
+    UnsizedLocal,
     //
     // The variants below are only reachable from CTFE/const prop, miri will never emit them.
     //


@@ -376,6 +376,16 @@ fn new(
         )
         .expect("failed to push initial stack frame");
+        for local in body.local_decls.indices() {
+            // Mark everything initially live.
+            // This is somewhat dicey since some of them might be unsized and it is incoherent to
+            // mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
+            // stopping us before those unsized immediates can cause issues deeper in the
+            // interpreter.
+            ecx.frame_mut().locals[local].value =
+                LocalValue::Live(interpret::Operand::Immediate(Immediate::Uninit));
+        }
+
         ConstPropagator { ecx, tcx, param_env, local_decls: &dummy_body.local_decls }
     }


@@ -206,6 +206,16 @@ fn new(
         )
         .expect("failed to push initial stack frame");
+        for local in body.local_decls.indices() {
+            // Mark everything initially live.
+            // This is somewhat dicey since some of them might be unsized and it is incoherent to
+            // mark those as live... We rely on `local_to_place`/`local_to_op` in the interpreter
+            // stopping us before those unsized immediates can cause issues deeper in the
+            // interpreter.
+            ecx.frame_mut().locals[local].value =
+                LocalValue::Live(interpret::Operand::Immediate(Immediate::Uninit));
+        }
+
         ConstPropagator {
             ecx,
             tcx,


@@ -283,7 +283,7 @@ pub fn report_error<'tcx, 'mir>(
             "resource exhaustion",
             Unsupported(
                 // We list only the ones that can actually happen.
-                UnsupportedOpInfo::Unsupported(_)
+                UnsupportedOpInfo::Unsupported(_) | UnsupportedOpInfo::UnsizedLocal
             ) =>
                 "unsupported operation",
             InvalidProgram(


@@ -14,7 +14,7 @@
 use rustc_middle::ty::{
     self,
     layout::{IntegerExt as _, LayoutOf, TyAndLayout},
-    List, Ty, TyCtxt,
+    Ty, TyCtxt,
 };
 use rustc_span::{def_id::CrateNum, sym, Span, Symbol};
 use rustc_target::abi::{Align, FieldIdx, FieldsShape, Integer, Size, Variants};
@@ -282,13 +282,6 @@ fn ptr_is_null(&self, ptr: Pointer<Option<Provenance>>) -> InterpResult<'tcx, bo
         Ok(ptr.addr().bytes() == 0)
     }
-    /// Get the `Place` for a local
-    fn local_place(&self, local: mir::Local) -> InterpResult<'tcx, PlaceTy<'tcx, Provenance>> {
-        let this = self.eval_context_ref();
-        let place = mir::Place { local, projection: List::empty() };
-        this.eval_place(place)
-    }
     /// Generate some random bytes, and write them to `dest`.
     fn gen_random(&mut self, ptr: Pointer<Option<Provenance>>, len: u64) -> InterpResult<'tcx> {
         // Some programs pass in a null pointer and a length of 0
@@ -350,17 +343,21 @@ fn call_function(
         // Initialize arguments.
         let mut callee_args = this.frame().body.args_iter();
         for arg in args {
-            let callee_arg = this.local_place(
-                callee_args
-                    .next()
-                    .ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?,
-            )?;
+            let local = callee_args
+                .next()
+                .ok_or_else(|| err_ub_format!("callee has fewer arguments than expected"))?;
+            // Make the local live, and insert the initial value.
+            this.storage_live(local)?;
+            let callee_arg = this.local_to_place(this.frame_idx(), local)?;
             this.write_immediate(*arg, &callee_arg)?;
         }
         if callee_args.next().is_some() {
             throw_ub_format!("callee has more arguments than expected");
         }
+
+        // Initialize remaining locals.
+        this.storage_live_for_always_live_locals()?;
+
         Ok(())
     }


@@ -1,6 +1,6 @@
 //@ignore-32bit
 fn main() {
-    let _fat: [u8; (1 << 61) + (1 << 31)];
+    let _fat: [u8; (1 << 61) + (1 << 31)]; // ideally we'd error here, but we avoid computing the layout until absolutely necessary
     _fat = [0; (1u64 << 61) as usize + (1u64 << 31) as usize]; //~ ERROR: post-monomorphization error
 }


@@ -2,7 +2,7 @@ error: unsupported operation: unsized locals are not supported
   --> $DIR/unsized-local.rs:LL:CC
    |
 LL |     let x = *(Box::new(A) as Box<dyn Foo>);
-   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unsized locals are not supported
+   |         ^ unsized locals are not supported
    |
    = help: this is likely not a bug in the program; it indicates that the program performed an operation that the interpreter does not support
    = note: BACKTRACE:


@@ -2,6 +2,7 @@
 //@[tree]compile-flags: -Zmiri-tree-borrows
 #![feature(unsized_tuple_coercion)]
 #![feature(unsized_fn_params)]
+#![feature(custom_mir, core_intrinsics)]
 use std::mem;
@@ -32,7 +33,30 @@ pub fn f3(_p: dyn Send) {}
     f3(*p);
 }
+fn unsized_field_projection() {
+    use std::intrinsics::mir::*;
+
+    pub struct S<T: ?Sized>(T);
+
+    #[custom_mir(dialect = "runtime", phase = "optimized")]
+    fn f(x: S<[u8]>) {
+        mir! {
+            {
+                let idx = 0;
+                // Project to an unsized field of an unsized local.
+                x.0[idx] = 0;
+                let _val = x.0[idx];
+                Return()
+            }
+        }
+    }
+
+    let x: Box<S<[u8]>> = Box::new(S([0]));
+    f(*x);
+}
+
 fn main() {
     unsized_tuple();
     unsized_params();
+    unsized_field_projection();
 }