From 8ef0caa23ca31ccc8ef3769a0aeb35bea15532a6 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 2 Jul 2022 16:24:42 -0400 Subject: [PATCH] interpret: remove LocalValue::Unallocated, add Operand::Uninit Operand::Uninit is an *allocated* operand that is fully uninitialized. This lets us lazily allocate the actual backing store of *all* locals (no matter their ABI). I also reordered things in pop_stack_frame at the same time. I should probably have made that a separate commit... --- .../src/const_eval/eval_queries.rs | 1 + .../rustc_const_eval/src/interpret/cast.rs | 3 +- .../src/interpret/eval_context.rs | 109 ++++++++------- .../rustc_const_eval/src/interpret/machine.rs | 19 +-- .../rustc_const_eval/src/interpret/operand.rs | 24 +++- .../rustc_const_eval/src/interpret/place.rs | 131 +++++++----------- .../rustc_mir_transform/src/const_prop.rs | 34 +++-- .../src/const_prop_lint.rs | 33 +++-- 8 files changed, 179 insertions(+), 175 deletions(-) diff --git a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs index 0dac4f8978e..f84dd9521ee 100644 --- a/compiler/rustc_const_eval/src/const_eval/eval_queries.rs +++ b/compiler/rustc_const_eval/src/const_eval/eval_queries.rs @@ -189,6 +189,7 @@ pub(super) fn op_to_const<'tcx>( let len: usize = len.try_into().unwrap(); ConstValue::Slice { data, start, end: start + len } } + Immediate::Uninit => to_const_value(&op.assert_mem_place()), }, } } diff --git a/compiler/rustc_const_eval/src/interpret/cast.rs b/compiler/rustc_const_eval/src/interpret/cast.rs index fc81b22b406..ea65595f050 100644 --- a/compiler/rustc_const_eval/src/interpret/cast.rs +++ b/compiler/rustc_const_eval/src/interpret/cast.rs @@ -153,7 +153,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { assert_eq!(dest_layout.size, self.pointer_size()); assert!(src.layout.ty.is_unsafe_ptr()); return match **src { - Immediate::ScalarPair(data, _) => Ok(data.into()), + Immediate::ScalarPair(data, _) => Ok(data.check_init()?.into()), Immediate::Scalar(..) => span_bug!( self.cur_span(), "{:?} input to a fat-to-thin cast ({:?} -> {:?})", @@ -161,6 +161,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { src.layout.ty, cast_ty ), + Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)), }; } } diff --git a/compiler/rustc_const_eval/src/interpret/eval_context.rs b/compiler/rustc_const_eval/src/interpret/eval_context.rs index 031d508d70f..767032845ac 100644 --- a/compiler/rustc_const_eval/src/interpret/eval_context.rs +++ b/compiler/rustc_const_eval/src/interpret/eval_context.rs @@ -112,6 +112,8 @@ pub struct Frame<'mir, 'tcx, Tag: Provenance = AllocId, Extra = ()> { /// The locals are stored as `Option`s. /// `None` represents a local that is currently dead, while a live local /// can either directly contain `Scalar` or refer to some part of an `Allocation`. + /// + /// Do *not* access this directly; always go through the machine hook! pub locals: IndexVec>, /// The span of the `tracing` crate is stored here. @@ -179,10 +181,6 @@ pub struct LocalState<'tcx, Tag: Provenance = AllocId> { pub enum LocalValue { /// This local is not currently alive, and cannot be used at all. Dead, - /// This local is alive but not yet allocated. It cannot be read from or have its address taken, - /// and will be allocated on the first write. This is to support unsized locals, where we cannot - /// know their size in advance. - Unallocated, /// A normal, live local. 
/// Mostly for convenience, we re-use the `Operand` type here. /// This is an optimization over just always having a pointer here; @@ -196,12 +194,10 @@ impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> { /// /// Note: This may only be invoked from the `Machine::access_local` hook and not from /// anywhere else. You may be invalidating machine invariants if you do! - pub fn access(&self) -> InterpResult<'tcx, Operand> { - match self.value { - LocalValue::Dead => throw_ub!(DeadLocal), - LocalValue::Unallocated => { - bug!("The type checker should prevent reading from a never-written local") - } + #[inline] + pub fn access(&self) -> InterpResult<'tcx, &Operand> { + match &self.value { + LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"? LocalValue::Live(val) => Ok(val), } } @@ -211,15 +207,11 @@ impl<'tcx, Tag: Provenance + 'static> LocalState<'tcx, Tag> { /// /// Note: This may only be invoked from the `Machine::access_local_mut` hook and not from /// anywhere else. You may be invalidating machine invariants if you do! - pub fn access_mut( - &mut self, - ) -> InterpResult<'tcx, Result<&mut LocalValue, MemPlace>> { - match self.value { - LocalValue::Dead => throw_ub!(DeadLocal), - LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)), - ref mut local @ (LocalValue::Live(Operand::Immediate(_)) | LocalValue::Unallocated) => { - Ok(Ok(local)) - } + #[inline] + pub fn access_mut(&mut self) -> InterpResult<'tcx, &mut Operand> { + match &mut self.value { + LocalValue::Dead => throw_ub!(DeadLocal), // could even be "invalid program"? + LocalValue::Live(val) => Ok(val), } } } @@ -710,16 +702,15 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { })?; } - // Locals are initially unallocated. - let dummy = LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) }; + // Most locals are initially dead. + let dummy = LocalState { value: LocalValue::Dead, layout: Cell::new(None) }; let mut locals = IndexVec::from_elem(dummy, &body.local_decls); - // Now mark those locals as dead that we do not want to initialize - // Mark locals that use `Storage*` annotations as dead on function entry. + // Now mark those locals as live that have no `Storage*` annotations. let always_live = always_live_locals(self.body()); for local in locals.indices() { - if !always_live.contains(local) { - locals[local].value = LocalValue::Dead; + if always_live.contains(local) { + locals[local].value = LocalValue::Live(Operand::Immediate(Immediate::Uninit)); } } // done @@ -791,7 +782,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { if unwinding { "during unwinding" } else { "returning from function" } ); - // Sanity check `unwinding`. + // Check `unwinding`. assert_eq!( unwinding, match self.frame().loc { @@ -799,51 +790,61 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { Err(_) => true, } ); - if unwinding && self.frame_idx() == 0 { throw_ub_format!("unwinding past the topmost frame of the stack"); } - let frame = - self.stack_mut().pop().expect("tried to pop a stack frame, but there were none"); - - if !unwinding { - let op = self.local_to_op(&frame, mir::RETURN_PLACE, None)?; - self.copy_op_transmute(&op, &frame.return_place)?; - trace!("{:?}", self.dump_place(*frame.return_place)); - } - - let return_to_block = frame.return_to_block; - - // Now where do we jump next? + // Copy return value. Must of course happen *before* we deallocate the locals. 
+ let copy_ret_result = if !unwinding { + let op = self + .local_to_op(self.frame(), mir::RETURN_PLACE, None) + .expect("return place should always be live"); + let dest = self.frame().return_place; + let err = self.copy_op_transmute(&op, &dest); + trace!("return value: {:?}", self.dump_place(*dest)); + // We delay actually short-circuiting on this error until *after* the stack frame is + // popped, since we want this error to be attributed to the caller, whose type defines + // this transmute. + err + } else { + Ok(()) + }; + // Cleanup: deallocate locals. // Usually we want to clean up (deallocate locals), but in a few rare cases we don't. - // In that case, we return early. We also avoid validation in that case, - // because this is CTFE and the final value will be thoroughly validated anyway. + // We do this while the frame is still on the stack, so errors point to the callee. + let return_to_block = self.frame().return_to_block; let cleanup = match return_to_block { StackPopCleanup::Goto { .. } => true, StackPopCleanup::Root { cleanup, .. } => cleanup, }; + if cleanup { + // We need to take the locals out, since we need to mutate while iterating. + let locals = mem::take(&mut self.frame_mut().locals); + for local in &locals { + self.deallocate_local(local.value)?; + } + } + // All right, now it is time to actually pop the frame. + // Note that its locals are gone already, but that's fine. + let frame = + self.stack_mut().pop().expect("tried to pop a stack frame, but there were none"); + // Report error from return value copy, if any. + copy_ret_result?; + + // If we are not doing cleanup, also skip everything else. if !cleanup { assert!(self.stack().is_empty(), "only the topmost frame should ever be leaked"); assert!(!unwinding, "tried to skip cleanup during unwinding"); - // Leak the locals, skip validation, skip machine hook. + // Skip machine hook. return Ok(()); } - - trace!("locals: {:#?}", frame.locals); - - // Cleanup: deallocate all locals that are backed by an allocation. - for local in &frame.locals { - self.deallocate_local(local.value)?; - } - if M::after_stack_pop(self, frame, unwinding)? == StackPopJump::NoJump { // The hook already did everything. - // We want to skip the `info!` below, hence early return. return Ok(()); } + // Normal return, figure out where to jump. if unwinding { // Follow the unwind edge. @@ -874,7 +875,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { assert!(local != mir::RETURN_PLACE, "Cannot make return place live"); trace!("{:?} is now live", local); - let local_val = LocalValue::Unallocated; + let local_val = LocalValue::Live(Operand::Immediate(Immediate::Uninit)); // StorageLive expects the local to be dead, and marks it live. let old = mem::replace(&mut self.frame_mut().locals[local].value, local_val); if !matches!(old, LocalValue::Dead) { @@ -977,7 +978,9 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> std::fmt::Debug match self.ecx.stack()[frame].locals[local].value { LocalValue::Dead => write!(fmt, " is dead")?, - LocalValue::Unallocated => write!(fmt, " is unallocated")?, + LocalValue::Live(Operand::Immediate(Immediate::Uninit)) => { + write!(fmt, " is uninitialized")? 
+            }
            LocalValue::Live(Operand::Indirect(mplace)) => {
                write!(
                    fmt,
diff --git a/compiler/rustc_const_eval/src/interpret/machine.rs b/compiler/rustc_const_eval/src/interpret/machine.rs
index 4661a7c2828..b3461b414b6 100644
--- a/compiler/rustc_const_eval/src/interpret/machine.rs
+++ b/compiler/rustc_const_eval/src/interpret/machine.rs
@@ -14,8 +14,7 @@ use rustc_target::spec::abi::Abi;
 use super::{
     AllocId, AllocRange, Allocation, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
-    LocalValue, MemPlace, MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar,
-    StackPopUnwind,
+    MemoryKind, OpTy, Operand, PlaceTy, Pointer, Provenance, Scalar, StackPopUnwind,
 };

 /// Data returned by Machine::stack_pop,
@@ -226,11 +225,13 @@ pub trait Machine<'mir, 'tcx>: Sized {
     /// Since reading a ZST is not actually accessing memory or locals, this is never invoked
     /// for ZST reads.
     #[inline]
-    fn access_local(
-        _ecx: &InterpCx<'mir, 'tcx, Self>,
-        frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
+    fn access_local<'a>(
+        frame: &'a Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
         local: mir::Local,
-    ) -> InterpResult<'tcx, Operand> {
+    ) -> InterpResult<'tcx, &'a Operand>
+    where
+        'tcx: 'mir,
+    {
         frame.locals[local].access()
     }

@@ -242,7 +243,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
         ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
         frame: usize,
         local: mir::Local,
-    ) -> InterpResult<'tcx, Result<&'a mut LocalValue, MemPlace>>
+    ) -> InterpResult<'tcx, &'a mut Operand>
     where
         'tcx: 'mir,
     {
@@ -418,12 +419,14 @@ pub trait Machine<'mir, 'tcx>: Sized {
     }

     /// Called immediately after a stack frame got popped, but before jumping back to the caller.
+    /// The `locals` have already been destroyed!
     fn after_stack_pop(
         _ecx: &mut InterpCx<'mir, 'tcx, Self>,
         _frame: Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
-        _unwinding: bool,
+        unwinding: bool,
     ) -> InterpResult<'tcx, StackPopJump> {
         // By default, we do not support unwinding from panics
+        assert!(!unwinding);
         Ok(StackPopJump::Normal)
     }
 }
diff --git a/compiler/rustc_const_eval/src/interpret/operand.rs b/compiler/rustc_const_eval/src/interpret/operand.rs
index 145d95a40ea..805dcb38895 100644
--- a/compiler/rustc_const_eval/src/interpret/operand.rs
+++ b/compiler/rustc_const_eval/src/interpret/operand.rs
@@ -14,7 +14,7 @@ use rustc_target::abi::{self, Abi, Align, HasDataLayout, Size, TagEncoding};
 use rustc_target::abi::{VariantIdx, Variants};

 use super::{
-    alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, GlobalId,
+    alloc_range, from_known_layout, mir_assign_valid_types, AllocId, ConstValue, Frame, GlobalId,
     InterpCx, InterpResult, MPlaceTy, Machine, MemPlace, Place, PlaceTy, Pointer,
     PointerArithmetic, Provenance, Scalar, ScalarMaybeUninit,
 };
@@ -28,8 +28,15 @@ use super::{
 /// defined on `Immediate`, and do not have to work with a `Place`.
 #[derive(Copy, Clone, PartialEq, Eq, HashStable, Hash, Debug)]
 pub enum Immediate {
+    /// A single scalar value (must have *initialized* `Scalar` ABI).
+    /// FIXME: we also currently often use this for ZST.
+    /// `ScalarMaybeUninit` should reject ZST, and we should use `Uninit` for them instead.
     Scalar(ScalarMaybeUninit),
+    /// A pair of two scalar values (must have `ScalarPair` ABI where both fields are
+    /// `Scalar::Initialized`).
     ScalarPair(ScalarMaybeUninit, ScalarMaybeUninit),
+    /// A value of fully uninitialized memory. Can have any size and layout.
+ Uninit, } #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] @@ -75,6 +82,7 @@ impl<'tcx, Tag: Provenance> Immediate { match self { Immediate::Scalar(val) => val, Immediate::ScalarPair(..) => bug!("Got a scalar pair where a scalar was expected"), + Immediate::Uninit => ScalarMaybeUninit::Uninit, } } @@ -88,6 +96,7 @@ impl<'tcx, Tag: Provenance> Immediate { match self { Immediate::ScalarPair(val1, val2) => (val1, val2), Immediate::Scalar(..) => bug!("Got a scalar where a scalar pair was expected"), + Immediate::Uninit => (ScalarMaybeUninit::Uninit, ScalarMaybeUninit::Uninit), } } @@ -149,7 +158,10 @@ impl std::fmt::Display for ImmTy<'_, Tag> { } Immediate::ScalarPair(a, b) => { // FIXME(oli-obk): at least print tuples and slices nicely - write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty,) + write!(f, "({:x}, {:x}): {}", a, b, self.layout.ty) + } + Immediate::Uninit => { + write!(f, "uninit: {}", self.layout.ty) } } }) @@ -397,7 +409,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { self.scalar_to_ptr(self.read_scalar(op)?.check_init()?) } - // Turn the wide MPlace into a string (must already be dereferenced!) + /// Turn the wide MPlace into a string (must already be dereferenced!) pub fn read_str(&self, mplace: &MPlaceTy<'tcx, M::PointerTag>) -> InterpResult<'tcx, &str> { let len = mplace.len(self)?; let bytes = self.read_bytes_ptr(mplace.ptr, Size::from_bytes(len))?; @@ -528,10 +540,10 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { /// Will not access memory, instead an indirect `Operand` is returned. /// /// This is public because it is used by [priroda](https://github.com/oli-obk/priroda) to get an - /// OpTy from a local + /// OpTy from a local. pub fn local_to_op( &self, - frame: &super::Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>, + frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>, local: mir::Local, layout: Option>, ) -> InterpResult<'tcx, OpTy<'tcx, M::PointerTag>> { @@ -540,7 +552,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { // Do not read from ZST, they might not be initialized Operand::Immediate(Scalar::ZST.into()) } else { - M::access_local(&self, frame, local)? + *M::access_local(frame, local)? 
}; Ok(OpTy { op, layout, align: Some(layout.align.abi) }) } diff --git a/compiler/rustc_const_eval/src/interpret/place.rs b/compiler/rustc_const_eval/src/interpret/place.rs index f4dc18af23c..5ec6c79a86f 100644 --- a/compiler/rustc_const_eval/src/interpret/place.rs +++ b/compiler/rustc_const_eval/src/interpret/place.rs @@ -15,8 +15,8 @@ use rustc_target::abi::{HasDataLayout, Size, VariantIdx, Variants}; use super::{ alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckInAllocMsg, - ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, LocalValue, Machine, MemoryKind, OpTy, - Operand, Pointer, Provenance, Scalar, ScalarMaybeUninit, + ConstAlloc, ImmTy, Immediate, InterpCx, InterpResult, Machine, MemoryKind, OpTy, Operand, + Pointer, Provenance, Scalar, ScalarMaybeUninit, }; #[derive(Copy, Clone, Hash, PartialEq, Eq, HashStable, Debug)] @@ -314,6 +314,7 @@ where let (ptr, meta) = match **val { Immediate::Scalar(ptr) => (ptr, MemPlaceMeta::None), Immediate::ScalarPair(ptr, meta) => (ptr, MemPlaceMeta::Meta(meta.check_init()?)), + Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)), }; let mplace = MemPlace { ptr: self.scalar_to_ptr(ptr.check_init()?)?, meta }; @@ -746,14 +747,14 @@ where let mplace = match dest.place { Place::Local { frame, local } => { match M::access_local_mut(self, frame, local)? { - Ok(local) => { + Operand::Immediate(local) => { // Local can be updated in-place. - *local = LocalValue::Live(Operand::Immediate(src)); + *local = src; return Ok(()); } - Err(mplace) => { + Operand::Indirect(mplace) => { // The local is in memory, go on below. - mplace + *mplace } } } @@ -817,6 +818,7 @@ where alloc.write_scalar(alloc_range(Size::ZERO, a_size), a_val)?; alloc.write_scalar(alloc_range(b_offset, b_size), b_val) } + Immediate::Uninit => alloc.write_uninit(), } } @@ -825,25 +827,13 @@ where Ok(mplace) => mplace, Err((frame, local)) => { match M::access_local_mut(self, frame, local)? { - Ok(local) => match dest.layout.abi { - Abi::Scalar(_) => { - *local = LocalValue::Live(Operand::Immediate(Immediate::Scalar( - ScalarMaybeUninit::Uninit, - ))); - return Ok(()); - } - Abi::ScalarPair(..) => { - *local = LocalValue::Live(Operand::Immediate(Immediate::ScalarPair( - ScalarMaybeUninit::Uninit, - ScalarMaybeUninit::Uninit, - ))); - return Ok(()); - } - _ => self.force_allocation(dest)?, - }, - Err(mplace) => { + Operand::Immediate(local) => { + *local = Immediate::Uninit; + return Ok(()); + } + Operand::Indirect(mplace) => { // The local is in memory, go on below. - MPlaceTy { mplace, layout: dest.layout, align: dest.align } + MPlaceTy { mplace: *mplace, layout: dest.layout, align: dest.align } } } } @@ -908,19 +898,18 @@ where // Slow path, this does not fit into an immediate. Just memcpy. trace!("copy_op: {:?} <- {:?}: {}", *dest, src, dest.layout.ty); - // This interprets `src.meta` with the `dest` local's layout, if an unsized local - // is being initialized! 
- let (dest, size) = self.force_allocation_maybe_sized(dest, src.meta)?; - let size = size.unwrap_or_else(|| { - assert!( - !dest.layout.is_unsized(), - "Cannot copy into already initialized unsized place" - ); - dest.layout.size - }); - assert_eq!(src.meta, dest.meta, "Can only copy between equally-sized instances"); + let dest = self.force_allocation(dest)?; + assert!(!(src.layout.is_unsized() || dest.layout.is_unsized()), "cannot copy unsized data"); + assert_eq!(src.layout.size, dest.layout.size, "Cannot copy differently-sized data"); - self.mem_copy(src.ptr, src.align, dest.ptr, dest.align, size, /*nonoverlapping*/ false) + self.mem_copy( + src.ptr, + src.align, + dest.ptr, + dest.align, + dest.layout.size, + /*nonoverlapping*/ false, + ) } /// Copies the data from an operand to a place. The layouts may disagree, but they must @@ -931,9 +920,16 @@ where dest: &PlaceTy<'tcx, M::PointerTag>, ) -> InterpResult<'tcx> { if mir_assign_valid_types(*self.tcx, self.param_env, src.layout, dest.layout) { - // Fast path: Just use normal `copy_op` + // Fast path: Just use normal `copy_op`. This is faster because it tries + // `read_immediate_raw` first before doing `force_allocation`. return self.copy_op(src, dest); } + // Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want + // to avoid that here. + assert!( + !src.layout.is_unsized() && !dest.layout.is_unsized(), + "Cannot transmute unsized data" + ); // We still require the sizes to match. if src.layout.size != dest.layout.size { span_bug!( @@ -943,12 +939,6 @@ where dest ); } - // Unsized copies rely on interpreting `src.meta` with `dest.layout`, we want - // to avoid that here. - assert!( - !src.layout.is_unsized() && !dest.layout.is_unsized(), - "Cannot transmute unsized data" - ); // The hard case is `ScalarPair`. `src` is already read from memory in this case, // using `src.layout` to figure out which bytes to use for the 1st and 2nd field. @@ -976,20 +966,15 @@ where /// If the place currently refers to a local that doesn't yet have a matching allocation, /// create such an allocation. /// This is essentially `force_to_memplace`. - /// - /// This supports unsized types and returns the computed size to avoid some - /// redundant computation when copying; use `force_allocation` for a simpler, sized-only - /// version. #[instrument(skip(self), level = "debug")] - pub fn force_allocation_maybe_sized( + pub fn force_allocation( &mut self, place: &PlaceTy<'tcx, M::PointerTag>, - meta: MemPlaceMeta, - ) -> InterpResult<'tcx, (MPlaceTy<'tcx, M::PointerTag>, Option)> { - let (mplace, size) = match place.place { + ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { + let mplace = match place.place { Place::Local { frame, local } => { match M::access_local_mut(self, frame, local)? { - Ok(&mut local_val) => { + &mut Operand::Immediate(local_val) => { // We need to make an allocation. // We need the layout of the local. We can NOT use the layout we got, @@ -997,44 +982,29 @@ where // that has different alignment than the outer field. let local_layout = self.layout_of_local(&self.stack()[frame], local, None)?; - // We also need to support unsized types, and hence cannot use `allocate`. - let (size, align) = self - .size_and_align_of(&meta, &local_layout)? - .expect("Cannot allocate for non-dyn-sized type"); - let ptr = self.allocate_ptr(size, align, MemoryKind::Stack)?; - let mplace = MemPlace { ptr: ptr.into(), meta }; - if let LocalValue::Live(Operand::Immediate(value)) = local_val { - // Preserve old value. 
+ if local_layout.is_unsized() { + throw_unsup_format!("unsized locals are not supported"); + } + let mplace = self.allocate(local_layout, MemoryKind::Stack)?; + if !matches!(local_val, Immediate::Uninit) { + // Preserve old value. (As an optimization, we can skip this if it was uninit.) // We don't have to validate as we can assume the local // was already valid for its type. - let mplace = MPlaceTy { - mplace, - layout: local_layout, - align: local_layout.align.abi, - }; - self.write_immediate_to_mplace_no_validate(value, &mplace)?; + self.write_immediate_to_mplace_no_validate(local_val, &mplace)?; } // Now we can call `access_mut` again, asserting it goes well, // and actually overwrite things. - *M::access_local_mut(self, frame, local).unwrap().unwrap() = - LocalValue::Live(Operand::Indirect(mplace)); - (mplace, Some(size)) + *M::access_local_mut(self, frame, local).unwrap() = + Operand::Indirect(*mplace); + *mplace } - Err(mplace) => (mplace, None), // this already was an indirect local + &mut Operand::Indirect(mplace) => mplace, // this already was an indirect local } } - Place::Ptr(mplace) => (mplace, None), + Place::Ptr(mplace) => mplace, }; // Return with the original layout, so that the caller can go on - Ok((MPlaceTy { mplace, layout: place.layout, align: place.align }, size)) - } - - #[inline(always)] - pub fn force_allocation( - &mut self, - place: &PlaceTy<'tcx, M::PointerTag>, - ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { - Ok(self.force_allocation_maybe_sized(place, MemPlaceMeta::None)?.0) + Ok(MPlaceTy { mplace, layout: place.layout, align: place.align }) } pub fn allocate( @@ -1042,6 +1012,7 @@ where layout: TyAndLayout<'tcx>, kind: MemoryKind, ) -> InterpResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> { + assert!(!layout.is_unsized()); let ptr = self.allocate_ptr(layout.size, layout.align.abi, kind)?; Ok(MPlaceTy::from_aligned_ptr(ptr.into(), layout)) } diff --git a/compiler/rustc_mir_transform/src/const_prop.rs b/compiler/rustc_mir_transform/src/const_prop.rs index 070e563f396..71e45a1383b 100644 --- a/compiler/rustc_mir_transform/src/const_prop.rs +++ b/compiler/rustc_mir_transform/src/const_prop.rs @@ -29,9 +29,8 @@ use rustc_trait_selection::traits; use crate::MirPass; use rustc_const_eval::interpret::{ self, compile_time_machine, AllocId, ConstAllocation, ConstValue, CtfeValidationMode, Frame, - ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemPlace, MemoryKind, OpTy, - Operand as InterpOperand, PlaceTy, Pointer, Scalar, ScalarMaybeUninit, StackPopCleanup, - StackPopUnwind, + ImmTy, Immediate, InterpCx, InterpResult, LocalState, LocalValue, MemoryKind, OpTy, PlaceTy, + Pointer, Scalar, ScalarMaybeUninit, StackPopCleanup, StackPopUnwind, }; /// The maximum number of bytes that we'll allocate space for a local or the return value. 
@@ -237,15 +236,19 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
         throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp")
     }

-    fn access_local(
-        _ecx: &InterpCx<'mir, 'tcx, Self>,
-        frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
+    fn access_local<'a>(
+        frame: &'a Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
         local: Local,
-    ) -> InterpResult<'tcx, InterpOperand> {
+    ) -> InterpResult<'tcx, &'a interpret::Operand> {
         let l = &frame.locals[local];
-        if l.value == LocalValue::Unallocated {
-            throw_machine_stop_str!("tried to access an unallocated local")
+        if matches!(
+            l.value,
+            LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit))
+        ) {
+            // For us "uninit" means "we don't know its value, might be initialized or not".
+            // So stop here.
+            throw_machine_stop_str!("tried to access a local with unknown value")
         }

         l.access()
@@ -255,8 +258,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
         ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
         frame: usize,
         local: Local,
-    ) -> InterpResult<'tcx, Result<&'a mut LocalValue, MemPlace>>
-    {
+    ) -> InterpResult<'tcx, &'a mut interpret::Operand> {
         if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation {
             throw_machine_stop_str!("tried to write to a local that is marked as not propagatable")
         }
@@ -436,8 +438,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
     /// Remove `local` from the pool of `Locals`. Allows writing to them,
     /// but not reading from them anymore.
     fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
-        ecx.frame_mut().locals[local] =
-            LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) };
+        ecx.frame_mut().locals[local] = LocalState {
+            value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)),
+            layout: Cell::new(None),
+        };
     }

     fn use_ecx(&mut self, f: F) -> Option
@@ -1042,7 +1046,9 @@ impl<'tcx> MutVisitor<'tcx> for ConstPropagator<'_, 'tcx> {
                 let frame = self.ecx.frame_mut();
                 frame.locals[local].value =
                     if let StatementKind::StorageLive(_) = statement.kind {
-                        LocalValue::Unallocated
+                        LocalValue::Live(interpret::Operand::Immediate(
+                            interpret::Immediate::Uninit,
+                        ))
                     } else {
                         LocalValue::Dead
                     };
diff --git a/compiler/rustc_mir_transform/src/const_prop_lint.rs b/compiler/rustc_mir_transform/src/const_prop_lint.rs
index e3ab42d09ef..54c55c8f8d5 100644
--- a/compiler/rustc_mir_transform/src/const_prop_lint.rs
+++ b/compiler/rustc_mir_transform/src/const_prop_lint.rs
@@ -31,8 +31,8 @@ use crate::MirLint;
 use rustc_const_eval::const_eval::ConstEvalErr;
 use rustc_const_eval::interpret::{
     self, compile_time_machine, AllocId, ConstAllocation, Frame, ImmTy, InterpCx, InterpResult,
-    LocalState, LocalValue, MemPlace, MemoryKind, OpTy, Operand as InterpOperand, PlaceTy, Pointer,
-    Scalar, ScalarMaybeUninit, StackPopCleanup, StackPopUnwind,
+    LocalState, LocalValue, MemoryKind, OpTy, PlaceTy, Pointer, Scalar, ScalarMaybeUninit,
+    StackPopCleanup, StackPopUnwind,
 };

 /// The maximum number of bytes that we'll allocate space for a local or the return value.
@@ -229,15 +229,19 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
         throw_machine_stop_str!("pointer arithmetic or comparisons aren't supported in ConstProp")
     }

-    fn access_local(
-        _ecx: &InterpCx<'mir, 'tcx, Self>,
-        frame: &Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
+    fn access_local<'a>(
+        frame: &'a Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>,
         local: Local,
-    ) -> InterpResult<'tcx, InterpOperand> {
+    ) -> InterpResult<'tcx, &'a interpret::Operand> {
         let l = &frame.locals[local];
-        if l.value == LocalValue::Unallocated {
-            throw_machine_stop_str!("tried to access an uninitialized local")
+        if matches!(
+            l.value,
+            LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit))
+        ) {
+            // For us "uninit" means "we don't know its value, might be initialized or not".
+            // So stop here.
+            throw_machine_stop_str!("tried to access a local with unknown value")
         }

         l.access()
@@ -247,8 +251,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine<'mir, 'tcx>
         ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
         frame: usize,
         local: Local,
-    ) -> InterpResult<'tcx, Result<&'a mut LocalValue, MemPlace>>
-    {
+    ) -> InterpResult<'tcx, &'a mut interpret::Operand> {
         if ecx.machine.can_const_prop[local] == ConstPropMode::NoPropagation {
             throw_machine_stop_str!("tried to write to a local that is marked as not propagatable")
         }
@@ -430,8 +433,10 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
     /// Remove `local` from the pool of `Locals`. Allows writing to them,
     /// but not reading from them anymore.
     fn remove_const(ecx: &mut InterpCx<'mir, 'tcx, ConstPropMachine<'mir, 'tcx>>, local: Local) {
-        ecx.frame_mut().locals[local] =
-            LocalState { value: LocalValue::Unallocated, layout: Cell::new(None) };
+        ecx.frame_mut().locals[local] = LocalState {
+            value: LocalValue::Live(interpret::Operand::Immediate(interpret::Immediate::Uninit)),
+            layout: Cell::new(None),
+        };
     }

     fn lint_root(&self, source_info: SourceInfo) -> Option {
@@ -915,7 +920,9 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
                 let frame = self.ecx.frame_mut();
                 frame.locals[local].value =
                     if let StatementKind::StorageLive(_) = statement.kind {
-                        LocalValue::Unallocated
+                        LocalValue::Live(interpret::Operand::Immediate(
+                            interpret::Immediate::Uninit,
+                        ))
                     } else {
                        LocalValue::Dead
                    };
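The commit message above describes `Immediate::Uninit` and the lazy allocation of locals only in prose, so here is a minimal, self-contained Rust sketch of that lifecycle. All names and types in it (`Immediate`, `Operand`, `LocalValue`, `Frame`, `storage_live`, `force_allocation`, `u128` scalars, `usize` allocation indices) are simplified stand-ins chosen for illustration, not the real rustc-internal API touched by this patch.

```rust
// Minimal standalone sketch (NOT the real rustc-internal API) of the local
// lifecycle this patch introduces: every live local starts as an immediate
// `Uninit`, and a backing allocation is created only on demand.
#[allow(dead_code)]
#[derive(Clone, Debug, PartialEq)]
enum Immediate {
    Scalar(u128),           // stand-in for ScalarMaybeUninit<Tag>
    ScalarPair(u128, u128), // stand-in for the ScalarPair ABI case
    Uninit,                 // fully uninitialized, but still has a size/layout
}

#[derive(Clone, Debug, PartialEq)]
enum Operand {
    Immediate(Immediate),
    Indirect(usize), // stand-in for MemPlace: index into a fake allocation list
}

#[derive(Clone, Debug, PartialEq)]
enum LocalValue {
    Dead, // `LocalValue::Unallocated` no longer exists after this patch
    Live(Operand),
}

struct Frame {
    locals: Vec<LocalValue>,
    allocations: Vec<Vec<u8>>, // fake backing store
}

impl Frame {
    /// StorageLive: Dead -> Live(Immediate(Uninit)); no memory is touched.
    fn storage_live(&mut self, local: usize) {
        assert_eq!(self.locals[local], LocalValue::Dead, "StorageLive on a live local");
        self.locals[local] = LocalValue::Live(Operand::Immediate(Immediate::Uninit));
    }

    /// Lazily create the backing store, mirroring `force_allocation`: an existing
    /// immediate value is preserved by writing it into the new allocation, and the
    /// copy is skipped entirely if it was `Uninit`.
    fn force_allocation(&mut self, local: usize, size: usize) -> usize {
        match self.locals[local].clone() {
            LocalValue::Dead => panic!("accessing a dead local is UB"),
            LocalValue::Live(Operand::Indirect(idx)) => idx, // already in memory
            LocalValue::Live(Operand::Immediate(imm)) => {
                let mut bytes = vec![0u8; size];
                if let Immediate::Scalar(v) = imm {
                    let n = size.min(16);
                    bytes[..n].copy_from_slice(&v.to_le_bytes()[..n]);
                }
                let idx = self.allocations.len();
                self.allocations.push(bytes);
                self.locals[local] = LocalValue::Live(Operand::Indirect(idx));
                idx
            }
        }
    }
}

fn main() {
    let mut frame = Frame { locals: vec![LocalValue::Dead; 2], allocations: Vec::new() };
    frame.storage_live(0); // live, but no backing store yet
    frame.storage_live(1);
    // An immediate write keeps the local allocation-free.
    frame.locals[1] = LocalValue::Live(Operand::Immediate(Immediate::Scalar(42)));
    // Only forcing the local into memory creates a real allocation.
    let idx = frame.force_allocation(1, 8);
    println!("local 1 now lives in allocation {idx}: {:?}", frame.allocations[idx]);
    println!("local 0 is still {:?}", frame.locals[0]);
}
```

The property this sketch mirrors is the one the patch relies on: `StorageLive` and immediate writes never allocate, the backing store appears only when a local is forced into memory, and (as in the const-prop changes above) an `Uninit` immediate means "value unknown" rather than an error.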