From 359609882369fa3027f1a4706b821fef695fa8a2 Mon Sep 17 00:00:00 2001
From: Scott McMurray
Date: Wed, 10 Apr 2024 23:08:34 -0700
Subject: [PATCH] Put `PlaceValue` into `OperandValue::Ref`, rather than 3
 tuple fields
---
 compiler/rustc_codegen_gcc/src/builder.rs     |  7 +--
 .../rustc_codegen_gcc/src/intrinsic/mod.rs    | 11 ++--
 compiler/rustc_codegen_llvm/src/abi.rs        | 11 ++--
 compiler/rustc_codegen_llvm/src/builder.rs    |  7 +--
 compiler/rustc_codegen_ssa/src/mir/block.rs   | 52 +++++++++----------
 .../rustc_codegen_ssa/src/mir/debuginfo.rs    |  4 +-
 compiler/rustc_codegen_ssa/src/mir/operand.rs | 29 ++++++-----
 compiler/rustc_codegen_ssa/src/mir/rvalue.rs  | 18 +++----
 8 files changed, 76 insertions(+), 63 deletions(-)

diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs
index ce2c18c68a8..6253816d37d 100644
--- a/compiler/rustc_codegen_gcc/src/builder.rs
+++ b/compiler/rustc_codegen_gcc/src/builder.rs
@@ -999,8 +999,9 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
             }
         }

-        let val = if let Some(llextra) = place.val.llextra {
-            OperandValue::Ref(place.val.llval, Some(llextra), place.val.align)
+        let val = if let Some(_) = place.val.llextra {
+            // FIXME: Merge with the `else` below?
+            OperandValue::Ref(place.val)
         } else if place.layout.is_gcc_immediate() {
             let load = self.load(place.layout.gcc_type(self), place.val.llval, place.val.align);
             if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
@@ -1031,7 +1032,7 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
                 load(1, b, place.val.align.restrict_for_offset(b_offset)),
             )
         } else {
-            OperandValue::Ref(place.val.llval, None, place.val.align)
+            OperandValue::Ref(place.val)
         };

         OperandRef { val, layout: place.layout }
diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
index 57cb81a8ece..bee6bda007c 100644
--- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
+++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs
@@ -11,7 +11,7 @@
 use rustc_codegen_ssa::common::IntPredicate;
 use rustc_codegen_ssa::errors::InvalidMonomorphization;
 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
-use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
 use rustc_codegen_ssa::traits::{
     ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods,
 };
@@ -502,7 +502,7 @@ fn store(
             return;
         }
         if self.is_sized_indirect() {
-            OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
+            OperandValue::Ref(PlaceValue::new_sized(val, self.layout.align.abi)).store(bx, dst)
         } else if self.is_unsized_indirect() {
             bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
         } else if let PassMode::Cast { ref cast, .. } = self.mode {
@@ -571,7 +571,12 @@ fn store_fn_arg<'a>(
                 OperandValue::Pair(next(), next()).store(bx, dst);
             }
             PassMode::Indirect { meta_attrs: Some(_), .. } => {
-                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+                let place_val = PlaceValue {
+                    llval: next(),
+                    llextra: Some(next()),
+                    align: self.layout.align.abi,
+                };
+                OperandValue::Ref(place_val).store(bx, dst);
             }
             PassMode::Direct(_)
             | PassMode::Indirect { meta_attrs: None, .. }
diff --git a/compiler/rustc_codegen_llvm/src/abi.rs b/compiler/rustc_codegen_llvm/src/abi.rs
index cd9e5515958..c0b43b77897 100644
--- a/compiler/rustc_codegen_llvm/src/abi.rs
+++ b/compiler/rustc_codegen_llvm/src/abi.rs
@@ -7,7 +7,7 @@
 use crate::value::Value;

 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
-use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::MemFlags;
 use rustc_middle::bug;
@@ -207,7 +207,7 @@ fn store(
             // Sized indirect arguments
             PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                 let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
-                OperandValue::Ref(val, None, align).store(bx, dst);
+                OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
             }
             // Unsized indirect arguments
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
@@ -265,7 +265,12 @@ fn store_fn_arg(
                 OperandValue::Pair(next(), next()).store(bx, dst);
             }
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
-                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+                let place_val = PlaceValue {
+                    llval: next(),
+                    llextra: Some(next()),
+                    align: self.layout.align.abi,
+                };
+                OperandValue::Ref(place_val).store(bx, dst);
             }
             PassMode::Direct(_)
             | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs
index 6819c22698c..06d9be1869c 100644
--- a/compiler/rustc_codegen_llvm/src/builder.rs
+++ b/compiler/rustc_codegen_llvm/src/builder.rs
@@ -579,8 +579,9 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
             }
         }

-        let val = if let Some(llextra) = place.val.llextra {
-            OperandValue::Ref(place.val.llval, Some(llextra), place.val.align)
+        let val = if let Some(_) = place.val.llextra {
+            // FIXME: Merge with the `else` below?
+            OperandValue::Ref(place.val)
         } else if place.layout.is_llvm_immediate() {
             let mut const_llval = None;
             let llty = place.layout.llvm_type(self);
@@ -623,7 +624,7 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
                 load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
             )
         } else {
-            OperandValue::Ref(place.val.llval, None, place.val.align)
+            OperandValue::Ref(place.val)
         };

         OperandRef { val, layout: place.layout }
diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs
index fbcdea47ebf..8c3c02db315 100644
--- a/compiler/rustc_codegen_ssa/src/mir/block.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/block.rs
@@ -455,8 +455,8 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {

             PassMode::Direct(_) | PassMode::Pair(..) => {
                 let op = self.codegen_consume(bx, mir::Place::return_place().as_ref());
-                if let Ref(llval, _, align) = op.val {
-                    bx.load(bx.backend_type(op.layout), llval, align)
+                if let Ref(place_val) = op.val {
+                    bx.load(bx.backend_type(op.layout), place_val.llval, place_val.align)
                 } else {
                     op.immediate_or_packed_pair(bx)
                 }
@@ -466,10 +466,9 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
                 let op = match self.locals[mir::RETURN_PLACE] {
                     LocalRef::Operand(op) => op,
                     LocalRef::PendingOperand => bug!("use of return before def"),
-                    LocalRef::Place(cg_place) => OperandRef {
-                        val: Ref(cg_place.val.llval, None, cg_place.val.align),
-                        layout: cg_place.layout,
-                    },
+                    LocalRef::Place(cg_place) => {
+                        OperandRef { val: Ref(cg_place.val), layout: cg_place.layout }
+                    }
                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                 };
                 let llslot = match op.val {
@@ -478,9 +477,12 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
                         op.val.store(bx, scratch);
                         scratch.val.llval
                     }
-                    Ref(llval, _, align) => {
-                        assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
-                        llval
+                    Ref(place_val) => {
+                        assert_eq!(
+                            place_val.align, op.layout.align.abi,
+                            "return place is unaligned!"
+                        );
+                        place_val.llval
                     }
                     ZeroSized => bug!("ZST return value shouldn't be in PassMode::Cast"),
                 };
@@ -1032,7 +1034,7 @@ fn codegen_call_terminator(
                             llargs.push(data_ptr);
                             continue 'make_args;
                         }
-                        Ref(data_ptr, Some(meta), _) => {
+                        Ref(PlaceValue { llval: data_ptr, llextra: Some(meta), .. }) => {
                             // by-value dynamic dispatch
                             llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                                 bx,
@@ -1079,12 +1081,12 @@ fn codegen_call_terminator(
                 // The callee needs to own the argument memory if we pass it
                 // by-ref, so make a local copy of non-immediate constants.
                 match (&arg.node, op.val) {
-                    (&mir::Operand::Copy(_), Ref(_, None, _))
-                    | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
+                    (&mir::Operand::Copy(_), Ref(PlaceValue { llextra: None, .. }))
+                    | (&mir::Operand::Constant(_), Ref(PlaceValue { llextra: None, .. })) => {
                         let tmp = PlaceRef::alloca(bx, op.layout);
                         bx.lifetime_start(tmp.val.llval, tmp.layout.size);
                         op.val.store(bx, tmp);
-                        op.val = Ref(tmp.val.llval, None, tmp.val.align);
+                        op.val = Ref(tmp.val);
                         copied_constant_arguments.push(tmp);
                     }
                     _ => {}
@@ -1428,7 +1430,7 @@ fn codegen_argument(
                 _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
             },
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => match op.val {
-                Ref(a, Some(b), _) => {
+                Ref(PlaceValue { llval: a, llextra: Some(b), .. }) => {
                     llargs.push(a);
                     llargs.push(b);
                     return;
@@ -1459,28 +1461,25 @@ fn codegen_argument(
                 }
                 _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
             },
-            Ref(llval, llextra, align) => match arg.mode {
+            Ref(op_place_val) => match arg.mode {
                 PassMode::Indirect { attrs, .. } => {
                     let required_align = match attrs.pointee_align {
                         Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi),
                         None => arg.layout.align.abi,
                     };
-                    if align < required_align {
+                    if op_place_val.align < required_align {
                         // For `foo(packed.large_field)`, and types with <4 byte alignment on x86,
                         // alignment requirements may be higher than the type's alignment, so copy
                         // to a higher-aligned alloca.
                         let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
-                        let op_place = PlaceRef {
-                            val: PlaceValue { llval, llextra, align },
-                            layout: op.layout,
-                        };
+                        let op_place = PlaceRef { val: op_place_val, layout: op.layout };
                         bx.typed_place_copy(scratch, op_place);
                         (scratch.val.llval, scratch.val.align, true)
                     } else {
-                        (llval, align, true)
+                        (op_place_val.llval, op_place_val.align, true)
                     }
                 }
-                _ => (llval, align, true),
+                _ => (op_place_val.llval, op_place_val.align, true),
             },
             ZeroSized => match arg.mode {
                 PassMode::Indirect { on_stack, .. } => {
@@ -1560,15 +1559,16 @@ fn codegen_arguments_untupled(
         let tuple = self.codegen_operand(bx, operand);

         // Handle both by-ref and immediate tuples.
-        if let Ref(llval, None, align) = tuple.val {
-            let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
+        if let Ref(place_val) = tuple.val {
+            if place_val.llextra.is_some() {
+                bug!("closure arguments must be sized");
+            }
+            let tuple_ptr = PlaceRef { val: place_val, layout: tuple.layout };
             for i in 0..tuple.layout.fields.count() {
                 let field_ptr = tuple_ptr.project_field(bx, i);
                 let field = bx.load_operand(field_ptr);
                 self.codegen_argument(bx, field, llargs, &args[i]);
             }
-        } else if let Ref(_, Some(_), _) = tuple.val {
-            bug!("closure arguments must be sized")
         } else {
             // If the tuple is immediate, the elements are as well.
             for i in 0..tuple.layout.fields.count() {
diff --git a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
index 3f3c738984e..5b213f7d358 100644
--- a/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/debuginfo.rs
@@ -14,7 +14,7 @@
 use rustc_target::abi::{Abi, FieldIdx, FieldsShape, Size, VariantIdx};

 use super::operand::{OperandRef, OperandValue};
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{FunctionCx, LocalRef};

 use std::ops::Range;
@@ -334,7 +334,7 @@ pub fn debug_introduce_local(&self, bx: &mut Bx, local: mir::Local) {
                     bx.set_var_name(place.val.llval, name);
                 }
                 LocalRef::Operand(operand) => match operand.val {
-                    OperandValue::Ref(x, ..) | OperandValue::Immediate(x) => {
+                    OperandValue::Ref(PlaceValue { llval: x, .. }) | OperandValue::Immediate(x) => {
                         bx.set_var_name(x, name);
                     }
                     OperandValue::Pair(a, b) => {
diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs
index d91869d622e..9cf64e2d676 100644
--- a/compiler/rustc_codegen_ssa/src/mir/operand.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs
@@ -23,11 +23,14 @@ pub enum OperandValue<V> {
     /// The second value, if any, is the extra data (vtable or length)
     /// which indicates that it refers to an unsized rvalue.
     ///
-    /// An `OperandValue` has this variant for types which are neither
-    /// `Immediate` nor `Pair`s. The backend value in this variant must be a
-    /// pointer to the *non*-immediate backend type. That pointee type is the
+    /// An `OperandValue` *must* be this variant for any type for which
+    /// [`LayoutTypeMethods::is_backend_ref`] returns `true`.
+    /// (That basically amounts to "isn't one of the other variants".)
+    ///
+    /// This holds a [`PlaceValue`] (like a [`PlaceRef`] does) with a pointer
+    /// to the location holding the value. The type behind that pointer is the
     /// one returned by [`LayoutTypeMethods::backend_type`].
-    Ref(V, Option<V>, Align),
+    Ref(PlaceValue<V>),
     /// A single LLVM immediate value.
     ///
     /// An `OperandValue` *must* be this variant for any type for which
@@ -362,7 +365,7 @@ pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
         } else {
             let ptr = bx.cx().type_ptr();
-            OperandValue::Ref(bx.const_poison(ptr), None, layout.align.abi)
+            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
         }
     }

@@ -410,17 +413,14 @@ pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
             // value is through `undef`/`poison`, and the store itself is useless.
         }
-        OperandValue::Ref(llval, None, source_align) => {
+        OperandValue::Ref(val) => {
             assert!(dest.layout.is_sized(), "cannot directly store unsized values");
-            let source_place = PlaceRef {
-                val: PlaceValue::new_sized(llval, source_align),
-                layout: dest.layout,
-            };
+            if val.llextra.is_some() {
+                bug!("cannot directly store unsized values");
+            }
+            let source_place = PlaceRef { val, layout: dest.layout };
             bx.typed_place_copy_with_flags(dest, source_place, flags);
         }
-        OperandValue::Ref(_, Some(_), _) => {
-            bug!("cannot directly store unsized values");
-        }
         OperandValue::Immediate(s) => {
             let val = bx.from_immediate(s);
             bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
@@ -457,7 +457,8 @@ pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest))
             .ty;

-        let OperandValue::Ref(llptr, Some(llextra), _) = self else {
+        let OperandValue::Ref(PlaceValue { llval: llptr, llextra: Some(llextra), .. }) = self
+        else {
             bug!("store_unsized called with a sized value (or with an extern type)")
         };

diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
index 6f1a76e7834..6725a6d9e38 100644
--- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
+++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs
@@ -68,13 +68,13 @@ pub fn codegen_rvalue(
                     base::coerce_unsized_into(bx, scratch, dest);
                     scratch.storage_dead(bx);
                 }
-                OperandValue::Ref(llref, None, align) => {
-                    let source = PlaceRef::new_sized_aligned(llref, operand.layout, align);
+                OperandValue::Ref(val) => {
+                    if val.llextra.is_some() {
+                        bug!("unsized coercion on an unsized rvalue");
+                    }
+                    let source = PlaceRef { val, layout: operand.layout };
                     base::coerce_unsized_into(bx, source, dest);
                 }
-                OperandValue::Ref(_, Some(_), _) => {
-                    bug!("unsized coercion on an unsized rvalue");
-                }
                 OperandValue::ZeroSized => {
                     bug!("unsized coercion on a ZST rvalue");
                 }
@@ -220,10 +220,10 @@ fn codegen_transmute_operand(
         let cast_kind = self.value_kind(cast);

         match operand.val {
-            OperandValue::Ref(ptr, meta, align) => {
-                debug_assert_eq!(meta, None);
+            OperandValue::Ref(source_place_val) => {
+                debug_assert_eq!(source_place_val.llextra, None);
                 debug_assert!(matches!(operand_kind, OperandValueKind::Ref));
-                let fake_place = PlaceRef::new_sized_aligned(ptr, cast, align);
+                let fake_place = PlaceRef { val: source_place_val, layout: cast };
                 Some(bx.load_operand(fake_place).val)
             }
             OperandValue::ZeroSized => {
@@ -490,7 +490,7 @@ pub fn codegen_rvalue_operand(
             }
             mir::CastKind::DynStar => {
                 let (lldata, llextra) = match operand.val {
-                    OperandValue::Ref(_, _, _) => todo!(),
+                    OperandValue::Ref(..) => todo!(),
                     OperandValue::Immediate(v) => (v, None),
                     OperandValue::Pair(v, l) => (v, Some(l)),
                     OperandValue::ZeroSized => bug!("ZST -- which is not PointerLike -- in DynStar"),
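
Viewed apart from the compiler, the refactor has a simple shape: the three tuple fields of `OperandValue::Ref` become one `PlaceValue` struct, the same struct a `PlaceRef` wraps, so constructors such as `PlaceValue::new_sized` and field names such as `llextra` are shared between operands and places. The following is a minimal, self-contained sketch of that shape, not the compiler's real definitions: `V` and `Align` are stand-ins, and the derives, `main`, and the numeric values are invented for illustration.

    // Sketch of the patch's shape: group the loose (pointer, metadata,
    // align) triple behind `OperandValue::Ref` into one named struct.

    /// Stand-in for the backend value type (`V` in rustc_codegen_ssa).
    type V = usize;
    /// Stand-in for rustc's `Align`.
    type Align = u64;

    #[derive(Copy, Clone, Debug)]
    struct PlaceValue {
        /// Pointer to the start of the place.
        llval: V,
        /// Extra data (vtable or length) for unsized places, else `None`.
        llextra: Option<V>,
        /// Known alignment of the pointer.
        align: Align,
    }

    impl PlaceValue {
        /// Mirrors the `PlaceValue::new_sized` helper used in the patch:
        /// a thin pointer with no unsized metadata.
        fn new_sized(llval: V, align: Align) -> Self {
            Self { llval, llextra: None, align }
        }
    }

    #[derive(Copy, Clone, Debug)]
    #[allow(dead_code)]
    enum OperandValue {
        /// After the patch: one struct field instead of `Ref(V, Option<V>, Align)`.
        Ref(PlaceValue),
        Immediate(V),
        Pair(V, V),
        ZeroSized,
    }

    fn main() {
        // Before: OperandValue::Ref(ptr, None, align) -- three loose fields.
        // After: the same data travels as one named bundle.
        let sized = OperandValue::Ref(PlaceValue::new_sized(0x1000, 8));

        // The unsized case keeps its metadata in the same bundle.
        let unsized_ =
            OperandValue::Ref(PlaceValue { llval: 0x2000, llextra: Some(16), align: 8 });

        for op in [sized, unsized_] {
            // Match arms select the unsized case by naming `llextra`,
            // not by tuple position as in `Ref(_, Some(_), _)`.
            if let OperandValue::Ref(PlaceValue { llextra: Some(meta), .. }) = op {
                println!("unsized operand, extra = {meta}");
            } else {
                println!("sized operand");
            }
        }
    }

One practical effect visible throughout the patch: patterns like `Ref(PlaceValue { llextra: Some(meta), .. })` bind the metadata by name and ignore the rest, so adding or reordering fields in `PlaceValue` no longer forces edits to every `Ref` pattern, and sized-vs-unsized bugs surface as explicit `llextra` checks rather than ignored `_` positions.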