Put PlaceValue into OperandValue::Ref, rather than 3 tuple fields

Scott McMurray 2024-04-10 23:08:34 -07:00
parent 89502e584b
commit 3596098823
8 changed files with 76 additions and 63 deletions
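
The change is easiest to see outside the diff: `OperandValue::Ref` previously carried a pointer, optional unsizing metadata, and alignment as three positional tuple fields, and now carries those same three fields as the `PlaceValue` struct that `PlaceRef` already wraps. Below is a minimal, self-contained sketch of that shape; the `Align` newtype and the `u32` value type are stand-ins for rustc's actual backend types, not its real definitions:

    // Sketch only: `Align` here is a stand-in newtype; in rustc it comes
    // from rustc_target::abi, and `V` is the backend's value type.
    #[derive(Copy, Clone, Debug, PartialEq, Eq)]
    pub struct Align(pub u64);

    // The three fields that used to travel as `Ref(V, Option<V>, Align)`,
    // grouped into the same struct that `PlaceRef` already wraps.
    #[derive(Copy, Clone, Debug)]
    pub struct PlaceValue<V> {
        // A pointer to the contents of the place.
        pub llval: V,
        // The extra data (vtable or length) for unsized places, if any.
        pub llextra: Option<V>,
        // The alignment of the place.
        pub align: Align,
    }

    impl<V> PlaceValue<V> {
        // Constructor for the common sized case, as used by the
        // `OperandValue::Ref(PlaceValue::new_sized(..))` call sites in the
        // diff below.
        pub fn new_sized(llval: V, align: Align) -> Self {
            Self { llval, llextra: None, align }
        }
    }

    #[allow(dead_code)]
    pub enum OperandValue<V> {
        Ref(PlaceValue<V>), // was: Ref(V, Option<V>, Align)
        Immediate(V),
        Pair(V, V),
        ZeroSized,
    }

    fn main() {
        // Construction goes through the struct instead of positional fields...
        let val: OperandValue<u32> =
            OperandValue::Ref(PlaceValue::new_sized(0xdead, Align(8)));
        // ...and matches name only the fields they care about.
        if let OperandValue::Ref(PlaceValue { llextra: None, align, .. }) = val {
            assert_eq!(align, Align(8));
        }
    }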

compiler/rustc_codegen_gcc/src/builder.rs

@@ -999,8 +999,9 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
             }
         }
 
-        let val = if let Some(llextra) = place.val.llextra {
-            OperandValue::Ref(place.val.llval, Some(llextra), place.val.align)
+        let val = if let Some(_) = place.val.llextra {
+            // FIXME: Merge with the `else` below?
+            OperandValue::Ref(place.val)
         } else if place.layout.is_gcc_immediate() {
             let load = self.load(place.layout.gcc_type(self), place.val.llval, place.val.align);
             if let abi::Abi::Scalar(ref scalar) = place.layout.abi {
@@ -1031,7 +1032,7 @@ fn scalar_load_metadata<'a, 'gcc, 'tcx>(
                 load(1, b, place.val.align.restrict_for_offset(b_offset)),
             )
         } else {
-            OperandValue::Ref(place.val.llval, None, place.val.align)
+            OperandValue::Ref(place.val)
         };
 
         OperandRef { val, layout: place.layout }

compiler/rustc_codegen_gcc/src/intrinsic/mod.rs

@@ -11,7 +11,7 @@
 use rustc_codegen_ssa::common::IntPredicate;
 use rustc_codegen_ssa::errors::InvalidMonomorphization;
 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
-use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
 use rustc_codegen_ssa::traits::{
     ArgAbiMethods, BuilderMethods, ConstMethods, IntrinsicCallMethods,
 };
@@ -502,7 +502,7 @@ fn store(
             return;
         }
         if self.is_sized_indirect() {
-            OperandValue::Ref(val, None, self.layout.align.abi).store(bx, dst)
+            OperandValue::Ref(PlaceValue::new_sized(val, self.layout.align.abi)).store(bx, dst)
         } else if self.is_unsized_indirect() {
             bug!("unsized `ArgAbi` must be handled through `store_fn_arg`");
         } else if let PassMode::Cast { ref cast, .. } = self.mode {
@@ -571,7 +571,12 @@ fn store_fn_arg<'a>(
                 OperandValue::Pair(next(), next()).store(bx, dst);
             }
             PassMode::Indirect { meta_attrs: Some(_), .. } => {
-                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+                let place_val = PlaceValue {
+                    llval: next(),
+                    llextra: Some(next()),
+                    align: self.layout.align.abi,
+                };
+                OperandValue::Ref(place_val).store(bx, dst);
             }
             PassMode::Direct(_)
             | PassMode::Indirect { meta_attrs: None, .. }

compiler/rustc_codegen_llvm/src/abi.rs

@@ -7,7 +7,7 @@
 use crate::value::Value;
 
 use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
-use rustc_codegen_ssa::mir::place::PlaceRef;
+use rustc_codegen_ssa::mir::place::{PlaceRef, PlaceValue};
 use rustc_codegen_ssa::traits::*;
 use rustc_codegen_ssa::MemFlags;
 use rustc_middle::bug;
@@ -207,7 +207,7 @@ fn store(
             // Sized indirect arguments
             PassMode::Indirect { attrs, meta_attrs: None, on_stack: _ } => {
                 let align = attrs.pointee_align.unwrap_or(self.layout.align.abi);
-                OperandValue::Ref(val, None, align).store(bx, dst);
+                OperandValue::Ref(PlaceValue::new_sized(val, align)).store(bx, dst);
             }
             // Unsized indirect qrguments
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
@@ -265,7 +265,12 @@ fn store_fn_arg(
                 OperandValue::Pair(next(), next()).store(bx, dst);
             }
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => {
-                OperandValue::Ref(next(), Some(next()), self.layout.align.abi).store(bx, dst);
+                let place_val = PlaceValue {
+                    llval: next(),
+                    llextra: Some(next()),
+                    align: self.layout.align.abi,
+                };
+                OperandValue::Ref(place_val).store(bx, dst);
             }
             PassMode::Direct(_)
             | PassMode::Indirect { attrs: _, meta_attrs: None, on_stack: _ }
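
Both backends rewrite the unsized-indirect arm of `store_fn_arg` from the positional `Ref(next(), Some(next()), align)` into an explicit `PlaceValue` literal. That rewrite is behavior-preserving because Rust evaluates struct-literal fields in the order they are written, so the first `next()` still lands in `llval` and the second in `llextra`. A small stand-in demonstration of that property (none of these names are rustc's):

    struct PlaceValue {
        llval: i32,
        llextra: Option<i32>,
        align: u64,
    }

    fn main() {
        let mut counter = 0;
        let mut next = || {
            counter += 1;
            counter
        };

        // Struct-literal fields are evaluated left to right, in written
        // order, so this matches the old positional
        // `Ref(next(), Some(next()), align)`.
        let place_val = PlaceValue { llval: next(), llextra: Some(next()), align: 8 };
        assert_eq!(place_val.llval, 1); // first `next()` -> data pointer slot
        assert_eq!(place_val.llextra, Some(2)); // second `next()` -> metadata slot
        assert_eq!(place_val.align, 8);
    }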

compiler/rustc_codegen_llvm/src/builder.rs

@@ -579,8 +579,9 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
             }
         }
 
-        let val = if let Some(llextra) = place.val.llextra {
-            OperandValue::Ref(place.val.llval, Some(llextra), place.val.align)
+        let val = if let Some(_) = place.val.llextra {
+            // FIXME: Merge with the `else` below?
+            OperandValue::Ref(place.val)
         } else if place.layout.is_llvm_immediate() {
             let mut const_llval = None;
             let llty = place.layout.llvm_type(self);
@@ -623,7 +624,7 @@ fn scalar_load_metadata<'a, 'll, 'tcx>(
                 load(1, b, place.layout, place.val.align.restrict_for_offset(b_offset), b_offset),
             )
         } else {
-            OperandValue::Ref(place.val.llval, None, place.val.align)
+            OperandValue::Ref(place.val)
         };
 
         OperandRef { val, layout: place.layout }

compiler/rustc_codegen_ssa/src/mir/block.rs

@@ -455,8 +455,8 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
             }
             PassMode::Direct(_) | PassMode::Pair(..) => {
                 let op = self.codegen_consume(bx, mir::Place::return_place().as_ref());
-                if let Ref(llval, _, align) = op.val {
-                    bx.load(bx.backend_type(op.layout), llval, align)
+                if let Ref(place_val) = op.val {
+                    bx.load(bx.backend_type(op.layout), place_val.llval, place_val.align)
                 } else {
                     op.immediate_or_packed_pair(bx)
                 }
@@ -466,10 +466,9 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
                 let op = match self.locals[mir::RETURN_PLACE] {
                     LocalRef::Operand(op) => op,
                     LocalRef::PendingOperand => bug!("use of return before def"),
-                    LocalRef::Place(cg_place) => OperandRef {
-                        val: Ref(cg_place.val.llval, None, cg_place.val.align),
-                        layout: cg_place.layout,
-                    },
+                    LocalRef::Place(cg_place) => {
+                        OperandRef { val: Ref(cg_place.val), layout: cg_place.layout }
+                    }
                     LocalRef::UnsizedPlace(_) => bug!("return type must be sized"),
                 };
                 let llslot = match op.val {
@@ -478,9 +477,12 @@ fn codegen_return_terminator(&mut self, bx: &mut Bx) {
                         op.val.store(bx, scratch);
                         scratch.val.llval
                     }
-                    Ref(llval, _, align) => {
-                        assert_eq!(align, op.layout.align.abi, "return place is unaligned!");
-                        llval
-                    }
+                    Ref(place_val) => {
+                        assert_eq!(
+                            place_val.align, op.layout.align.abi,
+                            "return place is unaligned!"
+                        );
+                        place_val.llval
+                    }
                     ZeroSized => bug!("ZST return value shouldn't be in PassMode::Cast"),
                 };
 
@@ -1032,7 +1034,7 @@ fn codegen_call_terminator(
                             llargs.push(data_ptr);
                             continue 'make_args;
                         }
-                        Ref(data_ptr, Some(meta), _) => {
+                        Ref(PlaceValue { llval: data_ptr, llextra: Some(meta), .. }) => {
                             // by-value dynamic dispatch
                             llfn = Some(meth::VirtualIndex::from_index(idx).get_fn(
                                 bx,
@@ -1079,12 +1081,12 @@ fn codegen_call_terminator(
                 // The callee needs to own the argument memory if we pass it
                 // by-ref, so make a local copy of non-immediate constants.
                 match (&arg.node, op.val) {
-                    (&mir::Operand::Copy(_), Ref(_, None, _))
-                    | (&mir::Operand::Constant(_), Ref(_, None, _)) => {
+                    (&mir::Operand::Copy(_), Ref(PlaceValue { llextra: None, .. }))
+                    | (&mir::Operand::Constant(_), Ref(PlaceValue { llextra: None, .. })) => {
                         let tmp = PlaceRef::alloca(bx, op.layout);
                         bx.lifetime_start(tmp.val.llval, tmp.layout.size);
                         op.val.store(bx, tmp);
-                        op.val = Ref(tmp.val.llval, None, tmp.val.align);
+                        op.val = Ref(tmp.val);
                         copied_constant_arguments.push(tmp);
                     }
                     _ => {}
@@ -1428,7 +1430,7 @@ fn codegen_argument(
                 _ => bug!("codegen_argument: {:?} invalid for pair argument", op),
             },
             PassMode::Indirect { attrs: _, meta_attrs: Some(_), on_stack: _ } => match op.val {
-                Ref(a, Some(b), _) => {
+                Ref(PlaceValue { llval: a, llextra: Some(b), .. }) => {
                     llargs.push(a);
                     llargs.push(b);
                     return;
@@ -1459,28 +1461,25 @@ fn codegen_argument(
                 }
                 _ => (op.immediate_or_packed_pair(bx), arg.layout.align.abi, false),
             },
-            Ref(llval, llextra, align) => match arg.mode {
+            Ref(op_place_val) => match arg.mode {
                 PassMode::Indirect { attrs, .. } => {
                     let required_align = match attrs.pointee_align {
                         Some(pointee_align) => cmp::max(pointee_align, arg.layout.align.abi),
                         None => arg.layout.align.abi,
                     };
-                    if align < required_align {
+                    if op_place_val.align < required_align {
                         // For `foo(packed.large_field)`, and types with <4 byte alignment on x86,
                         // alignment requirements may be higher than the type's alignment, so copy
                         // to a higher-aligned alloca.
                         let scratch = PlaceRef::alloca_aligned(bx, arg.layout, required_align);
-                        let op_place = PlaceRef {
-                            val: PlaceValue { llval, llextra, align },
-                            layout: op.layout,
-                        };
+                        let op_place = PlaceRef { val: op_place_val, layout: op.layout };
                         bx.typed_place_copy(scratch, op_place);
                         (scratch.val.llval, scratch.val.align, true)
                     } else {
-                        (llval, align, true)
+                        (op_place_val.llval, op_place_val.align, true)
                     }
                 }
-                _ => (llval, align, true),
+                _ => (op_place_val.llval, op_place_val.align, true),
             },
             ZeroSized => match arg.mode {
                 PassMode::Indirect { on_stack, .. } => {
@@ -1560,15 +1559,16 @@ fn codegen_arguments_untupled(
         let tuple = self.codegen_operand(bx, operand);
 
         // Handle both by-ref and immediate tuples.
-        if let Ref(llval, None, align) = tuple.val {
-            let tuple_ptr = PlaceRef::new_sized_aligned(llval, tuple.layout, align);
+        if let Ref(place_val) = tuple.val {
+            if place_val.llextra.is_some() {
+                bug!("closure arguments must be sized");
+            }
+            let tuple_ptr = PlaceRef { val: place_val, layout: tuple.layout };
             for i in 0..tuple.layout.fields.count() {
                 let field_ptr = tuple_ptr.project_field(bx, i);
                 let field = bx.load_operand(field_ptr);
                 self.codegen_argument(bx, field, llargs, &args[i]);
             }
-        } else if let Ref(_, Some(_), _) = tuple.val {
-            bug!("closure arguments must be sized")
         } else {
             // If the tuple is immediate, the elements are as well.
             for i in 0..tuple.layout.fields.count() {
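
Throughout block.rs the positional patterns such as `Ref(a, Some(b), _)` become struct patterns that name only the fields they use, and the sized/unsized split in `codegen_arguments_untupled` collapses into one arm with an explicit `llextra` check. A simplified sketch of that pattern style (the types here are stand-ins, not rustc's actual definitions):

    #[allow(dead_code)]
    struct PlaceValue<V> {
        llval: V,
        llextra: Option<V>,
        align: u64,
    }

    #[allow(dead_code)]
    enum OperandValue<V> {
        Ref(PlaceValue<V>),
        Immediate(V),
    }

    fn describe(val: &OperandValue<u32>) -> &'static str {
        use OperandValue::*;
        match val {
            // The old style was positional: `Ref(_, Some(_), _)` and
            // `Ref(_, None, _)`. The struct pattern skips unused fields
            // with `..` instead of `_` placeholders.
            Ref(PlaceValue { llextra: Some(_), .. }) => "unsized by-ref",
            Ref(PlaceValue { llextra: None, .. }) => "sized by-ref",
            Immediate(_) => "immediate",
        }
    }

    fn main() {
        let v = OperandValue::Ref(PlaceValue { llval: 1, llextra: None, align: 4 });
        assert_eq!(describe(&v), "sized by-ref");
    }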

compiler/rustc_codegen_ssa/src/mir/debuginfo.rs

@@ -14,7 +14,7 @@
 use rustc_target::abi::{Abi, FieldIdx, FieldsShape, Size, VariantIdx};
 
 use super::operand::{OperandRef, OperandValue};
-use super::place::PlaceRef;
+use super::place::{PlaceRef, PlaceValue};
 use super::{FunctionCx, LocalRef};
 
 use std::ops::Range;
@@ -334,7 +334,7 @@ pub fn debug_introduce_local(&self, bx: &mut Bx, local: mir::Local) {
                 bx.set_var_name(place.val.llval, name);
             }
             LocalRef::Operand(operand) => match operand.val {
-                OperandValue::Ref(x, ..) | OperandValue::Immediate(x) => {
+                OperandValue::Ref(PlaceValue { llval: x, .. }) | OperandValue::Immediate(x) => {
                     bx.set_var_name(x, name);
                 }
                 OperandValue::Pair(a, b) => {

compiler/rustc_codegen_ssa/src/mir/operand.rs

@@ -23,11 +23,14 @@ pub enum OperandValue<V> {
     /// The second value, if any, is the extra data (vtable or length)
     /// which indicates that it refers to an unsized rvalue.
     ///
-    /// An `OperandValue` has this variant for types which are neither
-    /// `Immediate` nor `Pair`s. The backend value in this variant must be a
-    /// pointer to the *non*-immediate backend type. That pointee type is the
+    /// An `OperandValue` *must* be this variant for any type for which
+    /// [`LayoutTypeMethods::is_backend_ref`] returns `true`.
+    /// (That basically amounts to "isn't one of the other variants".)
+    ///
+    /// This holds a [`PlaceValue`] (like a [`PlaceRef`] does) with a pointer
+    /// to the location holding the value. The type behind that pointer is the
     /// one returned by [`LayoutTypeMethods::backend_type`].
-    Ref(V, Option<V>, Align),
+    Ref(PlaceValue<V>),
     /// A single LLVM immediate value.
     ///
     /// An `OperandValue` *must* be this variant for any type for which
@@ -362,7 +365,7 @@ pub fn poison<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             OperandValue::Pair(bx.const_poison(ibty0), bx.const_poison(ibty1))
         } else {
            let ptr = bx.cx().type_ptr();
-            OperandValue::Ref(bx.const_poison(ptr), None, layout.align.abi)
+            OperandValue::Ref(PlaceValue::new_sized(bx.const_poison(ptr), layout.align.abi))
         }
     }
 
@@ -410,17 +413,14 @@ pub(crate) fn store_with_flags<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
                 // Avoid generating stores of zero-sized values, because the only way to have a zero-sized
                 // value is through `undef`/`poison`, and the store itself is useless.
             }
-            OperandValue::Ref(llval, None, source_align) => {
+            OperandValue::Ref(val) => {
                 assert!(dest.layout.is_sized(), "cannot directly store unsized values");
-                let source_place = PlaceRef {
-                    val: PlaceValue::new_sized(llval, source_align),
-                    layout: dest.layout,
-                };
-                bx.typed_place_copy_with_flags(dest, source_place, flags);
-            }
-            OperandValue::Ref(_, Some(_), _) => {
-                bug!("cannot directly store unsized values");
-            }
+                if val.llextra.is_some() {
+                    bug!("cannot directly store unsized values");
+                }
+                let source_place = PlaceRef { val, layout: dest.layout };
+                bx.typed_place_copy_with_flags(dest, source_place, flags);
+            }
             OperandValue::Immediate(s) => {
                 let val = bx.from_immediate(s);
                 bx.store_with_flags(val, dest.val.llval, dest.val.align, flags);
@@ -457,7 +457,8 @@ pub fn store_unsized<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
             .unwrap_or_else(|| bug!("indirect_dest has non-pointer type: {:?}", indirect_dest))
             .ty;
 
-        let OperandValue::Ref(llptr, Some(llextra), _) = self else {
+        let OperandValue::Ref(PlaceValue { llval: llptr, llextra: Some(llextra), .. }) = self
+        else {
             bug!("store_unsized called with a sized value (or with an extern type)")
         };
 
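
`store_unsized` now destructures the operand through a nested struct pattern in a `let ... else`, pulling out the pointer and the required unsizing metadata in one step and diverging otherwise. A simplified sketch of that construct (the types are stand-ins, not rustc's):

    #[allow(dead_code)]
    struct PlaceValue<V> {
        llval: V,
        llextra: Option<V>,
    }

    #[allow(dead_code)]
    enum OperandValue<V> {
        Ref(PlaceValue<V>),
        Immediate(V),
    }

    // Mirrors the new `store_unsized` entry check: require the `Ref`
    // variant and the metadata in a single refutable pattern.
    fn unsized_parts<V>(op: OperandValue<V>) -> (V, V) {
        let OperandValue::Ref(PlaceValue { llval, llextra: Some(llextra) }) = op else {
            panic!("store_unsized called with a sized value (or with an extern type)");
        };
        (llval, llextra)
    }

    fn main() {
        let op = OperandValue::Ref(PlaceValue { llval: 7u32, llextra: Some(99) });
        assert_eq!(unsized_parts(op), (7, 99));
    }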

compiler/rustc_codegen_ssa/src/mir/rvalue.rs

@@ -68,13 +68,13 @@ pub fn codegen_rvalue(
                     base::coerce_unsized_into(bx, scratch, dest);
                     scratch.storage_dead(bx);
                 }
-                OperandValue::Ref(llref, None, align) => {
-                    let source = PlaceRef::new_sized_aligned(llref, operand.layout, align);
-                    base::coerce_unsized_into(bx, source, dest);
-                }
-                OperandValue::Ref(_, Some(_), _) => {
-                    bug!("unsized coercion on an unsized rvalue");
-                }
+                OperandValue::Ref(val) => {
+                    if val.llextra.is_some() {
+                        bug!("unsized coercion on an unsized rvalue");
+                    }
+                    let source = PlaceRef { val, layout: operand.layout };
+                    base::coerce_unsized_into(bx, source, dest);
+                }
                 OperandValue::ZeroSized => {
                     bug!("unsized coercion on a ZST rvalue");
                 }
@@ -220,10 +220,10 @@ fn codegen_transmute_operand(
         let cast_kind = self.value_kind(cast);
 
         match operand.val {
-            OperandValue::Ref(ptr, meta, align) => {
-                debug_assert_eq!(meta, None);
+            OperandValue::Ref(source_place_val) => {
+                debug_assert_eq!(source_place_val.llextra, None);
                 debug_assert!(matches!(operand_kind, OperandValueKind::Ref));
-                let fake_place = PlaceRef::new_sized_aligned(ptr, cast, align);
+                let fake_place = PlaceRef { val: source_place_val, layout: cast };
                 Some(bx.load_operand(fake_place).val)
             }
             OperandValue::ZeroSized => {
@@ -490,7 +490,7 @@ pub fn codegen_rvalue_operand(
             }
             mir::CastKind::DynStar => {
                 let (lldata, llextra) = match operand.val {
-                    OperandValue::Ref(_, _, _) => todo!(),
+                    OperandValue::Ref(..) => todo!(),
                     OperandValue::Immediate(v) => (v, None),
                     OperandValue::Pair(v, l) => (v, Some(l)),
                     OperandValue::ZeroSized => bug!("ZST -- which is not PointerLike -- in DynStar"),