Move `mir::Field` → `abi::FieldIdx`

The first PR for https://github.com/rust-lang/compiler-team/issues/606

This is just the move-and-rename, because it's plenty big-and-bitrotty already.  Future PRs will start using `FieldIdx` more broadly, and concomitantly removing `FieldIdx::new`s.
Scott McMurray 2023-03-28 12:32:57 -07:00
parent 37ab4bd419
commit 406eb96437
9 changed files with 18 additions and 18 deletions
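For context, `FieldIdx` (like `mir::Field` before it) is one of rustc's newtype indices: a `u32` wrapper used to address struct and enum fields instead of a bare `usize`. The real type is generated by rustc's `newtype_index!` macro and re-exported from `rustc_target::abi` (as the prelude change below shows); the following is only a minimal standalone sketch of the two operations the call sites in this diff rely on, `FieldIdx::new(i)` and `field.index()`, not the actual definition.

```rust
/// Illustrative stand-in for rustc's `FieldIdx` newtype index.
/// The real type is macro-generated; the struct and method bodies
/// here are assumptions made for this sketch.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct FieldIdx(u32);

impl FieldIdx {
    /// Build an index from a `usize`, mirroring the `FieldIdx::new(i)` calls in the diff.
    fn new(i: usize) -> Self {
        FieldIdx(u32::try_from(i).expect("field index overflowed u32"))
    }

    /// Convert back to `usize`, as `field.index()` is used in `codegen_field` below.
    fn index(self) -> usize {
        self.0 as usize
    }
}

fn main() {
    // A typed index keeps field positions from being confused with other integers.
    let tag_field = FieldIdx::new(0);
    assert_eq!(tag_field.index(), 0);
}
```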

View File

@@ -327,7 +327,7 @@ enum ArgKind<'tcx> {
ArgKind::Spread(params) => {
for (i, param) in params.into_iter().enumerate() {
if let Some(param) = param {
-place.place_field(fx, mir::Field::new(i)).write_cvalue(fx, param);
+place.place_field(fx, FieldIdx::new(i)).write_cvalue(fx, param);
}
}
}
@@ -460,7 +460,7 @@ pub(crate) fn codegen_terminator_call<'tcx>(
args.push(self_arg);
for i in 0..tupled_arguments.len() {
args.push(CallArgument {
-value: pack_arg.value.value_field(fx, mir::Field::new(i)),
+value: pack_arg.value.value_field(fx, FieldIdx::new(i)),
is_owned: pack_arg.is_owned,
});
}

View File

@@ -797,7 +797,7 @@ fn is_fat_ptr<'tcx>(fx: &FunctionCx<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
let index = fx.bcx.ins().iconst(fx.pointer_type, field_index as i64);
variant_dest.place_index(fx, index)
} else {
-variant_dest.place_field(fx, mir::Field::new(field_index))
+variant_dest.place_field(fx, FieldIdx::new(field_index))
};
to.write_cvalue(fx, operand);
}

View File

@@ -26,7 +26,7 @@ pub(crate) fn codegen_set_discriminant<'tcx>(
tag_encoding: TagEncoding::Direct,
variants: _,
} => {
-let ptr = place.place_field(fx, mir::Field::new(tag_field));
+let ptr = place.place_field(fx, FieldIdx::new(tag_field));
let to = layout.ty.discriminant_for_variant(fx.tcx, variant_index).unwrap().val;
let to = if ptr.layout().abi.is_signed() {
ty::ScalarInt::try_from_int(
@@ -47,7 +47,7 @@ pub(crate) fn codegen_set_discriminant<'tcx>(
variants: _,
} => {
if variant_index != untagged_variant {
-let niche = place.place_field(fx, mir::Field::new(tag_field));
+let niche = place.place_field(fx, FieldIdx::new(tag_field));
let niche_type = fx.clif_type(niche.layout().ty).unwrap();
let niche_value = variant_index.as_u32() - niche_variants.start().as_u32();
let niche_value = (niche_value as u128).wrapping_add(niche_start);
@@ -107,7 +107,7 @@ pub(crate) fn codegen_get_discriminant<'tcx>(
let cast_to = fx.clif_type(dest_layout.ty).unwrap();
// Read the tag/niche-encoded discriminant from memory.
-let tag = value.value_field(fx, mir::Field::new(tag_field));
+let tag = value.value_field(fx, FieldIdx::new(tag_field));
let tag = tag.load_scalar(fx);
// Decode the discriminant (specifically if it's niche-encoded).

View File

@@ -179,8 +179,8 @@ fn llvm_add_sub<'tcx>(
// c + carry -> c + first intermediate carry or borrow respectively
let int0 = crate::num::codegen_checked_int_binop(fx, bin_op, a, b);
-let c = int0.value_field(fx, mir::Field::new(0));
-let cb0 = int0.value_field(fx, mir::Field::new(1)).load_scalar(fx);
+let c = int0.value_field(fx, FieldIdx::new(0));
+let cb0 = int0.value_field(fx, FieldIdx::new(1)).load_scalar(fx);
// c + carry -> c + second intermediate carry or borrow respectively
let cb_in_as_u64 = fx.bcx.ins().uextend(types::I64, cb_in);

View File

@@ -253,7 +253,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
}
ret.write_cvalue(fx, base);
-let ret_lane = ret.place_field(fx, mir::Field::new(idx.try_into().unwrap()));
+let ret_lane = ret.place_field(fx, FieldIdx::new(idx.try_into().unwrap()));
ret_lane.write_cvalue(fx, val);
}

View File

@@ -86,7 +86,7 @@ mod prelude {
self, FloatTy, Instance, InstanceDef, IntTy, ParamEnv, Ty, TyCtxt, TypeAndMut,
TypeFoldable, TypeVisitableExt, UintTy,
};
-pub(crate) use rustc_target::abi::{Abi, Scalar, Size, VariantIdx, FIRST_VARIANT};
+pub(crate) use rustc_target::abi::{Abi, FieldIdx, Scalar, Size, VariantIdx, FIRST_VARIANT};
pub(crate) use rustc_data_structures::fx::FxHashMap;

View File

@@ -147,8 +147,8 @@ pub(crate) fn coerce_unsized_into<'tcx>(
assert_eq!(def_a, def_b);
for i in 0..def_a.variant(FIRST_VARIANT).fields.len() {
-let src_f = src.value_field(fx, mir::Field::new(i));
-let dst_f = dst.place_field(fx, mir::Field::new(i));
+let src_f = src.value_field(fx, FieldIdx::new(i));
+let dst_f = dst.place_field(fx, FieldIdx::new(i));
if dst_f.layout().is_zst() {
continue;

View File

@@ -10,7 +10,7 @@ fn codegen_field<'tcx>(
base: Pointer,
extra: Option<Value>,
layout: TyAndLayout<'tcx>,
-field: mir::Field,
+field: FieldIdx,
) -> (Pointer, TyAndLayout<'tcx>) {
let field_offset = layout.fields.offset(field.index());
let field_layout = layout.field(&*fx, field.index());
@@ -210,7 +210,7 @@ pub(crate) fn load_scalar_pair(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Valu
pub(crate) fn value_field(
self,
fx: &mut FunctionCx<'_, '_, 'tcx>,
-field: mir::Field,
+field: FieldIdx,
) -> CValue<'tcx> {
let layout = self.1;
match self.0 {
@@ -687,7 +687,7 @@ pub(crate) fn place_opaque_cast(
pub(crate) fn place_field(
self,
fx: &mut FunctionCx<'_, '_, 'tcx>,
-field: mir::Field,
+field: FieldIdx,
) -> CPlace<'tcx> {
let layout = self.layout();

View File

@@ -50,7 +50,7 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
if let Abi::Scalar(_) = arg.layout().abi {
'descend_newtypes: while !arg.layout().ty.is_unsafe_ptr() && !arg.layout().ty.is_ref() {
for i in 0..arg.layout().fields.count() {
-let field = arg.value_field(fx, mir::Field::new(i));
+let field = arg.value_field(fx, FieldIdx::new(i));
if !field.layout().is_zst() {
// we found the one non-zero-sized field that is allowed
// now find *its* non-zero-sized field, or stop if it's a
@@ -68,9 +68,9 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
if ty.is_dyn_star() {
let inner_layout = fx.layout_of(arg.layout().ty.builtin_deref(true).unwrap().ty);
let dyn_star = CPlace::for_ptr(Pointer::new(arg.load_scalar(fx)), inner_layout);
-let ptr = dyn_star.place_field(fx, mir::Field::new(0)).to_ptr();
+let ptr = dyn_star.place_field(fx, FieldIdx::new(0)).to_ptr();
let vtable =
-dyn_star.place_field(fx, mir::Field::new(1)).to_cvalue(fx).load_scalar(fx);
+dyn_star.place_field(fx, FieldIdx::new(1)).to_cvalue(fx).load_scalar(fx);
break 'block (ptr, vtable);
}
}