diff --git a/src/abi/mod.rs b/src/abi/mod.rs
index c532a3dfec2..dfca5dcbec8 100644
--- a/src/abi/mod.rs
+++ b/src/abi/mod.rs
@@ -193,7 +193,7 @@ fn make_local_place<'tcx>(
         );
     }
     let place = if is_ssa {
-        if let rustc_target::abi::Abi::ScalarPair(_, _) = layout.abi {
+        if let BackendRepr::ScalarPair(_, _) = layout.backend_repr {
             CPlace::new_var_pair(fx, local, layout)
         } else {
             CPlace::new_var(fx, local, layout)
diff --git a/src/abi/pass_mode.rs b/src/abi/pass_mode.rs
index 38c322b5e04..ad0a13dc7e5 100644
--- a/src/abi/pass_mode.rs
+++ b/src/abi/pass_mode.rs
@@ -78,19 +78,19 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
     fn get_abi_param(&self, tcx: TyCtxt<'tcx>) -> SmallVec<[AbiParam; 2]> {
         match self.mode {
             PassMode::Ignore => smallvec![],
-            PassMode::Direct(attrs) => match self.layout.abi {
-                Abi::Scalar(scalar) => smallvec![apply_arg_attrs_to_abi_param(
+            PassMode::Direct(attrs) => match self.layout.backend_repr {
+                BackendRepr::Scalar(scalar) => smallvec![apply_arg_attrs_to_abi_param(
                     AbiParam::new(scalar_to_clif_type(tcx, scalar)),
                     attrs
                 )],
-                Abi::Vector { .. } => {
+                BackendRepr::Vector { .. } => {
                     let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout);
                     smallvec![AbiParam::new(vector_ty)]
                 }
-                _ => unreachable!("{:?}", self.layout.abi),
+                _ => unreachable!("{:?}", self.layout.backend_repr),
             },
-            PassMode::Pair(attrs_a, attrs_b) => match self.layout.abi {
-                Abi::ScalarPair(a, b) => {
+            PassMode::Pair(attrs_a, attrs_b) => match self.layout.backend_repr {
+                BackendRepr::ScalarPair(a, b) => {
                     let a = scalar_to_clif_type(tcx, a);
                     let b = scalar_to_clif_type(tcx, b);
                     smallvec![
@@ -98,7 +98,7 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
                         apply_arg_attrs_to_abi_param(AbiParam::new(b), attrs_b),
                     ]
                 }
-                _ => unreachable!("{:?}", self.layout.abi),
+                _ => unreachable!("{:?}", self.layout.backend_repr),
             },
             PassMode::Cast { ref cast, pad_i32 } => {
                 assert!(!pad_i32, "padding support not yet implemented");
@@ -130,23 +130,23 @@ impl<'tcx> ArgAbiExt<'tcx> for ArgAbi<'tcx, Ty<'tcx>> {
     fn get_abi_return(&self, tcx: TyCtxt<'tcx>) -> (Option<AbiParam>, Vec<AbiParam>) {
         match self.mode {
             PassMode::Ignore => (None, vec![]),
-            PassMode::Direct(_) => match self.layout.abi {
-                Abi::Scalar(scalar) => {
+            PassMode::Direct(_) => match self.layout.backend_repr {
+                BackendRepr::Scalar(scalar) => {
                     (None, vec![AbiParam::new(scalar_to_clif_type(tcx, scalar))])
                 }
-                Abi::Vector { .. } => {
+                BackendRepr::Vector { .. } => {
                     let vector_ty = crate::intrinsics::clif_vector_type(tcx, self.layout);
                     (None, vec![AbiParam::new(vector_ty)])
                 }
-                _ => unreachable!("{:?}", self.layout.abi),
+                _ => unreachable!("{:?}", self.layout.backend_repr),
             },
-            PassMode::Pair(_, _) => match self.layout.abi {
-                Abi::ScalarPair(a, b) => {
+            PassMode::Pair(_, _) => match self.layout.backend_repr {
+                BackendRepr::ScalarPair(a, b) => {
                     let a = scalar_to_clif_type(tcx, a);
                     let b = scalar_to_clif_type(tcx, b);
                     (None, vec![AbiParam::new(a), AbiParam::new(b)])
                 }
-                _ => unreachable!("{:?}", self.layout.abi),
+                _ => unreachable!("{:?}", self.layout.backend_repr),
             },
             PassMode::Cast { ref cast, .. } => {
                 (None, cast_target_to_abi_params(cast).into_iter().collect())
diff --git a/src/base.rs b/src/base.rs
index e7a67c11fe8..10d5dce9b36 100644
--- a/src/base.rs
+++ b/src/base.rs
@@ -290,7 +290,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
     let arg_uninhabited = fx
         .mir
         .args_iter()
-        .any(|arg| fx.layout_of(fx.monomorphize(fx.mir.local_decls[arg].ty)).abi.is_uninhabited());
+        .any(|arg| fx.layout_of(fx.monomorphize(fx.mir.local_decls[arg].ty)).is_uninhabited());
     if arg_uninhabited {
         fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
         fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
@@ -644,9 +644,9 @@ fn codegen_stmt<'tcx>(
                                 _ => unreachable!("un op Neg for {:?}", layout.ty),
                             }
                         }
-                        UnOp::PtrMetadata => match layout.abi {
-                            Abi::Scalar(_) => CValue::zst(dest_layout),
-                            Abi::ScalarPair(_, _) => {
+                        UnOp::PtrMetadata => match layout.backend_repr {
+                            BackendRepr::Scalar(_) => CValue::zst(dest_layout),
+                            BackendRepr::ScalarPair(_, _) => {
                                 CValue::by_val(operand.load_scalar_pair(fx).1, dest_layout)
                             }
                             _ => bug!("Unexpected `PtrToMetadata` operand: {operand:?}"),
diff --git a/src/discriminant.rs b/src/discriminant.rs
index d462dcd63a9..45794a42665 100644
--- a/src/discriminant.rs
+++ b/src/discriminant.rs
@@ -14,7 +14,7 @@ pub(crate) fn codegen_set_discriminant<'tcx>(
     variant_index: VariantIdx,
 ) {
     let layout = place.layout();
-    if layout.for_variant(fx, variant_index).abi.is_uninhabited() {
+    if layout.for_variant(fx, variant_index).is_uninhabited() {
         return;
     }
     match layout.variants {
@@ -80,7 +80,7 @@ pub(crate) fn codegen_get_discriminant<'tcx>(
 ) {
     let layout = value.layout();
 
-    if layout.abi.is_uninhabited() {
+    if layout.is_uninhabited() {
         return;
     }
 
diff --git a/src/intrinsics/mod.rs b/src/intrinsics/mod.rs
index 6371b52720d..e5a12162687 100644
--- a/src/intrinsics/mod.rs
+++ b/src/intrinsics/mod.rs
@@ -51,8 +51,8 @@ fn report_atomic_type_validation_error<'tcx>(
 }
 
 pub(crate) fn clif_vector_type<'tcx>(tcx: TyCtxt<'tcx>, layout: TyAndLayout<'tcx>) -> Type {
-    let (element, count) = match layout.abi {
-        Abi::Vector { element, count } => (element, count),
+    let (element, count) = match layout.backend_repr {
+        BackendRepr::Vector { element, count } => (element, count),
         _ => unreachable!(),
     };
 
@@ -506,7 +506,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
             let layout = fx.layout_of(generic_args.type_at(0));
             // Note: Can't use is_unsized here as truly unsized types need to take the fixed size
             // branch
-            let meta = if let Abi::ScalarPair(_, _) = ptr.layout().abi {
+            let meta = if let BackendRepr::ScalarPair(_, _) = ptr.layout().backend_repr {
                 Some(ptr.load_scalar_pair(fx).1)
             } else {
                 None
@@ -520,7 +520,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
             let layout = fx.layout_of(generic_args.type_at(0));
             // Note: Can't use is_unsized here as truly unsized types need to take the fixed size
             // branch
-            let meta = if let Abi::ScalarPair(_, _) = ptr.layout().abi {
+            let meta = if let BackendRepr::ScalarPair(_, _) = ptr.layout().backend_repr {
                 Some(ptr.load_scalar_pair(fx).1)
             } else {
                 None
@@ -694,7 +694,7 @@ fn codegen_regular_intrinsic_call<'tcx>(
             let layout = fx.layout_of(ty);
             let msg_str = with_no_visible_paths!({
                 with_no_trimmed_paths!({
-                    if layout.abi.is_uninhabited() {
+                    if layout.is_uninhabited() {
                         // Use this error even for the other intrinsics as it is more precise.
                         format!("attempted to instantiate uninhabited type `{}`", ty)
                     } else if intrinsic == sym::assert_zero_valid {
diff --git a/src/lib.rs b/src/lib.rs
index f63685acdef..3b75283257c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -92,6 +92,7 @@ mod prelude {
         StackSlotData, StackSlotKind, TrapCode, Type, Value, types,
     };
     pub(crate) use cranelift_module::{self, DataDescription, FuncId, Linkage, Module};
+    pub(crate) use rustc_abi::{BackendRepr, FIRST_VARIANT, FieldIdx, Scalar, Size, VariantIdx};
     pub(crate) use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
     pub(crate) use rustc_hir::def_id::{DefId, LOCAL_CRATE};
     pub(crate) use rustc_index::Idx;
@@ -101,7 +102,6 @@ mod prelude {
         self, FloatTy, Instance, InstanceKind, IntTy, ParamEnv, Ty, TyCtxt, UintTy,
     };
     pub(crate) use rustc_span::Span;
-    pub(crate) use rustc_target::abi::{Abi, FIRST_VARIANT, FieldIdx, Scalar, Size, VariantIdx};
 
     pub(crate) use crate::abi::*;
     pub(crate) use crate::base::{codegen_operand, codegen_place};
diff --git a/src/value_and_place.rs b/src/value_and_place.rs
index fd77502224e..900d7e69714 100644
--- a/src/value_and_place.rs
+++ b/src/value_and_place.rs
@@ -131,8 +131,8 @@ pub(crate) fn dyn_star_force_data_on_stack(
 
         match self.0 {
             CValueInner::ByRef(ptr, None) => {
-                let (a_scalar, b_scalar) = match self.1.abi {
-                    Abi::ScalarPair(a, b) => (a, b),
+                let (a_scalar, b_scalar) = match self.1.backend_repr {
+                    BackendRepr::ScalarPair(a, b) => (a, b),
                     _ => unreachable!("dyn_star_force_data_on_stack({:?})", self),
                 };
                 let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
@@ -164,15 +164,15 @@ pub(crate) fn try_to_ptr(self) -> Option<(Pointer, Option<Value>)> {
         }
     }
 
-    /// Load a value with layout.abi of scalar
+    /// Load a value with layout.backend_repr of scalar
     #[track_caller]
     pub(crate) fn load_scalar(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> Value {
         let layout = self.1;
         match self.0 {
             CValueInner::ByRef(ptr, None) => {
-                let clif_ty = match layout.abi {
-                    Abi::Scalar(scalar) => scalar_to_clif_type(fx.tcx, scalar),
-                    Abi::Vector { element, count } => scalar_to_clif_type(fx.tcx, element)
+                let clif_ty = match layout.backend_repr {
+                    BackendRepr::Scalar(scalar) => scalar_to_clif_type(fx.tcx, scalar),
+                    BackendRepr::Vector { element, count } => scalar_to_clif_type(fx.tcx, element)
                         .by(u32::try_from(count).unwrap())
                         .unwrap(),
                     _ => unreachable!("{:?}", layout.ty),
@@ -187,14 +187,14 @@ pub(crate) fn load_scalar(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> Value {
         }
     }
 
-    /// Load a value pair with layout.abi of scalar pair
+    /// Load a value pair with layout.backend_repr of scalar pair
     #[track_caller]
    pub(crate) fn load_scalar_pair(self, fx: &mut FunctionCx<'_, '_, 'tcx>) -> (Value, Value) {
         let layout = self.1;
         match self.0 {
             CValueInner::ByRef(ptr, None) => {
-                let (a_scalar, b_scalar) = match layout.abi {
-                    Abi::ScalarPair(a, b) => (a, b),
+                let (a_scalar, b_scalar) = match layout.backend_repr {
+                    BackendRepr::ScalarPair(a, b) => (a, b),
                     _ => unreachable!("load_scalar_pair({:?})", self),
                 };
                 let b_offset = scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
@@ -222,8 +222,8 @@ pub(crate) fn value_field(
         let layout = self.1;
         match self.0 {
             CValueInner::ByVal(_) => unreachable!(),
-            CValueInner::ByValPair(val1, val2) => match layout.abi {
-                Abi::ScalarPair(_, _) => {
+            CValueInner::ByValPair(val1, val2) => match layout.backend_repr {
+                BackendRepr::ScalarPair(_, _) => {
                     let val = match field.as_u32() {
                         0 => val1,
                         1 => val2,
@@ -232,7 +232,7 @@ pub(crate) fn value_field(
                     let field_layout = layout.field(&*fx, usize::from(field));
                     CValue::by_val(val, field_layout)
                 }
-                _ => unreachable!("value_field for ByValPair with abi {:?}", layout.abi),
+                _ => unreachable!("value_field for ByValPair with abi {:?}", layout.backend_repr),
             },
             CValueInner::ByRef(ptr, None) => {
                 let (field_ptr, field_layout) = codegen_field(fx, ptr, None, layout, field);
@@ -360,7 +360,7 @@ pub(crate) fn const_val(
     pub(crate) fn cast_pointer_to(self, layout: TyAndLayout<'tcx>) -> Self {
         assert!(matches!(self.layout().ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
         assert!(matches!(layout.ty.kind(), ty::Ref(..) | ty::RawPtr(..) | ty::FnPtr(..)));
-        assert_eq!(self.layout().abi, layout.abi);
+        assert_eq!(self.layout().backend_repr, layout.backend_repr);
         CValue(self.0, layout)
     }
 }
@@ -609,8 +609,8 @@ fn transmute_scalar<'tcx>(
         let dst_layout = self.layout();
         match self.inner {
             CPlaceInner::Var(_local, var) => {
-                let data = match from.1.abi {
-                    Abi::Scalar(_) => CValue(from.0, dst_layout).load_scalar(fx),
+                let data = match from.1.backend_repr {
+                    BackendRepr::Scalar(_) => CValue(from.0, dst_layout).load_scalar(fx),
                     _ => {
                         let (ptr, meta) = from.force_stack(fx);
                         assert!(meta.is_none());
@@ -621,8 +621,10 @@ fn transmute_scalar<'tcx>(
                 transmute_scalar(fx, var, data, dst_ty);
             }
             CPlaceInner::VarPair(_local, var1, var2) => {
-                let (data1, data2) = match from.1.abi {
-                    Abi::ScalarPair(_, _) => CValue(from.0, dst_layout).load_scalar_pair(fx),
+                let (data1, data2) = match from.1.backend_repr {
+                    BackendRepr::ScalarPair(_, _) => {
+                        CValue(from.0, dst_layout).load_scalar_pair(fx)
+                    }
                     _ => {
                         let (ptr, meta) = from.force_stack(fx);
                         assert!(meta.is_none());
@@ -635,7 +637,9 @@ fn transmute_scalar<'tcx>(
             }
             CPlaceInner::Addr(_, Some(_)) => bug!("Can't write value to unsized place {:?}", self),
             CPlaceInner::Addr(to_ptr, None) => {
-                if dst_layout.size == Size::ZERO || dst_layout.abi == Abi::Uninhabited {
+                if dst_layout.size == Size::ZERO
+                    || dst_layout.backend_repr == BackendRepr::Uninhabited
+                {
                     return;
                 }
 
@@ -646,23 +650,28 @@ fn transmute_scalar<'tcx>(
                     CValueInner::ByVal(val) => {
                         to_ptr.store(fx, val, flags);
                     }
-                    CValueInner::ByValPair(val1, val2) => match from.layout().abi {
-                        Abi::ScalarPair(a_scalar, b_scalar) => {
+                    CValueInner::ByValPair(val1, val2) => match from.layout().backend_repr {
+                        BackendRepr::ScalarPair(a_scalar, b_scalar) => {
                             let b_offset =
                                 scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                             to_ptr.store(fx, val1, flags);
                             to_ptr.offset(fx, b_offset).store(fx, val2, flags);
                         }
-                        _ => bug!("Non ScalarPair abi {:?} for ByValPair CValue", dst_layout.abi),
+                        _ => {
+                            bug!(
+                                "Non ScalarPair repr {:?} for ByValPair CValue",
+                                dst_layout.backend_repr
+                            )
+                        }
                     },
                     CValueInner::ByRef(from_ptr, None) => {
-                        match from.layout().abi {
-                            Abi::Scalar(_) => {
+                        match from.layout().backend_repr {
+                            BackendRepr::Scalar(_) => {
                                 let val = from.load_scalar(fx);
                                 to_ptr.store(fx, val, flags);
                                 return;
                             }
-                            Abi::ScalarPair(a_scalar, b_scalar) => {
+                            BackendRepr::ScalarPair(a_scalar, b_scalar) => {
                                 let b_offset =
                                     scalar_pair_calculate_b_offset(fx.tcx, a_scalar, b_scalar);
                                 let (val1, val2) = from.load_scalar_pair(fx);
diff --git a/src/vtable.rs b/src/vtable.rs
index 14c607ccad7..82b6178be9d 100644
--- a/src/vtable.rs
+++ b/src/vtable.rs
@@ -47,7 +47,7 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
     idx: usize,
 ) -> (Pointer, Value) {
     let (ptr, vtable) = 'block: {
-        if let Abi::Scalar(_) = arg.layout().abi {
+        if let BackendRepr::Scalar(_) = arg.layout().backend_repr {
             while !arg.layout().ty.is_unsafe_ptr() && !arg.layout().ty.is_ref() {
                 let (idx, _) = arg
                     .layout()
@@ -68,7 +68,7 @@ pub(crate) fn get_ptr_and_method_ref<'tcx>(
             }
         }
 
-        if let Abi::ScalarPair(_, _) = arg.layout().abi {
+        if let BackendRepr::ScalarPair(_, _) = arg.layout().backend_repr {
             let (ptr, vtable) = arg.load_scalar_pair(fx);
             (Pointer::new(ptr), vtable)
         } else {