diff --git a/compiler/rustc_abi/src/callconv/reg.rs b/compiler/rustc_abi/src/callconv/reg.rs
index 66f47c52c15..8cf140dbaad 100644
--- a/compiler/rustc_abi/src/callconv/reg.rs
+++ b/compiler/rustc_abi/src/callconv/reg.rs
@@ -57,7 +57,7 @@ impl Reg {
                 128 => dl.f128_align.abi,
                 _ => panic!("unsupported float: {self:?}"),
             },
-            RegKind::Vector => dl.vector_align(self.size).abi,
+            RegKind::Vector => dl.llvmlike_vector_align(self.size).abi,
         }
     }
 }
diff --git a/compiler/rustc_abi/src/layout.rs b/compiler/rustc_abi/src/layout.rs
index 53243266f99..3f83787ea37 100644
--- a/compiler/rustc_abi/src/layout.rs
+++ b/compiler/rustc_abi/src/layout.rs
@@ -310,10 +310,10 @@ impl LayoutCalculator {
         let mut align = if repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };
         let mut max_repr_align = repr.align;
 
-        // If all the non-ZST fields have the same ABI and union ABI optimizations aren't
-        // disabled, we can use that common ABI for the union as a whole.
+        // If all the non-ZST fields have the same repr and union repr optimizations aren't
+        // disabled, we can use that common repr for the union as a whole.
         struct AbiMismatch;
-        let mut common_non_zst_abi_and_align = if repr.inhibits_union_abi_opt() {
+        let mut common_non_zst_repr_and_align = if repr.inhibits_union_abi_opt() {
             // Can't optimize
             Err(AbiMismatch)
         } else {
@@ -337,14 +337,14 @@ impl LayoutCalculator {
                 continue;
             }
 
-            if let Ok(common) = common_non_zst_abi_and_align {
+            if let Ok(common) = common_non_zst_repr_and_align {
                 // Discard valid range information and allow undef
                 let field_abi = field.backend_repr.to_union();
 
                 if let Some((common_abi, common_align)) = common {
                     if common_abi != field_abi {
                         // Different fields have different ABI: disable opt
-                        common_non_zst_abi_and_align = Err(AbiMismatch);
+                        common_non_zst_repr_and_align = Err(AbiMismatch);
                     } else {
                         // Fields with the same non-Aggregate ABI should also
                         // have the same alignment
@@ -357,7 +357,7 @@ impl LayoutCalculator {
                     }
                 } else {
                     // First non-ZST field: record its ABI and alignment
-                    common_non_zst_abi_and_align = Ok(Some((field_abi, field.align.abi)));
+                    common_non_zst_repr_and_align = Ok(Some((field_abi, field.align.abi)));
                 }
             }
         }
@@ -376,16 +376,25 @@ impl LayoutCalculator {
 
         // If all non-ZST fields have the same ABI, we may forward that ABI
         // for the union as a whole, unless otherwise inhibited.
-        let abi = match common_non_zst_abi_and_align {
+        let backend_repr = match common_non_zst_repr_and_align {
             Err(AbiMismatch) | Ok(None) => BackendRepr::Memory { sized: true },
-            Ok(Some((abi, _))) => {
-                if abi.inherent_align(dl).map(|a| a.abi) != Some(align.abi) {
-                    // Mismatched alignment (e.g. union is #[repr(packed)]): disable opt
+            Ok(Some((repr, _))) => match repr {
+                // Mismatched alignment (e.g. union is #[repr(packed)]): disable opt
+                BackendRepr::Scalar(_) | BackendRepr::ScalarPair(_, _)
+                    if repr.scalar_align(dl).unwrap() != align.abi =>
+                {
                     BackendRepr::Memory { sized: true }
-                } else {
-                    abi
                 }
-            }
+                // Vectors require at least element alignment, else disable the opt
+                BackendRepr::Vector { element, count: _ } if element.align(dl).abi > align.abi => {
+                    BackendRepr::Memory { sized: true }
+                }
+                // the alignment tests passed and we can use this
+                BackendRepr::Scalar(..)
+                | BackendRepr::ScalarPair(..)
+                | BackendRepr::Vector { .. }
+                | BackendRepr::Memory { .. } => repr,
+            },
         };
 
         let Some(union_field_count) = NonZeroUsize::new(only_variant.len()) else {
@@ -400,7 +409,7 @@ impl LayoutCalculator {
         Ok(LayoutData {
             variants: Variants::Single { index: only_variant_idx },
             fields: FieldsShape::Union(union_field_count),
-            backend_repr: abi,
+            backend_repr,
             largest_niche: None,
             uninhabited: false,
             align,
diff --git a/compiler/rustc_abi/src/lib.rs b/compiler/rustc_abi/src/lib.rs
index 34228912041..eb90bb42b72 100644
--- a/compiler/rustc_abi/src/lib.rs
+++ b/compiler/rustc_abi/src/lib.rs
@@ -408,16 +408,21 @@ impl TargetDataLayout {
         }
     }
 
+    /// psABI-mandated alignment for a vector type, if any
    #[inline]
-    pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
-        for &(size, align) in &self.vector_align {
-            if size == vec_size {
-                return align;
-            }
-        }
-        // Default to natural alignment, which is what LLVM does.
-        // That is, use the size, rounded up to a power of 2.
-        AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
+    fn cabi_vector_align(&self, vec_size: Size) -> Option<AbiAndPrefAlign> {
+        self.vector_align
+            .iter()
+            .find(|(size, _align)| *size == vec_size)
+            .map(|(_size, align)| *align)
+    }
+
+    /// an alignment resembling the one LLVM would pick for a vector
+    #[inline]
+    pub fn llvmlike_vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
+        self.cabi_vector_align(vec_size).unwrap_or(AbiAndPrefAlign::new(
+            Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap(),
+        ))
     }
 }
 
@@ -810,20 +815,19 @@ impl Align {
         self.bits().try_into().unwrap()
     }
 
-    /// Computes the best alignment possible for the given offset
-    /// (the largest power of two that the offset is a multiple of).
+    /// Obtain the greatest factor of `size` that is an alignment
+    /// (the largest power of two the Size is a multiple of).
     ///
-    /// N.B., for an offset of `0`, this happens to return `2^64`.
+    /// Note that all numbers are factors of 0
     #[inline]
-    pub fn max_for_offset(offset: Size) -> Align {
-        Align { pow2: offset.bytes().trailing_zeros() as u8 }
+    pub fn max_aligned_factor(size: Size) -> Align {
+        Align { pow2: size.bytes().trailing_zeros() as u8 }
     }
 
-    /// Lower the alignment, if necessary, such that the given offset
-    /// is aligned to it (the offset is a multiple of the alignment).
+    /// Reduces Align to an aligned factor of `size`.
     #[inline]
-    pub fn restrict_for_offset(self, offset: Size) -> Align {
-        self.min(Align::max_for_offset(offset))
+    pub fn restrict_for_offset(self, size: Size) -> Align {
+        self.min(Align::max_aligned_factor(size))
     }
 }
 
@@ -1455,37 +1459,38 @@ impl BackendRepr {
         matches!(*self, BackendRepr::Scalar(s) if s.is_bool())
     }
 
-    /// Returns the fixed alignment of this ABI, if any is mandated.
-    pub fn inherent_align<C: HasDataLayout>(&self, cx: &C) -> Option<AbiAndPrefAlign> {
-        Some(match *self {
-            BackendRepr::Scalar(s) => s.align(cx),
-            BackendRepr::ScalarPair(s1, s2) => s1.align(cx).max(s2.align(cx)),
-            BackendRepr::Vector { element, count } => {
-                cx.data_layout().vector_align(element.size(cx) * count)
-            }
-            BackendRepr::Memory { .. } => return None,
-        })
+    /// The psABI alignment for a `Scalar` or `ScalarPair`
+    ///
+    /// `None` for other variants.
+    pub fn scalar_align<C: HasDataLayout>(&self, cx: &C) -> Option<Align> {
+        match *self {
+            BackendRepr::Scalar(s) => Some(s.align(cx).abi),
+            BackendRepr::ScalarPair(s1, s2) => Some(s1.align(cx).max(s2.align(cx)).abi),
+            // The align of a Vector can vary in surprising ways
+            BackendRepr::Vector { .. } | BackendRepr::Memory { .. } => None,
+        }
     }
 
-    /// Returns the fixed size of this ABI, if any is mandated.
-    pub fn inherent_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
-        Some(match *self {
-            BackendRepr::Scalar(s) => {
-                // No padding in scalars.
-                s.size(cx)
-            }
+    /// The psABI size for a `Scalar` or `ScalarPair`
+    ///
+    /// `None` for other variants
+    pub fn scalar_size<C: HasDataLayout>(&self, cx: &C) -> Option<Size> {
+        match *self {
+            // No padding in scalars.
+            BackendRepr::Scalar(s) => Some(s.size(cx)),
+            // May have some padding between the pair.
             BackendRepr::ScalarPair(s1, s2) => {
-                // May have some padding between the pair.
                 let field2_offset = s1.size(cx).align_to(s2.align(cx).abi);
-                (field2_offset + s2.size(cx)).align_to(self.inherent_align(cx)?.abi)
+                let size = (field2_offset + s2.size(cx)).align_to(
+                    self.scalar_align(cx)
+                        // We absolutely must have an answer here or everything is FUBAR.
+                        .unwrap(),
+                );
+                Some(size)
             }
-            BackendRepr::Vector { element, count } => {
-                // No padding in vectors, except possibly for trailing padding
-                // to make the size a multiple of align (e.g. for vectors of size 3).
-                (element.size(cx) * count).align_to(self.inherent_align(cx)?.abi)
-            }
-            BackendRepr::Memory { .. } => return None,
-        })
+            // The size of a Vector can vary in surprising ways
+            BackendRepr::Vector { .. } | BackendRepr::Memory { .. } => None,
+        }
     }
 
     /// Discard validity range information and allow undef.
diff --git a/compiler/rustc_ty_utils/src/layout.rs b/compiler/rustc_ty_utils/src/layout.rs
index c8b3c8a796f..2ab6b8e17c4 100644
--- a/compiler/rustc_ty_utils/src/layout.rs
+++ b/compiler/rustc_ty_utils/src/layout.rs
@@ -547,12 +547,15 @@ fn layout_of_uncached<'tcx>(
                 (
                     BackendRepr::Memory { sized: true },
                     AbiAndPrefAlign {
-                        abi: Align::max_for_offset(size),
-                        pref: dl.vector_align(size).pref,
+                        abi: Align::max_aligned_factor(size),
+                        pref: dl.llvmlike_vector_align(size).pref,
                     },
                 )
             } else {
-                (BackendRepr::Vector { element: e_abi, count: e_len }, dl.vector_align(size))
+                (
+                    BackendRepr::Vector { element: e_abi, count: e_len },
+                    dl.llvmlike_vector_align(size),
+                )
             };
             let size = size.align_to(align.abi);
 
diff --git a/compiler/rustc_ty_utils/src/layout/invariant.rs b/compiler/rustc_ty_utils/src/layout/invariant.rs
index 5ea6716c5ca..cbbe993b62f 100644
--- a/compiler/rustc_ty_utils/src/layout/invariant.rs
+++ b/compiler/rustc_ty_utils/src/layout/invariant.rs
@@ -69,31 +69,30 @@ pub(super) fn layout_sanity_check<'tcx>(cx: &LayoutCx<'tcx>, layout: &TyAndLayou
 }
 
 fn check_layout_abi<'tcx>(cx: &LayoutCx<'tcx>, layout: &TyAndLayout<'tcx>) {
-    // Verify the ABI mandated alignment and size.
-    let align = layout.backend_repr.inherent_align(cx).map(|align| align.abi);
-    let size = layout.backend_repr.inherent_size(cx);
-    let Some((align, size)) = align.zip(size) else {
-        assert_matches!(
-            layout.layout.backend_repr(),
-            BackendRepr::Memory { .. },
-            "ABI unexpectedly missing alignment and/or size in {layout:#?}"
+    // Verify the ABI-mandated alignment and size for scalars.
+    let align = layout.backend_repr.scalar_align(cx);
+    let size = layout.backend_repr.scalar_size(cx);
+    if let Some(align) = align {
+        assert_eq!(
+            layout.layout.align().abi,
+            align,
+            "alignment mismatch between ABI and layout in {layout:#?}"
         );
-        return;
-    };
-    assert_eq!(
-        layout.layout.align().abi,
-        align,
-        "alignment mismatch between ABI and layout in {layout:#?}"
-    );
-    assert_eq!(
-        layout.layout.size(),
-        size,
-        "size mismatch between ABI and layout in {layout:#?}"
-    );
+    }
+    if let Some(size) = size {
+        assert_eq!(
+            layout.layout.size(),
+            size,
+            "size mismatch between ABI and layout in {layout:#?}"
+        );
+    }
 
     // Verify per-ABI invariants
     match layout.layout.backend_repr() {
         BackendRepr::Scalar(_) => {
+            // These must always be present for `Scalar` types.
+            let align = align.unwrap();
+            let size = size.unwrap();
             // Check that this matches the underlying field.
             let inner = skip_newtypes(cx, layout);
             assert!(
@@ -235,9 +234,15 @@ pub(super) fn layout_sanity_check<'tcx>(cx: &LayoutCx<'tcx>, layout: &TyAndLayou
                 "`ScalarPair` second field with bad ABI in {inner:#?}",
            );
         }
-        BackendRepr::Vector { element, .. } => {
-            assert!(align >= element.align(cx).abi); // just sanity-checking `vector_align`.
-            // FIXME: Do some kind of check of the inner type, like for Scalar and ScalarPair.
+        BackendRepr::Vector { element, count } => {
+            let align = layout.align.abi;
+            let size = layout.size;
+            let element_align = element.align(cx).abi;
+            let element_size = element.size(cx);
+            // Currently, vectors must always be aligned to at least their elements:
+            assert!(align >= element_align);
+            // And the size has to be element * count plus alignment padding, of course
+            assert!(size == (element_size * count).align_to(align));
         }
         BackendRepr::Memory { .. } => {} // Nothing to check.
     }
diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
index f5a7b658123..a72bcad50a0 100644
--- a/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
+++ b/src/tools/rust-analyzer/crates/hir-ty/src/layout.rs
@@ -179,7 +179,7 @@ fn layout_of_simd_ty(
         .size
         .checked_mul(e_len, dl)
         .ok_or(LayoutError::BadCalc(LayoutCalculatorError::SizeOverflow))?;
-    let align = dl.vector_align(size);
+    let align = dl.llvmlike_vector_align(size);
     let size = size.align_to(align.abi);
 
     // Compute the placement of the vector fields:
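For reference, a rough standalone sketch of the size/alignment rules this patch encodes, with plain byte counts standing in for rustc's `Size` and `Align` types. The function names mirror the patched methods, but the signatures are simplified and illustrative, not the compiler's actual API:

// Sketch only: u64 byte counts instead of `Size`/`Align`, and the psABI
// lookup is modeled as an `Option` argument rather than a data-layout table.

/// Largest power of two that `size` is a multiple of, as in
/// `Align::max_aligned_factor`. Every power of two divides 0, so a size
/// of 0 yields 2^64 (returned as u128 to avoid overflow).
fn max_aligned_factor(size: u64) -> u128 {
    1u128 << size.trailing_zeros()
}

/// LLVM-like vector alignment, as in `llvmlike_vector_align`: use the
/// psABI-mandated alignment for this vector size if the target data
/// layout lists one, else the size rounded up to a power of two.
fn llvmlike_vector_align(cabi_align: Option<u64>, vec_size: u64) -> u64 {
    cabi_align.unwrap_or_else(|| vec_size.next_power_of_two())
}

/// psABI size of a scalar pair, as in `scalar_size`: the second field
/// starts at s1's size aligned up to s2's alignment, and the total is
/// then aligned up to the pair's alignment, max(a1, a2).
fn scalar_pair_size(s1: u64, a1: u64, s2: u64, a2: u64) -> u64 {
    let align_to = |x: u64, a: u64| x.div_ceil(a) * a; // `a` is a power of two
    let field2_offset = align_to(s1, a2);
    align_to(field2_offset + s2, a1.max(a2))
}

fn main() {
    assert_eq!(max_aligned_factor(24), 8); // 24 = 8 * 3
    assert_eq!(max_aligned_factor(0), 1u128 << 64);
    assert_eq!(llvmlike_vector_align(None, 12), 16); // no psABI entry: round 12 up to 16
    assert_eq!(llvmlike_vector_align(Some(16), 32), 16); // psABI entry wins over natural 32
    assert_eq!(scalar_pair_size(4, 4, 1, 1), 8); // e.g. an (i32, u8) pair occupies 8 bytes
}

The `max_aligned_factor(0)` case is why the renamed doc comment says "all numbers are factors of 0", and the `llvmlike_vector_align` fallback is the "size rounded up to a power of 2" behavior the old `vector_align` implemented inline.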