unchecked_{shl|shr} should use u32 as the RHS

parent 1ca6777c01
commit 3b16c04676
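For context, a minimal caller-side sketch (not part of this commit) of what the changed API looks like on a nightly toolchain with `unchecked_math` enabled; the concrete values are illustrative only:

    #![feature(unchecked_math)]

    fn main() {
        let x: u64 = 0x1234;
        // The shift amount is now `u32` for every integer type, matching
        // checked_shl / wrapping_shl / overflowing_shl, instead of `Self`.
        // SAFETY: 8 < u64::BITS, so the shift amount is in range.
        let y = unsafe { x.unchecked_shl(8u32) };
        assert_eq!(y, 0x1234u64 << 8);
    }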
@@ -130,6 +130,7 @@
 #![feature(const_pin)]
 #![feature(const_ptr_sub_ptr)]
 #![feature(const_replace)]
+#![feature(const_result_drop)]
 #![feature(const_ptr_as_ref)]
 #![feature(const_ptr_is_null)]
 #![feature(const_ptr_read)]
@@ -757,10 +757,11 @@ macro_rules! int_impl {
         #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
         #[inline(always)]
         #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-        pub const unsafe fn unchecked_shl(self, rhs: Self) -> Self {
+        pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
             // SAFETY: the caller must uphold the safety contract for
             // `unchecked_shl`.
-            unsafe { intrinsics::unchecked_shl(self, rhs) }
+            // Any legal shift amount is losslessly representable in the self type.
+            unsafe { intrinsics::unchecked_shl(self, rhs.try_into().ok().unwrap_unchecked()) }
         }

         /// Checked shift right. Computes `self >> rhs`, returning `None` if `rhs` is
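The new body narrows the `u32` shift amount back down to `Self` before calling the intrinsic, which still takes both operands in the same type. A standalone sketch of that pattern, assuming edition 2021 (where `TryInto` is in the prelude) and using an ordinary `<<` to stand in for `intrinsics::unchecked_shl`; the helper name is hypothetical:

    /// SAFETY: callers must guarantee `rhs < i8::BITS`, exactly as for
    /// `i8::unchecked_shl`. Every such value (0..=7) fits in an `i8`, so
    /// `try_into` cannot fail and `unwrap_unchecked` never sees an `Err`.
    pub unsafe fn unchecked_shl_i8_sketch(lhs: i8, rhs: u32) -> i8 {
        let rhs: i8 = rhs.try_into().ok().unwrap_unchecked();
        lhs << rhs // stand-in for `intrinsics::unchecked_shl`
    }

    fn main() {
        // SAFETY: 3 < i8::BITS.
        assert_eq!(unsafe { unchecked_shl_i8_sketch(5, 3) }, 40);
    }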
@@ -804,10 +805,11 @@ macro_rules! int_impl {
         #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
         #[inline(always)]
         #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-        pub const unsafe fn unchecked_shr(self, rhs: Self) -> Self {
+        pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
             // SAFETY: the caller must uphold the safety contract for
             // `unchecked_shr`.
-            unsafe { intrinsics::unchecked_shr(self, rhs) }
+            // Any legal shift amount is losslessly representable in the self type.
+            unsafe { intrinsics::unchecked_shr(self, rhs.try_into().ok().unwrap_unchecked()) }
         }

         /// Checked absolute value. Computes `self.abs()`, returning `None` if
@@ -1354,11 +1356,12 @@ macro_rules! int_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
         pub const fn wrapping_shl(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
             unsafe {
-                intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT)
+                self.unchecked_shl(rhs & ($BITS - 1))
             }
         }

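The wrapping shifts now route through the inherent `unchecked_shl`/`unchecked_shr` instead of calling the intrinsic with a cast, which is why the `rustc_allow_const_fn_unstable` attribute appears. The masking argument itself is unchanged and easy to check on stable; a small illustration, not taken from the commit:

    fn main() {
        // `wrapping_shl` masks the shift amount by `BITS - 1`, so for u8 a
        // shift by 8 behaves like a shift by 0, and a shift by 9 like a shift by 1.
        assert_eq!(1u8.wrapping_shl(8), 1);
        assert_eq!(1u8.wrapping_shl(9), 2);
        // The masked amount is always < BITS, which is exactly the
        // precondition `unchecked_shl` requires in the new body.
    }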
@@ -1383,11 +1386,12 @@ macro_rules! int_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
         pub const fn wrapping_shr(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
             unsafe {
-                intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT)
+                self.unchecked_shr(rhs & ($BITS - 1))
             }
         }

@@ -3,6 +3,7 @@
 #![stable(feature = "rust1", since = "1.0.0")]

 use crate::ascii;
+use crate::convert::TryInto;
 use crate::error::Error;
 use crate::intrinsics;
 use crate::mem;
@@ -901,10 +901,11 @@ macro_rules! uint_impl {
         #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
         #[inline(always)]
         #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-        pub const unsafe fn unchecked_shl(self, rhs: Self) -> Self {
+        pub const unsafe fn unchecked_shl(self, rhs: u32) -> Self {
             // SAFETY: the caller must uphold the safety contract for
             // `unchecked_shl`.
-            unsafe { intrinsics::unchecked_shl(self, rhs) }
+            // Any legal shift amount is losslessly representable in the self type.
+            unsafe { intrinsics::unchecked_shl(self, rhs.try_into().ok().unwrap_unchecked()) }
         }

         /// Checked shift right. Computes `self >> rhs`, returning `None`
@@ -948,10 +949,11 @@ macro_rules! uint_impl {
         #[rustc_const_unstable(feature = "const_inherent_unchecked_arith", issue = "85122")]
         #[inline(always)]
         #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-        pub const unsafe fn unchecked_shr(self, rhs: Self) -> Self {
+        pub const unsafe fn unchecked_shr(self, rhs: u32) -> Self {
             // SAFETY: the caller must uphold the safety contract for
             // `unchecked_shr`.
-            unsafe { intrinsics::unchecked_shr(self, rhs) }
+            // Any legal shift amount is losslessly representable in the self type.
+            unsafe { intrinsics::unchecked_shr(self, rhs.try_into().ok().unwrap_unchecked()) }
         }

         /// Checked exponentiation. Computes `self.pow(exp)`, returning `None` if
@@ -1367,11 +1369,12 @@ macro_rules! uint_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
        pub const fn wrapping_shl(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
             unsafe {
-                intrinsics::unchecked_shl(self, (rhs & ($BITS - 1)) as $SelfT)
+                self.unchecked_shl(rhs & ($BITS - 1))
             }
         }

@@ -1399,11 +1402,12 @@ macro_rules! uint_impl {
         #[must_use = "this returns the result of the operation, \
                       without modifying the original"]
         #[inline(always)]
+        #[rustc_allow_const_fn_unstable(const_inherent_unchecked_arith)]
         pub const fn wrapping_shr(self, rhs: u32) -> Self {
             // SAFETY: the masking by the bitsize of the type ensures that we do not shift
             // out of bounds
             unsafe {
-                intrinsics::unchecked_shr(self, (rhs & ($BITS - 1)) as $SelfT)
+                self.unchecked_shr(rhs & ($BITS - 1))
             }
         }

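The new codegen test below also covers the signed case, where `unchecked_shr` must lower to an arithmetic shift (`ashr`) rather than a logical one. A small illustration of that behaviour on a nightly toolchain; the values are chosen here, not taken from the test:

    #![feature(unchecked_math)]

    fn main() {
        // Arithmetic shift: the sign bit is replicated, so the result stays negative.
        // SAFETY: 4 < i32::BITS.
        let x = unsafe { (-256i32).unchecked_shr(4u32) };
        assert_eq!(x, -16);
    }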
src/test/codegen/unchecked_shifts.rs (new file, 65 lines)
@@ -0,0 +1,65 @@
+// compile-flags: -O
+// min-llvm-version: 15.0 (LLVM 13 in CI does this differently from submodule LLVM)
+
+#![crate_type = "lib"]
+#![feature(unchecked_math)]
+
+// CHECK-LABEL: @unchecked_shl_unsigned_same
+#[no_mangle]
+pub unsafe fn unchecked_shl_unsigned_same(a: u32, b: u32) -> u32 {
+    // CHECK-NOT: and i32
+    // CHECK: shl i32 %a, %b
+    // CHECK-NOT: and i32
+    a.unchecked_shl(b)
+}
+
+// CHECK-LABEL: @unchecked_shl_unsigned_smaller
+#[no_mangle]
+pub unsafe fn unchecked_shl_unsigned_smaller(a: u16, b: u32) -> u16 {
+    // This uses -DAG to avoid failing on irrelevant reorderings,
+    // like emitting the truncation earlier.
+
+    // CHECK-DAG: %[[INRANGE:.+]] = icmp ult i32 %b, 65536
+    // CHECK-DAG: tail call void @llvm.assume(i1 %[[INRANGE]])
+    // CHECK-DAG: %[[TRUNC:.+]] = trunc i32 %b to i16
+    // CHECK-DAG: shl i16 %a, %[[TRUNC]]
+    a.unchecked_shl(b)
+}
+
+// CHECK-LABEL: @unchecked_shl_unsigned_bigger
+#[no_mangle]
+pub unsafe fn unchecked_shl_unsigned_bigger(a: u64, b: u32) -> u64 {
+    // CHECK: %[[EXT:.+]] = zext i32 %b to i64
+    // CHECK: shl i64 %a, %[[EXT]]
+    a.unchecked_shl(b)
+}
+
+// CHECK-LABEL: @unchecked_shr_signed_same
+#[no_mangle]
+pub unsafe fn unchecked_shr_signed_same(a: i32, b: u32) -> i32 {
+    // CHECK-NOT: and i32
+    // CHECK: ashr i32 %a, %b
+    // CHECK-NOT: and i32
+    a.unchecked_shr(b)
+}
+
+// CHECK-LABEL: @unchecked_shr_signed_smaller
+#[no_mangle]
+pub unsafe fn unchecked_shr_signed_smaller(a: i16, b: u32) -> i16 {
+    // This uses -DAG to avoid failing on irrelevant reorderings,
+    // like emitting the truncation earlier.
+
+    // CHECK-DAG: %[[INRANGE:.+]] = icmp ult i32 %b, 32768
+    // CHECK-DAG: tail call void @llvm.assume(i1 %[[INRANGE]])
+    // CHECK-DAG: %[[TRUNC:.+]] = trunc i32 %b to i16
+    // CHECK-DAG: ashr i16 %a, %[[TRUNC]]
+    a.unchecked_shr(b)
+}
+
+// CHECK-LABEL: @unchecked_shr_signed_bigger
+#[no_mangle]
+pub unsafe fn unchecked_shr_signed_bigger(a: i64, b: u32) -> i64 {
+    // CHECK: %[[EXT:.+]] = zext i32 %b to i64
+    // CHECK: ashr i64 %a, %[[EXT]]
+    a.unchecked_shr(b)
+}