diff --git a/library/core/src/cell.rs b/library/core/src/cell.rs
index 030040ba09a..c77fa371cc7 100644
--- a/library/core/src/cell.rs
+++ b/library/core/src/cell.rs
@@ -237,9 +237,9 @@
 use crate::cmp::Ordering;
 use crate::fmt::{self, Debug, Display};
-use crate::intrinsics::is_nonoverlapping;
+use crate::intrinsics;
 use crate::marker::{PhantomData, Unsize};
-use crate::mem;
+use crate::mem::{self, size_of};
 use crate::ops::{CoerceUnsized, Deref, DerefMut, DispatchFromDyn};
 use crate::ptr::{self, NonNull};
@@ -435,11 +435,15 @@ pub fn set(&self, val: T) {
     #[inline]
     #[stable(feature = "move_cell", since = "1.17.0")]
     pub fn swap(&self, other: &Self) {
+        fn is_nonoverlapping<T>(src: *const T, dst: *const T) -> bool {
+            intrinsics::is_nonoverlapping(src.cast(), dst.cast(), size_of::<T>(), 1)
+        }
+
         if ptr::eq(self, other) {
             // Swapping wouldn't change anything.
             return;
         }
-        if !is_nonoverlapping(self, other, 1) {
+        if !is_nonoverlapping(self, other) {
             // See for why we need to stop here.
             panic!("`Cell::swap` on overlapping non-identical `Cell`s");
         }
diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs
index 18c82f97c28..c8259c0024c 100644
--- a/library/core/src/intrinsics.rs
+++ b/library/core/src/intrinsics.rs
@@ -56,7 +56,7 @@
 use crate::marker::DiscriminantKind;
 use crate::marker::Tuple;
-use crate::mem::{self, align_of};
+use crate::mem::align_of;
 
 pub mod mir;
 pub mod simd;
@@ -1027,7 +1027,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
     /// The size of the referenced value in bytes.
     ///
-    /// The stabilized version of this intrinsic is [`mem::size_of_val`].
+    /// The stabilized version of this intrinsic is [`crate::mem::size_of_val`].
     #[rustc_const_unstable(feature = "const_size_of_val", issue = "46571")]
     #[rustc_nounwind]
     pub fn size_of_val<T: ?Sized>(_: *const T) -> usize;
@@ -1107,7 +1107,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
     /// Moves a value out of scope without running drop glue.
     ///
-    /// This exists solely for [`mem::forget_unsized`]; normal `forget` uses
+    /// This exists solely for [`crate::mem::forget_unsized`]; normal `forget` uses
     /// `ManuallyDrop` instead.
     ///
     /// Note that, unlike most intrinsics, this is safe to call;
@@ -1233,7 +1233,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
     /// Depending on what the code is doing, the following alternatives are preferable to
     /// pointer-to-integer transmutation:
     /// - If the code just wants to store data of arbitrary type in some buffer and needs to pick a
-    ///   type for that buffer, it can use [`MaybeUninit`][mem::MaybeUninit].
+    ///   type for that buffer, it can use [`MaybeUninit`][crate::mem::MaybeUninit].
     /// - If the code actually wants to work on the address the pointer points to, it can use `as`
     ///   casts or [`ptr.addr()`][pointer::addr].
     ///
@@ -2317,7 +2317,7 @@ pub unsafe fn drop_in_place<T: ?Sized>(to_drop: *mut T) {
     /// Therefore, implementations must not require the user to uphold
     /// any safety invariants.
     ///
-    /// The to-be-stabilized version of this intrinsic is [`mem::variant_count`].
+    /// The to-be-stabilized version of this intrinsic is [`crate::mem::variant_count`].
     #[rustc_const_unstable(feature = "variant_count", issue = "73662")]
     #[rustc_safe_intrinsic]
     #[rustc_nounwind]
@@ -2569,6 +2569,19 @@ pub fn const_eval_select<ARG: Tuple, F, G, RET>(
     #[rustc_nounwind]
     pub fn is_val_statically_known<T: Copy>(arg: T) -> bool;
 
+    /// Returns the value of `cfg!(debug_assertions)`, but after monomorphization instead of in
+    /// macro expansion.
+    ///
+    /// This always returns `false` in const eval and Miri. The interpreter provides better
+    /// diagnostics than the checks that this is used to implement. However, this means
+    /// you should only be using this intrinsic to guard requirements that, if violated,
+    /// immediately lead to UB. Otherwise, const-eval and Miri will miss out on those
+    /// checks entirely.
+    ///
+    /// Since this is evaluated after monomorphization, branching on this value can be used to
+    /// implement debug assertions that are included in the precompiled standard library, but can
+    /// be optimized out by builds that monomorphize the standard library code with debug
+    /// assertions disabled. This intrinsic is primarily used by [`assert_unsafe_precondition`].
     #[rustc_const_unstable(feature = "delayed_debug_assertions", issue = "none")]
     #[rustc_safe_intrinsic]
     #[cfg(not(bootstrap))]
@@ -2597,7 +2610,7 @@ pub(crate) const fn debug_assertions() -> bool {
 /// These checks are behind a condition which is evaluated at codegen time, not expansion time like
 /// [`debug_assert`]. This means that a standard library built with optimizations and debug
 /// assertions disabled will have these checks optimized out of its monomorphizations, but if a
-/// a caller of the standard library has debug assertions enabled and monomorphizes an expansion of
+/// caller of the standard library has debug assertions enabled and monomorphizes an expansion of
 /// this macro, that monomorphization will contain the check.
 ///
 /// Since these checks cannot be optimized out in MIR, some care must be taken in both call and
@@ -2606,8 +2619,8 @@ pub(crate) const fn debug_assertions() -> bool {
 /// combination of properties ensures that the code for the checks is only compiled once, and has a
 /// minimal impact on the caller's code size.
 ///
-/// Caller should also introducing any other `let` bindings or any code outside this macro in order
-/// to call it. Since the precompiled standard library is built with full debuginfo and these
+/// Callers should also avoid introducing any other `let` bindings or any code outside this macro in
+/// order to call it. Since the precompiled standard library is built with full debuginfo and these
 /// variables cannot be optimized out in MIR, an innocent-looking `let` can produce enough
 /// debuginfo to have a measurable compile-time impact on debug builds.
 ///
@@ -2659,12 +2672,9 @@ pub(crate) fn is_valid_allocation_size(size: usize, len: usize) -> bool {
     len <= max_len
 }
 
-pub(crate) fn is_nonoverlapping_mono(
-    src: *const (),
-    dst: *const (),
-    size: usize,
-    count: usize,
-) -> bool {
+/// Checks whether the regions of memory starting at `src` and `dst` of size
+/// `count * size` do *not* overlap.
+pub(crate) fn is_nonoverlapping(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
     let src_usize = src.addr();
     let dst_usize = dst.addr();
     let Some(size) = size.checked_mul(count) else {
@@ -2678,24 +2688,6 @@ pub(crate) fn is_nonoverlapping_mono(
     diff >= size
 }
 
-/// Checks whether the regions of memory starting at `src` and `dst` of size
-/// `count * size_of::<T>()` do *not* overlap.
-#[inline]
-pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize) -> bool {
-    let src_usize = src.addr();
-    let dst_usize = dst.addr();
-    let Some(size) = mem::size_of::<T>().checked_mul(count) else {
-        // Use panic_nounwind instead of Option::expect, so that this function is nounwind.
-        crate::panicking::panic_nounwind(
-            "is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
-        )
-    };
-    let diff = src_usize.abs_diff(dst_usize);
-    // If the absolute distance between the ptrs is at least as big as the size of the buffer,
-    // they do not overlap.
-    diff >= size
-}
-
 /// Copies `count * size_of::<T>()` bytes from `src` to `dst`. The source
 /// and destination must *not* overlap.
 ///
@@ -2809,7 +2801,7 @@ pub(crate) fn is_nonoverlapping<T>(src: *const T, dst: *const T, count: usize)
             ) =>
             is_aligned_and_not_null(src, align)
                 && is_aligned_and_not_null(dst, align)
-                && is_nonoverlapping_mono(src, dst, size, count)
+                && is_nonoverlapping(src, dst, size, count)
         );
         copy_nonoverlapping(src, dst, count)
     }
diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs
index 1c2c9901e52..2bd14f357d8 100644
--- a/library/core/src/ptr/mod.rs
+++ b/library/core/src/ptr/mod.rs
@@ -383,7 +383,7 @@
 use crate::fmt;
 use crate::hash;
 use crate::intrinsics::{
-    self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping_mono,
+    self, assert_unsafe_precondition, is_aligned_and_not_null, is_nonoverlapping,
 };
 use crate::marker::FnPtr;
@@ -978,7 +978,7 @@ macro_rules! attempt_swap_as_chunks {
             ) =>
             is_aligned_and_not_null(x, align)
                 && is_aligned_and_not_null(y, align)
-                && is_nonoverlapping_mono(x, y, size, count)
+                && is_nonoverlapping(x, y, size, count)
         );
     }
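
Note (not part of the patch): the sketch below restates the overlap check that the single remaining `is_nonoverlapping` helper performs, so the rename can be reviewed against the underlying arithmetic. It uses `*const u8` and `as usize` casts instead of `*const ()` and `addr()` so it compiles on stable as an ordinary program; the names and values are illustrative only.

    fn is_nonoverlapping(src: *const u8, dst: *const u8, size: usize, count: usize) -> bool {
        // Total size of each region in bytes; `None` would mean `size * count`
        // overflows a `usize`, which the library version turns into a panic.
        let size = size.checked_mul(count).expect("`size * count` overflows a usize");
        // Two regions of `size` bytes are disjoint iff their start addresses
        // are at least `size` bytes apart.
        (src as usize).abs_diff(dst as usize) >= size
    }

    fn main() {
        let buf = [0u8; 8];
        let a = buf.as_ptr();
        let b = buf[4..].as_ptr(); // 4 bytes past `a`, within the same buffer
        assert!(!is_nonoverlapping(a, b, 8, 1)); // [a, a+8) and [b, b+8) overlap
        assert!(is_nonoverlapping(a, b, 4, 1)); // [a, a+4) and [b, b+4) are disjoint
    }

This is also the condition `Cell::swap` now delegates to: two distinct `Cell`s whose storage partially overlaps cause the documented panic instead of a memory-corrupting swap.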