Auto merge of #132661 - matthiaskrgr:rollup-npytbl6, r=matthiaskrgr

Rollup of 8 pull requests

Successful merges:

 - #132259 (rustc_codegen_llvm: Add a new 'pc' option to branch-protection)
 - #132409 (CI: switch 7 linux jobs to free runners)
 - #132498 (Suggest fixing typos and let bindings at the same time)
 - #132524 (chore(style): sync submodule exclusion list between tidy and rustfmt)
 - #132567 (Properly suggest `E::assoc` when we encounter `E::Variant::assoc`)
 - #132571 (add const_eval_select macro to reduce redundancy)
 - #132637 (Do not filter empty lint passes & re-do CTFE pass)
 - #132642 (Add documentation on `ast::Attribute`)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-11-05 22:43:15 +00:00
commit 4a91ff6bb5
45 changed files with 762 additions and 529 deletions

View File

@ -110,11 +110,7 @@ jobs:
# less disk space.
- name: free up disk space
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be
if: contains(matrix.os, 'ubuntu')
with:
# Removing packages with APT saves ~5 GiB, but takes several
# minutes (and potentially removes important packages).
large-packages: false
if: matrix.free_disk
# Rust Log Analyzer can't currently detect the PR number of a GitHub
# Actions build on its own, so a hint in the log message is needed to

View File

@ -136,6 +136,13 @@ pub fn is_word(&self) -> bool {
}
}
/// Returns a list of meta items if the attribute is delimited with parentheses:
///
/// ```text
/// #[attr(a, b = "c")] // Returns `Some()`.
/// #[attr = ""] // Returns `None`.
/// #[attr] // Returns `None`.
/// ```
pub fn meta_item_list(&self) -> Option<ThinVec<MetaItemInner>> {
match &self.kind {
AttrKind::Normal(normal) => normal.item.meta_item_list(),
@ -143,6 +150,21 @@ pub fn meta_item_list(&self) -> Option<ThinVec<MetaItemInner>> {
}
}
/// Returns the string value in:
///
/// ```text
/// #[attribute = "value"]
/// ^^^^^^^
/// ```
///
/// It returns `None` in all other cases, including doc comments that are not
/// of the form `#[doc = "..."]`.
///
/// It also returns `None` for:
///
/// ```text
/// #[attr("value")]
/// ```
pub fn value_str(&self) -> Option<Symbol> {
match &self.kind {
AttrKind::Normal(normal) => normal.item.value_str(),
@ -232,6 +254,18 @@ pub fn meta_item_list(&self) -> Option<ThinVec<MetaItemInner>> {
}
}
/// Returns the string value in:
///
/// ```text
/// #[attribute = "value"]
/// ^^^^^^^
/// ```
///
/// It returns `None` in all other cases, like:
///
/// ```text
/// #[attr("value")]
/// ```
fn value_str(&self) -> Option<Symbol> {
match &self.args {
AttrArgs::Eq(_, args) => args.value_str(),
@ -315,6 +349,18 @@ pub fn name_value_literal_span(&self) -> Option<Span> {
Some(self.name_value_literal()?.span)
}
/// Returns the string value in:
///
/// ```text
/// #[attribute = "value"]
/// ^^^^^^^
/// ```
///
/// It returns `None` in all other cases, like:
///
/// ```text
/// #[attr("value")]
/// ```
pub fn value_str(&self) -> Option<Symbol> {
match &self.kind {
MetaItemKind::NameValue(v) => v.kind.str(),

View File

@ -419,7 +419,10 @@ pub(crate) fn llfn_attrs_from_instance<'ll, 'tcx>(
if bti {
to_add.push(llvm::CreateAttrString(cx.llcx, "branch-target-enforcement"));
}
if let Some(PacRet { leaf, key }) = pac_ret {
if let Some(PacRet { leaf, pc, key }) = pac_ret {
if pc {
to_add.push(llvm::CreateAttrString(cx.llcx, "branch-protection-pauth-lr"));
}
to_add.push(llvm::CreateAttrStringValue(
cx.llcx,
"sign-return-address",

View File

@ -308,7 +308,13 @@ pub(crate) unsafe fn create_module<'ll>(
"sign-return-address",
pac_ret.is_some().into(),
);
let pac_opts = pac_ret.unwrap_or(PacRet { leaf: false, key: PAuthKey::A });
let pac_opts = pac_ret.unwrap_or(PacRet { leaf: false, pc: false, key: PAuthKey::A });
llvm::add_module_flag_u32(
llmod,
llvm::ModuleFlagMergeBehavior::Min,
"branch-protection-pauth-lr",
pac_opts.pc.into(),
);
llvm::add_module_flag_u32(
llmod,
llvm::ModuleFlagMergeBehavior::Min,

View File

@ -764,7 +764,7 @@ macro_rules! tracked {
branch_protection,
Some(BranchProtection {
bti: true,
pac_ret: Some(PacRet { leaf: true, key: PAuthKey::B })
pac_ret: Some(PacRet { leaf: true, pc: true, key: PAuthKey::B })
})
);
tracked!(codegen_backend, Some("abc".to_string()));

View File

@ -422,6 +422,9 @@ fn late_lint_crate<'tcx>(tcx: TyCtxt<'tcx>) {
.into_iter()
.filter(|pass| {
let lints = (**pass).get_lints();
// Lintless passes are always in
lints.is_empty() ||
// If the pass doesn't have a single needed lint, omit it
!lints.iter().all(|lint| lints_that_dont_need_to_run.contains(&LintId::of(lint)))
})
.collect();
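
The filter above keeps a late lint pass if it declares no lints at all, or if at least one of its lints still needs to run. A minimal standalone sketch of that predicate, with plain strings standing in for `LintId`s (the `keep_pass` helper is hypothetical, not compiler code):

```rust
use std::collections::HashSet;

// Keep a pass if it is lintless, or if not all of its lints are skippable.
fn keep_pass(pass_lints: &[&str], lints_that_dont_need_to_run: &HashSet<&str>) -> bool {
    pass_lints.is_empty()
        || !pass_lints.iter().all(|lint| lints_that_dont_need_to_run.contains(lint))
}

fn main() {
    let skip: HashSet<&str> = HashSet::from(["dead_code"]);
    assert!(keep_pass(&[], &skip)); // lintless passes are always kept
    assert!(keep_pass(&["dead_code", "unused_variables"], &skip)); // one lint still needed
    assert!(!keep_pass(&["dead_code"], &skip)); // every lint can be skipped: omit the pass
}
```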

View File

@ -459,7 +459,7 @@ pub(crate) fn smart_resolve_report_errors(
return (err, Vec::new());
}
let (found, mut candidates) = self.try_lookup_name_relaxed(
let (found, suggested_candidates, mut candidates) = self.try_lookup_name_relaxed(
&mut err,
source,
path,
@ -478,7 +478,15 @@ pub(crate) fn smart_resolve_report_errors(
}
let mut fallback = self.suggest_trait_and_bounds(&mut err, source, res, span, &base_error);
fallback |= self.suggest_typo(&mut err, source, path, following_seg, span, &base_error);
fallback |= self.suggest_typo(
&mut err,
source,
path,
following_seg,
span,
&base_error,
suggested_candidates,
);
if fallback {
// Fallback label.
@ -589,7 +597,16 @@ fn try_lookup_name_relaxed(
span: Span,
res: Option<Res>,
base_error: &BaseError,
) -> (bool, Vec<ImportSuggestion>) {
) -> (bool, FxHashSet<String>, Vec<ImportSuggestion>) {
let span = match following_seg {
Some(_) if path[0].ident.span.eq_ctxt(path[path.len() - 1].ident.span) => {
// The path `span` that comes in includes any following segments, which we don't
// want to replace in the suggestions.
path[0].ident.span.to(path[path.len() - 1].ident.span)
}
_ => span,
};
let mut suggested_candidates = FxHashSet::default();
// Try to lookup name in more relaxed fashion for better error reporting.
let ident = path.last().unwrap().ident;
let is_expected = &|res| source.is_expected(res);
@ -646,6 +663,11 @@ fn try_lookup_name_relaxed(
};
let msg = format!("{preamble}try using the variant's enum");
suggested_candidates.extend(
enum_candidates
.iter()
.map(|(_variant_path, enum_ty_path)| enum_ty_path.clone()),
);
err.span_suggestions(
span,
msg,
@ -658,7 +680,8 @@ fn try_lookup_name_relaxed(
// Try finding a suitable replacement.
let typo_sugg = self
.lookup_typo_candidate(path, following_seg, source.namespace(), is_expected)
.to_opt_suggestion();
.to_opt_suggestion()
.filter(|sugg| !suggested_candidates.contains(sugg.candidate.as_str()));
if let [segment] = path
&& !matches!(source, PathSource::Delegation)
&& self.self_type_is_available()
@ -719,7 +742,7 @@ fn try_lookup_name_relaxed(
}
}
self.r.add_typo_suggestion(err, typo_sugg, ident_span);
return (true, candidates);
return (true, suggested_candidates, candidates);
}
// If the first argument in call is `self` suggest calling a method.
@ -737,7 +760,7 @@ fn try_lookup_name_relaxed(
format!("self.{path_str}({args_snippet})"),
Applicability::MachineApplicable,
);
return (true, candidates);
return (true, suggested_candidates, candidates);
}
}
@ -754,7 +777,7 @@ fn try_lookup_name_relaxed(
) {
// We do this to avoid losing a secondary span when we override the main error span.
self.r.add_typo_suggestion(err, typo_sugg, ident_span);
return (true, candidates);
return (true, suggested_candidates, candidates);
}
}
@ -772,7 +795,7 @@ fn try_lookup_name_relaxed(
ident.span,
format!("the binding `{path_str}` is available in a different scope in the same function"),
);
return (true, candidates);
return (true, suggested_candidates, candidates);
}
}
}
@ -781,7 +804,7 @@ fn try_lookup_name_relaxed(
candidates = self.smart_resolve_partial_mod_path_errors(path, following_seg);
}
(false, candidates)
(false, suggested_candidates, candidates)
}
fn suggest_trait_and_bounds(
@ -869,13 +892,16 @@ fn suggest_typo(
following_seg: Option<&Segment>,
span: Span,
base_error: &BaseError,
suggested_candidates: FxHashSet<String>,
) -> bool {
let is_expected = &|res| source.is_expected(res);
let ident_span = path.last().map_or(span, |ident| ident.ident.span);
let typo_sugg =
self.lookup_typo_candidate(path, following_seg, source.namespace(), is_expected);
let mut fallback = false;
let typo_sugg = typo_sugg.to_opt_suggestion();
let typo_sugg = typo_sugg
.to_opt_suggestion()
.filter(|sugg| !suggested_candidates.contains(sugg.candidate.as_str()));
if !self.r.add_typo_suggestion(err, typo_sugg, ident_span) {
fallback = true;
match self.diag_metadata.current_let_binding {
@ -894,10 +920,13 @@ fn suggest_typo(
// If the trait has a single item (which wasn't matched by the algorithm), suggest it
let suggestion = self.get_single_associated_item(path, &source, is_expected);
if !self.r.add_typo_suggestion(err, suggestion, ident_span) {
fallback = !self.let_binding_suggestion(err, ident_span);
}
self.r.add_typo_suggestion(err, suggestion, ident_span);
}
if self.let_binding_suggestion(err, ident_span) {
fallback = false;
}
fallback
}

View File

@ -1320,6 +1320,7 @@ pub enum PAuthKey {
#[derive(Clone, Copy, Hash, Debug, PartialEq)]
pub struct PacRet {
pub leaf: bool,
pub pc: bool,
pub key: PAuthKey,
}

View File

@ -442,8 +442,7 @@ mod desc {
pub(crate) const parse_polonius: &str = "either no value or `legacy` (the default), or `next`";
pub(crate) const parse_stack_protector: &str =
"one of (`none` (default), `basic`, `strong`, or `all`)";
pub(crate) const parse_branch_protection: &str =
"a `,` separated combination of `bti`, `b-key`, `pac-ret`, or `leaf`";
pub(crate) const parse_branch_protection: &str = "a `,` separated combination of `bti`, `pac-ret`, followed by a combination of `pc`, `b-key`, or `leaf`";
pub(crate) const parse_proc_macro_execution_strategy: &str =
"one of supported execution strategies (`same-thread`, or `cross-thread`)";
pub(crate) const parse_remap_path_scope: &str =
@ -1401,7 +1400,7 @@ pub(crate) fn parse_branch_protection(
match opt {
"bti" => slot.bti = true,
"pac-ret" if slot.pac_ret.is_none() => {
slot.pac_ret = Some(PacRet { leaf: false, key: PAuthKey::A })
slot.pac_ret = Some(PacRet { leaf: false, pc: false, key: PAuthKey::A })
}
"leaf" => match slot.pac_ret.as_mut() {
Some(pac) => pac.leaf = true,
@ -1411,6 +1410,10 @@ pub(crate) fn parse_branch_protection(
Some(pac) => pac.key = PAuthKey::B,
_ => return false,
},
"pc" => match slot.pac_ret.as_mut() {
Some(pac) => pac.pc = true,
_ => return false,
},
_ => return false,
};
}
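
For context, the new `pc` value behaves like `leaf` and `b-key`: it only modifies an existing `pac-ret` entry, so it is rejected unless `pac-ret` appeared earlier in the comma-separated list. A standalone sketch of that rule using simplified, hypothetical types (not the compiler's `BranchProtection`/`PacRet`):

```rust
#[derive(Default)]
struct PacRet { leaf: bool, pc: bool, b_key: bool }

#[derive(Default)]
struct BranchProtection { bti: bool, pac_ret: Option<PacRet> }

// Returns `None` for any combination the real parser would reject.
fn parse(s: &str) -> Option<BranchProtection> {
    let mut slot = BranchProtection::default();
    for opt in s.split(',') {
        match opt {
            "bti" => slot.bti = true,
            "pac-ret" if slot.pac_ret.is_none() => slot.pac_ret = Some(PacRet::default()),
            "leaf" => slot.pac_ret.as_mut()?.leaf = true,
            "b-key" => slot.pac_ret.as_mut()?.b_key = true,
            "pc" => slot.pac_ret.as_mut()?.pc = true,
            _ => return None,
        }
    }
    Some(slot)
}

fn main() {
    let bp = parse("bti,pac-ret,pc,leaf").expect("valid combination");
    let pac = bp.pac_ret.expect("pac-ret was enabled");
    assert!(bp.bti && pac.pc && pac.leaf && !pac.b_key);
    assert!(parse("pc").is_none()); // `pc` without a preceding `pac-ret` is rejected
}
```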

View File

@ -1,7 +1,7 @@
//! impl char {}
use super::*;
use crate::macros::const_panic;
use crate::panic::const_panic;
use crate::slice;
use crate::str::from_utf8_unchecked_mut;
use crate::unicode::printable::is_printable;

View File

@ -3,11 +3,12 @@
use crate::cmp::Ordering;
use crate::error::Error;
use crate::ffi::c_char;
use crate::intrinsics::const_eval_select;
use crate::iter::FusedIterator;
use crate::marker::PhantomData;
use crate::ptr::NonNull;
use crate::slice::memchr;
use crate::{fmt, intrinsics, ops, slice, str};
use crate::{fmt, ops, slice, str};
// FIXME: because this is doc(inline)d, we *have* to use intra-doc links because the actual link
// depends on where the item is being documented. however, since this is libcore, we can't
@ -411,37 +412,35 @@ pub const fn from_bytes_with_nul(bytes: &[u8]) -> Result<&Self, FromBytesWithNul
#[rustc_const_stable(feature = "const_cstr_unchecked", since = "1.59.0")]
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub const unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr {
#[inline]
fn rt_impl(bytes: &[u8]) -> &CStr {
// Chance at catching some UB at runtime with debug builds.
debug_assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0);
const_eval_select!(
@capture { bytes: &[u8] } -> &CStr:
if const {
// Saturating so that an empty slice panics in the assert with a good
// message, not here due to underflow.
let mut i = bytes.len().saturating_sub(1);
assert!(!bytes.is_empty() && bytes[i] == 0, "input was not nul-terminated");
// SAFETY: Casting to CStr is safe because its internal representation
// is a [u8] too (safe only inside std).
// Dereferencing the obtained pointer is safe because it comes from a
// reference. Making a reference is then safe because its lifetime
// is bound by the lifetime of the given `bytes`.
unsafe { &*(bytes as *const [u8] as *const CStr) }
}
// Ending nul byte exists, skip to the rest.
while i != 0 {
i -= 1;
let byte = bytes[i];
assert!(byte != 0, "input contained interior nul");
}
const fn const_impl(bytes: &[u8]) -> &CStr {
// Saturating so that an empty slice panics in the assert with a good
// message, not here due to underflow.
let mut i = bytes.len().saturating_sub(1);
assert!(!bytes.is_empty() && bytes[i] == 0, "input was not nul-terminated");
// SAFETY: See runtime cast comment below.
unsafe { &*(bytes as *const [u8] as *const CStr) }
} else {
// Chance at catching some UB at runtime with debug builds.
debug_assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0);
// Ending nul byte exists, skip to the rest.
while i != 0 {
i -= 1;
let byte = bytes[i];
assert!(byte != 0, "input contained interior nul");
// SAFETY: Casting to CStr is safe because its internal representation
// is a [u8] too (safe only inside std).
// Dereferencing the obtained pointer is safe because it comes from a
// reference. Making a reference is then safe because its lifetime
// is bound by the lifetime of the given `bytes`.
unsafe { &*(bytes as *const [u8] as *const CStr) }
}
// SAFETY: See `rt_impl` cast.
unsafe { &*(bytes as *const [u8] as *const CStr) }
}
intrinsics::const_eval_select((bytes,), const_impl, rt_impl)
)
}
/// Returns the inner pointer to this C string.
@ -735,29 +734,27 @@ fn as_ref(&self) -> &CStr {
#[cfg_attr(bootstrap, rustc_const_stable(feature = "const_cstr_from_ptr", since = "1.81.0"))]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const unsafe fn strlen(ptr: *const c_char) -> usize {
const fn strlen_ct(s: *const c_char) -> usize {
let mut len = 0;
const_eval_select!(
@capture { s: *const c_char = ptr } -> usize:
if const {
let mut len = 0;
// SAFETY: Outer caller has provided a pointer to a valid C string.
while unsafe { *s.add(len) } != 0 {
len += 1;
// SAFETY: Outer caller has provided a pointer to a valid C string.
while unsafe { *s.add(len) } != 0 {
len += 1;
}
len
} else {
extern "C" {
/// Provided by libc or compiler_builtins.
fn strlen(s: *const c_char) -> usize;
}
// SAFETY: Outer caller has provided a pointer to a valid C string.
unsafe { strlen(s) }
}
len
}
#[inline]
fn strlen_rt(s: *const c_char) -> usize {
extern "C" {
/// Provided by libc or compiler_builtins.
fn strlen(s: *const c_char) -> usize;
}
// SAFETY: Outer caller has provided a pointer to a valid C string.
unsafe { strlen(s) }
}
intrinsics::const_eval_select((ptr,), strlen_ct, strlen_rt)
)
}
/// An iterator over the bytes of a [`CStr`], without the nul terminator.
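
The const arm above asserts what the safety contract requires: the byte slice ends with a nul and contains no interior nuls (the runtime arm only debug-asserts the trailing nul). A small standalone sketch of that check using only stable `CStr` APIs (the `check_nul_terminated` helper is illustrative, not library code):

```rust
use std::ffi::CStr;

// Mirrors the assertions in the const arm above: exactly one nul, at the end.
fn check_nul_terminated(bytes: &[u8]) {
    assert!(!bytes.is_empty() && bytes[bytes.len() - 1] == 0, "input was not nul-terminated");
    assert!(!bytes[..bytes.len() - 1].contains(&0), "input contained interior nul");
}

fn main() {
    let bytes = b"hello\0";
    check_nul_terminated(bytes);
    // The stable, checked constructor performs equivalent validation at runtime.
    let cstr = CStr::from_bytes_with_nul(bytes).unwrap();
    assert_eq!(cstr.to_bytes(), b"hello");
}
```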

View File

@ -2940,6 +2940,68 @@ pub const fn const_eval_select<ARG: Tuple, F, G, RET>(
unreachable!()
}
/// A macro to make it easier to invoke const_eval_select. Use as follows:
/// ```rust,ignore (just a macro example)
/// const_eval_select!(
/// @capture { arg1: i32 = some_expr, arg2: T = other_expr } -> U:
/// if const #[attributes_for_const_arm] {
/// // Compile-time code goes here.
/// } else #[attributes_for_runtime_arm] {
/// // Run-time code goes here.
/// }
/// )
/// ```
/// The `@capture` block declares which surrounding variables / expressions can be
/// used inside the `if const`.
/// Note that the two arms of this `if` really each become their own function, which is why the
/// macro supports setting attributes for those functions. The runtime function is always
/// marked as `#[inline]`.
///
/// See [`const_eval_select()`] for the rules and requirements around that intrinsic.
pub(crate) macro const_eval_select {
(
@capture { $($arg:ident : $ty:ty = $val:expr),* $(,)? } $( -> $ret:ty )? :
if const
$(#[$compiletime_attr:meta])* $compiletime:block
else
$(#[$runtime_attr:meta])* $runtime:block
) => {{
#[inline] // avoid the overhead of an extra fn call
$(#[$runtime_attr])*
fn runtime($($arg: $ty),*) $( -> $ret )? {
$runtime
}
#[inline] // prevent codegen on this function
$(#[$compiletime_attr])*
const fn compiletime($($arg: $ty),*) $( -> $ret )? {
// Don't warn if one of the arguments is unused.
$(let _ = $arg;)*
$compiletime
}
const_eval_select(($($val,)*), compiletime, runtime)
}},
// We support leaving away the `val` expressions for *all* arguments
// (but not for *some* arguments, that's too tricky).
(
@capture { $($arg:ident : $ty:ty),* $(,)? } $( -> $ret:ty )? :
if const
$(#[$compiletime_attr:meta])* $compiletime:block
else
$(#[$runtime_attr:meta])* $runtime:block
) => {
$crate::intrinsics::const_eval_select!(
@capture { $($arg : $ty = $arg),* } $(-> $ret)? :
if const
$(#[$compiletime_attr])* $compiletime
else
$(#[$runtime_attr])* $runtime
)
},
}
/// Returns whether the argument's value is statically known at
/// compile-time.
///
@ -2982,7 +3044,7 @@ pub const fn const_eval_select<ARG: Tuple, F, G, RET>(
/// # Stability concerns
///
/// While it is safe to call, this intrinsic may behave differently in
/// a `const` context than otherwise. See the [`const_eval_select`]
/// a `const` context than otherwise. See the [`const_eval_select()`]
/// documentation for an explanation of the issues this can cause. Unlike
/// `const_eval_select`, this intrinsic isn't guaranteed to behave
/// deterministically even in a `const` context.
@ -3868,14 +3930,15 @@ pub(crate) const fn miri_promise_symbolic_alignment(ptr: *const (), align: usize
fn miri_promise_symbolic_alignment(ptr: *const (), align: usize);
}
fn runtime(ptr: *const (), align: usize) {
// SAFETY: this call is always safe.
unsafe {
miri_promise_symbolic_alignment(ptr, align);
const_eval_select!(
@capture { ptr: *const (), align: usize}:
if const {
// Do nothing.
} else {
// SAFETY: this call is always safe.
unsafe {
miri_promise_symbolic_alignment(ptr, align);
}
}
}
const fn compiletime(_ptr: *const (), _align: usize) {}
const_eval_select((ptr, align), compiletime, runtime);
)
}
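
As a rough illustration of what each use of the new `const_eval_select!` macro boils down to, here is a nightly-only sketch that calls the underlying `const_eval_select` intrinsic directly with one `const fn` arm and one runtime arm (assumes a nightly toolchain of this vintage with `#![feature(core_intrinsics, const_eval_select)]`; the macro itself is `pub(crate)` and not usable outside core):

```rust
#![feature(core_intrinsics, const_eval_select)]
#![allow(internal_features)]

use core::intrinsics::const_eval_select;

const fn compiletime() -> bool { true }
fn runtime() -> bool { false }

// CTFE picks the const arm...
const IN_CONST: bool = const_eval_select((), compiletime, runtime);

fn main() {
    // ...while codegen picks the runtime arm.
    let at_runtime = const_eval_select((), compiletime, runtime);
    println!("const: {}, runtime: {}", IN_CONST, at_runtime); // const: true, runtime: false
}
```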

View File

@ -12,54 +12,6 @@ macro_rules! panic {
};
}
/// Helper macro for panicking in a `const fn`.
/// Invoke as:
/// ```rust,ignore (just an example)
/// core::macros::const_panic!("boring message", "flavored message {a} {b:?}", a: u32 = foo.len(), b: Something = bar);
/// ```
/// where the first message will be printed in const-eval,
/// and the second message will be printed at runtime.
// All uses of this macro are FIXME(const-hack).
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_panic {
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty = $val:expr),* $(,)?) => {{
#[inline]
#[track_caller]
fn runtime($($arg: $ty),*) -> ! {
$crate::panic!($runtime_msg);
}
#[inline]
#[track_caller]
const fn compiletime($(_: $ty),*) -> ! {
$crate::panic!($const_msg);
}
// Wrap call to `const_eval_select` in a function so that we can
// add the `rustc_allow_const_fn_unstable`. This is okay to do
// because both variants will panic, just with different messages.
#[rustc_allow_const_fn_unstable(const_eval_select)]
#[inline(always)]
#[track_caller]
#[cfg_attr(bootstrap, rustc_const_stable(feature = "const_panic", since = "CURRENT_RUSTC_VERSION"))]
const fn do_panic($($arg: $ty),*) -> ! {
$crate::intrinsics::const_eval_select(($($arg),* ,), compiletime, runtime)
}
do_panic($($val),*)
}},
// We support leaving away the `val` expressions for *all* arguments
// (but not for *some* arguments, that's too tricky).
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty),* $(,)?) => {
$crate::macros::const_panic!(
$const_msg,
$runtime_msg,
$($arg: $ty = $arg),*
)
},
}
/// Asserts that two expressions are equal to each other (using [`PartialEq`]).
///
/// Assertions are always checked in both debug and release builds, and cannot
@ -244,19 +196,6 @@ macro_rules! assert_ne {
},
}
/// A version of `assert` that prints a non-formatting message in const contexts.
///
/// See [`const_panic!`].
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_assert {
($condition: expr, $const_msg:literal, $runtime_msg:literal, $($arg:tt)*) => {{
if !$crate::intrinsics::likely($condition) {
$crate::macros::const_panic!($const_msg, $runtime_msg, $($arg)*)
}
}}
}
/// A macro for defining `#[cfg]` match-like statements.
///
/// It is similar to the `if/elif` C preprocessor macro by allowing definition of a cascade of

View File

@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;
/// Basic mathematical constants.
#[unstable(feature = "f128", issue = "116909")]

View File

@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;
/// Basic mathematical constants.
#[unstable(feature = "f16", issue = "116909")]

View File

@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;
/// The radix or base of the internal representation of `f32`.
/// Use [`f32::RADIX`] instead.

View File

@ -14,9 +14,9 @@
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::macros::const_assert;
use crate::mem;
use crate::num::FpCategory;
use crate::panic::const_assert;
/// The radix or base of the internal representation of `f64`.
/// Use [`f64::RADIX`] instead.

View File

@ -2,7 +2,7 @@
#![stable(feature = "rust1", since = "1.0.0")]
use crate::macros::const_panic;
use crate::panic::const_panic;
use crate::str::FromStr;
use crate::ub_checks::assert_unsafe_precondition;
use crate::{ascii, intrinsics, mem};

View File

@ -189,3 +189,59 @@ fn as_str(&mut self) -> Option<&str> {
None
}
}
/// Helper macro for panicking in a `const fn`.
/// Invoke as:
/// ```rust,ignore (just an example)
/// core::macros::const_panic!("boring message", "flavored message {a} {b:?}", a: u32 = foo.len(), b: Something = bar);
/// ```
/// where the first message will be printed in const-eval,
/// and the second message will be printed at runtime.
// All uses of this macro are FIXME(const-hack).
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_panic {
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty = $val:expr),* $(,)?) => {{
// Wrap call to `const_eval_select` in a function so that we can
// add the `rustc_allow_const_fn_unstable`. This is okay to do
// because both variants will panic, just with different messages.
#[rustc_allow_const_fn_unstable(const_eval_select)]
#[inline(always)]
#[track_caller]
#[cfg_attr(bootstrap, rustc_const_stable(feature = "const_panic", since = "CURRENT_RUSTC_VERSION"))]
const fn do_panic($($arg: $ty),*) -> ! {
$crate::intrinsics::const_eval_select!(
@capture { $($arg: $ty),* } -> !:
if const #[track_caller] {
$crate::panic!($const_msg)
} else #[track_caller] {
$crate::panic!($runtime_msg)
}
)
}
do_panic($($val),*)
}},
// We support leaving away the `val` expressions for *all* arguments
// (but not for *some* arguments, that's too tricky).
($const_msg:literal, $runtime_msg:literal, $($arg:ident : $ty:ty),* $(,)?) => {
$crate::panic::const_panic!(
$const_msg,
$runtime_msg,
$($arg: $ty = $arg),*
)
},
}
/// A version of `assert` that prints a non-formatting message in const contexts.
///
/// See [`const_panic!`].
#[unstable(feature = "panic_internals", issue = "none")]
#[doc(hidden)]
pub macro const_assert {
($condition: expr, $const_msg:literal, $runtime_msg:literal, $($arg:tt)*) => {{
if !$crate::intrinsics::likely($condition) {
$crate::panic::const_panic!($const_msg, $runtime_msg, $($arg)*)
}
}}
}

View File

@ -29,6 +29,7 @@
)]
use crate::fmt;
use crate::intrinsics::const_eval_select;
use crate::panic::{Location, PanicInfo};
#[cfg(feature = "panic_immediate_abort")]
@ -89,40 +90,35 @@ pub const fn panic_fmt(fmt: fmt::Arguments<'_>) -> ! {
#[cfg_attr(not(bootstrap), rustc_const_stable_indirect)] // must follow stable const rules since it is exposed to stable
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub const fn panic_nounwind_fmt(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
#[inline] // this should always be inlined into `panic_nounwind_fmt`
#[track_caller]
fn runtime(fmt: fmt::Arguments<'_>, force_no_backtrace: bool) -> ! {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
const_eval_select!(
@capture { fmt: fmt::Arguments<'_>, force_no_backtrace: bool } -> !:
if const #[track_caller] {
// We don't unwind anyway at compile-time so we can call the regular `panic_fmt`.
panic_fmt(fmt)
} else #[track_caller] {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
}
// NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
// that gets resolved to the `#[panic_handler]` function.
extern "Rust" {
#[lang = "panic_impl"]
fn panic_impl(pi: &PanicInfo<'_>) -> !;
}
// PanicInfo with the `can_unwind` flag set to false forces an abort.
let pi = PanicInfo::new(
&fmt,
Location::caller(),
/* can_unwind */ false,
force_no_backtrace,
);
// SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call.
unsafe { panic_impl(&pi) }
}
// NOTE This function never crosses the FFI boundary; it's a Rust-to-Rust call
// that gets resolved to the `#[panic_handler]` function.
extern "Rust" {
#[lang = "panic_impl"]
fn panic_impl(pi: &PanicInfo<'_>) -> !;
}
// PanicInfo with the `can_unwind` flag set to false forces an abort.
let pi = PanicInfo::new(
&fmt,
Location::caller(),
/* can_unwind */ false,
force_no_backtrace,
);
// SAFETY: `panic_impl` is defined in safe Rust code and thus is safe to call.
unsafe { panic_impl(&pi) }
}
#[inline]
#[track_caller]
const fn comptime(fmt: fmt::Arguments<'_>, _force_no_backtrace: bool) -> ! {
// We don't unwind anyway at compile-time so we can call the regular `panic_fmt`.
panic_fmt(fmt);
}
super::intrinsics::const_eval_select((fmt, force_no_backtrace), comptime, runtime);
)
}
// Next we define a bunch of higher-level wrappers that all bottom out in the two core functions

View File

@ -33,26 +33,23 @@ impl<T: ?Sized> *const T {
#[rustc_diagnostic_item = "ptr_const_is_null"]
#[inline]
pub const fn is_null(self) -> bool {
#[inline]
fn runtime_impl(ptr: *const u8) -> bool {
ptr.addr() == 0
}
#[inline]
#[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")]
const fn const_impl(ptr: *const u8) -> bool {
match (ptr).guaranteed_eq(null_mut()) {
Some(res) => res,
// To remain maximally conservative, we stop execution when we don't
// know whether the pointer is null or not.
// We can *not* return `false` here, that would be unsound in `NonNull::new`!
None => panic!("null-ness of this pointer cannot be determined in const context"),
}
}
// Compare via a cast to a thin pointer, so fat pointers are only
// considering their "data" part for null-ness.
const_eval_select((self as *const u8,), const_impl, runtime_impl)
let ptr = self as *const u8;
const_eval_select!(
@capture { ptr: *const u8 } -> bool:
if const #[rustc_const_unstable(feature = "const_ptr_is_null", issue = "74939")] {
match (ptr).guaranteed_eq(null_mut()) {
Some(res) => res,
// To remain maximally conservative, we stop execution when we don't
// know whether the pointer is null or not.
// We can *not* return `false` here, that would be unsound in `NonNull::new`!
None => panic!("null-ness of this pointer cannot be determined in const context"),
}
} else {
ptr.addr() == 0
}
)
}
/// Casts to a pointer of another type.
@ -410,22 +407,21 @@ pub const fn to_raw_parts(self) -> (*const (), <T as super::Pointee>::Metadata)
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: isize, size: usize) -> bool {
// We know `size <= isize::MAX` so the `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}
const fn comptime(_: *const (), _: isize, _: usize) -> bool {
true
}
// We can use const_eval_select here because this is only for UB checks.
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: isize, size: usize } -> bool:
if const {
true
} else {
// `size` is the size of a Rust type, so we know that
// `size <= isize::MAX` and thus `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}
)
}
ub_checks::assert_unsafe_precondition!(
@ -763,14 +759,14 @@ pub fn mask(self, mask: usize) -> *const T {
{
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_ptr_ge(this: *const (), origin: *const ()) -> bool {
fn runtime(this: *const (), origin: *const ()) -> bool {
this >= origin
}
const fn comptime(_: *const (), _: *const ()) -> bool {
true
}
intrinsics::const_eval_select((this, origin), comptime, runtime)
const_eval_select!(
@capture { this: *const (), origin: *const () } -> bool:
if const {
true
} else {
this >= origin
}
)
}
ub_checks::assert_unsafe_precondition!(
@ -924,20 +920,18 @@ pub const fn guaranteed_ne(self, other: *const T) -> Option<bool>
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}
const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}
)
}
#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@ -1033,19 +1027,17 @@ const fn comptime(_: *const (), _: usize, _: usize) -> bool {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}
const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}
)
}
#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
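
At runtime, the `*_nowrap` helpers above all answer the same question: does `count * size` fit in the offset computation without wrapping the address? A standalone sketch of the signed variant, operating on a plain address instead of a pointer (the `offset_nowrap` name is illustrative):

```rust
// Mirrors `runtime_offset_nowrap` above, with the pointer replaced by its address.
fn offset_nowrap(addr: usize, count: isize, size: usize) -> bool {
    // `size` is the size of a Rust type, so `size <= isize::MAX` and the cast is lossless.
    let Some(byte_offset) = count.checked_mul(size as isize) else {
        return false;
    };
    addr.checked_add_signed(byte_offset).is_some()
}

fn main() {
    assert!(offset_nowrap(0x1000, 4, 8)); // 0x1000 + 32 bytes: fine
    assert!(!offset_nowrap(usize::MAX - 8, 4, 8)); // would wrap past the end of the address space
    assert!(!offset_nowrap(16, -4, 8)); // 16 - 32 would wrap below zero
}
```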

View File

@ -1,5 +1,6 @@
use super::*;
use crate::cmp::Ordering::{Equal, Greater, Less};
use crate::intrinsics::const_eval_select;
use crate::mem::SizedTypeProperties;
use crate::slice::{self, SliceIndex};
@ -404,23 +405,21 @@ pub const fn to_raw_parts(self) -> (*mut (), <T as super::Pointee>::Metadata) {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_offset_nowrap(this: *const (), count: isize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: isize, size: usize) -> bool {
// `size` is the size of a Rust type, so we know that
// `size <= isize::MAX` and thus `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}
const fn comptime(_: *const (), _: isize, _: usize) -> bool {
true
}
// We can use const_eval_select here because this is only for UB checks.
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: isize, size: usize } -> bool:
if const {
true
} else {
// `size` is the size of a Rust type, so we know that
// `size <= isize::MAX` and thus `as` cast here is not lossy.
let Some(byte_offset) = count.checked_mul(size as isize) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add_signed(byte_offset);
!overflow
}
)
}
ub_checks::assert_unsafe_precondition!(
@ -1002,20 +1001,18 @@ pub const fn guaranteed_ne(self, other: *mut T) -> Option<bool>
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_add_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}
const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
let (_, overflow) = this.addr().overflowing_add(byte_offset);
byte_offset <= (isize::MAX as usize) && !overflow
}
)
}
#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.
@ -1111,19 +1108,17 @@ const fn comptime(_: *const (), _: usize, _: usize) -> bool {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
const fn runtime_sub_nowrap(this: *const (), count: usize, size: usize) -> bool {
#[inline]
fn runtime(this: *const (), count: usize, size: usize) -> bool {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}
const fn comptime(_: *const (), _: usize, _: usize) -> bool {
true
}
intrinsics::const_eval_select((this, count, size), comptime, runtime)
const_eval_select!(
@capture { this: *const (), count: usize, size: usize } -> bool:
if const {
true
} else {
let Some(byte_offset) = count.checked_mul(size) else {
return false;
};
byte_offset <= (isize::MAX as usize) && this.addr() >= byte_offset
}
)
}
#[cfg(debug_assertions)] // Expensive, and doesn't catch much in the wild.

View File

@ -351,89 +351,87 @@ pub const fn is_ascii_simple(mut bytes: &[u8]) -> bool {
const fn is_ascii(s: &[u8]) -> bool {
// The runtime version behaves the same as the compiletime version; it's
// just more optimized.
return const_eval_select((s,), compiletime, runtime);
const_eval_select!(
@capture { s: &[u8] } -> bool:
if const {
is_ascii_simple(s)
} else {
const USIZE_SIZE: usize = mem::size_of::<usize>();
const fn compiletime(s: &[u8]) -> bool {
is_ascii_simple(s)
}
let len = s.len();
let align_offset = s.as_ptr().align_offset(USIZE_SIZE);
#[inline]
fn runtime(s: &[u8]) -> bool {
const USIZE_SIZE: usize = mem::size_of::<usize>();
let len = s.len();
let align_offset = s.as_ptr().align_offset(USIZE_SIZE);
// If we wouldn't gain anything from the word-at-a-time implementation, fall
// back to a scalar loop.
//
// We also do this for architectures where `size_of::<usize>()` isn't
// sufficient alignment for `usize`, because it's a weird edge case.
if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
return is_ascii_simple(s);
}
// We always read the first word unaligned, which means `align_offset` is
// 0, we'd read the same value again for the aligned read.
let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };
let start = s.as_ptr();
// SAFETY: We verify `len < USIZE_SIZE` above.
let first_word = unsafe { (start as *const usize).read_unaligned() };
if contains_nonascii(first_word) {
return false;
}
// We checked this above, somewhat implicitly. Note that `offset_to_aligned`
// is either `align_offset` or `USIZE_SIZE`, both of which are explicitly checked
// above.
debug_assert!(offset_to_aligned <= len);
// SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
// middle chunk of the slice.
let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };
// `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
let mut byte_pos = offset_to_aligned;
// Paranoia check about alignment, since we're about to do a bunch of
// unaligned loads. In practice this should be impossible barring a bug in
// `align_offset` though.
// While this method is allowed to spuriously fail in CTFE, if it doesn't
// have alignment information it should have given a `usize::MAX` for
// `align_offset` earlier, sending things through the scalar path instead of
// this one, so this check should pass if it's reachable.
debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));
// Read subsequent words until the last aligned word, excluding the last
// aligned word by itself to be done in tail check later, to ensure that
// tail is always one `usize` at most to extra branch `byte_pos == len`.
while byte_pos < len - USIZE_SIZE {
// Sanity check that the read is in bounds
debug_assert!(byte_pos + USIZE_SIZE <= len);
// And that our assumptions about `byte_pos` hold.
debug_assert!(word_ptr.cast::<u8>() == start.wrapping_add(byte_pos));
// SAFETY: We know `word_ptr` is properly aligned (because of
// `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
let word = unsafe { word_ptr.read() };
if contains_nonascii(word) {
return false;
// If we wouldn't gain anything from the word-at-a-time implementation, fall
// back to a scalar loop.
//
// We also do this for architectures where `size_of::<usize>()` isn't
// sufficient alignment for `usize`, because it's a weird edge case.
if len < USIZE_SIZE || len < align_offset || USIZE_SIZE < mem::align_of::<usize>() {
return is_ascii_simple(s);
}
byte_pos += USIZE_SIZE;
// SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
// after this `add`, `word_ptr` will be at most one-past-the-end.
word_ptr = unsafe { word_ptr.add(1) };
// We always read the first word unaligned, which means `align_offset` is
// 0, we'd read the same value again for the aligned read.
let offset_to_aligned = if align_offset == 0 { USIZE_SIZE } else { align_offset };
let start = s.as_ptr();
// SAFETY: We verify `len < USIZE_SIZE` above.
let first_word = unsafe { (start as *const usize).read_unaligned() };
if contains_nonascii(first_word) {
return false;
}
// We checked this above, somewhat implicitly. Note that `offset_to_aligned`
// is either `align_offset` or `USIZE_SIZE`, both of which are explicitly checked
// above.
debug_assert!(offset_to_aligned <= len);
// SAFETY: word_ptr is the (properly aligned) usize ptr we use to read the
// middle chunk of the slice.
let mut word_ptr = unsafe { start.add(offset_to_aligned) as *const usize };
// `byte_pos` is the byte index of `word_ptr`, used for loop end checks.
let mut byte_pos = offset_to_aligned;
// Paranoia check about alignment, since we're about to do a bunch of
// unaligned loads. In practice this should be impossible barring a bug in
// `align_offset` though.
// While this method is allowed to spuriously fail in CTFE, if it doesn't
// have alignment information it should have given a `usize::MAX` for
// `align_offset` earlier, sending things through the scalar path instead of
// this one, so this check should pass if it's reachable.
debug_assert!(word_ptr.is_aligned_to(mem::align_of::<usize>()));
// Read subsequent words until the last aligned word, excluding the last
// aligned word by itself to be done in tail check later, to ensure that
// tail is always one `usize` at most to extra branch `byte_pos == len`.
while byte_pos < len - USIZE_SIZE {
// Sanity check that the read is in bounds
debug_assert!(byte_pos + USIZE_SIZE <= len);
// And that our assumptions about `byte_pos` hold.
debug_assert!(word_ptr.cast::<u8>() == start.wrapping_add(byte_pos));
// SAFETY: We know `word_ptr` is properly aligned (because of
// `align_offset`), and we know that we have enough bytes between `word_ptr` and the end
let word = unsafe { word_ptr.read() };
if contains_nonascii(word) {
return false;
}
byte_pos += USIZE_SIZE;
// SAFETY: We know that `byte_pos <= len - USIZE_SIZE`, which means that
// after this `add`, `word_ptr` will be at most one-past-the-end.
word_ptr = unsafe { word_ptr.add(1) };
}
// Sanity check to ensure there really is only one `usize` left. This should
// be guaranteed by our loop condition.
debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);
// SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };
!contains_nonascii(last_word)
}
// Sanity check to ensure there really is only one `usize` left. This should
// be guaranteed by our loop condition.
debug_assert!(byte_pos <= len && len - byte_pos <= USIZE_SIZE);
// SAFETY: This relies on `len >= USIZE_SIZE`, which we check at the start.
let last_word = unsafe { (start.add(len - USIZE_SIZE) as *const usize).read_unaligned() };
!contains_nonascii(last_word)
}
)
}
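
A simplified, standalone sketch of the word-at-a-time idea used by the runtime arm above: pack `size_of::<usize>()` bytes into one word and test all of their high bits at once (the `is_ascii_wordwise` helper is illustrative; the real code additionally does the alignment checks and unaligned head/tail reads shown in the diff):

```rust
use std::mem::size_of;

const WORD: usize = size_of::<usize>();

// A byte is non-ASCII iff its high bit is set, so testing the high bits of a
// whole word checks WORD bytes in one operation.
fn contains_nonascii(word: usize) -> bool {
    const HIGH_BITS: usize = usize::from_ne_bytes([0x80; WORD]);
    word & HIGH_BITS != 0
}

fn is_ascii_wordwise(s: &[u8]) -> bool {
    let (body, tail) = s.split_at(s.len() - s.len() % WORD);
    body.chunks_exact(WORD)
        .all(|chunk| !contains_nonascii(usize::from_ne_bytes(chunk.try_into().unwrap())))
        && tail.iter().all(u8::is_ascii)
}

fn main() {
    assert!(is_ascii_wordwise(b"hello, word-at-a-time world"));
    assert!(!is_ascii_wordwise("h\u{e9}llo".as_bytes()));
}
```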

View File

@ -1,6 +1,6 @@
//! Indexing implementations for `[T]`.
use crate::macros::const_panic;
use crate::panic::const_panic;
use crate::ub_checks::assert_unsafe_precondition;
use crate::{ops, range};

View File

@ -56,61 +56,59 @@ const fn memchr_naive(x: u8, text: &[u8]) -> Option<usize> {
const fn memchr_aligned(x: u8, text: &[u8]) -> Option<usize> {
// The runtime version behaves the same as the compiletime version; it's
// just more optimized.
return const_eval_select((x, text), compiletime, runtime);
const_eval_select!(
@capture { x: u8, text: &[u8] } -> Option<usize>:
if const {
memchr_naive(x, text)
} else {
// Scan for a single byte value by reading two `usize` words at a time.
//
// Split `text` in three parts
// - unaligned initial part, before the first word aligned address in text
// - body, scan by 2 words at a time
// - the last remaining part, < 2 word size
const fn compiletime(x: u8, text: &[u8]) -> Option<usize> {
memchr_naive(x, text)
}
// search up to an aligned boundary
let len = text.len();
let ptr = text.as_ptr();
let mut offset = ptr.align_offset(USIZE_BYTES);
#[inline]
fn runtime(x: u8, text: &[u8]) -> Option<usize> {
// Scan for a single byte value by reading two `usize` words at a time.
//
// Split `text` in three parts
// - unaligned initial part, before the first word aligned address in text
// - body, scan by 2 words at a time
// - the last remaining part, < 2 word size
// search up to an aligned boundary
let len = text.len();
let ptr = text.as_ptr();
let mut offset = ptr.align_offset(USIZE_BYTES);
if offset > 0 {
offset = offset.min(len);
let slice = &text[..offset];
if let Some(index) = memchr_naive(x, slice) {
return Some(index);
}
}
// search the body of the text
let repeated_x = usize::repeat_u8(x);
while offset <= len - 2 * USIZE_BYTES {
// SAFETY: the while's predicate guarantees a distance of at least 2 * usize_bytes
// between the offset and the end of the slice.
unsafe {
let u = *(ptr.add(offset) as *const usize);
let v = *(ptr.add(offset + USIZE_BYTES) as *const usize);
// break if there is a matching byte
let zu = contains_zero_byte(u ^ repeated_x);
let zv = contains_zero_byte(v ^ repeated_x);
if zu || zv {
break;
if offset > 0 {
offset = offset.min(len);
let slice = &text[..offset];
if let Some(index) = memchr_naive(x, slice) {
return Some(index);
}
}
offset += USIZE_BYTES * 2;
}
// Find the byte after the point the body loop stopped.
// FIXME(const-hack): Use `?` instead.
// FIXME(const-hack, fee1-dead): use range slicing
let slice =
// SAFETY: offset is within bounds
unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
}
// search the body of the text
let repeated_x = usize::repeat_u8(x);
while offset <= len - 2 * USIZE_BYTES {
// SAFETY: the while's predicate guarantees a distance of at least 2 * usize_bytes
// between the offset and the end of the slice.
unsafe {
let u = *(ptr.add(offset) as *const usize);
let v = *(ptr.add(offset + USIZE_BYTES) as *const usize);
// break if there is a matching byte
let zu = contains_zero_byte(u ^ repeated_x);
let zv = contains_zero_byte(v ^ repeated_x);
if zu || zv {
break;
}
}
offset += USIZE_BYTES * 2;
}
// Find the byte after the point the body loop stopped.
// FIXME(const-hack): Use `?` instead.
// FIXME(const-hack, fee1-dead): use range slicing
let slice =
// SAFETY: offset is within bounds
unsafe { super::from_raw_parts(text.as_ptr().add(offset), text.len() - offset) };
if let Some(i) = memchr_naive(x, slice) { Some(offset + i) } else { None }
}
)
}
/// Returns the last index matching the byte `x` in `text`.
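
The body loop above relies on a SWAR trick: XOR-ing a word with the needle byte repeated in every lane turns matching bytes into zero bytes, which `contains_zero_byte` then detects with three arithmetic operations. A standalone sketch of just that trick (the `word_has_byte` helper is illustrative; the real code also handles the unaligned head and the tail):

```rust
use std::mem::size_of;

const WORD: usize = size_of::<usize>();
const LO: usize = usize::from_ne_bytes([0x01; WORD]);
const HI: usize = usize::from_ne_bytes([0x80; WORD]);

// Classic "haszero" test: true iff any byte of `x` is zero.
fn contains_zero_byte(x: usize) -> bool {
    x.wrapping_sub(LO) & !x & HI != 0
}

// True iff any byte of `word` equals `needle`.
fn word_has_byte(word: usize, needle: u8) -> bool {
    let repeated = usize::from_ne_bytes([needle; WORD]);
    contains_zero_byte(word ^ repeated)
}

fn main() {
    let text = b"abcdefghijklmnop";
    let word = usize::from_ne_bytes(text[..WORD].try_into().unwrap());
    assert!(word_has_byte(word, b'a'));
    assert!(!word_has_byte(word, b'z'));
}
```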

View File

@ -132,19 +132,16 @@ pub(super) const fn run_utf8_validation(v: &[u8]) -> Result<(), Utf8Error> {
let ascii_block_size = 2 * USIZE_BYTES;
let blocks_end = if len >= ascii_block_size { len - ascii_block_size + 1 } else { 0 };
let align = {
const fn compiletime(_v: &[u8]) -> usize {
// Below, we safely fall back to a slower codepath if the offset is `usize::MAX`,
// so the end-to-end behavior is the same at compiletime and runtime.
let align = const_eval_select!(
@capture { v: &[u8] } -> usize:
if const {
usize::MAX
}
fn runtime(v: &[u8]) -> usize {
} else {
v.as_ptr().align_offset(USIZE_BYTES)
}
// Below, we safely fall back to a slower codepath if the offset is `usize::MAX`,
// so the end-to-end behavior is the same at compiletime and runtime.
const_eval_select((v,), compiletime, runtime)
};
);
while index < len {
let old_offset = index;

View File

@ -95,20 +95,18 @@ const fn precondition_check($($name:$ty),*) {
#[inline]
#[rustc_allow_const_fn_unstable(const_eval_select)]
pub(crate) const fn check_language_ub() -> bool {
#[inline]
fn runtime() -> bool {
// Disable UB checks in Miri.
!cfg!(miri)
}
#[inline]
const fn comptime() -> bool {
// Always disable UB checks.
false
}
// Only used for UB checks so we may const_eval_select.
intrinsics::ub_checks() && const_eval_select((), comptime, runtime)
intrinsics::ub_checks()
&& const_eval_select!(
@capture { } -> bool:
if const {
// Always disable UB checks.
false
} else {
// Disable UB checks in Miri.
!cfg!(miri)
}
)
}
/// Checks whether `ptr` is properly aligned with respect to the given alignment, and
@ -120,19 +118,15 @@ const fn comptime() -> bool {
#[inline]
#[rustc_const_unstable(feature = "const_ub_checks", issue = "none")]
pub(crate) const fn is_aligned_and_not_null(ptr: *const (), align: usize, is_zst: bool) -> bool {
#[inline]
fn runtime(ptr: *const (), align: usize, is_zst: bool) -> bool {
ptr.is_aligned_to(align) && (is_zst || !ptr.is_null())
}
#[inline]
#[rustc_const_unstable(feature = "const_ub_checks", issue = "none")]
const fn comptime(ptr: *const (), _align: usize, is_zst: bool) -> bool {
is_zst || !ptr.is_null()
}
// This is just for safety checks so we can const_eval_select.
const_eval_select((ptr, align, is_zst), comptime, runtime)
const_eval_select!(
@capture { ptr: *const (), align: usize, is_zst: bool } -> bool:
if const #[rustc_const_unstable(feature = "const_ub_checks", issue = "none")] {
is_zst || !ptr.is_null()
} else {
ptr.is_aligned_to(align) && (is_zst || !ptr.is_null())
}
)
}
#[inline]
@ -154,26 +148,23 @@ pub(crate) const fn is_nonoverlapping(
size: usize,
count: usize,
) -> bool {
#[inline]
fn runtime(src: *const (), dst: *const (), size: usize, count: usize) -> bool {
let src_usize = src.addr();
let dst_usize = dst.addr();
let Some(size) = size.checked_mul(count) else {
crate::panicking::panic_nounwind(
"is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
)
};
let diff = src_usize.abs_diff(dst_usize);
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
diff >= size
}
#[inline]
const fn comptime(_: *const (), _: *const (), _: usize, _: usize) -> bool {
true
}
// This is just for safety checks so we can const_eval_select.
const_eval_select((src, dst, size, count), comptime, runtime)
const_eval_select!(
@capture { src: *const (), dst: *const (), size: usize, count: usize } -> bool:
if const {
true
} else {
let src_usize = src.addr();
let dst_usize = dst.addr();
let Some(size) = size.checked_mul(count) else {
crate::panicking::panic_nounwind(
"is_nonoverlapping: `size_of::<T>() * count` overflows a usize",
)
};
let diff = src_usize.abs_diff(dst_usize);
// If the absolute distance between the ptrs is at least as big as the size of the buffer,
// they do not overlap.
diff >= size
}
)
}
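
A standalone sketch of the distance check performed above: two buffers of `size * count` bytes are disjoint exactly when their start addresses are at least that many bytes apart (illustration only, using `as usize` casts in place of the library's `addr()` calls):

```rust
fn is_nonoverlapping(src: *const u8, dst: *const u8, size: usize, count: usize) -> bool {
    let len = size.checked_mul(count).expect("size_of::<T>() * count overflows a usize");
    // If the absolute distance between the pointers is at least the buffer
    // length, the two ranges cannot overlap.
    (src as usize).abs_diff(dst as usize) >= len
}

fn main() {
    let buf = [0u8; 16];
    let a = buf.as_ptr();
    let b = buf[8..].as_ptr();
    assert!(is_nonoverlapping(a, b, 1, 8)); // bytes [0, 8) vs [8, 16): disjoint
    assert!(!is_nonoverlapping(a, b, 1, 9)); // bytes [0, 9) vs [8, 17): they overlap
}
```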

View File

@ -26,8 +26,6 @@ ignore = [
"/tests/ui-fulldeps/", # Some are whitespace-sensitive (e.g. `// ~ERROR` comments).
# Do not format submodules.
# FIXME: sync submodule list with tidy/bootstrap/etc
# tidy/src/walk.rs:filter_dirs
"library/backtrace",
"library/portable-simd",
"library/stdarch",
@ -41,6 +39,7 @@ ignore = [
"src/llvm-project",
"src/tools/cargo",
"src/tools/clippy",
"src/tools/enzyme",
"src/tools/miri",
"src/tools/rust-analyzer",
"src/tools/rustc-perf",

View File

@ -6,6 +6,8 @@ runners:
- &job-linux-4c
os: ubuntu-20.04
# Free some disk space to avoid running out of space during the build.
free_disk: true
<<: *base-job
# Large runner used mainly for its bigger disk capacity
@ -135,7 +137,7 @@ auto:
- image: dist-aarch64-linux
env:
CODEGEN_BACKENDS: llvm,cranelift
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-android
<<: *job-linux-4c
@ -156,28 +158,28 @@ auto:
<<: *job-linux-4c
- image: dist-loongarch64-linux
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-loongarch64-musl
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-ohos
<<: *job-linux-4c
- image: dist-powerpc-linux
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-powerpc64-linux
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-powerpc64le-linux
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-riscv64-linux
<<: *job-linux-4c
- image: dist-s390x-linux
<<: *job-linux-4c-largedisk
<<: *job-linux-4c
- image: dist-various-1
<<: *job-linux-4c

View File

@ -9,11 +9,12 @@ This option is only accepted when targeting AArch64 architectures.
It takes some combination of the following values, separated by a `,`.
- `pac-ret` - Enable pointer authentication for non-leaf functions.
- `pc` - Use PC as a diversifier using PAuthLR instructions.
- `leaf` - Enable pointer authentication for all functions, including leaf functions.
- `b-key` - Sign return addresses with key B, instead of the default key A.
- `bti` - Enable branch target identification.
`leaf` and `b-key` are only valid if `pac-ret` was previously specified.
`leaf`, `b-key` and `pc` are only valid if `pac-ret` was previously specified.
For example, `-Z branch-protection=bti,pac-ret,leaf` is valid, but
`-Z branch-protection=bti,leaf,pac-ret` is not.

View File

@ -1,29 +1,16 @@
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, FnDecl};
use rustc_lint::Level::Deny;
use rustc_lint::{LateContext, LateLintPass, Lint};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::declare_lint_pass;
use rustc_span::Span;
/// Ensures that Compile-Time Function Evaluation is being done (specifically, MIR lint passes).
/// As Clippy deactivates codegen, this lint ensures that CTFE (used in hard errors) is still run.
pub static CLIPPY_CTFE: &Lint = &Lint {
name: &"clippy::CLIPPY_CTFE",
default_level: Deny,
desc: "Ensure CTFE is being made",
edition_lint_opts: None,
report_in_external_macro: true,
future_incompatible: None,
is_externally_loaded: true,
crate_level_only: false,
eval_always: true,
..Lint::default_fields_for_macro()
};
// No static CLIPPY_CTFE_INFO because we want this lint to be invisible
declare_lint_pass! { ClippyCtfe => [CLIPPY_CTFE] }
declare_lint_pass! {
/// Ensures that Compile-Time Function Evaluation is being done (specifically, MIR lint passes).
/// As Clippy deactivates codegen, this lint ensures that CTFE (used in hard errors) is still run.
ClippyCtfe => []
}
impl<'tcx> LateLintPass<'tcx> for ClippyCtfe {
fn check_fn(

View File

@ -7,7 +7,6 @@
/// The default directory filter.
pub fn filter_dirs(path: &Path) -> bool {
// FIXME: sync submodule exclusion list with rustfmt.toml
// bootstrap/etc
let skip = [
"tidy-test-file",

View File

@ -1,9 +1,13 @@
// Test that PAC instructions are emitted when branch-protection is specified.
//@ revisions: PACRET PAUTHLR_NOP PAUTHLR
//@ assembly-output: emit-asm
//@ compile-flags: --target aarch64-unknown-linux-gnu
//@ compile-flags: -Z branch-protection=pac-ret,leaf
//@ needs-llvm-components: aarch64
//@ compile-flags: --target aarch64-unknown-linux-gnu
//@ [PACRET] compile-flags: -Z branch-protection=pac-ret,leaf
//@ [PAUTHLR_NOP] compile-flags: -Z branch-protection=pac-ret,pc,leaf
//@ [PAUTHLR] compile-flags: -C target-feature=+pauth-lr -Z branch-protection=pac-ret,pc,leaf
//@ min-llvm-version: 19
#![feature(no_core, lang_items)]
#![no_std]
@ -13,8 +17,13 @@
#[lang = "sized"]
trait Sized {}
// CHECK: hint #25
// CHECK: hint #29
// PACRET: hint #25
// PACRET: hint #29
// PAUTHLR_NOP: hint #25
// PAUTHLR_NOP: hint #39
// PAUTHLR_NOP: hint #29
// PAUTHLR: paciasppc
// PAUTHLR: autiasppc
#[no_mangle]
pub fn test() -> u8 {
42

View File

@ -1,11 +1,15 @@
// Test that the correct module flags are emitted with different branch protection flags.
//@ revisions: BTI PACRET LEAF BKEY NONE
//@ revisions: BTI PACRET LEAF BKEY PAUTHLR PAUTHLR_BKEY PAUTHLR_LEAF PAUTHLR_BTI NONE
//@ needs-llvm-components: aarch64
//@ [BTI] compile-flags: -Z branch-protection=bti
//@ [PACRET] compile-flags: -Z branch-protection=pac-ret
//@ [LEAF] compile-flags: -Z branch-protection=pac-ret,leaf
//@ [BKEY] compile-flags: -Z branch-protection=pac-ret,b-key
//@ [PAUTHLR] compile-flags: -Z branch-protection=pac-ret,pc
//@ [PAUTHLR_BKEY] compile-flags: -Z branch-protection=pac-ret,pc,b-key
//@ [PAUTHLR_LEAF] compile-flags: -Z branch-protection=pac-ret,pc,leaf
//@ [PAUTHLR_BTI] compile-flags: -Z branch-protection=bti,pac-ret,pc
//@ compile-flags: --target aarch64-unknown-linux-gnu
//@ min-llvm-version: 19
@ -24,6 +28,7 @@ pub fn test() {}
// BTI: attributes [[ATTR]] = {{.*}} "branch-target-enforcement"
// BTI: !"branch-target-enforcement", i32 1
// BTI: !"sign-return-address", i32 0
// BTI: !"branch-protection-pauth-lr", i32 0
// BTI: !"sign-return-address-all", i32 0
// BTI: !"sign-return-address-with-bkey", i32 0
@ -31,6 +36,7 @@ pub fn test() {}
// PACRET-SAME: "sign-return-address-key"="a_key"
// PACRET: !"branch-target-enforcement", i32 0
// PACRET: !"sign-return-address", i32 1
// PACRET: !"branch-protection-pauth-lr", i32 0
// PACRET: !"sign-return-address-all", i32 0
// PACRET: !"sign-return-address-with-bkey", i32 0
@ -38,6 +44,7 @@ pub fn test() {}
// LEAF-SAME: "sign-return-address-key"="a_key"
// LEAF: !"branch-target-enforcement", i32 0
// LEAF: !"sign-return-address", i32 1
// LEAF: !"branch-protection-pauth-lr", i32 0
// LEAF: !"sign-return-address-all", i32 1
// LEAF: !"sign-return-address-with-bkey", i32 0
@ -45,9 +52,42 @@ pub fn test() {}
// BKEY-SAME: "sign-return-address-key"="b_key"
// BKEY: !"branch-target-enforcement", i32 0
// BKEY: !"sign-return-address", i32 1
// BKEY: !"branch-protection-pauth-lr", i32 0
// BKEY: !"sign-return-address-all", i32 0
// BKEY: !"sign-return-address-with-bkey", i32 1
// PAUTHLR: attributes [[ATTR]] = {{.*}} "sign-return-address"="non-leaf"
// PAUTHLR-SAME: "sign-return-address-key"="a_key"
// PAUTHLR: !"branch-target-enforcement", i32 0
// PAUTHLR: !"sign-return-address", i32 1
// PAUTHLR: !"branch-protection-pauth-lr", i32 1
// PAUTHLR: !"sign-return-address-all", i32 0
// PAUTHLR: !"sign-return-address-with-bkey", i32 0
// PAUTHLR_BKEY: attributes [[ATTR]] = {{.*}} "sign-return-address"="non-leaf"
// PAUTHLR_BKEY-SAME: "sign-return-address-key"="b_key"
// PAUTHLR_BKEY: !"branch-target-enforcement", i32 0
// PAUTHLR_BKEY: !"sign-return-address", i32 1
// PAUTHLR_BKEY: !"branch-protection-pauth-lr", i32 1
// PAUTHLR_BKEY: !"sign-return-address-all", i32 0
// PAUTHLR_BKEY: !"sign-return-address-with-bkey", i32 1
// PAUTHLR_LEAF: attributes [[ATTR]] = {{.*}} "sign-return-address"="all"
// PAUTHLR_LEAF-SAME: "sign-return-address-key"="a_key"
// PAUTHLR_LEAF: !"branch-target-enforcement", i32 0
// PAUTHLR_LEAF: !"sign-return-address", i32 1
// PAUTHLR_LEAF: !"branch-protection-pauth-lr", i32 1
// PAUTHLR_LEAF: !"sign-return-address-all", i32 1
// PAUTHLR_LEAF: !"sign-return-address-with-bkey", i32 0
// PAUTHLR_BTI: attributes [[ATTR]] = {{.*}} "sign-return-address"="non-leaf"
// PAUTHLR_BTI-SAME: "sign-return-address-key"="a_key"
// PAUTHLR_BTI: !"branch-target-enforcement", i32 1
// PAUTHLR_BTI: !"sign-return-address", i32 1
// PAUTHLR_BTI: !"branch-protection-pauth-lr", i32 1
// PAUTHLR_BTI: !"sign-return-address-all", i32 0
// PAUTHLR_BTI: !"sign-return-address-with-bkey", i32 0
// NONE-NOT: branch-target-enforcement
// NONE-NOT: sign-return-address
// NONE-NOT: sign-return-address-all

View File

@ -1,7 +1,7 @@
// `-Z branch-protection` is an unstable compiler feature which adds pointer-authentication
// code (PAC), a useful hashing measure for verifying that pointers have not been modified.
// This test checks that compilation and execution is successful when this feature is activated,
// with some of its possible extra arguments (bti, pac-ret, leaf).
// with some of its possible extra arguments (bti, pac-ret, pc, leaf, b-key).
// See https://github.com/rust-lang/rust/pull/88354
//@ only-aarch64
@ -25,4 +25,16 @@ fn main() {
llvm_ar().obj_to_ar().output_input("libtest.a", &obj_file).run();
rustc().arg("-Zbranch-protection=bti,pac-ret,leaf").input("test.rs").run();
run("test");
// FIXME: +pc was only recently added to LLVM
// cc().arg("-v")
// .arg("-c")
// .out_exe("test")
// .input("test.c")
// .arg("-mbranch-protection=bti+pac-ret+pc+leaf")
// .run();
// let obj_file = if is_msvc() { "test.obj" } else { "test" };
// llvm_ar().obj_to_ar().output_input("libtest.a", &obj_file).run();
// rustc().arg("-Zbranch-protection=bti,pac-ret,pc,leaf").input("test.rs").run();
// run("test");
}

View File

@ -3,7 +3,7 @@ error[E0080]: evaluation of constant value failed
|
= note: the evaluated program panicked at 'null-ness of this pointer cannot be determined in const context', $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
|
note: inside `std::ptr::const_ptr::<impl *const T>::is_null::const_impl`
note: inside `std::ptr::const_ptr::<impl *const T>::is_null::compiletime`
--> $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
note: inside `std::ptr::const_ptr::<impl *const i32>::is_null`
--> $SRC_DIR/core/src/ptr/const_ptr.rs:LL:COL
@ -12,7 +12,7 @@ note: inside `MAYBE_NULL`
|
LL | assert!(!ptr.wrapping_sub(512).is_null());
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `panic` (in Nightly builds, run with -Z macro-backtrace for more info)
= note: this error originates in the macro `$crate::panic::panic_2021` which comes from the expansion of the macro `const_eval_select` (in Nightly builds, run with -Z macro-backtrace for more info)
error: aborting due to 1 previous error

View File

@ -0,0 +1,15 @@
#[derive(Default)]
enum E {
A {},
B {},
#[default]
C,
}
impl E {
fn f() {}
}
fn main() {
E::A::f(); //~ ERROR failed to resolve: `A` is a variant, not a module
}

View File

@ -0,0 +1,14 @@
error[E0433]: failed to resolve: `A` is a variant, not a module
--> $DIR/assoc-fn-call-on-variant.rs:14:8
|
LL | E::A::f();
| ^ `A` is a variant, not a module
|
help: there is an enum variant `E::A`; try using the variant's enum
|
LL | E::f();
| ~
error: aborting due to 1 previous error
For more information about this error, try `rustc --explain E0433`.

View File

@ -2,7 +2,16 @@ error[E0425]: cannot find value `y` in this scope
--> $DIR/error-festival.rs:14:5
|
LL | y = 2;
| ^ help: a local variable with a similar name exists: `x`
| ^
|
help: a local variable with a similar name exists
|
LL | x = 2;
| ~
help: you might have meant to introduce a new binding
|
LL | let y = 2;
| +++
error[E0603]: constant `FOO` is private
--> $DIR/error-festival.rs:22:10

View File

@ -1,2 +1,2 @@
error: incorrect value `leaf` for unstable option `branch-protection` - a `,` separated combination of `bti`, `b-key`, `pac-ret`, or `leaf` was expected
error: incorrect value `leaf` for unstable option `branch-protection` - a `,` separated combination of `bti`, `pac-ret`, followed by a combination of `pc`, `b-key`, or `leaf` was expected

View File

@ -0,0 +1,2 @@
error: incorrect value `pc` for unstable option `branch-protection` - a `,` separated combination of `bti`, `pac-ret`, followed by a combination of `pc`, `b-key`, or `leaf` was expected

View File

@ -1,7 +1,10 @@
//@ revisions: BADFLAGS BADTARGET
//@ revisions: BADFLAGS BADFLAGSPC BADTARGET
//@ [BADFLAGS] compile-flags: --target=aarch64-unknown-linux-gnu -Zbranch-protection=leaf
//@ [BADFLAGS] check-fail
//@ [BADFLAGS] needs-llvm-components: aarch64
//@ [BADFLAGSPC] compile-flags: --target=aarch64-unknown-linux-gnu -Zbranch-protection=pc
//@ [BADFLAGSPC] check-fail
//@ [BADFLAGSPC] needs-llvm-components: aarch64
//@ [BADTARGET] compile-flags: --target=x86_64-unknown-linux-gnu -Zbranch-protection=bti
//@ [BADTARGET] check-fail
//@ [BADTARGET] needs-llvm-components: x86
@ -10,5 +13,5 @@
#![feature(no_core, lang_items)]
#![no_core]
#[lang="sized"]
trait Sized { }
#[lang = "sized"]
trait Sized {}

View File

@ -6,7 +6,7 @@ LL | E::V::associated_item;
|
help: there is an enum variant `E::V`; try using the variant's enum
|
LL | E;
LL | E::associated_item;
| ~
error[E0433]: failed to resolve: `V` is a variant, not a module
@ -17,10 +17,6 @@ LL | V::associated_item;
|
help: there is an enum variant `E::V`; try using the variant's enum
|
LL | E;
| ~
help: an enum with a similar name exists
|
LL | E::associated_item;
| ~

View File

@ -0,0 +1,7 @@
fn main() {
let x1 = 0;
x2 = 1;
//~^ ERROR E0425
other_val = 2;
//~^ ERROR E0425
}

View File

@ -0,0 +1,29 @@
error[E0425]: cannot find value `x2` in this scope
--> $DIR/suggest-let-and-typo-issue-132483.rs:3:5
|
LL | x2 = 1;
| ^^
|
help: a local variable with a similar name exists
|
LL | x1 = 1;
| ~~
help: you might have meant to introduce a new binding
|
LL | let x2 = 1;
| +++
error[E0425]: cannot find value `other_val` in this scope
--> $DIR/suggest-let-and-typo-issue-132483.rs:5:5
|
LL | other_val = 2;
| ^^^^^^^^^
|
help: you might have meant to introduce a new binding
|
LL | let other_val = 2;
| +++
error: aborting due to 2 previous errors
For more information about this error, try `rustc --explain E0425`.