Auto merge of #122763 - matthiaskrgr:rollup-o8a2mye, r=matthiaskrgr
Rollup of 6 pull requests

Successful merges:

- #121543 (various clippy fixes)
- #122540 (Do not use `?`-induced skewing of type inference in the compiler)
- #122730 (Expose `ucred::peer_cred` on QNX targets to enable dist builds)
- #122732 (Remove redundant coroutine captures note)
- #122739 (Add "put" as a confusable for insert on hash map/set)
- #122748 (Reduce `pub` usage in `rustc_session`.)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 0f706af330
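The `?`-related parser change (#122540) is easiest to see in isolation. Below is a minimal, standalone sketch with made-up names modeled on the `Parser::unexpected`/`unexpected_any` split visible in the diff that follows; it is not rustc's actual code.

```rust
// Sketch only: stand-ins for rustc's `Parser::unexpected` / `unexpected_any`,
// showing why `?` on a generic-`Ok` helper can skew type inference.

// Always fails, but advertises an arbitrary `Ok` type `T`.
fn unexpected_any<T>() -> Result<T, String> {
    Err("unexpected token".to_string())
}

// Non-generic wrapper: the `Ok` type is pinned to `()`, so using it in
// statement position with `?` cannot influence inference at the call site.
fn unexpected() -> Result<(), String> {
    unexpected_any()
}

fn parse_flag(input: &str) -> Result<bool, String> {
    match input {
        "yes" => Ok(true),
        "no" => Ok(false),
        _ => {
            // `return unexpected_any();` would also compile here, with `T`
            // silently inferred as `bool`; `unexpected_any()?;` in statement
            // position would leave `T` unconstrained and fail to compile.
            unexpected()?;
            unreachable!()
        }
    }
}

fn main() {
    assert!(parse_flag("yes").unwrap());
    assert!(parse_flag("maybe").is_err());
}
```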
@@ -11,6 +11,7 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::Lrc;
use rustc_macros::HashStable_Generic;
use rustc_span::symbol::{kw, sym};
#[allow(clippy::useless_attribute)] // FIXME: following use of `hidden_glob_reexports` incorrectly triggers `useless_attribute` lint.
#[allow(hidden_glob_reexports)]
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::{edition::Edition, ErrorGuaranteed, Span, DUMMY_SP};
@@ -189,7 +189,7 @@ pub fn parse_asm_args<'a>(
args.templates.push(template);
continue;
} else {
return p.unexpected();
p.unexpected_any()?
};

allow_templates = false;
@@ -151,7 +151,7 @@ fn parse_assert<'a>(cx: &mut ExtCtxt<'a>, sp: Span, stream: TokenStream) -> PRes
};

if parser.token != token::Eof {
return parser.unexpected();
parser.unexpected()?;
}

Ok(Assert { cond_expr, custom_message })
@@ -315,6 +315,7 @@ pub unsafe fn create_module<'ll>(
//
// On the wasm targets it will get hooked up to the "producer" sections
// `processed-by` information.
#[allow(clippy::option_env_unwrap)]
let rustc_producer =
format!("rustc version {}", option_env!("CFG_VERSION").expect("CFG_VERSION"));
let name_metadata = llvm::LLVMMDStringInContext(
@@ -293,7 +293,9 @@ pub fn intern_const_alloc_for_constprop<
return Ok(());
}
// Move allocation to `tcx`.
for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
if let Some(_) =
(intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))?).next()
{
// We are not doing recursive interning, so we don't currently support provenance.
// (If this assertion ever triggers, we should just implement a
// proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
@@ -12,6 +12,7 @@
#![feature(proc_macro_internals)]
#![feature(proc_macro_span)]
#![feature(try_blocks)]
#![feature(yeet_expr)]
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(internal_features)]
@@ -62,7 +62,7 @@ pub(crate) fn parse_external_mod(

// Ensure file paths are acyclic.
if let Some(pos) = module.file_path_stack.iter().position(|p| p == &mp.file_path) {
Err(ModError::CircularInclusion(module.file_path_stack[pos..].to_vec()))?;
do yeet ModError::CircularInclusion(module.file_path_stack[pos..].to_vec());
}

// Actually parse the external file as a module.
@@ -990,7 +990,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> {
let generics_def_id = tcx.res_generics_def_id(path.res)?;
let generics = tcx.generics_of(generics_def_id);
if generics.has_impl_trait() {
None?;
do yeet ();
}
let insert_span =
path.segments.last().unwrap().ident.span.shrink_to_hi().with_hi(path.span.hi());
@@ -1044,7 +1044,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> {
let generics = tcx.generics_of(def_id);
let segment: Option<_> = try {
if !segment.infer_args || generics.has_impl_trait() {
None?;
do yeet ();
}
let span = tcx.hir().span(segment.hir_id);
let insert_span = segment.ident.span.shrink_to_hi().with_hi(span.hi());
@@ -27,6 +27,7 @@
#![feature(iterator_try_collect)]
#![cfg_attr(bootstrap, feature(min_specialization))]
#![feature(try_blocks)]
#![feature(yeet_expr)]
#![recursion_limit = "512"] // For rustdoc

#[macro_use]
@@ -2198,7 +2198,7 @@ impl<D: Decoder> Decodable<D> for EncodedMetadata {
let mmap = if len > 0 {
let mut mmap = MmapMut::map_anon(len).unwrap();
for _ in 0..len {
(&mut mmap[..]).write(&[d.read_u8()]).unwrap();
(&mut mmap[..]).write_all(&[d.read_u8()]).unwrap();
}
mmap.flush().unwrap();
Some(mmap.make_read_only().unwrap())
@@ -76,20 +76,16 @@ impl<'hir> Iterator for ParentOwnerIterator<'hir> {
if self.current_id == CRATE_HIR_ID {
return None;
}
loop {
// There are nodes that do not have entries, so we need to skip them.
let parent_id = self.map.def_key(self.current_id.owner.def_id).parent;

let parent_id = parent_id.map_or(CRATE_OWNER_ID, |local_def_index| {
let def_id = LocalDefId { local_def_index };
self.map.tcx.local_def_id_to_hir_id(def_id).owner
});
self.current_id = HirId::make_owner(parent_id.def_id);
let parent_id = self.map.def_key(self.current_id.owner.def_id).parent;
let parent_id = parent_id.map_or(CRATE_OWNER_ID, |local_def_index| {
let def_id = LocalDefId { local_def_index };
self.map.tcx.local_def_id_to_hir_id(def_id).owner
});
self.current_id = HirId::make_owner(parent_id.def_id);

// If this `HirId` doesn't have an entry, skip it and look for its `parent_id`.
let node = self.map.tcx.hir_owner_node(self.current_id.owner);
return Some((self.current_id.owner, node));
}
let node = self.map.tcx.hir_owner_node(self.current_id.owner);
return Some((self.current_id.owner, node));
}
}
@@ -671,11 +671,11 @@ pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, i
// So we do not read exactly 16 bytes into the u128, just the "payload".
let uint = match endianness {
Endian::Little => {
source.read(&mut buf)?;
source.read_exact(&mut buf[..source.len()])?;
Ok(u128::from_le_bytes(buf))
}
Endian::Big => {
source.read(&mut buf[16 - source.len()..])?;
source.read_exact(&mut buf[16 - source.len()..])?;
Ok(u128::from_be_bytes(buf))
}
};
@@ -229,7 +229,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span: Span,
scrutinee_span: Span,
) -> BlockAnd<()> {
let scrutinee_span = scrutinee_span;
let scrutinee_place =
unpack!(block = self.lower_scrutinee(block, scrutinee_id, scrutinee_span));
@@ -380,12 +380,12 @@ impl<'a> Parser<'a> {
};

if let Some(item) = nt_meta {
return match item.meta(item.path.span) {
match item.meta(item.path.span) {
Some(meta) => {
self.bump();
Ok(meta)
return Ok(meta);
}
None => self.unexpected(),
None => self.unexpected()?,
};
}

@@ -481,7 +481,7 @@ impl<'a> Parser<'a> {
}))
} else {
self.maybe_recover_bounds_doubled_colon(&ty)?;
self.unexpected()
self.unexpected_any()
}
}
@@ -1514,7 +1514,7 @@ impl<'a> Parser<'a> {
let ident = this.parse_field_ident("enum", vlo)?;

if this.token == token::Not {
if let Err(err) = this.unexpected::<()>() {
if let Err(err) = this.unexpected() {
err.with_note(fluent::parse_macro_expands_to_enum_variant).emit();
}

@@ -1937,7 +1937,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, FieldDef> {
let name = self.parse_field_ident(adt_ty, lo)?;
if self.token.kind == token::Not {
if let Err(mut err) = self.unexpected::<FieldDef>() {
if let Err(mut err) = self.unexpected() {
// Encounter the macro invocation
err.subdiagnostic(self.dcx(), MacroExpandsToAdtField { adt_ty });
return Err(err);
@@ -2067,7 +2067,7 @@ impl<'a> Parser<'a> {
let params = self.parse_token_tree(); // `MacParams`
let pspan = params.span();
if !self.check(&token::OpenDelim(Delimiter::Brace)) {
return self.unexpected();
self.unexpected()?;
}
let body = self.parse_token_tree(); // `MacBody`
// Convert `MacParams MacBody` into `{ MacParams => MacBody }`.
@@ -2077,7 +2077,7 @@ impl<'a> Parser<'a> {
let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
P(DelimArgs { dspan, delim: Delimiter::Brace, tokens })
} else {
return self.unexpected();
self.unexpected_any()?
};

self.psess.gated_spans.gate(sym::decl_macro, lo.to(self.prev_token.span));
@@ -2692,7 +2692,7 @@ impl<'a> Parser<'a> {
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
let (pat, colon) = this.parse_fn_param_pat_colon()?;
if !colon {
let mut err = this.unexpected::<()>().unwrap_err();
let mut err = this.unexpected().unwrap_err();
return if let Some(ident) =
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
{
@@ -2716,7 +2716,7 @@ impl<'a> Parser<'a> {
{
// This wasn't actually a type, but a pattern looking like a type,
// so we are going to rollback and re-parse for recovery.
ty = this.unexpected();
ty = this.unexpected_any();
}
match ty {
Ok(ty) => {
@@ -465,7 +465,9 @@ impl<'a> Parser<'a> {
matches!(self.recovery, Recovery::Allowed)
}

pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
/// Version of [`unexpected`](Parser::unexpected) that "returns" any type in the `Ok`
/// (both those functions never return "Ok", and so can lie like that in the type).
pub fn unexpected_any<T>(&mut self) -> PResult<'a, T> {
match self.expect_one_of(&[], &[]) {
Err(e) => Err(e),
// We can get `Ok(true)` from `recover_closing_delimiter`
@@ -474,6 +476,10 @@ impl<'a> Parser<'a> {
}
}

pub fn unexpected(&mut self) -> PResult<'a, ()> {
self.unexpected_any()
}

/// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> {
if self.expected_tokens.is_empty() {
@@ -1278,7 +1284,11 @@ impl<'a> Parser<'a> {
}

fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
if let Some(args) = self.parse_delim_args_inner() {
Ok(P(args))
} else {
self.unexpected_any()
}
}

fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
@@ -660,7 +660,7 @@ impl<'a> Parser<'a> {
// Add `>` to the list of expected tokens.
self.check(&token::Gt);
// Handle `,` to `;` substitution
let mut err = self.unexpected::<()>().unwrap_err();
let mut err = self.unexpected().unwrap_err();
self.bump();
err.span_suggestion_verbose(
self.prev_token.span.until(self.token.span),
@@ -313,7 +313,7 @@ pub struct LocationDetail {
}

impl LocationDetail {
pub fn all() -> Self {
pub(crate) fn all() -> Self {
Self { file: true, line: true, column: true }
}
}
@@ -549,7 +549,7 @@ impl OutputTypes {
OutputTypes(BTreeMap::from_iter(entries.iter().map(|&(k, ref v)| (k, v.clone()))))
}

pub fn get(&self, key: &OutputType) -> Option<&Option<OutFileName>> {
pub(crate) fn get(&self, key: &OutputType) -> Option<&Option<OutFileName>> {
self.0.get(key)
}

@@ -662,10 +662,6 @@ impl Externs {
pub fn iter(&self) -> BTreeMapIter<'_, String, ExternEntry> {
self.0.iter()
}

pub fn len(&self) -> usize {
self.0.len()
}
}

impl ExternEntry {
@@ -854,13 +850,13 @@ impl OutFileName {

#[derive(Clone, Hash, Debug, HashStable_Generic, Encodable, Decodable)]
pub struct OutputFilenames {
pub out_directory: PathBuf,
pub(crate) out_directory: PathBuf,
/// Crate name. Never contains '-'.
crate_stem: String,
/// Typically based on `.rs` input file name. Any '-' is preserved.
filestem: String,
pub single_output_file: Option<OutFileName>,
pub temps_directory: Option<PathBuf>,
temps_directory: Option<PathBuf>,
pub outputs: OutputTypes,
}

@@ -898,7 +894,7 @@ impl OutputFilenames {

/// Gets the output path where a compilation artifact of the given type
/// should be placed on disk.
pub fn output_path(&self, flavor: OutputType) -> PathBuf {
fn output_path(&self, flavor: OutputType) -> PathBuf {
let extension = flavor.extension();
match flavor {
OutputType::Metadata => {
@@ -1092,7 +1088,7 @@ impl Options {
|| self.unstable_opts.query_dep_graph
}

pub fn file_path_mapping(&self) -> FilePathMapping {
pub(crate) fn file_path_mapping(&self) -> FilePathMapping {
file_path_mapping(self.remap_path_prefix.clone(), &self.unstable_opts)
}

@@ -1173,14 +1169,14 @@ pub enum Passes {
}

impl Passes {
pub fn is_empty(&self) -> bool {
fn is_empty(&self) -> bool {
match *self {
Passes::Some(ref v) => v.is_empty(),
Passes::All => false,
}
}

pub fn extend(&mut self, passes: impl IntoIterator<Item = String>) {
pub(crate) fn extend(&mut self, passes: impl IntoIterator<Item = String>) {
match *self {
Passes::Some(ref mut v) => v.extend(passes),
Passes::All => {}
@@ -1206,7 +1202,7 @@ pub struct BranchProtection {
pub pac_ret: Option<PacRet>,
}

pub const fn default_lib_output() -> CrateType {
pub(crate) const fn default_lib_output() -> CrateType {
CrateType::Rlib
}

@@ -1584,15 +1580,15 @@ pub fn build_target_config(
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum OptionStability {
enum OptionStability {
Stable,
Unstable,
}

pub struct RustcOptGroup {
pub apply: Box<dyn Fn(&mut getopts::Options) -> &mut getopts::Options>,
pub name: &'static str,
pub stability: OptionStability,
name: &'static str,
stability: OptionStability,
}

impl RustcOptGroup {
@@ -1628,8 +1624,8 @@ mod opt {

use super::RustcOptGroup;

pub type R = RustcOptGroup;
pub type S = &'static str;
type R = RustcOptGroup;
type S = &'static str;

fn stable<F>(name: S, f: F) -> R
where
@@ -1649,32 +1645,34 @@ mod opt {
if a.len() > b.len() { a } else { b }
}

pub fn opt_s(a: S, b: S, c: S, d: S) -> R {
pub(crate) fn opt_s(a: S, b: S, c: S, d: S) -> R {
stable(longer(a, b), move |opts| opts.optopt(a, b, c, d))
}
pub fn multi_s(a: S, b: S, c: S, d: S) -> R {
pub(crate) fn multi_s(a: S, b: S, c: S, d: S) -> R {
stable(longer(a, b), move |opts| opts.optmulti(a, b, c, d))
}
pub fn flag_s(a: S, b: S, c: S) -> R {
pub(crate) fn flag_s(a: S, b: S, c: S) -> R {
stable(longer(a, b), move |opts| opts.optflag(a, b, c))
}
pub fn flagmulti_s(a: S, b: S, c: S) -> R {
pub(crate) fn flagmulti_s(a: S, b: S, c: S) -> R {
stable(longer(a, b), move |opts| opts.optflagmulti(a, b, c))
}

pub fn opt(a: S, b: S, c: S, d: S) -> R {
fn opt(a: S, b: S, c: S, d: S) -> R {
unstable(longer(a, b), move |opts| opts.optopt(a, b, c, d))
}
pub fn multi(a: S, b: S, c: S, d: S) -> R {
pub(crate) fn multi(a: S, b: S, c: S, d: S) -> R {
unstable(longer(a, b), move |opts| opts.optmulti(a, b, c, d))
}
}

static EDITION_STRING: LazyLock<String> = LazyLock::new(|| {
format!(
"Specify which edition of the compiler to use when compiling code. \
The default is {DEFAULT_EDITION} and the latest stable edition is {LATEST_STABLE_EDITION}."
)
});

/// Returns the "short" subset of the rustc command line options,
/// including metadata for each option, such as whether the option is
/// part of the stable long-term interface for rustc.
@@ -1864,9 +1862,9 @@ pub fn parse_color(early_dcx: &EarlyDiagCtxt, matches: &getopts::Matches) -> Col
/// Possible json config files
pub struct JsonConfig {
pub json_rendered: HumanReadableErrorType,
pub json_artifact_notifications: bool,
json_artifact_notifications: bool,
pub json_unused_externs: JsonUnusedExterns,
pub json_future_incompat: bool,
json_future_incompat: bool,
}

/// Report unused externs in event stream
@@ -2992,7 +2990,7 @@ pub mod nightly_options {
is_nightly_build(matches.opt_str("crate-name").as_deref())
}

pub fn is_nightly_build(krate: Option<&str>) -> bool {
fn is_nightly_build(krate: Option<&str>) -> bool {
UnstableFeatures::from_environment(krate).is_nightly_build()
}

@@ -3199,7 +3197,7 @@ pub(crate) mod dep_tracking {
use std::num::NonZero;
use std::path::PathBuf;

pub trait DepTrackingHash {
pub(crate) trait DepTrackingHash {
fn hash(
&self,
hasher: &mut DefaultHasher,
@@ -12,9 +12,9 @@ use rustc_target::spec::{SplitDebuginfo, StackProtector, TargetTriple};

use crate::{config::CrateType, parse::ParseSess};

pub struct FeatureGateError {
pub span: MultiSpan,
pub explain: DiagMessage,
pub(crate) struct FeatureGateError {
pub(crate) span: MultiSpan,
pub(crate) explain: DiagMessage,
}

impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for FeatureGateError {
@@ -26,22 +26,22 @@ impl<'a, G: EmissionGuarantee> Diagnostic<'a, G> for FeatureGateError {

#[derive(Subdiagnostic)]
#[note(session_feature_diagnostic_for_issue)]
pub struct FeatureDiagnosticForIssue {
pub n: NonZero<u32>,
pub(crate) struct FeatureDiagnosticForIssue {
pub(crate) n: NonZero<u32>,
}

#[derive(Subdiagnostic)]
#[note(session_feature_suggest_upgrade_compiler)]
pub struct SuggestUpgradeCompiler {
pub(crate) struct SuggestUpgradeCompiler {
date: &'static str,
}

impl SuggestUpgradeCompiler {
pub fn ui_testing() -> Self {
pub(crate) fn ui_testing() -> Self {
Self { date: "YYYY-MM-DD" }
}

pub fn new() -> Option<Self> {
pub(crate) fn new() -> Option<Self> {
let date = option_env!("CFG_VER_DATE")?;

Some(Self { date })
@@ -50,8 +50,8 @@ impl SuggestUpgradeCompiler {

#[derive(Subdiagnostic)]
#[help(session_feature_diagnostic_help)]
pub struct FeatureDiagnosticHelp {
pub feature: Symbol,
pub(crate) struct FeatureDiagnosticHelp {
pub(crate) feature: Symbol,
}

#[derive(Subdiagnostic)]
@@ -68,102 +68,102 @@ pub struct FeatureDiagnosticSuggestion {

#[derive(Subdiagnostic)]
#[help(session_cli_feature_diagnostic_help)]
pub struct CliFeatureDiagnosticHelp {
pub feature: Symbol,
pub(crate) struct CliFeatureDiagnosticHelp {
pub(crate) feature: Symbol,
}

#[derive(Diagnostic)]
#[diag(session_not_circumvent_feature)]
pub struct NotCircumventFeature;
pub(crate) struct NotCircumventFeature;

#[derive(Diagnostic)]
#[diag(session_linker_plugin_lto_windows_not_supported)]
pub struct LinkerPluginToWindowsNotSupported;
pub(crate) struct LinkerPluginToWindowsNotSupported;

#[derive(Diagnostic)]
#[diag(session_profile_use_file_does_not_exist)]
pub struct ProfileUseFileDoesNotExist<'a> {
pub path: &'a std::path::Path,
pub(crate) struct ProfileUseFileDoesNotExist<'a> {
pub(crate) path: &'a std::path::Path,
}

#[derive(Diagnostic)]
#[diag(session_profile_sample_use_file_does_not_exist)]
pub struct ProfileSampleUseFileDoesNotExist<'a> {
pub path: &'a std::path::Path,
pub(crate) struct ProfileSampleUseFileDoesNotExist<'a> {
pub(crate) path: &'a std::path::Path,
}

#[derive(Diagnostic)]
#[diag(session_target_requires_unwind_tables)]
pub struct TargetRequiresUnwindTables;
pub(crate) struct TargetRequiresUnwindTables;

#[derive(Diagnostic)]
#[diag(session_instrumentation_not_supported)]
pub struct InstrumentationNotSupported {
pub us: String,
pub(crate) struct InstrumentationNotSupported {
pub(crate) us: String,
}

#[derive(Diagnostic)]
#[diag(session_sanitizer_not_supported)]
pub struct SanitizerNotSupported {
pub us: String,
pub(crate) struct SanitizerNotSupported {
pub(crate) us: String,
}

#[derive(Diagnostic)]
#[diag(session_sanitizers_not_supported)]
pub struct SanitizersNotSupported {
pub us: String,
pub(crate) struct SanitizersNotSupported {
pub(crate) us: String,
}

#[derive(Diagnostic)]
#[diag(session_cannot_mix_and_match_sanitizers)]
pub struct CannotMixAndMatchSanitizers {
pub first: String,
pub second: String,
pub(crate) struct CannotMixAndMatchSanitizers {
pub(crate) first: String,
pub(crate) second: String,
}

#[derive(Diagnostic)]
#[diag(session_cannot_enable_crt_static_linux)]
pub struct CannotEnableCrtStaticLinux;
pub(crate) struct CannotEnableCrtStaticLinux;

#[derive(Diagnostic)]
#[diag(session_sanitizer_cfi_requires_lto)]
pub struct SanitizerCfiRequiresLto;
pub(crate) struct SanitizerCfiRequiresLto;

#[derive(Diagnostic)]
#[diag(session_sanitizer_cfi_requires_single_codegen_unit)]
pub struct SanitizerCfiRequiresSingleCodegenUnit;
pub(crate) struct SanitizerCfiRequiresSingleCodegenUnit;

#[derive(Diagnostic)]
#[diag(session_sanitizer_cfi_canonical_jump_tables_requires_cfi)]
pub struct SanitizerCfiCanonicalJumpTablesRequiresCfi;
pub(crate) struct SanitizerCfiCanonicalJumpTablesRequiresCfi;

#[derive(Diagnostic)]
#[diag(session_sanitizer_cfi_generalize_pointers_requires_cfi)]
pub struct SanitizerCfiGeneralizePointersRequiresCfi;
pub(crate) struct SanitizerCfiGeneralizePointersRequiresCfi;

#[derive(Diagnostic)]
#[diag(session_sanitizer_cfi_normalize_integers_requires_cfi)]
pub struct SanitizerCfiNormalizeIntegersRequiresCfi;
pub(crate) struct SanitizerCfiNormalizeIntegersRequiresCfi;

#[derive(Diagnostic)]
#[diag(session_split_lto_unit_requires_lto)]
pub struct SplitLtoUnitRequiresLto;
pub(crate) struct SplitLtoUnitRequiresLto;

#[derive(Diagnostic)]
#[diag(session_unstable_virtual_function_elimination)]
pub struct UnstableVirtualFunctionElimination;
pub(crate) struct UnstableVirtualFunctionElimination;

#[derive(Diagnostic)]
#[diag(session_unsupported_dwarf_version)]
pub struct UnsupportedDwarfVersion {
pub dwarf_version: u32,
pub(crate) struct UnsupportedDwarfVersion {
pub(crate) dwarf_version: u32,
}

#[derive(Diagnostic)]
#[diag(session_target_stack_protector_not_supported)]
pub struct StackProtectorNotSupportedForTarget<'a> {
pub stack_protector: StackProtector,
pub target_triple: &'a TargetTriple,
pub(crate) struct StackProtectorNotSupportedForTarget<'a> {
pub(crate) stack_protector: StackProtector,
pub(crate) target_triple: &'a TargetTriple,
}

#[derive(Diagnostic)]
@@ -172,58 +172,58 @@ pub(crate) struct BranchProtectionRequiresAArch64;

#[derive(Diagnostic)]
#[diag(session_split_debuginfo_unstable_platform)]
pub struct SplitDebugInfoUnstablePlatform {
pub debuginfo: SplitDebuginfo,
pub(crate) struct SplitDebugInfoUnstablePlatform {
pub(crate) debuginfo: SplitDebuginfo,
}

#[derive(Diagnostic)]
#[diag(session_file_is_not_writeable)]
pub struct FileIsNotWriteable<'a> {
pub file: &'a std::path::Path,
pub(crate) struct FileIsNotWriteable<'a> {
pub(crate) file: &'a std::path::Path,
}

#[derive(Diagnostic)]
#[diag(session_file_write_fail)]
pub(crate) struct FileWriteFail<'a> {
pub path: &'a std::path::Path,
pub err: String,
pub(crate) path: &'a std::path::Path,
pub(crate) err: String,
}

#[derive(Diagnostic)]
#[diag(session_crate_name_does_not_match)]
pub struct CrateNameDoesNotMatch {
pub(crate) struct CrateNameDoesNotMatch {
#[primary_span]
pub span: Span,
pub s: Symbol,
pub name: Symbol,
pub(crate) span: Span,
pub(crate) s: Symbol,
pub(crate) name: Symbol,
}

#[derive(Diagnostic)]
#[diag(session_crate_name_invalid)]
pub struct CrateNameInvalid<'a> {
pub s: &'a str,
pub(crate) struct CrateNameInvalid<'a> {
pub(crate) s: &'a str,
}

#[derive(Diagnostic)]
#[diag(session_crate_name_empty)]
pub struct CrateNameEmpty {
pub(crate) struct CrateNameEmpty {
#[primary_span]
pub span: Option<Span>,
pub(crate) span: Option<Span>,
}

#[derive(Diagnostic)]
#[diag(session_invalid_character_in_create_name)]
pub struct InvalidCharacterInCrateName {
pub(crate) struct InvalidCharacterInCrateName {
#[primary_span]
pub span: Option<Span>,
pub character: char,
pub crate_name: Symbol,
pub(crate) span: Option<Span>,
pub(crate) character: char,
pub(crate) crate_name: Symbol,
#[subdiagnostic]
pub crate_name_help: Option<InvalidCrateNameHelp>,
pub(crate) crate_name_help: Option<InvalidCrateNameHelp>,
}

#[derive(Subdiagnostic)]
pub enum InvalidCrateNameHelp {
pub(crate) enum InvalidCrateNameHelp {
#[help(session_invalid_character_in_create_name_help)]
AddCrateName,
}
@@ -232,9 +232,9 @@ pub enum InvalidCrateNameHelp {
#[multipart_suggestion(session_expr_parentheses_needed, applicability = "machine-applicable")]
pub struct ExprParenthesesNeeded {
#[suggestion_part(code = "(")]
pub left: Span,
left: Span,
#[suggestion_part(code = ")")]
pub right: Span,
right: Span,
}

impl ExprParenthesesNeeded {
@@ -245,13 +245,13 @@ impl ExprParenthesesNeeded {

#[derive(Diagnostic)]
#[diag(session_skipping_const_checks)]
pub struct SkippingConstChecks {
pub(crate) struct SkippingConstChecks {
#[subdiagnostic]
pub unleashed_features: Vec<UnleashedFeatureHelp>,
pub(crate) unleashed_features: Vec<UnleashedFeatureHelp>,
}

#[derive(Subdiagnostic)]
pub enum UnleashedFeatureHelp {
pub(crate) enum UnleashedFeatureHelp {
#[help(session_unleashed_feature_help_named)]
Named {
#[primary_span]
@@ -267,101 +267,101 @@ pub enum UnleashedFeatureHelp {

#[derive(Diagnostic)]
#[diag(session_invalid_literal_suffix)]
pub(crate) struct InvalidLiteralSuffix<'a> {
struct InvalidLiteralSuffix<'a> {
#[primary_span]
#[label]
pub span: Span,
span: Span,
// FIXME(#100717)
pub kind: &'a str,
pub suffix: Symbol,
kind: &'a str,
suffix: Symbol,
}

#[derive(Diagnostic)]
#[diag(session_invalid_int_literal_width)]
#[help]
pub(crate) struct InvalidIntLiteralWidth {
struct InvalidIntLiteralWidth {
#[primary_span]
pub span: Span,
pub width: String,
span: Span,
width: String,
}

#[derive(Diagnostic)]
#[diag(session_invalid_num_literal_base_prefix)]
#[note]
pub(crate) struct InvalidNumLiteralBasePrefix {
struct InvalidNumLiteralBasePrefix {
#[primary_span]
#[suggestion(applicability = "maybe-incorrect", code = "{fixed}")]
pub span: Span,
pub fixed: String,
span: Span,
fixed: String,
}

#[derive(Diagnostic)]
#[diag(session_invalid_num_literal_suffix)]
#[help]
pub(crate) struct InvalidNumLiteralSuffix {
struct InvalidNumLiteralSuffix {
#[primary_span]
#[label]
pub span: Span,
pub suffix: String,
span: Span,
suffix: String,
}

#[derive(Diagnostic)]
#[diag(session_invalid_float_literal_width)]
#[help]
pub(crate) struct InvalidFloatLiteralWidth {
struct InvalidFloatLiteralWidth {
#[primary_span]
pub span: Span,
pub width: String,
span: Span,
width: String,
}

#[derive(Diagnostic)]
#[diag(session_invalid_float_literal_suffix)]
#[help]
pub(crate) struct InvalidFloatLiteralSuffix {
struct InvalidFloatLiteralSuffix {
#[primary_span]
#[label]
pub span: Span,
pub suffix: String,
span: Span,
suffix: String,
}

#[derive(Diagnostic)]
#[diag(session_int_literal_too_large)]
#[note]
pub(crate) struct IntLiteralTooLarge {
struct IntLiteralTooLarge {
#[primary_span]
pub span: Span,
pub limit: String,
span: Span,
limit: String,
}

#[derive(Diagnostic)]
#[diag(session_hexadecimal_float_literal_not_supported)]
pub(crate) struct HexadecimalFloatLiteralNotSupported {
struct HexadecimalFloatLiteralNotSupported {
#[primary_span]
#[label(session_not_supported)]
pub span: Span,
span: Span,
}

#[derive(Diagnostic)]
#[diag(session_octal_float_literal_not_supported)]
pub(crate) struct OctalFloatLiteralNotSupported {
struct OctalFloatLiteralNotSupported {
#[primary_span]
#[label(session_not_supported)]
pub span: Span,
span: Span,
}

#[derive(Diagnostic)]
#[diag(session_binary_float_literal_not_supported)]
pub(crate) struct BinaryFloatLiteralNotSupported {
struct BinaryFloatLiteralNotSupported {
#[primary_span]
#[label(session_not_supported)]
pub span: Span,
span: Span,
}

#[derive(Diagnostic)]
#[diag(session_unsupported_crate_type_for_target)]
pub struct UnsupportedCrateTypeForTarget<'a> {
pub crate_type: CrateType,
pub target_triple: &'a TargetTriple,
pub(crate) struct UnsupportedCrateTypeForTarget<'a> {
pub(crate) crate_type: CrateType,
pub(crate) target_triple: &'a TargetTriple,
}

pub fn report_lit_error(
@@ -443,16 +443,16 @@ pub fn report_lit_error(

#[derive(Diagnostic)]
#[diag(session_optimization_fuel_exhausted)]
pub struct OptimisationFuelExhausted {
pub msg: String,
pub(crate) struct OptimisationFuelExhausted {
pub(crate) msg: String,
}

#[derive(Diagnostic)]
#[diag(session_incompatible_linker_flavor)]
#[note]
pub struct IncompatibleLinkerFlavor {
pub flavor: &'static str,
pub compatible_list: String,
pub(crate) struct IncompatibleLinkerFlavor {
pub(crate) flavor: &'static str,
pub(crate) compatible_list: String,
}

#[derive(Diagnostic)]
@@ -465,6 +465,6 @@ pub(crate) struct FunctionReturnThunkExternRequiresNonLargeCodeModel;

#[derive(Diagnostic)]
#[diag(session_failed_to_create_profiler)]
pub struct FailedToCreateProfiler {
pub err: String,
pub(crate) struct FailedToCreateProfiler {
pub(crate) err: String,
}
@@ -3209,71 +3209,69 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
}
};

// Don't print the tuple of capture types
'print: {
if !is_upvar_tys_infer_tuple {
let ty_str = tcx.short_ty_string(ty, &mut long_ty_file);
let msg = format!("required because it appears within the type `{ty_str}`");
match ty.kind() {
ty::Adt(def, _) => match tcx.opt_item_ident(def.did()) {
Some(ident) => err.span_note(ident.span, msg),
None => err.note(msg),
},
ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) => {
// If the previous type is async fn, this is the future generated by the body of an async function.
// Avoid printing it twice (it was already printed in the `ty::Coroutine` arm below).
let is_future = tcx.ty_is_opaque_future(ty);
debug!(
?obligated_types,
?is_future,
"note_obligation_cause_code: check for async fn"
);
if is_future
&& obligated_types.last().is_some_and(|ty| match ty.kind() {
ty::Coroutine(last_def_id, ..) => {
tcx.coroutine_is_async(*last_def_id)
}
_ => false,
})
{
break 'print;
}
err.span_note(tcx.def_span(def_id), msg)
if !is_upvar_tys_infer_tuple {
let ty_str = tcx.short_ty_string(ty, &mut long_ty_file);
let msg = format!("required because it appears within the type `{ty_str}`");
match ty.kind() {
ty::Adt(def, _) => match tcx.opt_item_ident(def.did()) {
Some(ident) => {
err.span_note(ident.span, msg);
}
ty::CoroutineWitness(def_id, args) => {
use std::fmt::Write;
None => {
err.note(msg);
}
},
ty::Alias(ty::Opaque, ty::AliasTy { def_id, .. }) => {
// If the previous type is async fn, this is the future generated by the body of an async function.
// Avoid printing it twice (it was already printed in the `ty::Coroutine` arm below).
let is_future = tcx.ty_is_opaque_future(ty);
debug!(
?obligated_types,
?is_future,
"note_obligation_cause_code: check for async fn"
);
if is_future
&& obligated_types.last().is_some_and(|ty| match ty.kind() {
ty::Coroutine(last_def_id, ..) => {
tcx.coroutine_is_async(*last_def_id)
}
_ => false,
})
{
// See comment above; skip printing twice.
} else {
err.span_note(tcx.def_span(def_id), msg);
}
}
ty::Coroutine(def_id, _) => {
let sp = tcx.def_span(def_id);

// FIXME: this is kind of an unusual format for rustc, can we make it more clear?
// Maybe we should just remove this note altogether?
// FIXME: only print types which don't meet the trait requirement
let mut msg =
"required because it captures the following types: ".to_owned();
for bty in tcx.coroutine_hidden_types(*def_id) {
let ty = bty.instantiate(tcx, args);
write!(msg, "`{ty}`, ").unwrap();
}
err.note(msg.trim_end_matches(", ").to_string())
}
ty::Coroutine(def_id, _) => {
let sp = tcx.def_span(def_id);

// Special-case this to say "async block" instead of `[static coroutine]`.
let kind = tcx.coroutine_kind(def_id).unwrap();
err.span_note(
sp,
with_forced_trimmed_paths!(format!(
"required because it's used within this {kind:#}",
)),
)
}
ty::Closure(def_id, _) => err.span_note(
// Special-case this to say "async block" instead of `[static coroutine]`.
let kind = tcx.coroutine_kind(def_id).unwrap();
err.span_note(
sp,
with_forced_trimmed_paths!(format!(
"required because it's used within this {kind:#}",
)),
);
}
ty::CoroutineWitness(..) => {
// Skip printing coroutine-witnesses, since we'll drill into
// the bad field in another derived obligation cause.
}
ty::Closure(def_id, _) | ty::CoroutineClosure(def_id, _) => {
err.span_note(
tcx.def_span(def_id),
"required because it's used within this closure",
),
ty::Str => err.note("`str` is considered to contain a `[u8]` slice for auto trait purposes"),
_ => err.note(msg),
};
}
);
}
ty::Str => {
err.note("`str` is considered to contain a `[u8]` slice for auto trait purposes");
}
_ => {
err.note(msg);
}
};
}

obligated_types.push(ty);
@@ -57,11 +57,11 @@ pub(crate) fn read_target_uint(mut bytes: &[u8]) -> Result<u128, Error> {
let mut buf = [0u8; std::mem::size_of::<u128>()];
match MachineInfo::target_endianess() {
Endian::Little => {
bytes.read(&mut buf)?;
bytes.read_exact(&mut buf[..bytes.len()])?;
Ok(u128::from_le_bytes(buf))
}
Endian::Big => {
bytes.read(&mut buf[16 - bytes.len()..])?;
bytes.read_exact(&mut buf[16 - bytes.len()..])?;
Ok(u128::from_be_bytes(buf))
}
}
@@ -72,11 +72,11 @@ pub(crate) fn read_target_int(mut bytes: &[u8]) -> Result<i128, Error> {
let mut buf = [0u8; std::mem::size_of::<i128>()];
match MachineInfo::target_endianess() {
Endian::Little => {
bytes.read(&mut buf)?;
bytes.read_exact(&mut buf[..bytes.len()])?;
Ok(i128::from_le_bytes(buf))
}
Endian::Big => {
bytes.read(&mut buf[16 - bytes.len()..])?;
bytes.read_exact(&mut buf[16 - bytes.len()..])?;
Ok(i128::from_be_bytes(buf))
}
}
@@ -1101,7 +1101,7 @@ where
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push", "append")]
#[rustc_confusables("push", "append", "put")]
pub fn insert(&mut self, k: K, v: V) -> Option<V> {
self.base.insert(k, v)
}
@@ -885,7 +885,7 @@ where
/// ```
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push", "append")]
#[rustc_confusables("push", "append", "put")]
pub fn insert(&mut self, value: T) -> bool {
self.base.insert(value)
}
@@ -328,10 +328,9 @@ impl<R: ?Sized + Read> Read for BufReader<R> {
self.discard_buffer();
return self.inner.read_vectored(bufs);
}
let nread = {
let mut rem = self.fill_buf()?;
rem.read_vectored(bufs)?
};
let mut rem = self.fill_buf()?;
let nread = rem.read_vectored(bufs)?;

self.consume(nread);
Ok(nread)
}
@@ -8,7 +8,8 @@
target_os = "macos",
target_os = "watchos",
target_os = "netbsd",
target_os = "openbsd"
target_os = "openbsd",
target_os = "nto"
))]
use super::{peer_cred, UCred};
#[cfg(any(doc, target_os = "android", target_os = "linux"))]
@@ -234,7 +235,8 @@ impl UnixStream {
target_os = "macos",
target_os = "watchos",
target_os = "netbsd",
target_os = "openbsd"
target_os = "openbsd",
target_os = "nto"
))]
pub fn peer_cred(&self) -> io::Result<UCred> {
peer_cred(self)
@@ -30,7 +30,8 @@ pub(super) use self::impl_linux::peer_cred;
target_os = "dragonfly",
target_os = "freebsd",
target_os = "openbsd",
target_os = "netbsd"
target_os = "netbsd",
target_os = "nto"
))]
pub(super) use self::impl_bsd::peer_cred;
@@ -61,14 +61,16 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
}
}

args.extend(strings(&["--", "--cap-lints", "warn"]));
args.extend(strings(&["--"]));

if deny.is_empty() && forbid.is_empty() {
args.extend(strings(&["--cap-lints", "warn"]));
}

let all_args = std::env::args().collect::<Vec<_>>();
args.extend(get_clippy_rules_in_order(&all_args, allow, deny, warn, forbid));

args.extend(ignored_lints.iter().map(|lint| format!("-Aclippy::{}", lint)));
let mut clippy_lint_levels: Vec<String> = Vec::new();
allow.iter().for_each(|v| clippy_lint_levels.push(format!("-A{}", v)));
deny.iter().for_each(|v| clippy_lint_levels.push(format!("-D{}", v)));
warn.iter().for_each(|v| clippy_lint_levels.push(format!("-W{}", v)));
forbid.iter().for_each(|v| clippy_lint_levels.push(format!("-F{}", v)));
args.extend(clippy_lint_levels);
args.extend(builder.config.free_args.clone());
args
} else {
@@ -76,6 +78,32 @@ fn args(builder: &Builder<'_>) -> Vec<String> {
}
}

/// We need to keep the order of the given clippy lint rules before passing them.
/// Since clap doesn't offer any useful interface for this purpose out of the box,
/// we have to handle it manually.
pub(crate) fn get_clippy_rules_in_order(
all_args: &[String],
allow_rules: &[String],
deny_rules: &[String],
warn_rules: &[String],
forbid_rules: &[String],
) -> Vec<String> {
let mut result = vec![];

for (prefix, item) in
[("-A", allow_rules), ("-D", deny_rules), ("-W", warn_rules), ("-F", forbid_rules)]
{
item.iter().for_each(|v| {
let rule = format!("{prefix}{v}");
let position = all_args.iter().position(|t| t == &rule).unwrap();
result.push((position, rule));
});
}

result.sort_by_key(|&(position, _)| position);
result.into_iter().map(|v| v.1).collect()
}

fn cargo_subcommand(kind: Kind) -> &'static str {
match kind {
Kind::Check => "check",
@@ -1,4 +1,5 @@
use super::{flags::Flags, ChangeIdWrapper, Config};
use crate::core::build_steps::check::get_clippy_rules_in_order;
use crate::core::config::{LldMode, TomlConfig};

use clap::CommandFactory;
@@ -11,12 +12,13 @@ use std::{
};

fn parse(config: &str) -> Config {
let config = format!("{config} \r\n build.rustc = \"/does-not-exists\" ");
Config::parse_inner(
&[
"check".to_owned(),
"--config=/does/not/exist".to_owned(),
"--skip-stage0-validation".to_owned(),
"check".to_string(),
"--set=build.rustc=/does/not/exist".to_string(),
"--set=build.cargo=/does/not/exist".to_string(),
"--config=/does/not/exist".to_string(),
"--skip-stage0-validation".to_string(),
],
|&_| toml::from_str(&config).unwrap(),
)
@@ -169,7 +171,10 @@ fn override_toml_duplicate() {
Config::parse_inner(
&[
"check".to_owned(),
"--set=build.rustc=/does/not/exist".to_string(),
"--set=build.cargo=/does/not/exist".to_string(),
"--config=/does/not/exist".to_owned(),
"--skip-stage0-validation".to_owned(),
"--set=change-id=1".to_owned(),
"--set=change-id=2".to_owned(),
],
@@ -192,7 +197,15 @@ fn profile_user_dist() {
.and_then(|table: toml::Value| TomlConfig::deserialize(table))
.unwrap()
}
Config::parse_inner(&["check".to_owned()], get_toml);
Config::parse_inner(
&[
"check".to_owned(),
"--set=build.rustc=/does/not/exist".to_string(),
"--set=build.cargo=/does/not/exist".to_string(),
"--skip-stage0-validation".to_string(),
],
get_toml,
);
}

#[test]
@@ -254,3 +267,34 @@ fn parse_change_id_with_unknown_field() {
let change_id_wrapper: ChangeIdWrapper = toml::from_str(config).unwrap();
assert_eq!(change_id_wrapper.inner, Some(3461));
}

#[test]
fn order_of_clippy_rules() {
let args = vec![
"clippy".to_string(),
"--fix".to_string(),
"--allow-dirty".to_string(),
"--allow-staged".to_string(),
"-Aclippy:all".to_string(),
"-Wclippy::style".to_string(),
"-Aclippy::foo1".to_string(),
"-Aclippy::foo2".to_string(),
];
let config = Config::parse(&args);

let actual = match &config.cmd {
crate::Subcommand::Clippy { allow, deny, warn, forbid, .. } => {
get_clippy_rules_in_order(&args, &allow, &deny, &warn, &forbid)
}
_ => panic!("invalid subcommand"),
};

let expected = vec![
"-Aclippy:all".to_string(),
"-Wclippy::style".to_string(),
"-Aclippy::foo1".to_string(),
"-Aclippy::foo2".to_string(),
];

assert_eq!(expected, actual);
}
@@ -38,7 +38,6 @@ LL | async fn bar2<T>(_: T) -> ! {
LL | | panic!()
LL | | }
| |_^
= note: required because it captures the following types: `impl Future<Output = !>`
note: required because it's used within this `async` fn body
--> $DIR/async-await-let-else.rs:18:32
|
@@ -58,7 +58,6 @@ note: required because it appears within the type `impl Future<Output = Arc<RefC
|
LL | fn make_non_send_future2() -> impl Future<Output = Arc<RefCell<i32>>> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: required because it captures the following types: `impl Future<Output = Arc<RefCell<i32>>>`, `Ready<i32>`
note: required because it's used within this `async` block
--> $DIR/issue-68112.rs:57:20
|
@@ -25,7 +25,6 @@ LL | async fn baz<T>(_c: impl FnMut() -> T) where T: Future<Output=()> {
| ___________________________________________________________________^
LL | | }
| |_^
= note: required because it captures the following types: `impl Future<Output = ()>`
note: required because it's used within this `async` block
--> $DIR/issue-70935-complex-spans.rs:18:5
|
@@ -63,7 +62,6 @@ LL | async fn baz<T>(_c: impl FnMut() -> T) where T: Future<Output=()> {
| ___________________________________________________________________^
LL | | }
| |_^
= note: required because it captures the following types: `impl Future<Output = ()>`
note: required because it's used within this `async` block
--> $DIR/issue-70935-complex-spans.rs:18:5
|
@@ -12,7 +12,6 @@ LL | pub async fn run() {
| ------------------ within this `impl Future<Output = ()>`
|
= help: within `impl Future<Output = ()>`, the trait `Send` is not implemented for `MutexGuard<'_, ()>`, which is required by `impl Future<Output = ()>: Send`
= note: required because it captures the following types: `Arc<Mutex<()>>`, `MutexGuard<'_, ()>`, `impl Future<Output = ()>`
note: required because it's used within this `async` fn body
--> $DIR/auxiliary/issue_67893.rs:9:20
|
@@ -8,7 +8,6 @@ fn main() {
//~| NOTE cannot be sent
//~| NOTE bound introduced by
//~| NOTE appears within the type
//~| NOTE captures the following types
}

fn gimme_send<T: Send>(t: T) {
@@ -11,9 +11,8 @@ LL | async fn foo() {
|
= help: within `impl Future<Output = ()>`, the trait `Send` is not implemented for `NotSend`, which is required by `impl Future<Output = ()>: Send`
= note: required because it appears within the type `(NotSend,)`
= note: required because it captures the following types: `(NotSend,)`, `impl Future<Output = ()>`
note: required because it's used within this `async` fn body
--> $DIR/partial-drop-partial-reinit.rs:28:16
--> $DIR/partial-drop-partial-reinit.rs:27:16
|
LL | async fn foo() {
| ________________^
@@ -25,7 +24,7 @@ LL | | bar().await;
LL | | }
| |_^
note: required by a bound in `gimme_send`
--> $DIR/partial-drop-partial-reinit.rs:14:18
--> $DIR/partial-drop-partial-reinit.rs:13:18
|
LL | fn gimme_send<T: Send>(t: T) {
| ^^^^ required by this bound in `gimme_send`
@@ -65,7 +65,6 @@ fn test2() {
//~^ ERROR `RefCell<i32>` cannot be shared between threads safely
//~| NOTE `RefCell<i32>` cannot be shared between threads safely
//~| NOTE required for
//~| NOTE captures the following types
//~| NOTE use `std::sync::RwLock` instead
}

@@ -44,7 +44,6 @@ note: required because it appears within the type `impl Coroutine<Return = Arc<R
|
LL | fn make_non_send_coroutine2() -> impl Coroutine<Return = Arc<RefCell<i32>>> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: required because it captures the following types: `impl Coroutine<Return = Arc<RefCell<i32>>>`
note: required because it's used within this coroutine
--> $DIR/issue-68112.rs:60:20
|
@@ -43,7 +43,6 @@ note: required because it appears within the type `Opaque(DefId(0:36 ~ coroutine
|
LL | fn make_non_send_coroutine2() -> impl Coroutine<Return = Arc<RefCell<i32>>> {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: required because it captures the following types: `Opaque(DefId(0:36 ~ coroutine_print_verbose_1[75fb]::make_non_send_coroutine2::{opaque#0}), [])`
note: required because it's used within this coroutine
--> $DIR/coroutine-print-verbose-1.rs:52:20
|
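Several of the clippy fixes above replace `Read::read` with `Read::read_exact` (in `read_target_uint`/`read_target_int`, plus the analogous `write`/`write_all` change in the metadata decoder). The following is a standalone sketch of the difference using toy data rather than the compiler's buffers; the lint class involved is assumed to be clippy's `unused_io_amount`.

```rust
use std::io::Read;

fn main() -> std::io::Result<()> {
    let bytes: &[u8] = &[0x2a, 0x00];
    let mut buf = [0u8; 16];

    // `read` may fill fewer bytes than the buffer holds and only reports
    // how many it copied; ignoring that count is what clippy complains about.
    let mut src = bytes;
    let n = src.read(&mut buf)?;
    assert_eq!(n, 2);

    // `read_exact` over a slice sized to the available input either fills it
    // completely or returns an error, so a short read cannot go unnoticed.
    let mut src = bytes;
    src.read_exact(&mut buf[..bytes.len()])?;
    assert_eq!(u16::from_le_bytes([buf[0], buf[1]]), 42);

    Ok(())
}
```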