Auto merge of #129750 - GuillaumeGomez:rollup-gphsb7y, r=GuillaumeGomez
Rollup of 7 pull requests

Successful merges:

 - #123940 (debug-fmt-detail option)
 - #128166 (Improved `checked_isqrt` and `isqrt` methods)
 - #128970 (Add `-Zlint-llvm-ir`)
 - #129316 (riscv64imac: allow shadow call stack sanitizer)
 - #129690 (Add `needs-unwind` compiletest directive to `libtest-thread-limit` and replace some `Path` with `path` in `run-make`)
 - #129732 (Add `unreachable_pub`, round 3)
 - #129743 (Fix rustdoc clippy lints)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 0d634185df
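For orientation before the hunks: the `debug-fmt-detail`/`fmt_debug` change below makes `#[derive(Debug)]` expand to different code depending on the requested detail level — the usual output for full, only the type name via `Formatter::write_str` for shallow, and a no-op for none. A rough hand-written Rust sketch of those three behaviours follows; the flag spelling `-Zfmt-debug` and the `Point` type are illustrative assumptions, not part of this diff.

```rust
use std::fmt;

struct Point {
    x: i32,
    y: i32,
}

impl fmt::Debug for Point {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Default ("full"): the usual field-by-field output that
        // `#[derive(Debug)]` produces today.
        f.debug_struct("Point").field("x", &self.x).field("y", &self.y).finish()

        // With the "shallow" level, the derive in this PR instead emits only
        // the type name:
        //     f.write_str("Point")
        //
        // With "none", the derived impl becomes a no-op:
        //     Ok(())
    }
}

fn main() {
    // Prints `Point { x: 1, y: 2 }` under the default detail level.
    println!("{:?}", Point { x: 1, y: 2 });
}
```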
@@ -4,9 +4,10 @@
use rustc_ast::visit::Visitor;
use rustc_ast::*;
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir as hir;
use rustc_session::config::FmtDebug;
use rustc_span::symbol::{kw, Ident};
use rustc_span::{sym, Span, Symbol};
use {rustc_ast as ast, rustc_hir as hir};

use super::LoweringContext;

@@ -243,7 +244,10 @@ fn make_argument<'hir>(
hir::LangItem::FormatArgument,
match ty {
Format(Display) => sym::new_display,
Format(Debug) => sym::new_debug,
Format(Debug) => match ctx.tcx.sess.opts.unstable_opts.fmt_debug {
FmtDebug::Full | FmtDebug::Shallow => sym::new_debug,
FmtDebug::None => sym::new_debug_noop,
},
Format(LowerExp) => sym::new_lower_exp,
Format(UpperExp) => sym::new_upper_exp,
Format(Octal) => sym::new_octal,
@@ -1,5 +1,6 @@
use rustc_ast::{self as ast, EnumDef, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_session::config::FmtDebug;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
use thin_vec::{thin_vec, ThinVec};
@@ -49,6 +50,11 @@ fn show_substructure(cx: &ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) ->
// We want to make sure we have the ctxt set so that we can use unstable methods
let span = cx.with_def_site_ctxt(span);

let fmt_detail = cx.sess.opts.unstable_opts.fmt_debug;
if fmt_detail == FmtDebug::None {
return BlockOrExpr::new_expr(cx.expr_ok(span, cx.expr_tuple(span, ThinVec::new())));
}

let (ident, vdata, fields) = match substr.fields {
Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
EnumMatching(_, v, fields) => (v.ident, &v.data, fields),
@@ -61,6 +67,13 @@ fn show_substructure(cx: &ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) ->
let name = cx.expr_str(span, ident.name);
let fmt = substr.nonselflike_args[0].clone();

// Fieldless enums have been special-cased earlier
if fmt_detail == FmtDebug::Shallow {
let fn_path_write_str = cx.std_path(&[sym::fmt, sym::Formatter, sym::write_str]);
let expr = cx.expr_call_global(span, fn_path_write_str, thin_vec![fmt, name]);
return BlockOrExpr::new_expr(expr);
}

// Struct and tuples are similar enough that we use the same code for both,
// with some extra pieces for structs due to the field names.
let (is_struct, args_per_field) = match vdata {
@@ -571,6 +571,7 @@ pub(crate) unsafe fn llvm_optimize(
cgcx.opts.cg.linker_plugin_lto.enabled(),
config.no_prepopulate_passes,
config.verify_llvm_ir,
config.lint_llvm_ir,
using_thin_buffers,
config.merge_functions,
unroll_loops,
@@ -2225,6 +2225,7 @@ pub fn LLVMRustOptimize<'a>(
IsLinkerPluginLTO: bool,
NoPrepopulatePasses: bool,
VerifyIR: bool,
LintIR: bool,
UseThinLTOBuffers: bool,
MergeFunctions: bool,
UnrollLoops: bool,
@@ -112,6 +112,7 @@ pub struct ModuleConfig {
// Miscellaneous flags. These are mostly copied from command-line
// options.
pub verify_llvm_ir: bool,
pub lint_llvm_ir: bool,
pub no_prepopulate_passes: bool,
pub no_builtins: bool,
pub time_module: bool,
@@ -237,6 +238,7 @@ macro_rules! if_regular {
bc_cmdline: sess.target.bitcode_llvm_cmdline.to_string(),

verify_llvm_ir: sess.verify_llvm_ir(),
lint_llvm_ir: sess.opts.unstable_opts.lint_llvm_ir,
no_prepopulate_passes: sess.opts.cg.no_prepopulate_passes,
no_builtins: no_builtins || sess.target.no_builtins,
@@ -37,6 +37,8 @@ macro_rules! cfg_fn {
(sym::relocation_model, sym::cfg_relocation_model, cfg_fn!(cfg_relocation_model)),
(sym::sanitizer_cfi_generalize_pointers, sym::cfg_sanitizer_cfi, cfg_fn!(cfg_sanitizer_cfi)),
(sym::sanitizer_cfi_normalize_integers, sym::cfg_sanitizer_cfi, cfg_fn!(cfg_sanitizer_cfi)),
// this is consistent with naming of the compiler flag it's for
(sym::fmt_debug, sym::fmt_debug, cfg_fn!(fmt_debug)),
];

/// Find a gated cfg determined by the `pred`icate which is given the cfg's name.
@@ -471,6 +471,8 @@ pub fn internal(&self, feature: Symbol) -> bool {
(unstable, ffi_const, "1.45.0", Some(58328)),
/// Allows the use of `#[ffi_pure]` on foreign functions.
(unstable, ffi_pure, "1.45.0", Some(58329)),
/// Controlling the behavior of fmt::Debug
(unstable, fmt_debug, "CURRENT_RUSTC_VERSION", Some(129709)),
/// Allows using `#[repr(align(...))]` on function items
(unstable, fn_align, "1.53.0", Some(82232)),
/// Support delegating implementation of functions to other already implemented functions.
@@ -10,11 +10,11 @@
use rustc_session::config::{
build_configuration, build_session_options, rustc_optgroups, BranchProtection, CFGuard, Cfg,
CollapseMacroDebuginfo, CoverageLevel, CoverageOptions, DebugInfo, DumpMonoStatsFormat,
ErrorOutputType, ExternEntry, ExternLocation, Externs, FunctionReturn, InliningThreshold,
Input, InstrumentCoverage, InstrumentXRay, LinkSelfContained, LinkerPluginLto, LocationDetail,
LtoCli, NextSolverConfig, OomStrategy, Options, OutFileName, OutputType, OutputTypes, PAuthKey,
PacRet, Passes, PatchableFunctionEntry, Polonius, ProcMacroExecutionStrategy, Strip,
SwitchWithOptPath, SymbolManglingVersion, WasiExecModel,
ErrorOutputType, ExternEntry, ExternLocation, Externs, FmtDebug, FunctionReturn,
InliningThreshold, Input, InstrumentCoverage, InstrumentXRay, LinkSelfContained,
LinkerPluginLto, LocationDetail, LtoCli, NextSolverConfig, OomStrategy, Options, OutFileName,
OutputType, OutputTypes, PAuthKey, PacRet, Passes, PatchableFunctionEntry, Polonius,
ProcMacroExecutionStrategy, Strip, SwitchWithOptPath, SymbolManglingVersion, WasiExecModel,
};
use rustc_session::lint::Level;
use rustc_session::search_paths::SearchPath;
@@ -780,6 +780,7 @@ macro_rules! tracked {
tracked!(fewer_names, Some(true));
tracked!(fixed_x18, true);
tracked!(flatten_format_args, false);
tracked!(fmt_debug, FmtDebug::Shallow);
tracked!(force_unstable_if_unmarked, true);
tracked!(fuel, Some(("abc".to_string(), 99)));
tracked!(function_return, FunctionReturn::ThunkExtern);
@@ -794,6 +795,7 @@ macro_rules! tracked {
tracked!(instrument_xray, Some(InstrumentXRay::default()));
tracked!(link_directives, false);
tracked!(link_only, true);
tracked!(lint_llvm_ir, true);
tracked!(llvm_module_flag, vec![("bar".to_string(), 123, "max".to_string())]);
tracked!(llvm_plugins, vec![String::from("plugin_name")]);
tracked!(location_detail, LocationDetail { file: true, line: false, column: false });
@@ -713,7 +713,7 @@ extern "C" LLVMRustResult LLVMRustOptimize(
LLVMModuleRef ModuleRef, LLVMTargetMachineRef TMRef,
LLVMRustPassBuilderOptLevel OptLevelRust, LLVMRustOptStage OptStage,
bool IsLinkerPluginLTO, bool NoPrepopulatePasses, bool VerifyIR,
bool UseThinLTOBuffers, bool MergeFunctions, bool UnrollLoops,
bool LintIR, bool UseThinLTOBuffers, bool MergeFunctions, bool UnrollLoops,
bool SLPVectorize, bool LoopVectorize, bool DisableSimplifyLibCalls,
bool EmitLifetimeMarkers, LLVMRustSanitizerOptions *SanitizerOptions,
const char *PGOGenPath, const char *PGOUsePath, bool InstrumentCoverage,
@@ -842,6 +842,13 @@ extern "C" LLVMRustResult LLVMRustOptimize(
});
}

if (LintIR) {
PipelineStartEPCallbacks.push_back(
[](ModulePassManager &MPM, OptimizationLevel Level) {
MPM.addPass(createModuleToFunctionPassAdaptor(LintPass()));
});
}

if (InstrumentGCOV) {
PipelineStartEPCallbacks.push_back(
[](ModulePassManager &MPM, OptimizationLevel Level) {
@@ -55,7 +55,7 @@
///
/// See rustc dev guide for more examples on using the `#[derive(Diagnostic)]`:
/// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html>
pub fn diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
pub(super) fn diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
s.underscore_const(true);
DiagnosticDerive::new(s).into_tokens()
}
@@ -102,7 +102,7 @@ pub fn diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
///
/// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`:
/// <https://rustc-dev-guide.rust-lang.org/diagnostics/diagnostic-structs.html#reference>
pub fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
pub(super) fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
s.underscore_const(true);
LintDiagnosticDerive::new(s).into_tokens()
}
@@ -153,7 +153,7 @@ pub fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream {
///
/// diag.subdiagnostic(RawIdentifierSuggestion { span, applicability, ident });
/// ```
pub fn subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream {
pub(super) fn subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream {
s.underscore_const(true);
SubdiagnosticDerive::new().into_tokens(s)
}
@@ -6,6 +6,7 @@
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_span)]
#![feature(proc_macro_tracked_env)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use proc_macro::TokenStream;
@@ -1,7 +1,7 @@
use quote::quote;
use syn::parse_quote;

pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
s.add_bounds(synstructure::AddBounds::Generics);
s.bind_with(|_| synstructure::BindStyle::Move);
s.underscore_const(true);
@@ -307,7 +307,7 @@ pub fn #name<'tcx>(tcx: TyCtxt<'tcx>, key: crate::query::queries::#name::Key<'tc
});
}

pub fn rustc_queries(input: TokenStream) -> TokenStream {
pub(super) fn rustc_queries(input: TokenStream) -> TokenStream {
let queries = parse_macro_input!(input as List<Query>);

let mut query_stream = quote! {};
@@ -3,7 +3,9 @@
use syn::parse_quote;
use syn::spanned::Spanned;

pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn type_decodable_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
let decoder_ty = quote! { __D };
let bound = if s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
quote! { <I = ::rustc_middle::ty::TyCtxt<'tcx>> }
@@ -20,7 +22,9 @@ pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
decodable_body(s, decoder_ty)
}

pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn meta_decodable_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
s.add_impl_generic(parse_quote! { 'tcx });
}
@@ -32,7 +36,7 @@ pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
decodable_body(s, decoder_ty)
}

pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
let decoder_ty = quote! { __D };
s.add_impl_generic(parse_quote! { #decoder_ty: ::rustc_span::SpanDecoder });
s.add_bounds(synstructure::AddBounds::Generics);
@@ -41,7 +45,9 @@ pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke
decodable_body(s, decoder_ty)
}

pub fn decodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn decodable_generic_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
let decoder_ty = quote! { __D };
s.add_impl_generic(parse_quote! { #decoder_ty: ::rustc_serialize::Decoder });
s.add_bounds(synstructure::AddBounds::Generics);
@@ -123,7 +129,9 @@ fn decode_field(field: &syn::Field) -> proc_macro2::TokenStream {
quote_spanned! { field_span=> #decode_inner_method(#__decoder) }
}

pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn type_encodable_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
let bound = if s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
quote! { <I = ::rustc_middle::ty::TyCtxt<'tcx>> }
} else if s.ast().generics.type_params().any(|ty| ty.ident == "I") {
@@ -140,7 +148,9 @@ pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
encodable_body(s, encoder_ty, false)
}

pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn meta_encodable_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") {
s.add_impl_generic(parse_quote! { 'tcx });
}
@@ -152,7 +162,7 @@ pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:
encodable_body(s, encoder_ty, true)
}

pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
let encoder_ty = quote! { __E };
s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_span::SpanEncoder });
s.add_bounds(synstructure::AddBounds::Generics);
@@ -161,7 +171,9 @@ pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke
encodable_body(s, encoder_ty, false)
}

pub fn encodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn encodable_generic_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
let encoder_ty = quote! { __E };
s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder });
s.add_bounds(synstructure::AddBounds::Generics);
@@ -131,7 +131,7 @@ fn error(&mut self, span: Span, message: String) {
}
}

pub fn symbols(input: TokenStream) -> TokenStream {
pub(super) fn symbols(input: TokenStream) -> TokenStream {
let (mut output, errors) = symbols_with_errors(input);

// If we generated any errors, then report them as compiler_error!() macro calls.
@@ -1,7 +1,7 @@
use quote::{quote, ToTokens};
use syn::parse_quote;

pub fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
if let syn::Data::Union(_) = s.ast().data {
panic!("cannot derive on union")
}
@@ -1,7 +1,9 @@
use quote::quote;
use syn::parse_quote;

pub fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream {
pub(super) fn type_visitable_derive(
mut s: synstructure::Structure<'_>,
) -> proc_macro2::TokenStream {
if let syn::Data::Union(_) = s.ast().data {
panic!("cannot derive on union")
}
@@ -16,6 +16,7 @@
#![feature(proc_macro_internals)]
#![feature(rustdoc_internals)]
#![feature(trusted_len)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

extern crate proc_macro;
@@ -56,13 +56,13 @@ fn deref(&self) -> &[u8] {

impl MetadataBlob {
/// Runs the [`MemDecoder`] validation and if it passes, constructs a new [`MetadataBlob`].
pub fn new(slice: OwnedSlice) -> Result<Self, ()> {
pub(crate) fn new(slice: OwnedSlice) -> Result<Self, ()> {
if MemDecoder::new(&slice, 0).is_ok() { Ok(Self(slice)) } else { Err(()) }
}

/// Since this has passed the validation of [`MetadataBlob::new`], this returns bytes which are
/// known to pass the [`MemDecoder`] validation.
pub fn bytes(&self) -> &OwnedSlice {
pub(crate) fn bytes(&self) -> &OwnedSlice {
&self.0
}
}
@@ -332,12 +332,12 @@ fn tcx(&self) -> TyCtxt<'tcx> {
}

#[inline]
pub fn blob(&self) -> &'a MetadataBlob {
pub(crate) fn blob(&self) -> &'a MetadataBlob {
self.blob
}

#[inline]
pub fn cdata(&self) -> CrateMetadataRef<'a> {
fn cdata(&self) -> CrateMetadataRef<'a> {
debug_assert!(self.cdata.is_some(), "missing CrateMetadata in DecodeContext");
self.cdata.unwrap()
}
@@ -377,7 +377,7 @@ fn read_lazy_table<I, T>(&mut self, width: usize, len: usize) -> LazyTable<I, T>
}

#[inline]
pub fn read_raw_bytes(&mut self, len: usize) -> &[u8] {
fn read_raw_bytes(&mut self, len: usize) -> &[u8] {
self.opaque.read_raw_bytes(len)
}
}
@@ -17,7 +17,7 @@ pub(crate) enum DefPathHashMapRef<'tcx> {

impl DefPathHashMapRef<'_> {
#[inline]
pub fn def_path_hash_to_def_index(&self, def_path_hash: &DefPathHash) -> DefIndex {
pub(crate) fn def_path_hash_to_def_index(&self, def_path_hash: &DefPathHash) -> DefIndex {
match *self {
DefPathHashMapRef::OwnedFromMetadata(ref map) => {
map.get(&def_path_hash.local_hash()).unwrap()
@@ -2309,7 +2309,7 @@ fn encode_root_position(mut file: &File, pos: usize) -> Result<(), std::io::Erro
Ok(())
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
*providers = Providers {
doc_link_resolutions: |tcx, def_id| {
tcx.resolutions(())
@@ -61,8 +61,9 @@
use rustc_hir::definitions::DefPathHash;
use rustc_hir::{HirId, ItemLocalId, OwnerId};
pub use rustc_query_system::dep_graph::dep_node::DepKind;
pub use rustc_query_system::dep_graph::DepNode;
use rustc_query_system::dep_graph::FingerprintStyle;
pub use rustc_query_system::dep_graph::{DepContext, DepNode, DepNodeParams};
pub(crate) use rustc_query_system::dep_graph::{DepContext, DepNodeParams};
use rustc_span::symbol::Symbol;

use crate::mir::mono::MonoItem;
@@ -101,7 +102,7 @@ pub mod dep_kinds {

// This checks that the discriminants of the variants have been assigned consecutively
// from 0 so that they can be used as a dense index.
pub const DEP_KIND_VARIANTS: u16 = {
pub(crate) const DEP_KIND_VARIANTS: u16 = {
let deps = &[$(dep_kinds::$variant,)*];
let mut i = 0;
while i < deps.len() {
@@ -62,6 +62,7 @@
#![feature(try_blocks)]
#![feature(type_alias_impl_trait)]
#![feature(yeet_expr)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

#[cfg(test)]
@@ -18,9 +18,9 @@ pub struct BasicBlocks<'tcx> {
}

// Typically 95%+ of basic blocks have 4 or fewer predecessors.
pub type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;
type Predecessors = IndexVec<BasicBlock, SmallVec<[BasicBlock; 4]>>;

pub type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;
type SwitchSources = FxHashMap<(BasicBlock, BasicBlock), SmallVec<[Option<u128>; 1]>>;

#[derive(Clone, Default, Debug)]
struct Cache {
@@ -2,7 +2,7 @@
use rustc_middle::mir::*;

/// Convert an MIR function into a gsgdt Graph
pub fn mir_fn_to_generic_graph<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Graph {
pub(crate) fn mir_fn_to_generic_graph<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Graph {
let def_id = body.source.def_id();
let def_name = graphviz_safe_def_name(def_id);
let graph_name = format!("Mir_{def_name}");
@@ -243,7 +243,7 @@ fn hash<H: hash::Hasher>(&self, state: &mut H) {
}

impl InitMaskMaterialized {
pub const BLOCK_SIZE: u64 = 64;
const BLOCK_SIZE: u64 = 64;

fn new(size: Size, state: bool) -> Self {
let mut m = InitMaskMaterialized { blocks: vec![] };
@@ -396,7 +396,7 @@ pub fn items_in_deterministic_order(
// The codegen tests rely on items being process in the same order as
// they appear in the file, so for local items, we sort by node_id first
#[derive(PartialEq, Eq, PartialOrd, Ord)]
pub struct ItemSortKey<'tcx>(Option<usize>, SymbolName<'tcx>);
struct ItemSortKey<'tcx>(Option<usize>, SymbolName<'tcx>);

fn item_sort_key<'tcx>(tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>) -> ItemSortKey<'tcx> {
ItemSortKey(
@@ -2190,7 +2190,7 @@ struct DebugStat {
all_infer: usize,
}

pub fn go(fmt: &mut std::fmt::Formatter<'_>, tcx: TyCtxt<'_>) -> std::fmt::Result {
pub(crate) fn go(fmt: &mut std::fmt::Formatter<'_>, tcx: TyCtxt<'_>) -> std::fmt::Result {
let mut total = DebugStat {
total: 0,
lt_infer: 0,
@@ -1027,7 +1027,7 @@ fn emit_requires_unsafe_err(
}
}

pub fn check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) {
pub(crate) fn check_unsafety(tcx: TyCtxt<'_>, def: LocalDefId) {
// Closures and inline consts are handled by their owner, if it has a body
// Also, don't safety check custom MIR
if tcx.is_typeck_child(def.to_def_id()) || tcx.has_attr(def, sym::custom_mir) {
@@ -8,6 +8,7 @@
#![feature(if_let_guard)]
#![feature(let_chains)]
#![feature(try_blocks)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

mod build;
@@ -24,7 +24,7 @@
};
use crate::framework::BitSetExt;

pub type EntrySets<'tcx, A> = IndexVec<BasicBlock, <A as AnalysisDomain<'tcx>>::Domain>;
type EntrySets<'tcx, A> = IndexVec<BasicBlock, <A as AnalysisDomain<'tcx>>::Domain>;

/// A dataflow analysis that has converged to fixpoint.
#[derive(Clone)]
@@ -510,7 +510,7 @@ fn kill(&mut self, elem: T) {

// NOTE: DO NOT CHANGE VARIANT ORDER. The derived `Ord` impls rely on the current order.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Effect {
enum Effect {
/// The "before" effect (e.g., `apply_before_statement_effect`) for a statement (or
/// terminator).
Before,
@@ -520,7 +520,7 @@ pub enum Effect {
}

impl Effect {
pub const fn at_index(self, statement_index: usize) -> EffectIndex {
const fn at_index(self, statement_index: usize) -> EffectIndex {
EffectIndex { effect: self, statement_index }
}
}
@@ -5,6 +5,7 @@
#![feature(exact_size_is_empty)]
#![feature(let_chains)]
#![feature(try_blocks)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use rustc_middle::ty;
@@ -15,12 +15,12 @@
use rustc_middle::ty::Ty;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct AbstractOperand;
pub(crate) struct AbstractOperand;
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct AbstractType;
pub type AbstractElem = ProjectionElem<AbstractOperand, AbstractType>;
pub(crate) struct AbstractType;
pub(crate) type AbstractElem = ProjectionElem<AbstractOperand, AbstractType>;

pub trait Lift {
pub(crate) trait Lift {
type Abstract;
fn lift(&self) -> Self::Abstract;
}
@@ -242,12 +242,12 @@
use crate::errors::{self, EncounteredErrorWhileInstantiating, NoOptimizedMir, RecursionLimit};

#[derive(PartialEq)]
pub enum MonoItemCollectionStrategy {
pub(crate) enum MonoItemCollectionStrategy {
Eager,
Lazy,
}

pub struct UsageMap<'tcx> {
pub(crate) struct UsageMap<'tcx> {
// Maps every mono item to the mono items used by it.
used_map: UnordMap<MonoItem<'tcx>, Vec<MonoItem<'tcx>>>,

@@ -306,13 +306,17 @@ fn record_used<'a>(
assert!(self.used_map.insert(user_item, used_items).is_none());
}

pub fn get_user_items(&self, item: MonoItem<'tcx>) -> &[MonoItem<'tcx>] {
pub(crate) fn get_user_items(&self, item: MonoItem<'tcx>) -> &[MonoItem<'tcx>] {
self.user_map.get(&item).map(|items| items.as_slice()).unwrap_or(&[])
}

/// Internally iterate over all inlined items used by `item`.
pub fn for_each_inlined_used_item<F>(&self, tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>, mut f: F)
where
pub(crate) fn for_each_inlined_used_item<F>(
&self,
tcx: TyCtxt<'tcx>,
item: MonoItem<'tcx>,
mut f: F,
) where
F: FnMut(MonoItem<'tcx>),
{
let used_items = self.used_map.get(&item).unwrap();
@@ -1615,6 +1619,6 @@ pub(crate) fn collect_crate_mono_items<'tcx>(
(mono_items, state.usage_map.into_inner())
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.hooks.should_codegen_locally = should_codegen_locally;
}
@@ -8,7 +8,7 @@

#[derive(Diagnostic)]
#[diag(monomorphize_recursion_limit)]
pub struct RecursionLimit {
pub(crate) struct RecursionLimit {
#[primary_span]
pub span: Span,
pub shrunk: String,
@@ -22,13 +22,13 @@ pub struct RecursionLimit {

#[derive(Diagnostic)]
#[diag(monomorphize_no_optimized_mir)]
pub struct NoOptimizedMir {
pub(crate) struct NoOptimizedMir {
#[note]
pub span: Span,
pub crate_name: Symbol,
}

pub struct UnusedGenericParamsHint {
pub(crate) struct UnusedGenericParamsHint {
pub span: Span,
pub param_spans: Vec<Span>,
pub param_names: Vec<String>,
@@ -53,7 +53,7 @@ fn into_diag(self, dcx: DiagCtxtHandle<'_>, level: Level) -> Diag<'_, G> {
#[derive(LintDiagnostic)]
#[diag(monomorphize_large_assignments)]
#[note]
pub struct LargeAssignmentsLint {
pub(crate) struct LargeAssignmentsLint {
#[label]
pub span: Span,
pub size: u64,
@@ -62,7 +62,7 @@ pub struct LargeAssignmentsLint {

#[derive(Diagnostic)]
#[diag(monomorphize_symbol_already_defined)]
pub struct SymbolAlreadyDefined {
pub(crate) struct SymbolAlreadyDefined {
#[primary_span]
pub span: Option<Span>,
pub symbol: String,
@@ -70,13 +70,13 @@ pub struct SymbolAlreadyDefined {

#[derive(Diagnostic)]
#[diag(monomorphize_couldnt_dump_mono_stats)]
pub struct CouldntDumpMonoStats {
pub(crate) struct CouldntDumpMonoStats {
pub error: String,
}

#[derive(Diagnostic)]
#[diag(monomorphize_encountered_error_while_instantiating)]
pub struct EncounteredErrorWhileInstantiating {
pub(crate) struct EncounteredErrorWhileInstantiating {
#[primary_span]
pub span: Span,
pub formatted_item: String,
@@ -85,10 +85,10 @@ pub struct EncounteredErrorWhileInstantiating {
#[derive(Diagnostic)]
#[diag(monomorphize_start_not_found)]
#[help]
pub struct StartNotFound;
pub(crate) struct StartNotFound;

#[derive(Diagnostic)]
#[diag(monomorphize_unknown_cgu_collection_mode)]
pub struct UnknownCguCollectionMode<'a> {
pub(crate) struct UnknownCguCollectionMode<'a> {
pub mode: &'a str,
}
@@ -1,5 +1,6 @@
// tidy-alphabetical-start
#![feature(array_windows)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use rustc_hir::lang_items::LangItem;
@@ -1300,7 +1300,7 @@ struct MonoItem {
Ok(())
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.collect_and_partition_mono_items = collect_and_partition_mono_items;

providers.is_codegened_item = |tcx, def_id| {
@@ -19,7 +19,7 @@
use crate::errors::UnusedGenericParamsHint;

/// Provide implementations of queries relating to polymorphization analysis.
pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.unused_generic_params = unused_generic_params;
}
@@ -4,6 +4,10 @@
//! but were uplifted in the process of making the new trait solver generic.
//! So if you got to this crate from the old solver, it's totally normal.

// tidy-alphabetical-start
#![warn(unreachable_pub)]
// tidy-alphabetical-end

pub mod canonicalizer;
pub mod coherence;
pub mod delegate;
@@ -92,7 +92,7 @@ pub struct EvalCtxt<'a, D, I = <D as SolverDelegate>::Interner>
#[derive(TypeVisitable_Generic, TypeFoldable_Generic, Lift_Generic)]
#[cfg_attr(feature = "nightly", derive(TyDecodable, TyEncodable, HashStable_NoContext))]
// FIXME: This can be made crate-private once `EvalCtxt` also lives in this crate.
pub struct NestedGoals<I: Interner> {
struct NestedGoals<I: Interner> {
/// These normalizes-to goals are treated specially during the evaluation
/// loop. In each iteration we take the RHS of the projection, replace it with
/// a fresh inference variable, and only after evaluating that goal do we
@@ -109,11 +109,11 @@ pub struct NestedGoals<I: Interner> {
}

impl<I: Interner> NestedGoals<I> {
pub fn new() -> Self {
fn new() -> Self {
Self { normalizes_to_goals: Vec::new(), goals: Vec::new() }
}

pub fn is_empty(&self) -> bool {
fn is_empty(&self) -> bool {
self.normalizes_to_goals.is_empty() && self.goals.is_empty()
}
}
@@ -222,13 +222,13 @@ fn as_mut(&mut self) -> Option<&mut DebugSolver<I>> {
self.state.as_deref_mut()
}

pub fn take_and_enter_probe(&mut self) -> ProofTreeBuilder<D> {
pub(crate) fn take_and_enter_probe(&mut self) -> ProofTreeBuilder<D> {
let mut nested = ProofTreeBuilder { state: self.state.take(), _infcx: PhantomData };
nested.enter_probe();
nested
}

pub fn finalize(self) -> Option<inspect::GoalEvaluation<I>> {
pub(crate) fn finalize(self) -> Option<inspect::GoalEvaluation<I>> {
match *self.state? {
DebugSolver::GoalEvaluation(wip_goal_evaluation) => {
Some(wip_goal_evaluation.finalize())
@@ -237,22 +237,22 @@ pub fn finalize(self) -> Option<inspect::GoalEvaluation<I>> {
}
}

pub fn new_maybe_root(generate_proof_tree: GenerateProofTree) -> ProofTreeBuilder<D> {
pub(crate) fn new_maybe_root(generate_proof_tree: GenerateProofTree) -> ProofTreeBuilder<D> {
match generate_proof_tree {
GenerateProofTree::No => ProofTreeBuilder::new_noop(),
GenerateProofTree::Yes => ProofTreeBuilder::new_root(),
}
}

pub fn new_root() -> ProofTreeBuilder<D> {
fn new_root() -> ProofTreeBuilder<D> {
ProofTreeBuilder::new(DebugSolver::Root)
}

pub fn new_noop() -> ProofTreeBuilder<D> {
fn new_noop() -> ProofTreeBuilder<D> {
ProofTreeBuilder { state: None, _infcx: PhantomData }
}

pub fn is_noop(&self) -> bool {
pub(crate) fn is_noop(&self) -> bool {
self.state.is_none()
}
@@ -272,7 +272,7 @@ pub(in crate::solve) fn new_goal_evaluation(
})
}

pub fn new_canonical_goal_evaluation(
pub(crate) fn new_canonical_goal_evaluation(
&mut self,
goal: CanonicalInput<I>,
) -> ProofTreeBuilder<D> {
@@ -284,7 +284,10 @@ pub fn new_canonical_goal_evaluation(
})
}

pub fn canonical_goal_evaluation(&mut self, canonical_goal_evaluation: ProofTreeBuilder<D>) {
pub(crate) fn canonical_goal_evaluation(
&mut self,
canonical_goal_evaluation: ProofTreeBuilder<D>,
) {
if let Some(this) = self.as_mut() {
match (this, *canonical_goal_evaluation.state.unwrap()) {
(
@@ -299,7 +302,7 @@ pub fn canonical_goal_evaluation(&mut self, canonical_goal_evaluation: ProofTree
}
}

pub fn canonical_goal_evaluation_overflow(&mut self) {
pub(crate) fn canonical_goal_evaluation_overflow(&mut self) {
if let Some(this) = self.as_mut() {
match this {
DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => {
@@ -310,7 +313,7 @@ pub fn canonical_goal_evaluation_overflow(&mut self) {
}
}

pub fn goal_evaluation(&mut self, goal_evaluation: ProofTreeBuilder<D>) {
pub(crate) fn goal_evaluation(&mut self, goal_evaluation: ProofTreeBuilder<D>) {
if let Some(this) = self.as_mut() {
match this {
DebugSolver::Root => *this = *goal_evaluation.state.unwrap(),
@@ -322,7 +325,7 @@ pub fn goal_evaluation(&mut self, goal_evaluation: ProofTreeBuilder<D>) {
}
}

pub fn new_goal_evaluation_step(
pub(crate) fn new_goal_evaluation_step(
&mut self,
var_values: ty::CanonicalVarValues<I>,
instantiated_goal: QueryInput<I, I::Predicate>,
@@ -340,7 +343,7 @@ pub fn new_goal_evaluation_step(
})
}

pub fn goal_evaluation_step(&mut self, goal_evaluation_step: ProofTreeBuilder<D>) {
pub(crate) fn goal_evaluation_step(&mut self, goal_evaluation_step: ProofTreeBuilder<D>) {
if let Some(this) = self.as_mut() {
match (this, *goal_evaluation_step.state.unwrap()) {
(
@@ -354,7 +357,7 @@ pub fn goal_evaluation_step(&mut self, goal_evaluation_step: ProofTreeBuilder<D>
}
}
pub fn add_var_value<T: Into<I::GenericArg>>(&mut self, arg: T) {
pub(crate) fn add_var_value<T: Into<I::GenericArg>>(&mut self, arg: T) {
match self.as_mut() {
None => {}
Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
@@ -364,7 +367,7 @@ pub fn add_var_value<T: Into<I::GenericArg>>(&mut self, arg: T) {
}
}

pub fn enter_probe(&mut self) {
fn enter_probe(&mut self) {
match self.as_mut() {
None => {}
Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
@@ -381,7 +384,7 @@ pub fn enter_probe(&mut self) {
}
}

pub fn probe_kind(&mut self, probe_kind: inspect::ProbeKind<I>) {
pub(crate) fn probe_kind(&mut self, probe_kind: inspect::ProbeKind<I>) {
match self.as_mut() {
None => {}
Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
@@ -392,7 +395,11 @@ pub fn probe_kind(&mut self, probe_kind: inspect::ProbeKind<I>) {
}
}

pub fn probe_final_state(&mut self, delegate: &D, max_input_universe: ty::UniverseIndex) {
pub(crate) fn probe_final_state(
&mut self,
delegate: &D,
max_input_universe: ty::UniverseIndex,
) {
match self.as_mut() {
None => {}
Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
@@ -409,7 +416,7 @@ pub fn probe_final_state(&mut self, delegate: &D, max_input_universe: ty::Univer
}
}

pub fn add_normalizes_to_goal(
pub(crate) fn add_normalizes_to_goal(
&mut self,
delegate: &D,
max_input_universe: ty::UniverseIndex,
@@ -423,7 +430,7 @@ pub fn add_normalizes_to_goal(
);
}

pub fn add_goal(
pub(crate) fn add_goal(
&mut self,
delegate: &D,
max_input_universe: ty::UniverseIndex,
@@ -469,7 +476,7 @@ pub(crate) fn record_impl_args(
}
}

pub fn make_canonical_response(&mut self, shallow_certainty: Certainty) {
pub(crate) fn make_canonical_response(&mut self, shallow_certainty: Certainty) {
match self.as_mut() {
Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
state
@@ -482,7 +489,7 @@ pub fn make_canonical_response(&mut self, shallow_certainty: Certainty) {
}
}

pub fn finish_probe(mut self) -> ProofTreeBuilder<D> {
pub(crate) fn finish_probe(mut self) -> ProofTreeBuilder<D> {
match self.as_mut() {
None => {}
Some(DebugSolver::CanonicalGoalEvaluationStep(state)) => {
@@ -497,7 +504,7 @@ pub fn finish_probe(mut self) -> ProofTreeBuilder<D> {
self
}

pub fn query_result(&mut self, result: QueryResult<I>) {
pub(crate) fn query_result(&mut self, result: QueryResult<I>) {
if let Some(this) = self.as_mut() {
match this {
DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => {
@@ -97,7 +97,7 @@ pub(super) fn normalize_opaque_type(
/// Checks whether each generic argument is simply a unique generic placeholder.
///
/// FIXME: Interner argument is needed to constrain the `I` parameter.
pub fn uses_unique_placeholders_ignoring_regions<I: Interner>(
fn uses_unique_placeholders_ignoring_regions<I: Interner>(
_cx: I,
args: I::GenericArgs,
) -> Result<(), NotUniqueParam<I>> {
@@ -130,7 +130,7 @@ pub fn uses_unique_placeholders_ignoring_regions<I: Interner>(
}

// FIXME: This should check for dupes and non-params first, then infer vars.
pub enum NotUniqueParam<I: Interner> {
enum NotUniqueParam<I: Interner> {
DuplicateParam(I::GenericArg),
NotParam(I::GenericArg),
}
@@ -260,7 +260,7 @@ pub(crate) struct NotAsNegationOperator {
}

#[derive(Subdiagnostic)]
pub enum NotAsNegationOperatorSub {
pub(crate) enum NotAsNegationOperatorSub {
#[suggestion(
parse_unexpected_token_after_not_default,
style = "verbose",
@@ -424,7 +424,7 @@ pub(crate) enum IfExpressionMissingThenBlockSub {
#[derive(Diagnostic)]
#[diag(parse_ternary_operator)]
#[help]
pub struct TernaryOperator {
pub(crate) struct TernaryOperator {
#[primary_span]
pub span: Span,
}
@@ -1088,7 +1088,7 @@ pub(crate) enum ExpectedIdentifierFound {
}

impl ExpectedIdentifierFound {
pub fn new(token_descr: Option<TokenDescription>, span: Span) -> Self {
pub(crate) fn new(token_descr: Option<TokenDescription>, span: Span) -> Self {
(match token_descr {
Some(TokenDescription::ReservedIdentifier) => {
ExpectedIdentifierFound::ReservedIdentifier
@@ -1659,7 +1659,7 @@ pub(crate) struct SelfArgumentPointer {

#[derive(Diagnostic)]
#[diag(parse_unexpected_token_after_dot)]
pub struct UnexpectedTokenAfterDot<'a> {
pub(crate) struct UnexpectedTokenAfterDot<'a> {
#[primary_span]
pub span: Span,
pub actual: Cow<'a, str>,
@@ -1928,7 +1928,7 @@ pub(crate) enum UnexpectedTokenAfterStructName {
}

impl UnexpectedTokenAfterStructName {
pub fn new(span: Span, token: Token) -> Self {
pub(crate) fn new(span: Span, token: Token) -> Self {
match TokenDescription::from_token(&token) {
Some(TokenDescription::ReservedIdentifier) => Self::ReservedIdentifier { span, token },
Some(TokenDescription::Keyword) => Self::Keyword { span, token },
@@ -2006,7 +2006,7 @@ pub(crate) enum TopLevelOrPatternNotAllowed {

#[derive(Diagnostic)]
#[diag(parse_cannot_be_raw_ident)]
pub struct CannotBeRawIdent {
pub(crate) struct CannotBeRawIdent {
#[primary_span]
pub span: Span,
pub ident: Symbol,
@@ -2014,14 +2014,14 @@ pub struct CannotBeRawIdent {

#[derive(Diagnostic)]
#[diag(parse_keyword_lifetime)]
pub struct KeywordLifetime {
pub(crate) struct KeywordLifetime {
#[primary_span]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_invalid_label)]
pub struct InvalidLabel {
pub(crate) struct InvalidLabel {
#[primary_span]
pub span: Span,
pub name: Symbol,
@@ -2029,7 +2029,7 @@ pub struct InvalidLabel {

#[derive(Diagnostic)]
#[diag(parse_cr_doc_comment)]
pub struct CrDocComment {
pub(crate) struct CrDocComment {
#[primary_span]
pub span: Span,
pub block: bool,
@@ -2037,14 +2037,14 @@ pub struct CrDocComment {

#[derive(Diagnostic)]
#[diag(parse_no_digits_literal, code = E0768)]
pub struct NoDigitsLiteral {
pub(crate) struct NoDigitsLiteral {
#[primary_span]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_invalid_digit_literal)]
pub struct InvalidDigitLiteral {
pub(crate) struct InvalidDigitLiteral {
#[primary_span]
pub span: Span,
pub base: u32,
@@ -2052,14 +2052,14 @@ pub struct InvalidDigitLiteral {

#[derive(Diagnostic)]
#[diag(parse_empty_exponent_float)]
pub struct EmptyExponentFloat {
pub(crate) struct EmptyExponentFloat {
#[primary_span]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_float_literal_unsupported_base)]
pub struct FloatLiteralUnsupportedBase {
pub(crate) struct FloatLiteralUnsupportedBase {
#[primary_span]
pub span: Span,
pub base: &'static str,
@@ -2068,7 +2068,7 @@ pub struct FloatLiteralUnsupportedBase {
#[derive(Diagnostic)]
#[diag(parse_unknown_prefix)]
#[note]
pub struct UnknownPrefix<'a> {
pub(crate) struct UnknownPrefix<'a> {
#[primary_span]
#[label]
pub span: Span,
@@ -2079,12 +2079,12 @@ pub struct UnknownPrefix<'a> {

#[derive(Subdiagnostic)]
#[note(parse_macro_expands_to_adt_field)]
pub struct MacroExpandsToAdtField<'a> {
pub(crate) struct MacroExpandsToAdtField<'a> {
pub adt_ty: &'a str,
}

#[derive(Subdiagnostic)]
pub enum UnknownPrefixSugg {
pub(crate) enum UnknownPrefixSugg {
#[suggestion(
parse_suggestion_br,
code = "br",
@@ -2114,7 +2114,7 @@ pub enum UnknownPrefixSugg {

#[derive(Diagnostic)]
#[diag(parse_too_many_hashes)]
pub struct TooManyHashes {
pub(crate) struct TooManyHashes {
#[primary_span]
pub span: Span,
pub num: u32,
@@ -2122,7 +2122,7 @@ pub struct TooManyHashes {

#[derive(Diagnostic)]
#[diag(parse_unknown_start_of_token)]
pub struct UnknownTokenStart {
pub(crate) struct UnknownTokenStart {
#[primary_span]
pub span: Span,
pub escaped: String,
@@ -2135,7 +2135,7 @@ pub struct UnknownTokenStart {
}

#[derive(Subdiagnostic)]
pub enum TokenSubstitution {
pub(crate) enum TokenSubstitution {
#[suggestion(
parse_sugg_quotes,
code = "{suggestion}",
@@ -2168,16 +2168,16 @@ pub enum TokenSubstitution {

#[derive(Subdiagnostic)]
#[note(parse_note_repeats)]
pub struct UnknownTokenRepeat {
pub(crate) struct UnknownTokenRepeat {
pub repeats: usize,
}

#[derive(Subdiagnostic)]
#[help(parse_help_null)]
pub struct UnknownTokenNull;
pub(crate) struct UnknownTokenNull;

#[derive(Diagnostic)]
pub enum UnescapeError {
pub(crate) enum UnescapeError {
#[diag(parse_invalid_unicode_escape)]
#[help]
InvalidUnicodeEscape {
@@ -2322,7 +2322,7 @@ pub enum UnescapeError {
}

#[derive(Subdiagnostic)]
pub enum MoreThanOneCharSugg {
pub(crate) enum MoreThanOneCharSugg {
#[suggestion(
parse_consider_normalized,
code = "{normalized}",
@@ -2370,7 +2370,7 @@ pub enum MoreThanOneCharSugg {
}

#[derive(Subdiagnostic)]
pub enum MoreThanOneCharNote {
pub(crate) enum MoreThanOneCharNote {
#[note(parse_followed_by)]
AllCombining {
#[primary_span]
@@ -2388,7 +2388,7 @@ pub enum MoreThanOneCharNote {
}

#[derive(Subdiagnostic)]
pub enum NoBraceUnicodeSub {
pub(crate) enum NoBraceUnicodeSub {
#[suggestion(
parse_use_braces,
code = "{suggestion}",
@@ -2703,7 +2703,7 @@ pub(crate) struct InvalidDynKeyword {
}

#[derive(Subdiagnostic)]
pub enum HelpUseLatestEdition {
pub(crate) enum HelpUseLatestEdition {
#[help(parse_help_set_edition_cargo)]
#[note(parse_note_edition_guide)]
Cargo { edition: Edition },
@@ -2713,7 +2713,7 @@ pub enum HelpUseLatestEdition {
}

impl HelpUseLatestEdition {
pub fn new() -> Self {
pub(crate) fn new() -> Self {
let edition = LATEST_STABLE_EDITION;
if rustc_session::utils::was_invoked_from_cargo() {
Self::Cargo { edition }
@@ -2725,7 +2725,7 @@ pub fn new() -> Self {

#[derive(Diagnostic)]
#[diag(parse_box_syntax_removed)]
pub struct BoxSyntaxRemoved {
pub(crate) struct BoxSyntaxRemoved {
#[primary_span]
pub span: Span,
#[subdiagnostic]
@@ -2738,7 +2738,7 @@ pub struct BoxSyntaxRemoved {
applicability = "machine-applicable",
style = "verbose"
)]
pub struct AddBoxNew {
pub(crate) struct AddBoxNew {
#[suggestion_part(code = "Box::new(")]
pub box_kw_and_lo: Span,
#[suggestion_part(code = ")")]
@@ -3190,7 +3190,7 @@ pub(crate) struct DotDotRangeAttribute {
#[derive(Diagnostic)]
#[diag(parse_invalid_attr_unsafe)]
#[note]
pub struct InvalidAttrUnsafe {
pub(crate) struct InvalidAttrUnsafe {
#[primary_span]
#[label]
pub span: Span,
@@ -3199,7 +3199,7 @@ pub struct InvalidAttrUnsafe {

#[derive(Diagnostic)]
#[diag(parse_unsafe_attr_outside_unsafe)]
pub struct UnsafeAttrOutsideUnsafe {
pub(crate) struct UnsafeAttrOutsideUnsafe {
#[primary_span]
#[label]
pub span: Span,
@@ -3212,7 +3212,7 @@ pub struct UnsafeAttrOutsideUnsafe {
parse_unsafe_attr_outside_unsafe_suggestion,
applicability = "machine-applicable"
)]
pub struct UnsafeAttrOutsideUnsafeSuggestion {
pub(crate) struct UnsafeAttrOutsideUnsafeSuggestion {
#[suggestion_part(code = "unsafe(")]
pub left: Span,
#[suggestion_part(code = ")")]
@@ -3221,7 +3221,7 @@ pub struct UnsafeAttrOutsideUnsafeSuggestion {

#[derive(Diagnostic)]
#[diag(parse_binder_before_modifiers)]
pub struct BinderBeforeModifiers {
pub(crate) struct BinderBeforeModifiers {
#[primary_span]
pub binder_span: Span,
#[label]
@@ -3230,7 +3230,7 @@ pub struct BinderBeforeModifiers {

#[derive(Diagnostic)]
#[diag(parse_binder_and_polarity)]
pub struct BinderAndPolarity {
pub(crate) struct BinderAndPolarity {
#[primary_span]
pub polarity_span: Span,
#[label]
@@ -3240,7 +3240,7 @@ pub struct BinderAndPolarity {

#[derive(Diagnostic)]
#[diag(parse_modifiers_and_polarity)]
pub struct PolarityAndModifiers {
pub(crate) struct PolarityAndModifiers {
#[primary_span]
pub polarity_span: Span,
#[label]
@@ -11,6 +11,7 @@
#![feature(if_let_guard)]
#![feature(iter_intersperse)]
#![feature(let_chains)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use std::path::Path;
@@ -13,6 +13,7 @@
html_playground_url = "https://play.rust-lang.org/",
test(attr(deny(warnings)))
)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use std::{iter, str, string};
@@ -95,6 +95,6 @@ fn debugger_visualizers(tcx: TyCtxt<'_>, _: LocalCrate) -> Vec<DebuggerVisualize
visitor.visualizers
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.debugger_visualizers = debugger_visualizers;
}
@@ -90,7 +90,7 @@ fn all_diagnostic_items(tcx: TyCtxt<'_>, (): ()) -> DiagnosticItems {
items
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.diagnostic_items = diagnostic_items;
providers.all_diagnostic_items = all_diagnostic_items;
}
File diff suppressed because it is too large
@@ -359,6 +359,6 @@ fn visit_assoc_item(&mut self, i: &'ast ast::AssocItem, ctxt: visit::AssocCtxt)
}
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.get_lang_items = get_lang_items;
}
@@ -12,6 +12,7 @@
#![feature(map_try_insert)]
#![feature(rustdoc_internals)]
#![feature(try_blocks)]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

use rustc_middle::query::Providers;
@@ -16,7 +16,7 @@

use crate::errors::{FeaturePreviouslyDeclared, FeatureStableTwice};

pub struct LibFeatureCollector<'tcx> {
struct LibFeatureCollector<'tcx> {
tcx: TyCtxt<'tcx>,
lib_features: LibFeatures,
}
@@ -153,6 +153,6 @@ fn lib_features(tcx: TyCtxt<'_>, LocalCrate: LocalCrate) -> LibFeatures {
collector.lib_features
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.lib_features = lib_features;
}
@@ -178,7 +178,7 @@ fn check_liveness(tcx: TyCtxt<'_>, def_id: LocalDefId) {
lsets.warn_about_unused_args(&body, entry_ln);
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
*providers = Providers { check_liveness, ..*providers };
}
@@ -500,6 +500,6 @@ fn reachable_set(tcx: TyCtxt<'_>, (): ()) -> LocalDefIdSet {
reachable_context.reachable_symbols
}

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
*providers = Providers { reachable_set, ..*providers };
}
@@ -9,7 +9,7 @@
use rustc_middle::ty::TyCtxt;
use rustc_span::Span;

pub fn provide(providers: &mut Providers) {
pub(crate) fn provide(providers: &mut Providers) {
providers.upvars_mentioned = |tcx, def_id| {
if !tcx.is_closure_like(def_id) {
return None;
@@ -15,7 +15,11 @@

/// Checks the crate for usage of weak lang items, returning a vector of all the
/// lang items required by this crate, but not defined yet.
pub fn check_crate(tcx: TyCtxt<'_>, items: &mut lang_items::LanguageItems, krate: &ast::Crate) {
pub(crate) fn check_crate(
    tcx: TyCtxt<'_>,
    items: &mut lang_items::LanguageItems,
    krate: &ast::Crate,
) {
// These are never called by user code, they're generated by the compiler.
// They will never implicitly be added to the `missing` array unless we do
// so here.
@@ -6,6 +6,7 @@
#![allow(rustc::diagnostic_outside_of_impl)]
#![allow(rustc::untranslatable_diagnostic)]
#![cfg_attr(feature = "rustc", feature(let_chains))]
#![warn(unreachable_pub)]
// tidy-alphabetical-end

pub mod constructor;
@@ -5,7 +5,7 @@

#[derive(Diagnostic)]
#[diag(privacy_field_is_private, code = E0451)]
pub struct FieldIsPrivate {
pub(crate) struct FieldIsPrivate {
#[primary_span]
pub span: Span,
pub field_name: Symbol,
@@ -16,7 +16,7 @@ pub struct FieldIsPrivate {
}

#[derive(Subdiagnostic)]
pub enum FieldIsPrivateLabel {
pub(crate) enum FieldIsPrivateLabel {
#[label(privacy_field_is_private_is_update_syntax_label)]
IsUpdateSyntax {
#[primary_span]
@@ -32,7 +32,7 @@ pub enum FieldIsPrivateLabel {

#[derive(Diagnostic)]
#[diag(privacy_item_is_private)]
pub struct ItemIsPrivate<'a> {
pub(crate) struct ItemIsPrivate<'a> {
#[primary_span]
#[label]
pub span: Span,
@@ -42,7 +42,7 @@ pub struct ItemIsPrivate<'a> {

#[derive(Diagnostic)]
#[diag(privacy_unnamed_item_is_private)]
pub struct UnnamedItemIsPrivate {
pub(crate) struct UnnamedItemIsPrivate {
#[primary_span]
pub span: Span,
pub kind: &'static str,
@@ -50,7 +50,7 @@ pub struct UnnamedItemIsPrivate {

#[derive(Diagnostic)]
#[diag(privacy_in_public_interface, code = E0446)]
pub struct InPublicInterface<'a> {
pub(crate) struct InPublicInterface<'a> {
#[primary_span]
#[label]
pub span: Span,
@@ -63,7 +63,7 @@ pub struct InPublicInterface<'a> {

#[derive(Diagnostic)]
#[diag(privacy_report_effective_visibility)]
pub struct ReportEffectiveVisibility {
pub(crate) struct ReportEffectiveVisibility {
#[primary_span]
pub span: Span,
pub descr: String,
@@ -71,7 +71,7 @@ pub struct ReportEffectiveVisibility {

#[derive(LintDiagnostic)]
#[diag(privacy_from_private_dep_in_public_interface)]
pub struct FromPrivateDependencyInPublicInterface<'a> {
pub(crate) struct FromPrivateDependencyInPublicInterface<'a> {
pub kind: &'a str,
pub descr: DiagArgFromDisplay<'a>,
pub krate: Symbol,
@@ -79,7 +79,7 @@ pub struct FromPrivateDependencyInPublicInterface<'a> {

#[derive(LintDiagnostic)]
#[diag(privacy_unnameable_types_lint)]
pub struct UnnameableTypesLint<'a> {
pub(crate) struct UnnameableTypesLint<'a> {
#[label]
pub span: Span,
pub kind: &'a str,
@@ -93,7 +93,7 @@ pub struct UnnameableTypesLint<'a> {
// See https://rust-lang.github.io/rfcs/2145-type-privacy.html for more details.
#[derive(LintDiagnostic)]
#[diag(privacy_private_interface_or_bounds_lint)]
pub struct PrivateInterfacesOrBoundsLint<'a> {
pub(crate) struct PrivateInterfacesOrBoundsLint<'a> {
#[label(privacy_item_label)]
pub item_span: Span,
pub item_kind: &'a str,
@ -6,6 +6,7 @@
|
||||
#![feature(let_chains)]
|
||||
#![feature(rustdoc_internals)]
|
||||
#![feature(try_blocks)]
|
||||
#![warn(unreachable_pub)]
|
||||
// tidy-alphabetical-end
|
||||
|
||||
mod errors;
|
||||
@ -1497,7 +1498,7 @@ fn get(&self, def_id: LocalDefId) -> Option<EffectiveVisibility> {
|
||||
self.effective_visibilities.effective_vis(def_id).copied()
|
||||
}
|
||||
|
||||
pub fn check_item(&mut self, id: ItemId) {
|
||||
fn check_item(&mut self, id: ItemId) {
|
||||
let tcx = self.tcx;
|
||||
let def_id = id.owner_id.def_id;
|
||||
let item_visibility = tcx.local_visibility(def_id);
|
||||
|
@ -8,6 +8,7 @@
|
||||
#![feature(min_specialization)]
|
||||
#![feature(rustc_attrs)]
|
||||
#![feature(rustdoc_internals)]
|
||||
#![warn(unreachable_pub)]
|
||||
// tidy-alphabetical-end
|
||||
|
||||
use field_offset::offset_of;
|
||||
|
@ -541,7 +541,7 @@ macro_rules! expand_if_cached {
|
||||
/// Don't show the backtrace for query system by default
|
||||
/// use `RUST_BACKTRACE=full` to show all the backtraces
|
||||
#[inline(never)]
|
||||
pub fn __rust_begin_short_backtrace<F, T>(f: F) -> T
|
||||
pub(crate) fn __rust_begin_short_backtrace<F, T>(f: F) -> T
|
||||
where
|
||||
F: FnOnce() -> T,
|
||||
{
|
||||
@ -557,17 +557,17 @@ macro_rules! define_queries {
|
||||
$($(#[$attr:meta])*
|
||||
[$($modifiers:tt)*] fn $name:ident($($K:tt)*) -> $V:ty,)*) => {
|
||||
|
||||
pub(crate) mod query_impl { $(pub mod $name {
|
||||
pub(crate) mod query_impl { $(pub(crate) mod $name {
|
||||
use super::super::*;
|
||||
use std::marker::PhantomData;
|
||||
|
||||
pub mod get_query_incr {
|
||||
pub(crate) mod get_query_incr {
|
||||
use super::*;
|
||||
|
||||
// Adding `__rust_end_short_backtrace` marker to backtraces so that we emit the frames
|
||||
// when `RUST_BACKTRACE=1`, add a new mod with `$name` here is to allow duplicate naming
|
||||
#[inline(never)]
|
||||
pub fn __rust_end_short_backtrace<'tcx>(
|
||||
pub(crate) fn __rust_end_short_backtrace<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
key: queries::$name::Key<'tcx>,
|
||||
@ -585,11 +585,11 @@ pub fn __rust_end_short_backtrace<'tcx>(
|
||||
}
|
||||
}
|
||||
|
||||
pub mod get_query_non_incr {
|
||||
pub(crate) mod get_query_non_incr {
|
||||
use super::*;
|
||||
|
||||
#[inline(never)]
|
||||
pub fn __rust_end_short_backtrace<'tcx>(
|
||||
pub(crate) fn __rust_end_short_backtrace<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
span: Span,
|
||||
key: queries::$name::Key<'tcx>,
|
||||
@ -604,7 +604,9 @@ pub fn __rust_end_short_backtrace<'tcx>(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn dynamic_query<'tcx>() -> DynamicQuery<'tcx, queries::$name::Storage<'tcx>> {
|
||||
pub(crate) fn dynamic_query<'tcx>()
|
||||
-> DynamicQuery<'tcx, queries::$name::Storage<'tcx>>
|
||||
{
|
||||
DynamicQuery {
|
||||
name: stringify!($name),
|
||||
eval_always: is_eval_always!([$($modifiers)*]),
|
||||
@ -667,7 +669,7 @@ pub fn dynamic_query<'tcx>() -> DynamicQuery<'tcx, queries::$name::Storage<'tcx>
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Default)]
|
||||
pub struct QueryType<'tcx> {
|
||||
pub(crate) struct QueryType<'tcx> {
|
||||
data: PhantomData<&'tcx ()>
|
||||
}
|
||||
|
||||
@ -696,7 +698,7 @@ fn restore(value: <Self::Config as QueryConfig<QueryCtxt<'tcx>>>::Value) -> Self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn try_collect_active_jobs<'tcx>(tcx: TyCtxt<'tcx>, qmap: &mut QueryMap) {
|
||||
pub(crate) fn try_collect_active_jobs<'tcx>(tcx: TyCtxt<'tcx>, qmap: &mut QueryMap) {
|
||||
let make_query = |tcx, key| {
|
||||
let kind = rustc_middle::dep_graph::dep_kinds::$name;
|
||||
let name = stringify!($name);
|
||||
@ -711,11 +713,17 @@ pub fn try_collect_active_jobs<'tcx>(tcx: TyCtxt<'tcx>, qmap: &mut QueryMap) {
|
||||
// don't `unwrap()` here, just manually check for `None` and do best-effort error
|
||||
// reporting.
|
||||
if res.is_none() {
|
||||
tracing::warn!("Failed to collect active jobs for query with name `{}`!", stringify!($name));
|
||||
tracing::warn!(
|
||||
"Failed to collect active jobs for query with name `{}`!",
|
||||
stringify!($name)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn alloc_self_profile_query_strings<'tcx>(tcx: TyCtxt<'tcx>, string_cache: &mut QueryKeyStringCache) {
|
||||
pub(crate) fn alloc_self_profile_query_strings<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
string_cache: &mut QueryKeyStringCache
|
||||
) {
|
||||
$crate::profiling_support::alloc_self_profile_query_strings_for_query_cache(
|
||||
tcx,
|
||||
stringify!($name),
|
||||
@ -725,7 +733,7 @@ pub fn alloc_self_profile_query_strings<'tcx>(tcx: TyCtxt<'tcx>, string_cache: &
|
||||
}
|
||||
|
||||
item_if_cached! { [$($modifiers)*] {
|
||||
pub fn encode_query_results<'tcx>(
|
||||
pub(crate) fn encode_query_results<'tcx>(
|
||||
tcx: TyCtxt<'tcx>,
|
||||
encoder: &mut CacheEncoder<'_, 'tcx>,
|
||||
query_result_index: &mut EncodedDepNodeIndex
|
||||
@ -739,7 +747,7 @@ pub fn encode_query_results<'tcx>(
|
||||
}
|
||||
}}
|
||||
|
||||
pub fn query_key_hash_verify<'tcx>(tcx: TyCtxt<'tcx>) {
|
||||
pub(crate) fn query_key_hash_verify<'tcx>(tcx: TyCtxt<'tcx>) {
|
||||
$crate::plumbing::query_key_hash_verify(
|
||||
query_impl::$name::QueryType::config(tcx),
|
||||
QueryCtxt::new(tcx),
|
||||
@ -795,7 +803,7 @@ mod query_callbacks {
|
||||
use rustc_query_system::dep_graph::FingerprintStyle;
|
||||
|
||||
// We use this for most things when incr. comp. is turned off.
|
||||
pub fn Null<'tcx>() -> DepKindStruct<'tcx> {
|
||||
pub(crate) fn Null<'tcx>() -> DepKindStruct<'tcx> {
|
||||
DepKindStruct {
|
||||
is_anon: false,
|
||||
is_eval_always: false,
|
||||
@ -807,7 +815,7 @@ pub fn Null<'tcx>() -> DepKindStruct<'tcx> {
|
||||
}
|
||||
|
||||
// We use this for the forever-red node.
|
||||
pub fn Red<'tcx>() -> DepKindStruct<'tcx> {
|
||||
pub(crate) fn Red<'tcx>() -> DepKindStruct<'tcx> {
|
||||
DepKindStruct {
|
||||
is_anon: false,
|
||||
is_eval_always: false,
|
||||
@ -818,7 +826,7 @@ pub fn Red<'tcx>() -> DepKindStruct<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn TraitSelect<'tcx>() -> DepKindStruct<'tcx> {
|
||||
pub(crate) fn TraitSelect<'tcx>() -> DepKindStruct<'tcx> {
|
||||
DepKindStruct {
|
||||
is_anon: true,
|
||||
is_eval_always: false,
|
||||
@ -829,7 +837,7 @@ pub fn TraitSelect<'tcx>() -> DepKindStruct<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn CompileCodegenUnit<'tcx>() -> DepKindStruct<'tcx> {
|
||||
pub(crate) fn CompileCodegenUnit<'tcx>() -> DepKindStruct<'tcx> {
|
||||
DepKindStruct {
|
||||
is_anon: false,
|
||||
is_eval_always: false,
|
||||
@ -840,7 +848,7 @@ pub fn CompileCodegenUnit<'tcx>() -> DepKindStruct<'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn CompileMonoItem<'tcx>() -> DepKindStruct<'tcx> {
|
||||
pub(crate) fn CompileMonoItem<'tcx>() -> DepKindStruct<'tcx> {
|
||||
DepKindStruct {
|
||||
is_anon: false,
|
||||
is_eval_always: false,
|
||||
|
@ -617,14 +617,14 @@ fn finish(self, profiler: &SelfProfilerRef) -> FileEncodeResult {
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GraphEncoder<D: Deps> {
|
||||
pub(crate) struct GraphEncoder<D: Deps> {
|
||||
profiler: SelfProfilerRef,
|
||||
status: Lock<Option<EncoderState<D>>>,
|
||||
record_graph: Option<Lock<DepGraphQuery>>,
|
||||
}
|
||||
|
||||
impl<D: Deps> GraphEncoder<D> {
|
||||
pub fn new(
|
||||
pub(crate) fn new(
|
||||
encoder: FileEncoder,
|
||||
prev_node_count: usize,
|
||||
record_graph: bool,
|
||||
@ -723,7 +723,7 @@ pub(crate) fn send_promoted(
|
||||
)
|
||||
}
|
||||
|
||||
pub fn finish(&self) -> FileEncodeResult {
|
||||
pub(crate) fn finish(&self) -> FileEncodeResult {
|
||||
let _prof_timer = self.profiler.generic_activity("incr_comp_encode_dep_graph_finish");
|
||||
|
||||
self.status.lock().take().unwrap().finish(&self.profiler)
|
||||
|
@ -5,7 +5,7 @@
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
#[note(query_system_cycle_stack_middle)]
|
||||
pub struct CycleStack {
|
||||
pub(crate) struct CycleStack {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub desc: String,
|
||||
@ -20,7 +20,7 @@ pub enum HandleCycleError {
|
||||
}
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
pub enum StackCount {
|
||||
pub(crate) enum StackCount {
|
||||
#[note(query_system_cycle_stack_single)]
|
||||
Single,
|
||||
#[note(query_system_cycle_stack_multiple)]
|
||||
@ -28,7 +28,7 @@ pub enum StackCount {
|
||||
}
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
pub enum Alias {
|
||||
pub(crate) enum Alias {
|
||||
#[note(query_system_cycle_recursive_ty_alias)]
|
||||
#[help(query_system_cycle_recursive_ty_alias_help1)]
|
||||
#[help(query_system_cycle_recursive_ty_alias_help2)]
|
||||
@ -39,7 +39,7 @@ pub enum Alias {
|
||||
|
||||
#[derive(Subdiagnostic)]
|
||||
#[note(query_system_cycle_usage)]
|
||||
pub struct CycleUsage {
|
||||
pub(crate) struct CycleUsage {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub usage: String,
|
||||
@ -47,7 +47,7 @@ pub struct CycleUsage {
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(query_system_cycle, code = E0391)]
|
||||
pub struct Cycle {
|
||||
pub(crate) struct Cycle {
|
||||
#[primary_span]
|
||||
pub span: Span,
|
||||
pub stack_bottom: String,
|
||||
@ -65,14 +65,14 @@ pub struct Cycle {
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(query_system_reentrant)]
|
||||
pub struct Reentrant;
|
||||
pub(crate) struct Reentrant;
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(query_system_increment_compilation)]
|
||||
#[help]
|
||||
#[note(query_system_increment_compilation_note1)]
|
||||
#[note(query_system_increment_compilation_note2)]
|
||||
pub struct IncrementCompilation {
|
||||
pub(crate) struct IncrementCompilation {
|
||||
pub run_cmd: String,
|
||||
pub dep_node: String,
|
||||
}
|
||||
|
@ -5,6 +5,7 @@
|
||||
#![feature(hash_raw_entry)]
|
||||
#![feature(let_chains)]
|
||||
#![feature(min_specialization)]
|
||||
#![warn(unreachable_pub)]
|
||||
// tidy-alphabetical-end
|
||||
|
||||
pub mod cache;
|
||||
|
@ -21,6 +21,7 @@
|
||||
#![feature(let_chains)]
|
||||
#![feature(rustc_attrs)]
|
||||
#![feature(rustdoc_internals)]
|
||||
#![warn(unreachable_pub)]
|
||||
// tidy-alphabetical-end
|
||||
|
||||
use std::cell::{Cell, RefCell};
|
||||
|
@ -22,7 +22,9 @@
|
||||
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
|
||||
use rustc_span::edition::{Edition, DEFAULT_EDITION, EDITION_NAME_LIST, LATEST_STABLE_EDITION};
|
||||
use rustc_span::source_map::FilePathMapping;
|
||||
use rustc_span::{FileName, FileNameDisplayPreference, RealFileName, SourceFileHashAlgorithm};
|
||||
use rustc_span::{
|
||||
sym, FileName, FileNameDisplayPreference, RealFileName, SourceFileHashAlgorithm, Symbol,
|
||||
};
|
||||
use rustc_target::spec::{
|
||||
FramePointer, LinkSelfContainedComponents, LinkerFeatures, SplitDebuginfo, Target, TargetTriple,
|
||||
};
|
||||
@ -402,6 +404,23 @@ pub(crate) fn all() -> Self {
|
||||
}
|
||||
}
|
||||
|
||||
/// Values for the `-Z fmt-debug` flag.
|
||||
#[derive(Copy, Clone, PartialEq, Hash, Debug)]
|
||||
pub enum FmtDebug {
|
||||
/// Derive fully-featured implementation
|
||||
Full,
|
||||
/// Print only type name, without fields
|
||||
Shallow,
|
||||
/// `#[derive(Debug)]` and `{:?}` are no-ops
|
||||
None,
|
||||
}
|
||||
|
||||
impl FmtDebug {
|
||||
pub(crate) fn all() -> [Symbol; 3] {
|
||||
[sym::full, sym::none, sym::shallow]
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Hash, Debug)]
|
||||
pub enum SwitchWithOptPath {
|
||||
Enabled(Option<PathBuf>),
|
||||
@ -2994,7 +3013,7 @@ pub(crate) mod dep_tracking {
|
||||
|
||||
use super::{
|
||||
BranchProtection, CFGuard, CFProtection, CollapseMacroDebuginfo, CoverageOptions,
|
||||
CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FunctionReturn,
|
||||
CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FmtDebug, FunctionReturn,
|
||||
InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, LocationDetail,
|
||||
LtoCli, NextSolverConfig, OomStrategy, OptLevel, OutFileName, OutputType, OutputTypes,
|
||||
PatchableFunctionEntry, Polonius, RemapPathScopeComponents, ResolveDocLinks,
|
||||
@ -3088,6 +3107,7 @@ fn hash(
|
||||
OutputType,
|
||||
RealFileName,
|
||||
LocationDetail,
|
||||
FmtDebug,
|
||||
BranchProtection,
|
||||
OomStrategy,
|
||||
LanguageIdentifier,
|
||||
|
@ -31,7 +31,7 @@
|
||||
use rustc_target::abi::Align;
|
||||
use rustc_target::spec::{PanicStrategy, RelocModel, SanitizerSet, Target, TargetTriple, TARGETS};
|
||||
|
||||
use crate::config::CrateType;
|
||||
use crate::config::{CrateType, FmtDebug};
|
||||
use crate::Session;
|
||||
|
||||
/// The parsed `--cfg` options that define the compilation environment of the
|
||||
@ -142,6 +142,7 @@ pub(crate) fn disallow_cfgs(sess: &Session, user_cfgs: &Cfg) {
|
||||
| (sym::target_has_atomic_equal_alignment, Some(_))
|
||||
| (sym::target_has_atomic_load_store, Some(_))
|
||||
| (sym::target_thread_local, None) => disallow(cfg, "--target"),
|
||||
(sym::fmt_debug, None | Some(_)) => disallow(cfg, "-Z fmt-debug"),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@ -179,6 +180,20 @@ macro_rules! ins_sym {
|
||||
ins_none!(sym::debug_assertions);
|
||||
}
|
||||
|
||||
if sess.is_nightly_build() {
|
||||
match sess.opts.unstable_opts.fmt_debug {
|
||||
FmtDebug::Full => {
|
||||
ins_sym!(sym::fmt_debug, sym::full);
|
||||
}
|
||||
FmtDebug::Shallow => {
|
||||
ins_sym!(sym::fmt_debug, sym::shallow);
|
||||
}
|
||||
FmtDebug::None => {
|
||||
ins_sym!(sym::fmt_debug, sym::none);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if sess.overflow_checks() {
|
||||
ins_none!(sym::overflow_checks);
|
||||
}
|
||||
@ -326,6 +341,8 @@ macro_rules! ins {
|
||||
|
||||
ins!(sym::debug_assertions, no_values);
|
||||
|
||||
ins!(sym::fmt_debug, empty_values).extend(FmtDebug::all());
|
||||
|
||||
// These four are never set by rustc, but we set them anyway; they
|
||||
// should not trigger the lint because `cargo clippy`, `cargo doc`,
|
||||
// `cargo test`, `cargo miri run` and `cargo fmt` (respectively)
|
||||
|
@ -408,6 +408,7 @@ mod desc {
|
||||
pub const parse_linker_plugin_lto: &str =
|
||||
"either a boolean (`yes`, `no`, `on`, `off`, etc), or the path to the linker plugin";
|
||||
pub const parse_location_detail: &str = "either `none`, or a comma separated list of location details to track: `file`, `line`, or `column`";
|
||||
pub const parse_fmt_debug: &str = "either `full`, `shallow`, or `none`";
|
||||
pub const parse_switch_with_opt_path: &str =
|
||||
"an optional path to the profiling data output directory";
|
||||
pub const parse_merge_functions: &str = "one of: `disabled`, `trampolines`, or `aliases`";
|
||||
@ -589,6 +590,16 @@ pub(crate) fn parse_list_with_polarity(
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn parse_fmt_debug(opt: &mut FmtDebug, v: Option<&str>) -> bool {
|
||||
*opt = match v {
|
||||
Some("full") => FmtDebug::Full,
|
||||
Some("shallow") => FmtDebug::Shallow,
|
||||
Some("none") => FmtDebug::None,
|
||||
_ => return false,
|
||||
};
|
||||
true
|
||||
}
|
||||
|
||||
pub(crate) fn parse_location_detail(ld: &mut LocationDetail, v: Option<&str>) -> bool {
|
||||
if let Some(v) = v {
|
||||
ld.line = false;
|
||||
@ -1724,6 +1735,9 @@ pub(crate) fn parse_wasm_c_abi(slot: &mut WasmCAbi, v: Option<&str>) -> bool {
|
||||
flatten_format_args: bool = (true, parse_bool, [TRACKED],
|
||||
"flatten nested format_args!() and literals into a simplified format_args!() call \
|
||||
(default: yes)"),
|
||||
fmt_debug: FmtDebug = (FmtDebug::Full, parse_fmt_debug, [TRACKED],
|
||||
"how detailed `#[derive(Debug)]` should be. `full` prints types recursively, \
|
||||
`shallow` prints only type names, `none` prints nothing and disables `{:?}`. (default: `full`)"),
|
||||
force_unstable_if_unmarked: bool = (false, parse_bool, [TRACKED],
|
||||
"force all crates to be `rustc_private` unstable (default: no)"),
|
||||
fuel: Option<(String, u64)> = (None, parse_optimization_fuel, [TRACKED],
|
||||
@ -1797,6 +1811,8 @@ pub(crate) fn parse_wasm_c_abi(slot: &mut WasmCAbi, v: Option<&str>) -> bool {
|
||||
"link the `.rlink` file generated by `-Z no-link` (default: no)"),
|
||||
linker_features: LinkerFeaturesCli = (LinkerFeaturesCli::default(), parse_linker_features, [UNTRACKED],
|
||||
"a comma-separated list of linker features to enable (+) or disable (-): `lld`"),
|
||||
lint_llvm_ir: bool = (false, parse_bool, [TRACKED],
|
||||
"lint LLVM IR (default: no)"),
|
||||
lint_mir: bool = (false, parse_bool, [UNTRACKED],
|
||||
"lint MIR before and after each transformation"),
|
||||
llvm_module_flag: Vec<(String, u32, String)> = (Vec::new(), parse_llvm_module_flag, [TRACKED],
|
||||
|
@ -536,6 +536,7 @@
|
||||
cfg_attr_multi,
|
||||
cfg_doctest,
|
||||
cfg_eval,
|
||||
cfg_fmt_debug,
|
||||
cfg_hide,
|
||||
cfg_overflow_checks,
|
||||
cfg_panic,
|
||||
@ -895,6 +896,7 @@
|
||||
fmaf32,
|
||||
fmaf64,
|
||||
fmt,
|
||||
fmt_debug,
|
||||
fmul_algebraic,
|
||||
fmul_fast,
|
||||
fn_align,
|
||||
@ -938,6 +940,7 @@
|
||||
fs_create_dir,
|
||||
fsub_algebraic,
|
||||
fsub_fast,
|
||||
full,
|
||||
fundamental,
|
||||
fused_iterator,
|
||||
future,
|
||||
@ -1281,6 +1284,7 @@
|
||||
new_binary,
|
||||
new_const,
|
||||
new_debug,
|
||||
new_debug_noop,
|
||||
new_display,
|
||||
new_lower_exp,
|
||||
new_lower_hex,
|
||||
@ -1715,6 +1719,7 @@
|
||||
semitransparent,
|
||||
sha512_sm_x86,
|
||||
shadow_call_stack,
|
||||
shallow,
|
||||
shl,
|
||||
shl_assign,
|
||||
shorter_tail_lifetimes,
|
||||
|
@ -28,7 +28,7 @@ pub fn target() -> Target {
|
||||
code_model: Some(CodeModel::Medium),
|
||||
emit_debug_gdb_scripts: false,
|
||||
eh_frame_header: false,
|
||||
supported_sanitizers: SanitizerSet::KERNELADDRESS,
|
||||
supported_sanitizers: SanitizerSet::KERNELADDRESS | SanitizerSet::SHADOWCALLSTACK,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
|
@ -27,7 +27,7 @@ pub fn target() -> Target {
|
||||
code_model: Some(CodeModel::Medium),
|
||||
emit_debug_gdb_scripts: false,
|
||||
eh_frame_header: false,
|
||||
supported_sanitizers: SanitizerSet::KERNELADDRESS,
|
||||
supported_sanitizers: SanitizerSet::KERNELADDRESS | SanitizerSet::SHADOWCALLSTACK,
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
|
@ -8,6 +8,7 @@
|
||||
#![feature(iter_array_chunks)]
|
||||
#![feature(iter_next_chunk)]
|
||||
#![feature(iter_advance_by)]
|
||||
#![feature(isqrt)]
|
||||
|
||||
extern crate test;
|
||||
|
||||
|
62
library/core/benches/num/int_sqrt/mod.rs
Normal file
@ -0,0 +1,62 @@
|
||||
use rand::Rng;
|
||||
use test::{black_box, Bencher};
|
||||
|
||||
macro_rules! int_sqrt_bench {
|
||||
($t:ty, $predictable:ident, $random:ident, $random_small:ident, $random_uniform:ident) => {
|
||||
#[bench]
|
||||
fn $predictable(bench: &mut Bencher) {
|
||||
bench.iter(|| {
|
||||
for n in 0..(<$t>::BITS / 8) {
|
||||
for i in 1..=(100 as $t) {
|
||||
let x = black_box(i << (n * 8));
|
||||
black_box(x.isqrt());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn $random(bench: &mut Bencher) {
|
||||
let mut rng = crate::bench_rng();
|
||||
/* Exponentially distributed random numbers from the whole range of the type. */
|
||||
let numbers: Vec<$t> =
|
||||
(0..256).map(|_| rng.gen::<$t>() >> rng.gen_range(0..<$t>::BITS)).collect();
|
||||
bench.iter(|| {
|
||||
for x in &numbers {
|
||||
black_box(black_box(x).isqrt());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn $random_small(bench: &mut Bencher) {
|
||||
let mut rng = crate::bench_rng();
|
||||
/* Exponentially distributed random numbers from the range 0..256. */
|
||||
let numbers: Vec<$t> =
|
||||
(0..256).map(|_| (rng.gen::<u8>() >> rng.gen_range(0..u8::BITS)) as $t).collect();
|
||||
bench.iter(|| {
|
||||
for x in &numbers {
|
||||
black_box(black_box(x).isqrt());
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
#[bench]
|
||||
fn $random_uniform(bench: &mut Bencher) {
|
||||
let mut rng = crate::bench_rng();
|
||||
/* Uniformly distributed random numbers from the whole range of the type. */
|
||||
let numbers: Vec<$t> = (0..256).map(|_| rng.gen::<$t>()).collect();
|
||||
bench.iter(|| {
|
||||
for x in &numbers {
|
||||
black_box(black_box(x).isqrt());
|
||||
}
|
||||
});
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
int_sqrt_bench! {u8, u8_sqrt_predictable, u8_sqrt_random, u8_sqrt_random_small, u8_sqrt_uniform}
|
||||
int_sqrt_bench! {u16, u16_sqrt_predictable, u16_sqrt_random, u16_sqrt_random_small, u16_sqrt_uniform}
|
||||
int_sqrt_bench! {u32, u32_sqrt_predictable, u32_sqrt_random, u32_sqrt_random_small, u32_sqrt_uniform}
|
||||
int_sqrt_bench! {u64, u64_sqrt_predictable, u64_sqrt_random, u64_sqrt_random_small, u64_sqrt_uniform}
|
||||
int_sqrt_bench! {u128, u128_sqrt_predictable, u128_sqrt_random, u128_sqrt_random_small, u128_sqrt_uniform}
|
@ -2,6 +2,7 @@
|
||||
mod flt2dec;
|
||||
mod int_log;
|
||||
mod int_pow;
|
||||
mod int_sqrt;
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
|
@ -118,6 +118,10 @@ pub fn new_debug<'b, T: Debug>(x: &'b T) -> Argument<'_> {
|
||||
Self::new(x, Debug::fmt)
|
||||
}
|
||||
#[inline(always)]
|
||||
pub fn new_debug_noop<'b, T: Debug>(x: &'b T) -> Argument<'_> {
|
||||
Self::new(x, |_, _| Ok(()))
|
||||
}
|
||||
#[inline(always)]
|
||||
pub fn new_octal<'b, T: Octal>(x: &'b T) -> Argument<'_> {
|
||||
Self::new(x, Octal::fmt)
|
||||
}
|
||||
|
@ -1641,7 +1641,33 @@ pub const fn checked_isqrt(self) -> Option<Self> {
|
||||
if self < 0 {
|
||||
None
|
||||
} else {
|
||||
Some((self as $UnsignedT).isqrt() as Self)
|
||||
// SAFETY: Input is nonnegative in this `else` branch.
|
||||
let result = unsafe {
|
||||
crate::num::int_sqrt::$ActualT(self as $ActualT) as $SelfT
|
||||
};
|
||||
|
||||
// Inform the optimizer what the range of outputs is. If
|
||||
// testing `core` crashes with no panic message and a
|
||||
// `num::int_sqrt::i*` test failed, it's because your edits
|
||||
// caused these assertions to become false.
|
||||
//
|
||||
// SAFETY: Integer square root is a monotonically nondecreasing
|
||||
// function, which means that increasing the input will never
|
||||
// cause the output to decrease. Thus, since the input for
|
||||
// nonnegative signed integers is bounded by
|
||||
// `[0, <$ActualT>::MAX]`, sqrt(n) will be bounded by
|
||||
// `[sqrt(0), sqrt(<$ActualT>::MAX)]`.
|
||||
unsafe {
|
||||
// SAFETY: `<$ActualT>::MAX` is nonnegative.
|
||||
const MAX_RESULT: $SelfT = unsafe {
|
||||
crate::num::int_sqrt::$ActualT(<$ActualT>::MAX) as $SelfT
|
||||
};
|
||||
|
||||
crate::hint::assert_unchecked(result >= 0);
|
||||
crate::hint::assert_unchecked(result <= MAX_RESULT);
|
||||
}
|
||||
|
||||
Some(result)
|
||||
}
|
||||
}
|
||||
|
||||
@ -2862,15 +2888,11 @@ pub const fn pow(self, mut exp: u32) -> Self {
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
#[track_caller]
|
||||
pub const fn isqrt(self) -> Self {
|
||||
// I would like to implement it as
|
||||
// ```
|
||||
// self.checked_isqrt().expect("argument of integer square root must be non-negative")
|
||||
// ```
|
||||
// but `expect` is not yet stable as a `const fn`.
|
||||
match self.checked_isqrt() {
|
||||
Some(sqrt) => sqrt,
|
||||
None => panic!("argument of integer square root must be non-negative"),
|
||||
None => crate::num::int_sqrt::panic_for_negative_argument(),
|
||||
}
|
||||
}
|
||||
|
||||
|
316
library/core/src/num/int_sqrt.rs
Normal file
@ -0,0 +1,316 @@
|
||||
//! These functions use the [Karatsuba square root algorithm][1] to compute the
|
||||
//! [integer square root](https://en.wikipedia.org/wiki/Integer_square_root)
|
||||
//! for the primitive integer types.
|
||||
//!
|
||||
//! The signed integer functions can only handle **nonnegative** inputs, so
|
||||
//! that must be checked before calling those.
|
||||
//!
|
||||
//! [1]: <https://web.archive.org/web/20230511212802/https://inria.hal.science/inria-00072854v1/file/RR-3805.pdf>
|
||||
//! "Paul Zimmermann. Karatsuba Square Root. \[Research Report\] RR-3805,
|
||||
//! INRIA. 1999, pp.8. (inria-00072854)"
|
||||
|
||||
/// This array stores the [integer square roots](
|
||||
/// https://en.wikipedia.org/wiki/Integer_square_root) and remainders of each
|
||||
/// [`u8`](prim@u8) value. For example, `U8_ISQRT_WITH_REMAINDER[17]` will be
|
||||
/// `(4, 1)` because the integer square root of 17 is 4 and because 17 is 1
|
||||
/// higher than 4 squared.
|
||||
const U8_ISQRT_WITH_REMAINDER: [(u8, u8); 256] = {
|
||||
let mut result = [(0, 0); 256];
|
||||
|
||||
let mut n: usize = 0;
|
||||
let mut isqrt_n: usize = 0;
|
||||
while n < result.len() {
|
||||
result[n] = (isqrt_n as u8, (n - isqrt_n.pow(2)) as u8);
|
||||
|
||||
n += 1;
|
||||
if n == (isqrt_n + 1).pow(2) {
|
||||
isqrt_n += 1;
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
};
|
||||
|
||||
/// Returns the [integer square root](
|
||||
/// https://en.wikipedia.org/wiki/Integer_square_root) of any [`u8`](prim@u8)
|
||||
/// input.
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn u8(n: u8) -> u8 {
|
||||
U8_ISQRT_WITH_REMAINDER[n as usize].0
|
||||
}
|
||||
|
||||
/// Generates an `i*` function that returns the [integer square root](
|
||||
/// https://en.wikipedia.org/wiki/Integer_square_root) of any **nonnegative**
|
||||
/// input of a specific signed integer type.
|
||||
macro_rules! signed_fn {
|
||||
($SignedT:ident, $UnsignedT:ident) => {
|
||||
/// Returns the [integer square root](
|
||||
/// https://en.wikipedia.org/wiki/Integer_square_root) of any
|
||||
/// **nonnegative**
|
||||
#[doc = concat!("[`", stringify!($SignedT), "`](prim@", stringify!($SignedT), ")")]
|
||||
/// input.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// This results in undefined behavior when the input is negative.
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const unsafe fn $SignedT(n: $SignedT) -> $SignedT {
|
||||
debug_assert!(n >= 0, "Negative input inside `isqrt`.");
|
||||
$UnsignedT(n as $UnsignedT) as $SignedT
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
signed_fn!(i8, u8);
|
||||
signed_fn!(i16, u16);
|
||||
signed_fn!(i32, u32);
|
||||
signed_fn!(i64, u64);
|
||||
signed_fn!(i128, u128);
|
||||
|
||||
/// Generates a `u*` function that returns the [integer square root](
|
||||
/// https://en.wikipedia.org/wiki/Integer_square_root) of any input of
|
||||
/// a specific unsigned integer type.
|
||||
macro_rules! unsigned_fn {
|
||||
($UnsignedT:ident, $HalfBitsT:ident, $stages:ident) => {
|
||||
/// Returns the [integer square root](
|
||||
/// https://en.wikipedia.org/wiki/Integer_square_root) of any
|
||||
#[doc = concat!("[`", stringify!($UnsignedT), "`](prim@", stringify!($UnsignedT), ")")]
|
||||
/// input.
|
||||
#[must_use = "this returns the result of the operation, \
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn $UnsignedT(mut n: $UnsignedT) -> $UnsignedT {
|
||||
if n <= <$HalfBitsT>::MAX as $UnsignedT {
|
||||
$HalfBitsT(n as $HalfBitsT) as $UnsignedT
|
||||
} else {
|
||||
// The normalization shift satisfies the Karatsuba square root
|
||||
// algorithm precondition "a₃ ≥ b/4" where a₃ is the most
|
||||
// significant quarter of `n`'s bits and b is the number of
|
||||
// values that can be represented by that quarter of the bits.
|
||||
//
|
||||
// b/4 would then be all 0s except the second most significant
|
||||
// bit (010...0) in binary. Since a₃ must be at least b/4, a₃'s
|
||||
// most significant bit or its neighbor must be a 1. Since a₃'s
|
||||
// most significant bits are `n`'s most significant bits, the
|
||||
// same applies to `n`.
|
||||
//
|
||||
// The reason to shift by an even number of bits is because an
|
||||
// even number of bits produces the square root shifted to the
|
||||
// left by half of the normalization shift:
|
||||
//
|
||||
// sqrt(n << (2 * p))
|
||||
// sqrt(2.pow(2 * p) * n)
|
||||
// sqrt(2.pow(2 * p)) * sqrt(n)
|
||||
// 2.pow(p) * sqrt(n)
|
||||
// sqrt(n) << p
|
||||
//
|
||||
// Shifting by an odd number of bits leaves an ugly sqrt(2)
|
||||
// multiplied in:
|
||||
//
|
||||
// sqrt(n << (2 * p + 1))
|
||||
// sqrt(2.pow(2 * p + 1) * n)
|
||||
// sqrt(2 * 2.pow(2 * p) * n)
|
||||
// sqrt(2) * sqrt(2.pow(2 * p)) * sqrt(n)
|
||||
// sqrt(2) * 2.pow(p) * sqrt(n)
|
||||
// sqrt(2) * (sqrt(n) << p)
|
||||
const EVEN_MAKING_BITMASK: u32 = !1;
|
||||
let normalization_shift = n.leading_zeros() & EVEN_MAKING_BITMASK;
|
||||
n <<= normalization_shift;
|
||||
|
||||
let s = $stages(n);
|
||||
|
||||
let denormalization_shift = normalization_shift >> 1;
|
||||
s >> denormalization_shift
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// Generates the first stage of the computation after normalization.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `$n` must be nonzero.
|
||||
macro_rules! first_stage {
|
||||
($original_bits:literal, $n:ident) => {{
|
||||
debug_assert!($n != 0, "`$n` is zero in `first_stage!`.");
|
||||
|
||||
const N_SHIFT: u32 = $original_bits - 8;
|
||||
let n = $n >> N_SHIFT;
|
||||
|
||||
let (s, r) = U8_ISQRT_WITH_REMAINDER[n as usize];
|
||||
|
||||
// Inform the optimizer that `s` is nonzero. This will allow it to
|
||||
// avoid generating code to handle division-by-zero panics in the next
|
||||
// stage.
|
||||
//
|
||||
// SAFETY: If the original `$n` is zero, the top of the `unsigned_fn`
|
||||
// macro recurses instead of continuing to this point, so the original
|
||||
// `$n` wasn't a 0 if we've reached here.
|
||||
//
|
||||
// Then the `unsigned_fn` macro normalizes `$n` so that at least one of
|
||||
// its two most-significant bits is a 1.
|
||||
//
|
||||
// Then this stage puts the eight most-significant bits of `$n` into
|
||||
// `n`. This means that `n` here has at least one 1 bit in its two
|
||||
// most-significant bits, making `n` nonzero.
|
||||
//
|
||||
// `U8_ISQRT_WITH_REMAINDER[n as usize]` will give a nonzero `s` when
|
||||
// given a nonzero `n`.
|
||||
unsafe { crate::hint::assert_unchecked(s != 0) };
|
||||
(s, r)
|
||||
}};
|
||||
}
|
||||
|
||||
/// Generates a middle stage of the computation.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `$s` must be nonzero.
|
||||
macro_rules! middle_stage {
|
||||
($original_bits:literal, $ty:ty, $n:ident, $s:ident, $r:ident) => {{
|
||||
debug_assert!($s != 0, "`$s` is zero in `middle_stage!`.");
|
||||
|
||||
const N_SHIFT: u32 = $original_bits - <$ty>::BITS;
|
||||
let n = ($n >> N_SHIFT) as $ty;
|
||||
|
||||
const HALF_BITS: u32 = <$ty>::BITS >> 1;
|
||||
const QUARTER_BITS: u32 = <$ty>::BITS >> 2;
|
||||
const LOWER_HALF_1_BITS: $ty = (1 << HALF_BITS) - 1;
|
||||
const LOWEST_QUARTER_1_BITS: $ty = (1 << QUARTER_BITS) - 1;
|
||||
|
||||
let lo = n & LOWER_HALF_1_BITS;
|
||||
let numerator = (($r as $ty) << QUARTER_BITS) | (lo >> QUARTER_BITS);
|
||||
let denominator = ($s as $ty) << 1;
|
||||
let q = numerator / denominator;
|
||||
let u = numerator % denominator;
|
||||
|
||||
let mut s = ($s << QUARTER_BITS) as $ty + q;
|
||||
let (mut r, overflow) =
|
||||
((u << QUARTER_BITS) | (lo & LOWEST_QUARTER_1_BITS)).overflowing_sub(q * q);
|
||||
if overflow {
|
||||
r = r.wrapping_add(2 * s - 1);
|
||||
s -= 1;
|
||||
}
|
||||
|
||||
// Inform the optimizer that `s` is nonzero. This will allow it to
|
||||
// avoid generating code to handle division-by-zero panics in the next
|
||||
// stage.
|
||||
//
|
||||
// SAFETY: If the original `$n` is zero, the top of the `unsigned_fn`
|
||||
// macro recurses instead of continuing to this point, so the original
|
||||
// `$n` wasn't a 0 if we've reached here.
|
||||
//
|
||||
// Then the `unsigned_fn` macro normalizes `$n` so that at least one of
|
||||
// its two most-significant bits is a 1.
|
||||
//
|
||||
// Then these stages take as many of the most-significant bits of `$n`
|
||||
// as will fit in this stage's type. For example, the stage that
|
||||
// handles `u32` deals with the 32 most-significant bits of `$n`. This
|
||||
// means that each stage has at least one 1 bit in `n`'s two
|
||||
// most-significant bits, making `n` nonzero.
|
||||
//
|
||||
// Then this stage will produce the correct integer square root for
|
||||
// that `n` value. Since `n` is nonzero, `s` will also be nonzero.
|
||||
unsafe { crate::hint::assert_unchecked(s != 0) };
|
||||
(s, r)
|
||||
}};
|
||||
}
|
||||
|
||||
/// Generates the last stage of the computation before denormalization.
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `$s` must be nonzero.
|
||||
macro_rules! last_stage {
|
||||
($ty:ty, $n:ident, $s:ident, $r:ident) => {{
|
||||
debug_assert!($s != 0, "`$s` is zero in `last_stage!`.");
|
||||
|
||||
const HALF_BITS: u32 = <$ty>::BITS >> 1;
|
||||
const QUARTER_BITS: u32 = <$ty>::BITS >> 2;
|
||||
const LOWER_HALF_1_BITS: $ty = (1 << HALF_BITS) - 1;
|
||||
|
||||
let lo = $n & LOWER_HALF_1_BITS;
|
||||
let numerator = (($r as $ty) << QUARTER_BITS) | (lo >> QUARTER_BITS);
|
||||
let denominator = ($s as $ty) << 1;
|
||||
|
||||
let q = numerator / denominator;
|
||||
let mut s = ($s << QUARTER_BITS) as $ty + q;
|
||||
let (s_squared, overflow) = s.overflowing_mul(s);
|
||||
if overflow || s_squared > $n {
|
||||
s -= 1;
|
||||
}
|
||||
s
|
||||
}};
|
||||
}
|
||||
|
||||
/// Takes the normalized [`u16`](prim@u16) input and gets its normalized
|
||||
/// [integer square root](https://en.wikipedia.org/wiki/Integer_square_root).
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `n` must be nonzero.
|
||||
#[inline]
|
||||
const fn u16_stages(n: u16) -> u16 {
|
||||
let (s, r) = first_stage!(16, n);
|
||||
last_stage!(u16, n, s, r)
|
||||
}
|
||||
|
||||
/// Takes the normalized [`u32`](prim@u32) input and gets its normalized
|
||||
/// [integer square root](https://en.wikipedia.org/wiki/Integer_square_root).
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `n` must be nonzero.
|
||||
#[inline]
|
||||
const fn u32_stages(n: u32) -> u32 {
|
||||
let (s, r) = first_stage!(32, n);
|
||||
let (s, r) = middle_stage!(32, u16, n, s, r);
|
||||
last_stage!(u32, n, s, r)
|
||||
}
|
||||
|
||||
/// Takes the normalized [`u64`](prim@u64) input and gets its normalized
|
||||
/// [integer square root](https://en.wikipedia.org/wiki/Integer_square_root).
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `n` must be nonzero.
|
||||
#[inline]
|
||||
const fn u64_stages(n: u64) -> u64 {
|
||||
let (s, r) = first_stage!(64, n);
|
||||
let (s, r) = middle_stage!(64, u16, n, s, r);
|
||||
let (s, r) = middle_stage!(64, u32, n, s, r);
|
||||
last_stage!(u64, n, s, r)
|
||||
}
|
||||
|
||||
/// Takes the normalized [`u128`](prim@u128) input and gets its normalized
|
||||
/// [integer square root](https://en.wikipedia.org/wiki/Integer_square_root).
|
||||
///
|
||||
/// # Safety
|
||||
///
|
||||
/// `n` must be nonzero.
|
||||
#[inline]
|
||||
const fn u128_stages(n: u128) -> u128 {
|
||||
let (s, r) = first_stage!(128, n);
|
||||
let (s, r) = middle_stage!(128, u16, n, s, r);
|
||||
let (s, r) = middle_stage!(128, u32, n, s, r);
|
||||
let (s, r) = middle_stage!(128, u64, n, s, r);
|
||||
last_stage!(u128, n, s, r)
|
||||
}
|
||||
|
||||
unsigned_fn!(u16, u8, u16_stages);
|
||||
unsigned_fn!(u32, u16, u32_stages);
|
||||
unsigned_fn!(u64, u32, u64_stages);
|
||||
unsigned_fn!(u128, u64, u128_stages);
|
||||
|
||||
/// Instantiate this panic logic once, rather than for all the isqrt methods
|
||||
/// on every single primitive type.
|
||||
#[cold]
|
||||
#[track_caller]
|
||||
pub const fn panic_for_negative_argument() -> ! {
|
||||
panic!("argument of integer square root cannot be negative")
|
||||
}
|
@ -41,6 +41,7 @@ macro_rules! unlikely {
|
||||
|
||||
mod error;
|
||||
mod int_log10;
|
||||
mod int_sqrt;
|
||||
mod nonzero;
|
||||
mod overflow_panic;
|
||||
mod saturating;
|
||||
|
@ -7,7 +7,7 @@
|
||||
use crate::ops::{BitOr, BitOrAssign, Div, DivAssign, Neg, Rem, RemAssign};
|
||||
use crate::panic::{RefUnwindSafe, UnwindSafe};
|
||||
use crate::str::FromStr;
|
||||
use crate::{fmt, hint, intrinsics, ptr, ub_checks};
|
||||
use crate::{fmt, intrinsics, ptr, ub_checks};
|
||||
|
||||
/// A marker trait for primitive types which can be zero.
|
||||
///
|
||||
@ -1545,31 +1545,14 @@ pub const fn is_power_of_two(self) -> bool {
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn isqrt(self) -> Self {
|
||||
// The algorithm is based on the one presented in
|
||||
// <https://en.wikipedia.org/wiki/Methods_of_computing_square_roots#Binary_numeral_system_(base_2)>
|
||||
// which cites as source the following C code:
|
||||
// <https://web.archive.org/web/20120306040058/http://medialab.freaknet.org/martin/src/sqrt/sqrt.c>.
|
||||
let result = self.get().isqrt();
|
||||
|
||||
let mut op = self.get();
|
||||
let mut res = 0;
|
||||
let mut one = 1 << (self.ilog2() & !1);
|
||||
|
||||
while one != 0 {
|
||||
if op >= res + one {
|
||||
op -= res + one;
|
||||
res = (res >> 1) + one;
|
||||
} else {
|
||||
res >>= 1;
|
||||
}
|
||||
one >>= 2;
|
||||
}
|
||||
|
||||
// SAFETY: The result fits in an integer with half as many bits.
|
||||
// Inform the optimizer about it.
|
||||
unsafe { hint::assert_unchecked(res < 1 << (Self::BITS / 2)) };
|
||||
|
||||
// SAFETY: The square root of an integer >= 1 is always >= 1.
|
||||
unsafe { Self::new_unchecked(res) }
|
||||
// SAFETY: Integer square root is a monotonically nondecreasing
|
||||
// function, which means that increasing the input will never cause
|
||||
// the output to decrease. Thus, since the input for nonzero
|
||||
// unsigned integers has a lower bound of 1, the lower bound of the
|
||||
// results will be sqrt(1), which is 1, so a result can't be zero.
|
||||
unsafe { Self::new_unchecked(result) }
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -2762,10 +2762,24 @@ pub const fn pow(self, mut exp: u32) -> Self {
|
||||
without modifying the original"]
|
||||
#[inline]
|
||||
pub const fn isqrt(self) -> Self {
|
||||
match NonZero::new(self) {
|
||||
Some(x) => x.isqrt().get(),
|
||||
None => 0,
|
||||
let result = crate::num::int_sqrt::$ActualT(self as $ActualT) as $SelfT;
|
||||
|
||||
// Inform the optimizer what the range of outputs is. If testing
|
||||
// `core` crashes with no panic message and a `num::int_sqrt::u*`
|
||||
// test failed, it's because your edits caused these assertions or
|
||||
// the assertions in `fn isqrt` of `nonzero.rs` to become false.
|
||||
//
|
||||
// SAFETY: Integer square root is a monotonically nondecreasing
|
||||
// function, which means that increasing the input will never
|
||||
// cause the output to decrease. Thus, since the input for unsigned
|
||||
// integers is bounded by `[0, <$ActualT>::MAX]`, sqrt(n) will be
|
||||
// bounded by `[sqrt(0), sqrt(<$ActualT>::MAX)]`.
|
||||
unsafe {
|
||||
const MAX_RESULT: $SelfT = crate::num::int_sqrt::$ActualT(<$ActualT>::MAX) as $SelfT;
|
||||
crate::hint::assert_unchecked(result <= MAX_RESULT);
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Performs Euclidean division.
|
||||
|
@ -288,38 +288,6 @@ fn test_pow() {
|
||||
assert_eq!(r.saturating_pow(0), 1 as $T);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_isqrt() {
|
||||
assert_eq!($T::MIN.checked_isqrt(), None);
|
||||
assert_eq!((-1 as $T).checked_isqrt(), None);
|
||||
assert_eq!((0 as $T).isqrt(), 0 as $T);
|
||||
assert_eq!((1 as $T).isqrt(), 1 as $T);
|
||||
assert_eq!((2 as $T).isqrt(), 1 as $T);
|
||||
assert_eq!((99 as $T).isqrt(), 9 as $T);
|
||||
assert_eq!((100 as $T).isqrt(), 10 as $T);
|
||||
}
|
||||
|
||||
#[cfg(not(miri))] // Miri is too slow
|
||||
#[test]
|
||||
fn test_lots_of_isqrt() {
|
||||
let n_max: $T = (1024 * 1024).min($T::MAX as u128) as $T;
|
||||
for n in 0..=n_max {
|
||||
let isqrt: $T = n.isqrt();
|
||||
|
||||
assert!(isqrt.pow(2) <= n);
|
||||
let (square, overflow) = (isqrt + 1).overflowing_pow(2);
|
||||
assert!(overflow || square > n);
|
||||
}
|
||||
|
||||
for n in ($T::MAX - 127)..=$T::MAX {
|
||||
let isqrt: $T = n.isqrt();
|
||||
|
||||
assert!(isqrt.pow(2) <= n);
|
||||
let (square, overflow) = (isqrt + 1).overflowing_pow(2);
|
||||
assert!(overflow || square > n);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_div_floor() {
|
||||
let a: $T = 8;
|
||||
|
248
library/core/tests/num/int_sqrt.rs
Normal file
@ -0,0 +1,248 @@
|
||||
macro_rules! tests {
|
||||
($isqrt_consistency_check_fn_macro:ident : $($T:ident)+) => {
|
||||
$(
|
||||
mod $T {
|
||||
$isqrt_consistency_check_fn_macro!($T);
|
||||
|
||||
// Check that the following produce the correct values from
|
||||
// `isqrt`:
|
||||
//
|
||||
// * the first and last 128 nonnegative values
|
||||
// * powers of two, minus one
|
||||
// * powers of two
|
||||
//
|
||||
// For signed types, check that `checked_isqrt` and `isqrt`
|
||||
// either produce the same numeric value or respectively
|
||||
// produce `None` and a panic. Make sure to do a consistency
|
||||
// check for `<$T>::MIN` as well, as no nonnegative values
|
||||
// negate to it.
|
||||
//
|
||||
// For unsigned types check that `isqrt` produces the same
|
||||
// numeric value for `$T` and `NonZero<$T>`.
|
||||
#[test]
|
||||
fn isqrt() {
|
||||
isqrt_consistency_check(<$T>::MIN);
|
||||
|
||||
for n in (0..=127)
|
||||
.chain(<$T>::MAX - 127..=<$T>::MAX)
|
||||
.chain((0..<$T>::MAX.count_ones()).map(|exponent| (1 << exponent) - 1))
|
||||
.chain((0..<$T>::MAX.count_ones()).map(|exponent| 1 << exponent))
|
||||
{
|
||||
isqrt_consistency_check(n);
|
||||
|
||||
let isqrt_n = n.isqrt();
|
||||
assert!(
|
||||
isqrt_n
|
||||
.checked_mul(isqrt_n)
|
||||
.map(|isqrt_n_squared| isqrt_n_squared <= n)
|
||||
.unwrap_or(false),
|
||||
"`{n}.isqrt()` should be lower than {isqrt_n}."
|
||||
);
|
||||
assert!(
|
||||
(isqrt_n + 1)
|
||||
.checked_mul(isqrt_n + 1)
|
||||
.map(|isqrt_n_plus_1_squared| n < isqrt_n_plus_1_squared)
|
||||
.unwrap_or(true),
|
||||
"`{n}.isqrt()` should be higher than {isqrt_n})."
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check the square roots of:
|
||||
//
|
||||
// * the first 1,024 perfect squares
|
||||
// * halfway between each of the first 1,024 perfect squares
|
||||
// and the next perfect square
|
||||
// * the next perfect square after the each of the first 1,024
|
||||
// perfect squares, minus one
|
||||
// * the last 1,024 perfect squares
|
||||
// * the last 1,024 perfect squares, minus one
|
||||
// * halfway between each of the last 1,024 perfect squares
|
||||
// and the previous perfect square
|
||||
#[test]
|
||||
// Skip this test on Miri, as it takes too long to run.
|
||||
#[cfg(not(miri))]
|
||||
fn isqrt_extended() {
|
||||
// The correct value is worked out by using the fact that
|
||||
// the nth nonzero perfect square is the sum of the first n
|
||||
// odd numbers:
|
||||
//
|
||||
// 1 = 1
|
||||
// 4 = 1 + 3
|
||||
// 9 = 1 + 3 + 5
|
||||
// 16 = 1 + 3 + 5 + 7
|
||||
//
|
||||
// Note also that the last odd number added in is two times
|
||||
// the square root of the previous perfect square, plus
|
||||
// one:
|
||||
//
|
||||
// 1 = 2*0 + 1
|
||||
// 3 = 2*1 + 1
|
||||
// 5 = 2*2 + 1
|
||||
// 7 = 2*3 + 1
|
||||
//
|
||||
// That means we can add the square root of this perfect
|
||||
// square once to get about halfway to the next perfect
|
||||
// square, then we can add the square root of this perfect
|
||||
// square again to get to the next perfect square, minus
|
||||
// one, then we can add one to get to the next perfect
|
||||
// square.
|
||||
//
|
||||
// This allows us to, for each of the first 1,024 perfect
|
||||
// squares, test that the square roots of the following are
|
||||
// all correct and equal to each other:
|
||||
//
|
||||
// * the current perfect square
|
||||
// * about halfway to the next perfect square
|
||||
// * the next perfect square, minus one
|
||||
let mut n: $T = 0;
|
||||
for sqrt_n in 0..1_024.min((1_u128 << (<$T>::MAX.count_ones()/2)) - 1) as $T {
|
||||
isqrt_consistency_check(n);
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
sqrt_n,
|
||||
"`{sqrt_n}.pow(2).isqrt()` should be {sqrt_n}."
|
||||
);
|
||||
|
||||
n += sqrt_n;
|
||||
isqrt_consistency_check(n);
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
sqrt_n,
|
||||
"{n} is about halfway between `{sqrt_n}.pow(2)` and `{}.pow(2)`, so `{n}.isqrt()` should be {sqrt_n}.",
|
||||
sqrt_n + 1
|
||||
);
|
||||
|
||||
n += sqrt_n;
|
||||
isqrt_consistency_check(n);
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
sqrt_n,
|
||||
"`({}.pow(2) - 1).isqrt()` should be {sqrt_n}.",
|
||||
sqrt_n + 1
|
||||
);
|
||||
|
||||
n += 1;
|
||||
}
|
||||
|
||||
// Similarly, for each of the last 1,024 perfect squares,
|
||||
// check:
|
||||
//
|
||||
// * the current perfect square
|
||||
// * the current perfect square, minus one
|
||||
// * about halfway to the previous perfect square
|
||||
//
|
||||
// `MAX`'s `isqrt` return value is verified in the `isqrt`
|
||||
// test function above.
|
||||
let maximum_sqrt = <$T>::MAX.isqrt();
|
||||
let mut n = maximum_sqrt * maximum_sqrt;
|
||||
|
||||
for sqrt_n in (maximum_sqrt - 1_024.min((1_u128 << (<$T>::MAX.count_ones()/2)) - 1) as $T..maximum_sqrt).rev() {
|
||||
isqrt_consistency_check(n);
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
sqrt_n + 1,
|
||||
"`{0}.pow(2).isqrt()` should be {0}.",
|
||||
sqrt_n + 1
|
||||
);
|
||||
|
||||
n -= 1;
|
||||
isqrt_consistency_check(n);
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
sqrt_n,
|
||||
"`({}.pow(2) - 1).isqrt()` should be {sqrt_n}.",
|
||||
sqrt_n + 1
|
||||
);
|
||||
|
||||
n -= sqrt_n;
|
||||
isqrt_consistency_check(n);
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
sqrt_n,
|
||||
"{n} is about halfway between `{sqrt_n}.pow(2)` and `{}.pow(2)`, so `{n}.isqrt()` should be {sqrt_n}.",
|
||||
sqrt_n + 1
|
||||
);
|
||||
|
||||
n -= sqrt_n;
|
||||
}
|
||||
}
|
||||
}
|
||||
)*
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! signed_check {
|
||||
($T:ident) => {
|
||||
/// This takes an input and, if it's nonnegative or
|
||||
#[doc = concat!("`", stringify!($T), "::MIN`,")]
|
||||
/// checks that `isqrt` and `checked_isqrt` produce equivalent results
|
||||
/// for that input and for the negative of that input.
|
||||
///
|
||||
/// # Note
|
||||
///
|
||||
/// This cannot check that negative inputs to `isqrt` cause panics if
|
||||
/// panics abort instead of unwind.
|
||||
fn isqrt_consistency_check(n: $T) {
|
||||
// `<$T>::MIN` will be negative, so ignore it in this nonnegative
|
||||
// section.
|
||||
if n >= 0 {
|
||||
assert_eq!(
|
||||
Some(n.isqrt()),
|
||||
n.checked_isqrt(),
|
||||
"`{n}.checked_isqrt()` should match `Some({n}.isqrt())`.",
|
||||
);
|
||||
}
|
||||
|
||||
// `wrapping_neg` so that `<$T>::MIN` will negate to itself rather
|
||||
// than panicking.
|
||||
let negative_n = n.wrapping_neg();
|
||||
|
||||
// Zero negated will still be nonnegative, so ignore it in this
|
||||
// negative section.
|
||||
if negative_n < 0 {
|
||||
assert_eq!(
|
||||
negative_n.checked_isqrt(),
|
||||
None,
|
||||
"`({negative_n}).checked_isqrt()` should be `None`, as {negative_n} is negative.",
|
||||
);
|
||||
|
||||
// `catch_unwind` only works when panics unwind rather than abort.
|
||||
#[cfg(panic = "unwind")]
|
||||
{
|
||||
std::panic::catch_unwind(core::panic::AssertUnwindSafe(|| (-n).isqrt())).expect_err(
|
||||
&format!("`({negative_n}).isqrt()` should have panicked, as {negative_n} is negative.")
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! unsigned_check {
|
||||
($T:ident) => {
|
||||
/// This takes an input and, if it's nonzero, checks that `isqrt`
|
||||
/// produces the same numeric value for both
|
||||
#[doc = concat!("`", stringify!($T), "` and ")]
|
||||
#[doc = concat!("`NonZero<", stringify!($T), ">`.")]
|
||||
fn isqrt_consistency_check(n: $T) {
|
||||
// Zero cannot be turned into a `NonZero` value, so ignore it in
|
||||
// this nonzero section.
|
||||
if n > 0 {
|
||||
assert_eq!(
|
||||
n.isqrt(),
|
||||
core::num::NonZero::<$T>::new(n)
|
||||
.expect(
|
||||
"Was not able to create a new `NonZero` value from a nonzero number."
|
||||
)
|
||||
.isqrt()
|
||||
.get(),
|
||||
"`{n}.isqrt` should match `NonZero`'s `{n}.isqrt().get()`.",
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
tests!(signed_check: i8 i16 i32 i64 i128);
|
||||
tests!(unsigned_check: u8 u16 u32 u64 u128);
|
@ -27,6 +27,7 @@
|
||||
mod dec2flt;
|
||||
mod flt2dec;
|
||||
mod int_log;
|
||||
mod int_sqrt;
|
||||
mod ops;
|
||||
mod wrapping;
|
||||
|
||||
|
@ -99,7 +99,7 @@ the need to specify them manually.
|
||||
Well known names and values are implicitly added as long as at least one `--check-cfg` argument
|
||||
is present.
|
||||
|
||||
As of `2024-05-06T`, the list of known names is as follows:
|
||||
As of `2024-08-20T`, the list of known names is as follows:
|
||||
|
||||
<!--- See CheckCfg::fill_well_known in compiler/rustc_session/src/config.rs -->
|
||||
|
||||
@ -107,6 +107,7 @@ As of `2024-05-06T`, the list of known names is as follows:
|
||||
- `debug_assertions`
|
||||
- `doc`
|
||||
- `doctest`
|
||||
- `fmt_debug`
|
||||
- `miri`
|
||||
- `overflow_checks`
|
||||
- `panic`
|
||||
|
15
src/doc/unstable-book/src/compiler-flags/fmt-debug.md
Normal file
@ -0,0 +1,15 @@
|
||||
# `fmt-debug`
|
||||
|
||||
The tracking issue for this feature is: [#129709](https://github.com/rust-lang/rust/issues/129709).
|
||||
|
||||
------------------------
|
||||
|
||||
Option `-Z fmt-debug=val` controls verbosity of derived `Debug` implementations
|
||||
and debug formatting in format strings (`{:?}`).
|
||||
|
||||
* `full` — `#[derive(Debug)]` prints types recursively. This is the default behavior.
|
||||
|
||||
* `shallow` — `#[derive(Debug)]` prints only the type name, or the name of the variant for fieldless enums. Details of the `Debug` implementation are not stable and may change in the future. Behavior of custom `fmt::Debug` implementations is not affected.
|
||||
|
||||
* `none` — `#[derive(Debug)]` does not print anything at all. `{:?}` in formatting strings has no effect.
|
||||
This option may reduce the size of binaries and remove occurrences of type names in the binary that are not removed by stripping symbols. However, it may also cause `panic!` and `assert!` messages to be incomplete.
|
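A minimal sketch of how the three modes change derived output for the same program (the struct, field values, and expected strings below are illustrative, not taken from this change):

```rust
#[derive(Debug)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    // -Zfmt-debug=full    => "Point { x: 1, y: 2 }"
    // -Zfmt-debug=shallow => "Point"
    // -Zfmt-debug=none    => "" (the `{:?}` argument becomes a no-op)
    println!("{p:?}");
}
```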
7
src/doc/unstable-book/src/compiler-flags/lint-llvm-ir.md
Normal file
@ -0,0 +1,7 @@
|
||||
# `lint-llvm-ir`
|
||||
|
||||
---------------------
|
||||
|
||||
This flag will add `LintPass` to the start of the pipeline.
|
||||
You can use it to check for common errors in the LLVM IR generated by `rustc`.
|
||||
You can add `-Cllvm-args=-lint-abort-on-error` to abort the process if errors are found.
|
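As an illustrative (not prescriptive) invocation, something like `rustc -Zlint-llvm-ir -Cllvm-args=-lint-abort-on-error main.rs` would run the IR lints and abort on the first error reported.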
@ -775,22 +775,47 @@ See the [Clang SafeStack documentation][clang-safestack] for more details.
|
||||
|
||||
# ShadowCallStack
|
||||
|
||||
ShadowCallStack provides backward edge control flow protection by storing a function's return address in a separately allocated 'shadow call stack' and loading the return address from that shadow call stack.
|
||||
|
||||
ShadowCallStack requires a platform ABI which reserves `x18` as the instrumentation makes use of this register.
|
||||
ShadowCallStack provides backward edge control flow protection by storing a function's return address in a separately allocated 'shadow call stack'
|
||||
and loading the return address from that shadow call stack.
|
||||
AArch64 and RISC-V both have a platform register defined in their ABIs, which is `x18` and `x3`/`gp` respectively, that can optionally be reserved for this purpose.
|
||||
Software support from the operating system and runtime may be required depending on the target platform; this is detailed in the remainder of this section.
|
||||
See the [Clang ShadowCallStack documentation][clang-scs] for more details.
|
||||
|
||||
ShadowCallStack can be enabled with the `-Zsanitizer=shadow-call-stack` option and is supported on the following targets:
|
||||
|
||||
* `aarch64-linux-android`
|
||||
## AArch64 family
|
||||
|
||||
ShadowCallStack requires the ABI-defined platform register `x18` to be reserved for its use, as the generated instrumentation depends on it.
When `x18` is not reserved and is instead used as a scratch register, enabling ShadowCallStack leads to undefined behaviour:
the return address is corrupted, or invalid memory is accessed, when the instrumentation restores the return address to the link register `lr` from the already-clobbered `x18`.
In other words, code that calls into or is called by functions instrumented with ShadowCallStack must reserve the `x18` register or preserve its value.
|
||||
|
||||
### `aarch64-linux-android` and `aarch64-unknown-fuchsia`/`aarch64-fuchsia`
|
||||
|
||||
These targets already reserve the `x18` register.
A runtime must be provided by the application or operating system.
If `bionic` is used on these targets, software support is provided.
Otherwise, a runtime needs to prepare a memory region and point `x18` to the region, which serves as the shadow call stack.
|
||||
|
||||
See the [Clang ShadowCallStack documentation][clang-scs] for more details.
|
||||
|
||||
* `aarch64-unknown-none`
|
||||
### `aarch64-unknown-none`
|
||||
|
||||
In addition to runtime support from the application or operating system, the `-Zfixed-x18` flag is also mandatory.
|
||||
|
||||
## RISC-V 64 family
|
||||
|
||||
ShadowCallStack uses either the `gp` register (also known as `x3`) for a software shadow stack, or the `ssp` register if the [`Zicfiss`][riscv-zicfiss] extension is available.
`gp`/`x3` is currently always reserved and available for ShadowCallStack instrumentation; in the case of `Zicfiss`, `ssp` is only accessible through its dedicated shadow stack instructions.
|
||||
|
||||
Support from the runtime and operating system is required when `gp`/`x3` is used for the software shadow stack.
|
||||
A runtime must prepare a memory region and point `gp`/`x3` to the region before executing the code.
|
||||
|
||||
The following targets support ShadowCallStack.
|
||||
|
||||
* `riscv64imac-unknown-none-elf`
|
||||
* `riscv64gc-unknown-none-elf`
|
||||
* `riscv64gc-unknown-fuchsia`
|
||||
|
||||
# ThreadSanitizer
|
||||
|
||||
ThreadSanitizer is a data race detection tool. It is supported on the following
|
||||
@ -912,3 +937,4 @@ Sanitizers produce symbolized stacktraces when llvm-symbolizer binary is in `PAT
|
||||
[clang-tsan]: https://clang.llvm.org/docs/ThreadSanitizer.html
|
||||
[linux-kasan]: https://www.kernel.org/doc/html/latest/dev-tools/kasan.html
|
||||
[llvm-memtag]: https://llvm.org/docs/MemTagSanitizer.html
|
||||
[riscv-zicfiss]: https://github.com/riscv/riscv-cfi/blob/3f8e450c481ac303bd5643444f7a89672f24476e/src/cfi_backward.adoc
|
||||
|
@ -341,7 +341,7 @@ fn clean_region_outlives_constraints<'tcx>(
|
||||
.map(|®ion| {
|
||||
let lifetime = early_bound_region_name(region)
|
||||
.inspect(|name| assert!(region_params.contains(name)))
|
||||
.map(|name| Lifetime(name))
|
||||
.map(Lifetime)
|
||||
.unwrap_or(Lifetime::statik());
|
||||
clean::GenericBound::Outlives(lifetime)
|
||||
})
|
||||
|
@ -24,7 +24,7 @@ pub(crate) fn synthesize_blanket_impls(
|
||||
let mut blanket_impls = Vec::new();
|
||||
for trait_def_id in tcx.all_traits() {
|
||||
if !cx.cache.effective_visibilities.is_reachable(tcx, trait_def_id)
|
||||
|| cx.generated_synthetics.get(&(ty.skip_binder(), trait_def_id)).is_some()
|
||||
|| cx.generated_synthetics.contains(&(ty.skip_binder(), trait_def_id))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
@ -54,7 +54,7 @@ pub(crate) fn try_inline(
|
||||
debug!("attrs={attrs:?}");
|
||||
|
||||
let attrs_without_docs = attrs.map(|(attrs, def_id)| {
|
||||
(attrs.into_iter().filter(|a| a.doc_str().is_none()).cloned().collect::<Vec<_>>(), def_id)
|
||||
(attrs.iter().filter(|a| a.doc_str().is_none()).cloned().collect::<Vec<_>>(), def_id)
|
||||
});
|
||||
let attrs_without_docs =
|
||||
attrs_without_docs.as_ref().map(|(attrs, def_id)| (&attrs[..], *def_id));
|
||||
@ -288,10 +288,7 @@ pub(crate) fn build_external_trait(cx: &mut DocContext<'_>, did: DefId) -> clean
clean::Trait { def_id: did, generics, items: trait_items, bounds: supertrait_bounds }
}

pub(crate) fn build_function<'tcx>(
cx: &mut DocContext<'tcx>,
def_id: DefId,
) -> Box<clean::Function> {
pub(crate) fn build_function(cx: &mut DocContext<'_>, def_id: DefId) -> Box<clean::Function> {
let sig = cx.tcx.fn_sig(def_id).instantiate_identity();
// The generics need to be cleaned before the signature.
let mut generics =

@ -425,7 +422,7 @@ pub(crate) fn merge_attrs(
both.cfg(cx.tcx, &cx.cache.hidden_cfg),
)
} else {
(Attributes::from_ast(&old_attrs), old_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg))
(Attributes::from_ast(old_attrs), old_attrs.cfg(cx.tcx, &cx.cache.hidden_cfg))
}
}

@ -791,16 +788,15 @@ fn build_macro(
/// implementation for `AssociatedType`
fn filter_non_trait_generics(trait_did: DefId, mut g: clean::Generics) -> clean::Generics {
for pred in &mut g.where_predicates {
match *pred {
clean::WherePredicate::BoundPredicate { ty: clean::SelfTy, ref mut bounds, .. } => {
bounds.retain(|bound| match bound {
clean::GenericBound::TraitBound(clean::PolyTrait { trait_, .. }, _) => {
trait_.def_id() != trait_did
}
_ => true,
});
}
_ => {}
if let clean::WherePredicate::BoundPredicate { ty: clean::SelfTy, ref mut bounds, .. } =
*pred
{
bounds.retain(|bound| match bound {
clean::GenericBound::TraitBound(clean::PolyTrait { trait_, .. }, _) => {
trait_.def_id() != trait_did
}
_ => true,
});
}
}
@ -266,7 +266,7 @@ fn clean_poly_trait_ref_with_constraints<'tcx>(
)
}

fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) -> Lifetime {
fn clean_lifetime(lifetime: &hir::Lifetime, cx: &mut DocContext<'_>) -> Lifetime {
if let Some(
rbv::ResolvedArg::EarlyBound(did)
| rbv::ResolvedArg::LateBound(_, _, did)

@ -274,7 +274,7 @@ fn clean_lifetime<'tcx>(lifetime: &hir::Lifetime, cx: &mut DocContext<'tcx>) ->
) = cx.tcx.named_bound_var(lifetime.hir_id)
&& let Some(lt) = cx.args.get(&did.to_def_id()).and_then(|arg| arg.as_lt())
{
return lt.clone();
return *lt;
}
Lifetime(lifetime.ident.name)
}

@ -285,7 +285,7 @@ pub(crate) fn clean_const<'tcx>(
) -> ConstantKind {
match &constant.kind {
hir::ConstArgKind::Path(qpath) => {
ConstantKind::Path { path: qpath_to_string(&qpath).into() }
ConstantKind::Path { path: qpath_to_string(qpath).into() }
}
hir::ConstArgKind::Anon(anon) => ConstantKind::Anonymous { body: anon.body },
}

@ -299,7 +299,7 @@ pub(crate) fn clean_middle_const<'tcx>(
ConstantKind::TyConst { expr: constant.skip_binder().to_string().into() }
}

pub(crate) fn clean_middle_region<'tcx>(region: ty::Region<'tcx>) -> Option<Lifetime> {
pub(crate) fn clean_middle_region(region: ty::Region<'_>) -> Option<Lifetime> {
match *region {
ty::ReStatic => Some(Lifetime::statik()),
_ if !region.has_name() => None,

@ -389,8 +389,8 @@ fn clean_poly_trait_predicate<'tcx>(
})
}

fn clean_region_outlives_predicate<'tcx>(
pred: ty::RegionOutlivesPredicate<'tcx>,
fn clean_region_outlives_predicate(
pred: ty::RegionOutlivesPredicate<'_>,
) -> Option<WherePredicate> {
let ty::OutlivesPredicate(a, b) = pred;

@ -513,10 +513,10 @@ fn projection_to_path_segment<'tcx>(
}
}

fn clean_generic_param_def<'tcx>(
fn clean_generic_param_def(
def: &ty::GenericParamDef,
defaults: ParamDefaults,
cx: &mut DocContext<'tcx>,
cx: &mut DocContext<'_>,
) -> GenericParamDef {
let (name, kind) = match def.kind {
ty::GenericParamDefKind::Lifetime => {
@ -1303,10 +1303,7 @@ pub(crate) fn clean_impl_item<'tcx>(
})
}

pub(crate) fn clean_middle_assoc_item<'tcx>(
assoc_item: &ty::AssocItem,
cx: &mut DocContext<'tcx>,
) -> Item {
pub(crate) fn clean_middle_assoc_item(assoc_item: &ty::AssocItem, cx: &mut DocContext<'_>) -> Item {
let tcx = cx.tcx;
let kind = match assoc_item.kind {
ty::AssocKind::Const => {

@ -1459,7 +1456,7 @@ fn param_eq_arg(param: &GenericParamDef, arg: &GenericArg) -> bool {
// which only has one associated type, which is not a GAT, so whatever.
}
}
bounds.extend(mem::replace(pred_bounds, Vec::new()));
bounds.extend(mem::take(pred_bounds));
false
}
_ => true,

@ -1661,7 +1658,7 @@ fn clean_qpath<'tcx>(hir_ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> Type
expanded
} else {
// First we check if it's a private re-export.
let path = if let Some(path) = first_non_private(cx, hir_id, &path) {
let path = if let Some(path) = first_non_private(cx, hir_id, path) {
path
} else {
clean_path(path, cx)

@ -1796,7 +1793,7 @@ fn maybe_expand_private_type_alias<'tcx>(
}

Some(cx.enter_alias(args, def_id.to_def_id(), |cx| {
cx.with_param_env(def_id.to_def_id(), |cx| clean_ty(&ty, cx))
cx.with_param_env(def_id.to_def_id(), |cx| clean_ty(ty, cx))
}))
}

@ -1806,8 +1803,8 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
match ty.kind {
TyKind::Never => Primitive(PrimitiveType::Never),
TyKind::Ptr(ref m) => RawPointer(m.mutbl, Box::new(clean_ty(m.ty, cx))),
TyKind::Ref(ref l, ref m) => {
let lifetime = if l.is_anonymous() { None } else { Some(clean_lifetime(*l, cx)) };
TyKind::Ref(l, ref m) => {
let lifetime = if l.is_anonymous() { None } else { Some(clean_lifetime(l, cx)) };
BorrowedRef { lifetime, mutability: m.mutbl, type_: Box::new(clean_ty(m.ty, cx)) }
}
TyKind::Slice(ty) => Slice(Box::new(clean_ty(ty, cx))),
@ -1843,17 +1840,17 @@ pub(crate) fn clean_ty<'tcx>(ty: &hir::Ty<'tcx>, cx: &mut DocContext<'tcx>) -> T
TyKind::Tup(tys) => Tuple(tys.iter().map(|ty| clean_ty(ty, cx)).collect()),
TyKind::OpaqueDef(item_id, _, _) => {
let item = cx.tcx.hir().item(item_id);
if let hir::ItemKind::OpaqueTy(ref ty) = item.kind {
if let hir::ItemKind::OpaqueTy(ty) = item.kind {
ImplTrait(ty.bounds.iter().filter_map(|x| clean_generic_bound(x, cx)).collect())
} else {
unreachable!()
}
}
TyKind::Path(_) => clean_qpath(ty, cx),
TyKind::TraitObject(bounds, ref lifetime, _) => {
TyKind::TraitObject(bounds, lifetime, _) => {
let bounds = bounds.iter().map(|(bound, _)| clean_poly_trait_ref(bound, cx)).collect();
let lifetime =
if !lifetime.is_elided() { Some(clean_lifetime(*lifetime, cx)) } else { None };
if !lifetime.is_elided() { Some(clean_lifetime(lifetime, cx)) } else { None };
DynTrait(bounds, lifetime)
}
TyKind::BareFn(barefn) => BareFunction(Box::new(clean_bare_fn_ty(barefn, cx))),

@ -2355,7 +2352,7 @@ pub(crate) fn clean_field<'tcx>(field: &hir::FieldDef<'tcx>, cx: &mut DocContext
clean_field_with_def_id(field.def_id.to_def_id(), field.ident.name, clean_ty(field.ty, cx), cx)
}

pub(crate) fn clean_middle_field<'tcx>(field: &ty::FieldDef, cx: &mut DocContext<'tcx>) -> Item {
pub(crate) fn clean_middle_field(field: &ty::FieldDef, cx: &mut DocContext<'_>) -> Item {
clean_field_with_def_id(
field.did,
field.name,

@ -2378,7 +2375,7 @@ pub(crate) fn clean_field_with_def_id(
Item::from_def_id_and_parts(def_id, Some(name), StructFieldItem(ty), cx)
}

pub(crate) fn clean_variant_def<'tcx>(variant: &ty::VariantDef, cx: &mut DocContext<'tcx>) -> Item {
pub(crate) fn clean_variant_def(variant: &ty::VariantDef, cx: &mut DocContext<'_>) -> Item {
let discriminant = match variant.discr {
ty::VariantDiscr::Explicit(def_id) => Some(Discriminant { expr: None, value: def_id }),
ty::VariantDiscr::Relative(_) => None,

@ -2526,7 +2523,7 @@ fn clean_generic_args<'tcx>(
.filter_map(|arg| {
Some(match arg {
hir::GenericArg::Lifetime(lt) if !lt.is_anonymous() => {
GenericArg::Lifetime(clean_lifetime(*lt, cx))
GenericArg::Lifetime(clean_lifetime(lt, cx))
}
hir::GenericArg::Lifetime(_) => GenericArg::Lifetime(Lifetime::elided()),
hir::GenericArg::Type(ty) => GenericArg::Type(clean_ty(ty, cx)),
@ -2579,11 +2576,11 @@ fn clean_bare_fn_ty<'tcx>(
BareFunctionDecl { safety: bare_fn.safety, abi: bare_fn.abi, decl, generic_params }
}

pub(crate) fn reexport_chain<'tcx>(
tcx: TyCtxt<'tcx>,
pub(crate) fn reexport_chain(
tcx: TyCtxt<'_>,
import_def_id: LocalDefId,
target_def_id: DefId,
) -> &'tcx [Reexport] {
) -> &[Reexport] {
for child in tcx.module_children_local(tcx.local_parent(import_def_id)) {
if child.res.opt_def_id() == Some(target_def_id)
&& child.reexport_chain.first().and_then(|r| r.id()) == Some(import_def_id.to_def_id())

@ -2803,7 +2800,7 @@ fn clean_maybe_renamed_item<'tcx>(
fields: variant_data.fields().iter().map(|x| clean_field(x, cx)).collect(),
}),
ItemKind::Impl(impl_) => return clean_impl(impl_, item.owner_id.def_id, cx),
ItemKind::Macro(ref macro_def, MacroKind::Bang) => {
ItemKind::Macro(macro_def, MacroKind::Bang) => {
let ty_vis = cx.tcx.visibility(def_id);
MacroItem(Macro {
// FIXME this shouldn't be false

@ -3134,9 +3131,7 @@ fn clean_assoc_item_constraint<'tcx>(
}
}

fn clean_bound_vars<'tcx>(
bound_vars: &'tcx ty::List<ty::BoundVariableKind>,
) -> Vec<GenericParamDef> {
fn clean_bound_vars(bound_vars: &ty::List<ty::BoundVariableKind>) -> Vec<GenericParamDef> {
bound_vars
.into_iter()
.filter_map(|var| match var {

@ -70,7 +70,7 @@ pub(crate) fn where_clauses(cx: &DocContext<'_>, clauses: ThinVec<WP>) -> ThinVe

pub(crate) fn merge_bounds(
cx: &clean::DocContext<'_>,
bounds: &mut Vec<clean::GenericBound>,
bounds: &mut [clean::GenericBound],
trait_did: DefId,
assoc: clean::PathSegment,
rhs: &clean::Term,

@ -368,11 +368,11 @@ fn is_field_vis_inherited(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
}

impl Item {
pub(crate) fn stability<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Option<Stability> {
pub(crate) fn stability(&self, tcx: TyCtxt<'_>) -> Option<Stability> {
self.def_id().and_then(|did| tcx.lookup_stability(did))
}

pub(crate) fn const_stability<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Option<ConstStability> {
pub(crate) fn const_stability(&self, tcx: TyCtxt<'_>) -> Option<ConstStability> {
self.def_id().and_then(|did| tcx.lookup_const_stability(did))
}
@ -945,9 +945,9 @@ pub(crate) trait AttributesExt {
where
Self: 'a;

fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a>;
fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_>;

fn iter<'a>(&'a self) -> Self::Attributes<'a>;
fn iter(&self) -> Self::Attributes<'_>;

fn cfg(&self, tcx: TyCtxt<'_>, hidden_cfg: &FxHashSet<Cfg>) -> Option<Arc<Cfg>> {
let sess = tcx.sess;

@ -1043,15 +1043,15 @@ impl AttributesExt for [ast::Attribute] {
type AttributeIterator<'a> = impl Iterator<Item = ast::NestedMetaItem> + 'a;
type Attributes<'a> = impl Iterator<Item = &'a ast::Attribute> + 'a;

fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a> {
fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_> {
self.iter()
.filter(move |attr| attr.has_name(name))
.filter_map(ast::Attribute::meta_item_list)
.flatten()
}

fn iter<'a>(&'a self) -> Self::Attributes<'a> {
self.into_iter()
fn iter(&self) -> Self::Attributes<'_> {
self.iter()
}
}

@ -1061,15 +1061,15 @@ impl AttributesExt for [(Cow<'_, ast::Attribute>, Option<DefId>)] {
type Attributes<'a> = impl Iterator<Item = &'a ast::Attribute> + 'a
where Self: 'a;

fn lists<'a>(&'a self, name: Symbol) -> Self::AttributeIterator<'a> {
fn lists(&self, name: Symbol) -> Self::AttributeIterator<'_> {
AttributesExt::iter(self)
.filter(move |attr| attr.has_name(name))
.filter_map(ast::Attribute::meta_item_list)
.flatten()
}

fn iter<'a>(&'a self) -> Self::Attributes<'a> {
self.into_iter().map(move |(attr, _)| match attr {
fn iter(&self) -> Self::Attributes<'_> {
self.iter().map(move |(attr, _)| match attr {
Cow::Borrowed(attr) => *attr,
Cow::Owned(attr) => attr,
})
@ -1389,7 +1389,7 @@ pub(crate) struct FnDecl {

impl FnDecl {
pub(crate) fn receiver_type(&self) -> Option<&Type> {
self.inputs.values.get(0).and_then(|v| v.to_receiver())
self.inputs.values.first().and_then(|v| v.to_receiver())
}
}

@ -1502,7 +1502,7 @@ impl Type {
pub(crate) fn without_borrowed_ref(&self) -> &Type {
let mut result = self;
while let Type::BorrowedRef { type_, .. } = result {
result = &*type_;
result = type_;
}
result
}

@ -1631,10 +1631,7 @@ pub(crate) fn is_assoc_ty(&self) -> bool {
}

pub(crate) fn is_self_type(&self) -> bool {
match *self {
SelfTy => true,
_ => false,
}
matches!(*self, Type::SelfTy)
}

pub(crate) fn generic_args(&self) -> Option<&GenericArgs> {

@ -1673,7 +1670,7 @@ pub(crate) fn projection(&self) -> Option<(&Type, DefId, PathSegment)> {
pub(crate) fn def_id(&self, cache: &Cache) -> Option<DefId> {
let t: PrimitiveType = match *self {
Type::Path { ref path } => return Some(path.def_id()),
DynTrait(ref bounds, _) => return bounds.get(0).map(|b| b.trait_.def_id()),
DynTrait(ref bounds, _) => return bounds.first().map(|b| b.trait_.def_id()),
Primitive(p) => return cache.primitive_locations.get(&p).cloned(),
BorrowedRef { type_: box Generic(..), .. } => PrimitiveType::Reference,
BorrowedRef { ref type_, .. } => return type_.def_id(cache),
@ -321,9 +321,9 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
"({})",
elts.iter().map(|p| name_from_pat(p).to_string()).collect::<Vec<String>>().join(", ")
),
PatKind::Box(p) => return name_from_pat(&*p),
PatKind::Deref(p) => format!("deref!({})", name_from_pat(&*p)),
PatKind::Ref(p, _) => return name_from_pat(&*p),
PatKind::Box(p) => return name_from_pat(p),
PatKind::Deref(p) => format!("deref!({})", name_from_pat(p)),
PatKind::Ref(p, _) => return name_from_pat(p),
PatKind::Lit(..) => {
warn!(
"tried to get argument name from PatKind::Lit, which is silly in function arguments"

@ -333,7 +333,7 @@ pub(crate) fn name_from_pat(p: &hir::Pat<'_>) -> Symbol {
PatKind::Range(..) => return kw::Underscore,
PatKind::Slice(begin, ref mid, end) => {
let begin = begin.iter().map(|p| name_from_pat(p).to_string());
let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter();
let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(p))).into_iter();
let end = end.iter().map(|p| name_from_pat(p).to_string());
format!("[{}]", begin.chain(mid).chain(end).collect::<Vec<_>>().join(", "))
}

@ -344,7 +344,7 @@ pub(crate) fn print_const(cx: &DocContext<'_>, n: ty::Const<'_>) -> String {
match n.kind() {
ty::ConstKind::Unevaluated(ty::UnevaluatedConst { def, args: _ }) => {
let s = if let Some(def) = def.as_local() {
rendered_const(cx.tcx, &cx.tcx.hir().body_owned_by(def), def)
rendered_const(cx.tcx, cx.tcx.hir().body_owned_by(def), def)
} else {
inline::print_inlined_const(cx.tcx, def)
};

@ -383,7 +383,7 @@ pub(crate) fn print_evaluated_const(

fn format_integer_with_underscore_sep(num: &str) -> String {
let num_chars: Vec<_> = num.chars().collect();
let mut num_start_index = if num_chars.get(0) == Some(&'-') { 1 } else { 0 };
let mut num_start_index = if num_chars.first() == Some(&'-') { 1 } else { 0 };
let chunk_size = match num[num_start_index..].as_bytes() {
[b'0', b'b' | b'x', ..] => {
num_start_index += 2;
@ -360,7 +360,7 @@ pub(crate) fn from_matches(
return None;
}

if rustc_driver::describe_flag_categories(early_dcx, &matches) {
if rustc_driver::describe_flag_categories(early_dcx, matches) {
return None;
}

@ -374,7 +374,7 @@ pub(crate) fn from_matches(
let codegen_options = CodegenOptions::build(early_dcx, matches);
let unstable_opts = UnstableOptions::build(early_dcx, matches);

let remap_path_prefix = match parse_remap_path_prefix(&matches) {
let remap_path_prefix = match parse_remap_path_prefix(matches) {
Ok(prefix_mappings) => prefix_mappings,
Err(err) => {
early_dcx.early_fatal(err);

@ -486,7 +486,7 @@ fn println_condition(condition: Condition) {
_ => dcx.fatal("too many file operands"),
}
};
let input = make_input(early_dcx, &input);
let input = make_input(early_dcx, input);

let externs = parse_externs(early_dcx, matches, &unstable_opts);
let extern_html_root_urls = match parse_extern_html_roots(matches) {

@ -288,7 +288,7 @@ pub(crate) fn create_config(
let hir = tcx.hir();
let body = hir.body_owned_by(def_id);
debug!("visiting body for {def_id:?}");
EmitIgnoredResolutionErrors::new(tcx).visit_body(&body);
EmitIgnoredResolutionErrors::new(tcx).visit_body(body);
(rustc_interface::DEFAULT_QUERY_PROVIDERS.typeck)(tcx, def_id)
};
}),
@ -272,7 +272,7 @@ pub(crate) fn run_tests(
let mut tests_runner = runner::DocTestRunner::new();

let rustdoc_test_options = IndividualTestOptions::new(
&rustdoc_options,
rustdoc_options,
&Some(format!("merged_doctest_{edition}")),
PathBuf::from(format!("doctest_{edition}.rs")),
);

@ -307,7 +307,7 @@ pub(crate) fn run_tests(
doctest,
scraped_test,
opts.clone(),
Arc::clone(&rustdoc_options),
Arc::clone(rustdoc_options),
unused_extern_reports.clone(),
));
}

@ -316,7 +316,7 @@ pub(crate) fn run_tests(
// We need to call `test_main` even if there is no doctest to run to get the output
// `running 0 tests...`.
if ran_edition_tests == 0 || !standalone_tests.is_empty() {
standalone_tests.sort_by(|a, b| a.desc.name.as_slice().cmp(&b.desc.name.as_slice()));
standalone_tests.sort_by(|a, b| a.desc.name.as_slice().cmp(b.desc.name.as_slice()));
test::test_main(&test_args, standalone_tests, None);
}
if nb_errors != 0 {

@ -421,7 +421,7 @@ fn add_exe_suffix(input: String, target: &TargetTriple) -> String {
}

fn wrapped_rustc_command(rustc_wrappers: &[PathBuf], rustc_binary: &Path) -> Command {
let mut args = rustc_wrappers.iter().map(PathBuf::as_path).chain([rustc_binary].into_iter());
let mut args = rustc_wrappers.iter().map(PathBuf::as_path).chain([rustc_binary]);

let exe = args.next().expect("unable to create rustc command");
let mut command = Command::new(exe);

@ -452,7 +452,7 @@ pub(crate) struct RunnableDocTest {

impl RunnableDocTest {
fn path_for_merged_doctest(&self) -> PathBuf {
self.test_opts.outdir.path().join(&format!("doctest_{}.rs", self.edition))
self.test_opts.outdir.path().join(format!("doctest_{}.rs", self.edition))
}
}
@ -477,13 +477,13 @@ fn run_test(
.unwrap_or_else(|| rustc_interface::util::rustc_path().expect("found rustc"));
let mut compiler = wrapped_rustc_command(&rustdoc_options.test_builder_wrappers, rustc_binary);

compiler.arg(&format!("@{}", doctest.global_opts.args_file.display()));
compiler.arg(format!("@{}", doctest.global_opts.args_file.display()));

if let Some(sysroot) = &rustdoc_options.maybe_sysroot {
compiler.arg(format!("--sysroot={}", sysroot.display()));
}

compiler.arg("--edition").arg(&doctest.edition.to_string());
compiler.arg("--edition").arg(doctest.edition.to_string());
if !doctest.is_multiple_tests {
// Setting these environment variables is unneeded if this is a merged doctest.
compiler.env("UNSTABLE_RUSTDOC_TEST_PATH", &doctest.test_opts.path);

@ -692,7 +692,7 @@ impl IndividualTestOptions {
fn new(options: &RustdocOptions, test_id: &Option<String>, test_path: PathBuf) -> Self {
let outdir = if let Some(ref path) = options.persist_doctests {
let mut path = path.clone();
path.push(&test_id.as_deref().unwrap_or_else(|| "<doctest>"));
path.push(&test_id.as_deref().unwrap_or("<doctest>"));

if let Err(err) = std::fs::create_dir_all(&path) {
eprintln!("Couldn't create directory for doctest executables: {err}");

@ -311,7 +311,7 @@ fn check_item(item: &ast::Item, info: &mut ParseSourceInfo, crate_name: &Option<
}
ast::ItemKind::ExternCrate(original) => {
if !info.found_extern_crate
&& let Some(ref crate_name) = crate_name
&& let Some(crate_name) = crate_name
{
info.found_extern_crate = match original {
Some(name) => name.as_str() == *crate_name,
@ -73,7 +73,7 @@ pub(crate) fn test(options: Options) -> Result<(), String> {
use rustc_session::config::Input;
let input_str = match &options.input {
Input::File(path) => {
read_to_string(&path).map_err(|err| format!("{}: {err}", path.display()))?
read_to_string(path).map_err(|err| format!("{}: {err}", path.display()))?
}
Input::Str { name: _, input } => input.clone(),
};

@ -98,8 +98,10 @@ pub(crate) fn run_merged_tests(

code.push_str("extern crate test;\n");

let test_args =
test_args.iter().map(|arg| format!("{arg:?}.to_string(),")).collect::<String>();
let test_args = test_args.iter().fold(String::new(), |mut x, arg| {
write!(x, "{arg:?}.to_string(),").unwrap();
x
});
write!(
code,
"\
Some files were not shown because too many files have changed in this diff.