Auto merge of #108464 - compiler-errors:rollup-trl1g70, r=compiler-errors

Rollup of 7 pull requests

Successful merges:

 - #105736 (Test that the compiler/library builds with validate-mir)
 - #107291 ([breaking change] Remove a rustdoc back compat warning)
 - #107675 (Implement -Zlink-directives=yes/no)
 - #107848 (Split `x setup` sub-actions to CLI arguments)
 - #107911 (Add check for invalid #[macro_export] arguments)
 - #108229 ([107049] Recognise top level keys in config.toml.example)
 - #108333 (Make object bound candidates sound in the new trait solver)

Failed merges:

 - #108337 (hir-analysis: make a helpful note)

r? `@ghost`
`@rustbot` modify labels: rollup
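
To illustrate the user-facing check added by #107911, here is a minimal sketch (for context only, not part of the commit; the macro names are invented, and the expected warnings mirror the `invalid_macro_export_argument` test included below):

// warning: `#[macro_export]` can only take 1 or 0 arguments
#[macro_export(hello, world)]
macro_rules! too_many_arguments {
    () => {};
}

// warning: `not_local_inner_macros` isn't a valid `#[macro_export]` argument
#[macro_export(not_local_inner_macros)]
macro_rules! unknown_argument {
    () => {};
}

// Accepted forms: bare `#[macro_export]`, an empty list, or `local_inner_macros`.
#[macro_export(local_inner_macros)]
macro_rules! accepted {
    () => {};
}

fn main() {}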
bors 2023-02-25 23:15:10 +00:00
commit f37f9f6512
33 changed files with 627 additions and 65 deletions

View File

@ -11,10 +11,6 @@ interface_mixed_bin_crate =
interface_mixed_proc_macro_crate =
cannot mix `proc-macro` crate type with others
interface_proc_macro_doc_without_arg =
Trying to document proc macro crate without passing '--crate-type proc-macro to rustdoc
.warn = The generated documentation may be incorrect
interface_error_writing_dependencies =
error writing dependencies to `{$path}`: {$error}

View File

@ -31,10 +31,6 @@ pub struct MixedBinCrate;
#[diag(interface_mixed_proc_macro_crate)]
pub struct MixedProcMacroCrate;
#[derive(Diagnostic)]
#[diag(interface_proc_macro_doc_without_arg)]
pub struct ProcMacroDocWithoutArg;
#[derive(Diagnostic)]
#[diag(interface_error_writing_dependencies)]
pub struct ErrorWritingDependencies<'a> {

View File

@ -287,15 +287,6 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)
sess.emit_warning(errors::ProcMacroCratePanicAbort);
}
// For backwards compatibility, we don't try to run proc macro injection
// if rustdoc is run on a proc macro crate without '--crate-type proc-macro' being
// specified. This should only affect users who manually invoke 'rustdoc', as
// 'cargo doc' will automatically pass the proper '--crate-type' flags.
// However, we do emit a warning, to let such users know that they should
// start passing '--crate-type proc-macro'
if has_proc_macro_decls && sess.opts.actually_rustdoc && !is_proc_macro_crate {
sess.emit_warning(errors::ProcMacroDocWithoutArg);
} else {
krate = sess.time("maybe_create_a_macro_crate", || {
let is_test_crate = sess.opts.test;
rustc_builtin_macros::proc_macro_harness::inject(
@ -308,7 +299,6 @@ fn configure_and_expand(mut krate: ast::Crate, resolver: &mut Resolver<'_, '_>)
sess.diagnostic(),
)
});
}
// Done with macro expansion!

View File

@ -756,6 +756,7 @@ fn test_unstable_options_tracking_hash() {
tracked!(instrument_coverage, Some(InstrumentCoverage::All));
tracked!(instrument_mcount, true);
tracked!(instrument_xray, Some(InstrumentXRay::default()));
tracked!(link_directives, false);
tracked!(link_only, true);
tracked!(llvm_plugins, vec![String::from("plugin_name")]);
tracked!(location_detail, LocationDetail { file: true, line: false, column: false });

View File

@ -4103,3 +4103,33 @@ declare_lint! {
};
report_in_external_macro
}
declare_lint! {
/// The `invalid_macro_export_arguments` lint detects cases where `#[macro_export]` is being used with invalid arguments.
///
/// ### Example
///
/// ```rust,compile_fail
/// #![deny(invalid_macro_export_arguments)]
///
/// #[macro_export(invalid_parameter)]
/// macro_rules! myMacro {
/// () => {
/// // [...]
/// }
/// }
///
/// #[macro_export(too, many, items)]
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// The only valid argument is `#[macro_export(local_inner_macros)]` or no argument (`#[macro_export]`).
/// You can't have multiple arguments in a `#[macro_export(..)]`, or mention arguments other than `local_inner_macros`.
///
pub INVALID_MACRO_EXPORT_ARGUMENTS,
Warn,
"\"invalid_parameter\" isn't a valid argument for `#[macro_export]`",
}

View File

@ -103,8 +103,13 @@ impl<'tcx> Collector<'tcx> {
}
// Process all of the #[link(..)]-style arguments
let sess = &self.tcx.sess;
let sess = self.tcx.sess;
let features = self.tcx.features();
if !sess.opts.unstable_opts.link_directives {
return;
}
for m in self.tcx.hir().attrs(it.hir_id()).iter().filter(|a| a.has_name(sym::link)) {
let Some(items) = m.meta_item_list() else {
continue;

View File

@ -745,3 +745,7 @@ passes_proc_macro_invalid_abi = proc macro functions may not be `extern "{$abi}"
passes_proc_macro_unsafe = proc macro functions may not be `unsafe`
passes_skipping_const_checks = skipping const checks
passes_invalid_macro_export_arguments = `{$name}` isn't a valid `#[macro_export]` argument
passes_invalid_macro_export_arguments_too_many_items = `#[macro_export]` can only take 1 or 0 arguments

View File

@ -23,7 +23,8 @@ use rustc_middle::ty::fast_reject::{DeepRejectCtxt, TreatParams};
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::{ParamEnv, TyCtxt};
use rustc_session::lint::builtin::{
CONFLICTING_REPR_HINTS, INVALID_DOC_ATTRIBUTES, UNUSED_ATTRIBUTES,
CONFLICTING_REPR_HINTS, INVALID_DOC_ATTRIBUTES, INVALID_MACRO_EXPORT_ARGUMENTS,
UNUSED_ATTRIBUTES,
};
use rustc_session::parse::feature_err;
use rustc_span::symbol::{kw, sym, Symbol};
@ -2102,7 +2103,33 @@ impl CheckAttrVisitor<'_> {
fn check_macro_export(&self, hir_id: HirId, attr: &Attribute, target: Target) {
if target != Target::MacroDef {
self.tcx.emit_spanned_lint(UNUSED_ATTRIBUTES, hir_id, attr.span, errors::MacroExport);
self.tcx.emit_spanned_lint(
UNUSED_ATTRIBUTES,
hir_id,
attr.span,
errors::MacroExport::Normal,
);
} else if let Some(meta_item_list) = attr.meta_item_list() &&
!meta_item_list.is_empty() {
if meta_item_list.len() > 1 {
self.tcx.emit_spanned_lint(
INVALID_MACRO_EXPORT_ARGUMENTS,
hir_id,
attr.span,
errors::MacroExport::TooManyItems,
);
} else {
if meta_item_list[0].name_or_empty() != sym::local_inner_macros {
self.tcx.emit_spanned_lint(
INVALID_MACRO_EXPORT_ARGUMENTS,
hir_id,
meta_item_list[0].span(),
errors::MacroExport::UnknownItem {
name: meta_item_list[0].name_or_empty(),
},
);
}
}
}
}

View File

@ -641,8 +641,16 @@ pub struct MacroUse {
}
#[derive(LintDiagnostic)]
#[diag(passes_macro_export)]
pub struct MacroExport;
pub enum MacroExport {
#[diag(passes_macro_export)]
Normal,
#[diag(passes_invalid_macro_export_arguments)]
UnknownItem { name: Symbol },
#[diag(passes_invalid_macro_export_arguments_too_many_items)]
TooManyItems,
}
#[derive(LintDiagnostic)]
#[diag(passes_plugin_registrar)]

View File

@ -1489,6 +1489,8 @@ options! {
"keep hygiene data after analysis (default: no)"), "keep hygiene data after analysis (default: no)"),
layout_seed: Option<u64> = (None, parse_opt_number, [TRACKED], layout_seed: Option<u64> = (None, parse_opt_number, [TRACKED],
"seed layout randomization"), "seed layout randomization"),
link_directives: bool = (true, parse_bool, [TRACKED],
"honor #[link] directives in the compiled crate (default: yes)"),
link_native_libraries: bool = (true, parse_bool, [UNTRACKED],
"link native libraries in the linker invocation (default: yes)"),
link_only: bool = (false, parse_bool, [TRACKED],

View File

@ -99,6 +99,15 @@ pub(super) trait GoalKind<'tcx>: TypeFoldable<TyCtxt<'tcx>> + Copy + Eq {
requirements: impl IntoIterator<Item = Goal<'tcx, ty::Predicate<'tcx>>>,
) -> QueryResult<'tcx>;
// Consider a clause specifically for a `dyn Trait` self type. This requires
// additionally checking all of the supertraits and object bounds to hold,
// since they're not implied by the well-formedness of the object type.
fn consider_object_bound_candidate(
ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, Self>,
assumption: ty::Predicate<'tcx>,
) -> QueryResult<'tcx>;
fn consider_impl_candidate(
ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, Self>,
@ -455,7 +464,7 @@ impl<'tcx> EvalCtxt<'_, 'tcx> {
for assumption in
elaborate_predicates(tcx, bounds.iter().map(|bound| bound.with_self_ty(tcx, self_ty)))
{
match G::consider_implied_clause(self, goal, assumption.predicate, []) {
match G::consider_object_bound_candidate(self, goal, assumption.predicate) {
Ok(result) => {
candidates.push(Candidate { source: CandidateSource::BuiltinImpl, result })
}

View File

@ -128,6 +128,51 @@ impl<'tcx> assembly::GoalKind<'tcx> for ProjectionPredicate<'tcx> {
}
}
fn consider_object_bound_candidate(
ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, Self>,
assumption: ty::Predicate<'tcx>,
) -> QueryResult<'tcx> {
if let Some(poly_projection_pred) = assumption.to_opt_poly_projection_pred()
&& poly_projection_pred.projection_def_id() == goal.predicate.def_id()
{
ecx.probe(|ecx| {
let assumption_projection_pred =
ecx.instantiate_binder_with_infer(poly_projection_pred);
let mut nested_goals = ecx.eq(
goal.param_env,
goal.predicate.projection_ty,
assumption_projection_pred.projection_ty,
)?;
let tcx = ecx.tcx();
let ty::Dynamic(bounds, _, _) = *goal.predicate.self_ty().kind() else {
bug!("expected object type in `consider_object_bound_candidate`");
};
nested_goals.extend(
structural_traits::predicates_for_object_candidate(
ecx,
goal.param_env,
goal.predicate.projection_ty.trait_ref(tcx),
bounds,
)
.into_iter()
.map(|pred| goal.with(tcx, pred)),
);
let subst_certainty = ecx.evaluate_all(nested_goals)?;
ecx.eq_term_and_make_canonical_response(
goal,
subst_certainty,
assumption_projection_pred.term,
)
})
} else {
Err(NoSolution)
}
}
fn consider_impl_candidate(
ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, ProjectionPredicate<'tcx>>,

View File

@ -86,6 +86,46 @@ impl<'tcx> assembly::GoalKind<'tcx> for TraitPredicate<'tcx> {
}
}
fn consider_object_bound_candidate(
ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, Self>,
assumption: ty::Predicate<'tcx>,
) -> QueryResult<'tcx> {
if let Some(poly_trait_pred) = assumption.to_opt_poly_trait_pred()
&& poly_trait_pred.def_id() == goal.predicate.def_id()
{
// FIXME: Constness and polarity
ecx.probe(|ecx| {
let assumption_trait_pred =
ecx.instantiate_binder_with_infer(poly_trait_pred);
let mut nested_goals = ecx.eq(
goal.param_env,
goal.predicate.trait_ref,
assumption_trait_pred.trait_ref,
)?;
let tcx = ecx.tcx();
let ty::Dynamic(bounds, _, _) = *goal.predicate.self_ty().kind() else {
bug!("expected object type in `consider_object_bound_candidate`");
};
nested_goals.extend(
structural_traits::predicates_for_object_candidate(
ecx,
goal.param_env,
goal.predicate.trait_ref,
bounds,
)
.into_iter()
.map(|pred| goal.with(tcx, pred)),
);
ecx.evaluate_all_and_make_canonical_response(nested_goals)
})
} else {
Err(NoSolution)
}
}
fn consider_auto_trait_candidate(
ecx: &mut EvalCtxt<'_, 'tcx>,
goal: Goal<'tcx, Self>,

View File

@ -1,6 +1,7 @@
use rustc_hir::{Movability, Mutability};
use rustc_data_structures::fx::FxHashMap;
use rustc_hir::{def_id::DefId, Movability, Mutability};
use rustc_infer::traits::query::NoSolution;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_middle::ty::{self, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeSuperFoldable};
use crate::solve::EvalCtxt;
@ -231,3 +232,112 @@ pub(crate) fn extract_tupled_inputs_and_output_from_callable<'tcx>(
}
}
}
/// Assemble a list of predicates that would be present on a theoretical
/// user impl for an object type. These predicates must be checked any time
/// we assemble a built-in object candidate for an object type, since they
/// are not implied by the well-formedness of the type.
///
/// For example, given the following traits:
///
/// ```rust,ignore (theoretical code)
/// trait Foo: Baz {
/// type Bar: Copy;
/// }
///
/// trait Baz {}
/// ```
///
/// For the dyn type `dyn Foo<Item = Ty>`, we can imagine there being a
/// pair of theoretical impls:
///
/// ```rust,ignore (theoretical code)
/// impl Foo for dyn Foo<Item = Ty>
/// where
/// Self: Baz,
/// <Self as Foo>::Bar: Copy,
/// {
/// type Bar = Ty;
/// }
///
/// impl Baz for dyn Foo<Item = Ty> {}
/// ```
///
/// However, in order to make such impls well-formed, we need to do an
/// additional step of eagerly folding the associated types in the where
/// clauses of the impl. In this example, that means replacing
/// `<Self as Foo>::Bar` with `Ty` in the first impl.
pub(crate) fn predicates_for_object_candidate<'tcx>(
ecx: &EvalCtxt<'_, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
trait_ref: ty::TraitRef<'tcx>,
object_bound: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
) -> Vec<ty::Predicate<'tcx>> {
let tcx = ecx.tcx();
let mut requirements = vec![];
requirements.extend(
tcx.super_predicates_of(trait_ref.def_id).instantiate(tcx, trait_ref.substs).predicates,
);
for item in tcx.associated_items(trait_ref.def_id).in_definition_order() {
// FIXME(associated_const_equality): Also add associated consts to
// the requirements here.
if item.kind == ty::AssocKind::Type {
requirements.extend(tcx.item_bounds(item.def_id).subst(tcx, trait_ref.substs));
}
}
let mut replace_projection_with = FxHashMap::default();
for bound in object_bound {
if let ty::ExistentialPredicate::Projection(proj) = bound.skip_binder() {
let proj = proj.with_self_ty(tcx, trait_ref.self_ty());
let old_ty = replace_projection_with.insert(proj.def_id(), bound.rebind(proj));
assert_eq!(
old_ty,
None,
"{} has two substitutions: {} and {}",
proj.projection_ty,
proj.term,
old_ty.unwrap()
);
}
}
requirements.fold_with(&mut ReplaceProjectionWith {
ecx,
param_env,
mapping: replace_projection_with,
})
}
struct ReplaceProjectionWith<'a, 'tcx> {
ecx: &'a EvalCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
mapping: FxHashMap<DefId, ty::PolyProjectionPredicate<'tcx>>,
}
impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ReplaceProjectionWith<'_, 'tcx> {
fn interner(&self) -> TyCtxt<'tcx> {
self.ecx.tcx()
}
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
if let ty::Alias(ty::Projection, alias_ty) = *ty.kind()
&& let Some(replacement) = self.mapping.get(&alias_ty.def_id)
{
// We may have a case where our object type's projection bound is higher-ranked,
// but the where clauses we instantiated are not. We can solve this by instantiating
// the binder at the usage site.
let proj = self.ecx.instantiate_binder_with_infer(*replacement);
// FIXME: Technically this folder could be fallible?
let nested = self
.ecx
.eq(self.param_env, alias_ty, proj.projection_ty)
.expect("expected to be able to unify goal projection with dyn's projection");
// FIXME: Technically we could register these too..
assert!(nested.is_empty(), "did not expect unification to have any nested goals");
proj.term.ty().unwrap()
} else {
ty.super_fold_with(self)
}
}
}

View File

@ -666,6 +666,9 @@ changelog-seen = 2
# LTO entirely.
#lto = "thin-local"
# Build compiler with the optimization enabled and -Zvalidate-mir, currently only for `std`
#validate-mir-opts = 3
# =============================================================================
# Options for specific targets
#

View File

@ -792,7 +792,7 @@ impl<'a> Builder<'a> {
run::CollectLicenseMetadata,
run::GenerateCopyright,
),
Kind::Setup => describe!(setup::Profile),
Kind::Setup => describe!(setup::Profile, setup::Hook, setup::Link, setup::Vscode),
Kind::Clean => describe!(clean::CleanAll, clean::Rustc, clean::Std),
// special-cased in Build::build()
Kind::Format => vec![],
@ -1915,6 +1915,13 @@ impl<'a> Builder<'a> {
}
}
if matches!(mode, Mode::Std) {
if let Some(mir_opt_level) = self.config.rust_validate_mir_opts {
rustflags.arg("-Zvalidate-mir");
rustflags.arg(&format!("-Zmir-opt-level={}", mir_opt_level));
}
}
Cargo { command: cargo, rustflags, rustdocflags, allow_features }
}

View File

@ -173,6 +173,7 @@ pub struct Config {
pub rust_profile_use: Option<String>,
pub rust_profile_generate: Option<String>,
pub rust_lto: RustcLto,
pub rust_validate_mir_opts: Option<u32>,
pub llvm_profile_use: Option<String>,
pub llvm_profile_generate: bool,
pub llvm_libunwind_default: Option<LlvmLibunwind>,
@ -770,6 +771,7 @@ define_config! {
// ignored; this is set from an env var set by bootstrap.py
download_rustc: Option<StringOrBool> = "download-rustc",
lto: Option<String> = "lto",
validate_mir_opts: Option<u32> = "validate-mir-opts",
}
}
@ -1149,6 +1151,7 @@ impl Config {
.as_deref()
.map(|value| RustcLto::from_str(value).unwrap())
.unwrap_or_default();
config.rust_validate_mir_opts = rust.validate_mir_opts;
} else {
config.rust_profile_use = flags.rust_profile_use;
config.rust_profile_generate = flags.rust_profile_generate;

View File

@ -379,8 +379,14 @@ cur_section = None
sections[None] = []
section_order = [None]
targets = {}
top_level_keys = []
for line in open(rust_dir + '/config.toml.example').read().split("\n"):
if cur_section == None:
if line.count('=') == 1:
top_level_key = line.split('=')[0]
top_level_key = top_level_key.strip(' #')
top_level_keys.append(top_level_key)
if line.startswith('['):
cur_section = line[1:-1]
if cur_section.startswith('target'):
@ -459,12 +465,22 @@ def configure_section(lines, config):
raise RuntimeError("failed to find config line for {}".format(key)) raise RuntimeError("failed to find config line for {}".format(key))
for section_key in config: def configure_top_level_key(lines, top_level_key, value):
section_config = config[section_key] for i, line in enumerate(lines):
if section_key not in sections: if line.startswith('#' + top_level_key + ' = ') or line.startswith(top_level_key + ' = '):
raise RuntimeError("config key {} not in sections".format(section_key)) lines[i] = "{} = {}".format(top_level_key, value)
return
if section_key == 'target': raise RuntimeError("failed to find config line for {}".format(top_level_key))
for section_key, section_config in config.items():
if section_key not in sections and section_key not in top_level_keys:
raise RuntimeError("config key {} not in sections or top_level_keys".format(section_key))
if section_key in top_level_keys:
configure_top_level_key(sections[None], section_key, section_config)
elif section_key == 'target':
for target in section_config:
configure_section(targets[target], section_config[target])
else:

View File

@ -554,7 +554,8 @@ Arguments:
Kind::Setup => {
subcommand_help.push_str(&format!(
"\n
x.py setup creates a `config.toml` which changes the defaults for x.py itself.
x.py setup creates a `config.toml` which changes the defaults for x.py itself,
as well as setting up a git pre-push hook, VS code config and toolchain link.
Arguments:
This subcommand accepts a 'profile' to use for builds. For example:
@ -564,7 +565,13 @@ Arguments:
The profile is optional and you will be prompted interactively if it is not given.
The following profiles are available:
{}",
{}
To only set up the git hook, VS code or toolchain link, you may use
./x.py setup hook
./x.py setup vscode
./x.py setup link
",
Profile::all_for_help(" ").trim_end()
));
}
@ -638,7 +645,7 @@ Arguments:
}
Kind::Setup => {
let profile = if paths.len() > 1 {
eprintln!("\nerror: At most one profile can be passed to setup\n");
eprintln!("\nerror: At most one option can be passed to setup\n");
usage(1, &opts, verbose, &subcommand_help)
} else if let Some(path) = paths.pop() {
let profile_string = t!(path.into_os_string().into_string().map_err(

View File

@ -21,6 +21,7 @@ pub enum Profile {
Library,
Tools,
User,
None,
}
/// A list of historical hashes of `src/etc/vscode_settings.json`.
@ -41,7 +42,7 @@ impl Profile {
pub fn all() -> impl Iterator<Item = Self> {
use Profile::*;
// N.B. these are ordered by how they are displayed, not alphabetically
[Library, Compiler, Codegen, Tools, User].iter().copied()
[Library, Compiler, Codegen, Tools, User, None].iter().copied()
}
pub fn purpose(&self) -> String {
@ -52,6 +53,7 @@ impl Profile {
Codegen => "Contribute to the compiler, and also modify LLVM or codegen", Codegen => "Contribute to the compiler, and also modify LLVM or codegen",
Tools => "Contribute to tools which depend on the compiler, but do not modify it directly (e.g. rustdoc, clippy, miri)", Tools => "Contribute to tools which depend on the compiler, but do not modify it directly (e.g. rustdoc, clippy, miri)",
User => "Install Rust from source", User => "Install Rust from source",
None => "Do not modify `config.toml`"
}
.to_string()
}
@ -71,6 +73,7 @@ impl Profile {
Profile::Library => "library", Profile::Library => "library",
Profile::Tools => "tools", Profile::Tools => "tools",
Profile::User => "user", Profile::User => "user",
Profile::None => "none",
}
}
}
@ -87,6 +90,7 @@ impl FromStr for Profile {
"tools" | "tool" | "rustdoc" | "clippy" | "miri" | "rustfmt" | "rls" => { "tools" | "tool" | "rustdoc" | "clippy" | "miri" | "rustfmt" | "rls" => {
Ok(Profile::Tools) Ok(Profile::Tools)
} }
"none" => Ok(Profile::None),
_ => Err(format!("unknown profile: '{}'", s)),
}
}
@ -144,17 +148,8 @@ impl Step for Profile {
}
pub fn setup(config: &Config, profile: Profile) {
let stage_path =
["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());
if !rustup_installed() && profile != Profile::User {
eprintln!("`rustup` is not installed; cannot link `stage1` toolchain");
} else if stage_dir_exists(&stage_path[..]) && !config.dry_run() {
attempt_toolchain_link(&stage_path[..]);
}
let suggestions = match profile {
Profile::Codegen | Profile::Compiler => &["check", "build", "test"][..],
let suggestions: &[&str] = match profile {
Profile::Codegen | Profile::Compiler | Profile::None => &["check", "build", "test"],
Profile::Tools => &[
"check",
"build",
@ -167,11 +162,6 @@ pub fn setup(config: &Config, profile: Profile) {
Profile::User => &["dist", "build"], Profile::User => &["dist", "build"],
}; };
if !config.dry_run() {
t!(install_git_hook_maybe(&config));
t!(create_vscode_settings_maybe(&config));
}
println!();
println!("To get started, try one of the following commands:");
@ -190,6 +180,9 @@ pub fn setup(config: &Config, profile: Profile) {
}
fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) {
if profile == Profile::None {
return;
}
if path.exists() {
eprintln!();
eprintln!(
@ -217,6 +210,41 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) {
println!("`x.py` will now use the configuration at {}", include_path.display()); println!("`x.py` will now use the configuration at {}", include_path.display());
} }
/// Creates a toolchain link for stage1 using `rustup`
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Link;
impl Step for Link {
type Output = ();
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.alias("link")
}
fn make_run(run: RunConfig<'_>) {
if run.builder.config.dry_run() {
return;
}
if let [cmd] = &run.paths[..] {
if cmd.assert_single_path().path.as_path().as_os_str() == "link" {
run.builder.ensure(Link);
}
}
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
let config = &builder.config;
if config.dry_run() {
return;
}
let stage_path =
["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());
if !rustup_installed() {
eprintln!("`rustup` is not installed; cannot link `stage1` toolchain");
} else if stage_dir_exists(&stage_path[..]) && !config.dry_run() {
attempt_toolchain_link(&stage_path[..]);
}
}
}
fn rustup_installed() -> bool {
Command::new("rustup")
.arg("--version")
@ -394,6 +422,35 @@ fn prompt_user(prompt: &str) -> io::Result<Option<PromptResult>> {
}
}
/// Installs `src/etc/pre-push.sh` as a Git hook
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Hook;
impl Step for Hook {
type Output = ();
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.alias("hook")
}
fn make_run(run: RunConfig<'_>) {
if run.builder.config.dry_run() {
return;
}
if let [cmd] = &run.paths[..] {
if cmd.assert_single_path().path.as_path().as_os_str() == "hook" {
run.builder.ensure(Hook);
}
}
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
let config = &builder.config;
if config.dry_run() {
return;
}
t!(install_git_hook_maybe(&config));
}
}
// install a git hook to automatically run tidy, if they want
fn install_git_hook_maybe(config: &Config) -> io::Result<()> {
let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| {
@ -432,6 +489,35 @@ undesirable, simply delete the `pre-push` file from .git/hooks."
Ok(())
}
/// Sets up or displays `src/etc/vscode_settings.json`
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct Vscode;
impl Step for Vscode {
type Output = ();
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.alias("vscode")
}
fn make_run(run: RunConfig<'_>) {
if run.builder.config.dry_run() {
return;
}
if let [cmd] = &run.paths[..] {
if cmd.assert_single_path().path.as_path().as_os_str() == "vscode" {
run.builder.ensure(Vscode);
}
}
}
fn run(self, builder: &Builder<'_>) -> Self::Output {
let config = &builder.config;
if config.dry_run() {
return;
}
t!(create_vscode_settings_maybe(&config));
}
}
/// Create a `.vscode/settings.json` file for rustc development, or just print it
fn create_vscode_settings_maybe(config: &Config) -> io::Result<()> {
let (current_hash, historical_hashes) = SETTINGS_HASHES.split_last().unwrap();

View File

@ -23,6 +23,8 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
RUN curl -sL https://nodejs.org/dist/v16.9.0/node-v16.9.0-linux-x64.tar.xz | tar -xJ
ENV PATH="/node-v16.9.0-linux-x64/bin:${PATH}"
ENV RUST_CONFIGURE_ARGS="--set rust.validate-mir-opts=3"
# Install es-check
# Pin its version to prevent unrelated CI failures due to future es-check versions.
RUN npm install es-check@6.1.1 eslint@8.6.0 -g
@ -38,7 +40,7 @@ COPY host-x86_64/mingw-check/validate-error-codes.sh /scripts/
ENV RUN_CHECK_WITH_PARALLEL_QUERIES 1
ENV SCRIPT python3 ../x.py --stage 2 test src/tools/expand-yaml-anchors && \
python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu --all-targets && \
python3 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu && \
python3 ../x.py build --stage 0 src/tools/build-manifest && \
python3 ../x.py test --stage 0 src/tools/compiletest && \
python3 ../x.py test --stage 0 core alloc std test proc_macro && \

View File

@ -0,0 +1,12 @@
// regression test for failing to pass `--crate-type proc-macro` to rustdoc
// when documenting a proc macro crate https://github.com/rust-lang/rust/pull/107291
extern crate proc_macro;
use proc_macro::TokenStream;
#[proc_macro_derive(DeriveA)]
//~^ ERROR the `#[proc_macro_derive]` attribute is only usable with crates of the `proc-macro` crate type
pub fn a_derive(input: TokenStream) -> TokenStream {
input
}

View File

@ -0,0 +1,8 @@
error: the `#[proc_macro_derive]` attribute is only usable with crates of the `proc-macro` crate type
--> $DIR/proc_macro_bug.rs:8:1
|
LL | #[proc_macro_derive(DeriveA)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to previous error

View File

@ -81,6 +81,7 @@
Multiple options can be combined with commas.
-Z keep-hygiene-data=val -- keep hygiene data after analysis (default: no)
-Z layout-seed=val -- seed layout randomization
-Z link-directives=val -- honor #[link] directives in the compiled crate (default: yes)
-Z link-native-libraries=val -- link native libraries in the linker invocation (default: yes)
-Z link-only=val -- link the `.rlink` file generated by `-Z no-link` (default: no)
-Z llvm-plugins=val -- a list LLVM plugins to enable (space separated)

View File

@ -0,0 +1,26 @@
// check-pass
#[macro_export(hello, world)] //~ WARN `#[macro_export]` can only take 1 or 0 arguments
macro_rules! a {
() => ()
}
#[macro_export(not_local_inner_macros)] //~ WARN `not_local_inner_macros` isn't a valid `#[macro_export]` argument
macro_rules! b {
() => ()
}
#[macro_export]
macro_rules! c {
() => ()
}
#[macro_export(local_inner_macros)]
macro_rules! d {
() => ()
}
#[macro_export()]
macro_rules! e {
() => ()
}
fn main() {}

View File

@ -0,0 +1,16 @@
warning: `#[macro_export]` can only take 1 or 0 arguments
--> $DIR/invalid_macro_export_argument.rs:2:1
|
LL | #[macro_export(hello, world)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: `#[warn(invalid_macro_export_arguments)]` on by default
warning: `not_local_inner_macros` isn't a valid `#[macro_export]` argument
--> $DIR/invalid_macro_export_argument.rs:7:16
|
LL | #[macro_export(not_local_inner_macros)]
| ^^^^^^^^^^^^^^^^^^^^^^
warning: 2 warnings emitted

View File

@ -0,0 +1,10 @@
// run-pass
// compile-flags: -Zlink-directives=no
// ignore-windows - this will probably only work on unixish systems
// ignore-fuchsia - missing __libc_start_main for some reason (#84733)
// ignore-cross-compile - default-linker-libraries=yes doesn't play well with cross compiling
#[link(name = "some-random-non-existent-library", kind = "static")]
extern "C" {}
fn main() {}

View File

@ -0,0 +1,17 @@
// compile-flags: -Ztrait-solver=next
// check-pass
trait Trait<'a> {
type Item: for<'b> Trait2<'b>;
}
trait Trait2<'a> {}
impl Trait2<'_> for () {}
fn needs_trait(_: Box<impl for<'a> Trait<'a> + ?Sized>) {}
fn foo(x: Box<dyn for<'a> Trait<'a, Item = ()>>) {
needs_trait(x);
}
fn main() {}

View File

@ -0,0 +1,27 @@
// compile-flags: -Ztrait-solver=next
// From #80800
trait SuperTrait {
type A;
type B;
}
trait Trait: SuperTrait<A = <Self as SuperTrait>::B> {}
fn transmute<A, B>(x: A) -> B {
foo::<A, B, dyn Trait<A = A, B = B>>(x)
//~^ ERROR type annotations needed: cannot satisfy `dyn Trait<A = A, B = B>: Trait`
}
fn foo<A, B, T: ?Sized>(x: T::A) -> B
where
T: Trait<B = B>,
{
x
}
static X: u8 = 0;
fn main() {
let x = transmute::<&u8, &[u8; 1_000_000]>(&X);
println!("{:?}", x[100_000]);
}

View File

@ -0,0 +1,19 @@
error[E0283]: type annotations needed: cannot satisfy `dyn Trait<A = A, B = B>: Trait`
--> $DIR/more-object-bound.rs:12:5
|
LL | foo::<A, B, dyn Trait<A = A, B = B>>(x)
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: cannot satisfy `dyn Trait<A = A, B = B>: Trait`
note: required by a bound in `foo`
--> $DIR/more-object-bound.rs:18:8
|
LL | fn foo<A, B, T: ?Sized>(x: T::A) -> B
| --- required by a bound in this function
LL | where
LL | T: Trait<B = B>,
| ^^^^^^^^^^^^ required by this bound in `foo`
error: aborting due to previous error
For more information about this error, try `rustc --explain E0283`.

View File

@ -0,0 +1,20 @@
// compile-flags: -Ztrait-solver=next
trait Setup {
type From: Copy;
}
fn copy<U: Setup + ?Sized>(from: &U::From) -> U::From {
*from
}
pub fn copy_any<T>(t: &T) -> T {
copy::<dyn Setup<From=T>>(t)
//~^ ERROR the trait bound `dyn Setup<From = T>: Setup` is not satisfied
}
fn main() {
let x = String::from("Hello, world");
let y = copy_any(&x);
println!("{y}");
}

View File

@ -0,0 +1,19 @@
error[E0277]: the trait bound `dyn Setup<From = T>: Setup` is not satisfied
--> $DIR/object-unsafety.rs:12:12
|
LL | copy::<dyn Setup<From=T>>(t)
| ^^^^^^^^^^^^^^^^^ the trait `Setup` is not implemented for `dyn Setup<From = T>`
|
note: required by a bound in `copy`
--> $DIR/object-unsafety.rs:7:12
|
LL | fn copy<U: Setup + ?Sized>(from: &U::From) -> U::From {
| ^^^^^ required by this bound in `copy`
help: consider introducing a `where` clause, but there might be an alternative better way to express this requirement
|
LL | pub fn copy_any<T>(t: &T) -> T where dyn Setup<From = T>: Setup {
| ++++++++++++++++++++++++++++++++
error: aborting due to previous error
For more information about this error, try `rustc --explain E0277`.