Rollup merge of #121340 - GrigorenkoPV:bootstrap-clippy, r=onur-ozkan

bootstrap: apply most of clippy's suggestions
León Orell Valerian Liehr 2024-02-21 16:32:57 +01:00 committed by GitHub
commit 216f9a4778
29 changed files with 313 additions and 322 deletions
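
The single most common change in this diff is dropping a redundant `&`, as in `fs::File::open(&entry.path())` -> `fs::File::open(entry.path())` in the first file below: these call sites take `impl AsRef<Path>` (or already receive a reference), so the extra borrow adds nothing. A minimal standalone sketch of the same pattern, using only standard-library calls:

    use std::fs;
    use std::path::PathBuf;

    fn main() -> std::io::Result<()> {
        let dir = PathBuf::from(".");

        // `fs::read_dir` takes `impl AsRef<Path>`, so `dir` can be passed as-is;
        // writing `fs::read_dir(&dir)` here would add a borrow clippy flags as redundant.
        for entry in fs::read_dir(dir)? {
            let entry = entry?;
            // Same pattern as `fs::File::open(entry.path())` below: the owned
            // `PathBuf` returned by `entry.path()` already satisfies `AsRef<Path>`.
            let meta = fs::metadata(entry.path())?;
            println!("{} ({} bytes)", entry.path().display(), meta.len());
        }
        Ok(())
    }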

View File

@ -39,14 +39,14 @@ fn main() {
.open(&lock_path)));
_build_lock_guard = match build_lock.try_write() {
Ok(mut lock) => {
t!(lock.write(&process::id().to_string().as_ref()));
t!(lock.write(process::id().to_string().as_ref()));
lock
}
err => {
drop(err);
println!("WARNING: build directory locked by process {pid}, waiting for lock");
let mut lock = t!(build_lock.write());
t!(lock.write(&process::id().to_string().as_ref()));
t!(lock.write(process::id().to_string().as_ref()));
lock
}
};
@ -113,14 +113,14 @@ fn main() {
continue;
}
let file = t!(fs::File::open(&entry.path()));
let file = t!(fs::File::open(entry.path()));
// To ensure deterministic results we must sort the dump lines.
// This is necessary because the order of rustc invocations differs
// almost all the time.
let mut lines: Vec<String> = t!(BufReader::new(&file).lines().collect());
lines.sort_by_key(|t| t.to_lowercase());
let mut file = t!(OpenOptions::new().write(true).truncate(true).open(&entry.path()));
let mut file = t!(OpenOptions::new().write(true).truncate(true).open(entry.path()));
t!(file.write_all(lines.join("\n").as_bytes()));
}
}
@ -156,7 +156,7 @@ fn check_version(config: &Config) -> Option<String> {
msg.push_str("There have been changes to x.py since you last updated:\n");
for change in changes {
msg.push_str(&format!(" [{}] {}\n", change.severity.to_string(), change.summary));
msg.push_str(&format!(" [{}] {}\n", change.severity, change.summary));
msg.push_str(&format!(
" - PR Link https://github.com/rust-lang/rust/pull/{}\n",
change.change_id
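
The `change.severity.to_string()` -> `change.severity` change works because `format!`'s `{}` placeholder already formats through `Display`; converting to a `String` first only adds an allocation. A small sketch with a made-up `Severity` type (not bootstrap's):

    use std::fmt;

    // Hypothetical stand-in for the real severity type.
    struct Severity;

    impl fmt::Display for Severity {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            f.write_str("Warning")
        }
    }

    fn main() {
        let severity = Severity;
        // `{}` goes through `Display` either way, so `severity.to_string()`
        // inside `format!` would just build an intermediate `String`.
        let msg = format!("  [{}] some summary\n", severity);
        print!("{msg}");
    }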

View File

@ -276,7 +276,7 @@ fn main() {
dur.as_secs(),
dur.subsec_millis(),
if rusage_data.is_some() { " " } else { "" },
rusage_data.unwrap_or(String::new()),
rusage_data.unwrap_or_default(),
);
}
}
@ -440,5 +440,5 @@ fn format_rusage_data(_child: Child) -> Option<String> {
));
}
return Some(init_str);
Some(init_str)
}
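
Two more recurring fixes appear in this file: `unwrap_or(String::new())` becomes `unwrap_or_default()`, and a trailing `return Some(init_str);` becomes a plain tail expression. A minimal sketch, with a made-up function name:

    fn format_rusage(data: Option<String>) -> String {
        // `unwrap_or_default()` replaces `unwrap_or(String::new())`: the
        // fallback is the empty string either way, without spelling out
        // (and eagerly constructing) the default value.
        let mut out = data.unwrap_or_default();
        out.push_str(" (done)");
        // Previously written as `return out;`: the tail expression already
        // is the return value, so the `return` keyword is redundant.
        out
    }

    fn main() {
        assert_eq!(format_rusage(None), " (done)");
        assert_eq!(format_rusage(Some("max rss: 1MB".into())), "max rss: 1MB (done)");
    }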

View File

@ -18,9 +18,9 @@ fn main() {
// Invoke sccache with said compiler
let sccache_path = env::var_os("SCCACHE_PATH").unwrap();
let mut cmd = Command::new(&sccache_path);
let mut cmd = Command::new(sccache_path);
cmd.arg(compiler.path());
for &(ref k, ref v) in compiler.env() {
for (k, v) in compiler.env() {
cmd.env(k, v);
}
for arg in env::args().skip(1) {
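
The `for &(ref k, ref v) in compiler.env()` -> `for (k, v) in compiler.env()` change relies on match ergonomics: when the scrutinee is a reference to a tuple, the bindings become references automatically, so the explicit `&`/`ref` spelling is no longer needed. A sketch with made-up data:

    fn main() {
        let env: Vec<(String, String)> = vec![
            ("CC".to_string(), "clang".to_string()),
            ("CFLAGS".to_string(), "-O2".to_string()),
        ];

        // Iterating `&env` yields `&(String, String)`; with match ergonomics,
        // `(k, v)` binds `k: &String` and `v: &String` directly, which is what
        // the old `&(ref k, ref v)` pattern spelled out by hand.
        for (k, v) in &env {
            println!("{k}={v}");
        }
    }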

View File

@ -34,7 +34,7 @@ fn strings<'a>(arr: &'a [&str]) -> impl Iterator<Item = String> + 'a {
&builder.config.cmd
{
// disable the most spammy clippy lints
let ignored_lints = vec![
let ignored_lints = [
"many_single_char_names", // there are a lot in stdarch
"collapsible_if",
"type_complexity",
@ -150,7 +150,7 @@ fn run(self, builder: &Builder<'_>) {
if compiler.stage == 0 {
let libdir = builder.sysroot_libdir(compiler, target);
let hostdir = builder.sysroot_libdir(compiler, compiler.host);
add_to_sysroot(&builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
add_to_sysroot(builder, &libdir, &hostdir, &libstd_stamp(builder, compiler, target));
}
drop(_guard);
@ -301,7 +301,7 @@ fn run(self, builder: &Builder<'_>) {
let libdir = builder.sysroot_libdir(compiler, target);
let hostdir = builder.sysroot_libdir(compiler, compiler.host);
add_to_sysroot(&builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
add_to_sysroot(builder, &libdir, &hostdir, &librustc_stamp(builder, compiler, target));
}
}
@ -353,7 +353,7 @@ fn run(self, builder: &Builder<'_>) {
.arg(builder.src.join(format!("compiler/rustc_codegen_{backend}/Cargo.toml")));
rustc_cargo_env(builder, &mut cargo, target, compiler.stage);
let _guard = builder.msg_check(&backend, target);
let _guard = builder.msg_check(backend, target);
run_cargo(
builder,
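
The `vec![...]` -> `[...]` change for `ignored_lints`: when a fixed list is only iterated (never grown or passed around as a `Vec`), a plain array does the same job without a heap allocation. A sketch:

    fn main() {
        // A fixed list that is only iterated needs no `vec![...]`.
        let ignored_lints = ["many_single_char_names", "collapsible_if", "type_complexity"];

        let args: Vec<String> =
            ignored_lints.iter().map(|lint| format!("-Aclippy::{lint}")).collect();
        for arg in &args {
            println!("{arg}");
        }
    }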

View File

@ -107,8 +107,8 @@ fn copy_extra_objects(
) -> Vec<(PathBuf, DependencyType)> {
let mut deps = Vec::new();
if !self.is_for_mir_opt_tests {
deps.extend(copy_third_party_objects(builder, &compiler, target));
deps.extend(copy_self_contained_objects(builder, &compiler, target));
deps.extend(copy_third_party_objects(builder, compiler, target));
deps.extend(copy_self_contained_objects(builder, compiler, target));
}
deps
}
@ -186,7 +186,7 @@ fn run(self, builder: &Builder<'_>) {
// Profiler information requires LLVM's compiler-rt
if builder.config.profiler {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));
}
let mut target_deps = builder.ensure(StartupObjects { compiler, target });
@ -271,7 +271,7 @@ fn run(self, builder: &Builder<'_>) {
if target.is_synthetic() {
cargo.env("RUSTC_BOOTSTRAP_SYNTHETIC_TARGET", "1");
}
for rustflag in self.extra_rust_args.into_iter() {
for rustflag in self.extra_rust_args.iter() {
cargo.rustflag(rustflag);
}
@ -333,7 +333,7 @@ fn copy_third_party_objects(
// The sanitizers are only copied in stage1 or above,
// to avoid creating dependency on LLVM.
target_deps.extend(
copy_sanitizers(builder, &compiler, target)
copy_sanitizers(builder, compiler, target)
.into_iter()
.map(|d| (d, DependencyType::Target)),
);
@ -487,7 +487,7 @@ pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, car
// for no-std targets we only compile a few no_std crates
cargo
.args(&["-p", "alloc"])
.args(["-p", "alloc"])
.arg("--manifest-path")
.arg(builder.src.join("library/alloc/Cargo.toml"))
.arg("--features")
@ -626,20 +626,20 @@ fn run(self, builder: &Builder<'_>) {
.build
.config
.initial_rustc
.starts_with(builder.out.join(&compiler.host.triple).join("stage0/bin"))
.starts_with(builder.out.join(compiler.host.triple).join("stage0/bin"))
{
// Copy bin files from stage0/bin to stage0-sysroot/bin
let sysroot = builder.out.join(&compiler.host.triple).join("stage0-sysroot");
let sysroot = builder.out.join(compiler.host.triple).join("stage0-sysroot");
let host = compiler.host.triple;
let stage0_bin_dir = builder.out.join(&host).join("stage0/bin");
let stage0_bin_dir = builder.out.join(host).join("stage0/bin");
let sysroot_bin_dir = sysroot.join("bin");
t!(fs::create_dir_all(&sysroot_bin_dir));
builder.cp_r(&stage0_bin_dir, &sysroot_bin_dir);
// Copy all *.so files from stage0/lib to stage0-sysroot/lib
let stage0_lib_dir = builder.out.join(&host).join("stage0/lib");
if let Ok(files) = fs::read_dir(&stage0_lib_dir) {
let stage0_lib_dir = builder.out.join(host).join("stage0/lib");
if let Ok(files) = fs::read_dir(stage0_lib_dir) {
for file in files {
let file = t!(file);
let path = file.path();
@ -654,9 +654,9 @@ fn run(self, builder: &Builder<'_>) {
t!(fs::create_dir_all(&sysroot_codegen_backends));
let stage0_codegen_backends = builder
.out
.join(&host)
.join(host)
.join("stage0/lib/rustlib")
.join(&host)
.join(host)
.join("codegen-backends");
if stage0_codegen_backends.exists() {
builder.cp_r(&stage0_codegen_backends, &sysroot_codegen_backends);
@ -1179,7 +1179,7 @@ fn rustc_llvm_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelect
// The config can also specify its own llvm linker flags.
if let Some(ref s) = builder.config.llvm_ldflags {
if !llvm_linker_flags.is_empty() {
llvm_linker_flags.push_str(" ");
llvm_linker_flags.push(' ');
}
llvm_linker_flags.push_str(s);
}
@ -1270,7 +1270,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
for path_set in &run.paths {
needs_codegen_cfg = match path_set {
PathSet::Set(set) => set.iter().any(|p| is_codegen_cfg_needed(p, run)),
PathSet::Suite(suite) => is_codegen_cfg_needed(&suite, run),
PathSet::Suite(suite) => is_codegen_cfg_needed(suite, run),
}
}
needs_codegen_cfg
@ -1279,7 +1279,7 @@ fn needs_codegen_config(run: &RunConfig<'_>) -> bool {
pub(crate) const CODEGEN_BACKEND_PREFIX: &str = "rustc_codegen_";
fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
if path.path.to_str().unwrap().contains(&CODEGEN_BACKEND_PREFIX) {
if path.path.to_str().unwrap().contains(CODEGEN_BACKEND_PREFIX) {
let mut needs_codegen_backend_config = true;
for &backend in run.builder.config.codegen_backends(run.target) {
if path
@ -1300,7 +1300,7 @@ fn is_codegen_cfg_needed(path: &TaskPath, run: &RunConfig<'_>) -> bool {
}
}
return false;
false
}
impl Step for CodegenBackend {
@ -1393,7 +1393,7 @@ fn run(self, builder: &Builder<'_>) {
}
let stamp = codegen_backend_stamp(builder, compiler, target, backend);
let codegen_backend = codegen_backend.to_str().unwrap();
t!(fs::write(&stamp, &codegen_backend));
t!(fs::write(stamp, codegen_backend));
}
}
@ -1441,7 +1441,7 @@ fn copy_codegen_backends_to_sysroot(
let dot = filename.find('.').unwrap();
format!("{}-{}{}", &filename[..dash], builder.rust_release(), &filename[dot..])
};
builder.copy(&file, &dst.join(target_filename));
builder.copy(file, &dst.join(target_filename));
}
}
@ -1519,7 +1519,7 @@ fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
/// 1-3.
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let compiler = self.compiler;
let host_dir = builder.out.join(&compiler.host.triple);
let host_dir = builder.out.join(compiler.host.triple);
let sysroot_dir = |stage| {
if stage == 0 {
@ -1578,7 +1578,7 @@ fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let mut add_filtered_files = |suffix, contents| {
for path in contents {
let path = Path::new(&path);
if path.parent().map_or(false, |parent| parent.ends_with(&suffix)) {
if path.parent().map_or(false, |parent| parent.ends_with(suffix)) {
filtered_files.push(path.file_name().unwrap().to_owned());
}
}
@ -1802,7 +1802,7 @@ fn run(self, builder: &Builder<'_>) -> Compiler {
if let Some(lld_install) = lld_install {
let src_exe = exe("lld", target_compiler.host);
let dst_exe = exe("rust-lld", target_compiler.host);
builder.copy(&lld_install.join("bin").join(&src_exe), &libdir_bin.join(&dst_exe));
builder.copy(&lld_install.join("bin").join(src_exe), &libdir_bin.join(dst_exe));
let self_contained_lld_dir = libdir_bin.join("gcc-ld");
t!(fs::create_dir_all(&self_contained_lld_dir));
let lld_wrapper_exe = builder.ensure(crate::core::build_steps::tool::LldWrapper {
@ -1850,7 +1850,7 @@ fn run(self, builder: &Builder<'_>) -> Compiler {
let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host);
let rustc = out_dir.join(exe("rustc-main", host));
let bindir = sysroot.join("bin");
t!(fs::create_dir_all(&bindir));
t!(fs::create_dir_all(bindir));
let compiler = builder.rustc(target_compiler);
builder.copy(&rustc, &compiler);
@ -1869,9 +1869,9 @@ pub fn add_to_sysroot(
stamp: &Path,
) {
let self_contained_dst = &sysroot_dst.join("self-contained");
t!(fs::create_dir_all(&sysroot_dst));
t!(fs::create_dir_all(&sysroot_host_dst));
t!(fs::create_dir_all(&self_contained_dst));
t!(fs::create_dir_all(sysroot_dst));
t!(fs::create_dir_all(sysroot_host_dst));
t!(fs::create_dir_all(self_contained_dst));
for (path, dependency_type) in builder.read_stamp_file(stamp) {
let dst = match dependency_type {
DependencyType::Host => sysroot_host_dst,
@ -2009,14 +2009,14 @@ pub fn run_cargo(
.map(|e| (e.path(), e.file_name().into_string().unwrap(), t!(e.metadata())))
.collect::<Vec<_>>();
for (prefix, extension, expected_len) in toplevel {
let candidates = contents.iter().filter(|&&(_, ref filename, ref meta)| {
let candidates = contents.iter().filter(|&(_, filename, meta)| {
meta.len() == expected_len
&& filename
.strip_prefix(&prefix[..])
.map(|s| s.starts_with('-') && s.ends_with(&extension[..]))
.unwrap_or(false)
});
let max = candidates.max_by_key(|&&(_, _, ref metadata)| {
let max = candidates.max_by_key(|&(_, _, metadata)| {
metadata.modified().expect("mtime should be available on all relevant OSes")
});
let path_to_add = match max {
@ -2045,7 +2045,7 @@ pub fn run_cargo(
new_contents.extend(dep.to_str().unwrap().as_bytes());
new_contents.extend(b"\0");
}
t!(fs::write(&stamp, &new_contents));
t!(fs::write(stamp, &new_contents));
deps.into_iter().map(|(d, _)| d).collect()
}
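
Among the many dropped borrows above, one distinct fix is `llvm_linker_flags.push_str(" ")` -> `push(' ')`: appending a single character has a dedicated method that skips the string-slice machinery. A sketch of the same flag-joining pattern with made-up flags:

    fn main() {
        let extra_flags = ["-Cdebuginfo=1", "-Copt-level=2"];

        let mut joined = String::new();
        for flag in extra_flags {
            if !joined.is_empty() {
                // `push(' ')` appends one `char`; `push_str(" ")` did the same
                // thing through the `&str` API for no benefit.
                joined.push(' ');
            }
            joined.push_str(flag);
        }
        assert_eq!(joined, "-Cdebuginfo=1 -Copt-level=2");
        println!("{joined}");
    }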

View File

@ -78,7 +78,7 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let mut tarball = Tarball::new(builder, "rust-docs", &host.triple);
tarball.set_product_name("Rust Documentation");
tarball.add_bulk_dir(&builder.doc_out(host), dest);
tarball.add_file(&builder.src.join("src/doc/robots.txt"), dest, 0o644);
tarball.add_file(builder.src.join("src/doc/robots.txt"), dest, 0o644);
Some(tarball.generate())
}
}
@ -342,7 +342,7 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
// thrown away (this contains the runtime DLLs included in the rustc package
// above) and the second argument is where to place all the MinGW components
// (which is what we want).
make_win_dist(&tmpdir(builder), tarball.image_dir(), host, &builder);
make_win_dist(&tmpdir(builder), tarball.image_dir(), host, builder);
Some(tarball.generate())
}
@ -658,7 +658,7 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
let stamp = compile::libstd_stamp(builder, compiler_to_use, target);
verify_uefi_rlib_format(builder, target, &stamp);
copy_target_libs(builder, target, &tarball.image_dir(), &stamp);
copy_target_libs(builder, target, tarball.image_dir(), &stamp);
Some(tarball.generate())
}
@ -734,7 +734,7 @@ impl Step for Analysis {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "analysis");
let default = should_build_extended_tool(run.builder, "analysis");
run.alias("rust-analysis").default_condition(default)
}
@ -890,7 +890,7 @@ fn make_run(run: RunConfig<'_>) {
/// Creates the `rust-src` installer component
fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
if !builder.config.dry_run() {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));
}
let tarball = Tarball::new_targetless(builder, "rust-src");
@ -976,7 +976,7 @@ fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
];
let src_dirs = ["src", "compiler", "library", "tests"];
copy_src_dirs(builder, &builder.src, &src_dirs, &[], &plain_dst_src);
copy_src_dirs(builder, &builder.src, &src_dirs, &[], plain_dst_src);
// Copy the files normally
for item in &src_files {
@ -986,8 +986,8 @@ fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
// Create the version file
builder.create(&plain_dst_src.join("version"), &builder.rust_version());
if let Some(info) = builder.rust_info().info() {
channel::write_commit_hash_file(&plain_dst_src, &info.sha);
channel::write_commit_info_file(&plain_dst_src, info);
channel::write_commit_hash_file(plain_dst_src, &info.sha);
channel::write_commit_info_file(plain_dst_src, info);
}
// If we're building from git or tarball sources, we need to vendor
@ -1014,7 +1014,7 @@ fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
// Will read the libstd Cargo.toml
// which uses the unstable `public-dependency` feature.
.env("RUSTC_BOOTSTRAP", "1")
.current_dir(&plain_dst_src);
.current_dir(plain_dst_src);
let config = if !builder.config.dry_run() {
t!(String::from_utf8(t!(cmd.output()).stdout))
@ -1043,7 +1043,7 @@ impl Step for Cargo {
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "cargo");
let default = should_build_extended_tool(run.builder, "cargo");
run.alias("cargo").default_condition(default)
}
@ -1070,7 +1070,7 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let mut tarball = Tarball::new(builder, "cargo", &target.triple);
tarball.set_overlay(OverlayKind::Cargo);
tarball.add_file(&cargo, "bin", 0o755);
tarball.add_file(cargo, "bin", 0o755);
tarball.add_file(etc.join("_cargo"), "share/zsh/site-functions", 0o644);
tarball.add_renamed_file(etc.join("cargo.bashcomp.sh"), "etc/bash_completion.d", "cargo");
tarball.add_dir(etc.join("man"), "share/man/man1");
@ -1092,7 +1092,7 @@ impl Step for Rls {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "rls");
let default = should_build_extended_tool(run.builder, "rls");
run.alias("rls").default_condition(default)
}
@ -1134,7 +1134,7 @@ impl Step for RustAnalyzer {
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "rust-analyzer");
let default = should_build_extended_tool(run.builder, "rust-analyzer");
run.alias("rust-analyzer").default_condition(default)
}
@ -1176,7 +1176,7 @@ impl Step for Clippy {
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "clippy");
let default = should_build_extended_tool(run.builder, "clippy");
run.alias("clippy").default_condition(default)
}
@ -1224,7 +1224,7 @@ impl Step for Miri {
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "miri");
let default = should_build_extended_tool(run.builder, "miri");
run.alias("miri").default_condition(default)
}
@ -1337,12 +1337,12 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let src = builder.sysroot(compiler);
let backends_src = builder.sysroot_codegen_backends(compiler);
let backends_rel = backends_src
.strip_prefix(&src)
.strip_prefix(src)
.unwrap()
.strip_prefix(builder.sysroot_libdir_relative(compiler))
.unwrap();
// Don't use custom libdir here because ^lib/ will be resolved again with installer
let backends_dst = PathBuf::from("lib").join(&backends_rel);
let backends_dst = PathBuf::from("lib").join(backends_rel);
let backend_name = format!("rustc_codegen_{}", backend);
let mut found_backend = false;
@ -1371,7 +1371,7 @@ impl Step for Rustfmt {
const ONLY_HOSTS: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "rustfmt");
let default = should_build_extended_tool(run.builder, "rustfmt");
run.alias("rustfmt").default_condition(default)
}
@ -1454,7 +1454,7 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
let mut tarball = Tarball::new(builder, "rust-demangler", &target.triple);
tarball.set_overlay(OverlayKind::RustDemangler);
tarball.is_preview(true);
tarball.add_file(&rust_demangler, "bin", 0o755);
tarball.add_file(rust_demangler, "bin", 0o755);
tarball.add_legal_and_readme_to("share/doc/rust-demangler");
Some(tarball.generate())
}
@ -1609,7 +1609,7 @@ fn filter(contents: &str, marker: &str) -> String {
let prepare = |name: &str| {
builder.create_dir(&pkg.join(name));
builder.cp_r(
&work.join(&format!("{}-{}", pkgname(builder, name), target.triple)),
&work.join(format!("{}-{}", pkgname(builder, name), target.triple)),
&pkg.join(name),
);
builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
@ -1673,7 +1673,7 @@ fn filter(contents: &str, marker: &str) -> String {
name.to_string()
};
builder.cp_r(
&work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
&work.join(format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
&exe.join(name),
);
builder.remove(&exe.join(name).join("manifest.in"));
@ -1707,7 +1707,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rustc")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("RustcGroup")
.arg("-dr")
@ -1723,7 +1723,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rust-docs")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("DocsGroup")
.arg("-dr")
@ -1741,7 +1741,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("cargo")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("CargoGroup")
.arg("-dr")
@ -1758,7 +1758,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rust-std")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("StdGroup")
.arg("-dr")
@ -1774,7 +1774,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rust-analyzer")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("RustAnalyzerGroup")
.arg("-dr")
@ -1793,7 +1793,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("clippy")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("ClippyGroup")
.arg("-dr")
@ -1812,7 +1812,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rust-demangler")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("RustDemanglerGroup")
.arg("-dr")
@ -1831,7 +1831,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("miri")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("MiriGroup")
.arg("-dr")
@ -1849,7 +1849,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rust-analysis")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("AnalysisGroup")
.arg("-dr")
@ -1867,7 +1867,7 @@ fn filter(contents: &str, marker: &str) -> String {
.current_dir(&exe)
.arg("dir")
.arg("rust-mingw")
.args(&heat_flags)
.args(heat_flags)
.arg("-cg")
.arg("GccGroup")
.arg("-dr")
@ -1890,10 +1890,10 @@ fn filter(contents: &str, marker: &str) -> String {
.arg("-dStdDir=rust-std")
.arg("-dAnalysisDir=rust-analysis")
.arg("-arch")
.arg(&arch)
.arg(arch)
.arg("-out")
.arg(&output)
.arg(&input);
.arg(input);
add_env(builder, &mut cmd, target);
if built_tools.contains("clippy") {
@ -2026,7 +2026,7 @@ fn install_llvm_file(builder: &Builder<'_>, source: &Path, destination: &Path) {
return;
}
builder.install(&source, destination, 0o644);
builder.install(source, destination, 0o644);
}
/// Maybe add LLVM object files to the given destination lib-dir. Allows either static or dynamic linking.
@ -2123,7 +2123,7 @@ impl Step for LlvmTools {
const DEFAULT: bool = true;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
let default = should_build_extended_tool(&run.builder, "llvm-tools");
let default = should_build_extended_tool(run.builder, "llvm-tools");
// FIXME: allow using the names of the tools themselves?
run.alias("llvm-tools").default_condition(default)
}
@ -2231,12 +2231,12 @@ fn run(self, builder: &Builder<'_>) -> Option<GeneratedTarball> {
tarball.add_file(lld_path, "bin", 0o755);
}
tarball.add_file(&builder.llvm_filecheck(target), "bin", 0o755);
tarball.add_file(builder.llvm_filecheck(target), "bin", 0o755);
// Copy the include directory as well; needed mostly to build
// librustc_llvm properly (e.g., llvm-config.h is in here). But also
// just broadly useful to be able to link against the bundled LLVM.
tarball.add_dir(&builder.llvm_out(target).join("include"), "include");
tarball.add_dir(builder.llvm_out(target).join("include"), "include");
// Copy libLLVM.so to the target lib dir as well, so the RPATH like
// `$ORIGIN/../lib` can find it. It may also be used as a dependency
@ -2312,7 +2312,7 @@ fn run(self, builder: &Builder<'_>) -> GeneratedTarball {
let build_manifest = builder.tool_exe(Tool::BuildManifest);
let tarball = Tarball::new(builder, "build-manifest", &self.target.triple);
tarball.add_file(&build_manifest, "bin", 0o755);
tarball.add_file(build_manifest, "bin", 0o755);
tarball.generate()
}
}
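
The repeated `.args(&heat_flags)` -> `.args(heat_flags)` change works because `Command::args` takes an `IntoIterator` of `AsRef<OsStr>` items, which an array of `&str` satisfies by value, so the borrow is redundant. A sketch; the program and flags here are placeholders, not the real WiX invocation:

    use std::process::Command;

    fn main() {
        // Placeholder flags standing in for `heat_flags`.
        let heat_flags = ["-nologo", "-gg", "-sfrag"];

        let mut cmd = Command::new("echo");
        // The array is passed directly: no `&heat_flags` needed.
        cmd.arg("dir").arg("rustc").args(heat_flags).arg("-cg").arg("RustcGroup");

        // Printed rather than spawned; spawning is beside the point here.
        println!("{cmd:?}");
    }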

View File

@ -151,7 +151,7 @@ fn run(self, builder: &Builder<'_>) {
builder.info(&format!("Rustbook ({target}) - {name}"));
let _ = fs::remove_dir_all(&out);
builder.run(rustbook_cmd.arg("build").arg(&src).arg("-d").arg(out));
builder.run(rustbook_cmd.arg("build").arg(src).arg("-d").arg(out));
}
if self.parent.is_some() {
@ -384,7 +384,7 @@ fn run(self, builder: &Builder<'_>) {
// with no particular explicit doc requested (e.g. library/core).
if builder.paths.is_empty() || builder.was_invoked_explicitly::<Self>(Kind::Doc) {
let index = out.join("index.html");
builder.open_in_browser(&index);
builder.open_in_browser(index);
}
}
}
@ -517,7 +517,7 @@ fn run(self, builder: &Builder<'_>) -> Self::Output {
.replace("VERSION", &builder.rust_release())
.replace("SHORT_HASH", builder.rust_info().sha_short().unwrap_or(""))
.replace("STAMP", builder.rust_info().sha().unwrap_or(""));
t!(fs::write(&version_info, &info));
t!(fs::write(&version_info, info));
}
builder.copy(&builder.src.join("src").join("doc").join("rust.css"), &out.join("rust.css"));
@ -714,11 +714,11 @@ fn doc_std(
}
let description =
format!("library{} in {} format", crate_description(&requested_crates), format.as_str());
let _guard = builder.msg_doc(compiler, &description, target);
format!("library{} in {} format", crate_description(requested_crates), format.as_str());
let _guard = builder.msg_doc(compiler, description, target);
builder.run(&mut cargo.into());
builder.cp_r(&out_dir, &out);
builder.cp_r(&out_dir, out);
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
@ -781,7 +781,7 @@ fn run(self, builder: &Builder<'_>) {
let _guard = builder.msg_sysroot_tool(
Kind::Doc,
stage,
&format!("compiler{}", crate_description(&self.crates)),
format!("compiler{}", crate_description(&self.crates)),
compiler.host,
target,
);
@ -819,7 +819,7 @@ fn run(self, builder: &Builder<'_>) {
// Create all crate output directories first to make sure rustdoc uses
// relative links.
// FIXME: Cargo should probably do this itself.
let dir_name = krate.replace("-", "_");
let dir_name = krate.replace('-', "_");
t!(fs::create_dir_all(out_dir.join(&*dir_name)));
cargo.arg("-p").arg(krate);
if to_open.is_none() {
@ -844,7 +844,7 @@ fn run(self, builder: &Builder<'_>) {
if !builder.config.dry_run() {
// Sanity check on linked compiler crates
for krate in &*self.crates {
let dir_name = krate.replace("-", "_");
let dir_name = krate.replace('-', "_");
// Making sure the directory exists and is not empty.
assert!(out.join(&*dir_name).read_dir().unwrap().next().is_some());
}
@ -1160,7 +1160,7 @@ fn run(self, builder: &Builder<'_>) {
cmd.arg(&out_listing);
cmd.arg("--rustc");
cmd.arg(&rustc);
cmd.arg("--rustc-target").arg(&self.target.rustc_target_arg());
cmd.arg("--rustc-target").arg(self.target.rustc_target_arg());
if builder.is_verbose() {
cmd.arg("--verbose");
}
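
The `krate.replace("-", "_")` -> `krate.replace('-', "_")` change: the search pattern can be a single `char`, which is simpler than a one-character `&str` (the replacement itself stays a string slice). A sketch:

    fn main() {
        let krate = "rustc-middle";
        // The pattern is one character, so pass a `char`; only the
        // replacement needs to be a `&str`.
        let dir_name = krate.replace('-', "_");
        assert_eq!(dir_name, "rustc_middle");
        println!("{dir_name}");
    }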

View File

@ -11,7 +11,7 @@
use std::sync::mpsc::SyncSender;
fn rustfmt(src: &Path, rustfmt: &Path, paths: &[PathBuf], check: bool) -> impl FnMut(bool) -> bool {
let mut cmd = Command::new(&rustfmt);
let mut cmd = Command::new(rustfmt);
// avoid the submodule config paths from coming into play,
// we only allow a single global config for the workspace for now
cmd.arg("--config-path").arg(&src.canonicalize().unwrap());
@ -162,7 +162,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
// against anything like `compiler/rustc_foo/src/foo.rs`,
// preventing the latter from being formatted.
untracked_count += 1;
fmt_override.add(&format!("!/{untracked_path}")).expect(&untracked_path);
fmt_override.add(&format!("!/{untracked_path}")).expect(untracked_path);
}
// Only check modified files locally to speed up runtime.
// We still check all files in CI to avoid bugs in `get_modified_rs_files` letting regressions slip through;
@ -221,7 +221,7 @@ pub fn format(build: &Builder<'_>, check: bool, paths: &[PathBuf]) {
assert!(rustfmt_path.exists(), "{}", rustfmt_path.display());
let src = build.src.clone();
let (tx, rx): (SyncSender<PathBuf>, _) = std::sync::mpsc::sync_channel(128);
let walker = match paths.get(0) {
let walker = match paths.first() {
Some(first) => {
let find_shortcut_candidates = |p: &PathBuf| {
let mut candidates = Vec::new();
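
`paths.get(0)` -> `paths.first()`: identical behaviour, but the name states the intent. A sketch:

    fn main() {
        let paths = ["compiler/rustc", "library/std"];

        // `first()` returns `Option<&T>` just like `get(0)`, and reads as
        // "the first element, if any".
        match paths.first() {
            Some(first) => println!("shortcut candidate: {first}"),
            None => println!("no paths given"),
        }
    }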

View File

@ -24,7 +24,7 @@
// We have to run a few shell scripts, which choke quite a bit on both `\`
// characters and on `C:\` paths, so normalize both of them away.
fn sanitize_sh(path: &Path) -> String {
let path = path.to_str().unwrap().replace("\\", "/");
let path = path.to_str().unwrap().replace('\\', "/");
return change_drive(unc_to_lfs(&path)).unwrap_or(path);
fn unc_to_lfs(s: &str) -> &str {
@ -44,7 +44,7 @@ fn change_drive(s: &str) -> Option<String> {
}
}
fn is_dir_writable_for_user(dir: &PathBuf) -> bool {
fn is_dir_writable_for_user(dir: &Path) -> bool {
let tmp = dir.join(".tmp");
match fs::create_dir_all(&tmp) {
Ok(_) => {
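
`fn is_dir_writable_for_user(dir: &PathBuf)` -> `dir: &Path` is clippy's usual `ptr_arg` advice: take the borrowed form, which costs nothing (a `&PathBuf` auto-derefs to `&Path`) and accepts more call sites. A sketch with a simplified body:

    use std::path::{Path, PathBuf};

    // Taking `&Path` lets callers pass `&PathBuf`, `&Path`, or `Path::new(..)`
    // without any conversion.
    fn is_probably_writable(dir: &Path) -> bool {
        // Simplified placeholder check; the real function tries to create a
        // temporary directory to find out.
        dir.exists()
    }

    fn main() {
        let owned = PathBuf::from(".");
        println!("{}", is_probably_writable(&owned)); // `&PathBuf` derefs to `&Path`
        println!("{}", is_probably_writable(Path::new("/tmp")));
    }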

View File

@ -110,7 +110,7 @@ pub fn prebuilt_llvm_config(
let smart_stamp_hash = STAMP_HASH_MEMO.get_or_init(|| {
generate_smart_stamp_hash(
&builder.config.src.join("src/llvm-project"),
&builder.in_tree_llvm_info.sha().unwrap_or_default(),
builder.in_tree_llvm_info.sha().unwrap_or_default(),
)
});
@ -289,7 +289,7 @@ fn run(self, builder: &Builder<'_>) -> LlvmResult {
let _guard = builder.msg_unstaged(Kind::Build, "LLVM", target);
t!(stamp.remove());
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
t!(fs::create_dir_all(&out_dir));
// https://llvm.org/docs/CMake.html
@ -355,7 +355,7 @@ fn run(self, builder: &Builder<'_>) -> LlvmResult {
cfg.define("LLVM_BUILD_RUNTIME", "No");
}
if let Some(path) = builder.config.llvm_profile_use.as_ref() {
cfg.define("LLVM_PROFDATA_FILE", &path);
cfg.define("LLVM_PROFDATA_FILE", path);
}
// Disable zstd to avoid a dependency on libzstd.so.
@ -643,7 +643,7 @@ fn configure_cmake(
let sanitize_cc = |cc: &Path| {
if target.is_msvc() {
OsString::from(cc.to_str().unwrap().replace("\\", "/"))
OsString::from(cc.to_str().unwrap().replace('\\', "/"))
} else {
cc.as_os_str().to_owned()
}
@ -808,10 +808,10 @@ fn configure_llvm(builder: &Builder<'_>, target: TargetSelection, cfg: &mut cmak
// Adapted from https://github.com/alexcrichton/cc-rs/blob/fba7feded71ee4f63cfe885673ead6d7b4f2f454/src/lib.rs#L2347-L2365
fn get_var(var_base: &str, host: &str, target: &str) -> Option<OsString> {
let kind = if host == target { "HOST" } else { "TARGET" };
let target_u = target.replace("-", "_");
env::var_os(&format!("{var_base}_{target}"))
.or_else(|| env::var_os(&format!("{}_{}", var_base, target_u)))
.or_else(|| env::var_os(&format!("{}_{}", kind, var_base)))
let target_u = target.replace('-', "_");
env::var_os(format!("{var_base}_{target}"))
.or_else(|| env::var_os(format!("{}_{}", var_base, target_u)))
.or_else(|| env::var_os(format!("{}_{}", kind, var_base)))
.or_else(|| env::var_os(var_base))
}
@ -862,7 +862,7 @@ fn run(self, builder: &Builder<'_>) -> PathBuf {
}
let _guard = builder.msg_unstaged(Kind::Build, "LLD", target);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
t!(fs::create_dir_all(&out_dir));
let mut cfg = cmake::Config::new(builder.src.join("src/llvm-project/lld"));
@ -986,7 +986,7 @@ fn run(self, builder: &Builder<'_>) -> Self::Output {
let _guard = builder.msg_unstaged(Kind::Build, "sanitizers", self.target);
t!(stamp.remove());
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let mut cfg = cmake::Config::new(&compiler_rt_dir);
cfg.profile("Release");
@ -1051,7 +1051,7 @@ fn supported_sanitizers(
.map(move |c| SanitizerRuntime {
cmake_target: format!("clang_rt.{}_{}_dynamic", c, os),
path: out_dir
.join(&format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
.join(format!("build/lib/darwin/libclang_rt.{}_{}_dynamic.dylib", c, os)),
name: format!("librustc-{}_rt.{}.dylib", channel, c),
})
.collect()
@ -1062,7 +1062,7 @@ fn supported_sanitizers(
.iter()
.map(move |c| SanitizerRuntime {
cmake_target: format!("clang_rt.{}-{}", c, arch),
path: out_dir.join(&format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
path: out_dir.join(format!("build/lib/{}/libclang_rt.{}-{}.a", os, c, arch)),
name: format!("librustc-{}_rt.{}.a", channel, c),
})
.collect()
@ -1165,7 +1165,7 @@ fn make_run(run: RunConfig<'_>) {
/// Build crtbegin.o/crtend.o for musl target.
fn run(self, builder: &Builder<'_>) -> Self::Output {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));
let out_dir = builder.native_dir(self.target).join("crt");
@ -1233,7 +1233,7 @@ fn make_run(run: RunConfig<'_>) {
/// Build libunwind.a
fn run(self, builder: &Builder<'_>) -> Self::Output {
builder.update_submodule(&Path::new("src/llvm-project"));
builder.update_submodule(Path::new("src/llvm-project"));
if builder.config.dry_run() {
return PathBuf::new();

View File

@ -23,7 +23,7 @@ impl Step for ExpandYamlAnchors {
fn run(self, builder: &Builder<'_>) {
builder.info("Expanding YAML anchors in the GitHub Actions configuration");
builder.run_delaying_failure(
&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
builder.tool_cmd(Tool::ExpandYamlAnchors).arg("generate").arg(&builder.src),
);
}

View File

@ -8,7 +8,7 @@
use std::fmt::Write as _;
use std::fs::File;
use std::io::Write;
use std::path::{Path, PathBuf, MAIN_SEPARATOR};
use std::path::{Path, PathBuf, MAIN_SEPARATOR_STR};
use std::process::Command;
use std::str::FromStr;
use std::{fmt, fs, io};
@ -257,8 +257,7 @@ fn run(self, builder: &Builder<'_>) -> Self::Output {
return;
}
let stage_path =
["build", config.build.rustc_target_arg(), "stage1"].join(&MAIN_SEPARATOR.to_string());
["build", config.build.rustc_target_arg(), "stage1"].join(MAIN_SEPARATOR_STR);
if !rustup_installed() {
eprintln!("`rustup` is not installed; cannot link `stage1` toolchain");
} else if stage_dir_exists(&stage_path[..]) && !config.dry_run() {
@ -276,7 +275,7 @@ fn rustup_installed() -> bool {
}
fn stage_dir_exists(stage_path: &str) -> bool {
match fs::create_dir(&stage_path) {
match fs::create_dir(stage_path) {
Ok(_) => true,
Err(_) => Path::new(&stage_path).exists(),
}
@ -294,7 +293,7 @@ fn attempt_toolchain_link(stage_path: &str) {
return;
}
if try_link_toolchain(&stage_path) {
if try_link_toolchain(stage_path) {
println!(
"Added `stage1` rustup toolchain; try `cargo +stage1 build` on a separate rust project to run a newly-built toolchain"
);
@ -310,7 +309,7 @@ fn attempt_toolchain_link(stage_path: &str) {
fn toolchain_is_linked() -> bool {
match Command::new("rustup")
.args(&["toolchain", "list"])
.args(["toolchain", "list"])
.stdout(std::process::Stdio::piped())
.output()
{
@ -337,7 +336,7 @@ fn toolchain_is_linked() -> bool {
fn try_link_toolchain(stage_path: &str) -> bool {
Command::new("rustup")
.stdout(std::process::Stdio::null())
.args(&["toolchain", "link", "stage1", &stage_path])
.args(["toolchain", "link", "stage1", stage_path])
.output()
.map_or(false, |output| output.status.success())
}
@ -366,7 +365,7 @@ fn ensure_stage1_toolchain_placeholder_exists(stage_path: &str) -> bool {
return false;
}
return true;
true
}
// Used to get the path for `Subcommand::Setup`
@ -469,13 +468,13 @@ fn run(self, builder: &Builder<'_>) -> Self::Output {
if config.dry_run() {
return;
}
t!(install_git_hook_maybe(&config));
t!(install_git_hook_maybe(config));
}
}
// install a git hook to automatically run tidy, if they want
fn install_git_hook_maybe(config: &Config) -> io::Result<()> {
let git = t!(config.git().args(&["rev-parse", "--git-common-dir"]).output().map(|output| {
let git = t!(config.git().args(["rev-parse", "--git-common-dir"]).output().map(|output| {
assert!(output.status.success(), "failed to run `git`");
PathBuf::from(t!(String::from_utf8(output.stdout)).trim())
}));
@ -541,7 +540,7 @@ fn run(self, builder: &Builder<'_>) -> Self::Output {
if config.dry_run() {
return;
}
while !t!(create_vscode_settings_maybe(&config)) {}
while !t!(create_vscode_settings_maybe(config)) {}
}
}
@ -608,7 +607,7 @@ fn create_vscode_settings_maybe(config: &Config) -> io::Result<bool> {
}
_ => "Created",
};
fs::write(&vscode_settings, &RUST_ANALYZER_SETTINGS)?;
fs::write(&vscode_settings, RUST_ANALYZER_SETTINGS)?;
println!("{verb} `.vscode/settings.json`");
} else {
println!("\n{RUST_ANALYZER_SETTINGS}");

View File

@ -36,7 +36,7 @@ pub fn suggest(builder: &Builder<'_>, run: bool) {
// this code expects one suggestion per line in the following format:
// <x_subcommand> {some number of flags} [optional stage number]
let cmd = sections.next().unwrap();
let stage = sections.next_back().map(|s| str::parse(s).ok()).flatten();
let stage = sections.next_back().and_then(|s| str::parse(s).ok());
let paths: Vec<PathBuf> = sections.map(|p| PathBuf::from_str(p).unwrap()).collect();
(cmd, stage, paths)
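
`sections.next_back().map(|s| str::parse(s).ok()).flatten()` -> `.and_then(...)`: on `Option`, `map` followed by `flatten` is exactly `and_then`. A sketch with a made-up suggestion line:

    fn main() {
        let suggestion = "test tests/ui 1";
        let mut sections = suggestion.split_whitespace();

        // `map(..).flatten()` collapses into `and_then(..)`: try to parse the
        // last whitespace-separated token as an optional stage number.
        let stage: Option<u32> = sections.next_back().and_then(|s| s.parse().ok());

        assert_eq!(stage, Some(1));
        println!("stage = {stage:?}");
    }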

View File

@ -79,7 +79,7 @@ fn create_synthetic_target(
customize(spec_map);
std::fs::write(&path, &serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
std::fs::write(&path, serde_json::to_vec_pretty(&spec).unwrap()).unwrap();
let target = TargetSelection::create_synthetic(&name, path.to_str().unwrap());
crate::utils::cc_detect::find_target(builder, target);

View File

@ -156,7 +156,7 @@ fn run(self, builder: &Builder<'_>) {
// Run the linkchecker.
let _guard =
builder.msg(Kind::Test, compiler.stage, "Linkcheck", bootstrap_host, bootstrap_host);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run_delaying_failure(linkchecker.arg(builder.out.join(host.triple).join("doc")));
}
@ -253,15 +253,15 @@ fn run(self, builder: &Builder<'_>) {
let out_dir = builder.out.join("ct");
t!(fs::create_dir_all(&out_dir));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let mut cmd = builder.tool_cmd(Tool::CargoTest);
let mut cmd = cmd
let cmd = cmd
.arg(&cargo)
.arg(&out_dir)
.args(builder.config.test_args())
.env("RUSTC", builder.rustc(compiler))
.env("RUSTDOC", builder.rustdoc(compiler));
add_rustdoc_cargo_linker_args(&mut cmd, builder, compiler.host, LldThreads::No);
add_rustdoc_cargo_linker_args(cmd, builder, compiler.host, LldThreads::No);
builder.run_delaying_failure(cmd);
}
}
@ -322,7 +322,7 @@ fn run(self, builder: &Builder<'_>) {
builder,
);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
add_flags_and_try_run_tests(builder, &mut cargo);
}
}
@ -474,7 +474,7 @@ fn run(self, builder: &Builder<'_>) {
);
let dir = testdir(builder, compiler.host);
t!(fs::create_dir_all(&dir));
t!(fs::create_dir_all(dir));
cargo.env("RUST_DEMANGLER_DRIVER_PATH", rust_demangler);
cargo.add_rustc_lib_path(builder);
@ -525,7 +525,7 @@ pub fn build_miri_sysroot(
// Tell `cargo miri setup` where to find the sources.
cargo.env("MIRI_LIB_SRC", builder.src.join("library"));
// Tell it where to find Miri.
cargo.env("MIRI", &miri);
cargo.env("MIRI", miri);
// Tell it where to put the sysroot.
cargo.env("MIRI_SYSROOT", &miri_sysroot);
// Debug things.
@ -637,7 +637,7 @@ fn run(self, builder: &Builder<'_>) {
// does not understand the flags added by `add_flags_and_try_run_test`.
let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
{
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
}
@ -649,11 +649,11 @@ fn run(self, builder: &Builder<'_>) {
// `MIRI_SKIP_UI_CHECKS` and `RUSTC_BLESS` are incompatible
cargo.env_remove("RUSTC_BLESS");
// Optimizations can change error locations and remove UB so don't run `fail` tests.
cargo.args(&["tests/pass", "tests/panic"]);
cargo.args(["tests/pass", "tests/panic"]);
let mut cargo = prepare_cargo_test(cargo, &[], &[], "miri", compiler, target, builder);
{
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
}
}
@ -693,7 +693,7 @@ fn run(self, builder: &Builder<'_>) {
let mut cargo = Command::from(cargo);
{
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run(&mut cargo);
}
}
@ -946,7 +946,7 @@ fn run(self, builder: &Builder<'_>) {
}
fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String> {
let mut command = Command::new(&npm);
let mut command = Command::new(npm);
command.arg("list").arg("--parseable").arg("--long").arg("--depth=0");
if global {
command.arg("--global");
@ -954,7 +954,7 @@ fn get_browser_ui_test_version_inner(npm: &Path, global: bool) -> Option<String>
let lines = command
.output()
.map(|output| String::from_utf8_lossy(&output.stdout).into_owned())
.unwrap_or(String::new());
.unwrap_or_default();
lines
.lines()
.find_map(|l| l.split(':').nth(1)?.strip_prefix("browser-ui-test@"))
@ -1048,7 +1048,7 @@ fn run(self, builder: &Builder<'_>) {
cmd.arg("--npm").arg(npm);
}
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let _guard = builder.msg_sysroot_tool(
Kind::Test,
self.compiler.stage,
@ -1096,7 +1096,7 @@ fn run(self, builder: &Builder<'_>) {
cmd.arg(format!("--extra-checks={s}"));
}
let mut args = std::env::args_os();
if let Some(_) = args.find(|arg| arg == OsStr::new("--")) {
if args.any(|arg| arg == OsStr::new("--")) {
cmd.arg("--");
cmd.args(args);
}
@ -1116,7 +1116,7 @@ fn run(self, builder: &Builder<'_>) {
);
crate::exit!(1);
}
crate::core::build_steps::format::format(&builder, !builder.config.cmd.bless(), &[]);
crate::core::build_steps::format::format(builder, !builder.config.cmd.bless(), &[]);
}
builder.info("tidy check");
@ -1171,7 +1171,7 @@ fn run(self, builder: &Builder<'_>) {
}
builder.info("Ensuring the YAML anchors in the GitHub Actions config were expanded");
builder.run_delaying_failure(
&mut builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
builder.tool_cmd(Tool::ExpandYamlAnchors).arg("check").arg(&builder.src),
);
}
@ -1759,7 +1759,7 @@ fn run(self, builder: &Builder<'_>) {
for exclude in &builder.config.skip {
cmd.arg("--skip");
cmd.arg(&exclude);
cmd.arg(exclude);
}
// Get paths from cmd args
@ -1780,7 +1780,7 @@ fn run(self, builder: &Builder<'_>) {
// so the correct filters are passed to libtest
if cfg!(windows) {
let test_args_win: Vec<String> =
test_args.iter().map(|s| s.replace("/", "\\")).collect();
test_args.iter().map(|s| s.replace('/', "\\")).collect();
cmd.args(&test_args_win);
} else {
cmd.args(&test_args);
@ -1900,7 +1900,7 @@ fn run(self, builder: &Builder<'_>) {
// Note that if we encounter `PATH` we make sure to append to our own `PATH`
// rather than stomp over it.
if !builder.config.dry_run() && target.is_msvc() {
for &(ref k, ref v) in builder.cc.borrow()[&target].env() {
for (k, v) in builder.cc.borrow()[&target].env() {
if k != "PATH" {
cmd.env(k, v);
}
@ -1996,7 +1996,7 @@ fn run(self, builder: &Builder<'_>) {
let _group = builder.msg(
Kind::Test,
compiler.stage,
&format!("compiletest suite={suite} mode={mode}"),
format!("compiletest suite={suite} mode={mode}"),
compiler.host,
target,
);
@ -2022,7 +2022,7 @@ fn run(self, builder: &Builder<'_>) {
"Check compiletest suite={} mode={} compare_mode={} ({} -> {})",
suite, mode, compare_mode, &compiler.host, target
));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
try_run_tests(builder, &mut cmd, false);
}
}
@ -2094,7 +2094,7 @@ fn run_ext_doc(self, builder: &Builder<'_>) {
compiler.host,
compiler.host,
);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let toolstate = if builder.run_delaying_failure(&mut rustbook_cmd) {
ToolState::TestPass
} else {
@ -2111,12 +2111,12 @@ fn run_local_doc(self, builder: &Builder<'_>) {
builder.ensure(compile::Std::new(compiler, host));
let _guard =
builder.msg(Kind::Test, compiler.stage, &format!("book {}", self.name), host, host);
builder.msg(Kind::Test, compiler.stage, format!("book {}", self.name), host, host);
// Do a breadth-first traversal of the `src/doc` directory and just run
// tests for all files that end in `*.md`
let mut stack = vec![builder.src.join(self.path)];
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let mut files = Vec::new();
while let Some(p) = stack.pop() {
if p.is_dir() {
@ -2227,7 +2227,7 @@ fn run(self, builder: &Builder<'_>) {
let guard =
builder.msg(Kind::Test, compiler.stage, "error-index", compiler.host, compiler.host);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
builder.run_quiet(&mut tool);
drop(guard);
// The tests themselves need to link to std, so make sure it is
@ -2315,11 +2315,8 @@ fn make_run(run: RunConfig<'_>) {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
let crates =
run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();
builder.ensure(CrateLibrustc { compiler, target: run.target, crates });
}
@ -2351,7 +2348,7 @@ fn run_cargo_test<'a>(
) -> bool {
let mut cargo =
prepare_cargo_test(cargo, libtest_args, crates, primary_crate, compiler, target, builder);
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
let _group = description.into().and_then(|what| {
builder.msg_sysroot_tool(Kind::Test, compiler.stage, what, compiler.host, target)
});
@ -2406,7 +2403,7 @@ fn prepare_cargo_test(
if krate.has_lib {
cargo.arg("--lib");
}
cargo.args(&["--bins", "--examples", "--tests", "--benches"]);
cargo.args(["--bins", "--examples", "--tests", "--benches"]);
}
DocTests::Yes => {}
}
@ -2468,11 +2465,8 @@ fn make_run(run: RunConfig<'_>) {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
let crates =
run.paths.iter().map(|p| builder.crate_paths[&p.assert_single_path().path]).collect();
builder.ensure(Crate { compiler, target: run.target, mode: Mode::Std, crates });
}
@ -2844,11 +2838,11 @@ fn run(self, builder: &Builder<'_>) {
let compiler = builder.compiler(0, host);
let _guard = builder.msg(Kind::Test, 0, "bootstrap", host, host);
let mut check_bootstrap = Command::new(&builder.python());
let mut check_bootstrap = Command::new(builder.python());
check_bootstrap
.args(["-m", "unittest", "bootstrap_test.py"])
.env("BUILD_DIR", &builder.out)
.env("BUILD_PLATFORM", &builder.build.build.triple)
.env("BUILD_PLATFORM", builder.build.build.triple)
.current_dir(builder.src.join("src/bootstrap/"));
// NOTE: we intentionally don't pass test_args here because the args for unittest and cargo test are mutually incompatible.
// Use `python -m unittest` manually if you want to pass arguments.
@ -3171,7 +3165,7 @@ fn run(self, builder: &Builder<'_>) {
&compiler.host,
target
));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
// FIXME handle vendoring for source tarballs before removing the --skip-test below
let download_dir = builder.out.join("cg_clif_download");
@ -3300,7 +3294,7 @@ fn run(self, builder: &Builder<'_>) {
&compiler.host,
target
));
let _time = helpers::timeit(&builder);
let _time = helpers::timeit(builder);
// FIXME: Uncomment the `prepare` command below once vendoring is implemented.
/*
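
One change in this file that is more than a dropped borrow: `if let Some(_) = args.find(|arg| arg == OsStr::new("--"))` becomes `if args.any(..)`. When only the existence of a match matters, `any` states that directly, and like `find` it stops right after the first hit, so the remaining items can still be forwarded. A sketch:

    fn main() {
        let argv = vec!["x.py", "test", "--", "--help"];
        let mut args = argv.into_iter();

        // `any` replaces `if let Some(_) = args.find(..)`: we only care whether
        // a `--` separator exists; everything after it is forwarded verbatim.
        if args.any(|arg| arg == "--") {
            let rest: Vec<_> = args.collect();
            println!("forwarding {rest:?}");
        }
    }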

View File

@ -819,7 +819,7 @@ pub fn tool_cmd(&self, tool: Tool) -> Command {
if compiler.host.is_msvc() {
let curpaths = env::var_os("PATH").unwrap_or_default();
let curpaths = env::split_paths(&curpaths).collect::<Vec<_>>();
for &(ref k, ref v) in self.cc.borrow()[&compiler.host].env() {
for (k, v) in self.cc.borrow()[&compiler.host].env() {
if k != "PATH" {
continue;
}

View File

@ -346,7 +346,7 @@ fn git_config(key: &str, value: &str) {
let credential = format!("https://{token}:x-oauth-basic@github.com\n",);
let git_credential_path = PathBuf::from(t!(env::var("HOME"))).join(".git-credentials");
t!(fs::write(&git_credential_path, credential));
t!(fs::write(git_credential_path, credential));
}
/// Reads the latest toolstate from the toolstate repo.
@ -389,7 +389,7 @@ fn commit_toolstate_change(current_toolstate: &ToolstateData) {
// Upload the test results (the new commit-to-toolstate mapping) to the toolstate repo.
// This does *not* change the "current toolstate"; that only happens post-landing
// via `src/ci/docker/publish_toolstate.sh`.
publish_test_results(&current_toolstate);
publish_test_results(current_toolstate);
// `git commit` failing means nothing to commit.
let status = t!(Command::new("git")

View File

@ -290,7 +290,7 @@ pub fn assert_single_path(&self) -> &TaskPath {
const PATH_REMAP: &[(&str, &str)] = &[("rust-analyzer-proc-macro-srv", "proc-macro-srv-cli")];
fn remap_paths(paths: &mut Vec<&Path>) {
fn remap_paths(paths: &mut [&Path]) {
for path in paths.iter_mut() {
for &(search, replace) in PATH_REMAP {
if path.to_str() == Some(search) {
@ -329,7 +329,7 @@ fn maybe_run(&self, builder: &Builder<'_>, mut pathsets: Vec<PathSet>) {
}
fn is_excluded(&self, builder: &Builder<'_>, pathset: &PathSet) -> bool {
if builder.config.skip.iter().any(|e| pathset.has(&e, builder.kind)) {
if builder.config.skip.iter().any(|e| pathset.has(e, builder.kind)) {
if !matches!(builder.config.dry_run, DryRun::SelfCheck) {
println!("Skipping {pathset:?} because it is excluded");
}
@ -369,8 +369,7 @@ fn run(v: &[StepDescription], builder: &Builder<'_>, paths: &[PathBuf]) {
}
// strip CurDir prefix if present
let mut paths: Vec<_> =
paths.into_iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect();
let mut paths: Vec<_> = paths.iter().map(|p| p.strip_prefix(".").unwrap_or(p)).collect();
remap_paths(&mut paths);
@ -378,7 +377,7 @@ fn run(v: &[StepDescription], builder: &Builder<'_>, paths: &[PathBuf]) {
// (This is separate from the loop below to avoid having to handle multiple paths in `is_suite_path` somehow.)
paths.retain(|path| {
for (desc, should_run) in v.iter().zip(&should_runs) {
if let Some(suite) = should_run.is_suite_path(&path) {
if let Some(suite) = should_run.is_suite_path(path) {
desc.maybe_run(builder, vec![suite.clone()]);
return false;
}
@ -537,7 +536,7 @@ pub fn paths(mut self, paths: &[&str]) -> Self {
.iter()
.map(|p| {
// assert only if `p` isn't submodule
if submodules_paths.iter().find(|sm_p| p.contains(*sm_p)).is_none() {
if !submodules_paths.iter().any(|sm_p| p.contains(sm_p)) {
assert!(
self.builder.src.join(p).exists(),
"`should_run.paths` should correspond to real on-disk paths - use `alias` if there is no relevant path: {}",
@ -1208,7 +1207,7 @@ pub fn cargo_clippy_cmd(&self, run_compiler: Compiler) -> Command {
}
pub fn rustdoc_cmd(&self, compiler: Compiler) -> Command {
let mut cmd = Command::new(&self.bootstrap_out.join("rustdoc"));
let mut cmd = Command::new(self.bootstrap_out.join("rustdoc"));
cmd.env("RUSTC_STAGE", compiler.stage.to_string())
.env("RUSTC_SYSROOT", self.sysroot(compiler))
// Note that this is *not* the sysroot_libdir because rustdoc must be linked
@ -1351,7 +1350,7 @@ fn cargo(
// See comment in rustc_llvm/build.rs for why this is necessary, largely llvm-config
// needs to not accidentally link to libLLVM in stage0/lib.
cargo.env("REAL_LIBRARY_PATH_VAR", &helpers::dylib_path_var());
cargo.env("REAL_LIBRARY_PATH_VAR", helpers::dylib_path_var());
if let Some(e) = env::var_os(helpers::dylib_path_var()) {
cargo.env("REAL_LIBRARY_PATH", e);
}
@ -1620,8 +1619,8 @@ fn cargo(
.env("RUSTBUILD_NATIVE_DIR", self.native_dir(target))
.env("RUSTC_REAL", self.rustc(compiler))
.env("RUSTC_STAGE", stage.to_string())
.env("RUSTC_SYSROOT", &sysroot)
.env("RUSTC_LIBDIR", &libdir)
.env("RUSTC_SYSROOT", sysroot)
.env("RUSTC_LIBDIR", libdir)
.env("RUSTDOC", self.bootstrap_out.join("rustdoc"))
.env(
"RUSTDOC_REAL",
@ -1754,7 +1753,7 @@ fn cargo(
cargo.env("RUSTC_BOOTSTRAP", "1");
if self.config.dump_bootstrap_shims {
prepare_behaviour_dump_dir(&self.build);
prepare_behaviour_dump_dir(self.build);
cargo
.env("DUMP_BOOTSTRAP_SHIMS", self.build.out.join("bootstrap-shims-dump"))
@ -1793,7 +1792,7 @@ fn cargo(
// platform-specific environment variable as a workaround.
if mode == Mode::ToolRustc || mode == Mode::Codegen {
if let Some(llvm_config) = self.llvm_config(target) {
let llvm_libdir = output(Command::new(&llvm_config).arg("--libdir"));
let llvm_libdir = output(Command::new(llvm_config).arg("--libdir"));
add_link_lib_path(vec![llvm_libdir.trim().into()], &mut cargo);
}
}
@ -2080,7 +2079,7 @@ pub fn ensure<S: Step>(&'a self, step: S) -> S::Output {
if self.config.print_step_timings && !self.config.dry_run() {
let step_string = format!("{step:?}");
let brace_index = step_string.find("{").unwrap_or(0);
let brace_index = step_string.find('{').unwrap_or(0);
let type_string = type_name::<S>();
println!(
"[TIMING] {} {} -- {}.{:03}",
@ -2429,7 +2428,7 @@ fn configure_linker(&mut self, builder: &Builder<'_>) -> &mut Cargo {
_ => s.display().to_string(),
}
};
let triple_underscored = target.triple.replace("-", "_");
let triple_underscored = target.triple.replace('-', "_");
let cc = ccacheify(&builder.cc(target));
self.command.env(format!("CC_{triple_underscored}"), &cc);

View File

@ -468,7 +468,7 @@ pub struct TargetSelection {
pub fn target_selection_list(s: &str) -> Result<TargetSelectionList, String> {
Ok(TargetSelectionList(
s.split(",").filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(),
s.split(',').filter(|s| !s.is_empty()).map(TargetSelection::from_user).collect(),
))
}
@ -963,10 +963,10 @@ fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
if ["s", "z"].iter().find(|x| **x == value).is_some() {
if matches!(value, "s" | "z") {
Ok(RustOptimize::String(value.to_string()))
} else {
Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom)
Err(serde::de::Error::custom(format_optimize_error_msg(value)))
}
}
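
`["s", "z"].iter().find(|x| **x == value).is_some()` -> `matches!(value, "s" | "z")`: a literal membership test reads more clearly as a pattern match and skips the iterator search entirely. A sketch:

    fn is_string_opt_level(value: &str) -> bool {
        // `matches!` expresses "is it one of these literals?" directly.
        matches!(value, "s" | "z")
    }

    fn main() {
        assert!(is_string_opt_level("s"));
        assert!(!is_string_opt_level("3"));
        println!("ok");
    }
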
@ -977,7 +977,7 @@ fn visit_i64<E>(self, value: i64) -> Result<Self::Value, E>
if matches!(value, 0..=3) {
Ok(RustOptimize::Int(value as u8))
} else {
Err(format_optimize_error_msg(value)).map_err(serde::de::Error::custom)
Err(serde::de::Error::custom(format_optimize_error_msg(value)))
}
}
@ -1144,41 +1144,44 @@ struct TomlTarget {
impl Config {
pub fn default_opts() -> Config {
let mut config = Config::default();
config.bypass_bootstrap_lock = false;
config.llvm_optimize = true;
config.ninja_in_file = true;
config.llvm_static_stdcpp = false;
config.backtrace = true;
config.rust_optimize = RustOptimize::Bool(true);
config.rust_optimize_tests = true;
config.submodules = None;
config.docs = true;
config.docs_minification = true;
config.rust_rpath = true;
config.rust_strip = false;
config.channel = "dev".to_string();
config.codegen_tests = true;
config.rust_dist_src = true;
config.rust_codegen_backends = vec![INTERNER.intern_str("llvm")];
config.deny_warnings = true;
config.bindir = "bin".into();
config.dist_include_mingw_linker = true;
config.dist_compression_profile = "fast".into();
config.rustc_parallel = true;
Config {
bypass_bootstrap_lock: false,
llvm_optimize: true,
ninja_in_file: true,
llvm_static_stdcpp: false,
backtrace: true,
rust_optimize: RustOptimize::Bool(true),
rust_optimize_tests: true,
submodules: None,
docs: true,
docs_minification: true,
rust_rpath: true,
rust_strip: false,
channel: "dev".to_string(),
codegen_tests: true,
rust_dist_src: true,
rust_codegen_backends: vec![INTERNER.intern_str("llvm")],
deny_warnings: true,
bindir: "bin".into(),
dist_include_mingw_linker: true,
dist_compression_profile: "fast".into(),
rustc_parallel: true,
config.stdout_is_tty = std::io::stdout().is_terminal();
config.stderr_is_tty = std::io::stderr().is_terminal();
stdout_is_tty: std::io::stdout().is_terminal(),
stderr_is_tty: std::io::stderr().is_terminal(),
// set by build.rs
config.build = TargetSelection::from_user(&env!("BUILD_TRIPLE"));
// set by build.rs
build: TargetSelection::from_user(env!("BUILD_TRIPLE")),
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
// Undo `src/bootstrap`
config.src = manifest_dir.parent().unwrap().parent().unwrap().to_owned();
config.out = PathBuf::from("build");
src: {
let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
// Undo `src/bootstrap`
manifest_dir.parent().unwrap().parent().unwrap().to_owned()
},
out: PathBuf::from("build"),
config
..Default::default()
}
}
pub fn parse(args: &[String]) -> Config {
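
`default_opts` is rewritten from field-by-field mutation of a `Config::default()` value into one struct literal that lists the overrides and falls back to `..Default::default()` for everything else. A minimal sketch with a much smaller, made-up config type:

    // Hypothetical stand-in; bootstrap's real `Config` has far more fields.
    #[derive(Debug, Default)]
    struct Config {
        docs: bool,
        channel: String,
        bindir: std::path::PathBuf,
        deny_warnings: bool,
        submodules: Option<bool>,
    }

    fn default_opts() -> Config {
        // Instead of `let mut config = Config::default();` followed by a long
        // run of `config.field = ...;` assignments, state the overrides once
        // and take the remaining fields from `Default` via struct-update syntax.
        Config {
            docs: true,
            channel: "dev".to_string(),
            bindir: "bin".into(),
            deny_warnings: true,
            ..Default::default()
        }
    }

    fn main() {
        println!("{:#?}", default_opts());
    }
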
@ -1204,7 +1207,7 @@ fn get_toml(file: &Path) -> TomlConfig {
}
pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfig) -> Config {
let mut flags = Flags::parse(&args);
let mut flags = Flags::parse(args);
let mut config = Config::default_opts();
// Set flags.
@ -1252,7 +1255,7 @@ pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfi
// Bootstrap is quite bad at handling /? in front of paths
let src = match s.strip_prefix("\\\\?\\") {
Some(p) => PathBuf::from(p),
None => PathBuf::from(git_root),
None => git_root,
};
// If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when,
// for example, the build directory is inside of another unrelated git directory.
@ -1278,7 +1281,7 @@ pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfi
.to_path_buf();
}
let stage0_json = t!(std::fs::read(&config.src.join("src").join("stage0.json")));
let stage0_json = t!(std::fs::read(config.src.join("src").join("stage0.json")));
config.stage0_metadata = t!(serde_json::from_slice::<Stage0Metadata>(&stage0_json));
@ -1324,8 +1327,7 @@ pub(crate) fn parse_inner(args: &[String], get_toml: impl Fn(&Path) -> TomlConfi
let mut override_toml = TomlConfig::default();
for option in flags.set.iter() {
fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
toml::from_str(&option)
.and_then(|table: toml::Value| TomlConfig::deserialize(table))
toml::from_str(option).and_then(|table: toml::Value| TomlConfig::deserialize(table))
}
let mut err = match get_table(option) {
@ -1337,7 +1339,7 @@ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
};
// We want to be able to set string values without quotes,
// like in `configure.py`. Try adding quotes around the right hand side
if let Some((key, value)) = option.split_once("=") {
if let Some((key, value)) = option.split_once('=') {
if !value.contains('"') {
match get_table(&format!(r#"{key}="{value}""#)) {
Ok(v) => {
@ -1660,7 +1662,7 @@ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
llvm_libunwind.map(|v| v.parse().expect("failed to parse rust.llvm-libunwind"));
if let Some(ref backends) = codegen_backends {
let available_backends = vec!["llvm", "cranelift", "gcc"];
let available_backends = ["llvm", "cranelift", "gcc"];
config.rust_codegen_backends = backends.iter().map(|s| {
if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
@ -1808,7 +1810,7 @@ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
let mut target = Target::from_triple(&triple);
if let Some(ref s) = cfg.llvm_config {
if config.download_rustc_commit.is_some() && triple == &*config.build.triple {
if config.download_rustc_commit.is_some() && triple == *config.build.triple {
panic!(
"setting llvm_config for the host is incompatible with download-rustc"
);
@ -1847,7 +1849,7 @@ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
target.rpath = cfg.rpath;
if let Some(ref backends) = cfg.codegen_backends {
let available_backends = vec!["llvm", "cranelift", "gcc"];
let available_backends = ["llvm", "cranelift", "gcc"];
target.codegen_backends = Some(backends.iter().map(|s| {
if let Some(backend) = s.strip_prefix(CODEGEN_BACKEND_PREFIX) {
@ -1874,7 +1876,7 @@ fn get_table(option: &str) -> Result<TomlConfig, toml::de::Error> {
let build_target = config
.target_config
.entry(config.build)
.or_insert_with(|| Target::from_triple(&triple));
.or_insert_with(|| Target::from_triple(triple));
check_ci_llvm!(build_target.llvm_config);
check_ci_llvm!(build_target.llvm_filecheck);
@ -2208,7 +2210,7 @@ pub fn verbose(&self, msg: &str) {
}
pub fn sanitizers_enabled(&self, target: TargetSelection) -> bool {
self.target_config.get(&target).map(|t| t.sanitizers).flatten().unwrap_or(self.sanitizers)
self.target_config.get(&target).and_then(|t| t.sanitizers).unwrap_or(self.sanitizers)
}
pub fn needs_sanitizer_runtime_built(&self, target: TargetSelection) -> bool {
@ -2243,7 +2245,7 @@ pub fn any_profiler_enabled(&self) -> bool {
}
pub fn rpath_enabled(&self, target: TargetSelection) -> bool {
self.target_config.get(&target).map(|t| t.rpath).flatten().unwrap_or(self.rust_rpath)
self.target_config.get(&target).and_then(|t| t.rpath).unwrap_or(self.rust_rpath)
}
pub fn llvm_enabled(&self, target: TargetSelection) -> bool {
@ -2274,7 +2276,7 @@ pub fn codegen_backends(&self, target: TargetSelection) -> &[Interned<String>] {
}
pub fn default_codegen_backend(&self, target: TargetSelection) -> Option<Interned<String>> {
self.codegen_backends(target).get(0).cloned()
self.codegen_backends(target).first().cloned()
}
pub fn git_config(&self) -> GitConfig<'_> {
@ -2303,9 +2305,9 @@ pub fn check_build_rustc_version(&self, rustc_path: &str) {
.next()
.unwrap()
.to_owned();
let rustc_version = Version::parse(&rustc_output.trim()).unwrap();
let rustc_version = Version::parse(rustc_output.trim()).unwrap();
let source_version =
Version::parse(&fs::read_to_string(self.src.join("src/version")).unwrap().trim())
Version::parse(fs::read_to_string(self.src.join("src/version")).unwrap().trim())
.unwrap();
if !(source_version == rustc_version
|| (source_version.major == rustc_version.major
@ -2333,7 +2335,7 @@ fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Opti
};
// Handle running from a directory other than the top level
let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"]));
let top_level = output(self.git().args(["rev-parse", "--show-toplevel"]));
let top_level = top_level.trim_end();
let compiler = format!("{top_level}/compiler/");
let library = format!("{top_level}/library/");
@ -2344,7 +2346,7 @@ fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Opti
self.git()
.arg("rev-list")
.arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
.args(&["-n1", "--first-parent", "HEAD"]),
.args(["-n1", "--first-parent", "HEAD"]),
);
let commit = merge_base.trim_end();
if commit.is_empty() {
@ -2358,7 +2360,7 @@ fn download_ci_rustc_commit(&self, download_rustc: Option<StringOrBool>) -> Opti
// Warn if there were changes to the compiler or standard library since the ancestor commit.
let has_changes = !t!(self
.git()
.args(&["diff-index", "--quiet", &commit, "--", &compiler, &library])
.args(["diff-index", "--quiet", commit, "--", &compiler, &library])
.status())
.success();
if has_changes {
@ -2397,7 +2399,7 @@ fn parse_download_ci_llvm(
// there are some untracked changes in the given paths.
false
} else {
llvm::is_ci_llvm_available(&self, asserts)
llvm::is_ci_llvm_available(self, asserts)
}
};
match download_ci_llvm {
@ -2406,7 +2408,7 @@ fn parse_download_ci_llvm(
// FIXME: "if-available" is deprecated. Remove this block later (around mid 2024)
// to not break builds between the recent-to-old checkouts.
Some(StringOrBool::String(s)) if s == "if-available" => {
llvm::is_ci_llvm_available(&self, asserts)
llvm::is_ci_llvm_available(self, asserts)
}
Some(StringOrBool::String(s)) if s == "if-unchanged" => if_unchanged(),
Some(StringOrBool::String(other)) => {
@ -2424,7 +2426,7 @@ pub fn last_modified_commit(
if_unchanged: bool,
) -> Option<String> {
// Handle running from a directory other than the top level
let top_level = output(self.git().args(&["rev-parse", "--show-toplevel"]));
let top_level = output(self.git().args(["rev-parse", "--show-toplevel"]));
let top_level = top_level.trim_end();
// Look for a version to compare to based on the current commit.
@ -2433,7 +2435,7 @@ pub fn last_modified_commit(
self.git()
.arg("rev-list")
.arg(format!("--author={}", self.stage0_metadata.config.git_merge_commit_email))
.args(&["-n1", "--first-parent", "HEAD"]),
.args(["-n1", "--first-parent", "HEAD"]),
);
let commit = merge_base.trim_end();
if commit.is_empty() {
@ -2446,7 +2448,7 @@ pub fn last_modified_commit(
// Warn if there were changes to the compiler or standard library since the ancestor commit.
let mut git = self.git();
git.args(&["diff-index", "--quiet", &commit, "--"]);
git.args(["diff-index", "--quiet", commit, "--"]);
for path in modified_paths {
git.arg(format!("{top_level}/{path}"));

View File

@ -159,7 +159,7 @@ fn fix_bin_or_dylib(&self, fname: &Path) {
";
nix_build_succeeded = try_run(
self,
Command::new("nix-build").args(&[
Command::new("nix-build").args([
Path::new("-E"),
Path::new(NIX_EXPR),
Path::new("-o"),
@ -188,7 +188,7 @@ fn fix_bin_or_dylib(&self, fname: &Path) {
let dynamic_linker_path = nix_deps_dir.join("nix-support/dynamic-linker");
// FIXME: can we support utf8 here? `args` doesn't accept Vec<u8>, only OsString ...
let dynamic_linker = t!(String::from_utf8(t!(fs::read(dynamic_linker_path))));
patchelf.args(&["--set-interpreter", dynamic_linker.trim_end()]);
patchelf.args(["--set-interpreter", dynamic_linker.trim_end()]);
}
let _ = try_run(self, patchelf.arg(fname));
@ -218,7 +218,7 @@ fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error:
println!("downloading {url}");
// Try curl. If that fails and we are on windows, fallback to PowerShell.
let mut curl = Command::new("curl");
curl.args(&[
curl.args([
"-y",
"30",
"-Y",
@ -242,7 +242,7 @@ fn download_http_with_retries(&self, tempfile: &Path, url: &str, help_on_error:
if self.build.contains("windows-msvc") {
eprintln!("Fallback to PowerShell");
for _ in 0..3 {
if try_run(self, Command::new("PowerShell.exe").args(&[
if try_run(self, Command::new("PowerShell.exe").args([
"/nologo",
"-Command",
"[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;",
@ -388,7 +388,7 @@ pub(crate) fn download_clippy(&self) -> PathBuf {
let bin_root = self.out.join(host.triple).join("stage0");
let clippy_stamp = bin_root.join(".clippy-stamp");
let cargo_clippy = bin_root.join("bin").join(exe("cargo-clippy", host));
if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, &date) {
if cargo_clippy.exists() && !program_out_of_date(&clippy_stamp, date) {
return cargo_clippy;
}
@ -421,14 +421,14 @@ pub(crate) fn maybe_download_rustfmt(&self) -> Option<PathBuf> {
DownloadSource::Dist,
format!("rustfmt-{version}-{build}.tar.xz", build = host.triple),
"rustfmt-preview",
&date,
date,
"rustfmt",
);
self.download_component(
DownloadSource::Dist,
format!("rustc-{version}-{build}.tar.xz", build = host.triple),
"rustc",
&date,
date,
"rustfmt",
);
@ -665,7 +665,7 @@ pub(crate) fn maybe_download_ci_llvm(&self) {
}
let llvm_root = self.ci_llvm_root();
let llvm_stamp = llvm_root.join(".llvm-stamp");
let llvm_sha = detect_llvm_sha(&self, self.rust_info.is_managed_git_subrepository());
let llvm_sha = detect_llvm_sha(self, self.rust_info.is_managed_git_subrepository());
let key = format!("{}{}", llvm_sha, self.llvm_assertions);
if program_out_of_date(&llvm_stamp, &key) && !self.dry_run() {
self.download_ci_llvm(&llvm_sha);
@ -685,11 +685,11 @@ pub(crate) fn maybe_download_ci_llvm(&self) {
// rebuild.
let now = filetime::FileTime::from_system_time(std::time::SystemTime::now());
let llvm_config = llvm_root.join("bin").join(exe("llvm-config", self.build));
t!(filetime::set_file_times(&llvm_config, now, now));
t!(filetime::set_file_times(llvm_config, now, now));
if self.should_fix_bins_and_dylibs() {
let llvm_lib = llvm_root.join("lib");
for entry in t!(fs::read_dir(&llvm_lib)) {
for entry in t!(fs::read_dir(llvm_lib)) {
let lib = t!(entry).path();
if lib.extension().map_or(false, |ext| ext == "so") {
self.fix_bin_or_dylib(&lib);

View File

@ -467,7 +467,7 @@ pub fn new(mut config: Config) -> Build {
}
// Make a symbolic link so we can use a consistent directory in the documentation.
let build_triple = build.out.join(&build.build.triple);
let build_triple = build.out.join(build.build.triple);
t!(fs::create_dir_all(&build_triple));
let host = build.out.join("host");
if host.is_symlink() {
@ -491,7 +491,7 @@ pub fn new(mut config: Config) -> Build {
///
/// `relative_path` should be relative to the root of the git repository, not an absolute path.
pub(crate) fn update_submodule(&self, relative_path: &Path) {
if !self.config.submodules(&self.rust_info()) {
if !self.config.submodules(self.rust_info()) {
return;
}
@ -507,11 +507,11 @@ pub(crate) fn update_submodule(&self, relative_path: &Path) {
// check_submodule
let checked_out_hash =
output(Command::new("git").args(&["rev-parse", "HEAD"]).current_dir(&absolute_path));
output(Command::new("git").args(["rev-parse", "HEAD"]).current_dir(&absolute_path));
// update_submodules
let recorded = output(
Command::new("git")
.args(&["ls-tree", "HEAD"])
.args(["ls-tree", "HEAD"])
.arg(relative_path)
.current_dir(&self.config.src),
);
@ -529,7 +529,7 @@ pub(crate) fn update_submodule(&self, relative_path: &Path) {
println!("Updating submodule {}", relative_path.display());
self.run(
Command::new("git")
.args(&["submodule", "-q", "sync"])
.args(["submodule", "-q", "sync"])
.arg(relative_path)
.current_dir(&self.config.src),
);
@ -560,7 +560,7 @@ pub(crate) fn update_submodule(&self, relative_path: &Path) {
let branch = branch.strip_prefix("heads/").unwrap_or(&branch);
git.arg("-c").arg(format!("branch.{branch}.remote=origin"));
}
git.args(&["submodule", "update", "--init", "--recursive", "--depth=1"]);
git.args(["submodule", "update", "--init", "--recursive", "--depth=1"]);
if progress {
git.arg("--progress");
}
@ -577,7 +577,7 @@ pub(crate) fn update_submodule(&self, relative_path: &Path) {
let has_local_modifications = !self.run_cmd(
BootstrapCommand::from(
Command::new("git")
.args(&["diff-index", "--quiet", "HEAD"])
.args(["diff-index", "--quiet", "HEAD"])
.current_dir(&absolute_path),
)
.allow_failure()
@ -587,14 +587,14 @@ pub(crate) fn update_submodule(&self, relative_path: &Path) {
}),
);
if has_local_modifications {
self.run(Command::new("git").args(&["stash", "push"]).current_dir(&absolute_path));
self.run(Command::new("git").args(["stash", "push"]).current_dir(&absolute_path));
}
self.run(Command::new("git").args(&["reset", "-q", "--hard"]).current_dir(&absolute_path));
self.run(Command::new("git").args(&["clean", "-qdfx"]).current_dir(&absolute_path));
self.run(Command::new("git").args(["reset", "-q", "--hard"]).current_dir(&absolute_path));
self.run(Command::new("git").args(["clean", "-qdfx"]).current_dir(&absolute_path));
if has_local_modifications {
self.run(Command::new("git").args(&["stash", "pop"]).current_dir(absolute_path));
self.run(Command::new("git").args(["stash", "pop"]).current_dir(absolute_path));
}
}
@ -602,20 +602,20 @@ pub(crate) fn update_submodule(&self, relative_path: &Path) {
/// This avoids contributors checking in a submodule change by accident.
pub fn update_existing_submodules(&self) {
// Avoid running git when there isn't a git checkout.
if !self.config.submodules(&self.rust_info()) {
if !self.config.submodules(self.rust_info()) {
return;
}
let output = output(
self.config
.git()
.args(&["config", "--file"])
.args(["config", "--file"])
.arg(&self.config.src.join(".gitmodules"))
.args(&["--get-regexp", "path"]),
.args(["--get-regexp", "path"]),
);
for line in output.lines() {
// Look for `submodule.$name.path = $path`
// Sample output: `submodule.src/rust-installer.path src/tools/rust-installer`
let submodule = Path::new(line.splitn(2, ' ').nth(1).unwrap());
let submodule = Path::new(line.split_once(' ').unwrap().1);
// Don't update the submodule unless it's already been cloned.
if GitInfo::new(false, submodule).is_managed_git_subrepository() {
self.update_submodule(submodule);
@ -630,26 +630,26 @@ pub fn build(&mut self) {
}
// Download rustfmt early so that it can be used in rust-analyzer configs.
let _ = &builder::Builder::new(&self).initial_rustfmt();
let _ = &builder::Builder::new(self).initial_rustfmt();
// hardcoded subcommands
match &self.config.cmd {
Subcommand::Format { check } => {
return core::build_steps::format::format(
&builder::Builder::new(&self),
&builder::Builder::new(self),
*check,
&self.config.paths,
);
}
Subcommand::Suggest { run } => {
return core::build_steps::suggest::suggest(&builder::Builder::new(&self), *run);
return core::build_steps::suggest::suggest(&builder::Builder::new(self), *run);
}
_ => (),
}
{
let builder = builder::Builder::new(&self);
if let Some(path) = builder.paths.get(0) {
let builder = builder::Builder::new(self);
if let Some(path) = builder.paths.first() {
if path == Path::new("nonexistent/path/to/trigger/cargo/metadata") {
return;
}
@ -659,14 +659,14 @@ pub fn build(&mut self) {
if !self.config.dry_run() {
{
self.config.dry_run = DryRun::SelfCheck;
let builder = builder::Builder::new(&self);
let builder = builder::Builder::new(self);
builder.execute_cli();
}
self.config.dry_run = DryRun::Disabled;
let builder = builder::Builder::new(&self);
let builder = builder::Builder::new(self);
builder.execute_cli();
} else {
let builder = builder::Builder::new(&self);
let builder = builder::Builder::new(self);
builder.execute_cli();
}
@ -936,7 +936,7 @@ fn rustc_snapshot_sysroot(&self) -> &Path {
static SYSROOT_CACHE: OnceLock<PathBuf> = OnceLock::new();
SYSROOT_CACHE.get_or_init(|| {
let mut rustc = Command::new(&self.initial_rustc);
rustc.args(&["--print", "sysroot"]);
rustc.args(["--print", "sysroot"]);
output(&mut rustc).trim().into()
})
}
@ -1162,7 +1162,7 @@ fn msg_sysroot_tool(
fn group(&self, msg: &str) -> Option<gha::Group> {
match self.config.dry_run {
DryRun::SelfCheck => None,
DryRun::Disabled | DryRun::UserSelected => Some(gha::group(&msg)),
DryRun::Disabled | DryRun::UserSelected => Some(gha::group(msg)),
}
}
@ -1322,7 +1322,7 @@ fn musl_root(&self, target: TargetSelection) -> Option<&Path> {
.target_config
.get(&target)
.and_then(|t| t.musl_root.as_ref())
.or_else(|| self.config.musl_root.as_ref())
.or(self.config.musl_root.as_ref())
.map(|p| &**p)
}
@ -1511,11 +1511,11 @@ fn rust_sha(&self) -> Option<&str> {
/// Returns the `a.b.c` version that the given package is at.
fn release_num(&self, package: &str) -> String {
let toml_file_name = self.src.join(&format!("src/tools/{package}/Cargo.toml"));
let toml = t!(fs::read_to_string(&toml_file_name));
let toml_file_name = self.src.join(format!("src/tools/{package}/Cargo.toml"));
let toml = t!(fs::read_to_string(toml_file_name));
for line in toml.lines() {
if let Some(stripped) =
line.strip_prefix("version = \"").and_then(|s| s.strip_suffix("\""))
line.strip_prefix("version = \"").and_then(|s| s.strip_suffix('"'))
{
return stripped.to_owned();
}
@ -1618,7 +1618,7 @@ fn copy_internal(&self, src: &Path, dst: &Path, dereference_symlinks: bool) {
if src == dst {
return;
}
let _ = fs::remove_file(&dst);
let _ = fs::remove_file(dst);
let metadata = t!(src.symlink_metadata());
let mut src = src.to_path_buf();
if metadata.file_type().is_symlink() {
@ -1908,7 +1908,7 @@ pub fn prepare_behaviour_dump_dir(build: &Build) {
let dump_path = build.out.join("bootstrap-shims-dump");
let initialized = INITIALIZED.get().unwrap_or_else(|| &false);
let initialized = INITIALIZED.get().unwrap_or(&false);
if !initialized {
// clear old dumps
if dump_path.exists() {

View File

@ -39,8 +39,7 @@ pub(crate) fn maybe_dump(dump_name: String, cmd: &Command) {
if let Ok(dump_dir) = env::var("DUMP_BOOTSTRAP_SHIMS") {
let dump_file = format!("{dump_dir}/{dump_name}");
let mut file =
OpenOptions::new().create(true).write(true).append(true).open(&dump_file).unwrap();
let mut file = OpenOptions::new().create(true).append(true).open(dump_file).unwrap();
let cmd_dump = format!("{:?}\n", cmd);
let cmd_dump = cmd_dump.replace(&env::var("BUILD_OUT").unwrap(), "${BUILD_OUT}");

View File

@ -64,7 +64,7 @@ unsafe impl<T> Sync for Interned<T> {}
impl fmt::Display for Interned<String> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s: &str = &*self;
let s: &str = self;
f.write_str(s)
}
}
@ -74,7 +74,7 @@ impl<T, U: ?Sized + fmt::Debug> fmt::Debug for Interned<T>
Self: Deref<Target = U>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let s: &U = &*self;
let s: &U = self;
f.write_fmt(format_args!("{s:?}"))
}
}
@ -132,7 +132,7 @@ fn intern_borrow<B>(&mut self, item: &B) -> Interned<T>
B: Eq + Hash + ToOwned<Owned = T> + ?Sized,
T: Borrow<B>,
{
if let Some(i) = self.set.get(&item) {
if let Some(i) = self.set.get(item) {
return *i;
}
let item = item.to_owned();
@ -233,7 +233,7 @@ pub fn put<S: Step>(&self, step: S, value: S::Output) {
let type_id = TypeId::of::<S>();
let stepcache = cache
.entry(type_id)
.or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
.or_insert_with(|| Box::<HashMap<S, S::Output>>::default())
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
assert!(!stepcache.contains_key(&step), "processing {step:?} a second time");
@ -245,7 +245,7 @@ pub fn get<S: Step>(&self, step: &S) -> Option<S::Output> {
let type_id = TypeId::of::<S>();
let stepcache = cache
.entry(type_id)
.or_insert_with(|| Box::new(HashMap::<S, S::Output>::new()))
.or_insert_with(|| Box::<HashMap<S, S::Output>>::default())
.downcast_mut::<HashMap<S, S::Output>>()
.expect("invalid type mapped");
stepcache.get(step).cloned()

View File

@ -35,7 +35,7 @@
// try to infer the archiver path from the C compiler path.
// In the future this logic should be replaced by calling into the `cc` crate.
fn cc2ar(cc: &Path, target: TargetSelection) -> Option<PathBuf> {
if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace("-", "_"))) {
if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace('-', "_"))) {
Some(PathBuf::from(ar))
} else if let Some(ar) = env::var_os("AR") {
Some(PathBuf::from(ar))
@ -172,11 +172,9 @@ fn default_compiler(
// When compiling for Android we may have the NDK configured in the
// config.toml, in which case we look there. Otherwise the default
// compiler already takes into account the triple in question.
t if t.contains("android") => build
.config
.android_ndk
.as_ref()
.map(|ndk| ndk_compiler(compiler, &*target.triple, ndk)),
t if t.contains("android") => {
build.config.android_ndk.as_ref().map(|ndk| ndk_compiler(compiler, &target.triple, ndk))
}
// The default gcc version from OpenBSD may be too old, try using egcc,
// which is a gcc version from ports, if this is the case.
@ -230,7 +228,7 @@ fn default_compiler(
}
pub(crate) fn ndk_compiler(compiler: Language, triple: &str, ndk: &Path) -> PathBuf {
let mut triple_iter = triple.split("-");
let mut triple_iter = triple.split('-');
let triple_translated = if let Some(arch) = triple_iter.next() {
let arch_new = match arch {
"arm" | "armv7" | "armv7neon" | "thumbv7" | "thumbv7neon" => "armv7a",

View File

@ -2,6 +2,8 @@
//! with the goal of keeping developers synchronized with important modifications in
//! the bootstrap.
use std::fmt::Display;
#[cfg(test)]
mod tests;
@ -24,11 +26,11 @@ pub enum ChangeSeverity {
Warning,
}
impl ToString for ChangeSeverity {
fn to_string(&self) -> String {
impl Display for ChangeSeverity {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
ChangeSeverity::Info => "INFO".to_string(),
ChangeSeverity::Warning => "WARNING".to_string(),
ChangeSeverity::Info => write!(f, "INFO"),
ChangeSeverity::Warning => write!(f, "WARNING"),
}
}
}
@ -40,7 +42,7 @@ pub fn find_recent_config_change_ids(current_id: usize) -> Vec<ChangeInfo> {
// older one); otherwise, return the full list (assuming the user provided
// the incorrect change-id by accident).
if let Some(config) = CONFIG_CHANGE_HISTORY.iter().max_by_key(|config| config.change_id) {
if &current_id > &config.change_id {
if current_id > config.change_id {
return Vec::new();
}
}

View File

@ -97,7 +97,7 @@ pub fn commit_date(&self) -> Option<&str> {
pub fn version(&self, build: &Build, num: &str) -> String {
let mut version = build.release(num);
if let Some(ref inner) = self.info() {
if let Some(inner) = self.info() {
version.push_str(" (");
version.push_str(&inner.short_sha);
version.push(' ');
@ -150,7 +150,7 @@ pub fn read_commit_info_file(root: &Path) -> Option<Info> {
/// root.
pub fn write_commit_info_file(root: &Path, info: &Info) {
let commit_info = format!("{}\n{}\n{}\n", info.sha, info.short_sha, info.commit_date);
t!(fs::write(root.join("git-commit-info"), &commit_info));
t!(fs::write(root.join("git-commit-info"), commit_info));
}
/// Write the commit hash to the `git-commit-hash` file given the project root.

View File

@ -425,7 +425,7 @@ pub fn get_clang_cl_resource_dir(clang_cl_path: &str) -> PathBuf {
// Similar to how LLVM does it, to find clang's library runtime directory:
// - we ask `clang-cl` to locate the `clang_rt.builtins` lib.
let mut builtins_locator = Command::new(clang_cl_path);
builtins_locator.args(&["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]);
builtins_locator.args(["/clang:-print-libgcc-file-name", "/clang:--rtlib=compiler-rt"]);
let clang_rt_builtins = output(&mut builtins_locator);
let clang_rt_builtins = Path::new(clang_rt_builtins.trim());
@ -475,7 +475,7 @@ pub fn dir_is_empty(dir: &Path) -> bool {
/// the "y" part from the string.
pub fn extract_beta_rev(version: &str) -> Option<String> {
let parts = version.splitn(2, "-beta.").collect::<Vec<_>>();
let count = parts.get(1).and_then(|s| s.find(' ').map(|p| (&s[..p]).to_string()));
let count = parts.get(1).and_then(|s| s.find(' ').map(|p| s[..p].to_string()));
count
}
@ -559,11 +559,10 @@ pub fn check_cfg_arg(name: &str, values: Option<&[&str]>) -> String {
// ',values("tvos","watchos")' or '' (nothing) when there are no values.
let next = match values {
Some(values) => {
let mut tmp =
values.iter().map(|val| [",", "\"", val, "\""]).flatten().collect::<String>();
let mut tmp = values.iter().flat_map(|val| [",", "\"", val, "\""]).collect::<String>();
tmp.insert_str(1, "values(");
tmp.push_str(")");
tmp.push(')');
tmp
}
None => "".to_string(),

View File

@ -15,10 +15,10 @@
const TERSE_TESTS_PER_LINE: usize = 88;
pub(crate) fn add_flags_and_try_run_tests(builder: &Builder<'_>, cmd: &mut Command) -> bool {
if cmd.get_args().position(|arg| arg == "--").is_none() {
if !cmd.get_args().any(|arg| arg == "--") {
cmd.arg("--");
}
cmd.args(&["-Z", "unstable-options", "--format", "json"]);
cmd.args(["-Z", "unstable-options", "--format", "json"]);
try_run_tests(builder, cmd, false)
}
@ -303,19 +303,19 @@ impl Outcome<'_> {
fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
match self {
Outcome::Ok => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
write!(writer, ".")?;
}
Outcome::BenchOk => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))?;
write!(writer, "b")?;
}
Outcome::Failed => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
write!(writer, "F")?;
}
Outcome::Ignored { .. } => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(writer, "i")?;
}
}
@ -325,19 +325,19 @@ fn write_short(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error>
fn write_long(&self, writer: &mut dyn WriteColor) -> Result<(), std::io::Error> {
match self {
Outcome::Ok => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Green)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Green)))?;
write!(writer, "ok")?;
}
Outcome::BenchOk => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Cyan)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Cyan)))?;
write!(writer, "benchmarked")?;
}
Outcome::Failed => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Red)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Red)))?;
write!(writer, "FAILED")?;
}
Outcome::Ignored { reason } => {
writer.set_color(&ColorSpec::new().set_fg(Some(Color::Yellow)))?;
writer.set_color(ColorSpec::new().set_fg(Some(Color::Yellow)))?;
write!(writer, "ignored")?;
if let Some(reason) = reason {
write!(writer, ", {reason}")?;

View File

@ -226,8 +226,7 @@ pub(crate) fn generate(self) -> GeneratedTarball {
if self.include_target_in_component_name {
component_name.push('-');
component_name.push_str(
&self
.target
self.target
.as_ref()
.expect("include_target_in_component_name used in a targetless tarball"),
);
@ -326,7 +325,7 @@ fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTar
assert!(!formats.is_empty(), "dist.compression-formats can't be empty");
cmd.arg("--compression-formats").arg(formats.join(","));
}
cmd.args(&["--compression-profile", &self.builder.config.dist_compression_profile]);
cmd.args(["--compression-profile", &self.builder.config.dist_compression_profile]);
self.builder.run(&mut cmd);
// Ensure there are no symbolic links in the tarball. In particular,
@ -347,7 +346,7 @@ fn run(self, build_cli: impl FnOnce(&Tarball<'a>, &mut Command)) -> GeneratedTar
.config
.dist_compression_formats
.as_ref()
.and_then(|formats| formats.get(0))
.and_then(|formats| formats.first())
.map(|s| s.as_str())
.unwrap_or("gz");