Rollup merge of #124434 - GKFX:remove-lazy-dependencies, r=jieyouxu
Remove lazycell and once_cell from compiletest dependencies

Use the standard library `OnceLock` instead of third-party equivalents. A macro is used for the regexes to make their initialization less unwieldy.
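As background, here is a minimal, self-contained sketch of the pattern this change adopts. The `static_regex!` macro below mirrors the one added to compiletest in this diff; the sample regex, input, and `main` wrapper are illustrative only.

```rust
use std::sync::OnceLock;

use regex::Regex;

// Mirrors the `static_regex!` macro added in this PR: each call site gets its own
// `OnceLock<Regex>`, compiled on first use and reused on every later call.
macro_rules! static_regex {
    ($re:literal) => {{
        static RE: OnceLock<Regex> = OnceLock::new();
        RE.get_or_init(|| Regex::new($re).unwrap())
    }};
}

fn main() {
    // Before this change: `static RE: Lazy<Regex> = Lazy::new(|| ...)` (once_cell)
    // or an `AtomicLazyCell` field (lazycell). After: plain std `OnceLock`.
    // The regex and input here are made up for illustration.
    let re = static_regex!(r"^error\[(E[0-9]{4})\]");
    assert!(re.is_match("error[E0308]: mismatched types"));
    println!("{:?}", re.captures("error[E0308]: mismatched types").map(|c| c[1].to_string()));
}
```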
This commit is contained in: commit 8f962a6256
@@ -760,11 +760,9 @@ dependencies = [
  "glob",
  "home",
  "indexmap",
- "lazycell",
  "libc",
  "miow",
  "miropt-test-tools",
- "once_cell",
  "regex",
  "rustfix 0.8.1",
  "serde",
@@ -2151,12 +2149,6 @@ version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 
-[[package]]
-name = "lazycell"
-version = "1.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
-
 [[package]]
 name = "leb128"
 version = "0.2.5"
@@ -21,10 +21,8 @@ regex = "1.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 rustfix = "0.8.1"
-once_cell = "1.16.0"
 walkdir = "2"
 glob = "0.3.0"
-lazycell = "1.3.0"
 anyhow = "1"
 home = "0.5.5"
 
@@ -6,10 +6,10 @@
 use std::path::{Path, PathBuf};
 use std::process::Command;
 use std::str::FromStr;
+use std::sync::OnceLock;
 
 use crate::util::{add_dylib_path, PathBufExt};
 use build_helper::git::GitConfig;
-use lazycell::AtomicLazyCell;
 use serde::de::{Deserialize, Deserializer, Error as _};
 use std::collections::{HashMap, HashSet};
 use test::{ColorConfig, OutputFormat};
@@ -384,7 +384,7 @@ pub struct Config {
     /// Only rerun the tests that result has been modified accoring to Git status
     pub only_modified: bool,
 
-    pub target_cfgs: AtomicLazyCell<TargetCfgs>,
+    pub target_cfgs: OnceLock<TargetCfgs>,
 
     pub nocapture: bool,
 
@@ -406,13 +406,7 @@ pub fn run_enabled(&self) -> bool {
     }
 
     pub fn target_cfgs(&self) -> &TargetCfgs {
-        match self.target_cfgs.borrow() {
-            Some(cfgs) => cfgs,
-            None => {
-                let _ = self.target_cfgs.fill(TargetCfgs::new(self));
-                self.target_cfgs.borrow().unwrap()
-            }
-        }
+        self.target_cfgs.get_or_init(|| TargetCfgs::new(self))
     }
 
     pub fn target_cfg(&self) -> &TargetCfg {
@@ -6,8 +6,8 @@
 use std::io::BufReader;
 use std::path::Path;
 use std::str::FromStr;
+use std::sync::OnceLock;
 
-use once_cell::sync::Lazy;
 use regex::Regex;
 use tracing::*;
 
@@ -117,10 +117,11 @@ fn parse_expected(
     // //~^^^^^
     // //[rev1]~
     // //[rev1,rev2]~^^
-    static RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"//(?:\[(?P<revs>[\w\-,]+)])?~(?P<adjust>\||\^*)").unwrap());
+    static RE: OnceLock<Regex> = OnceLock::new();
 
-    let captures = RE.captures(line)?;
+    let captures = RE
+        .get_or_init(|| Regex::new(r"//(?:\[(?P<revs>[\w\-,]+)])?~(?P<adjust>\||\^*)").unwrap())
+        .captures(line)?;
 
     match (test_revision, captures.name("revs")) {
         // Only error messages that contain our revision between the square brackets apply to us.
@@ -5,8 +5,8 @@
 use std::io::BufReader;
 use std::path::{Path, PathBuf};
 use std::process::Command;
+use std::sync::OnceLock;
 
-use once_cell::sync::Lazy;
 use regex::Regex;
 use tracing::*;
 
@@ -1021,8 +1021,9 @@ fn iter_header(
     let mut line_number = 0;
 
     // Match on error annotations like `//~ERROR`.
-    static REVISION_MAGIC_COMMENT_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new("//(\\[.*\\])?~.*").unwrap());
+    static REVISION_MAGIC_COMMENT_RE: OnceLock<Regex> = OnceLock::new();
+    let revision_magic_comment_re =
+        REVISION_MAGIC_COMMENT_RE.get_or_init(|| Regex::new("//(\\[.*\\])?~.*").unwrap());
 
     loop {
         line_number += 1;
@@ -1087,7 +1088,7 @@ fn iter_header(
             });
         // Then we try to check for legacy-style candidates, which are not the magic ~ERROR family
         // error annotations.
-        } else if !REVISION_MAGIC_COMMENT_RE.is_match(ln) {
+        } else if !revision_magic_comment_re.is_match(ln) {
             let Some((_, rest)) = line_directive("//", ln) else {
                 continue;
             };
@@ -24,13 +24,13 @@
 use build_helper::git::{get_git_modified_files, get_git_untracked_files};
 use core::panic;
 use getopts::Options;
-use lazycell::AtomicLazyCell;
 use std::collections::HashSet;
 use std::ffi::OsString;
 use std::fs;
 use std::io::{self, ErrorKind};
 use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
+use std::sync::{Arc, OnceLock};
 use std::time::SystemTime;
 use std::{env, vec};
 use test::ColorConfig;
@@ -39,7 +39,6 @@
 
 use self::header::{make_test_description, EarlyProps};
 use crate::header::HeadersCache;
-use std::sync::Arc;
 
 pub fn parse_config(args: Vec<String>) -> Config {
     let mut opts = Options::new();
@@ -320,7 +319,7 @@ fn make_absolute(path: PathBuf) -> PathBuf {
 
         force_rerun: matches.opt_present("force-rerun"),
 
-        target_cfgs: AtomicLazyCell::new(),
+        target_cfgs: OnceLock::new(),
 
         nocapture: matches.opt_present("nocapture"),
 
@@ -36,7 +36,6 @@
 
 use anyhow::Context;
 use glob::glob;
-use once_cell::sync::Lazy;
 use tracing::*;
 
 use crate::extract_gdb_version;
@@ -48,6 +47,13 @@
 #[cfg(test)]
 mod tests;
 
+macro_rules! static_regex {
+    ($re:literal) => {{
+        static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
+        RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
+    }};
+}
+
 const FAKE_SRC_BASE: &str = "fake-test-src-base";
 
 #[cfg(windows)]
@@ -765,28 +771,23 @@ fn anonymize_coverage_line_numbers(coverage: &str) -> String {
     // ` 100|` => ` LL|`
     // ` | 1000|` => ` | LL|`
     // ` | | 1000|` => ` | | LL|`
-    static LINE_NUMBER_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"(?m:^)(?<prefix>(?: \|)*) *[0-9]+\|").unwrap());
-    let coverage = LINE_NUMBER_RE.replace_all(&coverage, "${prefix} LL|");
+    let coverage = static_regex!(r"(?m:^)(?<prefix>(?: \|)*) *[0-9]+\|")
+        .replace_all(&coverage, "${prefix} LL|");
 
     // ` | Branch (1:` => ` | Branch (LL:`
     // ` | | Branch (10:` => ` | | Branch (LL:`
-    static BRANCH_LINE_NUMBER_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"(?m:^)(?<prefix>(?: \|)+ Branch \()[0-9]+:").unwrap());
-    let coverage = BRANCH_LINE_NUMBER_RE.replace_all(&coverage, "${prefix}LL:");
+    let coverage = static_regex!(r"(?m:^)(?<prefix>(?: \|)+ Branch \()[0-9]+:")
+        .replace_all(&coverage, "${prefix}LL:");
 
     // ` |---> MC/DC Decision Region (1:30) to (2:` => ` |---> MC/DC Decision Region (LL:30) to (LL:`
-    static MCDC_DECISION_LINE_NUMBER_RE: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r"(?m:^)(?<prefix>(?: \|)+---> MC/DC Decision Region \()[0-9]+:(?<middle>[0-9]+\) to \()[0-9]+:").unwrap()
-    });
     let coverage =
-        MCDC_DECISION_LINE_NUMBER_RE.replace_all(&coverage, "${prefix}LL:${middle}LL:");
+        static_regex!(r"(?m:^)(?<prefix>(?: \|)+---> MC/DC Decision Region \()[0-9]+:(?<middle>[0-9]+\) to \()[0-9]+:")
+            .replace_all(&coverage, "${prefix}LL:${middle}LL:");
 
     // ` | Condition C1 --> (1:` => ` | Condition C1 --> (LL:`
-    static MCDC_CONDITION_LINE_NUMBER_RE: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r"(?m:^)(?<prefix>(?: \|)+ Condition C[0-9]+ --> \()[0-9]+:").unwrap()
-    });
-    let coverage = MCDC_CONDITION_LINE_NUMBER_RE.replace_all(&coverage, "${prefix}LL:");
+    let coverage =
+        static_regex!(r"(?m:^)(?<prefix>(?: \|)+ Condition C[0-9]+ --> \()[0-9]+:")
+            .replace_all(&coverage, "${prefix}LL:");
 
     coverage.into_owned()
 }
@@ -3471,13 +3472,12 @@ fn codegen_units_to_str(cgus: &HashSet<String>) -> String {
 // the form <crate-name1>.<crate-disambiguator1>-in-<crate-name2>.<crate-disambiguator2>,
 // remove all crate-disambiguators.
 fn remove_crate_disambiguator_from_cgu(cgu: &str) -> String {
-    static RE: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r"^[^\.]+(?P<d1>\.[[:alnum:]]+)(-in-[^\.]+(?P<d2>\.[[:alnum:]]+))?")
-            .unwrap()
-    });
-
-    let captures =
-        RE.captures(cgu).unwrap_or_else(|| panic!("invalid cgu name encountered: {}", cgu));
+    let Some(captures) =
+        static_regex!(r"^[^\.]+(?P<d1>\.[[:alnum:]]+)(-in-[^\.]+(?P<d2>\.[[:alnum:]]+))?")
+            .captures(cgu)
+    else {
+        panic!("invalid cgu name encountered: {cgu}");
+    };
 
     let mut new_name = cgu.to_owned();
 
@@ -4073,18 +4073,16 @@ fn load_compare_outputs(
             // 'uploaded "$TEST_BUILD_DIR/<test_executable>, waiting for result"'
             // is printed to stdout by the client and then captured in the ProcRes,
             // so it needs to be removed when comparing the run-pass test execution output.
-            static REMOTE_TEST_RE: Lazy<Regex> = Lazy::new(|| {
-                Regex::new(
-                    "^uploaded \"\\$TEST_BUILD_DIR(/[[:alnum:]_\\-.]+)+\", waiting for result\n"
-                )
-                .unwrap()
-            });
-            normalized_stdout = REMOTE_TEST_RE.replace(&normalized_stdout, "").to_string();
+            normalized_stdout = static_regex!(
+                "^uploaded \"\\$TEST_BUILD_DIR(/[[:alnum:]_\\-.]+)+\", waiting for result\n"
+            )
+            .replace(&normalized_stdout, "")
+            .to_string();
             // When there is a panic, the remote-test-client also prints "died due to signal";
             // that needs to be removed as well.
-            static SIGNAL_DIED_RE: Lazy<Regex> =
-                Lazy::new(|| Regex::new("^died due to signal [0-9]+\n").unwrap());
-            normalized_stdout = SIGNAL_DIED_RE.replace(&normalized_stdout, "").to_string();
+            normalized_stdout = static_regex!("^died due to signal [0-9]+\n")
+                .replace(&normalized_stdout, "")
+                .to_string();
             // FIXME: it would be much nicer if we could just tell the remote-test-client to not
             // print these things.
         }
@@ -4556,10 +4554,9 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
         // with placeholders as we do not want tests needing updated when compiler source code
         // changes.
        // eg. $SRC_DIR/libcore/mem.rs:323:14 becomes $SRC_DIR/libcore/mem.rs:LL:COL
-        static SRC_DIR_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new("SRC_DIR(.+):\\d+:\\d+(: \\d+:\\d+)?").unwrap());
-
-        normalized = SRC_DIR_RE.replace_all(&normalized, "SRC_DIR$1:LL:COL").into_owned();
+        normalized = static_regex!("SRC_DIR(.+):\\d+:\\d+(: \\d+:\\d+)?")
+            .replace_all(&normalized, "SRC_DIR$1:LL:COL")
+            .into_owned();
 
         normalized = Self::normalize_platform_differences(&normalized);
         normalized = normalized.replace("\t", "\\t"); // makes tabs visible
@@ -4568,34 +4565,29 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
         // since they duplicate actual errors and make the output hard to read.
         // This mirrors the regex in src/tools/tidy/src/style.rs, please update
         // both if either are changed.
-        static ANNOTATION_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new("\\s*//(\\[.*\\])?~.*").unwrap());
-
-        normalized = ANNOTATION_RE.replace_all(&normalized, "").into_owned();
+        normalized =
+            static_regex!("\\s*//(\\[.*\\])?~.*").replace_all(&normalized, "").into_owned();
 
         // This code normalizes various hashes in v0 symbol mangling that is
         // emitted in the ui and mir-opt tests.
-        static V0_CRATE_HASH_PREFIX_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"_R.*?Cs[0-9a-zA-Z]+_").unwrap());
-        static V0_CRATE_HASH_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"Cs[0-9a-zA-Z]+_").unwrap());
+        let v0_crate_hash_prefix_re = static_regex!(r"_R.*?Cs[0-9a-zA-Z]+_");
+        let v0_crate_hash_re = static_regex!(r"Cs[0-9a-zA-Z]+_");
 
         const V0_CRATE_HASH_PLACEHOLDER: &str = r"CsCRATE_HASH_";
-        if V0_CRATE_HASH_PREFIX_RE.is_match(&normalized) {
+        if v0_crate_hash_prefix_re.is_match(&normalized) {
             // Normalize crate hash
             normalized =
-                V0_CRATE_HASH_RE.replace_all(&normalized, V0_CRATE_HASH_PLACEHOLDER).into_owned();
+                v0_crate_hash_re.replace_all(&normalized, V0_CRATE_HASH_PLACEHOLDER).into_owned();
         }
 
-        static V0_BACK_REF_PREFIX_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"\(_R.*?B[0-9a-zA-Z]_").unwrap());
-        static V0_BACK_REF_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"B[0-9a-zA-Z]_").unwrap());
+        let v0_back_ref_prefix_re = static_regex!(r"\(_R.*?B[0-9a-zA-Z]_");
+        let v0_back_ref_re = static_regex!(r"B[0-9a-zA-Z]_");
 
         const V0_BACK_REF_PLACEHOLDER: &str = r"B<REF>_";
-        if V0_BACK_REF_PREFIX_RE.is_match(&normalized) {
+        if v0_back_ref_prefix_re.is_match(&normalized) {
             // Normalize back references (see RFC 2603)
             normalized =
-                V0_BACK_REF_RE.replace_all(&normalized, V0_BACK_REF_PLACEHOLDER).into_owned();
+                v0_back_ref_re.replace_all(&normalized, V0_BACK_REF_PLACEHOLDER).into_owned();
         }
 
         // AllocId are numbered globally in a compilation session. This can lead to changes
@@ -4608,26 +4600,22 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
         let mut seen_allocs = indexmap::IndexSet::new();
 
         // The alloc-id appears in pretty-printed allocations.
-        static ALLOC_ID_PP_RE: Lazy<Regex> = Lazy::new(|| {
-            Regex::new(r"╾─*a(lloc)?([0-9]+)(\+0x[0-9]+)?(<imm>)?( \([0-9]+ ptr bytes\))?─*╼")
-                .unwrap()
-        });
-        normalized = ALLOC_ID_PP_RE
-            .replace_all(&normalized, |caps: &Captures<'_>| {
-                // Renumber the captured index.
-                let index = caps.get(2).unwrap().as_str().to_string();
-                let (index, _) = seen_allocs.insert_full(index);
-                let offset = caps.get(3).map_or("", |c| c.as_str());
-                let imm = caps.get(4).map_or("", |c| c.as_str());
-                // Do not bother keeping it pretty, just make it deterministic.
-                format!("╾ALLOC{index}{offset}{imm}╼")
-            })
-            .into_owned();
+        normalized = static_regex!(
+            r"╾─*a(lloc)?([0-9]+)(\+0x[0-9]+)?(<imm>)?( \([0-9]+ ptr bytes\))?─*╼"
+        )
+        .replace_all(&normalized, |caps: &Captures<'_>| {
+            // Renumber the captured index.
+            let index = caps.get(2).unwrap().as_str().to_string();
+            let (index, _) = seen_allocs.insert_full(index);
+            let offset = caps.get(3).map_or("", |c| c.as_str());
+            let imm = caps.get(4).map_or("", |c| c.as_str());
+            // Do not bother keeping it pretty, just make it deterministic.
+            format!("╾ALLOC{index}{offset}{imm}╼")
+        })
+        .into_owned();
 
         // The alloc-id appears in a sentence.
-        static ALLOC_ID_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"\balloc([0-9]+)\b").unwrap());
-        normalized = ALLOC_ID_RE
+        normalized = static_regex!(r"\balloc([0-9]+)\b")
            .replace_all(&normalized, |caps: &Captures<'_>| {
                let index = caps.get(1).unwrap().as_str().to_string();
                let (index, _) = seen_allocs.insert_full(index);
@@ -4650,12 +4638,13 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
     /// Replaces backslashes in paths with forward slashes, and replaces CRLF line endings
     /// with LF.
     fn normalize_platform_differences(output: &str) -> String {
-        /// Used to find Windows paths.
-        ///
-        /// It's not possible to detect paths in the error messages generally, but this is a
-        /// decent enough heuristic.
-        static PATH_BACKSLASH_RE: Lazy<Regex> = Lazy::new(|| {
-            Regex::new(
+        let output = output.replace(r"\\", r"\");
+
+        // Used to find Windows paths.
+        //
+        // It's not possible to detect paths in the error messages generally, but this is a
+        // decent enough heuristic.
+        static_regex!(
             r#"(?x)
             (?:
               # Match paths that don't include spaces.
@@ -4663,14 +4652,8 @@ fn normalize_platform_differences(output: &str) -> String {
 
               # If the path starts with a well-known root, then allow spaces and no file extension.
              \$(?:DIR|SRC_DIR|TEST_BUILD_DIR|BUILD_DIR|LIB_DIR)(?:\\[\pL\pN\.\-_'\ ]+)+
-            )"#,
+            )"#
             )
-            .unwrap()
-        });
-
-        let output = output.replace(r"\\", r"\");
-
-        PATH_BACKSLASH_RE
             .replace_all(&output, |caps: &Captures<'_>| {
                 println!("{}", &caps[0]);
                 caps[0].replace(r"\", "/")