Rollup merge of #124434 - GKFX:remove-lazy-dependencies, r=jieyouxu
Remove lazycell and once_cell from compiletest dependencies.

Use the standard library `OnceLock` instead of the third-party equivalents. A macro is used for the regexes to make their initialization less unwieldy.
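The pattern used throughout the diff below is the usual migration from a `once_cell::sync::Lazy` (or `lazycell::AtomicLazyCell`) static to a `std::sync::OnceLock` initialized via `get_or_init`. A minimal before/after sketch of that migration; the regex and function name here are illustrative, not taken from compiletest:

```rust
use std::sync::OnceLock;

use regex::Regex;

// Before: `static RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"[0-9]+").unwrap());`
// relied on the third-party once_cell crate.
//
// After: the standard library's OnceLock provides the same one-time,
// thread-safe initialization; get_or_init runs the closure only once.
fn digits_re() -> &'static Regex {
    static RE: OnceLock<Regex> = OnceLock::new();
    RE.get_or_init(|| Regex::new(r"[0-9]+").unwrap())
}

fn main() {
    assert!(digits_re().is_match("line 42"));
    assert!(!digits_re().is_match("no digits here"));
}
```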
This commit is contained in: commit 8f962a6256
@@ -760,11 +760,9 @@ dependencies = [
  "glob",
  "home",
  "indexmap",
- "lazycell",
  "libc",
  "miow",
  "miropt-test-tools",
- "once_cell",
  "regex",
  "rustfix 0.8.1",
  "serde",
@@ -2151,12 +2149,6 @@ version = "1.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-
-[[package]]
-name = "lazycell"
-version = "1.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"

 [[package]]
 name = "leb128"
 version = "0.2.5"
@@ -21,10 +21,8 @@ regex = "1.0"
 serde = { version = "1.0", features = ["derive"] }
 serde_json = "1.0"
 rustfix = "0.8.1"
-once_cell = "1.16.0"
 walkdir = "2"
 glob = "0.3.0"
-lazycell = "1.3.0"
 anyhow = "1"
 home = "0.5.5"

@@ -6,10 +6,10 @@
 use std::path::{Path, PathBuf};
 use std::process::Command;
 use std::str::FromStr;
+use std::sync::OnceLock;

 use crate::util::{add_dylib_path, PathBufExt};
 use build_helper::git::GitConfig;
-use lazycell::AtomicLazyCell;
 use serde::de::{Deserialize, Deserializer, Error as _};
 use std::collections::{HashMap, HashSet};
 use test::{ColorConfig, OutputFormat};
@@ -384,7 +384,7 @@ pub struct Config {
     /// Only rerun the tests that result has been modified accoring to Git status
     pub only_modified: bool,

-    pub target_cfgs: AtomicLazyCell<TargetCfgs>,
+    pub target_cfgs: OnceLock<TargetCfgs>,

     pub nocapture: bool,

@@ -406,13 +406,7 @@ pub fn run_enabled(&self) -> bool {
     }

     pub fn target_cfgs(&self) -> &TargetCfgs {
-        match self.target_cfgs.borrow() {
-            Some(cfgs) => cfgs,
-            None => {
-                let _ = self.target_cfgs.fill(TargetCfgs::new(self));
-                self.target_cfgs.borrow().unwrap()
-            }
-        }
+        self.target_cfgs.get_or_init(|| TargetCfgs::new(self))
     }

     pub fn target_cfg(&self) -> &TargetCfg {
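The `fill`/`borrow` dance that `AtomicLazyCell` required collapses into a single `get_or_init` call, which runs the initializer at most once even when called from several threads. A simplified sketch of the same field pattern, using stand-in types rather than the real `Config`:

```rust
use std::sync::OnceLock;

struct TargetCfgs {
    target: String,
}

struct Config {
    target: String,
    // Computed lazily on first access, then shared by reference afterwards.
    target_cfgs: OnceLock<TargetCfgs>,
}

impl Config {
    fn target_cfgs(&self) -> &TargetCfgs {
        // The first call runs the closure and stores the value; later calls
        // just return a reference to the stored value.
        self.target_cfgs.get_or_init(|| TargetCfgs { target: self.target.clone() })
    }
}

fn main() {
    let config =
        Config { target: "x86_64-unknown-linux-gnu".into(), target_cfgs: OnceLock::new() };
    assert_eq!(config.target_cfgs().target, "x86_64-unknown-linux-gnu");
}
```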
@@ -6,8 +6,8 @@
 use std::io::BufReader;
 use std::path::Path;
 use std::str::FromStr;
+use std::sync::OnceLock;

-use once_cell::sync::Lazy;
 use regex::Regex;
 use tracing::*;

@@ -117,10 +117,11 @@ fn parse_expected(
     //     //~^^^^^
     //     //[rev1]~
     //     //[rev1,rev2]~^^
-    static RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"//(?:\[(?P<revs>[\w\-,]+)])?~(?P<adjust>\||\^*)").unwrap());
+    static RE: OnceLock<Regex> = OnceLock::new();

-    let captures = RE.captures(line)?;
+    let captures = RE
+        .get_or_init(|| Regex::new(r"//(?:\[(?P<revs>[\w\-,]+)])?~(?P<adjust>\||\^*)").unwrap())
+        .captures(line)?;

     match (test_revision, captures.name("revs")) {
         // Only error messages that contain our revision between the square brackets apply to us.
@@ -5,8 +5,8 @@
 use std::io::BufReader;
 use std::path::{Path, PathBuf};
 use std::process::Command;
+use std::sync::OnceLock;

-use once_cell::sync::Lazy;
 use regex::Regex;
 use tracing::*;

@@ -1021,8 +1021,9 @@ fn iter_header(
     let mut line_number = 0;

     // Match on error annotations like `//~ERROR`.
-    static REVISION_MAGIC_COMMENT_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new("//(\\[.*\\])?~.*").unwrap());
+    static REVISION_MAGIC_COMMENT_RE: OnceLock<Regex> = OnceLock::new();
+    let revision_magic_comment_re =
+        REVISION_MAGIC_COMMENT_RE.get_or_init(|| Regex::new("//(\\[.*\\])?~.*").unwrap());

     loop {
         line_number += 1;
@@ -1087,7 +1088,7 @@ fn iter_header(
            });
        // Then we try to check for legacy-style candidates, which are not the magic ~ERROR family
        // error annotations.
-        } else if !REVISION_MAGIC_COMMENT_RE.is_match(ln) {
+        } else if !revision_magic_comment_re.is_match(ln) {
            let Some((_, rest)) = line_directive("//", ln) else {
                continue;
            };
@@ -24,13 +24,13 @@
 use build_helper::git::{get_git_modified_files, get_git_untracked_files};
 use core::panic;
 use getopts::Options;
-use lazycell::AtomicLazyCell;
 use std::collections::HashSet;
 use std::ffi::OsString;
 use std::fs;
 use std::io::{self, ErrorKind};
 use std::path::{Path, PathBuf};
 use std::process::{Command, Stdio};
+use std::sync::{Arc, OnceLock};
 use std::time::SystemTime;
 use std::{env, vec};
 use test::ColorConfig;
@@ -39,7 +39,6 @@

 use self::header::{make_test_description, EarlyProps};
 use crate::header::HeadersCache;
-use std::sync::Arc;

 pub fn parse_config(args: Vec<String>) -> Config {
     let mut opts = Options::new();
@@ -320,7 +319,7 @@ fn make_absolute(path: PathBuf) -> PathBuf {

        force_rerun: matches.opt_present("force-rerun"),

-        target_cfgs: AtomicLazyCell::new(),
+        target_cfgs: OnceLock::new(),

        nocapture: matches.opt_present("nocapture"),

@@ -36,7 +36,6 @@

 use anyhow::Context;
 use glob::glob;
-use once_cell::sync::Lazy;
 use tracing::*;

 use crate::extract_gdb_version;
@@ -48,6 +47,13 @@
 #[cfg(test)]
 mod tests;

+macro_rules! static_regex {
+    ($re:literal) => {{
+        static RE: ::std::sync::OnceLock<::regex::Regex> = ::std::sync::OnceLock::new();
+        RE.get_or_init(|| ::regex::Regex::new($re).unwrap())
+    }};
+}
+
 const FAKE_SRC_BASE: &str = "fake-test-src-base";

 #[cfg(windows)]
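Each expansion of `static_regex!` gets its own hidden `static OnceLock<Regex>`, so every call site compiles its pattern exactly once and hands back a `&'static Regex`. A small usage sketch of the same macro shape, with plain imports instead of the fully qualified `::std`/`::regex` paths and an invented pattern:

```rust
use std::sync::OnceLock;

use regex::Regex;

// Same shape as the macro added in this commit.
macro_rules! static_regex {
    ($re:literal) => {{
        static RE: OnceLock<Regex> = OnceLock::new();
        RE.get_or_init(|| Regex::new($re).unwrap())
    }};
}

fn main() {
    // The pattern is compiled the first time this line executes; every later
    // iteration reuses the cached Regex from the hidden static.
    for line in ["alloc12", "alloc7+0x4", "unrelated"] {
        if static_regex!(r"^alloc[0-9]+").is_match(line) {
            println!("{line} looks like an alloc id");
        }
    }
}
```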
@@ -765,28 +771,23 @@ fn anonymize_coverage_line_numbers(coverage: &str) -> String {
    //     ` 100|` => ` LL|`
    //     ` | 1000|` => ` | LL|`
    //     ` | | 1000|` => ` | | LL|`
-    static LINE_NUMBER_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"(?m:^)(?<prefix>(?: \|)*) *[0-9]+\|").unwrap());
-    let coverage = LINE_NUMBER_RE.replace_all(&coverage, "${prefix} LL|");
+    let coverage = static_regex!(r"(?m:^)(?<prefix>(?: \|)*) *[0-9]+\|")
+        .replace_all(&coverage, "${prefix} LL|");

    //     ` | Branch (1:` => ` | Branch (LL:`
    //     ` | | Branch (10:` => ` | | Branch (LL:`
-    static BRANCH_LINE_NUMBER_RE: Lazy<Regex> =
-        Lazy::new(|| Regex::new(r"(?m:^)(?<prefix>(?: \|)+ Branch \()[0-9]+:").unwrap());
-    let coverage = BRANCH_LINE_NUMBER_RE.replace_all(&coverage, "${prefix}LL:");
+    let coverage = static_regex!(r"(?m:^)(?<prefix>(?: \|)+ Branch \()[0-9]+:")
+        .replace_all(&coverage, "${prefix}LL:");

    //     ` |---> MC/DC Decision Region (1:30) to (2:` => ` |---> MC/DC Decision Region (LL:30) to (LL:`
-    static MCDC_DECISION_LINE_NUMBER_RE: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r"(?m:^)(?<prefix>(?: \|)+---> MC/DC Decision Region \()[0-9]+:(?<middle>[0-9]+\) to \()[0-9]+:").unwrap()
-    });
    let coverage =
-        MCDC_DECISION_LINE_NUMBER_RE.replace_all(&coverage, "${prefix}LL:${middle}LL:");
+        static_regex!(r"(?m:^)(?<prefix>(?: \|)+---> MC/DC Decision Region \()[0-9]+:(?<middle>[0-9]+\) to \()[0-9]+:")
+        .replace_all(&coverage, "${prefix}LL:${middle}LL:");

    //     ` | Condition C1 --> (1:` => ` | Condition C1 --> (LL:`
-    static MCDC_CONDITION_LINE_NUMBER_RE: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r"(?m:^)(?<prefix>(?: \|)+ Condition C[0-9]+ --> \()[0-9]+:").unwrap()
-    });
-    let coverage = MCDC_CONDITION_LINE_NUMBER_RE.replace_all(&coverage, "${prefix}LL:");
+    let coverage =
+        static_regex!(r"(?m:^)(?<prefix>(?: \|)+ Condition C[0-9]+ --> \()[0-9]+:")
+        .replace_all(&coverage, "${prefix}LL:");

    coverage.into_owned()
 }
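These rewrites rely on the regex crate's `$`-substitution in `Regex::replace_all`, where named capture groups can be spliced back into the replacement string. A self-contained sketch of the line-number anonymization idea; the input text is invented, while the real input is llvm-cov output:

```rust
use regex::Regex;

fn main() {
    // Keep the leading ` |` nesting prefix, replace the line number with LL.
    // (?P<prefix>...) is the long-standing spelling of a named group; the
    // (?<prefix>...) form used in the diff is equivalent in recent regex versions.
    let re = Regex::new(r"(?m:^)(?P<prefix>(?: \|)*) *[0-9]+\|").unwrap();
    let input = " |  100|    body();\n   42|fn main() {\n";
    let out = re.replace_all(input, "${prefix} LL|");
    // Braces around the group name matter when the replacement continues with
    // word characters: `${prefix}LL` reads the group `prefix`, while `$prefixLL`
    // would look up a nonexistent group named `prefixLL`.
    assert_eq!(&*out, " | LL|    body();\n LL|fn main() {\n");
}
```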
@@ -3471,13 +3472,12 @@ fn codegen_units_to_str(cgus: &HashSet<String>) -> String {
 // the form <crate-name1>.<crate-disambiguator1>-in-<crate-name2>.<crate-disambiguator2>,
 // remove all crate-disambiguators.
 fn remove_crate_disambiguator_from_cgu(cgu: &str) -> String {
-    static RE: Lazy<Regex> = Lazy::new(|| {
-        Regex::new(r"^[^\.]+(?P<d1>\.[[:alnum:]]+)(-in-[^\.]+(?P<d2>\.[[:alnum:]]+))?")
-            .unwrap()
-    });
-
-    let captures =
-        RE.captures(cgu).unwrap_or_else(|| panic!("invalid cgu name encountered: {}", cgu));
+    let Some(captures) =
+        static_regex!(r"^[^\.]+(?P<d1>\.[[:alnum:]]+)(-in-[^\.]+(?P<d2>\.[[:alnum:]]+))?")
+            .captures(cgu)
+    else {
+        panic!("invalid cgu name encountered: {cgu}");
+    };

     let mut new_name = cgu.to_owned();

@@ -4073,18 +4073,16 @@ fn load_compare_outputs(
            // 'uploaded "$TEST_BUILD_DIR/<test_executable>, waiting for result"'
            // is printed to stdout by the client and then captured in the ProcRes,
            // so it needs to be removed when comparing the run-pass test execution output.
-            static REMOTE_TEST_RE: Lazy<Regex> = Lazy::new(|| {
-                Regex::new(
-                    "^uploaded \"\\$TEST_BUILD_DIR(/[[:alnum:]_\\-.]+)+\", waiting for result\n"
-                )
-                .unwrap()
-            });
-            normalized_stdout = REMOTE_TEST_RE.replace(&normalized_stdout, "").to_string();
+            normalized_stdout = static_regex!(
+                "^uploaded \"\\$TEST_BUILD_DIR(/[[:alnum:]_\\-.]+)+\", waiting for result\n"
+            )
+            .replace(&normalized_stdout, "")
+            .to_string();
            // When there is a panic, the remote-test-client also prints "died due to signal";
            // that needs to be removed as well.
-            static SIGNAL_DIED_RE: Lazy<Regex> =
-                Lazy::new(|| Regex::new("^died due to signal [0-9]+\n").unwrap());
-            normalized_stdout = SIGNAL_DIED_RE.replace(&normalized_stdout, "").to_string();
+            normalized_stdout = static_regex!("^died due to signal [0-9]+\n")
+                .replace(&normalized_stdout, "")
+                .to_string();
            // FIXME: it would be much nicer if we could just tell the remote-test-client to not
            // print these things.
        }
@@ -4556,10 +4554,9 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
        // with placeholders as we do not want tests needing updated when compiler source code
        // changes.
        // eg. $SRC_DIR/libcore/mem.rs:323:14 becomes $SRC_DIR/libcore/mem.rs:LL:COL
-        static SRC_DIR_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new("SRC_DIR(.+):\\d+:\\d+(: \\d+:\\d+)?").unwrap());
-
-        normalized = SRC_DIR_RE.replace_all(&normalized, "SRC_DIR$1:LL:COL").into_owned();
+        normalized = static_regex!("SRC_DIR(.+):\\d+:\\d+(: \\d+:\\d+)?")
+            .replace_all(&normalized, "SRC_DIR$1:LL:COL")
+            .into_owned();

        normalized = Self::normalize_platform_differences(&normalized);
        normalized = normalized.replace("\t", "\\t"); // makes tabs visible
@@ -4568,34 +4565,29 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
        // since they duplicate actual errors and make the output hard to read.
        // This mirrors the regex in src/tools/tidy/src/style.rs, please update
        // both if either are changed.
-        static ANNOTATION_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new("\\s*//(\\[.*\\])?~.*").unwrap());
-
-        normalized = ANNOTATION_RE.replace_all(&normalized, "").into_owned();
+        normalized =
+            static_regex!("\\s*//(\\[.*\\])?~.*").replace_all(&normalized, "").into_owned();

        // This code normalizes various hashes in v0 symbol mangling that is
        // emitted in the ui and mir-opt tests.
-        static V0_CRATE_HASH_PREFIX_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"_R.*?Cs[0-9a-zA-Z]+_").unwrap());
-        static V0_CRATE_HASH_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"Cs[0-9a-zA-Z]+_").unwrap());
+        let v0_crate_hash_prefix_re = static_regex!(r"_R.*?Cs[0-9a-zA-Z]+_");
+        let v0_crate_hash_re = static_regex!(r"Cs[0-9a-zA-Z]+_");

        const V0_CRATE_HASH_PLACEHOLDER: &str = r"CsCRATE_HASH_";
-        if V0_CRATE_HASH_PREFIX_RE.is_match(&normalized) {
+        if v0_crate_hash_prefix_re.is_match(&normalized) {
            // Normalize crate hash
            normalized =
-                V0_CRATE_HASH_RE.replace_all(&normalized, V0_CRATE_HASH_PLACEHOLDER).into_owned();
+                v0_crate_hash_re.replace_all(&normalized, V0_CRATE_HASH_PLACEHOLDER).into_owned();
        }

-        static V0_BACK_REF_PREFIX_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"\(_R.*?B[0-9a-zA-Z]_").unwrap());
-        static V0_BACK_REF_RE: Lazy<Regex> = Lazy::new(|| Regex::new(r"B[0-9a-zA-Z]_").unwrap());
+        let v0_back_ref_prefix_re = static_regex!(r"\(_R.*?B[0-9a-zA-Z]_");
+        let v0_back_ref_re = static_regex!(r"B[0-9a-zA-Z]_");

        const V0_BACK_REF_PLACEHOLDER: &str = r"B<REF>_";
-        if V0_BACK_REF_PREFIX_RE.is_match(&normalized) {
+        if v0_back_ref_prefix_re.is_match(&normalized) {
            // Normalize back references (see RFC 2603)
            normalized =
-                V0_BACK_REF_RE.replace_all(&normalized, V0_BACK_REF_PLACEHOLDER).into_owned();
+                v0_back_ref_re.replace_all(&normalized, V0_BACK_REF_PLACEHOLDER).into_owned();
        }

        // AllocId are numbered globally in a compilation session. This can lead to changes
@@ -4608,26 +4600,22 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
        let mut seen_allocs = indexmap::IndexSet::new();

        // The alloc-id appears in pretty-printed allocations.
-        static ALLOC_ID_PP_RE: Lazy<Regex> = Lazy::new(|| {
-            Regex::new(r"╾─*a(lloc)?([0-9]+)(\+0x[0-9]+)?(<imm>)?( \([0-9]+ ptr bytes\))?─*╼")
-                .unwrap()
-        });
-        normalized = ALLOC_ID_PP_RE
-            .replace_all(&normalized, |caps: &Captures<'_>| {
-                // Renumber the captured index.
-                let index = caps.get(2).unwrap().as_str().to_string();
-                let (index, _) = seen_allocs.insert_full(index);
-                let offset = caps.get(3).map_or("", |c| c.as_str());
-                let imm = caps.get(4).map_or("", |c| c.as_str());
-                // Do not bother keeping it pretty, just make it deterministic.
-                format!("╾ALLOC{index}{offset}{imm}╼")
-            })
-            .into_owned();
+        normalized = static_regex!(
+            r"╾─*a(lloc)?([0-9]+)(\+0x[0-9]+)?(<imm>)?( \([0-9]+ ptr bytes\))?─*╼"
+        )
+        .replace_all(&normalized, |caps: &Captures<'_>| {
+            // Renumber the captured index.
+            let index = caps.get(2).unwrap().as_str().to_string();
+            let (index, _) = seen_allocs.insert_full(index);
+            let offset = caps.get(3).map_or("", |c| c.as_str());
+            let imm = caps.get(4).map_or("", |c| c.as_str());
+            // Do not bother keeping it pretty, just make it deterministic.
+            format!("╾ALLOC{index}{offset}{imm}╼")
+        })
+        .into_owned();

        // The alloc-id appears in a sentence.
-        static ALLOC_ID_RE: Lazy<Regex> =
-            Lazy::new(|| Regex::new(r"\balloc([0-9]+)\b").unwrap());
-        normalized = ALLOC_ID_RE
+        normalized = static_regex!(r"\balloc([0-9]+)\b")
            .replace_all(&normalized, |caps: &Captures<'_>| {
                let index = caps.get(1).unwrap().as_str().to_string();
                let (index, _) = seen_allocs.insert_full(index);
@@ -4650,12 +4638,13 @@ fn normalize_output(&self, output: &str, custom_rules: &[(String, String)]) -> S
    /// Replaces backslashes in paths with forward slashes, and replaces CRLF line endings
    /// with LF.
    fn normalize_platform_differences(output: &str) -> String {
-        /// Used to find Windows paths.
-        ///
-        /// It's not possible to detect paths in the error messages generally, but this is a
-        /// decent enough heuristic.
-        static PATH_BACKSLASH_RE: Lazy<Regex> = Lazy::new(|| {
-            Regex::new(
+        let output = output.replace(r"\\", r"\");
+
+        // Used to find Windows paths.
+        //
+        // It's not possible to detect paths in the error messages generally, but this is a
+        // decent enough heuristic.
+        static_regex!(
            r#"(?x)
            (?:
              # Match paths that don't include spaces.
@@ -4663,14 +4652,8 @@ fn normalize_platform_differences(output: &str) -> String {
            |
              # If the path starts with a well-known root, then allow spaces and no file extension.
              \$(?:DIR|SRC_DIR|TEST_BUILD_DIR|BUILD_DIR|LIB_DIR)(?:\\[\pL\pN\.\-_'\ ]+)+
-            )"#,
-            )
-            .unwrap()
-        });
-
-        let output = output.replace(r"\\", r"\");
-
-        PATH_BACKSLASH_RE
+            )"#
+        )
            .replace_all(&output, |caps: &Captures<'_>| {
                println!("{}", &caps[0]);
                caps[0].replace(r"\", "/")