Auto merge of #10920 - blyxyas:speedtest, r=llogiq

Add `SPEEDTEST`

On the `master` branch we currently have no way to measure how a change affects the performance of a single lint.
This PR adds `SPEEDTEST`, an environment variable that lets you run a speed test on a lint (or a category of tests) with various configuration options.
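For example, timing the `allow_attributes` UI test (see the book page below for details):

```sh
SPEEDTEST=ui TESTNAME="allow_attributes" cargo uitest -- --nocapture
```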

Maybe we should merge this with `lintcheck` 🤔
See the book page for more information.

changelog: none

2 changed files with 63 additions and 6 deletions

@@ -0,0 +1,24 @@
# Speedtest
`SPEEDTEST` is the tool we use to measure a lint's performance. It works by executing the same test several times
and averaging the result. It's useful for measuring how a change affects a lint's performance and for deciding
whether the performance changes too much. `SPEEDTEST` is controlled via the `SPEEDTEST` (and `SPEEDTEST_*`)
environment variables.
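Under the hood, the measurement is a simple timing loop. Here is a minimal sketch; `run_test` is a hypothetical
stand-in for whichever test runner the `SPEEDTEST` value selects:

```rust
use std::time::Instant;

// Minimal sketch of the measurement loop; `run_test` is a hypothetical
// stand-in for the test runner selected by SPEEDTEST (e.g. the `ui` runner).
fn speedtest(run_test: fn(), iterations: u128) {
    let mut sum = 0;
    for _ in 0..iterations {
        let start = Instant::now();
        run_test();
        sum += start.elapsed().as_millis();
    }
    println!("average time: {} millis.", sum / iterations);
}
```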
## Running a Speedtest
To run a simple speed test of a lint (e.g. `allow_attributes`), use this command:
```sh
$ SPEEDTEST=ui TESTNAME="allow_attributes" cargo uitest -- --nocapture
```
This will run all `ui` tests (`SPEEDTEST=ui`) whose names start with `allow_attributes`. By default, `SPEEDTEST`
runs the matched tests 1000 times; you can change this with `SPEEDTEST_ITERATIONS`:
```sh
$ SPEEDTEST=toml SPEEDTEST_ITERATIONS=100 TESTNAME="semicolon_block" cargo uitest -- --nocapture
```
> **WARNING**: Be sure to add `-- --nocapture` at the end of the command; without it (e.g.
> `SPEEDTEST=ui TESTNAME="let_underscore_untyped" cargo uitest`), the average test time will not be shown.
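With `-- --nocapture`, the run prints a banner at the start and the average at the end; the output looks roughly
like this (the timing value is illustrative):

```text
----------- STARTING SPEEDTEST -----------
...
average UI time: 1432 millis.
```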


@@ -217,12 +217,45 @@ fn main() {
    }
    set_var("CLIPPY_DISABLE_DOCS_LINKS", "true");
    // The SPEEDTEST_* env variables can be used to check Clippy's performance on your PR. It runs the
    // affected test 1000 times (by default) and prints the average.
    if let Ok(speedtest) = std::env::var("SPEEDTEST") {
        println!("----------- STARTING SPEEDTEST -----------");
        let f = match speedtest.as_str() {
            "ui" => run_ui as fn(),
            "cargo" => run_ui_cargo as fn(),
            "toml" => run_ui_toml as fn(),
            "internal" => run_internal_tests as fn(),
            "rustfix-coverage-known-exceptions-accuracy" => rustfix_coverage_known_exceptions_accuracy as fn(),
            "ui-cargo-toml-metadata" => ui_cargo_toml_metadata as fn(),
            _ => panic!(
                "unknown speedtest: {speedtest} || accepted speedtests are: [ui, cargo, toml, internal, \
                 rustfix-coverage-known-exceptions-accuracy, ui-cargo-toml-metadata]"
            ),
        };
        // Run the selected tests SPEEDTEST_ITERATIONS times (1000 by default).
        let iterations = std::env::var("SPEEDTEST_ITERATIONS").map_or(1000, |s| {
            s.parse::<u64>()
                .unwrap_or_else(|_| panic!("Couldn't parse `{s}`, please use a valid u64"))
        });
        let mut sum = 0;
        for _ in 0..iterations {
            let start = std::time::Instant::now();
            f();
            sum += start.elapsed().as_millis();
        }
        println!(
            "average {} time: {} millis.",
            speedtest.to_uppercase(),
            sum / u128::from(iterations)
        );
    } else {
        run_ui();
        run_ui_toml();
        run_ui_cargo();
        run_internal_tests();
        rustfix_coverage_known_exceptions_accuracy();
        ui_cargo_toml_metadata();
    }
}

const RUSTFIX_COVERAGE_KNOWN_EXCEPTIONS: &[&str] = &[