2021-06-13 04:33:08 -05:00
|
|
|
use std::{
|
|
|
|
collections::HashSet,
|
|
|
|
path::{Path, PathBuf},
|
|
|
|
};
|
2020-03-17 04:26:29 -05:00
|
|
|
|
2021-07-03 14:11:03 -05:00
|
|
|
use walkdir::{DirEntry, WalkDir};
|
2021-03-08 12:39:09 -06:00
|
|
|
use xshell::{cmd, pushd, pushenv, read_file};
|
2021-03-01 11:16:23 -06:00
|
|
|
|
2021-07-03 14:11:03 -05:00
|
|
|
use crate::project_root;
|
2021-03-08 12:13:15 -06:00
|
|
|
|
2020-05-06 03:25:25 -05:00
|
|
|
/// Verifies that the whole workspace is formatted with the *stable*
/// toolchain's `rustfmt`; on a mismatch it reformats the tree in place
/// and still fails with the original check error.
#[test]
fn check_code_formatting() {
    let _guard = pushd(project_root()).unwrap();
    let _toolchain = pushenv("RUSTUP_TOOLCHAIN", "stable");

    // Make sure we are actually running the stable rustfmt, otherwise the
    // check below could disagree with CI.
    let version = cmd!("rustfmt --version").read().unwrap();
    if !version.contains("stable") {
        panic!(
            "Failed to run rustfmt from toolchain 'stable'. \
             Please run `rustup component add rustfmt --toolchain stable` to install it.",
        )
    }

    // Check first; if it fails, reformat so the developer's next run is
    // clean, but propagate the original failure for this test.
    let check = cmd!("cargo fmt -- --check").run();
    if check.is_err() {
        let _ = cmd!("cargo fmt").run();
    }
    check.unwrap()
}
|
2020-03-17 04:26:29 -05:00
|
|
|
|
2020-10-14 06:30:06 -05:00
|
|
|
/// Keeps `docs/dev/lsp-extensions.md` in sync with
/// `crates/rust-analyzer/src/lsp_ext.rs`: the markdown records a hash of
/// the source file on a `lsp_ext.rs hash:` line, which must match the
/// hash of the file's current contents.
#[test]
fn check_lsp_extensions_docs() {
    // Hash of the current source defining the LSP extensions.
    let expected_hash = {
        let lsp_ext_rs =
            read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap();
        stable_hash(lsp_ext_rs.as_str())
    };

    // Hash recorded in the docs, parsed as hex from the marker line.
    let actual_hash = {
        let lsp_extensions_md =
            read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
        let text = lsp_extensions_md
            .lines()
            // The first line starting with the marker carries the hash.
            .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
            .unwrap()
            .trim();
        u64::from_str_radix(text, 16).unwrap()
    };

    if actual_hash != expected_hash {
        panic!(
            "
lsp_ext.rs was changed without touching lsp-extensions.md.

Expected hash: {:x}
Actual hash:   {:x}

Please adjust docs/dev/lsp-extensions.md.
",
            expected_hash, actual_hash
        )
    }
}
|
|
|
|
|
2020-03-17 04:26:29 -05:00
|
|
|
/// Runs every per-file tidy check over each Rust source file under
/// `crates/`, then the cross-file checks (module docs, coverage marks).
#[test]
fn rust_files_are_tidy() {
    let mut docs = TidyDocs::default();
    let mut marks = TidyMarks::default();
    for path in rust_files() {
        let text = read_file(&path).unwrap();
        // Each check panics on the first violation it finds, so the order
        // below only determines which violation gets reported first.
        check_todo(&path, &text);
        check_dbg(&path, &text);
        check_test_attrs(&path, &text);
        check_trailing_ws(&path, &text);
        deny_clippy(&path, &text);
        docs.visit(&path, &text);
        marks.visit(&path, &text);
    }
    // Cross-file results are only meaningful once the whole tree is scanned.
    docs.finish();
    marks.finish();
}
|
|
|
|
|
2021-02-03 08:01:09 -06:00
|
|
|
/// Lints every workspace `Cargo.toml`: section headers must be bare
/// `[...]` lines, path (workspace-internal) dev-dependencies must NOT pin
/// a version, and path dependencies MUST pin one.
#[test]
fn cargo_files_are_tidy() {
    for cargo in cargo_files() {
        // The most recently seen `[section]` header line, if any.
        let mut section = None;
        for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() {
            let text = text.trim();
            if text.starts_with('[') {
                if !text.ends_with(']') {
                    panic!(
                        "\nplease don't add comments or trailing whitespace in section lines.\n\
                         {}:{}\n",
                        cargo.display(),
                        line_no + 1
                    )
                }
                section = Some(text);
                continue;
            }
            // Remove all whitespace so `path = "..."` matches `path=`.
            let text: String = text.split_whitespace().collect();
            if !text.contains("path=") {
                // Only path (internal) dependencies are checked.
                continue;
            }
            match section {
                // NB: this arm must come first — "dev-dependencies" also
                // `contains("dependencies")`, so the order is significant.
                Some(s) if s.contains("dev-dependencies") => {
                    if text.contains("version") {
                        panic!(
                            "\ncargo internal dev-dependencies should not have a version.\n\
                             {}:{}\n",
                            cargo.display(),
                            line_no + 1
                        );
                    }
                }
                Some(s) if s.contains("dependencies") => {
                    if !text.contains("version") {
                        panic!(
                            "\ncargo internal dependencies should have a version.\n\
                             {}:{}\n",
                            cargo.display(),
                            line_no + 1
                        );
                    }
                }
                // Lines before any section header, or in other sections,
                // are not checked.
                _ => {}
            }
        }
    }
}
|
|
|
|
|
2020-08-24 16:17:00 -05:00
|
|
|
/// Rejects merge commits (other than those authored by bors[bot]) in the
/// recent history; contributors are expected to rebase instead of merging.
#[test]
fn check_merge_commits() {
    // List non-bors merge commits among roughly the last 19 commits.
    let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
        .read()
        .unwrap();
    // Any output means at least one disallowed merge commit exists.
    if !stdout.is_empty() {
        panic!(
            "
Merge commits are not allowed in the history.

When updating a pull-request, please rebase your feature branch
on top of master by running `git rebase master`. If rebase fails,
you can re-apply your changes like this:

    # Just look around to see the current state.
    $ git status
    $ git log

    # Abort in-progress rebase and merges, if any.
    $ git rebase --abort
    $ git merge --abort

    # Make the branch point to the latest commit from master,
    # while maintaining your local changes uncommited.
    $ git reset --soft origin/master

    # Commit all changes in a single batch.
    $ git commit -am'My changes'

    # Verify that everything looks alright.
    $ git status
    $ git log

    # Push the changes. We did a rebase, so we need `--force` option.
    # `--force-with-lease` is a more safe (Rusty) version of `--force`.
    $ git push --force-with-lease

    # Verify that both local and remote branch point to the same commit.
    $ git log

And don't fear to mess something up during a rebase -- you can
always restore the previous state using `git ref-log`:

https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
        );
    }
}
|
|
|
|
|
2021-03-21 09:33:18 -05:00
|
|
|
/// Panics if `text` contains an `allow(clippy...)` attribute, except in a
/// small set of files that legitimately mention the pattern in strings.
fn deny_clippy(path: &Path, text: &str) {
    // Files exempt from the check.
    let exceptions = [
        // The documentation in string literals may contain anything for its own purposes
        "ide_db/src/helpers/generated_lints.rs",
        // The tests test clippy lint hovers
        "ide/src/hover.rs",
    ];
    if exceptions.iter().any(|p| path.ends_with(p)) {
        return;
    }

    // `\u{61}` is 'a', spelled as an escape so this very file does not
    // match its own forbidden pattern.
    if text.contains("\u{61}llow(clippy") {
        panic!(
            "\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow lint globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.

",
            path.display()
        )
    }
}
|
|
|
|
|
2020-07-14 10:14:00 -05:00
|
|
|
/// Asserts that the set of licenses used by all (transitive) dependencies
/// stays within an explicitly approved list; new licenses must be vetted
/// and added to `expected` by hand.
#[test]
fn check_licenses() {
    // Approved license strings. Kept lexicographically sorted so it can be
    // compared directly to the sorted/deduped list computed below.
    let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0 OR Artistic-2.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    // Poor man's JSON parsing: extract every `"license"` value from the
    // `cargo metadata` output without pulling in a JSON dependency.
    let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort_unstable();
    licenses.dedup();
    if licenses != expected {
        // Produce a readable two-way diff before failing.
        let mut diff = String::new();

        diff.push_str("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!("  {}\n", l)
            }
        }

        diff.push_str("\nMissing Licenses:\n");
        for &l in expected.iter() {
            if !licenses.contains(&l) {
                diff += &format!("  {}\n", l)
            }
        }

        panic!("different set of licenses!\n{}", diff);
    }
    assert_eq!(licenses, expected);
}
|
|
|
|
|
2020-03-17 04:26:29 -05:00
|
|
|
/// Rejects committed `TODO`/`TOOD` markers and `todo!` macros — FIXME is
/// the project's sanctioned marker — except in files that legitimately
/// need to produce or mention `todo!`.
fn check_todo(path: &Path, text: &str) {
    // Files that are allowed to mention todo.
    let exceptions = [
        // This file itself obviously needs to use todo (<- like this!).
        "tests/tidy.rs",
        // Some of our assists generate `todo!()`.
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        "handlers/fill_match_arms.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_db/src/helpers/generated_lints.rs",
    ];
    if exceptions.iter().any(|p| path.ends_with(p)) {
        return;
    }

    let has_marker = text.contains("TODO") || text.contains("TOOD") || text.contains("todo!");
    if !has_marker {
        return;
    }
    // Generated by an assist
    if text.contains("${0:todo!()}") {
        return;
    }
    panic!(
        "\nTODO markers or todo! macros should not be committed to the master branch,\n\
         use FIXME instead\n\
         {}\n",
        path.display(),
    )
}
|
|
|
|
|
2020-12-27 21:27:54 -06:00
|
|
|
/// Rejects committed `dbg!` macros, except in files that legitimately
/// produce, remove, or document them.
fn check_dbg(path: &Path, text: &str) {
    // Fast path: nothing to complain about.
    if !text.contains("dbg!") {
        return;
    }

    // Files that are allowed to mention dbg.
    let allowed = [
        // This file itself obviously needs to use dbg.
        "tests/tidy.rs",
        // Assists to remove `dbg!()`
        "handlers/remove_dbg.rs",
        // We have .dbg postfix
        "ide_completion/src/completions/postfix.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide_completion/src/lib.rs",
        "ide_db/src/helpers/generated_lints.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
    ];
    if allowed.iter().any(|p| path.ends_with(p)) {
        return;
    }

    panic!(
        "\ndbg! macros should not be committed to the master branch,\n\
         {}\n",
        path.display(),
    )
}
|
|
|
|
|
2021-06-15 08:54:43 -05:00
|
|
|
fn check_test_attrs(path: &Path, text: &str) {
|
|
|
|
let ignore_rule =
|
|
|
|
"https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/dev/style.md#ignore";
|
|
|
|
let need_ignore: &[&str] = &[
|
|
|
|
// Special case to run `#[ignore]` tests
|
|
|
|
"ide/src/runnables.rs",
|
|
|
|
// A legit test which needs to be ignored, as it takes too long to run
|
|
|
|
// :(
|
|
|
|
"hir_def/src/nameres/collector.rs",
|
2021-07-03 14:11:03 -05:00
|
|
|
// Long sourcegen test to generate lint completions.
|
|
|
|
"ide_completion/src/tests/sourcegen.rs",
|
2021-06-15 08:54:43 -05:00
|
|
|
// Obviously needs ignore.
|
|
|
|
"ide_assists/src/handlers/toggle_ignore.rs",
|
|
|
|
// See above.
|
|
|
|
"ide_assists/src/tests/generated.rs",
|
|
|
|
];
|
|
|
|
if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
|
|
|
|
panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),)
|
|
|
|
}
|
|
|
|
|
|
|
|
let panic_rule =
|
2021-06-15 09:20:11 -05:00
|
|
|
"https://github.com/rust-analyzer/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
|
2021-06-15 08:54:43 -05:00
|
|
|
let need_panic: &[&str] = &["test_utils/src/fixture.rs"];
|
|
|
|
if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
|
|
|
|
panic!(
|
|
|
|
"\ndon't add `#[should_panic]` tests, see:\n\n {}\n\n {}\n",
|
|
|
|
panic_rule,
|
|
|
|
path.display(),
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-17 04:46:46 -05:00
|
|
|
fn check_trailing_ws(path: &Path, text: &str) {
|
|
|
|
if is_exclude_dir(path, &["test_data"]) {
|
|
|
|
return;
|
|
|
|
}
|
2020-04-17 03:32:12 -05:00
|
|
|
for (line_number, line) in text.lines().enumerate() {
|
2020-03-17 04:46:46 -05:00
|
|
|
if line.chars().last().map(char::is_whitespace) == Some(true) {
|
2021-06-18 06:47:26 -05:00
|
|
|
panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
|
2020-03-17 04:46:46 -05:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-17 04:26:29 -05:00
|
|
|
/// Accumulates module-documentation violations across the whole tree so
/// they can be reported in one batch from `finish`.
#[derive(Default)]
struct TidyDocs {
    /// Modules (as display paths) lacking a `//!` module doc comment.
    missing_docs: Vec<String>,
    /// Files whose module docs open with a FIXME.
    contains_fixme: Vec<PathBuf>,
}
|
|
|
|
|
|
|
|
impl TidyDocs {
|
|
|
|
fn visit(&mut self, path: &Path, text: &str) {
|
2021-05-17 04:04:17 -05:00
|
|
|
// Tests and diagnostic fixes don't need module level comments.
|
2021-05-22 09:20:22 -05:00
|
|
|
if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
|
2020-03-17 04:46:46 -05:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
if is_exclude_file(path) {
|
2020-03-17 04:26:29 -05:00
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
let first_line = match text.lines().next() {
|
|
|
|
Some(it) => it,
|
|
|
|
None => return,
|
|
|
|
};
|
|
|
|
|
|
|
|
if first_line.starts_with("//!") {
|
|
|
|
if first_line.contains("FIXME") {
|
2020-05-31 02:59:38 -05:00
|
|
|
self.contains_fixme.push(path.to_path_buf());
|
2020-03-17 04:26:29 -05:00
|
|
|
}
|
|
|
|
} else {
|
2021-06-13 06:41:19 -05:00
|
|
|
if text.contains("// Feature:")
|
|
|
|
|| text.contains("// Assist:")
|
|
|
|
|| text.contains("// Diagnostic:")
|
|
|
|
{
|
2020-05-31 02:59:38 -05:00
|
|
|
return;
|
|
|
|
}
|
2020-03-17 04:26:29 -05:00
|
|
|
self.missing_docs.push(path.display().to_string());
|
|
|
|
}
|
|
|
|
|
|
|
|
fn is_exclude_file(d: &Path) -> bool {
|
2021-01-12 13:19:13 -06:00
|
|
|
let file_names = ["tests.rs", "famous_defs_fixture.rs"];
|
2020-03-17 04:26:29 -05:00
|
|
|
|
|
|
|
d.file_name()
|
|
|
|
.unwrap_or_default()
|
|
|
|
.to_str()
|
|
|
|
.map(|f_n| file_names.iter().any(|name| *name == f_n))
|
|
|
|
.unwrap_or(false)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn finish(self) {
|
|
|
|
if !self.missing_docs.is_empty() {
|
|
|
|
panic!(
|
|
|
|
"\nMissing docs strings\n\n\
|
|
|
|
modules:\n{}\n\n",
|
|
|
|
self.missing_docs.join("\n")
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2021-05-22 09:39:19 -05:00
|
|
|
for path in self.contains_fixme {
|
2020-03-17 04:26:29 -05:00
|
|
|
panic!("FIXME doc in a fully-documented crate: {}", path.display())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-17 04:46:46 -05:00
|
|
|
fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
|
2020-06-02 15:15:23 -05:00
|
|
|
p.strip_prefix(project_root())
|
|
|
|
.unwrap()
|
|
|
|
.components()
|
|
|
|
.rev()
|
|
|
|
.skip(1)
|
|
|
|
.filter_map(|it| it.as_os_str().to_str())
|
|
|
|
.any(|it| dirs_to_exclude.contains(&it))
|
2020-03-17 04:46:46 -05:00
|
|
|
}
|
2020-10-14 06:30:06 -05:00
|
|
|
|
2021-06-13 04:33:08 -05:00
|
|
|
/// Tracks `cov_mark`-style coverage marks across the tree: every `hit!`
/// must be paired with a `check!`/`check_count!` and vice versa.
#[derive(Default)]
struct TidyMarks {
    /// Mark names seen in `hit!(...)` invocations.
    hits: HashSet<String>,
    /// Mark names seen in `check!(...)` or `check_count!(...)` invocations.
    checks: HashSet<String>,
}
|
|
|
|
|
|
|
|
impl TidyMarks {
|
|
|
|
fn visit(&mut self, _path: &Path, text: &str) {
|
|
|
|
for line in text.lines() {
|
|
|
|
if let Some(mark) = find_mark(line, "hit") {
|
|
|
|
self.hits.insert(mark.to_string());
|
|
|
|
}
|
|
|
|
if let Some(mark) = find_mark(line, "check") {
|
|
|
|
self.checks.insert(mark.to_string());
|
|
|
|
}
|
|
|
|
if let Some(mark) = find_mark(line, "check_count") {
|
|
|
|
self.checks.insert(mark.to_string());
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn finish(self) {
|
|
|
|
assert!(!self.hits.is_empty());
|
|
|
|
|
|
|
|
let diff: Vec<_> =
|
|
|
|
self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
|
|
|
|
|
|
|
|
if !diff.is_empty() {
|
|
|
|
panic!("unpaired marks: {:?}", diff)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-10-14 06:30:06 -05:00
|
|
|
// NOTE: body unchanged; the deprecated SipHasher is deliberate — its output
// is fixed, which is exactly what a persisted, comparable hash needs.
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
    use std::hash::{Hash, Hasher, SipHasher};

    // Strip `\r` so the digest is independent of line endings (CRLF vs LF).
    let text = text.replace('\r', "");
    let mut hasher = SipHasher::default();
    text.hash(&mut hasher);
    hasher.finish()
}
|
2021-06-13 04:33:08 -05:00
|
|
|
|
|
|
|
/// Extracts the identifier argument of a `mark!(ident...)` invocation from
/// `text`, e.g. `find_mark("cov_mark::hit!(foo);", "hit") == Some("foo")`.
///
/// Scans *all* occurrences of `mark`: previously only the first substring
/// match was considered, so an incidental occurrence (e.g. `hit` inside
/// the word `white`) earlier on the line masked a real `mark!(...)` later
/// on the same line.
fn find_mark<'a>(text: &'a str, mark: &'static str) -> Option<&'a str> {
    let mut rest = text;
    while let Some(idx) = rest.find(mark) {
        let after_mark = &rest[idx + mark.len()..];
        if let Some(args) = after_mark.strip_prefix("!(") {
            // The mark name runs up to the first non-identifier char;
            // an unterminated invocation yields None, as before.
            let end = args.find(|c: char| !(c.is_alphanumeric() || c == '_'))?;
            return Some(&args[..end]);
        }
        // Not a real invocation — keep scanning past this occurrence.
        rest = after_mark;
    }
    None
}
|
2021-07-03 14:11:03 -05:00
|
|
|
|
|
|
|
/// All Rust source files under the workspace's `crates/` directory.
fn rust_files() -> impl Iterator<Item = PathBuf> {
    rust_files_in(&project_root().join("crates"))
}
|
|
|
|
|
|
|
|
fn cargo_files() -> impl Iterator<Item = PathBuf> {
|
|
|
|
files_in(&project_root(), "toml")
|
|
|
|
.filter(|path| path.file_name().map(|it| it == "Cargo.toml").unwrap_or(false))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// All `.rs` files under `path`, recursively.
fn rust_files_in(path: &Path) -> impl Iterator<Item = PathBuf> {
    files_in(path, "rs")
}
|
|
|
|
|
|
|
|
fn files_in(path: &Path, ext: &'static str) -> impl Iterator<Item = PathBuf> {
|
|
|
|
let iter = WalkDir::new(path);
|
|
|
|
return iter
|
|
|
|
.into_iter()
|
|
|
|
.filter_entry(|e| !is_hidden(e))
|
|
|
|
.map(|e| e.unwrap())
|
|
|
|
.filter(|e| !e.file_type().is_dir())
|
|
|
|
.map(|e| e.into_path())
|
|
|
|
.filter(move |path| path.extension().map(|it| it == ext).unwrap_or(false));
|
|
|
|
|
|
|
|
fn is_hidden(entry: &DirEntry) -> bool {
|
|
|
|
entry.file_name().to_str().map(|s| s.starts_with('.')).unwrap_or(false)
|
|
|
|
}
|
|
|
|
}
|