rust/xtask/src/tidy.rs

use std::path::{Path, PathBuf};
use xshell::{cmd, pushd, pushenv, read_file};
use crate::{cargo_files, codegen, project_root, rust_files};
#[test]
fn generate_grammar() {
codegen::generate_syntax().unwrap()
}
#[test]
fn generate_parser_tests() {
codegen::generate_parser_tests().unwrap()
}
#[test]
fn generate_assists_tests() {
codegen::generate_assists_tests().unwrap();
}
/// This clones the rustc repo, and so is not worth keeping up-to-date. We update
/// manually by un-ignoring the test from time to time.
#[test]
#[ignore]
fn generate_lint_completions() {
codegen::generate_lint_completions().unwrap()
}
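/// Checks that the whole workspace is formatted with stable `rustfmt`.
/// On failure, `cargo fmt` is run so the fix is already applied locally before the test panics.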
#[test]
fn check_code_formatting() {
let _dir = pushd(project_root()).unwrap();
let _e = pushenv("RUSTUP_TOOLCHAIN", "stable");
crate::ensure_rustfmt().unwrap();
let res = cmd!("cargo fmt -- --check").run();
if res.is_err() {
let _ = cmd!("cargo fmt").run();
}
res.unwrap()
}
#[test]
fn smoke_test_generate_documentation() {
codegen::docs().unwrap()
}
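/// Ensures that `docs/dev/lsp-extensions.md` stays in sync with `lsp_ext.rs`
/// by comparing a stable hash of the source file with the hash recorded in the document.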
#[test]
fn check_lsp_extensions_docs() {
let expected_hash = {
let lsp_ext_rs =
read_file(project_root().join("crates/rust-analyzer/src/lsp_ext.rs")).unwrap();
stable_hash(lsp_ext_rs.as_str())
};
let actual_hash = {
let lsp_extensions_md =
read_file(project_root().join("docs/dev/lsp-extensions.md")).unwrap();
let text = lsp_extensions_md
.lines()
.find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
.unwrap()
.trim();
u64::from_str_radix(text, 16).unwrap()
};
if actual_hash != expected_hash {
panic!(
"
lsp_ext.rs was changed without touching lsp-extensions.md.
Expected hash: {:x}
Actual hash: {:x}
Please adjust docs/dev/lsp-extensions.md.
",
expected_hash, actual_hash
)
}
}
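/// Runs the per-file checks below (TODO markers, `dbg!`, trailing whitespace,
/// clippy `allow`s, and module-level docs) over every file returned by `rust_files()`.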
#[test]
fn rust_files_are_tidy() {
let mut tidy_docs = TidyDocs::default();
for path in rust_files() {
let text = read_file(&path).unwrap();
check_todo(&path, &text);
check_dbg(&path, &text);
check_trailing_ws(&path, &text);
deny_clippy(&path, &text);
tidy_docs.visit(&path, &text);
}
tidy_docs.finish();
}
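/// Checks every `Cargo.toml`: section headers must be plain `[...]` lines, and
/// internal `path` dependencies must specify a version under `[dependencies]`
/// but not under `[dev-dependencies]`.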
#[test]
fn cargo_files_are_tidy() {
for cargo in cargo_files() {
let mut section = None;
for (line_no, text) in read_file(&cargo).unwrap().lines().enumerate() {
let text = text.trim();
if text.starts_with('[') {
if !text.ends_with(']') {
panic!(
"\nplease don't add comments or trailing whitespace in section lines.\n\
{}:{}\n",
cargo.display(),
line_no + 1
)
}
section = Some(text);
continue;
}
let text: String = text.split_whitespace().collect();
if !text.contains("path=") {
continue;
}
match section {
Some(s) if s.contains("dev-dependencies") => {
if text.contains("version") {
panic!(
"\ncargo internal dev-dependencies should not have a version.\n\
{}:{}\n",
cargo.display(),
line_no + 1
);
}
}
Some(s) if s.contains("dependencies") => {
if !text.contains("version") {
panic!(
"\ncargo internal dependencies should have a version.\n\
{}:{}\n",
cargo.display(),
line_no + 1
);
}
}
_ => {}
}
}
}
}
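/// Checks that the last 19 commits contain no merge commits other than the ones
/// created by bors.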
#[test]
fn check_merge_commits() {
let stdout = cmd!("git rev-list --merges --invert-grep --author 'bors\\[bot\\]' HEAD~19..")
.read()
.unwrap();
if !stdout.is_empty() {
panic!(
"
Merge commits are not allowed in the history.
When updating a pull request, please rebase your feature branch
on top of master by running `git rebase master`. If rebase fails,
you can re-apply your changes like this:
# Just look around to see the current state.
$ git status
$ git log
# Abort in-progress rebase and merges, if any.
$ git rebase --abort
$ git merge --abort
# Make the branch point to the latest commit from master,
# while keeping your local changes uncommitted.
$ git reset --soft origin/master
# Commit all changes in a single batch.
$ git commit -am'My changes'
# Verify that everything looks alright.
$ git status
$ git log
# Push the changes. We did a rebase, so we need the `--force` option.
# `--force-with-lease` is a safer (Rusty) version of `--force`.
$ git push --force-with-lease
# Verify that both the local and remote branches point to the same commit.
$ git log
And don't be afraid of messing something up during a rebase -- you can
always restore the previous state using `git reflog`:
https://github.blog/2015-06-08-how-to-undo-almost-anything-with-git/#redo-after-undo-local
"
);
}
}
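/// Forbids `allow(clippy::...)` attributes, except in the files listed in `ignore`.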
fn deny_clippy(path: &Path, text: &str) {
let ignore = &[
// The documentation in string literals may contain anything for its own purposes
"ide_db/src/helpers/generated_lints.rs",
// The tests test clippy lint hovers
"ide/src/hover.rs",
];
if ignore.iter().any(|p| path.ends_with(p)) {
return;
}
if text.contains("\u{61}llow(clippy") {
panic!(
"\n\nallowing lints is forbidden: {}.
rust-analyzer intentionally doesn't check clippy on CI.
You can allow lints globally via `xtask clippy`.
See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
",
path.display()
)
}
}
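/// Asserts that the set of license identifiers reported by `cargo metadata`
/// exactly matches the expected list of licenses.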
#[test]
fn check_licenses() {
let expected = "
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0 OR Artistic-2.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
.lines()
.filter(|it| !it.is_empty())
.collect::<Vec<_>>();
let meta = cmd!("cargo metadata --format-version 1").read().unwrap();
let mut licenses = meta
.split(|c| c == ',' || c == '{' || c == '}')
.filter(|it| it.contains(r#""license""#))
.map(|it| it.trim())
.map(|it| it[r#""license":"#.len()..].trim_matches('"'))
.collect::<Vec<_>>();
licenses.sort_unstable();
licenses.dedup();
if licenses != expected {
let mut diff = String::new();
diff.push_str("New Licenses:\n");
for &l in licenses.iter() {
if !expected.contains(&l) {
diff += &format!(" {}\n", l)
}
}
diff.push_str("\nMissing Licenses:\n");
for &l in expected.iter() {
if !licenses.contains(&l) {
diff += &format!(" {}\n", l)
}
}
panic!("different set of licenses!\n{}", diff);
}
assert_eq!(licenses, expected);
}
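/// Forbids `TODO`/`TOOD` markers and `todo!()` macros in favor of `FIXME`,
/// except in the files listed in `need_todo`.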
fn check_todo(path: &Path, text: &str) {
let need_todo = &[
// This file itself obviously needs to use todo (<- like this!).
"tests/tidy.rs",
// Some of our assists generate `todo!()`.
"handlers/add_turbo_fish.rs",
"handlers/generate_function.rs",
"handlers/fill_match_arms.rs",
// To support generating `todo!()` in assists, we have `expr_todo()` in
// `ast::make`.
"ast/make.rs",
// The documentation in string literals may contain anything for its own purposes
"ide_db/src/helpers/generated_lints.rs",
];
if need_todo.iter().any(|p| path.ends_with(p)) {
return;
}
if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
// Generated by an assist
if text.contains("${0:todo!()}") {
return;
}
panic!(
"\nTODO markers or todo! macros should not be committed to the master branch,\n\
use FIXME instead\n\
{}\n",
path.display(),
)
}
}
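/// Forbids committed `dbg!` invocations, except in the files listed in `need_dbg`.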
fn check_dbg(path: &Path, text: &str) {
let need_dbg = &[
// This file itself obviously needs to use dbg.
"tests/tidy.rs",
// Assists to remove `dbg!()`
"handlers/remove_dbg.rs",
// We have .dbg postfix
"ide_completion/src/completions/postfix.rs",
// The documentation in string literals may contain anything for its own purposes
"ide_completion/src/lib.rs",
"ide_db/src/helpers/generated_lints.rs",
// Generated test for the `remove_dbg` doc test
"src/tests/generated.rs",
];
if need_dbg.iter().any(|p| path.ends_with(p)) {
return;
}
if text.contains("dbg!") {
panic!(
"\ndbg! macros should not be committed to the master branch,\n\
{}\n",
path.display(),
)
}
}
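/// Forbids trailing whitespace, except under `test_data` directories.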
fn check_trailing_ws(path: &Path, text: &str) {
if is_exclude_dir(path, &["test_data"]) {
return;
}
for (line_number, line) in text.lines().enumerate() {
if line.chars().last().map(char::is_whitespace) == Some(true) {
panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
}
}
}
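/// Collects modules that are missing a `//!` module-level doc comment, as well
/// as modules whose first doc line still contains `FIXME`.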
#[derive(Default)]
struct TidyDocs {
missing_docs: Vec<String>,
contains_fixme: Vec<PathBuf>,
}
impl TidyDocs {
fn visit(&mut self, path: &Path, text: &str) {
// Tests and diagnostic fixes don't need module-level comments.
if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
return;
}
if is_exclude_file(path) {
return;
}
let first_line = match text.lines().next() {
Some(it) => it,
None => return,
};
if first_line.starts_with("//!") {
if first_line.contains("FIXME") {
self.contains_fixme.push(path.to_path_buf());
}
} else {
if text.contains("// Feature:") || text.contains("// Assist:") {
return;
}
self.missing_docs.push(path.display().to_string());
}
fn is_exclude_file(d: &Path) -> bool {
let file_names = ["tests.rs", "famous_defs_fixture.rs"];
d.file_name()
.unwrap_or_default()
.to_str()
.map(|f_n| file_names.iter().any(|name| *name == f_n))
.unwrap_or(false)
}
}
fn finish(self) {
if !self.missing_docs.is_empty() {
panic!(
"\nMissing doc strings\n\n\
modules:\n{}\n\n",
self.missing_docs.join("\n")
)
}
for path in self.contains_fixme {
panic!("FIXME doc in a fully-documented crate: {}", path.display())
}
}
}
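/// Returns `true` if any directory component of `p` (relative to the project
/// root, excluding the file name itself) is listed in `dirs_to_exclude`.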
fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
p.strip_prefix(project_root())
.unwrap()
.components()
.rev()
.skip(1)
.filter_map(|it| it.as_os_str().to_str())
.any(|it| dirs_to_exclude.contains(&it))
}
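/// Computes a hash of `text` that is stable across runs: `\r` is stripped so the
/// result does not depend on line endings, and the deprecated `SipHasher` is used
/// because its default keys are fixed.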
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
use std::hash::{Hash, Hasher, SipHasher};
let text = text.replace('\r', "");
let mut hasher = SipHasher::default();
text.hash(&mut hasher);
hasher.finish()
}