// rust/crates/rust-analyzer/tests/slow-tests/tidy.rs

#![allow(clippy::disallowed_types, clippy::print_stderr)]
use std::{
collections::HashSet,
path::{Path, PathBuf},
};
use xshell::Shell;
#[cfg(not(feature = "in-rust-tree"))]
use xshell::cmd;
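
// Checks that the hash of crates/rust-analyzer/src/lsp/ext.rs recorded in
// docs/dev/lsp-extensions.md matches the file's current contents, so the LSP
// extension docs cannot silently go stale.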
#[test]
fn check_lsp_extensions_docs() {
let sh = &Shell::new().unwrap();
let expected_hash = {
let lsp_ext_rs = sh
.read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp/ext.rs"))
.unwrap();
stable_hash(lsp_ext_rs.as_str())
};
let actual_hash = {
let lsp_extensions_md =
sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap();
let text = lsp_extensions_md
.lines()
.find_map(|line| line.strip_prefix("lsp/ext.rs hash:"))
.unwrap()
.trim();
u64::from_str_radix(text, 16).unwrap()
};
if actual_hash != expected_hash {
panic!(
"
lsp/ext.rs was changed without touching lsp-extensions.md.
Expected hash: {expected_hash:x}
Actual hash: {actual_hash:x}
Please adjust docs/dev/lsp-extensions.md.
"
)
}
}
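
// Walks every file under crates/ and runs the per-file checks: test attribute
// hygiene, trailing whitespace, module docs, mark pairing, and Cargo.toml layout.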
#[test]
fn files_are_tidy() {
let sh = &Shell::new().unwrap();
let files = sourcegen::list_files(&sourcegen::project_root().join("crates"));
let mut tidy_docs = TidyDocs::default();
let mut tidy_marks = TidyMarks::default();
for path in files {
let extension = path.extension().unwrap_or_default().to_str().unwrap_or_default();
match extension {
"rs" => {
let text = sh.read_file(&path).unwrap();
check_test_attrs(&path, &text);
check_trailing_ws(&path, &text);
tidy_docs.visit(&path, &text);
tidy_marks.visit(&path, &text);
}
"toml" => {
let text = sh.read_file(&path).unwrap();
check_cargo_toml(&path, text);
}
_ => (),
}
}
tidy_docs.finish();
tidy_marks.finish();
}
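
// Enforces Cargo.toml conventions: section headers must be bare `[...]` lines, and
// path dependencies in a `dependencies` section need a `version`, while those in
// `[dev-dependencies]` must not have one.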
fn check_cargo_toml(path: &Path, text: String) {
let mut section = None;
for (line_no, text) in text.lines().enumerate() {
let text = text.trim();
if text.starts_with('[') {
if !text.ends_with(']') {
panic!(
"\nplease don't add comments or trailing whitespace in section lines.\n\
{}:{}\n",
path.display(),
line_no + 1
)
}
section = Some(text);
continue;
}
let text: String = text.split_whitespace().collect();
if !text.contains("path=") {
continue;
}
match section {
Some(s) if s.contains("dev-dependencies") => {
if text.contains("version") {
panic!(
"\ncargo internal dev-dependencies should not have a version.\n\
{}:{}\n",
path.display(),
line_no + 1
);
}
}
Some(s) if s.contains("dependencies") => {
if !text.contains("version") {
panic!(
"\ncargo internal dependencies should have a version.\n\
{}:{}\n",
path.display(),
line_no + 1
);
}
}
_ => {}
}
}
}
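
// Compares the set of licenses reported by `cargo metadata` against an allow-list,
// so a new dependency with an unexpected license shows up as a test failure.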
#[cfg(not(feature = "in-rust-tree"))]
#[test]
fn check_licenses() {
let sh = &Shell::new().unwrap();
    let expected = "
(MIT OR Apache-2.0) AND Unicode-DFS-2016
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0 WITH LLVM-exception
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
CC0-1.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
.lines()
.filter(|it| !it.is_empty())
.collect::<Vec<_>>();
let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
let mut licenses = meta
.split(|c| c == ',' || c == '{' || c == '}')
.filter(|it| it.contains(r#""license""#))
.map(|it| it.trim())
.map(|it| it[r#""license":"#.len()..].trim_matches('"'))
.collect::<Vec<_>>();
licenses.sort_unstable();
licenses.dedup();
if licenses != expected {
let mut diff = String::new();
diff.push_str("New Licenses:\n");
for &l in licenses.iter() {
if !expected.contains(&l) {
diff += &format!(" {l}\n")
}
}
diff.push_str("\nMissing Licenses:\n");
for &l in expected.iter() {
if !licenses.contains(&l) {
diff += &format!(" {l}\n")
}
}
panic!("different set of licenses!\n{diff}");
}
assert_eq!(licenses, expected);
}
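
// Rejects `#[ignore]` and `#[should_panic]` in tests, except for the files on the
// allow-lists below; the style guide links in the panic messages explain why both
// attributes are banned.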
fn check_test_attrs(path: &Path, text: &str) {
let ignore_rule =
"https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
let need_ignore: &[&str] = &[
// This file.
"slow-tests/tidy.rs",
// Special case to run `#[ignore]` tests.
"ide/src/runnables.rs",
// A legit test which needs to be ignored, as it takes too long to run
// :(
"hir-def/src/nameres/collector.rs",
// Long sourcegen test to generate lint completions.
"ide-db/src/tests/sourcegen_lints.rs",
// Obviously needs ignore.
"ide-assists/src/handlers/toggle_ignore.rs",
// See above.
"ide-assists/src/tests/generated.rs",
];
if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),)
}
let panic_rule =
"https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
let need_panic: &[&str] = &[
// This file.
"slow-tests/tidy.rs",
"test-utils/src/fixture.rs",
// Generated code from lints contains doc tests in string literals.
"ide-db/src/generated/lints.rs",
];
if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
panic!(
"\ndon't add `#[should_panic]` tests, see:\n\n {}\n\n {}\n",
panic_rule,
path.display(),
)
}
}
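
// Flags trailing whitespace on any line, skipping `test_data` directories.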
fn check_trailing_ws(path: &Path, text: &str) {
if is_exclude_dir(path, &["test_data"]) {
return;
}
for (line_number, line) in text.lines().enumerate() {
if line.chars().last().is_some_and(char::is_whitespace) {
panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
}
}
}
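
// Collects modules that lack a `//!` module-level doc comment, plus module docs
// that still contain a FIXME.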
#[derive(Default)]
struct TidyDocs {
missing_docs: Vec<String>,
contains_fixme: Vec<PathBuf>,
}
impl TidyDocs {
fn visit(&mut self, path: &Path, text: &str) {
// Tests and diagnostic fixes don't need module level comments.
if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
return;
}
if is_exclude_file(path) {
return;
}
let first_line = match text.lines().next() {
Some(it) => it,
None => return,
};
if first_line.starts_with("//!") {
if first_line.contains("FIXME") {
self.contains_fixme.push(path.to_path_buf());
}
} else {
if text.contains("// Feature:")
|| text.contains("// Assist:")
|| text.contains("// Diagnostic:")
{
return;
}
self.missing_docs.push(path.display().to_string());
}
fn is_exclude_file(d: &Path) -> bool {
let file_names = ["tests.rs", "famous_defs_fixture.rs"];
d.file_name()
.unwrap_or_default()
.to_str()
.map(|f_n| file_names.iter().any(|name| *name == f_n))
.unwrap_or(false)
}
}
fn finish(self) {
if !self.missing_docs.is_empty() {
panic!(
"\nMissing docs strings\n\n\
modules:\n{}\n\n",
self.missing_docs.join("\n")
)
}
if let Some(path) = self.contains_fixme.first() {
panic!("FIXME doc in a fully-documented crate: {}", path.display())
}
}
}
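
// Returns true if any directory component of `p` (relative to the project root,
// excluding the file name itself) matches one of `dirs_to_exclude`.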
fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
p.strip_prefix(sourcegen::project_root())
.unwrap()
.components()
.rev()
.skip(1)
.filter_map(|it| it.as_os_str().to_str())
.any(|it| dirs_to_exclude.contains(&it))
}
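
// Pairs up test marks: every `hit!(name)` seen in the sources must have a matching
// `check!(name)` or `check_count!(name)`, and vice versa.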
#[derive(Default)]
struct TidyMarks {
hits: HashSet<String>,
checks: HashSet<String>,
}
impl TidyMarks {
fn visit(&mut self, _path: &Path, text: &str) {
find_marks(&mut self.hits, text, "hit");
find_marks(&mut self.checks, text, "check");
find_marks(&mut self.checks, text, "check_count");
}
fn finish(self) {
assert!(!self.hits.is_empty());
let diff: Vec<_> =
self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
if !diff.is_empty() {
panic!("unpaired marks: {diff:?}")
}
}
}
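
// Produces the stable SipHash value (with `\r` stripped first) that
// check_lsp_extensions_docs compares against docs/dev/lsp-extensions.md.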
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
use std::hash::{Hash, Hasher, SipHasher};
let text = text.replace('\r', "");
let mut hasher = SipHasher::default();
text.hash(&mut hasher);
hasher.finish()
}
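
// Scans `text` for `<mark>!(<name>)` invocations and collects each `name` into `set`.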
fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) {
let mut text = text;
let mut prev_text = "";
while text != prev_text {
prev_text = text;
if let Some(idx) = text.find(mark) {
text = &text[idx + mark.len()..];
if let Some(stripped_text) = text.strip_prefix("!(") {
text = stripped_text.trim_start();
if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) {
let mark_text = &text[..idx2];
set.insert(mark_text.to_owned());
text = &text[idx2..];
}
}
}
}
}