// rust/crates/ra_syntax/tests/test.rs
extern crate ra_syntax;
extern crate test_utils;
extern crate walkdir;

use std::{
    fmt::Write,
    path::{PathBuf, Component},
};

use test_utils::{project_dir, dir_tests, read_text, collect_tests};
use ra_syntax::{SourceFile, AstNode, fuzz};

/// Runs every fixture under `lexer/`: tokenizes the input text and lets
/// `dir_tests` compare the token dump against the expected output file.
#[test]
fn lexer_tests() {
    dir_tests(&test_data_dir(), &["lexer"], |text, _path| {
        dump_tokens(&ra_syntax::tokenize(text), text)
    })
}
/// Parses every fixture under the `ok` directories expecting a clean parse,
/// and every fixture under the `err` directories expecting at least one
/// syntax error. In both cases the syntax-tree dump is compared against the
/// expected output file by `dir_tests`.
#[test]
fn parser_tests() {
    // The two passes differ only in whether errors are required or forbidden,
    // so the shared parse/check/dump logic lives in one helper.
    fn parse_and_dump(text: &str, path: &std::path::Path, expect_errors: bool) -> String {
        let file = SourceFile::parse(text);
        let errors = file.errors();
        if expect_errors {
            assert_ne!(
                &*errors,
                &[] as &[ra_syntax::SyntaxError],
                "There should be errors in the file {:?}",
                path.display()
            );
        } else {
            assert_eq!(
                &*errors,
                &[] as &[ra_syntax::SyntaxError],
                "There should be no errors in the file {:?}",
                path.display()
            );
        }
        file.syntax().debug_dump()
    }

    dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
        parse_and_dump(text, path, false)
    });
    dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
        parse_and_dump(text, path, true)
    });
}
/// Re-runs every previously collected parser fuzz failure to guard against
/// regressions.
#[test]
fn parser_fuzz_tests() {
    let cases = collect_tests(&test_data_dir(), &["parser/fuzz-failures"]);
    for (_name, text) in cases {
        fuzz::check_parser(&text);
    }
}
/// Replays collected reparse fuzz failures: each fixture encodes an edit
/// scenario which `CheckReparse` decodes and re-executes.
#[test]
fn reparse_fuzz_tests() {
    let cases = collect_tests(&test_data_dir(), &["reparse/fuzz-failures"]);
    for (_name, text) in cases {
        let check = fuzz::CheckReparse::from_data(text.as_bytes()).unwrap();
        println!("{:?}", check);
        check.run();
    }
}
/// Test that rust-analyzer can parse and validate its own source code.
/// TODO: Use this as a benchmark
#[test]
fn self_hosting_parsing() {
    use std::ffi::OsStr;
    let dir = project_dir().join("crates");
    let mut count = 0;
    for entry in walkdir::WalkDir::new(dir)
        .into_iter()
        // Skip anything under a `data` directory — the
        // crates/ra_syntax/tests/data fixtures are not valid standalone Rust.
        .filter_entry(|entry| {
            !entry.path().components().any(|component| {
                component == Component::Normal(OsStr::new("data"))
            })
        })
        .map(|e| e.unwrap())
        // Keep only regular `.rs` files. `file_type()` reuses the metadata
        // WalkDir already fetched instead of re-statting via `path().is_dir()`.
        .filter(|entry| {
            entry.file_type().is_file() && entry.path().extension() == Some(OsStr::new("rs"))
        })
    {
        count += 1;
        let text = read_text(entry.path());
        let node = SourceFile::parse(&text);
        let errors = node.errors();
        assert_eq!(&*errors, &[], "There should be no errors in the file {:?}", entry);
    }
    // Guard against the walk silently matching nothing (e.g. wrong CWD).
    assert!(
        count > 30,
        "self_hosting_parsing found too few files - is it running in the right directory?"
    )
}
2018-01-07 05:56:08 -06:00
fn test_data_dir() -> PathBuf {
2018-09-16 04:54:24 -05:00
project_dir().join("crates/ra_syntax/tests/data")
2018-08-11 02:03:03 -05:00
}
2018-09-16 04:54:24 -05:00
fn dump_tokens(tokens: &[ra_syntax::Token], text: &str) -> String {
2018-08-11 02:03:03 -05:00
let mut acc = String::new();
let mut offset = 0;
for token in tokens {
let len: u32 = token.len.into();
let len = len as usize;
let token_text = &text[offset..offset + len];
offset += len;
write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
}
acc
2018-02-03 03:51:06 -06:00
}