2018-09-16 04:54:24 -05:00
|
|
|
extern crate ra_syntax;
|
2018-08-25 06:26:34 -05:00
|
|
|
extern crate test_utils;
|
2018-08-13 09:42:43 -05:00
|
|
|
extern crate walkdir;
|
2018-01-07 05:56:08 -06:00
|
|
|
|
2018-07-30 07:25:52 -05:00
|
|
|
use std::{
|
2018-10-15 16:44:23 -05:00
|
|
|
fmt::Write,
|
2018-12-23 05:15:46 -06:00
|
|
|
path::{PathBuf, Component},
|
2018-07-30 07:25:52 -05:00
|
|
|
};
|
2018-01-07 05:56:08 -06:00
|
|
|
|
2018-12-23 05:05:54 -06:00
|
|
|
use test_utils::{project_dir, dir_tests, read_text, collect_tests};
|
2018-09-16 04:54:24 -05:00
|
|
|
use ra_syntax::{
|
2018-10-15 16:44:23 -05:00
|
|
|
utils::{check_fuzz_invariants, dump_tree},
|
2018-11-07 09:32:33 -06:00
|
|
|
SourceFileNode,
|
2018-08-25 06:45:17 -05:00
|
|
|
};
|
2018-08-25 04:10:35 -05:00
|
|
|
|
2018-08-11 02:03:03 -05:00
|
|
|
#[test]
fn lexer_tests() {
    // For every fixture under `lexer/`, tokenize the input and compare the
    // rendered token dump against the expected output managed by `dir_tests`.
    dir_tests(&test_data_dir(), &["lexer"], |text, _| {
        dump_tokens(&ra_syntax::tokenize(text), text)
    })
}
|
|
|
|
|
2018-09-10 13:14:09 -05:00
|
|
|
#[test]
|
|
|
|
fn parser_tests() {
|
2018-12-23 05:15:46 -06:00
|
|
|
dir_tests(
|
|
|
|
&test_data_dir(),
|
|
|
|
&["parser/inline/ok", "parser/ok"],
|
|
|
|
|text, path| {
|
|
|
|
let file = SourceFileNode::parse(text);
|
|
|
|
let errors = file.errors();
|
|
|
|
assert_eq!(
|
|
|
|
&*errors,
|
|
|
|
&[] as &[ra_syntax::SyntaxError],
|
|
|
|
"There should be no errors in the file {:?}",
|
|
|
|
path.display()
|
|
|
|
);
|
|
|
|
dump_tree(file.syntax())
|
|
|
|
},
|
|
|
|
);
|
|
|
|
dir_tests(
|
|
|
|
&test_data_dir(),
|
|
|
|
&["parser/err", "parser/inline/err"],
|
|
|
|
|text, path| {
|
|
|
|
let file = SourceFileNode::parse(text);
|
|
|
|
let errors = file.errors();
|
|
|
|
assert_ne!(
|
|
|
|
&*errors,
|
|
|
|
&[] as &[ra_syntax::SyntaxError],
|
|
|
|
"There should be errors in the file {:?}",
|
|
|
|
path.display()
|
|
|
|
);
|
|
|
|
dump_tree(file.syntax())
|
|
|
|
},
|
|
|
|
);
|
2018-09-10 13:14:09 -05:00
|
|
|
}
|
|
|
|
|
|
|
|
#[test]
fn parser_fuzz_tests() {
    // Re-run every previously discovered fuzz failure and check that the
    // parser's internal invariants still hold on each of them.
    let failures = collect_tests(&test_data_dir(), &["parser/fuzz-failures"]);
    for (_name, text) in failures {
        check_fuzz_invariants(&text)
    }
}
|
|
|
|
|
2018-12-09 13:19:23 -06:00
|
|
|
/// Test that rust-analyzer can parse its own source: every `.rs` file under
/// this repository's `crates/` tree must parse with zero syntax errors.
/// TODO: Use this as a benchmark
#[test]
fn self_hosting_parsing() {
    use std::ffi::OsStr;

    let dir = project_dir().join("crates");
    let mut count = 0;

    // True when any path component is literally `data`, i.e. the entry lives
    // under crates/ra_syntax/tests/data — those fixtures intentionally
    // contain invalid Rust and must be excluded.
    let in_data_dir = |entry: &walkdir::DirEntry| {
        entry
            .path()
            .components()
            .any(|component| component == Component::Normal(OsStr::new("data")))
    };

    for entry in walkdir::WalkDir::new(dir)
        .into_iter()
        .filter_entry(|entry| !in_data_dir(entry))
        .map(|e| e.unwrap())
        // Keep only non-directory entries with an `.rs` extension.
        .filter(|entry| {
            !entry.path().is_dir() && (entry.path().extension() == Some(OsStr::new("rs")))
        })
    {
        count += 1;
        let text = read_text(entry.path());
        let node = SourceFileNode::parse(&text);
        let errors = node.errors();
        assert_eq!(
            &*errors,
            &[],
            "There should be no errors in the file {:?}",
            entry
        );
    }

    // Guard against silently passing because the walk found nothing
    // (e.g. when run from the wrong working directory).
    assert!(
        count > 30,
        "self_hosting_parsing found too few files - is it running in the right directory?"
    )
}
|
2018-01-07 05:56:08 -06:00
|
|
|
|
|
|
|
fn test_data_dir() -> PathBuf {
|
2018-09-16 04:54:24 -05:00
|
|
|
project_dir().join("crates/ra_syntax/tests/data")
|
2018-08-11 02:03:03 -05:00
|
|
|
}
|
|
|
|
|
2018-09-16 04:54:24 -05:00
|
|
|
fn dump_tokens(tokens: &[ra_syntax::Token], text: &str) -> String {
|
2018-08-11 02:03:03 -05:00
|
|
|
let mut acc = String::new();
|
|
|
|
let mut offset = 0;
|
|
|
|
for token in tokens {
|
|
|
|
let len: u32 = token.len.into();
|
|
|
|
let len = len as usize;
|
|
|
|
let token_text = &text[offset..offset + len];
|
|
|
|
offset += len;
|
|
|
|
write!(acc, "{:?} {} {:?}\n", token.kind, token.len, token_text).unwrap()
|
|
|
|
}
|
|
|
|
acc
|
2018-02-03 03:51:06 -06:00
|
|
|
}
|