update tests
parent afeaea7051
commit bc2550b196
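The change threads syntax errors through a new `Parse` type instead of storing them in the tree. From the fields and the `ok` signature visible in the hunks below, `Parse` presumably looks like this (a sketch, not the verbatim definition):

    pub struct Parse {
        tree: TreeArc<SourceFile>,
        errors: Arc<Vec<SyntaxError>>,
    }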
@@ -31,7 +31,7 @@
 #[doc(hidden)]
 pub mod fuzz;
 
-use std::sync::Arc;
+use std::{sync::Arc, fmt::Write};
 
 use ra_text_edit::AtomTextEdit;
 
@@ -68,6 +68,14 @@ pub fn ok(self) -> Result<TreeArc<SourceFile>, Arc<Vec<SyntaxError>>> {
             Err(self.errors)
         }
     }
+
+    pub fn debug_dump(&self) -> String {
+        let mut buf = self.tree.syntax().debug_dump();
+        for err in self.errors.iter() {
+            writeln!(buf, "err: `{}`", err).unwrap();
+        }
+        buf
+    }
 }
 
 /// `SourceFile` represents a parse tree for a single Rust file.
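The new `Parse::debug_dump` is what the `fmt::Write` import in the first hunk is for: `writeln!` into a `String` needs that trait in scope. A minimal usage sketch, assuming the `Parse` fields above:

    let parse = SourceFile::parse2("fn foo() {"); // unterminated block, so errors are expected
    let dump = parse.debug_dump();                // tree dump followed by one "err: `...`" line per error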
@@ -83,6 +91,12 @@ fn new(green: GreenNode) -> TreeArc<SourceFile> {
         TreeArc::cast(root)
     }
 
+    pub fn parse2(text: &str) -> Parse {
+        let (green, errors) = parsing::parse_text(text);
+        let tree = SourceFile::new(green);
+        Parse { tree, errors: Arc::new(errors) }
+    }
+
     pub fn parse(text: &str) -> TreeArc<SourceFile> {
         let (green, _errors) = parsing::parse_text(text);
         SourceFile::new(green)
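`parse2` sits beside the old entry point rather than replacing it, so callers choose whether to keep the errors. A sketch of the two paths, assuming the signatures above:

    let tree = SourceFile::parse(text);   // TreeArc<SourceFile>, errors discarded
    let parse = SourceFile::parse2(text); // Parse, errors retained
    let tree = parse.ok().expect("expected no syntax errors"); // Err carries Arc<Vec<SyntaxError>>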
@@ -166,9 +166,11 @@ fn merge_errors(
 
 #[cfg(test)]
 mod tests {
+    use std::sync::Arc;
+
     use test_utils::{extract_range, assert_eq_text};
 
-    use crate::{SourceFile, AstNode};
+    use crate::{SourceFile, AstNode, Parse};
     use super::*;
 
     fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
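The `use std::sync::Arc;` line is needed because `do_check` below now assembles a `Parse` by hand, wrapping the reparse errors in `Arc::new`.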
@@ -176,19 +178,19 @@ fn do_check(before: &str, replace_with: &str, reparsed_len: u32) {
         let edit = AtomTextEdit::replace(range, replace_with.to_owned());
         let after = edit.apply(before.clone());
 
-        let fully_reparsed = SourceFile::parse(&after);
+        let fully_reparsed = SourceFile::parse2(&after);
         let incrementally_reparsed = {
             let f = SourceFile::parse(&before);
             let edit = AtomTextEdit { delete: range, insert: replace_with.to_string() };
             let (green, new_errors, range) =
                 incremental_reparse(f.syntax(), &edit, f.errors()).unwrap();
             assert_eq!(range.len(), reparsed_len.into(), "reparsed fragment has wrong length");
-            SourceFile::new(green, new_errors)
+            Parse { tree: SourceFile::new(green), errors: Arc::new(new_errors) }
         };
 
         assert_eq_text!(
-            &fully_reparsed.syntax().debug_dump(),
-            &incrementally_reparsed.syntax().debug_dump(),
+            &fully_reparsed.tree.syntax().debug_dump(),
+            &incrementally_reparsed.tree.syntax().debug_dump(),
         );
     }
 
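Both sides of the comparison are now `Parse` values, so the dumps reach the tree through `.tree`. A hypothetical variant that would also compare the error lists is to dump the `Parse` itself:

    assert_eq_text!(&fully_reparsed.debug_dump(), &incrementally_reparsed.debug_dump());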
@@ -256,37 +256,18 @@ pub fn memory_size_of_subtree(&self) -> usize {
     }
 
     pub fn debug_dump(&self) -> String {
-        let mut errors: Vec<_> = match self.ancestors().find_map(SourceFile::cast) {
-            Some(file) => file.errors(),
-            None => self.root_data().to_vec(),
-        };
-        errors.sort_by_key(|e| e.offset());
-        let mut err_pos = 0;
         let mut level = 0;
         let mut buf = String::new();
-        macro_rules! indent {
-            () => {
-                for _ in 0..level {
-                    buf.push_str("  ");
-                }
-            };
-        }
 
         for event in self.preorder_with_tokens() {
             match event {
                 WalkEvent::Enter(element) => {
-                    indent!();
+                    for _ in 0..level {
+                        buf.push_str("  ");
+                    }
                     match element {
                         SyntaxElement::Node(node) => writeln!(buf, "{:?}", node).unwrap(),
-                        SyntaxElement::Token(token) => {
-                            writeln!(buf, "{:?}", token).unwrap();
-                            let off = token.range().end();
-                            while err_pos < errors.len() && errors[err_pos].offset() <= off {
-                                indent!();
-                                writeln!(buf, "err: `{}`", errors[err_pos]).unwrap();
-                                err_pos += 1;
-                            }
-                        }
+                        SyntaxElement::Token(token) => writeln!(buf, "{:?}", token).unwrap(),
                     }
                     level += 1;
                 }
@@ -295,9 +276,6 @@ macro_rules! indent {
         }
 
         assert_eq!(level, 0);
-        for err in errors[err_pos..].iter() {
-            writeln!(buf, "err: `{}`", err).unwrap();
-        }
 
         buf
     }
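These two hunks remove error reporting from `SyntaxNode::debug_dump` entirely: the error list gathered from the root, the `err:` lines interleaved after tokens, and the trailing error loop are gone, and the `indent!` macro is inlined as a plain loop. Error output now comes only from `Parse::debug_dump`, so the two dumps differ roughly like this (assuming the field access used by the tests):

    let parse = SourceFile::parse2(text);
    let tree_only = parse.tree.syntax().debug_dump(); // nodes and tokens only
    let full = parse.debug_dump();                    // same dump plus trailing "err: `...`" lines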
@@ -8,7 +8,7 @@
 };
 
 use test_utils::{project_dir, dir_tests, read_text, collect_tests};
-use ra_syntax::{SourceFile, AstNode, fuzz};
+use ra_syntax::{SourceFile, fuzz};
 
 #[test]
 fn lexer_tests() {
@@ -21,26 +21,21 @@ fn lexer_tests() {
 #[test]
 fn parser_tests() {
     dir_tests(&test_data_dir(), &["parser/inline/ok", "parser/ok"], |text, path| {
-        let file = SourceFile::parse(text);
-        let errors = file.errors();
+        let parse = SourceFile::parse2(text);
+        let errors = parse.errors.as_slice();
         assert_eq!(
-            &*errors,
+            errors,
             &[] as &[ra_syntax::SyntaxError],
             "There should be no errors in the file {:?}",
-            path.display()
+            path.display(),
         );
-        file.syntax().debug_dump()
+        parse.debug_dump()
     });
     dir_tests(&test_data_dir(), &["parser/err", "parser/inline/err"], |text, path| {
-        let file = SourceFile::parse(text);
-        let errors = file.errors();
-        assert_ne!(
-            &*errors,
-            &[] as &[ra_syntax::SyntaxError],
-            "There should be errors in the file {:?}",
-            path.display()
-        );
-        file.syntax().debug_dump()
+        let parse = SourceFile::parse2(text);
+        let errors = parse.errors.as_slice();
+        assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
+        parse.debug_dump()
     });
 }
 
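In the `parser/err` case, the five-argument `assert_ne!` against an empty slice collapses into a single `assert!(!errors.is_empty(), ...)`, which states the intent directly.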