Incremental reparsing for single tokens (WHITESPACE, COMMENT, DOC_COMMENT, IDENT, STRING, RAW_STRING)

darksv 2018-09-13 23:25:05 +02:00
parent b6f8037a6f
commit 4356240fa4
2 changed files with 122 additions and 14 deletions
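Roughly, the change teaches incremental_reparse to first try reparsing a single leaf token in place (reparse_leaf) before falling back to block-level reparsing or a full reparse. Below is a minimal sketch of how those entry points are driven, assuming the libsyntax2 crate layout of the time; the TextRange/TextUnit constructors come from the text_unit crate and the module paths are assumptions, while the File/AtomEdit calls mirror the code in the diff below.

// Hedged sketch, not part of this commit: exercises the reparse entry point.
extern crate libsyntax2;
extern crate text_unit;

use libsyntax2::{AtomEdit, File};
use text_unit::{TextRange, TextUnit};

fn main() {
    // Original source: the edit stays inside the single COMMENT token.
    let before = "fn foo() {} // commnt";
    let file = File::parse(before);

    // Replace the 6-byte range covering "commnt" with "comment".
    let offset = TextUnit::of_str("fn foo() {} // ");
    let edit = AtomEdit {
        delete: TextRange::offset_len(offset, TextUnit::of_str("commnt")),
        insert: "comment".to_string(),
    };

    // `reparse` first tries the new single-token path (`reparse_leaf`), then the
    // block-level path (`reparse_block`), and only then falls back to a full reparse.
    let after = file.reparse(&edit);
    assert_eq!(after.syntax().text().to_string(), "fn foo() {} // comment");
}

The tests in the second changed file exercise exactly this shape of edit through do_check.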

View File

@@ -82,22 +82,68 @@ pub fn reparse(&self, edit: &AtomEdit) -> File {
         self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
     }
     pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
+        let (node, green, new_errors) =
+            self.reparse_leaf(&edit).or_else(|| self.reparse_block(&edit))?;
+        let green_root = node.replace_with(green);
+        let errors = merge_errors(self.errors(), new_errors, node, edit);
+        Some(File::new(green_root, errors))
+    }
+
+    fn reparse_leaf(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
+        let node = algo::find_covering_node(self.syntax(), edit.delete);
+        match node.kind() {
+            | WHITESPACE
+            | COMMENT
+            | DOC_COMMENT
+            | IDENT
+            | STRING
+            | RAW_STRING => {
+                let text = get_text_after_edit(node, &edit);
+                let tokens = tokenize(&text);
+                if tokens.len() != 1 || tokens[0].kind != node.kind() {
+                    return None;
+                }
+
+                let reparser: fn(&mut Parser) = if node.kind().is_trivia() {
+                    // since trivia is omitted by parser when it doesn't have a parent, \
+                    // we need to create one for it
+                    |p| {
+                        p.start().complete(p, ROOT);
+                    }
+                } else {
+                    |p| {
+                        p.bump();
+                    }
+                };
+
+                let (green, new_errors) =
+                    parser_impl::parse_with::<yellow::GreenBuilder>(
+                        &text, &tokens, reparser,
+                    );
+                let green = if node.kind().is_trivia() {
+                    green.children().first().cloned().unwrap()
+                } else {
+                    green
+                };
+
+                Some((node, green, new_errors))
+            },
+            _ => None,
+        }
+    }
+
+    fn reparse_block(&self, edit: &AtomEdit) -> Option<(SyntaxNodeRef, GreenNode, Vec<SyntaxError>)> {
         let (node, reparser) = find_reparsable_node(self.syntax(), edit.delete)?;
-        let text = replace_range(
-            node.text().to_string(),
-            edit.delete - node.range().start(),
-            &edit.insert,
-        );
+        let text = get_text_after_edit(node, &edit);
         let tokens = tokenize(&text);
         if !is_balanced(&tokens) {
             return None;
         }
-        let (green, new_errors) = parser_impl::parse_with::<yellow::GreenBuilder>(
-            &text, &tokens, reparser,
-        );
-        let green_root = node.replace_with(green);
-        let errors = merge_errors(self.errors(), new_errors, node, edit);
-        Some(File::new(green_root, errors))
+        let (green, new_errors) =
+            parser_impl::parse_with::<yellow::GreenBuilder>(
+                &text, &tokens, reparser,
+            );
+        Some((node, green, new_errors))
     }
     fn full_reparse(&self, edit: &AtomEdit) -> File {
         let text = replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
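A standalone illustration (not the libsyntax2 tokenizer) of the gate reparse_leaf applies in the hunk above: after splicing the edit into the leaf's text, the result must still lex to exactly one token of the unchanged kind, otherwise the fast path returns None and the block-level path takes over.

// Toy stand-in for the single-token check; `Kind` and `lex` are placeholders.
#[derive(PartialEq, Eq, Clone, Copy, Debug)]
enum Kind { Ident, Whitespace }

fn lex(text: &str) -> Vec<Kind> {
    // Toy lexer: split into maximal runs of whitespace or non-whitespace chars.
    let mut tokens = Vec::new();
    let mut chars = text.chars().peekable();
    while let Some(&c) = chars.peek() {
        let kind = if c.is_whitespace() { Kind::Whitespace } else { Kind::Ident };
        while chars.peek().map_or(false, |&c| c.is_whitespace() == (kind == Kind::Whitespace)) {
            chars.next();
        }
        tokens.push(kind);
    }
    tokens
}

fn leaf_reparse_applies(old_kind: Kind, new_text: &str) -> bool {
    let tokens = lex(new_text);
    tokens.len() == 1 && tokens[0] == old_kind
}

fn main() {
    // "aabb" edited to "aafoofoobb" is still one identifier: the fast path applies
    // (compare the `fn aa<|><|>bb()` test with insert "foofoo" below).
    assert!(leaf_reparse_applies(Kind::Ident, "aafoofoobb"));
    // "aabb" edited to "aa bb" now lexes to several tokens: fall back.
    assert!(!leaf_reparse_applies(Kind::Ident, "aa bb"));
}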
@@ -134,6 +180,14 @@ pub fn insert(offset: TextUnit, text: String) -> AtomEdit {
     }
 }
 
+fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
+    replace_range(
+        node.text().to_string(),
+        edit.delete - node.range().start(),
+        &edit.insert,
+    )
+}
+
 fn find_reparsable_node(node: SyntaxNodeRef, range: TextRange) -> Option<(SyntaxNodeRef, fn(&mut Parser))> {
     let node = algo::find_covering_node(node, range);
     return algo::ancestors(node)
@@ -200,9 +254,9 @@ fn merge_errors(
 ) -> Vec<SyntaxError> {
     let mut res = Vec::new();
     for e in old_errors {
-        if e.offset < old_node.range().start() {
+        if e.offset <= old_node.range().start() {
             res.push(e)
-        } else if e.offset > old_node.range().end() {
+        } else if e.offset >= old_node.range().end() {
             res.push(SyntaxError {
                 msg: e.msg,
                 offset: e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len(),
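The relaxed comparisons in the last hunk make merge_errors keep errors located at or before the reparsed node's start verbatim, and rebase errors located at or after its end by the net size change of the edit. A plain-integer illustration of that shift (the real code uses TextUnit):

// Mirrors `e.offset + TextUnit::of_str(&edit.insert) - edit.delete.len()`.
fn rebased_offset(old_offset: u32, inserted_len: u32, deleted_len: u32) -> u32 {
    old_offset + inserted_len - deleted_len
}

fn main() {
    // Deleting 3 bytes and inserting "comment" (7 bytes) pushes a later error
    // 4 bytes to the right; a same-size replacement leaves it where it was.
    assert_eq!(rebased_offset(40, 7, 3), 44);
    assert_eq!(rebased_offset(40, 3, 3), 40);
}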

View File

@@ -33,7 +33,7 @@ fn do_check(before: &str, replace_with: &str) {
     let incrementally_reparsed = {
         let f = File::parse(&before);
         let edit = AtomEdit { delete: range, insert: replace_with.to_string() };
-        f.incremental_reparse(&edit).unwrap()
+        f.incremental_reparse(&edit).expect("cannot incrementally reparse")
     };
     assert_eq_text!(
         &dump_tree(fully_reparsed.syntax()),
@@ -45,6 +45,11 @@ fn do_check(before: &str, replace_with: &str) {
 fn foo() {
     let x = foo + <|>bar<|>
 }
+", "baz");
+    do_check(r"
+fn foo() {
+    let x = foo<|> + bar<|>
+}
 ", "baz");
     do_check(r"
 struct Foo {
@@ -67,6 +72,11 @@ fn <|><|>
 trait Foo {
     type <|>Foo<|>;
 }
+", "Output");
+    do_check(r"
+trait Foo {
+    type<|> Foo<|>;
+}
 ", "Output");
     do_check(r"
 impl IntoIterator<Item=i32> for Foo {
@@ -94,6 +104,50 @@ fn foo() {
 fn<|>;<|>
 }
 ", " exit(code: c_int)");
+    do_check(r"<|><|>
+fn foo() -> i32 {
+    1
+}
+", "\n\n\n \n");
+    do_check(r"
+fn foo() -> <|><|> {}
+", " \n");
+    do_check(r"
+fn <|>foo<|>() -> i32 {
+    1
+}
+", "bar");
+    do_check(r"
+fn aa<|><|>bb() {
+}
+", "foofoo");
+    do_check(r"
+fn aabb /* <|><|> */ () {
+}
+", "some comment");
+    do_check(r"
+fn aabb <|><|> () {
+}
+", " \t\t\n\n");
+    do_check(r"
+trait foo {
+    // comment <|><|>
+}
+", "\n");
+    do_check(r"
+/// good <|><|>omment
+mod {
+}
+", "c");
+    do_check(r#"
+fn -> &str { "Hello<|><|>" }
+"#, ", world");
+    do_check(r#"
+fn -> &str { // "Hello<|><|>"
+"#, ", world");
 }
 
 #[test]