use crate::algo;
use crate::grammar;
use crate::lexer::{tokenize, Token};
use crate::parser_api::Parser;
use crate::parser_impl;
use crate::text_utils::replace_range;
use crate::yellow::{self, GreenNode, SyntaxError, SyntaxNodeRef};
use crate::{SyntaxKind::*, TextRange, TextUnit};
use ra_text_edit::AtomEdit;

/// Tries to reparse only the part of the tree affected by `edit`: first a
/// single leaf token, then the smallest enclosing reparsable block. Returns
/// the new green root together with the adjusted errors, or `None` when a
/// full reparse is required.
pub(crate) fn incremental_reparse(
    node: SyntaxNodeRef,
    edit: &AtomEdit,
    errors: Vec<SyntaxError>,
) -> Option<(GreenNode, Vec<SyntaxError>)> {
    let (node, green, new_errors) =
        reparse_leaf(node, edit).or_else(|| reparse_block(node, edit))?;
    let green_root = node.replace_with(green);
    let errors = merge_errors(errors, new_errors, node, edit);
    Some((green_root, errors))
}
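
// A minimal usage sketch (hedged: `before`, `after`, `range`, and `new_text`
// are hypothetical names; this mirrors the `do_check` helper in the tests
// below rather than a fixed public API):
//
//     let file = SourceFileNode::parse(before);
//     let edit = AtomEdit { delete: range, insert: new_text.to_string() };
//     let reparsed = match incremental_reparse(file.syntax(), &edit, file.errors()) {
//         Some((green_root, errors)) => SourceFileNode::new(green_root, errors),
//         None => SourceFileNode::parse(after), // fall back to a full reparse
//     };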

/// Fast path: if the edit falls entirely inside a single whitespace, comment,
/// identifier or string token, relex just that token's new text. This only
/// works when relexing yields exactly one token of the same kind and the new
/// identifier is not a contextual keyword.
fn reparse_leaf<'node>(
    node: SyntaxNodeRef<'node>,
    edit: &AtomEdit,
) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
    let node = algo::find_covering_node(node, edit.delete);
    match node.kind() {
        WHITESPACE | COMMENT | IDENT | STRING | RAW_STRING => {
            let text = get_text_after_edit(node, edit);
            let tokens = tokenize(&text);
            let token = match tokens[..] {
                [token] if token.kind == node.kind() => token,
                _ => return None,
            };

            if token.kind == IDENT && is_contextual_kw(&text) {
                return None;
            }

            let green = GreenNode::new_leaf(node.kind(), text.into());
            let new_errors = vec![];
            Some((node, green, new_errors))
        }
        _ => None,
    }
}

/// Block path: find the smallest enclosing node that has a dedicated
/// reparser, relex its edited text, and reparse it in isolation. Bails out
/// if the curly braces are no longer balanced, since the block's extent in
/// the file could then change.
fn reparse_block<'node>(
    node: SyntaxNodeRef<'node>,
    edit: &AtomEdit,
) -> Option<(SyntaxNodeRef<'node>, GreenNode, Vec<SyntaxError>)> {
    let (node, reparser) = find_reparsable_node(node, edit.delete)?;
    let text = get_text_after_edit(node, edit);
    let tokens = tokenize(&text);
    if !is_balanced(&tokens) {
        return None;
    }
    let (green, new_errors) =
        parser_impl::parse_with(yellow::GreenBuilder::new(), &text, &tokens, reparser);
    Some((node, green, new_errors))
}
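
// For example, typing inside the `{ ... }` of a function body usually lands
// on a BLOCK ancestor, so only that block is reparsed; pasting an unmatched
// `{` makes `is_balanced` fail, `None` is returned, and the edit has to be
// handled by a full reparse instead.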

/// Text of `node` with `edit` applied, with the edit range rebased from file
/// offsets to node-local offsets.
fn get_text_after_edit(node: SyntaxNodeRef, edit: &AtomEdit) -> String {
    replace_range(
        node.text().to_string(),
        edit.delete - node.range().start(),
        &edit.insert,
    )
}

/// Identifiers that act as keywords in some positions; a leaf-only relex
/// cannot tell the difference, so such edits force a bigger reparse.
fn is_contextual_kw(text: &str) -> bool {
    match text {
        "auto" | "default" | "union" => true,
        _ => false,
    }
}

type ParseFn = fn(&mut Parser);

/// Walks up from the node covering `range` to the closest ancestor that has
/// a dedicated block-level reparser.
fn find_reparsable_node(
    node: SyntaxNodeRef<'_>,
    range: TextRange,
) -> Option<(SyntaxNodeRef<'_>, ParseFn)> {
    let node = algo::find_covering_node(node, range);
    return node
        .ancestors()
        .filter_map(|node| reparser(node).map(|r| (node, r)))
        .next();

    fn reparser(node: SyntaxNodeRef) -> Option<ParseFn> {
        let res = match node.kind() {
            BLOCK => grammar::block,
            NAMED_FIELD_DEF_LIST => grammar::named_field_def_list,
            NAMED_FIELD_LIST => grammar::named_field_list,
            ENUM_VARIANT_LIST => grammar::enum_variant_list,
            MATCH_ARM_LIST => grammar::match_arm_list,
            USE_TREE_LIST => grammar::use_tree_list,
            EXTERN_ITEM_LIST => grammar::extern_item_list,
            TOKEN_TREE if node.first_child().unwrap().kind() == L_CURLY => grammar::token_tree,
            ITEM_LIST => {
                let parent = node.parent().unwrap();
                match parent.kind() {
                    IMPL_ITEM => grammar::impl_item_list,
                    TRAIT_DEF => grammar::trait_item_list,
                    MODULE => grammar::mod_item_list,
                    _ => return None,
                }
            }
            _ => return None,
        };
        Some(res)
    }
}

/// A reparsable block must start with `{`, end with `}`, and keep its curly
/// braces balanced, so that the reparsed node keeps the same shape.
fn is_balanced(tokens: &[Token]) -> bool {
    if tokens.is_empty()
        || tokens.first().unwrap().kind != L_CURLY
        || tokens.last().unwrap().kind != R_CURLY
    {
        return false;
    }
    let mut balance = 0usize;
    for t in tokens.iter() {
        match t.kind {
            L_CURLY => balance += 1,
            R_CURLY => {
                balance = match balance.checked_sub(1) {
                    Some(b) => b,
                    None => return false,
                }
            }
            _ => (),
        }
    }
    balance == 0
}
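
// E.g. (assuming the lexer's usual output): `is_balanced(&tokenize("{ { } }"))`
// is true, while `"{ } }"` fails because `balance` would underflow on the
// final `}`.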

/// Keeps old errors that lie outside the reparsed node (shifting the ones
/// after it by the edit's length delta) and rebases the freshly produced
/// errors from node-local offsets back to file offsets.
fn merge_errors(
    old_errors: Vec<SyntaxError>,
    new_errors: Vec<SyntaxError>,
    old_node: SyntaxNodeRef,
    edit: &AtomEdit,
) -> Vec<SyntaxError> {
    let mut res = Vec::new();
    for e in old_errors {
        if e.offset() <= old_node.range().start() {
            res.push(e);
        } else if e.offset() >= old_node.range().end() {
            res.push(e.add_offset(TextUnit::of_str(&edit.insert) - edit.delete.len()));
        }
    }
    for e in new_errors {
        res.push(e.add_offset(old_node.range().start()));
    }
    res
}
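
// Worked example (hypothetical numbers): with the reparsed node at 10..20 and
// an edit that deletes 12..15 while inserting 4 bytes, errors at or before
// offset 10 are kept as-is, errors at or past offset 20 move by 4 - 3 = +1,
// and errors from the fresh reparse are shifted by +10 back into file
// coordinates.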

#[cfg(test)]
mod tests {
    use test_utils::{extract_range, assert_eq_text};

    use crate::{SourceFileNode, text_utils::replace_range, utils::dump_tree};
    use super::*;

    /// Checks that incrementally reparsing the edited range with `reparser`
    /// produces the same tree as parsing the edited text from scratch.
    fn do_check<F>(before: &str, replace_with: &str, reparser: F)
    where
        for<'a> F: Fn(
            SyntaxNodeRef<'a>,
            &AtomEdit,
        ) -> Option<(SyntaxNodeRef<'a>, GreenNode, Vec<SyntaxError>)>,
    {
        let (range, before) = extract_range(before);
        let after = replace_range(before.clone(), range, replace_with);

        let fully_reparsed = SourceFileNode::parse(&after);
        let incrementally_reparsed = {
            let f = SourceFileNode::parse(&before);
            let edit = AtomEdit {
                delete: range,
                insert: replace_with.to_string(),
            };
            let (node, green, new_errors) =
                reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
            let green_root = node.replace_with(green);
            let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
            SourceFileNode::new(green_root, errors)
        };

        assert_eq_text!(
            &dump_tree(fully_reparsed.syntax()),
            &dump_tree(incrementally_reparsed.syntax()),
        )
    }

    #[test]
    fn reparse_block_tests() {
        let do_check = |before, replace_to| do_check(before, replace_to, reparse_block);

        do_check(
            r"
fn foo() {
    let x = foo + <|>bar<|>
}
",
            "baz",
        );
        do_check(
            r"
fn foo() {
    let x = foo<|> + bar<|>
}
",
            "baz",
        );
        do_check(
            r"
struct Foo {
    f: foo<|><|>
}
",
            ",\n g: (),",
        );
        do_check(
            r"
fn foo {
    let;
    1 + 1;
    <|>92<|>;
}
",
            "62",
        );
        do_check(
            r"
mod foo {
    fn <|><|>
}
",
            "bar",
        );
        do_check(
            r"
trait Foo {
    type <|>Foo<|>;
}
",
            "Output",
        );
        do_check(
            r"
impl IntoIterator<Item=i32> for Foo {
    f<|><|>
}
",
            "n next(",
        );
        do_check(
            r"
use a::b::{foo,<|>,bar<|>};
",
            "baz",
        );
        do_check(
            r"
pub enum A {
    Foo<|><|>
}
",
            "\nBar;\n",
        );
        do_check(
            r"
foo!{a, b<|><|> d}
",
            ", c[3]",
        );
        do_check(
            r"
fn foo() {
    vec![<|><|>]
}
",
            "123",
        );
        do_check(
            r"
extern {
    fn<|>;<|>
}
",
            " exit(code: c_int)",
        );
    }

    #[test]
    fn reparse_leaf_tests() {
        let do_check = |before, replace_to| do_check(before, replace_to, reparse_leaf);

        do_check(
            r"<|><|>
fn foo() -> i32 { 1 }
",
            "\n\n\n \n",
        );
        do_check(
            r"
fn foo() -> <|><|> {}
",
            " \n",
        );
        do_check(
            r"
fn <|>foo<|>() -> i32 { 1 }
",
            "bar",
        );
        do_check(
            r"
fn foo<|><|>foo() { }
",
            "bar",
        );
        do_check(
            r"
fn foo /* <|><|> */ () {}
",
            "some comment",
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            " \t\t\n\n",
        );
        do_check(
            r"
fn baz <|><|> () {}
",
            " \t\t\n\n",
        );
        do_check(
            r"
/// foo <|><|>omment
mod { }
",
            "c",
        );
        do_check(
            r#"
fn -> &str { "Hello<|><|>" }
"#,
            ", world",
        );
        do_check(
            r#"
fn -> &str { // "Hello<|><|>"
"#,
            ", world",
        );
        do_check(
            r##"
fn -> &str { r#"Hello<|><|>"#
"##,
            ", world",
        );
        do_check(
            r"
#[derive(<|>Copy<|>)]
enum Foo {

}
",
            "Clone",
        );
    }
}