Add more keywords
parent b4af02d110
commit 9f91901f7e
@@ -15,6 +15,7 @@ mod complete_unqualified_path;
mod complete_postfix;
mod complete_macro_in_item_position;
mod complete_trait_impl;
mod patterns;
#[cfg(test)]
mod test_utils;
@@ -1,12 +1,6 @@
//! FIXME: write short doc here

use ra_syntax::{
    algo::non_trivia_sibling,
    ast::{self, LoopBodyOwner},
    match_ast, AstNode, Direction, NodeOrToken, SyntaxElement,
    SyntaxKind::*,
    SyntaxToken,
};
use ra_syntax::ast;

use crate::completion::{
    CompletionContext, CompletionItem, CompletionItemKind, CompletionKind, Completions,
@@ -53,110 +47,56 @@ fn keyword(ctx: &CompletionContext, kw: &str, snippet: &str) -> CompletionItem {
        .build()
}

fn add_top_level_keywords(acc: &mut Completions, ctx: &CompletionContext) {
    if let Some(token) = previous_non_triva_element(&ctx.token).and_then(|it| it.into_token()) {
        if token.kind() == UNSAFE_KW {
            acc.add(keyword(ctx, "impl", "impl $0 {}"));
            acc.add(keyword(ctx, "trait", "trait $0 {}"));
            acc.add(keyword(ctx, "fn", "fn $0() {}"));
            return;
        }
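/// Adds `kw` as a keyword completion using `snippet`, but only when
/// `should_add` holds for the current completion context.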
fn add_keyword(
    ctx: &CompletionContext,
    acc: &mut Completions,
    kw: &str,
    snippet: &str,
    should_add: bool,
) {
    if should_add {
        acc.add(keyword(ctx, kw, snippet));
    }
    acc.add(keyword(ctx, "impl", "impl $0 {}"));
    acc.add(keyword(ctx, "enum", "enum $0 {}"));
    acc.add(keyword(ctx, "struct", "struct $0 {}"));
    acc.add(keyword(ctx, "trait", "trait $0 {}"));
    acc.add(keyword(ctx, "fn", "fn $0() {}"));
    acc.add(keyword(ctx, "unsafe", "unsafe "));
}

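// Each keyword below is gated on the syntactic-context flags recorded on
// `CompletionContext` (`is_new_item`, `block_expr_parent`, `after_if`,
// `in_loop_body`, ...).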
pub(super) fn complete_expr_keyword(acc: &mut Completions, ctx: &CompletionContext) {
    if ctx.is_new_item {
        add_top_level_keywords(acc, ctx);
        return;
    }
    if !ctx.is_trivial_path {
        return;
    }
    add_keyword(ctx, acc, "fn", "fn $0() {}", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "type", "type ", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "impl", "impl $0 {}", ctx.is_new_item);
    add_keyword(ctx, acc, "trait", "trait $0 {}", ctx.is_new_item);
    add_keyword(ctx, acc, "enum", "enum $0 {}", ctx.is_new_item && !ctx.after_unsafe);
    add_keyword(ctx, acc, "struct", "struct $0 {}", ctx.is_new_item && !ctx.after_unsafe);
    add_keyword(ctx, acc, "union", "union $0 {}", ctx.is_new_item && !ctx.after_unsafe);
    add_keyword(ctx, acc, "match", "match $0 {}", ctx.block_expr_parent);
    add_keyword(ctx, acc, "loop", "loop {$0}", ctx.block_expr_parent);
    add_keyword(ctx, acc, "while", "while $0 {}", ctx.block_expr_parent);
    add_keyword(ctx, acc, "let", "let ", ctx.after_if || ctx.block_expr_parent);
    add_keyword(ctx, acc, "else", "else {$0}", ctx.after_if);
    add_keyword(ctx, acc, "else if", "else if $0 {}", ctx.after_if);
    add_keyword(ctx, acc, "mod", "mod $0 {}", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "mut", "mut ", ctx.bind_pat_parent || ctx.ref_pat_parent);
    add_keyword(ctx, acc, "true", "true", !ctx.is_new_item); // this should be defined properly
    add_keyword(ctx, acc, "false", "false", !ctx.is_new_item); // this should be defined properly
    add_keyword(ctx, acc, "const", "const ", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "type", "type ", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "static", "static ", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "extern", "extern ", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "unsafe", "unsafe ", ctx.is_new_item || ctx.block_expr_parent);
    add_keyword(ctx, acc, "continue", "continue;", ctx.in_loop_body && ctx.can_be_stmt);
    add_keyword(ctx, acc, "break", "break;", ctx.in_loop_body && ctx.can_be_stmt);
    add_keyword(ctx, acc, "continue", "continue", ctx.in_loop_body && !ctx.can_be_stmt);
    add_keyword(ctx, acc, "break", "break", ctx.in_loop_body && !ctx.can_be_stmt);
    complete_use_tree_keyword(acc, ctx);

    let fn_def = match &ctx.function_syntax {
        Some(it) => it,
        None => return,
    };
    acc.add(keyword(ctx, "if", "if $0 {}"));
    acc.add(keyword(ctx, "match", "match $0 {}"));
    acc.add(keyword(ctx, "while", "while $0 {}"));
    acc.add(keyword(ctx, "loop", "loop {$0}"));

    if ctx.after_if {
        acc.add(keyword(ctx, "else", "else {$0}"));
        acc.add(keyword(ctx, "else if", "else if $0 {}"));
    }
    if is_in_loop_body(&ctx.token) {
        if ctx.can_be_stmt {
            acc.add(keyword(ctx, "continue", "continue;"));
            acc.add(keyword(ctx, "break", "break;"));
        } else {
            acc.add(keyword(ctx, "continue", "continue"));
            acc.add(keyword(ctx, "break", "break"));
        }
    }
    acc.add_all(complete_return(ctx, &fn_def, ctx.can_be_stmt));
}

fn previous_non_triva_element(token: &SyntaxToken) -> Option<SyntaxElement> {
    // Trying to get the first non-trivia sibling, if we have one.
    let token_sibling = non_trivia_sibling(NodeOrToken::Token(token.to_owned()), Direction::Prev);
    let mut wrapped = if let Some(sibling) = token_sibling {
        sibling
    } else {
        // If not, try to find the first ancestor that has such a sibling.
        let node = token.parent();
        let range = node.text_range();
        let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?;
        let prev_sibling_node = top_node.ancestors().find(|it| {
            non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
        })?;
        non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev)?
    };
    // Traverse the tree down to get the last token or node, i.e. the closest one.
    loop {
        if let Some(token) = wrapped.as_token() {
            return Some(NodeOrToken::Token(token.clone()));
        } else {
            let new = wrapped.as_node().and_then(|n| n.last_child_or_token());
            if new.is_some() {
                wrapped = new.unwrap().clone();
            } else {
                return Some(wrapped);
            }
        }
    }
}

fn is_in_loop_body(leaf: &SyntaxToken) -> bool {
    // FIXME move this to CompletionContext and make it handle macros
    for node in leaf.parent().ancestors() {
        if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
            break;
        }
        let loop_body = match_ast! {
            match node {
                ast::ForExpr(it) => it.loop_body(),
                ast::WhileExpr(it) => it.loop_body(),
                ast::LoopExpr(it) => it.loop_body(),
                _ => None,
            }
        };
        if let Some(body) = loop_body {
            if body.syntax().text_range().contains_range(leaf.text_range()) {
                return true;
            }
        }
    }
    false
}

fn complete_return(
    ctx: &CompletionContext,
    fn_def: &ast::FnDef,
@@ -320,139 +260,6 @@ mod tests {
        );
    }

    #[test]
    fn completes_unsafe_context_in_item_position_with_non_empty_token() {
        assert_debug_snapshot!(
            do_keyword_completion(
                r"
                mod my_mod {
                    unsafe i<|>
                }
                ",
            ),
            @r###"
        [
            CompletionItem {
                label: "fn",
                source_range: 57..58,
                delete: 57..58,
                insert: "fn $0() {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "impl",
                source_range: 57..58,
                delete: 57..58,
                insert: "impl $0 {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "trait",
                source_range: 57..58,
                delete: 57..58,
                insert: "trait $0 {}",
                kind: Keyword,
            },
        ]
        "###
        );
    }

    #[test]
    fn completes_unsafe_context_in_item_position_with_empty_token() {
        assert_debug_snapshot!(
            do_keyword_completion(
                r"
                mod my_mod {
                    unsafe <|>
                }
                ",
            ),
            @r###"
        [
            CompletionItem {
                label: "fn",
                source_range: 57..57,
                delete: 57..57,
                insert: "fn $0() {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "impl",
                source_range: 57..57,
                delete: 57..57,
                insert: "impl $0 {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "trait",
                source_range: 57..57,
                delete: 57..57,
                insert: "trait $0 {}",
                kind: Keyword,
            },
        ]
        "###
        );
    }

    #[test]
    fn completes_keywords_in_item_position_with_empty_token() {
        assert_debug_snapshot!(
            do_keyword_completion(
                r"
                <|>
                ",
            ),
            @r###"
        [
            CompletionItem {
                label: "enum",
                source_range: 17..17,
                delete: 17..17,
                insert: "enum $0 {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "fn",
                source_range: 17..17,
                delete: 17..17,
                insert: "fn $0() {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "impl",
                source_range: 17..17,
                delete: 17..17,
                insert: "impl $0 {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "struct",
                source_range: 17..17,
                delete: 17..17,
                insert: "struct $0 {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "trait",
                source_range: 17..17,
                delete: 17..17,
                insert: "trait $0 {}",
                kind: Keyword,
            },
            CompletionItem {
                label: "unsafe",
                source_range: 17..17,
                delete: 17..17,
                insert: "unsafe ",
                kind: Keyword,
            },
        ]
        "###
        );
    }

    #[test]
    fn completes_else_after_if() {
        assert_debug_snapshot!(
@@ -5,12 +5,16 @@ use ra_db::SourceDatabase;
use ra_ide_db::RootDatabase;
use ra_syntax::{
    algo::{find_covering_element, find_node_at_offset},
    ast, match_ast, AstNode,
    ast, match_ast, AstNode, NodeOrToken,
    SyntaxKind::*,
    SyntaxNode, SyntaxToken, TextRange, TextSize,
};
use ra_text_edit::Indel;

use super::patterns::{
    goes_after_unsafe, has_bind_pat_parent, has_block_expr_parent, has_ref_pat_parent,
    is_in_loop_body,
};
use crate::{call_info::ActiveParameter, completion::CompletionConfig, FilePosition};
use test_utils::mark;

@@ -60,6 +64,11 @@ pub(crate) struct CompletionContext<'a> {
    pub(super) is_path_type: bool,
    pub(super) has_type_args: bool,
    pub(super) attribute_under_caret: Option<ast::Attr>,
    pub(super) after_unsafe: bool,
    pub(super) block_expr_parent: bool,
    pub(super) bind_pat_parent: bool,
    pub(super) ref_pat_parent: bool,
    pub(super) in_loop_body: bool,
}

impl<'a> CompletionContext<'a> {
@@ -118,6 +127,11 @@ impl<'a> CompletionContext<'a> {
            has_type_args: false,
            dot_receiver_is_ambiguous_float_literal: false,
            attribute_under_caret: None,
            after_unsafe: false,
            in_loop_body: false,
            ref_pat_parent: false,
            bind_pat_parent: false,
            block_expr_parent: false,
        };

        let mut original_file = original_file.syntax().clone();
@@ -159,7 +173,7 @@ impl<'a> CompletionContext<'a> {
                break;
            }
        }

        ctx.fill_keyword_patterns(&hypothetical_file, offset);
        ctx.fill(&original_file, hypothetical_file, offset);
        Some(ctx)
    }
@@ -188,6 +202,16 @@ impl<'a> CompletionContext<'a> {
        self.sema.scope_at_offset(&self.token.parent(), self.offset)
    }

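    /// Fills the keyword-pattern flags by inspecting the token at the
    /// completion offset in the file with the fake ident inserted.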
    fn fill_keyword_patterns(&mut self, file_with_fake_ident: &SyntaxNode, offset: TextSize) {
        let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased().unwrap();
        let syntax_element = NodeOrToken::Token(fake_ident_token.clone());
        self.block_expr_parent = has_block_expr_parent(syntax_element.clone());
        self.after_unsafe = goes_after_unsafe(syntax_element.clone());
        self.bind_pat_parent = has_bind_pat_parent(syntax_element.clone());
        self.ref_pat_parent = has_ref_pat_parent(syntax_element.clone());
        self.in_loop_body = is_in_loop_body(syntax_element.clone());
    }

    fn fill(
        &mut self,
        original_file: &SyntaxNode,
crates/ra_ide/src/completion/patterns.rs (new file, 117 lines)
@@ -0,0 +1,117 @@
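//! Syntactic patterns describing the position of the completion cursor,
//! used to decide which keywords to offer.
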
use ra_syntax::{
    algo::non_trivia_sibling,
    ast::{self, LoopBodyOwner},
    match_ast, AstNode, Direction, NodeOrToken, SyntaxElement,
    SyntaxKind::*,
    SyntaxNode,
};

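/// Returns `true` if the element is located somewhere inside an `impl` block.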
pub(crate) fn inside_impl(element: SyntaxElement) -> bool {
    let node = match element {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };
    node.ancestors().find(|it| it.kind() == IMPL_DEF).is_some()
}

pub(crate) fn has_bind_pat_parent(element: SyntaxElement) -> bool {
    let node = match element {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };
    node.ancestors().find(|it| it.kind() == BIND_PAT).is_some()
}

pub(crate) fn has_ref_pat_parent(element: SyntaxElement) -> bool {
    let node = match element {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };
    node.ancestors().find(|it| it.kind() == REF_PAT).is_some()
}

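/// Returns `true` if the previous non-trivia element is the `unsafe` keyword.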
pub(crate) fn goes_after_unsafe(element: SyntaxElement) -> bool {
    if let Some(token) = previous_non_triva_element(element).and_then(|it| it.into_token()) {
        if token.kind() == UNSAFE_KW {
            return true;
        }
    }
    false
}

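// The two checks below look at the closest strictly-enclosing node (see
// `not_same_range_parent`): a block expression means we are inside a `{ ... }`
// body, an item list means we are at item position.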
pub(crate) fn has_block_expr_parent(element: SyntaxElement) -> bool {
    not_same_range_parent(element).filter(|it| it.kind() == BLOCK_EXPR).is_some()
}

pub(crate) fn has_item_list_parent(element: SyntaxElement) -> bool {
    not_same_range_parent(element).filter(|it| it.kind() == ITEM_LIST).is_some()
}

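/// Returns `true` if the element sits inside the body of a `for`, `while` or
/// `loop` expression, without crossing a function or closure boundary.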
pub(crate) fn is_in_loop_body(element: SyntaxElement) -> bool {
    let leaf = match element {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };
    for node in leaf.ancestors() {
        if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
            break;
        }
        let loop_body = match_ast! {
            match node {
                ast::ForExpr(it) => it.loop_body(),
                ast::WhileExpr(it) => it.loop_body(),
                ast::LoopExpr(it) => it.loop_body(),
                _ => None,
            }
        };
        if let Some(body) = loop_body {
            if body.syntax().text_range().contains_range(leaf.text_range()) {
                return true;
            }
        }
    }
    false
}

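/// Walks up to the highest ancestor that covers the same text range as the
/// element and returns that ancestor's parent, if any.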
fn not_same_range_parent(element: SyntaxElement) -> Option<SyntaxNode> {
    let node = match element {
        NodeOrToken::Node(node) => node,
        NodeOrToken::Token(token) => token.parent(),
    };
    let range = node.text_range();
    node.ancestors().take_while(|it| it.text_range() == range).last().and_then(|it| it.parent())
}

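/// Finds the syntax element that comes right before `element`, skipping
/// whitespace and comments.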
fn previous_non_triva_element(element: SyntaxElement) -> Option<SyntaxElement> {
    // Trying to get the first non-trivia sibling, if we have one.
    let token_sibling = non_trivia_sibling(element.clone(), Direction::Prev);
    let mut wrapped = if let Some(sibling) = token_sibling {
        sibling
    } else {
        // If not, try to find the first ancestor that has such a sibling.
        let node = match element {
            NodeOrToken::Node(node) => node,
            NodeOrToken::Token(token) => token.parent(),
        };
        let range = node.text_range();
        let top_node = node.ancestors().take_while(|it| it.text_range() == range).last()?;
        let prev_sibling_node = top_node.ancestors().find(|it| {
            non_trivia_sibling(NodeOrToken::Node(it.to_owned()), Direction::Prev).is_some()
        })?;
        non_trivia_sibling(NodeOrToken::Node(prev_sibling_node), Direction::Prev)?
    };
    // I think you can avoid this loop if you use SyntaxToken::prev_token --
    // unlike prev_sibling_or_token, it works across parents.
    // Traverse the tree down to get the last token or node, i.e. the closest one.
    loop {
        if let Some(token) = wrapped.as_token() {
            return Some(NodeOrToken::Token(token.clone()));
        } else {
            let new = wrapped.as_node().and_then(|n| n.last_child_or_token());
            if new.is_some() {
                wrapped = new.unwrap().clone();
            } else {
                return Some(wrapped);
            }
        }
    }
}