176: Move completion to ra_analysis r=matklad a=matklad

While we should handle completion for an isolated file, this is better
achieved by using an empty Analysis rather than by working only with &File:
we need memoization for type inference even inside a single file.

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2018-10-31 18:05:12 +00:00
commit 1dc5608d0b
20 changed files with 1066 additions and 1066 deletions
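(Aside, not part of the diff.) The memoization argument above is easiest to see in miniature. The sketch below uses simplified stand-ins for the FnId and FnScopes types introduced later in this commit, and a plain HashMap in place of the salsa query table, to show why completion wants a database that caches per-function results instead of recomputing everything from a bare &File on every request.

use std::collections::HashMap;
use std::sync::Arc;

// Simplified stand-ins for FnId and FnScopes from this commit.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FnId(u32);

#[derive(Debug)]
struct FnScopes {
    // The real type holds scope data for one function; a marker is enough here.
    names: Vec<String>,
}

// A tiny "analysis database": the fn_scopes query is memoized per FnId, so
// repeated completion requests inside the same file reuse earlier work.
#[derive(Default)]
struct MiniDb {
    fn_scopes_cache: HashMap<FnId, Arc<FnScopes>>,
}

impl MiniDb {
    fn fn_scopes(&mut self, fn_id: FnId) -> Arc<FnScopes> {
        Arc::clone(self.fn_scopes_cache.entry(fn_id).or_insert_with(|| {
            // Pretend this is the expensive part (scope building, type
            // inference) that we do not want to redo on every keystroke.
            Arc::new(FnScopes {
                names: vec![format!("local_in_fn_{}", fn_id.0)],
            })
        }))
    }
}

fn main() {
    let mut db = MiniDb::default();
    let first = db.fn_scopes(FnId(1)); // computed
    let second = db.fn_scopes(FnId(1)); // served from the memo table
    assert!(Arc::ptr_eq(&first, &second));
}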

View File

@ -1,16 +1,32 @@
use ra_editor::{CompletionItem, find_node_at_offset};
use rustc_hash::{FxHashMap, FxHashSet};
use ra_editor::{find_node_at_offset};
use ra_syntax::{
AtomEdit, File, TextUnit, AstNode,
ast::{self, ModuleItemOwner, AstChildren},
AtomEdit, File, TextUnit, AstNode, SyntaxNodeRef,
algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx},
ast::{self, AstChildren, LoopBodyOwner, ModuleItemOwner},
SyntaxKind::*,
};
use crate::{
FileId, Cancelable,
input::FilesDatabase,
db::{self, SyntaxDatabase},
descriptors::module::{ModulesDatabase, ModuleTree, ModuleId, scope::ModuleScope},
descriptors::DescriptorDatabase,
descriptors::function::FnScopes,
descriptors::module::{ModuleTree, ModuleId, ModuleScope},
};
#[derive(Debug)]
pub struct CompletionItem {
/// What user sees in pop-up
pub label: String,
/// What string is used for filtering, defaults to label
pub lookup: Option<String>,
/// What is inserted, defaults to label
pub snippet: Option<String>,
}
pub(crate) fn resolve_based_completion(db: &db::RootDatabase, file_id: FileId, offset: TextUnit) -> Cancelable<Option<Vec<CompletionItem>>> {
let source_root_id = db.file_source_root(file_id);
let file = db.file_syntax(file_id);
@ -72,3 +88,602 @@ fn crate_path(name_ref: ast::NameRef) -> Option<Vec<ast::NameRef>> {
res.reverse();
Some(res)
}
pub(crate) fn scope_completion(
db: &db::RootDatabase,
file_id: FileId,
offset: TextUnit,
) -> Option<Vec<CompletionItem>> {
let original_file = db.file_syntax(file_id);
// Insert a fake ident to get a valid parse tree
let file = {
let edit = AtomEdit::insert(offset, "intellijRulezz".to_string());
original_file.reparse(&edit)
};
let mut has_completions = false;
let mut res = Vec::new();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
has_completions = true;
complete_name_ref(&file, name_ref, &mut res);
// special case, `trait T { fn foo(i_am_a_name_ref) {} }`
if is_node::<ast::Param>(name_ref.syntax()) {
param_completions(name_ref.syntax(), &mut res);
}
let name_range = name_ref.syntax().range();
let top_node = name_ref
.syntax()
.ancestors()
.take_while(|it| it.range() == name_range)
.last()
.unwrap();
match top_node.parent().map(|it| it.kind()) {
Some(ROOT) | Some(ITEM_LIST) => complete_mod_item_snippets(&mut res),
_ => (),
}
}
if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
if is_node::<ast::Param>(name.syntax()) {
has_completions = true;
param_completions(name.syntax(), &mut res);
}
}
if has_completions {
Some(res)
} else {
None
}
}
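(Aside, not part of the diff.) scope_completion above leans on a small trick: before inspecting the tree it inserts a placeholder identifier, "intellijRulezz", at the cursor offset so the reparsed file contains a syntactically valid name to complete. A standalone sketch of just that text edit, with plain string splicing standing in for AtomEdit::insert and File::reparse:

// Minimal sketch of the fake-ident insertion; `offset` plays the role of
// TextUnit and string splicing stands in for AtomEdit + reparse.
fn insert_fake_ident(text: &str, offset: usize) -> String {
    assert!(text.is_char_boundary(offset));
    let mut patched = String::with_capacity(text.len() + "intellijRulezz".len());
    patched.push_str(&text[..offset]);
    patched.push_str("intellijRulezz");
    patched.push_str(&text[offset..]);
    patched
}

fn main() {
    // `1 + <cursor>;` becomes `1 + intellijRulezz;`, which parses as a NameRef.
    let src = "fn quux() { 1 + ; }";
    let offset = src.find(';').unwrap();
    assert_eq!(insert_fake_ident(src, offset), "fn quux() { 1 + intellijRulezz; }");
}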
fn complete_module_items(
file: &File,
items: AstChildren<ast::ModuleItem>,
this_item: Option<ast::NameRef>,
acc: &mut Vec<CompletionItem>,
) {
let scope = ModuleScope::from_items(items);
acc.extend(
scope
.entries()
.iter()
.filter(|entry| {
let syntax = entry.ptr().resolve(file);
Some(syntax.borrowed()) != this_item.map(|it| it.syntax())
})
.map(|entry| CompletionItem {
label: entry.name().to_string(),
lookup: None,
snippet: None,
}),
);
}
fn complete_name_ref(
file: &File,
name_ref: ast::NameRef,
acc: &mut Vec<CompletionItem>,
) {
if !is_node::<ast::Path>(name_ref.syntax()) {
return;
}
let mut visited_fn = false;
for node in name_ref.syntax().ancestors() {
if let Some(items) = visitor()
.visit::<ast::Root, _>(|it| Some(it.items()))
.visit::<ast::Module, _>(|it| Some(it.item_list()?.items()))
.accept(node)
{
if let Some(items) = items {
complete_module_items(file, items, Some(name_ref), acc);
}
break;
} else if !visited_fn {
if let Some(fn_def) = ast::FnDef::cast(node) {
visited_fn = true;
complete_expr_keywords(&file, fn_def, name_ref, acc);
complete_expr_snippets(acc);
let scopes = FnScopes::new(fn_def);
complete_fn(name_ref, &scopes, acc);
}
}
}
}
fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
let mut params = FxHashMap::default();
for node in ctx.ancestors() {
let _ = visitor_ctx(&mut params)
.visit::<ast::Root, _>(process)
.visit::<ast::ItemList, _>(process)
.accept(node);
}
params
.into_iter()
.filter_map(|(label, (count, param))| {
let lookup = param.pat()?.syntax().text().to_string();
if count < 2 {
None
} else {
Some((label, lookup))
}
})
.for_each(|(label, lookup)| {
acc.push(CompletionItem {
label,
lookup: Some(lookup),
snippet: None,
})
});
fn process<'a, N: ast::FnDefOwner<'a>>(
node: N,
params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
) {
node.functions()
.filter_map(|it| it.param_list())
.flat_map(|it| it.params())
.for_each(|param| {
let text = param.syntax().text().to_string();
params.entry(text).or_insert((0, param)).0 += 1;
})
}
}
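(Aside, not part of the diff.) param_completions offers a whole `name: Type` parameter only when the same parameter text already occurs at least twice in the surrounding functions; it counts occurrences in a map and drops entries with count < 2. A std-only sketch of that counting step (the real code gathers ast::Param nodes via visitor_ctx):

use std::collections::HashMap;

// Count textually identical parameters across a set of function signatures and
// keep only those seen at least twice, mirroring the `count < 2` filter above.
fn frequent_params(param_texts: &[&str]) -> Vec<String> {
    let mut counts: HashMap<&str, u32> = HashMap::new();
    for &p in param_texts {
        *counts.entry(p).or_insert(0) += 1;
    }
    let mut res: Vec<String> = counts
        .into_iter()
        .filter(|&(_, count)| count >= 2)
        .map(|(text, _)| text.to_string())
        .collect();
    res.sort();
    res
}

fn main() {
    // As in the tests below: `file_id: FileId` appears in `foo` and `bar`,
    // so it is offered while typing the parameters of `baz`.
    let params = ["file_id: FileId", "file_id: FileId", "x: i32"];
    assert_eq!(frequent_params(&params), vec!["file_id: FileId".to_string()]);
}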
fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
match node.ancestors().filter_map(N::cast).next() {
None => false,
Some(n) => n.syntax().range() == node.range(),
}
}
fn complete_expr_keywords(
file: &File,
fn_def: ast::FnDef,
name_ref: ast::NameRef,
acc: &mut Vec<CompletionItem>,
) {
acc.push(keyword("if", "if $0 {}"));
acc.push(keyword("match", "match $0 {}"));
acc.push(keyword("while", "while $0 {}"));
acc.push(keyword("loop", "loop {$0}"));
if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
acc.push(keyword("else", "else {$0}"));
acc.push(keyword("else if", "else if $0 {}"));
}
}
}
if is_in_loop_body(name_ref) {
acc.push(keyword("continue", "continue"));
acc.push(keyword("break", "break"));
}
acc.extend(complete_return(fn_def, name_ref));
}
fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
for node in name_ref.syntax().ancestors() {
if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
break;
}
let loop_body = visitor()
.visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
.accept(node);
if let Some(Some(body)) = loop_body {
if name_ref.syntax().range().is_subrange(&body.syntax().range()) {
return true;
}
}
}
false
}
fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
// let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
// .next()
// .and_then(|it| it.syntax().parent())
// .and_then(ast::Block::cast)
// .is_some();
// if is_last_in_block {
// return None;
// }
let is_stmt = match name_ref
.syntax()
.ancestors()
.filter_map(ast::ExprStmt::cast)
.next()
{
None => false,
Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
};
let snip = match (is_stmt, fn_def.ret_type().is_some()) {
(true, true) => "return $0;",
(true, false) => "return;",
(false, true) => "return $0",
(false, false) => "return",
};
Some(keyword("return", snip))
}
fn keyword(kw: &str, snip: &str) -> CompletionItem {
CompletionItem {
label: kw.to_string(),
lookup: None,
snippet: Some(snip.to_string()),
}
}
fn complete_expr_snippets(acc: &mut Vec<CompletionItem>) {
acc.push(CompletionItem {
label: "pd".to_string(),
lookup: None,
snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()),
});
acc.push(CompletionItem {
label: "ppd".to_string(),
lookup: None,
snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()),
});
}
fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
acc.push(CompletionItem {
label: "tfn".to_string(),
lookup: None,
snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()),
});
acc.push(CompletionItem {
label: "pub(crate)".to_string(),
lookup: None,
snippet: Some("pub(crate) $0".to_string()),
})
}
fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
let mut shadowed = FxHashSet::default();
acc.extend(
scopes
.scope_chain(name_ref.syntax())
.flat_map(|scope| scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.map(|entry| CompletionItem {
label: entry.name().to_string(),
lookup: None,
snippet: None,
}),
);
if scopes.self_param.is_some() {
acc.push(CompletionItem {
label: "self".to_string(),
lookup: None,
snippet: None,
})
}
}
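(Aside, not part of the diff.) complete_fn, like resolve_local_name later in this diff, resolves shadowing by walking scopes innermost-first and keeping only the first occurrence of each name; HashSet::insert returning false for duplicates does the filtering. A standalone sketch with plain Vecs standing in for the scope chain:

use std::collections::HashSet;

// Scopes ordered innermost-first; the first binding of a name wins, so outer
// bindings shadowed by inner ones are dropped by `HashSet::insert`.
fn visible_names(scope_chain: &[Vec<&str>]) -> Vec<String> {
    let mut shadowed = HashSet::new();
    scope_chain
        .iter()
        .flat_map(|scope| scope.iter().copied())
        .filter(|name| shadowed.insert(*name))
        .map(|name| name.to_string())
        .collect()
}

fn main() {
    // `let bar = 92; { let bar = 62; <|> }` — only the inner `bar` is offered.
    let chain = vec![vec!["bar"], vec!["bar", "foo"]];
    assert_eq!(visible_names(&chain), vec!["bar".to_string(), "foo".to_string()]);
}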
#[cfg(test)]
mod tests {
use test_utils::{assert_eq_dbg, extract_offset};
use crate::FileId;
use crate::mock_analysis::MockAnalysis;
use super::*;
fn check_scope_completion(code: &str, expected_completions: &str) {
let (off, code) = extract_offset(&code);
let analysis = MockAnalysis::with_files(&[("/main.rs", &code)]).analysis();
let file_id = FileId(1);
let completions = scope_completion(&analysis.imp.db, file_id, off)
.unwrap()
.into_iter()
.filter(|c| c.snippet.is_none())
.collect::<Vec<_>>();
assert_eq_dbg(expected_completions, &completions);
}
fn check_snippet_completion(code: &str, expected_completions: &str) {
let (off, code) = extract_offset(&code);
let analysis = MockAnalysis::with_files(&[("/main.rs", &code)]).analysis();
let file_id = FileId(1);
let completions = scope_completion(&analysis.imp.db, file_id, off)
.unwrap()
.into_iter()
.filter(|c| c.snippet.is_some())
.collect::<Vec<_>>();
assert_eq_dbg(expected_completions, &completions);
}
#[test]
fn test_completion_let_scope() {
check_scope_completion(
r"
fn quux(x: i32) {
let y = 92;
1 + <|>;
let z = ();
}
",
r#"[CompletionItem { label: "y", lookup: None, snippet: None },
CompletionItem { label: "x", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_if_let_scope() {
check_scope_completion(
r"
fn quux() {
if let Some(x) = foo() {
let y = 92;
};
if let Some(a) = bar() {
let b = 62;
1 + <|>
}
}
",
r#"[CompletionItem { label: "b", lookup: None, snippet: None },
CompletionItem { label: "a", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_for_scope() {
check_scope_completion(
r"
fn quux() {
for x in &[1, 2, 3] {
<|>
}
}
",
r#"[CompletionItem { label: "x", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_mod_scope() {
check_scope_completion(
r"
struct Foo;
enum Baz {}
fn quux() {
<|>
}
",
r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
CompletionItem { label: "Baz", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_mod_scope_no_self_use() {
check_scope_completion(
r"
use foo<|>;
",
r#"[]"#,
);
}
#[test]
fn test_completion_mod_scope_nested() {
check_scope_completion(
r"
struct Foo;
mod m {
struct Bar;
fn quux() { <|> }
}
",
r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_complete_type() {
check_scope_completion(
r"
struct Foo;
fn x() -> <|>
",
r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
CompletionItem { label: "x", lookup: None, snippet: None }]"#,
)
}
#[test]
fn test_complete_shadowing() {
check_scope_completion(
r"
fn foo() -> {
let bar = 92;
{
let bar = 62;
<|>
}
}
",
r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
)
}
#[test]
fn test_complete_self() {
check_scope_completion(
r"
impl S { fn foo(&self) { <|> } }
",
r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#,
)
}
#[test]
fn test_completion_kewords() {
check_snippet_completion(r"
fn quux() {
<|>
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_completion_else() {
check_snippet_completion(r"
fn quux() {
if true {
()
} <|>
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "else", lookup: None, snippet: Some("else {$0}") },
CompletionItem { label: "else if", lookup: None, snippet: Some("else if $0 {}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_completion_return_value() {
check_snippet_completion(r"
fn quux() -> i32 {
<|>
92
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0;") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
check_snippet_completion(r"
fn quux() {
<|>
92
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return;") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_completion_return_no_stmt() {
check_snippet_completion(r"
fn quux() -> i32 {
match () {
() => <|>
}
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_continue_break_completion() {
check_snippet_completion(r"
fn quux() -> i32 {
loop { <|> }
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "continue", lookup: None, snippet: Some("continue") },
CompletionItem { label: "break", lookup: None, snippet: Some("break") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
check_snippet_completion(r"
fn quux() -> i32 {
loop { || { <|> } }
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_param_completion_last_param() {
check_scope_completion(r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>) {}
", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
}
#[test]
fn test_param_completion_nth_param() {
check_scope_completion(r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>, x: i32) {}
", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
}
#[test]
fn test_param_completion_trait_param() {
check_scope_completion(r"
pub(crate) trait SourceRoot {
pub fn contains(&self, file_id: FileId) -> bool;
pub fn module_map(&self) -> &ModuleMap;
pub fn lines(&self, file_id: FileId) -> &LineIndex;
pub fn syntax(&self, file<|>)
}
", r#"[CompletionItem { label: "self", lookup: None, snippet: None },
CompletionItem { label: "SourceRoot", lookup: None, snippet: None },
CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
}
#[test]
fn test_item_snippets() {
// check_snippet_completion(r"
// <|>
// ",
// r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") }]"##,
// );
check_snippet_completion(r"
#[cfg(test)]
mod tests {
<|>
}
",
r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") },
CompletionItem { label: "pub(crate)", lookup: None, snippet: Some("pub(crate) $0") }]"##,
);
}
}

View File

@ -9,7 +9,10 @@ use salsa;
use crate::{
db,
Cancelable, Canceled,
descriptors::module::{SubmodulesQuery, ModuleTreeQuery, ModulesDatabase, ModuleScopeQuery},
descriptors::{
DescriptorDatabase, SubmodulesQuery, ModuleTreeQuery, ModuleScopeQuery,
FnSyntaxQuery, FnScopesQuery
},
symbol_index::SymbolIndex,
syntax_ptr::{SyntaxPtrDatabase, ResolveSyntaxPtrQuery},
FileId,
@ -63,10 +66,12 @@ salsa::database_storage! {
fn file_lines() for FileLinesQuery;
fn file_symbols() for FileSymbolsQuery;
}
impl ModulesDatabase {
impl DescriptorDatabase {
fn module_tree() for ModuleTreeQuery;
fn module_descriptor() for SubmodulesQuery;
fn module_scope() for ModuleScopeQuery;
fn fn_syntax() for FnSyntaxQuery;
fn fn_scopes() for FnScopesQuery;
}
impl SyntaxPtrDatabase {
fn resolve_syntax_ptr() for ResolveSyntaxPtrQuery;

View File

@ -0,0 +1,26 @@
use std::sync::Arc;
use ra_syntax::{
ast::{AstNode, FnDef, FnDefNode},
};
use crate::{
descriptors::{
DescriptorDatabase,
function::{FnId, FnScopes},
},
};
/// Resolve `FnId` to the corresponding `SyntaxNode`
/// TODO: this should return something more type-safe then `SyntaxNode`
pub(crate) fn fn_syntax(db: &impl DescriptorDatabase, fn_id: FnId) -> FnDefNode {
let syntax = db.resolve_syntax_ptr(fn_id.0);
let fn_def = FnDef::cast(syntax.borrowed()).unwrap();
FnDefNode::new(fn_def)
}
pub(crate) fn fn_scopes(db: &impl DescriptorDatabase, fn_id: FnId) -> Arc<FnScopes> {
let syntax = db.fn_syntax(fn_id);
let res = FnScopes::new(syntax.ast());
Arc::new(res)
}

View File

@ -0,0 +1,83 @@
pub(super) mod imp;
mod scope;
use ra_syntax::{
ast::{self, AstNode, NameOwner}
};
use crate::{
FileId,
syntax_ptr::SyntaxPtr
};
pub(crate) use self::scope::{FnScopes, resolve_local_name};
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct FnId(SyntaxPtr);
impl FnId {
pub(crate) fn new(file_id: FileId, fn_def: ast::FnDef) -> FnId {
let ptr = SyntaxPtr::new(file_id, fn_def.syntax());
FnId(ptr)
}
}
#[derive(Debug, Clone)]
pub struct FnDescriptor {
pub name: String,
pub label: String,
pub ret_type: Option<String>,
pub params: Vec<String>,
}
impl FnDescriptor {
pub fn new(node: ast::FnDef) -> Option<Self> {
let name = node.name()?.text().to_string();
// Strip the body out for the label.
let label: String = if let Some(body) = node.body() {
let body_range = body.syntax().range();
let label: String = node
.syntax()
.children()
.filter(|child| !child.range().is_subrange(&body_range))
.map(|node| node.text().to_string())
.collect();
label
} else {
node.syntax().text().to_string()
};
let params = FnDescriptor::param_list(node);
let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
Some(FnDescriptor {
name,
ret_type,
params,
label,
})
}
fn param_list(node: ast::FnDef) -> Vec<String> {
let mut res = vec![];
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
res.push(self_param.syntax().text().to_string())
}
// Maybe use param.pat here? See if we can just extract the name?
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
res.extend(
param_list
.params()
.filter_map(|p| p.pat())
.map(|pat| pat.syntax().text().to_string()),
);
}
res
}
}

View File

@ -1,29 +1,42 @@
use std::fmt;
use rustc_hash::FxHashMap;
use rustc_hash::{FxHashMap, FxHashSet};
use ra_syntax::{
algo::generate,
ast::{self, ArgListOwner, LoopBodyOwner, NameOwner},
AstNode, SmolStr, SyntaxNode, SyntaxNodeRef,
AstNode, SmolStr, SyntaxNodeRef,
};
type ScopeId = usize;
use crate::syntax_ptr::LocalSyntaxPtr;
#[derive(Debug)]
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
pub(crate) struct ScopeId(u32);
#[derive(Debug, PartialEq, Eq)]
pub struct FnScopes {
pub self_param: Option<SyntaxNode>,
pub(crate) self_param: Option<LocalSyntaxPtr>,
scopes: Vec<ScopeData>,
scope_for: FxHashMap<SyntaxNode, ScopeId>,
scope_for: FxHashMap<LocalSyntaxPtr, ScopeId>,
}
#[derive(Debug, PartialEq, Eq)]
pub struct ScopeEntry {
name: SmolStr,
ptr: LocalSyntaxPtr,
}
#[derive(Debug, PartialEq, Eq)]
struct ScopeData {
parent: Option<ScopeId>,
entries: Vec<ScopeEntry>,
}
impl FnScopes {
pub fn new(fn_def: ast::FnDef) -> FnScopes {
pub(crate) fn new(fn_def: ast::FnDef) -> FnScopes {
let mut scopes = FnScopes {
self_param: fn_def
.param_list()
.and_then(|it| it.self_param())
.map(|it| it.syntax().owned()),
.map(|it| LocalSyntaxPtr::new(it.syntax())),
scopes: Vec::new(),
scope_for: FxHashMap::default(),
};
@ -34,16 +47,16 @@ impl FnScopes {
}
scopes
}
pub fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
&self.scopes[scope].entries
pub(crate) fn entries(&self, scope: ScopeId) -> &[ScopeEntry] {
&self.get(scope).entries
}
pub fn scope_chain<'a>(&'a self, node: SyntaxNodeRef) -> impl Iterator<Item = ScopeId> + 'a {
generate(self.scope_for(node), move |&scope| {
self.scopes[scope].parent
self.get(scope).parent
})
}
fn root_scope(&mut self) -> ScopeId {
let res = self.scopes.len();
let res = ScopeId(self.scopes.len() as u32);
self.scopes.push(ScopeData {
parent: None,
entries: vec![],
@ -51,7 +64,7 @@ impl FnScopes {
res
}
fn new_scope(&mut self, parent: ScopeId) -> ScopeId {
let res = self.scopes.len();
let res = ScopeId(self.scopes.len() as u32);
self.scopes.push(ScopeData {
parent: Some(parent),
entries: vec![],
@ -64,7 +77,7 @@ impl FnScopes {
.descendants()
.filter_map(ast::BindPat::cast)
.filter_map(ScopeEntry::new);
self.scopes[scope].entries.extend(entries);
self.get_mut(scope).entries.extend(entries);
}
fn add_params_bindings(&mut self, scope: ScopeId, params: Option<ast::ParamList>) {
params
@ -74,43 +87,36 @@ impl FnScopes {
.for_each(|it| self.add_bindings(scope, it));
}
fn set_scope(&mut self, node: SyntaxNodeRef, scope: ScopeId) {
self.scope_for.insert(node.owned(), scope);
self.scope_for.insert(LocalSyntaxPtr::new(node), scope);
}
fn scope_for(&self, node: SyntaxNodeRef) -> Option<ScopeId> {
node.ancestors()
.filter_map(|it| self.scope_for.get(&it.owned()).map(|&scope| scope))
.map(LocalSyntaxPtr::new)
.filter_map(|it| self.scope_for.get(&it).map(|&scope| scope))
.next()
}
}
pub struct ScopeEntry {
syntax: SyntaxNode,
fn get(&self, scope: ScopeId) -> &ScopeData {
&self.scopes[scope.0 as usize]
}
fn get_mut(&mut self, scope: ScopeId) -> &mut ScopeData {
&mut self.scopes[scope.0 as usize]
}
}
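(Aside, not part of the diff.) The FnScopes rewrite above swaps owned SyntaxNodes for a compact arena: ScopeId(u32) indexes into scopes: Vec<ScopeData>, and the scope chain is just a walk along parent links. A standalone sketch of that arena-plus-parent-chain pattern, with std::iter::successors in place of ra_syntax's generate helper:

// Arena of scopes addressed by a small copyable id; the scope chain is the
// walk from a scope to the root via `parent` links.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ScopeId(u32);

#[derive(Debug)]
struct ScopeData {
    parent: Option<ScopeId>,
    entries: Vec<String>,
}

#[derive(Default, Debug)]
struct Scopes {
    scopes: Vec<ScopeData>,
}

impl Scopes {
    fn alloc(&mut self, parent: Option<ScopeId>) -> ScopeId {
        let res = ScopeId(self.scopes.len() as u32);
        self.scopes.push(ScopeData { parent, entries: Vec::new() });
        res
    }
    fn get(&self, scope: ScopeId) -> &ScopeData {
        &self.scopes[scope.0 as usize]
    }
    // Innermost-to-outermost chain, like FnScopes::scope_chain.
    fn scope_chain(&self, scope: ScopeId) -> impl Iterator<Item = ScopeId> + '_ {
        std::iter::successors(Some(scope), move |&s| self.get(s).parent)
    }
}

fn main() {
    let mut scopes = Scopes::default();
    let root = scopes.alloc(None);
    let inner = scopes.alloc(Some(root));
    scopes.scopes[root.0 as usize].entries.push("x".to_string());
    scopes.scopes[inner.0 as usize].entries.push("y".to_string());
    let chain: Vec<ScopeId> = scopes.scope_chain(inner).collect();
    assert_eq!(chain, vec![inner, root]);
}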
impl ScopeEntry {
fn new(pat: ast::BindPat) -> Option<ScopeEntry> {
if pat.name().is_some() {
Some(ScopeEntry {
syntax: pat.syntax().owned(),
})
} else {
None
}
let name = pat.name()?;
let res = ScopeEntry {
name: name.text(),
ptr: LocalSyntaxPtr::new(pat.syntax()),
};
Some(res)
}
pub fn name(&self) -> SmolStr {
self.ast().name().unwrap().text()
pub(crate) fn name(&self) -> &SmolStr {
&self.name
}
pub fn ast(&self) -> ast::BindPat {
ast::BindPat::cast(self.syntax.borrowed()).unwrap()
}
}
impl fmt::Debug for ScopeEntry {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("ScopeEntry")
.field("name", &self.name())
.field("syntax", &self.syntax)
.finish()
pub(crate) fn ptr(&self) -> LocalSyntaxPtr {
self.ptr
}
}
@ -251,33 +257,28 @@ fn compute_expr_scopes(expr: ast::Expr, scopes: &mut FnScopes, scope: ScopeId) {
}
}
#[derive(Debug)]
struct ScopeData {
parent: Option<ScopeId>,
entries: Vec<ScopeEntry>,
}
pub fn resolve_local_name<'a>(
name_ref: ast::NameRef,
scopes: &'a FnScopes,
) -> Option<&'a ScopeEntry> {
use rustc_hash::FxHashSet;
let mut shadowed = FxHashSet::default();
let ret = scopes
.scope_chain(name_ref.syntax())
.flat_map(|scope| scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.filter(|entry| entry.name() == name_ref.text())
.filter(|entry| entry.name() == &name_ref.text())
.nth(0);
ret
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{find_node_at_offset, test_utils::extract_offset};
use ra_syntax::File;
use test_utils::extract_offset;
use ra_editor::{find_node_at_offset};
use super::*;
fn do_check(code: &str, expected: &[&str]) {
let (off, code) = extract_offset(code);
@ -384,14 +385,11 @@ mod tests {
let scopes = FnScopes::new(fn_def);
let local_name = resolve_local_name(name_ref, &scopes)
.unwrap()
.ast()
.name()
.unwrap();
let local_name_entry = resolve_local_name(name_ref, &scopes).unwrap();
let local_name = local_name_entry.ptr().resolve(&file);
let expected_name =
find_node_at_offset::<ast::Name>(file.syntax(), expected_offset.into()).unwrap();
assert_eq!(local_name.syntax().range(), expected_name.syntax().range());
assert_eq!(local_name.range(), expected_name.syntax().range());
}
#[test]

View File

@ -1,62 +1,46 @@
pub(crate) mod module;
pub(crate) mod function;
use std::sync::Arc;
use ra_syntax::{
ast::{self, AstNode, NameOwner},
SmolStr,
ast::{FnDefNode},
};
#[derive(Debug, Clone)]
pub struct FnDescriptor {
pub name: String,
pub label: String,
pub ret_type: Option<String>,
pub params: Vec<String>,
}
use crate::{
FileId, Cancelable,
db::SyntaxDatabase,
descriptors::module::{ModuleTree, ModuleId, ModuleScope},
descriptors::function::{FnId, FnScopes},
input::SourceRootId,
syntax_ptr::SyntaxPtrDatabase,
};
impl FnDescriptor {
pub fn new(node: ast::FnDef) -> Option<Self> {
let name = node.name()?.text().to_string();
// Strip the body out for the label.
let label: String = if let Some(body) = node.body() {
let body_range = body.syntax().range();
let label: String = node
.syntax()
.children()
.filter(|child| !child.range().is_subrange(&body_range))
.map(|node| node.text().to_string())
.collect();
label
} else {
node.syntax().text().to_string()
};
let params = FnDescriptor::param_list(node);
let ret_type = node.ret_type().map(|r| r.syntax().text().to_string());
Some(FnDescriptor {
name,
ret_type,
params,
label,
})
}
fn param_list(node: ast::FnDef) -> Vec<String> {
let mut res = vec![];
if let Some(param_list) = node.param_list() {
if let Some(self_param) = param_list.self_param() {
res.push(self_param.syntax().text().to_string())
}
// Maybe use param.pat here? See if we can just extract the name?
//res.extend(param_list.params().map(|p| p.syntax().text().to_string()));
res.extend(
param_list
.params()
.filter_map(|p| p.pat())
.map(|pat| pat.syntax().text().to_string()),
);
salsa::query_group! {
pub(crate) trait DescriptorDatabase: SyntaxDatabase + SyntaxPtrDatabase {
fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
type ModuleTreeQuery;
use fn module::imp::module_tree;
}
fn submodules(file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
type SubmodulesQuery;
use fn module::imp::submodules;
}
fn module_scope(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<ModuleScope>> {
type ModuleScopeQuery;
use fn module::imp::module_scope;
}
fn fn_syntax(fn_id: FnId) -> FnDefNode {
type FnSyntaxQuery;
// Don't retain syntax trees in memory
storage volatile;
use fn function::imp::fn_syntax;
}
fn fn_scopes(fn_id: FnId) -> Arc<FnScopes> {
type FnScopesQuery;
use fn function::imp::fn_scopes;
}
res
}
}

View File

@ -10,14 +10,15 @@ use ra_syntax::{
use crate::{
FileId, Cancelable, FileResolverImp, db,
input::{SourceRoot, SourceRootId},
descriptors::DescriptorDatabase,
};
use super::{
ModuleData, ModuleTree, ModuleId, LinkId, LinkData, Problem, ModulesDatabase, ModuleScope
ModuleData, ModuleTree, ModuleId, LinkId, LinkData, Problem, ModuleScope
};
pub(super) fn submodules(db: &impl ModulesDatabase, file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
pub(crate) fn submodules(db: &impl DescriptorDatabase, file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
db::check_canceled(db)?;
let file = db.file_syntax(file_id);
let root = file.ast();
@ -25,7 +26,7 @@ pub(super) fn submodules(db: &impl ModulesDatabase, file_id: FileId) -> Cancelab
Ok(Arc::new(submodules))
}
pub(super) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast::Module<'_>)> {
pub(crate) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast::Module<'_>)> {
root.modules().filter_map(|module| {
let name = module.name()?.text();
if !module.has_semi() {
@ -35,8 +36,8 @@ pub(super) fn modules(root: ast::Root<'_>) -> impl Iterator<Item = (SmolStr, ast
})
}
pub(super) fn module_scope(
db: &impl ModulesDatabase,
pub(crate) fn module_scope(
db: &impl DescriptorDatabase,
source_root_id: SourceRootId,
module_id: ModuleId,
) -> Cancelable<Arc<ModuleScope>> {
@ -47,8 +48,8 @@ pub(super) fn module_scope(
Ok(Arc::new(res))
}
pub(super) fn module_tree(
db: &impl ModulesDatabase,
pub(crate) fn module_tree(
db: &impl DescriptorDatabase,
source_root: SourceRootId,
) -> Cancelable<Arc<ModuleTree>> {
db::check_canceled(db)?;
@ -64,7 +65,7 @@ pub struct Submodule {
fn create_module_tree<'a>(
db: &impl ModulesDatabase,
db: &impl DescriptorDatabase,
source_root: SourceRootId,
) -> Cancelable<ModuleTree> {
let mut tree = ModuleTree {
@ -88,7 +89,7 @@ fn create_module_tree<'a>(
}
fn build_subtree(
db: &impl ModulesDatabase,
db: &impl DescriptorDatabase,
source_root: &SourceRoot,
tree: &mut ModuleTree,
visited: &mut FxHashSet<FileId>,

View File

@ -1,37 +1,13 @@
mod imp;
pub(super) mod imp;
pub(crate) mod scope;
use std::sync::Arc;
use relative_path::RelativePathBuf;
use ra_syntax::{ast::{self, NameOwner, AstNode}, SmolStr, SyntaxNode};
use crate::{
FileId, Cancelable,
db::SyntaxDatabase,
input::SourceRootId,
};
use crate::FileId;
pub(crate) use self::scope::ModuleScope;
salsa::query_group! {
pub(crate) trait ModulesDatabase: SyntaxDatabase {
fn module_tree(source_root_id: SourceRootId) -> Cancelable<Arc<ModuleTree>> {
type ModuleTreeQuery;
use fn imp::module_tree;
}
fn submodules(file_id: FileId) -> Cancelable<Arc<Vec<SmolStr>>> {
type SubmodulesQuery;
use fn imp::submodules;
}
fn module_scope(source_root_id: SourceRootId, module_id: ModuleId) -> Cancelable<Arc<ModuleScope>> {
type ModuleScopeQuery;
use fn imp::module_scope;
}
}
}
#[derive(Debug, PartialEq, Eq, Hash)]
pub(crate) struct ModuleTree {
mods: Vec<ModuleData>,

View File

@ -2,8 +2,8 @@
use ra_syntax::{
ast::{self, AstChildren, ModuleItemOwner},
File, AstNode, SmolStr, SyntaxNode, SyntaxNodeRef,
ast::{self, ModuleItemOwner},
File, AstNode, SmolStr,
};
use crate::syntax_ptr::LocalSyntaxPtr;
@ -30,8 +30,12 @@ enum EntryKind {
impl ModuleScope {
pub fn new(file: &File) -> ModuleScope {
ModuleScope::from_items(file.ast().items())
}
pub fn from_items<'a>(items: impl Iterator<Item = ast::ModuleItem<'a>>) -> ModuleScope {
let mut entries = Vec::new();
for item in file.ast().items() {
for item in items {
let entry = match item {
ast::ModuleItem::StructDef(item) => Entry::new(item),
ast::ModuleItem::EnumDef(item) => Entry::new(item),
@ -99,7 +103,7 @@ fn collect_imports(tree: ast::UseTree, acc: &mut Vec<Entry>) {
#[cfg(test)]
mod tests {
use super::*;
use ra_syntax::{ast::ModuleItemOwner, File};
use ra_syntax::{File};
fn do_check(code: &str, expected: &[&str]) {
let file = File::parse(&code);

View File

@ -3,7 +3,7 @@ use std::{
sync::Arc,
};
use ra_editor::{self, find_node_at_offset, resolve_local_name, FileSymbol, LineIndex, LocalEdit, CompletionItem};
use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
use ra_syntax::{
ast::{self, ArgListOwner, Expr, NameOwner},
AstNode, File, SmolStr,
@ -21,9 +21,14 @@ use crate::{
self, SyntaxDatabase, FileSyntaxQuery,
},
input::{SourceRootId, FilesDatabase, SourceRoot, WORKSPACE},
descriptors::module::{ModulesDatabase, ModuleTree, Problem},
descriptors::{FnDescriptor},
descriptors::{
DescriptorDatabase,
module::{ModuleTree, Problem},
function::{FnDescriptor, FnId},
},
completion::{scope_completion, resolve_based_completion, CompletionItem},
symbol_index::SymbolIndex,
syntax_ptr::SyntaxPtrDatabase,
CrateGraph, CrateId, Diagnostic, FileId, FileResolver, FileSystemEdit, Position,
Query, SourceChange, SourceFileEdit, Cancelable,
};
@ -175,7 +180,7 @@ impl AnalysisHostImpl {
#[derive(Debug)]
pub(crate) struct AnalysisImpl {
db: db::RootDatabase,
pub(crate) db: db::RootDatabase,
}
impl AnalysisImpl {
@ -245,12 +250,11 @@ impl AnalysisImpl {
pub fn completions(&self, file_id: FileId, offset: TextUnit) -> Cancelable<Option<Vec<CompletionItem>>> {
let mut res = Vec::new();
let mut has_completions = false;
let file = self.file_syntax(file_id);
if let Some(scope_based) = ra_editor::scope_completion(&file, offset) {
if let Some(scope_based) = scope_completion(&self.db, file_id, offset) {
res.extend(scope_based);
has_completions = true;
}
if let Some(scope_based) = crate::completion::resolve_based_completion(&self.db, file_id, offset)? {
if let Some(scope_based) = resolve_based_completion(&self.db, file_id, offset)? {
res.extend(scope_based);
has_completions = true;
}
@ -271,7 +275,7 @@ impl AnalysisImpl {
let syntax = file.syntax();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
// First try to resolve the symbol locally
return if let Some((name, range)) = resolve_local_name(name_ref) {
return if let Some((name, range)) = resolve_local_name(&self.db, file_id, name_ref) {
let mut vec = vec![];
vec.push((
file_id,
@ -325,7 +329,7 @@ impl AnalysisImpl {
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(syntax, offset) {
// We are only handing local references for now
if let Some(resolved) = resolve_local_name(name_ref) {
if let Some(resolved) = resolve_local_name(&self.db, file_id, name_ref) {
ret.push((file_id, resolved.1));
@ -333,7 +337,7 @@ impl AnalysisImpl {
let refs : Vec<_> = fn_def.syntax().descendants()
.filter_map(ast::NameRef::cast)
.filter(|&n: &ast::NameRef| resolve_local_name(n) == Some(resolved.clone()))
.filter(|&n: &ast::NameRef| resolve_local_name(&self.db, file_id, n) == Some(resolved.clone()))
.collect();
for r in refs {
@ -597,3 +601,16 @@ impl<'a> FnCallNode<'a> {
}
}
}
fn resolve_local_name(
db: &db::RootDatabase,
file_id: FileId,
name_ref: ast::NameRef,
) -> Option<(SmolStr, TextRange)> {
let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?;
let fn_id = FnId::new(file_id, fn_def);
let scopes = db.fn_scopes(fn_id);
let scope_entry = crate::descriptors::function::resolve_local_name(name_ref, &scopes)?;
let syntax = db.resolve_syntax_ptr(scope_entry.ptr().into_global(file_id));
Some((scope_entry.name().clone(), syntax.range()))
}

View File

@ -13,6 +13,7 @@ mod imp;
mod symbol_index;
mod completion;
mod syntax_ptr;
mod mock_analysis;
use std::{
fmt,
@ -29,11 +30,13 @@ use crate::{
};
pub use crate::{
descriptors::FnDescriptor,
input::{FileId, FileResolver, CrateGraph, CrateId}
descriptors::function::FnDescriptor,
completion::CompletionItem,
input::{FileId, FileResolver, CrateGraph, CrateId},
mock_analysis::MockAnalysis,
};
pub use ra_editor::{
CompletionItem, FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
FileSymbol, Fold, FoldKind, HighlightedRange, LineIndex, Runnable,
RunnableKind, StructureNode,
};
@ -197,7 +200,7 @@ impl Query {
#[derive(Debug)]
pub struct Analysis {
imp: AnalysisImpl,
pub(crate) imp: AnalysisImpl,
}
impl Analysis {

View File

@ -0,0 +1,71 @@
use std::sync::Arc;
use relative_path::{RelativePath, RelativePathBuf};
use crate::{
AnalysisChange, Analysis, AnalysisHost, FileId, FileResolver,
};
/// Mock analysis is used in test to bootstrap an AnalysisHost/Analysis
/// from a set of in-memory files.
#[derive(Debug, Default)]
pub struct MockAnalysis {
files: Vec<(String, String)>,
}
impl MockAnalysis {
pub fn new() -> MockAnalysis {
MockAnalysis::default()
}
pub fn with_files(files: &[(&str, &str)]) -> MockAnalysis {
let files = files.iter()
.map(|it| (it.0.to_string(), it.1.to_string()))
.collect();
MockAnalysis { files }
}
pub fn analysis_host(self) -> AnalysisHost {
let mut host = AnalysisHost::new();
let mut file_map = Vec::new();
let mut change = AnalysisChange::new();
for (id, (path, contents)) in self.files.into_iter().enumerate() {
let file_id = FileId((id + 1) as u32);
assert!(path.starts_with('/'));
let path = RelativePathBuf::from_path(&path[1..]).unwrap();
change.add_file(file_id, contents);
file_map.push((file_id, path));
}
change.set_file_resolver(Arc::new(FileMap(file_map)));
host.apply_change(change);
host
}
pub fn analysis(self) -> Analysis {
self.analysis_host().analysis()
}
}
#[derive(Debug)]
struct FileMap(Vec<(FileId, RelativePathBuf)>);
impl FileMap {
fn iter<'a>(&'a self) -> impl Iterator<Item = (FileId, &'a RelativePath)> + 'a {
self.0
.iter()
.map(|(id, path)| (*id, path.as_relative_path()))
}
fn path(&self, id: FileId) -> &RelativePath {
self.iter().find(|&(it, _)| it == id).unwrap().1
}
}
impl FileResolver for FileMap {
fn file_stem(&self, id: FileId) -> String {
self.path(id).file_stem().unwrap().to_string()
}
fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> {
let path = self.path(id).join(rel).normalize();
let id = self.iter().find(|&(_, p)| path == p)?.0;
Some(id)
}
}

View File

@ -12,6 +12,7 @@ salsa::query_group! {
pub(crate) trait SyntaxPtrDatabase: SyntaxDatabase {
fn resolve_syntax_ptr(ptr: SyntaxPtr) -> SyntaxNode {
type ResolveSyntaxPtrQuery;
// Don't retain syntax trees in memory
storage volatile;
}
}
@ -83,6 +84,10 @@ impl LocalSyntaxPtr {
.unwrap_or_else(|| panic!("can't resolve local ptr to SyntaxNode: {:?}", self))
}
}
pub(crate) fn into_global(self, file_id: FileId) -> SyntaxPtr {
SyntaxPtr { file_id, local: self}
}
}
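(Aside, not part of the diff.) into_global above simply pairs a position-based LocalSyntaxPtr with a FileId to get a pointer that is meaningful across the whole database. A simplified std-only sketch of that pairing; the real LocalSyntaxPtr also stores the node's SyntaxKind and is re-resolved against the parsed file:

// Simplified stand-ins: a "local" pointer identifies a node within one file by
// its text range; adding a FileId makes it addressable database-wide.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct LocalPtr {
    start: u32,
    end: u32,
}

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct GlobalPtr {
    file_id: FileId,
    local: LocalPtr,
}

impl LocalPtr {
    fn into_global(self, file_id: FileId) -> GlobalPtr {
        GlobalPtr { file_id, local: self }
    }
}

fn main() {
    let local = LocalPtr { start: 10, end: 14 };
    let global = local.into_global(FileId(1));
    assert_eq!(global.local, local);
    println!("{:?}", global);
}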

View File

@ -5,62 +5,16 @@ extern crate relative_path;
extern crate rustc_hash;
extern crate test_utils;
use std::{
sync::Arc,
};
use ra_syntax::TextRange;
use relative_path::{RelativePath, RelativePathBuf};
use test_utils::{assert_eq_dbg, extract_offset};
use ra_analysis::{
AnalysisChange, Analysis, AnalysisHost, CrateGraph, CrateId, FileId, FileResolver, FnDescriptor,
MockAnalysis,
AnalysisChange, Analysis, CrateGraph, CrateId, FileId, FnDescriptor,
};
#[derive(Debug)]
struct FileMap(Vec<(FileId, RelativePathBuf)>);
impl FileMap {
fn iter<'a>(&'a self) -> impl Iterator<Item = (FileId, &'a RelativePath)> + 'a {
self.0
.iter()
.map(|(id, path)| (*id, path.as_relative_path()))
}
fn path(&self, id: FileId) -> &RelativePath {
self.iter().find(|&(it, _)| it == id).unwrap().1
}
}
impl FileResolver for FileMap {
fn file_stem(&self, id: FileId) -> String {
self.path(id).file_stem().unwrap().to_string()
}
fn resolve(&self, id: FileId, rel: &RelativePath) -> Option<FileId> {
let path = self.path(id).join(rel).normalize();
let id = self.iter().find(|&(_, p)| path == p)?.0;
Some(id)
}
}
fn analysis_host(files: &[(&str, &str)]) -> AnalysisHost {
let mut host = AnalysisHost::new();
let mut file_map = Vec::new();
let mut change = AnalysisChange::new();
for (id, &(path, contents)) in files.iter().enumerate() {
let file_id = FileId((id + 1) as u32);
assert!(path.starts_with('/'));
let path = RelativePathBuf::from_path(&path[1..]).unwrap();
change.add_file(file_id, contents.to_string());
file_map.push((file_id, path));
}
change.set_file_resolver(Arc::new(FileMap(file_map)));
host.apply_change(change);
host
}
fn analysis(files: &[(&str, &str)]) -> Analysis {
analysis_host(files).analysis()
MockAnalysis::with_files(files).analysis()
}
fn get_signature(text: &str) -> (FnDescriptor, Option<usize>) {
@ -125,7 +79,9 @@ fn test_resolve_parent_module() {
#[test]
fn test_resolve_crate_root() {
let mut host = analysis_host(&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]);
let mut host = MockAnalysis::with_files(
&[("/lib.rs", "mod foo;"), ("/foo.rs", "")]
).analysis_host();
let snap = host.analysis();
assert!(snap.crate_for(FileId(2)).unwrap().is_empty());

View File

@ -1,602 +0,0 @@
/// FIXME: move completion from ra_editor to ra_analysis
use rustc_hash::{FxHashMap, FxHashSet};
use ra_syntax::{
algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx},
ast::{self, AstChildren, LoopBodyOwner, ModuleItemOwner},
AstNode, File,
SyntaxKind::*,
SyntaxNodeRef, TextUnit,
};
use crate::{
find_node_at_offset,
scope::{FnScopes, ModuleScope},
AtomEdit,
};
#[derive(Debug)]
pub struct CompletionItem {
/// What user sees in pop-up
pub label: String,
/// What string is used for filtering, defaults to label
pub lookup: Option<String>,
/// What is inserted, defaults to label
pub snippet: Option<String>,
}
pub fn scope_completion(file: &File, offset: TextUnit) -> Option<Vec<CompletionItem>> {
// Insert a fake ident to get a valid parse tree
let file = {
let edit = AtomEdit::insert(offset, "intellijRulezz".to_string());
file.reparse(&edit)
};
let mut has_completions = false;
let mut res = Vec::new();
if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), offset) {
has_completions = true;
complete_name_ref(&file, name_ref, &mut res);
// special case, `trait T { fn foo(i_am_a_name_ref) {} }`
if is_node::<ast::Param>(name_ref.syntax()) {
param_completions(name_ref.syntax(), &mut res);
}
let name_range = name_ref.syntax().range();
let top_node = name_ref
.syntax()
.ancestors()
.take_while(|it| it.range() == name_range)
.last()
.unwrap();
match top_node.parent().map(|it| it.kind()) {
Some(ROOT) | Some(ITEM_LIST) => complete_mod_item_snippets(&mut res),
_ => (),
}
}
if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), offset) {
if is_node::<ast::Param>(name.syntax()) {
has_completions = true;
param_completions(name.syntax(), &mut res);
}
}
if has_completions {
Some(res)
} else {
None
}
}
pub fn complete_module_items(items: AstChildren<ast::ModuleItem>, this_item: Option<ast::NameRef>, acc: &mut Vec<CompletionItem>) {
let scope = ModuleScope::new(items);
acc.extend(
scope
.entries()
.iter()
.filter(|entry| Some(entry.syntax()) != this_item.map(|it| it.syntax()))
.map(|entry| CompletionItem {
label: entry.name().to_string(),
lookup: None,
snippet: None,
}),
);
}
fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<CompletionItem>) {
if !is_node::<ast::Path>(name_ref.syntax()) {
return;
}
let mut visited_fn = false;
for node in name_ref.syntax().ancestors() {
if let Some(items) = visitor()
.visit::<ast::Root, _>(|it| Some(it.items()))
.visit::<ast::Module, _>(|it| Some(it.item_list()?.items()))
.accept(node)
{
if let Some(items) = items {
complete_module_items(items, Some(name_ref), acc);
}
break;
} else if !visited_fn {
if let Some(fn_def) = ast::FnDef::cast(node) {
visited_fn = true;
complete_expr_keywords(&file, fn_def, name_ref, acc);
complete_expr_snippets(acc);
let scopes = FnScopes::new(fn_def);
complete_fn(name_ref, &scopes, acc);
}
}
}
}
fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
let mut params = FxHashMap::default();
for node in ctx.ancestors() {
let _ = visitor_ctx(&mut params)
.visit::<ast::Root, _>(process)
.visit::<ast::ItemList, _>(process)
.accept(node);
}
params
.into_iter()
.filter_map(|(label, (count, param))| {
let lookup = param.pat()?.syntax().text().to_string();
if count < 2 {
None
} else {
Some((label, lookup))
}
})
.for_each(|(label, lookup)| {
acc.push(CompletionItem {
label,
lookup: Some(lookup),
snippet: None,
})
});
fn process<'a, N: ast::FnDefOwner<'a>>(
node: N,
params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
) {
node.functions()
.filter_map(|it| it.param_list())
.flat_map(|it| it.params())
.for_each(|param| {
let text = param.syntax().text().to_string();
params.entry(text).or_insert((0, param)).0 += 1;
})
}
}
fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
match node.ancestors().filter_map(N::cast).next() {
None => false,
Some(n) => n.syntax().range() == node.range(),
}
}
fn complete_expr_keywords(
file: &File,
fn_def: ast::FnDef,
name_ref: ast::NameRef,
acc: &mut Vec<CompletionItem>,
) {
acc.push(keyword("if", "if $0 {}"));
acc.push(keyword("match", "match $0 {}"));
acc.push(keyword("while", "while $0 {}"));
acc.push(keyword("loop", "loop {$0}"));
if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
acc.push(keyword("else", "else {$0}"));
acc.push(keyword("else if", "else if $0 {}"));
}
}
}
if is_in_loop_body(name_ref) {
acc.push(keyword("continue", "continue"));
acc.push(keyword("break", "break"));
}
acc.extend(complete_return(fn_def, name_ref));
}
fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
for node in name_ref.syntax().ancestors() {
if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
break;
}
let loop_body = visitor()
.visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
.visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
.accept(node);
if let Some(Some(body)) = loop_body {
if name_ref.syntax().range().is_subrange(&body.syntax().range()) {
return true;
}
}
}
false
}
fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
// let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
// .next()
// .and_then(|it| it.syntax().parent())
// .and_then(ast::Block::cast)
// .is_some();
// if is_last_in_block {
// return None;
// }
let is_stmt = match name_ref
.syntax()
.ancestors()
.filter_map(ast::ExprStmt::cast)
.next()
{
None => false,
Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
};
let snip = match (is_stmt, fn_def.ret_type().is_some()) {
(true, true) => "return $0;",
(true, false) => "return;",
(false, true) => "return $0",
(false, false) => "return",
};
Some(keyword("return", snip))
}
fn keyword(kw: &str, snip: &str) -> CompletionItem {
CompletionItem {
label: kw.to_string(),
lookup: None,
snippet: Some(snip.to_string()),
}
}
fn complete_expr_snippets(acc: &mut Vec<CompletionItem>) {
acc.push(CompletionItem {
label: "pd".to_string(),
lookup: None,
snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()),
});
acc.push(CompletionItem {
label: "ppd".to_string(),
lookup: None,
snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()),
});
}
fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
acc.push(CompletionItem {
label: "tfn".to_string(),
lookup: None,
snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()),
});
acc.push(CompletionItem {
label: "pub(crate)".to_string(),
lookup: None,
snippet: Some("pub(crate) $0".to_string()),
})
}
fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
let mut shadowed = FxHashSet::default();
acc.extend(
scopes
.scope_chain(name_ref.syntax())
.flat_map(|scope| scopes.entries(scope).iter())
.filter(|entry| shadowed.insert(entry.name()))
.map(|entry| CompletionItem {
label: entry.name().to_string(),
lookup: None,
snippet: None,
}),
);
if scopes.self_param.is_some() {
acc.push(CompletionItem {
label: "self".to_string(),
lookup: None,
snippet: None,
})
}
}
#[cfg(test)]
mod tests {
use super::*;
use test_utils::{assert_eq_dbg, extract_offset};
fn check_scope_completion(code: &str, expected_completions: &str) {
let (off, code) = extract_offset(&code);
let file = File::parse(&code);
let completions = scope_completion(&file, off)
.unwrap()
.into_iter()
.filter(|c| c.snippet.is_none())
.collect::<Vec<_>>();
assert_eq_dbg(expected_completions, &completions);
}
fn check_snippet_completion(code: &str, expected_completions: &str) {
let (off, code) = extract_offset(&code);
let file = File::parse(&code);
let completions = scope_completion(&file, off)
.unwrap()
.into_iter()
.filter(|c| c.snippet.is_some())
.collect::<Vec<_>>();
assert_eq_dbg(expected_completions, &completions);
}
#[test]
fn test_completion_let_scope() {
check_scope_completion(
r"
fn quux(x: i32) {
let y = 92;
1 + <|>;
let z = ();
}
",
r#"[CompletionItem { label: "y", lookup: None, snippet: None },
CompletionItem { label: "x", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_if_let_scope() {
check_scope_completion(
r"
fn quux() {
if let Some(x) = foo() {
let y = 92;
};
if let Some(a) = bar() {
let b = 62;
1 + <|>
}
}
",
r#"[CompletionItem { label: "b", lookup: None, snippet: None },
CompletionItem { label: "a", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_for_scope() {
check_scope_completion(
r"
fn quux() {
for x in &[1, 2, 3] {
<|>
}
}
",
r#"[CompletionItem { label: "x", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_mod_scope() {
check_scope_completion(
r"
struct Foo;
enum Baz {}
fn quux() {
<|>
}
",
r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
CompletionItem { label: "Baz", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_completion_mod_scope_no_self_use() {
check_scope_completion(
r"
use foo<|>;
",
r#"[]"#,
);
}
#[test]
fn test_completion_mod_scope_nested() {
check_scope_completion(
r"
struct Foo;
mod m {
struct Bar;
fn quux() { <|> }
}
",
r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
);
}
#[test]
fn test_complete_type() {
check_scope_completion(
r"
struct Foo;
fn x() -> <|>
",
r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
CompletionItem { label: "x", lookup: None, snippet: None }]"#,
)
}
#[test]
fn test_complete_shadowing() {
check_scope_completion(
r"
fn foo() -> {
let bar = 92;
{
let bar = 62;
<|>
}
}
",
r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
)
}
#[test]
fn test_complete_self() {
check_scope_completion(
r"
impl S { fn foo(&self) { <|> } }
",
r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#,
)
}
#[test]
fn test_completion_kewords() {
check_snippet_completion(r"
fn quux() {
<|>
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_completion_else() {
check_snippet_completion(r"
fn quux() {
if true {
()
} <|>
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "else", lookup: None, snippet: Some("else {$0}") },
CompletionItem { label: "else if", lookup: None, snippet: Some("else if $0 {}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_completion_return_value() {
check_snippet_completion(r"
fn quux() -> i32 {
<|>
92
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0;") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
check_snippet_completion(r"
fn quux() {
<|>
92
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return;") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_completion_return_no_stmt() {
check_snippet_completion(r"
fn quux() -> i32 {
match () {
() => <|>
}
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_continue_break_completion() {
check_snippet_completion(r"
fn quux() -> i32 {
loop { <|> }
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "continue", lookup: None, snippet: Some("continue") },
CompletionItem { label: "break", lookup: None, snippet: Some("break") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
check_snippet_completion(r"
fn quux() -> i32 {
loop { || { <|> } }
}
", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
}
#[test]
fn test_param_completion_last_param() {
check_scope_completion(r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>) {}
", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
}
#[test]
fn test_param_completion_nth_param() {
check_scope_completion(r"
fn foo(file_id: FileId) {}
fn bar(file_id: FileId) {}
fn baz(file<|>, x: i32) {}
", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
}
#[test]
fn test_param_completion_trait_param() {
check_scope_completion(r"
pub(crate) trait SourceRoot {
pub fn contains(&self, file_id: FileId) -> bool;
pub fn module_map(&self) -> &ModuleMap;
pub fn lines(&self, file_id: FileId) -> &LineIndex;
pub fn syntax(&self, file<|>)
}
", r#"[CompletionItem { label: "self", lookup: None, snippet: None },
CompletionItem { label: "SourceRoot", lookup: None, snippet: None },
CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
}
#[test]
fn test_item_snippets() {
// check_snippet_completion(r"
// <|>
// ",
// r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") }]"##,
// );
check_snippet_completion(r"
#[cfg(test)]
mod tests {
<|>
}
",
r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") },
CompletionItem { label: "pub(crate)", lookup: None, snippet: Some("pub(crate) $0") }]"##,
);
}
}
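The expectation strings in the tests above are simply the Debug rendering of the returned completion items (label, lookup, snippet), wrapped across several lines for readability. A minimal, self-contained sketch of how such an expectation could be checked follows; the helper name, the whitespace normalization, and the stand-in struct are assumptions for illustration, not the project's real test utilities.

// Stand-in mirroring the shape visible in the expectation strings above.
#[derive(Debug)]
struct CompletionItem {
    label: String,
    lookup: Option<String>,
    snippet: Option<String>,
}

// Hypothetical helper: collapse the multi-line expectation so it can be
// compared against the one-line Debug output of the slice.
fn assert_completions_eq(actual: &[CompletionItem], expected: &str) {
    let expected = expected
        .lines()
        .map(str::trim)
        .collect::<Vec<_>>()
        .join(" ");
    assert_eq!(format!("{:?}", actual), expected);
}

fn main() {
    let items = vec![CompletionItem {
        label: "return".to_string(),
        lookup: None,
        snippet: Some("return $0;".to_string()),
    }];
    assert_completions_eq(
        &items,
        r#"[CompletionItem { label: "return", lookup: None, snippet: Some("return $0;") }]"#,
    );
}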

View File

@ -8,12 +8,10 @@ extern crate superslice;
extern crate test_utils as _test_utils;
mod code_actions;
mod completion;
mod edit;
mod extend_selection;
mod folding_ranges;
mod line_index;
mod scope;
mod symbols;
#[cfg(test)]
mod test_utils;
@ -21,7 +19,6 @@ mod typing;
pub use self::{
code_actions::{add_derive, add_impl, flip_comma, introduce_variable, LocalEdit},
completion::{scope_completion, complete_module_items, CompletionItem},
edit::{Edit, EditBuilder},
extend_selection::extend_selection,
folding_ranges::{folding_ranges, Fold, FoldKind},
@ -33,7 +30,7 @@ pub use ra_syntax::AtomEdit;
use ra_syntax::{
algo::find_leaf_at_offset,
ast::{self, AstNode, NameOwner},
File, SmolStr,
File,
SyntaxKind::{self, *},
SyntaxNodeRef, TextRange, TextUnit,
};
@ -151,15 +148,7 @@ pub fn find_node_at_offset<'a, N: AstNode<'a>>(
leaf.ancestors().filter_map(N::cast).next()
}
pub fn resolve_local_name(
name_ref: ast::NameRef,
) -> Option<(SmolStr, TextRange)> {
let fn_def = name_ref.syntax().ancestors().find_map(ast::FnDef::cast)?;
let scopes = scope::FnScopes::new(fn_def);
let scope_entry = scope::resolve_local_name(name_ref, &scopes)?;
let name = scope_entry.ast().name()?;
Some((scope_entry.name(), name.syntax().range()))
}
#[cfg(test)]
mod tests {

View File

@ -1,7 +0,0 @@
mod fn_scope;
mod mod_scope;
pub use self::{
fn_scope::{resolve_local_name, FnScopes},
mod_scope::ModuleScope,
};

View File

@ -1,124 +0,0 @@
/// FIXME: this is now moved to ra_analysis::descriptors::module::scope.
///
/// Current copy will be deleted as soon as we move the rest of the completion
/// to the analyzer.
use ra_syntax::{
ast::{self, AstChildren},
AstNode, SmolStr, SyntaxNode, SyntaxNodeRef,
};
pub struct ModuleScope {
entries: Vec<Entry>,
}
pub struct Entry {
node: SyntaxNode,
kind: EntryKind,
}
enum EntryKind {
Item,
Import,
}
impl ModuleScope {
pub fn new(items: AstChildren<ast::ModuleItem>) -> ModuleScope {
let mut entries = Vec::new();
for item in items {
let entry = match item {
ast::ModuleItem::StructDef(item) => Entry::new_item(item),
ast::ModuleItem::EnumDef(item) => Entry::new_item(item),
ast::ModuleItem::FnDef(item) => Entry::new_item(item),
ast::ModuleItem::ConstDef(item) => Entry::new_item(item),
ast::ModuleItem::StaticDef(item) => Entry::new_item(item),
ast::ModuleItem::TraitDef(item) => Entry::new_item(item),
ast::ModuleItem::TypeDef(item) => Entry::new_item(item),
ast::ModuleItem::Module(item) => Entry::new_item(item),
ast::ModuleItem::UseItem(item) => {
if let Some(tree) = item.use_tree() {
collect_imports(tree, &mut entries);
}
continue;
}
ast::ModuleItem::ExternCrateItem(_) | ast::ModuleItem::ImplItem(_) => continue,
};
entries.extend(entry)
}
ModuleScope { entries }
}
pub fn entries(&self) -> &[Entry] {
self.entries.as_slice()
}
}
impl Entry {
fn new_item<'a>(item: impl ast::NameOwner<'a>) -> Option<Entry> {
let name = item.name()?;
Some(Entry {
node: name.syntax().owned(),
kind: EntryKind::Item,
})
}
fn new_import(path: ast::Path) -> Option<Entry> {
let name_ref = path.segment()?.name_ref()?;
Some(Entry {
node: name_ref.syntax().owned(),
kind: EntryKind::Import,
})
}
pub fn name(&self) -> SmolStr {
match self.kind {
EntryKind::Item => ast::Name::cast(self.node.borrowed()).unwrap().text(),
EntryKind::Import => ast::NameRef::cast(self.node.borrowed()).unwrap().text(),
}
}
pub fn syntax(&self) -> SyntaxNodeRef {
self.node.borrowed()
}
}
fn collect_imports(tree: ast::UseTree, acc: &mut Vec<Entry>) {
if let Some(use_tree_list) = tree.use_tree_list() {
return use_tree_list
.use_trees()
.for_each(|it| collect_imports(it, acc));
}
if let Some(path) = tree.path() {
acc.extend(Entry::new_import(path));
}
}
#[cfg(test)]
mod tests {
use super::*;
use ra_syntax::{ast::ModuleItemOwner, File};
fn do_check(code: &str, expected: &[&str]) {
let file = File::parse(&code);
let scope = ModuleScope::new(file.ast().items());
let actual = scope.entries.iter().map(|it| it.name()).collect::<Vec<_>>();
assert_eq!(expected, actual.as_slice());
}
#[test]
fn test_module_scope() {
do_check(
"
struct Foo;
enum Bar {}
mod baz {}
fn quux() {}
use x::{
y::z,
t,
};
type T = ();
",
&["Foo", "Bar", "baz", "quux", "z", "t", "T"],
)
}
}

View File

@ -15,7 +15,7 @@ use crate::{
pub struct ArgListNode(SyntaxNode);
impl ArgListNode {
pub fn new(&self, ast: ArgList) -> ArgListNode {
pub fn new(ast: ArgList) -> ArgListNode {
let syntax = ast.syntax().owned();
ArgListNode(syntax)
}
@ -50,7 +50,7 @@ impl<'a> ArgList<'a> {
pub struct ArrayExprNode(SyntaxNode);
impl ArrayExprNode {
pub fn new(&self, ast: ArrayExpr) -> ArrayExprNode {
pub fn new(ast: ArrayExpr) -> ArrayExprNode {
let syntax = ast.syntax().owned();
ArrayExprNode(syntax)
}
@ -81,7 +81,7 @@ impl<'a> ArrayExpr<'a> {}
pub struct ArrayTypeNode(SyntaxNode);
impl ArrayTypeNode {
pub fn new(&self, ast: ArrayType) -> ArrayTypeNode {
pub fn new(ast: ArrayType) -> ArrayTypeNode {
let syntax = ast.syntax().owned();
ArrayTypeNode(syntax)
}
@ -112,7 +112,7 @@ impl<'a> ArrayType<'a> {}
pub struct AttrNode(SyntaxNode);
impl AttrNode {
pub fn new(&self, ast: Attr) -> AttrNode {
pub fn new(ast: Attr) -> AttrNode {
let syntax = ast.syntax().owned();
AttrNode(syntax)
}
@ -147,7 +147,7 @@ impl<'a> Attr<'a> {
pub struct BinExprNode(SyntaxNode);
impl BinExprNode {
pub fn new(&self, ast: BinExpr) -> BinExprNode {
pub fn new(ast: BinExpr) -> BinExprNode {
let syntax = ast.syntax().owned();
BinExprNode(syntax)
}
@ -178,7 +178,7 @@ impl<'a> BinExpr<'a> {}
pub struct BindPatNode(SyntaxNode);
impl BindPatNode {
pub fn new(&self, ast: BindPat) -> BindPatNode {
pub fn new(ast: BindPat) -> BindPatNode {
let syntax = ast.syntax().owned();
BindPatNode(syntax)
}
@ -210,7 +210,7 @@ impl<'a> BindPat<'a> {}
pub struct BlockNode(SyntaxNode);
impl BlockNode {
pub fn new(&self, ast: Block) -> BlockNode {
pub fn new(ast: Block) -> BlockNode {
let syntax = ast.syntax().owned();
BlockNode(syntax)
}
@ -249,7 +249,7 @@ impl<'a> Block<'a> {
pub struct BlockExprNode(SyntaxNode);
impl BlockExprNode {
pub fn new(&self, ast: BlockExpr) -> BlockExprNode {
pub fn new(ast: BlockExpr) -> BlockExprNode {
let syntax = ast.syntax().owned();
BlockExprNode(syntax)
}
@ -284,7 +284,7 @@ impl<'a> BlockExpr<'a> {
pub struct BreakExprNode(SyntaxNode);
impl BreakExprNode {
pub fn new(&self, ast: BreakExpr) -> BreakExprNode {
pub fn new(ast: BreakExpr) -> BreakExprNode {
let syntax = ast.syntax().owned();
BreakExprNode(syntax)
}
@ -315,7 +315,7 @@ impl<'a> BreakExpr<'a> {}
pub struct CallExprNode(SyntaxNode);
impl CallExprNode {
pub fn new(&self, ast: CallExpr) -> CallExprNode {
pub fn new(ast: CallExpr) -> CallExprNode {
let syntax = ast.syntax().owned();
CallExprNode(syntax)
}
@ -351,7 +351,7 @@ impl<'a> CallExpr<'a> {
pub struct CastExprNode(SyntaxNode);
impl CastExprNode {
pub fn new(&self, ast: CastExpr) -> CastExprNode {
pub fn new(ast: CastExpr) -> CastExprNode {
let syntax = ast.syntax().owned();
CastExprNode(syntax)
}
@ -382,7 +382,7 @@ impl<'a> CastExpr<'a> {}
pub struct CommentNode(SyntaxNode);
impl CommentNode {
pub fn new(&self, ast: Comment) -> CommentNode {
pub fn new(ast: Comment) -> CommentNode {
let syntax = ast.syntax().owned();
CommentNode(syntax)
}
@ -413,7 +413,7 @@ impl<'a> Comment<'a> {}
pub struct ConditionNode(SyntaxNode);
impl ConditionNode {
pub fn new(&self, ast: Condition) -> ConditionNode {
pub fn new(ast: Condition) -> ConditionNode {
let syntax = ast.syntax().owned();
ConditionNode(syntax)
}
@ -452,7 +452,7 @@ impl<'a> Condition<'a> {
pub struct ConstDefNode(SyntaxNode);
impl ConstDefNode {
pub fn new(&self, ast: ConstDef) -> ConstDefNode {
pub fn new(ast: ConstDef) -> ConstDefNode {
let syntax = ast.syntax().owned();
ConstDefNode(syntax)
}
@ -486,7 +486,7 @@ impl<'a> ConstDef<'a> {}
pub struct ContinueExprNode(SyntaxNode);
impl ContinueExprNode {
pub fn new(&self, ast: ContinueExpr) -> ContinueExprNode {
pub fn new(ast: ContinueExpr) -> ContinueExprNode {
let syntax = ast.syntax().owned();
ContinueExprNode(syntax)
}
@ -517,7 +517,7 @@ impl<'a> ContinueExpr<'a> {}
pub struct DynTraitTypeNode(SyntaxNode);
impl DynTraitTypeNode {
pub fn new(&self, ast: DynTraitType) -> DynTraitTypeNode {
pub fn new(ast: DynTraitType) -> DynTraitTypeNode {
let syntax = ast.syntax().owned();
DynTraitTypeNode(syntax)
}
@ -548,7 +548,7 @@ impl<'a> DynTraitType<'a> {}
pub struct EnumDefNode(SyntaxNode);
impl EnumDefNode {
pub fn new(&self, ast: EnumDef) -> EnumDefNode {
pub fn new(ast: EnumDef) -> EnumDefNode {
let syntax = ast.syntax().owned();
EnumDefNode(syntax)
}
@ -582,7 +582,7 @@ impl<'a> EnumDef<'a> {}
pub struct ExprNode(SyntaxNode);
impl ExprNode {
pub fn new(&self, ast: Expr) -> ExprNode {
pub fn new(ast: Expr) -> ExprNode {
let syntax = ast.syntax().owned();
ExprNode(syntax)
}
@ -710,7 +710,7 @@ impl<'a> Expr<'a> {}
pub struct ExprStmtNode(SyntaxNode);
impl ExprStmtNode {
pub fn new(&self, ast: ExprStmt) -> ExprStmtNode {
pub fn new(ast: ExprStmt) -> ExprStmtNode {
let syntax = ast.syntax().owned();
ExprStmtNode(syntax)
}
@ -745,7 +745,7 @@ impl<'a> ExprStmt<'a> {
pub struct ExternCrateItemNode(SyntaxNode);
impl ExternCrateItemNode {
pub fn new(&self, ast: ExternCrateItem) -> ExternCrateItemNode {
pub fn new(ast: ExternCrateItem) -> ExternCrateItemNode {
let syntax = ast.syntax().owned();
ExternCrateItemNode(syntax)
}
@ -776,7 +776,7 @@ impl<'a> ExternCrateItem<'a> {}
pub struct FieldExprNode(SyntaxNode);
impl FieldExprNode {
pub fn new(&self, ast: FieldExpr) -> FieldExprNode {
pub fn new(ast: FieldExpr) -> FieldExprNode {
let syntax = ast.syntax().owned();
FieldExprNode(syntax)
}
@ -807,7 +807,7 @@ impl<'a> FieldExpr<'a> {}
pub struct FieldPatListNode(SyntaxNode);
impl FieldPatListNode {
pub fn new(&self, ast: FieldPatList) -> FieldPatListNode {
pub fn new(ast: FieldPatList) -> FieldPatListNode {
let syntax = ast.syntax().owned();
FieldPatListNode(syntax)
}
@ -838,7 +838,7 @@ impl<'a> FieldPatList<'a> {}
pub struct FnDefNode(SyntaxNode);
impl FnDefNode {
pub fn new(&self, ast: FnDef) -> FnDefNode {
pub fn new(ast: FnDef) -> FnDefNode {
let syntax = ast.syntax().owned();
FnDefNode(syntax)
}
@ -884,7 +884,7 @@ impl<'a> FnDef<'a> {
pub struct FnPointerTypeNode(SyntaxNode);
impl FnPointerTypeNode {
pub fn new(&self, ast: FnPointerType) -> FnPointerTypeNode {
pub fn new(ast: FnPointerType) -> FnPointerTypeNode {
let syntax = ast.syntax().owned();
FnPointerTypeNode(syntax)
}
@ -915,7 +915,7 @@ impl<'a> FnPointerType<'a> {}
pub struct ForExprNode(SyntaxNode);
impl ForExprNode {
pub fn new(&self, ast: ForExpr) -> ForExprNode {
pub fn new(ast: ForExpr) -> ForExprNode {
let syntax = ast.syntax().owned();
ForExprNode(syntax)
}
@ -955,7 +955,7 @@ impl<'a> ForExpr<'a> {
pub struct ForTypeNode(SyntaxNode);
impl ForTypeNode {
pub fn new(&self, ast: ForType) -> ForTypeNode {
pub fn new(ast: ForType) -> ForTypeNode {
let syntax = ast.syntax().owned();
ForTypeNode(syntax)
}
@ -986,7 +986,7 @@ impl<'a> ForType<'a> {}
pub struct IfExprNode(SyntaxNode);
impl IfExprNode {
pub fn new(&self, ast: IfExpr) -> IfExprNode {
pub fn new(ast: IfExpr) -> IfExprNode {
let syntax = ast.syntax().owned();
IfExprNode(syntax)
}
@ -1021,7 +1021,7 @@ impl<'a> IfExpr<'a> {
pub struct ImplItemNode(SyntaxNode);
impl ImplItemNode {
pub fn new(&self, ast: ImplItem) -> ImplItemNode {
pub fn new(ast: ImplItem) -> ImplItemNode {
let syntax = ast.syntax().owned();
ImplItemNode(syntax)
}
@ -1052,7 +1052,7 @@ impl<'a> ImplItem<'a> {}
pub struct ImplTraitTypeNode(SyntaxNode);
impl ImplTraitTypeNode {
pub fn new(&self, ast: ImplTraitType) -> ImplTraitTypeNode {
pub fn new(ast: ImplTraitType) -> ImplTraitTypeNode {
let syntax = ast.syntax().owned();
ImplTraitTypeNode(syntax)
}
@ -1083,7 +1083,7 @@ impl<'a> ImplTraitType<'a> {}
pub struct IndexExprNode(SyntaxNode);
impl IndexExprNode {
pub fn new(&self, ast: IndexExpr) -> IndexExprNode {
pub fn new(ast: IndexExpr) -> IndexExprNode {
let syntax = ast.syntax().owned();
IndexExprNode(syntax)
}
@ -1114,7 +1114,7 @@ impl<'a> IndexExpr<'a> {}
pub struct ItemListNode(SyntaxNode);
impl ItemListNode {
pub fn new(&self, ast: ItemList) -> ItemListNode {
pub fn new(ast: ItemList) -> ItemListNode {
let syntax = ast.syntax().owned();
ItemListNode(syntax)
}
@ -1147,7 +1147,7 @@ impl<'a> ItemList<'a> {}
pub struct LabelNode(SyntaxNode);
impl LabelNode {
pub fn new(&self, ast: Label) -> LabelNode {
pub fn new(ast: Label) -> LabelNode {
let syntax = ast.syntax().owned();
LabelNode(syntax)
}
@ -1178,7 +1178,7 @@ impl<'a> Label<'a> {}
pub struct LambdaExprNode(SyntaxNode);
impl LambdaExprNode {
pub fn new(&self, ast: LambdaExpr) -> LambdaExprNode {
pub fn new(ast: LambdaExpr) -> LambdaExprNode {
let syntax = ast.syntax().owned();
LambdaExprNode(syntax)
}
@ -1217,7 +1217,7 @@ impl<'a> LambdaExpr<'a> {
pub struct LetStmtNode(SyntaxNode);
impl LetStmtNode {
pub fn new(&self, ast: LetStmt) -> LetStmtNode {
pub fn new(ast: LetStmt) -> LetStmtNode {
let syntax = ast.syntax().owned();
LetStmtNode(syntax)
}
@ -1256,7 +1256,7 @@ impl<'a> LetStmt<'a> {
pub struct LifetimeNode(SyntaxNode);
impl LifetimeNode {
pub fn new(&self, ast: Lifetime) -> LifetimeNode {
pub fn new(ast: Lifetime) -> LifetimeNode {
let syntax = ast.syntax().owned();
LifetimeNode(syntax)
}
@ -1287,7 +1287,7 @@ impl<'a> Lifetime<'a> {}
pub struct LifetimeParamNode(SyntaxNode);
impl LifetimeParamNode {
pub fn new(&self, ast: LifetimeParam) -> LifetimeParamNode {
pub fn new(ast: LifetimeParam) -> LifetimeParamNode {
let syntax = ast.syntax().owned();
LifetimeParamNode(syntax)
}
@ -1322,7 +1322,7 @@ impl<'a> LifetimeParam<'a> {
pub struct LiteralNode(SyntaxNode);
impl LiteralNode {
pub fn new(&self, ast: Literal) -> LiteralNode {
pub fn new(ast: Literal) -> LiteralNode {
let syntax = ast.syntax().owned();
LiteralNode(syntax)
}
@ -1353,7 +1353,7 @@ impl<'a> Literal<'a> {}
pub struct LoopExprNode(SyntaxNode);
impl LoopExprNode {
pub fn new(&self, ast: LoopExpr) -> LoopExprNode {
pub fn new(ast: LoopExpr) -> LoopExprNode {
let syntax = ast.syntax().owned();
LoopExprNode(syntax)
}
@ -1385,7 +1385,7 @@ impl<'a> LoopExpr<'a> {}
pub struct MatchArmNode(SyntaxNode);
impl MatchArmNode {
pub fn new(&self, ast: MatchArm) -> MatchArmNode {
pub fn new(ast: MatchArm) -> MatchArmNode {
let syntax = ast.syntax().owned();
MatchArmNode(syntax)
}
@ -1428,7 +1428,7 @@ impl<'a> MatchArm<'a> {
pub struct MatchArmListNode(SyntaxNode);
impl MatchArmListNode {
pub fn new(&self, ast: MatchArmList) -> MatchArmListNode {
pub fn new(ast: MatchArmList) -> MatchArmListNode {
let syntax = ast.syntax().owned();
MatchArmListNode(syntax)
}
@ -1463,7 +1463,7 @@ impl<'a> MatchArmList<'a> {
pub struct MatchExprNode(SyntaxNode);
impl MatchExprNode {
pub fn new(&self, ast: MatchExpr) -> MatchExprNode {
pub fn new(ast: MatchExpr) -> MatchExprNode {
let syntax = ast.syntax().owned();
MatchExprNode(syntax)
}
@ -1502,7 +1502,7 @@ impl<'a> MatchExpr<'a> {
pub struct MatchGuardNode(SyntaxNode);
impl MatchGuardNode {
pub fn new(&self, ast: MatchGuard) -> MatchGuardNode {
pub fn new(ast: MatchGuard) -> MatchGuardNode {
let syntax = ast.syntax().owned();
MatchGuardNode(syntax)
}
@ -1533,7 +1533,7 @@ impl<'a> MatchGuard<'a> {}
pub struct MethodCallExprNode(SyntaxNode);
impl MethodCallExprNode {
pub fn new(&self, ast: MethodCallExpr) -> MethodCallExprNode {
pub fn new(ast: MethodCallExpr) -> MethodCallExprNode {
let syntax = ast.syntax().owned();
MethodCallExprNode(syntax)
}
@ -1569,7 +1569,7 @@ impl<'a> MethodCallExpr<'a> {
pub struct ModuleNode(SyntaxNode);
impl ModuleNode {
pub fn new(&self, ast: Module) -> ModuleNode {
pub fn new(ast: Module) -> ModuleNode {
let syntax = ast.syntax().owned();
ModuleNode(syntax)
}
@ -1606,7 +1606,7 @@ impl<'a> Module<'a> {
pub struct ModuleItemNode(SyntaxNode);
impl ModuleItemNode {
pub fn new(&self, ast: ModuleItem) -> ModuleItemNode {
pub fn new(ast: ModuleItem) -> ModuleItemNode {
let syntax = ast.syntax().owned();
ModuleItemNode(syntax)
}
@ -1671,7 +1671,7 @@ impl<'a> ModuleItem<'a> {}
pub struct NameNode(SyntaxNode);
impl NameNode {
pub fn new(&self, ast: Name) -> NameNode {
pub fn new(ast: Name) -> NameNode {
let syntax = ast.syntax().owned();
NameNode(syntax)
}
@ -1702,7 +1702,7 @@ impl<'a> Name<'a> {}
pub struct NameRefNode(SyntaxNode);
impl NameRefNode {
pub fn new(&self, ast: NameRef) -> NameRefNode {
pub fn new(ast: NameRef) -> NameRefNode {
let syntax = ast.syntax().owned();
NameRefNode(syntax)
}
@ -1733,7 +1733,7 @@ impl<'a> NameRef<'a> {}
pub struct NamedFieldNode(SyntaxNode);
impl NamedFieldNode {
pub fn new(&self, ast: NamedField) -> NamedFieldNode {
pub fn new(ast: NamedField) -> NamedFieldNode {
let syntax = ast.syntax().owned();
NamedFieldNode(syntax)
}
@ -1764,7 +1764,7 @@ impl<'a> NamedField<'a> {}
pub struct NamedFieldDefNode(SyntaxNode);
impl NamedFieldDefNode {
pub fn new(&self, ast: NamedFieldDef) -> NamedFieldDefNode {
pub fn new(ast: NamedFieldDef) -> NamedFieldDefNode {
let syntax = ast.syntax().owned();
NamedFieldDefNode(syntax)
}
@ -1797,7 +1797,7 @@ impl<'a> NamedFieldDef<'a> {}
pub struct NamedFieldListNode(SyntaxNode);
impl NamedFieldListNode {
pub fn new(&self, ast: NamedFieldList) -> NamedFieldListNode {
pub fn new(ast: NamedFieldList) -> NamedFieldListNode {
let syntax = ast.syntax().owned();
NamedFieldListNode(syntax)
}
@ -1828,7 +1828,7 @@ impl<'a> NamedFieldList<'a> {}
pub struct NeverTypeNode(SyntaxNode);
impl NeverTypeNode {
pub fn new(&self, ast: NeverType) -> NeverTypeNode {
pub fn new(ast: NeverType) -> NeverTypeNode {
let syntax = ast.syntax().owned();
NeverTypeNode(syntax)
}
@ -1859,7 +1859,7 @@ impl<'a> NeverType<'a> {}
pub struct NominalDefNode(SyntaxNode);
impl NominalDefNode {
pub fn new(&self, ast: NominalDef) -> NominalDefNode {
pub fn new(ast: NominalDef) -> NominalDefNode {
let syntax = ast.syntax().owned();
NominalDefNode(syntax)
}
@ -1900,7 +1900,7 @@ impl<'a> NominalDef<'a> {}
pub struct ParamNode(SyntaxNode);
impl ParamNode {
pub fn new(&self, ast: Param) -> ParamNode {
pub fn new(ast: Param) -> ParamNode {
let syntax = ast.syntax().owned();
ParamNode(syntax)
}
@ -1935,7 +1935,7 @@ impl<'a> Param<'a> {
pub struct ParamListNode(SyntaxNode);
impl ParamListNode {
pub fn new(&self, ast: ParamList) -> ParamListNode {
pub fn new(ast: ParamList) -> ParamListNode {
let syntax = ast.syntax().owned();
ParamListNode(syntax)
}
@ -1974,7 +1974,7 @@ impl<'a> ParamList<'a> {
pub struct ParenExprNode(SyntaxNode);
impl ParenExprNode {
pub fn new(&self, ast: ParenExpr) -> ParenExprNode {
pub fn new(ast: ParenExpr) -> ParenExprNode {
let syntax = ast.syntax().owned();
ParenExprNode(syntax)
}
@ -2005,7 +2005,7 @@ impl<'a> ParenExpr<'a> {}
pub struct ParenTypeNode(SyntaxNode);
impl ParenTypeNode {
pub fn new(&self, ast: ParenType) -> ParenTypeNode {
pub fn new(ast: ParenType) -> ParenTypeNode {
let syntax = ast.syntax().owned();
ParenTypeNode(syntax)
}
@ -2036,7 +2036,7 @@ impl<'a> ParenType<'a> {}
pub struct PatNode(SyntaxNode);
impl PatNode {
pub fn new(&self, ast: Pat) -> PatNode {
pub fn new(ast: Pat) -> PatNode {
let syntax = ast.syntax().owned();
PatNode(syntax)
}
@ -2098,7 +2098,7 @@ impl<'a> Pat<'a> {}
pub struct PathNode(SyntaxNode);
impl PathNode {
pub fn new(&self, ast: Path) -> PathNode {
pub fn new(ast: Path) -> PathNode {
let syntax = ast.syntax().owned();
PathNode(syntax)
}
@ -2137,7 +2137,7 @@ impl<'a> Path<'a> {
pub struct PathExprNode(SyntaxNode);
impl PathExprNode {
pub fn new(&self, ast: PathExpr) -> PathExprNode {
pub fn new(ast: PathExpr) -> PathExprNode {
let syntax = ast.syntax().owned();
PathExprNode(syntax)
}
@ -2172,7 +2172,7 @@ impl<'a> PathExpr<'a> {
pub struct PathPatNode(SyntaxNode);
impl PathPatNode {
pub fn new(&self, ast: PathPat) -> PathPatNode {
pub fn new(ast: PathPat) -> PathPatNode {
let syntax = ast.syntax().owned();
PathPatNode(syntax)
}
@ -2203,7 +2203,7 @@ impl<'a> PathPat<'a> {}
pub struct PathSegmentNode(SyntaxNode);
impl PathSegmentNode {
pub fn new(&self, ast: PathSegment) -> PathSegmentNode {
pub fn new(ast: PathSegment) -> PathSegmentNode {
let syntax = ast.syntax().owned();
PathSegmentNode(syntax)
}
@ -2238,7 +2238,7 @@ impl<'a> PathSegment<'a> {
pub struct PathTypeNode(SyntaxNode);
impl PathTypeNode {
pub fn new(&self, ast: PathType) -> PathTypeNode {
pub fn new(ast: PathType) -> PathTypeNode {
let syntax = ast.syntax().owned();
PathTypeNode(syntax)
}
@ -2269,7 +2269,7 @@ impl<'a> PathType<'a> {}
pub struct PlaceholderPatNode(SyntaxNode);
impl PlaceholderPatNode {
pub fn new(&self, ast: PlaceholderPat) -> PlaceholderPatNode {
pub fn new(ast: PlaceholderPat) -> PlaceholderPatNode {
let syntax = ast.syntax().owned();
PlaceholderPatNode(syntax)
}
@ -2300,7 +2300,7 @@ impl<'a> PlaceholderPat<'a> {}
pub struct PlaceholderTypeNode(SyntaxNode);
impl PlaceholderTypeNode {
pub fn new(&self, ast: PlaceholderType) -> PlaceholderTypeNode {
pub fn new(ast: PlaceholderType) -> PlaceholderTypeNode {
let syntax = ast.syntax().owned();
PlaceholderTypeNode(syntax)
}
@ -2331,7 +2331,7 @@ impl<'a> PlaceholderType<'a> {}
pub struct PointerTypeNode(SyntaxNode);
impl PointerTypeNode {
pub fn new(&self, ast: PointerType) -> PointerTypeNode {
pub fn new(ast: PointerType) -> PointerTypeNode {
let syntax = ast.syntax().owned();
PointerTypeNode(syntax)
}
@ -2362,7 +2362,7 @@ impl<'a> PointerType<'a> {}
pub struct PrefixExprNode(SyntaxNode);
impl PrefixExprNode {
pub fn new(&self, ast: PrefixExpr) -> PrefixExprNode {
pub fn new(ast: PrefixExpr) -> PrefixExprNode {
let syntax = ast.syntax().owned();
PrefixExprNode(syntax)
}
@ -2393,7 +2393,7 @@ impl<'a> PrefixExpr<'a> {}
pub struct RangeExprNode(SyntaxNode);
impl RangeExprNode {
pub fn new(&self, ast: RangeExpr) -> RangeExprNode {
pub fn new(ast: RangeExpr) -> RangeExprNode {
let syntax = ast.syntax().owned();
RangeExprNode(syntax)
}
@ -2424,7 +2424,7 @@ impl<'a> RangeExpr<'a> {}
pub struct RangePatNode(SyntaxNode);
impl RangePatNode {
pub fn new(&self, ast: RangePat) -> RangePatNode {
pub fn new(ast: RangePat) -> RangePatNode {
let syntax = ast.syntax().owned();
RangePatNode(syntax)
}
@ -2455,7 +2455,7 @@ impl<'a> RangePat<'a> {}
pub struct RefExprNode(SyntaxNode);
impl RefExprNode {
pub fn new(&self, ast: RefExpr) -> RefExprNode {
pub fn new(ast: RefExpr) -> RefExprNode {
let syntax = ast.syntax().owned();
RefExprNode(syntax)
}
@ -2486,7 +2486,7 @@ impl<'a> RefExpr<'a> {}
pub struct RefPatNode(SyntaxNode);
impl RefPatNode {
pub fn new(&self, ast: RefPat) -> RefPatNode {
pub fn new(ast: RefPat) -> RefPatNode {
let syntax = ast.syntax().owned();
RefPatNode(syntax)
}
@ -2517,7 +2517,7 @@ impl<'a> RefPat<'a> {}
pub struct ReferenceTypeNode(SyntaxNode);
impl ReferenceTypeNode {
pub fn new(&self, ast: ReferenceType) -> ReferenceTypeNode {
pub fn new(ast: ReferenceType) -> ReferenceTypeNode {
let syntax = ast.syntax().owned();
ReferenceTypeNode(syntax)
}
@ -2548,7 +2548,7 @@ impl<'a> ReferenceType<'a> {}
pub struct RetTypeNode(SyntaxNode);
impl RetTypeNode {
pub fn new(&self, ast: RetType) -> RetTypeNode {
pub fn new(ast: RetType) -> RetTypeNode {
let syntax = ast.syntax().owned();
RetTypeNode(syntax)
}
@ -2579,7 +2579,7 @@ impl<'a> RetType<'a> {}
pub struct ReturnExprNode(SyntaxNode);
impl ReturnExprNode {
pub fn new(&self, ast: ReturnExpr) -> ReturnExprNode {
pub fn new(ast: ReturnExpr) -> ReturnExprNode {
let syntax = ast.syntax().owned();
ReturnExprNode(syntax)
}
@ -2610,7 +2610,7 @@ impl<'a> ReturnExpr<'a> {}
pub struct RootNode(SyntaxNode);
impl RootNode {
pub fn new(&self, ast: Root) -> RootNode {
pub fn new(ast: Root) -> RootNode {
let syntax = ast.syntax().owned();
RootNode(syntax)
}
@ -2647,7 +2647,7 @@ impl<'a> Root<'a> {
pub struct SelfParamNode(SyntaxNode);
impl SelfParamNode {
pub fn new(&self, ast: SelfParam) -> SelfParamNode {
pub fn new(ast: SelfParam) -> SelfParamNode {
let syntax = ast.syntax().owned();
SelfParamNode(syntax)
}
@ -2678,7 +2678,7 @@ impl<'a> SelfParam<'a> {}
pub struct SlicePatNode(SyntaxNode);
impl SlicePatNode {
pub fn new(&self, ast: SlicePat) -> SlicePatNode {
pub fn new(ast: SlicePat) -> SlicePatNode {
let syntax = ast.syntax().owned();
SlicePatNode(syntax)
}
@ -2709,7 +2709,7 @@ impl<'a> SlicePat<'a> {}
pub struct SliceTypeNode(SyntaxNode);
impl SliceTypeNode {
pub fn new(&self, ast: SliceType) -> SliceTypeNode {
pub fn new(ast: SliceType) -> SliceTypeNode {
let syntax = ast.syntax().owned();
SliceTypeNode(syntax)
}
@ -2740,7 +2740,7 @@ impl<'a> SliceType<'a> {}
pub struct StaticDefNode(SyntaxNode);
impl StaticDefNode {
pub fn new(&self, ast: StaticDef) -> StaticDefNode {
pub fn new(ast: StaticDef) -> StaticDefNode {
let syntax = ast.syntax().owned();
StaticDefNode(syntax)
}
@ -2774,7 +2774,7 @@ impl<'a> StaticDef<'a> {}
pub struct StmtNode(SyntaxNode);
impl StmtNode {
pub fn new(&self, ast: Stmt) -> StmtNode {
pub fn new(ast: Stmt) -> StmtNode {
let syntax = ast.syntax().owned();
StmtNode(syntax)
}
@ -2812,7 +2812,7 @@ impl<'a> Stmt<'a> {}
pub struct StructDefNode(SyntaxNode);
impl StructDefNode {
pub fn new(&self, ast: StructDef) -> StructDefNode {
pub fn new(ast: StructDef) -> StructDefNode {
let syntax = ast.syntax().owned();
StructDefNode(syntax)
}
@ -2850,7 +2850,7 @@ impl<'a> StructDef<'a> {
pub struct StructLitNode(SyntaxNode);
impl StructLitNode {
pub fn new(&self, ast: StructLit) -> StructLitNode {
pub fn new(ast: StructLit) -> StructLitNode {
let syntax = ast.syntax().owned();
StructLitNode(syntax)
}
@ -2881,7 +2881,7 @@ impl<'a> StructLit<'a> {}
pub struct StructPatNode(SyntaxNode);
impl StructPatNode {
pub fn new(&self, ast: StructPat) -> StructPatNode {
pub fn new(ast: StructPat) -> StructPatNode {
let syntax = ast.syntax().owned();
StructPatNode(syntax)
}
@ -2912,7 +2912,7 @@ impl<'a> StructPat<'a> {}
pub struct TokenTreeNode(SyntaxNode);
impl TokenTreeNode {
pub fn new(&self, ast: TokenTree) -> TokenTreeNode {
pub fn new(ast: TokenTree) -> TokenTreeNode {
let syntax = ast.syntax().owned();
TokenTreeNode(syntax)
}
@ -2943,7 +2943,7 @@ impl<'a> TokenTree<'a> {}
pub struct TraitDefNode(SyntaxNode);
impl TraitDefNode {
pub fn new(&self, ast: TraitDef) -> TraitDefNode {
pub fn new(ast: TraitDef) -> TraitDefNode {
let syntax = ast.syntax().owned();
TraitDefNode(syntax)
}
@ -2976,7 +2976,7 @@ impl<'a> TraitDef<'a> {}
pub struct TryExprNode(SyntaxNode);
impl TryExprNode {
pub fn new(&self, ast: TryExpr) -> TryExprNode {
pub fn new(ast: TryExpr) -> TryExprNode {
let syntax = ast.syntax().owned();
TryExprNode(syntax)
}
@ -3007,7 +3007,7 @@ impl<'a> TryExpr<'a> {}
pub struct TupleExprNode(SyntaxNode);
impl TupleExprNode {
pub fn new(&self, ast: TupleExpr) -> TupleExprNode {
pub fn new(ast: TupleExpr) -> TupleExprNode {
let syntax = ast.syntax().owned();
TupleExprNode(syntax)
}
@ -3038,7 +3038,7 @@ impl<'a> TupleExpr<'a> {}
pub struct TuplePatNode(SyntaxNode);
impl TuplePatNode {
pub fn new(&self, ast: TuplePat) -> TuplePatNode {
pub fn new(ast: TuplePat) -> TuplePatNode {
let syntax = ast.syntax().owned();
TuplePatNode(syntax)
}
@ -3069,7 +3069,7 @@ impl<'a> TuplePat<'a> {}
pub struct TupleStructPatNode(SyntaxNode);
impl TupleStructPatNode {
pub fn new(&self, ast: TupleStructPat) -> TupleStructPatNode {
pub fn new(ast: TupleStructPat) -> TupleStructPatNode {
let syntax = ast.syntax().owned();
TupleStructPatNode(syntax)
}
@ -3100,7 +3100,7 @@ impl<'a> TupleStructPat<'a> {}
pub struct TupleTypeNode(SyntaxNode);
impl TupleTypeNode {
pub fn new(&self, ast: TupleType) -> TupleTypeNode {
pub fn new(ast: TupleType) -> TupleTypeNode {
let syntax = ast.syntax().owned();
TupleTypeNode(syntax)
}
@ -3131,7 +3131,7 @@ impl<'a> TupleType<'a> {}
pub struct TypeDefNode(SyntaxNode);
impl TypeDefNode {
pub fn new(&self, ast: TypeDef) -> TypeDefNode {
pub fn new(ast: TypeDef) -> TypeDefNode {
let syntax = ast.syntax().owned();
TypeDefNode(syntax)
}
@ -3165,7 +3165,7 @@ impl<'a> TypeDef<'a> {}
pub struct TypeParamNode(SyntaxNode);
impl TypeParamNode {
pub fn new(&self, ast: TypeParam) -> TypeParamNode {
pub fn new(ast: TypeParam) -> TypeParamNode {
let syntax = ast.syntax().owned();
TypeParamNode(syntax)
}
@ -3197,7 +3197,7 @@ impl<'a> TypeParam<'a> {}
pub struct TypeParamListNode(SyntaxNode);
impl TypeParamListNode {
pub fn new(&self, ast: TypeParamList) -> TypeParamListNode {
pub fn new(ast: TypeParamList) -> TypeParamListNode {
let syntax = ast.syntax().owned();
TypeParamListNode(syntax)
}
@ -3236,7 +3236,7 @@ impl<'a> TypeParamList<'a> {
pub struct TypeRefNode(SyntaxNode);
impl TypeRefNode {
pub fn new(&self, ast: TypeRef) -> TypeRefNode {
pub fn new(ast: TypeRef) -> TypeRefNode {
let syntax = ast.syntax().owned();
TypeRefNode(syntax)
}
@ -3307,7 +3307,7 @@ impl<'a> TypeRef<'a> {}
pub struct UseItemNode(SyntaxNode);
impl UseItemNode {
pub fn new(&self, ast: UseItem) -> UseItemNode {
pub fn new(ast: UseItem) -> UseItemNode {
let syntax = ast.syntax().owned();
UseItemNode(syntax)
}
@ -3342,7 +3342,7 @@ impl<'a> UseItem<'a> {
pub struct UseTreeNode(SyntaxNode);
impl UseTreeNode {
pub fn new(&self, ast: UseTree) -> UseTreeNode {
pub fn new(ast: UseTree) -> UseTreeNode {
let syntax = ast.syntax().owned();
UseTreeNode(syntax)
}
@ -3381,7 +3381,7 @@ impl<'a> UseTree<'a> {
pub struct UseTreeListNode(SyntaxNode);
impl UseTreeListNode {
pub fn new(&self, ast: UseTreeList) -> UseTreeListNode {
pub fn new(ast: UseTreeList) -> UseTreeListNode {
let syntax = ast.syntax().owned();
UseTreeListNode(syntax)
}
@ -3416,7 +3416,7 @@ impl<'a> UseTreeList<'a> {
pub struct WhereClauseNode(SyntaxNode);
impl WhereClauseNode {
pub fn new(&self, ast: WhereClause) -> WhereClauseNode {
pub fn new(ast: WhereClause) -> WhereClauseNode {
let syntax = ast.syntax().owned();
WhereClauseNode(syntax)
}
@ -3447,7 +3447,7 @@ impl<'a> WhereClause<'a> {}
pub struct WhileExprNode(SyntaxNode);
impl WhileExprNode {
pub fn new(&self, ast: WhileExpr) -> WhileExprNode {
pub fn new(ast: WhileExpr) -> WhileExprNode {
let syntax = ast.syntax().owned();
WhileExprNode(syntax)
}
@ -3483,7 +3483,7 @@ impl<'a> WhileExpr<'a> {
pub struct WhitespaceNode(SyntaxNode);
impl WhitespaceNode {
pub fn new(&self, ast: Whitespace) -> WhitespaceNode {
pub fn new(ast: Whitespace) -> WhitespaceNode {
let syntax = ast.syntax().owned();
WhitespaceNode(syntax)
}

View File

@ -17,7 +17,7 @@ use crate::{
pub struct {{ node }}Node(SyntaxNode);
impl {{ node }}Node {
pub fn new(&self, ast: {{ node }}) -> {{ node }}Node {
pub fn new(ast: {{ node }}) -> {{ node }}Node {
let syntax = ast.syntax().owned();
{{ node }}Node(syntax)
}
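This template fix is what drives the long mechanical diff over the generated AST wrappers above: every `*Node::new` stops being a method that needed an already-constructed node via `&self` and becomes a plain associated constructor taking the borrowed AST value. A minimal sketch of the resulting call pattern, using stand-in types rather than the real ra_syntax ones (the SyntaxNode and owned() details are simplified assumptions, approximated here with a clone):

// Stand-in for the real syntax tree node type.
#[derive(Clone)]
struct SyntaxNode(&'static str);

// Borrowed AST wrapper, as produced while walking a parse tree.
#[derive(Clone, Copy)]
struct BlockExpr<'a> {
    syntax: &'a SyntaxNode,
}

impl<'a> BlockExpr<'a> {
    fn syntax(&self) -> &'a SyntaxNode {
        self.syntax
    }
}

// Owned counterpart, matching the generated `{{ node }}Node` shape.
struct BlockExprNode(SyntaxNode);

impl BlockExprNode {
    // After the template fix: an associated constructor, no `&self` required.
    fn new(ast: BlockExpr) -> BlockExprNode {
        let syntax = ast.syntax().clone(); // the generated code calls `.owned()`
        BlockExprNode(syntax)
    }
}

fn main() {
    let node = SyntaxNode("block_expr");
    let ast = BlockExpr { syntax: &node };
    // Before the fix, calling `.new(...)` required an existing BlockExprNode instance.
    let _owned: BlockExprNode = BlockExprNode::new(ast);
}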