Simplify

parent ed73460971
commit 4450365ec8

@@ -105,31 +105,24 @@ fn impls_for_trait_item(
 #[cfg(test)]
 mod tests {
     use ide_db::base_db::FileRange;
+    use itertools::Itertools;
 
     use crate::fixture;
 
     fn check(ra_fixture: &str) {
-        let (analysis, position, annotations) = fixture::annotations(ra_fixture);
+        let (analysis, position, expected) = fixture::annotations(ra_fixture);
 
         let navs = analysis.goto_implementation(position).unwrap().unwrap().info;
 
-        let key = |frange: &FileRange| (frange.file_id, frange.range.start());
+        let cmp = |frange: &FileRange| (frange.file_id, frange.range.start());
 
-        let mut expected = annotations
-            .into_iter()
-            .map(|(range, data)| {
-                assert!(data.is_empty());
-                range
-            })
-            .collect::<Vec<_>>();
-        expected.sort_by_key(key);
-
-        let mut actual = navs
+        let actual = navs
             .into_iter()
            .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+            .sorted_by_key(cmp)
             .collect::<Vec<_>>();
-        actual.sort_by_key(key);
-
+        let expected =
+            expected.into_iter().map(|(range, _)| range).sorted_by_key(cmp).collect::<Vec<_>>();
         assert_eq!(expected, actual);
     }
 
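
The test rewrite above trades the collect-then-sort-in-place steps for itertools' sorted_by_key, which sorts as part of the iterator chain and drops the mut bindings. A minimal sketch of that shape, with made-up tuples standing in for the (file_id, range.start()) keys produced by the cmp closure (only the itertools API here is real):

use itertools::Itertools;

fn main() {
    // Stand-ins for (file_id, range_start) sort keys.
    let ranges = vec![(2u32, 10u32), (1, 5), (2, 3)];

    // Old shape: collect into a Vec, then sort it in place.
    let mut sorted_in_place = ranges.clone();
    sorted_in_place.sort_by_key(|&(file, start)| (file, start));

    // New shape: `sorted_by_key` sorts within the chain, no `mut` binding needed.
    let sorted_in_chain: Vec<_> =
        ranges.into_iter().sorted_by_key(|&(file, start)| (file, start)).collect();

    assert_eq!(sorted_in_place, sorted_in_chain);
}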

@@ -1,6 +1,6 @@
-//! rust-analyzer is lazy and doesn't not compute anything unless asked. This
+//! rust-analyzer is lazy and doesn't compute anything unless asked. This
 //! sometimes is counter productive when, for example, the first goto definition
-//! request takes longer to compute. This modules implemented prepopulating of
+//! request takes longer to compute. This modules implemented prepopulation of
 //! various caches, it's not really advanced at the moment.
 
 use hir::db::DefDatabase;

@@ -27,7 +27,7 @@ pub(crate) fn prime_caches(db: &RootDatabase, cb: &(dyn Fn(PrimeCachesProgress)
     let topo = &graph.crates_in_topological_order();
 
     cb(PrimeCachesProgress::Started);
     // Take care to emit the finish signal even when the computation is canceled.
     let _d = stdx::defer(|| cb(PrimeCachesProgress::Finished));
 
     // FIXME: This would be easy to parallelize, since it's in the ideal ordering for that.
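
The notable pattern in this hunk's surrounding lines is the finish signal: the Finished callback is wrapped in a drop guard (stdx::defer, rust-analyzer's own helper), so it fires even if priming is cancelled partway through. A hedged sketch of that defer-on-drop idea with a hand-rolled guard; the names below are illustrative stand-ins, not rust-analyzer's real types:

struct Defer<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for Defer<F> {
    fn drop(&mut self) {
        // Run the deferred closure exactly once, when the guard goes out of scope.
        if let Some(f) = self.0.take() {
            f();
        }
    }
}

fn defer<F: FnOnce()>(f: F) -> Defer<F> {
    Defer(Some(f))
}

fn prime(report: &dyn Fn(&str)) {
    report("started");
    // The guard's Drop runs on normal exit, early return, or an unwinding panic,
    // so "finished" is always reported after "started".
    let _finish = defer(|| report("finished"));
    // ... cache-priming work would go here ...
}

fn main() {
    prime(&|msg| println!("{msg}"));
}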

@@ -13,7 +13,7 @@
 use ide_db::{
     base_db::FileId,
     defs::{Definition, NameClass, NameRefClass},
-    search::{ReferenceAccess, SearchScope},
+    search::{ReferenceAccess, SearchScope, UsageSearchResult},
     RootDatabase,
 };
 use rustc_hash::FxHashMap;

@@ -56,48 +56,20 @@ pub(crate) fn find_all_refs(
     let _p = profile::span("find_all_refs");
     let syntax = sema.parse(position.file_id).syntax().clone();
 
-    let (def, is_literal_search) =
-        if let Some(name) = get_name_of_item_declaration(&syntax, position) {
-            (
-                match NameClass::classify(sema, &name)? {
-                    NameClass::Definition(it) | NameClass::ConstReference(it) => it,
-                    NameClass::PatFieldShorthand { local_def: _, field_ref } => {
-                        Definition::Field(field_ref)
-                    }
-                },
-                true,
-            )
-        } else {
-            (find_def(sema, &syntax, position.offset)?, false)
-        };
+    let mut is_literal_search = false;
+    let def = if let Some(name) = name_for_constructor_search(&syntax, position) {
+        is_literal_search = true;
+        match NameClass::classify(sema, &name)? {
+            NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+            NameClass::PatFieldShorthand { local_def: _, field_ref } => {
+                Definition::Field(field_ref)
+            }
+        }
+    } else {
+        find_def(sema, &syntax, position.offset)?
+    };
 
     let mut usages = def.usages(sema).set_scope(search_scope).include_self_refs().all();
-    if is_literal_search {
-        // filter for constructor-literals
-        let refs = usages.references.values_mut();
-        match def {
-            Definition::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(enum_))) => {
-                refs.for_each(|it| {
-                    it.retain(|reference| {
-                        reference
-                            .name
-                            .as_name_ref()
-                            .map_or(false, |name_ref| is_enum_lit_name_ref(sema, enum_, name_ref))
-                    })
-                });
-                usages.references.retain(|_, it| !it.is_empty());
-            }
-            Definition::ModuleDef(hir::ModuleDef::Adt(_) | hir::ModuleDef::Variant(_)) => {
-                refs.for_each(|it| {
-                    it.retain(|reference| {
-                        reference.name.as_name_ref().map_or(false, is_lit_name_ref)
-                    })
-                });
-                usages.references.retain(|_, it| !it.is_empty());
-            }
-            _ => {}
-        }
-    }
     let declaration = match def {
         Definition::ModuleDef(hir::ModuleDef::Module(module)) => {
             Some(NavigationTarget::from_module_to_decl(sema.db, module))

@@ -108,6 +80,10 @@ pub(crate) fn find_all_refs(
         let decl_range = nav.focus_or_full_range();
         Declaration { nav, access: decl_access(&def, &syntax, decl_range) }
     });
+    if is_literal_search {
+        retain_adt_literal_usages(&mut usages, def, sema);
+    }
+
     let references = usages
         .into_iter()
         .map(|(file_id, refs)| {

@@ -174,7 +150,37 @@ pub(crate) fn decl_access(
     None
 }
 
-fn get_name_of_item_declaration(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
+/// Filter out all non-literal usages for adt-defs
+fn retain_adt_literal_usages(
+    usages: &mut UsageSearchResult,
+    def: Definition,
+    sema: &Semantics<RootDatabase>,
+) {
+    let refs = usages.references.values_mut();
+    match def {
+        Definition::ModuleDef(hir::ModuleDef::Adt(hir::Adt::Enum(enum_))) => {
+            refs.for_each(|it| {
+                it.retain(|reference| {
+                    reference
+                        .name
+                        .as_name_ref()
+                        .map_or(false, |name_ref| is_enum_lit_name_ref(sema, enum_, name_ref))
+                })
+            });
+            usages.references.retain(|_, it| !it.is_empty());
+        }
+        Definition::ModuleDef(hir::ModuleDef::Adt(_) | hir::ModuleDef::Variant(_)) => {
+            refs.for_each(|it| {
+                it.retain(|reference| reference.name.as_name_ref().map_or(false, is_lit_name_ref))
+            });
+            usages.references.retain(|_, it| !it.is_empty());
+        }
+        _ => {}
+    }
+}
+
+/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
+fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
     let token = syntax.token_at_offset(position.offset).right_biased()?;
     let token_parent = token.parent()?;
     let kind = token.kind();
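
The extracted retain_adt_literal_usages above is a two-level prune: drop non-matching references inside each file's list, then drop files whose list became empty. A small self-contained sketch of that shape using only std types (the data and the "looks like a literal" test are placeholders, not rust-analyzer's real UsageSearchResult or is_lit_name_ref):

use std::collections::HashMap;

fn main() {
    // file id -> reference names found in that file (illustrative data).
    let mut usages: HashMap<u32, Vec<&str>> = HashMap::new();
    usages.insert(0, vec!["Foo", "foo"]);
    usages.insert(1, vec!["bar"]);

    // First level: keep only "literal-looking" names within each file.
    for refs in usages.values_mut() {
        refs.retain(|name| name.chars().next().map_or(false, |c| c.is_uppercase()));
    }
    // Second level: drop files that no longer have any references.
    usages.retain(|_, refs| !refs.is_empty());

    assert_eq!(usages.len(), 1);
    assert_eq!(usages[&0], vec!["Foo"]);
}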

@@ -17,7 +17,7 @@
 use rustc_hash::FxHashMap;
 use syntax::{
     ast::{self, HasFormatSpecifier},
-    AstNode, AstToken, Direction, NodeOrToken,
+    match_ast, AstNode, AstToken, Direction, NodeOrToken,
     SyntaxKind::*,
     SyntaxNode, TextRange, WalkEvent, T,
 };

@@ -159,15 +159,16 @@ pub(crate) fn highlight(
     // Determine the root based on the given range.
     let (root, range_to_highlight) = {
         let source_file = sema.parse(file_id);
+        let source_file = source_file.syntax();
         match range_to_highlight {
             Some(range) => {
-                let node = match source_file.syntax().covering_element(range) {
+                let node = match source_file.covering_element(range) {
                     NodeOrToken::Node(it) => it,
-                    NodeOrToken::Token(it) => it.parent().unwrap(),
+                    NodeOrToken::Token(it) => it.parent().unwrap_or_else(|| source_file.clone()),
                 };
                 (node, range)
             }
-            None => (source_file.syntax().clone(), source_file.syntax().text_range()),
+            None => (source_file.clone(), source_file.text_range()),
         }
     };
 

@@ -211,62 +212,61 @@ fn traverse(
             continue;
         }
 
-        // Track "inside macro" state
-        match event.clone().map(|it| it.into_node().and_then(ast::MacroCall::cast)) {
-            WalkEvent::Enter(Some(mc)) => {
-                if let Some(range) = macro_call_range(&mc) {
-                    hl.add(HlRange {
-                        range,
-                        highlight: HlTag::Symbol(SymbolKind::Macro).into(),
-                        binding_hash: None,
-                    });
-                }
-                current_macro_call = Some(mc.clone());
-                continue;
-            }
-            WalkEvent::Leave(Some(mc)) => {
-                assert_eq!(current_macro_call, Some(mc));
-                current_macro_call = None;
-            }
-            _ => (),
-        }
-        match event.clone().map(|it| it.into_node().and_then(ast::Item::cast)) {
-            WalkEvent::Enter(Some(item)) => {
-                if sema.is_attr_macro_call(&item) {
-                    current_attr_macro_call = Some(item);
-                }
-            }
-            WalkEvent::Leave(Some(item)) => {
-                if current_attr_macro_call == Some(item) {
-                    current_attr_macro_call = None;
-                }
-            }
-            _ => (),
-        }
-
-        match event.clone().map(|it| it.into_node().and_then(ast::Macro::cast)) {
-            WalkEvent::Enter(Some(mac)) => {
-                macro_highlighter.init();
-                current_macro = Some(mac);
-                continue;
-            }
-            WalkEvent::Leave(Some(mac)) => {
-                assert_eq!(current_macro, Some(mac));
-                current_macro = None;
-                macro_highlighter = MacroHighlighter::default();
-            }
-            _ => (),
-        }
-        match &event {
-            WalkEvent::Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
-                inside_attribute = true
-            }
-            WalkEvent::Leave(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
-                inside_attribute = false
-            }
-            _ => (),
-        }
+        match event.clone() {
+            WalkEvent::Enter(NodeOrToken::Node(node)) => {
+                match_ast! {
+                    match node {
+                        ast::MacroCall(mcall) => {
+                            if let Some(range) = macro_call_range(&mcall) {
+                                hl.add(HlRange {
+                                    range,
+                                    highlight: HlTag::Symbol(SymbolKind::Macro).into(),
+                                    binding_hash: None,
+                                });
+                            }
+                            current_macro_call = Some(mcall);
+                            continue;
+                        },
+                        ast::Macro(mac) => {
+                            macro_highlighter.init();
+                            current_macro = Some(mac);
+                            continue;
+                        },
+                        ast::Item(item) => {
+                            if sema.is_attr_macro_call(&item) {
+                                current_attr_macro_call = Some(item);
+                            }
+                        },
+                        ast::Attr(__) => inside_attribute = true,
+                        _ => ()
+                    }
+                }
+            }
+            WalkEvent::Leave(NodeOrToken::Node(node)) => {
+                match_ast! {
+                    match node {
+                        ast::MacroCall(mcall) => {
+                            assert_eq!(current_macro_call, Some(mcall));
+                            current_macro_call = None;
+                        },
+                        ast::Macro(mac) => {
+                            assert_eq!(current_macro, Some(mac));
+                            current_macro = None;
+                            macro_highlighter = MacroHighlighter::default();
+                        },
+                        ast::Item(item) => {
+                            if current_attr_macro_call == Some(item) {
+                                current_attr_macro_call = None;
+                            }
+                        },
+                        ast::Attr(__) => inside_attribute = false,
+                        _ => ()
+                    }
+                }
+            }
+            _ => (),
+        }
 
         let element = match event {
             WalkEvent::Enter(it) => it,
             WalkEvent::Leave(it) => {
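
The traverse rewrite folds three separate matches over the same event into a single match on event.clone(), using match_ast! (rust-analyzer's helper macro) to try a series of AST casts on the node; conceptually the macro boils down to a chain of if-let Type::cast(node.clone()) tests. A rough self-contained sketch of that dispatch shape; Node, MacroCall and Attr here are invented stand-ins, not the real syntax crate API:

#[derive(Clone)]
enum Node {
    MacroCall(String),
    Attr(String),
    Other,
}

struct MacroCall(String);
struct Attr(String);

impl MacroCall {
    fn cast(node: Node) -> Option<Self> {
        match node {
            Node::MacroCall(name) => Some(MacroCall(name)),
            _ => None,
        }
    }
}

impl Attr {
    fn cast(node: Node) -> Option<Self> {
        match node {
            Node::Attr(name) => Some(Attr(name)),
            _ => None,
        }
    }
}

// One if-let per arm, tried in order: the shape a match_ast! block reduces to.
fn classify(node: Node) -> String {
    if let Some(mcall) = MacroCall::cast(node.clone()) {
        format!("macro call: {}", mcall.0)
    } else if let Some(attr) = Attr::cast(node.clone()) {
        format!("attribute: {}", attr.0)
    } else {
        "something else".to_string()
    }
}

fn main() {
    assert_eq!(classify(Node::MacroCall("vec".into())), "macro call: vec");
    assert_eq!(classify(Node::Attr("derive".into())), "attribute: derive");
    assert_eq!(classify(Node::Other), "something else");
}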