Merge pull request #18409 from Veykril/veykril/push-rkrkpvzvumvx
Only construct a resolver in macro descension when needed
This commit is contained in: cf5ab635ab
@@ -1043,12 +1043,12 @@ impl HasResolver for ModuleId {
     fn resolver(self, db: &dyn DefDatabase) -> Resolver {
         let mut def_map = self.def_map(db);
         let mut module_id = self.local_id;
-        let mut modules: SmallVec<[_; 1]> = smallvec![];
 
         if !self.is_block_module() {
             return Resolver { scopes: vec![], module_scope: ModuleItemMap { def_map, module_id } };
         }
 
+        let mut modules: SmallVec<[_; 1]> = smallvec![];
         while let Some(parent) = def_map.parent() {
             let block_def_map = mem::replace(&mut def_map, parent.def_map(db));
             modules.push(block_def_map);
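The hunk above moves the `modules` scratch buffer below the early return, so the common non-block-module case never allocates it. A minimal sketch of that "allocate only past the early return" pattern, using a plain `Vec` and hypothetical `Item`/`fast_path` names rather than rust-analyzer's `SmallVec` and types:

// Sketch only: `Item`, `fast_path`, and `resolve` are illustrative stand-ins.
#[derive(Debug)]
struct Item(u32);

fn fast_path(id: u32) -> Option<Item> {
    // Common case: resolved without touching any scratch storage.
    (id % 2 == 0).then(|| Item(id))
}

fn resolve(id: u32) -> Item {
    // Return before building any scratch storage; most callers stop here.
    if let Some(item) = fast_path(id) {
        return item;
    }
    // Only the slow path pays for the allocation (a SmallVec in the real code,
    // a plain Vec here to keep the sketch dependency-free).
    let mut scratch: Vec<u32> = Vec::new();
    let mut n = id;
    while n != 0 {
        scratch.push(n % 10);
        n /= 10;
    }
    Item(scratch.iter().sum())
}

fn main() {
    println!("{:?}", resolve(4)); // fast path, no allocation
    println!("{:?}", resolve(7)); // slow path, builds the scratch Vec
}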
@@ -936,16 +936,7 @@ fn descend_into_macros_impl<T>(
             }
         }
 
-        let (file_id, tokens) = stack.first()?;
-        // make sure we pick the token in the expanded include if we encountered an include,
-        // otherwise we'll get the wrong semantics
-        let sa =
-            tokens.first()?.0.parent().and_then(|parent| {
-                self.analyze_impl(InFile::new(*file_id, &parent), None, false)
-            })?;
-
         let mut m_cache = self.macro_call_cache.borrow_mut();
-        let def_map = sa.resolver.def_map();
 
         // Filters out all tokens that contain the given range (usually the macro call), any such
         // token is redundant as the corresponding macro call has already been processed
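This hunk stops building a `SourceAnalyzer` (and reading its resolver's def map) eagerly at the top of `descend_into_macros_impl`; the hunks below construct it only at the sites that actually need it. The PR inlines that construction at each use site, but the underlying "compute on demand" idea can be sketched with std's `OnceCell`; the `Analyzer`/`build_analyzer` names are illustrative stand-ins, not rust-analyzer APIs:

use std::cell::OnceCell;

// Hypothetical stand-in for the expensive analyzer; not a rust-analyzer type.
struct Analyzer {
    name: String,
}

fn build_analyzer() -> Analyzer {
    println!("building analyzer (expensive)");
    Analyzer { name: "analyzer".to_owned() }
}

fn descend(needs_resolution: bool) {
    // Defer construction: the cell stays empty unless some branch asks for it.
    let analyzer: OnceCell<Analyzer> = OnceCell::new();

    if needs_resolution {
        // First (and only) access pays the construction cost.
        let a = analyzer.get_or_init(build_analyzer);
        println!("resolved with {}", a.name);
    } else {
        println!("no resolver needed on this path");
    }
}

fn main() {
    descend(false); // never builds the analyzer
    descend(true);  // builds it exactly once, on demand
}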
@@ -1024,8 +1015,16 @@ fn descend_into_macros_impl<T>(
                     ) {
                         call.as_macro_file()
                     } else {
-                        // FIXME: This is wrong, the SourceAnalyzer might be invalid here
-                        sa.expand(self.db, mcall.as_ref())?
+                        token
+                            .parent()
+                            .and_then(|parent| {
+                                self.analyze_impl(
+                                    InFile::new(expansion, &parent),
+                                    None,
+                                    false,
+                                )
+                            })?
+                            .expand(self.db, mcall.as_ref())?
                     };
                     m_cache.insert(mcall, it);
                     it
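The hunk above keeps the existing `m_cache` memoization and only changes what happens on a miss: the analyzer is now derived from the token's parent right where the expansion is computed, instead of reusing the possibly invalid `sa` built up front. A small sketch of the same check-the-cache-then-memoize shape, with hypothetical `MacroCallId`/`MacroFileId`/`expand_uncached` stand-ins:

use std::collections::HashMap;

// Hypothetical types standing in for macro calls and expanded files.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCallId(u32);
#[derive(Clone, Copy, Debug)]
struct MacroFileId(u32);

fn expand_uncached(call: MacroCallId) -> Option<MacroFileId> {
    // Stands in for "analyze the token's parent, then expand the call".
    println!("expanding {:?} (expensive)", call);
    Some(MacroFileId(call.0 + 1000))
}

fn expansion(cache: &mut HashMap<MacroCallId, MacroFileId>, call: MacroCallId) -> Option<MacroFileId> {
    // Cache hit: no analyzer is ever constructed.
    if let Some(&file) = cache.get(&call) {
        return Some(file);
    }
    // Cache miss: do the expensive work once, then memoize it, mirroring
    // the `m_cache.insert(mcall, it)` step in the diff above.
    let file = expand_uncached(call)?;
    cache.insert(call, file);
    Some(file)
}

fn main() {
    let mut cache = HashMap::new();
    let call = MacroCallId(7);
    assert!(expansion(&mut cache, call).is_some()); // miss: expands
    assert!(expansion(&mut cache, call).is_some()); // hit: reuses the cached id
}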
@@ -1095,9 +1094,16 @@ fn descend_into_macros_impl<T>(
                         attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
                     // Not an attribute, nor a derive, so it's either an intert attribute or a derive helper
                     // Try to resolve to a derive helper and downmap
+                    let resolver = &token
+                        .parent()
+                        .and_then(|parent| {
+                            self.analyze_impl(InFile::new(expansion, &parent), None, false)
+                        })?
+                        .resolver;
                     let id = self.db.ast_id_map(expansion).ast_id(&adt);
-                    let helpers =
-                        def_map.derive_helpers_in_scope(InFile::new(expansion, id))?;
+                    let helpers = resolver
+                        .def_map()
+                        .derive_helpers_in_scope(InFile::new(expansion, id))?;
 
                     if !helpers.is_empty() {
                         let text_range = attr.syntax().text_range();
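The last hunk builds the resolver only inside the derive-helper fallback branch, chaining `Option`s with `and_then` and `?` so the lookup bails out early if any step fails. A dependency-free sketch of that shape; `parent_of`, `analyze`, and the `Resolver`/`Analyzed` structs are hypothetical:

// Sketch only: none of these names are rust-analyzer APIs.
struct Resolver {
    helpers: Vec<String>,
}

struct Analyzed {
    resolver: Resolver,
}

fn parent_of(token: &str) -> Option<String> {
    token.split('.').next().map(str::to_owned)
}

fn analyze(parent: &str) -> Option<Analyzed> {
    println!("analyzing {parent} (expensive, only on the fallback path)");
    Some(Analyzed { resolver: Resolver { helpers: vec!["serde".to_owned()] } })
}

fn derive_helpers(token: &str) -> Option<Vec<String>> {
    // `?` bails out early if any step yields None, so the analyzer is built
    // at most once and only when this fallback branch actually runs.
    let analyzed = parent_of(token).and_then(|parent| analyze(&parent))?;
    Some(analyzed.resolver.helpers)
}

fn main() {
    println!("{:?}", derive_helpers("my_struct.attr"));
}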