Lukas Wirth 2022-08-15 16:16:59 +02:00
parent f982c76161
commit 3f149a63d2
4 changed files with 15 additions and 15 deletions

View File

@@ -15,7 +15,7 @@
 use la_arena::{Arena, Idx};
 use profile::Count;
 use rustc_hash::FxHasher;
-use syntax::{ast, match_ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};
+use syntax::{ast, AstNode, AstPtr, SyntaxNode, SyntaxNodePtr};

 /// `AstId` points to an AST node in a specific file.
 pub struct FileAstId<N: AstNode> {
@@ -92,18 +92,12 @@ pub(crate) fn from_source(node: &SyntaxNode) -> AstIdMap {
         // change parent's id. This means that, say, adding a new function to a
         // trait does not change ids of top-level items, which helps caching.
         bdfs(node, |it| {
-            match_ast! {
-                match it {
-                    ast::Item(module_item) => {
-                        res.alloc(module_item.syntax());
-                        true
-                    },
-                    ast::BlockExpr(block) => {
-                        res.alloc(block.syntax());
-                        true
-                    },
-                    _ => false,
-                }
+            let kind = it.kind();
+            if ast::Item::can_cast(kind) || ast::BlockExpr::can_cast(kind) {
+                res.alloc(&it);
+                true
+            } else {
+                false
             }
         });
         res.map = hashbrown::HashMap::with_capacity_and_hasher(res.arena.len(), ());
@@ -123,6 +117,7 @@ pub fn ast_id<N: AstNode>(&self, item: &N) -> FileAstId<N> {
         let raw = self.erased_ast_id(item.syntax());
         FileAstId { raw, _ty: PhantomData }
     }
+
     fn erased_ast_id(&self, item: &SyntaxNode) -> ErasedFileAstId {
         let ptr = SyntaxNodePtr::new(item);
         let hash = hash_ptr(&ptr);
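
Editor's note: for readers unfamiliar with the pattern the from_source hunk switches to, the new code no longer pattern-matches through match_ast! into typed AST wrappers; it only asks each node's SyntaxKind whether it could be cast to ast::Item or ast::BlockExpr. The minimal sketch below illustrates that can_cast idea with toy stand-in types (SyntaxKind, SyntaxNode, AstNode, Item, BlockExpr here are simplified definitions for illustration, not rust-analyzer's real syntax crate).

// Toy stand-ins; only the shape of `can_cast` matters for the illustration.
#[derive(Clone, Copy, PartialEq, Eq)]
enum SyntaxKind {
    Item,
    BlockExpr,
    Whitespace,
}

struct SyntaxNode {
    kind: SyntaxKind,
}

impl SyntaxNode {
    fn kind(&self) -> SyntaxKind {
        self.kind
    }
}

trait AstNode {
    // `can_cast` answers "could a node of this kind be this AST type?"
    // without constructing the typed wrapper at all.
    fn can_cast(kind: SyntaxKind) -> bool;
}

struct Item;
struct BlockExpr;

impl AstNode for Item {
    fn can_cast(kind: SyntaxKind) -> bool {
        kind == SyntaxKind::Item
    }
}

impl AstNode for BlockExpr {
    fn can_cast(kind: SyntaxKind) -> bool {
        kind == SyntaxKind::BlockExpr
    }
}

// Mirrors the new predicate inside `bdfs`: allocate an id only for item-like
// or block-expression nodes, deciding from the kind alone.
fn should_alloc(node: &SyntaxNode) -> bool {
    let kind = node.kind();
    Item::can_cast(kind) || BlockExpr::can_cast(kind)
}

fn main() {
    assert!(should_alloc(&SyntaxNode { kind: SyntaxKind::Item }));
    assert!(!should_alloc(&SyntaxNode { kind: SyntaxKind::Whitespace }));
}

Whether the kind check is measurably cheaper depends on the real generated implementations; the visible gain in the hunk is simply that this file no longer needs the match_ast! macro or the intermediate typed nodes.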

View File

@@ -321,7 +321,11 @@ fn censor_for_macro_input(loc: &MacroCallLoc, node: &SyntaxNode) -> FxHashSet<Sy
             ast::Item::cast(node.clone())?
                 .attrs()
                 .take(derive_attr_index as usize + 1)
-                // FIXME
+                // FIXME, this resolution should not be done syntactically
+                // derive is a proper macro now, no longer builtin
+                // But we do not have resolution at this stage, this means
+                // we need to know about all macro calls for the given ast item here
+                // so we require some kind of mapping...
                 .filter(|attr| attr.simple_name().as_deref() == Some("derive"))
                 .map(|it| it.syntax().clone())
                 .collect()
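
Editor's note: the expanded FIXME describes a purely syntactic approximation. Among the attributes up to and including the derive attribute being expanded, only those literally spelled `derive` are kept for censoring, so a renamed or re-exported derive attribute would be missed; proper name resolution is what the comment asks for. A rough, self-contained sketch of that filtering follows, using a toy Attr type and a hypothetical censored_derive_attrs helper rather than rust-analyzer's real API.

#[derive(Debug, Clone)]
struct Attr {
    path: String, // e.g. "derive", "cfg", or a multi-segment path
}

impl Attr {
    // Returns the path only if it is a single identifier, mirroring how a
    // syntactic check cannot see through renames or path-qualified attributes.
    fn simple_name(&self) -> Option<&str> {
        if self.path.contains("::") {
            None
        } else {
            Some(self.path.as_str())
        }
    }
}

// Keep only attributes textually spelled `derive` among those preceding
// (and including) the derive attribute currently being expanded.
fn censored_derive_attrs(attrs: &[Attr], derive_attr_index: usize) -> Vec<Attr> {
    attrs
        .iter()
        .take(derive_attr_index + 1)
        .filter(|attr| attr.simple_name() == Some("derive"))
        .cloned()
        .collect()
}

fn main() {
    let attrs = vec![
        Attr { path: "cfg".into() },
        Attr { path: "derive".into() },
        Attr { path: "allow".into() },
    ];
    // Only the literal `derive` attribute survives; a renamed derive would be
    // missed, which is exactly the limitation the FIXME points out.
    let censored = censored_derive_attrs(&attrs, 1);
    assert_eq!(censored.len(), 1);
    println!("{:?}", censored);
}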

View File

@@ -130,7 +130,6 @@ pub struct MacroDefId {
 pub enum MacroDefKind {
     Declarative(AstId<ast::Macro>),
     BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>),
-    // FIXME: maybe just Builtin and rename BuiltinFnLikeExpander to BuiltinExpander
     BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
     BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
     BuiltInEager(EagerExpander, AstId<ast::Macro>),

View File

@@ -119,6 +119,8 @@ pub(crate) fn hover(
     }

     let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
+    // prefer descending the same token kind in attribute expansions, in normal macros text
+    // equivalency is more important
     let descended = if in_attr {
         [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
     } else {
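
Editor's note: the new comment in hover is about choosing among several descended tokens. Inside attribute expansions the same original token may map to tokens of different kinds, and hover prefers one whose kind matches the original. A small, self-contained sketch of that preference is below; Token and SyntaxKind are simplified stand-ins, and descend_with_kind_preference is a hypothetical helper illustrating the idea, not sema.descend_into_macros_with_kind_preference itself.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum SyntaxKind {
    Ident,
    StringLit,
}

#[derive(Clone, Debug)]
struct Token {
    kind: SyntaxKind,
    text: String,
}

// Among the tokens the original token descends to inside expansions, prefer
// one with the same kind as the original; otherwise fall back to the first.
fn descend_with_kind_preference(original: &Token, descended: &[Token]) -> Option<Token> {
    descended
        .iter()
        .find(|t| t.kind == original.kind)
        .or_else(|| descended.first())
        .cloned()
}

fn main() {
    let original = Token { kind: SyntaxKind::Ident, text: "foo".into() };
    let descended = vec![
        Token { kind: SyntaxKind::StringLit, text: "\"foo\"".into() },
        Token { kind: SyntaxKind::Ident, text: "foo".into() },
    ];
    let picked = descend_with_kind_preference(&original, &descended).unwrap();
    assert_eq!(picked.kind, SyntaxKind::Ident);
    println!("{:?}", picked);
}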