//! See `Semantics`. mod source_to_def; use std::{ cell::RefCell, fmt, iter, mem, ops::{self, ControlFlow, Not}, }; use base_db::{FileId, FileRange}; use either::Either; use hir_def::{ hir::Expr, lower::LowerCtx, nameres::MacroSubNs, resolver::{self, HasResolver, Resolver, TypeNs}, type_ref::Mutability, AsMacroCall, DefWithBodyId, FunctionId, MacroId, TraitId, VariantId, }; use hir_expand::{ attrs::collect_attrs, db::ExpandDatabase, files::InRealFile, name::AsName, ExpansionInfo, InMacroFile, MacroCallId, MacroFileId, MacroFileIdExt, }; use itertools::Itertools; use rustc_hash::{FxHashMap, FxHashSet}; use smallvec::{smallvec, SmallVec}; use span::{Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID}; use stdx::TupleExt; use syntax::{ algo::skip_trivia_token, ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody, IsString as _}, match_ast, AstNode, AstToken, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextRange, TextSize, }; use crate::{ db::HirDatabase, semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx}, source_analyzer::{resolve_hir_path, SourceAnalyzer}, Access, Adjust, Adjustment, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, Struct, ToolModule, Trait, TupleField, Type, TypeAlias, TypeParam, VariantDef, }; pub enum DescendPreference { SameText, SameKind, None, } #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum PathResolution { /// An item Def(ModuleDef), /// A local binding (only value namespace) Local(Local), /// A type parameter TypeParam(TypeParam), /// A const parameter ConstParam(ConstParam), SelfType(Impl), BuiltinAttr(BuiltinAttr), ToolModule(ToolModule), DeriveHelper(DeriveHelper), } impl PathResolution { pub(crate) fn in_type_ns(&self) -> Option { match self { PathResolution::Def(ModuleDef::Adt(adt)) => 
Some(TypeNs::AdtId((*adt).into())),
            PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
                Some(TypeNs::BuiltinType((*builtin).into()))
            }
            PathResolution::Def(
                ModuleDef::Const(_)
                | ModuleDef::Variant(_)
                | ModuleDef::Macro(_)
                | ModuleDef::Function(_)
                | ModuleDef::Module(_)
                | ModuleDef::Static(_)
                | ModuleDef::Trait(_)
                | ModuleDef::TraitAlias(_),
            ) => None,
            PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
                Some(TypeNs::TypeAliasId((*alias).into()))
            }
            PathResolution::BuiltinAttr(_)
            | PathResolution::ToolModule(_)
            | PathResolution::Local(_)
            | PathResolution::DeriveHelper(_)
            | PathResolution::ConstParam(_) => None,
            PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
            PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
        }
    }
}

#[derive(Debug)]
pub struct TypeInfo {
    /// The original type of the expression or pattern.
    pub original: Type,
    /// The adjusted type, if an adjustment happened.
    pub adjusted: Option<Type>,
}

impl TypeInfo {
    pub fn original(self) -> Type {
        self.original
    }

    pub fn has_adjustment(&self) -> bool {
        self.adjusted.is_some()
    }

    /// The adjusted type, or the original in case no adjustments occurred.
    pub fn adjusted(self) -> Type {
        self.adjusted.unwrap_or(self.original)
    }
}

/// Primary API to get semantic information, like types, from syntax trees.
pub struct Semantics<'db, DB> {
    pub db: &'db DB,
    imp: SemanticsImpl<'db>,
}

pub struct SemanticsImpl<'db> {
    pub db: &'db dyn HirDatabase,
    s2d_cache: RefCell<SourceToDefCache>,
    /// Rootnode to HirFileId cache
    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
    // These 2 caches are mainly useful for semantic highlighting as nothing else descends a lot of tokens
    // So we might wanna move them out into something specific for semantic highlighting
    expansion_info_cache: RefCell<FxHashMap<MacroFileId, ExpansionInfo>>,
    /// MacroCall to its expansion's MacroFileId cache
    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, MacroFileId>>,
}

impl<DB> fmt::Debug for Semantics<'_, DB> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Semantics {{ ... }}")
    }
}

impl<'db, DB> ops::Deref for Semantics<'db, DB> {
    type Target = SemanticsImpl<'db>;

    fn deref(&self) -> &Self::Target {
        &self.imp
    }
}

impl<'db, DB: HirDatabase> Semantics<'db, DB> {
    pub fn new(db: &DB) -> Semantics<'_, DB> {
        let impl_ = SemanticsImpl::new(db);
        Semantics { db, imp: impl_ }
    }

    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
        self.imp.find_file(syntax_node).file_id
    }

    pub fn token_ancestors_with_macros(
        &self,
        token: SyntaxToken,
    ) -> impl Iterator<Item = SyntaxNode> + '_ {
        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
    }

    /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
    /// search up until it is of the target AstNode type
    pub fn find_node_at_offset_with_macros<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
    }

    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
    /// descend it and find again
    pub fn find_node_at_offset_with_descend<N: AstNode>(
        &self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> Option<N> {
        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
    }

    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
    /// descend it and find again
    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
        &'slf self,
        node: &SyntaxNode,
        offset: TextSize,
    ) -> impl Iterator<Item = N> + 'slf {
        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
    }

    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
        self.imp.resolve_await_to_poll(await_expr).map(Function::from)
    }

    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
        self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
    }

    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
        self.imp.resolve_index_expr(index_expr).map(Function::from)
    }

    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
self.imp.resolve_bin_expr(bin_expr).map(Function::from)
    }

    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
        self.imp.resolve_try_expr(try_expr).map(Function::from)
    }

    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
        self.imp.resolve_variant(record_lit).map(VariantDef::from)
    }

    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
        self.imp.to_module_def(file).next()
    }

    pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
        self.imp.to_module_def(file)
    }

    pub fn to_struct_def(&self, s: &ast::Struct) -> Option<Struct> {
        self.imp.to_def(s).map(Struct::from)
    }

    pub fn to_impl_def(&self, i: &ast::Impl) -> Option<Impl> {
        self.imp.to_def(i).map(Impl::from)
    }
}

impl<'db> SemanticsImpl<'db> {
    fn new(db: &'db dyn HirDatabase) -> Self {
        SemanticsImpl {
            db,
            s2d_cache: Default::default(),
            cache: Default::default(),
            expansion_info_cache: Default::default(),
            macro_call_cache: Default::default(),
        }
    }

    /// Parses the file, caching the root node against its `FileId`.
    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
        let tree = self.db.parse(file_id).tree();
        self.cache(tree.syntax().clone(), file_id.into());
        tree
    }

    /// Parses or expands the given file, caching the resulting root node.
    pub fn parse_or_expand(&self, file_id: HirFileId) -> SyntaxNode {
        let node = self.db.parse_or_expand(file_id);
        self.cache(node.clone(), file_id);
        node
    }

    /// Expands the given macro call, returning the root of its expansion tree.
    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
        let sa = self.analyze_no_infer(macro_call.syntax())?;
        let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
        let node = self.parse_or_expand(file_id.into());
        Some(node)
    }

    /// If `item` has an attribute macro attached to it, expands it.
pub fn expand_attr_macro(&self, item: &ast::Item) -> Option { let src = self.wrap_node_infile(item.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?; Some(self.parse_or_expand(macro_call_id.as_file())) } pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let src = self.wrap_node_infile(attr.clone()); let call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it) })?; Some(self.parse_or_expand(call_id.as_file())) } pub fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option>> { let calls = self.derive_macro_calls(attr)?; self.with_ctx(|ctx| { Some( calls .into_iter() .map(|call| { macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id }) }) .collect(), ) }) } pub fn expand_derive_macro(&self, attr: &ast::Attr) -> Option> { let res: Vec<_> = self .derive_macro_calls(attr)? .into_iter() .flat_map(|call| { let file_id = call?.as_file(); let node = self.db.parse_or_expand(file_id); self.cache(node.clone(), file_id); Some(node) }) .collect(); Some(res) } fn derive_macro_calls(&self, attr: &ast::Attr) -> Option>> { let adt = attr.syntax().parent().and_then(ast::Adt::cast)?; let file_id = self.find_file(adt.syntax()).file_id; let adt = InFile::new(file_id, &adt); let src = InFile::new(file_id, attr.clone()); self.with_ctx(|ctx| { let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?; Some(res.to_vec()) }) } pub fn is_derive_annotated(&self, adt: &ast::Adt) -> bool { let file_id = self.find_file(adt.syntax()).file_id; let adt = InFile::new(file_id, adt); self.with_ctx(|ctx| ctx.has_derives(adt)) } pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool { let file_id = self.find_file(item.syntax()).file_id; let src = InFile::new(file_id, item.clone()); self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some()) } /// Expand the macro call with a different token tree, mapping the 
`token_to_map` down into the /// expansion. `token_to_map` should be a token from the `speculative args` node. pub fn speculative_expand( &self, actual_macro_call: &ast::MacroCall, speculative_args: &ast::TokenTree, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let SourceAnalyzer { file_id, resolver, .. } = self.analyze_no_infer(actual_macro_call.syntax())?; let macro_call = InFile::new(file_id, actual_macro_call); let krate = resolver.krate(); let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| { resolver.resolve_path_as_macro_def(self.db.upcast(), &path, Some(MacroSubNs::Bang)) })?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, speculative_args.syntax(), token_to_map, ) } /// Expand the macro call with a different item as the input, mapping the `token_to_map` down into the /// expansion. `token_to_map` should be a token from the `speculative args` node. pub fn speculative_expand_attr_macro( &self, actual_macro_call: &ast::Item, speculative_args: &ast::Item, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let macro_call = self.wrap_node_infile(actual_macro_call.clone()); let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, speculative_args.syntax(), token_to_map, ) } pub fn speculative_expand_derive_as_pseudo_attr_macro( &self, actual_macro_call: &ast::Attr, speculative_args: &ast::Attr, token_to_map: SyntaxToken, ) -> Option<(SyntaxNode, SyntaxToken)> { let attr = self.wrap_node_infile(actual_macro_call.clone()); let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?; let macro_call_id = self.with_ctx(|ctx| { ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it) })?; hir_expand::db::expand_speculative( self.db.upcast(), macro_call_id, speculative_args.syntax(), token_to_map, ) } pub fn as_format_args_parts( &self, string: &ast::String, ) -> 
Option)>> { if let Some(quote) = string.open_quote_text_range() { return self .descend_into_macros(DescendPreference::SameText, string.syntax().clone()) .into_iter() .find_map(|token| { let string = ast::String::cast(token)?; let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; let source_analyzer = self.analyze_no_infer(format_args.syntax())?; let format_args = self.wrap_node_infile(format_args); let res = source_analyzer .as_format_args_parts(self.db, format_args.as_ref())? .map(|(range, res)| (range + quote.end(), res)) .collect(); Some(res) }); } None } pub fn check_for_format_args_template( &self, original_token: SyntaxToken, offset: TextSize, ) -> Option<(TextRange, Option)> { if let Some(original_string) = ast::String::cast(original_token.clone()) { if let Some(quote) = original_string.open_quote_text_range() { return self .descend_into_macros(DescendPreference::SameText, original_token) .into_iter() .find_map(|token| { self.resolve_offset_in_format_args( ast::String::cast(token)?, offset.checked_sub(quote.end())?, ) }) .map(|(range, res)| (range + quote.end(), res)); } } None } fn resolve_offset_in_format_args( &self, string: ast::String, offset: TextSize, ) -> Option<(TextRange, Option)> { debug_assert!(offset <= string.syntax().text_range().len()); let literal = string.syntax().parent().filter(|it| it.kind() == SyntaxKind::LITERAL)?; let format_args = ast::FormatArgsExpr::cast(literal.parent()?)?; let source_analyzer = &self.analyze_no_infer(format_args.syntax())?; let format_args = self.wrap_node_infile(format_args); source_analyzer.resolve_offset_in_format_args(self.db, format_args.as_ref(), offset) } /// Maps a node down by mapping its first and last token down. 
pub fn descend_node_into_attributes(&self, node: N) -> SmallVec<[N; 1]> { // This might not be the correct way to do this, but it works for now let mut res = smallvec![]; let tokens = (|| { // FIXME: the trivia skipping should not be necessary let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?; let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?; Some((first, last)) })(); let (first, last) = match tokens { Some(it) => it, None => return res, }; if first == last { // node is just the token, so descend the token self.descend_into_macros_impl(first, &mut |InFile { value, .. }| { if let Some(node) = value .parent_ancestors() .take_while(|it| it.text_range() == value.text_range()) .find_map(N::cast) { res.push(node) } ControlFlow::Continue(()) }); } else { // Descend first and last token, then zip them to look for the node they belong to let mut scratch: SmallVec<[_; 1]> = smallvec![]; self.descend_into_macros_impl(first, &mut |token| { scratch.push(token); ControlFlow::Continue(()) }); let mut scratch = scratch.into_iter(); self.descend_into_macros_impl( last, &mut |InFile { value: last, file_id: last_fid }| { if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() { if first_fid == last_fid { if let Some(p) = first.parent() { let range = first.text_range().cover(last.text_range()); let node = find_root(&p) .covering_element(range) .ancestors() .take_while(|it| it.text_range() == range) .find_map(N::cast); if let Some(node) = node { res.push(node); } } } } ControlFlow::Continue(()) }, ); } res } /// Descend the token into its macro call if it is part of one, returning the tokens in the /// expansion that it is associated with. 
pub fn descend_into_macros( &self, mode: DescendPreference, token: SyntaxToken, ) -> SmallVec<[SyntaxToken; 1]> { enum Dp<'t> { SameText(&'t str), SameKind(SyntaxKind), None, } let fetch_kind = |token: &SyntaxToken| match token.parent() { Some(node) => match node.kind() { kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, _ => token.kind(), }, None => token.kind(), }; let mode = match mode { DescendPreference::SameText => Dp::SameText(token.text()), DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), DescendPreference::None => Dp::None, }; let mut res = smallvec![]; self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| { let is_a_match = match mode { Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { let kind = fetch_kind(&value); kind == preferred_kind // special case for derive macros || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) } Dp::None => true, }; if is_a_match { res.push(value); } ControlFlow::Continue(()) }); if res.is_empty() { res.push(token); } res } pub fn descend_into_macros_single( &self, mode: DescendPreference, token: SyntaxToken, ) -> SyntaxToken { enum Dp<'t> { SameText(&'t str), SameKind(SyntaxKind), None, } let fetch_kind = |token: &SyntaxToken| match token.parent() { Some(node) => match node.kind() { kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => kind, _ => token.kind(), }, None => token.kind(), }; let mode = match mode { DescendPreference::SameText => Dp::SameText(token.text()), DescendPreference::SameKind => Dp::SameKind(fetch_kind(&token)), DescendPreference::None => Dp::None, }; let mut res = token.clone(); self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. 
}| { let is_a_match = match mode { Dp::SameText(text) => value.text() == text, Dp::SameKind(preferred_kind) => { let kind = fetch_kind(&value); kind == preferred_kind // special case for derive macros || (preferred_kind == SyntaxKind::IDENT && kind == SyntaxKind::NAME_REF) } Dp::None => true, }; res = value; if is_a_match { ControlFlow::Break(()) } else { ControlFlow::Continue(()) } }); res } // return: // SourceAnalyzer(file_id that original call include!) // macro file id // token in include! macro mapped from token in params // span for the mapped token fn is_from_include_file( &self, token: SyntaxToken, ) -> Option<(SourceAnalyzer, HirFileId, SyntaxToken, Span)> { let parent = token.parent()?; let file_id = self.find_file(&parent).file_id.file_id()?; let mut cache = self.expansion_info_cache.borrow_mut(); // iterate related crates and find all include! invocations that include_file_id matches for (invoc, _) in self .db .relevant_crates(file_id) .iter() .flat_map(|krate| self.db.include_macro_invoc(*krate)) .filter(|&(_, include_file_id)| include_file_id == file_id) { let macro_file = invoc.as_macro_file(); let expansion_info = cache .entry(macro_file) .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); // Create the source analyzer for the macro call scope let Some(sa) = self.analyze_no_infer(&self.parse_or_expand(expansion_info.call_file())) else { continue; }; { let InMacroFile { file_id: macro_file, value } = expansion_info.expanded(); self.cache(value, macro_file.into()); } // get mapped token in the include! 
macro file let span = span::SpanData { range: token.text_range(), anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID }, ctx: SyntaxContextId::ROOT, }; let Some(InMacroFile { file_id, value: mut mapped_tokens }) = expansion_info.map_range_down(span) else { continue; }; // if we find one, then return if let Some(t) = mapped_tokens.next() { return Some((sa, file_id.into(), t, span)); } } None } fn descend_into_macros_impl( &self, mut token: SyntaxToken, f: &mut dyn FnMut(InFile) -> ControlFlow<()>, ) { let _p = tracing::span!(tracing::Level::INFO, "descend_into_macros"); let (sa, span, file_id) = match token.parent().and_then(|parent| self.analyze_no_infer(&parent)) { Some(sa) => match sa.file_id.file_id() { Some(file_id) => ( sa, self.db.real_span_map(file_id).span_for_range(token.text_range()), file_id.into(), ), None => { stdx::never!(); return; } }, None => { // if we cannot find a source analyzer for this token, then we try to find out // whether this file is an included file and treat that as the include input let Some((it, macro_file_id, mapped_token, s)) = self.is_from_include_file(token) else { return; }; token = mapped_token; (it, s, macro_file_id) } }; let mut cache = self.expansion_info_cache.borrow_mut(); let mut mcache = self.macro_call_cache.borrow_mut(); let def_map = sa.resolver.def_map(); let mut stack: Vec<(_, SmallVec<[_; 2]>)> = vec![(file_id, smallvec![token])]; let mut process_expansion_for_token = |stack: &mut Vec<_>, macro_file| { let expansion_info = cache .entry(macro_file) .or_insert_with(|| macro_file.expansion_info(self.db.upcast())); { let InMacroFile { file_id, value } = expansion_info.expanded(); self.cache(value, file_id.into()); } let InMacroFile { file_id, value: mapped_tokens } = expansion_info.map_range_down(span)?; let mapped_tokens: SmallVec<[_; 2]> = mapped_tokens.collect(); // if the length changed we have found a mapping for the token let res = mapped_tokens.is_empty().not().then_some(()); // requeue the 
tokens we got from mapping our current token down stack.push((HirFileId::from(file_id), mapped_tokens)); res }; while let Some((file_id, mut tokens)) = stack.pop() { while let Some(token) = tokens.pop() { let was_not_remapped = (|| { // First expand into attribute invocations let containing_attribute_macro_call = self.with_ctx(|ctx| { token.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| { // Don't force populate the dyn cache for items that don't have an attribute anyways item.attrs().next()?; Some(( ctx.item_to_macro_call(InFile::new(file_id, item.clone()))?, item, )) }) }); if let Some((call_id, item)) = containing_attribute_macro_call { let file_id = call_id.as_macro_file(); let attr_id = match self.db.lookup_intern_macro_call(call_id).kind { hir_expand::MacroCallKind::Attr { invoc_attr_index, .. } => { invoc_attr_index.ast_index() } _ => 0, }; // FIXME: here, the attribute's text range is used to strip away all // entries from the start of the attribute "list" up the invoking // attribute. But in // ``` // mod foo { // #![inner] // } // ``` // we don't wanna strip away stuff in the `mod foo {` range, that is // here if the id corresponds to an inner attribute we got strip all // text ranges of the outer ones, and then all of the inner ones up // to the invoking attribute so that the inbetween is ignored. 
let text_range = item.syntax().text_range(); let start = collect_attrs(&item) .nth(attr_id) .map(|attr| match attr.1 { Either::Left(it) => it.syntax().text_range().start(), Either::Right(it) => it.syntax().text_range().start(), }) .unwrap_or_else(|| text_range.start()); let text_range = TextRange::new(start, text_range.end()); // remove any other token in this macro input, all their mappings are the // same as this one tokens.retain(|t| !text_range.contains_range(t.text_range())); return process_expansion_for_token(&mut stack, file_id); } // Then check for token trees, that means we are either in a function-like macro or // secondary attribute inputs let tt = token.parent_ancestors().map_while(ast::TokenTree::cast).last()?; let parent = tt.syntax().parent()?; if tt.left_delimiter_token().map_or(false, |it| it == token) { return None; } if tt.right_delimiter_token().map_or(false, |it| it == token) { return None; } if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) { let mcall: hir_expand::files::InFileWrapper = InFile::new(file_id, macro_call); let file_id = match mcache.get(&mcall) { Some(&it) => it, None => { let it = sa.expand(self.db, mcall.as_ref())?; mcache.insert(mcall, it); it } }; let text_range = tt.syntax().text_range(); // remove any other token in this macro input, all their mappings are the // same as this one tokens.retain(|t| !text_range.contains_range(t.text_range())); process_expansion_for_token(&mut stack, file_id) } else if let Some(meta) = ast::Meta::cast(parent) { // attribute we failed expansion for earlier, this might be a derive invocation // or derive helper attribute let attr = meta.parent_attr()?; let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) { // this might be a derive, or a derive helper on an ADT let derive_call = self.with_ctx(|ctx| { // so try downmapping the token into the pseudo derive expansion // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works 
ctx.attr_to_derive_macro_call( InFile::new(file_id, &adt), InFile::new(file_id, attr.clone()), ) .map(|(_, call_id, _)| call_id) }); match derive_call { Some(call_id) => { // resolved to a derive let file_id = call_id.as_macro_file(); let text_range = attr.syntax().text_range(); // remove any other token in this macro input, all their mappings are the // same as this one tokens.retain(|t| !text_range.contains_range(t.text_range())); return process_expansion_for_token(&mut stack, file_id); } None => Some(adt), } } else { // Otherwise this could be a derive helper on a variant or field if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast) { field.syntax().ancestors().take(4).find_map(ast::Adt::cast) } else if let Some(field) = attr.syntax().parent().and_then(ast::TupleField::cast) { field.syntax().ancestors().take(4).find_map(ast::Adt::cast) } else if let Some(variant) = attr.syntax().parent().and_then(ast::Variant::cast) { variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast) } else { None } }?; if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(file_id, &adt))) { return None; } // Not an attribute, nor a derive, so it's either a builtin or a derive helper // Try to resolve to a derive helper and downmap let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name(); let id = self.db.ast_id_map(file_id).ast_id(&adt); let helpers = def_map.derive_helpers_in_scope(InFile::new(file_id, id))?; let mut res = None; for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) { res = res.or(process_expansion_for_token( &mut stack, derive.as_macro_file(), )); } res } else { None } })() .is_none(); if was_not_remapped && f(InFile::new(file_id, token)).is_break() { break; } } } } // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop // traversing the inner iterator when it finds a node. 
// The outer iterator is over the tokens descendants // The inner iterator is the ancestors of a descendant fn descend_node_at_offset( &self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + '_> + '_ { node.token_at_offset(offset) .map(move |token| self.descend_into_macros(DescendPreference::None, token)) .map(|descendants| { descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it)) }) // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first // See algo::ancestors_at_offset, which uses the same approach .kmerge_by(|left, right| { left.clone() .map(|node| node.text_range().len()) .lt(right.clone().map(|node| node.text_range().len())) }) } /// Attempts to map the node out of macro expanded files returning the original file range. /// If upmapping is not possible, this will fall back to the range of the macro call of the /// macro file the node resides in. pub fn original_range(&self, node: &SyntaxNode) -> FileRange { let node = self.find_file(node); node.original_file_range(self.db.upcast()) } /// Attempts to map the node out of macro expanded files returning the original file range. pub fn original_range_opt(&self, node: &SyntaxNode) -> Option { let node = self.find_file(node); node.original_file_range_opt(self.db.upcast()) .filter(|(_, ctx)| ctx.is_root()) .map(TupleExt::head) } /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_ast_node(&self, node: N) -> Option { self.wrap_node_infile(node).original_ast_node_rooted(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(value.syntax()), file_id.into()); value }, ) } /// Attempts to map the node out of macro expanded files. /// This only work for attribute expansions, as other ones do not have nodes as input. pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option { let InFile { file_id, .. 
} = self.find_file(node); InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map( |InRealFile { file_id, value }| { self.cache(find_root(&value), file_id.into()); value }, ) } pub fn diagnostics_display_range(&self, src: InFile) -> FileRange { let root = self.parse_or_expand(src.file_id); let node = src.map(|it| it.to_node(&root)); node.as_ref().original_file_range(self.db.upcast()) } fn token_ancestors_with_macros( &self, token: SyntaxToken, ) -> impl Iterator + Clone + '_ { token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent)) } /// Iterates the ancestors of the given node, climbing up macro expansions while doing so. pub fn ancestors_with_macros( &self, node: SyntaxNode, ) -> impl Iterator + Clone + '_ { let node = self.find_file(&node); let db = self.db.upcast(); iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| { match value.parent() { Some(parent) => Some(InFile::new(file_id, parent)), None => { self.cache(value.clone(), file_id); Some(file_id.macro_file()?.call_node(db)) } } }) .map(|it| it.value) } pub fn ancestors_at_offset_with_macros( &self, node: &SyntaxNode, offset: TextSize, ) -> impl Iterator + '_ { node.token_at_offset(offset) .map(|token| self.token_ancestors_with_macros(token)) .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len()) } pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option { let text = lifetime.text(); let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| { let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?; gpl.lifetime_params() .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text)) })?; let src = self.wrap_node_infile(lifetime_param); ToDef::to_def(self, src) } pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option