// rust/crates/hir_expand/src/db.rs

//! Defines database & queries for macro expansion.
use std::sync::Arc;
use base_db::{salsa, SourceDatabase};
use itertools::Itertools;
use limit::Limit;
use mbe::{ExpandError, ExpandResult};
use parser::{FragmentKind, T};
use syntax::{
algo::diff,
ast::{self, AttrsOwner, NameOwner},
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken, TextRange,
};
use crate::{
ast_id_map::AstIdMap, hygiene::HygieneFrame, BuiltinAttrExpander, BuiltinDeriveExpander,
BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId, MacroCallKind, MacroCallLoc,
MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
};
/// Total limit on the number of tokens produced by any macro invocation.
///
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
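/// For scale, 524_288 = 2^19 tokens, roughly 17× that observed maximum.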
static TOKEN_LIMIT: Limit = Limit::new(524_288);
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
/// Old-style `macro_rules`.
MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap },
/// AKA macros 2.0.
MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap },
/// Stuff like `line!` and `file!`.
Builtin(BuiltinFnLikeExpander),
/// `global_allocator` and such.
BuiltinAttr(BuiltinAttrExpander),
/// `derive(Copy)` and such.
BuiltinDerive(BuiltinDeriveExpander),
/// The thing we love the most here in rust-analyzer -- procedural macros.
ProcMacro(ProcMacroExpander),
}
impl TokenExpander {
fn expand(
&self,
db: &dyn AstDatabase,
id: MacroCallId,
tt: &tt::Subtree,
) -> mbe::ExpandResult<tt::Subtree> {
match self {
TokenExpander::MacroRules { mac, .. } => mac.expand(tt),
TokenExpander::MacroDef { mac, .. } => mac.expand(tt),
TokenExpander::Builtin(it) => it.expand(db, id, tt),
TokenExpander::BuiltinAttr(it) => it.expand(db, id, tt),
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
TokenExpander::ProcMacro(_) => {
// We store the result in the salsa db to prevent non-deterministic behavior
// in some proc-macro implementations.
// See #4315 for details.
db.expand_proc_macro(id)
}
}
}
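// Roughly speaking, "down" and "up" below refer to the direction of the token
// id mapping: `map_id_down` maps an id from the macro call's input to the id
// representing that token inside the expansion, while `map_id_up` maps an
// expansion id back, also reporting whether the token originated at the call
// site or inside the macro definition (`mbe::Origin`).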
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
match self {
TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id),
TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id),
TokenExpander::Builtin(..)
| TokenExpander::BuiltinAttr(..)
| TokenExpander::BuiltinDerive(..)
| TokenExpander::ProcMacro(..) => id,
}
}
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
match self {
TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id),
TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id),
TokenExpander::Builtin(..)
| TokenExpander::BuiltinAttr(..)
| TokenExpander::BuiltinDerive(..)
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
}
}
}
// FIXME: rename to ExpandDatabase
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: SourceDatabase {
fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;
/// Main public API -- parses a hir file, not caring whether it's a real
/// file or a macro expansion.
#[salsa::transparent]
fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
/// Implementation for the macro case.
fn parse_macro_expansion(
&self,
macro_file: MacroFile,
) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;
/// Macro ids. That's probably the trickiest bit in rust-analyzer, and the
/// reason why we use salsa at all.
///
/// We encode macro definitions into ids of macro calls; this is what allows us
/// to be incremental.
#[salsa::interned]
fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;
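// A minimal sketch of why interning matters (`db` and `loc` are illustrative
// bindings, not code in this crate): two structurally identical
// `MacroCallLoc`s intern to the same `MacroCallId`, so every query keyed by
// that id keeps its cached value when the surrounding file is merely reparsed.
//
//     let id1 = db.intern_macro(loc.clone());
//     let id2 = db.intern_macro(loc);
//     assert_eq!(id1, id2);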
/// Lowers a syntactic macro call to a token tree representation.
#[salsa::transparent]
fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
/// Extracts the syntax node corresponding to a macro call. That's a firewall
/// query: only typing inside the macro call itself changes the returned
/// subtree.
fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
/// Gets the expander for this macro. This compiles declarative macros, and
/// just fetches procedural ones.
fn macro_def(&self, id: MacroDefId) -> Option<Arc<TokenExpander>>;
/// Expands a macro call to a token tree. This query is LRU'd (we keep 128 or so results in memory).
fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
/// Special case of the previous query for procedural macros. We can't LRU
/// proc macros, since they are not deterministic in general, and
/// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
/// heroically debugged this once!
fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
/// Firewall query that returns the error from the `macro_expand` query.
fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;
fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
}
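// Rough shape of how the `AstDatabase` queries above invoke one another for a
// macro file (a sketch of the implementations below, not an exhaustive call
// graph):
//
//     parse_or_expand(HirFileId)
//         └── parse_macro_expansion(MacroFile)
//                 └── macro_expand(MacroCallId)
//                         ├── macro_arg(MacroCallId) ── macro_arg_text(MacroCallId)
//                         └── macro_def(MacroDefId)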
/// This expands the given macro call, but with different arguments. This is
/// used for completion, where we want to see what 'would happen' if we insert a
/// token. The `token_to_map` is mapped down into the expansion, and the mapped
/// token is returned.
pub fn expand_speculative(
db: &dyn AstDatabase,
actual_macro_call: MacroCallId,
speculative_args: &ast::TokenTree,
token_to_map: SyntaxToken,
) -> Option<(SyntaxNode, SyntaxToken)> {
let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax());
let range =
token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?;
let token_id = tmap_1.token_by_range(range)?;
let macro_def = {
let loc: MacroCallLoc = db.lookup_intern_macro(actual_macro_call);
db.macro_def(loc.def)?
};
let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt);
let fragment_kind = macro_fragment_kind(db, actual_macro_call);
let (node, tmap_2) =
mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
let token_id = macro_def.map_id_down(token_id);
let range = tmap_2.first_range_by_token(token_id, token_to_map.kind())?;
let token = node.syntax_node().covering_element(range).into_token()?;
Some((node.syntax_node(), token))
}
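// A sketch of the intended call pattern for `expand_speculative`; `call_id`,
// `args_with_extra_token` and `cursor_token` are hypothetical values supplied
// by the caller (e.g. the completion machinery):
//
//     if let Some((expansion, token_in_expansion)) =
//         expand_speculative(db, call_id, &args_with_extra_token, cursor_token)
//     {
//         // `token_in_expansion` is where `cursor_token` ended up inside
//         // `expansion`.
//     }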
fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
Arc::new(map)
}
fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
match file_id.0 {
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
HirFileIdRepr::MacroFile(macro_file) => {
db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
}
}
}
fn parse_macro_expansion(
db: &dyn AstDatabase,
macro_file: MacroFile,
) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
let _p = profile::span("parse_macro_expansion");
let result = db.macro_expand(macro_file.macro_call_id);
if let Some(err) = &result.err {
// Note:
// The end goal is for every macro expansion to parse successfully, so that
// the warning below is never logged.
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
let node = loc.kind.to_node(db);
// Collect parent information for the warning log.
let parents =
std::iter::successors(loc.kind.file_id().call_node(db), |it| it.file_id.call_node(db))
.map(|n| format!("{:#}", n.value))
.collect::<Vec<_>>()
.join("\n");
log::warn!(
"fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
err,
node.value,
parents
);
}
let tt = match result.value {
Some(tt) => tt,
None => return ExpandResult { value: None, err: result.err },
};
let fragment_kind = macro_fragment_kind(db, macro_file.macro_call_id);
log::debug!("expanded = {}", tt.as_debug_string());
log::debug!("kind = {:?}", fragment_kind);
let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
Ok(it) => it,
Err(err) => {
log::debug!(
"failed to parse expansion to {:?} = {}",
fragment_kind,
tt.as_debug_string()
);
return ExpandResult::only_err(err);
}
};
match result.err {
Some(err) => {
// Safety check for recursive identity macro.
let node = parse.syntax_node();
let file: HirFileId = macro_file.into();
let call_node = match file.call_node(db) {
Some(it) => it,
None => {
return ExpandResult::only_err(err);
}
};
if is_self_replicating(&node, &call_node.value) {
ExpandResult::only_err(err)
} else {
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
}
}
None => {
log::debug!("parse = {:?}", parse.syntax_node().kind());
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
}
}
}
fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
let arg = db.macro_arg_text(id)?;
let loc = db.lookup_intern_macro(id);
let node = SyntaxNode::new_root(arg);
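// Figure out which attributes to strip ("censor") from the item before it is
// lowered to a token tree: for derives, every attribute up to and including
// the `#[derive]` attribute itself; for attribute macros, the attribute that
// triggered this invocation.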
let censor = match loc.kind {
MacroCallKind::FnLike { .. } => None,
MacroCallKind::Derive { derive_attr_index, .. } => match ast::Item::cast(node.clone()) {
Some(item) => item
.attrs()
.map(|attr| attr.syntax().text_range())
.take(derive_attr_index as usize + 1)
.fold1(TextRange::cover),
None => None,
},
MacroCallKind::Attr { invoc_attr_index, .. } => match ast::Item::cast(node.clone()) {
Some(item) => {
item.attrs().nth(invoc_attr_index as usize).map(|attr| attr.syntax().text_range())
}
None => None,
},
};
let (mut tt, tmap) = mbe::syntax_node_to_token_tree_censored(&node, censor);
if loc.def.is_proc_macro() {
// Proc macros expect their input without the outer delimiters; MBE macros
// expect them included.
tt.delimiter = None;
}
Some(Arc::new((tt, tmap)))
}
fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
let loc = db.lookup_intern_macro(id);
let arg = loc.kind.arg(db)?;
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
let well_formed_tt =
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
if !well_formed_tt {
// Don't expand malformed (unbalanced) macro invocations. This is
// less than ideal, but trying to expand unbalanced macro calls
// sometimes produces pathological, deeply nested code which breaks
// all kinds of things.
//
// Some day, we'll have explicit recursion counters for all
// recursive things, at which point this code might be removed.
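// (For example, a call typed mid-edit like `m!(foo`, still missing its
// closing parenthesis, is simply not expanded.)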
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
return None;
}
}
Some(arg.green().into())
}
fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>> {
match id.kind {
MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
ast::Macro::MacroRules(macro_rules) => {
let arg = macro_rules.token_tree()?;
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = match mbe::MacroRules::parse(&tt) {
Ok(it) => it,
Err(err) => {
let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
return None;
}
};
Some(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map }))
}
ast::Macro::MacroDef(macro_def) => {
let arg = macro_def.body()?;
let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
let mac = match mbe::MacroDef::parse(&tt) {
Ok(it) => it,
Err(err) => {
let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
return None;
}
};
Some(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map }))
}
},
MacroDefKind::BuiltIn(expander, _) => Some(Arc::new(TokenExpander::Builtin(expander))),
MacroDefKind::BuiltInAttr(expander, _) => {
Some(Arc::new(TokenExpander::BuiltinAttr(expander)))
}
MacroDefKind::BuiltInDerive(expander, _) => {
Some(Arc::new(TokenExpander::BuiltinDerive(expander)))
}
MacroDefKind::BuiltInEager(..) => None,
MacroDefKind::ProcMacro(expander, ..) => Some(Arc::new(TokenExpander::ProcMacro(expander))),
}
}
fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
let _p = profile::span("macro_expand");
let loc: MacroCallLoc = db.lookup_intern_macro(id);
if let Some(eager) = &loc.eager {
return ExpandResult {
value: Some(eager.arg_or_expansion.clone()),
// FIXME: There could be errors here!
err: None,
};
}
let macro_arg = match db.macro_arg(id) {
Some(it) => it,
None => return ExpandResult::str_err("Fail to args in to tt::TokenTree".into()),
};
let macro_rules = match db.macro_def(loc.def) {
Some(it) => it,
None => return ExpandResult::str_err("Fail to find macro definition".into()),
};
let ExpandResult { value: tt, err } = macro_rules.expand(db, id, &macro_arg.0);
// Set a hard limit for the expanded tt
let count = tt.count();
// XXX: Make ExpandResult a real error and use .map_err instead?
if TOKEN_LIMIT.check(count).is_err() {
return ExpandResult::str_err(format!(
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
count,
TOKEN_LIMIT.inner(),
));
}
ExpandResult { value: Some(Arc::new(tt)), err }
}
fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
db.macro_expand(macro_call).err
}
fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
let loc: MacroCallLoc = db.lookup_intern_macro(id);
let macro_arg = match db.macro_arg(id) {
Some(it) => it,
None => return ExpandResult::str_err("No arguments for proc-macro".to_string()),
};
let expander = match loc.def.kind {
MacroDefKind::ProcMacro(expander, ..) => expander,
_ => unreachable!(),
};
let attr_arg = match &loc.kind {
MacroCallKind::Attr { attr_args, .. } => {
let mut attr_args = attr_args.0.clone();
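// Shift the token ids in the attribute's own arguments past the ids used by
// the item input, so ids from the two token trees stay distinct when tokens
// are mapped back to syntax.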
mbe::Shift::new(&macro_arg.0).shift_all(&mut attr_args);
Some(attr_args)
}
_ => None,
};
expander.expand(db, loc.krate, &macro_arg.0, attr_arg.as_ref())
}
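// "Self replicating" means the (errored) expansion reproduces the macro call
// itself, as a recursive identity macro does:
//
//     macro_rules! m { ($($tt:tt)*) => { m!($($tt)*) } }   // `m!(x)` expands to `m!(x)`
//
// `parse_macro_expansion` uses this check to drop such an expansion and keep
// only the error, rather than caching output identical to its own input.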
fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
if diff(from, to).is_empty() {
return true;
}
if let Some(stmts) = ast::MacroStmts::cast(from.clone()) {
if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
return true;
}
if let Some(expr) = stmts.expr() {
if diff(expr.syntax(), to).is_empty() {
return true;
}
}
}
false
}
fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
Arc::new(HygieneFrame::new(db, file_id))
}
fn macro_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
let loc: MacroCallLoc = db.lookup_intern_macro(id);
loc.kind.fragment_kind()
}