2019-10-29 08:03:29 -05:00
|
|
|
//! Defines database & queries for macro expansion.
|
|
|
|
|
2019-10-29 06:55:39 -05:00
|
|
|
use std::sync::Arc;
|
|
|
|
|
2020-08-13 09:25:38 -05:00
|
|
|
use base_db::{salsa, SourceDatabase};
|
2021-07-10 15:49:17 -05:00
|
|
|
use limit::Limit;
|
2021-05-04 12:36:48 -05:00
|
|
|
use mbe::{ExpandError, ExpandResult};
|
fix: avoid pathological macro expansions
Today, rust-analyzer (and rustc, and bat, and IntelliJ) fail badly on
some kinds of maliciously constructed code, like a deep sequence of
nested parenthesis.
"Who writes 100k nested parenthesis" you'd ask?
Well, in a language with macros, a run-away macro expansion might do
that (see the added tests)! Such expansion can be broad, rather than
deep, so it bypasses recursion check at the macro-expansion layer, but
triggers deep recursion in parser.
In the ideal world, the parser would just handle deeply nested structs
gracefully. We'll get there some day, but at the moment, let's try to be
simple, and just avoid expanding macros with unbalanced parenthesis in
the first place.
closes #9358
2021-08-09 08:06:49 -05:00
|
|
|
use parser::{FragmentKind, T};
|
2021-03-25 14:52:35 -05:00
|
|
|
use syntax::{
|
|
|
|
algo::diff,
|
2021-05-04 12:36:48 -05:00
|
|
|
ast::{self, NameOwner},
|
2021-05-08 18:36:06 -05:00
|
|
|
AstNode, GreenNode, Parse, SyntaxNode, SyntaxToken,
|
2021-03-25 14:52:35 -05:00
|
|
|
};
|
2019-10-29 06:55:39 -05:00
|
|
|
|
|
|
|
use crate::{
|
2021-06-09 11:02:31 -05:00
|
|
|
ast_id_map::AstIdMap, hygiene::HygieneFrame, input::process_macro_input, BuiltinAttrExpander,
|
|
|
|
BuiltinDeriveExpander, BuiltinFnLikeExpander, HirFileId, HirFileIdRepr, MacroCallId,
|
|
|
|
MacroCallKind, MacroCallLoc, MacroDefId, MacroDefKind, MacroFile, ProcMacroExpander,
|
2019-10-29 06:55:39 -05:00
|
|
|
};
|
|
|
|
|
2020-12-10 10:50:56 -06:00
|
|
|
/// Total limit on the number of tokens produced by any macro invocation.
///
/// If an invocation produces more tokens than this limit, it will not be stored in the database and
/// an error will be emitted.
///
/// Actual max for `analysis-stats .` at some point: 30672.
static TOKEN_LIMIT: Limit = Limit::new(524_288);
|
2020-12-10 10:50:56 -06:00
|
|
|
|
2019-11-09 21:03:24 -06:00
|
|
|
/// The different backends that can expand a macro invocation: declarative
/// macros compiled by `mbe`, built-in expanders, and procedural macros.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum TokenExpander {
    /// Old-style `macro_rules`.
    MacroRules { mac: mbe::MacroRules, def_site_token_map: mbe::TokenMap },
    /// AKA macros 2.0.
    MacroDef { mac: mbe::MacroDef, def_site_token_map: mbe::TokenMap },
    /// Stuff like `line!` and `file!`.
    Builtin(BuiltinFnLikeExpander),
    /// `global_allocator` and such.
    BuiltinAttr(BuiltinAttrExpander),
    /// `derive(Copy)` and such.
    BuiltinDerive(BuiltinDeriveExpander),
    /// The thing we love the most here in rust-analyzer -- procedural macros.
    ProcMacro(ProcMacroExpander),
}
|
|
|
|
|
|
|
|
impl TokenExpander {
|
2021-05-04 12:29:30 -05:00
|
|
|
fn expand(
|
2019-11-09 21:03:24 -06:00
|
|
|
&self,
|
|
|
|
db: &dyn AstDatabase,
|
2021-05-19 13:19:08 -05:00
|
|
|
id: MacroCallId,
|
2019-11-09 21:03:24 -06:00
|
|
|
tt: &tt::Subtree,
|
2020-03-13 07:03:31 -05:00
|
|
|
) -> mbe::ExpandResult<tt::Subtree> {
|
2019-11-09 21:03:24 -06:00
|
|
|
match self {
|
2021-05-04 14:03:16 -05:00
|
|
|
TokenExpander::MacroRules { mac, .. } => mac.expand(tt),
|
|
|
|
TokenExpander::MacroDef { mac, .. } => mac.expand(tt),
|
2020-11-30 12:21:25 -06:00
|
|
|
TokenExpander::Builtin(it) => it.expand(db, id, tt),
|
2021-08-18 09:30:09 -05:00
|
|
|
TokenExpander::BuiltinAttr(it) => match db.macro_arg(id) {
|
2021-08-20 07:34:46 -05:00
|
|
|
Some(macro_arg) => it.expand(db, id, tt, ¯o_arg.0),
|
|
|
|
None => mbe::ExpandResult::str_err("No item argument for attribute".to_string()),
|
2021-08-18 09:30:09 -05:00
|
|
|
},
|
2021-08-20 07:28:36 -05:00
|
|
|
TokenExpander::BuiltinDerive(it) => it.expand(db, id, tt),
|
2020-05-14 04:57:37 -05:00
|
|
|
TokenExpander::ProcMacro(_) => {
|
2021-01-08 08:46:48 -06:00
|
|
|
// We store the result in salsa db to prevent non-deterministic behavior in
|
2020-05-14 04:57:37 -05:00
|
|
|
// some proc-macro implementation
|
|
|
|
// See #4315 for details
|
2021-08-20 07:34:46 -05:00
|
|
|
db.expand_proc_macro(id)
|
2020-05-14 04:57:37 -05:00
|
|
|
}
|
2019-11-09 21:03:24 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-04 12:29:30 -05:00
|
|
|
pub(crate) fn map_id_down(&self, id: tt::TokenId) -> tt::TokenId {
|
2019-11-09 21:03:24 -06:00
|
|
|
match self {
|
2021-05-04 14:03:16 -05:00
|
|
|
TokenExpander::MacroRules { mac, .. } => mac.map_id_down(id),
|
|
|
|
TokenExpander::MacroDef { mac, .. } => mac.map_id_down(id),
|
|
|
|
TokenExpander::Builtin(..)
|
2021-06-09 11:02:31 -05:00
|
|
|
| TokenExpander::BuiltinAttr(..)
|
2021-05-04 14:03:16 -05:00
|
|
|
| TokenExpander::BuiltinDerive(..)
|
|
|
|
| TokenExpander::ProcMacro(..) => id,
|
2019-11-17 10:11:43 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-05-04 12:29:30 -05:00
|
|
|
pub(crate) fn map_id_up(&self, id: tt::TokenId) -> (tt::TokenId, mbe::Origin) {
|
2019-11-17 10:11:43 -06:00
|
|
|
match self {
|
2021-05-04 14:03:16 -05:00
|
|
|
TokenExpander::MacroRules { mac, .. } => mac.map_id_up(id),
|
|
|
|
TokenExpander::MacroDef { mac, .. } => mac.map_id_up(id),
|
|
|
|
TokenExpander::Builtin(..)
|
2021-06-09 11:02:31 -05:00
|
|
|
| TokenExpander::BuiltinAttr(..)
|
2021-05-04 14:03:16 -05:00
|
|
|
| TokenExpander::BuiltinDerive(..)
|
|
|
|
| TokenExpander::ProcMacro(..) => (id, mbe::Origin::Call),
|
2019-11-09 21:03:24 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-10-29 07:20:08 -05:00
|
|
|
// FIXME: rename to ExpandDatabase
#[salsa::query_group(AstDatabaseStorage)]
pub trait AstDatabase: SourceDatabase {
    /// Computes the map from syntax nodes to stable `AstId`s for a file.
    fn ast_id_map(&self, file_id: HirFileId) -> Arc<AstIdMap>;

    /// Main public API -- parses a hir file, not caring whether it's a real
    /// file or a macro expansion.
    #[salsa::transparent]
    fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode>;
    /// Implementation for the macro case.
    fn parse_macro_expansion(
        &self,
        macro_file: MacroFile,
    ) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>>;

    /// Macro ids. That's probably the tricksiest bit in rust-analyzer, and the
    /// reason why we use salsa at all.
    ///
    /// We encode macro definitions into ids of macro calls, this what allows us
    /// to be incremental.
    #[salsa::interned]
    fn intern_macro(&self, macro_call: MacroCallLoc) -> MacroCallId;

    /// Lowers syntactic macro call to a token tree representation.
    #[salsa::transparent]
    fn macro_arg(&self, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>>;
    /// Extracts syntax node, corresponding to a macro call. That's a firewall
    /// query, only typing in the macro call itself changes the returned
    /// subtree.
    fn macro_arg_text(&self, id: MacroCallId) -> Option<GreenNode>;
    /// Gets the expander for this macro. This compiles declarative macros, and
    /// just fetches procedural ones.
    fn macro_def(&self, id: MacroDefId) -> Option<Arc<TokenExpander>>;

    /// Expand macro call to a token tree. This query is LRUed (we keep 128 or so results in memory)
    fn macro_expand(&self, macro_call: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>>;
    /// Special case of the previous query for procedural macros. We can't LRU
    /// proc macros, since they are not deterministic in general, and
    /// non-determinism breaks salsa in a very, very, very bad way. @edwin0cheng
    /// heroically debugged this once!
    fn expand_proc_macro(&self, call: MacroCallId) -> ExpandResult<tt::Subtree>;
    /// Firewall query that returns the error from the `macro_expand` query.
    fn macro_expand_error(&self, macro_call: MacroCallId) -> Option<ExpandError>;

    /// Builds the hygiene information frame for the given file.
    fn hygiene_frame(&self, file_id: HirFileId) -> Arc<HygieneFrame>;
}
|
|
|
|
|
2020-03-08 05:02:14 -05:00
|
|
|
/// This expands the given macro call, but with different arguments. This is
|
|
|
|
/// used for completion, where we want to see what 'would happen' if we insert a
|
|
|
|
/// token. The `token_to_map` mapped down into the expansion, with the mapped
|
|
|
|
/// token returned.
|
2021-05-24 14:21:25 -05:00
|
|
|
pub fn expand_speculative(
|
2020-03-13 10:05:46 -05:00
|
|
|
db: &dyn AstDatabase,
|
2020-03-08 05:02:14 -05:00
|
|
|
actual_macro_call: MacroCallId,
|
2021-05-24 14:21:25 -05:00
|
|
|
speculative_args: &ast::TokenTree,
|
2021-05-04 12:36:48 -05:00
|
|
|
token_to_map: SyntaxToken,
|
|
|
|
) -> Option<(SyntaxNode, SyntaxToken)> {
|
2021-05-24 14:21:25 -05:00
|
|
|
let (tt, tmap_1) = mbe::syntax_node_to_token_tree(speculative_args.syntax());
|
2020-03-08 05:02:14 -05:00
|
|
|
let range =
|
2021-05-24 14:21:25 -05:00
|
|
|
token_to_map.text_range().checked_sub(speculative_args.syntax().text_range().start())?;
|
2020-03-08 05:02:14 -05:00
|
|
|
let token_id = tmap_1.token_by_range(range)?;
|
2021-05-04 13:49:00 -05:00
|
|
|
|
2021-05-04 14:20:04 -05:00
|
|
|
let macro_def = {
|
2021-05-19 13:19:08 -05:00
|
|
|
let loc: MacroCallLoc = db.lookup_intern_macro(actual_macro_call);
|
2021-05-04 14:20:04 -05:00
|
|
|
db.macro_def(loc.def)?
|
|
|
|
};
|
|
|
|
|
2021-05-24 14:21:25 -05:00
|
|
|
let speculative_expansion = macro_def.expand(db, actual_macro_call, &tt);
|
2021-05-04 14:20:04 -05:00
|
|
|
|
2021-05-08 18:36:06 -05:00
|
|
|
let fragment_kind = macro_fragment_kind(db, actual_macro_call);
|
2021-05-04 14:20:04 -05:00
|
|
|
|
|
|
|
let (node, tmap_2) =
|
2021-05-24 14:21:25 -05:00
|
|
|
mbe::token_tree_to_syntax_node(&speculative_expansion.value, fragment_kind).ok()?;
|
2021-05-04 13:49:00 -05:00
|
|
|
|
2021-05-04 14:03:16 -05:00
|
|
|
let token_id = macro_def.map_id_down(token_id);
|
2021-05-24 13:29:48 -05:00
|
|
|
let range = tmap_2.range_by_token(token_id, token_to_map.kind())?;
|
2021-01-15 11:15:33 -06:00
|
|
|
let token = node.syntax_node().covering_element(range).into_token()?;
|
2020-03-08 05:02:14 -05:00
|
|
|
Some((node.syntax_node(), token))
|
|
|
|
}
|
|
|
|
|
2020-11-24 14:55:08 -06:00
|
|
|
fn ast_id_map(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<AstIdMap> {
|
2021-05-04 13:49:00 -05:00
|
|
|
let map = db.parse_or_expand(file_id).map(|it| AstIdMap::from_source(&it)).unwrap_or_default();
|
2019-10-29 06:55:39 -05:00
|
|
|
Arc::new(map)
|
|
|
|
}
|
|
|
|
|
2021-05-04 13:49:00 -05:00
|
|
|
fn parse_or_expand(db: &dyn AstDatabase, file_id: HirFileId) -> Option<SyntaxNode> {
|
|
|
|
match file_id.0 {
|
|
|
|
HirFileIdRepr::FileId(file_id) => Some(db.parse(file_id).tree().syntax().clone()),
|
|
|
|
HirFileIdRepr::MacroFile(macro_file) => {
|
|
|
|
db.parse_macro_expansion(macro_file).value.map(|(it, _)| it.syntax_node())
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn parse_macro_expansion(
|
|
|
|
db: &dyn AstDatabase,
|
|
|
|
macro_file: MacroFile,
|
|
|
|
) -> ExpandResult<Option<(Parse<SyntaxNode>, Arc<mbe::TokenMap>)>> {
|
|
|
|
let _p = profile::span("parse_macro_expansion");
|
2021-05-04 14:20:04 -05:00
|
|
|
let result = db.macro_expand(macro_file.macro_call_id);
|
2021-05-04 13:49:00 -05:00
|
|
|
|
|
|
|
if let Some(err) = &result.err {
|
|
|
|
// Note:
|
|
|
|
// The final goal we would like to make all parse_macro success,
|
|
|
|
// such that the following log will not call anyway.
|
2021-05-19 13:19:08 -05:00
|
|
|
let loc: MacroCallLoc = db.lookup_intern_macro(macro_file.macro_call_id);
|
internal: move diagnostics to hir
The idea here is to eventually get rid of `dyn Diagnostic` and
`DiagnosticSink` infrastructure altogether, and just have a `enum
hir::Diagnostic` instead.
The problem with `dyn Diagnostic` is that it is defined in the lowest
level of the stack (hir_expand), but is used by the highest level (ide).
As a first step, we free hir_expand and hir_def from `dyn Diagnostic`
and kick the can up to `hir_ty`, as an intermediate state. The plan is
then to move DiagnosticSink similarly to the hir crate, and, as final
third step, remove its usage from the ide.
One currently unsolved problem is testing. You can notice that the test
which checks precise diagnostic ranges, unresolved_import_in_use_tree,
was moved to the ide layer. Logically, only IDE should have the infra to
render a specific range.
At the same time, the range is determined with the data produced in
hir_def and hir crates, so this layering is rather unfortunate. Working
on hir_def shouldn't require compiling `ide` for testing.
2021-05-23 15:31:59 -05:00
|
|
|
let node = loc.kind.to_node(db);
|
2021-05-19 13:19:08 -05:00
|
|
|
|
|
|
|
// collect parent information for warning log
|
|
|
|
let parents =
|
|
|
|
std::iter::successors(loc.kind.file_id().call_node(db), |it| it.file_id.call_node(db))
|
2021-05-04 13:49:00 -05:00
|
|
|
.map(|n| format!("{:#}", n.value))
|
|
|
|
.collect::<Vec<_>>()
|
|
|
|
.join("\n");
|
|
|
|
|
2021-05-19 13:19:08 -05:00
|
|
|
log::warn!(
|
|
|
|
"fail on macro_parse: (reason: {:?} macro_call: {:#}) parents: {}",
|
|
|
|
err,
|
|
|
|
node.value,
|
|
|
|
parents
|
|
|
|
);
|
2021-05-04 13:49:00 -05:00
|
|
|
}
|
|
|
|
let tt = match result.value {
|
|
|
|
Some(tt) => tt,
|
|
|
|
None => return ExpandResult { value: None, err: result.err },
|
|
|
|
};
|
|
|
|
|
2021-05-08 18:36:06 -05:00
|
|
|
let fragment_kind = macro_fragment_kind(db, macro_file.macro_call_id);
|
2021-05-04 13:49:00 -05:00
|
|
|
|
|
|
|
log::debug!("expanded = {}", tt.as_debug_string());
|
|
|
|
log::debug!("kind = {:?}", fragment_kind);
|
|
|
|
|
|
|
|
let (parse, rev_token_map) = match mbe::token_tree_to_syntax_node(&tt, fragment_kind) {
|
|
|
|
Ok(it) => it,
|
|
|
|
Err(err) => {
|
|
|
|
log::debug!(
|
|
|
|
"failed to parse expanstion to {:?} = {}",
|
|
|
|
fragment_kind,
|
|
|
|
tt.as_debug_string()
|
|
|
|
);
|
|
|
|
return ExpandResult::only_err(err);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
match result.err {
|
|
|
|
Some(err) => {
|
|
|
|
// Safety check for recursive identity macro.
|
|
|
|
let node = parse.syntax_node();
|
|
|
|
let file: HirFileId = macro_file.into();
|
|
|
|
let call_node = match file.call_node(db) {
|
|
|
|
Some(it) => it,
|
|
|
|
None => {
|
|
|
|
return ExpandResult::only_err(err);
|
|
|
|
}
|
|
|
|
};
|
|
|
|
if is_self_replicating(&node, &call_node.value) {
|
2021-06-12 23:05:29 -05:00
|
|
|
ExpandResult::only_err(err)
|
2021-05-04 13:49:00 -05:00
|
|
|
} else {
|
|
|
|
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: Some(err) }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
None => {
|
|
|
|
log::debug!("parse = {:?}", parse.syntax_node().kind());
|
|
|
|
ExpandResult { value: Some((parse, Arc::new(rev_token_map))), err: None }
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
fn macro_arg(db: &dyn AstDatabase, id: MacroCallId) -> Option<Arc<(tt::Subtree, mbe::TokenMap)>> {
|
|
|
|
let arg = db.macro_arg_text(id)?;
|
2021-05-11 18:01:51 -05:00
|
|
|
let (mut tt, tmap) = mbe::syntax_node_to_token_tree(&SyntaxNode::new_root(arg));
|
|
|
|
|
2021-05-19 13:19:08 -05:00
|
|
|
let loc: MacroCallLoc = db.lookup_intern_macro(id);
|
|
|
|
if loc.def.is_proc_macro() {
|
|
|
|
// proc macros expect their inputs without parentheses, MBEs expect it with them included
|
|
|
|
tt.delimiter = None;
|
2021-05-11 18:01:51 -05:00
|
|
|
}
|
|
|
|
|
2021-05-04 13:49:00 -05:00
|
|
|
Some(Arc::new((tt, tmap)))
|
|
|
|
}
|
|
|
|
|
|
|
|
fn macro_arg_text(db: &dyn AstDatabase, id: MacroCallId) -> Option<GreenNode> {
|
|
|
|
let loc = db.lookup_intern_macro(id);
|
|
|
|
let arg = loc.kind.arg(db)?;
|
2021-08-16 06:45:54 -05:00
|
|
|
let arg = process_macro_input(&loc.kind, arg);
|
fix: avoid pathological macro expansions
Today, rust-analyzer (and rustc, and bat, and IntelliJ) fail badly on
some kinds of maliciously constructed code, like a deep sequence of
nested parenthesis.
"Who writes 100k nested parenthesis" you'd ask?
Well, in a language with macros, a run-away macro expansion might do
that (see the added tests)! Such expansion can be broad, rather than
deep, so it bypasses recursion check at the macro-expansion layer, but
triggers deep recursion in parser.
In the ideal world, the parser would just handle deeply nested structs
gracefully. We'll get there some day, but at the moment, let's try to be
simple, and just avoid expanding macros with unbalanced parenthesis in
the first place.
closes #9358
2021-08-09 08:06:49 -05:00
|
|
|
if matches!(loc.kind, MacroCallKind::FnLike { .. }) {
|
|
|
|
let first = arg.first_child_or_token().map_or(T![.], |it| it.kind());
|
|
|
|
let last = arg.last_child_or_token().map_or(T![.], |it| it.kind());
|
|
|
|
let well_formed_tt =
|
|
|
|
matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']));
|
|
|
|
if !well_formed_tt {
|
|
|
|
// Don't expand malformed (unbalanced) macro invocations. This is
|
|
|
|
// less than ideal, but trying to expand unbalanced macro calls
|
|
|
|
// sometimes produces pathological, deeply nested code which breaks
|
|
|
|
// all kinds of things.
|
|
|
|
//
|
|
|
|
// Some day, we'll have explicit recursion counters for all
|
|
|
|
// recursive things, at which point this code might be removed.
|
|
|
|
cov_mark::hit!(issue9358_bad_macro_stack_overflow);
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
}
|
2021-05-06 00:22:51 -05:00
|
|
|
Some(arg.green().into())
|
2021-05-04 13:49:00 -05:00
|
|
|
}
|
|
|
|
|
2021-05-04 14:03:16 -05:00
|
|
|
/// Salsa query: obtains the `TokenExpander` for a macro definition.
///
/// Declarative macros are compiled here (and `None` is returned, with a
/// warning log, if the definition fails to parse); built-in and procedural
/// expanders are simply wrapped. Eager built-ins yield `None` — they are
/// handled by a separate mechanism.
fn macro_def(db: &dyn AstDatabase, id: MacroDefId) -> Option<Arc<TokenExpander>> {
    match id.kind {
        MacroDefKind::Declarative(ast_id) => match ast_id.to_node(db) {
            ast::Macro::MacroRules(macro_rules) => {
                // `macro_rules!` body: compile the rules from the token tree.
                let arg = macro_rules.token_tree()?;
                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                let mac = match mbe::MacroRules::parse(&tt) {
                    Ok(it) => it,
                    Err(err) => {
                        let name = macro_rules.name().map(|n| n.to_string()).unwrap_or_default();
                        log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
                        return None;
                    }
                };
                Some(Arc::new(TokenExpander::MacroRules { mac, def_site_token_map }))
            }
            ast::Macro::MacroDef(macro_def) => {
                // Macros 2.0 (`macro`): same flow, different `mbe` compile entry.
                let arg = macro_def.body()?;
                let (tt, def_site_token_map) = mbe::syntax_node_to_token_tree(arg.syntax());
                let mac = match mbe::MacroDef::parse(&tt) {
                    Ok(it) => it,
                    Err(err) => {
                        let name = macro_def.name().map(|n| n.to_string()).unwrap_or_default();
                        log::warn!("fail on macro_def parse ({}): {:?} {:#?}", name, err, tt);
                        return None;
                    }
                };
                Some(Arc::new(TokenExpander::MacroDef { mac, def_site_token_map }))
            }
        },
        MacroDefKind::BuiltIn(expander, _) => Some(Arc::new(TokenExpander::Builtin(expander))),
        MacroDefKind::BuiltInAttr(expander, _) => {
            Some(Arc::new(TokenExpander::BuiltinAttr(expander)))
        }
        MacroDefKind::BuiltInDerive(expander, _) => {
            Some(Arc::new(TokenExpander::BuiltinDerive(expander)))
        }
        // Eager built-ins are expanded through a different code path.
        MacroDefKind::BuiltInEager(..) => None,
        MacroDefKind::ProcMacro(expander, ..) => Some(Arc::new(TokenExpander::ProcMacro(expander))),
    }
}
|
|
|
|
|
2020-11-26 09:48:17 -06:00
|
|
|
fn macro_expand(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<Option<Arc<tt::Subtree>>> {
|
2020-11-27 11:07:16 -06:00
|
|
|
let _p = profile::span("macro_expand");
|
2021-05-19 13:19:08 -05:00
|
|
|
let loc: MacroCallLoc = db.lookup_intern_macro(id);
|
|
|
|
if let Some(eager) = &loc.eager {
|
2021-08-16 06:27:36 -05:00
|
|
|
return ExpandResult {
|
|
|
|
value: Some(eager.arg_or_expansion.clone()),
|
|
|
|
// FIXME: There could be errors here!
|
|
|
|
err: None,
|
|
|
|
};
|
2021-05-19 13:19:08 -05:00
|
|
|
}
|
2020-02-17 05:32:13 -06:00
|
|
|
|
2021-08-16 06:27:36 -05:00
|
|
|
let macro_arg = match db.macro_arg(id) {
|
2020-03-13 07:03:31 -05:00
|
|
|
Some(it) => it,
|
2020-11-26 09:48:17 -06:00
|
|
|
None => return ExpandResult::str_err("Fail to args in to tt::TokenTree".into()),
|
2020-03-13 07:03:31 -05:00
|
|
|
};
|
2019-10-29 07:11:42 -05:00
|
|
|
|
2020-03-13 07:03:31 -05:00
|
|
|
let macro_rules = match db.macro_def(loc.def) {
|
|
|
|
Some(it) => it,
|
2020-11-26 09:48:17 -06:00
|
|
|
None => return ExpandResult::str_err("Fail to find macro definition".into()),
|
2020-03-13 07:03:31 -05:00
|
|
|
};
|
2021-05-19 13:19:08 -05:00
|
|
|
let ExpandResult { value: tt, err } = macro_rules.expand(db, id, ¯o_arg.0);
|
2019-10-29 07:11:42 -05:00
|
|
|
// Set a hard limit for the expanded tt
|
|
|
|
let count = tt.count();
|
2021-07-10 15:49:17 -05:00
|
|
|
// XXX: Make ExpandResult a real error and use .map_err instead?
|
|
|
|
if TOKEN_LIMIT.check(count).is_err() {
|
2020-11-26 09:48:17 -06:00
|
|
|
return ExpandResult::str_err(format!(
|
2020-12-10 10:50:56 -06:00
|
|
|
"macro invocation exceeds token limit: produced {} tokens, limit is {}",
|
2021-07-10 15:49:17 -05:00
|
|
|
count,
|
|
|
|
TOKEN_LIMIT.inner(),
|
2020-11-26 09:48:17 -06:00
|
|
|
));
|
2019-10-29 07:11:42 -05:00
|
|
|
}
|
2020-11-24 12:00:23 -06:00
|
|
|
|
2020-11-26 09:48:17 -06:00
|
|
|
ExpandResult { value: Some(Arc::new(tt)), err }
|
2019-10-29 07:11:42 -05:00
|
|
|
}
|
|
|
|
|
2021-08-16 06:27:36 -05:00
|
|
|
/// Salsa query: firewall that exposes only the error half of `macro_expand`,
/// so error consumers aren't invalidated by changes to the expansion itself.
fn macro_expand_error(db: &dyn AstDatabase, macro_call: MacroCallId) -> Option<ExpandError> {
    let expansion = db.macro_expand(macro_call);
    expansion.err
}
|
|
|
|
|
2021-08-20 07:34:46 -05:00
|
|
|
fn expand_proc_macro(db: &dyn AstDatabase, id: MacroCallId) -> ExpandResult<tt::Subtree> {
|
2021-05-19 13:19:08 -05:00
|
|
|
let loc: MacroCallLoc = db.lookup_intern_macro(id);
|
2020-05-14 04:57:37 -05:00
|
|
|
let macro_arg = match db.macro_arg(id) {
|
|
|
|
Some(it) => it,
|
2021-08-20 07:34:46 -05:00
|
|
|
None => return ExpandResult::str_err("No arguments for proc-macro".to_string()),
|
2020-05-14 04:57:37 -05:00
|
|
|
};
|
|
|
|
|
|
|
|
let expander = match loc.def.kind {
|
2021-03-18 10:11:18 -05:00
|
|
|
MacroDefKind::ProcMacro(expander, ..) => expander,
|
2020-05-14 04:57:37 -05:00
|
|
|
_ => unreachable!(),
|
|
|
|
};
|
|
|
|
|
2021-05-31 06:37:11 -05:00
|
|
|
let attr_arg = match &loc.kind {
|
|
|
|
MacroCallKind::Attr { attr_args, .. } => Some(attr_args),
|
|
|
|
_ => None,
|
|
|
|
};
|
|
|
|
|
|
|
|
expander.expand(db, loc.krate, ¯o_arg.0, attr_arg)
|
2020-05-14 04:57:37 -05:00
|
|
|
}
|
|
|
|
|
2021-03-25 14:52:35 -05:00
|
|
|
fn is_self_replicating(from: &SyntaxNode, to: &SyntaxNode) -> bool {
|
|
|
|
if diff(from, to).is_empty() {
|
|
|
|
return true;
|
|
|
|
}
|
2021-05-04 12:36:48 -05:00
|
|
|
if let Some(stmts) = ast::MacroStmts::cast(from.clone()) {
|
2021-03-25 14:52:35 -05:00
|
|
|
if stmts.statements().any(|stmt| diff(stmt.syntax(), to).is_empty()) {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
if let Some(expr) = stmts.expr() {
|
|
|
|
if diff(expr.syntax(), to).is_empty() {
|
|
|
|
return true;
|
2020-11-24 12:00:23 -06:00
|
|
|
}
|
2020-03-21 09:43:48 -05:00
|
|
|
}
|
|
|
|
}
|
2021-03-25 14:52:35 -05:00
|
|
|
false
|
2019-10-29 07:11:42 -05:00
|
|
|
}
|
2019-12-08 02:16:52 -06:00
|
|
|
|
2021-01-03 20:53:31 -06:00
|
|
|
fn hygiene_frame(db: &dyn AstDatabase, file_id: HirFileId) -> Arc<HygieneFrame> {
|
|
|
|
Arc::new(HygieneFrame::new(db, file_id))
|
|
|
|
}
|
|
|
|
|
2021-05-08 18:36:06 -05:00
|
|
|
fn macro_fragment_kind(db: &dyn AstDatabase, id: MacroCallId) -> FragmentKind {
|
2021-05-19 13:19:08 -05:00
|
|
|
let loc: MacroCallLoc = db.lookup_intern_macro(id);
|
|
|
|
loc.kind.fragment_kind()
|
2019-12-08 02:16:52 -06:00
|
|
|
}
|