Fix macro expansion expression parenthesis wrapping
This commit is contained in:
parent efa67294ed
commit d2a31acda1
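In short: the mbe transcriber no longer wraps every matched `$e:expr` fragment in literal parentheses. The matcher now stores a single-token expression in a subtree with an invisible delimiter and only upgrades an invisible delimiter to real parentheses for multi-token expressions, `Expander::exit` restores the previous `SpanMap` from the `Mark` instead of re-querying the database, and a `dump_syntax_contexts` debug query plus a macro-hygiene test are added. A minimal sketch (hypothetical `my_vec!` macro, not part of this diff) of the kind of expansion whose rendering changes:

macro_rules! my_vec {
    ($($e:expr),*) => {{
        let mut v = Vec::new();
        $(v.push($e);)*
        v
    }};
}

fn main() {
    // rust-analyzer previously rendered this expansion as `v.push((1));` etc.;
    // with this change it renders as `v.push(1);`, matching the test updates below.
    let v: Vec<u32> = my_vec![1, 2, 3];
    assert_eq!(v, vec![1, 2, 3]);
}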
@@ -14,8 +14,25 @@ pub const ROOT_ERASED_FILE_AST_ID: ErasedFileAstId =
 
 pub type SpanData = tt::SpanData<SpanAnchor, SyntaxContextId>;
 
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct SyntaxContextId(InternId);
+
+impl fmt::Debug for SyntaxContextId {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        if *self == Self::SELF_REF {
+            f.debug_tuple("SyntaxContextId")
+                .field(&{
+                    #[derive(Debug)]
+                    #[allow(non_camel_case_types)]
+                    struct SELF_REF;
+                    SELF_REF
+                })
+                .finish()
+        } else {
+            f.debug_tuple("SyntaxContextId").field(&self.0).finish()
+        }
+    }
+}
 crate::impl_intern_key!(SyntaxContextId);
 
 impl fmt::Display for SyntaxContextId {
@@ -30,7 +47,7 @@ impl SyntaxContext for SyntaxContextId {
 // inherent trait impls please tyvm
 impl SyntaxContextId {
     pub const ROOT: Self = SyntaxContextId(unsafe { InternId::new_unchecked(0) });
-    // veykril(HACK): salsa doesn't allow us fetching the id of the current input to be allocated so
+    // veykril(HACK): FIXME salsa doesn't allow us fetching the id of the current input to be allocated so
     // we need a special value that behaves as the current context.
     pub const SELF_REF: Self =
         SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) });
@@ -107,7 +124,7 @@ pub struct MacroFileId {
 
 /// `MacroCallId` identifies a particular macro invocation, like
 /// `println!("Hello, {}", world)`.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct MacroCallId(salsa::InternId);
 crate::impl_intern_key!(MacroCallId);
 
@@ -1025,7 +1025,7 @@ impl ExprCollector<'_> {
 
                 let id = collector(self, Some(expansion.tree()));
                 self.ast_id_map = prev_ast_id_map;
-                self.expander.exit(self.db, mark);
+                self.expander.exit(mark);
                 id
             }
             None => collector(self, None),
@@ -2,6 +2,7 @@ mod block;
 
 use base_db::{fixture::WithFixture, SourceDatabase};
 use expect_test::{expect, Expect};
+use hir_expand::db::ExpandDatabase;
 
 use crate::{test_db::TestDB, ModuleDefId};
 
@@ -143,7 +144,6 @@ mod m {
 
 #[test]
 fn desugar_builtin_format_args() {
-    // Regression test for a path resolution bug introduced with inner item handling.
     let (db, body, def) = lower(
         r#"
 //- minicore: fmt
@@ -221,3 +221,70 @@ fn main() {
 }"#]]
     .assert_eq(&body.pretty_print(&db, def))
 }
+
+#[test]
+fn test_macro_hygiene() {
+    let (db, body, def) = lower(
+        r##"
+//- minicore: fmt, from
+//- /main.rs
+mod error;
+
+use crate::error::error;
+
+fn main() {
+    // _ = forces body expansion instead of block def map expansion
+    _ = error!("Failed to resolve path `{}`", node.text());
+}
+//- /error.rs
+macro_rules! _error {
+    ($fmt:expr, $($arg:tt)+) => {$crate::error::intermediate!(format_args!($fmt, $($arg)+))}
+}
+pub(crate) use _error as error;
+macro_rules! _intermediate {
+    ($arg:expr) => {$crate::error::SsrError::new($arg)}
+}
+pub(crate) use _intermediate as intermediate;
+
+pub struct SsrError(pub(crate) core::fmt::Arguments);
+
+impl SsrError {
+    pub(crate) fn new(message: impl Into<core::fmt::Arguments>) -> SsrError {
+        SsrError(message.into())
+    }
+}
+"##,
+    );
+    println!("{}", db.dump_syntax_contexts());
+
+    assert_eq!(db.body_with_source_map(def.into()).1.diagnostics(), &[]);
+    expect![[r#"
+        fn main() {
+            _ = $crate::error::SsrError::new(
+                builtin#lang(Arguments::new_v1_formatted)(
+                    &[
+                        "\"Failed to resolve path `", "`\"",
+                    ],
+                    &[
+                        builtin#lang(Argument::new_display)(
+                            &node.text(),
+                        ),
+                    ],
+                    &[
+                        builtin#lang(Placeholder::new)(
+                            0usize,
+                            ' ',
+                            builtin#lang(Alignment::Unknown),
+                            0u32,
+                            builtin#lang(Count::Implied),
+                            builtin#lang(Count::Implied),
+                        ),
+                    ],
+                    unsafe {
+                        builtin#lang(UnsafeArg::new)()
+                    },
+                ),
+            );
+        }"#]]
+    .assert_eq(&body.pretty_print(&db, def))
+}
@@ -794,7 +794,7 @@ impl<'a> AssocItemCollector<'a> {
 
                 self.collect(&item_tree, tree_id, &iter);
 
-                self.expander.exit(self.db, mark);
+                self.expander.exit(mark);
             }
         }
 
@@ -94,8 +94,8 @@ impl Expander {
         ExpandResult { value: Some(InFile::new(macro_file.into(), value.0)), err: error.or(err) }
     }
 
-    pub fn exit(&mut self, db: &dyn DefDatabase, mut mark: Mark) {
-        self.span_map = db.span_map(mark.file_id);
+    pub fn exit(&mut self, mut mark: Mark) {
+        self.span_map = mark.span_map;
         self.current_file_id = mark.file_id;
         if self.recursion_depth == u32::MAX {
             // Recursion limit has been reached somewhere in the macro expansion tree. Reset the
@@ -174,10 +174,11 @@ impl Expander {
         let parse = value.cast::<T>()?;
 
         self.recursion_depth += 1;
-        self.span_map = db.span_map(file_id);
+        let old_span_map = std::mem::replace(&mut self.span_map, db.span_map(file_id));
         let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
         let mark = Mark {
             file_id: old_file_id,
+            span_map: old_span_map,
             bomb: DropBomb::new("expansion mark dropped"),
         };
         Some((mark, parse))
@@ -190,5 +191,6 @@ impl Expander {
 #[derive(Debug)]
 pub struct Mark {
     file_id: HirFileId,
+    span_map: SpanMap,
     bomb: DropBomb,
 }
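A hedged sketch of the save/restore pattern the two hunks above establish: `Mark` now carries the previous span map, so `Expander::exit` restores it directly instead of re-querying the database (simplified stand-in types, not the real rust-analyzer definitions):

struct Expander {
    span_map: String,     // stands in for SpanMap
    current_file_id: u32, // stands in for HirFileId
}

struct Mark {
    file_id: u32,
    span_map: String,
}

impl Expander {
    // Corresponds to enter_expand: swap in the new state, remember the old one.
    fn enter(&mut self, file_id: u32, span_map: String) -> Mark {
        let old_span_map = std::mem::replace(&mut self.span_map, span_map);
        let old_file_id = std::mem::replace(&mut self.current_file_id, file_id);
        Mark { file_id: old_file_id, span_map: old_span_map }
    }

    // Corresponds to the new exit(&mut self, mark: Mark): no database needed.
    fn exit(&mut self, mark: Mark) {
        self.span_map = mark.span_map;
        self.current_file_id = mark.file_id;
    }
}

fn main() {
    let mut exp = Expander { span_map: "root".into(), current_file_id: 0 };
    let mark = exp.enter(1, "macro-file".into());
    exp.exit(mark);
    assert_eq!(exp.span_map, "root");
}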
@@ -439,7 +439,7 @@ impl GenericParams {
                 let ctx = expander.ctx(db);
                 let type_ref = TypeRef::from_ast(&ctx, expanded.tree());
                 self.fill_implicit_impl_trait_args(db, &mut *exp, &type_ref);
-                exp.1.exit(db, mark);
+                exp.1.exit(mark);
             }
         }
     });
@@ -997,9 +997,9 @@ macro_rules! vec {
 fn f() {
     {
         let mut v = Vec::new();
-        v.push((1));
-        v.push((2));
-        v.push((3));
+        v.push(1);
+        v.push(2);
+        v.push(3);
         v
     };
 }
@@ -1468,8 +1468,8 @@ macro_rules! matches {
     };
 }
 fn main() {
-    match (0) {
-        0|1 if (true )=>true , _=>false
+    match 0 {
+        0|1 if true =>true , _=>false
     };
 }
 "#]],
@@ -62,10 +62,10 @@ macro_rules !implement_methods {
 struct Foo;
 impl Foo {
     fn alpha() -> &'static[u32] {
-        &[(1), (2), (3)]
+        &[1, 2, 3]
     }
     fn beta() -> &'static[u32] {
-        &[(1), (2), (3)]
+        &[1, 2, 3]
    }
 }
 "#]],
@@ -39,8 +39,8 @@ fn main() {
     };
     {
         let mut v = Vec::new();
-        v.push((1u32));
-        v.push((2));
+        v.push(1u32);
+        v.push(2);
         v
     };
 }
@@ -192,9 +192,9 @@ macro_rules! constant {
 ($e:expr ;) => {$e};
 }
 
-const _: () = (0.0);
-const _: () = (0.);
-const _: () = (0e0);
+const _: () = 0.0;
+const _: () = 0.;
+const _: () = 0e0;
 "#]],
     );
 }
@@ -1,6 +1,10 @@
 //! Defines database & queries for macro expansion.
 
-use base_db::{salsa, span::SyntaxContextId, CrateId, Edition, FileId, SourceDatabase};
+use base_db::{
+    salsa::{self, debug::DebugQueryTable},
+    span::SyntaxContextId,
+    CrateId, Edition, FileId, SourceDatabase,
+};
 use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
@@ -17,7 +21,7 @@ use crate::{
     builtin_attr_macro::pseudo_derive_attr_expansion,
     builtin_fn_macro::EagerExpander,
     fixup::{self, SyntaxFixupUndoInfo},
-    hygiene::{self, SyntaxContextData, Transparency},
+    hygiene::{apply_mark, SyntaxContextData, Transparency},
     span::{RealSpanMap, SpanMap, SpanMapRef},
     tt, AstId, BuiltinAttrExpander, BuiltinDeriveExpander, BuiltinFnLikeExpander, EagerCallInfo,
     ExpandError, ExpandResult, ExpandTo, ExpansionSpanMap, HirFileId, HirFileIdRepr, MacroCallId,
@@ -53,7 +57,7 @@ impl DeclarativeMacroExpander {
             ),
             None => self
                 .mac
-                .expand(&tt, |s| s.ctx = db.apply_mark(s.ctx, call_id, self.transparency))
+                .expand(&tt, |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency))
                 .map_err(Into::into),
         }
     }
@@ -115,16 +119,11 @@ pub trait ExpandDatabase: SourceDatabase {
     fn intern_macro_call(&self, macro_call: MacroCallLoc) -> MacroCallId;
     #[salsa::interned]
     fn intern_syntax_context(&self, ctx: SyntaxContextData) -> SyntaxContextId;
+
     #[salsa::transparent]
     fn setup_syntax_context_root(&self) -> ();
     #[salsa::transparent]
-    #[salsa::invoke(hygiene::apply_mark)]
-    fn apply_mark(
-        &self,
-        ctxt: SyntaxContextId,
-        call_id: MacroCallId,
-        transparency: hygiene::Transparency,
-    ) -> SyntaxContextId;
+    fn dump_syntax_contexts(&self) -> String;
 
     /// Lowers syntactic macro call to a token tree representation. That's a firewall
     /// query, only typing in the macro call itself changes the returned
@@ -269,7 +268,8 @@ pub fn expand_speculative(
         MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
     };
 
-    let expand_to = macro_expand_to(db, actual_macro_call);
+    let expand_to = loc.expand_to();
+
     fixup::reverse_fixups(&mut speculative_expansion.value, &undo_info);
     let (node, rev_tmap) = token_tree_to_syntax_node(&speculative_expansion.value, expand_to);
 
@@ -318,12 +318,9 @@ fn parse_macro_expansion(
     macro_file: MacroFileId,
 ) -> ExpandResult<(Parse<SyntaxNode>, Arc<ExpansionSpanMap>)> {
     let _p = profile::span("parse_macro_expansion");
-    let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id);
-
-    let expand_to = macro_expand_to(db, macro_file.macro_call_id);
-
-    tracing::debug!("expanded = {}", tt.as_debug_string());
-    tracing::debug!("kind = {:?}", expand_to);
+    let loc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+    let expand_to = loc.expand_to();
+    let mbe::ValueResult { value: tt, err } = macro_expand(db, macro_file.macro_call_id, loc);
 
     let (parse, rev_token_map) = token_tree_to_syntax_node(&tt, expand_to);
 
@@ -575,9 +572,9 @@ fn macro_expander(db: &dyn ExpandDatabase, id: MacroDefId) -> TokenExpander {
 fn macro_expand(
     db: &dyn ExpandDatabase,
     macro_call_id: MacroCallId,
+    loc: MacroCallLoc,
 ) -> ExpandResult<Arc<tt::Subtree>> {
     let _p = profile::span("macro_expand");
-    let loc = db.lookup_intern_macro_call(macro_call_id);
 
     let ExpandResult { value: tt, mut err } = match loc.def.kind {
         MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id),
@@ -711,10 +708,6 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
     ExpandResult { value: Arc::new(tt), err }
 }
 
-fn macro_expand_to(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandTo {
-    db.lookup_intern_macro_call(id).expand_to()
-}
-
 fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
     expand_to: ExpandTo,
@@ -751,3 +744,40 @@ fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>
 fn setup_syntax_context_root(db: &dyn ExpandDatabase) {
     db.intern_syntax_context(SyntaxContextData::root());
 }
+
+fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
+    let mut s = String::from("Expansions:");
+    let mut entries = InternMacroCallLookupQuery.in_db(db).entries::<Vec<_>>();
+    entries.sort_by_key(|e| e.key);
+    for e in entries {
+        let id = e.key;
+        let expn_data = e.value.as_ref().unwrap();
+        s.push_str(&format!(
+            "\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
+            id,
+            expn_data.kind.file_id(),
+            expn_data.call_site,
+            SyntaxContextId::ROOT, // FIXME expn_data.def_site,
+            expn_data.kind.descr(),
+        ));
+    }
+
+    s.push_str("\n\nSyntaxContexts:\n");
+    let mut entries = InternSyntaxContextLookupQuery.in_db(db).entries::<Vec<_>>();
+    entries.sort_by_key(|e| e.key);
+    for e in entries {
+        struct SyntaxContextDebug<'a>(
+            &'a dyn ExpandDatabase,
+            SyntaxContextId,
+            &'a SyntaxContextData,
+        );
+
+        impl<'a> std::fmt::Debug for SyntaxContextDebug<'a> {
+            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+                self.2.fancy_debug(self.1, self.0, f)
+            }
+        }
+        stdx::format_to!(s, "{:?}\n", SyntaxContextDebug(db, e.key, &e.value.unwrap()));
+    }
+    s
+}
@@ -8,7 +8,7 @@ use base_db::span::{MacroCallId, SpanData, SyntaxContextId};
 
 use crate::db::ExpandDatabase;
 
-#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+#[derive(Copy, Clone, Hash, PartialEq, Eq)]
 pub struct SyntaxContextData {
     pub outer_expn: Option<MacroCallId>,
     pub outer_transparency: Transparency,
@@ -19,6 +19,18 @@ pub struct SyntaxContextData {
     pub opaque_and_semitransparent: SyntaxContextId,
 }
 
+impl std::fmt::Debug for SyntaxContextData {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        f.debug_struct("SyntaxContextData")
+            .field("outer_expn", &self.outer_expn)
+            .field("outer_transparency", &self.outer_transparency)
+            .field("parent", &self.parent)
+            .field("opaque", &self.opaque)
+            .field("opaque_and_semitransparent", &self.opaque_and_semitransparent)
+            .finish()
+    }
+}
+
 impl SyntaxContextData {
     pub fn root() -> Self {
         SyntaxContextData {
@@ -29,6 +41,22 @@ impl SyntaxContextData {
             opaque_and_semitransparent: SyntaxContextId::ROOT,
         }
     }
+
+    pub fn fancy_debug(
+        self,
+        self_id: SyntaxContextId,
+        db: &dyn ExpandDatabase,
+        f: &mut std::fmt::Formatter<'_>,
+    ) -> std::fmt::Result {
+        write!(f, "#{self_id} parent: #{}, outer_mark: (", self.parent)?;
+        match self.outer_expn {
+            Some(id) => {
+                write!(f, "{:?}::{{{{expn{:?}}}}}", db.lookup_intern_macro_call(id).krate, id)?
+            }
+            None => write!(f, "root")?,
+        }
+        write!(f, ", {:?})", self.outer_transparency)
+    }
 }
 
 /// A property of a macro expansion that determines how identifiers
|
|||||||
expn_id: MacroCallId,
|
expn_id: MacroCallId,
|
||||||
transparency: Transparency,
|
transparency: Transparency,
|
||||||
) -> SpanData {
|
) -> SpanData {
|
||||||
SpanData { ctx: db.apply_mark(SyntaxContextId::ROOT, expn_id, transparency), ..span }
|
SpanData { ctx: apply_mark(db, SyntaxContextId::ROOT, expn_id, transparency), ..span }
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn apply_mark(
|
pub(super) fn apply_mark(
|
||||||
|
@@ -122,6 +122,7 @@ pub struct MacroDefId {
     pub kind: MacroDefKind,
     pub local_inner: bool,
     pub allow_internal_unsafe: bool,
+    // pub def_site: SyntaxContextId,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -463,6 +464,14 @@ impl MacroCallLoc {
 }
 
 impl MacroCallKind {
+    fn descr(&self) -> &'static str {
+        match self {
+            MacroCallKind::FnLike { .. } => "macro call",
+            MacroCallKind::Derive { .. } => "derive macro",
+            MacroCallKind::Attr { .. } => "attribute macro",
+        }
+    }
+
     /// Returns the file containing the macro invocation.
     fn file_id(&self) -> HirFileId {
         match *self {
@@ -390,6 +390,7 @@ impl InferenceContext<'_> {
     }
 }
 
+#[derive(Debug)]
 enum ValuePathResolution {
     // It's awkward to wrap a single ID in two enums, but we need both and this saves fallible
     // conversion between them + `unwrap()`.
@@ -407,11 +407,7 @@ impl<'a> TyLoweringContext<'a> {
                 drop(expander);
                 let ty = self.lower_ty(&type_ref);
 
-                self.expander
-                    .borrow_mut()
-                    .as_mut()
-                    .unwrap()
-                    .exit(self.db.upcast(), mark);
+                self.expander.borrow_mut().as_mut().unwrap().exit(mark);
                 Some(ty)
             }
             _ => {
@@ -128,7 +128,6 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
             None => continue,
         };
         let def_map = module.def_map(&db);
-        dbg!(def_map.dump(&db));
         visit_module(&db, &def_map, module.local_id, &mut |it| defs.push(it));
     }
     defs.sort_by_key(|def| match def {
@@ -792,9 +792,21 @@ fn match_meta_var<S: Span>(
                 }
                 _ => {}
             };
-            return input
-                .expect_fragment(parser::PrefixEntryPoint::Expr)
-                .map(|tt| tt.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Expr));
+            return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
+                tt.map(|tt| match tt {
+                    tt::TokenTree::Leaf(leaf) => tt::Subtree {
+                        delimiter: tt::Delimiter::dummy_invisible(),
+                        token_trees: vec![leaf.into()],
+                    },
+                    tt::TokenTree::Subtree(mut s) => {
+                        if s.delimiter.kind == tt::DelimiterKind::Invisible {
+                            s.delimiter.kind = tt::DelimiterKind::Parenthesis;
+                        }
+                        s
+                    }
+                })
+                .map(Fragment::Expr)
+            });
         }
         MetaVarKind::Ident | MetaVarKind::Tt | MetaVarKind::Lifetime | MetaVarKind::Literal => {
             let tt_result = match kind {
@@ -444,15 +444,8 @@ fn expand_repeat<S: Span>(
 fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
     match fragment {
         Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
-        Fragment::Expr(mut tt) => {
-            if tt.delimiter.kind == tt::DelimiterKind::Invisible {
-                tt.delimiter = tt::Delimiter {
-                    open: S::DUMMY,
-                    close: S::DUMMY,
-                    kind: tt::DelimiterKind::Parenthesis,
-                };
-            }
-            buf.push(tt.into())
-        }
+        Fragment::Expr(sub) => {
+            push_subtree(buf, sub);
+        }
         Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
         Fragment::Tokens(tt) => buf.push(tt),
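A hedged sketch (simplified, hypothetical types; not the actual `tt` API) of the wrapping rule the last two hunks introduce in `match_meta_var`, replacing the parenthesis insertion that `push_fragment` used to do at transcription time: a single-token expression is stored under an invisible delimiter, while a multi-token expression (an invisible subtree) gets real parentheses so operator precedence is preserved after substitution.

#[derive(Clone, Copy, PartialEq, Debug)]
enum Delim { Invisible, Paren }

#[derive(Debug)]
enum TokenTree {
    Leaf(String),
    Subtree { delim: Delim, trees: Vec<TokenTree> },
}

fn wrap_expr_fragment(tt: TokenTree) -> TokenTree {
    match tt {
        // A lone token like `1` keeps no visible delimiter, so the rendered
        // expansion shows `v.push(1)` rather than `v.push((1))`.
        leaf @ TokenTree::Leaf(_) => {
            TokenTree::Subtree { delim: Delim::Invisible, trees: vec![leaf] }
        }
        // A multi-token expression trades an invisible delimiter for real
        // parentheses so precedence still holds in the expanded code.
        TokenTree::Subtree { delim, trees } => TokenTree::Subtree {
            delim: if delim == Delim::Invisible { Delim::Paren } else { delim },
            trees,
        },
    }
}

fn main() {
    println!("{:?}", wrap_expr_fragment(TokenTree::Leaf("1".into())));
}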