Remove usages of Span::DUMMY
parent f48ecb6e09
commit 4ec81230db
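This reworks span handling across hir-def, hir-expand, and mbe: `MacroCallLoc::call_site`, the various `call_site` parameters, and `Attr::ctxt` (renamed to `Attr::span`) now carry a full `Span` instead of only a `SyntaxContextId`. The mbe expander threads that call-site span through `expand_rules` and `transcriber::transcribe` into `ExpandCtx`, so tokens synthesized during transcription are spanned to the macro call site instead of the deprecated `S::DUMMY`. Several `arrayvec` dependencies are also moved to the workspace version along the way.

A minimal sketch of the pattern, with simplified, hypothetical types (not the actual rust-analyzer definitions):

    // The expansion context carries the call-site span, and tokens that the
    // expander has to invent reuse it instead of a placeholder DUMMY span.
    #[derive(Clone, Copy)]
    struct Span(u32); // stand-in for the real span type

    struct ExpandCtx {
        call_site: Span, // threaded in from the macro invocation
    }

    fn synthesize_literal(ctx: &ExpandCtx, text: &str) -> (String, Span) {
        // before this change: (text.to_owned(), Span::DUMMY)
        (text.to_owned(), ctx.call_site)
    }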
@@ -93,6 +93,7 @@ lsp-server = { version = "0.7.4" }
 
 # non-local crates
 anyhow = "1.0.75"
+arrayvec = "0.7.4"
 bitflags = "2.4.1"
 cargo_metadata = "0.18.1"
 command-group = "2.0.1"
@@ -12,7 +12,7 @@ rust-version.workspace = true
 doctest = false
 
 [dependencies]
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 bitflags.workspace = true
 cov-mark = "2.0.0-pre.1"
 dashmap.workspace = true
@@ -55,7 +55,7 @@
 use profile::Count;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::SyntaxContextId;
+use span::Span;
 use stdx::never;
 use syntax::{ast, match_ast, SyntaxKind};
 use triomphe::Arc;
@@ -747,7 +747,7 @@ pub struct MacroCall {
 pub path: Interned<ModPath>,
 pub ast_id: FileAstId<ast::MacroCall>,
 pub expand_to: ExpandTo,
-pub call_site: SyntaxContextId,
+pub call_site: Span,
 }
 
 #[derive(Debug, Clone, Eq, PartialEq)]
@@ -549,7 +549,7 @@ fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<Macr
 path,
 ast_id,
 expand_to,
-call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
+call_site: span_map.span_for_range(m.syntax().text_range()),
 };
 Some(id(self.data().macro_calls.alloc(res)))
 }
@@ -80,7 +80,7 @@ macro_rules! eprintln {
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::SyntaxContextId;
+use span::Span;
 use stdx::impl_from;
 use syntax::{ast, AstNode};
 
@@ -1172,7 +1172,7 @@ fn as_call_id_with_errors(
 return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
 };
 
-let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
+let call_site = span_map.span_for_range(self.value.syntax().text_range());
 
 macro_call_as_call_id_with_eager(
 db,
@@ -1202,7 +1202,7 @@ fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWi
 fn macro_call_as_call_id(
 db: &dyn ExpandDatabase,
 call: &AstIdWithPath<ast::MacroCall>,
-call_site: SyntaxContextId,
+call_site: Span,
 expand_to: ExpandTo,
 krate: CrateId,
 resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
@@ -1214,7 +1214,7 @@ fn macro_call_as_call_id(
 fn macro_call_as_call_id_with_eager(
 db: &dyn ExpandDatabase,
 call: &AstIdWithPath<ast::MacroCall>,
-call_site: SyntaxContextId,
+call_site: Span,
 expand_to: ExpandTo,
 krate: CrateId,
 resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1320,7 +1320,7 @@ fn derive_macro_as_call_id(
 item_attr: &AstIdWithPath<ast::Adt>,
 derive_attr_index: AttrId,
 derive_pos: u32,
-call_site: SyntaxContextId,
+call_site: Span,
 krate: CrateId,
 resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@@ -1365,7 +1365,7 @@ fn attr_macro_as_call_id(
 attr_args: arg.map(Arc::new),
 invoc_attr_index: macro_attr.id,
 },
-macro_attr.ctxt,
+macro_attr.span,
 )
 }
 
@@ -228,13 +228,13 @@ enum MacroDirectiveKind {
 FnLike {
 ast_id: AstIdWithPath<ast::MacroCall>,
 expand_to: ExpandTo,
-call_site: SyntaxContextId,
+call_site: Span,
 },
 Derive {
 ast_id: AstIdWithPath<ast::Adt>,
 derive_attr: AttrId,
 derive_pos: usize,
-call_site: SyntaxContextId,
+call_site: Span,
 },
 Attr {
 ast_id: AstIdWithPath<ast::Item>,
@@ -1305,14 +1305,13 @@ fn resolve_macros(&mut self) -> ReachedFixedPoint {
 // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
 let call_id =
 attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def);
-let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);
 
 // If proc attribute macro expansion is disabled, skip expanding it here
 if !self.db.expand_proc_attr_macros() {
 self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
 directive.module_id,
-loc.kind,
-loc.def.krate,
+self.db.lookup_intern_macro_call(call_id).kind,
+def.krate,
 ));
 return recollect_without(self);
 }
@@ -1320,14 +1319,14 @@ fn resolve_macros(&mut self) -> ReachedFixedPoint {
 // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
 // due to duplicating functions into macro expansions
 if matches!(
-loc.def.kind,
+def.kind,
 MacroDefKind::BuiltInAttr(expander, _)
 if expander.is_test() || expander.is_bench()
 ) {
 return recollect_without(self);
 }
 
-if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+if let MacroDefKind::ProcMacro(exp, ..) = def.kind {
 if exp.is_dummy() {
 // If there's no expander for the proc macro (e.g.
 // because proc macros are disabled, or building the
@@ -1335,8 +1334,8 @@ fn resolve_macros(&mut self) -> ReachedFixedPoint {
 // expansion like we would if it was disabled
 self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
 directive.module_id,
-loc.kind,
-loc.def.krate,
+self.db.lookup_intern_macro_call(call_id).kind,
+def.krate,
 ));
 
 return recollect_without(self);
@@ -7,7 +7,7 @@
 use intern::Interned;
 use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
-use span::SyntaxContextId;
+use span::Span;
 use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
 use triomphe::Arc;
 
@@ -53,7 +53,7 @@ pub fn new(
 id,
 input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
 path: Interned::new(ModPath::from(crate::name!(doc))),
-ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+span: span_map.span_for_range(comment.syntax().text_range()),
 }),
 });
 let entries: Arc<[Attr]> = Arc::from_iter(entries);
@@ -177,7 +177,7 @@ pub struct Attr {
 pub id: AttrId,
 pub path: Interned<ModPath>,
 pub input: Option<Interned<AttrInput>>,
-pub ctxt: SyntaxContextId,
+pub span: Span,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -218,7 +218,7 @@ fn from_src(
 } else {
 None
 };
-Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
+Some(Attr { id, path, input, span: span_map.span_for_range(ast.syntax().text_range()) })
 }
 
 fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
@@ -266,7 +266,7 @@ pub fn token_tree_value(&self) -> Option<&Subtree> {
 pub fn parse_path_comma_token_tree<'a>(
 &'a self,
 db: &'a dyn ExpandDatabase,
-) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
+) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
 let args = self.token_tree_value()?;
 
 if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -294,7 +294,7 @@ pub fn parse_path_comma_token_tree<'a>(
 return None;
 }
 let path = meta.path()?;
-let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
+let call_site = span_map.span_at(path.syntax().text_range().start());
 Some((
 ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
 call_site,
@@ -1,6 +1,5 @@
 //! Builtin attributes.
-use span::{FileId, MacroCallId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
-use syntax::{TextRange, TextSize};
+use span::{MacroCallId, Span};
 
 use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
 
@@ -110,20 +109,13 @@ fn derive_attr_expand(
 pub fn pseudo_derive_attr_expansion(
 tt: &tt::Subtree,
 args: &tt::Subtree,
-call_site: SyntaxContextId,
+call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
 let mk_leaf = |char| {
 tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
 char,
 spacing: tt::Spacing::Alone,
-span: Span {
-range: TextRange::empty(TextSize::new(0)),
-anchor: span::SpanAnchor {
-file_id: FileId::BOGUS,
-ast_id: ROOT_ERASED_FILE_AST_ID,
-},
-ctx: call_site,
-},
+span: call_site,
 }))
 };
 
@@ -10,7 +10,7 @@
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
-use span::SyntaxContextId;
+use span::{Span, SyntaxContextId};
 use syntax::{
 ast::{self, HasAttrs},
 AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
@@ -57,7 +57,8 @@ pub fn expand(
 tt: tt::Subtree,
 call_id: MacroCallId,
 ) -> ExpandResult<tt::Subtree> {
-let toolchain = &db.crate_graph()[db.lookup_intern_macro_call(call_id).def.krate].toolchain;
+let loc = db.lookup_intern_macro_call(call_id);
+let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
 let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
 REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
 &base_db::Version {
@@ -80,6 +81,7 @@ pub fn expand(
 &tt,
 |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
 new_meta_vars,
+loc.call_site,
 )
 .map_err(Into::into),
 }
@@ -90,6 +92,7 @@ pub fn expand_unhygienic(
 db: &dyn ExpandDatabase,
 tt: tt::Subtree,
 krate: CrateId,
+call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
 let toolchain = &db.crate_graph()[krate].toolchain;
 let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@@ -108,7 +111,7 @@ pub fn expand_unhygienic(
 tt::Subtree::empty(tt::DelimSpan::DUMMY),
 ExpandError::other(format!("invalid macro definition: {e}")),
 ),
-None => self.mac.expand(&tt, |_| (), new_meta_vars).map_err(Into::into),
+None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
 }
 }
 }
@@ -315,9 +318,12 @@ pub fn expand_speculative(
 let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
 expander.expand(db, actual_macro_call, &adt, span_map)
 }
-MacroDefKind::Declarative(it) => {
-db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate)
-}
+MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
+db,
+tt,
+loc.def.krate,
+loc.call_site,
+),
 MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
 MacroDefKind::BuiltInEager(it, _) => {
 it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -19,7 +19,7 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use span::SyntaxContextId;
+use span::Span;
 use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
 use triomphe::Arc;
 
@@ -37,7 +37,7 @@ pub fn expand_eager_macro_input(
 krate: CrateId,
 macro_call: InFile<ast::MacroCall>,
 def: MacroDefId,
-call_site: SyntaxContextId,
+call_site: Span,
 resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<MacroCallId>> {
 let ast_map = db.ast_id_map(macro_call.file_id);
@@ -102,7 +102,7 @@ fn lazy_expand(
 def: &MacroDefId,
 macro_call: InFile<ast::MacroCall>,
 krate: CrateId,
-call_site: SyntaxContextId,
+call_site: Span,
 ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
 let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
 
@@ -122,7 +122,7 @@ fn eager_macro_recur(
 mut offset: TextSize,
 curr: InFile<SyntaxNode>,
 krate: CrateId,
-call_site: SyntaxContextId,
+call_site: Span,
 macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
 let original = curr.value.clone_for_update();
@@ -116,7 +116,7 @@ pub(super) fn apply_mark(
 return apply_mark_internal(db, ctxt, Some(call_id), transparency);
 }
 
-let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
+let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx;
 let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
 call_site_ctxt.normalize_to_macros_2_0(db)
 } else {
@@ -116,18 +116,20 @@ pub struct MacroCallLoc {
 pub krate: CrateId,
 /// Some if this is a macro call for an eager macro. Note that this is `None`
 /// for the eager input macro file.
+// FIXME: This seems bad to save in an interned structure
 eager: Option<Arc<EagerCallInfo>>,
 pub kind: MacroCallKind,
-pub call_site: SyntaxContextId,
+pub call_site: Span,
 }
 
+// FIXME: Might make sense to intern this? Given it's gonna be the same for a bunch of macro calls
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroDefId {
 pub krate: CrateId,
 pub kind: MacroDefKind,
 pub local_inner: bool,
 pub allow_internal_unsafe: bool,
-// pub def_site: SyntaxContextId,
+// pub def_site: Span,
 }
 
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
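Note that with `MacroCallLoc::call_site` widened from a `SyntaxContextId` to a full `Span`, callers that only need the hygiene context now project it out with `.ctx` (as `apply_mark` does above), while callers that need the whole span, such as the declarative macro expander, read it directly from the interned call.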
@@ -348,7 +350,7 @@ pub fn as_lazy_macro(
 db: &dyn db::ExpandDatabase,
 krate: CrateId,
 kind: MacroCallKind,
-call_site: SyntaxContextId,
+call_site: Span,
 ) -> MacroCallId {
 db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
 }
@@ -14,7 +14,7 @@ doctest = false
 [dependencies]
 cov-mark = "2.0.0-pre.1"
 itertools.workspace = true
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 bitflags.workspace = true
 smallvec.workspace = true
 ena = "0.14.0"
@@ -14,7 +14,7 @@ doctest = false
 [dependencies]
 rustc-hash.workspace = true
 either.workspace = true
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 itertools.workspace = true
 smallvec.workspace = true
 triomphe.workspace = true
@@ -20,7 +20,7 @@ rustc-hash.workspace = true
 once_cell = "1.17.0"
 either.workspace = true
 itertools.workspace = true
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 indexmap.workspace = true
 memchr = "2.6.4"
 triomphe.workspace = true
@@ -14,7 +14,7 @@ doctest = false
 [dependencies]
 cov-mark = "2.0.0-pre.1"
 crossbeam-channel = "0.5.5"
-arrayvec = "0.7.4"
+arrayvec.workspace = true
 either.workspace = true
 itertools.workspace = true
 tracing.workspace = true
@@ -41,7 +41,7 @@ fn benchmark_expand_macro_rules() {
 invocations
 .into_iter()
 .map(|(id, tt)| {
-let res = rules[&id].expand(&tt, |_| (), true);
+let res = rules[&id].expand(&tt, |_| (), true, DUMMY);
 assert!(res.err.is_none());
 res.value.token_trees.len()
 })
|
|||||||
for op in rule.lhs.iter() {
|
for op in rule.lhs.iter() {
|
||||||
collect_from_op(op, &mut subtree, &mut seed);
|
collect_from_op(op, &mut subtree, &mut seed);
|
||||||
}
|
}
|
||||||
if it.expand(&subtree, |_| (), true).err.is_none() {
|
if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() {
|
||||||
res.push((name.clone(), subtree));
|
res.push((name.clone(), subtree));
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
@@ -17,6 +17,7 @@ pub(crate) fn expand_rules<S: Span>(
 marker: impl Fn(&mut S) + Copy,
 is_2021: bool,
 new_meta_vars: bool,
+call_site: S,
 ) -> ExpandResult<tt::Subtree<S>> {
 let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
 for rule in rules {
|
|||||||
// If we find a rule that applies without errors, we're done.
|
// If we find a rule that applies without errors, we're done.
|
||||||
// Unconditionally returning the transcription here makes the
|
// Unconditionally returning the transcription here makes the
|
||||||
// `test_repeat_bad_var` test fail.
|
// `test_repeat_bad_var` test fail.
|
||||||
let ExpandResult { value, err: transcribe_err } =
|
let ExpandResult { value, err: transcribe_err } = transcriber::transcribe(
|
||||||
transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, new_meta_vars);
|
&rule.rhs,
|
||||||
|
&new_match.bindings,
|
||||||
|
marker,
|
||||||
|
new_meta_vars,
|
||||||
|
call_site,
|
||||||
|
);
|
||||||
if transcribe_err.is_none() {
|
if transcribe_err.is_none() {
|
||||||
return ExpandResult::ok(value);
|
return ExpandResult::ok(value);
|
||||||
}
|
}
|
||||||
@ -46,7 +52,7 @@ pub(crate) fn expand_rules<S: Span>(
|
|||||||
if let Some((match_, rule)) = match_ {
|
if let Some((match_, rule)) = match_ {
|
||||||
// if we got here, there was no match without errors
|
// if we got here, there was no match without errors
|
||||||
let ExpandResult { value, err: transcribe_err } =
|
let ExpandResult { value, err: transcribe_err } =
|
||||||
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars);
|
transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site);
|
||||||
ExpandResult { value, err: match_.err.or(transcribe_err) }
|
ExpandResult { value, err: match_.err.or(transcribe_err) }
|
||||||
} else {
|
} else {
|
||||||
ExpandResult::new(
|
ExpandResult::new(
|
||||||
|
@@ -132,8 +132,9 @@ pub(super) fn transcribe<S: Span>(
 bindings: &Bindings<S>,
 marker: impl Fn(&mut S) + Copy,
 new_meta_vars: bool,
+call_site: S,
 ) -> ExpandResult<tt::Subtree<S>> {
-let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars };
+let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site };
 let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
 expand_subtree(&mut ctx, template, None, &mut arena, marker)
 }
|
|||||||
bindings: &'a Bindings<S>,
|
bindings: &'a Bindings<S>,
|
||||||
nesting: Vec<NestingState>,
|
nesting: Vec<NestingState>,
|
||||||
new_meta_vars: bool,
|
new_meta_vars: bool,
|
||||||
|
call_site: S,
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_subtree<S: Span>(
|
fn expand_subtree<S: Span>(
|
||||||
@ -208,13 +210,13 @@ fn expand_subtree<S: Span>(
|
|||||||
Op::Var { name, id, .. } => {
|
Op::Var { name, id, .. } => {
|
||||||
let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
|
let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
|
||||||
err = err.or(e);
|
err = err.or(e);
|
||||||
push_fragment(arena, fragment);
|
push_fragment(ctx, arena, fragment);
|
||||||
}
|
}
|
||||||
Op::Repeat { tokens: subtree, kind, separator } => {
|
Op::Repeat { tokens: subtree, kind, separator } => {
|
||||||
let ExpandResult { value: fragment, err: e } =
|
let ExpandResult { value: fragment, err: e } =
|
||||||
expand_repeat(ctx, subtree, *kind, separator, arena, marker);
|
expand_repeat(ctx, subtree, *kind, separator, arena, marker);
|
||||||
err = err.or(e);
|
err = err.or(e);
|
||||||
push_fragment(arena, fragment)
|
push_fragment(ctx, arena, fragment)
|
||||||
}
|
}
|
||||||
Op::Ignore { name, id } => {
|
Op::Ignore { name, id } => {
|
||||||
// Expand the variable, but ignore the result. This registers the repetition count.
|
// Expand the variable, but ignore the result. This registers the repetition count.
|
||||||
@ -227,9 +229,7 @@ fn expand_subtree<S: Span>(
|
|||||||
arena.push(
|
arena.push(
|
||||||
tt::Leaf::Literal(tt::Literal {
|
tt::Leaf::Literal(tt::Literal {
|
||||||
text: index.to_string().into(),
|
text: index.to_string().into(),
|
||||||
// FIXME
|
span: ctx.call_site,
|
||||||
#[allow(deprecated)]
|
|
||||||
span: S::DUMMY,
|
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -242,9 +242,7 @@ fn expand_subtree<S: Span>(
|
|||||||
arena.push(
|
arena.push(
|
||||||
tt::Leaf::Literal(tt::Literal {
|
tt::Leaf::Literal(tt::Literal {
|
||||||
text: length.to_string().into(),
|
text: length.to_string().into(),
|
||||||
// FIXME
|
span: ctx.call_site,
|
||||||
#[allow(deprecated)]
|
|
||||||
span: S::DUMMY,
|
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -309,9 +307,7 @@ fn expand_subtree<S: Span>(
|
|||||||
arena.push(
|
arena.push(
|
||||||
tt::Leaf::Literal(tt::Literal {
|
tt::Leaf::Literal(tt::Literal {
|
||||||
text: c.to_string().into(),
|
text: c.to_string().into(),
|
||||||
// FIXME
|
span: ctx.call_site,
|
||||||
#[allow(deprecated)]
|
|
||||||
span: S::DUMMY,
|
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -367,12 +363,8 @@ fn expand_var<S: Span>(
|
|||||||
}
|
}
|
||||||
Err(e) => ExpandResult {
|
Err(e) => ExpandResult {
|
||||||
value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
|
value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
|
||||||
// FIXME
|
open: ctx.call_site,
|
||||||
#[allow(deprecated)]
|
close: ctx.call_site,
|
||||||
open: S::DUMMY,
|
|
||||||
// FIXME
|
|
||||||
#[allow(deprecated)]
|
|
||||||
close: S::DUMMY,
|
|
||||||
}))),
|
}))),
|
||||||
err: Some(e),
|
err: Some(e),
|
||||||
},
|
},
|
||||||
@ -475,13 +467,17 @@ fn expand_repeat<S: Span>(
|
|||||||
ExpandResult { value: Fragment::Tokens(tt), err }
|
ExpandResult { value: Fragment::Tokens(tt), err }
|
||||||
}
|
}
|
||||||
|
|
||||||
fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
|
fn push_fragment<S: Span>(
|
||||||
|
ctx: &ExpandCtx<'_, S>,
|
||||||
|
buf: &mut Vec<tt::TokenTree<S>>,
|
||||||
|
fragment: Fragment<S>,
|
||||||
|
) {
|
||||||
match fragment {
|
match fragment {
|
||||||
Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
|
Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
|
||||||
Fragment::Expr(sub) => {
|
Fragment::Expr(sub) => {
|
||||||
push_subtree(buf, sub);
|
push_subtree(buf, sub);
|
||||||
}
|
}
|
||||||
Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
|
Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, buf, tt),
|
||||||
Fragment::Tokens(tt) => buf.push(tt),
|
Fragment::Tokens(tt) => buf.push(tt),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -496,7 +492,11 @@ fn push_subtree<S>(buf: &mut Vec<tt::TokenTree<S>>, tt: tt::Subtree<S>) {
|
|||||||
/// Inserts the path separator `::` between an identifier and its following generic
|
/// Inserts the path separator `::` between an identifier and its following generic
|
||||||
/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
|
/// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
|
||||||
/// we need this fixup.
|
/// we need this fixup.
|
||||||
fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt::Subtree<S>) {
|
fn fix_up_and_push_path_tt<S: Span>(
|
||||||
|
ctx: &ExpandCtx<'_, S>,
|
||||||
|
buf: &mut Vec<tt::TokenTree<S>>,
|
||||||
|
subtree: tt::Subtree<S>,
|
||||||
|
) {
|
||||||
stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
|
stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
|
||||||
let mut prev_was_ident = false;
|
let mut prev_was_ident = false;
|
||||||
// Note that we only need to fix up the top-level `TokenTree`s because the
|
// Note that we only need to fix up the top-level `TokenTree`s because the
|
||||||
@ -513,9 +513,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt
|
|||||||
tt::Leaf::Punct(tt::Punct {
|
tt::Leaf::Punct(tt::Punct {
|
||||||
char: ':',
|
char: ':',
|
||||||
spacing: tt::Spacing::Joint,
|
spacing: tt::Spacing::Joint,
|
||||||
// FIXME
|
span: ctx.call_site,
|
||||||
#[allow(deprecated)]
|
|
||||||
span: S::DUMMY,
|
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
@ -523,9 +521,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt
|
|||||||
tt::Leaf::Punct(tt::Punct {
|
tt::Leaf::Punct(tt::Punct {
|
||||||
char: ':',
|
char: ':',
|
||||||
spacing: tt::Spacing::Alone,
|
spacing: tt::Spacing::Alone,
|
||||||
// FIXME
|
span: ctx.call_site,
|
||||||
#[allow(deprecated)]
|
|
||||||
span: S::DUMMY,
|
|
||||||
})
|
})
|
||||||
.into(),
|
.into(),
|
||||||
);
|
);
|
||||||
|
@@ -252,8 +252,9 @@ pub fn expand(
 tt: &tt::Subtree<S>,
 marker: impl Fn(&mut S) + Copy,
 new_meta_vars: bool,
+call_site: S,
 ) -> ExpandResult<tt::Subtree<S>> {
-expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars)
+expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars, call_site)
 }
 }
 
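Taken together, the mbe hunks thread the call-site span end to end: the public `expand` entry point (last hunk above) gains a `call_site: S` parameter, forwards it through `expander::expand_rules` and `transcriber::transcribe` into `ExpandCtx`, and the tokens the transcriber synthesizes (the index, length, and count literals, the empty error subtree in `expand_var`, and the `::` punct fixup in `fix_up_and_push_path_tt`) now carry `ctx.call_site` where they previously carried the deprecated `S::DUMMY`.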