Auto merge of #16844 - Veykril:macarons, r=Veykril

internal: Replace `Span` with `SyntaxContextId` in `MacroCallLoc`

This makes macro call locations a lot more stable again: the interned data now only carries the hygiene context (`SyntaxContextId`), not an edit-sensitive call-site `Span`.
bors 2024-03-15 12:32:06 +00:00
commit d763e05182
25 changed files with 224 additions and 244 deletions
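
As a quick orientation before the diffs: the core of the change is that the interned `MacroCallLoc` stops carrying a full call-site `Span` (whose text range shifts with every edit, invalidating the interned id) and keeps only the edit-stable `SyntaxContextId`; the full span is recomputed on demand by the `macro_arg` query. A minimal, self-contained sketch of that idea, using simplified stand-in types rather than the real rust-analyzer API (`compute_call_site_span` below is hypothetical, standing in for `macro_arg`):

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct TextRange { start: u32, end: u32 }

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SyntaxContextId(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Span { range: TextRange, ctx: SyntaxContextId }

// Before: interning this keys the macro call id on the exact text range of the call site.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCallLocOld { call_site: Span }

// After: only the hygiene context is part of the interned key.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct MacroCallLocNew { ctxt: SyntaxContextId }

// Hypothetical helper standing in for the `macro_arg` query, which now returns the
// call-site span alongside the lowered token tree instead of reading it from the loc.
fn compute_call_site_span(offset: u32, len: u32, ctx: SyntaxContextId) -> Span {
    Span { range: TextRange { start: offset, end: offset + len }, ctx }
}

fn main() {
    let ctxt = SyntaxContextId(0);
    // The same macro call before and after an unrelated edit that shifts it by 10 bytes.
    let before_edit = compute_call_site_span(100, 8, ctxt);
    let after_edit = compute_call_site_span(110, 8, ctxt);

    // Old scheme: the interned location differs, so the macro call id is not reused.
    assert_ne!(
        MacroCallLocOld { call_site: before_edit },
        MacroCallLocOld { call_site: after_edit }
    );
    // New scheme: the location is identical, so the id (and everything keyed on it) is stable.
    assert_eq!(MacroCallLocNew { ctxt }, MacroCallLocNew { ctxt });

    println!("call-site span, recomputed on demand: {:?}", after_edit);
}

The diffs below apply that swap throughout hir-def and hir-expand: `call_site: Span` fields and parameters become `ctxt: SyntaxContextId`, and the expanders receive the call-site span as an explicit argument computed by `macro_arg`.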

View File

@ -715,7 +715,7 @@ fn collect_item(
}
AssocItem::MacroCall(call) => {
let file_id = self.expander.current_file_id();
let MacroCall { ast_id, expand_to, call_site, ref path } = item_tree[call];
let MacroCall { ast_id, expand_to, ctxt, ref path } = item_tree[call];
let module = self.expander.module.local_id;
let resolver = |path| {
@ -734,7 +734,7 @@ fn collect_item(
match macro_call_as_call_id(
self.db.upcast(),
&AstIdWithPath::new(file_id, ast_id, Clone::clone(path)),
call_site,
ctxt,
expand_to,
self.expander.module.krate(),
resolver,

View File

@ -49,7 +49,7 @@
use la_arena::{Arena, Idx, IdxRange, RawIdx};
use rustc_hash::FxHashMap;
use smallvec::SmallVec;
use span::{AstIdNode, FileAstId, Span};
use span::{AstIdNode, FileAstId, SyntaxContextId};
use stdx::never;
use syntax::{ast, match_ast, SyntaxKind};
use triomphe::Arc;
@ -790,7 +790,7 @@ pub struct MacroCall {
pub path: Interned<ModPath>,
pub ast_id: FileAstId<ast::MacroCall>,
pub expand_to: ExpandTo,
pub call_site: Span,
pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, Eq, PartialEq)]

View File

@ -567,7 +567,7 @@ fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<Macr
})?);
let ast_id = self.source_ast_id_map.ast_id(m);
let expand_to = hir_expand::ExpandTo::from_call_site(m);
let res = MacroCall { path, ast_id, expand_to, call_site: span_map.span_for_range(range) };
let res = MacroCall { path, ast_id, expand_to, ctxt: span_map.span_for_range(range).ctx };
Some(id(self.data().macro_calls.alloc(res)))
}

View File

@ -487,12 +487,12 @@ fn print_mod_item(&mut self, item: ModItem) {
}
}
ModItem::MacroCall(it) => {
let MacroCall { path, ast_id, expand_to, call_site } = &self.tree[it];
let MacroCall { path, ast_id, expand_to, ctxt } = &self.tree[it];
let _ = writeln!(
self,
"// AstId: {:?}, Span: {}, ExpandTo: {:?}",
"// AstId: {:?}, SyntaxContext: {}, ExpandTo: {:?}",
ast_id.erase().into_raw(),
call_site,
ctxt,
expand_to
);
wln!(self, "{}!(...);", path.display(self.db.upcast()));

View File

@ -278,7 +278,7 @@ macro_rules! m { ... }
// AstId: 2
pub macro m2 { ... }
// AstId: 3, Span: 0:3@0..1#0, ExpandTo: Items
// AstId: 3, SyntaxContext: 0, ExpandTo: Items
m!(...);
"#]],
);

View File

@ -90,7 +90,7 @@
use item_tree::ExternBlock;
use la_arena::Idx;
use nameres::DefMap;
use span::{AstIdNode, FileAstId, FileId, Span};
use span::{AstIdNode, FileAstId, FileId, SyntaxContextId};
use stdx::impl_from;
use syntax::{ast, AstNode};
@ -1357,7 +1357,7 @@ fn as_call_id_with_errors(
macro_call_as_call_id_with_eager(
db,
&AstIdWithPath::new(ast_id.file_id, ast_id.value, path),
call_site,
call_site.ctx,
expands_to,
krate,
resolver,
@ -1382,7 +1382,7 @@ fn new(file_id: HirFileId, ast_id: FileAstId<T>, path: path::ModPath) -> AstIdWi
fn macro_call_as_call_id(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: Span,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
@ -1394,7 +1394,7 @@ fn macro_call_as_call_id(
fn macro_call_as_call_id_with_eager(
db: &dyn ExpandDatabase,
call: &AstIdWithPath<ast::MacroCall>,
call_site: Span,
call_site: SyntaxContextId,
expand_to: ExpandTo,
krate: CrateId,
resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,

View File

@ -33,7 +33,7 @@ fn test_expand_bad_literal() {
"#,
expect![[r#"
macro_rules! m { ($i:literal) => {}; }
/* error: mismatched delimiters */"#]],
/* error: expected literal */"#]],
);
}

View File

@ -98,7 +98,7 @@ macro_rules! m1 { ($x:ident) => { ($x } }
macro_rules! m2 { ($x:ident) => {} }
/* error: macro definition has parse errors */
/* error: mismatched delimiters */
/* error: expected ident */
"#]],
)
}

View File

@ -5,7 +5,7 @@
attrs::{Attr, AttrId, AttrInput},
MacroCallId, MacroCallKind, MacroDefId,
};
use span::Span;
use span::SyntaxContextId;
use syntax::{ast, SmolStr};
use triomphe::Arc;
@ -109,7 +109,7 @@ pub(super) fn attr_macro_as_call_id(
let arg = match macro_attr.input.as_deref() {
Some(AttrInput::TokenTree(tt)) => {
let mut tt = tt.as_ref().clone();
tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);
tt.delimiter.kind = tt::DelimiterKind::Invisible;
Some(tt)
}
@ -124,7 +124,7 @@ pub(super) fn attr_macro_as_call_id(
attr_args: arg.map(Arc::new),
invoc_attr_index: macro_attr.id,
},
macro_attr.span,
macro_attr.ctxt,
)
}
@ -133,7 +133,7 @@ pub(super) fn derive_macro_as_call_id(
item_attr: &AstIdWithPath<ast::Adt>,
derive_attr_index: AttrId,
derive_pos: u32,
call_site: Span,
call_site: SyntaxContextId,
krate: CrateId,
resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {

View File

@ -230,13 +230,13 @@ enum MacroDirectiveKind {
FnLike {
ast_id: AstIdWithPath<ast::MacroCall>,
expand_to: ExpandTo,
call_site: Span,
ctxt: SyntaxContextId,
},
Derive {
ast_id: AstIdWithPath<ast::Adt>,
derive_attr: AttrId,
derive_pos: usize,
call_site: Span,
ctxt: SyntaxContextId,
},
Attr {
ast_id: AstIdWithPath<ast::Item>,
@ -1126,7 +1126,7 @@ enum Resolved {
let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
let call_id = macro_call_as_call_id(
self.db.upcast(),
ast_id,
@ -1146,7 +1146,7 @@ enum Resolved {
return Resolved::Yes;
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: call_site } => {
let id = derive_macro_as_call_id(
self.db,
ast_id,
@ -1266,7 +1266,7 @@ enum Resolved {
ast_id,
derive_attr: attr.id,
derive_pos: idx,
call_site,
ctxt: call_site.ctx,
},
container: directive.container,
});
@ -1428,7 +1428,7 @@ fn finish(mut self) -> DefMap {
for directive in &self.unresolved_macros {
match &directive.kind {
MacroDirectiveKind::FnLike { ast_id, expand_to, call_site } => {
MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt: call_site } => {
// FIXME: we shouldn't need to re-resolve the macro here just to get the unresolved error!
let macro_call_as_call_id = macro_call_as_call_id(
self.db.upcast(),
@ -1460,7 +1460,7 @@ fn finish(mut self) -> DefMap {
));
}
}
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site: _ } => {
MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, ctxt: _ } => {
self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
directive.module_id,
MacroCallKind::Derive {
@ -2289,7 +2289,7 @@ fn collect_macro_def(&mut self, id: FileItemTreeId<Macro2>, module: ModuleId) {
fn collect_macro_call(
&mut self,
&MacroCall { ref path, ast_id, expand_to, call_site }: &MacroCall,
&MacroCall { ref path, ast_id, expand_to, ctxt }: &MacroCall,
container: ItemContainerId,
) {
let ast_id = AstIdWithPath::new(self.file_id(), ast_id, ModPath::clone(path));
@ -2303,7 +2303,7 @@ fn collect_macro_call(
if let Ok(res) = macro_call_as_call_id_with_eager(
db.upcast(),
&ast_id,
call_site,
ctxt,
expand_to,
self.def_collector.def_map.krate,
|path| {
@ -2361,7 +2361,7 @@ fn collect_macro_call(
self.def_collector.unresolved_macros.push(MacroDirective {
module_id: self.module_id,
depth: self.macro_depth + 1,
kind: MacroDirectiveKind::FnLike { ast_id, expand_to, call_site },
kind: MacroDirectiveKind::FnLike { ast_id, expand_to, ctxt },
container,
});
}

View File

@ -7,7 +7,7 @@
use intern::Interned;
use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
use smallvec::{smallvec, SmallVec};
use span::Span;
use span::{Span, SyntaxContextId};
use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
use triomphe::Arc;
@ -53,7 +53,7 @@ pub fn new(
id,
input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
path: Interned::new(ModPath::from(crate::name!(doc))),
span: span_map.span_for_range(comment.syntax().text_range()),
ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
}),
});
let entries: Arc<[Attr]> = Arc::from_iter(entries);
@ -173,7 +173,7 @@ pub struct Attr {
pub id: AttrId,
pub path: Interned<ModPath>,
pub input: Option<Interned<AttrInput>>,
pub span: Span,
pub ctxt: SyntaxContextId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -219,11 +219,11 @@ fn from_src(
} else {
None
};
Some(Attr { id, path, input, span })
Some(Attr { id, path, input, ctxt: span.ctx })
}
fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<Attr> {
let span = tt.first()?.first_span();
let ctxt = tt.first()?.first_span().ctx;
let path_end = tt
.iter()
.position(|tt| {
@ -255,7 +255,7 @@ fn from_tt(db: &dyn ExpandDatabase, tt: &[tt::TokenTree], id: AttrId) -> Option<
}
_ => None,
};
Some(Attr { id, path, input, span })
Some(Attr { id, path, input, ctxt })
}
pub fn path(&self) -> &ModPath {

View File

@ -11,7 +11,7 @@ pub enum BuiltinAttrExpander {
}
impl BuiltinAttrExpander {
pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree) -> ExpandResult<tt::Subtree> {
pub fn expander(&self) -> fn (&dyn ExpandDatabase, MacroCallId, &tt::Subtree, Span) -> ExpandResult<tt::Subtree> {
match *self {
$( BuiltinAttrExpander::$variant => $expand, )*
}
@ -34,8 +34,9 @@ pub fn expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
self.expander()(db, id, tt)
self.expander()(db, id, tt, span)
}
pub fn is_derive(self) -> bool {
@ -71,6 +72,7 @@ fn dummy_attr_expand(
_db: &dyn ExpandDatabase,
_id: MacroCallId,
tt: &tt::Subtree,
_span: Span,
) -> ExpandResult<tt::Subtree> {
ExpandResult::ok(tt.clone())
}
@ -100,6 +102,7 @@ fn derive_expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(id);
let derives = match &loc.kind {
@ -107,13 +110,10 @@ fn derive_expand(
attr_args
}
_ => {
return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan {
open: loc.call_site,
close: loc.call_site,
}))
return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan { open: span, close: span }))
}
};
pseudo_derive_attr_expansion(tt, derives, loc.call_site)
pseudo_derive_attr_expansion(tt, derives, span)
}
pub fn pseudo_derive_attr_expansion(

View File

@ -50,8 +50,8 @@ pub fn expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id);
self.expander()(span, tt)
}

View File

@ -62,8 +62,8 @@ pub fn expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id);
self.expander()(db, id, tt, span)
}
@ -75,8 +75,8 @@ pub fn expand(
db: &dyn ExpandDatabase,
id: MacroCallId,
tt: &tt::Subtree,
span: Span,
) -> ExpandResult<tt::Subtree> {
let span = db.lookup_intern_macro_call(id).call_site;
let span = span_with_def_site_ctxt(db, span, id);
self.expander()(db, id, tt, span)
}

View File

@ -3,7 +3,7 @@
use base_db::{salsa, CrateId, FileId, SourceDatabase};
use either::Either;
use limit::Limit;
use mbe::{syntax_node_to_token_tree, ValueResult};
use mbe::syntax_node_to_token_tree;
use rustc_hash::FxHashSet;
use span::{AstIdMap, Span, SyntaxContextData, SyntaxContextId};
use syntax::{ast, AstNode, Parse, SyntaxElement, SyntaxError, SyntaxNode, SyntaxToken, T};
@ -98,10 +98,7 @@ fn parse_macro_expansion(
/// Lowers syntactic macro call to a token tree representation. That's a firewall
/// query, only typing in the macro call itself changes the returned
/// subtree.
fn macro_arg(
&self,
id: MacroCallId,
) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>>;
fn macro_arg(&self, id: MacroCallId) -> (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span);
/// Fetches the expander for this macro.
#[salsa::transparent]
#[salsa::invoke(TokenExpander::macro_expander)]
@ -147,14 +144,16 @@ pub fn expand_speculative(
let span_map = RealSpanMap::absolute(FileId::BOGUS);
let span_map = SpanMapRef::RealSpanMap(&span_map);
let (_, _, span) = db.macro_arg(actual_macro_call);
// Build the subtree and token mapping for the speculative args
let (mut tt, undo_info) = match loc.kind {
MacroCallKind::FnLike { .. } => (
mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site),
mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Attr { .. } if loc.def.is_attribute_derive() => (
mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site),
mbe::syntax_node_to_token_tree(speculative_args, span_map, span),
SyntaxFixupUndoInfo::NONE,
),
MacroCallKind::Derive { derive_attr_index: index, .. }
@ -162,12 +161,15 @@ pub fn expand_speculative(
let censor = if let MacroCallKind::Derive { .. } = loc.kind {
censor_derive_input(index, &ast::Adt::cast(speculative_args.clone())?)
} else {
censor_attr_input(index, &ast::Item::cast(speculative_args.clone())?)
attr_source(index, &ast::Item::cast(speculative_args.clone())?)
.into_iter()
.map(|it| it.syntax().clone().into())
.collect()
};
let censor_cfg =
cfg_process::process_cfg_attrs(speculative_args, &loc, db).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
let mut fixups = fixup::fixup_syntax(span_map, speculative_args, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
@ -181,7 +183,7 @@ pub fn expand_speculative(
span_map,
fixups.append,
fixups.remove,
loc.call_site,
span,
),
fixups.undo_info,
)
@ -203,9 +205,8 @@ pub fn expand_speculative(
}?;
match attr.token_tree() {
Some(token_tree) => {
let mut tree =
syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site);
tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map, span);
tree.delimiter = tt::Delimiter::invisible_spanned(span);
Some(tree)
}
@ -219,8 +220,8 @@ pub fn expand_speculative(
// Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
let mut speculative_expansion = match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => {
tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
let span = db.proc_macro_span(ast);
tt.delimiter = tt::Delimiter::invisible_spanned(span);
expander.expand(
db,
loc.def.krate,
@ -233,22 +234,21 @@ pub fn expand_speculative(
)
}
MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => {
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, loc.call_site)
pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?, span)
}
MacroDefKind::Declarative(it) => {
db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate, span)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
db,
tt,
loc.def.krate,
loc.call_site,
),
MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
MacroDefKind::BuiltInDerive(it, ..) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInEager(it, _) => {
it.expand(db, actual_macro_call, &tt).map_err(Into::into)
it.expand(db, actual_macro_call, &tt, span).map_err(Into::into)
}
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt),
MacroDefKind::BuiltInAttr(it, _) => it.expand(db, actual_macro_call, &tt, span),
};
let expand_to = loc.expand_to();
@ -344,7 +344,7 @@ pub(crate) fn parse_with_map(
fn macro_arg(
db: &dyn ExpandDatabase,
id: MacroCallId,
) -> ValueResult<(Arc<tt::Subtree>, SyntaxFixupUndoInfo), Arc<Box<[SyntaxError]>>> {
) -> (Arc<tt::Subtree>, SyntaxFixupUndoInfo, Span) {
let loc = db.lookup_intern_macro_call(id);
if let MacroCallLoc {
@ -353,38 +353,33 @@ fn macro_arg(
..
} = &loc
{
return ValueResult::ok((eager.arg.clone(), SyntaxFixupUndoInfo::NONE));
return (eager.arg.clone(), SyntaxFixupUndoInfo::NONE, eager.span);
}
let (parse, map) = parse_with_map(db, loc.kind.file_id());
let root = parse.syntax_node();
let (censor, item_node) = match loc.kind {
let (censor, item_node, span) = match loc.kind {
MacroCallKind::FnLike { ast_id, .. } => {
let node = &ast_id.to_ptr(db).to_node(&root);
let path_range = node
.path()
.map_or_else(|| node.syntax().text_range(), |path| path.syntax().text_range());
let span = map.span_for_range(path_range);
let dummy_tt = |kind| {
(
Arc::new(tt::Subtree {
delimiter: tt::Delimiter {
open: loc.call_site,
close: loc.call_site,
kind,
},
delimiter: tt::Delimiter { open: span, close: span, kind },
token_trees: Box::default(),
}),
SyntaxFixupUndoInfo::default(),
span,
)
};
let node = &ast_id.to_ptr(db).to_node(&root);
let offset = node.syntax().text_range().start();
let Some(tt) = node.token_tree() else {
return ValueResult::new(
dummy_tt(tt::DelimiterKind::Invisible),
Arc::new(Box::new([SyntaxError::new_at_offset(
"missing token tree".to_owned(),
offset,
)])),
);
return dummy_tt(tt::DelimiterKind::Invisible);
};
let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']);
let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]);
@ -409,48 +404,46 @@ fn macro_arg(
T!['{'] => tt::DelimiterKind::Brace,
_ => tt::DelimiterKind::Invisible,
};
return ValueResult::new(
dummy_tt(kind),
Arc::new(Box::new([SyntaxError::new_at_offset(
"mismatched delimiters".to_owned(),
offset,
)])),
);
return dummy_tt(kind);
}
let mut tt = mbe::syntax_node_to_token_tree(tt.syntax(), map.as_ref(), loc.call_site);
let mut tt = mbe::syntax_node_to_token_tree(tt.syntax(), map.as_ref(), span);
if loc.def.is_proc_macro() {
// proc macros expect their inputs without parentheses, MBEs expect it with them included
tt.delimiter.kind = tt::DelimiterKind::Invisible;
}
let val = (Arc::new(tt), SyntaxFixupUndoInfo::NONE);
return if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
match parse.errors() {
errors if errors.is_empty() => ValueResult::ok(val),
errors => ValueResult::new(
val,
// Box::<[_]>::from(res.errors()), not stable yet
Arc::new(errors.to_vec().into_boxed_slice()),
),
}
} else {
ValueResult::ok(val)
};
return (Arc::new(tt), SyntaxFixupUndoInfo::NONE, span);
}
MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
(censor_derive_input(derive_attr_index, &node), node.into())
let censor_derive_input = censor_derive_input(derive_attr_index, &node);
let item_node = node.into();
let attr_source = attr_source(derive_attr_index, &item_node);
// FIXME: This is wrong, this should point to the path of the derive attribute
let span =
map.span_for_range(attr_source.as_ref().and_then(|it| it.path()).map_or_else(
|| item_node.syntax().text_range(),
|it| it.syntax().text_range(),
));
(censor_derive_input, item_node, span)
}
MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
let node = ast_id.to_ptr(db).to_node(&root);
(censor_attr_input(invoc_attr_index, &node), node)
let attr_source = attr_source(invoc_attr_index, &node);
let span = map.span_for_range(
attr_source
.as_ref()
.and_then(|it| it.path())
.map_or_else(|| node.syntax().text_range(), |it| it.syntax().text_range()),
);
(attr_source.into_iter().map(|it| it.syntax().clone().into()).collect(), node, span)
}
};
let (mut tt, undo_info) = {
let syntax = item_node.syntax();
let censor_cfg = cfg_process::process_cfg_attrs(syntax, &loc, db).unwrap_or_default();
let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, loc.call_site);
let mut fixups = fixup::fixup_syntax(map.as_ref(), syntax, span);
fixups.append.retain(|it, _| match it {
syntax::NodeOrToken::Token(_) => true,
it => !censor.contains(it) && !censor_cfg.contains(it),
@ -464,7 +457,7 @@ fn macro_arg(
map,
fixups.append,
fixups.remove,
loc.call_site,
span,
),
fixups.undo_info,
)
@ -475,11 +468,11 @@ fn macro_arg(
tt.delimiter.kind = tt::DelimiterKind::Invisible;
}
ValueResult::ok((Arc::new(tt), undo_info))
(Arc::new(tt), undo_info, span)
}
// FIXME: Censoring info should be calculated by the caller! Namely by name resolution
/// Derives expect all `#[derive(..)]` invocations up to the currently invoked one to be stripped
/// Derives expect all `#[derive(..)]` invocations up to (and including) the currently invoked one to be stripped
fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<SyntaxElement> {
// FIXME: handle `cfg_attr`
cov_mark::hit!(derive_censoring);
@ -496,16 +489,11 @@ fn censor_derive_input(derive_attr_index: AttrId, node: &ast::Adt) -> FxHashSet<
.collect()
}
/// Attributes expect the invoking attribute to be stripped\
fn censor_attr_input(invoc_attr_index: AttrId, node: &ast::Item) -> FxHashSet<SyntaxElement> {
/// Attributes expect the invoking attribute to be stripped
fn attr_source(invoc_attr_index: AttrId, node: &ast::Item) -> Option<ast::Attr> {
// FIXME: handle `cfg_attr`
cov_mark::hit!(attribute_macro_attr_censoring);
collect_attrs(node)
.nth(invoc_attr_index.ast_index())
.and_then(|(_, attr)| Either::left(attr))
.map(|attr| attr.syntax().clone().into())
.into_iter()
.collect()
collect_attrs(node).nth(invoc_attr_index.ast_index()).and_then(|(_, attr)| Either::left(attr))
}
impl TokenExpander {
@ -535,70 +523,54 @@ fn macro_expand(
) -> ExpandResult<CowArc<tt::Subtree>> {
let _p = tracing::span!(tracing::Level::INFO, "macro_expand").entered();
let ExpandResult { value: tt, err } = match loc.def.kind {
let (ExpandResult { value: tt, err }, span) = match loc.def.kind {
MacroDefKind::ProcMacro(..) => return db.expand_proc_macro(macro_call_id).map(CowArc::Arc),
_ => {
let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id);
let format_parse_err = |err: Arc<Box<[SyntaxError]>>| {
let mut buf = String::new();
for err in &**err {
use std::fmt::Write;
_ = write!(buf, "{}, ", err);
}
buf.pop();
buf.pop();
ExpandError::other(buf)
};
let (macro_arg, undo_info, span) = db.macro_arg(macro_call_id);
let arg = &*macro_arg;
let res = match loc.def.kind {
MacroDefKind::Declarative(id) => {
db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id)
}
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, arg).map_err(Into::into)
}
MacroDefKind::BuiltInDerive(it, _) => {
it.expand(db, macro_call_id, arg).map_err(Into::into)
}
MacroDefKind::BuiltInEager(it, _) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through
// the HirFileId machinery. As eager macro inputs are assigned a macro file id, that query
// will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here.
let eager = match &loc.kind {
MacroCallKind::FnLike { eager: None, .. } => {
return ExpandResult {
value: CowArc::Arc(macro_arg.clone()),
err: err.map(format_parse_err),
};
}
MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
_ => None,
};
let mut res = it.expand(db, macro_call_id, arg).map_err(Into::into);
if let Some(EagerCallInfo { error, .. }) = eager {
// FIXME: We should report both errors!
res.err = error.clone().or(res.err);
let res =
match loc.def.kind {
MacroDefKind::Declarative(id) => db
.decl_macro_expander(loc.def.krate, id)
.expand(db, arg.clone(), macro_call_id, span),
MacroDefKind::BuiltIn(it, _) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into)
}
res
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, arg);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
_ => unreachable!(),
};
ExpandResult {
value: res.value,
// if the arg had parse errors, show them instead of the expansion errors
err: err.map(format_parse_err).or(res.err),
}
MacroDefKind::BuiltInDerive(it, _) => {
it.expand(db, macro_call_id, arg, span).map_err(Into::into)
}
MacroDefKind::BuiltInEager(it, _) => {
// This might look a bit odd, but we do not expand the inputs to eager macros here.
// Eager macro inputs are expanded, well, eagerly when we collect the macro calls.
// That kind of expansion uses the ast id map of an eager macro's input though, which goes through
// the HirFileId machinery. As eager macro inputs are assigned a macro file id, that query
// will end up going through here again, whereas we just want to inspect the raw input.
// As such we just return the input subtree here.
let eager = match &loc.kind {
MacroCallKind::FnLike { eager: None, .. } => {
return ExpandResult::ok(CowArc::Arc(macro_arg.clone()));
}
MacroCallKind::FnLike { eager: Some(eager), .. } => Some(&**eager),
_ => None,
};
let mut res = it.expand(db, macro_call_id, arg, span).map_err(Into::into);
if let Some(EagerCallInfo { error, .. }) = eager {
// FIXME: We should report both errors!
res.err = error.clone().or(res.err);
}
res
}
MacroDefKind::BuiltInAttr(it, _) => {
let mut res = it.expand(db, macro_call_id, arg, span);
fixup::reverse_fixups(&mut res.value, &undo_info);
res
}
_ => unreachable!(),
};
(ExpandResult { value: res.value, err: res.err }, span)
}
};
@ -608,7 +580,7 @@ fn macro_expand(
if let Err(value) = check_tt_count(&tt) {
return value.map(|()| {
CowArc::Owned(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: Box::new([]),
})
});
@ -631,7 +603,7 @@ fn proc_macro_span(db: &dyn ExpandDatabase, ast: AstId<ast::Fn>) -> Span {
fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> {
let loc = db.lookup_intern_macro_call(id);
let (macro_arg, undo_info) = db.macro_arg(id).value;
let (macro_arg, undo_info, span) = db.macro_arg(id);
let (expander, ast) = match loc.def.kind {
MacroDefKind::ProcMacro(expander, _, ast) => (expander, ast),
@ -643,23 +615,25 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
_ => None,
};
let span = db.proc_macro_span(ast);
let ExpandResult { value: mut tt, err } = expander.expand(
db,
loc.def.krate,
loc.krate,
&macro_arg,
attr_arg,
span_with_def_site_ctxt(db, span, id),
span_with_call_site_ctxt(db, span, id),
span_with_mixed_site_ctxt(db, span, id),
);
let ExpandResult { value: mut tt, err } = {
let span = db.proc_macro_span(ast);
expander.expand(
db,
loc.def.krate,
loc.krate,
&macro_arg,
attr_arg,
span_with_def_site_ctxt(db, span, id),
span_with_call_site_ctxt(db, span, id),
span_with_mixed_site_ctxt(db, span, id),
)
};
// Set a hard limit for the expanded tt
if let Err(value) = check_tt_count(&tt) {
return value.map(|()| {
Arc::new(tt::Subtree {
delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
delimiter: tt::Delimiter::invisible_spanned(span),
token_trees: Box::new([]),
})
});
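
The database change above is the load-bearing piece: `macro_arg` now returns the call-site `Span` together with the lowered token tree and fixup info, and `macro_expand`/`expand_proc_macro` thread that span into the expanders instead of reading `loc.call_site`. A minimal sketch of that flow with simplified stand-in types (not the real salsa query or `tt::Subtree` API):

#[derive(Clone, Copy, Debug)]
struct Span { start: u32, end: u32 }

#[derive(Clone, Debug)]
struct Subtree { tokens: Vec<String>, delim_span: Span }

struct SyntaxFixupUndoInfo;

// Stand-in for the `macro_arg` query: the span is computed here, once, from the call's
// syntax, and handed out alongside the argument subtree.
fn macro_arg(call_text: &str, call_offset: u32) -> (Subtree, SyntaxFixupUndoInfo, Span) {
    let span = Span { start: call_offset, end: call_offset + call_text.len() as u32 };
    let tokens = call_text.split_whitespace().map(str::to_owned).collect();
    (Subtree { tokens, delim_span: span }, SyntaxFixupUndoInfo, span)
}

// Mirrors the new expander shape, `expand(db, id, tt, span)`: an empty or fallback
// expansion still needs a span for its invisible delimiter, so it takes one explicitly.
fn dummy_expand(tt: &Subtree, span: Span) -> Subtree {
    Subtree { tokens: tt.tokens.clone(), delim_span: span }
}

fn main() {
    let (tt, _undo_info, span) = macro_arg("m ! ( 1 + 1 )", 42);
    let expanded = dummy_expand(&tt, span);
    println!("expanded {:?} with call-site span {:?}", expanded.tokens, span);
}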

View File

@ -29,6 +29,7 @@ pub fn expand(
db: &dyn ExpandDatabase,
tt: tt::Subtree,
call_id: MacroCallId,
span: Span,
) -> ExpandResult<tt::Subtree> {
let loc = db.lookup_intern_macro_call(call_id);
let toolchain = db.toolchain(loc.def.krate);
@ -45,7 +46,7 @@ pub fn expand(
});
match self.mac.err() {
Some(_) => ExpandResult::new(
tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
tt::Subtree::empty(tt::DelimSpan { open: span, close: span }),
ExpandError::MacroDefinition,
),
None => self
@ -54,7 +55,7 @@ pub fn expand(
&tt,
|s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
new_meta_vars,
loc.call_site,
span,
)
.map_err(Into::into),
}

View File

@ -19,7 +19,7 @@
//!
//! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
use base_db::CrateId;
use span::Span;
use span::SyntaxContextId;
use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
use triomphe::Arc;
@ -37,7 +37,7 @@ pub fn expand_eager_macro_input(
macro_call: &ast::MacroCall,
ast_id: AstId<ast::MacroCall>,
def: MacroDefId,
call_site: Span,
call_site: SyntaxContextId,
resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<MacroCallId>> {
let expand_to = ExpandTo::from_call_site(macro_call);
@ -50,9 +50,10 @@ pub fn expand_eager_macro_input(
def,
krate,
kind: MacroCallKind::FnLike { ast_id, expand_to: ExpandTo::Expr, eager: None },
call_site,
ctxt: call_site,
}
.intern(db);
let (_, _, span) = db.macro_arg(arg_id);
let ExpandResult { value: (arg_exp, arg_exp_map), err: parse_err } =
db.parse_macro_expansion(arg_id.as_macro_file());
@ -79,7 +80,7 @@ pub fn expand_eager_macro_input(
return ExpandResult { value: None, err };
};
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site);
let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, span);
subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;
@ -93,9 +94,10 @@ pub fn expand_eager_macro_input(
arg: Arc::new(subtree),
arg_id,
error: err.clone(),
span,
})),
},
call_site,
ctxt: call_site,
};
ExpandResult { value: Some(loc.intern(db)), err }
@ -107,7 +109,7 @@ fn lazy_expand(
macro_call: &ast::MacroCall,
ast_id: AstId<ast::MacroCall>,
krate: CrateId,
call_site: Span,
call_site: SyntaxContextId,
) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
let expand_to = ExpandTo::from_call_site(macro_call);
let id = def.make_call(
@ -129,7 +131,7 @@ fn eager_macro_recur(
mut offset: TextSize,
curr: InFile<SyntaxNode>,
krate: CrateId,
call_site: Span,
call_site: SyntaxContextId,
macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
let original = curr.value.clone_for_update();

View File

@ -3,7 +3,7 @@
use rustc_hash::{FxHashMap, FxHashSet};
use smallvec::SmallVec;
use span::{ErasedFileAstId, Span, SpanAnchor, SpanData, FIXUP_ERASED_FILE_AST_ID_MARKER};
use span::{ErasedFileAstId, Span, SpanAnchor, FIXUP_ERASED_FILE_AST_ID_MARKER};
use stdx::never;
use syntax::{
ast::{self, AstNode, HasLoopBody},
@ -57,7 +57,7 @@ pub(crate) fn fixup_syntax(
let dummy_range = FIXUP_DUMMY_RANGE;
let fake_span = |range| {
let span = span_map.span_for_range(range);
SpanData {
Span {
range: dummy_range,
anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span.ctx,
@ -76,7 +76,7 @@ pub(crate) fn fixup_syntax(
let span = span_map.span_for_range(node_range);
let replacement = Leaf::Ident(Ident {
text: "__ra_fixup".into(),
span: SpanData {
span: Span {
range: TextRange::new(TextSize::new(idx), FIXUP_DUMMY_RANGE_END),
anchor: SpanAnchor { ast_id: FIXUP_DUMMY_AST_ID, ..span.anchor },
ctx: span.ctx,
@ -305,8 +305,8 @@ pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo)
tt.delimiter.close.anchor.ast_id == FIXUP_DUMMY_AST_ID
|| tt.delimiter.open.anchor.ast_id == FIXUP_DUMMY_AST_ID
) {
tt.delimiter.close = SpanData::DUMMY;
tt.delimiter.open = SpanData::DUMMY;
tt.delimiter.close = Span::DUMMY;
tt.delimiter.open = Span::DUMMY;
}
reverse_fixups_(tt, undo_info);
}

View File

@ -65,7 +65,7 @@ pub(super) fn apply_mark(
return apply_mark_internal(db, ctxt, call_id, transparency);
}
let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx;
let call_site_ctxt = db.lookup_intern_macro_call(call_id).ctxt;
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
call_site_ctxt.normalize_to_macros_2_0(db)
} else {
@ -205,11 +205,10 @@ pub(crate) fn dump_syntax_contexts(db: &dyn ExpandDatabase) -> String {
let id = e.key;
let expn_data = e.value.as_ref().unwrap();
s.push_str(&format!(
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
"\n{:?}: parent: {:?}, call_site_ctxt: {:?}, kind: {:?}",
id,
expn_data.kind.file_id(),
expn_data.call_site,
SyntaxContextId::ROOT, // FIXME expn_data.def_site,
expn_data.ctxt,
expn_data.kind.descr(),
));
}

View File

@ -171,8 +171,7 @@ pub struct MacroCallLoc {
pub def: MacroDefId,
pub krate: CrateId,
pub kind: MacroCallKind,
// FIXME: Spans while relative to an anchor, are still rather unstable
pub call_site: Span,
pub ctxt: SyntaxContextId,
}
impl_intern_value_trivial!(MacroCallLoc);
@ -202,6 +201,8 @@ pub struct EagerCallInfo {
/// Call id of the eager macro's input file (this is the macro file for its fully expanded input).
arg_id: MacroCallId,
error: Option<ExpandError>,
/// TODO: Doc
span: Span,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@ -429,9 +430,9 @@ pub fn make_call(
db: &dyn ExpandDatabase,
krate: CrateId,
kind: MacroCallKind,
call_site: Span,
ctxt: SyntaxContextId,
) -> MacroCallId {
MacroCallLoc { def: self, krate, kind, call_site }.intern(db)
MacroCallLoc { def: self, krate, kind, ctxt }.intern(db)
}
pub fn definition_range(&self, db: &dyn ExpandDatabase) -> InFile<TextRange> {
@ -805,7 +806,7 @@ pub fn new(db: &dyn ExpandDatabase, macro_file: MacroFileId) -> ExpansionInfo {
let (parse, exp_map) = db.parse_macro_expansion(macro_file).value;
let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() };
let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value;
let (macro_arg, _, _) = db.macro_arg(macro_file.macro_call_id);
let def = loc.def.ast_id().left().and_then(|id| {
let def_tt = match id.to_node(db) {

View File

@ -697,7 +697,7 @@ fn is_from_include_file(
};
// get mapped token in the include! macro file
let span = span::SpanData {
let span = span::Span {
range: token.text_range(),
anchor: span::SpanAnchor { file_id, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,

View File

@ -1,6 +1,7 @@
//! This module adds real-world mbe examples for benchmark tests
use rustc_hash::FxHashMap;
use span::Span;
use syntax::{
ast::{self, HasName},
AstNode, SmolStr,
@ -9,7 +10,7 @@
use crate::{
parser::{MetaVarKind, Op, RepeatKind, Separator},
syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanData, DummyTestSpanMap, DUMMY,
syntax_node_to_token_tree, DeclarativeMacro, DummyTestSpanMap, DUMMY,
};
#[test]
@ -50,14 +51,14 @@ fn benchmark_expand_macro_rules() {
assert_eq!(hash, 69413);
}
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<DummyTestSpanData>> {
fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro<Span>> {
macro_rules_fixtures_tt()
.into_iter()
.map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt, true, true)))
.collect()
}
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>> {
fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<Span>> {
let fixture = bench_fixture::numerous_macro_rules();
let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
@ -79,8 +80,8 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>
/// Generate random invocation fixtures from rules
fn invocation_fixtures(
rules: &FxHashMap<String, DeclarativeMacro<DummyTestSpanData>>,
) -> Vec<(String, tt::Subtree<DummyTestSpanData>)> {
rules: &FxHashMap<String, DeclarativeMacro<Span>>,
) -> Vec<(String, tt::Subtree<Span>)> {
let mut seed = 123456789;
let mut res = Vec::new();
@ -128,8 +129,8 @@ fn invocation_fixtures(
return res;
fn collect_from_op(
op: &Op<DummyTestSpanData>,
token_trees: &mut Vec<tt::TokenTree<DummyTestSpanData>>,
op: &Op<Span>,
token_trees: &mut Vec<tt::TokenTree<Span>>,
seed: &mut usize,
) {
return match op {
@ -221,19 +222,19 @@ fn rand(seed: &mut usize) -> usize {
*seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
*seed
}
fn make_ident(ident: &str) -> tt::TokenTree<DummyTestSpanData> {
fn make_ident(ident: &str) -> tt::TokenTree<Span> {
tt::Leaf::Ident(tt::Ident { span: DUMMY, text: SmolStr::new(ident) }).into()
}
fn make_punct(char: char) -> tt::TokenTree<DummyTestSpanData> {
fn make_punct(char: char) -> tt::TokenTree<Span> {
tt::Leaf::Punct(tt::Punct { span: DUMMY, char, spacing: tt::Spacing::Alone }).into()
}
fn make_literal(lit: &str) -> tt::TokenTree<DummyTestSpanData> {
fn make_literal(lit: &str) -> tt::TokenTree<Span> {
tt::Leaf::Literal(tt::Literal { span: DUMMY, text: SmolStr::new(lit) }).into()
}
fn make_subtree(
kind: tt::DelimiterKind,
token_trees: Option<Vec<tt::TokenTree<DummyTestSpanData>>>,
) -> tt::TokenTree<DummyTestSpanData> {
token_trees: Option<Vec<tt::TokenTree<Span>>>,
) -> tt::TokenTree<Span> {
tt::Subtree {
delimiter: tt::Delimiter { open: DUMMY, close: DUMMY, kind },
token_trees: token_trees.map(Vec::into_boxed_slice).unwrap_or_default(),

View File

@ -41,32 +41,30 @@ fn span_for(&self, range: TextRange) -> S {
/// Dummy things for testing where spans don't matter.
pub(crate) mod dummy_test_span_utils {
use span::{Span, SyntaxContextId};
use super::*;
pub type DummyTestSpanData = span::SpanData<DummyTestSyntaxContext>;
pub const DUMMY: DummyTestSpanData = span::SpanData {
pub const DUMMY: Span = Span {
range: TextRange::empty(TextSize::new(0)),
anchor: span::SpanAnchor {
file_id: span::FileId::BOGUS,
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: DummyTestSyntaxContext,
ctx: SyntaxContextId::ROOT,
};
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct DummyTestSyntaxContext;
pub struct DummyTestSpanMap;
impl SpanMapper<span::SpanData<DummyTestSyntaxContext>> for DummyTestSpanMap {
fn span_for(&self, range: syntax::TextRange) -> span::SpanData<DummyTestSyntaxContext> {
span::SpanData {
impl SpanMapper<Span> for DummyTestSpanMap {
fn span_for(&self, range: syntax::TextRange) -> Span {
Span {
range,
anchor: span::SpanAnchor {
file_id: span::FileId::BOGUS,
ast_id: span::ROOT_ERASED_FILE_AST_ID,
},
ctx: DummyTestSyntaxContext,
ctx: SyntaxContextId::ROOT,
}
}
}

View File

@ -1,4 +1,5 @@
use rustc_hash::FxHashMap;
use span::Span;
use syntax::{ast, AstNode};
use test_utils::extract_annotations;
use tt::{
@ -6,7 +7,7 @@
Leaf, Punct, Spacing,
};
use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMMY};
use crate::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
fn check_punct_spacing(fixture: &str) {
let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
@ -28,7 +29,7 @@ fn check_punct_spacing(fixture: &str) {
while !cursor.eof() {
while let Some(token_tree) = cursor.token_tree() {
if let TokenTreeRef::Leaf(
Leaf::Punct(Punct { spacing, span: DummyTestSpanData { range, .. }, .. }),
Leaf::Punct(Punct { spacing, span: Span { range, .. }, .. }),
_,
) = token_tree
{

View File

@ -44,6 +44,9 @@ pub struct FileRange {
pub type Span = SpanData<SyntaxContextId>;
/// Spans represent a region of code, used by the IDE to be able to link macro inputs and outputs
/// together. Positions in spans are relative to some [`SpanAnchor`] to make them more incremental
/// friendly.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SpanData<Ctx> {
/// The text range of this span, relative to the anchor.
@ -84,7 +87,7 @@ pub fn eq_ignoring_ctx(self, other: Self) -> bool {
impl Span {
#[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
pub const DUMMY: Self = SpanData {
pub const DUMMY: Self = Self {
range: TextRange::empty(TextSize::new(0)),
anchor: SpanAnchor { file_id: FileId::BOGUS, ast_id: ROOT_ERASED_FILE_AST_ID },
ctx: SyntaxContextId::ROOT,