Auto merge of #16167 - Veykril:dummy-spans, r=Veykril

fix: Fully remove dummy spans

Fixes https://github.com/rust-lang/rust-analyzer/issues/16008

Some of these spans are certainly wrong, but since we currently discard invisible delimiters, it doesn't really matter.
This commit is contained in:
bors 2023-12-20 13:33:36 +00:00
commit 337e2abb67
32 changed files with 292 additions and 256 deletions
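
The change that repeats throughout this diff is the replacement of the placeholder constructors `tt::Delimiter::DUMMY_INVISIBLE` / `tt::Delimiter::dummy_invisible()` and `tt::DelimSpan::DUMMY` with constructors that take a concrete span, usually the macro call site. A minimal sketch of that shape, using simplified stand-in types rather than the real ones from the `span` and `tt` crates:

// Simplified stand-ins for the real `span`/`tt` types; field shapes assumed.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Span {
    pub anchor: u32,       // file + AST id in the real type
    pub range: (u32, u32), // text range in the real type
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum DelimiterKind {
    Invisible,
    Parenthesis,
}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Delimiter {
    pub open: Span,
    pub close: Span,
    pub kind: DelimiterKind,
}

impl Delimiter {
    // Mirrors the constructor this PR introduces: the caller must say where
    // the invisible delimiter notionally lives instead of reaching for a
    // process-wide DUMMY constant with a bogus file id and empty range.
    pub fn invisible_spanned(span: Span) -> Delimiter {
        Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
    }
}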

View File

@@ -93,6 +93,7 @@ lsp-server = { version = "0.7.4" }
 # non-local crates
 anyhow = "1.0.75"
+arrayvec = "0.7.4"
 bitflags = "2.4.1"
 cargo_metadata = "0.18.1"
 command-group = "2.0.1"

View File

@@ -1,6 +1,6 @@
 use arbitrary::{Arbitrary, Unstructured};
 use expect_test::{expect, Expect};
-use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
+use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
 use syntax::{ast, AstNode};

 use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
@@ -8,7 +8,7 @@ use crate::{CfgAtom, CfgExpr, CfgOptions, DnfExpr};
 fn assert_parse_result(input: &str, expected: CfgExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     assert_eq!(cfg, expected);
 }
@@ -16,7 +16,7 @@ fn assert_parse_result(input: &str, expected: CfgExpr) {
 fn check_dnf(input: &str, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     let actual = format!("#![cfg({})]", DnfExpr::new(cfg));
     expect.assert_eq(&actual);
@@ -25,7 +25,7 @@ fn check_dnf(input: &str, expect: Expect) {
 fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let why_inactive = dnf.why_inactive(opts).unwrap().to_string();
@@ -36,7 +36,7 @@ fn check_why_inactive(input: &str, opts: &CfgOptions, expect: Expect) {
 fn check_enable_hints(input: &str, opts: &CfgOptions, expected_hints: &[&str]) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap);
+    let tt = syntax_node_to_token_tree(tt.syntax(), DummyTestSpanMap, DUMMY);
     let cfg = CfgExpr::parse(&tt);
     let dnf = DnfExpr::new(cfg);
     let hints = dnf.compute_enable_hints(opts).map(|diff| diff.to_string()).collect::<Vec<_>>();
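
As these test updates show, `syntax_node_to_token_tree` now takes a third argument: the span to fall back on for positions that have no source token of their own (most importantly the wrapping subtree's delimiter). A rough, self-contained sketch of that shape with simplified types; the real signature lives in the `mbe` crate:

// Rough sketch (simplified types): real tokens get their spans from the
// span map, while the wrapping subtree's delimiter carries the explicitly
// passed fallback span instead of a dummy.
fn node_to_token_tree<S: Copy>(
    tokens: &[(&str, (u32, u32))],            // (text, range) pairs standing in for a syntax node
    span_for_range: impl Fn((u32, u32)) -> S, // the span map
    delimiter_span: S,                        // the new third argument
) -> (Vec<(String, S)>, S) {
    let leaves = tokens
        .iter()
        .map(|&(text, range)| (text.to_string(), span_for_range(range)))
        .collect();
    (leaves, delimiter_span)
}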

View File

@@ -12,7 +12,7 @@ rust-version.workspace = true
 doctest = false

 [dependencies]
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 bitflags.workspace = true
 cov-mark = "2.0.0-pre.1"
 dashmap.workspace = true

View File

@@ -1,19 +1,23 @@
 //! This module contains tests for doc-expression parsing.
 //! Currently, it tests `#[doc(hidden)]` and `#[doc(alias)]`.

+use triomphe::Arc;
+
 use base_db::FileId;
-use hir_expand::span_map::{RealSpanMap, SpanMapRef};
+use hir_expand::span_map::{RealSpanMap, SpanMap};
 use mbe::syntax_node_to_token_tree;
-use syntax::{ast, AstNode};
+use syntax::{ast, AstNode, TextRange};

 use crate::attr::{DocAtom, DocExpr};

 fn assert_parse_result(input: &str, expected: DocExpr) {
     let source_file = ast::SourceFile::parse(input).ok().unwrap();
     let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+    let map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
     let tt = syntax_node_to_token_tree(
         tt.syntax(),
-        SpanMapRef::RealSpanMap(&RealSpanMap::absolute(FileId::from_raw(0))),
+        map.as_ref(),
+        map.span_for_range(TextRange::empty(0.into())),
     );
     let cfg = DocExpr::parse(&tt);
     assert_eq!(cfg, expected);
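
Instead of building the span map inline, the test now binds it so it can serve double duty: converting the node and producing a real, file-anchored span for the empty range at offset 0. A toy mirror of that setup (all names and shapes assumed):

// Toy mirror of the test setup above (names/shapes assumed): a span map
// anchored to a file can resolve any text range, so even an empty range at
// offset 0 yields a usable, non-dummy fallback span.
#[derive(Clone, Copy, Debug)]
struct FileId(u32);

#[derive(Clone, Copy, Debug)]
struct Span {
    file: FileId,
    start: u32,
    end: u32,
}

struct RealSpanMap {
    file: FileId,
}

impl RealSpanMap {
    fn absolute(file: FileId) -> Self {
        RealSpanMap { file }
    }
    fn span_for_range(&self, start: u32, end: u32) -> Span {
        Span { file: self.file, start, end }
    }
}

fn main() {
    let map = RealSpanMap::absolute(FileId(0));
    // the equivalent of `TextRange::empty(0.into())` in the test above
    let fallback = map.span_for_range(0, 0);
    println!("fallback span: {fallback:?}");
}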

View File

@@ -55,7 +55,7 @@ use la_arena::{Arena, Idx, IdxRange, RawIdx};
 use profile::Count;
 use rustc_hash::FxHashMap;
 use smallvec::SmallVec;
-use span::SyntaxContextId;
+use span::Span;
 use stdx::never;
 use syntax::{ast, match_ast, SyntaxKind};
 use triomphe::Arc;
@@ -747,7 +747,7 @@ pub struct MacroCall {
     pub path: Interned<ModPath>,
     pub ast_id: FileAstId<ast::MacroCall>,
     pub expand_to: ExpandTo,
-    pub call_site: SyntaxContextId,
+    pub call_site: Span,
 }

 #[derive(Debug, Clone, Eq, PartialEq)]

View File

@@ -549,7 +549,7 @@ impl<'a> Ctx<'a> {
             path,
             ast_id,
             expand_to,
-            call_site: span_map.span_for_range(m.syntax().text_range()).ctx,
+            call_site: span_map.span_for_range(m.syntax().text_range()),
         };
         Some(id(self.data().macro_calls.alloc(res)))
     }

View File

@@ -80,7 +80,7 @@ use hir_expand::{
 use item_tree::ExternBlock;
 use la_arena::Idx;
 use nameres::DefMap;
-use span::SyntaxContextId;
+use span::Span;
 use stdx::impl_from;
 use syntax::{ast, AstNode};
@@ -1172,7 +1172,7 @@ impl AsMacroCall for InFile<&ast::MacroCall> {
             return Ok(ExpandResult::only_err(ExpandError::other("malformed macro invocation")));
         };

-        let call_site = span_map.span_for_range(self.value.syntax().text_range()).ctx;
+        let call_site = span_map.span_for_range(self.value.syntax().text_range());

         macro_call_as_call_id_with_eager(
             db,
@@ -1202,7 +1202,7 @@ impl<T: AstIdNode> AstIdWithPath<T> {
 fn macro_call_as_call_id(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: SyntaxContextId,
+    call_site: Span,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<MacroDefId> + Copy,
@@ -1214,7 +1214,7 @@ fn macro_call_as_call_id(
 fn macro_call_as_call_id_with_eager(
     db: &dyn ExpandDatabase,
     call: &AstIdWithPath<ast::MacroCall>,
-    call_site: SyntaxContextId,
+    call_site: Span,
     expand_to: ExpandTo,
     krate: CrateId,
     resolver: impl FnOnce(path::ModPath) -> Option<MacroDefId>,
@@ -1320,7 +1320,7 @@ fn derive_macro_as_call_id(
     item_attr: &AstIdWithPath<ast::Adt>,
     derive_attr_index: AttrId,
     derive_pos: u32,
-    call_site: SyntaxContextId,
+    call_site: Span,
     krate: CrateId,
     resolver: impl Fn(path::ModPath) -> Option<(MacroId, MacroDefId)>,
 ) -> Result<(MacroId, MacroDefId, MacroCallId), UnresolvedMacro> {
@@ -1350,7 +1350,7 @@ fn attr_macro_as_call_id(
     let arg = match macro_attr.input.as_deref() {
         Some(AttrInput::TokenTree(tt)) => {
             let mut tt = tt.as_ref().clone();
-            tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+            tt.delimiter = tt::Delimiter::invisible_spanned(macro_attr.span);

             Some(tt)
         }
@@ -1365,7 +1365,7 @@ fn attr_macro_as_call_id(
             attr_args: arg.map(Arc::new),
             invoc_attr_index: macro_attr.id,
         },
-        macro_attr.ctxt,
+        macro_attr.span,
     )
 }
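
All of these `call_site` parameters widen from `SyntaxContextId` to the full `Span`. Conceptually a span bundles the location with the hygiene context, so the old information stays one field projection away while the location no longer has to be faked. A sketch with assumed simplified shapes:

// Assumed simplified shapes: the real types live in the `span` crate.
#[derive(Clone, Copy, Debug)]
struct SyntaxContextId(u32);

#[derive(Clone, Copy, Debug)]
struct Span {
    range: (u32, u32),    // location of the macro call
    ctx: SyntaxContextId, // hygiene context
}

// Before: fn macro_call_as_call_id(..., call_site: SyntaxContextId, ...)
// After:  fn macro_call_as_call_id(..., call_site: Span, ...)
fn needs_location(call_site: Span) -> (u32, u32) {
    call_site.range // now available without inventing a dummy range
}

fn needs_hygiene(call_site: Span) -> SyntaxContextId {
    call_site.ctx // what used to be passed around directly
}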

View File

@@ -228,13 +228,13 @@ enum MacroDirectiveKind {
     FnLike {
         ast_id: AstIdWithPath<ast::MacroCall>,
         expand_to: ExpandTo,
-        call_site: SyntaxContextId,
+        call_site: Span,
     },
     Derive {
         ast_id: AstIdWithPath<ast::Adt>,
         derive_attr: AttrId,
         derive_pos: usize,
-        call_site: SyntaxContextId,
+        call_site: Span,
     },
     Attr {
         ast_id: AstIdWithPath<ast::Item>,
@@ -1307,14 +1307,13 @@ impl DefCollector<'_> {
                 // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
                 let call_id =
                     attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def);
-                let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);

                 // If proc attribute macro expansion is disabled, skip expanding it here
                 if !self.db.expand_proc_attr_macros() {
                     self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
                         directive.module_id,
-                        loc.kind,
-                        loc.def.krate,
+                        self.db.lookup_intern_macro_call(call_id).kind,
+                        def.krate,
                     ));
                     return recollect_without(self);
                 }
@@ -1322,14 +1321,14 @@ impl DefCollector<'_> {
                 // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
                 // due to duplicating functions into macro expansions
                 if matches!(
-                    loc.def.kind,
+                    def.kind,
                     MacroDefKind::BuiltInAttr(expander, _)
                         if expander.is_test() || expander.is_bench()
                 ) {
                     return recollect_without(self);
                 }

-                if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+                if let MacroDefKind::ProcMacro(exp, ..) = def.kind {
                     if exp.is_dummy() {
                         // If there's no expander for the proc macro (e.g.
                         // because proc macros are disabled, or building the
@@ -1337,8 +1336,8 @@ impl DefCollector<'_> {
                         // expansion like we would if it was disabled
                         self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
                             directive.module_id,
-                            loc.kind,
-                            loc.def.krate,
+                            self.db.lookup_intern_macro_call(call_id).kind,
+                            def.krate,
                         ));

                         return recollect_without(self);
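
Beyond the type change, these hunks also reduce interning-table traffic on the hot path: the unconditional `lookup_intern_macro_call` is gone, and the lookup is repeated only inside the rarely taken diagnostic branches, with everything else read from the already-resolved `def`. A hypothetical sketch of that shape (names invented for illustration):

// Hypothetical illustration: keep the cheap, already-available data on the
// hot path and defer the expensive lookup to the cold error branches.
#[derive(Clone, Copy)]
struct Def {
    krate: u32,
}

fn lookup_kind(call_id: u32) -> String {
    // stands in for self.db.lookup_intern_macro_call(call_id).kind
    format!("kind-of-{call_id}")
}

fn collect(call_id: u32, def: Def, expansion_enabled: bool) -> Result<(), (String, u32)> {
    if !expansion_enabled {
        // cold path: pay for the lookup only when emitting a diagnostic
        return Err((lookup_kind(call_id), def.krate));
    }
    // hot path: no interning lookup at all
    Ok(())
}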

View File

@@ -7,7 +7,7 @@ use either::Either;
 use intern::Interned;
 use mbe::{syntax_node_to_token_tree, DelimiterKind, Punct};
 use smallvec::{smallvec, SmallVec};
-use span::SyntaxContextId;
+use span::Span;
 use syntax::{ast, match_ast, AstNode, AstToken, SmolStr, SyntaxNode};
 use triomphe::Arc;
@@ -53,7 +53,7 @@ impl RawAttrs {
                     id,
                     input: Some(Interned::new(AttrInput::Literal(SmolStr::new(doc)))),
                     path: Interned::new(ModPath::from(crate::name!(doc))),
-                    ctxt: span_map.span_for_range(comment.syntax().text_range()).ctx,
+                    span: span_map.span_for_range(comment.syntax().text_range()),
                 }),
             });
         let entries: Arc<[Attr]> = Arc::from_iter(entries);
@@ -120,7 +120,7 @@ impl RawAttrs {
                 let attrs =
                     parts.enumerate().take(1 << AttrId::CFG_ATTR_BITS).filter_map(|(idx, attr)| {
                         let tree = Subtree {
-                            delimiter: tt::Delimiter::dummy_invisible(),
+                            delimiter: tt::Delimiter::invisible_spanned(attr.first()?.first_span()),
                             token_trees: attr.to_vec(),
                         };
                         Attr::from_tt(db, &tree, index.with_cfg_attr(idx))
@@ -177,7 +177,7 @@ pub struct Attr {
     pub id: AttrId,
     pub path: Interned<ModPath>,
     pub input: Option<Interned<AttrInput>>,
-    pub ctxt: SyntaxContextId,
+    pub span: Span,
 }

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -206,6 +206,7 @@ impl Attr {
         id: AttrId,
     ) -> Option<Attr> {
         let path = Interned::new(ModPath::from_src(db, ast.path()?, span_map)?);
+        let span = span_map.span_for_range(ast.syntax().text_range());
         let input = if let Some(ast::Expr::Literal(lit)) = ast.expr() {
             let value = match lit.kind() {
                 ast::LiteralKind::String(string) => string.value()?.into(),
@@ -213,12 +214,12 @@ impl Attr {
             };
             Some(Interned::new(AttrInput::Literal(value)))
         } else if let Some(tt) = ast.token_tree() {
-            let tree = syntax_node_to_token_tree(tt.syntax(), span_map);
+            let tree = syntax_node_to_token_tree(tt.syntax(), span_map, span);
             Some(Interned::new(AttrInput::TokenTree(Box::new(tree))))
         } else {
             None
         };
-        Some(Attr { id, path, input, ctxt: span_map.span_for_range(ast.syntax().text_range()).ctx })
+        Some(Attr { id, path, input, span })
     }

     fn from_tt(db: &dyn ExpandDatabase, tt: &tt::Subtree, id: AttrId) -> Option<Attr> {
@@ -266,7 +267,7 @@ impl Attr {
     pub fn parse_path_comma_token_tree<'a>(
         &'a self,
         db: &'a dyn ExpandDatabase,
-    ) -> Option<impl Iterator<Item = (ModPath, SyntaxContextId)> + 'a> {
+    ) -> Option<impl Iterator<Item = (ModPath, Span)> + 'a> {
         let args = self.token_tree_value()?;

         if args.delimiter.kind != DelimiterKind::Parenthesis {
@@ -282,7 +283,7 @@ impl Attr {
                 // FIXME: This is necessarily a hack. It'd be nice if we could avoid allocation
                 // here or maybe just parse a mod path from a token tree directly
                 let subtree = tt::Subtree {
-                    delimiter: tt::Delimiter::dummy_invisible(),
+                    delimiter: tt::Delimiter::invisible_spanned(tts.first()?.first_span()),
                     token_trees: tts.to_vec(),
                 };
                 let (parse, span_map) =
@@ -294,7 +295,7 @@ impl Attr {
                     return None;
                 }
                 let path = meta.path()?;
-                let call_site = span_map.span_at(path.syntax().text_range().start()).ctx;
+                let call_site = span_map.span_at(path.syntax().text_range().start());
                 Some((
                     ModPath::from_src(db, path, SpanMapRef::ExpansionSpanMap(&span_map))?,
                     call_site,
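
Two hunks here source the invisible delimiter's span from the first token of the slice being wrapped (`attr.first()?.first_span()` and `tts.first()?.first_span()`), so the synthetic subtree points at real source; the `?` also makes an empty slice bail out of the surrounding combinator early. A sketch of that rule with assumed types:

// Sketch (assumed types): wrap a token slice in an invisible subtree whose
// delimiter borrows the first token's span; empty input yields None, which
// is what the `?` on `first()` gives the surrounding filter_map/iterator.
#[derive(Clone, Copy, Debug)]
struct Span(u32);

#[derive(Clone, Debug)]
struct Token {
    text: String,
    span: Span,
}

#[derive(Debug)]
struct Subtree {
    delimiter_span: Span, // spanned invisible delimiter, not a dummy
    token_trees: Vec<Token>,
}

fn wrap_invisible(tts: &[Token]) -> Option<Subtree> {
    Some(Subtree {
        delimiter_span: tts.first()?.span,
        token_trees: tts.to_vec(),
    })
}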

View File

@@ -1,6 +1,5 @@
 //! Builtin attributes.

-use span::{FileId, MacroCallId, Span, SyntaxContextId, ROOT_ERASED_FILE_AST_ID};
-use syntax::{TextRange, TextSize};
+use span::{MacroCallId, Span};

 use crate::{db::ExpandDatabase, name, tt, ExpandResult, MacroCallKind};
@@ -102,7 +101,12 @@ fn derive_attr_expand(
         MacroCallKind::Attr { attr_args: Some(attr_args), .. } if loc.def.is_attribute_derive() => {
             attr_args
         }
-        _ => return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan::DUMMY)),
+        _ => {
+            return ExpandResult::ok(tt::Subtree::empty(tt::DelimSpan {
+                open: loc.call_site,
+                close: loc.call_site,
+            }))
+        }
     };
     pseudo_derive_attr_expansion(tt, derives, loc.call_site)
 }
@@ -110,20 +114,13 @@ fn derive_attr_expand(
 pub fn pseudo_derive_attr_expansion(
     tt: &tt::Subtree,
     args: &tt::Subtree,
-    call_site: SyntaxContextId,
+    call_site: Span,
 ) -> ExpandResult<tt::Subtree> {
     let mk_leaf = |char| {
         tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
             char,
             spacing: tt::Spacing::Alone,
-            span: Span {
-                range: TextRange::empty(TextSize::new(0)),
-                anchor: span::SpanAnchor {
-                    file_id: FileId::BOGUS,
-                    ast_id: ROOT_ERASED_FILE_AST_ID,
-                },
-                ctx: call_site,
-            },
+            span: call_site,
         }))
     };
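
With a full `Span` in hand, the pseudo-derive expander no longer has to assemble a bogus span from `FileId::BOGUS` and an empty range; every synthesized punctuation leaf simply reuses the call-site span. A reduced sketch (types simplified):

// Reduced sketch (simplified types) of the mk_leaf closure above: every
// synthesized leaf is attributed to the macro call site.
#[derive(Clone, Copy, Debug)]
struct Span(u32);

#[derive(Debug)]
struct Punct {
    char: char,
    span: Span,
}

fn pseudo_derive_leaves(call_site: Span) -> Vec<Punct> {
    let mk_leaf = |char| Punct { char, span: call_site };
    vec![mk_leaf('#'), mk_leaf('['), mk_leaf(']')]
}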

View File

@@ -246,7 +246,7 @@ fn parse_adt(
                 match this {
                     Some(it) => {
                         param_type_set.insert(it.as_name());
-                        mbe::syntax_node_to_token_tree(it.syntax(), tm)
+                        mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)
                     }
                     None => {
                         tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
@@ -254,15 +254,15 @@ fn parse_adt(
                 }
             };
             let bounds = match &param {
-                ast::TypeOrConstParam::Type(it) => {
-                    it.type_bound_list().map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
-                }
+                ast::TypeOrConstParam::Type(it) => it
+                    .type_bound_list()
+                    .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site)),
                 ast::TypeOrConstParam::Const(_) => None,
             };
             let ty = if let ast::TypeOrConstParam::Const(param) = param {
                 let ty = param
                     .ty()
-                    .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm))
+                    .map(|ty| mbe::syntax_node_to_token_tree(ty.syntax(), tm, call_site))
                     .unwrap_or_else(|| {
                         tt::Subtree::empty(::tt::DelimSpan { open: call_site, close: call_site })
                     });
@@ -298,7 +298,7 @@ fn parse_adt(
             let name = p.path()?.qualifier()?.as_single_name_ref()?.as_name();
             param_type_set.contains(&name).then_some(p)
         })
-        .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm))
+        .map(|it| mbe::syntax_node_to_token_tree(it.syntax(), tm, call_site))
         .collect();
     let name_token = name_to_token(tm, name)?;
     Ok(BasicAdtInfo { name: name_token, shape, param_types, associated_types })

View File

@@ -154,7 +154,7 @@ fn line_expand(
     // Note that `line!` and `column!` will never be implemented properly, as they are by definition
     // not incremental
     ExpandResult::ok(tt::Subtree {
-        delimiter: tt::Delimiter::dummy_invisible(),
+        delimiter: tt::Delimiter::invisible_spanned(span),
         token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
             text: "0u32".into(),
             span,
@@ -201,12 +201,12 @@ fn assert_expand(
     tt: &tt::Subtree,
     span: Span,
 ) -> ExpandResult<tt::Subtree> {
-    let args = parse_exprs_with_sep(tt, ',');
+    let args = parse_exprs_with_sep(tt, ',', span);
     let dollar_crate = tt::Ident { text: SmolStr::new_inline("$crate"), span };
     let expanded = match &*args {
         [cond, panic_args @ ..] => {
             let comma = tt::Subtree {
-                delimiter: tt::Delimiter::dummy_invisible(),
+                delimiter: tt::Delimiter::invisible_spanned(span),
                 token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct {
                     char: ',',
                     spacing: tt::Spacing::Alone,
@@ -631,7 +631,7 @@ fn include_bytes_expand(
 ) -> ExpandResult<tt::Subtree> {
     // FIXME: actually read the file here if the user asked for macro expansion
     let res = tt::Subtree {
-        delimiter: tt::Delimiter::dummy_invisible(),
+        delimiter: tt::Delimiter::invisible_spanned(span),
         token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Literal(tt::Literal {
             text: r#"b"""#.into(),
             span,

View File

@@ -10,7 +10,7 @@ use either::Either;
 use limit::Limit;
 use mbe::{syntax_node_to_token_tree, ValueResult};
 use rustc_hash::FxHashSet;
-use span::SyntaxContextId;
+use span::{Span, SyntaxContextId};
 use syntax::{
     ast::{self, HasAttrs},
     AstNode, Parse, SyntaxError, SyntaxNode, SyntaxToken, T,
@@ -57,7 +57,8 @@ impl DeclarativeMacroExpander {
         tt: tt::Subtree,
         call_id: MacroCallId,
     ) -> ExpandResult<tt::Subtree> {
-        let toolchain = &db.crate_graph()[db.lookup_intern_macro_call(call_id).def.krate].toolchain;
+        let loc = db.lookup_intern_macro_call(call_id);
+        let toolchain = &db.crate_graph()[loc.def.krate].toolchain;
         let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
             REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches(
                 &base_db::Version {
@@ -71,7 +72,7 @@ impl DeclarativeMacroExpander {
         });
         match self.mac.err() {
             Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan::DUMMY),
+                tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }),
                 ExpandError::other(format!("invalid macro definition: {e}")),
             ),
             None => self
@@ -80,6 +81,7 @@ impl DeclarativeMacroExpander {
                     &tt,
                     |s| s.ctx = apply_mark(db, s.ctx, call_id, self.transparency),
                     new_meta_vars,
+                    loc.call_site,
                 )
                 .map_err(Into::into),
         }
@@ -90,6 +92,7 @@ impl DeclarativeMacroExpander {
         db: &dyn ExpandDatabase,
         tt: tt::Subtree,
         krate: CrateId,
+        call_site: Span,
     ) -> ExpandResult<tt::Subtree> {
         let toolchain = &db.crate_graph()[krate].toolchain;
         let new_meta_vars = toolchain.as_ref().map_or(false, |version| {
@@ -105,10 +108,10 @@ impl DeclarativeMacroExpander {
         });
         match self.mac.err() {
             Some(e) => ExpandResult::new(
-                tt::Subtree::empty(tt::DelimSpan::DUMMY),
+                tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }),
                 ExpandError::other(format!("invalid macro definition: {e}")),
             ),
-            None => self.mac.expand(&tt, |_| (), new_meta_vars).map_err(Into::into),
+            None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into),
         }
     }
 }
@@ -241,12 +244,13 @@ pub fn expand_speculative(
     // Build the subtree and token mapping for the speculative args
     let (mut tt, undo_info) = match loc.kind {
-        MacroCallKind::FnLike { .. } => {
-            (mbe::syntax_node_to_token_tree(speculative_args, span_map), SyntaxFixupUndoInfo::NONE)
-        }
+        MacroCallKind::FnLike { .. } => (
+            mbe::syntax_node_to_token_tree(speculative_args, span_map, loc.call_site),
+            SyntaxFixupUndoInfo::NONE,
+        ),
         MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
             let censor = censor_for_macro_input(&loc, speculative_args);
-            let mut fixups = fixup::fixup_syntax(span_map, speculative_args);
+            let mut fixups = fixup::fixup_syntax(span_map, speculative_args, loc.call_site);
             fixups.append.retain(|it, _| match it {
                 syntax::NodeOrToken::Node(it) => !censor.contains(it),
                 syntax::NodeOrToken::Token(_) => true,
@@ -258,6 +262,7 @@ pub fn expand_speculative(
                     span_map,
                     fixups.append,
                     fixups.remove,
+                    loc.call_site,
                 ),
                 fixups.undo_info,
             )
@@ -279,8 +284,9 @@ pub fn expand_speculative(
     }?;
     match attr.token_tree() {
         Some(token_tree) => {
-            let mut tree = syntax_node_to_token_tree(token_tree.syntax(), span_map);
-            tree.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+            let mut tree =
+                syntax_node_to_token_tree(token_tree.syntax(), span_map, loc.call_site);
+            tree.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);

             Some(tree)
         }
@@ -294,7 +300,7 @@ pub fn expand_speculative(
     // Otherwise the expand query will fetch the non speculative attribute args and pass those instead.
     let mut speculative_expansion = match loc.def.kind {
         MacroDefKind::ProcMacro(expander, ..) => {
-            tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+            tt.delimiter = tt::Delimiter::invisible_spanned(loc.call_site);
             let call_site = loc.span(db);
             expander.expand(
                 db,
@@ -315,9 +321,12 @@ pub fn expand_speculative(
             let adt = ast::Adt::cast(speculative_args.clone()).unwrap();
             expander.expand(db, actual_macro_call, &adt, span_map)
         }
-        MacroDefKind::Declarative(it) => {
-            db.decl_macro_expander(loc.krate, it).expand_unhygienic(db, tt, loc.def.krate)
-        }
+        MacroDefKind::Declarative(it) => db.decl_macro_expander(loc.krate, it).expand_unhygienic(
+            db,
+            tt,
+            loc.def.krate,
+            loc.call_site,
+        ),
         MacroDefKind::BuiltIn(it, _) => it.expand(db, actual_macro_call, &tt).map_err(Into::into),
         MacroDefKind::BuiltInEager(it, _) => {
             it.expand(db, actual_macro_call, &tt).map_err(Into::into)
@@ -467,12 +476,13 @@ fn macro_arg(
         MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(),
     };
     let (mut tt, undo_info) = match loc.kind {
-        MacroCallKind::FnLike { .. } => {
-            (mbe::syntax_node_to_token_tree(&syntax, map.as_ref()), SyntaxFixupUndoInfo::NONE)
-        }
+        MacroCallKind::FnLike { .. } => (
+            mbe::syntax_node_to_token_tree(&syntax, map.as_ref(), loc.call_site),
+            SyntaxFixupUndoInfo::NONE,
+        ),
         MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => {
             let censor = censor_for_macro_input(&loc, &syntax);
-            let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax);
+            let mut fixups = fixup::fixup_syntax(map.as_ref(), &syntax, loc.call_site);
             fixups.append.retain(|it, _| match it {
                 syntax::NodeOrToken::Node(it) => !censor.contains(it),
                 syntax::NodeOrToken::Token(_) => true,
@@ -484,6 +494,7 @@ fn macro_arg(
                     map.as_ref(),
                     fixups.append.clone(),
                     fixups.remove.clone(),
+                    loc.call_site,
                 );
                 reverse_fixups(&mut tt, &fixups.undo_info);
             }
@@ -493,6 +504,7 @@ fn macro_arg(
                     map,
                     fixups.append,
                     fixups.remove,
+                    loc.call_site,
                 ),
                 fixups.undo_info,
             )
@@ -501,7 +513,7 @@ fn macro_arg(
     if loc.def.is_proc_macro() {
         // proc macros expect their inputs without parentheses, MBEs expect it with them included
-        tt.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+        tt.delimiter.kind = tt::DelimiterKind::Invisible;
     }

     if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) {
@@ -605,7 +617,11 @@ fn decl_macro_expander(
         ast::Macro::MacroRules(macro_rules) => (
             match macro_rules.token_tree() {
                 Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+                    let tt = mbe::syntax_node_to_token_tree(
+                        arg.syntax(),
+                        map.as_ref(),
+                        map.span_for_range(macro_rules.macro_rules_token().unwrap().text_range()),
+                    );
                     let mac = mbe::DeclarativeMacro::parse_macro_rules(&tt, is_2021, new_meta_vars);
                     mac
                 }
@@ -619,7 +635,11 @@ fn decl_macro_expander(
         ast::Macro::MacroDef(macro_def) => (
             match macro_def.body() {
                 Some(arg) => {
-                    let tt = mbe::syntax_node_to_token_tree(arg.syntax(), map.as_ref());
+                    let tt = mbe::syntax_node_to_token_tree(
+                        arg.syntax(),
+                        map.as_ref(),
+                        map.span_for_range(macro_def.macro_token().unwrap().text_range()),
+                    );
                     let mac = mbe::DeclarativeMacro::parse_macro2(&tt, is_2021, new_meta_vars);
                     mac
                 }
@@ -671,7 +691,7 @@ fn macro_expand(
     let Some((macro_arg, undo_info)) = value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
                 token_trees: Vec::new(),
             }),
             // FIXME: We should make sure to enforce an invariant that invalid macro
@@ -730,7 +750,7 @@ fn macro_expand(
     // Skip checking token tree limit for include! macro call
     if !loc.def.is_include() {
         // Set a hard limit for the expanded tt
-        if let Err(value) = check_tt_count(&tt) {
+        if let Err(value) = check_tt_count(&tt, loc.call_site) {
            return value;
        }
    }
@@ -743,7 +763,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
     let Some((macro_arg, undo_info)) = db.macro_arg(id).value else {
         return ExpandResult {
             value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
                 token_trees: Vec::new(),
             }),
             // FIXME: We should make sure to enforce an invariant that invalid macro
@@ -777,7 +797,7 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<A
     );

     // Set a hard limit for the expanded tt
-    if let Err(value) = check_tt_count(&tt) {
+    if let Err(value) = check_tt_count(&tt, loc.call_site) {
         return value;
     }
@@ -800,12 +820,12 @@ fn token_tree_to_syntax_node(
     mbe::token_tree_to_syntax_node(tt, entry_point)
 }

-fn check_tt_count(tt: &tt::Subtree) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
+fn check_tt_count(tt: &tt::Subtree, call_site: Span) -> Result<(), ExpandResult<Arc<tt::Subtree>>> {
     let count = tt.count();
     if TOKEN_LIMIT.check(count).is_err() {
         Err(ExpandResult {
             value: Arc::new(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(call_site),
                 token_trees: vec![],
             }),
             err: Some(ExpandError::other(format!(
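
`check_tt_count` gets the call-site span threaded through so that the empty recovery subtree it returns on token-limit overflow is anchored at the offending macro call. A minimal sketch of the new shape (types simplified, limit value assumed):

// Minimal sketch (simplified types): the overflow recovery value now
// carries a call-site-anchored span instead of a dummy one.
#[derive(Clone, Copy, Debug)]
struct Span(u32);

const TOKEN_LIMIT: usize = 1_048_576; // illustrative value, not the real limit

fn check_tt_count(count: usize, call_site: Span) -> Result<(), (Span, String)> {
    if count > TOKEN_LIMIT {
        // the error value stands in for the empty, call-site-spanned subtree
        Err((call_site, format!("macro invocation exceeds token limit: produced {count} tokens")))
    } else {
        Ok(())
    }
}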

View File

@@ -19,7 +19,7 @@
 //!
 //! See the full discussion : <https://rust-lang.zulipchat.com/#narrow/stream/131828-t-compiler/topic/Eager.20expansion.20of.20built-in.20macros>
 use base_db::CrateId;
-use span::SyntaxContextId;
+use span::Span;
 use syntax::{ted, Parse, SyntaxElement, SyntaxNode, TextSize, WalkEvent};
 use triomphe::Arc;
@@ -37,7 +37,7 @@ pub fn expand_eager_macro_input(
     krate: CrateId,
     macro_call: InFile<ast::MacroCall>,
     def: MacroDefId,
-    call_site: SyntaxContextId,
+    call_site: Span,
     resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<MacroCallId>> {
     let ast_map = db.ast_id_map(macro_call.file_id);
@@ -82,9 +82,9 @@ pub fn expand_eager_macro_input(
         return ExpandResult { value: None, err };
     };

-    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map);
+    let mut subtree = mbe::syntax_node_to_token_tree(&expanded_eager_input, arg_map, call_site);

-    subtree.delimiter = crate::tt::Delimiter::DUMMY_INVISIBLE;
+    subtree.delimiter.kind = crate::tt::DelimiterKind::Invisible;

     let loc = MacroCallLoc {
         def,
@@ -102,7 +102,7 @@ fn lazy_expand(
     def: &MacroDefId,
     macro_call: InFile<ast::MacroCall>,
     krate: CrateId,
-    call_site: SyntaxContextId,
+    call_site: Span,
 ) -> ExpandResult<(InFile<Parse<SyntaxNode>>, Arc<ExpansionSpanMap>)> {
     let ast_id = db.ast_id_map(macro_call.file_id).ast_id(&macro_call.value);
@@ -122,7 +122,7 @@ fn eager_macro_recur(
     mut offset: TextSize,
     curr: InFile<SyntaxNode>,
     krate: CrateId,
-    call_site: SyntaxContextId,
+    call_site: Span,
     macro_resolver: &dyn Fn(ModPath) -> Option<MacroDefId>,
 ) -> ExpandResult<Option<(SyntaxNode, TextSize)>> {
     let original = curr.value.clone_for_update();

View File

@@ -4,7 +4,7 @@
 use la_arena::RawIdx;
 use rustc_hash::{FxHashMap, FxHashSet};
 use smallvec::SmallVec;
-use span::{ErasedFileAstId, FileId, SpanAnchor, SpanData};
+use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SpanData};
 use stdx::never;
 use syntax::{
     ast::{self, AstNode, HasLoopBody},
@@ -48,7 +48,11 @@ const FIXUP_DUMMY_AST_ID: ErasedFileAstId = ErasedFileAstId::from_raw(RawIdx::fr
 const FIXUP_DUMMY_RANGE: TextRange = TextRange::empty(TextSize::new(0));
 const FIXUP_DUMMY_RANGE_END: TextSize = TextSize::new(!0);

-pub(crate) fn fixup_syntax(span_map: SpanMapRef<'_>, node: &SyntaxNode) -> SyntaxFixups {
+pub(crate) fn fixup_syntax(
+    span_map: SpanMapRef<'_>,
+    node: &SyntaxNode,
+    call_site: Span,
+) -> SyntaxFixups {
     let mut append = FxHashMap::<SyntaxElement, _>::default();
     let mut remove = FxHashSet::<SyntaxNode>::default();
     let mut preorder = node.preorder();
@@ -69,7 +73,7 @@ pub(crate) fn fixup_syntax(
         if can_handle_error(&node) && has_error_to_handle(&node) {
             remove.insert(node.clone().into());
             // the node contains an error node, we have to completely replace it by something valid
-            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map);
+            let original_tree = mbe::syntax_node_to_token_tree(&node, span_map, call_site);
             let idx = original.len() as u32;
             original.push(original_tree);
             let replacement = Leaf::Ident(Ident {
@@ -358,6 +362,7 @@ fn reverse_fixups_(tt: &mut Subtree, undo_info: &[Subtree]) {
 mod tests {
     use base_db::FileId;
     use expect_test::{expect, Expect};
+    use syntax::TextRange;
     use triomphe::Arc;

     use crate::{
@@ -395,12 +400,17 @@ mod tests {
     fn check(ra_fixture: &str, mut expect: Expect) {
         let parsed = syntax::SourceFile::parse(ra_fixture);
         let span_map = SpanMap::RealSpanMap(Arc::new(RealSpanMap::absolute(FileId::from_raw(0))));
-        let fixups = super::fixup_syntax(span_map.as_ref(), &parsed.syntax_node());
+        let fixups = super::fixup_syntax(
+            span_map.as_ref(),
+            &parsed.syntax_node(),
+            span_map.span_for_range(TextRange::empty(0.into())),
+        );
         let mut tt = mbe::syntax_node_to_token_tree_modified(
             &parsed.syntax_node(),
             span_map.as_ref(),
             fixups.append,
             fixups.remove,
+            span_map.span_for_range(TextRange::empty(0.into())),
         );

         let actual = format!("{tt}\n");
@@ -420,8 +430,11 @@ mod tests {
         // the fixed-up + reversed version should be equivalent to the original input
         // modulo token IDs and `Punct`s' spacing.
-        let original_as_tt =
-            mbe::syntax_node_to_token_tree(&parsed.syntax_node(), span_map.as_ref());
+        let original_as_tt = mbe::syntax_node_to_token_tree(
+            &parsed.syntax_node(),
+            span_map.as_ref(),
+            span_map.span_for_range(TextRange::empty(0.into())),
+        );
         assert!(
             check_subtree_eq(&tt, &original_as_tt),
             "different token tree:\n{tt:?}\n\n{original_as_tt:?}"

View File

@@ -116,7 +116,7 @@ pub(super) fn apply_mark(
         return apply_mark_internal(db, ctxt, Some(call_id), transparency);
     }

-    let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site;
+    let call_site_ctxt = db.lookup_intern_macro_call(call_id).call_site.ctx;
     let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
         call_site_ctxt.normalize_to_macros_2_0(db)
     } else {

View File

@@ -116,18 +116,20 @@ pub struct MacroCallLoc {
     pub krate: CrateId,
     /// Some if this is a macro call for an eager macro. Note that this is `None`
     /// for the eager input macro file.
+    // FIXME: This seems bad to save in an interned structure
     eager: Option<Arc<EagerCallInfo>>,
     pub kind: MacroCallKind,
-    pub call_site: SyntaxContextId,
+    pub call_site: Span,
 }

+// FIXME: Might make sense to intern this? Given it's gonna be the same for a bunch of macro calls
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 pub struct MacroDefId {
     pub krate: CrateId,
     pub kind: MacroDefKind,
     pub local_inner: bool,
     pub allow_internal_unsafe: bool,
-    // pub def_site: SyntaxContextId,
+    // pub def_site: Span,
 }

 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
@@ -348,7 +350,7 @@ impl MacroDefId {
         db: &dyn db::ExpandDatabase,
         krate: CrateId,
         kind: MacroCallKind,
-        call_site: SyntaxContextId,
+        call_site: Span,
     ) -> MacroCallId {
         db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind, call_site })
     }
@@ -717,7 +719,7 @@ impl ExpansionInfo {
         let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| {
             (
                 Arc::new(tt::Subtree {
-                    delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                    delimiter: tt::Delimiter::invisible_spanned(loc.call_site),
                     token_trees: Vec::new(),
                 }),
                 SyntaxFixupUndoInfo::NONE,

View File

@@ -14,7 +14,7 @@ doctest = false
 [dependencies]
 cov-mark = "2.0.0-pre.1"
 itertools.workspace = true
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 bitflags.workspace = true
 smallvec.workspace = true
 ena = "0.14.0"

View File

@@ -14,7 +14,7 @@ doctest = false
 [dependencies]
 rustc-hash.workspace = true
 either.workspace = true
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 itertools.workspace = true
 smallvec.workspace = true
 triomphe.workspace = true

View File

@@ -20,7 +20,7 @@ rustc-hash.workspace = true
 once_cell = "1.17.0"
 either.workspace = true
 itertools.workspace = true
-arrayvec = "0.7.2"
+arrayvec.workspace = true
 indexmap.workspace = true
 memchr = "2.6.4"
 triomphe.workspace = true

View File

@@ -14,7 +14,7 @@ doctest = false
 [dependencies]
 cov-mark = "2.0.0-pre.1"
 crossbeam-channel = "0.5.5"
-arrayvec = "0.7.4"
+arrayvec.workspace = true
 either.workspace = true
 itertools.workspace = true
 tracing.workspace = true

View File

@@ -41,7 +41,7 @@ fn benchmark_expand_macro_rules() {
     invocations
         .into_iter()
         .map(|(id, tt)| {
-            let res = rules[&id].expand(&tt, |_| (), true);
+            let res = rules[&id].expand(&tt, |_| (), true, DUMMY);
             assert!(res.err.is_none());
             res.value.token_trees.len()
         })
@@ -67,8 +67,11 @@ fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree<DummyTestSpanData>
         .filter_map(ast::MacroRules::cast)
         .map(|rule| {
             let id = rule.name().unwrap().to_string();
-            let def_tt =
-                syntax_node_to_token_tree(rule.token_tree().unwrap().syntax(), DummyTestSpanMap);
+            let def_tt = syntax_node_to_token_tree(
+                rule.token_tree().unwrap().syntax(),
+                DummyTestSpanMap,
+                DUMMY,
+            );
             (id, def_tt)
         })
         .collect()
@@ -108,7 +111,7 @@ fn invocation_fixtures(
         for op in rule.lhs.iter() {
             collect_from_op(op, &mut subtree, &mut seed);
         }
-        if it.expand(&subtree, |_| (), true).err.is_none() {
+        if it.expand(&subtree, |_| (), true, DUMMY).err.is_none() {
             res.push((name.clone(), subtree));
             break;
         }

View File

@@ -17,6 +17,7 @@ pub(crate) fn expand_rules<S: Span>(
     marker: impl Fn(&mut S) + Copy,
     is_2021: bool,
     new_meta_vars: bool,
+    call_site: S,
 ) -> ExpandResult<tt::Subtree<S>> {
     let mut match_: Option<(matcher::Match<S>, &crate::Rule<S>)> = None;
     for rule in rules {
@@ -26,8 +27,13 @@ pub(crate) fn expand_rules<S: Span>(
             // If we find a rule that applies without errors, we're done.
             // Unconditionally returning the transcription here makes the
             // `test_repeat_bad_var` test fail.
-            let ExpandResult { value, err: transcribe_err } =
-                transcriber::transcribe(&rule.rhs, &new_match.bindings, marker, new_meta_vars);
+            let ExpandResult { value, err: transcribe_err } = transcriber::transcribe(
+                &rule.rhs,
+                &new_match.bindings,
+                marker,
+                new_meta_vars,
+                call_site,
+            );
             if transcribe_err.is_none() {
                 return ExpandResult::ok(value);
             }
@@ -46,11 +52,14 @@ pub(crate) fn expand_rules<S: Span>(
     if let Some((match_, rule)) = match_ {
         // if we got here, there was no match without errors
         let ExpandResult { value, err: transcribe_err } =
-            transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars);
+            transcriber::transcribe(&rule.rhs, &match_.bindings, marker, new_meta_vars, call_site);
         ExpandResult { value, err: match_.err.or(transcribe_err) }
     } else {
         ExpandResult::new(
-            tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] },
+            tt::Subtree {
+                delimiter: tt::Delimiter::invisible_spanned(call_site),
+                token_trees: vec![],
+            },
             ExpandError::NoMatchingRule,
         )
     }
@@ -122,6 +131,7 @@ enum Binding<S> {
 #[derive(Debug, Clone, PartialEq, Eq)]
 enum Fragment<S> {
+    Empty,
     /// token fragments are just copy-pasted into the output
     Tokens(tt::TokenTree<S>),
     /// Expr ast fragments are surrounded with `()` on insertion to preserve
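
The new `Fragment::Empty` variant exists because the old representation of an absent optional binding, an empty subtree with a `DUMMY_INVISIBLE` delimiter, has no honest span to carry once dummies are gone (see `push_optional` in the matcher below and the transcriber arms at the end of this diff). A sketch of the variant's role with simplified types:

// Sketch (simplified types): an absent optional binding becomes a
// first-class Empty fragment rather than an empty subtree that would need
// an invented span for its delimiter.
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq, Eq)]
enum Fragment {
    Empty,
    Tokens(String), // stands in for tt::TokenTree<S>
}

fn push_optional(bindings: &mut HashMap<String, Fragment>, name: &str) {
    // before: an empty subtree with a DUMMY invisible delimiter was inserted
    bindings.insert(name.to_string(), Fragment::Empty);
}

fn transcribe(fragment: &Fragment) -> String {
    match fragment {
        Fragment::Empty => String::new(), // expands to nothing; no span needed
        Fragment::Tokens(tts) => tts.clone(),
    }
}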

View File

@ -63,7 +63,7 @@ use std::rc::Rc;
use smallvec::{smallvec, SmallVec}; use smallvec::{smallvec, SmallVec};
use syntax::SmolStr; use syntax::SmolStr;
use tt::Span; use tt::{DelimSpan, Span};
use crate::{ use crate::{
expander::{Binding, Bindings, ExpandResult, Fragment}, expander::{Binding, Bindings, ExpandResult, Fragment},
@ -74,11 +74,7 @@ use crate::{
impl<S: Span> Bindings<S> { impl<S: Span> Bindings<S> {
fn push_optional(&mut self, name: &SmolStr) { fn push_optional(&mut self, name: &SmolStr) {
// FIXME: Do we have a better way to represent an empty token ? self.inner.insert(name.clone(), Binding::Fragment(Fragment::Empty));
// Insert an empty subtree for empty token
let tt =
tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] }.into();
self.inner.insert(name.clone(), Binding::Fragment(Fragment::Tokens(tt)));
} }
fn push_empty(&mut self, name: &SmolStr) { fn push_empty(&mut self, name: &SmolStr) {
@ -387,6 +383,7 @@ fn match_loop_inner<'t, S: Span>(
eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>, eof_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
error_items: &mut SmallVec<[MatchState<'t, S>; 1]>, error_items: &mut SmallVec<[MatchState<'t, S>; 1]>,
is_2021: bool, is_2021: bool,
delim_span: tt::DelimSpan<S>,
) { ) {
macro_rules! try_push { macro_rules! try_push {
($items: expr, $it:expr) => { ($items: expr, $it:expr) => {
@ -474,7 +471,7 @@ fn match_loop_inner<'t, S: Span>(
cur_items.push(new_item); cur_items.push(new_item);
} }
cur_items.push(MatchState { cur_items.push(MatchState {
dot: tokens.iter_delimited(None), dot: tokens.iter_delimited(delim_span),
stack: Default::default(), stack: Default::default(),
up: Some(Box::new(item)), up: Some(Box::new(item)),
sep: separator.clone(), sep: separator.clone(),
@ -489,7 +486,7 @@ fn match_loop_inner<'t, S: Span>(
                if let Ok(subtree) = src.clone().expect_subtree() {
                    if subtree.delimiter.kind == delimiter.kind {
                        item.stack.push(item.dot);
-                        item.dot = tokens.iter_delimited(Some(*delimiter));
+                        item.dot = tokens.iter_delimited_with(*delimiter);
                        cur_items.push(item);
                    }
                }
@@ -497,7 +494,7 @@ fn match_loop_inner<'t, S: Span>(
             OpDelimited::Op(Op::Var { kind, name, .. }) => {
                 if let &Some(kind) = kind {
                     let mut fork = src.clone();
-                    let match_res = match_meta_var(kind, &mut fork, is_2021);
+                    let match_res = match_meta_var(kind, &mut fork, is_2021, delim_span);
                     match match_res.err {
                         None => {
                             // Some meta variables are optional (e.g. vis)
@@ -611,6 +608,7 @@ fn match_loop_inner<'t, S: Span>(
 }

 fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021: bool) -> Match<S> {
+    let span = src.delimiter.delim_span();
     let mut src = TtIter::new(src);
     let mut stack: SmallVec<[TtIter<'_, S>; 1]> = SmallVec::new();
     let mut res = Match::default();
@@ -619,7 +617,7 @@ fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021:
     let mut bindings_builder = BindingsBuilder::default();

     let mut cur_items = smallvec![MatchState {
-        dot: pattern.iter_delimited(None),
+        dot: pattern.iter_delimited(span),
         stack: Default::default(),
         up: None,
         sep: None,
@@ -650,6 +648,7 @@ fn match_loop<S: Span>(pattern: &MetaTemplate<S>, src: &tt::Subtree<S>, is_2021:
             &mut eof_items,
             &mut error_items,
             is_2021,
+            span,
         );
         stdx::always!(cur_items.is_empty());
@@ -763,12 +762,13 @@ fn match_meta_var<S: Span>(
     kind: MetaVarKind,
     input: &mut TtIter<'_, S>,
     is_2021: bool,
+    delim_span: DelimSpan<S>,
 ) -> ExpandResult<Option<Fragment<S>>> {
     let fragment = match kind {
         MetaVarKind::Path => {
-            return input
-                .expect_fragment(parser::PrefixEntryPoint::Path)
-                .map(|it| it.map(tt::TokenTree::subtree_or_wrap).map(Fragment::Path));
+            return input.expect_fragment(parser::PrefixEntryPoint::Path).map(|it| {
+                it.map(|it| tt::TokenTree::subtree_or_wrap(it, delim_span)).map(Fragment::Path)
+            });
         }
         MetaVarKind::Ty => parser::PrefixEntryPoint::Ty,
         MetaVarKind::Pat if is_2021 => parser::PrefixEntryPoint::PatTop,
@@ -797,7 +797,7 @@ fn match_meta_var<S: Span>(
             return input.expect_fragment(parser::PrefixEntryPoint::Expr).map(|tt| {
                 tt.map(|tt| match tt {
                     tt::TokenTree::Leaf(leaf) => tt::Subtree {
-                        delimiter: tt::Delimiter::dummy_invisible(),
+                        delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
                         token_trees: vec![leaf.into()],
                     },
                     tt::TokenTree::Subtree(mut s) => {
@@ -831,7 +831,7 @@ fn match_meta_var<S: Span>(
                 match neg {
                     None => lit.into(),
                     Some(neg) => tt::TokenTree::Subtree(tt::Subtree {
-                        delimiter: tt::Delimiter::dummy_invisible(),
+                        delimiter: tt::Delimiter::invisible_spanned(*literal.span()),
                         token_trees: vec![neg, lit.into()],
                     }),
                 }
@@ -860,11 +860,14 @@ fn collect_vars<S: Span>(collector_fun: &mut impl FnMut(SmolStr), pattern: &Meta
     }
 }

 impl<S: Span> MetaTemplate<S> {
-    fn iter_delimited(&self, delimited: Option<tt::Delimiter<S>>) -> OpDelimitedIter<'_, S> {
+    fn iter_delimited_with(&self, delimiter: tt::Delimiter<S>) -> OpDelimitedIter<'_, S> {
+        OpDelimitedIter { inner: &self.0, idx: 0, delimited: delimiter }
+    }
+    fn iter_delimited(&self, span: tt::DelimSpan<S>) -> OpDelimitedIter<'_, S> {
         OpDelimitedIter {
             inner: &self.0,
             idx: 0,
-            delimited: delimited.unwrap_or(tt::Delimiter::DUMMY_INVISIBLE),
+            delimited: tt::Delimiter::invisible_delim_spanned(span),
         }
     }
 }
@@ -960,11 +963,13 @@ impl<S: Span> TtIter<'_, S> {
                 self.expect_lifetime()
             } else {
                 let puncts = self.expect_glued_punct()?;
+                let delimiter = tt::Delimiter {
+                    open: puncts.first().unwrap().span,
+                    close: puncts.last().unwrap().span,
+                    kind: tt::DelimiterKind::Invisible,
+                };
                 let token_trees = puncts.into_iter().map(|p| tt::Leaf::Punct(p).into()).collect();
-                Ok(tt::TokenTree::Subtree(tt::Subtree {
-                    delimiter: tt::Delimiter::dummy_invisible(),
-                    token_trees,
-                }))
+                Ok(tt::TokenTree::Subtree(tt::Subtree { delimiter, token_trees }))
             }
         } else {
             self.next().ok_or(()).cloned()
@@ -979,7 +984,11 @@ impl<S: Span> TtIter<'_, S> {
         let ident = self.expect_ident_or_underscore()?;

         Ok(tt::Subtree {
-            delimiter: tt::Delimiter::dummy_invisible(),
+            delimiter: tt::Delimiter {
+                open: punct.span,
+                close: ident.span,
+                kind: tt::DelimiterKind::Invisible,
+            },
             token_trees: vec![
                 tt::Leaf::Punct(*punct).into(),
                 tt::Leaf::Ident(ident.clone()).into(),
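
The matcher half of this commit replaces every dummy invisible delimiter with a span recovered from real tokens: the input subtree's own `DelimSpan` seeds `iter_delimited`, and wrapped leaves reuse their own spans. A minimal sketch of that wrapping rule, using only the `tt` API visible in this commit (`wrap_leaf` is an illustrative name, not part of the patch):

    // Wrap a single leaf in an invisible subtree whose delimiter reuses the
    // leaf's own span, mirroring the `invisible_spanned(*leaf.span())` calls above.
    fn wrap_leaf<S: tt::Span>(leaf: tt::Leaf<S>) -> tt::Subtree<S> {
        tt::Subtree {
            delimiter: tt::Delimiter::invisible_spanned(*leaf.span()),
            token_trees: vec![leaf.into()],
        }
    }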

View File

@@ -59,12 +59,12 @@ impl<S: Span> Bindings<S> {
                     token_trees: token_trees.clone(),
                 };
                 Ok(match f {
-                    Fragment::Tokens(_) => unreachable!(),
+                    Fragment::Tokens(_) | Fragment::Empty => unreachable!(),
                     Fragment::Expr(_) => Fragment::Expr,
                     Fragment::Path(_) => Fragment::Path,
                 }(subtree))
             }
-            Binding::Fragment(it @ Fragment::Tokens(_)) => Ok(it.clone()),
+            Binding::Fragment(it @ (Fragment::Tokens(_) | Fragment::Empty)) => Ok(it.clone()),
             // emit some reasonable default expansion for missing bindings,
             // this gives better recovery than emitting the `$fragment-name` verbatim
             Binding::Missing(it) => Ok({
@@ -87,10 +87,7 @@ impl<S: Span> Bindings<S> {
                     })),
                     // FIXME: Meta and Item should get proper defaults
                     MetaVarKind::Meta | MetaVarKind::Item | MetaVarKind::Tt | MetaVarKind::Vis => {
-                        Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree {
-                            delimiter: tt::Delimiter::DUMMY_INVISIBLE,
-                            token_trees: vec![],
-                        }))
+                        Fragment::Empty
                     }
                     MetaVarKind::Path
                     | MetaVarKind::Ty
@@ -132,8 +129,9 @@ pub(super) fn transcribe<S: Span>(
     bindings: &Bindings<S>,
     marker: impl Fn(&mut S) + Copy,
     new_meta_vars: bool,
+    call_site: S,
 ) -> ExpandResult<tt::Subtree<S>> {
-    let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars };
+    let mut ctx = ExpandCtx { bindings, nesting: Vec::new(), new_meta_vars, call_site };
     let mut arena: Vec<tt::TokenTree<S>> = Vec::new();
     expand_subtree(&mut ctx, template, None, &mut arena, marker)
 }
@@ -154,6 +152,7 @@ struct ExpandCtx<'a, S> {
     bindings: &'a Bindings<S>,
     nesting: Vec<NestingState>,
     new_meta_vars: bool,
+    call_site: S,
 }

 fn expand_subtree<S: Span>(
@@ -208,13 +207,13 @@ fn expand_subtree<S: Span>(
             Op::Var { name, id, .. } => {
                 let ExpandResult { value: fragment, err: e } = expand_var(ctx, name, *id, marker);
                 err = err.or(e);
-                push_fragment(arena, fragment);
+                push_fragment(ctx, arena, fragment);
             }
             Op::Repeat { tokens: subtree, kind, separator } => {
                 let ExpandResult { value: fragment, err: e } =
                     expand_repeat(ctx, subtree, *kind, separator, arena, marker);
                 err = err.or(e);
-                push_fragment(arena, fragment)
+                push_fragment(ctx, arena, fragment)
             }
             Op::Ignore { name, id } => {
                 // Expand the variable, but ignore the result. This registers the repetition count.
@@ -227,9 +226,7 @@ fn expand_subtree<S: Span>(
                 arena.push(
                     tt::Leaf::Literal(tt::Literal {
                         text: index.to_string().into(),
-                        // FIXME
-                        #[allow(deprecated)]
-                        span: S::DUMMY,
+                        span: ctx.call_site,
                     })
                     .into(),
                 );
@@ -242,9 +239,7 @@ fn expand_subtree<S: Span>(
                 arena.push(
                     tt::Leaf::Literal(tt::Literal {
                         text: length.to_string().into(),
-                        // FIXME
-                        #[allow(deprecated)]
-                        span: S::DUMMY,
+                        span: ctx.call_site,
                     })
                     .into(),
                 );
@@ -309,9 +304,7 @@ fn expand_subtree<S: Span>(
                 arena.push(
                     tt::Leaf::Literal(tt::Literal {
                         text: c.to_string().into(),
-                        // FIXME
-                        #[allow(deprecated)]
-                        span: S::DUMMY,
+                        span: ctx.call_site,
                     })
                     .into(),
                 );
@@ -322,7 +315,7 @@ fn expand_subtree<S: Span>(
     let tts = arena.drain(start_elements..).collect();
     ExpandResult {
         value: tt::Subtree {
-            delimiter: delimiter.unwrap_or_else(tt::Delimiter::dummy_invisible),
+            delimiter: delimiter.unwrap_or_else(|| tt::Delimiter::invisible_spanned(ctx.call_site)),
             token_trees: tts,
         },
         err,
@@ -355,7 +348,7 @@ fn expand_var<S: Span>(
             // ```
             // We just treat it a normal tokens
             let tt = tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+                delimiter: tt::Delimiter::invisible_spanned(id),
                 token_trees: vec![
                     tt::Leaf::from(tt::Punct { char: '$', spacing: tt::Spacing::Alone, span: id })
                         .into(),
@@ -367,12 +360,8 @@ fn expand_var<S: Span>(
         }
         Err(e) => ExpandResult {
             value: Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree::empty(tt::DelimSpan {
-                // FIXME
-                #[allow(deprecated)]
-                open: S::DUMMY,
-                // FIXME
-                #[allow(deprecated)]
-                close: S::DUMMY,
+                open: ctx.call_site,
+                close: ctx.call_site,
             }))),
             err: Some(e),
         },
@@ -416,7 +405,7 @@ fn expand_repeat<S: Span>(
         return ExpandResult {
             value: Fragment::Tokens(
                 tt::Subtree {
-                    delimiter: tt::Delimiter::dummy_invisible(),
+                    delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
                     token_trees: vec![],
                 }
                 .into(),
@@ -430,7 +419,7 @@ fn expand_repeat<S: Span>(
             continue;
         }

-        t.delimiter = tt::Delimiter::DUMMY_INVISIBLE;
+        t.delimiter.kind = tt::DelimiterKind::Invisible;
         push_subtree(&mut buf, t);

         if let Some(sep) = separator {
@@ -464,7 +453,11 @@ fn expand_repeat<S: Span>(
     // Check if it is a single token subtree without any delimiter
    // e.g {Delimiter:None> ['>'] /Delimiter:None>}
-    let tt = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: buf }.into();
+    let tt = tt::Subtree {
+        delimiter: tt::Delimiter::invisible_spanned(ctx.call_site),
+        token_trees: buf,
+    }
+    .into();

     if RepeatKind::OneOrMore == kind && counter == 0 {
         return ExpandResult {
@@ -475,14 +468,19 @@ fn expand_repeat<S: Span>(
     ExpandResult { value: Fragment::Tokens(tt), err }
 }

-fn push_fragment<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, fragment: Fragment<S>) {
+fn push_fragment<S: Span>(
+    ctx: &ExpandCtx<'_, S>,
+    buf: &mut Vec<tt::TokenTree<S>>,
+    fragment: Fragment<S>,
+) {
     match fragment {
         Fragment::Tokens(tt::TokenTree::Subtree(tt)) => push_subtree(buf, tt),
         Fragment::Expr(sub) => {
             push_subtree(buf, sub);
         }
-        Fragment::Path(tt) => fix_up_and_push_path_tt(buf, tt),
+        Fragment::Path(tt) => fix_up_and_push_path_tt(ctx, buf, tt),
         Fragment::Tokens(tt) => buf.push(tt),
+        Fragment::Empty => (),
     }
 }
@@ -496,7 +494,11 @@ fn push_subtree<S>(buf: &mut Vec<tt::TokenTree<S>>, tt: tt::Subtree<S>) {
 /// Inserts the path separator `::` between an identifier and its following generic
 /// argument list, and then pushes into the buffer. See [`Fragment::Path`] for why
 /// we need this fixup.
-fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt::Subtree<S>) {
+fn fix_up_and_push_path_tt<S: Span>(
+    ctx: &ExpandCtx<'_, S>,
+    buf: &mut Vec<tt::TokenTree<S>>,
+    subtree: tt::Subtree<S>,
+) {
     stdx::always!(matches!(subtree.delimiter.kind, tt::DelimiterKind::Invisible));
     let mut prev_was_ident = false;
     // Note that we only need to fix up the top-level `TokenTree`s because the
@@ -513,9 +515,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt
                 tt::Leaf::Punct(tt::Punct {
                     char: ':',
                     spacing: tt::Spacing::Joint,
-                    // FIXME
-                    #[allow(deprecated)]
-                    span: S::DUMMY,
+                    span: ctx.call_site,
                 })
                 .into(),
             );
@@ -523,9 +523,7 @@ fn fix_up_and_push_path_tt<S: Span>(buf: &mut Vec<tt::TokenTree<S>>, subtree: tt
                 tt::Leaf::Punct(tt::Punct {
                     char: ':',
                     spacing: tt::Spacing::Alone,
-                    // FIXME
-                    #[allow(deprecated)]
-                    span: S::DUMMY,
+                    span: ctx.call_site,
                 })
                 .into(),
             );
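
On the transcriber side, every token the expansion has to invent (`${index()}` and `${length()}` counts, the `::` path fixup, error-recovery subtrees) is now spanned with the span of the macro call, threaded through `ExpandCtx` as `call_site`. A reduced sketch of the pattern (`synth_literal` is an illustrative helper, not part of the patch):

    // Synthesize a literal token during transcription; it carries the
    // call-site span where the old code used the deprecated `S::DUMMY`.
    fn synth_literal<S: tt::Span>(call_site: S, text: &str) -> tt::Leaf<S> {
        tt::Leaf::Literal(tt::Literal { text: text.into(), span: call_site })
    }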

View File

@@ -252,8 +252,9 @@ impl<S: Span> DeclarativeMacro<S> {
         tt: &tt::Subtree<S>,
         marker: impl Fn(&mut S) + Copy,
         new_meta_vars: bool,
+        call_site: S,
     ) -> ExpandResult<tt::Subtree<S>> {
-        expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars)
+        expander::expand_rules(&self.rules, &tt, marker, self.is_2021, new_meta_vars, call_site)
     }
 }
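
Callers of `DeclarativeMacro::expand` must now pass the call-site span explicitly. A hypothetical caller, assuming `DeclarativeMacro` and `ExpandResult` are in scope (names are illustrative, not taken from this diff):

    // Expand a rule set against an argument token tree, seeding any
    // synthesized tokens with `call_site`.
    fn expand_at<S: tt::Span>(
        mac: &DeclarativeMacro<S>,
        arg: &tt::Subtree<S>,
        call_site: S,
    ) -> ExpandResult<tt::Subtree<S>> {
        // `|_span| ()` is a no-op marker; `false` opts out of new meta-var syntax
        mac.expand(arg, |_span| (), false, call_site)
    }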

View File

@@ -11,7 +11,7 @@ use syntax::{
 };
 use tt::{
     buffer::{Cursor, TokenBuffer},
-    Span, SyntaxContext,
+    Span,
 };

 use crate::{to_parser_input::to_parser_input, tt_iter::TtIter};
@@ -37,7 +37,6 @@ impl<S: Span, SM: SpanMapper<S>> SpanMapper<S> for &SM {
 /// Dummy things for testing where spans don't matter.
 pub(crate) mod dummy_test_span_utils {
-    use tt::SyntaxContext;

     use super::*;
@@ -53,9 +52,6 @@ pub(crate) mod dummy_test_span_utils {
     #[derive(Debug, Copy, Clone, PartialEq, Eq)]
     pub struct DummyTestSyntaxContext;

-    impl SyntaxContext for DummyTestSyntaxContext {
-        const DUMMY: Self = DummyTestSyntaxContext;
-    }

     pub struct DummyTestSpanMap;
@@ -78,13 +74,14 @@ pub(crate) mod dummy_test_span_utils {
 pub fn syntax_node_to_token_tree<Ctx, SpanMap>(
     node: &SyntaxNode,
     map: SpanMap,
+    span: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
     SpanMap: SpanMapper<SpanData<Ctx>>,
 {
-    let mut c = Converter::new(node, map, Default::default(), Default::default());
+    let mut c = Converter::new(node, map, Default::default(), Default::default(), span);
     convert_tokens(&mut c)
 }
@@ -96,13 +93,14 @@ pub fn syntax_node_to_token_tree_modified<Ctx, SpanMap>(
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<SpanData<Ctx>>>>,
     remove: FxHashSet<SyntaxNode>,
+    call_site: SpanData<Ctx>,
 ) -> tt::Subtree<SpanData<Ctx>>
 where
     SpanMap: SpanMapper<SpanData<Ctx>>,
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
 {
-    let mut c = Converter::new(node, map, append, remove);
+    let mut c = Converter::new(node, map, append, remove, call_site);
     convert_tokens(&mut c)
 }
@@ -126,7 +124,7 @@ pub fn token_tree_to_syntax_node<Ctx>(
 ) -> (Parse<SyntaxNode>, SpanMap<SpanData<Ctx>>)
 where
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
 {
     let buffer = match tt {
         tt::Subtree {
@@ -163,7 +161,7 @@ pub fn parse_to_token_tree<Ctx>(
 ) -> Option<tt::Subtree<SpanData<Ctx>>>
 where
     SpanData<Ctx>: Span,
-    Ctx: SyntaxContext,
+    Ctx: Copy,
 {
     let lexed = parser::LexedStr::new(text);
     if lexed.errors().next().is_some() {
@@ -187,7 +185,11 @@ where
 }

 /// Split token tree with separate expr: $($e:expr)SEP*
-pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> {
+pub fn parse_exprs_with_sep<S: Span>(
+    tt: &tt::Subtree<S>,
+    sep: char,
+    span: S,
+) -> Vec<tt::Subtree<S>> {
     if tt.token_trees.is_empty() {
         return Vec::new();
     }
@@ -200,7 +202,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
         res.push(match expanded.value {
             None => break,
-            Some(tt) => tt.subtree_or_wrap(),
+            Some(tt) => tt.subtree_or_wrap(tt::DelimSpan { open: span, close: span }),
         });

         let mut fork = iter.clone();
@@ -212,7 +214,7 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::
     if iter.peek_n(0).is_some() {
         res.push(tt::Subtree {
-            delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+            delimiter: tt::Delimiter::invisible_spanned(span),
             token_trees: iter.cloned().collect(),
         });
     }
@@ -225,7 +227,10 @@ where
     C: TokenConverter<S>,
     S: Span,
 {
-    let entry = tt::Subtree { delimiter: tt::Delimiter::DUMMY_INVISIBLE, token_trees: vec![] };
+    let entry = tt::Subtree {
+        delimiter: tt::Delimiter::invisible_spanned(conv.call_site()),
+        token_trees: vec![],
+    };
     let mut stack = NonEmptyVec::new(entry);

     while let Some((token, abs_range)) = conv.bump() {
@@ -501,6 +506,8 @@ trait TokenConverter<S>: Sized {
     fn peek(&self) -> Option<Self::Token>;

     fn span_for(&self, range: TextRange) -> S;
+
+    fn call_site(&self) -> S;
 }

 impl<S, Ctx> SrcToken<RawConverter<'_, Ctx>, S> for usize {
@@ -531,7 +538,7 @@ impl<S: Span> SrcToken<StaticRawConverter<'_, S>, S> for usize {
     }
 }

-impl<Ctx: SyntaxContext> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
+impl<Ctx: Copy> TokenConverter<SpanData<Ctx>> for RawConverter<'_, Ctx>
 where
     SpanData<Ctx>: Span,
 {
@@ -568,6 +575,10 @@ where
     fn span_for(&self, range: TextRange) -> SpanData<Ctx> {
         SpanData { range, anchor: self.anchor, ctx: self.ctx }
     }
+
+    fn call_site(&self) -> SpanData<Ctx> {
+        SpanData { range: TextRange::empty(0.into()), anchor: self.anchor, ctx: self.ctx }
+    }
 }

 impl<S> TokenConverter<S> for StaticRawConverter<'_, S>
@@ -603,6 +614,10 @@ where
     fn span_for(&self, _: TextRange) -> S {
         self.span
     }
+
+    fn call_site(&self) -> S {
+        self.span
+    }
 }

 struct Converter<SpanMap, S> {
@@ -615,6 +630,7 @@ struct Converter<SpanMap, S> {
     map: SpanMap,
     append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
     remove: FxHashSet<SyntaxNode>,
+    call_site: S,
 }

 impl<SpanMap, S> Converter<SpanMap, S> {
@@ -623,6 +639,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
         map: SpanMap,
         append: FxHashMap<SyntaxElement, Vec<tt::Leaf<S>>>,
         remove: FxHashSet<SyntaxNode>,
+        call_site: S,
     ) -> Self {
         let mut this = Converter {
             current: None,
@@ -632,6 +649,7 @@ impl<SpanMap, S> Converter<SpanMap, S> {
             map,
             append,
             remove,
+            call_site,
             current_leafs: vec![],
         };
         let first = this.next_token();
@@ -791,6 +809,9 @@ where
     fn span_for(&self, range: TextRange) -> S {
         self.map.span_for(range)
     }
+    fn call_site(&self) -> S {
+        self.call_site
+    }
 }

 struct TtTreeSink<'a, Ctx>
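
The bridge now asks every `TokenConverter` for a call-site span up front, so even the synthetic root subtree of a conversion carries a real location, and `parse_exprs_with_sep` takes the span it should stamp on wrapper subtrees. A hypothetical use of the new signature, reusing the input's opening delimiter span:

    // Split a subtree on commas; wrapper subtrees created while splitting
    // carry the span of the input's opening delimiter.
    fn split_on_commas<S: tt::Span>(subtree: &tt::Subtree<S>) -> Vec<tt::Subtree<S>> {
        parse_exprs_with_sep(subtree, ',', subtree.delimiter.open)
    }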

View File

@@ -7,11 +7,11 @@ use tt::{
     Leaf, Punct, Spacing,
 };

-use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap};
+use crate::{syntax_node_to_token_tree, DummyTestSpanData, DummyTestSpanMap, DUMMY};

 fn check_punct_spacing(fixture: &str) {
     let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
-    let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap);
+    let subtree = syntax_node_to_token_tree(source_file.syntax(), DummyTestSpanMap, DUMMY);
     let mut annotations: HashMap<_, _> = extract_annotations(fixture)
         .into_iter()
         .map(|(range, annotation)| {

View File

@@ -176,10 +176,10 @@ impl<'a, S: Span> TtIter<'a, S> {
         }
         self.inner = self.inner.as_slice()[res.len()..].iter();

-        let res = match res.len() {
-            0 | 1 => res.pop(),
-            _ => Some(tt::TokenTree::Subtree(tt::Subtree {
-                delimiter: tt::Delimiter::DUMMY_INVISIBLE,
+        let res = match &*res {
+            [] | [_] => res.pop(),
+            [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree {
+                delimiter: tt::Delimiter::invisible_spanned(first.first_span()),
                 token_trees: res,
             })),
         };
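
The `TtIter` change swaps a length match for a slice pattern so the first token can lend its span to the invisible wrapper before the vector is moved into it. Reduced to its essentials (a sketch relying on the `first_span` helper this commit adds to the `tt` crate):

    fn wrap_tokens<S: tt::Span>(res: Vec<tt::TokenTree<S>>) -> Option<tt::TokenTree<S>> {
        match &*res {
            // (the real code also returns a lone token unwrapped via `res.pop()`)
            [] => None,
            // copy the first token's span out, then move `res` into the subtree
            [first, ..] => Some(tt::TokenTree::Subtree(tt::Subtree {
                delimiter: tt::Delimiter::invisible_spanned(first.first_span()),
                token_trees: res,
            })),
        }
    }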

View File

@@ -54,9 +54,7 @@ impl std::fmt::Debug for TokenId {
     }
 }

-impl tt::Span for TokenId {
-    const DUMMY: Self = TokenId(!0);
-}
+impl tt::Span for TokenId {}

 #[derive(Serialize, Deserialize, Debug)]
 pub struct FlatTree {
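
With the `DUMMY` constant gone (see the `tt` changes below), `tt::Span` is a plain marker trait, which is why the wire format's `TokenId` now implements it with an empty impl. Any type meeting the supertraits can do the same; a hypothetical example:

    // Debug + Copy + Sized + Eq are all that `tt::Span` requires now.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct MySpan(u32);

    impl tt::Span for MySpan {}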

View File

@@ -209,7 +209,7 @@ mod tests {
     use super::*;

     use cfg::CfgExpr;
-    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap};
+    use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY};
     use syntax::{
         ast::{self, AstNode},
         SmolStr,
@@ -219,7 +219,7 @@ mod tests {
         let cfg_expr = {
             let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
             let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
-            let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap);
+            let tt = syntax_node_to_token_tree(tt.syntax(), &DummyTestSpanMap, DUMMY);
             CfgExpr::parse(&tt)
         };

View File

@@ -11,34 +11,10 @@ use stdx::impl_from;
 pub use smol_str::SmolStr;
 pub use text_size::{TextRange, TextSize};

-pub trait Span: std::fmt::Debug + Copy + Sized + Eq {
-    // FIXME: Should not exist. Dummy spans will always be wrong if they leak somewhere. Instead,
-    // the call site or def site spans should be used in relevant places, its just that we don't
-    // expose those everywhere in the yet.
-    #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
-    const DUMMY: Self;
-}
-
-pub trait SyntaxContext: std::fmt::Debug + Copy + Sized + Eq {
-    #[deprecated = "dummy spans will panic if surfaced incorrectly, as such they should be replaced appropriately"]
-    const DUMMY: Self;
-}
-
-impl<Ctx: SyntaxContext> Span for span::SpanData<Ctx> {
-    #[allow(deprecated)]
-    const DUMMY: Self = span::SpanData {
-        range: TextRange::empty(TextSize::new(0)),
-        anchor: span::SpanAnchor {
-            file_id: span::FileId::BOGUS,
-            ast_id: span::ROOT_ERASED_FILE_AST_ID,
-        },
-        ctx: Ctx::DUMMY,
-    };
-}
-
-impl SyntaxContext for span::SyntaxContextId {
-    const DUMMY: Self = Self::ROOT;
-}
+pub trait Span: std::fmt::Debug + Copy + Sized + Eq {}
+
+impl<Ctx> Span for span::SpanData<Ctx> where span::SpanData<Ctx>: std::fmt::Debug + Copy + Sized + Eq
+{}

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub enum TokenTree<S> {
@@ -54,15 +30,7 @@ impl<S: Span> TokenTree<S> {
         })
     }

-    pub fn subtree_or_wrap(self) -> Subtree<S> {
-        match self {
-            TokenTree::Leaf(_) => {
-                Subtree { delimiter: Delimiter::DUMMY_INVISIBLE, token_trees: vec![self] }
-            }
-            TokenTree::Subtree(s) => s,
-        }
-    }
-    pub fn subtree_or_wrap2(self, span: DelimSpan<S>) -> Subtree<S> {
+    pub fn subtree_or_wrap(self, span: DelimSpan<S>) -> Subtree<S> {
         match self {
             TokenTree::Leaf(_) => Subtree {
                 delimiter: Delimiter::invisible_delim_spanned(span),
@@ -71,6 +39,13 @@ impl<S: Span> TokenTree<S> {
             TokenTree::Subtree(s) => s,
         }
     }
+
+    pub fn first_span(&self) -> S {
+        match self {
+            TokenTree::Leaf(l) => *l.span(),
+            TokenTree::Subtree(s) => s.delimiter.open,
+        }
+    }
 }

 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -122,12 +97,6 @@ pub struct DelimSpan<S> {
     pub close: S,
 }

-impl<S: Span> DelimSpan<S> {
-    // FIXME should not exist
-    #[allow(deprecated)]
-    pub const DUMMY: Self = Self { open: S::DUMMY, close: S::DUMMY };
-}
-
 #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 pub struct Delimiter<S> {
     pub open: S,
@@ -136,16 +105,6 @@ pub struct Delimiter<S> {
 }

 impl<S: Span> Delimiter<S> {
-    // FIXME should not exist
-    #[allow(deprecated)]
-    pub const DUMMY_INVISIBLE: Self =
-        Self { open: S::DUMMY, close: S::DUMMY, kind: DelimiterKind::Invisible };
-
-    // FIXME should not exist
-    pub const fn dummy_invisible() -> Self {
-        Self::DUMMY_INVISIBLE
-    }
-
     pub const fn invisible_spanned(span: S) -> Self {
         Delimiter { open: span, close: span, kind: DelimiterKind::Invisible }
     }
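
With `subtree_or_wrap` and `subtree_or_wrap2` merged, wrapping a token tree always demands an explicit `DelimSpan`. A usage sketch combining the two helpers added here (`wrap_with_own_span` is an illustrative name, not part of the patch):

    // Give the invisible wrapper the token's own leading span on both ends.
    fn wrap_with_own_span<S: tt::Span>(token: tt::TokenTree<S>) -> tt::Subtree<S> {
        let span = token.first_span();
        token.subtree_or_wrap(tt::DelimSpan { open: span, close: span })
    }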