diff --git a/Cargo.lock b/Cargo.lock index 775231f3ea2..c71ca2f2126 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1255,6 +1255,9 @@ checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116" name = "proc-macro-api" version = "0.0.0" dependencies = [ + "base-db", + "indexmap", + "la-arena 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", "memmap2", "object 0.32.0", "paths", diff --git a/crates/base-db/src/fixture.rs b/crates/base-db/src/fixture.rs index 7236b56f6d4..cfba01a0329 100644 --- a/crates/base-db/src/fixture.rs +++ b/crates/base-db/src/fixture.rs @@ -543,6 +543,9 @@ fn expand( subtree: &Subtree, _: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<Subtree, ProcMacroExpansionError> { Ok(subtree.clone()) } @@ -557,6 +560,9 @@ fn expand( _: &Subtree, attrs: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<Subtree, ProcMacroExpansionError> { attrs .cloned() @@ -572,6 +578,9 @@ fn expand( input: &Subtree, _: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<Subtree, ProcMacroExpansionError> { fn traverse(input: &Subtree) -> Subtree { let mut token_trees = vec![]; @@ -599,6 +608,9 @@ fn expand( input: &Subtree, _: Option<&Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<Subtree, ProcMacroExpansionError> { return Ok(traverse(input)); diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 0b04a91f626..12b449932dd 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -262,6 +262,9 @@ fn expand( subtree: &tt::Subtree, attrs: Option<&tt::Subtree>, env: &Env, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> Result<tt::Subtree, ProcMacroExpansionError>; } diff --git a/crates/base-db/src/span.rs b/crates/base-db/src/span.rs index 607b8027ca9..d2b40ecdd21 100644 --- a/crates/base-db/src/span.rs +++ b/crates/base-db/src/span.rs @@ -38,6 +38,8 @@ impl SyntaxContextId { // currently (which kind of makes sense but we need it here!) pub const SELF_REF: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 1) }); + /// Used for syntax fixups + pub const FAKE: Self = SyntaxContextId(unsafe { InternId::new_unchecked(InternId::MAX - 2) }); pub fn is_root(self) -> bool { self == Self::ROOT diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index 355b82a5f42..bcbf4047caa 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -16,7 +16,7 @@ use std::{iter, ops::Range, sync}; -use base_db::{fixture::WithFixture, ProcMacro, SourceDatabase}; +use base_db::{fixture::WithFixture, span::SpanData, ProcMacro, SourceDatabase}; use expect_test::Expect; use hir_expand::{db::ExpandDatabase, span::SpanMapRef, HirFileIdExt, InFile, MacroFileId}; use stdx::format_to; @@ -307,6 +307,9 @@ fn expand( subtree: &Subtree, _: Option<&Subtree>, _: &base_db::Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<Subtree, ProcMacroExpansionError> { let (parse, _) = ::mbe::token_tree_to_syntax_node(subtree, ::mbe::TopEntryPoint::MacroItems); diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 601a754abbd..1d55aaf1703 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -233,7 +233,17 @@ pub fn expand_speculative( let speculative_expansion = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..)
=> { tt.delimiter = tt::Delimiter::UNSPECIFIED; - expander.expand(db, loc.def.krate, loc.krate, &tt, attr_arg.as_ref()) + let call_site = loc.span(db); + expander.expand( + db, + loc.def.krate, + loc.krate, + &tt, + attr_arg.as_ref(), + call_site, + call_site, + call_site, + ) } MacroDefKind::BuiltInAttr(BuiltinAttrExpander::Derive, _) => { pseudo_derive_attr_expansion(&tt, attr_arg.as_ref()?) @@ -398,17 +408,23 @@ fn macro_arg( MacroCallKind::Attr { ast_id, .. } => ast_id.to_ptr(db).to_node(&root).syntax().clone(), }; let censor = censor_for_macro_input(&loc, &syntax); - // let mut fixups = fixup::fixup_syntax(&node); - // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); - // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( - // &node, - // fixups.token_map, - // fixups.next_id, - // fixups.replace, - // fixups.append, - // ); - - let mut tt = mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor); + let mut tt = match loc.kind { + MacroCallKind::FnLike { .. } => { + mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor) + } + MacroCallKind::Derive { .. } | MacroCallKind::Attr { .. } => { + // let mut fixups = crate::fixup::fixup_syntax(&syntax); + // fixups.replace.extend(censor.into_iter().map(|node| (node.into(), Vec::new()))); + // let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications( + // &node, + // fixups.token_map, + // fixups.next_id, + // fixups.replace, + // fixups.append, + // ); + mbe::syntax_node_to_token_tree_censored(&syntax, map.as_ref(), censor) + } + }; if loc.def.is_proc_macro() { // proc macros expect their inputs without parentheses, MBEs expect it with them included @@ -658,8 +674,19 @@ fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult<Arc<tt::Subtree>> { None, }; - let ExpandResult { value: tt, err } = - expander.expand(db, loc.def.krate, loc.krate, &macro_arg, attr_arg); + let call_site = loc.span(db); + let ExpandResult { value: tt, err } = expander.expand( + db, + loc.def.krate, + loc.krate, + &macro_arg, + attr_arg, + // FIXME + call_site, + call_site, + // FIXME + call_site, + ); // Set a hard limit for the expanded tt if let Err(value) = check_tt_count(&tt) { diff --git a/crates/hir-expand/src/fixup.rs b/crates/hir-expand/src/fixup.rs index e6e8d8c0299..326ff1dec48 100644 --- a/crates/hir-expand/src/fixup.rs +++ b/crates/hir-expand/src/fixup.rs @@ -2,25 +2,30 @@ //! fix up syntax errors in the code we're passing to them. use std::mem; -use mbe::{SyntheticToken, SyntheticTokenId, TokenMap}; +use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, + FileId, +}; +use la_arena::RawIdx; +use mbe::TokenMap; use rustc_hash::FxHashMap; use smallvec::SmallVec; use syntax::{ ast::{self, AstNode, HasLoopBody}, - match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, + match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange, TextSize, }; -use tt::token_id::Subtree; +use tt::Spacing; + +use crate::tt::{Ident, Leaf, Punct, Subtree}; /// The result of calculating fixes for a syntax node -- a bunch of changes /// (appending to and replacing nodes), the information that is needed to /// reverse those changes afterwards, and a token map.
#[derive(Debug, Default)] pub(crate) struct SyntaxFixups { - pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>, - pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>, + pub(crate) append: FxHashMap<SyntaxElement, Vec<Leaf>>, + pub(crate) replace: FxHashMap<SyntaxElement, Vec<Leaf>>, pub(crate) undo_info: SyntaxFixupUndoInfo, - pub(crate) token_map: TokenMap, - pub(crate) next_id: u32, } /// This is the information needed to reverse the fixups. @@ -29,21 +34,25 @@ pub struct SyntaxFixupUndoInfo { original: Box<[Subtree]>, } -const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0); +// censoring -> just don't convert the node +// replacement -> censor + append +// append -> insert a fake node, here we need to assemble some dummy span that we can figure out how +// to remove later pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { let mut append = FxHashMap::<SyntaxElement, _>::default(); let mut replace = FxHashMap::<SyntaxElement, _>::default(); let mut preorder = node.preorder(); let mut original = Vec::new(); - let mut token_map = TokenMap::default(); - let mut next_id = 0; + let dummy_range = TextRange::empty(TextSize::new(0)); + let dummy_anchor = + SpanAnchor { file_id: FileId(!0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) }; + let fake_span = + SpanData { range: dummy_range, anchor: dummy_anchor, ctx: SyntaxContextId::FAKE }; while let Some(event) = preorder.next() { - let node = match event { - syntax::WalkEvent::Enter(node) => node, - syntax::WalkEvent::Leave(_) => continue, - }; + let syntax::WalkEvent::Enter(node) = event else { continue }; + /* TODO if can_handle_error(&node) && has_error_to_handle(&node) { // the node contains an error node, we have to completely replace it by something valid let (original_tree, new_tmap, new_next_id) = @@ -68,6 +77,7 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { preorder.skip_subtree(); continue; } + */ // In some other situations, we can fix things by just appending some tokens. let end_range = TextRange::empty(node.text_range().end()); match_ast!
{ @@ -76,36 +86,32 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.name_ref().is_none() { // incomplete field access: some_expr.| append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span + }), ]); } }, ast::ExprStmt(it) => { if it.semicolon_token().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::SEMICOLON, - text: ";".into(), - range: end_range, - id: EMPTY_ID, - }, + Leaf::Punct(Punct { + char: ';', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, ast::LetStmt(it) => { if it.semicolon_token().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::SEMICOLON, - text: ";".into(), - range: end_range, - id: EMPTY_ID, - }, + Leaf::Punct(Punct { + char: ';', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -117,28 +123,25 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue, }; append.insert(if_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span + }), ]); } if it.then_branch().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -150,46 +153,42 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue, }; append.insert(while_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID, - }, + span: fake_span + }), ]); } if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, ast::LoopExpr(it) => { if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -201,29 +200,26 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { None => continue }; append.insert(match_token.into(), vec![ - SyntheticToken { - kind: SyntaxKind::IDENT, + Leaf::Ident(Ident { text: "__ra_fixup".into(), - range: end_range, - id: EMPTY_ID - }, + span: fake_span + }), ]); } if it.match_arm_list().is_none() { // No match arms append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no? + Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -234,10 +230,15 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { }; let [pat, in_token, iter] = [ - (SyntaxKind::UNDERSCORE, "_"), - (SyntaxKind::IN_KW, "in"), - (SyntaxKind::IDENT, "__ra_fixup") - ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID}); + "_", + "in", + "__ra_fixup" + ].map(|text| + Leaf::Ident(Ident { + text: text.into(), + span: fake_span + }), + ); if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() { append.insert(for_token.into(), vec![pat, in_token, iter]); @@ -248,18 +249,17 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { if it.loop_body().is_none() { append.insert(node.clone().into(), vec![ - SyntheticToken { - kind: SyntaxKind::L_CURLY, - text: "{".into(), - range: end_range, - id: EMPTY_ID, - }, - SyntheticToken { - kind: SyntaxKind::R_CURLY, - text: "}".into(), - range: end_range, - id: EMPTY_ID, - }, + // FIXME: This should be a subtree no?
+ Leaf::Punct(Punct { + char: '{', + spacing: Spacing::Alone, + span: fake_span + }), + Leaf::Punct(Punct { + char: '}', + spacing: Spacing::Alone, + span: fake_span + }), ]); } }, @@ -270,8 +270,6 @@ pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups { SyntaxFixups { append, replace, - token_map, - next_id, undo_info: SyntaxFixupUndoInfo { original: original.into_boxed_slice() }, } } @@ -288,40 +286,33 @@ fn has_error_to_handle(node: &SyntaxNode) -> bool { has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c)) } -pub(crate) fn reverse_fixups( - tt: &mut Subtree, - token_map: &TokenMap, - undo_info: &SyntaxFixupUndoInfo, -) { +pub(crate) fn reverse_fixups(tt: &mut Subtree, undo_info: &SyntaxFixupUndoInfo) { let tts = std::mem::take(&mut tt.token_trees); tt.token_trees = tts .into_iter() + // delete all fake nodes .filter(|tt| match tt { - tt::TokenTree::Leaf(leaf) => { - token_map.synthetic_token_id(*leaf.span()) != Some(EMPTY_ID) - } - tt::TokenTree::Subtree(st) => { - token_map.synthetic_token_id(st.delimiter.open) != Some(EMPTY_ID) - } - }) - .flat_map(|tt| match tt { - tt::TokenTree::Subtree(mut tt) => { - reverse_fixups(&mut tt, token_map, undo_info); - SmallVec::from_const([tt.into()]) - } - tt::TokenTree::Leaf(leaf) => { - if let Some(id) = token_map.synthetic_token_id(*leaf.span()) { - let original = undo_info.original[id.0 as usize].clone(); - if original.delimiter.kind == tt::DelimiterKind::Invisible { - original.token_trees.into() - } else { - SmallVec::from_const([original.into()]) - } - } else { - SmallVec::from_const([leaf.into()]) - } - } + tt::TokenTree::Leaf(leaf) => leaf.span().ctx != SyntaxContextId::FAKE, + tt::TokenTree::Subtree(st) => st.delimiter.open.ctx != SyntaxContextId::FAKE, }) + // .flat_map(|tt| match tt { TODO + // tt::TokenTree::Subtree(mut tt) => { + // reverse_fixups(&mut tt, undo_info); + // SmallVec::from_const([tt.into()]) + // } + // tt::TokenTree::Leaf(leaf) => { + // if let Some(id) = leaf.span().anchor { + // let original = undo_info.original[id.0 as usize].clone(); + // if original.delimiter.kind == tt::DelimiterKind::Invisible { + // original.token_trees.into() + // } else { + // SmallVec::from_const([original.into()]) + // } + // } else { + // SmallVec::from_const([leaf.into()]) + // } + // } + // }) .collect(); } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 41675c630dc..04b5b7b0b6a 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -1,6 +1,6 @@ //! Proc Macro Expander stub -use base_db::{CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; +use base_db::{span::SpanData, CrateId, ProcMacroExpansionError, ProcMacroId, ProcMacroKind}; use stdx::never; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; @@ -33,6 +33,9 @@ pub fn expand( calling_crate: CrateId, tt: &tt::Subtree, attr_arg: Option<&tt::Subtree>, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> ExpandResult<tt::Subtree> { match self.proc_macro_id { ProcMacroId(DUMMY_ID) => { @@ -68,7 +71,8 @@ pub fn expand( let krate_graph = db.crate_graph(); // Proc macros have access to the environment variables of the invoking crate.
let env = &krate_graph[calling_crate].env; - match proc_macro.expander.expand(tt, attr_arg, env) { + match proc_macro.expander.expand(tt, attr_arg, env, def_site, call_site, mixed_site) + { Ok(t) => ExpandResult::ok(t), Err(err) => match err { // Don't discard the item in case something unexpected happened while expanding attributes diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 4d131909493..ed4175c4583 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -376,16 +376,16 @@ fn expand( subtree: &tt::Subtree, attrs: Option<&tt::Subtree>, env: &Env, + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, ) -> Result<tt::Subtree, ProcMacroExpansionError> { - let _ = (subtree, attrs, env); - - // let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); - // match self.0.expand(subtree, attrs, env) { - // Ok(Ok(subtree)) => Ok(subtree), - // Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), - // Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), - // } - todo!() + let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect(); + match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) { + Ok(Ok(subtree)) => Ok(subtree), + Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)), + Err(err) => Err(ProcMacroExpansionError::System(err.to_string())), + } } } @@ -399,6 +399,9 @@ fn expand( subtree: &tt::Subtree, _: Option<&tt::Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<tt::Subtree, ProcMacroExpansionError> { Ok(subtree.clone()) } @@ -414,6 +417,9 @@ fn expand( _: &tt::Subtree, _: Option<&tt::Subtree>, _: &Env, + _: SpanData, + _: SpanData, + _: SpanData, ) -> Result<tt::Subtree, ProcMacroExpansionError> { Ok(tt::Subtree::empty()) } diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index c8b326fa6c5..8aedd731406 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -51,7 +51,7 @@ macro_rules! binding_err { marker(&mut span); let subtree = tt::Subtree { delimiter: tt::Delimiter { - // TODO split span + // FIXME split span open: span, close: span, kind: delimiter.kind, diff --git a/crates/mbe/src/lib.rs b/crates/mbe/src/lib.rs index 1b13b39f017..fdf97ad538c 100644 --- a/crates/mbe/src/lib.rs +++ b/crates/mbe/src/lib.rs @@ -34,7 +34,8 @@ pub use crate::{ syntax_bridge::{ - map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, syntax_node_to_token_tree, + map_from_syntax_node, parse_exprs_with_sep, parse_to_token_tree, + parse_to_token_tree_static_span, syntax_node_to_token_tree, syntax_node_to_token_tree_censored, token_tree_to_syntax_node, SpanMapper, }, token_map::TokenMap, diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index 5d802ba86c0..c61c5262860 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -63,7 +63,7 @@ fn span_for( /// Convert the syntax node to a `TokenTree` (what macro /// will consume).
-/// TODO: Flesh out the doc comment more thoroughly +/// FIXME: Flesh out the doc comment more thoroughly pub fn syntax_node_to_token_tree<Anchor, Ctx, SpanMap>( node: &SyntaxNode, map: SpanMap, @@ -179,6 +179,19 @@ pub fn parse_to_token_tree( Some(convert_tokens(&mut conv)) } +/// Convert a string to a `TokenTree` +pub fn parse_to_token_tree_static_span<S>(span: S, text: &str) -> Option<tt::Subtree<S>> +where + S: Span, +{ + let lexed = parser::LexedStr::new(text); + if lexed.errors().next().is_some() { + return None; + } + let mut conv = StaticRawConverter { lexed, pos: 0, span }; + Some(convert_tokens(&mut conv)) +} + /// Split token tree with separate expr: $($e:expr)SEP* pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt::Subtree<S>> { if tt.token_trees.is_empty() { @@ -213,12 +226,10 @@ pub fn parse_exprs_with_sep<S: Span>(tt: &tt::Subtree<S>, sep: char) -> Vec<tt: -fn convert_tokens<Anchor, Ctx, C>(conv: &mut C) -> tt::Subtree<SpanData<Anchor, Ctx>> +fn convert_tokens<S, C>(conv: &mut C) -> tt::Subtree<S> where - C: TokenConverter<Anchor, Ctx>, - Ctx: SyntaxContext, - SpanData<Anchor, Ctx>: Span, - Anchor: Copy, + C: TokenConverter<S>, + S: Span, { let entry = tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: vec![] }; let mut stack = NonEmptyVec::new(entry); @@ -452,6 +463,12 @@ struct RawConverter<'a, Anchor> { pos: usize, anchor: Anchor, } +/// A raw token (straight from lexer) converter that gives every token the same span. +struct StaticRawConverter<'a, S> { + lexed: parser::LexedStr<'a>, + pos: usize, + span: S, +} trait SrcToken<Ctx>: std::fmt::Debug { fn kind(&self, ctx: &Ctx) -> SyntaxKind; fn to_char(&self, ctx: &Ctx) -> Option<char>; fn to_text(&self, ctx: &Ctx) -> SmolStr; } -trait TokenConverter<Anchor, Ctx>: Sized { +trait TokenConverter<S>: Sized { type Token: SrcToken<Self>; - fn convert_doc_comment( - &self, - token: &Self::Token, - span: SpanData<Anchor, Ctx>, - ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>>; + fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>>; fn bump(&mut self) -> Option<(Self::Token, TextRange)>; fn peek(&self) -> Option<Self::Token>; - fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx>; + fn span_for(&self, range: TextRange) -> S; } impl<Anchor> SrcToken<RawConverter<'_, Anchor>> for usize { @@ -491,7 +504,22 @@ fn to_text(&self, ctx: &RawConverter<'_, Anchor>) -> SmolStr { } } -impl<Anchor: Copy, Ctx> TokenConverter<Anchor, Ctx> for RawConverter<'_, Anchor> +impl<S: Span> SrcToken<StaticRawConverter<'_, S>> for usize { + fn kind(&self, ctx: &StaticRawConverter<'_, S>) -> SyntaxKind { + ctx.lexed.kind(*self) + } + + fn to_char(&self, ctx: &StaticRawConverter<'_, S>) -> Option<char> { + ctx.lexed.text(*self).chars().next() + } + + fn to_text(&self, ctx: &StaticRawConverter<'_, S>) -> SmolStr { + ctx.lexed.text(*self).into() + } +} + +impl<Anchor: Copy, Ctx: SyntaxContext> TokenConverter<SpanData<Anchor, Ctx>> + for RawConverter<'_, Anchor> where SpanData<Anchor, Ctx>: Span, { @@ -530,6 +558,41 @@ fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> { } } +impl<S> TokenConverter<S> for StaticRawConverter<'_, S> +where + S: Span, +{ + type Token = usize; + + fn convert_doc_comment(&self, &token: &usize, span: S) -> Option<Vec<tt::TokenTree<S>>> { + let text = self.lexed.text(token); + convert_doc_comment(&doc_comment(text), span) + } + + fn bump(&mut self) -> Option<(Self::Token, TextRange)> { + if self.pos == self.lexed.len() { + return None; + } + let token = self.pos; + self.pos += 1; + let range = self.lexed.text_range(token); + let range = TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?); + + Some((token, range)) + } + + fn peek(&self) -> Option<Self::Token> { + if self.pos == self.lexed.len() { + return None; + } + Some(self.pos) + } + + fn span_for(&self, _: TextRange) -> S { + self.span + } +} + struct Converter<SpanMap> { current: Option<SyntaxToken>, preorder: PreorderWithTokens, @@ -596,17 +659,13 @@ fn to_text(&self, _ctx:
&Converter<SpanMap>) -> SmolStr { } } -impl<Anchor: Copy, Ctx, SpanMap> TokenConverter<Anchor, Ctx> for Converter<SpanMap> +impl<S, SpanMap> TokenConverter<S> for Converter<SpanMap> where - SpanData<Anchor, Ctx>: Span, - SpanMap: SpanMapper<SpanData<Anchor, Ctx>>, + S: Span, + SpanMap: SpanMapper<S>, { type Token = SynToken; - fn convert_doc_comment( - &self, - token: &Self::Token, - span: SpanData<Anchor, Ctx>, - ) -> Option<Vec<tt::TokenTree<SpanData<Anchor, Ctx>>>> { + fn convert_doc_comment(&self, token: &Self::Token, span: S) -> Option<Vec<tt::TokenTree<S>>> { convert_doc_comment(token.token(), span) } @@ -661,7 +720,7 @@ fn peek(&self) -> Option<Self::Token> { Some(token) } - fn span_for(&self, range: TextRange) -> SpanData<Anchor, Ctx> { + fn span_for(&self, range: TextRange) -> S { self.map.span_for(range) } } diff --git a/crates/proc-macro-api/Cargo.toml b/crates/proc-macro-api/Cargo.toml index 4c87c89adde..2cbbc9489a2 100644 --- a/crates/proc-macro-api/Cargo.toml +++ b/crates/proc-macro-api/Cargo.toml @@ -25,6 +25,7 @@ tracing.workspace = true triomphe.workspace = true memmap2 = "0.5.4" snap = "1.1.0" +indexmap = "2.1.0" # local deps paths.workspace = true @@ -32,5 +33,7 @@ tt.workspace = true stdx.workspace = true profile.workspace = true text-size.workspace = true -# Intentionally *not* depend on anything salsa-related -# base-db.workspace = true +# Ideally this crate would not depend on salsa things, but we need span information here, which wraps +# InternIds for the syntax context +base-db.workspace = true +la-arena.workspace = true diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 7a3580f814e..9fc5a53d935 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -11,6 +11,8 @@ mod process; mod version; +use base_db::span::SpanData; +use indexmap::IndexSet; use paths::AbsPathBuf; use std::{fmt, io, sync::Mutex}; use triomphe::Arc; @@ -18,7 +20,7 @@ use serde::{Deserialize, Serialize}; use crate::{ - msg::{flat::SerializableSpan, ExpandMacro, FlatTree, PanicMessage}, + msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS}, process::ProcMacroProcessSrv, }; @@ -132,32 +134,49 @@ pub fn kind(&self) -> ProcMacroKind { self.kind } - pub fn expand<const L: usize, S: SerializableSpan<L>>( + pub fn expand( &self, - subtree: &tt::Subtree<S>, - attr: Option<&tt::Subtree<S>>, + subtree: &tt::Subtree<SpanData>, + attr: Option<&tt::Subtree<SpanData>>, env: Vec<(String, String)>, - ) -> Result<Result<tt::Subtree<S>, PanicMessage>, ServerError> { + def_site: SpanData, + call_site: SpanData, + mixed_site: SpanData, + ) -> Result<Result<tt::Subtree<SpanData>, PanicMessage>, ServerError> { let version = self.process.lock().unwrap_or_else(|e| e.into_inner()).version(); let current_dir = env .iter() .find(|(name, _)| name == "CARGO_MANIFEST_DIR") .map(|(_, value)| value.clone()); + let mut span_data_table = IndexSet::default(); + let def_site = span_data_table.insert_full(def_site).0; + let call_site = span_data_table.insert_full(call_site).0; + let mixed_site = span_data_table.insert_full(mixed_site).0; let task = ExpandMacro { - macro_body: FlatTree::new(subtree, version), + macro_body: FlatTree::new(subtree, version, &mut span_data_table), macro_name: self.name.to_string(), - attributes: attr.map(|subtree| FlatTree::new(subtree, version)), + attributes: attr.map(|subtree| FlatTree::new(subtree, version, &mut span_data_table)), lib: self.dylib_path.to_path_buf().into(), env, current_dir, + has_global_spans: ExpnGlobals { + serialize: version >= HAS_GLOBAL_SPANS, + def_site, + call_site, + mixed_site, + }, }; - let request = msg::Request::ExpandMacro(task); - let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?; + let response = self + .process + .lock() + .unwrap_or_else(|e| e.into_inner())
.send_task(msg::Request::ExpandMacro(task))?; + match response { msg::Response::ExpandMacro(it) => { - Ok(it.map(|tree| FlatTree::to_subtree(tree, version))) + Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table))) } msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) => { Err(ServerError { message: "unexpected response".to_string(), io: None }) diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs index 4e6984f61b7..ddac514ff70 100644 --- a/crates/proc-macro-api/src/msg.rs +++ b/crates/proc-macro-api/src/msg.rs @@ -10,21 +10,15 @@ use crate::ProcMacroKind; -pub use crate::msg::flat::FlatTree; +pub use crate::msg::flat::{FlatTree, TokenId}; // The versions of the server protocol pub const NO_VERSION_CHECK_VERSION: u32 = 0; pub const VERSION_CHECK_VERSION: u32 = 1; pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2; -/// This version changes how spans are encoded, kind of. Prior to this version, -/// spans were represented as a single u32 which effectively forced spans to be -/// token ids. Starting with this version, the span fields are still u32, -/// but if the size of the span is greater than 1 then the span data is encoded in -/// an additional vector where the span represents the offset into that vector. -/// This allows encoding bigger spans while supporting the previous versions. -pub const VARIABLE_SIZED_SPANS: u32 = 2; +pub const HAS_GLOBAL_SPANS: u32 = 3; -pub const CURRENT_API_VERSION: u32 = VARIABLE_SIZED_SPANS; +pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS; #[derive(Debug, Serialize, Deserialize)] pub enum Request { @@ -66,6 +60,24 @@ pub struct ExpandMacro { pub env: Vec<(String, String)>, pub current_dir: Option<String>, + /// marker for serde skip stuff + #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")] + pub has_global_spans: ExpnGlobals, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ExpnGlobals { + #[serde(skip_serializing)] + pub serialize: bool, + pub def_site: usize, + pub call_site: usize, + pub mixed_site: usize, +} + +impl ExpnGlobals { + fn skip_serializing_if(&self) -> bool { + !self.serialize + } } pub trait Message: Serialize + DeserializeOwned { @@ -120,38 +132,89 @@ fn write_json(out: &mut impl Write, msg: &str) -> io::Result<()> { Ok(()) } -/*TODO - #[cfg(test)] mod tests { - use tt::{ - Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, SpanAnchor, Subtree, - TokenId, TokenTree, + use base_db::{ + span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId}, + FileId, }; + use la_arena::RawIdx; + use text_size::{TextRange, TextSize}; + use tt::{Delimiter, DelimiterKind, Ident, Leaf, Literal, Punct, Spacing, Subtree, TokenTree}; use super::*; - fn fixture_token_tree() -> Subtree { - let mut subtree = Subtree { delimiter: Delimiter::unspecified(), token_trees: Vec::new() }; - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "struct".into(), span: TokenId(0) }.into())); - subtree - .token_trees - .push(TokenTree::Leaf(Ident { text: "Foo".into(), span: TokenId(1) }.into())); + fn fixture_token_tree() -> Subtree<SpanData> { + let anchor = + SpanAnchor { file_id: FileId(0), ast_id: ErasedFileAstId::from_raw(RawIdx::from(0)) }; + let mut subtree = Subtree { + delimiter: Delimiter { + open: SpanData { + range: TextRange::empty(TextSize::new(0)), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: SpanData { + range: TextRange::empty(TextSize::new(13)), + anchor, + ctx: SyntaxContextId::ROOT, + }, + kind: DelimiterKind::Invisible, + }, + token_trees:
Vec::new(), + }; + subtree.token_trees.push(TokenTree::Leaf( + Ident { + text: "struct".into(), + span: SpanData { + range: TextRange::at(TextSize::new(0), TextSize::of("struct")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + )); + subtree.token_trees.push(TokenTree::Leaf( + Ident { + text: "Foo".into(), + span: SpanData { + range: TextRange::at(TextSize::new(5), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, + } + .into(), + )); subtree.token_trees.push(TokenTree::Leaf(Leaf::Literal(Literal { text: "Foo".into(), - span: TokenId::DUMMY, + + span: SpanData { + range: TextRange::at(TextSize::new(8), TextSize::of("Foo")), + anchor, + ctx: SyntaxContextId::ROOT, + }, }))); subtree.token_trees.push(TokenTree::Leaf(Leaf::Punct(Punct { char: '@', - span: TokenId::DUMMY, + span: SpanData { + range: TextRange::at(TextSize::new(11), TextSize::of('@')), + anchor, + ctx: SyntaxContextId::ROOT, + }, spacing: Spacing::Joint, }))); subtree.token_trees.push(TokenTree::Subtree(Subtree { delimiter: Delimiter { - open: TokenId(2), - close: TokenId::DUMMY, + open: SpanData { + range: TextRange::at(TextSize::new(12), TextSize::of('{')), + anchor, + ctx: SyntaxContextId::ROOT, + }, + close: SpanData { + range: TextRange::at(TextSize::new(13), TextSize::of('}')), + anchor, + ctx: SyntaxContextId::ROOT, + }, kind: DelimiterKind::Brace, }, token_trees: vec![], @@ -162,20 +225,26 @@ fn fixture_token_tree() -> Subtree { #[test] fn test_proc_macro_rpc_works() { let tt = fixture_token_tree(); + let mut span_data_table = Default::default(); let task = ExpandMacro { - macro_body: FlatTree::new(&tt, CURRENT_API_VERSION), + macro_body: FlatTree::new(&tt, CURRENT_API_VERSION, &mut span_data_table), macro_name: Default::default(), attributes: None, lib: std::env::current_dir().unwrap(), env: Default::default(), current_dir: Default::default(), + has_global_spans: ExpnGlobals { + serialize: true, + def_site: 0, + call_site: 0, + mixed_site: 0, + }, }; let json = serde_json::to_string(&task).unwrap(); // println!("{}", json); let back: ExpandMacro = serde_json::from_str(&json).unwrap(); - assert_eq!(tt, back.macro_body.to_subtree(CURRENT_API_VERSION)); + assert_eq!(tt, back.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); } } -*/ diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs index f29aac52956..baf8bbad4bf 100644 --- a/crates/proc-macro-api/src/msg/flat.rs +++ b/crates/proc-macro-api/src/msg/flat.rs @@ -37,40 +37,40 @@ use std::collections::{HashMap, VecDeque}; +use base_db::span::SpanData; +use indexmap::IndexSet; use serde::{Deserialize, Serialize}; -use text_size::TextRange; -use tt::{Span, SyntaxContext}; -use crate::msg::{ENCODE_CLOSE_SPAN_VERSION, VARIABLE_SIZED_SPANS}; +use crate::msg::ENCODE_CLOSE_SPAN_VERSION; -pub trait SerializableSpan<const L: usize>: Span { - fn into_u32(self) -> [u32; L]; - fn from_u32(input: [u32; L]) -> Self; -} -// impl SerializableSpan<1> for tt::TokenId { -// fn into_u32(self) -> [u32; 1] { -// [self.0] -// } -// fn from_u32([input]: [u32; 1]) -> Self { -// tt::TokenId(input) -// } -// } +pub type SpanDataIndexMap = IndexSet<SpanData>; -impl<Anchor, Ctx> SerializableSpan<3> for tt::SpanData<Anchor, Ctx> -where - Anchor: From<u32> + Into<u32>, - Self: Span, - Ctx: SyntaxContext, -{ - fn into_u32(self) -> [u32; 3] { - [self.anchor.into(), self.range.start().into(), self.range.end().into()] +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct TokenId(pub u32); + +impl std::fmt::Debug for TokenId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) ->
std::fmt::Result { + self.0.fmt(f) } - fn from_u32([file_id, start, end]: [u32; 3]) -> Self { - tt::SpanData { - anchor: file_id.into(), - range: TextRange::new(start.into(), end.into()), - ctx: Ctx::DUMMY, - } +} + +impl TokenId { + pub const DEF_SITE: Self = TokenId(0); + pub const CALL_SITE: Self = TokenId(0); + pub const MIXED_SITE: Self = TokenId(0); +} + +impl tt::Span for TokenId { + const DUMMY: Self = TokenId(!0); + + type Anchor = (); + + fn anchor(self) -> Self::Anchor { + () + } + + fn mk(_: Self::Anchor, _: text_size::TextRange) -> Self { + Self::DUMMY + } } @@ -82,82 +82,41 @@ pub struct FlatTree { ident: Vec<u32>, token_tree: Vec<u32>, text: Vec<String>, - #[serde(skip_serializing_if = "SpanMap::do_serialize")] - #[serde(default)] - span_map: SpanMap, } -#[derive(Serialize, Deserialize, Debug)] -struct SpanMap { - #[serde(skip_serializing)] - serialize: bool, - span_size: u32, - spans: Vec<u32>, -} - -impl Default for SpanMap { - fn default() -> Self { - Self { serialize: false, span_size: 1, spans: Default::default() } - } -} - -impl SpanMap { - fn serialize_span<const L: usize, S: SerializableSpan<L>>(&mut self, span: S) -> u32 { - let u32s = span.into_u32(); - if L == 1 { - u32s[0] - } else { - let offset = self.spans.len() as u32; - self.spans.extend(u32s); - offset - } - } - fn deserialize_span<const L: usize, S: SerializableSpan<L>>(&self, offset: u32) -> S { - S::from_u32(if L == 1 { - [offset].as_ref().try_into().unwrap() - } else { - self.spans[offset as usize..][..L].try_into().unwrap() - }) - } -} - -impl SpanMap { - fn do_serialize(&self) -> bool { - self.serialize - } -} - -struct SubtreeRepr<S> { - open: S, - close: S, +struct SubtreeRepr { + open: TokenId, + close: TokenId, kind: tt::DelimiterKind, tt: [u32; 2], } -struct LiteralRepr<S> { - id: S, +struct LiteralRepr { + id: TokenId, text: u32, } -struct PunctRepr<S> { - id: S, +struct PunctRepr { + id: TokenId, char: char, spacing: tt::Spacing, } -struct IdentRepr<S> { - id: S, +struct IdentRepr { + id: TokenId, text: u32, } impl FlatTree { - pub fn new<const L: usize, S: SerializableSpan<L>>( - subtree: &tt::Subtree<S>, + pub fn new( + subtree: &tt::Subtree<SpanData>, version: u32, + span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { let mut w = Writer { string_table: HashMap::new(), work: VecDeque::new(), + span_data_table, subtree: Vec::new(), literal: Vec::new(), @@ -167,78 +126,111 @@ pub fn new<const L: usize, S: SerializableSpan<L>>( text: Vec::new(), }; w.write(subtree); - assert!(L == 1 || version >= VARIABLE_SIZED_SPANS); - let mut span_map = SpanMap { - serialize: version >= VARIABLE_SIZED_SPANS && L != 1, - span_size: L as u32, - spans: Vec::new(), - }; - return FlatTree { + + FlatTree { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { - write_vec(&mut span_map, w.subtree, SubtreeRepr::write_with_close_span) + write_vec(w.subtree, SubtreeRepr::write_with_close_span) } else { - write_vec(&mut span_map, w.subtree, SubtreeRepr::write) + write_vec(w.subtree, SubtreeRepr::write) }, - literal: write_vec(&mut span_map, w.literal, LiteralRepr::write), - punct: write_vec(&mut span_map, w.punct, PunctRepr::write), - ident: write_vec(&mut span_map, w.ident, IdentRepr::write), + literal: write_vec(w.literal, LiteralRepr::write), + punct: write_vec(w.punct, PunctRepr::write), + ident: write_vec(w.ident, IdentRepr::write), token_tree: w.token_tree, text: w.text, - span_map, - }; - - fn write_vec<T, F: Fn(T, &mut SpanMap) -> [u32; N], const N: usize>( - map: &mut SpanMap, - xs: Vec<T>, - f: F, - ) -> Vec<u32> { - xs.into_iter().flat_map(|it| f(it, map)).collect() - } } - pub fn to_subtree<const L: usize, S: SerializableSpan<L>>( + pub fn new_raw(subtree: &tt::Subtree<TokenId>, version: u32) -> FlatTree { + let mut w = Writer { + string_table: HashMap::new(), + work: VecDeque::new(),
span_data_table: &mut (), + + subtree: Vec::new(), + literal: Vec::new(), + punct: Vec::new(), + ident: Vec::new(), + token_tree: Vec::new(), + text: Vec::new(), + }; + w.write(subtree); + + FlatTree { + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + write_vec(w.subtree, SubtreeRepr::write_with_close_span) + } else { + write_vec(w.subtree, SubtreeRepr::write) + }, + literal: write_vec(w.literal, LiteralRepr::write), + punct: write_vec(w.punct, PunctRepr::write), + ident: write_vec(w.ident, IdentRepr::write), + token_tree: w.token_tree, + text: w.text, + } + } + + pub fn to_subtree_resolved( self, version: u32, - ) -> tt::Subtree<S> { + span_data_table: &SpanDataIndexMap, + ) -> tt::Subtree<SpanData> { - assert!((version >= VARIABLE_SIZED_SPANS || L == 1) && L as u32 == self.span_map.span_size); - return Reader { + Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { - read_vec(&self.span_map, self.subtree, SubtreeRepr::read_with_close_span) + read_vec(self.subtree, SubtreeRepr::read_with_close_span) } else { - read_vec(&self.span_map, self.subtree, SubtreeRepr::read) + read_vec(self.subtree, SubtreeRepr::read) }, - literal: read_vec(&self.span_map, self.literal, LiteralRepr::read), - punct: read_vec(&self.span_map, self.punct, PunctRepr::read), - ident: read_vec(&self.span_map, self.ident, IdentRepr::read), + literal: read_vec(self.literal, LiteralRepr::read), + punct: read_vec(self.punct, PunctRepr::read), + ident: read_vec(self.ident, IdentRepr::read), token_tree: self.token_tree, text: self.text, + span_data_table, } - .read(); + .read() + } - fn read_vec<T, F: Fn([u32; N], &SpanMap) -> T, const N: usize>( - map: &SpanMap, - xs: Vec<u32>, - f: F, - ) -> Vec<T> { - let mut chunks = xs.chunks_exact(N); - let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap(), map)).collect(); - assert!(chunks.remainder().is_empty()); - res + pub fn to_subtree_unresolved(self, version: u32) -> tt::Subtree<TokenId> { + Reader { + subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { + read_vec(self.subtree, SubtreeRepr::read_with_close_span) + } else { + read_vec(self.subtree, SubtreeRepr::read) + }, + literal: read_vec(self.literal, LiteralRepr::read), + punct: read_vec(self.punct, PunctRepr::read), + ident: read_vec(self.ident, IdentRepr::read), + token_tree: self.token_tree, + text: self.text, + span_data_table: &(), } + .read() } } -impl<const L: usize, S: SerializableSpan<L>> SubtreeRepr<S> { - fn write(self, map: &mut SpanMap) -> [u32; 4] { +fn read_vec<T, F: Fn([u32; N]) -> T, const N: usize>(xs: Vec<u32>, f: F) -> Vec<T> { + let mut chunks = xs.chunks_exact(N); + let res = chunks.by_ref().map(|chunk| f(chunk.try_into().unwrap())).collect(); + assert!(chunks.remainder().is_empty()); + res +} + +fn write_vec<T, F: Fn(T) -> [u32; N], const N: usize>(xs: Vec<T>, f: F) -> Vec<u32> { + xs.into_iter().flat_map(f).collect() +} + +impl SubtreeRepr { + fn write(self) -> [u32; 4] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, tt::DelimiterKind::Brace => 2, tt::DelimiterKind::Bracket => 3, }; - [map.serialize_span(self.open), kind, self.tt[0], self.tt[1]] + [self.open.0, kind, self.tt[0], self.tt[1]] } - fn read([open, kind, lo, len]: [u32; 4], map: &SpanMap) -> Self { + fn read([open, kind, lo, len]: [u32; 4]) -> SubtreeRepr { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, 2 => tt::DelimiterKind::Brace, 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { open: map.deserialize_span(open), close: S::DUMMY, kind, tt: [lo, len] } + SubtreeRepr { open: TokenId(open), close: TokenId(!0), kind, tt:
[lo, len] } } - fn write_with_close_span(self, map: &mut SpanMap) -> [u32; 5] { + fn write_with_close_span(self) -> [u32; 5] { let kind = match self.kind { tt::DelimiterKind::Invisible => 0, tt::DelimiterKind::Parenthesis => 1, tt::DelimiterKind::Brace => 2, tt::DelimiterKind::Bracket => 3, }; - [ - map.serialize_span(self.open), - map.serialize_span(self.close), - kind, - self.tt[0], - self.tt[1], - ] + [self.open.0, self.close.0, kind, self.tt[0], self.tt[1]] } - fn read_with_close_span([open, close, kind, lo, len]: [u32; 5], map: &SpanMap) -> Self { + fn read_with_close_span([open, close, kind, lo, len]: [u32; 5]) -> SubtreeRepr { let kind = match kind { 0 => tt::DelimiterKind::Invisible, 1 => tt::DelimiterKind::Parenthesis, 2 => tt::DelimiterKind::Brace, 3 => tt::DelimiterKind::Bracket, other => panic!("bad kind {other}"), }; - SubtreeRepr { - open: map.deserialize_span(open), - close: map.deserialize_span(close), - kind, - tt: [lo, len], - } + SubtreeRepr { open: TokenId(open), close: TokenId(close), kind, tt: [lo, len] } } } -impl<const L: usize, S: SerializableSpan<L>> LiteralRepr<S> { - fn write(self, map: &mut SpanMap) -> [u32; 2] { - [map.serialize_span(self.id), self.text] +impl LiteralRepr { + fn write(self) -> [u32; 2] { + [self.id.0, self.text] } - fn read([id, text]: [u32; 2], map: &SpanMap) -> Self { - LiteralRepr { id: map.deserialize_span(id), text } + fn read([id, text]: [u32; 2]) -> LiteralRepr { + LiteralRepr { id: TokenId(id), text } } } -impl<const L: usize, S: SerializableSpan<L>> PunctRepr<S> { - fn write(self, map: &mut SpanMap) -> [u32; 3] { +impl PunctRepr { + fn write(self) -> [u32; 3] { let spacing = match self.spacing { tt::Spacing::Alone => 0, tt::Spacing::Joint => 1, }; - [map.serialize_span(self.id), self.char as u32, spacing] + [self.id.0, self.char as u32, spacing] } - fn read([id, char, spacing]: [u32; 3], map: &SpanMap) -> Self { + fn read([id, char, spacing]: [u32; 3]) -> PunctRepr { let spacing = match spacing { 0 => tt::Spacing::Alone, 1 => tt::Spacing::Joint, other => panic!("bad spacing {other}"), }; - PunctRepr { id: map.deserialize_span(id), char: char.try_into().unwrap(), spacing } + PunctRepr { id: TokenId(id), char: char.try_into().unwrap(), spacing } } } -impl<const L: usize, S: SerializableSpan<L>> IdentRepr<S> { - fn write(self, map: &mut SpanMap) -> [u32; 2] { - [map.serialize_span(self.id), self.text] +impl IdentRepr { + fn write(self) -> [u32; 2] { + [self.id.0, self.text] } - fn read(data: [u32; 2], map: &SpanMap) -> Self { - IdentRepr { id: map.deserialize_span(data[0]), text: data[1] } + fn read(data: [u32; 2]) -> IdentRepr { + IdentRepr { id: TokenId(data[0]), text: data[1] } } } -struct Writer<'a, const L: usize, S> { +trait Span: Copy { + type Table; + fn token_id_of(table: &mut Self::Table, s: Self) -> TokenId; + fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self; +} + +impl Span for TokenId { + type Table = (); + fn token_id_of((): &mut Self::Table, token_id: Self) -> TokenId { + token_id + } + + fn span_for_token_id((): &Self::Table, id: TokenId) -> Self { + id + } +} +impl Span for SpanData { + type Table = IndexSet<SpanData>; + fn token_id_of(table: &mut Self::Table, span: Self) -> TokenId { + TokenId(table.insert_full(span).0 as u32) + } + fn span_for_token_id(table: &Self::Table, id: TokenId) -> Self { + *table.get_index(id.0 as usize).unwrap_or_else(|| &table[0]) + } +} + +struct Writer<'a, 'span, S: Span> { work: VecDeque<(usize, &'a tt::Subtree<S>)>, string_table: HashMap<&'a str, u32>, + span_data_table: &'span mut S::Table, - subtree: Vec<SubtreeRepr<S>>, - literal: Vec<LiteralRepr<S>>, - punct: Vec<PunctRepr<S>>, - ident: Vec<IdentRepr<S>>, + subtree: Vec<SubtreeRepr>, + literal: Vec<LiteralRepr>, + punct: Vec<PunctRepr>,
+ ident: Vec<IdentRepr>, token_tree: Vec<u32>, text: Vec<String>, } -impl<'a, const L: usize, S: Copy> Writer<'a, L, S> { +impl<'a, 'span, S: Span> Writer<'a, 'span, S> { fn write(&mut self, root: &'a tt::Subtree<S>) { self.enqueue(root); while let Some((idx, subtree)) = self.work.pop_front() { @@ -336,6 +344,10 @@ fn write(&mut self, root: &'a tt::Subtree<S>) { } } + fn token_id_of(&mut self, span: S) -> TokenId { + S::token_id_of(self.span_data_table, span) + } + fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) { let mut first_tt = self.token_tree.len(); let n_tt = subtree.token_trees.len(); @@ -353,22 +365,21 @@ fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) { tt::Leaf::Literal(lit) => { let idx = self.literal.len() as u32; let text = self.intern(&lit.text); - self.literal.push(LiteralRepr { id: lit.span, text }); + let id = self.token_id_of(lit.span); + self.literal.push(LiteralRepr { id, text }); idx << 2 | 0b01 } tt::Leaf::Punct(punct) => { let idx = self.punct.len() as u32; - self.punct.push(PunctRepr { - char: punct.char, - spacing: punct.spacing, - id: punct.span, - }); + let id = self.token_id_of(punct.span); + self.punct.push(PunctRepr { char: punct.char, spacing: punct.spacing, id }); idx << 2 | 0b10 } tt::Leaf::Ident(ident) => { let idx = self.ident.len() as u32; let text = self.intern(&ident.text); - self.ident.push(IdentRepr { id: ident.span, text }); + let id = self.token_id_of(ident.span); + self.ident.push(IdentRepr { id, text }); idx << 2 | 0b11 } }, @@ -380,8 +391,8 @@ fn subtree(&mut self, idx: usize, subtree: &'a tt::Subtree<S>) { fn enqueue(&mut self, subtree: &'a tt::Subtree<S>) -> u32 { let idx = self.subtree.len(); - let open = subtree.delimiter.open; - let close = subtree.delimiter.close; + let open = self.token_id_of(subtree.delimiter.open); + let close = self.token_id_of(subtree.delimiter.close); let delimiter_kind = subtree.delimiter.kind; self.subtree.push(SubtreeRepr { open, close, kind: delimiter_kind, tt: [!0, !0] }); self.work.push_back((idx, subtree)); @@ -398,23 +409,29 @@ pub(crate) fn intern(&mut self, text: &'a str) -> u32 { } } -struct Reader<const L: usize, S> { - subtree: Vec<SubtreeRepr<S>>, - literal: Vec<LiteralRepr<S>>, - punct: Vec<PunctRepr<S>>, - ident: Vec<IdentRepr<S>>, +struct Reader<'span, S: Span> { + subtree: Vec<SubtreeRepr>, + literal: Vec<LiteralRepr>, + punct: Vec<PunctRepr>, + ident: Vec<IdentRepr>, token_tree: Vec<u32>, text: Vec<String>, + span_data_table: &'span S::Table, } -impl<const L: usize, S: SerializableSpan<L>> Reader<L, S> { +impl<'span, S: Span> Reader<'span, S> { pub(crate) fn read(self) -> tt::Subtree<S> { let mut res: Vec<Option<tt::TokenTree<S>>> = vec![None; self.subtree.len()]; + let read_span = |id| S::span_for_token_id(self.span_data_table, id); for i in (0..self.subtree.len()).rev() { let repr = &self.subtree[i]; let token_trees = &self.token_tree[repr.tt[0] as usize..repr.tt[1] as usize]; let s = tt::Subtree { - delimiter: tt::Delimiter { open: repr.open, close: repr.close, kind: repr.kind }, + delimiter: tt::Delimiter { + open: read_span(repr.open), + close: read_span(repr.close), + kind: repr.kind, + }, token_trees: token_trees .iter() .copied() @@ -429,7 +446,7 @@ pub(crate) fn read(self) -> tt::Subtree<S> { let repr = &self.literal[idx]; tt::Leaf::Literal(tt::Literal { text: self.text[repr.text as usize].as_str().into(), - span: repr.id, + span: read_span(repr.id), }) .into() } @@ -438,7 +455,7 @@ pub(crate) fn read(self) -> tt::Subtree<S> { tt::Leaf::Punct(tt::Punct { char: repr.char, spacing: repr.spacing, - span: repr.id, + span: read_span(repr.id), }) .into() } @@ -446,7 +463,7 @@ pub(crate) fn read(self) -> tt::Subtree<S> { let repr = &self.ident[idx]; tt::Leaf::Ident(tt::Ident { text: self.text[repr.text as
usize].as_str().into(), - span: repr.id, + span: read_span(repr.id), }) .into() } diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index ea65c33604f..50ce586fc42 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -18,14 +18,12 @@ fn main() -> std::io::Result<()> { run() } -#[cfg(not(FALSE))] -#[cfg(not(feature = "sysroot-abi"))] +#[cfg(not(any(feature = "sysroot-abi", rust_analyzer)))] fn run() -> io::Result<()> { panic!("proc-macro-srv-cli requires the `sysroot-abi` feature to be enabled"); } -#[cfg(FALSE)] -#[cfg(feature = "sysroot-abi")] +#[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn run() -> io::Result<()> { use proc_macro_api::msg::{self, Message}; diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index dd05e250c2d..80bce3af1a0 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -11,7 +11,7 @@ use memmap2::Mmap; use object::Object; use paths::AbsPath; -use proc_macro_api::{read_dylib_info, ProcMacroKind}; +use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind}; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; @@ -152,8 +152,14 @@ pub fn expand( macro_name: &str, macro_body: &crate::tt::Subtree, attributes: Option<&crate::tt::Subtree>, + def_site: TokenId, + call_site: TokenId, + mixed_site: TokenId, ) -> Result<crate::tt::Subtree, String> { - let result = self.inner.proc_macros.expand(macro_name, macro_body, attributes); + let result = self + .inner + .proc_macros + .expand(macro_name, macro_body, attributes, def_site, call_site, mixed_site); result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string())) } diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index bd0d1b79fa1..32a07a84777 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -10,7 +10,6 @@ //! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable` //! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)… -#![cfg(FALSE)] // TODO #![cfg(any(feature = "sysroot-abi", rust_analyzer))] #![feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)] #![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)] @@ -32,12 +31,25 @@ time::SystemTime, }; +use ::tt::Span; use proc_macro_api::{ - msg::{self, CURRENT_API_VERSION}, + msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION, HAS_GLOBAL_SPANS}, ProcMacroKind, }; -use ::tt::token_id as tt; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree<TokenId>; + pub type TokenTree = ::tt::TokenTree<TokenId>; + pub type Delimiter = ::tt::Delimiter<TokenId>; + pub type Leaf = ::tt::Leaf<TokenId>; + pub type Literal = ::tt::Literal<TokenId>; + pub type Punct = ::tt::Punct<TokenId>; + pub type Ident = ::tt::Ident<TokenId>; +} // see `build.rs` include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); @@ -71,16 +83,28 @@ pub fn expand(&mut self, task: msg::ExpandMacro) -> Result<msg::FlatTree, msg::PanicMessage> { None, }; - let macro_body = task.macro_body.to_subtree(CURRENT_API_VERSION); - let attributes = task.attributes.map(|it| it.to_subtree(CURRENT_API_VERSION)); + let ExpnGlobals { def_site, call_site, mixed_site, ..
} = task.has_global_spans; + let def_site = TokenId(def_site as u32); + let call_site = TokenId(call_site as u32); + let mixed_site = TokenId(mixed_site as u32); + + let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); let result = thread::scope(|s| { let thread = thread::Builder::new() .stack_size(EXPANDER_STACK_SIZE) .name(task.macro_name.clone()) .spawn_scoped(s, || { expander - .expand(&task.macro_name, &macro_body, attributes.as_ref()) - .map(|it| msg::FlatTree::new(&it, CURRENT_API_VERSION)) + .expand( + &task.macro_name, + &macro_body, + attributes.as_ref(), + def_site, + call_site, + mixed_site, + ) + .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) }); let res = match thread { Ok(handle) => handle.join(), diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 3c6f3203319..4f87fa281b7 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -1,7 +1,7 @@ //! Proc macro ABI use libloading::Library; -use proc_macro_api::{ProcMacroKind, RustCInfo}; +use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo}; use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt}; @@ -45,6 +45,9 @@ pub(crate) fn expand( macro_name: &str, macro_body: &tt::Subtree, attributes: Option<&tt::Subtree>, + def_site: TokenId, + call_site: TokenId, + mixed_site: TokenId, ) -> Result<tt::Subtree, crate::PanicMessage> { let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone()); @@ -59,34 +62,56 @@ pub(crate) fn expand( } if *trait_name == macro_name => { let res = client.run( &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + }, parsed_body, true, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } proc_macro::bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { let res = client.run( &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + call_site, + def_site, + mixed_site, + }, parsed_body, true, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } proc_macro::bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { let res = client.run( &proc_macro::bridge::server::SameThread, - crate::server::RustAnalyzer { interner: &SYMBOL_INTERNER }, + crate::server::RustAnalyzer { + interner: &SYMBOL_INTERNER, + + call_site, + def_site, + mixed_site, + }, parsed_attributes, parsed_body, true, ); - return res.map(|it| it.into_subtree()).map_err(crate::PanicMessage::from); + return res + .map(|it| it.into_subtree(call_site)) + .map_err(crate::PanicMessage::from); } _ => continue, } diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index fe18451d384..fc080eccc0f 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -11,6 +11,7 @@ use proc_macro::bridge::{self, server}; mod token_stream; +use proc_macro_api::msg::TokenId; pub use token_stream::TokenStream; use token_stream::TokenStreamBuilder;
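The changes above all feed one mechanism: rich spans never cross the process boundary directly. The client side (`ProcMacro::expand` and `FlatTree::new`) interns each `SpanData` into an `IndexSet` side table and ships only its `u32` index as a `TokenId`, and `to_subtree_resolved` looks the indices back up on the return trip. A condensed, self-contained sketch of that round trip, using a simplified `Span` struct as a stand-in for base-db's `SpanData` (the function names mirror the `Span` trait methods in `flat.rs` above):

```rust
// Stand-in span type; the real side table is IndexSet<SpanData> (SpanDataIndexMap).
use indexmap::IndexSet;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Span {
    start: u32,
    end: u32,
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct TokenId(u32);

/// Writer side (what FlatTree::new does): every span becomes its table index.
fn token_id_of(table: &mut IndexSet<Span>, span: Span) -> TokenId {
    // insert_full returns the existing index when the span was already interned.
    TokenId(table.insert_full(span).0 as u32)
}

/// Reader side (what to_subtree_resolved does): indices resolve through the same table.
fn span_for_token_id(table: &IndexSet<Span>, id: TokenId) -> Span {
    *table.get_index(id.0 as usize).expect("TokenId produced by token_id_of")
}

fn main() {
    let mut table = IndexSet::new();
    let call_site = Span { start: 0, end: 13 };

    let id = token_id_of(&mut table, call_site);
    // Interning is idempotent, so def/call/mixed site sharing one span stays cheap.
    assert_eq!(id, token_id_of(&mut table, call_site));
    assert_eq!(span_for_token_id(&table, id), call_site);
}
```

This is also why the server only ever sees plain `TokenId`s: it runs in a separate process with no access to the salsa database, so it round-trips the opaque indices and the client re-attaches real spans afterwards.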
@@ -43,6 +44,9 @@ pub struct SourceFile { pub struct RustAnalyzer { // FIXME: store span information here. pub(crate) interner: SymbolInternerRef, + pub call_site: TokenId, + pub def_site: TokenId, + pub mixed_site: TokenId, } impl server::Types for RustAnalyzer { @@ -69,7 +73,7 @@ fn literal_from_str( kind: bridge::LitKind::Err, symbol: Symbol::intern(self.interner, s), suffix: None, - span: tt::TokenId::unspecified(), + span: self.call_site, }) } @@ -83,7 +87,7 @@ fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { stream.is_empty() } fn from_str(&mut self, src: &str) -> Self::TokenStream { - src.parse().expect("cannot parse string") + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") } fn to_string(&mut self, stream: &Self::TokenStream) -> String { stream.to_string() @@ -280,7 +284,7 @@ fn save_span(&mut self, _span: Self::Span) -> usize { } fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { // FIXME stub - tt::TokenId::unspecified() + self.call_site } /// Recent feature, not yet in the proc_macro /// @@ -317,15 +321,15 @@ fn subspan( } fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { // FIXME handle span - tt::TokenId::unspecified() + self.call_site } fn end(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() + self.call_site } fn start(&mut self, _self_: Self::Span) -> Self::Span { - tt::TokenId::unspecified() + self.call_site } fn line(&mut self, _span: Self::Span) -> usize { @@ -349,9 +353,9 @@ fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> { bridge::ExpnGlobals { - def_site: Span::unspecified(), - call_site: Span::unspecified(), - mixed_site: Span::unspecified(), + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, } } @@ -422,6 +426,8 @@ fn with_symbol_and_suffix( #[cfg(test)] mod tests { + use ::tt::Span; + use super::*; #[test] @@ -430,16 +436,16 @@ fn test_ra_server_to_string() { token_trees: vec![ tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "struct".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, })), tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "T".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, })), tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId::DUMMY, + close: tt::TokenId::DUMMY, kind: tt::DelimiterKind::Brace, }, token_trees: vec![], @@ -452,33 +458,32 @@ fn test_ra_server_from_str() { - use std::str::FromStr; let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { delimiter: tt::Delimiter { - open: tt::TokenId::unspecified(), - close: tt::TokenId::unspecified(), + open: tt::TokenId::DUMMY, + close: tt::TokenId::DUMMY, kind: tt::DelimiterKind::Parenthesis, }, token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { text: "a".into(), - span: tt::TokenId::unspecified(), + span: tt::TokenId::DUMMY, }))], }); - let t1 = TokenStream::from_str("(a)").unwrap(); + let t1 = TokenStream::from_str("(a)", tt::TokenId::DUMMY).unwrap(); assert_eq!(t1.token_trees.len(), 1); assert_eq!(t1.token_trees[0], subtree_paren_a); - let t2 = TokenStream::from_str("(a);").unwrap(); + let t2 = TokenStream::from_str("(a);", tt::TokenId::DUMMY).unwrap(); assert_eq!(t2.token_trees.len(), 2); assert_eq!(t2.token_trees[0], subtree_paren_a); - let underscore = TokenStream::from_str("_").unwrap(); + let
diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs
index 2589d8b64d4..36be8825038 100644
--- a/crates/proc-macro-srv/src/server/token_stream.rs
+++ b/crates/proc-macro-srv/src/server/token_stream.rs
@@ -1,5 +1,7 @@
 //! TokenStream implementation used by sysroot ABI
 
+use proc_macro_api::msg::TokenId;
+
 use crate::tt::{self, TokenTree};
 
 #[derive(Debug, Default, Clone)]
@@ -20,8 +22,15 @@ pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self {
         }
     }
 
-    pub(crate) fn into_subtree(self) -> tt::Subtree {
-        tt::Subtree { delimiter: tt::Delimiter::UNSPECIFIED, token_trees: self.token_trees }
+    pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree {
+        tt::Subtree {
+            delimiter: tt::Delimiter {
+                open: call_site,
+                close: call_site,
+                kind: tt::DelimiterKind::Invisible,
+            },
+            token_trees: self.token_trees,
+        }
     }
 
     pub(super) fn is_empty(&self) -> bool {
@@ -84,7 +93,7 @@ pub(super) struct TokenStreamBuilder {
 /// pub(super)lic implementation details for the `TokenStream` type, such as iterators.
 pub(super) mod token_stream {
-    use std::str::FromStr;
+    use proc_macro_api::msg::TokenId;
 
     use super::{tt, TokenStream, TokenTree};
 
@@ -109,14 +118,15 @@ fn into_iter(self) -> Self::IntoIter {
     ///
     /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
     /// change these errors into `LexError`s later.
-    impl FromStr for TokenStream {
-        type Err = LexError;
+    #[rustfmt::skip]
+    impl /*FromStr for*/ TokenStream {
+        // type Err = LexError;
 
-        fn from_str(src: &str) -> Result<TokenStream, LexError> {
-            let (subtree, _token_map) =
-                mbe::parse_to_token_tree(src).ok_or("Failed to parse from mbe")?;
+        pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result<TokenStream, LexError> {
+            let subtree =
+                mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?;
 
-            let subtree = subtree_replace_token_ids_with_unspecified(subtree);
+            let subtree = subtree_replace_token_ids_with_call_site(subtree, call_site);
             Ok(TokenStream::with_subtree(subtree))
         }
     }
@@ -127,43 +137,39 @@ fn to_string(&self) -> String {
         }
     }
 
-    fn subtree_replace_token_ids_with_unspecified(subtree: tt::Subtree) -> tt::Subtree {
+    fn subtree_replace_token_ids_with_call_site(
+        subtree: tt::Subtree,
+        call_site: TokenId,
+    ) -> tt::Subtree {
         tt::Subtree {
-            delimiter: tt::Delimiter {
-                open: tt::TokenId::UNSPECIFIED,
-                close: tt::TokenId::UNSPECIFIED,
-                ..subtree.delimiter
-            },
+            delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter },
             token_trees: subtree
                 .token_trees
                 .into_iter()
-                .map(token_tree_replace_token_ids_with_unspecified)
+                .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site))
                 .collect(),
         }
     }
 
-    fn token_tree_replace_token_ids_with_unspecified(tt: tt::TokenTree) -> tt::TokenTree {
+    fn token_tree_replace_token_ids_with_call_site(
+        tt: tt::TokenTree,
+        call_site: TokenId,
+    ) -> tt::TokenTree {
         match tt {
             tt::TokenTree::Leaf(leaf) => {
-                tt::TokenTree::Leaf(leaf_replace_token_ids_with_unspecified(leaf))
+                tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site))
             }
             tt::TokenTree::Subtree(subtree) => {
-                tt::TokenTree::Subtree(subtree_replace_token_ids_with_unspecified(subtree))
+                tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site))
            }
        }
    }
 
-    fn leaf_replace_token_ids_with_unspecified(leaf: tt::Leaf) -> tt::Leaf {
+    fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf {
        match leaf {
-            tt::Leaf::Literal(lit) => {
-                tt::Leaf::Literal(tt::Literal { span: tt::TokenId::unspecified(), ..lit })
-            }
-            tt::Leaf::Punct(punct) => {
-                tt::Leaf::Punct(tt::Punct { span: tt::TokenId::unspecified(), ..punct })
-            }
-            tt::Leaf::Ident(ident) => {
-                tt::Leaf::Ident(tt::Ident { span: tt::TokenId::unspecified(), ..ident })
-            }
+            tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }),
+            tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }),
+            tt::Leaf::Ident(ident) => tt::Leaf::Ident(tt::Ident { span: call_site, ..ident }),
        }
    }
 }
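// ---------------------------------------------------------------------------
// Reviewer sketch (not part of the change set): a self-contained model of the
// `*_replace_token_ids_with_call_site` pass above. The enum is a simplified
// stand-in for the real `tt` types; the point is the recursion, which stamps
// the call-site id onto every leaf span and onto both delimiter spans.
// ---------------------------------------------------------------------------

#[derive(Debug, Clone, PartialEq, Eq)]
enum TokenTree {
    Leaf { text: String, span: u32 },
    Subtree { open: u32, close: u32, token_trees: Vec<TokenTree> },
}

fn replace_spans_with_call_site(tt: TokenTree, call_site: u32) -> TokenTree {
    match tt {
        // Leaves: keep the payload, overwrite the span.
        TokenTree::Leaf { text, .. } => TokenTree::Leaf { text, span: call_site },
        // Subtrees: overwrite both delimiter spans and recurse into children.
        TokenTree::Subtree { token_trees, .. } => TokenTree::Subtree {
            open: call_site,
            close: call_site,
            token_trees: token_trees
                .into_iter()
                .map(|tt| replace_spans_with_call_site(tt, call_site))
                .collect(),
        },
    }
}

fn main() {
    // "(a)" parsed with placeholder spans, then stamped with call site 42.
    let parsed = TokenTree::Subtree {
        open: 0,
        close: 0,
        token_trees: vec![TokenTree::Leaf { text: "a".into(), span: 0 }],
    };
    let stamped = replace_spans_with_call_site(parsed, 42);
    assert_eq!(
        stamped,
        TokenTree::Subtree {
            open: 42,
            close: 42,
            token_trees: vec![TokenTree::Leaf { text: "a".into(), span: 42 }],
        }
    );
}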
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index 49b4d973b63..ccfefafb2cf 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,18 +1,19 @@
 //! utils used in proc-macro tests
 
 use expect_test::Expect;
-use std::str::FromStr;
+use proc_macro_api::msg::TokenId;
+use tt::Span;
 
 use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
 
-fn parse_string(code: &str) -> Option<crate::server::TokenStream> {
+fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
     // This is a bit strange. We need to parse a string into a token stream in
     // order to create a tt::SubTree from it in fixtures. `into_subtree` is
     // implemented by all the ABIs we have so we arbitrarily choose one ABI to
     // write a `parse_string` function for and use that. The tests don't really
     // care which ABI we're using as the `into_subtree` function isn't part of
     // the ABI and shouldn't change between ABI versions.
-    crate::server::TokenStream::from_str(code).ok()
+    crate::server::TokenStream::from_str(code, call_site).ok()
 }
 
 pub fn assert_expand(macro_name: &str, ra_fixture: &str, expect: Expect) {
@@ -24,12 +25,22 @@ pub fn assert_expand_attr(macro_name: &str, ra_fixture: &str, attr_args: &str, expect: Expect) {
 
 fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+    let call_site = TokenId::DUMMY;
     let path = proc_macro_test_dylib_path();
     let expander = dylib::Expander::new(&path).unwrap();
-    let fixture = parse_string(input).unwrap();
-    let attr = attr.map(|attr| parse_string(attr).unwrap().into_subtree());
+    let fixture = parse_string(input, call_site).unwrap();
+    let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
 
-    let res = expander.expand(macro_name, &fixture.into_subtree(), attr.as_ref()).unwrap();
+    let res = expander
+        .expand(
+            macro_name,
+            &fixture.into_subtree(call_site),
+            attr.as_ref(),
+            TokenId::DUMMY,
+            TokenId::DUMMY,
+            TokenId::DUMMY,
+        )
+        .unwrap();
     expect.assert_eq(&format!("{res:?}"));
 }
diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs
index a29e18811d8..a865d9e4ab8 100644
--- a/crates/rust-analyzer/src/integrated_benchmarks.rs
+++ b/crates/rust-analyzer/src/integrated_benchmarks.rs
@@ -56,7 +56,7 @@ fn integrated_highlighting_benchmark() {
         analysis.highlight_as_html(file_id, false).unwrap();
     }
 
-    profile::init_from("*>1");
+    profile::init_from("*>100");
 
     {
         let _it = stdx::timeit("change");