From a892237ed4bdfcb92531292f52a449d963c8cd0f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 11 Dec 2023 12:16:12 +0100 Subject: [PATCH] Add rust-analyzer-span server feature equivalent to the ID server --- Cargo.lock | 2 + crates/proc-macro-api/src/lib.rs | 14 +- crates/proc-macro-api/src/msg.rs | 24 +- crates/proc-macro-api/src/msg/flat.rs | 41 +- crates/proc-macro-api/src/process.rs | 34 +- crates/proc-macro-srv-cli/src/main.rs | 9 +- crates/proc-macro-srv/Cargo.toml | 2 + crates/proc-macro-srv/src/dylib.rs | 22 +- crates/proc-macro-srv/src/lib.rs | 187 +++++--- crates/proc-macro-srv/src/proc_macros.rs | 46 +- crates/proc-macro-srv/src/server.rs | 399 +----------------- .../src/server/rust_analyzer_span.rs | 327 ++++++++++++++ crates/proc-macro-srv/src/server/token_id.rs | 375 ++++++++++++++++ .../proc-macro-srv/src/server/token_stream.rs | 99 ++--- crates/proc-macro-srv/src/tests/mod.rs | 58 ++- crates/proc-macro-srv/src/tests/utils.rs | 70 ++- crates/span/src/lib.rs | 8 + 17 files changed, 1159 insertions(+), 558 deletions(-) create mode 100644 crates/proc-macro-srv/src/server/rust_analyzer_span.rs create mode 100644 crates/proc-macro-srv/src/server/token_id.rs diff --git a/Cargo.lock b/Cargo.lock index 2cb26d0c597..aeb7444aa30 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1277,6 +1277,7 @@ dependencies = [ name = "proc-macro-srv" version = "0.0.0" dependencies = [ + "base-db", "expect-test", "libloading", "mbe", @@ -1285,6 +1286,7 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "span", "stdx", "tt", ] diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index a5d4cfeb275..be68f6eff66 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -20,7 +20,10 @@ use serde::{Deserialize, Serialize}; use crate::{ - msg::{ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, HAS_GLOBAL_SPANS}, + msg::{ + flat::serialize_span_data_index_map, ExpandMacro, ExpnGlobals, FlatTree, PanicMessage, + HAS_GLOBAL_SPANS, RUST_ANALYZER_SPAN_SUPPORT, + }, process::ProcMacroProcessSrv, }; @@ -166,6 +169,11 @@ pub fn expand( call_site, mixed_site, }, + span_data_table: if version >= RUST_ANALYZER_SPAN_SUPPORT { + serialize_span_data_index_map(&span_data_table) + } else { + Vec::new() + }, }; let response = self @@ -178,9 +186,7 @@ pub fn expand( msg::Response::ExpandMacro(it) => { Ok(it.map(|tree| FlatTree::to_subtree_resolved(tree, version, &span_data_table))) } - msg::Response::ListMacros(..) | msg::Response::ApiVersionCheck(..) 
=> {
-                Err(ServerError { message: "unexpected response".to_string(), io: None })
-            }
+            _ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
         }
     }
 }
diff --git a/crates/proc-macro-api/src/msg.rs b/crates/proc-macro-api/src/msg.rs
index 18fd9ed7284..7ef6d5b0556 100644
--- a/crates/proc-macro-api/src/msg.rs
+++ b/crates/proc-macro-api/src/msg.rs
@@ -10,28 +10,42 @@
 use crate::ProcMacroKind;
 
-pub use crate::msg::flat::{FlatTree, TokenId};
+pub use crate::msg::flat::{
+    deserialize_span_data_index_map, serialize_span_data_index_map, FlatTree, SpanDataIndexMap,
+    TokenId,
+};
 
 // The versions of the server protocol
 pub const NO_VERSION_CHECK_VERSION: u32 = 0;
 pub const VERSION_CHECK_VERSION: u32 = 1;
 pub const ENCODE_CLOSE_SPAN_VERSION: u32 = 2;
 pub const HAS_GLOBAL_SPANS: u32 = 3;
+pub const RUST_ANALYZER_SPAN_SUPPORT: u32 = 4;
 
-pub const CURRENT_API_VERSION: u32 = HAS_GLOBAL_SPANS;
+pub const CURRENT_API_VERSION: u32 = RUST_ANALYZER_SPAN_SUPPORT;
 
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Request {
     ListMacros { dylib_path: PathBuf },
     ExpandMacro(ExpandMacro),
+    SetSpanMode(SpanMode),
     ApiVersionCheck {},
 }
 
+#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
+pub enum SpanMode {
+    #[default]
+    Id,
+    RustAnalyzer,
+}
+
 #[derive(Debug, Serialize, Deserialize)]
 pub enum Response {
     ListMacros(Result<Vec<(String, ProcMacroKind)>, String>),
     ExpandMacro(Result<FlatTree, PanicMessage>),
+    ExpandMacroSpans(Result<(FlatTree, Vec<u32>), PanicMessage>),
     ApiVersionCheck(u32),
+    SetSpanMode(SpanMode),
 }
 
 #[derive(Debug, Serialize, Deserialize)]
@@ -64,9 +78,12 @@ pub struct ExpandMacro {
     #[serde(skip_serializing_if = "ExpnGlobals::skip_serializing_if")]
     #[serde(default)]
     pub has_global_spans: ExpnGlobals,
+    #[serde(skip_serializing_if = "Vec::is_empty")]
+    #[serde(default)]
+    pub span_data_table: Vec<u32>,
 }
 
-#[derive(Default, Debug, Serialize, Deserialize)]
+#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize)]
 pub struct ExpnGlobals {
     #[serde(skip_serializing)]
     #[serde(default)]
@@ -241,6 +258,7 @@ fn test_proc_macro_rpc_works() {
                 call_site: 0,
                 mixed_site: 0,
             },
+            span_data_table: Vec::new(),
         };
 
         let json = serde_json::to_string(&task).unwrap();
diff --git a/crates/proc-macro-api/src/msg/flat.rs b/crates/proc-macro-api/src/msg/flat.rs
index a12581ac138..8dfaba52625 100644
--- a/crates/proc-macro-api/src/msg/flat.rs
+++ b/crates/proc-macro-api/src/msg/flat.rs
@@ -38,12 +38,45 @@
 use std::collections::{HashMap, VecDeque};
 
 use indexmap::IndexSet;
+use la_arena::RawIdx;
 use serde::{Deserialize, Serialize};
-use span::Span;
+use span::{ErasedFileAstId, FileId, Span, SpanAnchor, SyntaxContextId};
+use text_size::TextRange;
 
 use crate::msg::ENCODE_CLOSE_SPAN_VERSION;
 
-type SpanIndexMap = IndexSet<Span>;
+pub type SpanDataIndexMap = IndexSet<Span>;
+
+pub fn serialize_span_data_index_map(map: &SpanDataIndexMap) -> Vec<u32> {
+    map.iter()
+        .flat_map(|span| {
+            [
+                span.anchor.file_id.index(),
+                span.anchor.ast_id.into_raw().into_u32(),
+                span.range.start().into(),
+                span.range.end().into(),
+                span.ctx.into_u32(),
+            ]
+        })
+        .collect()
+}
+
+pub fn deserialize_span_data_index_map(map: &[u32]) -> SpanDataIndexMap {
+    debug_assert!(map.len() % 5 == 0);
+    map.chunks_exact(5)
+        .map(|span| {
+            let &[file_id, ast_id, start, end, e] = span else { unreachable!() };
+            Span {
+                anchor: SpanAnchor {
+                    file_id: FileId::from_raw(file_id),
+                    ast_id: ErasedFileAstId::from_raw(RawIdx::from_u32(ast_id)),
+                },
+                range: TextRange::new(start.into(), end.into()),
+                ctx: SyntaxContextId::from_u32(e),
+            }
+        })
+        .collect()
+}
 
 #[derive(Clone, Copy, PartialEq, Eq,
Hash)] pub struct TokenId(pub u32); @@ -93,7 +126,7 @@ impl FlatTree { pub fn new( subtree: &tt::Subtree, version: u32, - span_data_table: &mut SpanIndexMap, + span_data_table: &mut SpanDataIndexMap, ) -> FlatTree { let mut w = Writer { string_table: HashMap::new(), @@ -155,7 +188,7 @@ pub fn new_raw(subtree: &tt::Subtree, version: u32) -> FlatTree { pub fn to_subtree_resolved( self, version: u32, - span_data_table: &SpanIndexMap, + span_data_table: &SpanDataIndexMap, ) -> tt::Subtree { Reader { subtree: if version >= ENCODE_CLOSE_SPAN_VERSION { diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 9a20fa63ed7..ed55ca5bd53 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -9,7 +9,7 @@ use stdx::JodChild; use crate::{ - msg::{Message, Request, Response, CURRENT_API_VERSION}, + msg::{Message, Request, Response, SpanMode, CURRENT_API_VERSION, RUST_ANALYZER_SPAN_SUPPORT}, ProcMacroKind, ServerError, }; @@ -19,6 +19,7 @@ pub(crate) struct ProcMacroProcessSrv { stdin: ChildStdin, stdout: BufReader, version: u32, + mode: SpanMode, } impl ProcMacroProcessSrv { @@ -27,7 +28,13 @@ pub(crate) fn run(process_path: AbsPathBuf) -> io::Result { let mut process = Process::run(process_path.clone(), null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); - io::Result::Ok(ProcMacroProcessSrv { _process: process, stdin, stdout, version: 0 }) + io::Result::Ok(ProcMacroProcessSrv { + _process: process, + stdin, + stdout, + version: 0, + mode: SpanMode::Id, + }) }; let mut srv = create_srv(true)?; tracing::info!("sending version check"); @@ -43,6 +50,11 @@ pub(crate) fn run(process_path: AbsPathBuf) -> io::Result { tracing::info!("got version {v}"); srv = create_srv(false)?; srv.version = v; + if srv.version > RUST_ANALYZER_SPAN_SUPPORT { + if let Ok(mode) = srv.enable_rust_analyzer_spans() { + srv.mode = mode; + } + } Ok(srv) } Err(e) => { @@ -62,9 +74,17 @@ pub(crate) fn version_check(&mut self) -> Result { match response { Response::ApiVersionCheck(version) => Ok(version), - Response::ExpandMacro { .. } | Response::ListMacros { .. } => { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), + } + } + + fn enable_rust_analyzer_spans(&mut self) -> Result { + let request = Request::SetSpanMode(crate::msg::SpanMode::RustAnalyzer); + let response = self.send_task(request)?; + + match response { + Response::SetSpanMode(span_mode) => Ok(span_mode), + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } @@ -78,9 +98,7 @@ pub(crate) fn find_proc_macros( match response { Response::ListMacros(it) => Ok(it), - Response::ExpandMacro { .. } | Response::ApiVersionCheck { .. 
} => { - Err(ServerError { message: "unexpected response".to_string(), io: None }) - } + _ => Err(ServerError { message: "unexpected response".to_string(), io: None }), } } diff --git a/crates/proc-macro-srv-cli/src/main.rs b/crates/proc-macro-srv-cli/src/main.rs index 50ce586fc42..f4e88ed437e 100644 --- a/crates/proc-macro-srv-cli/src/main.rs +++ b/crates/proc-macro-srv-cli/src/main.rs @@ -39,10 +39,17 @@ fn run() -> io::Result<()> { msg::Request::ListMacros { dylib_path } => { msg::Response::ListMacros(srv.list_macros(&dylib_path)) } - msg::Request::ExpandMacro(task) => msg::Response::ExpandMacro(srv.expand(task)), + msg::Request::ExpandMacro(task) => match srv.span_mode() { + msg::SpanMode::Id => msg::Response::ExpandMacro(srv.expand(task).map(|(it, _)| it)), + msg::SpanMode::RustAnalyzer => msg::Response::ExpandMacroSpans(srv.expand(task)), + }, msg::Request::ApiVersionCheck {} => { msg::Response::ApiVersionCheck(proc_macro_api::msg::CURRENT_API_VERSION) } + msg::Request::SetSpanMode(span_mode) => { + srv.set_span_mode(span_mode); + msg::Response::SetSpanMode(span_mode) + } }; write_response(res)? } diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index 99993f16e27..3bcc6e12cb1 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -26,6 +26,8 @@ stdx.workspace = true tt.workspace = true mbe.workspace = true paths.workspace = true +base-db.workspace = true +span.workspace = true proc-macro-api.workspace = true [dev-dependencies] diff --git a/crates/proc-macro-srv/src/dylib.rs b/crates/proc-macro-srv/src/dylib.rs index f20e6832f6e..52b4cced5f5 100644 --- a/crates/proc-macro-srv/src/dylib.rs +++ b/crates/proc-macro-srv/src/dylib.rs @@ -11,7 +11,10 @@ use memmap2::Mmap; use object::Object; use paths::AbsPath; -use proc_macro_api::{msg::TokenId, read_dylib_info, ProcMacroKind}; +use proc_macro::bridge; +use proc_macro_api::{read_dylib_info, ProcMacroKind}; + +use crate::ProcMacroSrvSpan; const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_"; @@ -147,15 +150,18 @@ pub fn new(lib: &Path) -> Result { Ok(Expander { inner: library }) } - pub fn expand( + pub fn expand( &self, macro_name: &str, - macro_body: &crate::tt::Subtree, - attributes: Option<&crate::tt::Subtree>, - def_site: TokenId, - call_site: TokenId, - mixed_site: TokenId, - ) -> Result { + macro_body: tt::Subtree, + attributes: Option>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result, String> + where + ::TokenStream: Default, + { let result = self .inner .proc_macros diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 56529f71d85..3f62ded24cf 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -32,36 +32,60 @@ }; use proc_macro_api::{ - msg::{self, ExpnGlobals, TokenId, CURRENT_API_VERSION}, + msg::{ + self, deserialize_span_data_index_map, serialize_span_data_index_map, ExpnGlobals, + SpanDataIndexMap, SpanMode, TokenId, CURRENT_API_VERSION, + }, ProcMacroKind, }; +use span::Span; -mod tt { - pub use proc_macro_api::msg::TokenId; - - pub use ::tt::*; - - pub type Subtree = ::tt::Subtree; - pub type TokenTree = ::tt::TokenTree; - pub type Delimiter = ::tt::Delimiter; - pub type Leaf = ::tt::Leaf; - pub type Literal = ::tt::Literal; - pub type Punct = ::tt::Punct; - pub type Ident = ::tt::Ident; -} +use crate::server::TokenStream; // see `build.rs` include!(concat!(env!("OUT_DIR"), "/rustc_version.rs")); +trait ProcMacroSrvSpan: tt::Span { + type Server: 
proc_macro::bridge::server::Server>; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server; +} + +impl ProcMacroSrvSpan for TokenId { + type Server = server::token_id::TokenIdServer; + + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site } + } +} +impl ProcMacroSrvSpan for Span { + type Server = server::rust_analyzer_span::RaSpanServer; + fn make_server(call_site: Self, def_site: Self, mixed_site: Self) -> Self::Server { + Self::Server { interner: &server::SYMBOL_INTERNER, call_site, def_site, mixed_site } + } +} + #[derive(Default)] pub struct ProcMacroSrv { expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>, + span_mode: SpanMode, } const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024; impl ProcMacroSrv { - pub fn expand(&mut self, task: msg::ExpandMacro) -> Result { + pub fn set_span_mode(&mut self, span_mode: SpanMode) { + self.span_mode = span_mode; + } + + pub fn span_mode(&self) -> SpanMode { + self.span_mode + } + + pub fn expand( + &mut self, + task: msg::ExpandMacro, + ) -> Result<(msg::FlatTree, Vec), msg::PanicMessage> { + let span_mode = self.span_mode; let expander = self.expander(task.lib.as_ref()).map_err(|err| { debug_assert!(false, "should list macros before asking to expand"); msg::PanicMessage(format!("failed to load macro: {err}")) @@ -71,10 +95,10 @@ pub fn expand(&mut self, task: msg::ExpandMacro) -> Result { let prev_working_dir = std::env::current_dir().ok(); - if let Err(err) = std::env::set_current_dir(&dir) { + if let Err(err) = std::env::set_current_dir(dir) { eprintln!("Failed to set the current working dir to {dir}. Error: {err:?}") } prev_working_dir @@ -83,38 +107,15 @@ pub fn expand(&mut self, task: msg::ExpandMacro) -> Result handle.join(), - Err(e) => std::panic::resume_unwind(Box::new(e)), - }; - - match res { - Ok(res) => res, - Err(e) => std::panic::resume_unwind(e), + let result = match span_mode { + SpanMode::Id => { + expand_id(task, expander, def_site, call_site, mixed_site).map(|it| (it, vec![])) } - }); + SpanMode::RustAnalyzer => { + expand_ra_span(task, expander, def_site, call_site, mixed_site) + } + }; prev_env.rollback(); @@ -155,6 +156,98 @@ fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> { } } +fn expand_id( + task: msg::ExpandMacro, + expander: &dylib::Expander, + def_site: usize, + call_site: usize, + mixed_site: usize, +) -> Result { + let def_site = TokenId(def_site as u32); + let call_site = TokenId(call_site as u32); + let mixed_site = TokenId(mixed_site as u32); + + let macro_body = task.macro_body.to_subtree_unresolved(CURRENT_API_VERSION); + let attributes = task.attributes.map(|it| it.to_subtree_unresolved(CURRENT_API_VERSION)); + let result = thread::scope(|s| { + let thread = thread::Builder::new() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) + .spawn_scoped(s, || { + expander + .expand( + &task.macro_name, + macro_body, + attributes, + def_site, + call_site, + mixed_site, + ) + .map(|it| msg::FlatTree::new_raw(&it, CURRENT_API_VERSION)) + }); + let res = match thread { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; + + match res { + Ok(res) => res, + Err(e) => std::panic::resume_unwind(e), + } + }); + result +} + +fn expand_ra_span( + task: msg::ExpandMacro, + expander: &dylib::Expander, + def_site: usize, + call_site: usize, + mixed_site: usize, +) -> Result<(msg::FlatTree, Vec), String> 
{ + let mut span_data_table = deserialize_span_data_index_map(&task.span_data_table); + + let def_site = span_data_table[def_site]; + let call_site = span_data_table[call_site]; + let mixed_site = span_data_table[mixed_site]; + + let macro_body = task.macro_body.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table); + let attributes = + task.attributes.map(|it| it.to_subtree_resolved(CURRENT_API_VERSION, &span_data_table)); + let result = thread::scope(|s| { + let thread = thread::Builder::new() + .stack_size(EXPANDER_STACK_SIZE) + .name(task.macro_name.clone()) + .spawn_scoped(s, || { + expander + .expand( + &task.macro_name, + macro_body, + attributes, + def_site, + call_site, + mixed_site, + ) + .map(|it| { + ( + msg::FlatTree::new(&it, CURRENT_API_VERSION, &mut span_data_table), + serialize_span_data_index_map(&span_data_table), + ) + }) + }); + let res = match thread { + Ok(handle) => handle.join(), + Err(e) => std::panic::resume_unwind(Box::new(e)), + }; + + match res { + Ok(res) => res, + Err(e) => std::panic::resume_unwind(e), + } + }); + result +} + pub struct PanicMessage { message: Option, } diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 716b85d096d..a75c0e619bc 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -2,9 +2,9 @@ use libloading::Library; use proc_macro::bridge; -use proc_macro_api::{msg::TokenId, ProcMacroKind, RustCInfo}; +use proc_macro_api::{ProcMacroKind, RustCInfo}; -use crate::{dylib::LoadProcMacroDylibError, server::SYMBOL_INTERNER, tt}; +use crate::{dylib::LoadProcMacroDylibError, ProcMacroSrvSpan}; pub(crate) struct ProcMacros { exported_macros: Vec, @@ -40,19 +40,19 @@ pub(crate) fn from_lib( Err(LoadProcMacroDylibError::AbiMismatch(info.version_string)) } - pub(crate) fn expand( + pub(crate) fn expand( &self, macro_name: &str, - macro_body: &tt::Subtree, - attributes: Option<&tt::Subtree>, - def_site: TokenId, - call_site: TokenId, - mixed_site: TokenId, - ) -> Result { - let parsed_body = crate::server::TokenStream::with_subtree(macro_body.clone()); + macro_body: tt::Subtree, + attributes: Option>, + def_site: S, + call_site: S, + mixed_site: S, + ) -> Result, crate::PanicMessage> { + let parsed_body = crate::server::TokenStream::with_subtree(macro_body); - let parsed_attributes = attributes.map_or(crate::server::TokenStream::new(), |attr| { - crate::server::TokenStream::with_subtree(attr.clone()) + let parsed_attributes = attributes.map_or_else(crate::server::TokenStream::new, |attr| { + crate::server::TokenStream::with_subtree(attr) }); for proc_macro in &self.exported_macros { @@ -62,12 +62,7 @@ pub(crate) fn expand( { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_body, false, ); @@ -78,12 +73,7 @@ pub(crate) fn expand( bridge::client::ProcMacro::Bang { name, client } if *name == macro_name => { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - call_site, - def_site, - mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_body, false, ); @@ -94,13 +84,7 @@ pub(crate) fn expand( bridge::client::ProcMacro::Attr { name, client } if *name == macro_name => { let res = client.run( &bridge::server::SameThread, - crate::server::RustAnalyzer { - interner: &SYMBOL_INTERNER, - - call_site, - def_site, 
- mixed_site, - }, + S::make_server(call_site, def_site, mixed_site), parsed_attributes, parsed_body, false, diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index 917d8a6e26a..1854322ddb5 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -8,226 +8,18 @@ //! //! FIXME: No span and source file information is implemented yet -use proc_macro::bridge::{self, server}; +use proc_macro::bridge; mod token_stream; -use proc_macro_api::msg::TokenId; pub use token_stream::TokenStream; -use token_stream::TokenStreamBuilder; +pub mod token_id; +pub mod rust_analyzer_span; mod symbol; pub use symbol::*; +use tt::Spacing; -use std::{ - iter, - ops::{Bound, Range}, -}; - -use crate::tt; - -type Group = tt::Subtree; -type TokenTree = tt::TokenTree; -#[allow(unused)] -type Punct = tt::Punct; -type Spacing = tt::Spacing; -#[allow(unused)] -type Literal = tt::Literal; -type Span = tt::TokenId; - -#[derive(Clone)] -pub struct SourceFile { - // FIXME stub -} - -pub struct FreeFunctions; - -pub struct RustAnalyzer { - // FIXME: store span information here. - pub(crate) interner: SymbolInternerRef, - pub call_site: TokenId, - pub def_site: TokenId, - pub mixed_site: TokenId, -} - -impl server::Types for RustAnalyzer { - type FreeFunctions = FreeFunctions; - type TokenStream = TokenStream; - type SourceFile = SourceFile; - type Span = Span; - type Symbol = Symbol; -} - -impl server::FreeFunctions for RustAnalyzer { - fn injected_env_var(&mut self, _var: &str) -> Option { - None - } - - fn track_env_var(&mut self, _var: &str, _value: Option<&str>) { - // FIXME: track env var accesses - // https://github.com/rust-lang/rust/pull/71858 - } - fn track_path(&mut self, _path: &str) {} - - fn literal_from_str( - &mut self, - s: &str, - ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix - Ok(bridge::Literal { - kind: bridge::LitKind::Err, - symbol: Symbol::intern(self.interner, s), - suffix: None, - span: self.call_site, - }) - } - - fn emit_diagnostic(&mut self, _: bridge::Diagnostic) { - // FIXME handle diagnostic - } -} - -impl server::TokenStream for RustAnalyzer { - fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { - stream.is_empty() - } - fn from_str(&mut self, src: &str) -> Self::TokenStream { - Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") - } - fn to_string(&mut self, stream: &Self::TokenStream) -> String { - stream.to_string() - } - fn from_token_tree( - &mut self, - tree: bridge::TokenTree, - ) -> Self::TokenStream { - match tree { - bridge::TokenTree::Group(group) => { - let group = Group { - delimiter: delim_to_internal(group.delimiter, group.span), - token_trees: match group.stream { - Some(stream) => stream.into_iter().collect(), - None => Vec::new(), - }, - }; - let tree = TokenTree::from(group); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Ident(ident) => { - let text = ident.sym.text(self.interner); - let text = - if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; - let ident: tt::Ident = tt::Ident { text, span: ident.span }; - let leaf = tt::Leaf::from(ident); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Literal(literal) => { - let literal = LiteralFormatter(literal); - let text = literal.with_stringify_parts(self.interner, |parts| { - ::tt::SmolStr::from_iter(parts.iter().copied()) - }); - - let literal = tt::Literal { text, span: literal.0.span 
}; - let leaf = tt::Leaf::from(literal); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - - bridge::TokenTree::Punct(p) => { - let punct = tt::Punct { - char: p.ch as char, - spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, - span: p.span, - }; - let leaf = tt::Leaf::from(punct); - let tree = TokenTree::from(leaf); - Self::TokenStream::from_iter(iter::once(tree)) - } - } - } - - fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { - Ok(self_.clone()) - } - - fn concat_trees( - &mut self, - base: Option, - trees: Vec>, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for tree in trees { - builder.push(self.from_token_tree(tree)); - } - builder.build() - } - - fn concat_streams( - &mut self, - base: Option, - streams: Vec, - ) -> Self::TokenStream { - let mut builder = TokenStreamBuilder::new(); - if let Some(base) = base { - builder.push(base); - } - for stream in streams { - builder.push(stream); - } - builder.build() - } - - fn into_trees( - &mut self, - stream: Self::TokenStream, - ) -> Vec> { - stream - .into_iter() - .map(|tree| match tree { - tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { - bridge::TokenTree::Ident(bridge::Ident { - sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), - is_raw: ident.text.starts_with("r#"), - span: ident.span, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { - bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Err, - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, - span: lit.span, - }) - } - tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { - bridge::TokenTree::Punct(bridge::Punct { - ch: punct.char as u8, - joint: punct.spacing == Spacing::Joint, - span: punct.span, - }) - } - tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { - delimiter: delim_to_external(subtree.delimiter), - stream: if subtree.token_trees.is_empty() { - None - } else { - Some(subtree.token_trees.into_iter().collect()) - }, - span: bridge::DelimSpan::from_single(subtree.delimiter.open), - }), - }) - .collect() - } -} - -fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { +fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { let kind = match d { proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis, proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace, @@ -237,7 +29,7 @@ fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan) -> tt::Delimiter { open: span.open, close: span.close, kind } } -fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter { +fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter { match d.kind { tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis, tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace, @@ -262,121 +54,9 @@ fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing { } } -impl server::SourceFile for RustAnalyzer { - // FIXME these are all stubs - fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { - true - } - fn path(&mut self, _file: &Self::SourceFile) -> String { - String::new() - } - fn is_real(&mut self, _file: &Self::SourceFile) -> bool { - true - } -} +struct LiteralFormatter(bridge::Literal); -impl server::Span for RustAnalyzer { - fn debug(&mut self, 
span: Self::Span) -> String { - format!("{:?}", span.0) - } - fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { - SourceFile {} - } - fn save_span(&mut self, _span: Self::Span) -> usize { - // FIXME stub - 0 - } - fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { - // FIXME stub - self.call_site - } - /// Recent feature, not yet in the proc_macro - /// - /// See PR: - /// https://github.com/rust-lang/rust/pull/55780 - fn source_text(&mut self, _span: Self::Span) -> Option { - None - } - - fn parent(&mut self, _span: Self::Span) -> Option { - // FIXME handle span - None - } - fn source(&mut self, span: Self::Span) -> Self::Span { - // FIXME handle span - span - } - fn byte_range(&mut self, _span: Self::Span) -> Range { - // FIXME handle span - Range { start: 0, end: 0 } - } - fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { - // Just return the first span again, because some macros will unwrap the result. - Some(first) - } - fn subspan( - &mut self, - span: Self::Span, - _start: Bound, - _end: Bound, - ) -> Option { - // Just return the span again, because some macros will unwrap the result. - Some(span) - } - fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { - // FIXME handle span - self.call_site - } - - fn end(&mut self, _self_: Self::Span) -> Self::Span { - self.call_site - } - - fn start(&mut self, _self_: Self::Span) -> Self::Span { - self.call_site - } - - fn line(&mut self, _span: Self::Span) -> usize { - // FIXME handle line - 0 - } - - fn column(&mut self, _span: Self::Span) -> usize { - // FIXME handle column - 0 - } -} - -impl server::Symbol for RustAnalyzer { - fn normalize_and_validate_ident(&mut self, string: &str) -> Result { - // FIXME: nfc-normalize and validate idents - Ok(::intern_symbol(string)) - } -} - -impl server::Server for RustAnalyzer { - fn globals(&mut self) -> bridge::ExpnGlobals { - bridge::ExpnGlobals { - def_site: self.def_site, - call_site: self.call_site, - mixed_site: self.mixed_site, - } - } - - fn intern_symbol(ident: &str) -> Self::Symbol { - // FIXME: should be `self.interner` once the proc-macro api allows it. - Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) - } - - fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { - // FIXME: should be `self.interner` once the proc-macro api allows it. - f(symbol.text(&SYMBOL_INTERNER).as_str()) - } -} - -struct LiteralFormatter(bridge::Literal); - -impl LiteralFormatter { +impl LiteralFormatter { /// Invokes the callback with a `&[&str]` consisting of each part of the /// literal's representation. 
This is done to allow the `ToString` and /// `Display` implementations to borrow references to symbol values, and @@ -427,66 +107,3 @@ fn with_symbol_and_suffix( f(symbol.as_str(), suffix.as_str()) } } - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_ra_server_to_string() { - let s = TokenStream { - token_trees: vec![ - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "struct".into(), - span: tt::TokenId(0), - })), - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "T".into(), - span: tt::TokenId(0), - })), - tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId(0), - close: tt::TokenId(0), - kind: tt::DelimiterKind::Brace, - }, - token_trees: vec![], - }), - ], - }; - - assert_eq!(s.to_string(), "struct T {}"); - } - - #[test] - fn test_ra_server_from_str() { - let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { - delimiter: tt::Delimiter { - open: tt::TokenId(0), - close: tt::TokenId(0), - kind: tt::DelimiterKind::Parenthesis, - }, - token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "a".into(), - span: tt::TokenId(0), - }))], - }); - - let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); - assert_eq!(t1.token_trees.len(), 1); - assert_eq!(t1.token_trees[0], subtree_paren_a); - - let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); - assert_eq!(t2.token_trees.len(), 2); - assert_eq!(t2.token_trees[0], subtree_paren_a); - - let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); - assert_eq!( - underscore.token_trees[0], - tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: "_".into(), - span: tt::TokenId(0), - })) - ); - } -} diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs new file mode 100644 index 00000000000..92e1ec0a948 --- /dev/null +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -0,0 +1,327 @@ +use std::{ + iter, + ops::{Bound, Range}, +}; + +use proc_macro::bridge::{self, server}; +use span::Span; + +use crate::server::{ + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, +}; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree; + pub type TokenTree = ::tt::TokenTree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; +} +type Group = tt::Subtree; +type TokenTree = tt::TokenTree; +type Punct = tt::Punct; +type Spacing = tt::Spacing; +type Literal = tt::Literal; +type TokenStream = crate::server::TokenStream; + +#[derive(Clone)] +pub struct SourceFile; +pub struct FreeFunctions; + +pub struct RaSpanServer { + pub(crate) interner: SymbolInternerRef, + pub call_site: Span, + pub def_site: Span, + pub mixed_site: Span, +} + +impl server::Types for RaSpanServer { + type FreeFunctions = FreeFunctions; + type TokenStream = TokenStream; + type SourceFile = SourceFile; + type Span = Span; + type Symbol = Symbol; +} + +impl server::FreeFunctions for RaSpanServer { + fn track_env_var(&mut self, _var: &str, _value: Option<&str>) { + // FIXME: track env var accesses + // https://github.com/rust-lang/rust/pull/71858 + } + fn track_path(&mut self, _path: &str) {} + + fn literal_from_str( + &mut self, + s: &str, + ) -> Result, ()> { + // FIXME: keep track of LitKind and Suffix + Ok(bridge::Literal { + kind: bridge::LitKind::Err, + symbol: 
Symbol::intern(self.interner, s), + suffix: None, + span: self.call_site, + }) + } + + fn emit_diagnostic(&mut self, _: bridge::Diagnostic) { + // FIXME handle diagnostic + } +} + +impl server::TokenStream for RaSpanServer { + fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let group = Group { + delimiter: delim_to_internal(group.delimiter, group.span), + token_trees: match group.stream { + Some(stream) => stream.into_iter().collect(), + None => Vec::new(), + }, + }; + let tree = TokenTree::from(group); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Ident(ident) => { + let text = ident.sym.text(self.interner); + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(self.interner, |parts| { + ::tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, span: literal.0.span }; + let leaf = tt::Leaf::from(literal); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, + span: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + } + } + + fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { + Ok(self_.clone()) + } + + fn concat_trees( + &mut self, + base: Option, + trees: Vec>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for tree in trees { + builder.push(self.from_token_tree(tree)); + } + builder.build() + } + + fn concat_streams( + &mut self, + base: Option, + streams: Vec, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for stream in streams { + builder.push(stream); + } + builder.build() + } + + fn into_trees( + &mut self, + stream: Self::TokenStream, + ) -> Vec> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), + span: ident.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, &lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + ch: punct.char as u8, + joint: punct.spacing == Spacing::Joint, + span: punct.span, + }) + } + 
tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: if subtree.token_trees.is_empty() { + None + } else { + Some(subtree.token_trees.into_iter().collect()) + }, + span: bridge::DelimSpan::from_single(subtree.delimiter.open), + }), + }) + .collect() + } +} + +impl server::SourceFile for RaSpanServer { + // FIXME these are all stubs + fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { + true + } + fn path(&mut self, _file: &Self::SourceFile) -> String { + String::new() + } + fn is_real(&mut self, _file: &Self::SourceFile) -> bool { + true + } +} + +impl server::Span for RaSpanServer { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span) + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + SourceFile {} + } + fn save_span(&mut self, _span: Self::Span) -> usize { + // FIXME stub + 0 + } + fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { + // FIXME stub + self.call_site + } + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option { + None + } + + fn parent(&mut self, _span: Self::Span) -> Option { + // FIXME handle span + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + // FIXME handle span + span + } + fn byte_range(&mut self, _span: Self::Span) -> Range { + // FIXME handle span + Range { start: 0, end: 0 } + } + fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { + // Just return the first span again, because some macros will unwrap the result. + Some(first) + } + fn subspan( + &mut self, + span: Self::Span, + _start: Bound, + _end: Bound, + ) -> Option { + // Just return the span again, because some macros will unwrap the result. + Some(span) + } + fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { + // FIXME handle span + self.call_site + } + + fn end(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn start(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn line(&mut self, _span: Self::Span) -> usize { + // FIXME handle line + 0 + } + + fn column(&mut self, _span: Self::Span) -> usize { + // FIXME handle column + 0 + } +} + +impl server::Symbol for RaSpanServer { + fn normalize_and_validate_ident(&mut self, string: &str) -> Result { + // FIXME: nfc-normalize and validate idents + Ok(::intern_symbol(string)) + } +} + +impl server::Server for RaSpanServer { + fn globals(&mut self) -> bridge::ExpnGlobals { + bridge::ExpnGlobals { + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, + } + } + + fn intern_symbol(ident: &str) -> Self::Symbol { + // FIXME: should be `self.interner` once the proc-macro api allows it. + Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + } + + fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { + // FIXME: should be `self.interner` once the proc-macro api allows it. 
+ f(symbol.text(&SYMBOL_INTERNER).as_str()) + } +} diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs new file mode 100644 index 00000000000..01ca7488fa8 --- /dev/null +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -0,0 +1,375 @@ +use std::{ + iter, + ops::{Bound, Range}, +}; + +use proc_macro::bridge::{self, server}; + +use crate::server::{ + delim_to_external, delim_to_internal, token_stream::TokenStreamBuilder, LiteralFormatter, + Symbol, SymbolInternerRef, SYMBOL_INTERNER, +}; +mod tt { + pub use proc_macro_api::msg::TokenId; + + pub use ::tt::*; + + pub type Subtree = ::tt::Subtree; + pub type TokenTree = ::tt::TokenTree; + pub type Leaf = ::tt::Leaf; + pub type Literal = ::tt::Literal; + pub type Punct = ::tt::Punct; + pub type Ident = ::tt::Ident; +} +type Group = tt::Subtree; +type TokenTree = tt::TokenTree; +#[allow(unused)] +type Punct = tt::Punct; +type Spacing = tt::Spacing; +#[allow(unused)] +type Literal = tt::Literal; +type Span = tt::TokenId; +type TokenStream = crate::server::TokenStream; + +#[derive(Clone)] +pub struct SourceFile; +pub struct FreeFunctions; + +pub struct TokenIdServer { + pub(crate) interner: SymbolInternerRef, + pub call_site: Span, + pub def_site: Span, + pub mixed_site: Span, +} + +impl server::Types for TokenIdServer { + type FreeFunctions = FreeFunctions; + type TokenStream = TokenStream; + type SourceFile = SourceFile; + type Span = Span; + type Symbol = Symbol; +} + +impl server::FreeFunctions for TokenIdServer { + fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {} + fn track_path(&mut self, _path: &str) {} + fn literal_from_str( + &mut self, + s: &str, + ) -> Result, ()> { + // FIXME: keep track of LitKind and Suffix + Ok(bridge::Literal { + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, s), + suffix: None, + span: self.call_site, + }) + } + + fn emit_diagnostic(&mut self, _: bridge::Diagnostic) {} +} + +impl server::TokenStream for TokenIdServer { + fn is_empty(&mut self, stream: &Self::TokenStream) -> bool { + stream.is_empty() + } + fn from_str(&mut self, src: &str) -> Self::TokenStream { + Self::TokenStream::from_str(src, self.call_site).expect("cannot parse string") + } + fn to_string(&mut self, stream: &Self::TokenStream) -> String { + stream.to_string() + } + fn from_token_tree( + &mut self, + tree: bridge::TokenTree, + ) -> Self::TokenStream { + match tree { + bridge::TokenTree::Group(group) => { + let group = Group { + delimiter: delim_to_internal(group.delimiter, group.span), + token_trees: match group.stream { + Some(stream) => stream.into_iter().collect(), + None => Vec::new(), + }, + }; + let tree = TokenTree::from(group); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Ident(ident) => { + let text = ident.sym.text(self.interner); + let text = + if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text }; + let ident: tt::Ident = tt::Ident { text, span: ident.span }; + let leaf = tt::Leaf::from(ident); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + bridge::TokenTree::Literal(literal) => { + let literal = LiteralFormatter(literal); + let text = literal.with_stringify_parts(self.interner, |parts| { + ::tt::SmolStr::from_iter(parts.iter().copied()) + }); + + let literal = tt::Literal { text, span: literal.0.span }; + let leaf = tt::Leaf::from(literal); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + + 
bridge::TokenTree::Punct(p) => { + let punct = tt::Punct { + char: p.ch as char, + spacing: if p.joint { Spacing::Joint } else { Spacing::Alone }, + span: p.span, + }; + let leaf = tt::Leaf::from(punct); + let tree = TokenTree::from(leaf); + Self::TokenStream::from_iter(iter::once(tree)) + } + } + } + + fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result { + Ok(self_.clone()) + } + + fn concat_trees( + &mut self, + base: Option, + trees: Vec>, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for tree in trees { + builder.push(self.from_token_tree(tree)); + } + builder.build() + } + + fn concat_streams( + &mut self, + base: Option, + streams: Vec, + ) -> Self::TokenStream { + let mut builder = TokenStreamBuilder::new(); + if let Some(base) = base { + builder.push(base); + } + for stream in streams { + builder.push(stream); + } + builder.build() + } + + fn into_trees( + &mut self, + stream: Self::TokenStream, + ) -> Vec> { + stream + .into_iter() + .map(|tree| match tree { + tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => { + bridge::TokenTree::Ident(bridge::Ident { + sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")), + is_raw: ident.text.starts_with("r#"), + span: ident.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { + bridge::TokenTree::Literal(bridge::Literal { + // FIXME: handle literal kinds + kind: bridge::LitKind::Err, + symbol: Symbol::intern(self.interner, &lit.text), + // FIXME: handle suffixes + suffix: None, + span: lit.span, + }) + } + tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { + bridge::TokenTree::Punct(bridge::Punct { + ch: punct.char as u8, + joint: punct.spacing == Spacing::Joint, + span: punct.span, + }) + } + tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group { + delimiter: delim_to_external(subtree.delimiter), + stream: if subtree.token_trees.is_empty() { + None + } else { + Some(subtree.token_trees.into_iter().collect()) + }, + span: bridge::DelimSpan::from_single(subtree.delimiter.open), + }), + }) + .collect() + } +} + +impl server::SourceFile for TokenIdServer { + fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool { + true + } + fn path(&mut self, _file: &Self::SourceFile) -> String { + String::new() + } + fn is_real(&mut self, _file: &Self::SourceFile) -> bool { + true + } +} + +impl server::Span for TokenIdServer { + fn debug(&mut self, span: Self::Span) -> String { + format!("{:?}", span.0) + } + fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile { + SourceFile {} + } + fn save_span(&mut self, _span: Self::Span) -> usize { + 0 + } + fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span { + self.call_site + } + /// Recent feature, not yet in the proc_macro + /// + /// See PR: + /// https://github.com/rust-lang/rust/pull/55780 + fn source_text(&mut self, _span: Self::Span) -> Option { + None + } + + fn parent(&mut self, _span: Self::Span) -> Option { + None + } + fn source(&mut self, span: Self::Span) -> Self::Span { + span + } + fn byte_range(&mut self, _span: Self::Span) -> Range { + Range { start: 0, end: 0 } + } + fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option { + // Just return the first span again, because some macros will unwrap the result. + Some(first) + } + fn subspan( + &mut self, + span: Self::Span, + _start: Bound, + _end: Bound, + ) -> Option { + // Just return the span again, because some macros will unwrap the result. 
+ Some(span) + } + fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span { + self.call_site + } + + fn end(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn start(&mut self, _self_: Self::Span) -> Self::Span { + self.call_site + } + + fn line(&mut self, _span: Self::Span) -> usize { + 0 + } + + fn column(&mut self, _span: Self::Span) -> usize { + 0 + } +} + +impl server::Symbol for TokenIdServer { + fn normalize_and_validate_ident(&mut self, string: &str) -> Result { + // FIXME: nfc-normalize and validate idents + Ok(::intern_symbol(string)) + } +} + +impl server::Server for TokenIdServer { + fn globals(&mut self) -> bridge::ExpnGlobals { + bridge::ExpnGlobals { + def_site: self.def_site, + call_site: self.call_site, + mixed_site: self.mixed_site, + } + } + + fn intern_symbol(ident: &str) -> Self::Symbol { + Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident)) + } + + fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) { + f(symbol.text(&SYMBOL_INTERNER).as_str()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_ra_server_to_string() { + let s = TokenStream { + token_trees: vec![ + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "struct".into(), + span: tt::TokenId(0), + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "T".into(), + span: tt::TokenId(0), + })), + tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId(0), + close: tt::TokenId(0), + kind: tt::DelimiterKind::Brace, + }, + token_trees: vec![], + }), + ], + }; + + assert_eq!(s.to_string(), "struct T {}"); + } + + #[test] + fn test_ra_server_from_str() { + let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter { + open: tt::TokenId(0), + close: tt::TokenId(0), + kind: tt::DelimiterKind::Parenthesis, + }, + token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "a".into(), + span: tt::TokenId(0), + }))], + }); + + let t1 = TokenStream::from_str("(a)", tt::TokenId(0)).unwrap(); + assert_eq!(t1.token_trees.len(), 1); + assert_eq!(t1.token_trees[0], subtree_paren_a); + + let t2 = TokenStream::from_str("(a);", tt::TokenId(0)).unwrap(); + assert_eq!(t2.token_trees.len(), 2); + assert_eq!(t2.token_trees[0], subtree_paren_a); + + let underscore = TokenStream::from_str("_", tt::TokenId(0)).unwrap(); + assert_eq!( + underscore.token_trees[0], + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: "_".into(), + span: tt::TokenId(0), + })) + ); + } +} diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 36be8825038..93f2e01addd 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -1,20 +1,24 @@ //! 
TokenStream implementation used by sysroot ABI -use proc_macro_api::msg::TokenId; +use tt::TokenTree; -use crate::tt::{self, TokenTree}; - -#[derive(Debug, Default, Clone)] -pub struct TokenStream { - pub(super) token_trees: Vec, +#[derive(Debug, Clone)] +pub struct TokenStream { + pub(super) token_trees: Vec>, } -impl TokenStream { +impl Default for TokenStream { + fn default() -> Self { + Self { token_trees: vec![] } + } +} + +impl TokenStream { pub(crate) fn new() -> Self { - TokenStream::default() + TokenStream { token_trees: vec![] } } - pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { + pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { if subtree.delimiter.kind != tt::DelimiterKind::Invisible { TokenStream { token_trees: vec![TokenTree::Subtree(subtree)] } } else { @@ -22,7 +26,10 @@ pub(crate) fn with_subtree(subtree: tt::Subtree) -> Self { } } - pub(crate) fn into_subtree(self, call_site: TokenId) -> tt::Subtree { + pub(crate) fn into_subtree(self, call_site: S) -> tt::Subtree + where + S: Copy, + { tt::Subtree { delimiter: tt::Delimiter { open: call_site, @@ -39,37 +46,37 @@ pub(super) fn is_empty(&self) -> bool { } /// Creates a token stream containing a single token tree. -impl From for TokenStream { - fn from(tree: TokenTree) -> TokenStream { +impl From> for TokenStream { + fn from(tree: TokenTree) -> TokenStream { TokenStream { token_trees: vec![tree] } } } /// Collects a number of token trees into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(trees: I) -> Self { +impl FromIterator> for TokenStream { + fn from_iter>>(trees: I) -> Self { trees.into_iter().map(TokenStream::from).collect() } } /// A "flattening" operation on token streams, collects token trees /// from multiple token streams into a single stream. -impl FromIterator for TokenStream { - fn from_iter>(streams: I) -> Self { +impl FromIterator> for TokenStream { + fn from_iter>>(streams: I) -> Self { let mut builder = TokenStreamBuilder::new(); streams.into_iter().for_each(|stream| builder.push(stream)); builder.build() } } -impl Extend for TokenStream { - fn extend>(&mut self, trees: I) { +impl Extend> for TokenStream { + fn extend>>(&mut self, trees: I) { self.extend(trees.into_iter().map(TokenStream::from)); } } -impl Extend for TokenStream { - fn extend>(&mut self, streams: I) { +impl Extend> for TokenStream { + fn extend>>(&mut self, streams: I) { for item in streams { for tkn in item { match tkn { @@ -87,22 +94,21 @@ fn extend>(&mut self, streams: I) { } } -pub(super) struct TokenStreamBuilder { - acc: TokenStream, +pub(super) struct TokenStreamBuilder { + acc: TokenStream, } /// pub(super)lic implementation details for the `TokenStream` type, such as iterators. pub(super) mod token_stream { - use proc_macro_api::msg::TokenId; - use super::{tt, TokenStream, TokenTree}; + use super::{TokenStream, TokenTree}; /// An iterator over `TokenStream`'s `TokenTree`s. /// The iteration is "shallow", e.g., the iterator doesn't recurse into delimited groups, /// and returns whole groups as token trees. - impl IntoIterator for TokenStream { - type Item = TokenTree; - type IntoIter = std::vec::IntoIter; + impl IntoIterator for TokenStream { + type Item = TokenTree; + type IntoIter = std::vec::IntoIter>; fn into_iter(self) -> Self::IntoIter { self.token_trees.into_iter() @@ -119,53 +125,52 @@ fn into_iter(self) -> Self::IntoIter { /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. 
#[rustfmt::skip] - impl /*FromStr for*/ TokenStream { + impl /*FromStr for*/ TokenStream { // type Err = LexError; - pub(crate) fn from_str(src: &str, call_site: TokenId) -> Result { + pub(crate) fn from_str(src: &str, call_site: S) -> Result, LexError> { let subtree = mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; - let subtree = subtree_replace_token_ids_with_call_site(subtree,call_site); Ok(TokenStream::with_subtree(subtree)) } } - impl ToString for TokenStream { + impl ToString for TokenStream { fn to_string(&self) -> String { ::tt::pretty(&self.token_trees) } } - fn subtree_replace_token_ids_with_call_site( - subtree: tt::Subtree, - call_site: TokenId, - ) -> tt::Subtree { + fn subtree_replace_spans_with_call_site( + subtree: tt::Subtree, + call_site: S, + ) -> tt::Subtree { tt::Subtree { delimiter: tt::Delimiter { open: call_site, close: call_site, ..subtree.delimiter }, token_trees: subtree .token_trees .into_iter() - .map(|it| token_tree_replace_token_ids_with_call_site(it, call_site)) + .map(|it| token_tree_replace_spans_with_call_site(it, call_site)) .collect(), } } - fn token_tree_replace_token_ids_with_call_site( - tt: tt::TokenTree, - call_site: TokenId, - ) -> tt::TokenTree { + fn token_tree_replace_spans_with_call_site( + tt: tt::TokenTree, + call_site: S, + ) -> tt::TokenTree { match tt { tt::TokenTree::Leaf(leaf) => { - tt::TokenTree::Leaf(leaf_replace_token_ids_with_call_site(leaf, call_site)) + tt::TokenTree::Leaf(leaf_replace_spans_with_call_site(leaf, call_site)) } tt::TokenTree::Subtree(subtree) => { - tt::TokenTree::Subtree(subtree_replace_token_ids_with_call_site(subtree, call_site)) + tt::TokenTree::Subtree(subtree_replace_spans_with_call_site(subtree, call_site)) } } } - fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> tt::Leaf { + fn leaf_replace_spans_with_call_site(leaf: tt::Leaf, call_site: S) -> tt::Leaf { match leaf { tt::Leaf::Literal(lit) => tt::Leaf::Literal(tt::Literal { span: call_site, ..lit }), tt::Leaf::Punct(punct) => tt::Leaf::Punct(tt::Punct { span: call_site, ..punct }), @@ -174,16 +179,16 @@ fn leaf_replace_token_ids_with_call_site(leaf: tt::Leaf, call_site: TokenId) -> } } -impl TokenStreamBuilder { - pub(super) fn new() -> TokenStreamBuilder { +impl TokenStreamBuilder { + pub(super) fn new() -> TokenStreamBuilder { TokenStreamBuilder { acc: TokenStream::new() } } - pub(super) fn push(&mut self, stream: TokenStream) { + pub(super) fn push(&mut self, stream: TokenStream) { self.acc.extend(stream.into_iter()) } - pub(super) fn build(self) -> TokenStream { + pub(super) fn build(self) -> TokenStream { self.acc } } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index b04e3ca19ac..1f4d1cf74ea 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -8,7 +8,7 @@ #[test] fn test_derive_empty() { - assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"]); + assert_expand("DeriveEmpty", r#"struct S;"#, expect!["SUBTREE $$ 1 1"], expect!["SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"]); } #[test] @@ -23,6 +23,13 @@ fn test_derive_error() { SUBTREE () 1 1 LITERAL "#[derive(DeriveError)] struct S ;" 1 PUNCH ; [alone] 1"##]], + expect![[r##" + SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } 
SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT compile_error SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ! [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE () SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "#[derive(DeriveError)] struct S ;" SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH ; [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]], ); } @@ -40,6 +47,15 @@ fn test_fn_like_macro_noop() { LITERAL 1 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT ident SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 0 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 1 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE [] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -53,6 +69,11 @@ fn test_fn_like_macro_clone_ident_subtree() { IDENT ident 1 PUNCH , [alone] 1 SUBTREE [] 1 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT ident SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + SUBTREE [] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -64,6 +85,9 @@ fn test_fn_like_macro_clone_raw_ident() { expect![[r#" SUBTREE $$ 1 1 IDENT r#async 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + IDENT r#async SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], ); } @@ -81,6 +105,15 @@ fn test_fn_like_mk_literals() { LITERAL 3.14 1 LITERAL 123i64 1 LITERAL 123 1"#]], + expect![[r#" + SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL b"byte_string" SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 'c' SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "string" SpanData { range: 
+            LITERAL 3.14f64 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 3.14 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 123i64 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 123 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
     );
 }
 
@@ -93,6 +126,10 @@ fn test_fn_like_mk_idents() {
             SUBTREE $$ 1 1
             IDENT standard 1
             IDENT r#raw 1"#]],
+        expect![[r#"
+            SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            IDENT standard SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            IDENT r#raw SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
     );
 }
 
@@ -113,6 +150,18 @@ fn test_fn_like_macro_clone_literals() {
             LITERAL 3.14f32 1
             PUNCH , [alone] 1
             LITERAL "hello bridge" 1"#]],
+        expect![[r#"
+            SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 1u16 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 2_u32 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH - [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 4i64 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL 3.14f32 SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH , [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL "hello bridge" SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]],
     );
 }
 
@@ -132,6 +181,13 @@ fn test_attr_macro() {
             SUBTREE () 1 1
             LITERAL "#[attr_error(some arguments)] mod m {}" 1
             PUNCH ; [alone] 1"##]],
+        expect![[r##"
+            SUBTREE $$ SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            IDENT compile_error SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH ! [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            SUBTREE () SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            LITERAL "#[attr_error(some arguments)] mod m {}" SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }
+            PUNCH ; [alone] SpanData { range: 52..77, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"##]],
     );
 }
 
diff --git a/crates/proc-macro-srv/src/tests/utils.rs b/crates/proc-macro-srv/src/tests/utils.rs
index c12096d140c..9386e00117e 100644
--- a/crates/proc-macro-srv/src/tests/utils.rs
+++ b/crates/proc-macro-srv/src/tests/utils.rs
@@ -1,11 +1,16 @@
 //! utils used in proc-macro tests
 
+use base_db::{
+    span::{ErasedFileAstId, SpanAnchor, SpanData, SyntaxContextId},
+    FileId,
+};
 use expect_test::Expect;
-use proc_macro_api::msg::TokenId;
+use proc_macro_api::msg::{SpanDataIndexMap, TokenId};
+use tt::TextRange;
 
 use crate::{dylib, proc_macro_test_dylib_path, ProcMacroSrv};
 
-fn parse_string(code: &str, call_site: TokenId) -> Option<crate::server::TokenStream> {
+fn parse_string<S: tt::Span>(code: &str, call_site: S) -> Option<crate::server::TokenStream<S>> {
     // This is a bit strange. We need to parse a string into a token stream into
     // order to create a tt::SubTree from it in fixtures. `into_subtree` is
     // implemented by all the ABIs we have so we arbitrarily choose one ABI to
@@ -15,34 +20,73 @@ fn parse_string(code: &str, call_site: TokenId) -> Option
-fn assert_expand_impl(macro_name: &str, input: &str, attr: Option<&str>, expect: Expect) {
+fn assert_expand_impl(
+    macro_name: &str,
+    input: &str,
+    attr: Option<&str>,
+    expect: Expect,
+    expect_s: Expect,
+) {
+    let path = proc_macro_test_dylib_path();
+    let expander = dylib::Expander::new(&path).unwrap();
+
     let def_site = TokenId(0);
     let call_site = TokenId(1);
     let mixed_site = TokenId(2);
-    let path = proc_macro_test_dylib_path();
-    let expander = dylib::Expander::new(&path).unwrap();
-    let fixture = parse_string(input, call_site).unwrap();
-    let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
+    let input_ts = parse_string(input, call_site).unwrap();
+    let attr_ts = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
 
     let res = expander
         .expand(
             macro_name,
-            &fixture.into_subtree(call_site),
-            attr.as_ref(),
+            input_ts.into_subtree(call_site),
+            attr_ts,
             def_site,
             call_site,
             mixed_site,
         )
         .unwrap();
     expect.assert_eq(&format!("{res:?}"));
+
+    let def_site = SpanData {
+        range: TextRange::new(0.into(), 150.into()),
+        anchor: SpanAnchor {
+            file_id: FileId::from_raw(41),
+            ast_id: ErasedFileAstId::from_raw(From::from(1)),
+        },
+        ctx: SyntaxContextId::ROOT,
+    };
+    let call_site = SpanData {
+        range: TextRange::new(52.into(), 77.into()),
+        anchor: SpanAnchor {
+            file_id: FileId::from_raw(42),
+            ast_id: ErasedFileAstId::from_raw(From::from(2)),
+        },
+        ctx: SyntaxContextId::ROOT,
+    };
+    let mixed_site = call_site;
+
+    let fixture = parse_string(input, call_site).unwrap();
+    let attr = attr.map(|attr| parse_string(attr, call_site).unwrap().into_subtree(call_site));
+
+    let res = expander
+        .expand(macro_name, fixture.into_subtree(call_site), attr, def_site, call_site, mixed_site)
+        .unwrap();
+    expect_s.assert_eq(&format!("{res:?}"));
 }
 
 pub(crate) fn list() -> Vec<String> {
diff --git a/crates/span/src/lib.rs b/crates/span/src/lib.rs
index 09af34ce7e0..7341a529a65 100644
--- a/crates/span/src/lib.rs
+++ b/crates/span/src/lib.rs
@@ -97,6 +97,14 @@ impl SyntaxContextId {
     pub fn is_root(self) -> bool {
         self == Self::ROOT
     }
+
+    pub fn into_u32(self) -> u32 {
+        self.0.as_u32()
+    }
+
+    pub fn from_u32(u32: u32) -> Self {
+        Self(InternId::from(u32))
+    }
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
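
Note: the `SyntaxContextId::into_u32`/`from_u32` pair added in crates/span/src/lib.rs above exposes the context id as a plain `u32`. A minimal, illustrative round-trip sketch, not part of the patch itself; it assumes `SyntaxContextId` is in scope and relies only on the methods and the `PartialEq`/`Debug` behaviour visible in this diff:

    // Hypothetical usage sketch, mirroring the ROOT context used by the test
    // fixtures in crates/proc-macro-srv/src/tests/utils.rs above.
    let ctx = SyntaxContextId::ROOT;
    let raw: u32 = ctx.into_u32();                   // lower the context id to a raw u32
    assert_eq!(SyntaxContextId::from_u32(raw), ctx); // recover the same context id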