//! proc-macro server implementation
//!
//! Based on idea from <https://github.com/fedochet/rust-proc-macro-expander>
//! The lib-proc-macro server backend is `TokenStream`-agnostic, such that
//! we could provide any `TokenStream` implementation.
//! fedochet's original implementation uses proc-macro2 as its backend; we use
//! `tt` instead for better integration with rust-analyzer.
//!
//! FIXME: No span and source file information is implemented yet
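//!
//! As an illustrative sketch only (exact whitespace of the rendered output is
//! not guaranteed), the `TokenStream` used by this server can be parsed from
//! and rendered back to source text:
//!
//! ```ignore
//! use std::str::FromStr;
//!
//! let stream = TokenStream::from_str("struct S;").unwrap();
//! let rendered = stream.to_string();
//! ```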

use proc_macro::bridge::{self, server};

mod token_stream;
pub use token_stream::TokenStream;
use token_stream::TokenStreamBuilder;

mod symbol;
pub use symbol::*;

use std::ops::{Bound, Range};

use crate::tt;

type Group = tt::Subtree;
type TokenTree = tt::TokenTree;
#[allow(unused)]
type Punct = tt::Punct;
type Spacing = tt::Spacing;
#[allow(unused)]
type Literal = tt::Literal;
type Span = tt::TokenId;

#[derive(Clone)]
pub struct SourceFile {
    // FIXME stub
}

pub struct FreeFunctions;

pub struct RustAnalyzer {
    // FIXME: store span information here.
    pub(crate) interner: SymbolInternerRef,
}

impl server::Types for RustAnalyzer {
    type FreeFunctions = FreeFunctions;
    type TokenStream = TokenStream;
    type SourceFile = SourceFile;
    type Span = Span;
    type Symbol = Symbol;
}

impl server::FreeFunctions for RustAnalyzer {
    fn track_env_var(&mut self, _var: &str, _value: Option<&str>) {
        // FIXME: track env var accesses
        // https://github.com/rust-lang/rust/pull/71858
    }
    fn track_path(&mut self, _path: &str) {}

    fn literal_from_str(
        &mut self,
        s: &str,
    ) -> Result<bridge::Literal<Self::Span, Self::Symbol>, ()> {
        // FIXME: keep track of LitKind and Suffix
        Ok(bridge::Literal {
            kind: bridge::LitKind::Err,
            symbol: Symbol::intern(self.interner, s),
            suffix: None,
            span: tt::TokenId::unspecified(),
        })
    }

    fn emit_diagnostic(&mut self, _: bridge::Diagnostic<Self::Span>) {
        // FIXME handle diagnostic
    }
}

impl server::TokenStream for RustAnalyzer {
    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
        stream.is_empty()
    }
    fn from_str(&mut self, src: &str) -> Self::TokenStream {
        use std::str::FromStr;

        Self::TokenStream::from_str(src).expect("cannot parse string")
    }
    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
        stream.to_string()
    }
    fn from_token_tree(
        &mut self,
        tree: bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>,
    ) -> Self::TokenStream {
        match tree {
            bridge::TokenTree::Group(group) => {
                let group = Group {
                    delimiter: delim_to_internal(group.delimiter, group.span),
                    token_trees: match group.stream {
                        Some(stream) => stream.into_iter().collect(),
                        None => Vec::new(),
                    },
                };
                let tree = TokenTree::from(group);
                Self::TokenStream::from_iter(vec![tree])
            }

            bridge::TokenTree::Ident(ident) => {
                let text = ident.sym.text(self.interner);
                let text =
                    if ident.is_raw { ::tt::SmolStr::from_iter(["r#", &text]) } else { text };
                let ident: tt::Ident = tt::Ident { text, span: ident.span };
                let leaf = tt::Leaf::from(ident);
                let tree = TokenTree::from(leaf);
                Self::TokenStream::from_iter(vec![tree])
            }

            bridge::TokenTree::Literal(literal) => {
                let literal = LiteralFormatter(literal);
                let text = literal.with_stringify_parts(self.interner, |parts| {
                    ::tt::SmolStr::from_iter(parts.iter().copied())
                });

                let literal = tt::Literal { text, span: literal.0.span };
                let leaf = tt::Leaf::from(literal);
                let tree = TokenTree::from(leaf);
                Self::TokenStream::from_iter(vec![tree])
            }

            bridge::TokenTree::Punct(p) => {
                let punct = tt::Punct {
                    char: p.ch as char,
                    spacing: if p.joint { Spacing::Joint } else { Spacing::Alone },
                    span: p.span,
                };
                let leaf = tt::Leaf::from(punct);
                let tree = TokenTree::from(leaf);
                Self::TokenStream::from_iter(vec![tree])
            }
        }
    }

    fn expand_expr(&mut self, self_: &Self::TokenStream) -> Result<Self::TokenStream, ()> {
        Ok(self_.clone())
    }

    fn concat_trees(
        &mut self,
        base: Option<Self::TokenStream>,
        trees: Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>>,
    ) -> Self::TokenStream {
        let mut builder = TokenStreamBuilder::new();
        if let Some(base) = base {
            builder.push(base);
        }
        for tree in trees {
            builder.push(self.from_token_tree(tree));
        }
        builder.build()
    }

    fn concat_streams(
        &mut self,
        base: Option<Self::TokenStream>,
        streams: Vec<Self::TokenStream>,
    ) -> Self::TokenStream {
        let mut builder = TokenStreamBuilder::new();
        if let Some(base) = base {
            builder.push(base);
        }
        for stream in streams {
            builder.push(stream);
        }
        builder.build()
    }

    fn into_trees(
        &mut self,
        stream: Self::TokenStream,
    ) -> Vec<bridge::TokenTree<Self::TokenStream, Self::Span, Self::Symbol>> {
        stream
            .into_iter()
            .map(|tree| match tree {
                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
                    bridge::TokenTree::Ident(bridge::Ident {
                        sym: Symbol::intern(self.interner, ident.text.trim_start_matches("r#")),
                        is_raw: ident.text.starts_with("r#"),
                        span: ident.span,
                    })
                }
                tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
                    bridge::TokenTree::Literal(bridge::Literal {
                        // FIXME: handle literal kinds
                        kind: bridge::LitKind::Err,
                        symbol: Symbol::intern(self.interner, &lit.text),
                        // FIXME: handle suffixes
                        suffix: None,
                        span: lit.span,
                    })
                }
                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => {
                    bridge::TokenTree::Punct(bridge::Punct {
                        ch: punct.char as u8,
                        joint: punct.spacing == Spacing::Joint,
                        span: punct.span,
                    })
                }
                tt::TokenTree::Subtree(subtree) => bridge::TokenTree::Group(bridge::Group {
                    delimiter: delim_to_external(subtree.delimiter),
                    stream: if subtree.token_trees.is_empty() {
                        None
                    } else {
                        Some(subtree.token_trees.into_iter().collect())
                    },
                    span: bridge::DelimSpan::from_single(subtree.delimiter.open),
                }),
            })
            .collect()
    }
}

fn delim_to_internal(d: proc_macro::Delimiter, span: bridge::DelimSpan<Span>) -> tt::Delimiter {
    let kind = match d {
        proc_macro::Delimiter::Parenthesis => tt::DelimiterKind::Parenthesis,
        proc_macro::Delimiter::Brace => tt::DelimiterKind::Brace,
        proc_macro::Delimiter::Bracket => tt::DelimiterKind::Bracket,
        proc_macro::Delimiter::None => tt::DelimiterKind::Invisible,
    };
    tt::Delimiter { open: span.open, close: span.close, kind }
}

fn delim_to_external(d: tt::Delimiter) -> proc_macro::Delimiter {
    match d.kind {
        tt::DelimiterKind::Parenthesis => proc_macro::Delimiter::Parenthesis,
        tt::DelimiterKind::Brace => proc_macro::Delimiter::Brace,
        tt::DelimiterKind::Bracket => proc_macro::Delimiter::Bracket,
        tt::DelimiterKind::Invisible => proc_macro::Delimiter::None,
    }
}

#[allow(unused)]
fn spacing_to_internal(spacing: proc_macro::Spacing) -> Spacing {
    match spacing {
        proc_macro::Spacing::Alone => Spacing::Alone,
        proc_macro::Spacing::Joint => Spacing::Joint,
    }
}

#[allow(unused)]
fn spacing_to_external(spacing: Spacing) -> proc_macro::Spacing {
    match spacing {
        Spacing::Alone => proc_macro::Spacing::Alone,
        Spacing::Joint => proc_macro::Spacing::Joint,
    }
}

impl server::SourceFile for RustAnalyzer {
    // FIXME these are all stubs
    fn eq(&mut self, _file1: &Self::SourceFile, _file2: &Self::SourceFile) -> bool {
        true
    }
    fn path(&mut self, _file: &Self::SourceFile) -> String {
        String::new()
    }
    fn is_real(&mut self, _file: &Self::SourceFile) -> bool {
        true
    }
}

impl server::Span for RustAnalyzer {
    fn debug(&mut self, span: Self::Span) -> String {
        format!("{:?}", span.0)
    }
    fn source_file(&mut self, _span: Self::Span) -> Self::SourceFile {
        SourceFile {}
    }
    fn save_span(&mut self, _span: Self::Span) -> usize {
        // FIXME stub
        0
    }
    fn recover_proc_macro_span(&mut self, _id: usize) -> Self::Span {
        // FIXME stub
        tt::TokenId::unspecified()
    }
    /// Recent feature, not yet part of the stable `proc_macro` API.
    ///
    /// See the PR:
    /// <https://github.com/rust-lang/rust/pull/55780>
    fn source_text(&mut self, _span: Self::Span) -> Option<String> {
        None
    }

    fn parent(&mut self, _span: Self::Span) -> Option<Self::Span> {
        // FIXME handle span
        None
    }
    fn source(&mut self, span: Self::Span) -> Self::Span {
        // FIXME handle span
        span
    }
    fn byte_range(&mut self, _span: Self::Span) -> Range<usize> {
        // FIXME handle span
        Range { start: 0, end: 0 }
    }
    fn join(&mut self, first: Self::Span, _second: Self::Span) -> Option<Self::Span> {
        // Just return the first span again, because some macros will unwrap the result.
        Some(first)
    }
    fn subspan(
        &mut self,
        span: Self::Span,
        _start: Bound<usize>,
        _end: Bound<usize>,
    ) -> Option<Self::Span> {
        // Just return the span again, because some macros will unwrap the result.
        Some(span)
    }
    fn resolved_at(&mut self, _span: Self::Span, _at: Self::Span) -> Self::Span {
        // FIXME handle span
        tt::TokenId::unspecified()
    }

    fn end(&mut self, _self_: Self::Span) -> Self::Span {
        tt::TokenId::unspecified()
    }

    fn start(&mut self, _self_: Self::Span) -> Self::Span {
        tt::TokenId::unspecified()
    }

    fn line(&mut self, _span: Self::Span) -> usize {
        // FIXME handle line
        0
    }

    fn column(&mut self, _span: Self::Span) -> usize {
        // FIXME handle column
        0
    }
}

impl server::Symbol for RustAnalyzer {
    fn normalize_and_validate_ident(&mut self, string: &str) -> Result<Self::Symbol, ()> {
        // FIXME: nfc-normalize and validate idents
        Ok(<Self as server::Server>::intern_symbol(string))
    }
}

impl server::Server for RustAnalyzer {
    fn globals(&mut self) -> bridge::ExpnGlobals<Self::Span> {
        bridge::ExpnGlobals {
            def_site: Span::unspecified(),
            call_site: Span::unspecified(),
            mixed_site: Span::unspecified(),
        }
    }

    fn intern_symbol(ident: &str) -> Self::Symbol {
        // FIXME: should be `self.interner` once the proc-macro API allows it
        Symbol::intern(&SYMBOL_INTERNER, &::tt::SmolStr::from(ident))
    }

    fn with_symbol_string(symbol: &Self::Symbol, f: impl FnOnce(&str)) {
        // FIXME: should be `self.interner` once the proc-macro API allows it
        f(symbol.text(&SYMBOL_INTERNER).as_str())
    }
}

struct LiteralFormatter(bridge::Literal<tt::TokenId, Symbol>);

impl LiteralFormatter {
    /// Invokes the callback with a `&[&str]` consisting of each part of the
    /// literal's representation. This is done to allow the `ToString` and
    /// `Display` implementations to borrow references to symbol values, and
    /// both be optimized to reduce overhead.
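    ///
    /// For example, a raw string literal `r#"abc"#` with no suffix reaches the
    /// callback as the parts `["r", "#", "\"", "abc", "\"", "#", ""]`, the last
    /// element being the (empty) suffix.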
    fn with_stringify_parts<R>(
        &self,
        interner: SymbolInternerRef,
        f: impl FnOnce(&[&str]) -> R,
    ) -> R {
        /// Returns a string containing exactly `num` '#' characters.
        /// Uses a 256-character source string literal which is always safe to
        /// index with a `u8` index.
        fn get_hashes_str(num: u8) -> &'static str {
            const HASHES: &str = "\
            ################################################################\
            ################################################################\
            ################################################################\
            ################################################################\
            ";
            const _: () = assert!(HASHES.len() == 256);
            &HASHES[..num as usize]
        }

        self.with_symbol_and_suffix(interner, |symbol, suffix| match self.0.kind {
            bridge::LitKind::Byte => f(&["b'", symbol, "'", suffix]),
            bridge::LitKind::Char => f(&["'", symbol, "'", suffix]),
            bridge::LitKind::Str => f(&["\"", symbol, "\"", suffix]),
            bridge::LitKind::StrRaw(n) => {
                let hashes = get_hashes_str(n);
                f(&["r", hashes, "\"", symbol, "\"", hashes, suffix])
            }
            bridge::LitKind::ByteStr => f(&["b\"", symbol, "\"", suffix]),
            bridge::LitKind::ByteStrRaw(n) => {
                let hashes = get_hashes_str(n);
                f(&["br", hashes, "\"", symbol, "\"", hashes, suffix])
            }
            _ => f(&[symbol, suffix]),
        })
    }

    fn with_symbol_and_suffix<R>(
        &self,
        interner: SymbolInternerRef,
        f: impl FnOnce(&str, &str) -> R,
    ) -> R {
        let symbol = self.0.symbol.text(interner);
        let suffix = self.0.suffix.map(|s| s.text(interner)).unwrap_or_default();
        f(symbol.as_str(), suffix.as_str())
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_ra_server_to_string() {
        let s = TokenStream {
            token_trees: vec![
                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                    text: "struct".into(),
                    span: tt::TokenId::unspecified(),
                })),
                tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                    text: "T".into(),
                    span: tt::TokenId::unspecified(),
                })),
                tt::TokenTree::Subtree(tt::Subtree {
                    delimiter: tt::Delimiter {
                        open: tt::TokenId::unspecified(),
                        close: tt::TokenId::unspecified(),
                        kind: tt::DelimiterKind::Brace,
                    },
                    token_trees: vec![],
                }),
            ],
        };

        assert_eq!(s.to_string(), "struct T {}");
    }
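
    // Minimal sketch of the concatenation path used by `concat_streams` and
    // `concat_trees`: streams pushed into a `TokenStreamBuilder` are expected
    // to end up appended in order in the built stream.
    #[test]
    fn test_ra_server_builder_concat() {
        use std::str::FromStr;

        let mut builder = TokenStreamBuilder::new();
        builder.push(TokenStream::from_str("a").unwrap());
        builder.push(TokenStream::from_str("b").unwrap());
        let stream = builder.build();
        assert_eq!(stream.token_trees.len(), 2);
    }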

    #[test]
    fn test_ra_server_from_str() {
        use std::str::FromStr;
        let subtree_paren_a = tt::TokenTree::Subtree(tt::Subtree {
            delimiter: tt::Delimiter {
                open: tt::TokenId::unspecified(),
                close: tt::TokenId::unspecified(),
                kind: tt::DelimiterKind::Parenthesis,
            },
            token_trees: vec![tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "a".into(),
                span: tt::TokenId::unspecified(),
            }))],
        });

        let t1 = TokenStream::from_str("(a)").unwrap();
        assert_eq!(t1.token_trees.len(), 1);
        assert_eq!(t1.token_trees[0], subtree_paren_a);

        let t2 = TokenStream::from_str("(a);").unwrap();
        assert_eq!(t2.token_trees.len(), 2);
        assert_eq!(t2.token_trees[0], subtree_paren_a);

        let underscore = TokenStream::from_str("_").unwrap();
        assert_eq!(
            underscore.token_trees[0],
            tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident {
                text: "_".into(),
                span: tt::TokenId::unspecified(),
            }))
        );
    }
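
    // Illustrative round-trip check for the delimiter and spacing conversion
    // helpers above (a sketch; it assumes `proc_macro::Delimiter` and
    // `proc_macro::Spacing` implement `PartialEq` and `Debug`).
    #[test]
    fn test_ra_server_delim_and_spacing_roundtrip() {
        for delim in [
            proc_macro::Delimiter::Parenthesis,
            proc_macro::Delimiter::Brace,
            proc_macro::Delimiter::Bracket,
            proc_macro::Delimiter::None,
        ] {
            let span = bridge::DelimSpan::from_single(tt::TokenId::unspecified());
            assert_eq!(delim_to_external(delim_to_internal(delim, span)), delim);
        }

        for spacing in [proc_macro::Spacing::Alone, proc_macro::Spacing::Joint] {
            assert_eq!(spacing_to_external(spacing_to_internal(spacing)), spacing);
        }
    }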
}