rust/crates/ra_mbe/src/syntax_bridge.rs

use ra_parser::{TokenSource, TreeSink, ParseError};
use ra_syntax::{
    AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
    ast, SyntaxKind::*, TextUnit, classify_literal
};

/// Maps `tt::TokenId` to the relative range of the original token.
#[derive(Default)]
pub struct TokenMap {
    /// Maps `tt::TokenId` to the *relative* source range.
    tokens: Vec<TextRange>,
}

/// Convert the syntax tree (what the user has written) to a `TokenTree` (what the macro
/// will consume).
pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
    let mut token_map = TokenMap::default();
    let node = ast.syntax();
    let tt = convert_tt(&mut token_map, node.range().start(), node)?;
    Some((tt, token_map))
}
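
// A minimal usage sketch (illustrative only; `tt_node` is a hypothetical
// `&ast::TokenTree` taken from a macro call in the source, and the `?`s assume
// an `Option`-returning context):
//
//     let (subtree, token_map) = ast_to_token_tree(tt_node)?;
//     // `subtree` is what the macro expander consumes; `token_map` maps the
//     // `tt::TokenId`s allocated for idents back to ranges relative to `tt_node`.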

/// Parses the token tree (result of macro expansion) as a sequence of items
pub fn token_tree_to_ast_item_list(tt: &tt::Subtree) -> TreeArc<ast::SourceFile> {
    let token_source = TtTokenSource::new(tt);
    let mut tree_sink = TtTreeSink::new(&token_source.tokens);
    ra_parser::parse(&token_source, &mut tree_sink);
    let syntax = tree_sink.inner.finish();
    ast::SourceFile::cast(&syntax).unwrap().to_owned()
}
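
// Sketch of the reverse direction (illustrative only; `expanded` stands for a
// `tt::Subtree` produced by macro expansion):
//
//     let source_file = token_tree_to_ast_item_list(&expanded);
//     // `source_file` is a parsed `ast::SourceFile` whose items come from the
//     // expansion rather than from on-disk text.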

impl TokenMap {
    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
        let idx = tt.0 as usize;
        self.tokens.get(idx).map(|&it| it)
    }

    fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
        let id = self.tokens.len();
        self.tokens.push(relative_range);
        tt::TokenId(id as u32)
    }
}

fn convert_tt(
    token_map: &mut TokenMap,
    global_offset: TextUnit,
    tt: &SyntaxNode,
) -> Option<tt::Subtree> {
    let first_child = tt.first_child_or_token()?;
    let last_child = tt.last_child_or_token()?;
    // The first and last children must be a matching pair of delimiters;
    // anything else is not a valid token tree.
    let delimiter = match (first_child.kind(), last_child.kind()) {
        (L_PAREN, R_PAREN) => tt::Delimiter::Parenthesis,
        (L_CURLY, R_CURLY) => tt::Delimiter::Brace,
        (L_BRACK, R_BRACK) => tt::Delimiter::Bracket,
        _ => return None,
    };
    let mut token_trees = Vec::new();
    for child in tt.children_with_tokens().skip(1) {
        if child == first_child || child == last_child || child.kind().is_trivia() {
            continue;
        }
        match child {
            SyntaxElement::Token(token) => {
                if token.kind().is_punct() {
                    // Multi-character punctuation is split into single-char
                    // `Punct`s; all but the last are marked `Joint`.
                    let mut prev = None;
                    for char in token.text().chars() {
                        if let Some(char) = prev {
                            token_trees.push(
                                tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Joint })
                                    .into(),
                            );
                        }
                        prev = Some(char)
                    }
                    if let Some(char) = prev {
                        token_trees.push(
                            tt::Leaf::from(tt::Punct { char, spacing: tt::Spacing::Alone }).into(),
                        );
                    }
                } else {
                    let child = if token.kind().is_keyword() || token.kind() == IDENT {
                        let relative_range = token.range() - global_offset;
                        let id = token_map.alloc(relative_range);
                        let text = token.text().clone();
                        tt::Leaf::from(tt::Ident { text, id }).into()
                    } else if token.kind().is_literal() {
                        tt::Leaf::from(tt::Literal { text: token.text().clone() }).into()
                    } else {
                        return None;
                    };
                    token_trees.push(child);
                }
            }
            SyntaxElement::Node(node) => {
                let child = convert_tt(token_map, global_offset, node)?.into();
                token_trees.push(child);
            }
        };
    }

    let res = tt::Subtree { delimiter, token_trees };
    Some(res)
}

#[derive(Debug)]
pub(crate) struct TtTokenSource {
    pub tokens: Vec<TtToken>,
}

#[derive(Debug)]
pub(crate) struct TtToken {
    pub kind: SyntaxKind,
    pub is_joint_to_next: bool,
    pub text: SmolStr,
    pub n_tokens: usize,
}

// Some helper functions
fn to_punct(tt: &tt::TokenTree) -> Option<&tt::Punct> {
    if let tt::TokenTree::Leaf(tt::Leaf::Punct(pp)) = tt {
        return Some(pp);
    }
    None
}

pub(crate) struct TokenPeek<'a, I>
where
    I: Iterator<Item = &'a tt::TokenTree>,
{
    iter: itertools::MultiPeek<I>,
}

impl<'a, I> TokenPeek<'a, I>
where
    I: Iterator<Item = &'a tt::TokenTree>,
{
    pub fn new(iter: I) -> Self {
        TokenPeek { iter: itertools::multipeek(iter) }
    }

    pub fn next(&mut self) -> Option<&tt::TokenTree> {
        self.iter.next()
    }

    fn current_punct2(&mut self, p: &tt::Punct) -> Option<((char, char), bool)> {
        if p.spacing != tt::Spacing::Joint {
            return None;
        }
        self.iter.reset_peek();
        let p1 = to_punct(self.iter.peek()?)?;
        Some(((p.char, p1.char), p1.spacing == tt::Spacing::Joint))
    }

    fn current_punct3(&mut self, p: &tt::Punct) -> Option<((char, char, char), bool)> {
        self.current_punct2(p).and_then(|((p0, p1), last_joint)| {
            if !last_joint {
                None
            } else {
                let p2 = to_punct(*self.iter.peek()?)?;
                Some(((p0, p1, p2.char), p2.spacing == tt::Spacing::Joint))
            }
        })
    }
}
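
// How the peeking is used (illustrative): a token tree only contains
// single-character `Punct`s, so a `!` immediately followed by a `Joint` `=`
// is recognized by `convert_multi_char_punct` below and emitted as one
// `NEQ` ("!=") token; `<`, `<`, `=` all marked `Joint` likewise collapse into
// a single `SHLEQ` ("<<=") token that accounts for three input tokens.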

impl TtTokenSource {
    pub fn new(tt: &tt::Subtree) -> TtTokenSource {
        let mut res = TtTokenSource { tokens: Vec::new() };
        res.convert_subtree(tt);
        res
    }

    fn convert_subtree(&mut self, sub: &tt::Subtree) {
        self.push_delim(sub.delimiter, false);
        let mut peek = TokenPeek::new(sub.token_trees.iter());
        while let Some(tt) = peek.iter.next() {
            self.convert_tt(tt, &mut peek);
        }
        self.push_delim(sub.delimiter, true)
    }

    fn convert_tt<'a, I>(&mut self, tt: &tt::TokenTree, iter: &mut TokenPeek<'a, I>)
    where
        I: Iterator<Item = &'a tt::TokenTree>,
    {
        match tt {
            tt::TokenTree::Leaf(token) => self.convert_token(token, iter),
            tt::TokenTree::Subtree(sub) => self.convert_subtree(sub),
        }
    }

    fn convert_token<'a, I>(&mut self, token: &tt::Leaf, iter: &mut TokenPeek<'a, I>)
    where
        I: Iterator<Item = &'a tt::TokenTree>,
    {
        let tok = match token {
            tt::Leaf::Literal(l) => TtToken {
                kind: classify_literal(&l.text).unwrap().kind,
                is_joint_to_next: false,
                text: l.text.clone(),
                n_tokens: 1,
            },
            tt::Leaf::Punct(p) => {
                if let Some((kind, is_joint_to_next, text, size)) =
                    Self::convert_multi_char_punct(p, iter)
                {
                    // Consume the extra punct tokens that were merged into a
                    // single multi-character token.
                    for _ in 0..size - 1 {
                        iter.next();
                    }
                    TtToken { kind, is_joint_to_next, text: text.into(), n_tokens: size }
                } else {
                    let kind = match p.char {
                        // lexer may produce compound tokens for these ones
                        '.' => DOT,
                        ':' => COLON,
                        '=' => EQ,
                        '!' => EXCL,
                        '-' => MINUS,
                        c => SyntaxKind::from_char(c).unwrap(),
                    };
                    let text = {
                        let mut buf = [0u8; 4];
                        let s: &str = p.char.encode_utf8(&mut buf);
                        SmolStr::new(s)
                    };
                    TtToken {
                        kind,
                        is_joint_to_next: p.spacing == tt::Spacing::Joint,
                        text,
                        n_tokens: 1,
                    }
                }
            }
            tt::Leaf::Ident(ident) => {
                let kind = SyntaxKind::from_keyword(ident.text.as_str()).unwrap_or(IDENT);
                TtToken { kind, is_joint_to_next: false, text: ident.text.clone(), n_tokens: 1 }
            }
        };
        self.tokens.push(tok)
    }

    pub(crate) fn convert_multi_char_punct<'a, I>(
        p: &tt::Punct,
        iter: &mut TokenPeek<'a, I>,
    ) -> Option<(SyntaxKind, bool, &'static str, usize)>
    where
        I: Iterator<Item = &'a tt::TokenTree>,
    {
        if let Some((m, is_joint_to_next)) = iter.current_punct3(p) {
            if let Some((kind, text)) = match m {
                ('<', '<', '=') => Some((SHLEQ, "<<=")),
                ('>', '>', '=') => Some((SHREQ, ">>=")),
                ('.', '.', '.') => Some((DOTDOTDOT, "...")),
                ('.', '.', '=') => Some((DOTDOTEQ, "..=")),
                _ => None,
            } {
                return Some((kind, is_joint_to_next, text, 3));
            }
        }

        if let Some((m, is_joint_to_next)) = iter.current_punct2(p) {
            if let Some((kind, text)) = match m {
                ('<', '<') => Some((SHL, "<<")),
                ('>', '>') => Some((SHR, ">>")),
                ('|', '|') => Some((PIPEPIPE, "||")),
                ('&', '&') => Some((AMPAMP, "&&")),
                ('%', '=') => Some((PERCENTEQ, "%=")),
                ('*', '=') => Some((STAREQ, "*=")),
                ('/', '=') => Some((SLASHEQ, "/=")),
                ('^', '=') => Some((CARETEQ, "^=")),
                ('&', '=') => Some((AMPEQ, "&=")),
                ('|', '=') => Some((PIPEEQ, "|=")),
                ('-', '=') => Some((MINUSEQ, "-=")),
                ('+', '=') => Some((PLUSEQ, "+=")),
                ('>', '=') => Some((GTEQ, ">=")),
                ('<', '=') => Some((LTEQ, "<=")),
                ('-', '>') => Some((THIN_ARROW, "->")),
                ('!', '=') => Some((NEQ, "!=")),
                ('=', '>') => Some((FAT_ARROW, "=>")),
                ('=', '=') => Some((EQEQ, "==")),
                ('.', '.') => Some((DOTDOT, "..")),
                (':', ':') => Some((COLONCOLON, "::")),
                _ => None,
            } {
                return Some((kind, is_joint_to_next, text, 2));
            }
        }

        None
    }

    fn push_delim(&mut self, d: tt::Delimiter, closing: bool) {
        let (kinds, texts) = match d {
            tt::Delimiter::Parenthesis => ([L_PAREN, R_PAREN], "()"),
            tt::Delimiter::Brace => ([L_CURLY, R_CURLY], "{}"),
            tt::Delimiter::Bracket => ([L_BRACK, R_BRACK], "[]"),
            tt::Delimiter::None => return,
        };
        let idx = closing as usize;
        let kind = kinds[idx];
        // Take the opening or closing half of the two-character `texts` string.
        let text = &texts[idx..texts.len() - (1 - idx)];
        let tok = TtToken { kind, is_joint_to_next: false, text: SmolStr::new(text), n_tokens: 1 };
        self.tokens.push(tok)
    }
}
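
// Illustrative example of the flattening performed by `TtTokenSource::new`:
// a subtree with `Parenthesis` delimiter containing the idents `a`, `b` and a
// `+` punct becomes the token sequence `(`, `a`, `+`, `b`, `)`, i.e. the
// delimiters are materialized as ordinary `L_PAREN` / `R_PAREN` tokens.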

impl TokenSource for TtTokenSource {
    fn token_kind(&self, pos: usize) -> SyntaxKind {
        if let Some(tok) = self.tokens.get(pos) {
            tok.kind
        } else {
            SyntaxKind::EOF
        }
    }

    fn is_token_joint_to_next(&self, pos: usize) -> bool {
        self.tokens[pos].is_joint_to_next
    }

    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
        self.tokens[pos].text == *kw
    }
}

#[derive(Default)]
struct TtTreeSink<'a> {
    buf: String,
    tokens: &'a [TtToken],
    text_pos: TextUnit,
    token_pos: usize,
    inner: SyntaxTreeBuilder,
}

impl<'a> TtTreeSink<'a> {
    fn new(tokens: &'a [TtToken]) -> TtTreeSink {
        TtTreeSink {
            buf: String::new(),
            tokens,
            text_pos: 0.into(),
            token_pos: 0,
            inner: SyntaxTreeBuilder::default(),
        }
    }
}

impl<'a> TreeSink for TtTreeSink<'a> {
    fn token(&mut self, kind: SyntaxKind, n_tokens: u8) {
        for _ in 0..n_tokens {
            self.buf += self.tokens[self.token_pos].text.as_str();
            self.token_pos += 1;
        }
        self.text_pos += TextUnit::of_str(&self.buf);
        let text = SmolStr::new(self.buf.as_str());
        self.buf.clear();
        self.inner.token(kind, text)
    }

    fn start_node(&mut self, kind: SyntaxKind) {
        self.inner.start_node(kind);
    }

    fn finish_node(&mut self) {
        self.inner.finish_node();
    }

    fn error(&mut self, error: ParseError) {
        self.inner.error(error, self.text_pos)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::tests::{expand, create_rules};

    #[test]
    fn convert_tt_token_source() {
        let rules = create_rules(
            r#"
            macro_rules! literals {
                ($i:ident) => {
                    {
                        let a = 'c';
                        let c = 1000;
                        let f = 12E+99_f64;
                        let s = "rust1";
                    }
                }
            }
            "#,
        );
        let expansion = expand(&rules, "literals!(foo)");
        let tt_src = TtTokenSource::new(&expansion);

        // [{]
        // [let] [a] [=] ['c'] [;]
        assert_eq!(tt_src.tokens[1 + 3].text, "'c'");
        assert_eq!(tt_src.tokens[1 + 3].kind, CHAR);

        // [let] [c] [=] [1000] [;]
        assert_eq!(tt_src.tokens[1 + 5 + 3].text, "1000");
        assert_eq!(tt_src.tokens[1 + 5 + 3].kind, INT_NUMBER);

        // [let] [f] [=] [12E+99_f64] [;]
        assert_eq!(tt_src.tokens[1 + 10 + 3].text, "12E+99_f64");
        assert_eq!(tt_src.tokens[1 + 10 + 3].kind, FLOAT_NUMBER);

        // [let] [s] [=] ["rust1"] [;]
        assert_eq!(tt_src.tokens[1 + 15 + 3].text, "\"rust1\"");
        assert_eq!(tt_src.tokens[1 + 15 + 3].kind, STRING);
    }
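
    // A minimal illustrative sketch of `TokenMap`'s contract: ids handed out
    // by `alloc` map back to the range that was allocated for them, and
    // unknown ids map to `None`. (This exercises `TokenMap` directly, without
    // going through macro expansion.)
    #[test]
    fn token_map_round_trip() {
        let mut map = TokenMap::default();
        let range = TextRange::from_to(0.into(), 5.into());
        let id = map.alloc(range);
        assert_eq!(map.relative_range_of(id), Some(range));
        assert_eq!(map.relative_range_of(tt::TokenId(99)), None);
    }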
}