791: docs r=matklad a=matklad



Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2019-02-11 18:32:18 +00:00
commit a78142cc64
5 changed files with 65 additions and 13 deletions

View File

@@ -144,8 +144,8 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let definition_tt = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
-        let invocation_tt = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
         let rules = crate::MacroRules::parse(&definition_tt).unwrap();
         let expansion = rules.expand(&invocation_tt).unwrap();
         assert_eq!(
@@ -160,7 +160,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let macro_definition =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let definition_tt = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
+        let (definition_tt, _) = ast_to_token_tree(macro_definition.token_tree().unwrap()).unwrap();
         crate::MacroRules::parse(&definition_tt).unwrap()
     }
@@ -169,7 +169,7 @@ impl_froms!(TokenTree: Leaf, Subtree);
         let macro_invocation =
             source_file.syntax().descendants().find_map(ast::MacroCall::cast).unwrap();
 
-        let invocation_tt = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
+        let (invocation_tt, _) = ast_to_token_tree(macro_invocation.token_tree().unwrap()).unwrap();
         let expanded = rules.expand(&invocation_tt).unwrap();
         assert_eq!(expanded.to_string(), expansion);

View File

@@ -3,6 +3,7 @@
 /// `tt::TokenTree` for the result of the expansion.
 use rustc_hash::FxHashMap;
 use ra_syntax::SmolStr;
+use tt::TokenId;
 
 use crate::tt_cursor::TtCursor;
@@ -185,7 +186,8 @@ fn expand_tt(
         }
         crate::TokenTree::Leaf(leaf) => match leaf {
             crate::Leaf::Ident(ident) => {
-                tt::Leaf::from(tt::Ident { text: ident.text.clone() }).into()
+                tt::Leaf::from(tt::Ident { text: ident.text.clone(), id: TokenId::unspecified() })
+                    .into()
             }
             crate::Leaf::Punct(punct) => tt::Leaf::from(punct.clone()).into(),
             crate::Leaf::Var(v) => bindings.get(&v.text, nesting)?.clone(),

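Identifiers that the expander re-emits from the macro definition have no corresponding source token, so they receive the sentinel id. A minimal sketch of what that means in practice (a hypothetical test, not part of the commit, assuming the `TokenMap` type from the syntax bridge is brought into scope):

    #[test]
    fn expanded_idents_have_no_source_range() {
        // The expander copies this ident out of the macro definition, so there
        // is no original token to point back to: it gets the sentinel id.
        let ident = tt::Ident { text: "foo".into(), id: TokenId::unspecified() };

        // A TokenMap built from an invocation knows nothing about such ids.
        let map = TokenMap::default();
        assert!(map.relative_range_of(ident.id).is_none());
    }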
View File

@@ -41,7 +41,7 @@ fn parse_subtree(tt: &tt::Subtree) -> Option<crate::Subtree> {
                     }
                 }
                 tt::Leaf::Punct(punct) => crate::Leaf::from(*punct).into(),
-                tt::Leaf::Ident(tt::Ident { text }) => {
+                tt::Leaf::Ident(tt::Ident { text, id: _ }) => {
                     crate::Leaf::from(crate::Ident { text: text.clone() }).into()
                 }
                 tt::Leaf::Literal(tt::Literal { text }) => {

View File

@@ -1,10 +1,42 @@
-use ra_syntax::{ast, AstNode, SyntaxNode, SyntaxKind::*};
+use ra_syntax::{
+    AstNode, SyntaxNode, TextRange,
+    ast, SyntaxKind::*, TextUnit
+};
 
-pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<tt::Subtree> {
-    convert_tt(ast.syntax())
+/// Maps `tt::TokenId` to the relative range of the original token.
+#[derive(Default)]
+pub struct TokenMap {
+    /// Maps `tt::TokenId` to the *relative* source range.
+    tokens: Vec<TextRange>,
 }
 
-fn convert_tt(tt: &SyntaxNode) -> Option<tt::Subtree> {
+/// Convert the syntax tree (what user has written) to a `TokenTree` (what macro
+/// will consume).
+pub fn ast_to_token_tree(ast: &ast::TokenTree) -> Option<(tt::Subtree, TokenMap)> {
+    let mut token_map = TokenMap::default();
+    let node = ast.syntax();
+    let tt = convert_tt(&mut token_map, node.range().start(), node)?;
+    Some((tt, token_map))
+}
+
+impl TokenMap {
+    pub fn relative_range_of(&self, tt: tt::TokenId) -> Option<TextRange> {
+        let idx = tt.0 as usize;
+        self.tokens.get(idx).map(|&it| it)
+    }
+
+    fn alloc(&mut self, relative_range: TextRange) -> tt::TokenId {
+        let id = self.tokens.len();
+        self.tokens.push(relative_range);
+        tt::TokenId(id as u32)
+    }
+}
+
+fn convert_tt(
+    token_map: &mut TokenMap,
+    global_offset: TextUnit,
+    tt: &SyntaxNode,
+) -> Option<tt::Subtree> {
     let first_child = tt.first_child()?;
     let last_child = tt.last_child()?;
     let delimiter = match (first_child.kind(), last_child.kind()) {
@@ -34,10 +66,12 @@ fn convert_tt(tt: &SyntaxNode) -> Option<tt::Subtree> {
             }
         } else {
             let child: tt::TokenTree = if child.kind() == TOKEN_TREE {
-                convert_tt(child)?.into()
+                convert_tt(token_map, global_offset, child)?.into()
             } else if child.kind().is_keyword() || child.kind() == IDENT {
+                let relative_range = child.range() - global_offset;
+                let id = token_map.alloc(relative_range);
                 let text = child.leaf_text().unwrap().clone();
-                tt::Leaf::from(tt::Ident { text }).into()
+                tt::Leaf::from(tt::Ident { text, id }).into()
             } else if child.kind().is_literal() {
                 tt::Leaf::from(tt::Literal { text: child.leaf_text().unwrap().clone() }).into()
             } else {

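Taken together, `ast_to_token_tree` now hands back both the converted subtree and the `TokenMap` that can translate the ids it allocated into text ranges relative to the original token tree. A minimal sketch of how a caller might use the pair (the helper `ident_ranges` is hypothetical and only walks the top level of the subtree; it assumes `ast_to_token_tree` and the `tt` crate are in scope):

    use ra_syntax::{ast, SmolStr, TextRange};

    /// For every top-level identifier in a macro call's token tree, collect its
    /// text together with the range it occupies relative to that token tree.
    fn ident_ranges(ast_tt: &ast::TokenTree) -> Vec<(SmolStr, TextRange)> {
        let (subtree, token_map) = ast_to_token_tree(ast_tt).unwrap();
        subtree
            .token_trees
            .iter()
            .filter_map(|t| match t {
                tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
                    let range = token_map.relative_range_of(ident.id)?;
                    Some((ident.text.clone(), range))
                }
                _ => None,
            })
            .collect()
    }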
View File

@@ -1,4 +1,4 @@
-/// `tt` crate defines a `TokenTree` datastructure: this is the interface (both
+/// `tt` crate defines a `TokenTree` data structure: this is the interface (both
 /// input and output) of macros. It closely mirrors `proc_macro` crate's
 /// `TokenTree`.
@@ -18,6 +18,21 @@ use std::fmt;
 
 use smol_str::SmolStr;
 
+/// Represents identity of the token.
+///
+/// For hygiene purposes, we need to track which expanded tokens originated from
+/// which source tokens. We do it by assigning a distinct identity to each
+/// source token and making sure that identities are preserved during macro
+/// expansion.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(pub u32);
+
+impl TokenId {
+    pub const fn unspecified() -> TokenId {
+        TokenId(!0)
+    }
+}
+
 #[derive(Debug, Clone)]
 pub enum TokenTree {
     Leaf(Leaf),
@@ -67,6 +82,7 @@ pub enum Spacing {
 #[derive(Debug, Clone)]
 pub struct Ident {
     pub text: SmolStr,
+    pub id: TokenId,
 }
 
 impl fmt::Display for TokenTree {
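The `TokenId` newtype is what makes the mapping work: identifiers with identical text remain distinguishable by their ids, while tokens that have no source of their own use the reserved sentinel value. A small illustrative test of the intended semantics, assuming it sits next to these definitions in the `tt` crate (it is not part of the commit):

    #[test]
    fn token_ids_track_identity() {
        // Same text, different source tokens: the ids keep them apart.
        let first = Ident { text: "x".into(), id: TokenId(0) };
        let second = Ident { text: "x".into(), id: TokenId(1) };
        assert_eq!(first.text, second.text);
        assert_ne!(first.id, second.id);

        // Tokens synthesized during expansion carry the reserved sentinel id.
        assert_eq!(TokenId::unspecified(), TokenId(!0));
        assert_ne!(TokenId::unspecified(), first.id);
    }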