Fix literal support in token tree to ast item list

Edwin Cheng 2019-04-05 18:23:01 +08:00
parent be9a44e9ba
commit 1ab78d6056
4 changed files with 45 additions and 4 deletions

@@ -337,4 +337,45 @@ SOURCE_FILE@[0; 40)
        );
    }

    #[test]
    fn expand_literals_to_item_list() {
        fn to_subtree(tt: &tt::TokenTree) -> &tt::Subtree {
            if let tt::TokenTree::Subtree(subtree) = tt {
                return &subtree;
            }
            assert!(false, "It is not a subtree");
            unreachable!();
        }

        fn to_literal(tt: &tt::TokenTree) -> &tt::Literal {
            if let tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) = tt {
                return lit;
            }
            assert!(false, "It is not a literal");
            unreachable!();
        }

        let rules = create_rules(
            r#"
            macro_rules! literals {
                ($i:ident) => {
                    {
                        let a = 'c';
                        let c = 1000;
                        let f = 12E+99_f64;
                    }
                }
            }
            "#,
        );
        let expansion = expand(&rules, "literals!(foo)");
        let stm_tokens = &to_subtree(&expansion.token_trees[0]).token_trees;

        // [let] [a] [=] ['c'] [;]
        assert_eq!(to_literal(&stm_tokens[3]).text, "'c'");
        // [let] [c] [=] [1000] [;]
        assert_eq!(to_literal(&stm_tokens[5 + 3]).text, "1000");
        // [let] [f] [=] [12E+99_f64] [;]
        assert_eq!(to_literal(&stm_tokens[10 + 3]).text, "12E+99_f64");
    }
}
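
A note on the indices used in the assertions above: as the `[let] [a] [=] ['c'] [;]` comments show, each `let` statement in the expansion flattens to five token trees, with the literal at offset 3. A minimal sketch of that indexing assumption (the helper name is hypothetical, not part of the commit):

    // Each statement contributes five token trees; the literal is the fourth one.
    fn literal_index(statement: usize) -> usize {
        statement * 5 + 3
    }

Under that assumption, stm_tokens[literal_index(1)] is the `1000` literal and stm_tokens[literal_index(2)] is `12E+99_f64`, matching the `5 + 3` and `10 + 3` indices in the test.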

@@ -1,7 +1,7 @@
use ra_parser::{TokenSource, TreeSink, ParseError};
use ra_syntax::{
    AstNode, SyntaxNode, TextRange, SyntaxKind, SmolStr, SyntaxTreeBuilder, TreeArc, SyntaxElement,
-    ast, SyntaxKind::*, TextUnit
+    ast, SyntaxKind::*, TextUnit, next_token
};
/// Maps `tt::TokenId` to the relative range of the original token.
@@ -189,7 +189,7 @@ impl TtTokenSource
    {
        let tok = match token {
            tt::Leaf::Literal(l) => TtToken {
-                kind: SyntaxKind::INT_NUMBER, // FIXME
+                kind: next_token(&l.text).kind,
                is_joint_to_next: false,
                text: l.text.clone(),
            },

@@ -40,7 +40,7 @@ pub use crate::{
    syntax_text::SyntaxText,
    syntax_node::{Direction, SyntaxNode, WalkEvent, TreeArc, SyntaxTreeBuilder, SyntaxElement, SyntaxToken},
    ptr::{SyntaxNodePtr, AstPtr},
-    parsing::{tokenize, Token},
+    parsing::{tokenize, next_token, Token},
};
use ra_text_edit::AtomTextEdit;

@@ -11,7 +11,7 @@ use crate::{
    syntax_node::GreenNode,
};

-pub use self::lexer::{tokenize, Token};
+pub use self::lexer::{tokenize, next_token, Token};
pub(crate) use self::reparsing::incremental_reparse;
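
The last two hunks only make the lexer entry point reachable: `next_token` is defined in the lexer module, re-exported from the parsing module, and then re-exported from the crate root, so the `TtTokenSource` change above can call it directly. A short usage sketch under that assumption (the helper is hypothetical):

    use ra_syntax::{next_token, SyntaxKind};

    // Classify a literal's text the same way the token-tree conversion now does.
    fn literal_kind(text: &str) -> SyntaxKind {
        next_token(text).kind
    }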