rollup merge of #18229 : bjz/ttdelim
This commit is contained in: commit b3c676ed86
@@ -56,7 +56,7 @@ extern crate rustc;
 
 use syntax::codemap::Span;
 use syntax::parse::token::{IDENT, get_ident};
-use syntax::ast::{TokenTree, TTTok};
+use syntax::ast::{TokenTree, TtToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
 use syntax::ext::build::AstBuilder; // trait for expr_uint
 use rustc::plugin::Registry;
@@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         ("I", 1)];
 
     let text = match args {
-        [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
+        [TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
@@ -24,6 +24,9 @@ use std::fmt::Show;
 use std::rc::Rc;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
+#[cfg(stage0)]
+pub use self::TtToken as TTTok;
+
 // FIXME #6993: in librustc, uses of "ident" should be replaced
 // by just "Name".
 
@@ -592,6 +595,28 @@ pub enum CaptureClause {
     CaptureByRef,
 }
 
+/// A token that delimits a sequence of token trees
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
+pub struct Delimiter {
+    pub span: Span,
+    pub token: ::parse::token::Token,
+}
+
+impl Delimiter {
+    /// Convert the delimiter to a `TtToken`
+    pub fn to_tt(&self) -> TokenTree {
+        TtToken(self.span, self.token.clone())
+    }
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
+pub enum KleeneOp {
+    ZeroOrMore,
+    OneOrMore,
+}
+
 /// When the main rust parser encounters a syntax-extension invocation, it
 /// parses the arguments to the invocation as a token-tree. This is a very
 /// loose structure, such that all sorts of different AST-fragments can
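A note on the two new types above: `KleeneOp` replaces a bare `bool` that encoded "zero or more" as `true`, and `Delimiter` pairs each open/close token with its span so the delimiters no longer have to live inside the token-tree vector itself. A minimal standalone sketch of the same idea, in modern Rust syntax with simplified stand-in types (not the actual libsyntax definitions):

    // Hypothetical stand-ins: `char` plays the role of token::Token.
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum KleeneOp { ZeroOrMore, OneOrMore }

    #[derive(Clone, Copy, Debug)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Debug)]
    struct Delimiter { span: Span, token: char }

    fn min_repeats(op: KleeneOp) -> usize {
        // With the old bool this read `if zerok { 0 } else { 1 }`, leaving
        // the reader to remember that `true` meant "zero or more".
        match op {
            KleeneOp::ZeroOrMore => 0,
            KleeneOp::OneOrMore => 1,
        }
    }

    fn main() {
        let open = Delimiter { span: Span { lo: 0, hi: 1 }, token: '(' };
        println!("{:?} opens at {}", open.token, open.span.lo);
        assert_eq!(min_repeats(KleeneOp::ZeroOrMore), 0);
    }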
@@ -600,9 +625,9 @@ pub enum CaptureClause {
 /// If the syntax extension is an MBE macro, it will attempt to match its
 /// LHS "matchers" against the provided token tree, and if it finds a
 /// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the TTNonterminals it finds.
+/// `macro_parser::matched_nonterminals` into the `TtNonterminal`s it finds.
 ///
-/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
+/// The RHS of an MBE macro is the only place a `TtNonterminal` or `TtSequence`
 /// makes any real sense. You could write them elsewhere but nothing
 /// else knows what to do with them, so you'll probably get a syntax
 /// error.
@@ -610,22 +635,29 @@ pub enum CaptureClause {
 #[doc="For macro invocations; parsing is delegated to the macro"]
 pub enum TokenTree {
     /// A single token
-    TTTok(Span, ::parse::token::Token),
-    /// A delimited sequence (the delimiters appear as the first
-    /// and last elements of the vector)
-    // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TTDelim(Rc<Vec<TokenTree>>),
+    TtToken(Span, ::parse::token::Token),
+    /// A delimited sequence of token trees
+    TtDelimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
 
     // These only make sense for right-hand-sides of MBE macros:
 
-    /// A kleene-style repetition sequence with a span, a TTForest,
-    /// an optional separator, and a boolean where true indicates
-    /// zero or more (..), and false indicates one or more (+).
+    /// A Kleene-style repetition sequence with an optional separator.
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
+    TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp),
     /// A syntactic variable that will be filled in by macro expansion.
-    TTNonterminal(Span, Ident)
+    TtNonterminal(Span, Ident)
+}
+
+impl TokenTree {
+    /// Returns the `Span` corresponding to this token tree.
+    pub fn get_span(&self) -> Span {
+        match *self {
+            TtToken(span, _)          => span,
+            TtDelimited(span, _)      => span,
+            TtSequence(span, _, _, _) => span,
+            TtNonterminal(span, _)    => span,
+        }
+    }
 }
 
 // Matchers are nodes defined-by and recognized-by the main rust parser and
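The reshaped enum stores a delimited group as one `Rc`'d `(open, inner, close)` triple that carries its own `Span`, which is what makes the new `get_span` a simple, total match. A standalone sketch of that shape, again with hypothetical stand-in types rather than the real `Span`/`Token`:

    use std::rc::Rc;

    #[derive(Clone, Copy, Debug)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Debug)]
    struct Delimiter { span: Span, token: char }

    #[derive(Clone, Debug)]
    enum TokenTree {
        Token(Span, char),
        // One Rc holds the (open, inner, close) triple together.
        Delimited(Span, Rc<(Delimiter, Vec<TokenTree>, Delimiter)>),
    }

    impl TokenTree {
        // Every variant now carries its own Span, so this match is total.
        fn get_span(&self) -> Span {
            match *self {
                TokenTree::Token(span, _) => span,
                TokenTree::Delimited(span, _) => span,
            }
        }
    }

    fn main() {
        let open = Delimiter { span: Span { lo: 0, hi: 1 }, token: '(' };
        let close = Delimiter { span: Span { lo: 4, hi: 5 }, token: ')' };
        let inner = vec![TokenTree::Token(Span { lo: 1, hi: 2 }, 'a')];
        let tt = TokenTree::Delimited(Span { lo: 0, hi: 5 }, Rc::new((open, inner, close)));
        println!("covers {}..{}", tt.get_span().lo, tt.get_span().hi);
    }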
@@ -684,9 +716,9 @@ pub type Matcher = Spanned<Matcher_>;
 pub enum Matcher_ {
     /// Match one token
     MatchTok(::parse::token::Token),
-    /// Match repetitions of a sequence: body, separator, zero ok?,
+    /// Match repetitions of a sequence: body, separator, Kleene operator,
     /// lo, hi position-in-match-array used:
-    MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, bool, uint, uint),
+    MatchSeq(Vec<Matcher> , Option<::parse::token::Token>, KleeneOp, uint, uint),
     /// Parse a Rust NT: name to bind, name of NT, position in match array:
     MatchNonterminal(Ident, Ident, uint)
 }
@@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                    token_tree: &[TokenTree])
                                    -> Box<MacResult+'cx> {
     let code = match token_tree {
-        [ast::TTTok(_, token::IDENT(code, _))] => code,
+        [ast::TtToken(_, token::IDENT(code, _))] => code,
         _ => unreachable!()
     };
     with_registered_diagnostics(|diagnostics| {
@@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
                                        token_tree: &[TokenTree])
                                        -> Box<MacResult+'cx> {
     let (code, description) = match token_tree {
-        [ast::TTTok(_, token::IDENT(ref code, _))] => {
+        [ast::TtToken(_, token::IDENT(ref code, _))] => {
             (code, None)
         },
-        [ast::TTTok(_, token::IDENT(ref code, _)),
-         ast::TTTok(_, token::COMMA),
-         ast::TTTok(_, token::LIT_STR_RAW(description, _))] => {
+        [ast::TtToken(_, token::IDENT(ref code, _)),
+         ast::TtToken(_, token::COMMA),
+         ast::TtToken(_, token::LIT_STR_RAW(description, _))] => {
             (code, Some(description))
         }
         _ => unreachable!()
@@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
                                           token_tree: &[TokenTree])
                                           -> Box<MacResult+'cx> {
     let name = match token_tree {
-        [ast::TTTok(_, token::IDENT(ref name, _))] => name,
+        [ast::TtToken(_, token::IDENT(ref name, _))] => name,
         _ => unreachable!()
     };
 
@@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
         cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
     } else {
         match tts[0] {
-            ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
-            ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
+            ast::TtToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
+            ast::TtToken(_, token::LIT_STR_RAW(ident, _)) => {
                 return Some(parse::raw_str_lit(ident.as_str()))
             }
             _ => {
@@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                ast::TTTok(_, token::COMMA) => (),
+                ast::TtToken(_, token::COMMA) => (),
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::expr(sp);
@@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         } else {
             match *e {
-                ast::TTTok(_, token::IDENT(ident,_)) => {
+                ast::TtToken(_, token::IDENT(ident,_)) => {
                     res_str.push_str(token::get_ident(ident).get())
                 }
                 _ => {
@@ -13,16 +13,14 @@ use codemap;
 use ext::base;
 use print;
 
-use std::rc::Rc;
-
 pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
                               sp: codemap::Span,
-                              tt: &[ast::TokenTree])
+                              tts: &[ast::TokenTree])
                               -> Box<base::MacResult+'cx> {
 
     cx.print_backtrace();
-    println!("{}", print::pprust::tt_to_string(&ast::TTDelim(
-        Rc::new(tt.iter().map(|x| (*x).clone()).collect()))));
+    println!("{}", print::pprust::tts_to_string(tts));
 
     // any so that `log_syntax` can be invoked as an expression and item.
     base::DummyResult::any(sp)
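The `log_syntax!` change above drops the trick of wrapping the argument slice in a synthetic `TTDelim` just to print it; `tts_to_string` renders the slice directly. A toy sketch of a slice-level printer in that spirit (stand-in `char` "trees", not the pprust API):

    fn tt_to_string(tt: &char) -> String {
        tt.to_string()
    }

    // Printing the slice directly removes the need to allocate a fake
    // delimited node around the caller's token trees.
    fn tts_to_string(tts: &[char]) -> String {
        tts.iter().map(tt_to_string).collect::<Vec<_>>().join(" ")
    }

    fn main() {
        println!("{}", tts_to_string(&['a', '+', 'b'])); // a + b
    }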
@@ -23,7 +23,7 @@ use ptr::P;
  *
  * This is registered as a set of expression syntax extension called quote!
  * that lifts its argument token-tree to an AST representing the
- * construction of the same token tree, with ast::TTNonterminal nodes
+ * construction of the same token tree, with ast::TtNonterminal nodes
  * interpreted as antiquotes (splices).
  *
  */
@@ -637,12 +637,12 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
 }
 
 
-fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
+fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
     match *tt {
-        ast::TTTok(sp, ref tok) => {
+        ast::TtToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
-                                     mk_ast_path(cx, sp, "TTTok"),
+                                     mk_ast_path(cx, sp, "TtToken"),
                                      vec!(e_sp, mk_token(cx, sp, tok)));
             let e_push =
                 cx.expr_method_call(sp,
@@ -650,13 +650,16 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                                     id_ext("push"),
                                     vec!(e_tok));
             vec!(cx.stmt_expr(e_push))
-        }
-
-        ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()),
-        ast::TTSeq(..) => fail!("TTSeq in quote!"),
-
-        ast::TTNonterminal(sp, ident) => {
-
+        },
+        ast::TtDelimited(sp, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
+            mk_tt(cx, sp, &open.to_tt()).into_iter()
+                .chain(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()))
+                .chain(mk_tt(cx, sp, &close.to_tt()).into_iter())
+                .collect()
+        },
+        ast::TtSequence(..) => fail!("TtSequence in quote!"),
+        ast::TtNonterminal(sp, ident) => {
             // tt.extend($ident.to_tokens(ext_cx).into_iter())
 
             let e_to_toks =
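The new `TtDelimited` arm flattens the open delimiter, the inner trees, and the close delimiter into one statement sequence with `chain`/`flat_map`. A toy sketch of that flattening (a hypothetical `emit` stands in for the real statement-building):

    // `emit` is a stand-in for mk_tt's per-tree statement generation.
    fn emit(tt: &str) -> Vec<String> {
        vec![format!("push({})", tt)]
    }

    fn emit_delimited(open: &str, inner: &[&str], close: &str) -> Vec<String> {
        emit(open).into_iter()
            .chain(inner.iter().flat_map(|tt| emit(tt).into_iter()))
            .chain(emit(close).into_iter())
            .collect()
    }

    fn main() {
        // "(a b)" flattens to push("("), push(a), push(b), push(")")
        println!("{:?}", emit_delimited("(", &["a", "b"], ")"));
    }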
@@ -674,7 +677,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                                     vec!(e_to_toks));
 
             vec!(cx.stmt_expr(e_push))
-        }
+        },
     }
 }
 
@@ -690,7 +693,7 @@ fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree])
               -> (P<ast::Expr>, P<ast::Expr>) {
     // NB: It appears that the main parser loses its mind if we consider
-    // $foo as a TTNonterminal during the main parse, so we have to re-parse
+    // $foo as a TtNonterminal during the main parse, so we have to re-parse
     // under quote_depth > 0. This is silly and should go away; the _guess_ is
     // it has to do with transition away from supporting old-style macros, so
     // try removing it when enough of them are gone.
@@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
                            tt: &[ast::TokenTree])
                            -> Box<base::MacResult+'static> {
     match tt {
-        [ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => {
+        [ast::TtToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
             cx.set_trace_macros(true);
         }
-        [ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => {
+        [ast::TtToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
@@ -323,9 +323,9 @@ pub fn parse(sess: &ParseSess,
       } else {
         match ei.elts[idx].node.clone() {
           /* need to descend into sequence */
-          MatchSeq(ref matchers, ref sep, zero_ok,
+          MatchSeq(ref matchers, ref sep, kleene_op,
                    match_idx_lo, match_idx_hi) => {
-            if zero_ok {
+            if kleene_op == ast::ZeroOrMore {
               let mut new_ei = ei.clone();
               new_ei.idx += 1u;
               //we specifically matched zero repeats.
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
+use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TtDelimited};
 use ast;
 use codemap::{Span, Spanned, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, MacroDef};
@@ -147,13 +147,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           rhses: &[Rc<NamedMatch>])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
-        println!("{}! {} {} {}",
+        println!("{}! {{ {} }}",
                  token::get_ident(name),
-                 "{",
-                 print::pprust::tt_to_string(&TTDelim(Rc::new(arg.iter()
-                                                              .map(|x| (*x).clone())
-                                                              .collect()))),
-                 "}");
+                 print::pprust::tts_to_string(arg));
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -175,15 +171,12 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
               // okay, what's your transcriber?
               MatchedNonterminal(NtTT(ref tt)) => {
                   match **tt {
-                      // cut off delimiters; don't parse 'em
-                      TTDelim(ref tts) => {
-                          (*tts).slice(1u,(*tts).len()-1u)
-                                .iter()
-                                .map(|x| (*x).clone())
-                                .collect()
-                      }
-                      _ => cx.span_fatal(
-                          sp, "macro rhs must be delimited")
+                      // ignore delimiters
+                      TtDelimited(_, ref delimed) => {
+                          let (_, ref tts, _) = **delimed;
+                          tts.clone()
+                      },
+                      _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                   }
               },
               _ => cx.span_bug(sp, "bad thing in rhs")
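The macro RHS used to be recovered by slicing the first and last elements (the delimiters) off a vector; with the tuple layout it is just the middle field. A small sketch contrasting the two approaches (stand-in `char` trees):

    fn inner_old(tts: &[char]) -> Vec<char> {
        // old: slice off the first and last elements (the delimiters)
        tts[1..tts.len() - 1].to_vec()
    }

    fn inner_new(delimed: &(char, Vec<char>, char)) -> Vec<char> {
        // new: the inner trees are simply the middle field of the tuple
        let (_, ref tts, _) = *delimed;
        tts.clone()
    }

    fn main() {
        let old = vec!['(', 'a', 'b', ')'];
        let new = ('(', vec!['a', 'b'], ')');
        assert_eq!(inner_old(&old), inner_new(&new));
    }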
@@ -239,10 +232,11 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
         ms(MatchSeq(vec!(
             ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)),
             ms(MatchTok(FAT_ARROW)),
-            ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)),
+            ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI),
+               ast::OneOrMore, 0u, 2u)),
         //to phase into semicolon-termination instead of
         //semicolon-separation
-        ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u)));
+        ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, ast::ZeroOrMore, 2u, 2u)));
 
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use ast;
-use ast::{TokenTree, TTDelim, TTTok, TTSeq, TTNonterminal, Ident};
+use ast::{TokenTree, TtDelimited, TtToken, TtSequence, TtNonterminal, Ident};
 use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -18,6 +18,7 @@ use parse::token;
 use parse::lexer::TokenAndSpan;
 
 use std::rc::Rc;
+use std::ops::Add;
 use std::collections::HashMap;
 
 ///an unzipping of `TokenTree`s
@@ -44,7 +45,7 @@ pub struct TtReader<'a> {
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
+/// `src` contains no `TtSequence`s and `TtNonterminal`s, `interp` can (and
 /// should) be none.
 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
                          interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
@@ -104,37 +105,45 @@ enum LockstepIterSize {
     LisContradiction(String),
 }
 
-fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
-    match lhs {
-        LisUnconstrained => rhs.clone(),
-        LisContradiction(_) => lhs.clone(),
-        LisConstraint(l_len, l_id) => match rhs {
-            LisUnconstrained => lhs.clone(),
-            LisContradiction(_) => rhs.clone(),
-            LisConstraint(r_len, _) if l_len == r_len => lhs.clone(),
-            LisConstraint(r_len, r_id) => {
-                let l_n = token::get_ident(l_id);
-                let r_n = token::get_ident(r_id);
-                LisContradiction(format!("inconsistent lockstep iteration: \
-                                          '{}' has {} items, but '{}' has {}",
-                                         l_n, l_len, r_n, r_len).to_string())
-            }
-        }
-    }
-}
+impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
+    fn add(&self, other: &LockstepIterSize) -> LockstepIterSize {
+        match *self {
+            LisUnconstrained => other.clone(),
+            LisContradiction(_) => self.clone(),
+            LisConstraint(l_len, l_id) => match *other {
+                LisUnconstrained => self.clone(),
+                LisContradiction(_) => other.clone(),
+                LisConstraint(r_len, _) if l_len == r_len => self.clone(),
+                LisConstraint(r_len, r_id) => {
+                    let l_n = token::get_ident(l_id);
+                    let r_n = token::get_ident(r_id);
+                    LisContradiction(format!("inconsistent lockstep iteration: \
+                                              '{}' has {} items, but '{}' has {}",
+                                             l_n, l_len, r_n, r_len).to_string())
+                }
+            },
+        }
+    }
+}
 
 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
-        TTDelim(ref tts) | TTSeq(_, ref tts, _, _) => {
-            tts.iter().fold(LisUnconstrained, |lis, tt| {
-                lis_merge(lis, lockstep_iter_size(tt, r))
+        TtDelimited(_, ref delimed) => {
+            let (_, ref tts, _) = **delimed;
+            tts.iter().fold(LisUnconstrained, |size, tt| {
+                size + lockstep_iter_size(tt, r)
             })
-        }
-        TTTok(..) => LisUnconstrained,
-        TTNonterminal(_, name) => match *lookup_cur_matched(r, name) {
+        },
+        TtSequence(_, ref tts, _, _) => {
+            tts.iter().fold(LisUnconstrained, |size, tt| {
+                size + lockstep_iter_size(tt, r)
+            })
+        },
+        TtToken(..) => LisUnconstrained,
+        TtNonterminal(_, name) => match *lookup_cur_matched(r, name) {
             MatchedNonterminal(_) => LisUnconstrained,
             MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
-        }
+        },
     }
 }
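Folding with `+` is what the new `impl Add` buys: the old free-function call `lis_merge(lis, ...)` becomes `size + lockstep_iter_size(tt, r)`. A standalone sketch of the same merge law using today's one-parameter `std::ops::Add` (the 2014 code above uses that era's two-parameter `Add<RHS, Output>` trait, so the shape differs):

    // Hypothetical stand-in for LockstepIterSize.
    #[derive(Clone, Debug, PartialEq)]
    enum Size {
        Unconstrained,
        Constraint(usize),
        Contradiction,
    }

    impl std::ops::Add for Size {
        type Output = Size;
        fn add(self, other: Size) -> Size {
            match (self, other) {
                (Size::Unconstrained, rhs) => rhs,
                (lhs, Size::Unconstrained) => lhs,
                (Size::Constraint(l), Size::Constraint(r)) if l == r => Size::Constraint(l),
                _ => Size::Contradiction, // inconsistent lockstep iteration
            }
        }
    }

    fn main() {
        let sizes = vec![Size::Constraint(3), Size::Unconstrained, Size::Constraint(3)];
        // The fold now reads `acc + s`, mirroring `size + lockstep_iter_size(tt, r)`.
        let merged = sizes.into_iter().fold(Size::Unconstrained, |acc, s| acc + s);
        assert_eq!(merged, Size::Constraint(3));
    }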
@@ -189,32 +198,38 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 }
             }
         }
-        loop { /* because it's easiest, this handles `TTDelim` not starting
-                  with a `TTTok`, even though it won't happen */
+        loop { /* because it's easiest, this handles `TtDelimited` not starting
+                  with a `TtToken`, even though it won't happen */
             let t = {
                 let frame = r.stack.last().unwrap();
                 // FIXME(pcwalton): Bad copy.
                 (*frame.forest)[frame.idx].clone()
            };
            match t {
-                TTDelim(tts) => {
+                TtDelimited(_, ref delimed) => {
+                    let (ref open, ref tts, ref close) = **delimed;
+                    let mut forest = Vec::with_capacity(1 + tts.len() + 1);
+                    forest.push(open.to_tt());
+                    forest.extend(tts.iter().map(|x| (*x).clone()));
+                    forest.push(close.to_tt());
+
                     r.stack.push(TtFrame {
-                        forest: tts,
+                        forest: Rc::new(forest),
                         idx: 0,
                         dotdotdoted: false,
                         sep: None
                     });
                     // if this could be 0-length, we'd need to potentially recur here
                 }
-                TTTok(sp, tok) => {
+                TtToken(sp, tok) => {
                     r.cur_span = sp;
                     r.cur_tok = tok;
                     r.stack.last_mut().unwrap().idx += 1;
                     return ret_val;
                 }
-                TTSeq(sp, tts, sep, zerok) => {
+                TtSequence(sp, tts, sep, kleene_op) => {
                     // FIXME(pcwalton): Bad copy.
-                    match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) {
+                    match lockstep_iter_size(&TtSequence(sp, tts.clone(), sep.clone(), kleene_op), r) {
                         LisUnconstrained => {
                             r.sp_diag.span_fatal(
                                 sp.clone(), /* blame macro writer */
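Because the transcriber still walks a flat forest, the `TtDelimited` arm rebuilds one by re-attaching the open and close tokens around the inner trees before pushing the new frame. A minimal sketch of that rebuild (stand-in `u8` tokens):

    fn rebuild_forest(open: u8, inner: &[u8], close: u8) -> Vec<u8> {
        // Matches the commit: capacity is 1 (open) + inner.len() + 1 (close).
        let mut forest = Vec::with_capacity(1 + inner.len() + 1);
        forest.push(open);
        forest.extend(inner.iter().cloned());
        forest.push(close);
        forest
    }

    fn main() {
        assert_eq!(rebuild_forest(b'(', &[b'a', b'b'], b')'), b"(ab)".to_vec());
    }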
@@ -228,7 +243,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                         }
                         LisConstraint(len, _) => {
                             if len == 0 {
-                                if !zerok {
+                                if kleene_op == ast::OneOrMore {
                                     // FIXME #2887 blame invoker
                                     r.sp_diag.span_fatal(sp.clone(),
                                         "this must repeat at least once");
|
||||
}
|
||||
}
|
||||
// FIXME #2887: think about span stuff here
|
||||
TTNonterminal(sp, ident) => {
|
||||
TtNonterminal(sp, ident) => {
|
||||
r.stack.last_mut().unwrap().idx += 1;
|
||||
match *lookup_cur_matched(r, ident) {
|
||||
/* sidestep the interpolation tricks for ident because
|
||||
|
@@ -569,16 +569,29 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
 
 pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
     match *tt {
-        TTTok(span, ref tok) =>
-            TTTok(span, fld.fold_token(tok.clone())),
-        TTDelim(ref tts) => TTDelim(Rc::new(fld.fold_tts(tts.as_slice()))),
-        TTSeq(span, ref pattern, ref sep, is_optional) =>
-            TTSeq(span,
-                  Rc::new(fld.fold_tts(pattern.as_slice())),
-                  sep.clone().map(|tok| fld.fold_token(tok)),
-                  is_optional),
-        TTNonterminal(sp,ref ident) =>
-            TTNonterminal(sp,fld.fold_ident(*ident))
+        TtToken(span, ref tok) =>
+            TtToken(span, fld.fold_token(tok.clone())),
+        TtDelimited(span, ref delimed) => {
+            let (ref open, ref tts, ref close) = **delimed;
+            TtDelimited(span, Rc::new((
+                Delimiter {
+                    span: open.span,
+                    token: fld.fold_token(open.token.clone())
+                },
+                fld.fold_tts(tts.as_slice()),
+                Delimiter {
+                    span: close.span,
+                    token: fld.fold_token(close.token.clone())
+                },
+            )))
+        },
+        TtSequence(span, ref pattern, ref sep, is_optional) =>
+            TtSequence(span,
+                       Rc::new(fld.fold_tts(pattern.as_slice())),
+                       sep.clone().map(|tok| fld.fold_token(tok)),
+                       is_optional),
+        TtNonterminal(sp,ref ident) =>
+            TtNonterminal(sp,fld.fold_ident(*ident))
     }
 }
 
@@ -788,65 +788,57 @@ mod test {
     }
 
     // check the token-tree-ization of macros
-    #[test] fn string_to_tts_macro () {
+    #[test]
+    fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
         let tts: &[ast::TokenTree] = tts.as_slice();
         match tts {
-            [ast::TTTok(_,_),
-             ast::TTTok(_,token::NOT),
-             ast::TTTok(_,_),
-             ast::TTDelim(ref delim_elts)] => {
-                let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
-                match delim_elts {
-                    [ast::TTTok(_,token::LPAREN),
-                     ast::TTDelim(ref first_set),
-                     ast::TTTok(_,token::FAT_ARROW),
-                     ast::TTDelim(ref second_set),
-                     ast::TTTok(_,token::RPAREN)] => {
-                        let first_set: &[ast::TokenTree] =
-                            first_set.as_slice();
-                        match first_set {
-                            [ast::TTTok(_,token::LPAREN),
-                             ast::TTTok(_,token::DOLLAR),
-                             ast::TTTok(_,_),
-                             ast::TTTok(_,token::RPAREN)] => {
-                                let second_set: &[ast::TokenTree] =
-                                    second_set.as_slice();
-                                match second_set {
-                                    [ast::TTTok(_,token::LPAREN),
-                                     ast::TTTok(_,token::DOLLAR),
-                                     ast::TTTok(_,_),
-                                     ast::TTTok(_,token::RPAREN)] => {
-                                        assert_eq!("correct","correct")
-                                    }
-                                    _ => assert_eq!("wrong 4","correct")
-                                }
-                            },
-                            _ => {
-                                error!("failing value 3: {}",first_set);
-                                assert_eq!("wrong 3","correct")
-                            }
+            [ast::TtToken(_, token::IDENT(name_macro_rules, false)),
+             ast::TtToken(_, token::NOT),
+             ast::TtToken(_, token::IDENT(name_zip, false)),
+             ast::TtDelimited(_, ref macro_delimed)]
+            if name_macro_rules.as_str() == "macro_rules"
+            && name_zip.as_str() == "zip" => {
+                let (ref macro_open, ref macro_tts, ref macro_close) = **macro_delimed;
+                match (macro_open, macro_tts.as_slice(), macro_close) {
+                    (&ast::Delimiter { token: token::LPAREN, .. },
+                     [ast::TtDelimited(_, ref first_delimed),
+                      ast::TtToken(_, token::FAT_ARROW),
+                      ast::TtDelimited(_, ref second_delimed)],
+                     &ast::Delimiter { token: token::RPAREN, .. }) => {
+                        let (ref first_open, ref first_tts, ref first_close) = **first_delimed;
+                        match (first_open, first_tts.as_slice(), first_close) {
+                            (&ast::Delimiter { token: token::LPAREN, .. },
+                             [ast::TtToken(_, token::DOLLAR),
+                              ast::TtToken(_, token::IDENT(name, false))],
+                             &ast::Delimiter { token: token::RPAREN, .. })
+                            if name.as_str() == "a" => {},
+                            _ => fail!("value 3: {}", **first_delimed),
+                        }
+                        let (ref second_open, ref second_tts, ref second_close) = **second_delimed;
+                        match (second_open, second_tts.as_slice(), second_close) {
+                            (&ast::Delimiter { token: token::LPAREN, .. },
+                             [ast::TtToken(_, token::DOLLAR),
+                              ast::TtToken(_, token::IDENT(name, false))],
+                             &ast::Delimiter { token: token::RPAREN, .. })
+                            if name.as_str() == "a" => {},
+                            _ => fail!("value 4: {}", **second_delimed),
+                        }
                     },
-                    _ => {
-                        error!("failing value 2: {}",delim_elts);
-                        assert_eq!("wrong","correct");
-                    }
+                    _ => fail!("value 2: {}", **macro_delimed),
                 }
             },
-            _ => {
-                error!("failing value: {}",tts);
-                assert_eq!("wrong 1","correct");
-            }
+            _ => fail!("value: {}",tts),
         }
     }
 
-    #[test] fn string_to_tts_1 () {
+    #[test]
+    fn string_to_tts_1 () {
         let tts = string_to_tts("fn a (b : int) { b; }".to_string());
         assert_eq!(json::encode(&tts),
                    "[\
                     {\
-                        \"variant\":\"TTTok\",\
+                        \"variant\":\"TtToken\",\
                         \"fields\":[\
                             null,\
                             {\
@@ -859,7 +851,7 @@ mod test {
                             ]\
                         },\
                         {\
-                            \"variant\":\"TTTok\",\
+                            \"variant\":\"TtToken\",\
                             \"fields\":[\
                                 null,\
                                 {\
@@ -872,96 +864,90 @@ mod test {
                             ]\
                         },\
                         {\
-                            \"variant\":\"TTDelim\",\
+                            \"variant\":\"TtDelimited\",\
                             \"fields\":[\
                                 null,\
                                 [\
                                     {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            \"LPAREN\"\
-                                        ]\
+                                        \"span\":null,\
+                                        \"token\":\"LPAREN\"\
                                     },\
-                                    {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            {\
-                                                \"variant\":\"IDENT\",\
-                                                \"fields\":[\
-                                                    \"b\",\
-                                                    false\
-                                                ]\
-                                            }\
-                                        ]\
-                                    },\
-                                    {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            \"COLON\"\
-                                        ]\
-                                    },\
-                                    {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            {\
-                                                \"variant\":\"IDENT\",\
-                                                \"fields\":[\
-                                                    \"int\",\
-                                                    false\
-                                                ]\
-                                            }\
-                                        ]\
-                                    },\
+                                    [\
+                                        {\
+                                            \"variant\":\"TtToken\",\
+                                            \"fields\":[\
+                                                null,\
+                                                {\
+                                                    \"variant\":\"IDENT\",\
+                                                    \"fields\":[\
+                                                        \"b\",\
+                                                        false\
+                                                    ]\
+                                                }\
+                                            ]\
+                                        },\
+                                        {\
+                                            \"variant\":\"TtToken\",\
+                                            \"fields\":[\
+                                                null,\
+                                                \"COLON\"\
+                                            ]\
+                                        },\
+                                        {\
+                                            \"variant\":\"TtToken\",\
+                                            \"fields\":[\
+                                                null,\
+                                                {\
+                                                    \"variant\":\"IDENT\",\
+                                                    \"fields\":[\
+                                                        \"int\",\
+                                                        false\
+                                                    ]\
+                                                }\
+                                            ]\
+                                        }\
+                                    ],\
                                     {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            \"RPAREN\"\
-                                        ]\
+                                        \"span\":null,\
+                                        \"token\":\"RPAREN\"\
                                     }\
                                 ]\
                             ]\
                         },\
                         {\
-                            \"variant\":\"TTDelim\",\
+                            \"variant\":\"TtDelimited\",\
                             \"fields\":[\
                                 null,\
                                 [\
                                     {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            \"LBRACE\"\
-                                        ]\
+                                        \"span\":null,\
+                                        \"token\":\"LBRACE\"\
                                     },\
-                                    {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            {\
-                                                \"variant\":\"IDENT\",\
-                                                \"fields\":[\
-                                                    \"b\",\
-                                                    false\
-                                                ]\
-                                            }\
-                                        ]\
-                                    },\
-                                    {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            \"SEMI\"\
-                                        ]\
-                                    },\
+                                    [\
+                                        {\
+                                            \"variant\":\"TtToken\",\
+                                            \"fields\":[\
+                                                null,\
+                                                {\
+                                                    \"variant\":\"IDENT\",\
+                                                    \"fields\":[\
+                                                        \"b\",\
+                                                        false\
+                                                    ]\
+                                                }\
+                                            ]\
+                                        },\
+                                        {\
+                                            \"variant\":\"TtToken\",\
+                                            \"fields\":[\
+                                                null,\
+                                                \"SEMI\"\
+                                            ]\
+                                        }\
+                                    ],\
                                     {\
-                                        \"variant\":\"TTTok\",\
-                                        \"fields\":[\
-                                            null,\
-                                            \"RBRACE\"\
-                                        ]\
+                                        \"span\":null,\
+                                        \"token\":\"RBRACE\"\
                                     }\
                                 ]\
                             ]\
@@ -48,8 +48,8 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
 use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
-use ast::{TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
-use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
+use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TtDelimited, TtSequence, TtToken};
+use ast::{TtNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
 use ast::{TyNil, TyParam, TyParamBound, TyParen, TyPath, TyPtr, TyQPath};
@@ -2497,27 +2497,30 @@ impl<'a> Parser<'a> {
         return e;
     }
 
-    /// Parse an optional separator followed by a kleene-style
+    /// Parse an optional separator followed by a Kleene-style
     /// repetition token (+ or *).
-    pub fn parse_sep_and_zerok(&mut self) -> (Option<token::Token>, bool) {
-        fn parse_zerok(parser: &mut Parser) -> Option<bool> {
+    pub fn parse_sep_and_kleene_op(&mut self) -> (Option<token::Token>, ast::KleeneOp) {
+        fn parse_kleene_op(parser: &mut Parser) -> Option<ast::KleeneOp> {
             match parser.token {
-                token::BINOP(token::STAR) | token::BINOP(token::PLUS) => {
-                    let zerok = parser.token == token::BINOP(token::STAR);
+                token::BINOP(token::STAR) => {
                     parser.bump();
-                    Some(zerok)
+                    Some(ast::ZeroOrMore)
+                },
+                token::BINOP(token::PLUS) => {
+                    parser.bump();
+                    Some(ast::OneOrMore)
                 },
                 _ => None
             }
         };
 
-        match parse_zerok(self) {
-            Some(zerok) => return (None, zerok),
+        match parse_kleene_op(self) {
+            Some(kleene_op) => return (None, kleene_op),
             None => {}
         }
 
         let separator = self.bump_and_get();
-        match parse_zerok(self) {
+        match parse_kleene_op(self) {
             Some(zerok) => (Some(separator), zerok),
             None => self.fatal("expected `*` or `+`")
         }
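`parse_sep_and_kleene_op` tries to read a Kleene operator directly; failing that, it consumes one token as the separator and then requires the operator. A simplified standalone sketch of that two-step logic over stand-in `char` tokens (the real parser calls `self.fatal` where this returns `None`):

    #[derive(Debug, PartialEq)]
    enum KleeneOp { ZeroOrMore, OneOrMore }

    fn op(c: char) -> Option<KleeneOp> {
        match c {
            '*' => Some(KleeneOp::ZeroOrMore),
            '+' => Some(KleeneOp::OneOrMore),
            _ => None,
        }
    }

    fn sep_and_kleene_op(toks: &[char]) -> Option<(Option<char>, KleeneOp)> {
        match toks {
            // An operator up front means there is no separator.
            [c, ..] if op(*c).is_some() => Some((None, op(*c).unwrap())),
            // Otherwise one separator token, then the operator is mandatory.
            [sep, c, ..] => op(*c).map(|k| (Some(*sep), k)),
            _ => None,
        }
    }

    fn main() {
        assert_eq!(sep_and_kleene_op(&['*']), Some((None, KleeneOp::ZeroOrMore)));
        assert_eq!(sep_and_kleene_op(&[',', '+']), Some((Some(','), KleeneOp::OneOrMore)));
        assert_eq!(sep_and_kleene_op(&[',', 'x']), None); // "expected `*` or `+`"
    }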
@@ -2526,8 +2529,8 @@ impl<'a> Parser<'a> {
     /// parse a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         // FIXME #6994: currently, this is too eager. It
-        // parses token trees but also identifies TTSeq's
-        // and TTNonterminal's; it's too early to know yet
+        // parses token trees but also identifies TtSequence's
+        // and TtNonterminal's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
         maybe_whole!(deref self, NtTT);
@@ -2564,26 +2567,21 @@ impl<'a> Parser<'a> {
                         seq_sep_none(),
                         |p| p.parse_token_tree()
                     );
-                    let (s, z) = p.parse_sep_and_zerok();
+                    let (sep, repeat) = p.parse_sep_and_kleene_op();
                     let seq = match seq {
                         Spanned { node, .. } => node,
                     };
-                    TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
+                    TtSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), sep, repeat)
                 } else {
-                    TTNonterminal(sp, p.parse_ident())
+                    TtNonterminal(sp, p.parse_ident())
                 }
             }
             _ => {
-                parse_any_tt_tok(p)
+                TtToken(p.span, p.bump_and_get())
             }
         }
     }
 
-    // turn the next token into a TTTok:
-    fn parse_any_tt_tok(p: &mut Parser) -> TokenTree {
-        TTTok(p.span, p.bump_and_get())
-    }
-
     match (&self.token, token::close_delimiter_for(&self.token)) {
         (&token::EOF, _) => {
             let open_braces = self.open_braces.clone();
||||
match (&self.token, token::close_delimiter_for(&self.token)) {
|
||||
(&token::EOF, _) => {
|
||||
let open_braces = self.open_braces.clone();
|
||||
@ -2595,21 +2593,32 @@ impl<'a> Parser<'a> {
|
||||
self.fatal("this file contains an un-closed delimiter ");
|
||||
}
|
||||
(_, Some(close_delim)) => {
|
||||
// The span for beginning of the delimited section
|
||||
let pre_span = self.span;
|
||||
|
||||
// Parse the open delimiter.
|
||||
self.open_braces.push(self.span);
|
||||
let mut result = vec!(parse_any_tt_tok(self));
|
||||
let open = Delimiter {
|
||||
span: self.span,
|
||||
token: self.bump_and_get(),
|
||||
};
|
||||
|
||||
let trees =
|
||||
self.parse_seq_to_before_end(&close_delim,
|
||||
seq_sep_none(),
|
||||
|p| p.parse_token_tree());
|
||||
result.extend(trees.into_iter());
|
||||
// Parse the token trees within the delimeters
|
||||
let tts = self.parse_seq_to_before_end(
|
||||
&close_delim, seq_sep_none(), |p| p.parse_token_tree()
|
||||
);
|
||||
|
||||
// Parse the close delimiter.
|
||||
result.push(parse_any_tt_tok(self));
|
||||
let close = Delimiter {
|
||||
span: self.span,
|
||||
token: self.bump_and_get(),
|
||||
};
|
||||
self.open_braces.pop().unwrap();
|
||||
|
||||
TTDelim(Rc::new(result))
|
||||
// Expand to cover the entire delimited token tree
|
||||
let span = Span { hi: self.span.hi, ..pre_span };
|
||||
|
||||
TtDelimited(span, Rc::new((open, tts, close)))
|
||||
}
|
||||
_ => parse_non_delim_tt_tok(self)
|
||||
}
|
||||
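The parser now records the span before the open token (`pre_span`) and widens it to the close token once the group is finished, so the whole `TtDelimited` node covers the entire delimited section in diagnostics. A tiny sketch of that span arithmetic with a hypothetical `Span`:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    fn delimited_span(pre_span: Span, close_span: Span) -> Span {
        // Mirrors `Span { hi: self.span.hi, ..pre_span }` in the commit.
        Span { lo: pre_span.lo, hi: close_span.hi }
    }

    fn main() {
        let pre = Span { lo: 10, hi: 11 };   // at the `(`
        let close = Span { lo: 20, hi: 21 }; // at the `)`
        assert_eq!(delimited_span(pre, close), Span { lo: 10, hi: 21 });
    }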
@@ -2673,8 +2682,8 @@ impl<'a> Parser<'a> {
         if ms.len() == 0u {
             self.fatal("repetition body must be nonempty");
         }
-        let (sep, zerok) = self.parse_sep_and_zerok();
-        MatchSeq(ms, sep, zerok, name_idx_lo, *name_idx)
+        let (sep, kleene_op) = self.parse_sep_and_kleene_op();
+        MatchSeq(ms, sep, kleene_op, name_idx_lo, *name_idx)
     } else {
         let bound_to = self.parse_ident();
         self.expect(&token::COLON);
@@ -1020,8 +1020,15 @@ impl<'a> State<'a> {
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TTDelim(ref tts) => self.print_tts(tts.as_slice()),
-            ast::TTTok(_, ref tk) => {
+            ast::TtDelimited(_, ref delimed) => {
+                let (ref open, ref tts, ref close) = **delimed;
+                try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
+                try!(space(&mut self.s));
+                try!(self.print_tts(tts.as_slice()));
+                try!(space(&mut self.s));
+                word(&mut self.s, parse::token::to_string(&close.token).as_slice())
+            },
+            ast::TtToken(_, ref tk) => {
                 try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
                 match *tk {
                     parse::token::DOC_COMMENT(..) => {
@@ -1030,22 +1037,25 @@ impl<'a> State<'a> {
                     _ => Ok(())
                 }
             }
-            ast::TTSeq(_, ref tts, ref sep, zerok) => {
+            ast::TtSequence(_, ref tts, ref separator, kleene_op) => {
                 try!(word(&mut self.s, "$("));
                 for tt_elt in (*tts).iter() {
                     try!(self.print_tt(tt_elt));
                 }
                 try!(word(&mut self.s, ")"));
-                match *sep {
+                match *separator {
                     Some(ref tk) => {
                         try!(word(&mut self.s,
                                   parse::token::to_string(tk).as_slice()));
                     }
                     None => ()
                 }
-                word(&mut self.s, if zerok { "*" } else { "+" })
+                match kleene_op {
+                    ast::ZeroOrMore => word(&mut self.s, "*"),
+                    ast::OneOrMore => word(&mut self.s, "+"),
+                }
             }
-            ast::TTNonterminal(_, name) => {
+            ast::TtNonterminal(_, name) => {
                 try!(word(&mut self.s, "$"));
                 self.print_ident(name)
            }
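In the pretty-printer, the delimited arm now prints `open inner close` itself instead of delegating to `print_tts` over a vector that contained the delimiters, and the repeat suffix comes from matching `KleeneOp` rather than testing a bool. A toy sketch of both (simplified stand-ins, not the pprust API):

    enum KleeneOp { ZeroOrMore, OneOrMore }

    // The delimited arm: open token, space, inner trees, space, close token.
    fn print_delimited(open: char, inner: &str, close: char) -> String {
        format!("{} {} {}", open, inner, close)
    }

    // The repeat suffix is chosen by matching the enum.
    fn repeat_suffix(op: &KleeneOp) -> &'static str {
        match *op {
            KleeneOp::ZeroOrMore => "*",
            KleeneOp::OneOrMore => "+",
        }
    }

    fn main() {
        println!("{}", print_delimited('(', "$x", ')'));             // ( $x )
        println!("$( $x ){}", repeat_suffix(&KleeneOp::ZeroOrMore)); // $( $x )*
    }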
@@ -18,7 +18,7 @@ extern crate rustc;
 
 use syntax::codemap::Span;
 use syntax::parse::token::{IDENT, get_ident};
-use syntax::ast::{TokenTree, TTTok};
+use syntax::ast::{TokenTree, TtToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
 use syntax::ext::build::AstBuilder; // trait for expr_uint
 use rustc::plugin::Registry;
@@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         ("I", 1)];
 
     let text = match args {
-        [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
+        [TtToken(_, IDENT(s, _))] => get_ident(s).to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);