Remove Token::MatchNt
.
This commit is contained in:
parent
d8b34e9a74
commit
0cc7053efa
@ -1109,10 +1109,6 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
|
||||
token::Token::Ident(ident) |
|
||||
token::Token::Lifetime(ident) |
|
||||
token::Token::SubstNt(ident) => ident.name.as_str().hash(self.st),
|
||||
token::Token::MatchNt(ident1, ident2) => {
|
||||
ident1.name.as_str().hash(self.st);
|
||||
ident2.name.as_str().hash(self.st);
|
||||
}
|
||||
|
||||
token::Token::Interpolated(ref non_terminal) => {
|
||||
// FIXME(mw): This could be implemented properly. It's just a
|
||||
|
@ -315,7 +315,7 @@ impl<'a> Classifier<'a> {
|
||||
token::Lifetime(..) => Class::Lifetime,
|
||||
|
||||
token::Underscore | token::Eof | token::Interpolated(..) |
|
||||
token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None,
|
||||
token::SubstNt(..) | token::Tilde | token::At => Class::None,
|
||||
};
|
||||
|
||||
// Anything that didn't return above is the simple case where we the
|
||||
|
@ -85,9 +85,7 @@ use errors::FatalError;
|
||||
use ext::tt::quoted;
|
||||
use parse::{Directory, ParseSess};
|
||||
use parse::parser::{PathStyle, Parser};
|
||||
use parse::token::{DocComment, MatchNt};
|
||||
use parse::token::{Token, Nonterminal};
|
||||
use parse::token;
|
||||
use parse::token::{self, DocComment, Token, Nonterminal};
|
||||
use print::pprust;
|
||||
use tokenstream::TokenTree;
|
||||
use util::small_vector::SmallVector;
|
||||
@ -156,7 +154,7 @@ pub fn count_names(ms: &[quoted::TokenTree]) -> usize {
|
||||
TokenTree::Delimited(_, ref delim) => {
|
||||
count_names(&delim.tts)
|
||||
}
|
||||
TokenTree::Token(_, MatchNt(..)) => {
|
||||
TokenTree::MetaVarDecl(..) => {
|
||||
1
|
||||
}
|
||||
TokenTree::Token(..) => 0,
|
||||
@ -221,7 +219,7 @@ fn nameize<I: Iterator<Item=Rc<NamedMatch>>>(ms: &[quoted::TokenTree], mut res:
|
||||
n_rec(next_m, res.by_ref(), ret_val)?;
|
||||
}
|
||||
}
|
||||
TokenTree::Token(sp, MatchNt(bind_name, _)) => {
|
||||
TokenTree::MetaVarDecl(sp, bind_name, _) => {
|
||||
match ret_val.entry(bind_name) {
|
||||
Vacant(spot) => {
|
||||
spot.insert(res.next().unwrap());
|
||||
@ -377,7 +375,7 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
|
||||
top_elts: Tt(TokenTree::Sequence(sp, seq)),
|
||||
}));
|
||||
}
|
||||
TokenTree::Token(_, MatchNt(..)) => {
|
||||
TokenTree::MetaVarDecl(..) => {
|
||||
// Built-in nonterminals never start with these tokens,
|
||||
// so we can eliminate them from consideration.
|
||||
match *token {
|
||||
@ -445,7 +443,7 @@ pub fn parse(sess: &ParseSess,
|
||||
}
|
||||
} else if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 {
|
||||
let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) {
|
||||
TokenTree::Token(_, MatchNt(bind, name)) => {
|
||||
TokenTree::MetaVarDecl(_, bind, name) => {
|
||||
format!("{} ('{}')", name, bind)
|
||||
}
|
||||
_ => panic!()
|
||||
@ -467,7 +465,7 @@ pub fn parse(sess: &ParseSess,
|
||||
parser.bump();
|
||||
} else /* bb_eis.len() == 1 */ {
|
||||
let mut ei = bb_eis.pop().unwrap();
|
||||
if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) {
|
||||
if let TokenTree::MetaVarDecl(span, _, ident) = ei.top_elts.get_tt(ei.idx) {
|
||||
let match_cur = ei.match_cur;
|
||||
ei.matches[match_cur].push(Rc::new(MatchedNonterminal(
|
||||
Rc::new(parse_nt(&mut parser, span, &ident.name.as_str())))));
|
||||
|
@ -20,7 +20,7 @@ use ext::tt::quoted;
|
||||
use ext::tt::transcribe::transcribe;
|
||||
use parse::{Directory, ParseSess};
|
||||
use parse::parser::Parser;
|
||||
use parse::token::{self, NtTT, Token};
|
||||
use parse::token::{self, NtTT};
|
||||
use parse::token::Token::*;
|
||||
use print;
|
||||
use symbol::Symbol;
|
||||
@ -165,14 +165,12 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
|
||||
// $( $lhs:tt => $rhs:tt );+
|
||||
// ...quasiquoting this would be nice.
|
||||
// These spans won't matter, anyways
|
||||
let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
|
||||
let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
|
||||
let argument_gram = vec![
|
||||
quoted::TokenTree::Sequence(DUMMY_SP, Rc::new(quoted::SequenceRepetition {
|
||||
tts: vec![
|
||||
quoted::TokenTree::Token(DUMMY_SP, match_lhs_tok),
|
||||
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
|
||||
quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
|
||||
quoted::TokenTree::Token(DUMMY_SP, match_rhs_tok),
|
||||
quoted::TokenTree::MetaVarDecl(DUMMY_SP, rhs_nm, ast::Ident::from_str("tt")),
|
||||
],
|
||||
separator: Some(token::Semi),
|
||||
op: quoted::KleeneOp::OneOrMore,
|
||||
@ -272,7 +270,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
|
||||
use self::quoted::TokenTree;
|
||||
for tt in tts {
|
||||
match *tt {
|
||||
TokenTree::Token(_, _) => (),
|
||||
TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (),
|
||||
TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
|
||||
return false;
|
||||
},
|
||||
@ -352,13 +350,12 @@ impl FirstSets {
|
||||
let mut first = TokenSet::empty();
|
||||
for tt in tts.iter().rev() {
|
||||
match *tt {
|
||||
TokenTree::Token(sp, ref tok) => {
|
||||
first.replace_with((sp, tok.clone()));
|
||||
TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
|
||||
first.replace_with(tt.clone());
|
||||
}
|
||||
TokenTree::Delimited(span, ref delimited) => {
|
||||
build_recur(sets, &delimited.tts[..]);
|
||||
first.replace_with((delimited.open_tt(span).span(),
|
||||
Token::OpenDelim(delimited.delim)));
|
||||
first.replace_with(delimited.open_tt(span));
|
||||
}
|
||||
TokenTree::Sequence(sp, ref seq_rep) => {
|
||||
let subfirst = build_recur(sets, &seq_rep.tts[..]);
|
||||
@ -383,7 +380,7 @@ impl FirstSets {
|
||||
|
||||
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
|
||||
subfirst.maybe_empty) {
|
||||
first.add_one_maybe((sp, sep.clone()));
|
||||
first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
|
||||
}
|
||||
|
||||
// Reverse scan: Sequence comes before `first`.
|
||||
@ -413,13 +410,12 @@ impl FirstSets {
|
||||
for tt in tts.iter() {
|
||||
assert!(first.maybe_empty);
|
||||
match *tt {
|
||||
TokenTree::Token(sp, ref tok) => {
|
||||
first.add_one((sp, tok.clone()));
|
||||
TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
|
||||
first.add_one(tt.clone());
|
||||
return first;
|
||||
}
|
||||
TokenTree::Delimited(span, ref delimited) => {
|
||||
first.add_one((delimited.open_tt(span).span(),
|
||||
Token::OpenDelim(delimited.delim)));
|
||||
first.add_one(delimited.open_tt(span));
|
||||
return first;
|
||||
}
|
||||
TokenTree::Sequence(sp, ref seq_rep) => {
|
||||
@ -431,7 +427,7 @@ impl FirstSets {
|
||||
|
||||
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
|
||||
subfirst.maybe_empty) {
|
||||
first.add_one_maybe((sp, sep.clone()));
|
||||
first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
|
||||
}
|
||||
|
||||
assert!(first.maybe_empty);
|
||||
@ -467,8 +463,8 @@ impl FirstSets {
|
||||
}
|
||||
}
|
||||
|
||||
// A set of Tokens, which may include MatchNt tokens (for
|
||||
// macro-by-example syntactic variables). It also carries the
|
||||
// A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s
|
||||
// (for macro-by-example syntactic variables). It also carries the
|
||||
// `maybe_empty` flag; that is true if and only if the matcher can
|
||||
// match an empty token sequence.
|
||||
//
|
||||
@ -479,7 +475,7 @@ impl FirstSets {
|
||||
// (Notably, we must allow for *-op to occur zero times.)
|
||||
#[derive(Clone, Debug)]
|
||||
struct TokenSet {
|
||||
tokens: Vec<(Span, Token)>,
|
||||
tokens: Vec<quoted::TokenTree>,
|
||||
maybe_empty: bool,
|
||||
}
|
||||
|
||||
@ -489,13 +485,13 @@ impl TokenSet {
|
||||
|
||||
// Returns the set `{ tok }` for the single-token (and thus
|
||||
// non-empty) sequence [tok].
|
||||
fn singleton(tok: (Span, Token)) -> Self {
|
||||
fn singleton(tok: quoted::TokenTree) -> Self {
|
||||
TokenSet { tokens: vec![tok], maybe_empty: false }
|
||||
}
|
||||
|
||||
// Changes self to be the set `{ tok }`.
|
||||
// Since `tok` is always present, marks self as non-empty.
|
||||
fn replace_with(&mut self, tok: (Span, Token)) {
|
||||
fn replace_with(&mut self, tok: quoted::TokenTree) {
|
||||
self.tokens.clear();
|
||||
self.tokens.push(tok);
|
||||
self.maybe_empty = false;
|
||||
@ -510,7 +506,7 @@ impl TokenSet {
|
||||
}
|
||||
|
||||
// Adds `tok` to the set for `self`, marking sequence as non-empy.
|
||||
fn add_one(&mut self, tok: (Span, Token)) {
|
||||
fn add_one(&mut self, tok: quoted::TokenTree) {
|
||||
if !self.tokens.contains(&tok) {
|
||||
self.tokens.push(tok);
|
||||
}
|
||||
@ -518,7 +514,7 @@ impl TokenSet {
|
||||
}
|
||||
|
||||
// Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
|
||||
fn add_one_maybe(&mut self, tok: (Span, Token)) {
|
||||
fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
|
||||
if !self.tokens.contains(&tok) {
|
||||
self.tokens.push(tok);
|
||||
}
|
||||
@ -558,7 +554,6 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
first_sets: &FirstSets,
|
||||
matcher: &[quoted::TokenTree],
|
||||
follow: &TokenSet) -> TokenSet {
|
||||
use print::pprust::token_to_string;
|
||||
use self::quoted::TokenTree;
|
||||
|
||||
let mut last = TokenSet::empty();
|
||||
@ -584,11 +579,11 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
// First, update `last` so that it corresponds to the set
|
||||
// of NT tokens that might end the sequence `... token`.
|
||||
match *token {
|
||||
TokenTree::Token(sp, ref tok) => {
|
||||
TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
|
||||
let can_be_followed_by_any;
|
||||
if let Err(bad_frag) = has_legal_fragment_specifier(tok) {
|
||||
if let Err(bad_frag) = has_legal_fragment_specifier(token) {
|
||||
let msg = format!("invalid fragment specifier `{}`", bad_frag);
|
||||
sess.span_diagnostic.struct_span_err(sp, &msg)
|
||||
sess.span_diagnostic.struct_span_err(token.span(), &msg)
|
||||
.help("valid fragment specifiers are `ident`, `block`, \
|
||||
`stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
|
||||
and `item`")
|
||||
@ -597,7 +592,7 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
// from error messages.)
|
||||
can_be_followed_by_any = true;
|
||||
} else {
|
||||
can_be_followed_by_any = token_can_be_followed_by_any(tok);
|
||||
can_be_followed_by_any = token_can_be_followed_by_any(token);
|
||||
}
|
||||
|
||||
if can_be_followed_by_any {
|
||||
@ -607,13 +602,12 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
// followed by anything against SUFFIX.
|
||||
continue 'each_token;
|
||||
} else {
|
||||
last.replace_with((sp, tok.clone()));
|
||||
last.replace_with(token.clone());
|
||||
suffix_first = build_suffix_first();
|
||||
}
|
||||
}
|
||||
TokenTree::Delimited(span, ref d) => {
|
||||
let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
|
||||
Token::CloseDelim(d.delim)));
|
||||
let my_suffix = TokenSet::singleton(d.close_tt(span));
|
||||
check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
|
||||
// don't track non NT tokens
|
||||
last.replace_with_irrelevant();
|
||||
@ -637,7 +631,7 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
let mut new;
|
||||
let my_suffix = if let Some(ref u) = seq_rep.separator {
|
||||
new = suffix_first.clone();
|
||||
new.add_one_maybe((sp, u.clone()));
|
||||
new.add_one_maybe(TokenTree::Token(sp, u.clone()));
|
||||
&new
|
||||
} else {
|
||||
&suffix_first
|
||||
@ -663,12 +657,13 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
|
||||
// Now `last` holds the complete set of NT tokens that could
|
||||
// end the sequence before SUFFIX. Check that every one works with `suffix`.
|
||||
'each_last: for &(_sp, ref t) in &last.tokens {
|
||||
if let MatchNt(ref name, ref frag_spec) = *t {
|
||||
for &(sp, ref next_token) in &suffix_first.tokens {
|
||||
'each_last: for token in &last.tokens {
|
||||
if let TokenTree::MetaVarDecl(_, ref name, ref frag_spec) = *token {
|
||||
for next_token in &suffix_first.tokens {
|
||||
match is_in_follow(next_token, &frag_spec.name.as_str()) {
|
||||
Err((msg, help)) => {
|
||||
sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit();
|
||||
sess.span_diagnostic.struct_span_err(next_token.span(), &msg)
|
||||
.help(help).emit();
|
||||
// don't bother reporting every source of
|
||||
// conflict for a particular element of `last`.
|
||||
continue 'each_last;
|
||||
@ -684,12 +679,12 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
};
|
||||
|
||||
sess.span_diagnostic.span_err(
|
||||
sp,
|
||||
next_token.span(),
|
||||
&format!("`${name}:{frag}` {may_be} followed by `{next}`, which \
|
||||
is not allowed for `{frag}` fragments",
|
||||
name=name,
|
||||
frag=frag_spec,
|
||||
next=token_to_string(next_token),
|
||||
next=quoted_tt_to_string(next_token),
|
||||
may_be=may_be)
|
||||
);
|
||||
}
|
||||
@ -701,8 +696,8 @@ fn check_matcher_core(sess: &ParseSess,
|
||||
last
|
||||
}
|
||||
|
||||
fn token_can_be_followed_by_any(tok: &Token) -> bool {
|
||||
if let &MatchNt(_, ref frag_spec) = tok {
|
||||
fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
|
||||
if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
|
||||
frag_can_be_followed_by_any(&frag_spec.name.as_str())
|
||||
} else {
|
||||
// (Non NT's can always be followed by anthing in matchers.)
|
||||
@ -740,8 +735,10 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool {
|
||||
/// break macros that were relying on that binary operator as a
|
||||
/// separator.
|
||||
// when changing this do not forget to update doc/book/macros.md!
|
||||
fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)> {
|
||||
if let &CloseDelim(_) = tok {
|
||||
fn is_in_follow(tok: &quoted::TokenTree, frag: &str) -> Result<bool, (String, &'static str)> {
|
||||
use self::quoted::TokenTree;
|
||||
|
||||
if let TokenTree::Token(_, token::CloseDelim(_)) = *tok {
|
||||
// closing a token tree can never be matched by any fragment;
|
||||
// iow, we always require that `(` and `)` match, etc.
|
||||
Ok(true)
|
||||
@ -757,27 +754,30 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
|
||||
// maintain
|
||||
Ok(true)
|
||||
},
|
||||
"stmt" | "expr" => {
|
||||
match *tok {
|
||||
"stmt" | "expr" => match *tok {
|
||||
TokenTree::Token(_, ref tok) => match *tok {
|
||||
FatArrow | Comma | Semi => Ok(true),
|
||||
_ => Ok(false)
|
||||
}
|
||||
},
|
||||
_ => Ok(false),
|
||||
},
|
||||
"pat" => {
|
||||
match *tok {
|
||||
"pat" => match *tok {
|
||||
TokenTree::Token(_, ref tok) => match *tok {
|
||||
FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true),
|
||||
Ident(i) if i.name == "if" || i.name == "in" => Ok(true),
|
||||
_ => Ok(false)
|
||||
}
|
||||
},
|
||||
_ => Ok(false),
|
||||
},
|
||||
"path" | "ty" => {
|
||||
match *tok {
|
||||
"path" | "ty" => match *tok {
|
||||
TokenTree::Token(_, ref tok) => match *tok {
|
||||
OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) |
|
||||
Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true),
|
||||
MatchNt(_, ref frag) if frag.name == "block" => Ok(true),
|
||||
Ident(i) if i.name == "as" || i.name == "where" => Ok(true),
|
||||
_ => Ok(false)
|
||||
}
|
||||
},
|
||||
TokenTree::MetaVarDecl(_, _, frag) if frag.name == "block" => Ok(true),
|
||||
_ => Ok(false),
|
||||
},
|
||||
"ident" => {
|
||||
// being a single token, idents are harmless
|
||||
@ -796,9 +796,9 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result<bool, (String, &'static str)>
|
||||
}
|
||||
}
|
||||
|
||||
fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> {
|
||||
fn has_legal_fragment_specifier(tok: &quoted::TokenTree) -> Result<(), String> {
|
||||
debug!("has_legal_fragment_specifier({:?})", tok);
|
||||
if let &MatchNt(_, ref frag_spec) = tok {
|
||||
if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
|
||||
let s = &frag_spec.name.as_str();
|
||||
if !is_legal_fragment_specifier(s) {
|
||||
return Err(s.to_string());
|
||||
@ -814,3 +814,11 @@ fn is_legal_fragment_specifier(frag: &str) -> bool {
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
|
||||
match *tt {
|
||||
quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
|
||||
quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
|
||||
_ => panic!("unexpected quoted::TokenTree::{Sequence or Delimited} in follow set checker"),
|
||||
}
|
||||
}
|
||||
|
@ -58,7 +58,7 @@ pub struct SequenceRepetition {
|
||||
pub separator: Option<token::Token>,
|
||||
/// Whether the sequence can be repeated zero (*), or one or more times (+)
|
||||
pub op: KleeneOp,
|
||||
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
|
||||
/// The number of `Match`s that appear in the sequence (and subsequences)
|
||||
pub num_captures: usize,
|
||||
}
|
||||
|
||||
@ -78,6 +78,8 @@ pub enum TokenTree {
|
||||
Delimited(Span, Rc<Delimited>),
|
||||
/// A kleene-style repetition sequence with a span
|
||||
Sequence(Span, Rc<SequenceRepetition>),
|
||||
/// Matches a nonterminal. This is only used in the left hand side of MBE macros.
|
||||
MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
|
||||
}
|
||||
|
||||
impl TokenTree {
|
||||
@ -88,7 +90,7 @@ impl TokenTree {
|
||||
_ => delimed.tts.len() + 2,
|
||||
},
|
||||
TokenTree::Sequence(_, ref seq) => seq.tts.len(),
|
||||
TokenTree::Token(..) => 0,
|
||||
_ => 0,
|
||||
}
|
||||
}
|
||||
|
||||
@ -115,6 +117,7 @@ impl TokenTree {
|
||||
pub fn span(&self) -> Span {
|
||||
match *self {
|
||||
TokenTree::Token(sp, _) |
|
||||
TokenTree::MetaVarDecl(sp, _, _) |
|
||||
TokenTree::Delimited(sp, _) |
|
||||
TokenTree::Sequence(sp, _) => sp,
|
||||
}
|
||||
@ -133,7 +136,7 @@ pub fn parse(input: &[tokenstream::TokenTree], expect_matchers: bool, sess: &Par
|
||||
Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
|
||||
Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
|
||||
let span = Span { lo: start_sp.lo, ..end_sp };
|
||||
result.push(TokenTree::Token(span, token::MatchNt(ident, kind)));
|
||||
result.push(TokenTree::MetaVarDecl(span, ident, kind));
|
||||
continue
|
||||
}
|
||||
tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
|
||||
|
@ -12,7 +12,7 @@ use ast::Ident;
|
||||
use errors::Handler;
|
||||
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
|
||||
use ext::tt::quoted;
|
||||
use parse::token::{self, MatchNt, SubstNt, Token, NtIdent, NtTT};
|
||||
use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
use tokenstream::{TokenTree, Delimited};
|
||||
use util::small_vector::SmallVector;
|
||||
@ -61,7 +61,7 @@ impl Iterator for Frame {
|
||||
}
|
||||
|
||||
/// This can do Macro-By-Example transcription. On the other hand, if
|
||||
/// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
|
||||
/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can
|
||||
/// (and should) be None.
|
||||
pub fn transcribe(sp_diag: &Handler,
|
||||
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
|
||||
@ -177,6 +177,7 @@ pub fn transcribe(sp_diag: &Handler,
|
||||
result_stack.push(mem::replace(&mut result, Vec::new()));
|
||||
}
|
||||
quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok)),
|
||||
quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -243,7 +244,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
|
||||
size + lockstep_iter_size(tt, interpolations, repeat_idx)
|
||||
})
|
||||
},
|
||||
TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
|
||||
TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) =>
|
||||
match lookup_cur_matched(name, interpolations, repeat_idx) {
|
||||
Some(matched) => match *matched {
|
||||
MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
|
||||
|
@ -571,7 +571,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
|
||||
token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
|
||||
}
|
||||
token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
|
||||
token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)),
|
||||
_ => t
|
||||
}
|
||||
}
|
||||
|
@ -152,9 +152,6 @@ pub enum Token {
|
||||
// Can be expanded into several tokens.
|
||||
/// Doc comment
|
||||
DocComment(ast::Name),
|
||||
// In left-hand-sides of MBE macros:
|
||||
/// Parse a nonterminal (name to bind, name of NT)
|
||||
MatchNt(ast::Ident, ast::Ident),
|
||||
// In right-hand-sides of MBE macros:
|
||||
/// A syntactic variable that will be filled in by macro expansion.
|
||||
SubstNt(ast::Ident),
|
||||
|
@ -271,7 +271,6 @@ pub fn token_to_string(tok: &Token) -> String {
|
||||
/* Other */
|
||||
token::DocComment(s) => s.to_string(),
|
||||
token::SubstNt(s) => format!("${}", s),
|
||||
token::MatchNt(s, t) => format!("${}:{}", s, t),
|
||||
token::Eof => "<eof>".to_string(),
|
||||
token::Whitespace => " ".to_string(),
|
||||
token::Comment => "/* */".to_string(),
|
||||
|
Loading…
x
Reference in New Issue
Block a user