Add Span and separate open/close delims to TTDelim
This came up when working [on the gl-rs generator extension](990383de80/src/gl_generator/lib.rs#L135-L146).
The new definition of `TTDelim` adds an associated `Span` that covers the whole token tree and enforces the invariant that a delimited sequence of token trees must have an opening and closing delimiter.
A `get_span` method has also been added to `TokenTree` type to make it easier to implement better error messages for syntax extensions.
This commit is contained in:
parent
80e5fe1a56
commit
971d776aa5
@ -592,6 +592,20 @@ pub enum CaptureClause {
|
||||
CaptureByRef,
|
||||
}
|
||||
|
||||
/// A token that delimits a sequence of token trees
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
pub struct Delimiter {
    // Location of this single delimiter token in the source.
    pub span: Span,
    // The delimiter token itself (e.g. an opening or closing paren/brace/bracket).
    pub token: ::parse::token::Token,
}

impl Delimiter {
    /// Convert the delimiter to a `TTTok`.
    ///
    /// Used where a delimiter must be re-injected into a token-tree stream
    /// as an ordinary single token (span and token are copied verbatim).
    pub fn to_tt(&self) -> TokenTree {
        TTTok(self.span, self.token.clone())
    }
}
|
||||
|
||||
/// When the main rust parser encounters a syntax-extension invocation, it
|
||||
/// parses the arguments to the invocation as a token-tree. This is a very
|
||||
/// loose structure, such that all sorts of different AST-fragments can
|
||||
@ -611,10 +625,9 @@ pub enum CaptureClause {
|
||||
pub enum TokenTree {
|
||||
/// A single token
|
||||
TTTok(Span, ::parse::token::Token),
|
||||
/// A delimited sequence (the delimiters appear as the first
|
||||
/// and last elements of the vector)
|
||||
/// A delimited sequence of token trees
|
||||
// FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
|
||||
TTDelim(Rc<Vec<TokenTree>>),
|
||||
TTDelim(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
|
||||
|
||||
// These only make sense for right-hand-sides of MBE macros:
|
||||
|
||||
@ -628,6 +641,18 @@ pub enum TokenTree {
|
||||
TTNonterminal(Span, Ident)
|
||||
}
|
||||
|
||||
impl TokenTree {
    /// Returns the `Span` corresponding to this token tree.
    ///
    /// Relies on the invariant that every `TokenTree` variant stores its
    /// covering `Span` as its first field; for `TTDelim` this is the span
    /// covering the whole delimited sequence, delimiters included.
    pub fn get_span(&self) -> Span {
        match *self {
            TTTok(span, _) => span,
            TTDelim(span, _, _, _) => span,
            TTSeq(span, _, _, _) => span,
            TTNonterminal(span, _) => span,
        }
    }
}
|
||||
|
||||
// Matchers are nodes defined-by and recognized-by the main rust parser and
|
||||
// language, but they're only ever found inside syntax-extension invocations;
|
||||
// indeed, the only thing that ever _activates_ the rules in the rust parser
|
||||
|
@ -13,16 +13,14 @@ use codemap;
|
||||
use ext::base;
|
||||
use print;
|
||||
|
||||
use std::rc::Rc;
|
||||
|
||||
pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
|
||||
sp: codemap::Span,
|
||||
tt: &[ast::TokenTree])
|
||||
tts: &[ast::TokenTree])
|
||||
-> Box<base::MacResult+'cx> {
|
||||
|
||||
cx.print_backtrace();
|
||||
println!("{}", print::pprust::tt_to_string(&ast::TTDelim(
|
||||
Rc::new(tt.iter().map(|x| (*x).clone()).collect()))));
|
||||
|
||||
println!("{}", print::pprust::tts_to_string(tts));
|
||||
|
||||
// any so that `log_syntax` can be invoked as an expression and item.
|
||||
base::DummyResult::any(sp)
|
||||
|
@ -637,7 +637,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
|
||||
}
|
||||
|
||||
|
||||
fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
|
||||
fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
|
||||
match *tt {
|
||||
ast::TTTok(sp, ref tok) => {
|
||||
let e_sp = cx.expr_ident(sp, id_ext("_sp"));
|
||||
@ -650,13 +650,16 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
|
||||
id_ext("push"),
|
||||
vec!(e_tok));
|
||||
vec!(cx.stmt_expr(e_push))
|
||||
}
|
||||
|
||||
ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()),
|
||||
},
|
||||
ast::TTDelim(sp, ref open, ref tts, ref close) => {
|
||||
let mut stmts = vec![];
|
||||
stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter());
|
||||
stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()));
|
||||
stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter());
|
||||
stmts
|
||||
},
|
||||
ast::TTSeq(..) => fail!("TTSeq in quote!"),
|
||||
|
||||
ast::TTNonterminal(sp, ident) => {
|
||||
|
||||
// tt.extend($ident.to_tokens(ext_cx).into_iter())
|
||||
|
||||
let e_to_toks =
|
||||
@ -674,7 +677,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
|
||||
vec!(e_to_toks));
|
||||
|
||||
vec!(cx.stmt_expr(e_push))
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -147,13 +147,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
|
||||
rhses: &[Rc<NamedMatch>])
|
||||
-> Box<MacResult+'cx> {
|
||||
if cx.trace_macros() {
|
||||
println!("{}! {} {} {}",
|
||||
println!("{}! {{ {} }}",
|
||||
token::get_ident(name),
|
||||
"{",
|
||||
print::pprust::tt_to_string(&TTDelim(Rc::new(arg.iter()
|
||||
.map(|x| (*x).clone())
|
||||
.collect()))),
|
||||
"}");
|
||||
print::pprust::tts_to_string(arg));
|
||||
}
|
||||
|
||||
// Which arm's failure should we report? (the one furthest along)
|
||||
@ -175,15 +171,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
|
||||
// okay, what's your transcriber?
|
||||
MatchedNonterminal(NtTT(ref tt)) => {
|
||||
match **tt {
|
||||
// cut off delimiters; don't parse 'em
|
||||
TTDelim(ref tts) => {
|
||||
(*tts).slice(1u,(*tts).len()-1u)
|
||||
.iter()
|
||||
.map(|x| (*x).clone())
|
||||
.collect()
|
||||
}
|
||||
_ => cx.span_fatal(
|
||||
sp, "macro rhs must be delimited")
|
||||
// ignore delimiters
|
||||
TTDelim(_, _, ref tts, _) => (**tts).clone(),
|
||||
_ => cx.span_fatal(sp, "macro rhs must be delimited"),
|
||||
}
|
||||
},
|
||||
_ => cx.span_bug(sp, "bad thing in rhs")
|
||||
|
@ -18,6 +18,7 @@ use parse::token;
|
||||
use parse::lexer::TokenAndSpan;
|
||||
|
||||
use std::rc::Rc;
|
||||
use std::ops::Add;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// An unzipping of `TokenTree`s
|
||||
@ -104,37 +105,41 @@ enum LockstepIterSize {
|
||||
LisContradiction(String),
|
||||
}
|
||||
|
||||
fn lis_merge(lhs: LockstepIterSize, rhs: LockstepIterSize) -> LockstepIterSize {
|
||||
match lhs {
|
||||
LisUnconstrained => rhs.clone(),
|
||||
LisContradiction(_) => lhs.clone(),
|
||||
LisConstraint(l_len, l_id) => match rhs {
|
||||
LisUnconstrained => lhs.clone(),
|
||||
LisContradiction(_) => rhs.clone(),
|
||||
LisConstraint(r_len, _) if l_len == r_len => lhs.clone(),
|
||||
LisConstraint(r_len, r_id) => {
|
||||
let l_n = token::get_ident(l_id);
|
||||
let r_n = token::get_ident(r_id);
|
||||
LisContradiction(format!("inconsistent lockstep iteration: \
|
||||
'{}' has {} items, but '{}' has {}",
|
||||
l_n, l_len, r_n, r_len).to_string())
|
||||
}
|
||||
impl Add<LockstepIterSize, LockstepIterSize> for LockstepIterSize {
|
||||
fn add(&self, other: &LockstepIterSize) -> LockstepIterSize {
|
||||
match *self {
|
||||
LisUnconstrained => other.clone(),
|
||||
LisContradiction(_) => self.clone(),
|
||||
LisConstraint(l_len, l_id) => match *other {
|
||||
LisUnconstrained => self.clone(),
|
||||
LisContradiction(_) => other.clone(),
|
||||
LisConstraint(r_len, _) if l_len == r_len => self.clone(),
|
||||
LisConstraint(r_len, r_id) => {
|
||||
let l_n = token::get_ident(l_id);
|
||||
let r_n = token::get_ident(r_id);
|
||||
LisContradiction(format!("inconsistent lockstep iteration: \
|
||||
'{}' has {} items, but '{}' has {}",
|
||||
l_n, l_len, r_n, r_len).to_string())
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
|
||||
match *t {
|
||||
TTDelim(ref tts) | TTSeq(_, ref tts, _, _) => {
|
||||
tts.iter().fold(LisUnconstrained, |lis, tt| {
|
||||
lis_merge(lis, lockstep_iter_size(tt, r))
|
||||
// The opening and closing delimiters are both tokens, so they are
|
||||
// treated as `LisUnconstrained`.
|
||||
TTDelim(_, _, ref tts, _) | TTSeq(_, ref tts, _, _) => {
|
||||
tts.iter().fold(LisUnconstrained, |size, tt| {
|
||||
size + lockstep_iter_size(tt, r)
|
||||
})
|
||||
}
|
||||
},
|
||||
TTTok(..) => LisUnconstrained,
|
||||
TTNonterminal(_, name) => match *lookup_cur_matched(r, name) {
|
||||
MatchedNonterminal(_) => LisUnconstrained,
|
||||
MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -197,9 +202,14 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
|
||||
(*frame.forest)[frame.idx].clone()
|
||||
};
|
||||
match t {
|
||||
TTDelim(tts) => {
|
||||
TTDelim(_, open, delimed_tts, close) => {
|
||||
let mut tts = vec![];
|
||||
tts.push(open.to_tt());
|
||||
tts.extend(delimed_tts.iter().map(|x| (*x).clone()));
|
||||
tts.push(close.to_tt());
|
||||
|
||||
r.stack.push(TtFrame {
|
||||
forest: tts,
|
||||
forest: Rc::new(tts),
|
||||
idx: 0,
|
||||
dotdotdoted: false,
|
||||
sep: None
|
||||
|
@ -571,7 +571,17 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
|
||||
match *tt {
|
||||
TTTok(span, ref tok) =>
|
||||
TTTok(span, fld.fold_token(tok.clone())),
|
||||
TTDelim(ref tts) => TTDelim(Rc::new(fld.fold_tts(tts.as_slice()))),
|
||||
TTDelim(span, ref open, ref tts, ref close) =>
|
||||
TTDelim(span,
|
||||
Delimiter {
|
||||
span: open.span,
|
||||
token: fld.fold_token(open.token.clone())
|
||||
},
|
||||
Rc::new(fld.fold_tts(tts.as_slice())),
|
||||
Delimiter {
|
||||
span: close.span,
|
||||
token: fld.fold_token(close.token.clone())
|
||||
}),
|
||||
TTSeq(span, ref pattern, ref sep, is_optional) =>
|
||||
TTSeq(span,
|
||||
Rc::new(fld.fold_tts(pattern.as_slice())),
|
||||
|
@ -788,35 +788,34 @@ mod test {
|
||||
}
|
||||
|
||||
// check the token-tree-ization of macros
|
||||
#[test] fn string_to_tts_macro () {
|
||||
#[test]
|
||||
fn string_to_tts_macro () {
|
||||
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
|
||||
let tts: &[ast::TokenTree] = tts.as_slice();
|
||||
match tts {
|
||||
[ast::TTTok(_,_),
|
||||
ast::TTTok(_,token::NOT),
|
||||
ast::TTTok(_,_),
|
||||
ast::TTDelim(ref delim_elts)] => {
|
||||
[ast::TTTok(_, _),
|
||||
ast::TTTok(_, token::NOT),
|
||||
ast::TTTok(_, _),
|
||||
ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
|
||||
ref delim_elts,
|
||||
ast::TTTok(_, token::RPAREN))] => {
|
||||
let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
|
||||
match delim_elts {
|
||||
[ast::TTTok(_,token::LPAREN),
|
||||
ast::TTDelim(ref first_set),
|
||||
ast::TTTok(_,token::FAT_ARROW),
|
||||
ast::TTDelim(ref second_set),
|
||||
ast::TTTok(_,token::RPAREN)] => {
|
||||
[ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
|
||||
ref first_set,
|
||||
ast::TTTok(_, token::RPAREN)),
|
||||
ast::TTTok(_, token::FAT_ARROW),
|
||||
ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
|
||||
ref second_set,
|
||||
ast::TTTok(_, token::RPAREN))] => {
|
||||
let first_set: &[ast::TokenTree] =
|
||||
first_set.as_slice();
|
||||
match first_set {
|
||||
[ast::TTTok(_,token::LPAREN),
|
||||
ast::TTTok(_,token::DOLLAR),
|
||||
ast::TTTok(_,_),
|
||||
ast::TTTok(_,token::RPAREN)] => {
|
||||
[ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => {
|
||||
let second_set: &[ast::TokenTree] =
|
||||
second_set.as_slice();
|
||||
match second_set {
|
||||
[ast::TTTok(_,token::LPAREN),
|
||||
ast::TTTok(_,token::DOLLAR),
|
||||
ast::TTTok(_,_),
|
||||
ast::TTTok(_,token::RPAREN)] => {
|
||||
[ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => {
|
||||
assert_eq!("correct","correct")
|
||||
}
|
||||
_ => assert_eq!("wrong 4","correct")
|
||||
@ -837,7 +836,7 @@ mod test {
|
||||
_ => {
|
||||
error!("failing value: {}",tts);
|
||||
assert_eq!("wrong 1","correct");
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -48,7 +48,7 @@ use ast::{StmtExpr, StmtSemi, StmtMac, StructDef, StructField};
|
||||
use ast::{StructVariantKind, BiSub};
|
||||
use ast::StrStyle;
|
||||
use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
|
||||
use ast::{TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
|
||||
use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
|
||||
use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
|
||||
use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
|
||||
use ast::{TyTypeof, TyInfer, TypeMethod};
|
||||
@ -2574,16 +2574,11 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
parse_any_tt_tok(p)
|
||||
TTTok(p.span, p.bump_and_get())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// turn the next token into a TTTok:
|
||||
fn parse_any_tt_tok(p: &mut Parser) -> TokenTree {
|
||||
TTTok(p.span, p.bump_and_get())
|
||||
}
|
||||
|
||||
match (&self.token, token::close_delimiter_for(&self.token)) {
|
||||
(&token::EOF, _) => {
|
||||
let open_braces = self.open_braces.clone();
|
||||
@ -2595,21 +2590,32 @@ impl<'a> Parser<'a> {
|
||||
self.fatal("this file contains an un-closed delimiter ");
|
||||
}
|
||||
(_, Some(close_delim)) => {
|
||||
// The span for beginning of the delimited section
|
||||
let pre_span = self.span;
|
||||
|
||||
// Parse the open delimiter.
|
||||
self.open_braces.push(self.span);
|
||||
let mut result = vec!(parse_any_tt_tok(self));
|
||||
let open = Delimiter {
|
||||
span: self.span,
|
||||
token: self.bump_and_get(),
|
||||
};
|
||||
|
||||
let trees =
|
||||
self.parse_seq_to_before_end(&close_delim,
|
||||
seq_sep_none(),
|
||||
|p| p.parse_token_tree());
|
||||
result.extend(trees.into_iter());
|
||||
// Parse the token trees within the delimiters
|
||||
let tts = self.parse_seq_to_before_end(
|
||||
&close_delim, seq_sep_none(), |p| p.parse_token_tree()
|
||||
);
|
||||
|
||||
// Parse the close delimiter.
|
||||
result.push(parse_any_tt_tok(self));
|
||||
let close = Delimiter {
|
||||
span: self.span,
|
||||
token: self.bump_and_get(),
|
||||
};
|
||||
self.open_braces.pop().unwrap();
|
||||
|
||||
TTDelim(Rc::new(result))
|
||||
// Expand to cover the entire delimited token tree
|
||||
let span = Span { hi: self.span.hi, ..pre_span };
|
||||
|
||||
TTDelim(span, open, Rc::new(tts), close)
|
||||
}
|
||||
_ => parse_non_delim_tt_tok(self)
|
||||
}
|
||||
|
@ -1020,7 +1020,13 @@ impl<'a> State<'a> {
|
||||
/// expression arguments as expressions). It can be done! I think.
|
||||
pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
|
||||
match *tt {
|
||||
ast::TTDelim(ref tts) => self.print_tts(tts.as_slice()),
|
||||
ast::TTDelim(_, ref open, ref tts, ref close) => {
|
||||
try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
|
||||
try!(space(&mut self.s));
|
||||
try!(self.print_tts(tts.as_slice()));
|
||||
try!(space(&mut self.s));
|
||||
word(&mut self.s, parse::token::to_string(&close.token).as_slice())
|
||||
},
|
||||
ast::TTTok(_, ref tk) => {
|
||||
try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
|
||||
match *tk {
|
||||
|
Loading…
x
Reference in New Issue
Block a user