Add new syntax for interpolation and repetition, and allow the transcription of separators.
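In a quoted (right-hand-side) token tree, `$name` now transcribes an interpolated binding, and `$( ... )` followed by an optional separator token and then `*` (zero or more) or `+` (at least one) transcribes its body repeatedly, inserting the separator between repetitions. Matchers use the same shape, with `$name:nonterminal` binding a fragment. A rough sketch of the new forms (the names `e`, `expr`, and `f` are illustrative only, not taken from this commit):

    $e:expr            matcher: bind a fragment parsed by the `expr` nonterminal
    $( $e:expr ),+     matcher: one or more such fragments, comma-separated
    $( f($e); )*       transcriber: repeat the body zero or more times, no separator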

Paul Stansifer 2012-07-05 14:30:56 -07:00
parent 62db5706e6
commit 7f9b1fbe35
4 changed files with 70 additions and 39 deletions

View File

@@ -379,7 +379,7 @@ enum token_tree {
tt_delim(~[token_tree]),
tt_flat(span, token::token),
/* These only make sense for right-hand-sides of MBE macros*/
tt_dotdotdot(span, ~[token_tree]),
tt_dotdotdot(span, ~[token_tree], option<token::token>, bool),
tt_interpolate(span, ident)
}

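The repetition node `tt_dotdotdot` gains two fields: the optional separator token to emit between repetitions, and a bool recording whether zero repetitions are acceptable (true when the repetition was written with `*`, false for `+`). As a sketch, with `body_tts` standing for the already-parsed body:

    tt_dotdotdot(sp, body_tts, some(token::COMMA), true)    /* written `$( ... ),*` */
    tt_dotdotdot(sp, body_tts, none, false)                 /* written `$( ... )+`  */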
View File

@@ -18,7 +18,8 @@ enum tt_frame_up { /* to break a circularity */
readme: ~[ast::token_tree],
mut idx: uint,
dotdotdoted: bool,
up: tt_frame_up
sep: option<token>,
up: tt_frame_up,
};
type tt_reader = @{
@@ -43,7 +44,7 @@ fn new_tt_reader(span_diagnostic: span_handler, itr: @interner<@str>,
-> tt_reader {
let r = @{span_diagnostic: span_diagnostic, interner: itr,
mut cur: @{readme: src, mut idx: 0u, dotdotdoted: false,
up: tt_frame_up(option::none)},
sep: none, up: tt_frame_up(option::none)},
interpolations: alt interp { /* just a convenience */
none { std::map::box_str_hash::<@arb_depth>() }
some(x) { x }
@@ -59,7 +60,7 @@ fn new_tt_reader(span_diagnostic: span_handler, itr: @interner<@str>,
pure fn dup_tt_frame(&&f: tt_frame) -> tt_frame {
@{readme: f.readme, mut idx: f.idx, dotdotdoted: f.dotdotdoted,
up: alt f.up {
sep: f.sep, up: alt f.up {
tt_frame_up(some(up_frame)) {
tt_frame_up(some(dup_tt_frame(up_frame)))
}
@@ -114,7 +115,7 @@ fn lis_merge(lhs: lis, rhs: lis) -> lis {
}
}
alt t {
tt_delim(tts) | tt_dotdotdot(_, tts) {
tt_delim(tts) | tt_dotdotdot(_, tts, _, _) {
vec::foldl(lis_unconstrained, tts, {|lis, tt|
lis_merge(lis, lockstep_iter_size(tt, r)) })
}
@@ -155,6 +156,13 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
} else {
r.cur.idx = 0u;
r.repeat_idx[r.repeat_idx.len() - 1u] += 1u;
alt r.cur.sep {
some(tk) {
r.cur_tok = tk; /* repeat same span, I guess */
ret ret_val;
}
none {}
}
}
}
/* if `tt_delim`s could be 0-length, we'd need to be able to switch
@@ -164,15 +172,15 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
alt r.cur.readme[r.cur.idx] {
tt_delim(tts) {
r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: false,
up: tt_frame_up(option::some(r.cur)) };
sep: none, up: tt_frame_up(option::some(r.cur)) };
}
tt_flat(sp, tok) {
r.cur_span = sp; r.cur_tok = tok;
r.cur.idx += 1u;
ret ret_val;
}
tt_dotdotdot(sp, tts) {
alt lockstep_iter_size(tt_dotdotdot(sp, tts), r) {
tt_dotdotdot(sp, tts, sep, zerok) {
alt lockstep_iter_size(tt_dotdotdot(sp, tts, sep, zerok), r) {
lis_unconstrained {
r.span_diagnostic.span_fatal(
copy r.cur_span, /* blame macro writer */
@@ -183,10 +191,14 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
r.span_diagnostic.span_fatal(sp, msg);
}
lis_constraint(len, _) {
if len == 0 && !zerok {
r.span_diagnostic.span_fatal(sp, "this must repeat \
at least once");
}
vec::push(r.repeat_len, len);
vec::push(r.repeat_idx, 0u);
r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: true,
up: tt_frame_up(option::some(r.cur)) };
sep: sep, up: tt_frame_up(option::some(r.cur)) };
}
}
}

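Each transcription frame now remembers the separator of the repetition it is replaying (plain `tt_delim` frames store `sep: none`). When the reader finishes one pass over the body and further repetitions remain, it resets the frame index, bumps the repetition counter, and, if a separator is stored, yields that token before replaying the body, so the separator lands between iterations but not after the last one. The new bool (`zerok`) is checked where the lockstep size is computed: a `+`-style repetition whose size comes out as zero is rejected with "this must repeat at least once". For example, assuming a comma separator and three lockstep iterations, `$( $e ),*` comes out as

    e1 , e2 , e3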
View File

@@ -1130,6 +1130,26 @@ fn parse_dot_or_call_expr_with(e0: pexpr) -> pexpr {
ret e;
}
fn parse_sep_and_zerok() -> (option<token::token>, bool) {
if self.token == token::BINOP(token::STAR)
|| self.token == token::BINOP(token::PLUS) {
let zerok = self.token == token::BINOP(token::STAR);
self.bump();
ret (none, zerok);
} else {
let sep = self.token;
self.bump();
if self.token == token::BINOP(token::STAR)
|| self.token == token::BINOP(token::PLUS) {
let zerok = self.token == token::BINOP(token::STAR);
self.bump();
ret (some(sep), zerok);
} else {
self.fatal("expected '*' or '+'");
}
}
}
fn parse_token_tree() -> token_tree {
/// what's the opposite delimiter?
fn flip(&t: token::token) -> token::token {
@@ -1142,12 +1162,6 @@ fn flip(&t: token::token) -> token::token {
}
fn parse_tt_flat(p: parser, delim_ok: bool) -> token_tree {
if p.eat_keyword("many") && p.quote_depth > 0u {
let seq = p.parse_seq(token::LPAREN, token::RPAREN,
seq_sep_none(),
|p| p.parse_token_tree());
ret tt_dotdotdot(seq.span, seq.node);
}
alt p.token {
token::RPAREN | token::RBRACE | token::RBRACKET
if !delim_ok {
@@ -1161,7 +1175,16 @@ fn parse_tt_flat(p: parser, delim_ok: bool) -> token_tree {
token::DOLLAR if p.quote_depth > 0u {
p.bump();
let sp = p.span;
ret tt_interpolate(sp, p.parse_ident());
if p.token == token::LPAREN {
let seq = p.parse_seq(token::LPAREN, token::RPAREN,
seq_sep_none(),
|p| p.parse_token_tree());
let (s, z) = p.parse_sep_and_zerok();
ret tt_dotdotdot(mk_sp(sp.lo, p.span.hi), seq.node, s, z);
} else {
ret tt_interpolate(sp, p.parse_ident());
}
}
_ { /* ok */ }
}
@@ -1221,34 +1244,32 @@ fn parse_tt_mac_demo() -> @expr {
fn parse_matcher(name_idx: @mut uint) -> matcher {
let lo = self.span.lo;
let mut sep = none;
if self.eat_keyword("sep") { sep = some(self.token); self.bump(); }
let m = if self.is_keyword("many")||self.is_keyword("at_least_one") {
let zero_ok = self.is_keyword("many");
let m = if self.token == token::DOLLAR {
self.bump();
let ms = (self.parse_seq(token::LPAREN, token::RPAREN,
common::seq_sep_none(),
|p| p.parse_matcher(name_idx)).node);
if ms.len() == 0u {
self.fatal("repetition body must be nonempty");
if self.token == token::LPAREN {
let ms = (self.parse_seq(token::LPAREN, token::RPAREN,
common::seq_sep_none(),
|p| p.parse_matcher(name_idx)).node);
if ms.len() == 0u {
self.fatal("repetition body must be nonempty");
}
let (sep, zerok) = self.parse_sep_and_zerok();
mtc_rep(ms, sep, zerok)
} else {
let bound_to = self.parse_ident();
self.expect(token::COLON);
let nt_name = self.parse_ident();
let m = mtc_bb(bound_to, nt_name, *name_idx);
*name_idx += 1u;
m
}
mtc_rep(ms, sep, zero_ok)
} else if option::is_some(sep) {
self.fatal("`sep <tok>` must preceed `many` or `at_least_one`");
} else if self.eat_keyword("parse") {
let bound_to = self.parse_ident();
self.expect(token::EQ);
let nt_name = self.parse_ident();
let m = mtc_bb(bound_to, nt_name, *name_idx);
*name_idx += 1u;
m
} else {
let m = mtc_tok(self.token);
self.bump();
m
};
ret spanned(lo, self.span.hi, m);
}

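In the parser, the keyword-driven matcher grammar (`sep <tok>`, `many(...)`, `at_least_one(...)`, `parse name = nt`) is replaced by the `$`-prefixed forms, and the new helper `parse_sep_and_zerok` recognizes the repetition suffix shared by matchers and quoted token trees: either a bare `*` or `+`, or any single token (the separator) followed by `*` or `+`. Roughly, with illustrative names:

    old matcher:   sep , many(parse e = expr)
    new matcher:   $( $e:expr ),*

    suffix         result of parse_sep_and_zerok
    *              (none, true)
    +              (none, false)
    ,*             (some(token::COMMA), true)
    ;+             (some(token::SEMI), false)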
View File

@@ -274,9 +274,7 @@ fn contextual_keyword_table() -> hashmap<str, ()> {
"self", "send", "static",
"to",
"use",
"with",
/* temp */
"sep", "many", "at_least_one", "parse"
"with"
];
for keys.each |word| {
words.insert(word, ());