// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
2012-12-23 16:41:37 -06:00
|
|
|
use ast;
|
2012-09-04 13:37:29 -05:00
|
|
|
use ast::{token_tree, tt_delim, tt_tok, tt_seq, tt_nonterminal,ident};
|
2013-01-30 11:56:33 -06:00
|
|
|
use codemap::{span, dummy_sp};
|
2012-12-23 16:41:37 -06:00
|
|
|
use diagnostic::span_handler;
|
|
|
|
use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
|
2013-06-04 13:09:18 -05:00
|
|
|
use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident};
|
|
|
|
use parse::token::{ident_to_str};
|
2013-01-30 11:56:33 -06:00
|
|
|
use parse::lexer::TokenAndSpan;
|
2012-12-23 16:41:37 -06:00
|
|
|
|
2013-06-24 19:40:33 -05:00
|
|
|
use std::hashmap::HashMap;
|
|
|
|
use std::option;
|
2012-06-27 17:29:35 -05:00
|
|
|
|
|
|
|
/// An unzipping of `token_tree`s: one frame of the transcriber's explicit
/// walk stack, recording a position within one token-tree forest. Frames
/// are linked upward via `up`, so the whole chain acts like a zipper over
/// the nested tree structure.
struct TtFrame {
    // The sequence of token trees this frame is walking over.
    forest: @mut ~[ast::token_tree],
    // Index of the next tree in `forest` to be yielded.
    idx: uint,
    // True iff this frame was pushed for a `tt_seq` repetition
    // (`$(...)*`-style); such frames may be re-run from idx 0 for
    // each iteration (see `tt_next_token`).
    dotdotdoted: bool,
    // Separator token emitted between repetitions, if any.
    sep: Option<Token>,
    // The enclosing frame, or `None` at the outermost level.
    up: Option<@mut TtFrame>,
}
|
2012-06-27 17:29:35 -05:00
|
|
|
|
2013-02-04 16:02:01 -06:00
|
|
|
/// A reader that performs macro-by-example transcription over a
/// token-tree forest, producing one token per call to `tt_next_token`.
pub struct TtReader {
    // Diagnostic handler used to report fatal transcription errors.
    sp_diag: @span_handler,
    // the unzipped tree: current position in the token-tree walk.
    stack: @mut TtFrame,
    /* for MBE-style macro transcription */
    // Maps macro variable names to what the macro parser matched for them.
    interpolations: HashMap<ident, @named_match>,
    // Current iteration index at each active repetition depth
    // (parallel to `repeat_len`).
    repeat_idx: ~[uint],
    // Total iteration count at each active repetition depth.
    repeat_len: ~[uint],
    /* cached: */
    // The token to be returned by the next `tt_next_token` call.
    cur_tok: Token,
    // Span associated with `cur_tok`.
    cur_span: span
}
|
2012-06-27 17:29:35 -05:00
|
|
|
|
|
|
|
/** This can do Macro-By-Example transcription. On the other hand, if
 * `src` contains no `tt_seq`s and `tt_nonterminal`s, `interp` can (and
 * should) be none.
 *
 * `sp_diag` receives any fatal transcription diagnostics; `interp`
 * supplies the matched bindings for macro variables. The returned
 * reader is primed: `tt_next_token` is called once here so that
 * `cur_tok`/`cur_span` already hold the first token.
 */
pub fn new_tt_reader(sp_diag: @span_handler,
                     interp: Option<HashMap<ident,@named_match>>,
                     src: ~[ast::token_tree])
                  -> @mut TtReader {
    let r = @mut TtReader {
        sp_diag: sp_diag,
        // Outermost frame: walks `src` directly, no repetition, no parent.
        stack: @mut TtFrame {
            forest: @mut src,
            idx: 0u,
            dotdotdoted: false,
            sep: None,
            up: option::None
        },
        interpolations: match interp { /* just a convenience */
            None => HashMap::new(),
            Some(x) => x
        },
        repeat_idx: ~[],
        repeat_len: ~[],
        /* dummy values, never read: */
        cur_tok: EOF,
        cur_span: dummy_sp()
    };
    tt_next_token(r); /* get cur_tok and cur_span set up */
    return r;
}
|
|
|
|
|
2013-03-22 13:09:13 -05:00
|
|
|
/// Deep-copies a `TtFrame` chain: the forest contents are copied, and
/// every parent frame reachable through `up` is duplicated recursively,
/// so the clone shares no mutable state with the original.
fn dup_tt_frame(f: @mut TtFrame) -> @mut TtFrame {
    @mut TtFrame {
        // Copy the forest itself, not just the @-pointer, so mutation
        // of one reader's frame cannot affect the other.
        forest: @mut (copy *f.forest),
        idx: f.idx,
        dotdotdoted: f.dotdotdoted,
        sep: copy f.sep,
        up: match f.up {
            Some(up_frame) => Some(dup_tt_frame(up_frame)),
            None => None
        }
    }
}
|
|
|
|
|
2013-03-22 13:09:13 -05:00
|
|
|
/// Clones a `TtReader`, including a deep copy of its frame stack, so the
/// duplicate can be advanced independently of the original (used for
/// lookahead/backtracking by callers). The diagnostic handler is shared.
pub fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader {
    @mut TtReader {
        sp_diag: r.sp_diag,
        stack: dup_tt_frame(r.stack),
        repeat_idx: copy r.repeat_idx,
        repeat_len: copy r.repeat_len,
        cur_tok: copy r.cur_tok,
        cur_span: r.cur_span,
        interpolations: copy r.interpolations,
    }
}
|
|
|
|
|
|
|
|
|
2013-03-22 13:09:13 -05:00
|
|
|
/// Descends into a `named_match` according to the reader's current
/// repetition indices: starting from `start`, each level of `repeat_idx`
/// selects one element of a `matched_seq`, yielding the match bound at
/// the current iteration depth.
fn lookup_cur_matched_by_matched(r: &mut TtReader,
                                 start: @named_match)
                              -> @named_match {
    // Reducer for the fold below: step one level deeper using `idx`.
    fn red(ad: @named_match, idx: &uint) -> @named_match {
        match *ad {
            matched_nonterminal(_) => {
                // end of the line; duplicate henceforth
                ad
            }
            matched_seq(ref ads, _) => ads[*idx]
        }
    }
    r.repeat_idx.iter().fold(start, red)
}
|
|
|
|
|
2013-03-16 13:11:31 -05:00
|
|
|
/// Looks up the match bound to macro variable `name` at the reader's
/// current repetition depth. Fatal error (via `sp_diag`) if the variable
/// was never matched — i.e. the macro body uses an unknown `$var`.
fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match {
    match r.interpolations.find_copy(&name) {
        Some(s) => lookup_cur_matched_by_matched(r, s),
        None => {
            // span_fatal does not return, so this arm never produces a value.
            r.sp_diag.span_fatal(r.cur_span, fmt!("unknown macro variable `%s`",
                                                  ident_to_str(&name)));
        }
    }
}
|
|
|
|
/// Result of lockstep-iteration size analysis (see `lockstep_iter_size`):
/// no constraint yet, a single agreed-upon length together with a witness
/// variable name, or a contradiction carrying an error message.
enum lis {
    lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
}
|
|
|
|
|
2013-05-11 23:25:31 -05:00
|
|
|
/// Computes how many times the repetition `t` should iterate, by checking
/// that every macro variable mentioned under `t` was matched with the
/// same sequence length ("lockstep" iteration). Returns a contradiction
/// if two variables disagree, or `lis_unconstrained` if no repeating
/// variable occurs at all.
fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis {
    // Combine two constraints; contradictions and mismatched lengths
    // dominate, `lis_unconstrained` is the identity.
    fn lis_merge(lhs: lis, rhs: lis) -> lis {
        match lhs {
            lis_unconstrained => copy rhs,
            lis_contradiction(_) => copy lhs,
            lis_constraint(l_len, ref l_id) => match rhs {
                lis_unconstrained => copy lhs,
                lis_contradiction(_) => copy rhs,
                lis_constraint(r_len, _) if l_len == r_len => copy lhs,
                lis_constraint(r_len, ref r_id) => {
                    // Lengths disagree: report both witness variables.
                    let l_n = ident_to_str(l_id);
                    let r_n = ident_to_str(r_id);
                    lis_contradiction(fmt!("Inconsistent lockstep iteration: \
                                            '%s' has %u items, but '%s' has %u",
                                           l_n, l_len, r_n, r_len))
                }
            }
        }
    }
    match *t {
        // Recurse into sub-trees and merge their constraints.
        tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => {
            do tts.iter().fold(lis_unconstrained) |lis, tt| {
                let lis2 = lockstep_iter_size(tt, r);
                lis_merge(lis, lis2)
            }
        }
        // Plain tokens impose no length constraint.
        tt_tok(*) => lis_unconstrained,
        // A macro variable constrains the length iff it was matched as a
        // sequence at the current depth.
        tt_nonterminal(_, name) => match *lookup_cur_matched(r, name) {
            matched_nonterminal(_) => lis_unconstrained,
            matched_seq(ref ads, _) => lis_constraint(ads.len(), name)
        }
    }
}
|
|
|
|
|
2013-03-12 13:37:31 -05:00
|
|
|
// return the next token from the TtReader.
// EFFECT: advances the reader's token field
//
// Returns the previously cached (cur_tok, cur_span) pair, then advances
// the reader's state so the NEXT call returns the following token. The
// function has two phases: first it unwinds finished frames (popping to
// the parent, or restarting a repetition frame, emitting the separator
// if there is one); then it dispatches on the current token tree to set
// up the next cached token.
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
    // The value we will hand back: whatever was cached by the last call.
    let ret_val = TokenAndSpan {
        tok: copy r.cur_tok,
        sp: r.cur_span,
    };
    // Phase 1: unwind frames whose forest is exhausted.
    loop {
        {
            let stack = &mut *r.stack;
            let forest = &mut *stack.forest;
            if stack.idx < forest.len() {
                // Current frame still has trees left; go transcribe.
                break;
            }
        }

        /* done with this set; pop or repeat? */
        // A non-repetition frame always pops; a repetition frame pops
        // only once its final iteration is complete.
        if ! r.stack.dotdotdoted
            || { *r.repeat_idx.last() == *r.repeat_len.last() - 1 } {

            match r.stack.up {
                None => {
                    // Outermost frame exhausted: signal end of input.
                    r.cur_tok = EOF;
                    return ret_val;
                }
                Some(tt_f) => {
                    if r.stack.dotdotdoted {
                        // Leaving a repetition: drop its depth entry.
                        r.repeat_idx.pop();
                        r.repeat_len.pop();
                    }

                    r.stack = tt_f;
                    r.stack.idx += 1u;
                }
            }

        } else { /* repeat */
            // Restart this repetition frame for its next iteration.
            r.stack.idx = 0u;
            r.repeat_idx[r.repeat_idx.len() - 1u] += 1u;
            match copy r.stack.sep {
                Some(tk) => {
                    // Emit the separator between iterations.
                    r.cur_tok = tk; /* repeat same span, I guess */
                    return ret_val;
                }
                None => ()
            }
        }
    }
    // Phase 2: set up the next token from the current tree.
    loop { /* because it's easiest, this handles `tt_delim` not starting
              with a `tt_tok`, even though it won't happen */
        match copy r.stack.forest[r.stack.idx] {
            tt_delim(tts) => {
                // Descend into the delimited subtree with a fresh frame.
                r.stack = @mut TtFrame {
                    forest: @mut tts,
                    idx: 0u,
                    dotdotdoted: false,
                    sep: None,
                    up: option::Some(r.stack)
                };
                // if this could be 0-length, we'd need to potentially recur here
            }
            tt_tok(sp, tok) => {
                // Ordinary token: cache it and advance.
                r.cur_span = sp;
                r.cur_tok = tok;
                r.stack.idx += 1u;
                return ret_val;
            }
            tt_seq(sp, tts, sep, zerok) => {
                let t = tt_seq(sp, copy tts, copy sep, zerok);
                // Decide how many iterations this repetition should run.
                match lockstep_iter_size(&t, r) {
                    lis_unconstrained => {
                        r.sp_diag.span_fatal(
                            sp, /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth");
                    }
                    lis_contradiction(ref msg) => {
                        /* FIXME #2887 blame macro invoker instead*/
                        r.sp_diag.span_fatal(sp, (*msg));
                    }
                    lis_constraint(len, _) => {
                        if len == 0 {
                            // Zero iterations: only legal for `*`-style
                            // (zero-or-more) repetitions.
                            if !zerok {
                                r.sp_diag.span_fatal(sp, /* FIXME #2887 blame invoker
                                                       */
                                                     "this must repeat at least \
                                                      once");
                            }

                            // Skip the whole repetition and continue.
                            r.stack.idx += 1u;
                            return tt_next_token(r);
                        } else {
                            // Enter the repetition: record its length and
                            // push a dotdotdoted frame over its body.
                            r.repeat_len.push(len);
                            r.repeat_idx.push(0u);
                            r.stack = @mut TtFrame {
                                forest: @mut tts,
                                idx: 0u,
                                dotdotdoted: true,
                                sep: sep,
                                up: Some(r.stack)
                            };
                        }
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            tt_nonterminal(sp, ident) => {
                match *lookup_cur_matched(r, ident) {
                    /* sidestep the interpolation tricks for ident because
                       (a) idents can be in lots of places, so it'd be a pain
                       (b) we actually can, since it's a token. */
                    matched_nonterminal(nt_ident(sn,b)) => {
                        r.cur_span = sp; r.cur_tok = IDENT(sn,b);
                        r.stack.idx += 1u;
                        return ret_val;
                    }
                    matched_nonterminal(ref other_whole_nt) => {
                        // Any other matched nonterminal is emitted as a
                        // single pre-parsed (interpolated) token.
                        r.cur_span = sp;
                        r.cur_tok = INTERPOLATED(copy *other_whole_nt);
                        r.stack.idx += 1u;
                        return ret_val;
                    }
                    matched_seq(*) => {
                        // The variable is still a sequence at this depth:
                        // the macro writer forgot a `$(...)` around it.
                        r.sp_diag.span_fatal(
                            r.cur_span, /* blame the macro writer */
                            fmt!("variable '%s' is still repeating at this depth",
                                 ident_to_str(&ident)));
                    }
                }
            }
        }
    }
}
|