import std::map::{hashmap};
import ast_util::spanned;
import parser::parser;
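
// Separator policy for parsing a delimited sequence: which token, if any,
// separates elements, and whether a trailing separator is allowed.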
type seq_sep = {
    sep: option<token::token>,
    trailing_opt: bool // is trailing separator optional?
};

fn seq_sep(t: token::token) -> seq_sep {
    ret {sep: option::some(t), trailing_opt: false};
}
fn seq_sep_opt(t: token::token) -> seq_sep {
    ret {sep: option::some(t), trailing_opt: true};
}
fn seq_sep_none() -> seq_sep {
    ret {sep: option::none, trailing_opt: false};
}
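
// Render a token as a string, resolving interned names via the reader's
// interner.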
fn token_to_str(reader: reader, token: token::token) -> str {
    token::to_str(*reader.interner, token)
}

// This should be done with traits, once traits work
impl parser_common for parser {
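
    // Report a fatal "unexpected token" error at the span of the most
    // recently consumed token.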
    fn unexpected_last(t: token::token) -> ! {
        self.span_fatal(self.last_span, "unexpected token: '"
                        + token_to_str(self.reader, t) + "'");
    }

    fn unexpected() -> ! {
        self.fatal("unexpected token: '"
                   + token_to_str(self.reader, self.token) + "'");
    }
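
    // Consume the given token if it is the current one; otherwise report a
    // fatal "expecting ... but found ..." error.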
    fn expect(t: token::token) {
        if self.token == t {
            self.bump();
        } else {
            let mut s: str = "expecting '";
            s += token_to_str(self.reader, t);
            s += "' but found '";
            s += token_to_str(self.reader, self.token);
            self.fatal(s + "'");
        }
    }
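
    // Consume the current token as an identifier, failing if it is not one.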
    fn parse_ident() -> ast::ident {
        alt self.token {
          token::IDENT(i, _) { self.bump(); ret self.get_str(i); }
          _ { self.fatal("expecting ident, found "
                         + token_to_str(self.reader, self.token)); }
        }
    }

    fn parse_path_list_ident() -> ast::path_list_ident {
        let lo = self.span.lo;
        let ident = self.parse_ident();
        let hi = self.span.hi;
        ret spanned(lo, hi, {name: ident, id: self.get_id()});
    }
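
    // Parse an identifier in value position, rejecting restricted keywords.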
    fn parse_value_ident() -> ast::ident {
        self.check_restricted_keywords();
        ret self.parse_ident();
    }
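
    // Consume the given token if present, returning whether it was eaten.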
    fn eat(tok: token::token) -> bool {
        ret if self.token == tok { self.bump(); true } else { false };
    }

    // A sanity check that the word we are asking for is a known keyword
    fn require_keyword(word: str) {
        if !self.keywords.contains_key(word) {
            self.bug(#fmt("unknown keyword: %s", word));
        }
    }
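
    // Report whether `tok` is an identifier spelling the given keyword.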
    fn token_is_keyword(word: str, tok: token::token) -> bool {
        self.require_keyword(word);
        alt tok {
          token::IDENT(sid, false) { str::eq(word, self.get_str(sid)) }
          _ { false }
        }
    }

    fn is_keyword(word: str) -> bool {
        self.token_is_keyword(word, self.token)
    }

    fn eat_keyword(word: str) -> bool {
        self.require_keyword(word);
        alt self.token {
          token::IDENT(sid, false) {
            if str::eq(word, self.get_str(sid)) {
                self.bump();
                ret true;
            } else { ret false; }
          }
          _ { ret false; }
        }
    }

    fn expect_keyword(word: str) {
        self.require_keyword(word);
        if !self.eat_keyword(word) {
            self.fatal("expecting " + word + ", found " +
                       token_to_str(self.reader, self.token));
        }
    }

    fn is_restricted_keyword(word: str) -> bool {
        self.restricted_keywords.contains_key(word)
    }

    fn check_restricted_keywords() {
        alt self.token {
          token::IDENT(_, false) {
            let w = token_to_str(self.reader, self.token);
            self.check_restricted_keywords_(w);
          }
          _ { }
        }
    }

    fn check_restricted_keywords_(w: ast::ident) {
        if self.is_restricted_keyword(w) {
            self.fatal("found `" + w + "` in restricted position");
        }
    }
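
    // Expect a closing `>`. If the current token is `>>`, split it so the
    // remaining `>` can close an enclosing angle-bracketed list.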
    fn expect_gt() {
        if self.token == token::GT {
            self.bump();
        } else if self.token == token::BINOP(token::SHR) {
            self.swap(token::GT, self.span.lo + 1u, self.span.hi);
        } else {
            let mut s: str = "expecting ";
            s += token_to_str(self.reader, token::GT);
            s += ", found ";
            s += token_to_str(self.reader, self.token);
            self.fatal(s);
        }
    }
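
    // Parse `sep`-separated elements up to, but not including, a closing
    // `>` (or `>>`).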
    fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
                                       f: fn(parser) -> T) -> [T] {
        let mut first = true;
        let mut v = [];
        while self.token != token::GT
              && self.token != token::BINOP(token::SHR) {
            alt sep {
              some(t) { if first { first = false; }
                        else { self.expect(t); } }
              _ { }
            }
            v += [f(self)];
        }

        ret v;
    }

    fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
                                f: fn(parser) -> T) -> [T] {
        let v = self.parse_seq_to_before_gt(sep, f);
        self.expect_gt();

        ret v;
    }

    fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
                                f: fn(parser) -> T) -> spanned<[T]> {
        let lo = self.span.lo;
        self.expect(token::LT);
        let result = self.parse_seq_to_before_gt::<T>(sep, f);
        let hi = self.span.hi;
        self.expect_gt();
        ret spanned(lo, hi, result);
    }

    fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
                                 f: fn(parser) -> T) -> [T] {
        let val = self.parse_seq_to_before_end(ket, sep, f);
        self.bump();
        ret val;
    }
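
    // Parse elements up to, but not consuming, the closing token `ket`,
    // following the separator policy in `sep` (including an optional
    // trailing separator).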
    fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
                                        f: fn(parser) -> T) -> [T] {
        let mut first: bool = true;
        let mut v: [T] = [];
        while self.token != ket {
            alt sep.sep {
              some(t) { if first { first = false; }
                        else { self.expect(t); } }
              _ { }
            }
            if sep.trailing_opt && self.token == ket { break; }
            v += [f(self)];
        }
        ret v;
    }
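
    // Parse a sequence bracketed by `bra` and `ket`, returning the elements
    // together with the span of the whole sequence.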
    fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
                          f: fn(parser) -> T) -> spanned<[T]> {
        let lo = self.span.lo;
        self.expect(bra);
        let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
        let hi = self.span.hi;
        self.bump();
        ret spanned(lo, hi, result);
    }
}