rust/src/libsyntax/parse/common.rs

import std::map::{hashmap};
import ast_util::spanned;
import parser::parser;
import lexer::reader;

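// A seq_sep describes how the elements of a parsed sequence are separated:
// an optional separator token, and whether a trailing separator is allowed
// just before the closing delimiter.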
type seq_sep = {
    sep: option<token::token>,
    trailing_sep_allowed: bool
};

fn seq_sep_trailing_disallowed(t: token::token) -> seq_sep {
    return {sep: option::some(t), trailing_sep_allowed: false};
}
fn seq_sep_trailing_allowed(t: token::token) -> seq_sep {
    return {sep: option::some(t), trailing_sep_allowed: true};
}
fn seq_sep_none() -> seq_sep {
    return {sep: option::none, trailing_sep_allowed: false};
}

fn token_to_str(reader: reader, ++token: token::token) -> ~str {
    token::to_str(*reader.interner(), token)
}

trait parser_common {
    fn unexpected_last(t: token::token) -> !;
    fn unexpected() -> !;
    fn expect(t: token::token);
    fn parse_ident() -> ast::ident;
    fn parse_path_list_ident() -> ast::path_list_ident;
    fn parse_value_ident() -> ast::ident;
    fn eat(tok: token::token) -> bool;
    // A sanity check that the word we are asking for is a known keyword
    fn require_keyword(word: ~str);
    fn token_is_word(word: ~str, ++tok: token::token) -> bool;
    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool;
    fn is_keyword(word: ~str) -> bool;
    fn is_any_keyword(tok: token::token) -> bool;
    fn eat_keyword(word: ~str) -> bool;
    fn expect_keyword(word: ~str);
    fn is_restricted_keyword(word: ~str) -> bool;
    fn check_restricted_keywords();
    fn check_restricted_keywords_(w: ~str);
    fn expect_gt();
    fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
                                       f: fn(parser) -> T) -> ~[T];
    fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
                                f: fn(parser) -> T) -> ~[T];
    fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
                                f: fn(parser) -> T) -> spanned<~[T]>;
    fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
                                 f: fn(parser) -> T) -> ~[T];
    fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
                                        f: fn(parser) -> T) -> ~[T];
    fn parse_unspanned_seq<T: copy>(bra: token::token,
                                    ket: token::token,
                                    sep: seq_sep,
                                    f: fn(parser) -> T) -> ~[T];
    fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
                          f: fn(parser) -> T) -> spanned<~[T]>;
}

impl parser: parser_common {
    fn unexpected_last(t: token::token) -> ! {
        self.span_fatal(
            copy self.last_span,
            ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`");
    }

    fn unexpected() -> ! {
        self.fatal(~"unexpected token: `"
                   + token_to_str(self.reader, self.token) + ~"`");
    }

    fn expect(t: token::token) {
        if self.token == t {
            self.bump();
        } else {
            let mut s: ~str = ~"expected `";
            s += token_to_str(self.reader, t);
            s += ~"` but found `";
            s += token_to_str(self.reader, self.token);
            self.fatal(s + ~"`");
        }
    }

    fn parse_ident() -> ast::ident {
        match copy self.token {
          token::IDENT(i, _) => { self.bump(); return self.get_str(i); }
          token::INTERPOLATED(token::nt_ident(*)) => { self.bug(
              ~"ident interpolation not converted to real token"); }
          _ => { self.fatal(~"expected ident, found `"
                            + token_to_str(self.reader, self.token)
                            + ~"`"); }
        }
    }

    fn parse_path_list_ident() -> ast::path_list_ident {
        let lo = self.span.lo;
        let ident = self.parse_ident();
        let hi = self.span.hi;
        return spanned(lo, hi, {name: ident, id: self.get_id()});
    }

    fn parse_value_ident() -> ast::ident {
        self.check_restricted_keywords();
        return self.parse_ident();
    }

    fn eat(tok: token::token) -> bool {
        return if self.token == tok { self.bump(); true } else { false };
    }

    // A sanity check that the word we are asking for is a known keyword
    fn require_keyword(word: ~str) {
        if !self.keywords.contains_key_ref(&word) {
            self.bug(fmt!{"unknown keyword: %s", word});
        }
    }

    fn token_is_word(word: ~str, ++tok: token::token) -> bool {
        match tok {
          token::IDENT(sid, false) => { word == *self.get_str(sid) }
          _ => { false }
        }
    }

    fn token_is_keyword(word: ~str, ++tok: token::token) -> bool {
        self.require_keyword(word);
        self.token_is_word(word, tok)
    }

    fn is_keyword(word: ~str) -> bool {
        self.token_is_keyword(word, self.token)
    }

    fn is_any_keyword(tok: token::token) -> bool {
        match tok {
          token::IDENT(sid, false) => {
            self.keywords.contains_key_ref(self.get_str(sid))
          }
          _ => false
        }
    }

    fn eat_keyword(word: ~str) -> bool {
        self.require_keyword(word);

        let mut bump = false;
        let val = match self.token {
          token::IDENT(sid, false) => {
            if word == *self.get_str(sid) {
                bump = true;
                true
            } else { false }
          }
          _ => false
        };
        if bump { self.bump() }
        val
    }

    fn expect_keyword(word: ~str) {
        self.require_keyword(word);
        if !self.eat_keyword(word) {
            self.fatal(~"expected `" + word + ~"`, found `" +
                       token_to_str(self.reader, self.token) +
                       ~"`");
        }
    }

    fn is_restricted_keyword(word: ~str) -> bool {
        self.restricted_keywords.contains_key_ref(&word)
    }
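
    // Restricted keywords are words that may lex as identifiers but must not
    // appear where a value identifier is expected (see parse_value_ident).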
    fn check_restricted_keywords() {
        match self.token {
          token::IDENT(_, false) => {
            let w = token_to_str(self.reader, self.token);
            self.check_restricted_keywords_(w);
          }
          _ => ()
        }
    }

    fn check_restricted_keywords_(w: ~str) {
        if self.is_restricted_keyword(w) {
            self.fatal(~"found `" + w + ~"` in restricted position");
        }
    }
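
    // Expect and consume a single `>`. If the current token is `>>`
    // (BINOP(SHR)), split it in place so the remaining `>` can still close
    // an enclosing type-parameter list, as in `option<option<int>>`.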
    fn expect_gt() {
        if self.token == token::GT {
            self.bump();
        } else if self.token == token::BINOP(token::SHR) {
            self.swap(token::GT, self.span.lo + 1u, self.span.hi);
        } else {
            let mut s: ~str = ~"expected `";
            s += token_to_str(self.reader, token::GT);
            s += ~"`, found `";
            s += token_to_str(self.reader, self.token);
            s += ~"`";
            self.fatal(s);
        }
    }

    fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
                                       f: fn(parser) -> T) -> ~[T] {
        let mut first = true;
        let mut v = ~[];
        while self.token != token::GT
            && self.token != token::BINOP(token::SHR) {
            match sep {
              some(t) => {
                if first { first = false; }
                else { self.expect(t); }
              }
              _ => ()
            }
            vec::push(v, f(self));
        }
        return v;
    }

    fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
                                f: fn(parser) -> T) -> ~[T] {
        let v = self.parse_seq_to_before_gt(sep, f);
        self.expect_gt();
        return v;
    }

    fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
                                f: fn(parser) -> T) -> spanned<~[T]> {
        let lo = self.span.lo;
        self.expect(token::LT);
        let result = self.parse_seq_to_before_gt::<T>(sep, f);
        let hi = self.span.hi;
        self.expect_gt();
        return spanned(lo, hi, result);
    }

    fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
                                 f: fn(parser) -> T) -> ~[T] {
        let val = self.parse_seq_to_before_end(ket, sep, f);
        self.bump();
        return val;
    }
    fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
                                        f: fn(parser) -> T) -> ~[T] {
        let mut first: bool = true;
        let mut v: ~[T] = ~[];
        while self.token != ket {
            match sep.sep {
              some(t) => {
                if first { first = false; }
                else { self.expect(t); }
              }
              _ => ()
            }
            if sep.trailing_sep_allowed && self.token == ket { break; }
            vec::push(v, f(self));
        }
        return v;
    }
    fn parse_unspanned_seq<T: copy>(bra: token::token,
                                    ket: token::token,
                                    sep: seq_sep,
                                    f: fn(parser) -> T) -> ~[T] {
        self.expect(bra);
        let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
        self.bump();
        return result;
    }

    // NB: Do not use this function unless you actually plan to place the
    // spanned list in the AST.
    fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
                          f: fn(parser) -> T) -> spanned<~[T]> {
        let lo = self.span.lo;
        self.expect(bra);
        let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
        let hi = self.span.hi;
        self.bump();
        return spanned(lo, hi, result);
    }
}