auto merge of #6737 : dotdash/rust/static_keywords, r=pcwalton
Currently, keywords are stored in hashsets that are recreated for every Parser instance, which is quite expensive since macro expansion creates lots of them. Additionally, the parser functions that look for a keyword currently accept a string and have a runtime check to validate that they actually received a keyword. By creating an enum for the keywords and inserting them into the ident interner, we can avoid the creation of the hashsets and get static checks for the keywords. For libstd, this cuts the parse+expansion part from ~2.6s to ~1.6s.
Commit: 9d37d038fe
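The change replaces the per-Parser keyword hash sets with an enum whose variants map to fixed slots in the ident interner, so a keyword test becomes an integer comparison on interned ids instead of a string lookup, and asking about a non-keyword becomes a compile-time error. A minimal sketch of that idea in present-day Rust (the diff below is in the 2013 dialect; the type names and index values here are illustrative, not the actual interner layout):

// Illustrative sketch only: each keyword variant carries a fixed interner index.
#[derive(Clone, Copy, PartialEq, Eq)]
struct Ident { repr: u32 }

#[derive(Clone, Copy)]
enum Keyword { False = 44, Fn = 45, If = 47, True = 63 }

impl Keyword {
    // A keyword is just a well-known slot in the ident interner.
    fn to_ident(self) -> Ident { Ident { repr: self as u32 } }
}

enum Token { Ident(Ident), LParen }

// The parser-side check: no hash set, no string comparison, and the
// argument type guarantees the caller asked about a real keyword.
fn is_keyword(kw: Keyword, tok: &Token) -> bool {
    match tok {
        Token::Ident(id) => id.repr == kw.to_ident().repr,
        _ => false,
    }
}

fn main() {
    let tok = Token::Ident(Ident { repr: 45 });
    assert!(is_keyword(Keyword::Fn, &tok));
    assert!(!is_keyword(Keyword::If, &tok));
}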
@@ -16,6 +16,7 @@ use ext::base::ExtCtxt;
use ext::base;
use parse::lexer::{new_tt_reader, reader};
use parse::parser::Parser;
+use parse::token::keywords;

pub fn expand_trace_macros(cx: @ExtCtxt,
sp: span,
@@ -36,9 +37,9 @@ pub fn expand_trace_macros(cx: @ExtCtxt,
rdr.dup()
);

-if rust_parser.is_keyword("true") {
+if rust_parser.is_keyword(keywords::True) {
cx.set_trace_macros(true);
-} else if rust_parser.is_keyword("false") {
+} else if rust_parser.is_keyword(keywords::False) {
cx.set_trace_macros(false);
} else {
cx.span_fatal(sp, "trace_macros! only accepts `true` or `false`")
@@ -14,6 +14,7 @@ use ast;
use codemap::{BytePos, spanned};
use parse::lexer::reader;
use parse::parser::Parser;
+use parse::token::keywords;
use parse::token;

use opt_vec;
@@ -133,54 +134,15 @@ pub impl Parser {
return if *self.token == *tok { self.bump(); true } else { false };
}

-// Storing keywords as interned idents instead of strings would be nifty.

-// A sanity check that the word we are asking for is a known keyword
-// NOTE: this could be done statically....
-fn require_keyword(&self, word: &str) {
-if !self.keywords.contains_equiv(&word) {
-self.bug(fmt!("unknown keyword: %s", word));
-}
+fn is_keyword(&self, kw: keywords::Keyword) -> bool {
+token::is_keyword(kw, self.token)
}

-// return true when this token represents the given string, and is not
-// followed immediately by :: .
-fn token_is_word(&self, word: &str, tok: &token::Token) -> bool {
-match *tok {
-token::IDENT(sid, false) => { word == *self.id_to_str(sid) }
-_ => { false }
-}
-}

-fn token_is_keyword(&self, word: &str, tok: &token::Token) -> bool {
-self.require_keyword(word);
-self.token_is_word(word, tok)
-}

-fn is_keyword(&self, word: &str) -> bool {
-self.token_is_keyword(word, &copy *self.token)
-}

-fn id_is_any_keyword(&self, id: ast::ident) -> bool {
-self.keywords.contains(self.id_to_str(id))
-}

-fn is_any_keyword(&self, tok: &token::Token) -> bool {
-match *tok {
-token::IDENT(sid, false) => {
-self.keywords.contains(self.id_to_str(sid))
-}
-_ => false
-}
-}

-// if the given word is not a keyword, signal an error.
// if the next token is the given keyword, eat it and return
// true. Otherwise, return false.
-fn eat_keyword(&self, word: &str) -> bool {
-self.require_keyword(word);
+fn eat_keyword(&self, kw: keywords::Keyword) -> bool {
let is_kw = match *self.token {
-token::IDENT(sid, false) => word == *self.id_to_str(sid),
+token::IDENT(sid, false) => kw.to_ident().repr == sid.repr,
_ => false
};
if is_kw { self.bump() }
@@ -190,63 +152,30 @@ pub impl Parser {
-// if the given word is not a keyword, signal an error.
// if the next token is not the given word, signal an error.
// otherwise, eat it.
-fn expect_keyword(&self, word: &str) {
-self.require_keyword(word);
-if !self.eat_keyword(word) {
+fn expect_keyword(&self, kw: keywords::Keyword) {
+if !self.eat_keyword(kw) {
self.fatal(
fmt!(
"expected `%s`, found `%s`",
-word,
+*self.id_to_str(kw.to_ident()),
self.this_token_to_str()
)
);
}
}

-// return true if the given string is a strict keyword
-fn is_strict_keyword(&self, word: &str) -> bool {
-self.strict_keywords.contains_equiv(&word)
-}

// signal an error if the current token is a strict keyword
-fn check_strict_keywords(&self) {
-match *self.token {
-token::IDENT(_, false) => {
-let w = token_to_str(self.reader, &copy *self.token);
-self.check_strict_keywords_(w);
-}
-_ => ()
-}
-}

-// signal an error if the given string is a strict keyword
-fn check_strict_keywords_(&self, w: &str) {
-if self.is_strict_keyword(w) {
+fn check_strict_keywords(&self) {
+if token::is_strict_keyword(self.token) {
self.span_err(*self.last_span,
-fmt!("found `%s` in ident position", w));
+fmt!("found `%s` in ident position", self.this_token_to_str()));
}
}

-// return true if this is a reserved keyword
-fn is_reserved_keyword(&self, word: &str) -> bool {
-self.reserved_keywords.contains_equiv(&word)
-}

// signal an error if the current token is a reserved keyword
fn check_reserved_keywords(&self) {
-match *self.token {
-token::IDENT(_, false) => {
-let w = token_to_str(self.reader, &copy *self.token);
-self.check_reserved_keywords_(w);
-}
-_ => ()
-}
-}

-// signal an error if the given string is a reserved keyword
-fn check_reserved_keywords_(&self, w: &str) {
-if self.is_reserved_keyword(w) {
-self.fatal(fmt!("`%s` is a reserved keyword", w));
+if token::is_reserved_keyword(self.token) {
+self.fatal(fmt!("`%s` is a reserved keyword", self.this_token_to_str()));
}
}
@@ -23,7 +23,7 @@ use ast::{expr, expr_lit, lit_nil, attribute};
use ast;
use codemap::{span, respan};
use parse::parser::Parser;
-use parse::token::Token;
+use parse::token::{keywords, Token};
use parse::token;

use core::to_bytes;
@@ -295,9 +295,9 @@ pub impl Parser {
}

fn try_parse_obsolete_priv_section(&self, attrs: &[attribute]) -> bool {
-if self.is_keyword("priv") && self.look_ahead(1) == token::LBRACE {
+if self.is_keyword(keywords::Priv) && self.look_ahead(1) == token::LBRACE {
self.obsolete(copy *self.span, ObsoletePrivSection);
-self.eat_keyword("priv");
+self.eat_keyword(keywords::Priv);
self.bump();
while *self.token != token::RBRACE {
self.parse_single_struct_field(ast::private, attrs.to_owned());
@@ -86,7 +86,7 @@ use parse::obsolete::{ObsoletePurity, ObsoleteStaticMethod};
use parse::obsolete::{ObsoleteConstItem, ObsoleteFixedLengthVectorType};
use parse::obsolete::{ObsoleteNamedExternModule};
use parse::token::{can_begin_expr, is_ident, is_ident_or_path};
-use parse::token::{is_plain_ident, INTERPOLATED, special_idents, token_to_binop};
+use parse::token::{is_plain_ident, INTERPOLATED, keywords, special_idents, token_to_binop};
use parse::token;
use parse::{new_sub_parser_from_file, next_node_id, ParseSess};
use opt_vec;
@@ -234,9 +234,6 @@ pub fn Parser(sess: @mut ParseSess,
tokens_consumed: @mut 0,
restriction: @mut UNRESTRICTED,
quote_depth: @mut 0,
-keywords: token::keyword_table(),
-strict_keywords: token::strict_keyword_table(),
-reserved_keywords: token::reserved_keyword_table(),
obsolete_set: @mut HashSet::new(),
mod_path_stack: @mut ~[],
}
@@ -260,9 +257,6 @@ pub struct Parser {
quote_depth: @mut uint, // not (yet) related to the quasiquoter
reader: @reader,
interner: @token::ident_interner,
-keywords: HashSet<~str>,
-strict_keywords: HashSet<~str>,
-reserved_keywords: HashSet<~str>,
/// The set of seen errors about obsolete syntax. Used to suppress
/// extra detail when the same error is seen twice
obsolete_set: @mut HashSet<ObsoleteSyntax>,
@@ -340,10 +334,10 @@ pub impl Parser {

// is this one of the keywords that signals a closure type?
fn token_is_closure_keyword(&self, tok: &token::Token) -> bool {
-self.token_is_keyword("pure", tok) ||
-self.token_is_keyword("unsafe", tok) ||
-self.token_is_keyword("once", tok) ||
-self.token_is_keyword("fn", tok)
+token::is_keyword(keywords::Pure, tok) ||
+token::is_keyword(keywords::Unsafe, tok) ||
+token::is_keyword(keywords::Once, tok) ||
+token::is_keyword(keywords::Fn, tok)
}

fn token_is_lifetime(&self, tok: &token::Token) -> bool {
@@ -380,7 +374,7 @@ pub impl Parser {
let opt_abis = self.parse_opt_abis();
let abis = opt_abis.get_or_default(AbiSet::Rust());
let purity = self.parse_unsafety();
-self.expect_keyword("fn");
+self.expect_keyword(keywords::Fn);
let (decl, lifetimes) = self.parse_ty_fn_decl();
return ty_bare_fn(@TyBareFn {
abis: abis,
@@ -416,7 +410,7 @@ pub impl Parser {

let purity = self.parse_unsafety();
let onceness = parse_onceness(self);
-self.expect_keyword("fn");
+self.expect_keyword(keywords::Fn);
let bounds = self.parse_optional_ty_param_bounds();

if self.parse_fn_ty_sigil().is_some() {
@@ -436,7 +430,7 @@ pub impl Parser {
});

fn parse_onceness(this: &Parser) -> Onceness {
-if this.eat_keyword("once") {
+if this.eat_keyword(keywords::Once) {
Once
} else {
Many
@@ -446,10 +440,10 @@ pub impl Parser {

// looks like this should be called parse_unsafety
fn parse_unsafety(&self) -> purity {
-if self.eat_keyword("pure") {
+if self.eat_keyword(keywords::Pure) {
self.obsolete(*self.last_span, ObsoletePurity);
return impure_fn;
-} else if self.eat_keyword("unsafe") {
+} else if self.eat_keyword(keywords::Unsafe) {
return unsafe_fn;
} else {
return impure_fn;
@@ -704,7 +698,7 @@ pub impl Parser {
// BORROWED POINTER
self.bump();
self.parse_borrowed_pointee()
-} else if self.eat_keyword("extern") {
+} else if self.eat_keyword(keywords::Extern) {
// EXTERN FUNCTION
self.parse_ty_bare_fn()
} else if self.token_is_closure_keyword(&copy *self.token) {
@@ -828,7 +822,7 @@ pub impl Parser {
let mut is_mutbl = false;
let pat = if require_name || self.is_named_argument() {
self.parse_arg_mode();
-is_mutbl = self.eat_keyword("mut");
+is_mutbl = self.eat_keyword(keywords::Mut);
let pat = self.parse_pat(false);
self.expect(&token::COLON);
pat
@@ -856,7 +850,7 @@ pub impl Parser {
// parse an argument in a lambda header e.g. |arg, arg|
fn parse_fn_block_arg(&self) -> arg_or_capture_item {
self.parse_arg_mode();
-let is_mutbl = self.eat_keyword("mut");
+let is_mutbl = self.eat_keyword(keywords::Mut);
let pat = self.parse_pat(false);
let t = if self.eat(&token::COLON) {
self.parse_ty(false)
@@ -907,9 +901,9 @@ pub impl Parser {
// matches lit = true | false | token_lit
fn parse_lit(&self) -> lit {
let lo = self.span.lo;
-let lit = if self.eat_keyword("true") {
+let lit = if self.eat_keyword(keywords::True) {
lit_bool(true)
-} else if self.eat_keyword("false") {
+} else if self.eat_keyword(keywords::False) {
lit_bool(false)
} else {
// XXX: This is a really bad copy!
@@ -1145,15 +1139,15 @@ pub impl Parser {
}

fn token_is_mutability(&self, tok: &token::Token) -> bool {
-self.token_is_keyword("mut", tok) ||
-self.token_is_keyword("const", tok)
+token::is_keyword(keywords::Mut, tok) ||
+token::is_keyword(keywords::Const, tok)
}

// parse mutability declaration (mut/const/imm)
fn parse_mutability(&self) -> mutability {
-if self.eat_keyword("mut") {
+if self.eat_keyword(keywords::Mut) {
m_mutbl
-} else if self.eat_keyword("const") {
+} else if self.eat_keyword(keywords::Const) {
m_const
} else {
m_imm
@@ -1251,30 +1245,30 @@ pub impl Parser {
expr_block(blk));
} else if token::is_bar(&*self.token) {
return self.parse_lambda_expr();
-} else if self.eat_keyword("self") {
+} else if self.eat_keyword(keywords::Self) {
ex = expr_self;
hi = self.span.hi;
-} else if self.eat_keyword("if") {
+} else if self.eat_keyword(keywords::If) {
return self.parse_if_expr();
-} else if self.eat_keyword("for") {
+} else if self.eat_keyword(keywords::For) {
return self.parse_sugary_call_expr(~"for", ForSugar,
expr_loop_body);
-} else if self.eat_keyword("do") {
+} else if self.eat_keyword(keywords::Do) {
return self.parse_sugary_call_expr(~"do", DoSugar,
expr_do_body);
-} else if self.eat_keyword("while") {
+} else if self.eat_keyword(keywords::While) {
return self.parse_while_expr();
} else if self.token_is_lifetime(&*self.token) {
let lifetime = self.get_lifetime(&*self.token);
self.bump();
self.expect(&token::COLON);
-self.expect_keyword("loop");
+self.expect_keyword(keywords::Loop);
return self.parse_loop_expr(Some(lifetime));
-} else if self.eat_keyword("loop") {
+} else if self.eat_keyword(keywords::Loop) {
return self.parse_loop_expr(None);
-} else if self.eat_keyword("match") {
+} else if self.eat_keyword(keywords::Match) {
return self.parse_match_expr();
-} else if self.eat_keyword("unsafe") {
+} else if self.eat_keyword(keywords::Unsafe) {
return self.parse_block_expr(lo, unsafe_blk);
} else if *self.token == token::LBRACKET {
self.bump();
@@ -1314,7 +1308,7 @@ pub impl Parser {
}
}
hi = self.last_span.hi;
-} else if self.eat_keyword("__log") {
+} else if self.eat_keyword(keywords::__Log) {
// LOG expression
self.expect(&token::LPAREN);
let lvl = self.parse_expr();
@@ -1323,14 +1317,14 @@ pub impl Parser {
ex = expr_log(lvl, e);
hi = self.span.hi;
self.expect(&token::RPAREN);
-} else if self.eat_keyword("return") {
+} else if self.eat_keyword(keywords::Return) {
// RETURN expression
if can_begin_expr(&*self.token) {
let e = self.parse_expr();
hi = e.span.hi;
ex = expr_ret(Some(e));
} else { ex = expr_ret(None); }
-} else if self.eat_keyword("break") {
+} else if self.eat_keyword(keywords::Break) {
// BREAK expression
if self.token_is_lifetime(&*self.token) {
let lifetime = self.get_lifetime(&*self.token);
@@ -1340,14 +1334,14 @@ pub impl Parser {
ex = expr_break(None);
}
hi = self.span.hi;
-} else if self.eat_keyword("copy") {
+} else if self.eat_keyword(keywords::Copy) {
// COPY expression
let e = self.parse_expr();
ex = expr_copy(e);
hi = e.span.hi;
} else if *self.token == token::MOD_SEP ||
-is_ident(&*self.token) && !self.is_keyword("true") &&
-!self.is_keyword("false") {
+is_ident(&*self.token) && !self.is_keyword(keywords::True) &&
+!self.is_keyword(keywords::False) {
let pth = self.parse_path_with_tps(true);

// `!`, as an operator, is prefix, so we know this isn't that
@@ -1827,7 +1821,7 @@ pub impl Parser {
}
}
None => {
-if as_prec > min_prec && self.eat_keyword("as") {
+if as_prec > min_prec && self.eat_keyword(keywords::As) {
let rhs = self.parse_ty(true);
let _as = self.mk_expr(lhs.span.lo,
rhs.span.hi,
@@ -1901,7 +1895,7 @@ pub impl Parser {
let thn = self.parse_block();
let mut els: Option<@expr> = None;
let mut hi = thn.span.hi;
-if self.eat_keyword("else") {
+if self.eat_keyword(keywords::Else) {
let elexpr = self.parse_else_expr();
els = Some(elexpr);
hi = elexpr.span.hi;
@@ -1968,7 +1962,7 @@ pub impl Parser {
}

fn parse_else_expr(&self) -> @expr {
-if self.eat_keyword("if") {
+if self.eat_keyword(keywords::If) {
return self.parse_if_expr();
} else {
let blk = self.parse_block();
@@ -2082,7 +2076,7 @@ pub impl Parser {
fn looking_at_record_literal(&self) -> bool {
let lookahead = self.look_ahead(1);
*self.token == token::LBRACE &&
-(self.token_is_keyword("mut", &lookahead) ||
+(token::is_keyword(keywords::Mut, &lookahead) ||
(is_plain_ident(&lookahead) &&
self.look_ahead(2) == token::COLON))
}
@@ -2095,7 +2089,7 @@ pub impl Parser {
while *self.token != token::RBRACE {
let pats = self.parse_pats();
let mut guard = None;
-if self.eat_keyword("if") { guard = Some(self.parse_expr()); }
+if self.eat_keyword(keywords::If) { guard = Some(self.parse_expr()); }
self.expect(&token::FAT_ARROW);
let expr = self.parse_expr_res(RESTRICT_STMT_EXPR);

@@ -2384,8 +2378,8 @@ pub impl Parser {
}
ref tok => {
if !is_ident_or_path(tok)
-|| self.is_keyword("true")
-|| self.is_keyword("false")
+|| self.is_keyword(keywords::True)
+|| self.is_keyword(keywords::False)
{
// Parse an expression pattern or exp .. exp.
//
@@ -2404,11 +2398,11 @@ pub impl Parser {
} else {
pat = pat_lit(val);
}
-} else if self.eat_keyword("ref") {
+} else if self.eat_keyword(keywords::Ref) {
// parse ref pat
let mutbl = self.parse_mutability();
pat = self.parse_pat_ident(refutable, bind_by_ref(mutbl));
-} else if self.eat_keyword("copy") {
+} else if self.eat_keyword(keywords::Copy) {
// parse copy pat
pat = self.parse_pat_ident(refutable, bind_by_copy);
} else {
@@ -2558,7 +2552,7 @@ pub impl Parser {

// parse a "let" stmt
fn parse_let(&self) -> @decl {
-let is_mutbl = self.eat_keyword("mut");
+let is_mutbl = self.eat_keyword(keywords::Mut);
let lo = self.span.lo;
let mut locals = ~[self.parse_local(is_mutbl)];
while self.eat(&token::COMMA) {
@@ -2572,7 +2566,7 @@ pub impl Parser {
pr: visibility,
attrs: ~[attribute]) -> @struct_field {
let lo = self.span.lo;
-if self.eat_keyword("mut") {
+if self.eat_keyword(keywords::Mut) {
// Do nothing, for backwards compatibility.
// XXX: Remove after snapshot.
}
@@ -2604,13 +2598,13 @@ pub impl Parser {
}

let lo = self.span.lo;
-if self.is_keyword("let") {
+if self.is_keyword(keywords::Let) {
check_expected_item(self, item_attrs);
-self.expect_keyword("let");
+self.expect_keyword(keywords::Let);
let decl = self.parse_let();
return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id()));
} else if is_ident(&*self.token)
-&& !self.is_any_keyword(&copy *self.token)
+&& !token::is_any_keyword(self.token)
&& self.look_ahead(1) == token::NOT {
// parse a macro invocation. Looks like there's serious
// overlap here; if this clause doesn't catch it (and it
@@ -2690,7 +2684,7 @@ pub impl Parser {
maybe_whole!(self, nt_block);

let lo = self.span.lo;
-if self.eat_keyword("unsafe") {
+if self.eat_keyword(keywords::Unsafe) {
self.obsolete(copy *self.span, ObsoleteUnsafeBlock);
}
self.expect(&token::LBRACE);
@@ -2705,7 +2699,7 @@ pub impl Parser {
maybe_whole!(pair_empty self, nt_block);

let lo = self.span.lo;
-if self.eat_keyword("unsafe") {
+if self.eat_keyword(keywords::Unsafe) {
self.obsolete(copy *self.span, ObsoleteUnsafeBlock);
}
self.expect(&token::LBRACE);
@@ -2849,10 +2843,10 @@ pub impl Parser {
}

fn parse_optional_purity(&self) -> ast::purity {
-if self.eat_keyword("pure") {
+if self.eat_keyword(keywords::Pure) {
self.obsolete(*self.last_span, ObsoletePurity);
ast::impure_fn
-} else if self.eat_keyword("unsafe") {
+} else if self.eat_keyword(keywords::Unsafe) {
ast::unsafe_fn
} else {
ast::impure_fn
@@ -2860,7 +2854,7 @@ pub impl Parser {
}

fn parse_optional_onceness(&self) -> ast::Onceness {
-if self.eat_keyword("once") { ast::Once } else { ast::Many }
+if self.eat_keyword(keywords::Once) { ast::Once } else { ast::Many }
}

// matches optbounds = ( ( : ( boundseq )? )? )
@@ -3023,10 +3017,10 @@ pub impl Parser {
p: &Parser
) -> ast::explicit_self_ {
// We need to make sure it isn't a mode or a type
-if p.token_is_keyword("self", &p.look_ahead(1)) ||
-((p.token_is_keyword("const", &p.look_ahead(1)) ||
-p.token_is_keyword("mut", &p.look_ahead(1))) &&
-p.token_is_keyword("self", &p.look_ahead(2))) {
+if token::is_keyword(keywords::Self, &p.look_ahead(1)) ||
+((token::is_keyword(keywords::Const, &p.look_ahead(1)) ||
+token::is_keyword(keywords::Mut, &p.look_ahead(1))) &&
+token::is_keyword(keywords::Self, &p.look_ahead(2))) {

p.bump();
let mutability = p.parse_mutability();
@@ -3047,25 +3041,25 @@ pub impl Parser {
//
// We already know that the current token is `&`.

-if (this.token_is_keyword("self", &this.look_ahead(1))) {
+if (token::is_keyword(keywords::Self, &this.look_ahead(1))) {
this.bump();
this.expect_self_ident();
sty_region(None, m_imm)
} else if (this.token_is_mutability(&this.look_ahead(1)) &&
-this.token_is_keyword("self", &this.look_ahead(2))) {
+token::is_keyword(keywords::Self, &this.look_ahead(2))) {
this.bump();
let mutability = this.parse_mutability();
this.expect_self_ident();
sty_region(None, mutability)
} else if (this.token_is_lifetime(&this.look_ahead(1)) &&
-this.token_is_keyword("self", &this.look_ahead(2))) {
+token::is_keyword(keywords::Self, &this.look_ahead(2))) {
this.bump();
let lifetime = @this.parse_lifetime();
this.expect_self_ident();
sty_region(Some(lifetime), m_imm)
} else if (this.token_is_lifetime(&this.look_ahead(1)) &&
this.token_is_mutability(&this.look_ahead(2)) &&
-this.token_is_keyword("self", &this.look_ahead(3))) {
+token::is_keyword(keywords::Self, &this.look_ahead(3))) {
this.bump();
let lifetime = @this.parse_lifetime();
let mutability = this.parse_mutability();
@@ -3274,7 +3268,7 @@ pub impl Parser {
let mut ty = self.parse_ty(false);

// Parse traits, if necessary.
-let opt_trait = if could_be_trait && self.eat_keyword("for") {
+let opt_trait = if could_be_trait && self.eat_keyword(keywords::For) {
// New-style trait. Reinterpret the type as a trait.
let opt_trait_ref = match ty.node {
ty_path(path, node_id) => {
@@ -3449,11 +3443,11 @@ pub impl Parser {
return ~[];
}

-if self.eat_keyword("priv") {
+if self.eat_keyword(keywords::Priv) {
return ~[self.parse_single_struct_field(private, attrs)]
}

-if self.eat_keyword("pub") {
+if self.eat_keyword(keywords::Pub) {
return ~[self.parse_single_struct_field(public, attrs)];
}

@@ -3466,13 +3460,13 @@ pub impl Parser {

// parse visiility: PUB, PRIV, or nothing
fn parse_visibility(&self) -> visibility {
-if self.eat_keyword("pub") { public }
-else if self.eat_keyword("priv") { private }
+if self.eat_keyword(keywords::Pub) { public }
+else if self.eat_keyword(keywords::Priv) { private }
else { inherited }
}

fn parse_staticness(&self) -> bool {
-if self.eat_keyword("static") {
+if self.eat_keyword(keywords::Static) {
self.obsolete(*self.last_span, ObsoleteStaticMethod);
true
} else {
@@ -3692,10 +3686,10 @@ pub impl Parser {
let lo = self.span.lo;

// XXX: Obsolete; remove after snap.
-if self.eat_keyword("const") {
+if self.eat_keyword(keywords::Const) {
self.obsolete(*self.last_span, ObsoleteConstItem);
} else {
-self.expect_keyword("static");
+self.expect_keyword(keywords::Static);
}

let ident = self.parse_ident();
@@ -3713,14 +3707,14 @@ pub impl Parser {

// parse safe/unsafe and fn
fn parse_fn_purity(&self) -> purity {
-if self.eat_keyword("fn") { impure_fn }
-else if self.eat_keyword("pure") {
+if self.eat_keyword(keywords::Fn) { impure_fn }
+else if self.eat_keyword(keywords::Pure) {
self.obsolete(*self.last_span, ObsoletePurity);
-self.expect_keyword("fn");
+self.expect_keyword(keywords::Fn);
// NB: We parse this as impure for bootstrapping purposes.
impure_fn
-} else if self.eat_keyword("unsafe") {
-self.expect_keyword("fn");
+} else if self.eat_keyword(keywords::Unsafe) {
+self.expect_keyword(keywords::Fn);
unsafe_fn
}
else { self.unexpected(); }
@@ -3762,9 +3756,9 @@ pub impl Parser {
items_allowed: bool)
 -> item_or_view_item {
let mut must_be_named_mod = false;
-if self.is_keyword("mod") {
+if self.is_keyword(keywords::Mod) {
must_be_named_mod = true;
-self.expect_keyword("mod");
+self.expect_keyword(keywords::Mod);
} else if *self.token != token::LBRACE {
self.span_fatal(
copy *self.span,
@@ -4049,7 +4043,7 @@ pub impl Parser {
let visibility = self.parse_visibility();

// must be a view item:
-if self.eat_keyword("use") {
+if self.eat_keyword(keywords::Use) {
// USE ITEM (iovi_view_item)
let view_item = self.parse_use();
self.expect(&token::SEMI);
@@ -4061,10 +4055,10 @@ pub impl Parser {
});
}
// either a view item or an item:
-if self.eat_keyword("extern") {
+if self.eat_keyword(keywords::Extern) {
let opt_abis = self.parse_opt_abis();

-if self.eat_keyword("fn") {
+if self.eat_keyword(keywords::Fn) {
// EXTERN FUNCTION ITEM
let abis = opt_abis.get_or_default(AbiSet::C());
let (ident, item_, extra_attrs) =
@@ -4080,11 +4074,11 @@ pub impl Parser {
}
}
// the rest are all guaranteed to be items:
-if (self.is_keyword("const") ||
-(self.is_keyword("static") &&
-!self.token_is_keyword("fn", &self.look_ahead(1)))) {
+if (self.is_keyword(keywords::Const) ||
+(self.is_keyword(keywords::Static) &&
+!token::is_keyword(keywords::Fn, &self.look_ahead(1)))) {
// CONST / STATIC ITEM
-if self.is_keyword("const") {
+if self.is_keyword(keywords::Const) {
self.obsolete(*self.span, ObsoleteConstItem);
}
self.bump();
@@ -4093,7 +4087,7 @@ pub impl Parser {
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.is_keyword("fn") &&
+if self.is_keyword(keywords::Fn) &&
!self.fn_expr_lookahead(self.look_ahead(1u)) {
// FUNCTION ITEM
self.bump();
@@ -4103,28 +4097,28 @@ pub impl Parser {
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("pure") {
+if self.eat_keyword(keywords::Pure) {
// PURE FUNCTION ITEM (obsolete)
self.obsolete(*self.last_span, ObsoletePurity);
-self.expect_keyword("fn");
+self.expect_keyword(keywords::Fn);
let (ident, item_, extra_attrs) =
self.parse_item_fn(impure_fn, AbiSet::Rust());
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.is_keyword("unsafe")
+if self.is_keyword(keywords::Unsafe)
&& self.look_ahead(1u) != token::LBRACE {
// UNSAFE FUNCTION ITEM
self.bump();
-self.expect_keyword("fn");
+self.expect_keyword(keywords::Fn);
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafe_fn, AbiSet::Rust());
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("mod") {
+if self.eat_keyword(keywords::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
self.parse_item_mod(/*bad*/ copy attrs);
@@ -4132,28 +4126,28 @@ pub impl Parser {
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("type") {
+if self.eat_keyword(keywords::Type) {
// TYPE ITEM
let (ident, item_, extra_attrs) = self.parse_item_type();
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("enum") {
+if self.eat_keyword(keywords::Enum) {
// ENUM ITEM
let (ident, item_, extra_attrs) = self.parse_item_enum();
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("trait") {
+if self.eat_keyword(keywords::Trait) {
// TRAIT ITEM
let (ident, item_, extra_attrs) = self.parse_item_trait();
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("impl") {
+if self.eat_keyword(keywords::Impl) {
// IMPL ITEM
let (ident, item_, extra_attrs) =
self.parse_item_impl(visibility);
@@ -4161,7 +4155,7 @@ pub impl Parser {
visibility,
maybe_append(attrs, extra_attrs)));
}
-if self.eat_keyword("struct") {
+if self.eat_keyword(keywords::Struct) {
// STRUCT ITEM
let (ident, item_, extra_attrs) = self.parse_item_struct();
return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_,
@@ -4182,13 +4176,13 @@ pub impl Parser {

let visibility = self.parse_visibility();

-if (self.is_keyword("const") || self.is_keyword("static")) {
+if (self.is_keyword(keywords::Const) || self.is_keyword(keywords::Static)) {
// FOREIGN CONST ITEM
let item = self.parse_item_foreign_const(visibility, attrs);
return iovi_foreign_item(item);
}
-if (self.is_keyword("fn") || self.is_keyword("pure") ||
-self.is_keyword("unsafe")) {
+if (self.is_keyword(keywords::Fn) || self.is_keyword(keywords::Pure) ||
+self.is_keyword(keywords::Unsafe)) {
// FOREIGN FUNCTION ITEM
let item = self.parse_item_foreign_fn(attrs);
return iovi_foreign_item(item);
@@ -4204,7 +4198,7 @@ pub impl Parser {
lo : BytePos,
visibility : visibility
) -> item_or_view_item {
-if macros_allowed && !self.is_any_keyword(&copy *self.token)
+if macros_allowed && !token::is_any_keyword(self.token)
&& self.look_ahead(1) == token::NOT
&& (is_plain_ident(&self.look_ahead(2))
|| self.look_ahead(2) == token::LPAREN
@@ -4379,16 +4373,16 @@ pub impl Parser {

fn is_view_item(&self) -> bool {
let tok, next_tok;
-if !self.is_keyword("pub") && !self.is_keyword("priv") {
+if !self.is_keyword(keywords::Pub) && !self.is_keyword(keywords::Priv) {
tok = copy *self.token;
next_tok = self.look_ahead(1);
} else {
tok = self.look_ahead(1);
next_tok = self.look_ahead(2);
};
-self.token_is_keyword("use", &tok)
-|| (self.token_is_keyword("extern", &tok) &&
-self.token_is_keyword("mod", &next_tok))
+token::is_keyword(keywords::Use, &tok)
+|| (token::is_keyword(keywords::Extern, &tok) &&
+token::is_keyword(keywords::Mod, &next_tok))
}

// parse a view item.
@@ -4398,10 +4392,10 @@ pub impl Parser {
vis: visibility
) -> @view_item {
let lo = self.span.lo;
-let node = if self.eat_keyword("use") {
+let node = if self.eat_keyword(keywords::Use) {
self.parse_use()
-} else if self.eat_keyword("extern") {
-self.expect_keyword("mod");
+} else if self.eat_keyword(keywords::Extern) {
+self.expect_keyword(keywords::Mod);
let ident = self.parse_ident();
let metadata = self.parse_optional_meta();
view_item_extern_mod(ident, metadata, self.get_id())
@@ -17,7 +17,6 @@ use util::interner::StrInterner;
use util::interner;

use core::cmp::Equiv;
use core::hashmap::HashSet;
use core::to_bytes;

#[deriving(Encodable, Decodable, Eq)]
@@ -452,6 +451,45 @@ fn mk_fresh_ident_interner() -> @ident_interner {
"__field__", // 32
"C", // 33
"Self", // 34

+"as", // 35
+"break", // 36
+"const", // 37
+"copy", // 38
+"do", // 39
+"drop", // 40
+"else", // 41
+"enum", // 42
+"extern", // 43
+"false", // 44
+"fn", // 45
+"for", // 46
+"if", // 47
+"impl", // 48
+"let", // 49
+"__log", // 50
+"loop", // 51
+"match", // 52
+"mod", // 53
+"mut", // 54
+"once", // 55
+"priv", // 56
+"pub", // 57
+"pure", // 58
+"ref", // 59
+"return", // 60
+"static", // 29 -- also a special ident
+"self", // 8 -- also a special ident
+"struct", // 61
+"super", // 62
+"true", // 63
+"trait", // 64
+"type", // 65
+"unsafe", // 66
+"use", // 67
+"while", // 68

+"be", // 69
];

@ident_interner {
@@ -495,61 +533,134 @@ pub fn intern(str : &str) -> ast::ident {
/**
* All the valid words that have meaning in the Rust language.
*
-* Rust keywords are either 'temporary', 'strict' or 'reserved'. Temporary
-* keywords are contextual and may be used as identifiers anywhere. They are
-* expected to disappear from the grammar soon. Strict keywords may not
+* Rust keywords are either 'strict' or 'reserved'. Strict keywords may not
* appear as identifiers at all. Reserved keywords are not used anywhere in
* the language and may not appear as identifiers.
*/
-pub fn keyword_table() -> HashSet<~str> {
-let mut keywords = HashSet::new();
-let mut strict = strict_keyword_table();
-let mut reserved = reserved_keyword_table();
+pub mod keywords {
+use ast::ident;

-do strict.consume |word| {
-keywords.insert(word);
-}
-do reserved.consume |word| {
-keywords.insert(word);
+pub enum Keyword {
+// Strict keywords
+As,
+Break,
+Const,
+Copy,
+Do,
+Drop,
+Else,
+Enum,
+Extern,
+False,
+Fn,
+For,
+If,
+Impl,
+Let,
+__Log,
+Loop,
+Match,
+Mod,
+Mut,
+Once,
+Priv,
+Pub,
+Pure,
+Ref,
+Return,
+Static,
+Self,
+Struct,
+Super,
+True,
+Trait,
+Type,
+Unsafe,
+Use,
+While,

+// Reserved keywords
+Be,
}

-keywords
+pub impl Keyword {
+fn to_ident(&self) -> ident {
+match *self {
+As => ident { repr: 35, ctxt: 0 },
+Break => ident { repr: 36, ctxt: 0 },
+Const => ident { repr: 37, ctxt: 0 },
+Copy => ident { repr: 38, ctxt: 0 },
+Do => ident { repr: 39, ctxt: 0 },
+Drop => ident { repr: 40, ctxt: 0 },
+Else => ident { repr: 41, ctxt: 0 },
+Enum => ident { repr: 42, ctxt: 0 },
+Extern => ident { repr: 43, ctxt: 0 },
+False => ident { repr: 44, ctxt: 0 },
+Fn => ident { repr: 45, ctxt: 0 },
+For => ident { repr: 46, ctxt: 0 },
+If => ident { repr: 47, ctxt: 0 },
+Impl => ident { repr: 48, ctxt: 0 },
+Let => ident { repr: 49, ctxt: 0 },
+__Log => ident { repr: 50, ctxt: 0 },
+Loop => ident { repr: 51, ctxt: 0 },
+Match => ident { repr: 52, ctxt: 0 },
+Mod => ident { repr: 53, ctxt: 0 },
+Mut => ident { repr: 54, ctxt: 0 },
+Once => ident { repr: 55, ctxt: 0 },
+Priv => ident { repr: 56, ctxt: 0 },
+Pub => ident { repr: 57, ctxt: 0 },
+Pure => ident { repr: 58, ctxt: 0 },
+Ref => ident { repr: 59, ctxt: 0 },
+Return => ident { repr: 60, ctxt: 0 },
+Static => ident { repr: 29, ctxt: 0 },
+Self => ident { repr: 8, ctxt: 0 },
+Struct => ident { repr: 61, ctxt: 0 },
+Super => ident { repr: 62, ctxt: 0 },
+True => ident { repr: 63, ctxt: 0 },
+Trait => ident { repr: 64, ctxt: 0 },
+Type => ident { repr: 65, ctxt: 0 },
+Unsafe => ident { repr: 66, ctxt: 0 },
+Use => ident { repr: 67, ctxt: 0 },
+While => ident { repr: 68, ctxt: 0 },
+Be => ident { repr: 69, ctxt: 0 },
+}
+}
+}
}

-/// Full keywords. May not appear anywhere else.
-pub fn strict_keyword_table() -> HashSet<~str> {
-let mut words = HashSet::new();
-let keys = ~[
-~"as",
-~"break",
-~"const", ~"copy",
-~"do", ~"drop",
-~"else", ~"enum", ~"extern",
-~"false", ~"fn", ~"for",
-~"if", ~"impl",
-~"let", ~"__log", ~"loop",
-~"match", ~"mod", ~"mut",
-~"once",
-~"priv", ~"pub", ~"pure",
-~"ref", ~"return",
-~"static", ~"self", ~"struct", ~"super",
-~"true", ~"trait", ~"type",
-~"unsafe", ~"use",
-~"while"
-];
-do vec::consume(keys) |_, w| {
-words.insert(w);
+pub fn is_keyword(kw: keywords::Keyword, tok: &Token) -> bool {
+match *tok {
+token::IDENT(sid, false) => { kw.to_ident().repr == sid.repr }
+_ => { false }
}
-return words;
}

-pub fn reserved_keyword_table() -> HashSet<~str> {
-let mut words = HashSet::new();
-let keys = ~[
-~"be"
-];
-do vec::consume(keys) |_, s| {
-words.insert(s);
+pub fn is_any_keyword(tok: &Token) -> bool {
+match *tok {
+token::IDENT(sid, false) => match sid.repr {
+8 | 29 | 35 .. 69 => true,
+_ => false,
+},
+_ => false
}
}

+pub fn is_strict_keyword(tok: &Token) -> bool {
+match *tok {
+token::IDENT(sid, false) => match sid.repr {
+8 | 29 | 35 .. 68 => true,
+_ => false,
+},
+_ => false,
+}
+}

+pub fn is_reserved_keyword(tok: &Token) -> bool {
+match *tok {
+token::IDENT(sid, false) => match sid.repr {
+69 => true,
+_ => false,
+},
+_ => false,
+}
-return words;
}