syntax: Divide keywords into contextual/restricted. No bad words
parent 08d0707556
commit c9e3f387f4
@@ -132,7 +132,7 @@ fn new_parser(sess: parse_sess, cfg: ast::crate_cfg, rdr: lexer::reader,
       reader: rdr,
       binop_precs: prec::binop_prec_table(),
       keywords: token::keyword_table(),
-      bad_expr_words: token::bad_expr_word_table()}
+      restricted_keywords: token::restricted_keyword_table()}
 }
 
 fn new_parser_from_source_str(sess: parse_sess, cfg: ast::crate_cfg,
@@ -43,7 +43,7 @@ fn parse_path_list_ident(p: parser) -> ast::path_list_ident {
 }
 
 fn parse_value_ident(p: parser) -> ast::ident {
-    check_bad_expr_word(p);
+    check_restricted_keywords(p);
     ret parse_ident(p);
 }
 
@@ -87,15 +87,15 @@ fn expect_keyword(p: parser, word: str) {
     }
 }
 
-fn is_bad_expr_word(p: parser, word: str) -> bool {
-    p.bad_expr_words.contains_key(word)
+fn is_restricted_keyword(p: parser, word: str) -> bool {
+    p.restricted_keywords.contains_key(word)
 }
 
-fn check_bad_expr_word(p: parser) {
+fn check_restricted_keywords(p: parser) {
     alt p.token {
       token::IDENT(_, false) {
         let w = token_to_str(p.reader, p.token);
-        if is_bad_expr_word(p, w) {
+        if is_restricted_keyword(p, w) {
             p.fatal("found `" + w + "` in expression position");
         }
       }
@@ -59,7 +59,7 @@ type parser = @{
     reader: reader,
     binop_precs: @[op_spec],
     keywords: hashmap<str, ()>,
-    bad_expr_words: hashmap<str, ()>
+    restricted_keywords: hashmap<str, ()>
 };
 
 impl parser for parser {
@@ -543,7 +543,7 @@ fn parse_path(p: parser) -> @ast::path {
 fn parse_value_path(p: parser) -> @ast::path {
     let pt = parse_path(p);
     let last_word = vec::last(pt.idents);
-    if is_bad_expr_word(p, last_word) {
+    if is_restricted_keyword(p, last_word) {
         p.fatal("found " + last_word + " in expression position");
     }
     pt
@@ -802,7 +802,7 @@ fn parse_bottom_expr(p: parser) -> pexpr {
     } else if p.token == token::MOD_SEP ||
         is_ident(p.token) && !is_keyword(p, "true") &&
         !is_keyword(p, "false") {
-        check_bad_expr_word(p);
+        check_restricted_keywords(p);
         let pth = parse_path_and_ty_param_substs(p, true);
         hi = pth.span.hi;
         ex = ast::expr_path(pth);
@@ -1370,7 +1370,7 @@ fn parse_pat(p: parser) -> @ast::pat {
             p.bump();
             subpat = parse_pat(p);
         } else {
-            if is_bad_expr_word(p, fieldname) {
+            if is_restricted_keyword(p, fieldname) {
                 p.fatal("found " + fieldname + " in binding position");
             }
             subpat = @{id: p.get_id(),
@@ -2098,7 +2098,7 @@ fn parse_item_enum(p: parser, attrs: [ast::attribute]) -> @ast::item {
     let mut variants: [ast::variant] = [];
     // Newtype syntax
     if p.token == token::EQ {
-        if is_bad_expr_word(p, id) {
+        if is_restricted_keyword(p, id) {
            p.fatal("found " + id + " in enum constructor position");
        }
        p.bump();
@@ -201,15 +201,28 @@ fn is_bar(t: token::token) -> bool {
 }
 
 #[doc = "
-All the valid words that have meaning in the Rust language. Some of these are
-nonetheless valid as identifiers becasue they are unambiguous.
+All the valid words that have meaning in the Rust language.
+
+Rust keywords are either 'contextual' or 'restricted'. Contextual
+keywords may be used as identifiers because their appearance in
+the grammar is unambiguous. Restricted keywords may not appear
+in positions that might otherwise contain _value identifiers_.
 "]
 fn keyword_table() -> hashmap<str, ()> {
     let keywords = str_hash();
-    for bad_expr_word_table().each_key {|word|
+    for contextual_keyword_table().each_key {|word|
         keywords.insert(word, ());
     }
-    let other_keywords = [
+    for restricted_keyword_table().each_key {|word|
+        keywords.insert(word, ());
+    }
+    ret keywords;
+}
+
+#[doc = "Keywords that may be used as identifiers"]
+fn contextual_keyword_table() -> hashmap<str, ()> {
+    let words = str_hash();
+    let keys = [
         "as",
         "bind",
         "else",
@@ -217,32 +230,50 @@ fn keyword_table() -> hashmap<str, ()> {
         "move",
         "of",
         "priv",
-        "self",
-        "send",
-        "static",
+        "self", "send", "static",
         "to",
         "use",
         "with"
     ];
-    for other_keywords.each {|word|
-        keywords.insert(word, ());
+    for keys.each {|word|
+        words.insert(word, ());
     }
-    ret keywords;
+    words
 }
 
 #[doc = "
-These are the words that shouldn't be allowed as value identifiers,
-because, if used at the start of a line, they will cause the line to be
-interpreted as a specific kind of statement, which would be confusing.
+Keywords that may not appear in any position that might otherwise contain a
+_value identifier_. Restricted keywords may still be used as other types of
+identifiers.
+
+Reasons:
+
+* For some (most?), if used at the start of a line, they will cause the line
+to be interpreted as a specific kind of statement, which would be confusing.
+
+* `true` or `false` as identifiers would always be shadowed by
+the boolean constants
 "]
-fn bad_expr_word_table() -> hashmap<str, ()> {
+fn restricted_keyword_table() -> hashmap<str, ()> {
     let words = str_hash();
-    let keys = ["alt", "assert", "be", "break", "check", "claim",
-                "class", "const", "cont", "copy", "crust", "do", "else",
-                "enum", "export", "fail", "false", "fn", "for", "if",
-                "iface", "impl", "import", "let", "log", "loop", "mod",
-                "mut", "native", "new", "pure", "resource", "true",
-                "ret", "trait", "type", "unchecked", "unsafe", "while"];
+    let keys = [
+        "alt",
+        "assert",
+        "be", "break",
+        "check", "claim", "class", "const", "cont", "copy", "crust",
+        "do",
+        "else", "enum", "export",
+        "fail", "false", "fn", "for",
+        "if", "iface", "impl", "import",
+        "let", "log", "loop",
+        "mod", "mut",
+        "native", "new",
+        "pure",
+        "resource", "ret",
+        "true", "trait", "type",
+        "unchecked", "unsafe",
+        "while"
+    ];
     for keys.each {|word|
         words.insert(word, ());
     }
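For readers skimming the diff, the shape of the change can be sketched outside the compiler. The sketch below is modern Rust, not the 2012-era dialect in this tree, and the names contextual_keywords, restricted_keywords, keywords, and can_be_value_ident are hypothetical stand-ins for token::contextual_keyword_table(), token::restricted_keyword_table(), and token::keyword_table(); it only illustrates the two rules the commit encodes: the full keyword set is the union of the two tables, and only restricted keywords are rejected where a value identifier is expected (check_restricted_keywords, parse_value_path, parse_pat, parse_item_enum above).

    use std::collections::HashSet;

    // Hypothetical stand-in for contextual_keyword_table(): keywords that may
    // still be used as identifiers.
    fn contextual_keywords() -> HashSet<&'static str> {
        ["as", "bind", "else", "move", "of", "priv", "self", "send", "static",
         "to", "use", "with"]
            .into_iter()
            .collect()
    }

    // Hypothetical stand-in for restricted_keyword_table(): keywords rejected
    // in value-identifier positions.
    fn restricted_keywords() -> HashSet<&'static str> {
        ["alt", "assert", "be", "break", "check", "claim", "class", "const",
         "cont", "copy", "crust", "do", "else", "enum", "export", "fail",
         "false", "fn", "for", "if", "iface", "impl", "import", "let", "log",
         "loop", "mod", "mut", "native", "new", "pure", "resource", "ret",
         "true", "trait", "type", "unchecked", "unsafe", "while"]
            .into_iter()
            .collect()
    }

    // keyword_table() in the commit is the union of the two tables.
    fn keywords() -> HashSet<&'static str> {
        contextual_keywords()
            .union(&restricted_keywords())
            .copied()
            .collect()
    }

    // Mirrors the intent of check_restricted_keywords(): a word can stand in a
    // value-identifier position unless it is restricted.
    fn can_be_value_ident(word: &str) -> bool {
        !restricted_keywords().contains(word)
    }

    fn main() {
        assert!(keywords().contains("to"));   // contextual: still a keyword...
        assert!(can_be_value_ident("to"));    // ...but allowed as an identifier
        assert!(!can_be_value_ident("fn"));   // restricted: rejected, as in parse_value_ident
        println!("keyword table holds {} words", keywords().len());
    }

The diff keeps the combined keywords table on the parser (see the new_parser hunk), presumably so checks such as is_keyword continue to see every keyword regardless of which class it falls in.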