Use an enum rather than a bool in token::Ident

Brendan Zabarauskas 2014-10-28 02:01:44 +11:00
parent fcb78d65f2
commit cd049591a2
6 changed files with 96 additions and 57 deletions
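Summary of the change: the second field of token::Ident (and of NtIdent) goes from a bare bool meaning "a :: follows this identifier with no whitespace in between" to a dedicated enum, so call sites name the intent instead of passing true/false. A rough sketch of the new shape, pieced together from the hunks below rather than quoted verbatim:

    pub enum IdentStyle {
        /// `::` follows the identifier with no whitespace in-between.
        ModName,
        Plain,
    }

    pub enum Token {
        // ... other variants unchanged ...
        Ident(ast::Ident, IdentStyle),
        // ...
    }

Call sites migrate mechanically: token::Ident(ident, false) becomes token::Ident(ident, token::Plain), token::Ident(ident, true) becomes token::Ident(ident, token::ModName), and matches on the bool become matches on the two variants.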


@@ -35,7 +35,7 @@ use syntax::parse::lexer::TokenAndSpan;
fn parse_token_list(file: &str) -> HashMap<String, Token> {
fn id() -> Token {
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, false)
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain)
}
let mut res = HashMap::new();
@@ -198,7 +198,8 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, Token>) -> TokenAndSpan {
token::LitFloat(..) => token::LitFloat(nm),
token::LitBinary(..) => token::LitBinary(nm),
token::LitBinaryRaw(..) => token::LitBinaryRaw(fix(content), count(content)),
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 }, true),
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 },
token::ModName),
token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }),
ref t => t.clone()
};


@@ -531,6 +531,7 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOpToken) -> P<ast::Expr> {
mk_token_path(cx, sp, name)
}
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
match *tok {
token::BinOp(binop) => {
@@ -575,10 +576,14 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
vec!(mk_name(cx, sp, ident.ident()), cx.expr_uint(sp, n)));
}
token::Ident(ident, b) => {
token::Ident(ident, style) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "Ident"),
vec!(mk_ident(cx, sp, ident), cx.expr_bool(sp, b)));
vec![mk_ident(cx, sp, ident),
match style {
ModName => mk_token_path(cx, sp, "ModName"),
Plain => mk_token_path(cx, sp, "Plain"),
}]);
}
token::Lifetime(ident) => {


@@ -921,12 +921,14 @@ impl<'a> StringReader<'a> {
if string == "_" {
token::Underscore
} else {
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
// FIXME: perform NFKC normalization here. (Issue #2253)
token::Ident(str_to_ident(string), is_mod_name)
if self.curr_is(':') && self.nextch_is(':') {
token::Ident(str_to_ident(string), token::ModName)
} else {
token::Ident(str_to_ident(string), token::Plain)
}
}
})
});
}
if is_dec_digit(c) {
@@ -937,8 +939,11 @@ impl<'a> StringReader<'a> {
match (c.unwrap(), self.nextch(), self.nextnextch()) {
('\x00', Some('n'), Some('a')) => {
let ast_ident = self.scan_embedded_hygienic_ident();
let is_mod_name = self.curr_is(':') && self.nextch_is(':');
return token::Ident(ast_ident, is_mod_name);
return if self.curr_is(':') && self.nextch_is(':') {
token::Ident(ast_ident, token::ModName)
} else {
token::Ident(ast_ident, token::Plain)
};
}
_ => {}
}
@@ -1056,7 +1061,7 @@ impl<'a> StringReader<'a> {
str_to_ident(lifetime_name)
});
let keyword_checking_token =
&token::Ident(keyword_checking_ident, false);
&token::Ident(keyword_checking_ident, token::Plain);
let last_bpos = self.last_pos;
if keyword_checking_token.is_keyword(token::keywords::Self) {
self.err_span_(start,
@@ -1434,7 +1439,7 @@ mod test {
assert_eq!(string_reader.next_token().tok, token::Whitespace);
let tok1 = string_reader.next_token();
let tok2 = TokenAndSpan{
tok:token::Ident(id, false),
tok:token::Ident(id, token::Plain),
sp:Span {lo:BytePos(21),hi:BytePos(23),expn_id: NO_EXPANSION}};
assert_eq!(tok1,tok2);
assert_eq!(string_reader.next_token().tok, token::Whitespace);
@@ -1443,7 +1448,7 @@ mod test {
// read another token:
let tok3 = string_reader.next_token();
let tok4 = TokenAndSpan{
tok:token::Ident(str_to_ident("main"), false),
tok:token::Ident(str_to_ident("main"), token::Plain),
sp:Span {lo:BytePos(24),hi:BytePos(28),expn_id: NO_EXPANSION}};
assert_eq!(tok3,tok4);
// the lparen is already read:
@@ -1458,39 +1463,45 @@ mod test {
}
}
// make the identifier by looking up the string in the interner
#[cfg(stage0)]
fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
token::Ident (str_to_ident(id),is_mod_name)
token::Ident(str_to_ident(id), is_mod_name)
}
// make the identifier by looking up the string in the interner
#[cfg(not(stage0))]
fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token {
token::Ident(str_to_ident(id), style)
}
#[test] fn doublecolonparsing () {
check_tokenization(setup(&mk_sh(), "a b".to_string()),
vec!(mk_ident("a",false),
token::Whitespace,
mk_ident("b",false)));
vec![mk_ident("a", token::Plain),
token::Whitespace,
mk_ident("b", token::Plain)]);
}
#[test] fn dcparsing_2 () {
check_tokenization(setup(&mk_sh(), "a::b".to_string()),
vec!(mk_ident("a",true),
token::ModSep,
mk_ident("b",false)));
vec![mk_ident("a",token::ModName),
token::ModSep,
mk_ident("b", token::Plain)]);
}
#[test] fn dcparsing_3 () {
check_tokenization(setup(&mk_sh(), "a ::b".to_string()),
vec!(mk_ident("a",false),
token::Whitespace,
token::ModSep,
mk_ident("b",false)));
vec![mk_ident("a", token::Plain),
token::Whitespace,
token::ModSep,
mk_ident("b", token::Plain)]);
}
#[test] fn dcparsing_4 () {
check_tokenization(setup(&mk_sh(), "a:: b".to_string()),
vec!(mk_ident("a",true),
token::ModSep,
token::Whitespace,
mk_ident("b",false)));
vec![mk_ident("a",token::ModName),
token::ModSep,
token::Whitespace,
mk_ident("b", token::Plain)]);
}
#[test] fn character_a() {


@@ -793,9 +793,9 @@ mod test {
let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
let tts: &[ast::TokenTree] = tts.as_slice();
match tts {
[ast::TtToken(_, token::Ident(name_macro_rules, false)),
[ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)),
ast::TtToken(_, token::Not),
ast::TtToken(_, token::Ident(name_zip, false)),
ast::TtToken(_, token::Ident(name_zip, token::Plain)),
ast::TtDelimited(_, ref macro_delimed)]
if name_macro_rules.as_str() == "macro_rules"
&& name_zip.as_str() == "zip" => {
@@ -810,7 +810,7 @@ mod test {
match (first_open, first_tts.as_slice(), first_close) {
(&ast::Delimiter { token: token::LParen, .. },
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, false))],
ast::TtToken(_, token::Ident(name, token::Plain))],
&ast::Delimiter { token: token::RParen, .. })
if name.as_str() == "a" => {},
_ => fail!("value 3: {}", **first_delimed),
@@ -819,7 +819,7 @@ mod test {
match (second_open, second_tts.as_slice(), second_close) {
(&ast::Delimiter { token: token::LParen, .. },
[ast::TtToken(_, token::Dollar),
ast::TtToken(_, token::Ident(name, false))],
ast::TtToken(_, token::Ident(name, token::Plain))],
&ast::Delimiter { token: token::RParen, .. })
if name.as_str() == "a" => {},
_ => fail!("value 4: {}", **second_delimed),
@@ -845,7 +845,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"fn\",\
false\
\"Plain\"\
]\
}\
]\
@@ -858,7 +858,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"a\",\
false\
\"Plain\"\
]\
}\
]\
@@ -881,7 +881,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"b\",\
false\
\"Plain\"\
]\
}\
]\
@@ -901,7 +901,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"int\",\
false\
\"Plain\"\
]\
}\
]\
@@ -932,7 +932,7 @@ mod test {
\"variant\":\"Ident\",\
\"fields\":[\
\"b\",\
false\
\"Plain\"\
]\
}\
]\


@@ -2067,10 +2067,10 @@ impl<'a> Parser<'a> {
},
// FIXME #13626: Should be able to stick in
// token::SELF_KEYWORD_NAME
token::Ident(id @ ast::Ident{
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
ctxt: _
} ,false) => {
token::Ident(id @ ast::Ident {
name: ast::Name(token::SELF_KEYWORD_NAME_NUM),
ctxt: _
}, token::Plain) => {
self.bump();
let path = ast_util::ident_to_path(mk_sp(lo, hi), id);
ex = ExprPath(path);
@@ -4094,14 +4094,14 @@ impl<'a> Parser<'a> {
fn is_self_ident(&mut self) -> bool {
match self.token {
token::Ident(id, false) => id.name == special_idents::self_.name,
token::Ident(id, token::Plain) => id.name == special_idents::self_.name,
_ => false
}
}
fn expect_self_ident(&mut self) -> ast::Ident {
match self.token {
token::Ident(id, false) if id.name == special_idents::self_.name => {
token::Ident(id, token::Plain) if id.name == special_idents::self_.name => {
self.bump();
id
},


@@ -98,6 +98,21 @@ pub enum BinOpToken {
Shr,
}
#[cfg(stage0)]
#[allow(non_uppercase_statics)]
pub const ModName: bool = true;
#[cfg(stage0)]
#[allow(non_uppercase_statics)]
pub const Plain: bool = false;
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
#[cfg(not(stage0))]
pub enum IdentStyle {
/// `::` follows the identifier with no whitespace in-between.
ModName,
Plain,
}
#[allow(non_camel_case_types)]
#[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash, Show)]
pub enum Token {
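A note on the #[cfg(stage0)] items above: while the tree is still built by the previous snapshot compiler, Ident keeps its bool field and ModName/Plain are defined as bool constants, so updated call sites compile under either configuration; after the next snapshot the stage0 definitions and the NOTE(stage0) attributes can be removed. For example (a hypothetical helper, not part of the commit):

    // The same body compiles whether Plain is a bool const (stage0)
    // or an IdentStyle variant (every later stage).
    fn plain_ident(id: ast::Ident) -> token::Token {
        token::Ident(id, token::Plain)
    }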
@@ -149,10 +164,10 @@ pub enum Token {
LitBinaryRaw(ast::Name, uint), /* raw binary str delimited by n hash symbols */
/* Name components */
/// An identifier contains an "is_mod_name" boolean,
/// indicating whether :: follows this token with no
/// whitespace in between.
#[cfg(stage0)]
Ident(ast::Ident, bool),
#[cfg(not(stage0))]
Ident(ast::Ident, IdentStyle),
Underscore,
Lifetime(ast::Ident),
@@ -252,10 +267,11 @@ impl Token {
/// Returns `true` if the token is a path that is not followed by a `::`
/// token.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_plain_ident(&self) -> bool {
match *self {
Ident(_, false) => true,
_ => false,
Ident(_, Plain) => true,
_ => false,
}
}
@@ -299,18 +315,20 @@ impl Token {
}
/// Returns `true` if the token is a given keyword, `kw`.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
match *self {
Ident(sid, false) => kw.to_name() == sid.name,
_ => false,
Ident(sid, Plain) => kw.to_name() == sid.name,
_ => false,
}
}
/// Returns `true` if the token is either a special identifier, or a strict
/// or reserved keyword.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_any_keyword(&self) -> bool {
match *self {
Ident(sid, false) => {
Ident(sid, Plain) => {
let n = sid.name;
n == SELF_KEYWORD_NAME
@@ -324,9 +342,10 @@ impl Token {
}
/// Returns `true` if the token may not appear as an identifier.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_strict_keyword(&self) -> bool {
match *self {
Ident(sid, false) => {
Ident(sid, Plain) => {
let n = sid.name;
n == SELF_KEYWORD_NAME
@@ -335,7 +354,7 @@ impl Token {
|| STRICT_KEYWORD_START <= n
&& n <= STRICT_KEYWORD_FINAL
},
Ident(sid, true) => {
Ident(sid, ModName) => {
let n = sid.name;
n != SELF_KEYWORD_NAME
@@ -349,9 +368,10 @@ impl Token {
/// Returns `true` if the token is a keyword that has been reserved for
/// possible future use.
#[allow(non_uppercase_statics)] // NOTE(stage0): remove this attribute after the next snapshot
pub fn is_reserved_keyword(&self) -> bool {
match *self {
Ident(sid, false) => {
Ident(sid, Plain) => {
let n = sid.name;
RESERVED_KEYWORD_START <= n
@@ -382,8 +402,10 @@ pub enum Nonterminal {
NtPat( P<ast::Pat>),
NtExpr( P<ast::Expr>),
NtTy( P<ast::Ty>),
/// See IDENT, above, for meaning of bool in NtIdent:
#[cfg(stage0)]
NtIdent(Box<ast::Ident>, bool),
#[cfg(not(stage0))]
NtIdent(Box<ast::Ident>, IdentStyle),
/// Stuff inside brackets for attributes
NtMeta( P<ast::MetaItem>),
NtPath(Box<ast::Path>),
@@ -857,6 +879,6 @@ mod test {
assert!(Gt.mtwt_eq(&Gt));
let a = str_to_ident("bac");
let a1 = mark_ident(a,92);
assert!(Ident(a,true).mtwt_eq(&Ident(a1,false)));
assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain)));
}
}