syntax: Removing uses of HashMap
commit e4c3d805a4
parent fa7772893a
@@ -656,6 +656,11 @@ pub mod linear {
     fn reserve_at_least(&mut self, n: uint) {
         self.map.reserve_at_least(n)
     }
+
+    /// Consumes all of the elements in the set, emptying it out
+    fn consume(&mut self, f: &fn(T)) {
+        self.map.consume(|k, _| f(k))
+    }
 }

 #[test]
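The hunk above gives LinearSet a consume method that empties the set while handing each element to a closure. As a rough modern-Rust sketch of the same idea (the helper name consume_set and the sample values are illustrative, not part of the commit), HashSet::drain does the equivalent job:

    use std::collections::HashSet;
    use std::hash::Hash;

    // Empty the set, handing each element to a caller-supplied closure,
    // analogous to the `consume` method added to LinearSet above.
    fn consume_set<T: Eq + Hash, F: FnMut(T)>(set: &mut HashSet<T>, mut f: F) {
        for item in set.drain() {
            f(item);
        }
    }

    fn main() {
        let mut s: HashSet<String> =
            ["self", "static"].iter().map(|k| k.to_string()).collect();
        let mut collected = Vec::new();
        consume_set(&mut s, |w| collected.push(w));
        assert!(s.is_empty());
        assert_eq!(collected.len(), 2);
    }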
@@ -186,9 +186,9 @@ pub enum named_match {
 pub type earley_item = ~MatcherPos;

 pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
-            -> HashMap<ident,@named_match> {
+            -> LinearMap<ident,@named_match> {
     fn n_rec(p_s: @mut ParseSess, m: matcher, res: ~[@named_match],
-             ret_val: HashMap<ident, @named_match>) {
+             ret_val: &mut LinearMap<ident, @named_match>) {
         match m {
           codemap::spanned {node: match_tok(_), _} => (),
           codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
@@ -207,13 +207,13 @@ pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match])
            }
         }
     }
-    let ret_val = HashMap();
-    for ms.each() |m| { n_rec(p_s, *m, res, ret_val) }
+    let mut ret_val = LinearMap::new();
+    for ms.each() |m| { n_rec(p_s, *m, res, &mut ret_val) }
     return ret_val;
 }

 pub enum parse_result {
-    success(HashMap<ident, @named_match>),
+    success(LinearMap<ident, @named_match>),
     failure(codemap::span, ~str),
     error(codemap::span, ~str)
 }
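Because a LinearMap is plainly owned rather than a shared @ box, nameize now hands its helper a &mut borrow and returns the finished map by value. A minimal sketch of that shape in today's Rust, with hypothetical names (record, the sample keys):

    use std::collections::HashMap;

    // A recursive helper fills a map it receives by `&mut`; the caller owns
    // the map and returns it by value, the same shape `nameize` now has.
    fn record(names: &[&str], out: &mut HashMap<String, usize>) {
        if let Some((first, rest)) = names.split_first() {
            out.insert((*first).to_string(), rest.len());
            record(rest, out);
        }
    }

    fn main() {
        let mut ret_val = HashMap::new();
        record(&["a", "b", "c"], &mut ret_val);
        assert_eq!(ret_val.len(), 3);
        assert_eq!(ret_val["a"], 2);
    }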
@@ -223,11 +223,11 @@ pub fn parse_or_else(
     +cfg: ast::crate_cfg,
     rdr: @reader,
     ms: ~[matcher]
-) -> HashMap<ident, @named_match> {
+) -> LinearMap<ident, @named_match> {
     match parse(sess, cfg, rdr, ms) {
       success(m) => m,
-      failure(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str)),
-      error(sp, ref str) => sess.span_diagnostic.span_fatal(sp, (*str))
+      failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str),
+      error(sp, str) => sess.span_diagnostic.span_fatal(sp, str)
     }
 }

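The match arms above drop ref and bind by value because the result now carries a non-copyable LinearMap that has to be moved out of the enum. A small modern-Rust sketch of that pattern, with made-up names (ParseResult, unwrap_or_panic):

    use std::collections::HashMap;

    enum ParseResult {
        Success(HashMap<String, u32>),
        Failure(String),
    }

    // Matching on the value (not a reference) moves the map out of the enum,
    // which is what lets a non-copyable result be returned directly.
    fn unwrap_or_panic(r: ParseResult) -> HashMap<String, u32> {
        match r {
            ParseResult::Success(m) => m,
            ParseResult::Failure(msg) => panic!("parse failed: {}", msg),
        }
    }

    fn main() {
        let mut m = HashMap::new();
        m.insert("lhs".to_string(), 1);
        let out = unwrap_or_panic(ParseResult::Success(m));
        assert_eq!(out["lhs"], 1);
    }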
@@ -63,12 +63,12 @@ pub fn add_new_extension(cx: @ext_ctxt,
                                argument_gram);

     // Extract the arguments:
-    let lhses = match argument_map.get(&lhs_nm) {
+    let lhses = match *argument_map.get(&lhs_nm) {
       @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s,
       _ => cx.span_bug(sp, ~"wrong-structured lhs")
     };

-    let rhses = match argument_map.get(&rhs_nm) {
+    let rhses = match *argument_map.get(&rhs_nm) {
       @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s,
       _ => cx.span_bug(sp, ~"wrong-structured rhs")
     };
@@ -18,6 +18,7 @@ use ext::tt::macro_parser::{named_match, matched_seq, matched_nonterminal};
 use parse::token::{EOF, INTERPOLATED, IDENT, Token, nt_ident, ident_interner};
 use parse::lexer::TokenAndSpan;

+use core::hashmap::linear::LinearMap;
 use core::option;
 use core::vec;

@@ -38,7 +39,7 @@ pub struct TtReader {
     // the unzipped tree:
     cur: @mut TtFrame,
     /* for MBE-style macro transcription */
-    interpolations: std::oldmap::HashMap<ident, @named_match>,
+    interpolations: LinearMap<ident, @named_match>,
     repeat_idx: ~[uint],
     repeat_len: ~[uint],
     /* cached: */
@@ -51,7 +52,7 @@ pub struct TtReader {
  * should) be none. */
 pub fn new_tt_reader(sp_diag: @span_handler,
                      itr: @ident_interner,
-                     interp: Option<std::oldmap::HashMap<ident,@named_match>>,
+                     interp: Option<LinearMap<ident,@named_match>>,
                      +src: ~[ast::token_tree])
                   -> @mut TtReader {
     let r = @mut TtReader {
@@ -65,7 +66,7 @@ pub fn new_tt_reader(sp_diag: @span_handler,
             up: option::None
         },
         interpolations: match interp { /* just a convienience */
-            None => std::oldmap::HashMap(),
+            None => LinearMap::new(),
             Some(x) => x
         },
         repeat_idx: ~[],
@@ -123,7 +124,10 @@ fn lookup_cur_matched_by_matched(r: &mut TtReader,
 }

 fn lookup_cur_matched(r: &mut TtReader, name: ident) -> @named_match {
-    lookup_cur_matched_by_matched(r, r.interpolations.get(&name))
+    // FIXME (#3850): this looks a bit silly with an extra scope.
+    let start;
+    { start = *r.interpolations.get(&name); }
+    return lookup_cur_matched_by_matched(r, start);
 }
 enum lis {
     lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str)
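The extra scope (see FIXME #3850) copies the interpolation out so the borrow of r.interpolations ends before r is passed on by &mut. A modern-Rust sketch of the same idea, with illustrative names (Reader, resolve):

    use std::collections::HashMap;

    struct Reader {
        interpolations: HashMap<String, u32>,
        lookups: u32,
    }

    fn resolve(r: &mut Reader, v: u32) -> u32 {
        r.lookups += 1;
        v * 2
    }

    // Copy the looked-up value out first so the borrow of `r.interpolations`
    // has ended before `r` is mutably borrowed again.
    fn lookup(r: &mut Reader, name: &str) -> u32 {
        let start = *r.interpolations.get(name).expect("unknown binding");
        resolve(r, start)
    }

    fn main() {
        let mut r = Reader { interpolations: HashMap::new(), lookups: 0 };
        r.interpolations.insert("x".to_string(), 21);
        assert_eq!(lookup(&mut r, "x"), 42);
        assert_eq!(r.lookups, 1);
    }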
@@ -129,7 +129,7 @@ pub impl Parser {

     // A sanity check that the word we are asking for is a known keyword
     fn require_keyword(&self, word: &~str) {
-        if !self.keywords.contains_key(word) {
+        if !self.keywords.contains(word) {
             self.bug(fmt!("unknown keyword: %s", *word));
         }
     }
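This and the following hunks replace unit-valued maps (HashMap<K, ()> with contains_key / insert(k, ())) by sets (contains / insert(k)). A tiny modern-Rust comparison of the two styles, using sample keywords only:

    use std::collections::{HashMap, HashSet};

    fn main() {
        // Before: a map with `()` values, used only for membership tests.
        let mut keywords_as_map: HashMap<String, ()> = HashMap::new();
        keywords_as_map.insert("while".to_string(), ());
        assert!(keywords_as_map.contains_key("while"));

        // After: a set states the intent directly and drops the dummy value.
        let mut keywords_as_set: HashSet<String> = HashSet::new();
        keywords_as_set.insert("while".to_string());
        assert!(keywords_as_set.contains("while"));
    }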
@@ -153,7 +153,7 @@ pub impl Parser {
     fn is_any_keyword(&self, tok: &token::Token) -> bool {
         match *tok {
             token::IDENT(sid, false) => {
-                self.keywords.contains_key(self.id_to_str(sid))
+                self.keywords.contains(self.id_to_str(sid))
             }
             _ => false
         }
@@ -183,7 +183,7 @@ pub impl Parser {
     }

     fn is_strict_keyword(&self, word: &~str) -> bool {
-        self.strict_keywords.contains_key(word)
+        self.strict_keywords.contains(word)
     }

     fn check_strict_keywords(&self) {
@@ -203,7 +203,7 @@ pub impl Parser {
     }

     fn is_reserved_keyword(&self, word: &~str) -> bool {
-        self.reserved_keywords.contains_key(word)
+        self.reserved_keywords.contains(word)
     }

     fn check_reserved_keywords(&self) {
@@ -225,9 +225,9 @@ pub impl Parser {
                     desc: &str) {
         self.span_err(sp, fmt!("obsolete syntax: %s", kind_str));

-        if !self.obsolete_set.contains_key(&kind) {
+        if !self.obsolete_set.contains(&kind) {
             self.sess.span_diagnostic.handler().note(fmt!("%s", desc));
-            self.obsolete_set.insert(kind, ());
+            self.obsolete_set.insert(kind);
         }
     }

@@ -92,8 +92,8 @@ use opt_vec::OptVec;

 use core::either::{Either, Left, Right};
 use core::either;
+use core::hashmap::linear::LinearSet;
 use core::vec;
-use std::oldmap::HashMap;

 #[deriving(Eq)]
 enum restriction {
@@ -240,7 +240,7 @@ pub fn Parser(sess: @mut ParseSess,
         keywords: token::keyword_table(),
         strict_keywords: token::strict_keyword_table(),
         reserved_keywords: token::reserved_keyword_table(),
-        obsolete_set: HashMap(),
+        obsolete_set: @mut LinearSet::new(),
         mod_path_stack: @mut ~[],
     }
 }
@@ -259,12 +259,12 @@ pub struct Parser {
     quote_depth: @mut uint, // not (yet) related to the quasiquoter
     reader: @reader,
     interner: @token::ident_interner,
-    keywords: HashMap<~str, ()>,
-    strict_keywords: HashMap<~str, ()>,
-    reserved_keywords: HashMap<~str, ()>,
+    keywords: LinearSet<~str>,
+    strict_keywords: LinearSet<~str>,
+    reserved_keywords: LinearSet<~str>,
     /// The set of seen errors about obsolete syntax. Used to suppress
     /// extra detail when the same error is seen twice
-    obsolete_set: HashMap<ObsoleteSyntax, ()>,
+    obsolete_set: @mut LinearSet<ObsoleteSyntax>,
     /// Used to determine the path to externally loaded source files
     mod_path_stack: @mut ~[~str],

@@ -18,9 +18,9 @@ use util::interner;

 use core::cast;
 use core::char;
+use core::hashmap::linear::LinearSet;
 use core::str;
 use core::task;
-use std::oldmap::HashMap;

 #[auto_encode]
 #[auto_decode]
@@ -458,35 +458,33 @@ pub fn mk_fake_ident_interner() -> @ident_interner {
  * appear as identifiers at all. Reserved keywords are not used anywhere in
  * the language and may not appear as identifiers.
  */
-pub fn keyword_table() -> HashMap<~str, ()> {
-    let keywords = HashMap();
-    for temporary_keyword_table().each_key |&word| {
-        keywords.insert(word, ());
-    }
-    for strict_keyword_table().each_key |&word| {
-        keywords.insert(word, ());
-    }
-    for reserved_keyword_table().each_key |&word| {
-        keywords.insert(word, ());
-    }
-    keywords
+pub fn keyword_table() -> LinearSet<~str> {
+    let mut keywords = LinearSet::new();
+    let mut tmp = temporary_keyword_table();
+    let mut strict = strict_keyword_table();
+    let mut reserved = reserved_keyword_table();
+
+    do tmp.consume |word| { keywords.insert(word); }
+    do strict.consume |word| { keywords.insert(word); }
+    do reserved.consume |word| { keywords.insert(word); }
+    return keywords;
 }

 /// Keywords that may be used as identifiers
-pub fn temporary_keyword_table() -> HashMap<~str, ()> {
-    let words = HashMap();
+pub fn temporary_keyword_table() -> LinearSet<~str> {
+    let mut words = LinearSet::new();
     let keys = ~[
         ~"self", ~"static",
     ];
-    for keys.each |word| {
-        words.insert(copy *word, ());
+    do vec::consume(keys) |_, s| {
+        words.insert(s);
     }
-    words
+    return words;
 }

 /// Full keywords. May not appear anywhere else.
-pub fn strict_keyword_table() -> HashMap<~str, ()> {
-    let words = HashMap();
+pub fn strict_keyword_table() -> LinearSet<~str> {
+    let mut words = LinearSet::new();
     let keys = ~[
         ~"as",
         ~"break",
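The rewritten keyword tables move strings out of a temporary vector into the set with vec::consume instead of copying them. A modern-Rust sketch of the same move-into-set idea (sample keywords only):

    use std::collections::HashSet;

    fn main() {
        // Modern analogue of `do vec::consume(keys) |_, s| { words.insert(s); }`:
        // move each string out of the vector and into the set without copying.
        let keys = vec!["as".to_string(), "break".to_string(), "while".to_string()];
        let mut words: HashSet<String> = HashSet::new();
        for s in keys {
            words.insert(s);
        }
        assert_eq!(words.len(), 3);

        // Equivalent one-liner: collect the moved elements directly.
        let words2: HashSet<String> = vec!["be".to_string()].into_iter().collect();
        assert!(words2.contains("be"));
    }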
@@ -505,21 +503,21 @@ pub fn strict_keyword_table() -> HashMap<~str, ()> {
         ~"unsafe", ~"use",
         ~"while"
     ];
-    for keys.each |word| {
-        words.insert(copy *word, ());
+    do vec::consume(keys) |_, w| {
+        words.insert(w);
     }
-    words
+    return words;
 }

-pub fn reserved_keyword_table() -> HashMap<~str, ()> {
-    let words = HashMap();
+pub fn reserved_keyword_table() -> LinearSet<~str> {
+    let mut words = LinearSet::new();
     let keys = ~[
         ~"be"
     ];
-    for keys.each |word| {
-        words.insert(copy *word, ());
+    do vec::consume(keys) |_, s| {
+        words.insert(s);
     }
-    words
+    return words;
 }
