Move syntax::util::interner
-> syntax::symbol
, cleanup.
This commit is contained in:
parent
f177a00ac9
commit
d2f8fb0a0a
@ -34,8 +34,9 @@
|
||||
use syntax::ext::base::*;
|
||||
use syntax::ext::base;
|
||||
use syntax::ext::proc_macro_shim::build_block_emitter;
|
||||
use syntax::parse::token::{self, Token, gensym_ident, str_to_ident};
|
||||
use syntax::parse::token::{self, Token};
|
||||
use syntax::print::pprust;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||
|
||||
// ____________________________________________________________________________________________
|
||||
@ -124,7 +125,7 @@ fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindi
|
||||
} // produce an error or something first
|
||||
let exp = vec![exp.unwrap().to_owned()];
|
||||
debug!("RHS: {:?}", exp.clone());
|
||||
let new_id = gensym_ident("tmp");
|
||||
let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp"));
|
||||
debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
|
||||
debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
|
||||
bindings.push((new_id, TokenStream::from_tts(exp)));
|
||||
@ -179,7 +180,7 @@ fn unravel_concats(tss: Vec<TokenStream>) -> TokenStream {
|
||||
};
|
||||
|
||||
while let Some(ts) = pushes.pop() {
|
||||
output = build_fn_call(str_to_ident("concat"),
|
||||
output = build_fn_call(Ident::from_str("concat"),
|
||||
concat(concat(ts,
|
||||
from_tokens(vec![Token::Comma])),
|
||||
output));
|
||||
@ -209,18 +210,19 @@ fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec<QTT>) -> (Bindings, T
|
||||
// FIXME handle sequence repetition tokens
|
||||
QTT::QDL(qdl) => {
|
||||
debug!(" QDL: {:?} ", qdl.tts);
|
||||
let new_id = gensym_ident("qdl_tmp");
|
||||
let new_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp"));
|
||||
let mut cct_rec = convert_complex_tts(cx, qdl.tts);
|
||||
bindings.append(&mut cct_rec.0);
|
||||
bindings.push((new_id, cct_rec.1));
|
||||
|
||||
let sep = build_delim_tok(qdl.delim);
|
||||
|
||||
pushes.push(build_mod_call(vec![str_to_ident("proc_macro_tokens"),
|
||||
str_to_ident("build"),
|
||||
str_to_ident("build_delimited")],
|
||||
concat(from_tokens(vec![Token::Ident(new_id)]),
|
||||
concat(lex(","), sep))));
|
||||
pushes.push(build_mod_call(
|
||||
vec![Ident::from_str("proc_macro_tokens"),
|
||||
Ident::from_str("build"),
|
||||
Ident::from_str("build_delimited")],
|
||||
concat(from_tokens(vec![Token::Ident(new_id)]), concat(lex(","), sep)),
|
||||
));
|
||||
}
|
||||
QTT::QIdent(t) => {
|
||||
pushes.push(TokenStream::from_tts(vec![t]));
|
||||
@ -250,13 +252,13 @@ fn unravel(binds: Bindings) -> TokenStream {
|
||||
|
||||
/// Checks if the Ident is `unquote`.
|
||||
fn is_unquote(id: Ident) -> bool {
|
||||
let qq = str_to_ident("unquote");
|
||||
let qq = Ident::from_str("unquote");
|
||||
id.name == qq.name // We disregard context; unquote is _reserved_
|
||||
}
|
||||
|
||||
/// Checks if the Ident is `quote`.
|
||||
fn is_qquote(id: Ident) -> bool {
|
||||
let qq = str_to_ident("qquote");
|
||||
let qq = Ident::from_str("qquote");
|
||||
id.name == qq.name // We disregard context; qquote is _reserved_
|
||||
}
|
||||
|
||||
@ -266,7 +268,8 @@ mod int_build {
|
||||
|
||||
use syntax::ast::{self, Ident};
|
||||
use syntax::codemap::{DUMMY_SP};
|
||||
use syntax::parse::token::{self, Token, keywords, str_to_ident};
|
||||
use syntax::parse::token::{self, Token, Lit};
|
||||
use syntax::symbol::keywords;
|
||||
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||
|
||||
// ____________________________________________________________________________________________
|
||||
@ -277,19 +280,19 @@ pub fn emit_token(t: Token) -> TokenStream {
|
||||
build_paren_delimited(build_vec(build_token_tt(t))))
|
||||
}
|
||||
|
||||
pub fn emit_lit(l: token::Lit, n: Option<ast::Name>) -> TokenStream {
|
||||
pub fn emit_lit(l: Lit, n: Option<ast::Name>) -> TokenStream {
|
||||
let suf = match n {
|
||||
Some(n) => format!("Some(ast::Name({}))", n.0),
|
||||
Some(n) => format!("Some(ast::Name({}))", n.as_u32()),
|
||||
None => "None".to_string(),
|
||||
};
|
||||
|
||||
let lit = match l {
|
||||
token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()),
|
||||
token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()),
|
||||
token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()),
|
||||
token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()),
|
||||
token::Lit::Str_(n) => format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()),
|
||||
token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()),
|
||||
Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()),
|
||||
Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()),
|
||||
Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()),
|
||||
Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()),
|
||||
Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()),
|
||||
Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()),
|
||||
_ => panic!("Unsupported literal"),
|
||||
};
|
||||
|
||||
@ -388,9 +391,10 @@ pub fn build_token_tt(t: Token) -> TokenStream {
|
||||
Token::Underscore => lex("_"),
|
||||
Token::Literal(lit, sfx) => emit_lit(lit, sfx),
|
||||
// fix ident expansion information... somehow
|
||||
Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)),
|
||||
Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))",
|
||||
ident.name)),
|
||||
Token::Ident(ident) =>
|
||||
lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
|
||||
Token::Lifetime(ident) =>
|
||||
lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
|
||||
_ => panic!("Unhandled case!"),
|
||||
}
|
||||
}
|
||||
@ -408,7 +412,7 @@ pub fn as_tt(t: Token) -> TokenTree {
|
||||
|
||||
/// Takes `input` and returns `vec![input]`.
|
||||
pub fn build_vec(ts: TokenStream) -> TokenStream {
|
||||
build_mac_call(str_to_ident("vec"), ts)
|
||||
build_mac_call(Ident::from_str("vec"), ts)
|
||||
// tts.clone().to_owned()
|
||||
}
|
||||
|
||||
|
@ -13,7 +13,8 @@
|
||||
|
||||
use syntax::ast::Ident;
|
||||
use syntax::codemap::DUMMY_SP;
|
||||
use syntax::parse::token::{self, Token, keywords, str_to_ident};
|
||||
use syntax::parse::token::{self, Token};
|
||||
use syntax::symbol::keywords;
|
||||
use syntax::tokenstream::{self, TokenTree, TokenStream};
|
||||
use std::rc::Rc;
|
||||
|
||||
@ -43,13 +44,13 @@ pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {
|
||||
|
||||
/// Convert a `&str` into a Token.
|
||||
pub fn str_to_token_ident(s: &str) -> Token {
|
||||
Token::Ident(str_to_ident(s))
|
||||
Token::Ident(Ident::from_str(s))
|
||||
}
|
||||
|
||||
/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
|
||||
/// corresponds to it.
|
||||
pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
|
||||
Token::Ident(str_to_ident(&kw.name().as_str()[..]))
|
||||
Token::Ident(Ident::from_str(&kw.name().as_str()[..]))
|
||||
}
|
||||
|
||||
// ____________________________________________________________________________________________
|
||||
|
@ -53,8 +53,8 @@
|
||||
use syntax::errors;
|
||||
use syntax::ptr::P;
|
||||
use syntax::codemap::{respan, Spanned};
|
||||
use syntax::parse::token;
|
||||
use syntax::std_inject;
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax::visit::{self, Visitor};
|
||||
use syntax_pos::Span;
|
||||
|
||||
@ -149,7 +149,7 @@ fn diagnostic(&self) -> &errors::Handler {
|
||||
}
|
||||
|
||||
fn str_to_ident(&self, s: &'static str) -> Name {
|
||||
token::gensym(s)
|
||||
Symbol::gensym(s)
|
||||
}
|
||||
|
||||
fn with_parent_def<T, F>(&mut self, parent_id: NodeId, f: F) -> T
|
||||
@ -400,8 +400,8 @@ fn lower_ty_param(&mut self, tp: &TyParam) -> hir::TyParam {
|
||||
// Don't expose `Self` (recovered "keyword used as ident" parse error).
|
||||
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
|
||||
// Instead, use gensym("Self") to create a distinct name that looks the same.
|
||||
if name == token::keywords::SelfType.name() {
|
||||
name = token::gensym("Self");
|
||||
if name == keywords::SelfType.name() {
|
||||
name = Symbol::gensym("Self");
|
||||
}
|
||||
|
||||
hir::TyParam {
|
||||
@ -540,7 +540,7 @@ fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::Stru
|
||||
hir::StructField {
|
||||
span: f.span,
|
||||
id: f.id,
|
||||
name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())),
|
||||
name: f.ident.map(|ident| ident.name).unwrap_or(Symbol::intern(&index.to_string())),
|
||||
vis: self.lower_visibility(&f.vis),
|
||||
ty: self.lower_ty(&f.ty),
|
||||
attrs: self.lower_attrs(&f.attrs),
|
||||
@ -1189,7 +1189,7 @@ fn make_struct(this: &mut LoweringContext,
|
||||
e.span,
|
||||
hir::PopUnstableBlock,
|
||||
ThinVec::new());
|
||||
this.field(token::intern(s), signal_block, ast_expr.span)
|
||||
this.field(Symbol::intern(s), signal_block, ast_expr.span)
|
||||
}).collect();
|
||||
let attrs = ast_expr.attrs.clone();
|
||||
|
||||
@ -1953,9 +1953,9 @@ fn path_all(&mut self,
|
||||
fn std_path_components(&mut self, components: &[&str]) -> Vec<Name> {
|
||||
let mut v = Vec::new();
|
||||
if let Some(s) = self.crate_root {
|
||||
v.push(token::intern(s));
|
||||
v.push(Symbol::intern(s));
|
||||
}
|
||||
v.extend(components.iter().map(|s| token::intern(s)));
|
||||
v.extend(components.iter().map(|s| Symbol::intern(s)));
|
||||
return v;
|
||||
}
|
||||
|
||||
|
@ -19,7 +19,7 @@
|
||||
use syntax::ast::*;
|
||||
use syntax::ext::hygiene::Mark;
|
||||
use syntax::visit;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
|
||||
/// Creates def ids for nodes in the HIR.
|
||||
pub struct DefCollector<'a> {
|
||||
@ -169,7 +169,7 @@ fn visit_item(&mut self, i: &Item) {
|
||||
this.with_parent(variant_def_index, |this| {
|
||||
for (index, field) in v.node.data.fields().iter().enumerate() {
|
||||
let name = field.ident.map(|ident| ident.name)
|
||||
.unwrap_or_else(|| token::intern(&index.to_string()));
|
||||
.unwrap_or_else(|| Symbol::intern(&index.to_string()));
|
||||
this.create_def(field.id, DefPathData::Field(name.as_str()));
|
||||
}
|
||||
|
||||
@ -188,7 +188,7 @@ fn visit_item(&mut self, i: &Item) {
|
||||
|
||||
for (index, field) in struct_def.fields().iter().enumerate() {
|
||||
let name = field.ident.map(|ident| ident.name.as_str())
|
||||
.unwrap_or(token::intern(&index.to_string()).as_str());
|
||||
.unwrap_or(Symbol::intern(&index.to_string()).as_str());
|
||||
this.create_def(field.id, DefPathData::Field(name));
|
||||
}
|
||||
}
|
||||
|
@ -14,7 +14,7 @@
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::{self, InternedString};
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use ty::TyCtxt;
|
||||
use util::nodemap::NodeMap;
|
||||
|
||||
@ -328,7 +328,7 @@ pub fn get_opt_name(&self) -> Option<ast::Name> {
|
||||
LifetimeDef(ref name) |
|
||||
EnumVariant(ref name) |
|
||||
Binding(ref name) |
|
||||
Field(ref name) => Some(token::intern(name)),
|
||||
Field(ref name) => Some(Symbol::intern(name)),
|
||||
|
||||
Impl |
|
||||
CrateRoot |
|
||||
|
@ -40,8 +40,8 @@
|
||||
use syntax::abi::Abi;
|
||||
use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect};
|
||||
use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
|
||||
use syntax::parse::token::{keywords, InternedString};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{keywords, InternedString};
|
||||
use syntax::tokenstream::TokenTree;
|
||||
use syntax::util::ThinVec;
|
||||
|
||||
|
@ -13,13 +13,14 @@
|
||||
use syntax::abi::Abi;
|
||||
use syntax::ast;
|
||||
use syntax::codemap::{CodeMap, Spanned};
|
||||
use syntax::parse::token::{self, keywords, BinOpToken};
|
||||
use syntax::parse::token::{self, BinOpToken};
|
||||
use syntax::parse::lexer::comments;
|
||||
use syntax::print::pp::{self, break_offset, word, space, hardbreak};
|
||||
use syntax::print::pp::{Breaks, eof};
|
||||
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
|
||||
use syntax::print::pprust::{self as ast_pp, PrintState};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax_pos::{self, BytePos};
|
||||
use errors;
|
||||
|
||||
|
@ -91,8 +91,8 @@
|
||||
use std::char::from_u32;
|
||||
use std::fmt;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::{self, Pos, Span};
|
||||
use errors::DiagnosticBuilder;
|
||||
|
||||
@ -1219,7 +1219,7 @@ fn pick_lifetime(&self,
|
||||
names.push(lt_name);
|
||||
}
|
||||
names.sort();
|
||||
let name = token::intern(&names[0]);
|
||||
let name = Symbol::intern(&names[0]);
|
||||
return (name_to_dummy_lifetime(name), Kept);
|
||||
}
|
||||
return (self.life_giver.give_lifetime(), Fresh);
|
||||
@ -1931,7 +1931,7 @@ fn give_lifetime(&self) -> hir::Lifetime {
|
||||
let mut s = String::from("'");
|
||||
s.push_str(&num_to_string(self.counter.get()));
|
||||
if !self.taken.contains(&s) {
|
||||
lifetime = name_to_dummy_lifetime(token::intern(&s[..]));
|
||||
lifetime = name_to_dummy_lifetime(Symbol::intern(&s));
|
||||
self.generated.borrow_mut().push(lifetime);
|
||||
break;
|
||||
}
|
||||
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax::ast;
|
||||
use std::rc::Rc;
|
||||
use hir::def_id::DefId;
|
||||
|
@ -39,7 +39,7 @@
|
||||
use syntax::attr;
|
||||
use syntax::ext::base::SyntaxExtension;
|
||||
use syntax::ptr::P;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax_pos::Span;
|
||||
use rustc_back::target::Target;
|
||||
use hir;
|
||||
|
@ -30,7 +30,7 @@
|
||||
use util::nodemap::FxHashMap;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::symbol::InternedString;
|
||||
use hir::itemlikevisit::ItemLikeVisitor;
|
||||
use hir;
|
||||
|
||||
|
@ -123,8 +123,8 @@
|
||||
use std::io;
|
||||
use std::rc::Rc;
|
||||
use syntax::ast::{self, NodeId};
|
||||
use syntax::parse::token::keywords;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax_pos::Span;
|
||||
|
||||
use hir::Expr;
|
||||
|
@ -27,7 +27,7 @@
|
||||
use ty;
|
||||
use std::mem::replace;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::keywords;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax_pos::Span;
|
||||
use util::nodemap::NodeMap;
|
||||
|
||||
|
@ -21,7 +21,7 @@
|
||||
use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE};
|
||||
use ty::{self, TyCtxt, AdtKind};
|
||||
use middle::privacy::AccessLevels;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
use syntax::ast;
|
||||
use syntax::ast::{NodeId, Attribute};
|
||||
|
@ -16,7 +16,7 @@
|
||||
|
||||
use rustc_back::PanicStrategy;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax_pos::Span;
|
||||
use hir::intravisit::Visitor;
|
||||
use hir::intravisit;
|
||||
|
@ -25,8 +25,8 @@
|
||||
use middle::cstore;
|
||||
|
||||
use syntax::ast::{self, IntTy, UintTy};
|
||||
use syntax::parse::{self, token};
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::parse;
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use syntax::feature_gate::UnstableFeatures;
|
||||
|
||||
use errors::{ColorConfig, FatalError, Handler};
|
||||
@ -927,7 +927,7 @@ pub fn default_lib_output() -> CrateType {
|
||||
}
|
||||
|
||||
pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
|
||||
use syntax::parse::token::intern_and_get_ident as intern;
|
||||
use syntax::symbol::intern_and_get_ident as intern;
|
||||
|
||||
let end = &sess.target.target.target_endian;
|
||||
let arch = &sess.target.target.arch;
|
||||
@ -947,33 +947,33 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig {
|
||||
|
||||
let mut ret = HashSet::new();
|
||||
// Target bindings.
|
||||
ret.insert((token::intern("target_os"), Some(intern(os))));
|
||||
ret.insert((token::intern("target_family"), Some(fam.clone())));
|
||||
ret.insert((token::intern("target_arch"), Some(intern(arch))));
|
||||
ret.insert((token::intern("target_endian"), Some(intern(end))));
|
||||
ret.insert((token::intern("target_pointer_width"), Some(intern(wordsz))));
|
||||
ret.insert((token::intern("target_env"), Some(intern(env))));
|
||||
ret.insert((token::intern("target_vendor"), Some(intern(vendor))));
|
||||
ret.insert((Symbol::intern("target_os"), Some(intern(os))));
|
||||
ret.insert((Symbol::intern("target_family"), Some(fam.clone())));
|
||||
ret.insert((Symbol::intern("target_arch"), Some(intern(arch))));
|
||||
ret.insert((Symbol::intern("target_endian"), Some(intern(end))));
|
||||
ret.insert((Symbol::intern("target_pointer_width"), Some(intern(wordsz))));
|
||||
ret.insert((Symbol::intern("target_env"), Some(intern(env))));
|
||||
ret.insert((Symbol::intern("target_vendor"), Some(intern(vendor))));
|
||||
if &fam == "windows" || &fam == "unix" {
|
||||
ret.insert((token::intern(&fam), None));
|
||||
ret.insert((Symbol::intern(&fam), None));
|
||||
}
|
||||
if sess.target.target.options.has_elf_tls {
|
||||
ret.insert((token::intern("target_thread_local"), None));
|
||||
ret.insert((Symbol::intern("target_thread_local"), None));
|
||||
}
|
||||
for &i in &[8, 16, 32, 64, 128] {
|
||||
if i <= max_atomic_width {
|
||||
let s = i.to_string();
|
||||
ret.insert((token::intern("target_has_atomic"), Some(intern(&s))));
|
||||
ret.insert((Symbol::intern("target_has_atomic"), Some(intern(&s))));
|
||||
if &s == wordsz {
|
||||
ret.insert((token::intern("target_has_atomic"), Some(intern("ptr"))));
|
||||
ret.insert((Symbol::intern("target_has_atomic"), Some(intern("ptr"))));
|
||||
}
|
||||
}
|
||||
}
|
||||
if sess.opts.debug_assertions {
|
||||
ret.insert((token::intern("debug_assertions"), None));
|
||||
ret.insert((Symbol::intern("debug_assertions"), None));
|
||||
}
|
||||
if sess.opts.crate_types.contains(&CrateTypeProcMacro) {
|
||||
ret.insert((token::intern("proc_macro"), None));
|
||||
ret.insert((Symbol::intern("proc_macro"), None));
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
@ -986,7 +986,7 @@ pub fn build_configuration(sess: &Session,
|
||||
let default_cfg = default_configuration(sess);
|
||||
// If the user wants a test runner, then add the test cfg
|
||||
if sess.opts.test {
|
||||
user_cfg.insert((token::intern("test"), None));
|
||||
user_cfg.insert((Symbol::intern("test"), None));
|
||||
}
|
||||
user_cfg.extend(default_cfg.iter().cloned());
|
||||
user_cfg
|
||||
|
@ -28,7 +28,7 @@
|
||||
use syntax::feature_gate;
|
||||
use syntax::parse;
|
||||
use syntax::parse::ParseSess;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use syntax::{ast, codemap};
|
||||
use syntax::feature_gate::AttributeType;
|
||||
use syntax_pos::{Span, MultiSpan};
|
||||
@ -89,7 +89,7 @@ pub struct Session {
|
||||
// forms a unique global identifier for the crate. It is used to allow
|
||||
// multiple crates with the same name to coexist. See the
|
||||
// trans::back::symbol_names module for more information.
|
||||
pub crate_disambiguator: RefCell<token::InternedString>,
|
||||
pub crate_disambiguator: RefCell<InternedString>,
|
||||
pub features: RefCell<feature_gate::Features>,
|
||||
|
||||
/// The maximum recursion limit for potentially infinitely recursive
|
||||
@ -129,7 +129,7 @@ pub struct PerfStats {
|
||||
}
|
||||
|
||||
impl Session {
|
||||
pub fn local_crate_disambiguator(&self) -> token::InternedString {
|
||||
pub fn local_crate_disambiguator(&self) -> InternedString {
|
||||
self.crate_disambiguator.borrow().clone()
|
||||
}
|
||||
pub fn struct_span_warn<'a, S: Into<MultiSpan>>(&'a self,
|
||||
@ -610,7 +610,7 @@ pub fn build_session_(sopts: config::Options,
|
||||
plugin_attributes: RefCell::new(Vec::new()),
|
||||
crate_types: RefCell::new(Vec::new()),
|
||||
dependency_formats: RefCell::new(FxHashMap()),
|
||||
crate_disambiguator: RefCell::new(token::intern("").as_str()),
|
||||
crate_disambiguator: RefCell::new(Symbol::intern("").as_str()),
|
||||
features: RefCell::new(feature_gate::Features::new()),
|
||||
recursion_limit: Cell::new(64),
|
||||
next_node_id: Cell::new(NodeId::new(1)),
|
||||
|
@ -26,8 +26,8 @@
|
||||
use hir::def_id::DefId;
|
||||
use infer::InferOk;
|
||||
use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
|
||||
use syntax::parse::token;
|
||||
use syntax::ast;
|
||||
use syntax::symbol::Symbol;
|
||||
use ty::subst::Subst;
|
||||
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
|
||||
use ty::fold::{TypeFoldable, TypeFolder};
|
||||
@ -1245,7 +1245,7 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>(
|
||||
let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here
|
||||
projection_ty: ty::ProjectionTy {
|
||||
trait_ref: trait_ref,
|
||||
item_name: token::intern(FN_OUTPUT_NAME),
|
||||
item_name: Symbol::intern(FN_OUTPUT_NAME),
|
||||
},
|
||||
ty: ret_type
|
||||
});
|
||||
|
@ -49,7 +49,7 @@
|
||||
use std::iter;
|
||||
use syntax::ast::{self, Name, NodeId};
|
||||
use syntax::attr;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::symbol::{InternedString, intern_and_get_ident, keywords};
|
||||
|
||||
use hir;
|
||||
|
||||
@ -561,7 +561,7 @@ pub struct GlobalCtxt<'tcx> {
|
||||
|
||||
/// The definite name of the current crate after taking into account
|
||||
/// attributes, commandline parameters, etc.
|
||||
pub crate_name: token::InternedString,
|
||||
pub crate_name: InternedString,
|
||||
|
||||
/// Data layout specification for the current target.
|
||||
pub data_layout: TargetDataLayout,
|
||||
@ -574,7 +574,7 @@ pub struct GlobalCtxt<'tcx> {
|
||||
|
||||
/// Map from function to the `#[derive]` mode that it's defining. Only used
|
||||
/// by `proc-macro` crates.
|
||||
pub derive_macros: RefCell<NodeMap<token::InternedString>>,
|
||||
pub derive_macros: RefCell<NodeMap<InternedString>>,
|
||||
}
|
||||
|
||||
impl<'tcx> GlobalCtxt<'tcx> {
|
||||
@ -588,7 +588,7 @@ pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
}
|
||||
|
||||
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
pub fn crate_name(self, cnum: CrateNum) -> token::InternedString {
|
||||
pub fn crate_name(self, cnum: CrateNum) -> InternedString {
|
||||
if cnum == LOCAL_CRATE {
|
||||
self.crate_name.clone()
|
||||
} else {
|
||||
@ -596,7 +596,7 @@ pub fn crate_name(self, cnum: CrateNum) -> token::InternedString {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString {
|
||||
pub fn original_crate_name(self, cnum: CrateNum) -> InternedString {
|
||||
if cnum == LOCAL_CRATE {
|
||||
self.crate_name.clone()
|
||||
} else {
|
||||
@ -604,7 +604,7 @@ pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn crate_disambiguator(self, cnum: CrateNum) -> token::InternedString {
|
||||
pub fn crate_disambiguator(self, cnum: CrateNum) -> InternedString {
|
||||
if cnum == LOCAL_CRATE {
|
||||
self.sess.local_crate_disambiguator()
|
||||
} else {
|
||||
@ -835,7 +835,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
|
||||
custom_coerce_unsized_kinds: RefCell::new(DefIdMap()),
|
||||
cast_kinds: RefCell::new(NodeMap()),
|
||||
fragment_infos: RefCell::new(DefIdMap()),
|
||||
crate_name: token::intern_and_get_ident(crate_name),
|
||||
crate_name: intern_and_get_ident(crate_name),
|
||||
data_layout: data_layout,
|
||||
layout_cache: RefCell::new(FxHashMap()),
|
||||
layout_depth: Cell::new(0),
|
||||
|
@ -12,7 +12,7 @@
|
||||
use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
|
||||
use ty::{self, Ty, TyCtxt};
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
|
||||
use std::cell::Cell;
|
||||
|
||||
@ -136,7 +136,7 @@ pub fn try_push_visible_item_path<T>(self, buffer: &mut T, external_def_id: DefI
|
||||
|
||||
cur_path.push(self.sess.cstore.def_key(cur_def)
|
||||
.disambiguated_data.data.get_opt_name().unwrap_or_else(||
|
||||
token::intern("<unnamed>")));
|
||||
Symbol::intern("<unnamed>")));
|
||||
match visible_parent_map.get(&cur_def) {
|
||||
Some(&def) => cur_def = def,
|
||||
None => return false,
|
||||
|
@ -44,7 +44,7 @@
|
||||
use std::mem;
|
||||
use syntax::ast::{self, Name, NodeId};
|
||||
use syntax::attr;
|
||||
use syntax::parse::token::{self, InternedString};
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use syntax_pos::{DUMMY_SP, Span};
|
||||
|
||||
use rustc_const_math::ConstInt;
|
||||
@ -2344,7 +2344,7 @@ pub fn item_name(self, id: DefId) -> ast::Name {
|
||||
if let Some(id) = self.map.as_local_node_id(id) {
|
||||
self.map.name(id)
|
||||
} else if id.index == CRATE_DEF_INDEX {
|
||||
token::intern(&self.sess.cstore.original_crate_name(id.krate))
|
||||
Symbol::intern(&self.sess.cstore.original_crate_name(id.krate))
|
||||
} else {
|
||||
let def_key = self.sess.cstore.def_key(id);
|
||||
// The name of a StructCtor is that of its struct parent.
|
||||
|
@ -23,7 +23,7 @@
|
||||
use std::ops;
|
||||
use syntax::abi;
|
||||
use syntax::ast::{self, Name};
|
||||
use syntax::parse::token::{keywords, InternedString};
|
||||
use syntax::symbol::{keywords, InternedString};
|
||||
|
||||
use serialize;
|
||||
|
||||
|
@ -25,8 +25,8 @@
|
||||
use std::usize;
|
||||
|
||||
use syntax::abi::Abi;
|
||||
use syntax::parse::token;
|
||||
use syntax::ast::CRATE_NODE_ID;
|
||||
use syntax::symbol::Symbol;
|
||||
use hir;
|
||||
|
||||
pub fn verbose() -> bool {
|
||||
@ -284,7 +284,7 @@ fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter,
|
||||
ty::BrAnon(_) |
|
||||
ty::BrFresh(_) |
|
||||
ty::BrEnv => {
|
||||
let name = token::intern("'r");
|
||||
let name = Symbol::intern("'r");
|
||||
let _ = write!(f, "{}", name);
|
||||
ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID),
|
||||
name,
|
||||
|
@ -53,7 +53,8 @@
|
||||
use syntax::{ast, diagnostics, visit};
|
||||
use syntax::attr;
|
||||
use syntax::ext::base::ExtCtxt;
|
||||
use syntax::parse::{self, PResult, token};
|
||||
use syntax::parse::{self, PResult};
|
||||
use syntax::symbol::{self, Symbol};
|
||||
use syntax::util::node_count::NodeCounter;
|
||||
use syntax;
|
||||
use syntax_ext;
|
||||
@ -561,7 +562,7 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session,
|
||||
|
||||
*sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs);
|
||||
*sess.crate_disambiguator.borrow_mut() =
|
||||
token::intern(&compute_crate_disambiguator(sess)).as_str();
|
||||
Symbol::intern(&compute_crate_disambiguator(sess)).as_str();
|
||||
|
||||
time(time_passes, "recursion limit", || {
|
||||
middle::recursion_limit::update_recursion_limit(sess, &krate);
|
||||
@ -1360,6 +1361,6 @@ pub fn build_output_filenames(input: &Input,
|
||||
pub fn reset_thread_local_state() {
|
||||
// These may be left in an incoherent state after a previous compile.
|
||||
syntax::ext::hygiene::reset_hygiene_data();
|
||||
// `clear_ident_interner` can be used to free memory, but it does not restore the initial state.
|
||||
token::reset_ident_interner();
|
||||
// `clear_interner` can be used to free memory, but it does not restore the initial state.
|
||||
symbol::reset_interner();
|
||||
}
|
||||
|
@ -450,15 +450,15 @@ fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn {
|
||||
impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
|
||||
fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> {
|
||||
match node {
|
||||
pprust::NodeIdent(&ast::Ident { name: ast::Name(nm), ctxt }) => {
|
||||
pprust::NodeIdent(&ast::Ident { name, ctxt }) => {
|
||||
pp::space(&mut s.s)?;
|
||||
// FIXME #16420: this doesn't display the connections
|
||||
// between syntax contexts
|
||||
s.synth_comment(format!("{}{:?}", nm, ctxt))
|
||||
s.synth_comment(format!("{}{:?}", name.as_u32(), ctxt))
|
||||
}
|
||||
pprust::NodeName(&ast::Name(nm)) => {
|
||||
pprust::NodeName(&name) => {
|
||||
pp::space(&mut s.s)?;
|
||||
s.synth_comment(nm.to_string())
|
||||
s.synth_comment(name.as_u32().to_string())
|
||||
}
|
||||
_ => Ok(()),
|
||||
}
|
||||
|
@ -13,7 +13,7 @@
|
||||
use rustc::session::Session;
|
||||
use rustc_trans::back::write::create_target_machine;
|
||||
use syntax::feature_gate::UnstableFeatures;
|
||||
use syntax::parse::token::{self, intern_and_get_ident as intern};
|
||||
use syntax::symbol::{Symbol, intern_and_get_ident as intern};
|
||||
use libc::c_char;
|
||||
|
||||
// WARNING: the features must be known to LLVM or the feature
|
||||
@ -40,7 +40,7 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) {
|
||||
_ => &[],
|
||||
};
|
||||
|
||||
let tf = token::intern("target_feature");
|
||||
let tf = Symbol::intern("target_feature");
|
||||
for feat in whitelist {
|
||||
assert_eq!(feat.chars().last(), Some('\0'));
|
||||
if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } {
|
||||
|
@ -18,6 +18,7 @@
|
||||
use syntax::ast::{self, Name, NodeId};
|
||||
use syntax::attr;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
|
||||
use syntax::tokenstream;
|
||||
use rustc::hir;
|
||||
@ -169,8 +170,8 @@ enum SawAbiComponent<'a> {
|
||||
|
||||
// FIXME (#14132): should we include (some function of)
|
||||
// ident.ctxt as well?
|
||||
SawIdent(token::InternedString),
|
||||
SawStructDef(token::InternedString),
|
||||
SawIdent(InternedString),
|
||||
SawStructDef(InternedString),
|
||||
|
||||
SawLifetime,
|
||||
SawLifetimeDef(usize),
|
||||
@ -232,11 +233,11 @@ enum SawAbiComponent<'a> {
|
||||
#[derive(Hash)]
|
||||
enum SawExprComponent<'a> {
|
||||
|
||||
SawExprLoop(Option<token::InternedString>),
|
||||
SawExprField(token::InternedString),
|
||||
SawExprLoop(Option<InternedString>),
|
||||
SawExprField(InternedString),
|
||||
SawExprTupField(usize),
|
||||
SawExprBreak(Option<token::InternedString>),
|
||||
SawExprAgain(Option<token::InternedString>),
|
||||
SawExprBreak(Option<InternedString>),
|
||||
SawExprAgain(Option<InternedString>),
|
||||
|
||||
SawExprBox,
|
||||
SawExprArray,
|
||||
|
@ -48,7 +48,7 @@
|
||||
use rustc::hir::itemlikevisit::ItemLikeVisitor;
|
||||
use syntax::ast::{self, Attribute, NestedMetaItem};
|
||||
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
use rustc::ty::TyCtxt;
|
||||
use ich::Fingerprint;
|
||||
@ -286,7 +286,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool {
|
||||
|
||||
fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name {
|
||||
if let Some(value) = item.value_str() {
|
||||
token::intern(&value)
|
||||
Symbol::intern(&value)
|
||||
} else {
|
||||
let msg = if let Some(name) = item.name() {
|
||||
format!("associated value expected for `{}`", name)
|
||||
|
@ -20,7 +20,7 @@
|
||||
use syntax::ast;
|
||||
use syntax::attr;
|
||||
use syntax::feature_gate::{BUILTIN_ATTRIBUTES, AttributeType};
|
||||
use syntax::parse::token::keywords;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax::ptr::P;
|
||||
use syntax_pos::Span;
|
||||
|
||||
@ -48,7 +48,7 @@ fn check_unused_mut_pat(&self, cx: &LateContext, pats: &[P<hir::Pat>]) {
|
||||
let name = path1.node;
|
||||
if let hir::BindByValue(hir::MutMutable) = mode {
|
||||
if !name.as_str().starts_with("_") {
|
||||
match mutables.entry(name.0 as usize) {
|
||||
match mutables.entry(name) {
|
||||
Vacant(entry) => {
|
||||
entry.insert(vec![id]);
|
||||
}
|
||||
|
@ -37,7 +37,7 @@
|
||||
use syntax::attr;
|
||||
use syntax::ext::base::SyntaxExtension;
|
||||
use syntax::feature_gate::{self, GateIssue};
|
||||
use syntax::parse::token::{self, InternedString};
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
use log;
|
||||
|
||||
@ -582,11 +582,11 @@ fn register_custom_derive(&mut self,
|
||||
trait_name: &str,
|
||||
expand: fn(TokenStream) -> TokenStream,
|
||||
attributes: &[&'static str]) {
|
||||
let attrs = attributes.iter().cloned().map(token::intern).collect();
|
||||
let attrs = attributes.iter().cloned().map(Symbol::intern).collect();
|
||||
let derive = SyntaxExtension::CustomDerive(
|
||||
Box::new(CustomDerive::new(expand, attrs))
|
||||
);
|
||||
self.0.push((token::intern(trait_name), Rc::new(derive)));
|
||||
self.0.push((Symbol::intern(trait_name), Rc::new(derive)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -31,7 +31,8 @@
|
||||
use std::path::PathBuf;
|
||||
use syntax::ast;
|
||||
use syntax::attr;
|
||||
use syntax::parse::{token, new_parser_from_source_str};
|
||||
use syntax::parse::new_parser_from_source_str;
|
||||
use syntax::symbol::{InternedString, intern_and_get_ident};
|
||||
use syntax_pos::mk_sp;
|
||||
use rustc::hir::svh::Svh;
|
||||
use rustc_back::target::Target;
|
||||
@ -262,14 +263,14 @@ fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy {
|
||||
self.get_crate_data(cnum).panic_strategy()
|
||||
}
|
||||
|
||||
fn crate_name(&self, cnum: CrateNum) -> token::InternedString
|
||||
fn crate_name(&self, cnum: CrateNum) -> InternedString
|
||||
{
|
||||
token::intern_and_get_ident(&self.get_crate_data(cnum).name[..])
|
||||
intern_and_get_ident(&self.get_crate_data(cnum).name[..])
|
||||
}
|
||||
|
||||
fn original_crate_name(&self, cnum: CrateNum) -> token::InternedString
|
||||
fn original_crate_name(&self, cnum: CrateNum) -> InternedString
|
||||
{
|
||||
token::intern_and_get_ident(&self.get_crate_data(cnum).name())
|
||||
intern_and_get_ident(&self.get_crate_data(cnum).name())
|
||||
}
|
||||
|
||||
fn extern_crate(&self, cnum: CrateNum) -> Option<ExternCrate>
|
||||
@ -282,9 +283,9 @@ fn crate_hash(&self, cnum: CrateNum) -> Svh
|
||||
self.get_crate_hash(cnum)
|
||||
}
|
||||
|
||||
fn crate_disambiguator(&self, cnum: CrateNum) -> token::InternedString
|
||||
fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString
|
||||
{
|
||||
token::intern_and_get_ident(&self.get_crate_data(cnum).disambiguator())
|
||||
intern_and_get_ident(&self.get_crate_data(cnum).disambiguator())
|
||||
}
|
||||
|
||||
fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option<DefId>
|
||||
|
@ -34,7 +34,7 @@
|
||||
use std::u32;
|
||||
use syntax::ast::{self, CRATE_NODE_ID};
|
||||
use syntax::attr;
|
||||
use syntax;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos;
|
||||
|
||||
use rustc::hir::{self, PatKind};
|
||||
@ -600,7 +600,7 @@ fn encode_fn_arg_names(&mut self, decl: &hir::FnDecl) -> LazySeq<ast::Name> {
|
||||
if let PatKind::Binding(_, ref path1, _) = arg.pat.node {
|
||||
path1.node
|
||||
} else {
|
||||
syntax::parse::token::intern("")
|
||||
Symbol::intern("")
|
||||
}
|
||||
}))
|
||||
}
|
||||
@ -1119,7 +1119,7 @@ fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<(CrateNum, Rc<cstore::CrateM
|
||||
let deps = get_ordered_deps(self.cstore);
|
||||
self.lazy_seq(deps.iter().map(|&(_, ref dep)| {
|
||||
CrateDep {
|
||||
name: syntax::parse::token::intern(dep.name()),
|
||||
name: Symbol::intern(dep.name()),
|
||||
hash: dep.hash(),
|
||||
kind: dep.dep_kind.get(),
|
||||
}
|
||||
|
@ -18,7 +18,7 @@
|
||||
use rustc::hir;
|
||||
use syntax::abi::Abi;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::keywords;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax_pos::Span;
|
||||
|
||||
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
|
||||
|
@ -29,7 +29,7 @@
|
||||
use rustc::infer::InferCtxt;
|
||||
use rustc::ty::subst::Subst;
|
||||
use rustc::ty::{self, Ty, TyCtxt};
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use rustc::hir;
|
||||
use rustc_const_math::{ConstInt, ConstUsize};
|
||||
|
||||
@ -121,7 +121,7 @@ pub fn unit_ty(&mut self) -> Ty<'tcx> {
|
||||
self.tcx.mk_nil()
|
||||
}
|
||||
|
||||
pub fn str_literal(&mut self, value: token::InternedString) -> Literal<'tcx> {
|
||||
pub fn str_literal(&mut self, value: InternedString) -> Literal<'tcx> {
|
||||
Literal::Value { value: ConstVal::Str(value) }
|
||||
}
|
||||
|
||||
@ -145,7 +145,7 @@ pub fn trait_method(&mut self,
|
||||
self_ty: Ty<'tcx>,
|
||||
params: &[Ty<'tcx>])
|
||||
-> (Ty<'tcx>, Literal<'tcx>) {
|
||||
let method_name = token::intern(method_name);
|
||||
let method_name = Symbol::intern(method_name);
|
||||
let substs = self.tcx.mk_substs_trait(self_ty, params);
|
||||
for item in self.tcx.associated_items(trait_def_id) {
|
||||
if item.kind == ty::AssociatedKind::Method && item.name == method_name {
|
||||
|
@ -21,7 +21,8 @@
|
||||
use syntax::ast::*;
|
||||
use syntax::attr;
|
||||
use syntax::codemap::Spanned;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax::visit::{self, Visitor};
|
||||
use syntax_pos::Span;
|
||||
use errors;
|
||||
|
@ -17,7 +17,7 @@
|
||||
|
||||
use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT};
|
||||
use syntax::ext::base::MacroExpanderFn;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax::ast;
|
||||
use syntax::feature_gate::AttributeType;
|
||||
use syntax_pos::Span;
|
||||
@ -121,7 +121,7 @@ pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxEx
|
||||
/// It builds for you a `NormalTT` that calls `expander`,
|
||||
/// and also takes care of interning the macro's name.
|
||||
pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) {
|
||||
self.register_syntax_extension(token::intern(name),
|
||||
self.register_syntax_extension(Symbol::intern(name),
|
||||
NormalTT(Box::new(expander), None, false));
|
||||
}
|
||||
|
||||
|
@ -40,7 +40,7 @@
|
||||
use syntax::ext::expand::mark_tts;
|
||||
use syntax::ext::hygiene::Mark;
|
||||
use syntax::ext::tt::macro_rules;
|
||||
use syntax::parse::token::keywords;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax::visit::{self, Visitor};
|
||||
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
|
@ -57,7 +57,7 @@
|
||||
use syntax::ast::{self, FloatTy};
|
||||
use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy};
|
||||
use syntax::ext::base::SyntaxExtension;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::symbol::{Symbol, InternedString, keywords};
|
||||
use syntax::util::lev_distance::find_best_match_for_name;
|
||||
|
||||
use syntax::visit::{self, FnKind, Visitor};
|
||||
@ -90,7 +90,7 @@
|
||||
|
||||
enum SuggestionType {
|
||||
Macro(String),
|
||||
Function(token::InternedString),
|
||||
Function(InternedString),
|
||||
NotFound,
|
||||
}
|
||||
|
||||
@ -1039,7 +1039,7 @@ fn new() -> PrimitiveTypeTable {
|
||||
}
|
||||
|
||||
fn intern(&mut self, string: &str, primitive_type: PrimTy) {
|
||||
self.primitive_types.insert(token::intern(string), primitive_type);
|
||||
self.primitive_types.insert(Symbol::intern(string), primitive_type);
|
||||
}
|
||||
}
|
||||
|
||||
@ -3606,7 +3606,7 @@ fn collect_mod(names: &mut Vec<Ident>, module: Module) {
|
||||
}
|
||||
} else {
|
||||
// danger, shouldn't be ident?
|
||||
names.push(token::str_to_ident("<opaque>"));
|
||||
names.push(Ident::from_str("<opaque>"));
|
||||
collect_mod(names, module.parent.unwrap());
|
||||
}
|
||||
}
|
||||
|
@ -39,7 +39,8 @@
|
||||
use std::hash::*;
|
||||
|
||||
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::keywords;
|
||||
use syntax::visit::{self, Visitor};
|
||||
use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string};
|
||||
use syntax::ptr::P;
|
||||
|
@ -54,7 +54,8 @@
|
||||
|
||||
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
|
||||
use syntax::parse::lexer::comments::strip_doc_comment_decoration;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax::visit::{self, Visitor};
|
||||
use syntax::print::pprust::{ty_to_string, arg_to_string};
|
||||
use syntax::codemap::MacroAttribute;
|
||||
@ -728,7 +729,7 @@ fn visit_pat(&mut self, p: &ast::Pat) {
|
||||
}
|
||||
|
||||
fn docs_for_attrs(attrs: &[Attribute]) -> String {
|
||||
let doc = token::intern("doc");
|
||||
let doc = Symbol::intern("doc");
|
||||
let mut result = String::new();
|
||||
|
||||
for attr in attrs {
|
||||
|
@ -18,7 +18,8 @@
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::parse::lexer::{self, Reader, StringReader};
|
||||
use syntax::parse::token::{self, keywords, Token};
|
||||
use syntax::parse::token::{self, Token};
|
||||
use syntax::symbol::keywords;
|
||||
use syntax_pos::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -29,7 +29,7 @@
|
||||
|
||||
use rustc::ty::TyCtxt;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
|
||||
use {ModuleSource, ModuleTranslation};
|
||||
|
||||
@ -117,7 +117,7 @@ fn field(&self, attr: &ast::Attribute, name: &str) -> ast::Name {
|
||||
for item in attr.meta_item_list().unwrap_or(&[]) {
|
||||
if item.check_name(name) {
|
||||
if let Some(value) = item.value_str() {
|
||||
return token::intern(&value);
|
||||
return Symbol::intern(&value);
|
||||
} else {
|
||||
self.tcx.sess.span_fatal(
|
||||
item.span,
|
||||
|
@ -113,7 +113,7 @@
|
||||
use rustc::util::common::record_time;
|
||||
|
||||
use syntax::attr;
|
||||
use syntax::parse::token::{self, InternedString};
|
||||
use syntax::symbol::{Symbol, InternedString, intern_and_get_ident};
|
||||
|
||||
fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
|
||||
|
||||
@ -275,7 +275,7 @@ fn root_mode(&self) -> &RootMode {
|
||||
}
|
||||
|
||||
fn push(&mut self, text: &str) {
|
||||
self.names.push(token::intern(text).as_str());
|
||||
self.names.push(Symbol::intern(text).as_str());
|
||||
}
|
||||
}
|
||||
|
||||
@ -288,7 +288,7 @@ pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a,
|
||||
krate: LOCAL_CRATE,
|
||||
};
|
||||
let hash = get_symbol_hash(scx, &empty_def_path, t, None);
|
||||
let path = [token::intern_and_get_ident(prefix)];
|
||||
let path = [intern_and_get_ident(prefix)];
|
||||
mangle(path.iter().cloned(), &hash)
|
||||
}
|
||||
|
||||
|
@ -52,8 +52,7 @@
|
||||
use std::cell::{Cell, RefCell, Ref};
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::{Symbol, InternedString};
|
||||
use syntax_pos::{DUMMY_SP, Span};
|
||||
|
||||
pub use context::{CrateContext, SharedCrateContext};
|
||||
@ -225,7 +224,7 @@ pub fn from_ty(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
VariantInfo {
|
||||
discr: Disr(0),
|
||||
fields: v.iter().enumerate().map(|(i, &t)| {
|
||||
Field(token::intern(&i.to_string()), t)
|
||||
Field(Symbol::intern(&i.to_string()), t)
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
|
@ -42,7 +42,7 @@
|
||||
use std::rc::Rc;
|
||||
use std::str;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::symbol::InternedString;
|
||||
use abi::FnType;
|
||||
|
||||
pub struct Stats {
|
||||
|
@ -45,9 +45,8 @@
|
||||
use std::path::Path;
|
||||
use std::ptr;
|
||||
use std::rc::Rc;
|
||||
use syntax::util::interner::Interner;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::{Interner, InternedString};
|
||||
use syntax_pos::{self, Span};
|
||||
|
||||
|
||||
@ -1566,7 +1565,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
|
||||
|
||||
fn get_enum_discriminant_name(cx: &CrateContext,
|
||||
def_id: DefId)
|
||||
-> token::InternedString {
|
||||
-> InternedString {
|
||||
cx.tcx().item_name(def_id).as_str()
|
||||
}
|
||||
}
|
||||
|
@ -30,7 +30,7 @@
|
||||
use Disr;
|
||||
use rustc::hir;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::intern_and_get_ident;
|
||||
|
||||
use rustc::session::Session;
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
@ -208,7 +208,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
|
||||
}
|
||||
(_, "type_name") => {
|
||||
let tp_ty = substs.type_at(0);
|
||||
let ty_name = token::intern_and_get_ident(&tp_ty.to_string());
|
||||
let ty_name = intern_and_get_ident(&tp_ty.to_string());
|
||||
C_str_slice(ccx, ty_name)
|
||||
}
|
||||
(_, "type_id") => {
|
||||
|
@ -30,7 +30,7 @@
|
||||
use type_::Type;
|
||||
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::intern_and_get_ident;
|
||||
|
||||
use super::{MirContext, LocalRef};
|
||||
use super::analyze::CleanupKind;
|
||||
@ -321,7 +321,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
|
||||
|
||||
// Get the location information.
|
||||
let loc = bcx.sess().codemap().lookup_char_pos(span.lo);
|
||||
let filename = token::intern_and_get_ident(&loc.file.name);
|
||||
let filename = intern_and_get_ident(&loc.file.name);
|
||||
let filename = C_str_slice(bcx.ccx(), filename);
|
||||
let line = C_u32(bcx.ccx(), loc.line as u32);
|
||||
|
||||
@ -351,7 +351,7 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
|
||||
const_err)
|
||||
}
|
||||
mir::AssertMessage::Math(ref err) => {
|
||||
let msg_str = token::intern_and_get_ident(err.description());
|
||||
let msg_str = intern_and_get_ident(err.description());
|
||||
let msg_str = C_str_slice(bcx.ccx(), msg_str);
|
||||
let msg_file_line = C_struct(bcx.ccx(),
|
||||
&[msg_str, filename, line],
|
||||
|
@ -21,7 +21,7 @@
|
||||
use type_of;
|
||||
|
||||
use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos};
|
||||
use syntax::parse::token::keywords;
|
||||
use syntax::symbol::keywords;
|
||||
|
||||
use std::cell::Ref;
|
||||
use std::iter;
|
||||
|
@ -132,7 +132,7 @@
|
||||
use std::collections::hash_map::DefaultHasher;
|
||||
use symbol_map::SymbolMap;
|
||||
use syntax::ast::NodeId;
|
||||
use syntax::parse::token::{self, InternedString};
|
||||
use syntax::symbol::{InternedString, intern_and_get_ident};
|
||||
use trans_item::TransItem;
|
||||
use util::nodemap::{FxHashMap, FxHashSet};
|
||||
|
||||
@ -542,11 +542,11 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
|
||||
mod_path.push_str(".volatile");
|
||||
}
|
||||
|
||||
return token::intern_and_get_ident(&mod_path[..]);
|
||||
return intern_and_get_ident(&mod_path[..]);
|
||||
}
|
||||
|
||||
fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString {
|
||||
token::intern_and_get_ident(&format!("{}{}{}",
|
||||
intern_and_get_ident(&format!("{}{}{}",
|
||||
crate_name,
|
||||
NUMBERED_CODEGEN_UNIT_MARKER,
|
||||
index)[..])
|
||||
|
@ -71,7 +71,7 @@
|
||||
use std::cell::RefCell;
|
||||
use syntax::{abi, ast};
|
||||
use syntax::feature_gate::{GateIssue, emit_feature_err};
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax_pos::{Span, Pos};
|
||||
use errors::DiagnosticBuilder;
|
||||
|
||||
@ -645,7 +645,7 @@ fn convert_parenthesized_parameters(&self,
|
||||
};
|
||||
|
||||
let output_binding = ConvertedBinding {
|
||||
item_name: token::intern(FN_OUTPUT_NAME),
|
||||
item_name: Symbol::intern(FN_OUTPUT_NAME),
|
||||
ty: output,
|
||||
span: output_span
|
||||
};
|
||||
|
@ -20,7 +20,7 @@
|
||||
use rustc::hir;
|
||||
|
||||
use syntax_pos::Span;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
enum AutoderefKind {
|
||||
@ -120,7 +120,7 @@ fn overloaded_deref_ty(&mut self, ty: Ty<'tcx>) -> Option<Ty<'tcx>> {
|
||||
let normalized = traits::normalize_projection_type(&mut selcx,
|
||||
ty::ProjectionTy {
|
||||
trait_ref: trait_ref,
|
||||
item_name: token::intern("Target"),
|
||||
item_name: Symbol::intern("Target"),
|
||||
},
|
||||
cause,
|
||||
0);
|
||||
@ -198,7 +198,7 @@ pub fn try_overloaded_deref(&self,
|
||||
(PreferMutLvalue, Some(trait_did)) => {
|
||||
self.lookup_method_in_trait(span,
|
||||
base_expr,
|
||||
token::intern("deref_mut"),
|
||||
Symbol::intern("deref_mut"),
|
||||
trait_did,
|
||||
base_ty,
|
||||
None)
|
||||
@ -211,7 +211,7 @@ pub fn try_overloaded_deref(&self,
|
||||
(None, Some(trait_did)) => {
|
||||
self.lookup_method_in_trait(span,
|
||||
base_expr,
|
||||
token::intern("deref"),
|
||||
Symbol::intern("deref"),
|
||||
trait_did,
|
||||
base_ty,
|
||||
None)
|
||||
|
@ -16,7 +16,7 @@
|
||||
use hir::print;
|
||||
use rustc::{infer, traits};
|
||||
use rustc::ty::{self, LvaluePreference, Ty};
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax::ptr::P;
|
||||
use syntax_pos::Span;
|
||||
|
||||
@ -160,9 +160,9 @@ fn try_overloaded_call_traits(&self,
|
||||
-> Option<ty::MethodCallee<'tcx>> {
|
||||
// Try the options that are least restrictive on the caller first.
|
||||
for &(opt_trait_def_id, method_name) in
|
||||
&[(self.tcx.lang_items.fn_trait(), token::intern("call")),
|
||||
(self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")),
|
||||
(self.tcx.lang_items.fn_once_trait(), token::intern("call_once"))] {
|
||||
&[(self.tcx.lang_items.fn_trait(), Symbol::intern("call")),
|
||||
(self.tcx.lang_items.fn_mut_trait(), Symbol::intern("call_mut")),
|
||||
(self.tcx.lang_items.fn_once_trait(), Symbol::intern("call_once"))] {
|
||||
let trait_def_id = match opt_trait_def_id {
|
||||
Some(def_id) => def_id,
|
||||
None => continue,
|
||||
|
@ -21,7 +21,7 @@
|
||||
|
||||
use syntax::abi::Abi;
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
use rustc::hir;
|
||||
@ -75,7 +75,7 @@ fn equate_intrinsic_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
|
||||
/// and in libcore/intrinsics.rs
|
||||
pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) {
|
||||
fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> {
|
||||
let name = token::intern(&format!("P{}", n));
|
||||
let name = Symbol::intern(&format!("P{}", n));
|
||||
ccx.tcx.mk_param(n, name)
|
||||
}
|
||||
|
||||
@ -326,7 +326,7 @@ fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> {
|
||||
pub fn check_platform_intrinsic_type(ccx: &CrateCtxt,
|
||||
it: &hir::ForeignItem) {
|
||||
let param = |n| {
|
||||
let name = token::intern(&format!("P{}", n));
|
||||
let name = Symbol::intern(&format!("P{}", n));
|
||||
ccx.tcx.mk_param(n, name)
|
||||
};
|
||||
|
||||
|
@ -115,8 +115,8 @@
|
||||
use syntax::attr;
|
||||
use syntax::codemap::{self, original_sp, Spanned};
|
||||
use syntax::feature_gate::{GateIssue, emit_feature_err};
|
||||
use syntax::parse::token::{self, InternedString, keywords};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{Symbol, InternedString, keywords};
|
||||
use syntax::util::lev_distance::find_best_match_for_name;
|
||||
use syntax_pos::{self, BytePos, Span};
|
||||
|
||||
@ -2369,7 +2369,7 @@ fn try_index_step(&self,
|
||||
(PreferMutLvalue, Some(trait_did)) => {
|
||||
self.lookup_method_in_trait_adjusted(expr.span,
|
||||
Some(&base_expr),
|
||||
token::intern("index_mut"),
|
||||
Symbol::intern("index_mut"),
|
||||
trait_did,
|
||||
autoderefs,
|
||||
unsize,
|
||||
@ -2384,7 +2384,7 @@ fn try_index_step(&self,
|
||||
(None, Some(trait_did)) => {
|
||||
self.lookup_method_in_trait_adjusted(expr.span,
|
||||
Some(&base_expr),
|
||||
token::intern("index"),
|
||||
Symbol::intern("index"),
|
||||
trait_did,
|
||||
autoderefs,
|
||||
unsize,
|
||||
|
@ -14,7 +14,7 @@
|
||||
use hir::def_id::DefId;
|
||||
use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue};
|
||||
use syntax::ast;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::Symbol;
|
||||
use rustc::hir;
|
||||
|
||||
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
|
||||
@ -182,7 +182,7 @@ fn check_overloaded_binop(&self,
|
||||
let rhs_ty_var = self.next_ty_var();
|
||||
|
||||
let return_ty = match self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var],
|
||||
token::intern(name), trait_def_id,
|
||||
Symbol::intern(name), trait_def_id,
|
||||
lhs_expr) {
|
||||
Ok(return_ty) => return_ty,
|
||||
Err(()) => {
|
||||
@ -248,9 +248,8 @@ pub fn check_user_unop(&self,
|
||||
-> Ty<'tcx>
|
||||
{
|
||||
assert!(op.is_by_value());
|
||||
match self.lookup_op_method(ex, operand_ty, vec![],
|
||||
token::intern(mname), trait_did,
|
||||
operand_expr) {
|
||||
let mname = Symbol::intern(mname);
|
||||
match self.lookup_op_method(ex, operand_ty, vec![], mname, trait_did, operand_expr) {
|
||||
Ok(t) => t,
|
||||
Err(()) => {
|
||||
self.type_error_message(ex.span, |actual| {
|
||||
|
@ -79,7 +79,7 @@
|
||||
use std::cell::RefCell;
|
||||
|
||||
use syntax::{abi, ast, attr};
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax_pos::Span;
|
||||
|
||||
use rustc::hir::{self, map as hir_map, print as pprust};
|
||||
@ -585,7 +585,7 @@ fn convert_closure<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
|
||||
let upvar_decls : Vec<_> = tcx.with_freevars(node_id, |fv| {
|
||||
fv.iter().enumerate().map(|(i, _)| ty::TypeParameterDef {
|
||||
index: (base_generics.count() as u32) + (i as u32),
|
||||
name: token::intern("<upvar>"),
|
||||
name: Symbol::intern("<upvar>"),
|
||||
def_id: def_id,
|
||||
default_def_id: base_def_id,
|
||||
default: None,
|
||||
|
@ -14,15 +14,16 @@
|
||||
pub use self::UnsafeSource::*;
|
||||
pub use self::ViewPath_::*;
|
||||
pub use self::PathParameters::*;
|
||||
pub use symbol::Symbol as Name;
|
||||
pub use util::ThinVec;
|
||||
|
||||
use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId};
|
||||
use codemap::{respan, Spanned};
|
||||
use abi::Abi;
|
||||
use ext::hygiene::SyntaxContext;
|
||||
use parse::token::{self, keywords, InternedString};
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
use symbol::{Symbol, keywords, InternedString};
|
||||
use tokenstream::{TokenTree};
|
||||
|
||||
use std::collections::HashSet;
|
||||
@ -32,60 +33,24 @@
|
||||
|
||||
use serialize::{self, Encodable, Decodable, Encoder, Decoder};
|
||||
|
||||
/// A name is a part of an identifier, representing a string or gensym. It's
|
||||
/// the result of interning.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Name(pub u32);
|
||||
|
||||
/// An identifier contains a Name (index into the interner
|
||||
/// table) and a SyntaxContext to track renaming and
|
||||
/// macro expansion per Flatt et al., "Macros That Work Together"
|
||||
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub struct Ident {
|
||||
pub name: Name,
|
||||
pub name: Symbol,
|
||||
pub ctxt: SyntaxContext
|
||||
}
|
||||
|
||||
impl Name {
|
||||
pub fn as_str(self) -> token::InternedString {
|
||||
token::InternedString::new_from_name(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Name {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}({})", self, self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Name {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.as_str(), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for Name {
|
||||
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
|
||||
s.emit_str(&self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for Name {
|
||||
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
|
||||
Ok(token::intern(&d.read_str()?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> ::std::cmp::PartialEq<&'a str> for Name {
|
||||
fn eq(&self, other: &&str) -> bool {
|
||||
*self.as_str() == **other
|
||||
}
|
||||
}
|
||||
|
||||
impl Ident {
|
||||
pub const fn with_empty_ctxt(name: Name) -> Ident {
|
||||
Ident { name: name, ctxt: SyntaxContext::empty() }
|
||||
}
|
||||
|
||||
/// Maps a string to an identifier with an empty syntax context.
|
||||
pub fn from_str(s: &str) -> Ident {
|
||||
Ident::with_empty_ctxt(Symbol::intern(s))
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Ident {
|
||||
|
@ -23,9 +23,9 @@
|
||||
use errors::Handler;
|
||||
use feature_gate::{Features, GatedCfg};
|
||||
use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||
use parse::token::InternedString;
|
||||
use parse::{ParseSess, token};
|
||||
use parse::ParseSess;
|
||||
use ptr::P;
|
||||
use symbol::{self, Symbol, InternedString};
|
||||
use util::ThinVec;
|
||||
|
||||
use std::cell::{RefCell, Cell};
|
||||
@ -278,8 +278,8 @@ pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
|
||||
if self.is_sugared_doc {
|
||||
let comment = self.value_str().unwrap();
|
||||
let meta = mk_name_value_item_str(
|
||||
token::intern("doc"),
|
||||
token::intern_and_get_ident(&strip_doc_comment_decoration(
|
||||
Symbol::intern("doc"),
|
||||
symbol::intern_and_get_ident(&strip_doc_comment_decoration(
|
||||
&comment)));
|
||||
if self.style == ast::AttrStyle::Outer {
|
||||
f(&mk_attr_outer(self.id, meta))
|
||||
@ -392,7 +392,7 @@ pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, hi: By
|
||||
style: style,
|
||||
value: MetaItem {
|
||||
span: mk_sp(lo, hi),
|
||||
name: token::intern("doc"),
|
||||
name: Symbol::intern("doc"),
|
||||
node: MetaItemKind::NameValue(lit),
|
||||
},
|
||||
is_sugared_doc: true,
|
||||
|
@ -19,6 +19,7 @@
|
||||
use ext::build::AstBuilder;
|
||||
use parse::token;
|
||||
use ptr::P;
|
||||
use symbol::Symbol;
|
||||
use tokenstream::{TokenTree};
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
@ -141,7 +142,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
|
||||
));
|
||||
}
|
||||
});
|
||||
let sym = Ident::with_empty_ctxt(token::gensym(&format!(
|
||||
let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!(
|
||||
"__register_diagnostic_{}", code
|
||||
)));
|
||||
MacEager::items(SmallVector::many(vec![
|
||||
|
@ -20,8 +20,8 @@
|
||||
use fold::{self, Folder};
|
||||
use parse::{self, parser};
|
||||
use parse::token;
|
||||
use parse::token::{InternedString, str_to_ident};
|
||||
use ptr::P;
|
||||
use symbol::{Symbol, InternedString};
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
use std::path::PathBuf;
|
||||
@ -735,7 +735,7 @@ pub fn set_trace_macros(&mut self, x: bool) {
|
||||
self.ecfg.trace_mac = x
|
||||
}
|
||||
pub fn ident_of(&self, st: &str) -> ast::Ident {
|
||||
str_to_ident(st)
|
||||
ast::Ident::from_str(st)
|
||||
}
|
||||
pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
|
||||
let mut v = Vec::new();
|
||||
@ -746,7 +746,7 @@ pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
|
||||
return v
|
||||
}
|
||||
pub fn name_of(&self, st: &str) -> ast::Name {
|
||||
token::intern(st)
|
||||
Symbol::intern(st)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -14,8 +14,8 @@
|
||||
use syntax_pos::{Span, DUMMY_SP, Pos};
|
||||
use codemap::{dummy_spanned, respan, Spanned};
|
||||
use ext::base::ExtCtxt;
|
||||
use parse::token::{self, keywords, InternedString};
|
||||
use ptr::P;
|
||||
use symbol::{intern_and_get_ident, keywords, InternedString};
|
||||
|
||||
// Transitional reexports so qquote can find the paths it is looking for
|
||||
mod syntax {
|
||||
@ -787,8 +787,7 @@ fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
|
||||
|
||||
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
|
||||
let loc = self.codemap().lookup_char_pos(span.lo);
|
||||
let expr_file = self.expr_str(span,
|
||||
token::intern_and_get_ident(&loc.file.name));
|
||||
let expr_file = self.expr_str(span, intern_and_get_ident(&loc.file.name));
|
||||
let expr_line = self.expr_u32(span, loc.line as u32);
|
||||
let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]);
|
||||
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
|
||||
|
@ -23,10 +23,11 @@
|
||||
use fold::*;
|
||||
use parse::{ParseSess, PResult, lexer};
|
||||
use parse::parser::Parser;
|
||||
use parse::token::{self, keywords};
|
||||
use parse::token;
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
use std_inject;
|
||||
use symbol::keywords;
|
||||
use tokenstream::{TokenTree, TokenStream};
|
||||
use util::small_vector::SmallVector;
|
||||
use visit::Visitor;
|
||||
@ -190,7 +191,7 @@ pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
|
||||
pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
|
||||
self.cx.crate_root = std_inject::injected_crate_name(&krate);
|
||||
let mut module = ModuleData {
|
||||
mod_path: vec![token::str_to_ident(&self.cx.ecfg.crate_name)],
|
||||
mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)],
|
||||
directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)),
|
||||
};
|
||||
module.directory.pop();
|
||||
@ -246,7 +247,7 @@ fn expand(&mut self, expansion: Expansion) -> Expansion {
|
||||
self.cx.resolver.resolve_macro(scope, &mac.node.path, force)
|
||||
}
|
||||
InvocationKind::Attr { ref attr, .. } => {
|
||||
let ident = ast::Ident::with_empty_ctxt(attr.name());
|
||||
let ident = Ident::with_empty_ctxt(attr.name());
|
||||
let path = ast::Path::from_ident(attr.span, ident);
|
||||
self.cx.resolver.resolve_macro(scope, &path, force)
|
||||
}
|
||||
|
@ -13,8 +13,8 @@
|
||||
use ext::base::ExtCtxt;
|
||||
use ext::expand::{Expansion, ExpansionKind};
|
||||
use fold::*;
|
||||
use parse::token::{intern, keywords};
|
||||
use ptr::P;
|
||||
use symbol::{Symbol, keywords};
|
||||
use util::move_map::MoveMap;
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
@ -227,7 +227,7 @@ pub fn reconstructed_macro_rules(def: &ast::MacroDef) -> Expansion {
|
||||
span: DUMMY_SP,
|
||||
global: false,
|
||||
segments: vec![ast::PathSegment {
|
||||
identifier: ast::Ident::with_empty_ctxt(intern("macro_rules")),
|
||||
identifier: ast::Ident::with_empty_ctxt(Symbol::intern("macro_rules")),
|
||||
parameters: ast::PathParameters::none(),
|
||||
}],
|
||||
},
|
||||
|
@ -66,6 +66,7 @@ pub mod prelude {
|
||||
pub use ast::Ident;
|
||||
pub use codemap::{DUMMY_SP, Span};
|
||||
pub use ext::base::{ExtCtxt, MacResult};
|
||||
pub use parse::token::{self, Token, DelimToken, keywords, str_to_ident};
|
||||
pub use parse::token::{self, Token, DelimToken};
|
||||
pub use symbol::keywords;
|
||||
pub use tokenstream::{TokenTree, TokenStream};
|
||||
}
|
||||
|
@ -33,6 +33,7 @@ pub mod rt {
|
||||
use parse::{self, token, classify};
|
||||
use ptr::P;
|
||||
use std::rc::Rc;
|
||||
use symbol;
|
||||
|
||||
use tokenstream::{self, TokenTree};
|
||||
|
||||
@ -239,7 +240,7 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
impl ToTokens for str {
|
||||
fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
|
||||
let lit = ast::LitKind::Str(
|
||||
token::intern_and_get_ident(self), ast::StrStyle::Cooked);
|
||||
symbol::intern_and_get_ident(self), ast::StrStyle::Cooked);
|
||||
dummy_spanned(lit).to_tokens(cx)
|
||||
}
|
||||
}
|
||||
@ -527,12 +528,12 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt,
|
||||
base::MacEager::expr(expanded)
|
||||
}
|
||||
|
||||
fn ids_ext(strs: Vec<String> ) -> Vec<ast::Ident> {
|
||||
strs.iter().map(|str| str_to_ident(&(*str))).collect()
|
||||
fn ids_ext(strs: Vec<String>) -> Vec<ast::Ident> {
|
||||
strs.iter().map(|s| ast::Ident::from_str(s)).collect()
|
||||
}
|
||||
|
||||
fn id_ext(str: &str) -> ast::Ident {
|
||||
str_to_ident(str)
|
||||
fn id_ext(s: &str) -> ast::Ident {
|
||||
ast::Ident::from_str(s)
|
||||
}
|
||||
|
||||
// Lift an ident to the expr that evaluates to that ident.
|
||||
|
@ -17,6 +17,7 @@
|
||||
use parse;
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
use symbol;
|
||||
use tokenstream;
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
@ -60,15 +61,14 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
|
||||
|
||||
let topmost = cx.expansion_cause();
|
||||
let loc = cx.codemap().lookup_char_pos(topmost.lo);
|
||||
let filename = token::intern_and_get_ident(&loc.file.name);
|
||||
let filename = symbol::intern_and_get_ident(&loc.file.name);
|
||||
base::MacEager::expr(cx.expr_str(topmost, filename))
|
||||
}
|
||||
|
||||
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
|
||||
-> Box<base::MacResult+'static> {
|
||||
let s = pprust::tts_to_string(tts);
|
||||
base::MacEager::expr(cx.expr_str(sp,
|
||||
token::intern_and_get_ident(&s[..])))
|
||||
base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&s)))
|
||||
}
|
||||
|
||||
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
|
||||
@ -77,9 +77,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
|
||||
let mod_path = &cx.current_expansion.module.mod_path;
|
||||
let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
|
||||
|
||||
base::MacEager::expr(cx.expr_str(
|
||||
sp,
|
||||
token::intern_and_get_ident(&string[..])))
|
||||
base::MacEager::expr(cx.expr_str(sp, symbol::intern_and_get_ident(&string)))
|
||||
}
|
||||
|
||||
/// include! : parse the given file as an expr
|
||||
@ -144,7 +142,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
|
||||
// Add this input file to the code map to make it available as
|
||||
// dependency information
|
||||
let filename = format!("{}", file.display());
|
||||
let interned = token::intern_and_get_ident(&src[..]);
|
||||
let interned = symbol::intern_and_get_ident(&src);
|
||||
cx.codemap().new_filemap_and_lines(&filename, None, &src);
|
||||
|
||||
base::MacEager::expr(cx.expr_str(sp, interned))
|
||||
|
@ -20,9 +20,10 @@
|
||||
use parse::ParseSess;
|
||||
use parse::lexer::new_tt_reader;
|
||||
use parse::parser::{Parser, Restrictions};
|
||||
use parse::token::{self, gensym_ident, NtTT, Token};
|
||||
use parse::token::{self, NtTT, Token};
|
||||
use parse::token::Token::*;
|
||||
use print;
|
||||
use symbol::Symbol;
|
||||
use tokenstream::{self, TokenTree};
|
||||
|
||||
use std::collections::{HashMap};
|
||||
@ -187,16 +188,16 @@ fn expand(&self,
|
||||
|
||||
/// Converts a `macro_rules!` invocation into a syntax extension.
|
||||
pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
|
||||
let lhs_nm = gensym_ident("lhs");
|
||||
let rhs_nm = gensym_ident("rhs");
|
||||
let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
|
||||
let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
|
||||
|
||||
// The pattern that macro_rules matches.
|
||||
// The grammar for macro_rules! is:
|
||||
// $( $lhs:tt => $rhs:tt );+
|
||||
// ...quasiquoting this would be nice.
|
||||
// These spans won't matter, anyways
|
||||
let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt"));
|
||||
let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt"));
|
||||
let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt"));
|
||||
let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt"));
|
||||
let argument_gram = vec![
|
||||
TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
|
||||
tts: vec![
|
||||
|
@ -33,7 +33,7 @@
|
||||
use errors::{DiagnosticBuilder, Handler};
|
||||
use visit::{self, FnKind, Visitor};
|
||||
use parse::ParseSess;
|
||||
use parse::token::InternedString;
|
||||
use symbol::InternedString;
|
||||
|
||||
use std::ascii::AsciiExt;
|
||||
use std::env;
|
||||
|
@ -22,8 +22,9 @@
|
||||
use ast;
|
||||
use syntax_pos::Span;
|
||||
use codemap::{Spanned, respan};
|
||||
use parse::token::{self, keywords};
|
||||
use parse::token;
|
||||
use ptr::P;
|
||||
use symbol::keywords;
|
||||
use tokenstream::*;
|
||||
use util::small_vector::SmallVector;
|
||||
use util::move_map::MoveMap;
|
||||
|
@ -83,7 +83,6 @@ pub mod diagnostics {
|
||||
pub mod diagnostic_list;
|
||||
|
||||
pub mod util {
|
||||
pub mod interner;
|
||||
pub mod lev_distance;
|
||||
pub mod node_count;
|
||||
pub mod parser;
|
||||
@ -118,6 +117,7 @@ pub mod syntax {
|
||||
pub mod show_span;
|
||||
pub mod std_inject;
|
||||
pub mod str;
|
||||
pub mod symbol;
|
||||
pub mod test;
|
||||
pub mod tokenstream;
|
||||
pub mod visit;
|
||||
|
@ -8,13 +8,14 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use ast;
|
||||
use ast::{self, Ident};
|
||||
use syntax_pos::{self, BytePos, CharPos, Pos, Span};
|
||||
use codemap::CodeMap;
|
||||
use errors::{FatalError, Handler, DiagnosticBuilder};
|
||||
use ext::tt::transcribe::tt_next_token;
|
||||
use parse::token::{self, keywords, str_to_ident};
|
||||
use parse::token;
|
||||
use str::char_at;
|
||||
use symbol::{Symbol, keywords};
|
||||
use rustc_unicode::property::Pattern_White_Space;
|
||||
|
||||
use std::borrow::Cow;
|
||||
@ -350,13 +351,13 @@ pub fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T
|
||||
/// single-byte delimiter).
|
||||
pub fn name_from(&self, start: BytePos) -> ast::Name {
|
||||
debug!("taking an ident from {:?} to {:?}", start, self.pos);
|
||||
self.with_str_from(start, token::intern)
|
||||
self.with_str_from(start, Symbol::intern)
|
||||
}
|
||||
|
||||
/// As name_from, with an explicit endpoint.
|
||||
pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
|
||||
debug!("taking an ident from {:?} to {:?}", start, end);
|
||||
self.with_str_from_to(start, end, token::intern)
|
||||
self.with_str_from_to(start, end, Symbol::intern)
|
||||
}
|
||||
|
||||
/// Calls `f` with a string slice of the source text spanning from `start`
|
||||
@ -492,7 +493,7 @@ fn scan_optional_raw_name(&mut self) -> Option<ast::Name> {
|
||||
if string == "_" {
|
||||
None
|
||||
} else {
|
||||
Some(token::intern(string))
|
||||
Some(Symbol::intern(string))
|
||||
}
|
||||
})
|
||||
}
|
||||
@ -540,7 +541,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
self.with_str_from(start_bpos, |string| {
|
||||
// comments with only more "/"s are not doc comments
|
||||
let tok = if is_doc_comment(string) {
|
||||
token::DocComment(token::intern(string))
|
||||
token::DocComment(Symbol::intern(string))
|
||||
} else {
|
||||
token::Comment
|
||||
};
|
||||
@ -669,7 +670,7 @@ fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
|
||||
} else {
|
||||
string.into()
|
||||
};
|
||||
token::DocComment(token::intern(&string[..]))
|
||||
token::DocComment(Symbol::intern(&string[..]))
|
||||
} else {
|
||||
token::Comment
|
||||
};
|
||||
@ -758,7 +759,7 @@ fn scan_number(&mut self, c: char) -> token::Lit {
|
||||
self.err_span_(start_bpos,
|
||||
self.pos,
|
||||
"no valid digits found for number");
|
||||
return token::Integer(token::intern("0"));
|
||||
return token::Integer(Symbol::intern("0"));
|
||||
}
|
||||
|
||||
// might be a float, but don't be greedy if this is actually an
|
||||
@ -1097,7 +1098,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
|
||||
token::Underscore
|
||||
} else {
|
||||
// FIXME: perform NFKC normalization here. (Issue #2253)
|
||||
token::Ident(str_to_ident(string))
|
||||
token::Ident(Ident::from_str(string))
|
||||
}
|
||||
}));
|
||||
}
|
||||
@ -1277,13 +1278,13 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
|
||||
// expansion purposes. See #12512 for the gory details of why
|
||||
// this is necessary.
|
||||
let ident = self.with_str_from(start, |lifetime_name| {
|
||||
str_to_ident(&format!("'{}", lifetime_name))
|
||||
Ident::from_str(&format!("'{}", lifetime_name))
|
||||
});
|
||||
|
||||
// Conjure up a "keyword checking ident" to make sure that
|
||||
// the lifetime name is not a keyword.
|
||||
let keyword_checking_ident = self.with_str_from(start, |lifetime_name| {
|
||||
str_to_ident(lifetime_name)
|
||||
Ident::from_str(lifetime_name)
|
||||
});
|
||||
let keyword_checking_token = &token::Ident(keyword_checking_ident);
|
||||
let last_bpos = self.pos;
|
||||
@ -1310,7 +1311,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
|
||||
let id = if valid {
|
||||
self.name_from(start)
|
||||
} else {
|
||||
token::intern("0")
|
||||
Symbol::intern("0")
|
||||
};
|
||||
self.bump(); // advance ch past token
|
||||
let suffix = self.scan_optional_raw_name();
|
||||
@ -1352,7 +1353,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
|
||||
let id = if valid {
|
||||
self.name_from(start_bpos + BytePos(1))
|
||||
} else {
|
||||
token::intern("??")
|
||||
Symbol::intern("??")
|
||||
};
|
||||
self.bump();
|
||||
let suffix = self.scan_optional_raw_name();
|
||||
@ -1424,7 +1425,7 @@ fn next_token_inner(&mut self) -> Result<token::Token, ()> {
|
||||
let id = if valid {
|
||||
self.name_from_to(content_start_bpos, content_end_bpos)
|
||||
} else {
|
||||
token::intern("??")
|
||||
Symbol::intern("??")
|
||||
};
|
||||
let suffix = self.scan_optional_raw_name();
|
||||
return Ok(token::Literal(token::StrRaw(id, hash_count), suffix));
|
||||
@ -1551,7 +1552,7 @@ fn scan_byte(&mut self) -> token::Lit {
|
||||
let id = if valid {
|
||||
self.name_from(start)
|
||||
} else {
|
||||
token::intern("?")
|
||||
Symbol::intern("?")
|
||||
};
|
||||
self.bump(); // advance ch past token
|
||||
return token::Byte(id);
|
||||
@ -1584,7 +1585,7 @@ fn scan_byte_string(&mut self) -> token::Lit {
|
||||
let id = if valid {
|
||||
self.name_from(start)
|
||||
} else {
|
||||
token::intern("??")
|
||||
Symbol::intern("??")
|
||||
};
|
||||
self.bump();
|
||||
return token::ByteStr(id);
|
||||
@ -1700,11 +1701,11 @@ fn ident_continue(c: Option<char>) -> bool {
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
use ast::Ident;
|
||||
use syntax_pos::{BytePos, Span, NO_EXPANSION};
|
||||
use codemap::CodeMap;
|
||||
use errors;
|
||||
use parse::token;
|
||||
use parse::token::str_to_ident;
|
||||
use std::io;
|
||||
use std::rc::Rc;
|
||||
|
||||
@ -1732,7 +1733,7 @@ fn t1() {
|
||||
&sh,
|
||||
"/* my source file */ fn main() { println!(\"zebra\"); }\n"
|
||||
.to_string());
|
||||
let id = str_to_ident("fn");
|
||||
let id = Ident::from_str("fn");
|
||||
assert_eq!(string_reader.next_token().tok, token::Comment);
|
||||
assert_eq!(string_reader.next_token().tok, token::Whitespace);
|
||||
let tok1 = string_reader.next_token();
|
||||
@ -1813,7 +1814,7 @@ fn character_a() {
|
||||
let cm = Rc::new(CodeMap::new());
|
||||
let sh = mk_sh(cm.clone());
|
||||
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
|
||||
token::Literal(token::Char(token::intern("a")), None));
|
||||
token::Literal(token::Char(Symbol::intern("a")), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1821,7 +1822,7 @@ fn character_space() {
|
||||
let cm = Rc::new(CodeMap::new());
|
||||
let sh = mk_sh(cm.clone());
|
||||
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
|
||||
token::Literal(token::Char(token::intern(" ")), None));
|
||||
token::Literal(token::Char(Symbol::intern(" ")), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1829,7 +1830,7 @@ fn character_escaped() {
|
||||
let cm = Rc::new(CodeMap::new());
|
||||
let sh = mk_sh(cm.clone());
|
||||
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
|
||||
token::Literal(token::Char(token::intern("\\n")), None));
|
||||
token::Literal(token::Char(Symbol::intern("\\n")), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1847,7 +1848,7 @@ fn raw_string() {
|
||||
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
|
||||
.next_token()
|
||||
.tok,
|
||||
token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None));
|
||||
token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1857,11 +1858,11 @@ fn literal_suffixes() {
|
||||
macro_rules! test {
|
||||
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
|
||||
assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok,
|
||||
token::Literal(token::$tok_type(token::intern($tok_contents)),
|
||||
Some(token::intern("suffix"))));
|
||||
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
|
||||
Some(Symbol::intern("suffix"))));
|
||||
// with a whitespace separator:
|
||||
assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok,
|
||||
token::Literal(token::$tok_type(token::intern($tok_contents)),
|
||||
token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
|
||||
None));
|
||||
}}
|
||||
}
|
||||
@ -1877,14 +1878,14 @@ macro_rules! test {
|
||||
test!("1.0e10", Float, "1.0e10");
|
||||
|
||||
assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok,
|
||||
token::Literal(token::Integer(token::intern("2")),
|
||||
Some(token::intern("us"))));
|
||||
token::Literal(token::Integer(Symbol::intern("2")),
|
||||
Some(Symbol::intern("us"))));
|
||||
assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
|
||||
token::Literal(token::StrRaw(token::intern("raw"), 3),
|
||||
Some(token::intern("suffix"))));
|
||||
token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
|
||||
Some(Symbol::intern("suffix"))));
|
||||
assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
|
||||
token::Literal(token::ByteStrRaw(token::intern("raw"), 3),
|
||||
Some(token::intern("suffix"))));
|
||||
token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
|
||||
Some(Symbol::intern("suffix"))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1904,7 +1905,7 @@ fn nested_block_comments() {
|
||||
_ => panic!("expected a comment!"),
|
||||
}
|
||||
assert_eq!(lexer.next_token().tok,
|
||||
token::Literal(token::Char(token::intern("a")), None));
|
||||
token::Literal(token::Char(Symbol::intern("a")), None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1917,6 +1918,6 @@ fn crlf_comments() {
|
||||
assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
|
||||
assert_eq!(lexer.next_token().tok, token::Whitespace);
|
||||
assert_eq!(lexer.next_token().tok,
|
||||
token::DocComment(token::intern("/// test")));
|
||||
token::DocComment(Symbol::intern("/// test")));
|
||||
}
|
||||
}
|
||||
|
@ -16,9 +16,9 @@
|
||||
use errors::{Handler, ColorConfig, DiagnosticBuilder};
|
||||
use feature_gate::UnstableFeatures;
|
||||
use parse::parser::Parser;
|
||||
use parse::token::InternedString;
|
||||
use ptr::P;
|
||||
use str::char_at;
|
||||
use symbol::{self, InternedString};
|
||||
use tokenstream;
|
||||
|
||||
use std::cell::RefCell;
|
||||
@ -372,7 +372,7 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
|
||||
s[1..].chars().all(|c| '0' <= c && c <= '9')
|
||||
}
|
||||
|
||||
fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>,
|
||||
fn filtered_float_lit(data: InternedString, suffix: Option<&str>,
|
||||
sd: &Handler, sp: Span) -> ast::LitKind {
|
||||
debug!("filtered_float_lit: {}, {:?}", data, suffix);
|
||||
match suffix.as_ref().map(|s| &**s) {
|
||||
@ -400,7 +400,7 @@ pub fn float_lit(s: &str, suffix: Option<InternedString>,
|
||||
debug!("float_lit: {:?}, {:?}", s, suffix);
|
||||
// FIXME #2252: bounds checking float literals is deferred until trans
|
||||
let s = s.chars().filter(|&c| c != '_').collect::<String>();
|
||||
let data = token::intern_and_get_ident(&s);
|
||||
let data = symbol::intern_and_get_ident(&s);
|
||||
filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp)
|
||||
}
|
||||
|
||||
@ -530,7 +530,7 @@ pub fn integer_lit(s: &str,
|
||||
2 => sd.span_err(sp, "binary float literal is not supported"),
|
||||
_ => ()
|
||||
}
|
||||
let ident = token::intern_and_get_ident(&s);
|
||||
let ident = symbol::intern_and_get_ident(&s);
|
||||
return filtered_float_lit(ident, Some(&suf), sd, sp)
|
||||
}
|
||||
}
|
||||
|
@ -48,13 +48,14 @@
|
||||
use parse::common::SeqSep;
|
||||
use parse::lexer::{Reader, TokenAndSpan};
|
||||
use parse::obsolete::ObsoleteSyntax;
|
||||
use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString};
|
||||
use parse::token::{self, MatchNt, SubstNt};
|
||||
use parse::{new_sub_parser_from_file, ParseSess};
|
||||
use util::parser::{AssocOp, Fixity};
|
||||
use print::pprust;
|
||||
use ptr::P;
|
||||
use parse::PResult;
|
||||
use tokenstream::{self, Delimited, SequenceRepetition, TokenTree};
|
||||
use symbol::{self, Symbol, keywords, InternedString};
|
||||
use util::ThinVec;
|
||||
|
||||
use std::collections::HashSet;
|
||||
@ -1537,13 +1538,13 @@ pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
|
||||
|
||||
token::Str_(s) => {
|
||||
(true,
|
||||
LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
|
||||
LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())),
|
||||
ast::StrStyle::Cooked))
|
||||
}
|
||||
token::StrRaw(s, n) => {
|
||||
(true,
|
||||
LitKind::Str(
|
||||
token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
|
||||
symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
|
||||
ast::StrStyle::Raw(n)))
|
||||
}
|
||||
token::ByteStr(i) =>
|
||||
@ -2627,7 +2628,7 @@ fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> {
|
||||
})));
|
||||
} else if self.token.is_keyword(keywords::Crate) {
|
||||
let ident = match self.token {
|
||||
token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id },
|
||||
token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id },
|
||||
_ => unreachable!(),
|
||||
};
|
||||
self.bump();
|
||||
@ -4835,7 +4836,7 @@ fn complain_if_pub_macro(&mut self, visa: &Visibility, span: Span) {
|
||||
Visibility::Inherited => (),
|
||||
_ => {
|
||||
let is_macro_rules: bool = match self.token {
|
||||
token::Ident(sid) => sid.name == intern("macro_rules"),
|
||||
token::Ident(sid) => sid.name == Symbol::intern("macro_rules"),
|
||||
_ => false,
|
||||
};
|
||||
if is_macro_rules {
|
||||
|
@ -16,13 +16,10 @@
|
||||
|
||||
use ast::{self};
|
||||
use ptr::P;
|
||||
use util::interner::Interner;
|
||||
use symbol::keywords;
|
||||
use tokenstream;
|
||||
|
||||
use serialize::{Decodable, Decoder, Encodable, Encoder};
|
||||
use std::cell::RefCell;
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
|
||||
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
|
||||
@ -335,266 +332,3 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// In this macro, there is the requirement that the name (the number) must be monotonically
|
||||
// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
|
||||
// except starting from the next number instead of zero.
|
||||
macro_rules! declare_keywords {(
|
||||
$( ($index: expr, $konst: ident, $string: expr) )*
|
||||
) => {
|
||||
pub mod keywords {
|
||||
use ast;
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Keyword {
|
||||
ident: ast::Ident,
|
||||
}
|
||||
impl Keyword {
|
||||
#[inline] pub fn ident(self) -> ast::Ident { self.ident }
|
||||
#[inline] pub fn name(self) -> ast::Name { self.ident.name }
|
||||
}
|
||||
$(
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const $konst: Keyword = Keyword {
|
||||
ident: ast::Ident::with_empty_ctxt(ast::Name($index))
|
||||
};
|
||||
)*
|
||||
}
|
||||
|
||||
fn mk_fresh_ident_interner() -> IdentInterner {
|
||||
Interner::prefill(&[$($string,)*])
|
||||
}
|
||||
}}
|
||||
|
||||
// NB: leaving holes in the ident table is bad! a different ident will get
|
||||
// interned with the id from the hole, but it will be between the min and max
|
||||
// of the reserved words, and thus tagged as "reserved".
|
||||
// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
|
||||
// this should be rarely necessary though if the keywords are kept in alphabetic order.
|
||||
declare_keywords! {
|
||||
// Invalid identifier
|
||||
(0, Invalid, "")
|
||||
|
||||
// Strict keywords used in the language.
|
||||
(1, As, "as")
|
||||
(2, Box, "box")
|
||||
(3, Break, "break")
|
||||
(4, Const, "const")
|
||||
(5, Continue, "continue")
|
||||
(6, Crate, "crate")
|
||||
(7, Else, "else")
|
||||
(8, Enum, "enum")
|
||||
(9, Extern, "extern")
|
||||
(10, False, "false")
|
||||
(11, Fn, "fn")
|
||||
(12, For, "for")
|
||||
(13, If, "if")
|
||||
(14, Impl, "impl")
|
||||
(15, In, "in")
|
||||
(16, Let, "let")
|
||||
(17, Loop, "loop")
|
||||
(18, Match, "match")
|
||||
(19, Mod, "mod")
|
||||
(20, Move, "move")
|
||||
(21, Mut, "mut")
|
||||
(22, Pub, "pub")
|
||||
(23, Ref, "ref")
|
||||
(24, Return, "return")
|
||||
(25, SelfValue, "self")
|
||||
(26, SelfType, "Self")
|
||||
(27, Static, "static")
|
||||
(28, Struct, "struct")
|
||||
(29, Super, "super")
|
||||
(30, Trait, "trait")
|
||||
(31, True, "true")
|
||||
(32, Type, "type")
|
||||
(33, Unsafe, "unsafe")
|
||||
(34, Use, "use")
|
||||
(35, Where, "where")
|
||||
(36, While, "while")
|
||||
|
||||
// Keywords reserved for future use.
|
||||
(37, Abstract, "abstract")
|
||||
(38, Alignof, "alignof")
|
||||
(39, Become, "become")
|
||||
(40, Do, "do")
|
||||
(41, Final, "final")
|
||||
(42, Macro, "macro")
|
||||
(43, Offsetof, "offsetof")
|
||||
(44, Override, "override")
|
||||
(45, Priv, "priv")
|
||||
(46, Proc, "proc")
|
||||
(47, Pure, "pure")
|
||||
(48, Sizeof, "sizeof")
|
||||
(49, Typeof, "typeof")
|
||||
(50, Unsized, "unsized")
|
||||
(51, Virtual, "virtual")
|
||||
(52, Yield, "yield")
|
||||
|
||||
// Weak keywords, have special meaning only in specific contexts.
|
||||
(53, Default, "default")
|
||||
(54, StaticLifetime, "'static")
|
||||
(55, Union, "union")
|
||||
}
|
||||
|
||||
// looks like we can get rid of this completely...
|
||||
pub type IdentInterner = Interner;
|
||||
|
||||
// if an interner exists in TLS, return it. Otherwise, prepare a
|
||||
// fresh one.
|
||||
// FIXME(eddyb) #8726 This should probably use a thread-local reference.
|
||||
pub fn with_ident_interner<T, F: FnOnce(&mut IdentInterner) -> T>(f: F) -> T {
|
||||
thread_local!(static KEY: RefCell<IdentInterner> = {
|
||||
RefCell::new(mk_fresh_ident_interner())
|
||||
});
|
||||
KEY.with(|interner| f(&mut *interner.borrow_mut()))
|
||||
}
|
||||
|
||||
/// Reset the ident interner to its initial state.
|
||||
pub fn reset_ident_interner() {
|
||||
with_ident_interner(|interner| *interner = mk_fresh_ident_interner());
|
||||
}
|
||||
|
||||
/// Represents a string stored in the thread-local interner. Because the
|
||||
/// interner lives for the life of the thread, this can be safely treated as an
|
||||
/// immortal string, as long as it never crosses between threads.
|
||||
///
|
||||
/// FIXME(pcwalton): You must be careful about what you do in the destructors
|
||||
/// of objects stored in TLS, because they may run after the interner is
|
||||
/// destroyed. In particular, they must not access string contents. This can
|
||||
/// be fixed in the future by just leaking all strings until thread death
|
||||
/// somehow.
|
||||
#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
|
||||
pub struct InternedString {
|
||||
string: Rc<str>,
|
||||
}
|
||||
|
||||
impl InternedString {
|
||||
#[inline]
|
||||
pub fn new(string: &'static str) -> InternedString {
|
||||
InternedString {
|
||||
string: Rc::__from_str(string),
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn new_from_name(name: ast::Name) -> InternedString {
|
||||
with_ident_interner(|interner| InternedString { string: interner.get(name) })
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for InternedString {
|
||||
type Target = str;
|
||||
|
||||
fn deref(&self) -> &str { &self.string }
|
||||
}
|
||||
|
||||
impl fmt::Debug for InternedString {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Debug::fmt(&self.string, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for InternedString {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.string, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<&'a str> for InternedString {
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: & &'a str) -> bool {
|
||||
PartialEq::eq(&self.string[..], *other)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: & &'a str) -> bool {
|
||||
PartialEq::ne(&self.string[..], *other)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<InternedString> for &'a str {
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &InternedString) -> bool {
|
||||
PartialEq::eq(*self, &other.string[..])
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &InternedString) -> bool {
|
||||
PartialEq::ne(*self, &other.string[..])
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<str> for InternedString {
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &str) -> bool {
|
||||
PartialEq::eq(&self.string[..], other)
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &str) -> bool {
|
||||
PartialEq::ne(&self.string[..], other)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<InternedString> for str {
|
||||
#[inline(always)]
|
||||
fn eq(&self, other: &InternedString) -> bool {
|
||||
PartialEq::eq(self, &other.string[..])
|
||||
}
|
||||
#[inline(always)]
|
||||
fn ne(&self, other: &InternedString) -> bool {
|
||||
PartialEq::ne(self, &other.string[..])
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for InternedString {
|
||||
fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
|
||||
Ok(intern(&d.read_str()?).as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for InternedString {
|
||||
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
|
||||
s.emit_str(&self.string)
|
||||
}
|
||||
}
|
||||
|
||||
/// Interns and returns the string contents of an identifier, using the
|
||||
/// thread-local interner.
|
||||
#[inline]
|
||||
pub fn intern_and_get_ident(s: &str) -> InternedString {
|
||||
intern(s).as_str()
|
||||
}
|
||||
|
||||
/// Maps a string to its interned representation.
|
||||
#[inline]
|
||||
pub fn intern(s: &str) -> ast::Name {
|
||||
with_ident_interner(|interner| interner.intern(s))
|
||||
}
|
||||
|
||||
/// gensym's a new usize, using the current interner.
|
||||
#[inline]
|
||||
pub fn gensym(s: &str) -> ast::Name {
|
||||
with_ident_interner(|interner| interner.gensym(s))
|
||||
}
|
||||
|
||||
/// Maps a string to an identifier with an empty syntax context.
|
||||
#[inline]
|
||||
pub fn str_to_ident(s: &str) -> ast::Ident {
|
||||
ast::Ident::with_empty_ctxt(intern(s))
|
||||
}
|
||||
|
||||
/// Maps a string to a gensym'ed identifier.
|
||||
#[inline]
|
||||
pub fn gensym_ident(s: &str) -> ast::Ident {
|
||||
ast::Ident::with_empty_ctxt(gensym(s))
|
||||
}
|
||||
|
||||
// create a fresh name that maps to the same string as the old one.
|
||||
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
|
||||
// that is, that the new name and the old one are connected to ptr_eq strings.
|
||||
pub fn fresh_name(src: ast::Ident) -> ast::Name {
|
||||
with_ident_interner(|interner| interner.gensym_copy(src.name))
|
||||
// following: debug version. Could work in final except that it's incompatible with
|
||||
// good error messages and uses of struct names in ambiguous could-be-binding
|
||||
// locations. Also definitely destroys the guarantee given above about ptr_eq.
|
||||
/*let num = rand::thread_rng().gen_uint_range(0,0xffff);
|
||||
gensym(format!("{}_{}",ident_to_string(src),num))*/
|
||||
}
|
||||
|
@ -19,7 +19,7 @@
|
||||
use codemap::{self, CodeMap};
|
||||
use syntax_pos::{self, BytePos};
|
||||
use errors;
|
||||
use parse::token::{self, keywords, BinOpToken, Token};
|
||||
use parse::token::{self, BinOpToken, Token};
|
||||
use parse::lexer::comments;
|
||||
use parse;
|
||||
use print::pp::{self, break_offset, word, space, zerobreak, hardbreak};
|
||||
@ -27,6 +27,7 @@
|
||||
use print::pp::Breaks::{Consistent, Inconsistent};
|
||||
use ptr::P;
|
||||
use std_inject;
|
||||
use symbol::{Symbol, keywords};
|
||||
use tokenstream::{self, TokenTree};
|
||||
|
||||
use std::ascii;
|
||||
@ -119,13 +120,13 @@ pub fn print_crate<'a>(cm: &'a CodeMap,
|
||||
// of the feature gate, so we fake them up here.
|
||||
|
||||
// #![feature(prelude_import)]
|
||||
let prelude_import_meta = attr::mk_list_word_item(token::intern("prelude_import"));
|
||||
let list = attr::mk_list_item(token::intern("feature"), vec![prelude_import_meta]);
|
||||
let prelude_import_meta = attr::mk_list_word_item(Symbol::intern("prelude_import"));
|
||||
let list = attr::mk_list_item(Symbol::intern("feature"), vec![prelude_import_meta]);
|
||||
let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list);
|
||||
try!(s.print_attribute(&fake_attr));
|
||||
|
||||
// #![no_std]
|
||||
let no_std_meta = attr::mk_word_item(token::intern("no_std"));
|
||||
let no_std_meta = attr::mk_word_item(Symbol::intern("no_std"));
|
||||
let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta);
|
||||
try!(s.print_attribute(&fake_attr));
|
||||
}
|
||||
|
@ -10,10 +10,10 @@
|
||||
|
||||
use ast;
|
||||
use attr;
|
||||
use symbol::{Symbol, keywords};
|
||||
use syntax_pos::{DUMMY_SP, Span};
|
||||
use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute};
|
||||
use parse::token::{intern, keywords};
|
||||
use parse::{token, ParseSess};
|
||||
use parse::ParseSess;
|
||||
use ptr::P;
|
||||
|
||||
/// Craft a span that will be ignored by the stability lint's
|
||||
@ -23,7 +23,7 @@ fn ignored_span(sess: &ParseSess, sp: Span) -> Span {
|
||||
let info = ExpnInfo {
|
||||
call_site: DUMMY_SP,
|
||||
callee: NameAndSpan {
|
||||
format: MacroAttribute(intern("std_inject")),
|
||||
format: MacroAttribute(Symbol::intern("std_inject")),
|
||||
span: None,
|
||||
allow_internal_unstable: true,
|
||||
}
|
||||
@ -53,14 +53,14 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
|
||||
None => return krate,
|
||||
};
|
||||
|
||||
let crate_name = token::intern(&alt_std_name.unwrap_or(name.to_string()));
|
||||
let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string()));
|
||||
|
||||
krate.module.items.insert(0, P(ast::Item {
|
||||
attrs: vec![attr::mk_attr_outer(attr::mk_attr_id(),
|
||||
attr::mk_word_item(token::intern("macro_use")))],
|
||||
attr::mk_word_item(Symbol::intern("macro_use")))],
|
||||
vis: ast::Visibility::Inherited,
|
||||
node: ast::ItemKind::ExternCrate(Some(crate_name)),
|
||||
ident: token::str_to_ident(name),
|
||||
ident: ast::Ident::from_str(name),
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
span: DUMMY_SP,
|
||||
}));
|
||||
@ -70,7 +70,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
|
||||
attrs: vec![ast::Attribute {
|
||||
style: ast::AttrStyle::Outer,
|
||||
value: ast::MetaItem {
|
||||
name: token::intern("prelude_import"),
|
||||
name: Symbol::intern("prelude_import"),
|
||||
node: ast::MetaItemKind::Word,
|
||||
span: span,
|
||||
},
|
||||
@ -82,7 +82,7 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess,
|
||||
node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path {
|
||||
global: false,
|
||||
segments: vec![name, "prelude", "v1"].into_iter().map(|name| ast::PathSegment {
|
||||
identifier: token::str_to_ident(name),
|
||||
identifier: ast::Ident::from_str(name),
|
||||
parameters: ast::PathParameters::none(),
|
||||
}).collect(),
|
||||
span: span,
|
||||
|
339
src/libsyntax/symbol.rs
Normal file
339
src/libsyntax/symbol.rs
Normal file
@ -0,0 +1,339 @@
|
||||
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! An "interner" is a data structure that associates values with usize tags and
|
||||
//! allows bidirectional lookup; i.e. given a value, one can easily find the
|
||||
//! type, and vice versa.
|
||||
|
||||
use serialize::{Decodable, Decoder, Encodable, Encoder};
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::fmt;
|
||||
use std::rc::Rc;
|
||||
|
||||
/// A symbol is an interned or gensymed string.
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub struct Symbol(u32);
|
||||
|
||||
impl Symbol {
|
||||
/// Maps a string to its interned representation.
|
||||
pub fn intern(string: &str) -> Self {
|
||||
with_interner(|interner| interner.intern(string))
|
||||
}
|
||||
|
||||
/// gensym's a new usize, using the current interner.
|
||||
pub fn gensym(string: &str) -> Self {
|
||||
with_interner(|interner| interner.gensym(string))
|
||||
}
|
||||
|
||||
pub fn as_str(self) -> InternedString {
|
||||
with_interner(|interner| InternedString { string: interner.get(self) })
|
||||
}
|
||||
|
||||
pub fn as_u32(self) -> u32 {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Debug for Symbol {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "{}({})", self, self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Symbol {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.as_str(), f)
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for Symbol {
|
||||
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
|
||||
s.emit_str(&self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for Symbol {
|
||||
fn decode<D: Decoder>(d: &mut D) -> Result<Symbol, D::Error> {
|
||||
Ok(Symbol::intern(&d.read_str()?))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<&'a str> for Symbol {
|
||||
fn eq(&self, other: &&str) -> bool {
|
||||
*self.as_str() == **other
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Interner {
|
||||
names: HashMap<Rc<str>, Symbol>,
|
||||
strings: Vec<Rc<str>>,
|
||||
}
|
||||
|
||||
impl Interner {
|
||||
pub fn new() -> Self {
|
||||
Interner::default()
|
||||
}
|
||||
|
||||
fn prefill(init: &[&str]) -> Self {
|
||||
let mut this = Interner::new();
|
||||
for &string in init {
|
||||
this.intern(string);
|
||||
}
|
||||
this
|
||||
}
|
||||
|
||||
pub fn intern(&mut self, string: &str) -> Symbol {
|
||||
if let Some(&name) = self.names.get(string) {
|
||||
return name;
|
||||
}
|
||||
|
||||
let name = Symbol(self.strings.len() as u32);
|
||||
let string = Rc::__from_str(string);
|
||||
self.strings.push(string.clone());
|
||||
self.names.insert(string, name);
|
||||
name
|
||||
}
|
||||
|
||||
fn gensym(&mut self, string: &str) -> Symbol {
|
||||
let gensym = Symbol(self.strings.len() as u32);
|
||||
// leave out of `names` to avoid colliding
|
||||
self.strings.push(Rc::__from_str(string));
|
||||
gensym
|
||||
}
|
||||
|
||||
pub fn get(&self, name: Symbol) -> Rc<str> {
|
||||
self.strings[name.0 as usize].clone()
|
||||
}
|
||||
}
|
||||
|
||||
// In this macro, there is the requirement that the name (the number) must be monotonically
|
||||
// increasing by one in the special identifiers, starting at 0; the same holds for the keywords,
|
||||
// except starting from the next number instead of zero.
|
||||
macro_rules! declare_keywords {(
|
||||
$( ($index: expr, $konst: ident, $string: expr) )*
|
||||
) => {
|
||||
pub mod keywords {
|
||||
use ast;
|
||||
#[derive(Clone, Copy, PartialEq, Eq)]
|
||||
pub struct Keyword {
|
||||
ident: ast::Ident,
|
||||
}
|
||||
impl Keyword {
|
||||
#[inline] pub fn ident(self) -> ast::Ident { self.ident }
|
||||
#[inline] pub fn name(self) -> ast::Name { self.ident.name }
|
||||
}
|
||||
$(
|
||||
#[allow(non_upper_case_globals)]
|
||||
pub const $konst: Keyword = Keyword {
|
||||
ident: ast::Ident::with_empty_ctxt(ast::Name($index))
|
||||
};
|
||||
)*
|
||||
}
|
||||
|
||||
impl Interner {
|
||||
fn fresh() -> Self {
|
||||
Interner::prefill(&[$($string,)*])
|
||||
}
|
||||
}
|
||||
}}
|
||||
|
||||
// NB: leaving holes in the ident table is bad! a different ident will get
|
||||
// interned with the id from the hole, but it will be between the min and max
|
||||
// of the reserved words, and thus tagged as "reserved".
|
||||
// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`,
|
||||
// this should be rarely necessary though if the keywords are kept in alphabetic order.
|
||||
declare_keywords! {
|
||||
// Invalid identifier
|
||||
(0, Invalid, "")
|
||||
|
||||
// Strict keywords used in the language.
|
||||
(1, As, "as")
|
||||
(2, Box, "box")
|
||||
(3, Break, "break")
|
||||
(4, Const, "const")
|
||||
(5, Continue, "continue")
|
||||
(6, Crate, "crate")
|
||||
(7, Else, "else")
|
||||
(8, Enum, "enum")
|
||||
(9, Extern, "extern")
|
||||
(10, False, "false")
|
||||
(11, Fn, "fn")
|
||||
(12, For, "for")
|
||||
(13, If, "if")
|
||||
(14, Impl, "impl")
|
||||
(15, In, "in")
|
||||
(16, Let, "let")
|
||||
(17, Loop, "loop")
|
||||
(18, Match, "match")
|
||||
(19, Mod, "mod")
|
||||
(20, Move, "move")
|
||||
(21, Mut, "mut")
|
||||
(22, Pub, "pub")
|
||||
(23, Ref, "ref")
|
||||
(24, Return, "return")
|
||||
(25, SelfValue, "self")
|
||||
(26, SelfType, "Self")
|
||||
(27, Static, "static")
|
||||
(28, Struct, "struct")
|
||||
(29, Super, "super")
|
||||
(30, Trait, "trait")
|
||||
(31, True, "true")
|
||||
(32, Type, "type")
|
||||
(33, Unsafe, "unsafe")
|
||||
(34, Use, "use")
|
||||
(35, Where, "where")
|
||||
(36, While, "while")
|
||||
|
||||
// Keywords reserved for future use.
|
||||
(37, Abstract, "abstract")
|
||||
(38, Alignof, "alignof")
|
||||
(39, Become, "become")
|
||||
(40, Do, "do")
|
||||
(41, Final, "final")
|
||||
(42, Macro, "macro")
|
||||
(43, Offsetof, "offsetof")
|
||||
(44, Override, "override")
|
||||
(45, Priv, "priv")
|
||||
(46, Proc, "proc")
|
||||
(47, Pure, "pure")
|
||||
(48, Sizeof, "sizeof")
|
||||
(49, Typeof, "typeof")
|
||||
(50, Unsized, "unsized")
|
||||
(51, Virtual, "virtual")
|
||||
(52, Yield, "yield")
|
||||
|
||||
// Weak keywords, have special meaning only in specific contexts.
|
||||
(53, Default, "default")
|
||||
(54, StaticLifetime, "'static")
|
||||
(55, Union, "union")
|
||||
}
|
||||
|
||||
// If an interner exists in TLS, return it. Otherwise, prepare a fresh one.
|
||||
fn with_interner<T, F: FnOnce(&mut Interner) -> T>(f: F) -> T {
|
||||
thread_local!(static INTERNER: RefCell<Interner> = {
|
||||
RefCell::new(Interner::fresh())
|
||||
});
|
||||
INTERNER.with(|interner| f(&mut *interner.borrow_mut()))
|
||||
}
|
||||
|
||||
/// Reset the ident interner to its initial state.
|
||||
pub fn reset_interner() {
|
||||
with_interner(|interner| *interner = Interner::fresh());
|
||||
}
|
||||
|
||||
/// Represents a string stored in the thread-local interner. Because the
|
||||
/// interner lives for the life of the thread, this can be safely treated as an
|
||||
/// immortal string, as long as it never crosses between threads.
|
||||
///
|
||||
/// FIXME(pcwalton): You must be careful about what you do in the destructors
|
||||
/// of objects stored in TLS, because they may run after the interner is
|
||||
/// destroyed. In particular, they must not access string contents. This can
|
||||
/// be fixed in the future by just leaking all strings until thread death
|
||||
/// somehow.
|
||||
#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)]
|
||||
pub struct InternedString {
|
||||
string: Rc<str>,
|
||||
}
|
||||
|
||||
impl InternedString {
|
||||
pub fn new(string: &'static str) -> InternedString {
|
||||
InternedString {
|
||||
string: Rc::__from_str(string),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ::std::ops::Deref for InternedString {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &str { &self.string }
|
||||
}
|
||||
|
||||
impl fmt::Debug for InternedString {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Debug::fmt(&self.string, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for InternedString {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
fmt::Display::fmt(&self.string, f)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<&'a str> for InternedString {
|
||||
fn eq(&self, other: & &'a str) -> bool {
|
||||
PartialEq::eq(&self.string[..], *other)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> PartialEq<InternedString> for &'a str {
|
||||
fn eq(&self, other: &InternedString) -> bool {
|
||||
PartialEq::eq(*self, &other.string[..])
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<str> for InternedString {
|
||||
fn eq(&self, other: &str) -> bool {
|
||||
PartialEq::eq(&self.string[..], other)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<InternedString> for str {
|
||||
fn eq(&self, other: &InternedString) -> bool {
|
||||
PartialEq::eq(self, &other.string[..])
|
||||
}
|
||||
}
|
||||
|
||||
impl Decodable for InternedString {
|
||||
fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> {
|
||||
Ok(Symbol::intern(&d.read_str()?).as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl Encodable for InternedString {
|
||||
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
|
||||
s.emit_str(&self.string)
|
||||
}
|
||||
}
|
||||
|
||||
/// Interns and returns the string contents of an identifier, using the
|
||||
/// thread-local interner.
|
||||
#[inline]
|
||||
pub fn intern_and_get_ident(s: &str) -> InternedString {
|
||||
Symbol::intern(s).as_str()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use ast::Name;
|
||||
|
||||
#[test]
|
||||
fn interner_tests() {
|
||||
let mut i: Interner = Interner::new();
|
||||
// first one is zero:
|
||||
assert_eq!(i.intern("dog"), Name(0));
|
||||
// re-use gets the same entry:
|
||||
assert_eq!(i.intern ("dog"), Name(0));
|
||||
// different string gets a different #:
|
||||
assert_eq!(i.intern("cat"), Name(1));
|
||||
assert_eq!(i.intern("cat"), Name(1));
|
||||
// dog is still at zero
|
||||
assert_eq!(i.intern("dog"), Name(0));
|
||||
// gensym gets 3
|
||||
assert_eq!(i.gensym("zebra"), Name(2));
|
||||
// gensym of same string gets new number :
|
||||
assert_eq!(i.gensym("zebra"), Name(3));
|
||||
// gensym of *existing* string gets new number:
|
||||
assert_eq!(i.gensym("dog"), Name(4));
|
||||
}
|
||||
}
|
@ -34,11 +34,11 @@
|
||||
use fold::Folder;
|
||||
use util::move_map::MoveMap;
|
||||
use fold;
|
||||
use parse::token::{intern, keywords, InternedString};
|
||||
use parse::{token, ParseSess};
|
||||
use print::pprust;
|
||||
use ast;
|
||||
use ast::{self, Ident};
|
||||
use ptr::P;
|
||||
use symbol::{self, Symbol, keywords, InternedString};
|
||||
use util::small_vector::SmallVector;
|
||||
|
||||
enum ShouldPanic {
|
||||
@ -48,7 +48,7 @@ enum ShouldPanic {
|
||||
|
||||
struct Test {
|
||||
span: Span,
|
||||
path: Vec<ast::Ident> ,
|
||||
path: Vec<Ident> ,
|
||||
bench: bool,
|
||||
ignore: bool,
|
||||
should_panic: ShouldPanic
|
||||
@ -57,14 +57,14 @@ struct Test {
|
||||
struct TestCtxt<'a> {
|
||||
sess: &'a ParseSess,
|
||||
span_diagnostic: &'a errors::Handler,
|
||||
path: Vec<ast::Ident>,
|
||||
path: Vec<Ident>,
|
||||
ext_cx: ExtCtxt<'a>,
|
||||
testfns: Vec<Test>,
|
||||
reexport_test_harness_main: Option<InternedString>,
|
||||
is_test_crate: bool,
|
||||
|
||||
// top-level re-export submodule, filled out after folding is finished
|
||||
toplevel_reexport: Option<ast::Ident>,
|
||||
toplevel_reexport: Option<Ident>,
|
||||
}
|
||||
|
||||
// Traverse the crate, collecting all the test functions, eliding any
|
||||
@ -91,10 +91,10 @@ pub fn modify_for_testing(sess: &ParseSess,
|
||||
|
||||
struct TestHarnessGenerator<'a> {
|
||||
cx: TestCtxt<'a>,
|
||||
tests: Vec<ast::Ident>,
|
||||
tests: Vec<Ident>,
|
||||
|
||||
// submodule name, gensym'd identifier for re-exports
|
||||
tested_submods: Vec<(ast::Ident, ast::Ident)>,
|
||||
tested_submods: Vec<(Ident, Ident)>,
|
||||
}
|
||||
|
||||
impl<'a> fold::Folder for TestHarnessGenerator<'a> {
|
||||
@ -191,8 +191,8 @@ fn fold_item(&mut self, i: P<ast::Item>) -> SmallVector<P<ast::Item>> {
|
||||
EntryPointType::MainAttr |
|
||||
EntryPointType::Start =>
|
||||
folded.map(|ast::Item {id, ident, attrs, node, vis, span}| {
|
||||
let allow_str = token::intern("allow");
|
||||
let dead_code_str = token::intern("dead_code");
|
||||
let allow_str = Symbol::intern("allow");
|
||||
let dead_code_str = Symbol::intern("dead_code");
|
||||
let word_vec = vec![attr::mk_list_word_item(dead_code_str)];
|
||||
let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec);
|
||||
let allow_dead_code = attr::mk_attr_outer(attr::mk_attr_id(),
|
||||
@ -222,15 +222,18 @@ fn fold_item(&mut self, i: P<ast::Item>) -> SmallVector<P<ast::Item>> {
|
||||
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac }
|
||||
}
|
||||
|
||||
fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident>,
|
||||
tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P<ast::Item>, ast::Ident) {
|
||||
let super_ = token::str_to_ident("super");
|
||||
fn mk_reexport_mod(cx: &mut TestCtxt,
|
||||
parent: ast::NodeId,
|
||||
tests: Vec<Ident>,
|
||||
tested_submods: Vec<(Ident, Ident)>)
|
||||
-> (P<ast::Item>, Ident) {
|
||||
let super_ = Ident::from_str("super");
|
||||
|
||||
// Generate imports with `#[allow(private_in_public)]` to work around issue #36768.
|
||||
let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list(
|
||||
DUMMY_SP,
|
||||
token::intern("allow"),
|
||||
vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, token::intern("private_in_public"))],
|
||||
Symbol::intern("allow"),
|
||||
vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, Symbol::intern("private_in_public"))],
|
||||
));
|
||||
let items = tests.into_iter().map(|r| {
|
||||
cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public,
|
||||
@ -247,7 +250,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec<ast::Ident
|
||||
items: items,
|
||||
};
|
||||
|
||||
let sym = token::gensym_ident("__test_reexports");
|
||||
let sym = Ident::with_empty_ctxt(Symbol::gensym("__test_reexports"));
|
||||
let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
|
||||
cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent);
|
||||
let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item {
|
||||
@ -286,7 +289,7 @@ fn generate_test_harness(sess: &ParseSess,
|
||||
cx.ext_cx.bt_push(ExpnInfo {
|
||||
call_site: DUMMY_SP,
|
||||
callee: NameAndSpan {
|
||||
format: MacroAttribute(intern("test")),
|
||||
format: MacroAttribute(Symbol::intern("test")),
|
||||
span: None,
|
||||
allow_internal_unstable: false,
|
||||
}
|
||||
@ -306,7 +309,7 @@ fn ignored_span(cx: &TestCtxt, sp: Span) -> Span {
|
||||
let info = ExpnInfo {
|
||||
call_site: DUMMY_SP,
|
||||
callee: NameAndSpan {
|
||||
format: MacroAttribute(intern("test")),
|
||||
format: MacroAttribute(Symbol::intern("test")),
|
||||
span: None,
|
||||
allow_internal_unstable: true,
|
||||
}
|
||||
@ -456,7 +459,7 @@ fn main() {
|
||||
*/
|
||||
|
||||
fn mk_std(cx: &TestCtxt) -> P<ast::Item> {
|
||||
let id_test = token::str_to_ident("test");
|
||||
let id_test = Ident::from_str("test");
|
||||
let (vi, vis, ident) = if cx.is_test_crate {
|
||||
(ast::ItemKind::Use(
|
||||
P(nospan(ast::ViewPathSimple(id_test,
|
||||
@ -487,16 +490,17 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
|
||||
let ecx = &cx.ext_cx;
|
||||
|
||||
// test::test_main_static
|
||||
let test_main_path = ecx.path(sp, vec![token::str_to_ident("test"),
|
||||
token::str_to_ident("test_main_static")]);
|
||||
let test_main_path =
|
||||
ecx.path(sp, vec![Ident::from_str("test"), Ident::from_str("test_main_static")]);
|
||||
|
||||
// test::test_main_static(...)
|
||||
let test_main_path_expr = ecx.expr_path(test_main_path);
|
||||
let tests_ident_expr = ecx.expr_ident(sp, token::str_to_ident("TESTS"));
|
||||
let tests_ident_expr = ecx.expr_ident(sp, Ident::from_str("TESTS"));
|
||||
let call_test_main = ecx.expr_call(sp, test_main_path_expr,
|
||||
vec![tests_ident_expr]);
|
||||
let call_test_main = ecx.stmt_expr(call_test_main);
|
||||
// #![main]
|
||||
let main_meta = ecx.meta_word(sp, token::intern("main"));
|
||||
let main_meta = ecx.meta_word(sp, Symbol::intern("main"));
|
||||
let main_attr = ecx.attribute(sp, main_meta);
|
||||
// pub fn main() { ... }
|
||||
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
|
||||
@ -506,7 +510,7 @@ fn mk_main(cx: &mut TestCtxt) -> P<ast::Item> {
|
||||
dummy_spanned(ast::Constness::NotConst),
|
||||
::abi::Abi::Rust, ast::Generics::default(), main_body);
|
||||
let main = P(ast::Item {
|
||||
ident: token::str_to_ident("main"),
|
||||
ident: Ident::from_str("main"),
|
||||
attrs: vec![main_attr],
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: main,
|
||||
@ -533,7 +537,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
|
||||
items: vec![import, mainfn, tests],
|
||||
};
|
||||
let item_ = ast::ItemKind::Mod(testmod);
|
||||
let mod_ident = token::gensym_ident("__test");
|
||||
let mod_ident = Ident::with_empty_ctxt(Symbol::gensym("__test"));
|
||||
|
||||
let mut expander = cx.ext_cx.monotonic_expander();
|
||||
let item = expander.fold_item(P(ast::Item {
|
||||
@ -546,11 +550,11 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
|
||||
})).pop().unwrap();
|
||||
let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
|
||||
// building `use <ident> = __test::main`
|
||||
let reexport_ident = token::str_to_ident(&s);
|
||||
let reexport_ident = Ident::from_str(&s);
|
||||
|
||||
let use_path =
|
||||
nospan(ast::ViewPathSimple(reexport_ident,
|
||||
path_node(vec![mod_ident, token::str_to_ident("main")])));
|
||||
path_node(vec![mod_ident, Ident::from_str("main")])));
|
||||
|
||||
expander.fold_item(P(ast::Item {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
@ -571,7 +575,7 @@ fn nospan<T>(t: T) -> codemap::Spanned<T> {
|
||||
codemap::Spanned { node: t, span: DUMMY_SP }
|
||||
}
|
||||
|
||||
fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
|
||||
fn path_node(ids: Vec<Ident>) -> ast::Path {
|
||||
ast::Path {
|
||||
span: DUMMY_SP,
|
||||
global: false,
|
||||
@ -582,7 +586,7 @@ fn path_node(ids: Vec<ast::Ident> ) -> ast::Path {
|
||||
}
|
||||
}
|
||||
|
||||
fn path_name_i(idents: &[ast::Ident]) -> String {
|
||||
fn path_name_i(idents: &[Ident]) -> String {
|
||||
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
|
||||
idents.iter().map(|i| i.to_string()).collect::<Vec<String>>().join("::")
|
||||
}
|
||||
@ -660,7 +664,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P<ast::Expr> {
|
||||
|
||||
// path to the #[test] function: "foo::bar::baz"
|
||||
let path_string = path_name_i(&path[..]);
|
||||
let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..]));
|
||||
let name_expr = ecx.expr_str(span, symbol::intern_and_get_ident(&path_string[..]));
|
||||
|
||||
// self::test::StaticTestName($name_expr)
|
||||
let name_expr = ecx.expr_call(span,
|
||||
|
@ -34,6 +34,7 @@
|
||||
use parse;
|
||||
use parse::token::{self, Token, Lit, Nonterminal};
|
||||
use print::pprust;
|
||||
use symbol::{self, Symbol};
|
||||
|
||||
use std::fmt;
|
||||
use std::iter::*;
|
||||
@ -173,10 +174,10 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
|
||||
TokenTree::Delimited(sp, Rc::new(Delimited {
|
||||
delim: token::Bracket,
|
||||
open_span: sp,
|
||||
tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))),
|
||||
tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
|
||||
TokenTree::Token(sp, token::Eq),
|
||||
TokenTree::Token(sp, token::Literal(
|
||||
token::StrRaw(token::intern(&stripped), num_of_hashes), None))],
|
||||
token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
|
||||
close_span: sp,
|
||||
}))
|
||||
}
|
||||
@ -295,7 +296,7 @@ pub fn maybe_lit(&self) -> Option<token::Lit> {
|
||||
pub fn maybe_str(&self) -> Option<ast::Lit> {
|
||||
match *self {
|
||||
TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => {
|
||||
let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())),
|
||||
let l = LitKind::Str(symbol::intern_and_get_ident(&parse::str_lit(&s.as_str())),
|
||||
ast::StrStyle::Cooked);
|
||||
Some(Spanned {
|
||||
node: l,
|
||||
@ -303,7 +304,7 @@ pub fn maybe_str(&self) -> Option<ast::Lit> {
|
||||
})
|
||||
}
|
||||
TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => {
|
||||
let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
|
||||
let l = LitKind::Str(symbol::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())),
|
||||
ast::StrStyle::Raw(n));
|
||||
Some(Spanned {
|
||||
node: l,
|
||||
|
@ -1,111 +0,0 @@
|
||||
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
//! An "interner" is a data structure that associates values with usize tags and
|
||||
//! allows bidirectional lookup; i.e. given a value, one can easily find the
|
||||
//! type, and vice versa.
|
||||
|
||||
use ast::Name;
|
||||
|
||||
use std::collections::HashMap;
|
||||
use std::rc::Rc;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Interner {
|
||||
names: HashMap<Rc<str>, Name>,
|
||||
strings: Vec<Rc<str>>,
|
||||
}
|
||||
|
||||
/// When traits can extend traits, we should extend index<Name,T> to get []
|
||||
impl Interner {
|
||||
pub fn new() -> Self {
|
||||
Interner::default()
|
||||
}
|
||||
|
||||
pub fn prefill(init: &[&str]) -> Self {
|
||||
let mut this = Interner::new();
|
||||
for &string in init {
|
||||
this.intern(string);
|
||||
}
|
||||
this
|
||||
}
|
||||
|
||||
pub fn intern(&mut self, string: &str) -> Name {
|
||||
if let Some(&name) = self.names.get(string) {
|
||||
return name;
|
||||
}
|
||||
|
||||
let name = Name(self.strings.len() as u32);
|
||||
let string = Rc::__from_str(string);
|
||||
self.strings.push(string.clone());
|
||||
self.names.insert(string, name);
|
||||
name
|
||||
}
|
||||
|
||||
pub fn gensym(&mut self, string: &str) -> Name {
|
||||
let gensym = Name(self.strings.len() as u32);
|
||||
// leave out of `names` to avoid colliding
|
||||
self.strings.push(Rc::__from_str(string));
|
||||
gensym
|
||||
}
|
||||
|
||||
/// Create a gensym with the same name as an existing entry.
|
||||
pub fn gensym_copy(&mut self, name: Name) -> Name {
|
||||
let gensym = Name(self.strings.len() as u32);
|
||||
// leave out of `names` to avoid colliding
|
||||
let string = self.strings[name.0 as usize].clone();
|
||||
self.strings.push(string);
|
||||
gensym
|
||||
}
|
||||
|
||||
pub fn get(&self, name: Name) -> Rc<str> {
|
||||
self.strings[name.0 as usize].clone()
|
||||
}
|
||||
|
||||
pub fn find(&self, string: &str) -> Option<Name> {
|
||||
self.names.get(string).cloned()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use ast::Name;
|
||||
|
||||
#[test]
|
||||
fn interner_tests() {
|
||||
let mut i: Interner = Interner::new();
|
||||
// first one is zero:
|
||||
assert_eq!(i.intern("dog"), Name(0));
|
||||
// re-use gets the same entry:
|
||||
assert_eq!(i.intern ("dog"), Name(0));
|
||||
// different string gets a different #:
|
||||
assert_eq!(i.intern("cat"), Name(1));
|
||||
assert_eq!(i.intern("cat"), Name(1));
|
||||
// dog is still at zero
|
||||
assert_eq!(i.intern("dog"), Name(0));
|
||||
// gensym gets 3
|
||||
assert_eq!(i.gensym("zebra"), Name(2));
|
||||
// gensym of same string gets new number :
|
||||
assert_eq!(i.gensym("zebra"), Name(3));
|
||||
// gensym of *existing* string gets new number:
|
||||
assert_eq!(i.gensym("dog"), Name(4));
|
||||
// gensym tests again with gensym_copy:
|
||||
assert_eq!(i.gensym_copy(Name(2)), Name(5));
|
||||
assert_eq!(&*i.get(Name(5)), "zebra");
|
||||
assert_eq!(i.gensym_copy(Name(2)), Name(6));
|
||||
assert_eq!(&*i.get(Name(6)), "zebra");
|
||||
assert_eq!(&*i.get(Name(0)), "dog");
|
||||
assert_eq!(&*i.get(Name(1)), "cat");
|
||||
assert_eq!(&*i.get(Name(2)), "zebra");
|
||||
assert_eq!(&*i.get(Name(3)), "zebra");
|
||||
assert_eq!(&*i.get(Name(4)), "dog");
|
||||
}
|
||||
}
|
@ -10,7 +10,7 @@
|
||||
|
||||
use ast::Name;
|
||||
use std::cmp;
|
||||
use parse::token::InternedString;
|
||||
use symbol::InternedString;
|
||||
|
||||
/// To find the Levenshtein distance between two strings
|
||||
pub fn lev_distance(a: &str, b: &str) -> usize {
|
||||
|
@ -7,7 +7,8 @@
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
use parse::token::{Token, BinOpToken, keywords};
|
||||
use parse::token::{Token, BinOpToken};
|
||||
use symbol::keywords;
|
||||
use ast::BinOpKind;
|
||||
|
||||
/// Associative operator with precedence.
|
||||
|
@ -17,9 +17,9 @@
|
||||
use syntax::ext::base;
|
||||
use syntax::ext::base::*;
|
||||
use syntax::feature_gate;
|
||||
use syntax::parse::token::intern;
|
||||
use syntax::parse::{self, token};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{self, Symbol, InternedString};
|
||||
use syntax::ast::AsmDialect;
|
||||
use syntax_pos::Span;
|
||||
use syntax::tokenstream;
|
||||
@ -73,7 +73,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
|
||||
})
|
||||
.unwrap_or(tts.len());
|
||||
let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
|
||||
let mut asm = token::InternedString::new("");
|
||||
let mut asm = InternedString::new("");
|
||||
let mut asm_str_style = None;
|
||||
let mut outputs = Vec::new();
|
||||
let mut inputs = Vec::new();
|
||||
@ -139,7 +139,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
|
||||
let output = match ch.next() {
|
||||
Some('=') => None,
|
||||
Some('+') => {
|
||||
Some(token::intern_and_get_ident(&format!("={}", ch.as_str())))
|
||||
Some(symbol::intern_and_get_ident(&format!("={}", ch.as_str())))
|
||||
}
|
||||
_ => {
|
||||
cx.span_err(span, "output operand constraint lacks '=' or '+'");
|
||||
@ -242,7 +242,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
|
||||
let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
|
||||
call_site: sp,
|
||||
callee: codemap::NameAndSpan {
|
||||
format: codemap::MacroBang(intern("asm")),
|
||||
format: codemap::MacroBang(Symbol::intern("asm")),
|
||||
span: None,
|
||||
allow_internal_unstable: false,
|
||||
},
|
||||
@ -251,7 +251,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
|
||||
MacEager::expr(P(ast::Expr {
|
||||
id: ast::DUMMY_NODE_ID,
|
||||
node: ast::ExprKind::InlineAsm(P(ast::InlineAsm {
|
||||
asm: token::intern_and_get_ident(&asm),
|
||||
asm: symbol::intern_and_get_ident(&asm),
|
||||
asm_str_style: asm_str_style.unwrap(),
|
||||
outputs: outputs,
|
||||
inputs: inputs,
|
||||
|
@ -11,7 +11,7 @@
|
||||
use syntax::ast;
|
||||
use syntax::ext::base;
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::intern_and_get_ident;
|
||||
use syntax_pos;
|
||||
use syntax::tokenstream;
|
||||
|
||||
@ -57,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
|
||||
}
|
||||
}
|
||||
}
|
||||
base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&accumulator[..])))
|
||||
base::MacEager::expr(cx.expr_str(sp, intern_and_get_ident(&accumulator[..])))
|
||||
}
|
||||
|
@ -13,7 +13,6 @@
|
||||
use syntax::ext::base;
|
||||
use syntax::feature_gate;
|
||||
use syntax::parse::token;
|
||||
use syntax::parse::token::str_to_ident;
|
||||
use syntax::ptr::P;
|
||||
use syntax_pos::Span;
|
||||
use syntax::tokenstream::TokenTree;
|
||||
@ -51,7 +50,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
|
||||
}
|
||||
}
|
||||
}
|
||||
let res = str_to_ident(&res_str);
|
||||
let res = ast::Ident::from_str(&res_str);
|
||||
|
||||
struct Result {
|
||||
ident: ast::Ident,
|
||||
|
@ -15,8 +15,8 @@
|
||||
use syntax::attr;
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_clone(cx: &mut ExtCtxt,
|
||||
@ -74,7 +74,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt,
|
||||
_ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item"),
|
||||
}
|
||||
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let attrs = vec![cx.attribute(span, inline)];
|
||||
let trait_def = TraitDef {
|
||||
span: span,
|
||||
|
@ -14,8 +14,8 @@
|
||||
use syntax::ast::{self, Expr, MetaItem};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_eq(cx: &mut ExtCtxt,
|
||||
@ -23,9 +23,9 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt,
|
||||
mitem: &MetaItem,
|
||||
item: &Annotatable,
|
||||
push: &mut FnMut(Annotatable)) {
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let hidden = cx.meta_list_item_word(span, token::intern("hidden"));
|
||||
let doc = cx.meta_list(span, token::intern("doc"), vec![hidden]);
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let hidden = cx.meta_list_item_word(span, Symbol::intern("hidden"));
|
||||
let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]);
|
||||
let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)];
|
||||
let trait_def = TraitDef {
|
||||
span: span,
|
||||
|
@ -14,8 +14,8 @@
|
||||
use syntax::ast::{self, Expr, MetaItem};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_ord(cx: &mut ExtCtxt,
|
||||
@ -23,7 +23,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt,
|
||||
mitem: &MetaItem,
|
||||
item: &Annotatable,
|
||||
push: &mut FnMut(Annotatable)) {
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let attrs = vec![cx.attribute(span, inline)];
|
||||
let trait_def = TraitDef {
|
||||
span: span,
|
||||
|
@ -14,8 +14,8 @@
|
||||
use syntax::ast::{BinOpKind, Expr, MetaItem};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt,
|
||||
@ -64,7 +64,7 @@ fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {
|
||||
|
||||
macro_rules! md {
|
||||
($name:expr, $f:ident) => { {
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let attrs = vec![cx.attribute(span, inline)];
|
||||
MethodDef {
|
||||
name: $name,
|
||||
|
@ -16,8 +16,8 @@
|
||||
use syntax::ast::{self, BinOpKind, Expr, MetaItem};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
|
||||
@ -27,7 +27,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt,
|
||||
push: &mut FnMut(Annotatable)) {
|
||||
macro_rules! md {
|
||||
($name:expr, $op:expr, $equal:expr) => { {
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let attrs = vec![cx.attribute(span, inline)];
|
||||
MethodDef {
|
||||
name: $name,
|
||||
@ -51,7 +51,7 @@ macro_rules! md {
|
||||
vec![Box::new(ordering_ty)],
|
||||
true));
|
||||
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let attrs = vec![cx.attribute(span, inline)];
|
||||
|
||||
let partial_cmp_def = MethodDef {
|
||||
|
@ -11,11 +11,10 @@
|
||||
use deriving::generic::*;
|
||||
use deriving::generic::ty::*;
|
||||
|
||||
use syntax::ast;
|
||||
use syntax::ast::{self, Ident};
|
||||
use syntax::ast::{Expr, MetaItem};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax_pos::{DUMMY_SP, Span};
|
||||
|
||||
@ -71,7 +70,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
|
||||
let span = Span { expn_id: cx.backtrace(), ..span };
|
||||
let name = cx.expr_lit(span,
|
||||
ast::LitKind::Str(ident.name.as_str(), ast::StrStyle::Cooked));
|
||||
let builder = token::str_to_ident("builder");
|
||||
let builder = Ident::from_str("builder");
|
||||
let builder_expr = cx.expr_ident(span, builder.clone());
|
||||
|
||||
let fmt = substr.nonself_args[0].clone();
|
||||
@ -83,7 +82,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
|
||||
if !is_struct {
|
||||
// tuple struct/"normal" variant
|
||||
let expr =
|
||||
cx.expr_method_call(span, fmt, token::str_to_ident("debug_tuple"), vec![name]);
|
||||
cx.expr_method_call(span, fmt, Ident::from_str("debug_tuple"), vec![name]);
|
||||
stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr));
|
||||
|
||||
for field in fields {
|
||||
@ -93,7 +92,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
|
||||
|
||||
let expr = cx.expr_method_call(span,
|
||||
builder_expr.clone(),
|
||||
token::str_to_ident("field"),
|
||||
Ident::from_str("field"),
|
||||
vec![field]);
|
||||
|
||||
// Use `let _ = expr;` to avoid triggering the
|
||||
@ -103,7 +102,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
|
||||
} else {
|
||||
// normal struct/struct variant
|
||||
let expr =
|
||||
cx.expr_method_call(span, fmt, token::str_to_ident("debug_struct"), vec![name]);
|
||||
cx.expr_method_call(span, fmt, Ident::from_str("debug_struct"), vec![name]);
|
||||
stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr));
|
||||
|
||||
for field in fields {
|
||||
@ -116,7 +115,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
|
||||
let field = cx.expr_addr_of(field.span, field);
|
||||
let expr = cx.expr_method_call(span,
|
||||
builder_expr.clone(),
|
||||
token::str_to_ident("field"),
|
||||
Ident::from_str("field"),
|
||||
vec![name, field]);
|
||||
stmts.push(stmt_let_undescore(cx, span, expr));
|
||||
}
|
||||
@ -126,7 +125,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<E
|
||||
_ => unreachable!(),
|
||||
};
|
||||
|
||||
let expr = cx.expr_method_call(span, builder_expr, token::str_to_ident("finish"), vec![]);
|
||||
let expr = cx.expr_method_call(span, builder_expr, Ident::from_str("finish"), vec![]);
|
||||
|
||||
stmts.push(cx.stmt_expr(expr));
|
||||
let block = cx.block(span, stmts);
|
||||
|
@ -18,9 +18,9 @@
|
||||
use syntax::ast::{Expr, MetaItem, Mutability};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token::InternedString;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::InternedString;
|
||||
use syntax::symbol::intern_and_get_ident;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt,
|
||||
@ -202,10 +202,7 @@ fn decode_static_fields<F>(cx: &mut ExtCtxt,
|
||||
let fields = fields.iter()
|
||||
.enumerate()
|
||||
.map(|(i, &span)| {
|
||||
getarg(cx,
|
||||
span,
|
||||
token::intern_and_get_ident(&format!("_field{}", i)),
|
||||
i)
|
||||
getarg(cx, span, intern_and_get_ident(&format!("_field{}", i)), i)
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -14,8 +14,8 @@
|
||||
use syntax::ast::{Expr, MetaItem};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_default(cx: &mut ExtCtxt,
|
||||
@ -23,7 +23,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt,
|
||||
mitem: &MetaItem,
|
||||
item: &Annotatable,
|
||||
push: &mut FnMut(Annotatable)) {
|
||||
let inline = cx.meta_word(span, token::intern("inline"));
|
||||
let inline = cx.meta_word(span, Symbol::intern("inline"));
|
||||
let attrs = vec![cx.attribute(span, inline)];
|
||||
let trait_def = TraitDef {
|
||||
span: span,
|
||||
|
@ -95,8 +95,8 @@
|
||||
use syntax::ast::{Expr, ExprKind, MetaItem, Mutability};
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::intern_and_get_ident;
|
||||
use syntax_pos::Span;
|
||||
|
||||
pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt,
|
||||
@ -193,7 +193,7 @@ fn encodable_substructure(cx: &mut ExtCtxt,
|
||||
for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() {
|
||||
let name = match name {
|
||||
Some(id) => id.name.as_str(),
|
||||
None => token::intern_and_get_ident(&format!("_field{}", i)),
|
||||
None => intern_and_get_ident(&format!("_field{}", i)),
|
||||
};
|
||||
let self_ref = cx.expr_addr_of(span, self_.clone());
|
||||
let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
|
||||
|
@ -198,8 +198,8 @@
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::codemap::{self, dummy_spanned, respan};
|
||||
use syntax::util::move_map::MoveMap;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{Symbol, keywords};
|
||||
use syntax_pos::{DUMMY_SP, Span};
|
||||
use errors::Handler;
|
||||
|
||||
@ -639,13 +639,13 @@ fn create_derived_impl(&self,
|
||||
|
||||
let attr = cx.attribute(self.span,
|
||||
cx.meta_word(self.span,
|
||||
token::intern("automatically_derived")));
|
||||
Symbol::intern("automatically_derived")));
|
||||
// Just mark it now since we know that it'll end up used downstream
|
||||
attr::mark_used(&attr);
|
||||
let opt_trait_ref = Some(trait_ref);
|
||||
let unused_qual = {
|
||||
let word = cx.meta_list_item_word(self.span, token::intern("unused_qualifications"));
|
||||
cx.attribute(self.span, cx.meta_list(self.span, token::intern("allow"), vec![word]))
|
||||
let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications"));
|
||||
cx.attribute(self.span, cx.meta_list(self.span, Symbol::intern("allow"), vec![word]))
|
||||
};
|
||||
|
||||
let mut a = vec![attr, unused_qual];
|
||||
|
@ -16,8 +16,8 @@
|
||||
use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension};
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::feature_gate::{self, emit_feature_err};
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::Symbol;
|
||||
use syntax_pos::Span;
|
||||
|
||||
macro_rules! pathvec {
|
||||
@ -80,7 +80,7 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span {
|
||||
expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {
|
||||
call_site: span,
|
||||
callee: codemap::NameAndSpan {
|
||||
format: codemap::MacroAttribute(token::intern(attr_name)),
|
||||
format: codemap::MacroAttribute(Symbol::intern(attr_name)),
|
||||
span: Some(span),
|
||||
allow_internal_unstable: true,
|
||||
},
|
||||
@ -105,7 +105,7 @@ pub fn expand_derive(cx: &mut ExtCtxt,
|
||||
}
|
||||
};
|
||||
|
||||
let derive = token::intern("derive");
|
||||
let derive = Symbol::intern("derive");
|
||||
let mut derive_attrs = Vec::new();
|
||||
item = item.map_attrs(|attrs| {
|
||||
let partition = attrs.into_iter().partition(|attr| attr.name() == derive);
|
||||
@ -176,7 +176,7 @@ pub fn expand_derive(cx: &mut ExtCtxt,
|
||||
feature_gate::EXPLAIN_CUSTOM_DERIVE);
|
||||
} else {
|
||||
cx.span_warn(titem.span, feature_gate::EXPLAIN_DEPR_CUSTOM_DERIVE);
|
||||
let name = token::intern(&format!("derive_{}", tname));
|
||||
let name = Symbol::intern(&format!("derive_{}", tname));
|
||||
let mitem = cx.meta_word(titem.span, name);
|
||||
new_attributes.push(cx.attribute(mitem.span, mitem));
|
||||
}
|
||||
@ -251,10 +251,10 @@ pub fn expand_derive(cx: &mut ExtCtxt,
|
||||
|
||||
// RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted)
|
||||
// `#[structural_match]` attribute.
|
||||
let (partial_eq, eq) = (token::intern("PartialEq"), token::intern("Eq"));
|
||||
let (partial_eq, eq) = (Symbol::intern("PartialEq"), Symbol::intern("Eq"));
|
||||
if traits.iter().any(|t| t.name() == Some(partial_eq)) &&
|
||||
traits.iter().any(|t| t.name() == Some(eq)) {
|
||||
let structural_match = token::intern("structural_match");
|
||||
let structural_match = Symbol::intern("structural_match");
|
||||
let span = allow_unstable(cx, span, "derive(PartialEq, Eq)");
|
||||
let meta = cx.meta_word(span, structural_match);
|
||||
item = item.map(|mut i| {
|
||||
@ -267,10 +267,10 @@ pub fn expand_derive(cx: &mut ExtCtxt,
|
||||
// the same as the copy implementation.
|
||||
//
|
||||
// Add a marker attribute here picked up during #[derive(Clone)]
|
||||
let (copy, clone) = (token::intern("Copy"), token::intern("Clone"));
|
||||
let (copy, clone) = (Symbol::intern("Copy"), Symbol::intern("Clone"));
|
||||
if traits.iter().any(|t| t.name() == Some(clone)) &&
|
||||
traits.iter().any(|t| t.name() == Some(copy)) {
|
||||
let marker = token::intern("rustc_copy_clone_marker");
|
||||
let marker = Symbol::intern("rustc_copy_clone_marker");
|
||||
let span = allow_unstable(cx, span, "derive(Copy, Clone)");
|
||||
let meta = cx.meta_word(span, marker);
|
||||
item = item.map(|mut i| {
|
||||
@ -282,14 +282,14 @@ pub fn expand_derive(cx: &mut ExtCtxt,
|
||||
let mut items = Vec::new();
|
||||
for titem in traits.iter() {
|
||||
let tname = titem.word().unwrap().name();
|
||||
let name = token::intern(&format!("derive({})", tname));
|
||||
let name = Symbol::intern(&format!("derive({})", tname));
|
||||
let mitem = cx.meta_word(titem.span, name);
|
||||
|
||||
let span = Span {
|
||||
expn_id: cx.codemap().record_expansion(codemap::ExpnInfo {
|
||||
call_site: titem.span,
|
||||
callee: codemap::NameAndSpan {
|
||||
format: codemap::MacroAttribute(token::intern(&format!("derive({})", tname))),
|
||||
format: codemap::MacroAttribute(Symbol::intern(&format!("derive({})", tname))),
|
||||
span: Some(titem.span),
|
||||
allow_internal_unstable: true,
|
||||
},
|
||||
@ -408,7 +408,7 @@ fn call_intrinsic(cx: &ExtCtxt,
|
||||
span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo {
|
||||
call_site: span,
|
||||
callee: codemap::NameAndSpan {
|
||||
format: codemap::MacroAttribute(token::intern("derive")),
|
||||
format: codemap::MacroAttribute(Symbol::intern("derive")),
|
||||
span: Some(span),
|
||||
allow_internal_unstable: true,
|
||||
},
|
||||
|
@ -17,7 +17,7 @@
|
||||
use syntax::ext::base::*;
|
||||
use syntax::ext::base;
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token;
|
||||
use syntax::symbol::intern_and_get_ident;
|
||||
use syntax_pos::Span;
|
||||
use syntax::tokenstream;
|
||||
|
||||
@ -49,7 +49,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
|
||||
Ok(s) => {
|
||||
cx.expr_call_global(sp,
|
||||
cx.std_path(&["option", "Option", "Some"]),
|
||||
vec![cx.expr_str(sp, token::intern_and_get_ident(&s[..]))])
|
||||
vec![cx.expr_str(sp, intern_and_get_ident(&s[..]))])
|
||||
}
|
||||
};
|
||||
MacEager::expr(e)
|
||||
@ -73,7 +73,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
|
||||
Some((v, _style)) => v,
|
||||
};
|
||||
let msg = match exprs.next() {
|
||||
None => token::intern_and_get_ident(&format!("environment variable `{}` not defined", var)),
|
||||
None => intern_and_get_ident(&format!("environment variable `{}` not defined", var)),
|
||||
Some(second) => {
|
||||
match expr_to_string(cx, second, "expected string literal") {
|
||||
None => return DummyResult::expr(sp),
|
||||
@ -92,7 +92,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
|
||||
cx.span_err(sp, &msg);
|
||||
cx.expr_usize(sp, 0)
|
||||
}
|
||||
Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s)),
|
||||
Ok(s) => cx.expr_str(sp, intern_and_get_ident(&s)),
|
||||
};
|
||||
MacEager::expr(e)
|
||||
}
|
||||
|
@ -17,8 +17,9 @@
|
||||
use syntax::ext::base::*;
|
||||
use syntax::ext::base;
|
||||
use syntax::ext::build::AstBuilder;
|
||||
use syntax::parse::token::{self, keywords};
|
||||
use syntax::parse::token;
|
||||
use syntax::ptr::P;
|
||||
use syntax::symbol::{self, keywords};
|
||||
use syntax_pos::{Span, DUMMY_SP};
|
||||
use syntax::tokenstream;
|
||||
|
||||
@ -369,7 +370,7 @@ fn trans_count(&self, c: parse::Count) -> P<ast::Expr> {
|
||||
/// Translate the accumulated string literals to a literal expression
|
||||
fn trans_literal_string(&mut self) -> P<ast::Expr> {
|
||||
let sp = self.fmtsp;
|
||||
let s = token::intern_and_get_ident(&self.literal);
|
||||
let s = symbol::intern_and_get_ident(&self.literal);
|
||||
self.literal.clear();
|
||||
self.ecx.expr_str(sp, s)
|
||||
}
|
||||
|
@ -53,7 +53,7 @@
|
||||
use syntax::ast;
|
||||
use syntax::ext::base::{MacroExpanderFn, NormalTT, IdentTT, MultiModifier, NamedSyntaxExtension};
|
||||
use syntax::ext::tt::macro_rules::MacroRulesExpander;
|
||||
use syntax::parse::token::intern;
|
||||
use syntax::symbol::Symbol;
|
||||
|
||||
pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
|
||||
user_exts: Vec<NamedSyntaxExtension>,
|
||||
@ -62,11 +62,11 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver,
|
||||
resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext));
|
||||
};
|
||||
|
||||
register(intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false));
|
||||
register(Symbol::intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false));
|
||||
|
||||
macro_rules! register {
|
||||
($( $name:ident: $f:expr, )*) => { $(
|
||||
register(intern(stringify!($name)),
|
||||
register(Symbol::intern(stringify!($name)),
|
||||
NormalTT(Box::new($f as MacroExpanderFn), None, false));
|
||||
)* }
|
||||
}
|
||||
@ -112,9 +112,10 @@ macro_rules! register {
|
||||
}
|
||||
|
||||
// format_args uses `unstable` things internally.
|
||||
register(intern("format_args"), NormalTT(Box::new(format::expand_format_args), None, true));
|
||||
register(Symbol::intern("format_args"),
|
||||
NormalTT(Box::new(format::expand_format_args), None, true));
|
||||
|
||||
register(intern("derive"), MultiModifier(Box::new(deriving::expand_derive)));
|
||||
register(Symbol::intern("derive"), MultiModifier(Box::new(deriving::expand_derive)));
|
||||
|
||||
for (name, ext) in user_exts {
|
||||
register(name, ext);
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user