Replace the get method by the deref one on InternedString
parent 966e6c0c37
commit d58c0a7597
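The pattern behind every hunk below: InternedString keeps its Deref implementation (visible as context in the hunk that deletes the deprecated get() accessor), so call sites move from .get() to .deref(). A minimal sketch of why that is enough, using a simplified stand-in type rather than the real libsyntax definitions (the Rc<String> field and the literal in main are assumptions for illustration only):

use std::ops::Deref;
use std::rc::Rc;

// Simplified stand-in for libsyntax's InternedString; only the Deref
// behaviour that the commit relies on is modelled here.
struct InternedString {
    string: Rc<String>,
}

impl Deref for InternedString {
    type Target = str;
    fn deref(&self) -> &str {
        // &Rc<String> coerces to &str through the chained Deref impls.
        &self.string
    }
}

fn main() {
    let s = InternedString { string: Rc::new("doc".to_string()) };
    // Call sites that used s.get() can call s.deref() explicitly, as the
    // diff does, or rely on &*s / deref coercion instead.
    assert_eq!(s.deref(), "doc");
    assert_eq!(&*s, "doc");
}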
@@ -68,6 +68,7 @@ use std::fmt::Show;
 use std::num::Int;
 use std::rc::Rc;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
+use std::ops::Deref;

 // FIXME #6993: in librustc, uses of "ident" should be replaced
 // by just "Name".
@@ -112,13 +113,13 @@ impl fmt::Display for Ident {
 impl fmt::Debug for Name {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 let Name(nm) = *self;
-write!(f, "{:?}({})", token::get_name(*self).get(), nm)
+write!(f, "{:?}({})", token::get_name(*self).deref(), nm)
 }
 }

 impl fmt::Display for Name {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-fmt::Display::fmt(token::get_name(*self).get(), f)
+fmt::Display::fmt(token::get_name(*self).deref(), f)
 }
 }

@@ -174,7 +175,7 @@ impl Name {
 pub fn as_str<'a>(&'a self) -> &'a str {
 unsafe {
 // FIXME #12938: can't use copy_lifetime since &str isn't a &T
-::std::mem::transmute::<&str,&str>(token::get_name(*self).get())
+::std::mem::transmute::<&str,&str>(token::get_name(*self).deref())
 }
 }

@@ -193,7 +194,7 @@ pub type Mrk = u32;

 impl Encodable for Ident {
 fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-s.emit_str(token::get_ident(*self).get())
+s.emit_str(token::get_ident(*self).deref())
 }
 }

@@ -23,11 +23,12 @@ use visit;

 use std::cmp;
 use std::u32;
+use std::ops::Deref;

 pub fn path_name_i(idents: &[Ident]) -> String {
 // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
 idents.iter().map(|i| {
-token::get_ident(*i).get().to_string()
+token::get_ident(*i).deref().to_string()
 }).collect::<Vec<String>>().connect("::")
 }

@@ -29,6 +29,7 @@ use std::cell::{RefCell, Cell};
 use std::collections::BitvSet;
 use std::collections::HashSet;
 use std::fmt;
+use std::ops::Deref;

 thread_local! { static USED_ATTRS: RefCell<BitvSet> = RefCell::new(BitvSet::new()) }

@@ -44,7 +45,7 @@ pub fn is_used(attr: &Attribute) -> bool {

 pub trait AttrMetaMethods {
 fn check_name(&self, name: &str) -> bool {
-name == self.name().get()
+name == self.name().deref()
 }

 /// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`,
@@ -62,7 +63,7 @@ pub trait AttrMetaMethods {

 impl AttrMetaMethods for Attribute {
 fn check_name(&self, name: &str) -> bool {
-let matches = name == self.name().get();
+let matches = name == self.name().deref();
 if matches {
 mark_used(self);
 }
@@ -142,7 +143,7 @@ impl AttributeMethods for Attribute {
 let meta = mk_name_value_item_str(
 InternedString::new("doc"),
 token::intern_and_get_ident(&strip_doc_comment_decoration(
-comment.get())[]));
+comment.deref())[]));
 if self.node.style == ast::AttrOuter {
 f(&mk_attr_outer(self.node.id, meta))
 } else {
@@ -209,7 +210,7 @@ pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute {
 pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos,
 hi: BytePos)
 -> Attribute {
-let style = doc_comment_style(text.get());
+let style = doc_comment_style(text.deref());
 let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
 let attr = Attribute_ {
 id: id,
@@ -326,11 +327,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool {
 /// Tests if a cfg-pattern matches the cfg set
 pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool {
 match cfg.node {
-ast::MetaList(ref pred, ref mis) if pred.get() == "any" =>
+ast::MetaList(ref pred, ref mis) if pred.deref() == "any" =>
 mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
-ast::MetaList(ref pred, ref mis) if pred.get() == "all" =>
+ast::MetaList(ref pred, ref mis) if pred.deref() == "all" =>
 mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
-ast::MetaList(ref pred, ref mis) if pred.get() == "not" => {
+ast::MetaList(ref pred, ref mis) if pred.deref() == "not" => {
 if mis.len() != 1 {
 diagnostic.span_err(cfg.span, "expected 1 cfg-pattern");
 return false;
@@ -382,7 +383,7 @@ fn find_stability_generic<'a,

 'outer: for attr in attrs {
 let tag = attr.name();
-let tag = tag.get();
+let tag = tag.deref();
 if tag != "deprecated" && tag != "unstable" && tag != "stable" {
 continue // not a stability level
 }
@@ -394,8 +395,8 @@ fn find_stability_generic<'a,
 let mut feature = None;
 let mut since = None;
 let mut reason = None;
-for meta in metas {
-if meta.name().get() == "feature" {
+for meta in metas.iter() {
+if meta.name() == "feature" {
 match meta.value_str() {
 Some(v) => feature = Some(v),
 None => {
@@ -404,7 +405,7 @@ fn find_stability_generic<'a,
 }
 }
 }
-if meta.name().get() == "since" {
+if meta.name().deref() == "since" {
 match meta.value_str() {
 Some(v) => since = Some(v),
 None => {
@@ -413,7 +414,7 @@ fn find_stability_generic<'a,
 }
 }
 }
-if meta.name().get() == "reason" {
+if meta.name().deref() == "reason" {
 match meta.value_str() {
 Some(v) => reason = Some(v),
 None => {
@@ -521,11 +522,11 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt
 for item in items {
 match item.node {
 ast::MetaWord(ref word) => {
-let hint = match word.get() {
+let hint = match word.deref() {
 // Can't use "extern" because it's not a lexical identifier.
 "C" => Some(ReprExtern),
 "packed" => Some(ReprPacked),
-_ => match int_type_of_word(word.get()) {
+_ => match int_type_of_word(word.deref()) {
 Some(ity) => Some(ReprInt(item.span, ity)),
 None => {
 // Not a word we recognize
@@ -10,6 +10,8 @@

 use std::cell::RefCell;
 use std::collections::BTreeMap;
+use std::ops::Deref;
+
 use ast;
 use ast::{Ident, Name, TokenTree};
 use codemap::Span;
@@ -57,7 +59,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
 match diagnostics.insert(code.name, span) {
 Some(previous_span) => {
 ecx.span_warn(span, &format!(
-"diagnostic code {} already used", token::get_ident(code).get()
+"diagnostic code {} already used", token::get_ident(code).deref()
 )[]);
 ecx.span_note(previous_span, "previous invocation");
 },
@@ -68,7 +70,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
 with_registered_diagnostics(|diagnostics| {
 if !diagnostics.contains_key(&code.name) {
 ecx.span_err(span, &format!(
-"used diagnostic code {} not registered", token::get_ident(code).get()
+"used diagnostic code {} not registered", token::get_ident(code).deref()
 )[]);
 }
 });
@@ -93,12 +95,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
 with_registered_diagnostics(|diagnostics| {
 if diagnostics.insert(code.name, description).is_some() {
 ecx.span_err(span, &format!(
-"diagnostic code {} already registered", token::get_ident(*code).get()
+"diagnostic code {} already registered", token::get_ident(*code).deref()
 )[]);
 }
 });
 let sym = Ident::new(token::gensym(&(
-"__register_diagnostic_".to_string() + token::get_ident(*code).get()
+"__register_diagnostic_".to_string() + token::get_ident(*code).deref()
 )[]));
 MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
 }
@@ -22,6 +22,8 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;

+use std::ops::Deref;
+
 enum State {
 Asm,
 Outputs,
@@ -102,7 +104,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 // It's the opposite of '=&' which means that the memory
 // cannot be shared with any other operand (usually when
 // a register is clobbered early.)
-let output = match constraint.get().slice_shift_char() {
+let output = match constraint.deref().slice_shift_char() {
 Some(('=', _)) => None,
 Some(('+', operand)) => {
 Some(token::intern_and_get_ident(&format!(
@@ -129,9 +131,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])

 let (constraint, _str_style) = p.parse_str();

-if constraint.get().starts_with("=") {
+if constraint.deref().starts_with("=") {
 cx.span_err(p.last_span, "input operand constraint contains '='");
-} else if constraint.get().starts_with("+") {
+} else if constraint.deref().starts_with("+") {
 cx.span_err(p.last_span, "input operand constraint contains '+'");
 }

@@ -213,7 +215,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 MacExpr::new(P(ast::Expr {
 id: ast::DUMMY_NODE_ID,
 node: ast::ExprInlineAsm(ast::InlineAsm {
-asm: token::intern_and_get_ident(asm.get()),
+asm: token::intern_and_get_ident(asm.deref()),
 asm_str_style: asm_str_style.unwrap(),
 outputs: outputs,
 inputs: inputs,
@@ -28,6 +28,7 @@ use fold::Folder;

 use std::collections::HashMap;
 use std::rc::Rc;
+use std::ops::Deref;

 pub trait ItemDecorator {
 fn expand(&self,
@@ -790,7 +791,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
 cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
 }
 expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
-s.get().to_string()
+s.deref().to_string()
 })
 }

@@ -21,6 +21,7 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;

+use std::ops::Deref;

 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
@@ -576,7 +577,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
 let field_name = token::get_ident(ident);
 let field_span = Span {
-lo: sp.lo - Pos::from_usize(field_name.get().len()),
+lo: sp.lo - Pos::from_usize(field_name.deref().len()),
 hi: sp.hi,
 expn_id: sp.expn_id,
 };
@@ -15,6 +15,7 @@ use ext::build::AstBuilder;
 use parse::token;

 use std::string::String;
+use std::ops::Deref;

 pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
 sp: codemap::Span,
@@ -32,7 +33,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
 ast::LitStr(ref s, _) |
 ast::LitFloat(ref s, _) |
 ast::LitFloatUnsuffixed(ref s) => {
-accumulator.push_str(s.get());
+accumulator.push_str(s.deref());
 }
 ast::LitChar(c) => {
 accumulator.push(c);
@@ -16,6 +16,8 @@ use parse::token;
 use parse::token::{str_to_ident};
 use ptr::P;

+use std::ops::Deref;
+
 pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 -> Box<base::MacResult+'cx> {
 let mut res_str = String::new();
@@ -31,7 +33,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
 } else {
 match *e {
 ast::TtToken(_, token::Ident(ident, _)) => {
-res_str.push_str(token::get_ident(ident).get())
+res_str.push_str(token::get_ident(ident).deref())
 },
 _ => {
 cx.span_err(sp, "concat_idents! requires ident args.");
@@ -15,6 +15,8 @@ use ext::deriving::generic::*;
 use ext::deriving::generic::ty::*;
 use ptr::P;

+use std::ops::Deref;
+
 pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
 span: Span,
 mitem: &MetaItem,
@@ -24,7 +26,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
 {
 let name = match mitem.node {
 MetaWord(ref tname) => {
-match tname.get() {
+match tname.deref() {
 "Copy" => "Copy",
 "Send" | "Sync" => {
 return cx.span_err(span,
@@ -189,6 +189,7 @@ use self::StructType::*;

 use std::cell::RefCell;
 use std::vec;
+use std::ops::Deref;

 use abi::Abi;
 use abi;
@@ -363,7 +364,7 @@ impl<'a> TraitDef<'a> {
 // generated implementations are linted
 let mut attrs = newitem.attrs.clone();
 attrs.extend(item.attrs.iter().filter(|a| {
-match a.name().get() {
+match a.name().deref() {
 "allow" | "warn" | "deny" | "forbid" => true,
 _ => false,
 }
@@ -18,6 +18,8 @@ use ext::base::ExtCtxt;
 use codemap::Span;
 use ptr::P;

+use std::ops::Deref;
+
 pub mod bounds;
 pub mod clone;
 pub mod encodable;
@@ -74,7 +76,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
 |i| push(i)))
 }

-match tname.get() {
+match tname.deref() {
 "Clone" => expand!(clone::expand_deriving_clone),

 "Hash" => expand!(hash::expand_deriving_hash),
@@ -20,6 +20,7 @@ use parse::token;
 use ptr::P;

 use std::collections::HashMap;
+use std::ops::Deref;

 pub fn expand_deriving_show<F>(cx: &mut ExtCtxt,
 span: Span,
@@ -72,7 +73,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
 }
 };

-let mut format_string = String::from_str(token::get_ident(name).get());
+let mut format_string = String::from_str(token::get_ident(name).deref());
 // the internal fields we're actually formatting
 let mut exprs = Vec::new();

@@ -107,7 +108,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,

 let name = token::get_ident(field.name.unwrap());
 format_string.push_str(" ");
-format_string.push_str(name.get());
+format_string.push_str(name.deref());
 format_string.push_str(": {:?}");

 exprs.push(field.self_.clone());
@@ -22,6 +22,8 @@ use ext::build::AstBuilder;
 use parse::token;

 use std::env;
 use std::os;
+use std::ops::Deref;

 pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 -> Box<base::MacResult+'cx> {
@@ -101,9 +103,9 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 }
 }

-let e = match env::var_string(var.get()) {
-Err(..) => {
-cx.span_err(sp, msg.get());
+let e = match os::getenv(var.deref()) {
+None => {
+cx.span_err(sp, msg.deref());
 cx.expr_usize(sp, 0)
 }
 Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))
@@ -32,6 +32,8 @@ use util::small_vector::SmallVector;
 use visit;
 use visit::Visitor;

+use std::ops::Deref;
+
 pub fn expand_type(t: P<ast::Ty>,
 fld: &mut MacroExpander,
 impl_ty: Option<P<ast::Ty>>)
@@ -375,7 +377,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.span_err(
 pth.span,
 &format!("macro undefined: '{}!'",
-extnamestr.get())[]);
+extnamestr.deref())[]);

 // let compilation continue
 None
@@ -385,7 +387,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.bt_push(ExpnInfo {
 call_site: span,
 callee: NameAndSpan {
-name: extnamestr.get().to_string(),
+name: extnamestr.deref().to_string(),
 format: MacroBang,
 span: exp_span,
 },
@@ -411,7 +413,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.span_err(
 pth.span,
 &format!("non-expression macro in expression position: {}",
-&extnamestr.get()[]
+&extnamestr.deref()[]
 )[]);
 return None;
 }
@@ -422,7 +424,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.span_err(
 pth.span,
 &format!("'{}' is not a tt-style macro",
-extnamestr.get())[]);
+extnamestr.deref())[]);
 None
 }
 }
@@ -506,14 +508,14 @@ fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander)
 for attr in &modifiers {
 let mname = attr.name();

-match fld.cx.syntax_env.find(&intern(mname.get())) {
+match fld.cx.syntax_env.find(&intern(mname.deref())) {
 Some(rc) => match *rc {
 Modifier(ref mac) => {
 attr::mark_used(attr);
 fld.cx.bt_push(ExpnInfo {
 call_site: attr.span,
 callee: NameAndSpan {
-name: mname.get().to_string(),
+name: mname.deref().to_string(),
 format: MacroAttribute,
 span: None,
 }
@@ -613,7 +615,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 fld.cx.bt_push(ExpnInfo {
 call_site: it.span,
 callee: NameAndSpan {
-name: extnamestr.get().to_string(),
+name: extnamestr.deref().to_string(),
 format: MacroBang,
 span: span
 }
@@ -626,13 +628,13 @@ pub fn expand_item_mac(it: P<ast::Item>,
 if it.ident.name == parse::token::special_idents::invalid.name {
 fld.cx.span_err(path_span,
 &format!("macro {}! expects an ident argument",
-extnamestr.get())[]);
+extnamestr.deref())[]);
 return SmallVector::zero();
 }
 fld.cx.bt_push(ExpnInfo {
 call_site: it.span,
 callee: NameAndSpan {
-name: extnamestr.get().to_string(),
+name: extnamestr.deref().to_string(),
 format: MacroBang,
 span: span
 }
@@ -651,7 +653,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 fld.cx.bt_push(ExpnInfo {
 call_site: it.span,
 callee: NameAndSpan {
-name: extnamestr.get().to_string(),
+name: extnamestr.deref().to_string(),
 format: MacroBang,
 span: None,
 }
@@ -677,7 +679,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 _ => {
 fld.cx.span_err(it.span,
 &format!("{}! is not legal in item position",
-extnamestr.get())[]);
+extnamestr.deref())[]);
 return SmallVector::zero();
 }
 }
@@ -696,7 +698,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 None => {
 fld.cx.span_err(path_span,
 &format!("non-item macro in item position: {}",
-extnamestr.get())[]);
+extnamestr.deref())[]);
 return SmallVector::zero();
 }
 };
@@ -950,7 +952,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 fld.cx.bt_push(ExpnInfo {
 call_site: span,
 callee: NameAndSpan {
-name: extnamestr.get().to_string(),
+name: extnamestr.deref().to_string(),
 format: MacroBang,
 span: tt_span
 }
@@ -968,7 +970,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 pth.span,
 &format!(
 "non-pattern macro in pattern position: {}",
-extnamestr.get()
+extnamestr.deref()
 )[]
 );
 return DummyResult::raw_pat(span);
@@ -981,7 +983,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 _ => {
 fld.cx.span_err(span,
 &format!("{}! is not legal in pattern position",
-extnamestr.get())[]);
+extnamestr.deref())[]);
 return DummyResult::raw_pat(span);
 }
 }
@@ -1065,7 +1067,7 @@ fn expand_annotatable(a: Annotatable,
 for attr in a.attrs() {
 let mname = attr.name();

-match fld.cx.syntax_env.find(&intern(mname.get())) {
+match fld.cx.syntax_env.find(&intern(mname.deref())) {
 Some(rc) => match *rc {
 Decorator(ref dec) => {
 let it = match a {
@@ -1079,7 +1081,7 @@ fn expand_annotatable(a: Annotatable,
 fld.cx.bt_push(ExpnInfo {
 call_site: attr.span,
 callee: NameAndSpan {
-name: mname.get().to_string(),
+name: mname.deref().to_string(),
 format: MacroAttribute,
 span: None
 }
@@ -1180,7 +1182,7 @@ fn modifiers(attrs: &Vec<ast::Attribute>,
 fld: &MacroExpander)
 -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
 attrs.iter().cloned().partition(|attr| {
-match fld.cx.syntax_env.find(&intern(attr.name().get())) {
+match fld.cx.syntax_env.find(&intern(attr.name().deref())) {
 Some(rc) => match *rc {
 Modifier(_) => true,
 _ => false
@@ -1195,7 +1197,7 @@ fn multi_modifiers(attrs: &[ast::Attribute],
 fld: &MacroExpander)
 -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
 attrs.iter().cloned().partition(|attr| {
-match fld.cx.syntax_env.find(&intern(attr.name().get())) {
+match fld.cx.syntax_env.find(&intern(attr.name().deref())) {
 Some(rc) => match *rc {
 MultiModifier(_) => true,
 _ => false
@@ -1220,14 +1222,14 @@ fn expand_item_multi_modifier(mut it: Annotatable,
 for attr in &modifiers {
 let mname = attr.name();

-match fld.cx.syntax_env.find(&intern(mname.get())) {
+match fld.cx.syntax_env.find(&intern(mname.deref())) {
 Some(rc) => match *rc {
 MultiModifier(ref mac) => {
 attr::mark_used(attr);
 fld.cx.bt_push(ExpnInfo {
 call_site: attr.span,
 callee: NameAndSpan {
-name: mname.get().to_string(),
+name: mname.deref().to_string(),
 format: MacroAttribute,
 span: None,
 }
@@ -23,6 +23,7 @@ use ptr::P;

 use std::collections::HashMap;
 use std::iter::repeat;
+use std::ops::Deref;

 #[derive(PartialEq)]
 enum ArgumentType {
@@ -118,7 +119,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 }
 };
 let interned_name = token::get_ident(ident);
-let name = interned_name.get();
+let name = interned_name.deref();
 p.expect(&token::Eq);
 let e = p.parse_expr();
 match names.get(name) {
@@ -672,7 +673,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
 None => return DummyResult::raw_expr(sp)
 };

-let mut parser = parse::Parser::new(fmt.get());
+let mut parser = parse::Parser::new(fmt.deref());
 loop {
 match parser.next() {
 Some(piece) => {
@@ -25,6 +25,8 @@ use ptr::P;
 /// as antiquotes (splices).

 pub mod rt {
+use std::ops::Deref;
+
 use ast;
 use codemap::Spanned;
 use ext::base::ExtCtxt;
@@ -161,7 +163,7 @@ pub mod rt {

 impl ToSource for ast::Ident {
 fn to_source(&self) -> String {
-token::get_ident(*self).get().to_string()
+token::get_ident(*self).deref().to_string()
 }
 }

@@ -22,6 +22,7 @@ use util::small_vector::SmallVector;

 use std::old_io::File;
 use std::rc::Rc;
+use std::ops::Deref;

 // These macros all relate to the file system; they either return
 // the column/row/filename of the expression, or they include
@@ -73,7 +74,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 base::check_zero_tts(cx, sp, tts, "module_path!");
 let string = cx.mod_path()
 .iter()
-.map(|x| token::get_ident(*x).get().to_string())
+.map(|x| token::get_ident(*x).deref().to_string())
 .collect::<Vec<String>>()
 .connect("::");
 base::MacExpr::new(cx.expr_str(
@@ -99,6 +99,7 @@ use std::mem;
 use std::rc::Rc;
 use std::collections::HashMap;
 use std::collections::hash_map::Entry::{Vacant, Occupied};
+use std::ops::Deref;

 // To avoid costly uniqueness checks, we require that `MatchSeq` always has
 // a nonempty body.
@@ -229,7 +230,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
 p_s.span_diagnostic
 .span_fatal(sp,
 &format!("duplicated bind name: {}",
-string.get())[])
+string.deref())[])
 }
 }
 }
@@ -487,8 +488,8 @@ pub fn parse(sess: &ParseSess,
 let name_string = token::get_ident(name);
 let match_cur = ei.match_cur;
 (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
-parse_nt(&mut rust_parser, span, name_string.get()))));
-ei.idx += 1;
+parse_nt(&mut rust_parser, span, name_string.deref()))));
+ei.idx += 1us;
 ei.match_cur += 1;
 }
 _ => panic!()
@@ -36,6 +36,7 @@ use parse::token::{self, InternedString};

 use std::slice;
 use std::ascii::AsciiExt;
+use std::ops::Deref;

 // If you change this list without updating src/doc/reference.md, @cmr will be sad
 // Don't ever remove anything from this list; set them to 'Removed'.
@@ -251,7 +252,7 @@ impl<'a> PostExpansionVisitor<'a> {

 impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
 fn visit_name(&mut self, sp: Span, name: ast::Name) {
-if !token::get_name(name).get().is_ascii() {
+if !token::get_name(name).deref().is_ascii() {
 self.gate_feature("non_ascii_idents", sp,
 "non-ascii idents are not fully supported.");
 }
@@ -378,7 +379,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {

 let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs,
 "link_name") {
-Some(val) => val.get().starts_with("llvm."),
+Some(val) => val.deref().starts_with("llvm."),
 _ => false
 };
 if links_to_llvm {
@@ -84,6 +84,7 @@ use std::mem;
 use std::num::Float;
 use std::rc::Rc;
 use std::slice;
+use std::ops::Deref;

 bitflags! {
 flags Restrictions: u8 {
@@ -5133,7 +5134,7 @@ impl<'a> Parser<'a> {
 outer_attrs, "path") {
 Some(d) => (dir_path.join(d), true),
 None => {
-let mod_name = mod_string.get().to_string();
+let mod_name = mod_string.deref().to_string();
 let default_path_str = format!("{}.rs", mod_name);
 let secondary_path_str = format!("{}/mod.rs", mod_name);
 let default_path = dir_path.join(&default_path_str[]);
@@ -5145,7 +5146,7 @@ impl<'a> Parser<'a> {
 self.span_err(id_sp,
 "cannot declare a new module at this location");
 let this_module = match self.mod_path_stack.last() {
-Some(name) => name.get().to_string(),
+Some(name) => name.deref().to_string(),
 None => self.root_module_name.as_ref().unwrap().clone(),
 };
 self.span_note(id_sp,
@@ -5191,7 +5192,7 @@ impl<'a> Parser<'a> {
 };

 self.eval_src_mod_from_path(file_path, owns_directory,
-mod_string.get().to_string(), id_sp)
+mod_string.deref().to_string(), id_sp)
 }

 fn eval_src_mod_from_path(&mut self,
@@ -625,19 +625,6 @@ impl InternedString {
 string: string,
 }
 }
-
-#[inline]
-#[deprecated = "use as_slice() instead"]
-pub fn get<'a>(&'a self) -> &'a str {
-&self.string[]
-}
 }
-
-impl Str for InternedString {
-#[inline]
-fn as_slice<'a>(&'a self) -> &'a str {
-&self.string[]
-}
-}

 impl Deref for InternedString {
@@ -652,7 +639,7 @@ impl BytesContainer for InternedString {
 // of `BytesContainer`, which is itself a workaround for the lack of
 // DST.
 unsafe {
-let this = self.get();
+let this = self.deref();
 mem::transmute::<&[u8],&[u8]>(this.container_as_bytes())
 }
 }
@@ -30,6 +30,7 @@ use ptr::P;
 use std::{ascii, mem};
 use std::old_io::{self, IoResult};
 use std::iter;
+use std::ops::Deref;

 pub enum AnnNode<'a> {
 NodeIdent(&'a ast::Ident),
@@ -258,7 +259,7 @@ pub fn token_to_string(tok: &Token) -> String {
 }

 /* Name components */
-token::Ident(s, _) => token::get_ident(s).get().to_string(),
+token::Ident(s, _) => token::get_ident(s).deref().to_string(),
 token::Lifetime(s) => format!("{}", token::get_ident(s)),
 token::Underscore => "_".to_string(),

@@ -798,7 +799,7 @@ impl<'a> State<'a> {
 try!(self.head(&visibility_qualified(item.vis,
 "extern crate")[]));
 if let Some((ref p, style)) = *optional_path {
-try!(self.print_string(p.get(), style));
+try!(self.print_string(p.deref(), style));
 try!(space(&mut self.s));
 try!(word(&mut self.s, "as"));
 try!(space(&mut self.s));
@@ -1313,7 +1314,7 @@ impl<'a> State<'a> {
 try!(self.hardbreak_if_not_bol());
 try!(self.maybe_print_comment(attr.span.lo));
 if attr.node.is_sugared_doc {
-word(&mut self.s, attr.value_str().unwrap().get())
+word(&mut self.s, attr.value_str().unwrap().deref())
 } else {
 match attr.node.style {
 ast::AttrInner => try!(word(&mut self.s, "#![")),
@@ -1847,17 +1848,17 @@ impl<'a> State<'a> {
 ast::ExprInlineAsm(ref a) => {
 try!(word(&mut self.s, "asm!"));
 try!(self.popen());
-try!(self.print_string(a.asm.get(), a.asm_str_style));
+try!(self.print_string(a.asm.deref(), a.asm_str_style));
 try!(self.word_space(":"));

 try!(self.commasep(Inconsistent, &a.outputs[],
 |s, &(ref co, ref o, is_rw)| {
-match co.get().slice_shift_char() {
+match co.deref().slice_shift_char() {
 Some(('=', operand)) if is_rw => {
 try!(s.print_string(&format!("+{}", operand)[],
 ast::CookedStr))
 }
-_ => try!(s.print_string(co.get(), ast::CookedStr))
+_ => try!(s.print_string(co.deref(), ast::CookedStr))
 }
 try!(s.popen());
 try!(s.print_expr(&**o));
@@ -1869,7 +1870,7 @@ impl<'a> State<'a> {

 try!(self.commasep(Inconsistent, &a.inputs[],
 |s, &(ref co, ref o)| {
-try!(s.print_string(co.get(), ast::CookedStr));
+try!(s.print_string(co.deref(), ast::CookedStr));
 try!(s.popen());
 try!(s.print_expr(&**o));
 try!(s.pclose());
@@ -1880,7 +1881,7 @@ impl<'a> State<'a> {

 try!(self.commasep(Inconsistent, &a.clobbers[],
 |s, co| {
-try!(s.print_string(co.get(), ast::CookedStr));
+try!(s.print_string(co.deref(), ast::CookedStr));
 Ok(())
 }));

@@ -1954,7 +1955,7 @@ impl<'a> State<'a> {
 let encoded = ident.encode_with_hygiene();
 try!(word(&mut self.s, &encoded[]))
 } else {
-try!(word(&mut self.s, token::get_ident(ident).get()))
+try!(word(&mut self.s, token::get_ident(ident).deref()))
 }
 self.ann.post(self, NodeIdent(&ident))
 }
@@ -1964,7 +1965,7 @@ impl<'a> State<'a> {
 }

 pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
-try!(word(&mut self.s, token::get_name(name).get()));
+try!(word(&mut self.s, token::get_name(name).deref()));
 self.ann.post(self, NodeName(&name))
 }

@@ -2532,15 +2533,15 @@ impl<'a> State<'a> {
 try!(self.ibox(indent_unit));
 match item.node {
 ast::MetaWord(ref name) => {
-try!(word(&mut self.s, name.get()));
+try!(word(&mut self.s, name.deref()));
 }
 ast::MetaNameValue(ref name, ref value) => {
-try!(self.word_space(name.get()));
+try!(self.word_space(name.deref()));
 try!(self.word_space("="));
 try!(self.print_literal(value));
 }
 ast::MetaList(ref name, ref items) => {
-try!(word(&mut self.s, name.get()));
+try!(word(&mut self.s, name.deref()));
 try!(self.popen());
 try!(self.commasep(Consistent,
 &items[],
@@ -2731,7 +2732,7 @@ impl<'a> State<'a> {
 _ => ()
 }
 match lit.node {
-ast::LitStr(ref st, style) => self.print_string(st.get(), style),
+ast::LitStr(ref st, style) => self.print_string(st.deref(), style),
 ast::LitByte(byte) => {
 let mut res = String::from_str("b'");
 ascii::escape_default(byte, |c| res.push(c as char));
@@ -2772,10 +2773,10 @@ impl<'a> State<'a> {
 word(&mut self.s,
 &format!(
 "{}{}",
-f.get(),
+f.deref(),
 &ast_util::float_ty_to_string(t)[])[])
 }
-ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
+ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.deref()),
 ast::LitBool(val) => {
 if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
 }
@@ -37,6 +37,8 @@ use {ast, ast_util};
 use ptr::P;
 use util::small_vector::SmallVector;

+use std::ops::Deref;
+
 enum ShouldFail {
 No,
 Yes(Option<InternedString>),
@@ -512,7 +514,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
 });
 let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
 // building `use <ident> = __test::main`
-let reexport_ident = token::str_to_ident(s.get());
+let reexport_ident = token::str_to_ident(s.deref());

 let use_path =
 nospan(ast::ViewPathSimple(reexport_ident,
@@ -575,7 +577,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {

 fn is_test_crate(krate: &ast::Crate) -> bool {
 match attr::find_crate_name(&krate.attrs[]) {
-Some(ref s) if "test" == &s.get()[] => true,
+Some(ref s) if "test" == &s.deref()[] => true,
 _ => false
 }
 }
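A closing usage note, not part of the commit: once Deref<Target = str> is the only accessor, the explicit .deref() calls above are just one way to reach the &str; a reborrow or deref coercion works as well. A sketch under the same simplified-type assumption, with a hypothetical helper get_name standing in for the interner lookup:

use std::ops::Deref;

struct InternedString(String);

impl Deref for InternedString {
    type Target = str;
    fn deref(&self) -> &str { &self.0 }
}

// Hypothetical stand-in for the token::get_name lookup; the real function
// resolves the name through the interner instead of allocating.
fn get_name(raw: &str) -> InternedString {
    InternedString(raw.to_string())
}

fn takes_str(s: &str) -> usize { s.len() }

fn main() {
    let name = get_name("foo");
    let a = takes_str(name.deref()); // explicit, as in the diff
    let b = takes_str(&*name);       // manual reborrow
    let c = takes_str(&name);        // deref coercion
    assert_eq!((a, b, c), (3, 3, 3));
    assert_eq!(&*name, "foo");
}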