Libsyntax has been updated

parent 12f1f4c546
commit 8b12d3ddf9
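The hunks below repeat one mechanical pattern: places that reached a `&str` through `Deref` (`.deref()` or `.get()` on interned strings) are rewritten to take an explicit slice with the then-current `&foo[]` syntax, and the now-unused `use std::ops::Deref;` imports are dropped. As a rough illustration of the equivalence (not code from this commit), the sketch below uses a plain `String` and the modern `[..]` spelling, since the bare `[]` full-range index from early 2015 no longer compiles:

```rust
use std::ops::Deref;

fn main() {
    let name = String::from("macro_rules");

    // Old style in this diff: reach the &str through Deref.
    let via_deref: &str = name.deref();

    // New style in this diff: slice explicitly. The commit writes `&name[]`;
    // in modern Rust the same full-range slice is spelled `&name[..]`.
    let via_slice: &str = &name[..];

    assert_eq!(via_deref, via_slice);
    assert!(via_slice == "macro_rules");
}
```

The `InternedString` values touched in the diff deref to `str` in the same way, so both forms yield the same `&str`; the commit simply standardizes on the slicing form.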
@@ -249,11 +249,11 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat)
 span_warn!(cx.tcx.sess, p.span, E0170,
 "pattern binding `{}` is named the same as one \
 of the variants of the type `{}`",
-token::get_ident(ident.node).get(), ty_to_string(cx.tcx, pat_ty));
+&token::get_ident(ident.node)[], ty_to_string(cx.tcx, pat_ty));
 span_help!(cx.tcx.sess, p.span,
 "if you meant to match on a variant, \
 consider making the path in the pattern qualified: `{}::{}`",
-ty_to_string(cx.tcx, pat_ty), token::get_ident(ident.node).get());
+ty_to_string(cx.tcx, pat_ty), &token::get_ident(ident.node)[]);
 }
 }
 }
@@ -329,7 +329,7 @@ mod svh_visitor {
 // macro invocations, namely macro_rules definitions,
 // *can* appear as items, even in the expanded crate AST.

-if macro_name(mac).get() == "macro_rules" {
+if &macro_name(mac)[] == "macro_rules" {
 // Pretty-printing definition to a string strips out
 // surface artifacts (currently), such as the span
 // information, yielding a content-based hash.
@@ -68,7 +68,6 @@ use std::fmt::Show;
 use std::num::Int;
 use std::rc::Rc;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
-use std::ops::Deref;

 // FIXME #6993: in librustc, uses of "ident" should be replaced
 // by just "Name".
@@ -113,13 +112,13 @@ impl fmt::Display for Ident {
 impl fmt::Debug for Name {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 let Name(nm) = *self;
-write!(f, "{:?}({})", token::get_name(*self).deref(), nm)
+write!(f, "{:?}({})", token::get_name(*self), nm)
 }
 }

 impl fmt::Display for Name {
 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-fmt::Display::fmt(token::get_name(*self).deref(), f)
+fmt::Display::fmt(&token::get_name(*self)[], f)
 }
 }

@@ -175,7 +174,7 @@ impl Name {
 pub fn as_str<'a>(&'a self) -> &'a str {
 unsafe {
 // FIXME #12938: can't use copy_lifetime since &str isn't a &T
-::std::mem::transmute::<&str,&str>(token::get_name(*self).deref())
+::std::mem::transmute::<&str,&str>(&token::get_name(*self)[])
 }
 }

@@ -194,7 +193,7 @@ pub type Mrk = u32;

 impl Encodable for Ident {
 fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-s.emit_str(token::get_ident(*self).deref())
+s.emit_str(&token::get_ident(*self)[])
 }
 }

@@ -23,12 +23,11 @@ use visit;

 use std::cmp;
 use std::u32;
-use std::ops::Deref;

 pub fn path_name_i(idents: &[Ident]) -> String {
 // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
 idents.iter().map(|i| {
-token::get_ident(*i).deref().to_string()
+token::get_ident(*i).to_string()
 }).collect::<Vec<String>>().connect("::")
 }

@@ -29,7 +29,6 @@ use std::cell::{RefCell, Cell};
 use std::collections::BitvSet;
 use std::collections::HashSet;
 use std::fmt;
-use std::ops::Deref;

 thread_local! { static USED_ATTRS: RefCell<BitvSet> = RefCell::new(BitvSet::new()) }

@@ -45,7 +44,7 @@ pub fn is_used(attr: &Attribute) -> bool {

 pub trait AttrMetaMethods {
 fn check_name(&self, name: &str) -> bool {
-name == self.name().deref()
+name == &self.name()[]
 }

 /// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`,
@@ -63,7 +62,7 @@ pub trait AttrMetaMethods {

 impl AttrMetaMethods for Attribute {
 fn check_name(&self, name: &str) -> bool {
-let matches = name == self.name().deref();
+let matches = name == &self.name()[];
 if matches {
 mark_used(self);
 }
@@ -143,7 +142,7 @@ impl AttributeMethods for Attribute {
 let meta = mk_name_value_item_str(
 InternedString::new("doc"),
 token::intern_and_get_ident(&strip_doc_comment_decoration(
-comment.deref())[]));
+&comment[])[]));
 if self.node.style == ast::AttrOuter {
 f(&mk_attr_outer(self.node.id, meta))
 } else {
@@ -210,7 +209,7 @@ pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute {
 pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos,
 hi: BytePos)
 -> Attribute {
-let style = doc_comment_style(text.deref());
+let style = doc_comment_style(&text[]);
 let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
 let attr = Attribute_ {
 id: id,
@@ -327,11 +326,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool {
 /// Tests if a cfg-pattern matches the cfg set
 pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool {
 match cfg.node {
-ast::MetaList(ref pred, ref mis) if pred.deref() == "any" =>
+ast::MetaList(ref pred, ref mis) if &pred[] == "any" =>
 mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
-ast::MetaList(ref pred, ref mis) if pred.deref() == "all" =>
+ast::MetaList(ref pred, ref mis) if &pred[] == "all" =>
 mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
-ast::MetaList(ref pred, ref mis) if pred.deref() == "not" => {
+ast::MetaList(ref pred, ref mis) if &pred[] == "not" => {
 if mis.len() != 1 {
 diagnostic.span_err(cfg.span, "expected 1 cfg-pattern");
 return false;
@@ -383,7 +382,7 @@ fn find_stability_generic<'a,

 'outer: for attr in attrs {
 let tag = attr.name();
-let tag = tag.deref();
+let tag = &tag[];
 if tag != "deprecated" && tag != "unstable" && tag != "stable" {
 continue // not a stability level
 }
@@ -405,7 +404,7 @@ fn find_stability_generic<'a,
 }
 }
 }
-if meta.name().deref() == "since" {
+if &meta.name()[] == "since" {
 match meta.value_str() {
 Some(v) => since = Some(v),
 None => {
@@ -414,7 +413,7 @@ fn find_stability_generic<'a,
 }
 }
 }
-if meta.name().deref() == "reason" {
+if &meta.name()[] == "reason" {
 match meta.value_str() {
 Some(v) => reason = Some(v),
 None => {
@@ -522,11 +521,11 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt
 for item in items {
 match item.node {
 ast::MetaWord(ref word) => {
-let hint = match word.deref() {
+let hint = match &word[] {
 // Can't use "extern" because it's not a lexical identifier.
 "C" => Some(ReprExtern),
 "packed" => Some(ReprPacked),
-_ => match int_type_of_word(word.deref()) {
+_ => match int_type_of_word(&word[]) {
 Some(ity) => Some(ReprInt(item.span, ity)),
 None => {
 // Not a word we recognize
@@ -10,7 +10,6 @@

 use std::cell::RefCell;
 use std::collections::BTreeMap;
-use std::ops::Deref;

 use ast;
 use ast::{Ident, Name, TokenTree};
@@ -59,7 +58,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
 match diagnostics.insert(code.name, span) {
 Some(previous_span) => {
 ecx.span_warn(span, &format!(
-"diagnostic code {} already used", token::get_ident(code).deref()
+"diagnostic code {} already used", &token::get_ident(code)[]
 )[]);
 ecx.span_note(previous_span, "previous invocation");
 },
@@ -70,7 +69,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
 with_registered_diagnostics(|diagnostics| {
 if !diagnostics.contains_key(&code.name) {
 ecx.span_err(span, &format!(
-"used diagnostic code {} not registered", token::get_ident(code).deref()
+"used diagnostic code {} not registered", &token::get_ident(code)[]
 )[]);
 }
 });
@@ -95,12 +94,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
 with_registered_diagnostics(|diagnostics| {
 if diagnostics.insert(code.name, description).is_some() {
 ecx.span_err(span, &format!(
-"diagnostic code {} already registered", token::get_ident(*code).deref()
+"diagnostic code {} already registered", &token::get_ident(*code)[]
 )[]);
 }
 });
 let sym = Ident::new(token::gensym(&(
-"__register_diagnostic_".to_string() + token::get_ident(*code).deref()
+"__register_diagnostic_".to_string() + &token::get_ident(*code)[]
 )[]));
 MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
 }
@@ -22,8 +22,6 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;

-use std::ops::Deref;
-
 enum State {
 Asm,
 Outputs,
@@ -104,7 +102,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 // It's the opposite of '=&' which means that the memory
 // cannot be shared with any other operand (usually when
 // a register is clobbered early.)
-let output = match constraint.deref().slice_shift_char() {
+let output = match constraint.slice_shift_char() {
 Some(('=', _)) => None,
 Some(('+', operand)) => {
 Some(token::intern_and_get_ident(&format!(
@@ -131,9 +129,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])

 let (constraint, _str_style) = p.parse_str();

-if constraint.deref().starts_with("=") {
+if constraint.starts_with("=") {
 cx.span_err(p.last_span, "input operand constraint contains '='");
-} else if constraint.deref().starts_with("+") {
+} else if constraint.starts_with("+") {
 cx.span_err(p.last_span, "input operand constraint contains '+'");
 }

@@ -215,7 +213,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 MacExpr::new(P(ast::Expr {
 id: ast::DUMMY_NODE_ID,
 node: ast::ExprInlineAsm(ast::InlineAsm {
-asm: token::intern_and_get_ident(asm.deref()),
+asm: token::intern_and_get_ident(&asm[]),
 asm_str_style: asm_str_style.unwrap(),
 outputs: outputs,
 inputs: inputs,
@@ -28,7 +28,6 @@ use fold::Folder;

 use std::collections::HashMap;
 use std::rc::Rc;
-use std::ops::Deref;

 pub trait ItemDecorator {
 fn expand(&self,
@@ -791,7 +790,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
 cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
 }
 expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
-s.deref().to_string()
+s.to_string()
 })
 }

@@ -21,8 +21,6 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;

-use std::ops::Deref;
-
 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
 pub use ext;
@@ -577,7 +575,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
 fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
 let field_name = token::get_ident(ident);
 let field_span = Span {
-lo: sp.lo - Pos::from_usize(field_name.deref().len()),
+lo: sp.lo - Pos::from_usize(field_name.len()),
 hi: sp.hi,
 expn_id: sp.expn_id,
 };
@@ -15,7 +15,6 @@ use ext::build::AstBuilder;
 use parse::token;

 use std::string::String;
-use std::ops::Deref;

 pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
 sp: codemap::Span,
@@ -33,7 +32,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
 ast::LitStr(ref s, _) |
 ast::LitFloat(ref s, _) |
 ast::LitFloatUnsuffixed(ref s) => {
-accumulator.push_str(s.deref());
+accumulator.push_str(&s[]);
 }
 ast::LitChar(c) => {
 accumulator.push(c);
@@ -16,8 +16,6 @@ use parse::token;
 use parse::token::{str_to_ident};
 use ptr::P;

-use std::ops::Deref;
-
 pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 -> Box<base::MacResult+'cx> {
 let mut res_str = String::new();
@@ -33,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
 } else {
 match *e {
 ast::TtToken(_, token::Ident(ident, _)) => {
-res_str.push_str(token::get_ident(ident).deref())
+res_str.push_str(&token::get_ident(ident)[])
 },
 _ => {
 cx.span_err(sp, "concat_idents! requires ident args.");
@@ -15,8 +15,6 @@ use ext::deriving::generic::*;
 use ext::deriving::generic::ty::*;
 use ptr::P;

-use std::ops::Deref;
-
 pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
 span: Span,
 mitem: &MetaItem,
@@ -26,7 +24,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
 {
 let name = match mitem.node {
 MetaWord(ref tname) => {
-match tname.deref() {
+match &tname[] {
 "Copy" => "Copy",
 "Send" | "Sync" => {
 return cx.span_err(span,
@@ -189,7 +189,6 @@ use self::StructType::*;

 use std::cell::RefCell;
 use std::vec;
-use std::ops::Deref;

 use abi::Abi;
 use abi;
@@ -364,7 +363,7 @@ impl<'a> TraitDef<'a> {
 // generated implementations are linted
 let mut attrs = newitem.attrs.clone();
 attrs.extend(item.attrs.iter().filter(|a| {
-match a.name().deref() {
+match &a.name()[] {
 "allow" | "warn" | "deny" | "forbid" => true,
 _ => false,
 }
@@ -18,8 +18,6 @@ use ext::base::ExtCtxt;
 use codemap::Span;
 use ptr::P;

-use std::ops::Deref;
-
 pub mod bounds;
 pub mod clone;
 pub mod encodable;
@@ -76,7 +74,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
 |i| push(i)))
 }

-match tname.deref() {
+match &tname[] {
 "Clone" => expand!(clone::expand_deriving_clone),

 "Hash" => expand!(hash::expand_deriving_hash),
@@ -20,7 +20,6 @@ use parse::token;
 use ptr::P;

 use std::collections::HashMap;
-use std::ops::Deref;

 pub fn expand_deriving_show<F>(cx: &mut ExtCtxt,
 span: Span,
@@ -73,7 +72,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
 }
 };

-let mut format_string = String::from_str(token::get_ident(name).deref());
+let mut format_string = String::from_str(&token::get_ident(name)[]);
 // the internal fields we're actually formatting
 let mut exprs = Vec::new();

@@ -108,7 +107,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,

 let name = token::get_ident(field.name.unwrap());
 format_string.push_str(" ");
-format_string.push_str(name.deref());
+format_string.push_str(&name[]);
 format_string.push_str(": {:?}");

 exprs.push(field.self_.clone());
@@ -23,7 +23,6 @@ use parse::token;

 use std::env;
 use std::os;
-use std::ops::Deref;

 pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 -> Box<base::MacResult+'cx> {
@@ -103,9 +102,9 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 }
 }

-let e = match os::getenv(var.deref()) {
+let e = match os::getenv(&var[]) {
 None => {
-cx.span_err(sp, msg.deref());
+cx.span_err(sp, &msg[]);
 cx.expr_usize(sp, 0)
 }
 Some(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))
@@ -32,8 +32,6 @@ use util::small_vector::SmallVector;
 use visit;
 use visit::Visitor;

-use std::ops::Deref;
-
 pub fn expand_type(t: P<ast::Ty>,
 fld: &mut MacroExpander,
 impl_ty: Option<P<ast::Ty>>)
@@ -377,7 +375,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.span_err(
 pth.span,
 &format!("macro undefined: '{}!'",
-extnamestr.deref())[]);
+&extnamestr[])[]);

 // let compilation continue
 None
@@ -387,7 +385,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.bt_push(ExpnInfo {
 call_site: span,
 callee: NameAndSpan {
-name: extnamestr.deref().to_string(),
+name: extnamestr.to_string(),
 format: MacroBang,
 span: exp_span,
 },
@@ -413,7 +411,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.span_err(
 pth.span,
 &format!("non-expression macro in expression position: {}",
-&extnamestr.deref()[]
+&extnamestr[]
 )[]);
 return None;
 }
@@ -424,7 +422,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
 fld.cx.span_err(
 pth.span,
 &format!("'{}' is not a tt-style macro",
-extnamestr.deref())[]);
+&extnamestr[])[]);
 None
 }
 }
@@ -508,14 +506,14 @@ fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander)
 for attr in &modifiers {
 let mname = attr.name();

-match fld.cx.syntax_env.find(&intern(mname.deref())) {
+match fld.cx.syntax_env.find(&intern(&mname[])) {
 Some(rc) => match *rc {
 Modifier(ref mac) => {
 attr::mark_used(attr);
 fld.cx.bt_push(ExpnInfo {
 call_site: attr.span,
 callee: NameAndSpan {
-name: mname.deref().to_string(),
+name: mname.to_string(),
 format: MacroAttribute,
 span: None,
 }
@@ -615,7 +613,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 fld.cx.bt_push(ExpnInfo {
 call_site: it.span,
 callee: NameAndSpan {
-name: extnamestr.deref().to_string(),
+name: extnamestr.to_string(),
 format: MacroBang,
 span: span
 }
@@ -628,13 +626,13 @@ pub fn expand_item_mac(it: P<ast::Item>,
 if it.ident.name == parse::token::special_idents::invalid.name {
 fld.cx.span_err(path_span,
 &format!("macro {}! expects an ident argument",
-extnamestr.deref())[]);
+&extnamestr[])[]);
 return SmallVector::zero();
 }
 fld.cx.bt_push(ExpnInfo {
 call_site: it.span,
 callee: NameAndSpan {
-name: extnamestr.deref().to_string(),
+name: extnamestr.to_string(),
 format: MacroBang,
 span: span
 }
@@ -653,7 +651,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 fld.cx.bt_push(ExpnInfo {
 call_site: it.span,
 callee: NameAndSpan {
-name: extnamestr.deref().to_string(),
+name: extnamestr.to_string(),
 format: MacroBang,
 span: None,
 }
@@ -679,7 +677,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 _ => {
 fld.cx.span_err(it.span,
 &format!("{}! is not legal in item position",
-extnamestr.deref())[]);
+&extnamestr[])[]);
 return SmallVector::zero();
 }
 }
@@ -698,7 +696,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
 None => {
 fld.cx.span_err(path_span,
 &format!("non-item macro in item position: {}",
-extnamestr.deref())[]);
+&extnamestr[])[]);
 return SmallVector::zero();
 }
 };
@@ -952,7 +950,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 fld.cx.bt_push(ExpnInfo {
 call_site: span,
 callee: NameAndSpan {
-name: extnamestr.deref().to_string(),
+name: extnamestr.to_string(),
 format: MacroBang,
 span: tt_span
 }
@@ -970,7 +968,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 pth.span,
 &format!(
 "non-pattern macro in pattern position: {}",
-extnamestr.deref()
+&extnamestr[]
 )[]
 );
 return DummyResult::raw_pat(span);
@@ -983,7 +981,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
 _ => {
 fld.cx.span_err(span,
 &format!("{}! is not legal in pattern position",
-extnamestr.deref())[]);
+&extnamestr[])[]);
 return DummyResult::raw_pat(span);
 }
 }
@@ -1067,7 +1065,7 @@ fn expand_annotatable(a: Annotatable,
 for attr in a.attrs() {
 let mname = attr.name();

-match fld.cx.syntax_env.find(&intern(mname.deref())) {
+match fld.cx.syntax_env.find(&intern(&mname[])) {
 Some(rc) => match *rc {
 Decorator(ref dec) => {
 let it = match a {
@@ -1081,7 +1079,7 @@ fn expand_annotatable(a: Annotatable,
 fld.cx.bt_push(ExpnInfo {
 call_site: attr.span,
 callee: NameAndSpan {
-name: mname.deref().to_string(),
+name: mname.to_string(),
 format: MacroAttribute,
 span: None
 }
@@ -1182,7 +1180,7 @@ fn modifiers(attrs: &Vec<ast::Attribute>,
 fld: &MacroExpander)
 -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
 attrs.iter().cloned().partition(|attr| {
-match fld.cx.syntax_env.find(&intern(attr.name().deref())) {
+match fld.cx.syntax_env.find(&intern(&attr.name()[])) {
 Some(rc) => match *rc {
 Modifier(_) => true,
 _ => false
@@ -1197,7 +1195,7 @@ fn multi_modifiers(attrs: &[ast::Attribute],
 fld: &MacroExpander)
 -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
 attrs.iter().cloned().partition(|attr| {
-match fld.cx.syntax_env.find(&intern(attr.name().deref())) {
+match fld.cx.syntax_env.find(&intern(&attr.name()[])) {
 Some(rc) => match *rc {
 MultiModifier(_) => true,
 _ => false
@@ -1222,14 +1220,14 @@ fn expand_item_multi_modifier(mut it: Annotatable,
 for attr in &modifiers {
 let mname = attr.name();

-match fld.cx.syntax_env.find(&intern(mname.deref())) {
+match fld.cx.syntax_env.find(&intern(&mname[])) {
 Some(rc) => match *rc {
 MultiModifier(ref mac) => {
 attr::mark_used(attr);
 fld.cx.bt_push(ExpnInfo {
 call_site: attr.span,
 callee: NameAndSpan {
-name: mname.deref().to_string(),
+name: mname.to_string(),
 format: MacroAttribute,
 span: None,
 }
@@ -23,7 +23,6 @@ use ptr::P;

 use std::collections::HashMap;
 use std::iter::repeat;
-use std::ops::Deref;

 #[derive(PartialEq)]
 enum ArgumentType {
@@ -119,7 +118,8 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 }
 };
 let interned_name = token::get_ident(ident);
-let name = interned_name.deref();
+let name = &interned_name[];
+
 p.expect(&token::Eq);
 let e = p.parse_expr();
 match names.get(name) {
@@ -673,7 +673,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
 None => return DummyResult::raw_expr(sp)
 };

-let mut parser = parse::Parser::new(fmt.deref());
+let mut parser = parse::Parser::new(&fmt[]);
+
 loop {
 match parser.next() {
 Some(piece) => {
@@ -25,8 +25,6 @@ use ptr::P;
 /// as antiquotes (splices).

 pub mod rt {
-use std::ops::Deref;
-
 use ast;
 use codemap::Spanned;
 use ext::base::ExtCtxt;
@@ -163,7 +161,7 @@ pub mod rt {

 impl ToSource for ast::Ident {
 fn to_source(&self) -> String {
-token::get_ident(*self).deref().to_string()
+token::get_ident(*self).to_string()
 }
 }

@@ -22,7 +22,6 @@ use util::small_vector::SmallVector;

 use std::old_io::File;
 use std::rc::Rc;
-use std::ops::Deref;

 // These macros all relate to the file system; they either return
 // the column/row/filename of the expression, or they include
@@ -74,7 +73,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
 base::check_zero_tts(cx, sp, tts, "module_path!");
 let string = cx.mod_path()
 .iter()
-.map(|x| token::get_ident(*x).deref().to_string())
+.map(|x| token::get_ident(*x).to_string())
 .collect::<Vec<String>>()
 .connect("::");
 base::MacExpr::new(cx.expr_str(
@@ -99,7 +99,6 @@ use std::mem;
 use std::rc::Rc;
 use std::collections::HashMap;
 use std::collections::hash_map::Entry::{Vacant, Occupied};
-use std::ops::Deref;

 // To avoid costly uniqueness checks, we require that `MatchSeq` always has
 // a nonempty body.
@@ -230,7 +229,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
 p_s.span_diagnostic
 .span_fatal(sp,
 &format!("duplicated bind name: {}",
-string.deref())[])
+&string[])[])
 }
 }
 }
@@ -488,7 +487,7 @@ pub fn parse(sess: &ParseSess,
 let name_string = token::get_ident(name);
 let match_cur = ei.match_cur;
 (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
-parse_nt(&mut rust_parser, span, name_string.deref()))));
+parse_nt(&mut rust_parser, span, &name_string[]))));
 ei.idx += 1us;
 ei.match_cur += 1;
 }
@@ -21,6 +21,7 @@
 //! For the purpose of future feature-tracking, once code for detection of feature
 //! gate usage is added, *do not remove it again* even once the feature
 //! becomes stable.

+use self::Status::*;

 use abi::RustIntrinsic;
@@ -36,7 +37,6 @@ use parse::token::{self, InternedString};

 use std::slice;
 use std::ascii::AsciiExt;
-use std::ops::Deref;

 // If you change this list without updating src/doc/reference.md, @cmr will be sad
 // Don't ever remove anything from this list; set them to 'Removed'.
@@ -252,7 +252,7 @@ impl<'a> PostExpansionVisitor<'a> {

 impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
 fn visit_name(&mut self, sp: Span, name: ast::Name) {
-if !token::get_name(name).deref().is_ascii() {
+if !token::get_name(name).is_ascii() {
 self.gate_feature("non_ascii_idents", sp,
 "non-ascii idents are not fully supported.");
 }
@@ -379,7 +379,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {

 let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs,
 "link_name") {
-Some(val) => val.deref().starts_with("llvm."),
+Some(val) => val.starts_with("llvm."),
 _ => false
 };
 if links_to_llvm {
@@ -84,7 +84,6 @@ use std::mem;
 use std::num::Float;
 use std::rc::Rc;
 use std::slice;
-use std::ops::Deref;

 bitflags! {
 flags Restrictions: u8 {
@@ -5134,7 +5133,7 @@ impl<'a> Parser<'a> {
 outer_attrs, "path") {
 Some(d) => (dir_path.join(d), true),
 None => {
-let mod_name = mod_string.deref().to_string();
+let mod_name = mod_string.to_string();
 let default_path_str = format!("{}.rs", mod_name);
 let secondary_path_str = format!("{}/mod.rs", mod_name);
 let default_path = dir_path.join(&default_path_str[]);
@@ -5146,7 +5145,7 @@ impl<'a> Parser<'a> {
 self.span_err(id_sp,
 "cannot declare a new module at this location");
 let this_module = match self.mod_path_stack.last() {
-Some(name) => name.deref().to_string(),
+Some(name) => name.to_string(),
 None => self.root_module_name.as_ref().unwrap().clone(),
 };
 self.span_note(id_sp,
@@ -5192,7 +5191,7 @@ impl<'a> Parser<'a> {
 };

 self.eval_src_mod_from_path(file_path, owns_directory,
-mod_string.deref().to_string(), id_sp)
+mod_string.to_string(), id_sp)
 }

 fn eval_src_mod_from_path(&mut self,
@@ -639,7 +639,7 @@ impl BytesContainer for InternedString {
 // of `BytesContainer`, which is itself a workaround for the lack of
 // DST.
 unsafe {
-let this = self.deref();
+let this = &self[];
 mem::transmute::<&[u8],&[u8]>(this.container_as_bytes())
 }
 }
@@ -30,7 +30,6 @@ use ptr::P;
 use std::{ascii, mem};
 use std::old_io::{self, IoResult};
 use std::iter;
-use std::ops::Deref;

 pub enum AnnNode<'a> {
 NodeIdent(&'a ast::Ident),
@@ -259,7 +258,7 @@ pub fn token_to_string(tok: &Token) -> String {
 }

 /* Name components */
-token::Ident(s, _) => token::get_ident(s).deref().to_string(),
+token::Ident(s, _) => token::get_ident(s).to_string(),
 token::Lifetime(s) => format!("{}", token::get_ident(s)),
 token::Underscore => "_".to_string(),

@@ -799,7 +798,7 @@ impl<'a> State<'a> {
 try!(self.head(&visibility_qualified(item.vis,
 "extern crate")[]));
 if let Some((ref p, style)) = *optional_path {
-try!(self.print_string(p.deref(), style));
+try!(self.print_string(p, style));
 try!(space(&mut self.s));
 try!(word(&mut self.s, "as"));
 try!(space(&mut self.s));
@@ -1314,7 +1313,7 @@ impl<'a> State<'a> {
 try!(self.hardbreak_if_not_bol());
 try!(self.maybe_print_comment(attr.span.lo));
 if attr.node.is_sugared_doc {
-word(&mut self.s, attr.value_str().unwrap().deref())
+word(&mut self.s, &attr.value_str().unwrap()[])
 } else {
 match attr.node.style {
 ast::AttrInner => try!(word(&mut self.s, "#![")),
@@ -1848,17 +1847,17 @@ impl<'a> State<'a> {
 ast::ExprInlineAsm(ref a) => {
 try!(word(&mut self.s, "asm!"));
 try!(self.popen());
-try!(self.print_string(a.asm.deref(), a.asm_str_style));
+try!(self.print_string(&a.asm[], a.asm_str_style));
 try!(self.word_space(":"));

 try!(self.commasep(Inconsistent, &a.outputs[],
 |s, &(ref co, ref o, is_rw)| {
-match co.deref().slice_shift_char() {
+match co.slice_shift_char() {
 Some(('=', operand)) if is_rw => {
 try!(s.print_string(&format!("+{}", operand)[],
 ast::CookedStr))
 }
-_ => try!(s.print_string(co.deref(), ast::CookedStr))
+_ => try!(s.print_string(&co[], ast::CookedStr))
 }
 try!(s.popen());
 try!(s.print_expr(&**o));
@@ -1870,7 +1869,7 @@ impl<'a> State<'a> {

 try!(self.commasep(Inconsistent, &a.inputs[],
 |s, &(ref co, ref o)| {
-try!(s.print_string(co.deref(), ast::CookedStr));
+try!(s.print_string(&co[], ast::CookedStr));
 try!(s.popen());
 try!(s.print_expr(&**o));
 try!(s.pclose());
@@ -1881,7 +1880,7 @@ impl<'a> State<'a> {

 try!(self.commasep(Inconsistent, &a.clobbers[],
 |s, co| {
-try!(s.print_string(co.deref(), ast::CookedStr));
+try!(s.print_string(&co[], ast::CookedStr));
 Ok(())
 }));

@@ -1955,7 +1954,7 @@ impl<'a> State<'a> {
 let encoded = ident.encode_with_hygiene();
 try!(word(&mut self.s, &encoded[]))
 } else {
-try!(word(&mut self.s, token::get_ident(ident).deref()))
+try!(word(&mut self.s, &token::get_ident(ident)[]))
 }
 self.ann.post(self, NodeIdent(&ident))
 }
@@ -1965,7 +1964,7 @@ impl<'a> State<'a> {
 }

 pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
-try!(word(&mut self.s, token::get_name(name).deref()));
+try!(word(&mut self.s, &token::get_name(name)[]));
 self.ann.post(self, NodeName(&name))
 }

@@ -2533,15 +2532,15 @@ impl<'a> State<'a> {
 try!(self.ibox(indent_unit));
 match item.node {
 ast::MetaWord(ref name) => {
-try!(word(&mut self.s, name.deref()));
+try!(word(&mut self.s, &name[]));
 }
 ast::MetaNameValue(ref name, ref value) => {
-try!(self.word_space(name.deref()));
+try!(self.word_space(&name[]));
 try!(self.word_space("="));
 try!(self.print_literal(value));
 }
 ast::MetaList(ref name, ref items) => {
-try!(word(&mut self.s, name.deref()));
+try!(word(&mut self.s, &name[]));
 try!(self.popen());
 try!(self.commasep(Consistent,
 &items[],
@@ -2732,7 +2731,7 @@ impl<'a> State<'a> {
 _ => ()
 }
 match lit.node {
-ast::LitStr(ref st, style) => self.print_string(st.deref(), style),
+ast::LitStr(ref st, style) => self.print_string(&st[], style),
 ast::LitByte(byte) => {
 let mut res = String::from_str("b'");
 ascii::escape_default(byte, |c| res.push(c as char));
@@ -2773,10 +2772,10 @@ impl<'a> State<'a> {
 word(&mut self.s,
 &format!(
 "{}{}",
-f.deref(),
+&f[],
 &ast_util::float_ty_to_string(t)[])[])
 }
-ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.deref()),
+ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, &f[]),
 ast::LitBool(val) => {
 if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
 }
@@ -37,8 +37,6 @@ use {ast, ast_util};
 use ptr::P;
 use util::small_vector::SmallVector;

-use std::ops::Deref;
-
 enum ShouldFail {
 No,
 Yes(Option<InternedString>),
@@ -514,7 +512,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
 });
 let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
 // building `use <ident> = __test::main`
-let reexport_ident = token::str_to_ident(s.deref());
+let reexport_ident = token::str_to_ident(&s[]);

 let use_path =
 nospan(ast::ViewPathSimple(reexport_ident,
@@ -577,7 +575,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {

 fn is_test_crate(krate: &ast::Crate) -> bool {
 match attr::find_crate_name(&krate.attrs[]) {
-Some(ref s) if "test" == &s.deref()[] => true,
+Some(ref s) if "test" == &s[] => true,
 _ => false
 }
 }