librustc has been updated
parent 8b12d3ddf9
commit 664c41b427
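The change is mechanical: call sites that borrowed the underlying &str through InternedString::get() now go through the type's Deref<Target = str> impl instead, either with the range-full indexing sugar of the time (`&name[]`) or by relying on auto-deref / deref coercion to supply the &str. A minimal sketch of the pattern follows, using a simplified stand-in for InternedString (the real type lives in syntax::parse::token and is more involved); `&name[..]` is the later spelling of the `&name[]` sugar used in this diff:

    use std::ops::Deref;

    // Simplified stand-in for rustc's InternedString; only the parts
    // relevant to this change are modelled.
    struct InternedString {
        string: String,
    }

    impl InternedString {
        fn new(s: &str) -> InternedString {
            InternedString { string: s.to_string() }
        }

        // The explicit accessor the removed lines call.
        fn get(&self) -> &str {
            &self.string
        }
    }

    // Deref<Target = str> is what the added lines rely on: indexing and
    // method calls auto-deref through it, so no explicit accessor is needed.
    impl Deref for InternedString {
        type Target = str;
        fn deref(&self) -> &str {
            &self.string
        }
    }

    fn main() {
        let name = InternedString::new("staged_api");

        // Old style, as on the removed lines:
        assert_eq!(name.get(), "staged_api");

        // New style: slice through Deref (`&name[..]` is the current
        // spelling of `&name[]`) or coerce to &str directly.
        assert_eq!(&name[..], "staged_api");
        let s: &str = &name;
        assert!(s.starts_with("staged"));
    }

Routing everything through Deref leaves one canonical way to borrow the interned string, which is what lets an explicit accessor like get() be retired later.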
@@ -699,7 +699,7 @@ fn check_attribute(&mut self, cx: &Context, attr: &ast::Attribute) {
 
 if !attr::is_used(attr) {
 cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute");
-if CRATE_ATTRS.contains(&attr.name().get()) {
+if CRATE_ATTRS.contains(&&attr.name()[]) {
 let msg = match attr.node.style {
 ast::AttrOuter => "crate-level attribute should be an inner \
 attribute: add an exclamation mark: #![foo]",
@@ -801,7 +801,7 @@ fn check_must_use(cx: &Context, attrs: &[ast::Attribute], sp: Span) -> bool {
 None => {}
 Some(s) => {
 msg.push_str(": ");
-msg.push_str(s.get());
+msg.push_str(&s[]);
 }
 }
 cx.span_lint(UNUSED_MUST_USE, sp, &msg[]);
@@ -826,8 +826,8 @@ impl NonCamelCaseTypes {
 fn check_case(&self, cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
 fn is_camel_case(ident: ast::Ident) -> bool {
 let ident = token::get_ident(ident);
-if ident.get().is_empty() { return true; }
-let ident = ident.get().trim_matches('_');
+if ident.is_empty() { return true; }
+let ident = ident.trim_matches('_');
 
 // start with a non-lowercase letter rather than non-uppercase
 // ones (some scripts don't have a concept of upper/lowercase)
@@ -844,7 +844,7 @@ fn to_camel_case(s: &str) -> String {
 let s = token::get_ident(ident);
 
 if !is_camel_case(ident) {
-let c = to_camel_case(s.get());
+let c = to_camel_case(&s[]);
 let m = if c.is_empty() {
 format!("{} `{}` should have a camel case name such as `CamelCase`", sort, s)
 } else {
@@ -977,8 +977,8 @@ fn to_snake_case(mut str: &str) -> String {
 fn check_snake_case(&self, cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
 fn is_snake_case(ident: ast::Ident) -> bool {
 let ident = token::get_ident(ident);
-if ident.get().is_empty() { return true; }
-let ident = ident.get().trim_left_matches('\'');
+if ident.is_empty() { return true; }
+let ident = ident.trim_left_matches('\'');
 let ident = ident.trim_matches('_');
 
 let mut allow_underscore = true;
@@ -996,8 +996,8 @@ fn is_snake_case(ident: ast::Ident) -> bool {
 let s = token::get_ident(ident);
 
 if !is_snake_case(ident) {
-let sc = NonSnakeCase::to_snake_case(s.get());
-if sc != s.get() {
+let sc = NonSnakeCase::to_snake_case(&s[]);
+if sc != &s[] {
 cx.span_lint(NON_SNAKE_CASE, span,
 &*format!("{} `{}` should have a snake case name such as `{}`",
 sort, s, sc));
@@ -1077,10 +1077,10 @@ impl NonUpperCaseGlobals {
 fn check_upper_case(cx: &Context, sort: &str, ident: ast::Ident, span: Span) {
 let s = token::get_ident(ident);
 
-if s.get().chars().any(|c| c.is_lowercase()) {
-let uc: String = NonSnakeCase::to_snake_case(s.get()).chars()
+if s.chars().any(|c| c.is_lowercase()) {
+let uc: String = NonSnakeCase::to_snake_case(&s[]).chars()
 .map(|c| c.to_uppercase()).collect();
-if uc != s.get() {
+if uc != &s[] {
 cx.span_lint(NON_UPPER_CASE_GLOBALS, span,
 &format!("{} `{}` should have an upper case name such as `{}`",
 sort, s, uc));
@@ -1241,7 +1241,7 @@ fn check_item(&mut self, cx: &Context, item: &ast::Item) {
 match items[0].node {
 ast::PathListIdent {ref name, ..} => {
 let m = format!("braces around {} is unnecessary",
-token::get_ident(*name).get());
+&token::get_ident(*name)[]);
 cx.span_lint(UNUSED_IMPORT_BRACES, item.span,
 &m[]);
 },
@@ -1358,7 +1358,7 @@ fn check_unused_mut_pat(&self, cx: &Context, pats: &[P<ast::Pat>]) {
 pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| {
 let ident = path1.node;
 if let ast::BindByValue(ast::MutMutable) = mode {
-if !token::get_ident(ident).get().starts_with("_") {
+if !token::get_ident(ident).starts_with("_") {
 match mutables.entry(ident.name.usize()) {
 Vacant(entry) => { entry.insert(vec![id]); },
 Occupied(mut entry) => { entry.get_mut().push(id); },

@@ -341,7 +341,7 @@ pub fn gather_attrs(attrs: &[ast::Attribute])
 -> Vec<Result<(InternedString, Level, Span), Span>> {
 let mut out = vec!();
 for attr in attrs {
-let level = match Level::from_str(attr.name().get()) {
+let level = match Level::from_str(&attr.name()[]) {
 None => continue,
 Some(lvl) => lvl,
 };
@@ -499,10 +499,10 @@ fn with_lint_attrs<F>(&mut self,
 continue;
 }
 Ok((lint_name, level, span)) => {
-match self.lints.find_lint(lint_name.get(), &self.tcx.sess, Some(span)) {
+match self.lints.find_lint(&lint_name[], &self.tcx.sess, Some(span)) {
 Some(lint_id) => vec![(lint_id, level, span)],
 None => {
-match self.lints.lint_groups.get(lint_name.get()) {
+match self.lints.lint_groups.get(&lint_name[]) {
 Some(&(ref v, _)) => v.iter()
 .map(|lint_id: &LintId|
 (*lint_id, level, span))

@@ -170,7 +170,7 @@ pub fn read_crates(&mut self, krate: &ast::Crate) {
 fn process_crate(&self, c: &ast::Crate) {
 for a in c.attrs.iter().filter(|m| m.name() == "link_args") {
 match a.value_str() {
-Some(ref linkarg) => self.sess.cstore.add_used_link_args(linkarg.get()),
+Some(ref linkarg) => self.sess.cstore.add_used_link_args(&linkarg[]),
 None => { /* fallthrough */ }
 }
 }
@@ -184,15 +184,15 @@ fn extract_crate_info(&self, i: &ast::Item) -> Option<CrateInfo> {
 ident, path_opt);
 let name = match *path_opt {
 Some((ref path_str, _)) => {
-let name = path_str.get().to_string();
+let name = path_str.to_string();
 validate_crate_name(Some(self.sess), &name[],
 Some(i.span));
 name
 }
-None => ident.get().to_string(),
+None => ident.to_string(),
 };
 Some(CrateInfo {
-ident: ident.get().to_string(),
+ident: ident.to_string(),
 name: name,
 id: i.id,
 should_link: should_link(i),
@@ -237,7 +237,7 @@ fn process_item(&mut self, i: &ast::Item) {
 .collect::<Vec<&ast::Attribute>>();
 for m in &link_args {
 match m.value_str() {
-Some(linkarg) => self.sess.cstore.add_used_link_args(linkarg.get()),
+Some(linkarg) => self.sess.cstore.add_used_link_args(&linkarg[]),
 None => { /* fallthrough */ }
 }
 }
@@ -289,7 +289,7 @@ fn process_item(&mut self, i: &ast::Item) {
 }
 };
 register_native_lib(self.sess, Some(m.span),
-n.get().to_string(), kind);
+n.to_string(), kind);
 }
 None => {}
 }

@@ -383,7 +383,7 @@ pub fn is_staged_api(cstore: &cstore::CStore, def: ast::DefId) -> bool {
 let cdata = cstore.get_crate_data(def.krate);
 let attrs = decoder::get_crate_attributes(cdata.data());
 for attr in &attrs {
-if attr.name().get() == "staged_api" {
+if &attr.name()[] == "staged_api" {
 match attr.node.value.node { ast::MetaWord(_) => return true, _ => (/*pass*/) }
 }
 }

@@ -86,11 +86,11 @@ pub struct EncodeContext<'a, 'tcx: 'a> {
 }
 
 fn encode_name(rbml_w: &mut Encoder, name: ast::Name) {
-rbml_w.wr_tagged_str(tag_paths_data_name, token::get_name(name).get());
+rbml_w.wr_tagged_str(tag_paths_data_name, &token::get_name(name)[]);
 }
 
 fn encode_impl_type_basename(rbml_w: &mut Encoder, name: ast::Ident) {
-rbml_w.wr_tagged_str(tag_item_impl_type_basename, token::get_ident(name).get());
+rbml_w.wr_tagged_str(tag_item_impl_type_basename, &token::get_ident(name));
 }
 
 pub fn encode_def_id(rbml_w: &mut Encoder, id: DefId) {
@@ -372,7 +372,7 @@ fn encode_path<PI: Iterator<Item=PathElem>>(rbml_w: &mut Encoder, path: PI) {
 ast_map::PathMod(_) => tag_path_elem_mod,
 ast_map::PathName(_) => tag_path_elem_name
 };
-rbml_w.wr_tagged_str(tag, token::get_name(pe.name()).get());
+rbml_w.wr_tagged_str(tag, &token::get_name(pe.name())[]);
 }
 rbml_w.end_tag();
 }
@@ -915,7 +915,7 @@ fn encode_method_argument_names(rbml_w: &mut Encoder,
 rbml_w.start_tag(tag_method_argument_name);
 if let ast::PatIdent(_, ref path1, _) = arg.pat.node {
 let name = token::get_ident(path1.node);
-rbml_w.writer.write_all(name.get().as_bytes());
+rbml_w.writer.write_all(name.as_bytes());
 }
 rbml_w.end_tag();
 }
@@ -1636,7 +1636,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) {
 ast::MetaWord(ref name) => {
 rbml_w.start_tag(tag_meta_item_word);
 rbml_w.start_tag(tag_meta_item_name);
-rbml_w.writer.write_all(name.get().as_bytes());
+rbml_w.writer.write_all(name.as_bytes());
 rbml_w.end_tag();
 rbml_w.end_tag();
 }
@@ -1645,10 +1645,10 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) {
 ast::LitStr(ref value, _) => {
 rbml_w.start_tag(tag_meta_item_name_value);
 rbml_w.start_tag(tag_meta_item_name);
-rbml_w.writer.write_all(name.get().as_bytes());
+rbml_w.writer.write_all(name.as_bytes());
 rbml_w.end_tag();
 rbml_w.start_tag(tag_meta_item_value);
-rbml_w.writer.write_all(value.get().as_bytes());
+rbml_w.writer.write_all(value.as_bytes());
 rbml_w.end_tag();
 rbml_w.end_tag();
 }
@@ -1658,7 +1658,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: &ast::MetaItem) {
 ast::MetaList(ref name, ref items) => {
 rbml_w.start_tag(tag_meta_item_list);
 rbml_w.start_tag(tag_meta_item_name);
-rbml_w.writer.write_all(name.get().as_bytes());
+rbml_w.writer.write_all(name.as_bytes());
 rbml_w.end_tag();
 for inner_item in items {
 encode_meta_item(rbml_w, &**inner_item);
@@ -1695,7 +1695,7 @@ fn encode_paren_sugar(rbml_w: &mut Encoder, paren_sugar: bool) {
 fn encode_associated_type_names(rbml_w: &mut Encoder, names: &[ast::Name]) {
 rbml_w.start_tag(tag_associated_type_names);
 for &name in names {
-rbml_w.wr_tagged_str(tag_associated_type_name, token::get_name(name).get());
+rbml_w.wr_tagged_str(tag_associated_type_name, &token::get_name(name)[]);
 }
 rbml_w.end_tag();
 }

@@ -249,7 +249,7 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat)
 span_warn!(cx.tcx.sess, p.span, E0170,
 "pattern binding `{}` is named the same as one \
 of the variants of the type `{}`",
-token::get_ident(&ident.node)[], ty_to_string(cx.tcx, pat_ty));
+&token::get_ident(ident.node)[], ty_to_string(cx.tcx, pat_ty));
 span_help!(cx.tcx.sess, p.span,
 "if you meant to match on a variant, \
 consider making the path in the pattern qualified: `{}::{}`",

@@ -610,7 +610,7 @@ pub fn lit_to_const(lit: &ast::Lit) -> const_val {
 ast::LitInt(n, ast::UnsignedIntLit(_)) => const_uint(n),
 ast::LitFloat(ref n, _) |
 ast::LitFloatUnsuffixed(ref n) => {
-const_float(n.get().parse::<f64>().unwrap() as f64)
+const_float(n.parse::<f64>().unwrap() as f64)
 }
 ast::LitBool(b) => const_bool(b)
 }

@@ -321,7 +321,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool {
 for attr in lint::gather_attrs(attrs) {
 match attr {
 Ok((ref name, lint::Allow, _))
-if name.get() == dead_code => return true,
+if &name[] == dead_code => return true,
 _ => (),
 }
 }

@@ -514,7 +514,6 @@ fn report_concrete_failure(&self,
 lifetime of captured variable `{}`...",
 ty::local_var_name_str(self.tcx,
 upvar_id.var_id)
-.get()
 .to_string());
 note_and_explain_region(
 self.tcx,
@@ -526,7 +525,6 @@ fn report_concrete_failure(&self,
 &format!("...but `{}` is only valid for ",
 ty::local_var_name_str(self.tcx,
 upvar_id.var_id)
-.get()
 .to_string())[],
 sup,
 "");
@@ -570,8 +568,7 @@ fn report_concrete_failure(&self,
 &format!("captured variable `{}` does not \
 outlive the enclosing closure",
 ty::local_var_name_str(self.tcx,
-id).get()
-.to_string())[]);
+id).to_string())[]);
 note_and_explain_region(
 self.tcx,
 "captured variable is valid for ",
@@ -959,7 +956,7 @@ fn pick_lifetime(&self,
 // choice of lifetime name deterministic and thus easier to test.
 let mut names = Vec::new();
 for rn in region_names {
-let lt_name = token::get_name(*rn).get().to_string();
+let lt_name = token::get_name(*rn).to_string();
 names.push(lt_name);
 }
 names.sort();
@@ -1438,15 +1435,15 @@ fn report_inference_failure(&self,
 }
 infer::EarlyBoundRegion(_, name) => {
 format!(" for lifetime parameter `{}`",
-token::get_name(name).get())
+&token::get_name(name))
 }
 infer::BoundRegionInCoherence(name) => {
 format!(" for lifetime parameter `{}` in coherence check",
-token::get_name(name).get())
+&token::get_name(name)[])
 }
 infer::UpvarRegion(ref upvar_id, _) => {
 format!(" for capture of `{}` by closure",
-ty::local_var_name_str(self.tcx, upvar_id.var_id).get().to_string())
+ty::local_var_name_str(self.tcx, upvar_id.var_id).to_string())
 }
 };
 
@@ -1527,7 +1524,6 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) {
 &format!(
 "...so that closure can access `{}`",
 ty::local_var_name_str(self.tcx, upvar_id.var_id)
-.get()
 .to_string())[])
 }
 infer::InfStackClosure(span) => {
@@ -1553,7 +1549,7 @@ fn note_region_origin(&self, origin: &SubregionOrigin<'tcx>) {
 does not outlive the enclosing closure",
 ty::local_var_name_str(
 self.tcx,
-id).get().to_string())[]);
+id).to_string())[]);
 }
 infer::IndexSlice(span) => {
 self.tcx.sess.span_note(
@@ -1730,7 +1726,7 @@ impl LifeGiver {
 fn with_taken(taken: &[ast::LifetimeDef]) -> LifeGiver {
 let mut taken_ = HashSet::new();
 for lt in taken {
-let lt_name = token::get_name(lt.lifetime.name).get().to_string();
+let lt_name = token::get_name(lt.lifetime.name).to_string();
 taken_.insert(lt_name);
 }
 LifeGiver {

@@ -149,7 +149,7 @@ impl<'a, 'v> Visitor<'v> for LanguageItemCollector<'a> {
 fn visit_item(&mut self, item: &ast::Item) {
 match extract(&item.attrs) {
 Some(value) => {
-let item_index = self.item_refs.get(value.get()).map(|x| *x);
+let item_index = self.item_refs.get(&value[]).map(|x| *x);
 
 match item_index {
 Some(item_index) => {

@@ -333,7 +333,7 @@ fn variable(&self, node_id: NodeId, span: Span) -> Variable {
 fn variable_name(&self, var: Variable) -> String {
 match self.var_kinds[var.get()] {
 Local(LocalInfo { ident: nm, .. }) | Arg(_, nm) => {
-token::get_ident(nm).get().to_string()
+token::get_ident(nm).to_string()
 },
 ImplicitRet => "<implicit-ret>".to_string(),
 CleanExit => "<clean-exit>".to_string()

@@ -1543,7 +1543,7 @@ impl<'tcx> Repr<'tcx> for InteriorKind {
 fn repr(&self, _tcx: &ty::ctxt) -> String {
 match *self {
 InteriorField(NamedField(fld)) => {
-token::get_name(fld).get().to_string()
+token::get_name(fld).to_string()
 }
 InteriorField(PositionalField(i)) => format!("#{}", i),
 InteriorElement(_) => "[]".to_string(),

@@ -180,7 +180,7 @@ pub fn build(&mut self, sess: &Session, krate: &Crate, export_map: &PublicItems)
 pub fn new(krate: &Crate) -> Index {
 let mut staged_api = false;
 for attr in &krate.attrs {
-if attr.name().get() == "staged_api" {
+if &attr.name()[] == "staged_api" {
 match attr.node.value.node {
 ast::MetaWord(_) => {
 attr::mark_used(attr);
@@ -239,12 +239,12 @@ fn check(&mut self, id: ast::DefId, span: Span, stab: &Option<Stability>) {
 if !self.active_features.contains(feature) {
 let msg = match *reason {
 Some(ref r) => format!("use of unstable library feature '{}': {}",
-feature.get(), r.get()),
-None => format!("use of unstable library feature '{}'", feature.get())
+&feature[], &r[]),
+None => format!("use of unstable library feature '{}'", &feature[])
 };
 
 emit_feature_warn(&self.tcx.sess.parse_sess.span_diagnostic,
-feature.get(), span, &msg[]);
+&feature[], span, &msg[]);
 }
 }
 Some(..) => {

@@ -86,7 +86,7 @@ fn report_on_unimplemented<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>,
 }).collect::<HashMap<String, String>>();
 generic_map.insert("Self".to_string(),
 trait_ref.self_ty().user_string(infcx.tcx));
-let parser = Parser::new(istring.get());
+let parser = Parser::new(&istring[]);
 let mut errored = false;
 let err: String = parser.filter_map(|p| {
 match p {

@@ -4644,7 +4644,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
 "no field named `{}` found in the list of fields `{:?}`",
 token::get_name(name),
 fields.iter()
-.map(|f| token::get_name(f.name).get().to_string())
+.map(|f| token::get_name(f.name).to_string())
 .collect::<Vec<String>>())[]);
 }
 

@@ -55,7 +55,7 @@ pub fn check_crate(krate: &ast::Crate,
 
 pub fn link_name(attrs: &[ast::Attribute]) -> Option<InternedString> {
 lang_items::extract(attrs).and_then(|name| {
-$(if name.get() == stringify!($name) {
+$(if &name[] == stringify!($name) {
 Some(InternedString::new(stringify!($sym)))
 } else)* {
 None
@@ -110,7 +110,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> {
 fn visit_foreign_item(&mut self, i: &ast::ForeignItem) {
 match lang_items::extract(&i.attrs) {
 None => {}
-Some(lang_item) => self.register(lang_item.get(), i.span),
+Some(lang_item) => self.register(&lang_item[], i.span),
 }
 visit::walk_foreign_item(self, i)
 }

@@ -109,7 +109,7 @@ fn visit_item(&mut self, item: &ast::Item) {
 let mut reexport = HashSet::new();
 for attr in &item.attrs {
 let mut used = true;
-match attr.name().get() {
+match &attr.name()[] {
 "phase" => {
 self.sess.span_err(attr.span, "#[phase] is deprecated; use \
 #[macro_use], #[plugin], and/or #[no_link]");

@@ -217,7 +217,7 @@ pub fn region_to_string(cx: &ctxt, prefix: &str, space: bool, region: Region) ->
 match region {
 ty::ReScope(_) => prefix.to_string(),
 ty::ReEarlyBound(_, _, _, name) => {
-token::get_name(name).get().to_string()
+token::get_name(name).to_string()
 }
 ty::ReLateBound(_, br) => bound_region_to_string(cx, prefix, space, br),
 ty::ReFree(ref fr) => bound_region_to_string(cx, prefix, space, fr.bound_region),
@@ -277,7 +277,7 @@ fn bare_fn_to_string<'tcx>(cx: &ctxt<'tcx>,
 match ident {
 Some(i) => {
 s.push(' ');
-s.push_str(token::get_ident(i).get());
+s.push_str(&token::get_ident(i)[]);
 }
 _ => { }
 }
@@ -1020,19 +1020,19 @@ fn repr(&self, tcx: &ctxt<'tcx>) -> String {
 
 impl<'tcx> Repr<'tcx> for ast::Name {
 fn repr(&self, _tcx: &ctxt) -> String {
-token::get_name(*self).get().to_string()
+token::get_name(*self).to_string()
 }
 }
 
 impl<'tcx> UserString<'tcx> for ast::Name {
 fn user_string(&self, _tcx: &ctxt) -> String {
-token::get_name(*self).get().to_string()
+token::get_name(*self).to_string()
 }
 }
 
 impl<'tcx> Repr<'tcx> for ast::Ident {
 fn repr(&self, _tcx: &ctxt) -> String {
-token::get_ident(*self).get().to_string()
+token::get_ident(*self).to_string()
 }
 }
 
@@ -1220,7 +1220,7 @@ fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
 }
 })
 });
-let names: Vec<_> = names.iter().map(|s| s.get()).collect();
+let names: Vec<_> = names.iter().map(|s| &s[]).collect();
 
 let value_str = unbound_value.user_string(tcx);
 if names.len() == 0 {
@@ -1248,7 +1248,7 @@ fn user_string(&self, tcx: &ctxt<'tcx>) -> String {
 
 impl<'tcx> UserString<'tcx> for ast::Ident {
 fn user_string(&self, _tcx: &ctxt) -> String {
-token::get_name(self.name).get().to_string()
+token::get_name(self.name).to_string()
 }
 }
 