diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 72e9a3d2cd0..e9f15c0a8f5 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -60,12 +60,12 @@ pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner) str::connect(strs, sep) } -pub fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str { - if vec::is_empty(p) { +pub fn path_ident_to_str(p: &path, i: ident, itr: @ident_interner) -> ~str { + if vec::is_empty(*p) { //FIXME /* FIXME (#2543) */ copy *i copy *itr.get(i) } else { - fmt!("%s::%s", path_to_str(p, itr), *itr.get(i)) + fmt!("%s::%s", path_to_str(*p, itr), *itr.get(i)) } } @@ -338,7 +338,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { fmt!("unknown node (id=%d)", id) } Some(&node_item(item, path)) => { - let path_str = path_ident_to_str(*path, item.ident, itr); + let path_str = path_ident_to_str(path, item.ident, itr); let item_str = match item.node { item_const(*) => ~"const", item_fn(*) => ~"fn", @@ -355,7 +355,7 @@ pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str { } Some(&node_foreign_item(item, abi, _, path)) => { fmt!("foreign item %s with abi %? (id=%?)", - path_ident_to_str(*path, item.ident, itr), abi, id) + path_ident_to_str(path, item.ident, itr), abi, id) } Some(&node_method(m, _, path)) => { fmt!("method %s in %s (id=%?)", diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index b313a2fc6fc..6b2aa2416f8 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -184,7 +184,7 @@ fn diagnosticcolor(lvl: level) -> u8 { } } -fn print_diagnostic(topic: ~str, lvl: level, msg: &str) { +fn print_diagnostic(topic: &str, lvl: level, msg: &str) { let use_color = term::color_supported() && io::stderr().get_type() == io::Screen; if !topic.is_empty() { diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 00c178b6d7c..97c5797cf57 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -119,13 +119,13 @@ pub fn expand_asm(cx: @ext_ctxt, sp: span, tts: &[ast::token_tree]) cons = str::connect(clobs, ","); } Options => { - let option = *p.parse_str(); + let option = p.parse_str(); - if option == ~"volatile" { + if "volatile" == *option { volatile = true; - } else if option == ~"alignstack" { + } else if "alignstack" == *option { alignstack = true; - } else if option == ~"intel" { + } else if "intel" == *option { dialect = ast::asm_intel; } diff --git a/src/libsyntax/ext/auto_encode.rs b/src/libsyntax/ext/auto_encode.rs index 9b78d9954d3..ac86d266d73 100644 --- a/src/libsyntax/ext/auto_encode.rs +++ b/src/libsyntax/ext/auto_encode.rs @@ -836,7 +836,7 @@ fn mk_struct_deser_impl( cx: @ext_ctxt, span: span, ident: ast::ident, - fields: ~[@ast::struct_field], + fields: &[@ast::struct_field], generics: &ast::Generics ) -> @ast::item { let fields = do mk_struct_fields(fields).mapi |idx, field| { @@ -1120,7 +1120,7 @@ fn mk_enum_deser_body( ext_cx: @ext_ctxt, span: span, name: ast::ident, - variants: ~[ast::variant] + variants: &[ast::variant] ) -> @ast::expr { let expr_arm_names = build::mk_base_vec_e( ext_cx, diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 3f90fd6267b..605ba65b51a 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -509,7 +509,7 @@ pub fn mk_unreachable(cx: @ext_ctxt, span: span) -> @ast::expr { ], ~[ mk_base_str(cx, span, ~"internal error: entered unreachable code"), - mk_base_str(cx, span, loc.file.name), + mk_base_str(cx, 
span, copy loc.file.name), mk_uint(cx, span, loc.line), ] ) diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index 1c33fe35070..2151e9529c4 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -60,11 +60,11 @@ fn cs_clone(cx: @ext_ctxt, span: span, build::mk_method_call(cx, span, field, clone_ident, ~[]); match *substr.fields { - Struct(af) => { + Struct(ref af) => { ctor_ident = ~[ substr.type_ident ]; all_fields = af; } - EnumMatching(_, variant, af) => { + EnumMatching(_, variant, ref af) => { ctor_ident = ~[ variant.node.name ]; all_fields = af; }, @@ -72,7 +72,7 @@ fn cs_clone(cx: @ext_ctxt, span: span, StaticEnum(*) | StaticStruct(*) => cx.span_bug(span, "Static method in `deriving(Clone)`") } - match all_fields { + match *all_fields { [(None, _, _), .. _] => { // enum-like let subcalls = all_fields.map(|&(_, self_f, _)| subcall(self_f)); diff --git a/src/libsyntax/ext/deriving/cmp/ord.rs b/src/libsyntax/ext/deriving/cmp/ord.rs index a9234c858f4..cdb9f620301 100644 --- a/src/libsyntax/ext/deriving/cmp/ord.rs +++ b/src/libsyntax/ext/deriving/cmp/ord.rs @@ -65,8 +65,6 @@ fn cs_ord(less: bool, equal: bool, let false_blk_expr = build::mk_block(cx, span, ~[], ~[], Some(build::mk_bool(cx, span, false))); - let true_blk = build::mk_simple_block(cx, span, - build::mk_bool(cx, span, true)); let base = build::mk_bool(cx, span, equal); cs_fold( @@ -108,6 +106,8 @@ fn cs_ord(less: bool, equal: bool, let cmp = build::mk_method_call(cx, span, self_f, binop, other_fs.to_owned()); + let true_blk = build::mk_simple_block(cx, span, + build::mk_bool(cx, span, true)); let if_ = expr_if(cmp, true_blk, Some(elseif)); build::mk_expr(cx, span, if_) diff --git a/src/libsyntax/ext/deriving/cmp/totalord.rs b/src/libsyntax/ext/deriving/cmp/totalord.rs index 7d560a197d0..5ec4e028454 100644 --- a/src/libsyntax/ext/deriving/cmp/totalord.rs +++ b/src/libsyntax/ext/deriving/cmp/totalord.rs @@ -55,15 +55,16 @@ pub fn ordering_const(cx: @ext_ctxt, span: span, cnst: Ordering) -> @expr { pub fn cs_cmp(cx: @ext_ctxt, span: span, substr: &Substructure) -> @expr { - let lexical_ord = ~[cx.ident_of("core"), - cx.ident_of("cmp"), - cx.ident_of("lexical_ordering")]; cs_same_method_fold( // foldr (possibly) nests the matches in lexical_ordering better false, |cx, span, old, new| { - build::mk_call_global(cx, span, lexical_ord, ~[old, new]) + build::mk_call_global(cx, span, + ~[cx.ident_of("core"), + cx.ident_of("cmp"), + cx.ident_of("lexical_ordering")], + ~[old, new]) }, ordering_const(cx, span, Equal), |cx, span, list, _| { diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs index d785f3816de..be2cc6dd25e 100644 --- a/src/libsyntax/ext/deriving/generic.rs +++ b/src/libsyntax/ext/deriving/generic.rs @@ -259,14 +259,14 @@ pub enum SubstructureFields<'self> { fields: `(field ident, self, [others])`, where the field ident is only non-`None` in the case of a struct variant. */ - EnumMatching(uint, ast::variant, ~[(Option, @expr, ~[@expr])]), + EnumMatching(uint, &'self ast::variant, ~[(Option, @expr, ~[@expr])]), /** non-matching variants of the enum, [(variant index, ast::variant, [field ident, fields])] (i.e. all fields for self are in the first tuple, for other1 are in the second tuple, etc.) 
*/ - EnumNonMatching(~[(uint, ast::variant, ~[(Option, @expr)])]), + EnumNonMatching(&'self [(uint, ast::variant, ~[(Option, @expr)])]), /// A static method where Self is a struct StaticStruct(&'self ast::struct_def, Either), @@ -290,7 +290,7 @@ representing each variant: (variant index, ast::variant instance, */ pub type EnumNonMatchFunc<'self> = &'self fn(@ext_ctxt, span, - ~[(uint, ast::variant, + &[(uint, ast::variant, ~[(Option, @expr)])], &[@expr]) -> @expr; @@ -416,8 +416,9 @@ impl<'self> MethodDef<'self> { let mut nonstatic = false; match self.self_ty { - Some(self_ptr) => { - let (self_expr, self_ty) = ty::get_explicit_self(cx, span, self_ptr); + Some(ref self_ptr) => { + let (self_expr, self_ty) = ty::get_explicit_self(cx, span, + self_ptr); ast_self_ty = self_ty; self_args.push(self_expr); @@ -616,9 +617,10 @@ impl<'self> MethodDef<'self> { self_args: &[@expr], nonself_args: &[@expr]) -> @expr { + let mut matches = ~[]; self.build_enum_match(cx, span, enum_def, type_ident, self_args, nonself_args, - None, ~[], 0) + None, &mut matches, 0) } @@ -650,58 +652,57 @@ impl<'self> MethodDef<'self> { self_args: &[@expr], nonself_args: &[@expr], matching: Option, - matches_so_far: ~[(uint, ast::variant, - ~[(Option, @expr)])], + matches_so_far: &mut ~[(uint, ast::variant, + ~[(Option, @expr)])], match_count: uint) -> @expr { if match_count == self_args.len() { // we've matched against all arguments, so make the final // expression at the bottom of the match tree - match matches_so_far { - [] => cx.span_bug(span, ~"no self match on an enum in generic `deriving`"), - _ => { - // we currently have a vec of vecs, where each - // subvec is the fields of one of the arguments, - // but if the variants all match, we want this as - // vec of tuples, where each tuple represents a - // field. + if matches_so_far.len() == 0 { + cx.span_bug(span, ~"no self match on an enum in generic \ + `deriving`"); + } + // we currently have a vec of vecs, where each + // subvec is the fields of one of the arguments, + // but if the variants all match, we want this as + // vec of tuples, where each tuple represents a + // field. - let substructure; + let substructure; - // most arms don't have matching variants, so do a - // quick check to see if they match (even though - // this means iterating twice) instead of being - // optimistic and doing a pile of allocations etc. - match matching { - Some(variant_index) => { - // `ref` inside let matches is buggy. Causes havoc wih rusc. - // let (variant_index, ref self_vec) = matches_so_far[0]; - let (variant, self_vec) = match matches_so_far[0] { - (_, v, ref s) => (v, s) - }; + // most arms don't have matching variants, so do a + // quick check to see if they match (even though + // this means iterating twice) instead of being + // optimistic and doing a pile of allocations etc. + match matching { + Some(variant_index) => { + // `ref` inside let matches is buggy. Causes havoc wih rusc. 
+ // let (variant_index, ref self_vec) = matches_so_far[0]; + let (variant, self_vec) = match matches_so_far[0] { + (_, ref v, ref s) => (v, s) + }; - let mut enum_matching_fields = vec::from_elem(self_vec.len(), ~[]); + let mut enum_matching_fields = vec::from_elem(self_vec.len(), ~[]); - for matches_so_far.tail().each |&(_, _, other_fields)| { - for other_fields.eachi |i, &(_, other_field)| { - enum_matching_fields[i].push(other_field); - } - } - let field_tuples = - do vec::map_zip(*self_vec, - enum_matching_fields) |&(id, self_f), &other| { - (id, self_f, other) - }; - substructure = EnumMatching(variant_index, variant, field_tuples); - } - None => { - substructure = EnumNonMatching(matches_so_far); + for matches_so_far.tail().each |&(_, _, other_fields)| { + for other_fields.eachi |i, &(_, other_field)| { + enum_matching_fields[i].push(other_field); } } - self.call_substructure_method(cx, span, type_ident, - self_args, nonself_args, - &substructure) + let field_tuples = + do vec::map_zip(*self_vec, + enum_matching_fields) |&(id, self_f), &other| { + (id, self_f, other) + }; + substructure = EnumMatching(variant_index, variant, field_tuples); + } + None => { + substructure = EnumNonMatching(*matches_so_far); } } + self.call_substructure_method(cx, span, type_ident, + self_args, nonself_args, + &substructure) } else { // there are still matches to create let current_match_str = if match_count == 0 { @@ -712,9 +713,6 @@ impl<'self> MethodDef<'self> { let mut arms = ~[]; - // this is used as a stack - let mut matches_so_far = matches_so_far; - // the code for nonmatching variants only matters when // we've seen at least one other variant already if self.const_nonmatching && match_count > 0 { @@ -732,7 +730,7 @@ impl<'self> MethodDef<'self> { current_match_str, ast::m_imm); - matches_so_far.push((index, *variant, idents)); + matches_so_far.push((index, /*bad*/ copy *variant, idents)); let arm_expr = self.build_enum_match(cx, span, enum_def, type_ident, @@ -744,9 +742,10 @@ impl<'self> MethodDef<'self> { arms.push(build::mk_arm(cx, span, ~[ pattern ], arm_expr)); if enum_def.variants.len() > 1 { + let e = &EnumNonMatching(&[]); let wild_expr = self.call_substructure_method(cx, span, type_ident, self_args, nonself_args, - &EnumNonMatching(~[])); + e); let wild_arm = build::mk_arm(cx, span, ~[ build::mk_pat_wild(cx, span) ], wild_expr); @@ -760,7 +759,7 @@ impl<'self> MethodDef<'self> { current_match_str, ast::m_imm); - matches_so_far.push((index, *variant, idents)); + matches_so_far.push((index, /*bad*/ copy *variant, idents)); let new_matching = match matching { _ if match_count == 0 => Some(index), @@ -850,7 +849,7 @@ pub fn cs_fold(use_foldl: bool, cx: @ext_ctxt, span: span, substructure: &Substructure) -> @expr { match *substructure.fields { - EnumMatching(_, _, all_fields) | Struct(all_fields) => { + EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { if use_foldl { do all_fields.foldl(base) |&old, &(_, self_f, other_fs)| { f(cx, span, old, self_f, other_fs) @@ -861,8 +860,9 @@ pub fn cs_fold(use_foldl: bool, } } }, - EnumNonMatching(all_enums) => enum_nonmatch_f(cx, span, - all_enums, substructure.nonself_args), + EnumNonMatching(ref all_enums) => enum_nonmatch_f(cx, span, + *all_enums, + substructure.nonself_args), StaticEnum(*) | StaticStruct(*) => { cx.span_bug(span, "Static function in `deriving`") } @@ -885,7 +885,7 @@ pub fn cs_same_method(f: &fn(@ext_ctxt, span, ~[@expr]) -> @expr, cx: @ext_ctxt, span: span, substructure: &Substructure) -> @expr { match 
*substructure.fields { - EnumMatching(_, _, all_fields) | Struct(all_fields) => { + EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { // call self_n.method(other_1_n, other_2_n, ...) let called = do all_fields.map |&(_, self_field, other_fields)| { build::mk_method_call(cx, span, @@ -896,8 +896,9 @@ pub fn cs_same_method(f: &fn(@ext_ctxt, span, ~[@expr]) -> @expr, f(cx, span, called) }, - EnumNonMatching(all_enums) => enum_nonmatch_f(cx, span, - all_enums, substructure.nonself_args), + EnumNonMatching(ref all_enums) => enum_nonmatch_f(cx, span, + *all_enums, + substructure.nonself_args), StaticEnum(*) | StaticStruct(*) => { cx.span_bug(span, "Static function in `deriving`") } diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index 3b94a95dfe0..ba1f4e3ebb2 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -59,7 +59,7 @@ pub fn expand_meta_deriving(cx: @ext_ctxt, use ast::{meta_list, meta_name_value, meta_word}; match mitem.node { - meta_name_value(_, l) => { + meta_name_value(_, ref l) => { cx.span_err(l.span, ~"unexpected value in `deriving`"); in_items } @@ -67,7 +67,7 @@ pub fn expand_meta_deriving(cx: @ext_ctxt, cx.span_warn(mitem.span, ~"empty trait list in `deriving`"); in_items } - meta_list(_, titems) => { + meta_list(_, ref titems) => { do titems.foldr(in_items) |&titem, in_items| { match titem.node { meta_name_value(tname, _) | @@ -92,9 +92,9 @@ pub fn expand_meta_deriving(cx: @ext_ctxt, ~"ToStr" => expand!(to_str::expand_deriving_to_str), - tname => { + ref tname => { cx.span_err(titem.span, fmt!("unknown \ - `deriving` trait: `%s`", tname)); + `deriving` trait: `%s`", *tname)); in_items } } diff --git a/src/libsyntax/ext/deriving/ty.rs b/src/libsyntax/ext/deriving/ty.rs index 0bb88dae26b..768ac7458d6 100644 --- a/src/libsyntax/ext/deriving/ty.rs +++ b/src/libsyntax/ext/deriving/ty.rs @@ -63,7 +63,7 @@ pub impl Path { fn to_path(&self, cx: @ext_ctxt, span: span, self_ty: ident, self_generics: &Generics) -> @ast::Path { let idents = self.path.map(|s| cx.ident_of(*s) ); - let lt = mk_lifetime(cx, span, self.lifetime); + let lt = mk_lifetime(cx, span, &self.lifetime); let tys = self.params.map(|t| t.to_ty(cx, span, self_ty, self_generics)); if self.global { @@ -106,9 +106,9 @@ pub fn nil_ty() -> Ty { Tuple(~[]) } -fn mk_lifetime(cx: @ext_ctxt, span: span, lt: Option<~str>) -> Option<@ast::Lifetime> { - match lt { - Some(s) => Some(@build::mk_lifetime(cx, span, cx.ident_of(s))), +fn mk_lifetime(cx: @ext_ctxt, span: span, lt: &Option<~str>) -> Option<@ast::Lifetime> { + match *lt { + Some(ref s) => Some(@build::mk_lifetime(cx, span, cx.ident_of(*s))), None => None } } @@ -123,10 +123,10 @@ pub impl Ty { Owned => { build::mk_ty_uniq(cx, span, raw_ty) } - Managed(copy mutbl) => { + Managed(mutbl) => { build::mk_ty_box(cx, span, raw_ty, mutbl) } - Borrowed(copy lt, copy mutbl) => { + Borrowed(ref lt, mutbl) => { let lt = mk_lifetime(cx, span, lt); build::mk_ty_rptr(cx, span, raw_ty, lt, mutbl) } @@ -216,20 +216,20 @@ pub impl LifetimeBounds { } -pub fn get_explicit_self(cx: @ext_ctxt, span: span, self_ptr: Option) +pub fn get_explicit_self(cx: @ext_ctxt, span: span, self_ptr: &Option) -> (@expr, ast::self_ty) { let self_path = build::make_self(cx, span); - match self_ptr { + match *self_ptr { None => { (self_path, respan(span, ast::sty_value)) } - Some(ptr) => { + Some(ref ptr) => { let self_ty = respan( span, - match ptr { + match *ptr { Owned => ast::sty_uniq(ast::m_imm), Managed(mutbl) => 
ast::sty_box(mutbl), - Borrowed(lt, mutbl) => { + Borrowed(ref lt, mutbl) => { let lt = lt.map(|s| @build::mk_lifetime(cx, span, cx.ident_of(*s))); ast::sty_region(lt, mutbl) diff --git a/src/libsyntax/ext/fmt.rs b/src/libsyntax/ext/fmt.rs index 1a8edec3714..26b3178a911 100644 --- a/src/libsyntax/ext/fmt.rs +++ b/src/libsyntax/ext/fmt.rs @@ -62,7 +62,7 @@ fn pieces_to_expr(cx: @ext_ctxt, sp: span, // which tells the RT::conv* functions how to perform the conversion fn make_rt_conv_expr(cx: @ext_ctxt, sp: span, cnv: &Conv) -> @ast::expr { - fn make_flags(cx: @ext_ctxt, sp: span, flags: ~[Flag]) -> @ast::expr { + fn make_flags(cx: @ext_ctxt, sp: span, flags: &[Flag]) -> @ast::expr { let mut tmp_expr = make_rt_path_expr(cx, sp, "flag_none"); for flags.each |f| { let fstr = match *f { diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index 7c78ec066d0..f897eb787e5 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -154,14 +154,14 @@ pub struct protocol_ { pub impl protocol_ { /// Get a state. - fn get_state(&self, name: ~str) -> state { - self.states.find(|i| i.name == name).get() + fn get_state(&self, name: &str) -> state { + self.states.find(|i| name == i.name).get() } fn get_state_by_id(&self, id: uint) -> state { self.states[id] } - fn has_state(&self, name: ~str) -> bool { - self.states.find(|i| i.name == name).is_some() + fn has_state(&self, name: &str) -> bool { + self.states.find(|i| name == i.name).is_some() } fn filename(&self) -> ~str { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index d5b3adca168..f4227cd2f2c 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -452,9 +452,9 @@ fn mk_binop(cx: @ext_ctxt, sp: span, bop: token::binop) -> @ast::expr { ids_ext(cx, ~[name.to_owned()])) } -fn mk_token(cx: @ext_ctxt, sp: span, tok: token::Token) -> @ast::expr { +fn mk_token(cx: @ext_ctxt, sp: span, tok: &token::Token) -> @ast::expr { - match tok { + match *tok { BINOP(binop) => { return build::mk_call(cx, sp, ids_ext(cx, ~[~"BINOP"]), @@ -561,7 +561,7 @@ fn mk_token(cx: @ext_ctxt, sp: span, tok: token::Token) -> @ast::expr { _ => () } - let name = match tok { + let name = match *tok { EQ => "EQ", LT => "LT", LE => "LE", @@ -612,7 +612,7 @@ fn mk_tt(cx: @ext_ctxt, sp: span, tt: &ast::token_tree) let e_tok = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_tok"]), - ~[e_sp, mk_token(cx, sp, *tok)]); + ~[e_sp, mk_token(cx, sp, tok)]); let e_push = build::mk_method_call(cx, sp, build::mk_path(cx, sp, ids_ext(cx, ~[~"tt"])), diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 46b09aca8b2..aa211973f1c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -130,7 +130,6 @@ pub fn count_names(ms: &[matcher]) -> uint { }}) } -#[allow(non_implicitly_copyable_typarams)] pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) -> ~MatcherPos { let mut match_idx_hi = 0u; @@ -184,15 +183,15 @@ pub enum named_match { pub type earley_item = ~MatcherPos; -pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match]) +pub fn nameize(p_s: @mut ParseSess, ms: &[matcher], res: &[@named_match]) -> HashMap { - fn n_rec(p_s: @mut ParseSess, m: matcher, res: ~[@named_match], + fn n_rec(p_s: @mut ParseSess, m: &matcher, res: &[@named_match], ret_val: &mut HashMap) { - match m { + match *m { codemap::spanned {node: match_tok(_), _} => (), codemap::spanned {node: match_seq(ref more_ms, _, _, _, _), _} 
=> { - for (*more_ms).each() |next_m| { - n_rec(p_s, *next_m, res, ret_val) + for more_ms.each |next_m| { + n_rec(p_s, next_m, res, ret_val) }; } codemap::spanned { @@ -207,7 +206,7 @@ pub fn nameize(p_s: @mut ParseSess, ms: ~[matcher], res: ~[@named_match]) } } let mut ret_val = HashMap::new(); - for ms.each() |m| { n_rec(p_s, *m, res, &mut ret_val) } + for ms.each |m| { n_rec(p_s, m, res, &mut ret_val) } return ret_val; } @@ -234,10 +233,10 @@ pub fn parse( sess: @mut ParseSess, cfg: ast::crate_cfg, rdr: @reader, - ms: ~[matcher] + ms: &[matcher] ) -> parse_result { let mut cur_eis = ~[]; - cur_eis.push(initial_matcher_pos(copy ms, None, rdr.peek().sp.lo)); + cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo)); loop { let mut bb_eis = ~[]; // black-box parsed by parser.rs @@ -277,7 +276,7 @@ pub fn parse( // Only touch the binders we have actually bound for uint::range(ei.match_lo, ei.match_hi) |idx| { - let sub = ei.matches[idx]; + let sub = copy ei.matches[idx]; new_pos.matches[idx] .push(@matched_seq(sub, mk_sp(ei.sp_lo, @@ -410,31 +409,31 @@ pub fn parse( } } -pub fn parse_nt(p: &Parser, name: ~str) -> nonterminal { +pub fn parse_nt(p: &Parser, name: &str) -> nonterminal { match name { - ~"item" => match p.parse_item(~[]) { + "item" => match p.parse_item(~[]) { Some(i) => token::nt_item(i), None => p.fatal(~"expected an item keyword") }, - ~"block" => token::nt_block(p.parse_block()), - ~"stmt" => token::nt_stmt(p.parse_stmt(~[])), - ~"pat" => token::nt_pat(p.parse_pat(true)), - ~"expr" => token::nt_expr(p.parse_expr()), - ~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)), + "block" => token::nt_block(p.parse_block()), + "stmt" => token::nt_stmt(p.parse_stmt(~[])), + "pat" => token::nt_pat(p.parse_pat(true)), + "expr" => token::nt_expr(p.parse_expr()), + "ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)), // this could be handled like a token, since it is one - ~"ident" => match *p.token { + "ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " + token::to_str(p.reader.interner(), &copy *p.token)) }, - ~"path" => token::nt_path(p.parse_path_with_tps(false)), - ~"tt" => { + "path" => token::nt_path(p.parse_path_with_tps(false)), + "tt" => { *p.quote_depth += 1u; //but in theory, non-quoted tts might be useful let res = token::nt_tt(@p.parse_token_tree()); *p.quote_depth -= 1u; res } - ~"matchers" => token::nt_matchers(p.parse_matchers()), + "matchers" => token::nt_matchers(p.parse_matchers()), _ => p.fatal(~"Unsupported builtin nonterminal parser: " + name) } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 169652b1120..be6cc7a846a 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -63,19 +63,19 @@ pub fn add_new_extension(cx: @ext_ctxt, // Extract the arguments: let lhses = match *argument_map.get(&lhs_nm) { - @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s, + @matched_seq(ref s, _) => /* FIXME (#2543) */ @copy *s, _ => cx.span_bug(sp, ~"wrong-structured lhs") }; let rhses = match *argument_map.get(&rhs_nm) { - @matched_seq(ref s, _) => /* FIXME (#2543) */ copy *s, + @matched_seq(ref s, _) => /* FIXME (#2543) */ @copy *s, _ => cx.span_bug(sp, ~"wrong-structured rhs") }; // Given `lhses` and `rhses`, this is the new macro we create fn generic_extension(cx: @ext_ctxt, sp: span, name: ident, arg: &[ast::token_tree], - lhses: ~[@named_match], rhses: ~[@named_match]) + 
lhses: &[@named_match], rhses: &[@named_match]) -> MacResult { if cx.trace_macros() { @@ -93,7 +93,7 @@ pub fn add_new_extension(cx: @ext_ctxt, let s_d = cx.parse_sess().span_diagnostic; let itr = cx.parse_sess().interner; - for lhses.eachi() |i, lhs| { // try each arm's matchers + for lhses.eachi |i, lhs| { // try each arm's matchers match *lhs { @matched_nonterminal(nt_matchers(ref mtcs)) => { // `none` is because we're not interpolating @@ -103,7 +103,7 @@ pub fn add_new_extension(cx: @ext_ctxt, None, vec::to_owned(arg) ) as @reader; - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, (*mtcs)) { + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) { success(named_matches) => { let rhs = match rhses[i] { // okay, what's your transcriber? @@ -146,7 +146,7 @@ pub fn add_new_extension(cx: @ext_ctxt, } let exp: @fn(@ext_ctxt, span, &[ast::token_tree]) -> MacResult = - |cx, sp, arg| generic_extension(cx, sp, name, arg, lhses, rhses); + |cx, sp, arg| generic_extension(cx, sp, name, arg, *lhses, *rhses); return MRDef(MacroDef{ name: copy *cx.parse_sess().interner.get(name), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 19c83e21a86..438efb2326c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -91,11 +91,11 @@ pub fn dup_tt_reader(r: @mut TtReader) -> @mut TtReader { sp_diag: r.sp_diag, interner: r.interner, stack: dup_tt_frame(r.stack), - interpolations: r.interpolations, repeat_idx: copy r.repeat_idx, repeat_len: copy r.repeat_len, cur_tok: copy r.cur_tok, - cur_span: r.cur_span + cur_span: r.cur_span, + interpolations: copy r.interpolations, } } @@ -127,7 +127,7 @@ enum lis { lis_unconstrained, lis_constraint(uint, ident), lis_contradiction(~str) } -fn lockstep_iter_size(t: token_tree, r: &mut TtReader) -> lis { +fn lockstep_iter_size(t: &token_tree, r: &mut TtReader) -> lis { fn lis_merge(lhs: lis, rhs: lis, r: &mut TtReader) -> lis { match lhs { lis_unconstrained => copy rhs, @@ -146,10 +146,10 @@ fn lockstep_iter_size(t: token_tree, r: &mut TtReader) -> lis { } } } - match t { + match *t { tt_delim(ref tts) | tt_seq(_, ref tts, _, _) => { - vec::foldl(lis_unconstrained, (*tts), |lis, tt| { - let lis2 = lockstep_iter_size(*tt, r); + vec::foldl(lis_unconstrained, *tts, |lis, tt| { + let lis2 = lockstep_iter_size(tt, r); lis_merge(lis, lis2, r) }) } @@ -230,7 +230,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } tt_seq(sp, copy tts, copy sep, zerok) => { let t = tt_seq(sp, copy tts, copy sep, zerok); - match lockstep_iter_size(t, r) { + match lockstep_iter_size(&t, r) { lis_unconstrained => { r.sp_diag.span_fatal( sp, /* blame macro writer */ diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 842f9e9ab33..6ed8994ed33 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -44,7 +44,7 @@ pub trait ast_fold { pub struct AstFoldFns { //unlike the others, item_ is non-trivial fold_crate: @fn(&crate_, span, @ast_fold) -> (crate_, span), - fold_view_item: @fn(view_item_, @ast_fold) -> view_item_, + fold_view_item: @fn(&view_item_, @ast_fold) -> view_item_, fold_foreign_item: @fn(@foreign_item, @ast_fold) -> @foreign_item, fold_item: @fn(@item, @ast_fold) -> Option<@item>, fold_struct_field: @fn(@struct_field, @ast_fold) -> @struct_field, @@ -112,7 +112,7 @@ fn fold_arg_(a: arg, fld: @ast_fold) -> arg { } } //used in noop_fold_expr, and possibly elsewhere in the future -fn fold_mac_(m: mac, fld: @ast_fold) -> mac { +fn fold_mac_(m: &mac, fld: @ast_fold) -> mac { spanned { 
node: match m.node { mac_invoc_tt(*) => copy m.node }, span: fld.new_span(m.span), @@ -174,8 +174,8 @@ pub fn noop_fold_crate(c: &crate_, fld: @ast_fold) -> crate_ { } } -fn noop_fold_view_item(vi: view_item_, _fld: @ast_fold) -> view_item_ { - return /* FIXME (#2543) */ copy vi; +fn noop_fold_view_item(vi: &view_item_, _fld: @ast_fold) -> view_item_ { + return /* FIXME (#2543) */ copy *vi; } @@ -351,7 +351,7 @@ fn noop_fold_stmt(s: &stmt_, fld: @ast_fold) -> stmt_ { stmt_decl(d, nid) => stmt_decl(fld.fold_decl(d), fld.new_id(nid)), stmt_expr(e, nid) => stmt_expr(fld.fold_expr(e), fld.new_id(nid)), stmt_semi(e, nid) => stmt_semi(fld.fold_expr(e), fld.new_id(nid)), - stmt_mac(ref mac, semi) => stmt_mac(fold_mac((*mac)), semi) + stmt_mac(ref mac, semi) => stmt_mac(fold_mac(mac), semi) } } @@ -540,14 +540,14 @@ pub fn noop_fold_expr(e: &expr_, fld: @ast_fold) -> expr_ { fld.fold_expr(e) ) } - expr_inline_asm(a) => { + expr_inline_asm(ref a) => { expr_inline_asm(inline_asm { inputs: a.inputs.map(|&(c, in)| (c, fld.fold_expr(in))), outputs: a.outputs.map(|&(c, out)| (c, fld.fold_expr(out))), - .. a + .. copy *a }) } - expr_mac(ref mac) => expr_mac(fold_mac((*mac))), + expr_mac(ref mac) => expr_mac(fold_mac(mac)), expr_struct(path, ref fields, maybe_expr) => { expr_struct( fld.fold_path(path), @@ -590,12 +590,12 @@ pub fn noop_fold_ty(t: &ty_, fld: @ast_fold) -> ty_ { region: f.region, onceness: f.onceness, decl: fold_fn_decl(&f.decl, fld), - lifetimes: f.lifetimes, + lifetimes: copy f.lifetimes, }) } ty_bare_fn(ref f) => { ty_bare_fn(@TyBareFn { - lifetimes: f.lifetimes, + lifetimes: copy f.lifetimes, purity: f.purity, abis: f.abis, decl: fold_fn_decl(&f.decl, fld) @@ -609,7 +609,7 @@ pub fn noop_fold_ty(t: &ty_, fld: @ast_fold) -> ty_ { fld.fold_expr(e) ) } - ty_mac(ref mac) => ty_mac(fold_mac(*mac)) + ty_mac(ref mac) => ty_mac(fold_mac(mac)) } } @@ -740,7 +740,7 @@ impl ast_fold for AstFoldFns { fn fold_view_item(@self, x: @view_item) -> @view_item { @ast::view_item { - node: (self.fold_view_item)(x.node, self as @ast_fold), + node: (self.fold_view_item)(&x.node, self as @ast_fold), attrs: vec::map(x.attrs, |a| fold_attribute_(*a, self as @ast_fold)), vis: x.vis, diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index 8faba022a90..fa91b968f69 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -192,7 +192,7 @@ fn read_line_comments(rdr: @mut StringReader, code_to_the_left: bool, // FIXME #3961: This is not the right way to convert string byte // offsets to characters. 
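The FIXME #3961 note above flags that stepping through byte offsets, as `all_whitespace` does, is not a correct way to reason about characters. A minimal sketch of why, in present-day Rust rather than the pre-1.0 dialect this patch uses (the string literal is an arbitrary example, not taken from the patch):

fn main() {
    let s = "a\u{e9}b";               // 'é' occupies two bytes in UTF-8
    assert_eq!(s.len(), 4);           // byte length
    assert_eq!(s.chars().count(), 3); // character count
    // Byte index 2 falls inside 'é', so it is not a character boundary.
    assert!(!s.is_char_boundary(2));
}
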
-fn all_whitespace(s: ~str, begin: uint, end: uint) -> bool { +fn all_whitespace(s: &str, begin: uint, end: uint) -> bool { let mut i: uint = begin; while i != end { if !is_whitespace(s[i] as char) { return false; } i += 1u; diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 3e64133e893..211d123e887 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -253,9 +253,9 @@ pub impl Parser { } } - fn token_is_obsolete_ident(&self, ident: &str, token: Token) -> bool { - match token { - token::IDENT(copy sid, _) => { + fn token_is_obsolete_ident(&self, ident: &str, token: &Token) -> bool { + match *token { + token::IDENT(sid, _) => { str::eq_slice(*self.id_to_str(sid), ident) } _ => false @@ -263,7 +263,7 @@ pub impl Parser { } fn is_obsolete_ident(&self, ident: &str) -> bool { - self.token_is_obsolete_ident(ident, *self.token) + self.token_is_obsolete_ident(ident, self.token) } fn eat_obsolete_ident(&self, ident: &str) -> bool { @@ -289,7 +289,7 @@ pub impl Parser { fn try_parse_obsolete_with(&self) -> bool { if *self.token == token::COMMA && self.token_is_obsolete_ident("with", - self.look_ahead(1u)) { + &self.look_ahead(1u)) { self.bump(); } if self.eat_obsolete_ident("with") { @@ -301,13 +301,13 @@ pub impl Parser { } } - fn try_parse_obsolete_priv_section(&self, attrs: ~[attribute]) -> bool { + fn try_parse_obsolete_priv_section(&self, attrs: &[attribute]) -> bool { if self.is_keyword(&~"priv") && self.look_ahead(1) == token::LBRACE { self.obsolete(copy *self.span, ObsoletePrivSection); self.eat_keyword(&~"priv"); self.bump(); while *self.token != token::RBRACE { - self.parse_single_struct_field(ast::private, attrs); + self.parse_single_struct_field(ast::private, attrs.to_owned()); } self.bump(); true diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index b35ae169e1a..2a7af36f6f2 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -708,7 +708,7 @@ pub impl Parser { self.obsolete(*self.last_span, ObsoleteBareFnType); result } else if *self.token == token::MOD_SEP - || is_ident_or_path(&*self.token) { + || is_ident_or_path(self.token) { // NAMED TYPE let path = self.parse_path_with_tps(false); ty_path(path, self.get_id()) @@ -1556,9 +1556,12 @@ pub impl Parser { |p| p.parse_token_tree() ); let (s, z) = p.parse_sep_and_zerok(); + let seq = match seq { + spanned { node, _ } => node, + }; tt_seq( - mk_sp(sp.lo ,p.span.hi), - seq.node, + mk_sp(sp.lo, p.span.hi), + seq, s, z ) @@ -1624,9 +1627,9 @@ pub impl Parser { token::LBRACE | token::LPAREN | token::LBRACKET => { self.parse_matcher_subseq( name_idx, - *self.token, + copy *self.token, // tjc: not sure why we need a copy - token::flip_delimiter(&*self.token) + token::flip_delimiter(self.token) ) } _ => self.fatal(~"expected open delimiter") @@ -1986,14 +1989,15 @@ pub impl Parser { // them as the lambda arguments let e = self.parse_expr_res(RESTRICT_NO_BAR_OR_DOUBLEBAR_OP); match e.node { - expr_call(f, args, NoSugar) => { + expr_call(f, /*bad*/ copy args, NoSugar) => { let block = self.parse_lambda_block_expr(); let last_arg = self.mk_expr(block.span.lo, block.span.hi, ctor(block)); let args = vec::append(args, ~[last_arg]); self.mk_expr(lo.lo, block.span.hi, expr_call(f, args, sugar)) } - expr_method_call(f, i, tps, args, NoSugar) => { + expr_method_call(f, i, /*bad*/ copy tps, + /*bad*/ copy args, NoSugar) => { let block = self.parse_lambda_block_expr(); let last_arg = self.mk_expr(block.span.lo, block.span.hi, 
ctor(block)); @@ -2001,7 +2005,7 @@ pub impl Parser { self.mk_expr(lo.lo, block.span.hi, expr_method_call(f, i, tps, args, sugar)) } - expr_field(f, i, tps) => { + expr_field(f, i, /*bad*/ copy tps) => { let block = self.parse_lambda_block_expr(); let last_arg = self.mk_expr(block.span.lo, block.span.hi, ctor(block)); @@ -2259,7 +2263,7 @@ pub impl Parser { let lo = self.span.lo; let mut hi = self.span.hi; let pat; - match *self.token { + match /*bad*/ copy *self.token { // parse _ token::UNDERSCORE => { self.bump(); pat = pat_wild; } // parse @pat @@ -2373,8 +2377,8 @@ pub impl Parser { self.expect(&token::RBRACKET); pat = ast::pat_vec(before, slice, after); } - tok => { - if !is_ident_or_path(&tok) + ref tok => { + if !is_ident_or_path(tok) || self.is_keyword(&~"true") || self.is_keyword(&~"false") { @@ -2897,7 +2901,7 @@ pub impl Parser { loop; } - if is_ident_or_path(&*self.token) { + if is_ident_or_path(self.token) { self.obsolete(*self.span, ObsoleteTraitBoundSeparator); } @@ -3531,6 +3535,7 @@ pub impl Parser { fn parse_item_mod(&self, outer_attrs: ~[ast::attribute]) -> item_info { let id_span = *self.span; let id = self.parse_ident(); + let merge = ::attr::first_attr_value_str_by_name(outer_attrs, "merge"); let info_ = if *self.token == token::SEMI { self.bump(); // This mod is in an external file. Let's go get it! @@ -3550,7 +3555,7 @@ pub impl Parser { // (int-template, iter-trait). If there's a 'merge' attribute // on the mod, then we'll go and suck in another file and merge // its contents - match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") { + match merge { Some(path) => { let prefix = Path( self.sess.cm.span_to_filename(*self.span)); @@ -3636,10 +3641,7 @@ pub impl Parser { new_sub_parser_from_file(self.sess, copy self.cfg, &full_path, id_sp); let (inner, next) = p0.parse_inner_attrs_and_next(); - let mod_attrs = vec::append( - /*bad*/ copy outer_attrs, - inner - ); + let mod_attrs = vec::append(outer_attrs, inner); let first_item_outer_attrs = next; let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); return (ast::item_mod(m0), mod_attrs); @@ -4105,7 +4107,8 @@ pub impl Parser { } if self.eat_keyword(&~"mod") { // MODULE ITEM - let (ident, item_, extra_attrs) = self.parse_item_mod(attrs); + let (ident, item_, extra_attrs) = + self.parse_item_mod(/*bad*/ copy attrs); return iovi_item(self.mk_item(lo, self.last_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs))); diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 43f62d72a9f..7944469cb96 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -457,9 +457,9 @@ pub impl Printer { } } } - fn print_str(&mut self, s: ~str) { + fn print_str(&mut self, s: &str) { while self.pending_indentation > 0 { - (*self.out).write_str(~" "); + (*self.out).write_str(" "); self.pending_indentation -= 1; } (*self.out).write_str(s); @@ -562,16 +562,16 @@ pub fn end(p: @mut Printer) { p.pretty_print(END); } pub fn eof(p: @mut Printer) { p.pretty_print(EOF); } -pub fn word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@/*bad*/ copy wrd, wrd.len() as int)); +pub fn word(p: @mut Printer, wrd: &str) { + p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), wrd.len() as int)); } -pub fn huge_word(p: @mut Printer, wrd: ~str) { - p.pretty_print(STRING(@/*bad*/ copy wrd, size_infinity)); +pub fn huge_word(p: @mut Printer, wrd: &str) { + p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), size_infinity)); } -pub fn zero_word(p: @mut Printer, wrd: ~str) { - 
p.pretty_print(STRING(@/*bad*/ copy wrd, 0)); +pub fn zero_word(p: @mut Printer, wrd: &str) { + p.pretty_print(STRING(@/*bad*/ wrd.to_owned(), 0)); } pub fn spaces(p: @mut Printer, n: uint) { break_offset(p, n, 0); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 599024a781f..1e94c16f87a 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -156,7 +156,7 @@ pub fn lifetime_to_str(e: &ast::Lifetime, intr: @ident_interner) -> ~str { } pub fn tt_to_str(tt: ast::token_tree, intr: @ident_interner) -> ~str { - to_str(tt, print_tt, intr) + to_str(&tt, print_tt, intr) } pub fn tts_to_str(tts: &[ast::token_tree], intr: @ident_interner) -> ~str { @@ -213,7 +213,7 @@ pub fn attribute_to_str(attr: ast::attribute, intr: @ident_interner) -> ~str { to_str(attr, print_attribute, intr) } -pub fn variant_to_str(var: ast::variant, intr: @ident_interner) -> ~str { +pub fn variant_to_str(var: &ast::variant, intr: @ident_interner) -> ~str { to_str(var, print_variant, intr) } @@ -229,9 +229,9 @@ pub fn box(s: @ps, u: uint, b: pp::breaks) { pub fn nbsp(s: @ps) { word(s.s, ~" "); } -pub fn word_nbsp(s: @ps, w: ~str) { word(s.s, w); nbsp(s); } +pub fn word_nbsp(s: @ps, w: &str) { word(s.s, w); nbsp(s); } -pub fn word_space(s: @ps, w: ~str) { word(s.s, w); space(s.s); } +pub fn word_space(s: @ps, w: &str) { word(s.s, w); space(s.s); } pub fn popen(s: @ps) { word(s.s, ~"("); } @@ -346,7 +346,7 @@ pub fn commasep_exprs(s: @ps, b: breaks, exprs: &[@ast::expr]) { commasep_cmnt(s, b, exprs, print_expr, expr_span); } -pub fn print_mod(s: @ps, _mod: &ast::_mod, attrs: ~[ast::attribute]) { +pub fn print_mod(s: @ps, _mod: &ast::_mod, attrs: &[ast::attribute]) { print_inner_attributes(s, attrs); for _mod.view_items.each |vitem| { print_view_item(s, *vitem); @@ -355,7 +355,7 @@ pub fn print_mod(s: @ps, _mod: &ast::_mod, attrs: ~[ast::attribute]) { } pub fn print_foreign_mod(s: @ps, nmod: &ast::foreign_mod, - attrs: ~[ast::attribute]) { + attrs: &[ast::attribute]) { print_inner_attributes(s, attrs); for nmod.view_items.each |vitem| { print_view_item(s, *vitem); @@ -539,7 +539,7 @@ pub fn print_item(s: @ps, item: @ast::item) { ast::item_enum(ref enum_definition, ref params) => { print_enum_def( s, - *enum_definition, + enum_definition, params, item.ident, item.span, @@ -621,7 +621,7 @@ fn print_trait_ref(s: @ps, t: &ast::trait_ref) { print_path(s, t.path, false); } -pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def, +pub fn print_enum_def(s: @ps, enum_definition: &ast::enum_def, generics: &ast::Generics, ident: ast::ident, span: codemap::span, visibility: ast::visibility) { head(s, visibility_qualified(visibility, ~"enum")); @@ -632,7 +632,7 @@ pub fn print_enum_def(s: @ps, enum_definition: ast::enum_def, } pub fn print_variants(s: @ps, - variants: ~[ast::variant], + variants: &[ast::variant], span: codemap::span) { bopen(s); for variants.each |v| { @@ -640,7 +640,7 @@ pub fn print_variants(s: @ps, maybe_print_comment(s, v.span.lo); print_outer_attributes(s, v.node.attrs); ibox(s, indent_unit); - print_variant(s, *v); + print_variant(s, v); word(s.s, ~","); end(s); maybe_print_trailing_comment(s, v.span, None); @@ -727,15 +727,15 @@ pub fn print_struct(s: @ps, /// appropriate macro, transcribe back into the grammar we just parsed from, /// and then pretty-print the resulting AST nodes (so, e.g., we print /// expression arguments as expressions). It can be done! I think. 
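The `word`, `huge_word`, and `zero_word` changes above (and `word_nbsp`/`word_space` below) swap owned `~str` parameters for borrowed `&str`, so the copy happens only where an owned value is actually needed. A rough analogue in present-day Rust; the buffer and the `word_owned`/`word_borrowed` names are hypothetical, not this crate's API:

// Taking &str lets callers pass a literal or a borrowed String;
// the single allocation happens at the point of storage.
fn word_owned(buf: &mut Vec<String>, w: String) {
    // Forces every caller to allocate an owned string up front.
    buf.push(w);
}

fn word_borrowed(buf: &mut Vec<String>, w: &str) {
    buf.push(w.to_owned()); // one explicit copy, where it is stored
}

fn main() {
    let mut buf = Vec::new();
    word_owned(&mut buf, "hi".to_owned());
    word_borrowed(&mut buf, "there");
    assert_eq!(buf, ["hi", "there"]);
}
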
-pub fn print_tt(s: @ps, tt: ast::token_tree) { - match tt { +pub fn print_tt(s: @ps, tt: &ast::token_tree) { + match *tt { ast::tt_delim(ref tts) => print_tts(s, *tts), ast::tt_tok(_, ref tk) => { word(s.s, parse::token::to_str(s.intr, tk)); } ast::tt_seq(_, ref tts, ref sep, zerok) => { word(s.s, ~"$("); - for (*tts).each() |tt_elt| { print_tt(s, *tt_elt); } + for (*tts).each() |tt_elt| { print_tt(s, tt_elt); } word(s.s, ~")"); match (*sep) { Some(ref tk) => word(s.s, parse::token::to_str(s.intr, tk)), @@ -756,12 +756,12 @@ pub fn print_tts(s: @ps, tts: &[ast::token_tree]) { if i != 0 { space(s.s); } - print_tt(s, *tt); + print_tt(s, tt); } end(s); } -pub fn print_variant(s: @ps, v: ast::variant) { +pub fn print_variant(s: @ps, v: &ast::variant) { print_visibility(s, v.node.vis); match v.node.kind { ast::tuple_variant_kind(ref args) => { @@ -819,7 +819,7 @@ pub fn print_method(s: @ps, meth: @ast::method) { print_block_with_attrs(s, &meth.body, meth.attrs); } -pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) { +pub fn print_outer_attributes(s: @ps, attrs: &[ast::attribute]) { let mut count = 0; for attrs.each |attr| { match attr.node.style { @@ -830,7 +830,7 @@ pub fn print_outer_attributes(s: @ps, attrs: ~[ast::attribute]) { if count > 0 { hardbreak_if_not_bol(s); } } -pub fn print_inner_attributes(s: @ps, attrs: ~[ast::attribute]) { +pub fn print_inner_attributes(s: @ps, attrs: &[ast::attribute]) { let mut count = 0; for attrs.each |attr| { match attr.node.style { @@ -879,7 +879,7 @@ pub fn print_stmt(s: @ps, st: &ast::stmt) { } ast::stmt_mac(ref mac, semi) => { space_if_not_bol(s); - print_mac(s, (*mac)); + print_mac(s, mac); if semi { word(s.s, ~";"); } } } @@ -892,18 +892,18 @@ pub fn print_block(s: @ps, blk: &ast::blk) { } pub fn print_block_unclosed(s: @ps, blk: &ast::blk) { - print_possibly_embedded_block_(s, blk, block_normal, indent_unit, ~[], + print_possibly_embedded_block_(s, blk, block_normal, indent_unit, &[], false); } pub fn print_block_unclosed_indent(s: @ps, blk: &ast::blk, indented: uint) { - print_possibly_embedded_block_(s, blk, block_normal, indented, ~[], + print_possibly_embedded_block_(s, blk, block_normal, indented, &[], false); } pub fn print_block_with_attrs(s: @ps, blk: &ast::blk, - attrs: ~[ast::attribute]) { + attrs: &[ast::attribute]) { print_possibly_embedded_block_(s, blk, block_normal, indent_unit, attrs, true); } @@ -915,14 +915,14 @@ pub fn print_possibly_embedded_block(s: @ps, embedded: embed_type, indented: uint) { print_possibly_embedded_block_( - s, blk, embedded, indented, ~[], true); + s, blk, embedded, indented, &[], true); } pub fn print_possibly_embedded_block_(s: @ps, blk: &ast::blk, embedded: embed_type, indented: uint, - attrs: ~[ast::attribute], + attrs: &[ast::attribute], close_box: bool) { match blk.node.rules { ast::unsafe_blk => word_space(s, ~"unsafe"), @@ -994,7 +994,7 @@ pub fn print_if(s: @ps, test: @ast::expr, blk: &ast::blk, do_else(s, elseopt); } -pub fn print_mac(s: @ps, m: ast::mac) { +pub fn print_mac(s: @ps, m: &ast::mac) { match m.node { ast::mac_invoc_tt(pth, ref tts) => { print_path(s, pth, false); @@ -1387,7 +1387,7 @@ pub fn print_expr(s: @ps, expr: @ast::expr) { print_expr(s, expr); pclose(s); } - ast::expr_inline_asm(a) => { + ast::expr_inline_asm(ref a) => { if a.volatile { word(s.s, ~"__volatile__ asm!"); } else { @@ -1415,7 +1415,7 @@ pub fn print_expr(s: @ps, expr: @ast::expr) { print_string(s, *a.clobbers); pclose(s); } - ast::expr_mac(ref m) => print_mac(s, (*m)), + ast::expr_mac(ref m) => 
print_mac(s, m), ast::expr_paren(e) => { popen(s); print_expr(s, e); @@ -1559,7 +1559,7 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) { } } } - ast::pat_struct(path, fields, etc) => { + ast::pat_struct(path, ref fields, etc) => { print_path(s, path, true); word(s.s, ~"{"); fn print_field(s: @ps, f: ast::field_pat, refutable: bool) { @@ -1570,18 +1570,18 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) { end(s); } fn get_span(f: ast::field_pat) -> codemap::span { return f.pat.span; } - commasep_cmnt(s, consistent, fields, + commasep_cmnt(s, consistent, *fields, |s, f| print_field(s,f,refutable), get_span); if etc { - if vec::len(fields) != 0u { word_space(s, ~","); } + if fields.len() != 0u { word_space(s, ~","); } word(s.s, ~"_"); } word(s.s, ~"}"); } - ast::pat_tup(elts) => { + ast::pat_tup(ref elts) => { popen(s); - commasep(s, inconsistent, elts, |s, p| print_pat(s, p, refutable)); + commasep(s, inconsistent, *elts, |s, p| print_pat(s, p, refutable)); if elts.len() == 1 { word(s.s, ~","); } @@ -1606,9 +1606,9 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) { word(s.s, ~".."); print_expr(s, end); } - ast::pat_vec(before, slice, after) => { + ast::pat_vec(ref before, slice, ref after) => { word(s.s, ~"["); - do commasep(s, inconsistent, before) |s, p| { + do commasep(s, inconsistent, *before) |s, p| { print_pat(s, p, refutable); } for slice.each |&p| { @@ -1617,7 +1617,7 @@ pub fn print_pat(s: @ps, pat: @ast::pat, refutable: bool) { print_pat(s, p, refutable); if !after.is_empty() { word_space(s, ~","); } } - do commasep(s, inconsistent, after) |s, p| { + do commasep(s, inconsistent, *after) |s, p| { print_pat(s, p, refutable); } word(s.s, ~"]"); @@ -1832,12 +1832,12 @@ pub fn print_view_item(s: @ps, item: @ast::view_item) { print_outer_attributes(s, item.attrs); print_visibility(s, item.vis); match item.node { - ast::view_item_extern_mod(id, mta, _) => { + ast::view_item_extern_mod(id, ref mta, _) => { head(s, ~"extern mod"); print_ident(s, id); if !mta.is_empty() { popen(s); - commasep(s, consistent, mta, print_meta_item); + commasep(s, consistent, *mta, print_meta_item); pclose(s); } } @@ -1960,7 +1960,7 @@ pub fn maybe_print_trailing_comment(s: @ps, span: codemap::span, match next_pos { None => (), Some(p) => next = p } if span.hi < (*cmnt).pos && (*cmnt).pos < next && span_line.line == comment_line.line { - print_comment(s, (*cmnt)); + print_comment(s, cmnt); s.cur_cmnt_and_lit.cur_cmnt += 1u; } } @@ -1975,7 +1975,7 @@ pub fn print_remaining_comments(s: @ps) { loop { match next_comment(s) { Some(ref cmnt) => { - print_comment(s, (*cmnt)); + print_comment(s, cmnt); s.cur_cmnt_and_lit.cur_cmnt += 1u; } _ => break @@ -2055,7 +2055,7 @@ pub fn maybe_print_comment(s: @ps, pos: BytePos) { match next_comment(s) { Some(ref cmnt) => { if (*cmnt).pos < pos { - print_comment(s, (*cmnt)); + print_comment(s, cmnt); s.cur_cmnt_and_lit.cur_cmnt += 1u; } else { break; } } @@ -2064,7 +2064,7 @@ pub fn maybe_print_comment(s: @ps, pos: BytePos) { } } -pub fn print_comment(s: @ps, cmnt: comments::cmnt) { +pub fn print_comment(s: @ps, cmnt: &comments::cmnt) { match cmnt.style { comments::mixed => { assert!((vec::len(cmnt.lines) == 1u)); @@ -2274,7 +2274,7 @@ mod test { vis: ast::public, }); - let varstr = variant_to_str(var,mock_interner); + let varstr = variant_to_str(&var,mock_interner); assert_eq!(&varstr,&~"pub principal_skinner"); } } diff --git a/src/libsyntax/syntax.rc b/src/libsyntax/syntax.rc index b8327de0f13..8deca72779e 100644 --- 
a/src/libsyntax/syntax.rc +++ b/src/libsyntax/syntax.rc @@ -20,7 +20,6 @@ #[license = "MIT/ASL2"]; #[crate_type = "lib"]; -#[allow(vecs_implicitly_copyable)]; #[allow(non_camel_case_types)]; #[deny(deprecated_pattern)]; diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index ea02d84ddac..4cfd54256f8 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -179,7 +179,7 @@ pub fn visit_item(i: @item, e: E, v: vt) { item_enum(ref enum_definition, ref tps) => { (v.visit_generics)(tps, e, v); visit_enum_def( - *enum_definition, + enum_definition, tps, e, v @@ -206,11 +206,11 @@ pub fn visit_item(i: @item, e: E, v: vt) { (v.visit_trait_method)(m, e, v); } } - item_mac(ref m) => visit_mac((*m), e, v) + item_mac(ref m) => visit_mac(m, e, v) } } -pub fn visit_enum_def(enum_definition: ast::enum_def, +pub fn visit_enum_def(enum_definition: &ast::enum_def, tps: &Generics, e: E, v: vt) { @@ -422,7 +422,7 @@ pub fn visit_stmt(s: @stmt, e: E, v: vt) { stmt_decl(d, _) => (v.visit_decl)(d, e, v), stmt_expr(ex, _) => (v.visit_expr)(ex, e, v), stmt_semi(ex, _) => (v.visit_expr)(ex, e, v), - stmt_mac(ref mac, _) => visit_mac((*mac), e, v) + stmt_mac(ref mac, _) => visit_mac(mac, e, v) } } @@ -445,7 +445,7 @@ pub fn visit_exprs(exprs: &[@expr], e: E, v: vt) { for exprs.each |ex| { (v.visit_expr)(*ex, e, v); } } -pub fn visit_mac(_m: mac, _e: E, _v: vt) { +pub fn visit_mac(_m: &mac, _e: E, _v: vt) { /* no user-serviceable parts inside */ } @@ -537,7 +537,7 @@ pub fn visit_expr(ex: @expr, e: E, v: vt) { (v.visit_expr)(lv, e, v); (v.visit_expr)(x, e, v); } - expr_mac(ref mac) => visit_mac((*mac), e, v), + expr_mac(ref mac) => visit_mac(mac, e, v), expr_paren(x) => (v.visit_expr)(x, e, v), expr_inline_asm(ref a) => { for a.inputs.each |&(_, in)| {
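The syntax.rc hunk above removes `#[allow(vecs_implicitly_copyable)]`, which is what drives the rest of this patch: owned vectors and strings stop being copied silently, so signatures take slices, match arms bind by `ref`, and the remaining copies are written out explicitly (often marked `/*bad*/ copy`). Present-day Rust makes that discipline the default; a small sketch under that assumption, with a hypothetical `Field` type rather than anything from libsyntax:

#[derive(Debug, Clone, PartialEq)]
enum Field {
    Named(String),
    Positional(u32),
}

// Borrowing a slice instead of taking an owned vector avoids any copy.
fn count_named(fields: &[Field]) -> usize {
    fields
        .iter()
        .filter(|f| matches!(f, Field::Named(_))) // match by reference: no move, no clone
        .count()
}

fn main() {
    let fields = vec![Field::Named("x".to_owned()), Field::Positional(0)];
    assert_eq!(count_named(&fields), 1);
    // The one deliberate copy is an explicit clone, the moral
    // equivalent of the patch's /*bad*/ copy annotations.
    let snapshot = fields.clone();
    assert_eq!(snapshot, fields);
}
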