auto merge of #13203 : Kimundi/rust/de-map-vec3, r=cmr
These methods required unnecessary temporaries, have been replaced with iterators, and would conflict with a possible future `Iterable` trait.
This commit is contained in:
commit
d79fbba0db
@ -1764,7 +1764,10 @@ access local variables in the enclosing scope.
|
||||
|
||||
~~~~
|
||||
let mut max = 0;
|
||||
[1, 2, 3].map(|x| if *x > max { max = *x });
|
||||
let f = |x: int| if x > max { max = x };
|
||||
for x in [1, 2, 3].iter() {
|
||||
f(*x);
|
||||
}
|
||||
~~~~
|
||||
|
||||
Stack closures are very efficient because their environment is
|
||||
|
@ -597,7 +597,7 @@ fn with_argv<T>(prog: &str, args: &[~str], cb: proc:(**libc::c_char) -> T) -> T
|
||||
// Next, convert each of the byte strings into a pointer. This is
|
||||
// technically unsafe as the caller could leak these pointers out of our
|
||||
// scope.
|
||||
let mut ptrs = tmps.map(|tmp| tmp.with_ref(|buf| buf));
|
||||
let mut ptrs: Vec<_> = tmps.iter().map(|tmp| tmp.with_ref(|buf| buf)).collect();
|
||||
|
||||
// Finally, make sure we add a null pointer.
|
||||
ptrs.push(ptr::null());
|
||||
@ -622,7 +622,9 @@ fn with_envp<T>(env: Option<~[(~str, ~str)]>, cb: proc:(*c_void) -> T) -> T {
|
||||
}
|
||||
|
||||
// Once again, this is unsafe.
|
||||
let mut ptrs = tmps.map(|tmp| tmp.with_ref(|buf| buf));
|
||||
let mut ptrs: Vec<*libc::c_char> = tmps.iter()
|
||||
.map(|tmp| tmp.with_ref(|buf| buf))
|
||||
.collect();
|
||||
ptrs.push(ptr::null());
|
||||
|
||||
cb(ptrs.as_ptr() as *c_void)
|
||||
|
@ -69,8 +69,8 @@ pub fn run(sess: &session::Session, llmod: ModuleRef,
|
||||
}
|
||||
|
||||
// Internalize everything but the reachable symbols of the current module
|
||||
let cstrs = reachable.map(|s| s.to_c_str());
|
||||
let arr = cstrs.map(|c| c.with_ref(|p| p));
|
||||
let cstrs: Vec<::std::c_str::CString> = reachable.iter().map(|s| s.to_c_str()).collect();
|
||||
let arr: Vec<*i8> = cstrs.iter().map(|c| c.with_ref(|p| p)).collect();
|
||||
let ptr = arr.as_ptr();
|
||||
unsafe {
|
||||
llvm::LLVMRustRunRestrictionPass(llmod, ptr as **libc::c_char,
|
||||
|
@ -943,9 +943,9 @@ pub fn build_session_options(matches: &getopts::Matches) -> session::Options {
|
||||
NoDebugInfo
|
||||
};
|
||||
|
||||
let addl_lib_search_paths = matches.opt_strs("L").map(|s| {
|
||||
let addl_lib_search_paths = matches.opt_strs("L").iter().map(|s| {
|
||||
Path::new(s.as_slice())
|
||||
}).move_iter().collect();
|
||||
}).collect();
|
||||
|
||||
let cfg = parse_cfgspecs(matches.opt_strs("cfg").move_iter().collect());
|
||||
let test = matches.opt_present("test");
|
||||
|
@ -1861,7 +1861,7 @@ pub fn type_to_str(&self, ty: Type) -> ~str {
|
||||
}
|
||||
|
||||
pub fn types_to_str(&self, tys: &[Type]) -> ~str {
|
||||
let strs = tys.map(|t| self.type_to_str(*t));
|
||||
let strs: Vec<~str> = tys.iter().map(|t| self.type_to_str(*t)).collect();
|
||||
format!("[{}]", strs.connect(","))
|
||||
}
|
||||
|
||||
|
@ -200,9 +200,9 @@ pub fn get_rust_path() -> Option<~str> {
|
||||
pub fn rust_path() -> Vec<Path> {
|
||||
let mut env_rust_path: Vec<Path> = match get_rust_path() {
|
||||
Some(env_path) => {
|
||||
let env_path_components: Vec<&str> =
|
||||
env_path.split_str(PATH_ENTRY_SEPARATOR).collect();
|
||||
env_path_components.map(|&s| Path::new(s))
|
||||
let env_path_components =
|
||||
env_path.split_str(PATH_ENTRY_SEPARATOR);
|
||||
env_path_components.map(|s| Path::new(s)).collect()
|
||||
}
|
||||
None => Vec::new()
|
||||
};
|
||||
|
@ -163,7 +163,7 @@ fn raw_pat(p: @Pat) -> @Pat {
|
||||
|
||||
fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: Vec<@Pat> ) {
|
||||
assert!((!pats.is_empty()));
|
||||
let ext = match is_useful(cx, &pats.map(|p| vec!(*p)), [wild()]) {
|
||||
let ext = match is_useful(cx, &pats.iter().map(|p| vec!(*p)).collect(), [wild()]) {
|
||||
not_useful => {
|
||||
// This is good, wildcard pattern isn't reachable
|
||||
return;
|
||||
@ -692,12 +692,12 @@ fn specialize(cx: &MatchCheckCtxt,
|
||||
DefVariant(_, variant_id, _) => {
|
||||
if variant(variant_id) == *ctor_id {
|
||||
let struct_fields = ty::lookup_struct_fields(cx.tcx, variant_id);
|
||||
let args = struct_fields.map(|sf| {
|
||||
let args = struct_fields.iter().map(|sf| {
|
||||
match pattern_fields.iter().find(|f| f.ident.name == sf.name) {
|
||||
Some(f) => f.pat,
|
||||
_ => wild()
|
||||
}
|
||||
});
|
||||
}).collect();
|
||||
Some(vec::append(args, r.tail()))
|
||||
} else {
|
||||
None
|
||||
|
@ -4707,18 +4707,20 @@ fn resolve_module_relative_path(&mut self,
|
||||
path: &Path,
|
||||
namespace: Namespace)
|
||||
-> Option<(Def, LastPrivate)> {
|
||||
let module_path_idents = path.segments.init().map(|ps| ps.identifier);
|
||||
let module_path_idents = path.segments.init().iter()
|
||||
.map(|ps| ps.identifier)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let containing_module;
|
||||
let last_private;
|
||||
match self.resolve_module_path(self.current_module,
|
||||
module_path_idents,
|
||||
module_path_idents.as_slice(),
|
||||
UseLexicalScope,
|
||||
path.span,
|
||||
PathSearch) {
|
||||
Failed => {
|
||||
let msg = format!("use of undeclared module `{}`",
|
||||
self.idents_to_str(module_path_idents));
|
||||
self.idents_to_str(module_path_idents.as_slice()));
|
||||
self.resolve_error(path.span, msg);
|
||||
return None;
|
||||
}
|
||||
@ -4772,21 +4774,23 @@ fn resolve_crate_relative_path(&mut self,
|
||||
path: &Path,
|
||||
namespace: Namespace)
|
||||
-> Option<(Def, LastPrivate)> {
|
||||
let module_path_idents = path.segments.init().map(|ps| ps.identifier);
|
||||
let module_path_idents = path.segments.init().iter()
|
||||
.map(|ps| ps.identifier)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let root_module = self.graph_root.get_module();
|
||||
|
||||
let containing_module;
|
||||
let last_private;
|
||||
match self.resolve_module_path_from_root(root_module,
|
||||
module_path_idents,
|
||||
module_path_idents.as_slice(),
|
||||
0,
|
||||
path.span,
|
||||
PathSearch,
|
||||
LastMod(AllPublic)) {
|
||||
Failed => {
|
||||
let msg = format!("use of undeclared module `::{}`",
|
||||
self.idents_to_str(module_path_idents));
|
||||
self.idents_to_str(module_path_idents.as_slice()));
|
||||
self.resolve_error(path.span, msg);
|
||||
return None;
|
||||
}
|
||||
|
@ -216,7 +216,7 @@ fn visit_fn_decl(&mut self,
|
||||
referenced_idents={:?} \
|
||||
early_count={}",
|
||||
n,
|
||||
referenced_idents.map(lifetime_show),
|
||||
referenced_idents.iter().map(lifetime_show).collect::<Vec<token::InternedString>>(),
|
||||
early_count);
|
||||
if referenced_idents.is_empty() {
|
||||
let scope1 = LateScope(n, &generics.lifetimes, scope);
|
||||
|
@ -134,7 +134,7 @@ impl<T:Subst> Subst for Vec<T> {
|
||||
fn subst_spanned(&self, tcx: &ty::ctxt,
|
||||
substs: &ty::substs,
|
||||
span: Option<Span>) -> Vec<T> {
|
||||
self.map(|t| t.subst_spanned(tcx, substs, span))
|
||||
self.iter().map(|t| t.subst_spanned(tcx, substs, span)).collect()
|
||||
}
|
||||
}
|
||||
impl<T:Subst> Subst for Rc<T> {
|
||||
@ -189,7 +189,7 @@ fn subst_spanned(&self, tcx: &ty::ctxt,
|
||||
ty::substs {
|
||||
regions: self.regions.subst_spanned(tcx, substs, span),
|
||||
self_ty: self.self_ty.map(|typ| typ.subst_spanned(tcx, substs, span)),
|
||||
tps: self.tps.map(|typ| typ.subst_spanned(tcx, substs, span))
|
||||
tps: self.tps.iter().map(|typ| typ.subst_spanned(tcx, substs, span)).collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1578,10 +1578,10 @@ fn compile_submatch_continue<'r,
|
||||
let pat_ty = node_id_type(bcx, pat_id);
|
||||
let pat_repr = adt::represent_type(bcx.ccx(), pat_ty);
|
||||
expr::with_field_tys(tcx, pat_ty, Some(pat_id), |discr, field_tys| {
|
||||
let rec_vals = rec_fields.map(|field_name| {
|
||||
let rec_vals = rec_fields.iter().map(|field_name| {
|
||||
let ix = ty::field_idx_strict(tcx, field_name.name, field_tys);
|
||||
adt::trans_field_ptr(bcx, pat_repr, val, discr, ix)
|
||||
});
|
||||
}).collect();
|
||||
compile_submatch(
|
||||
bcx,
|
||||
enter_rec_or_struct(bcx,
|
||||
|
@ -136,9 +136,9 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
|
||||
}
|
||||
ty::ty_struct(def_id, ref substs) => {
|
||||
let fields = ty::lookup_struct_fields(cx.tcx(), def_id);
|
||||
let mut ftys = fields.map(|field| {
|
||||
let mut ftys = fields.iter().map(|field| {
|
||||
ty::lookup_field_type(cx.tcx(), def_id, field.id, substs)
|
||||
});
|
||||
}).collect::<Vec<_>>();
|
||||
let packed = ty::lookup_packed(cx.tcx(), def_id);
|
||||
let dtor = ty::ty_dtor(cx.tcx(), def_id).has_drop_flag();
|
||||
if dtor { ftys.push(ty::mk_bool()); }
|
||||
@ -158,7 +158,7 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
|
||||
|
||||
if cases.iter().all(|c| c.tys.len() == 0) {
|
||||
// All bodies empty -> intlike
|
||||
let discrs = cases.map(|c| c.discr);
|
||||
let discrs: Vec<u64> = cases.iter().map(|c| c.discr).collect();
|
||||
let bounds = IntBounds {
|
||||
ulo: *discrs.iter().min().unwrap(),
|
||||
uhi: *discrs.iter().max().unwrap(),
|
||||
@ -218,12 +218,12 @@ fn represent_type_uncached(cx: &CrateContext, t: ty::t) -> Repr {
|
||||
let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64,
|
||||
slo: 0, shi: (cases.len() - 1) as i64 };
|
||||
let ity = range_to_inttype(cx, hint, &bounds);
|
||||
return General(ity, cases.map(|c| {
|
||||
return General(ity, cases.iter().map(|c| {
|
||||
let discr = vec!(ty_of_inttype(ity));
|
||||
mk_struct(cx,
|
||||
vec::append(discr, c.tys.as_slice()).as_slice(),
|
||||
false)
|
||||
}))
|
||||
}).collect())
|
||||
}
|
||||
_ => cx.sess().bug("adt::represent_type called on non-ADT type")
|
||||
}
|
||||
@ -270,18 +270,18 @@ fn find_ptr(&self) -> Option<uint> {
|
||||
}
|
||||
|
||||
fn get_cases(tcx: &ty::ctxt, def_id: ast::DefId, substs: &ty::substs) -> Vec<Case> {
|
||||
ty::enum_variants(tcx, def_id).map(|vi| {
|
||||
let arg_tys = vi.args.map(|&raw_ty| {
|
||||
ty::enum_variants(tcx, def_id).iter().map(|vi| {
|
||||
let arg_tys = vi.args.iter().map(|&raw_ty| {
|
||||
ty::subst(tcx, substs, raw_ty)
|
||||
});
|
||||
}).collect();
|
||||
Case { discr: vi.disr_val, tys: arg_tys }
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
|
||||
|
||||
fn mk_struct(cx: &CrateContext, tys: &[ty::t], packed: bool) -> Struct {
|
||||
let lltys = tys.map(|&ty| type_of::sizing_type_of(cx, ty));
|
||||
let llty_rec = Type::struct_(cx, lltys, packed);
|
||||
let lltys = tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect::<Vec<_>>();
|
||||
let llty_rec = Type::struct_(cx, lltys.as_slice(), packed);
|
||||
Struct {
|
||||
size: machine::llsize_of_alloc(cx, llty_rec) /*bad*/as u64,
|
||||
align: machine::llalign_of_min(cx, llty_rec) /*bad*/as u64,
|
||||
@ -464,9 +464,9 @@ fn generic_type_of(cx: &CrateContext, r: &Repr, name: Option<&str>, sizing: bool
|
||||
|
||||
fn struct_llfields(cx: &CrateContext, st: &Struct, sizing: bool) -> Vec<Type> {
|
||||
if sizing {
|
||||
st.fields.map(|&ty| type_of::sizing_type_of(cx, ty))
|
||||
st.fields.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect()
|
||||
} else {
|
||||
st.fields.map(|&ty| type_of::type_of(cx, ty))
|
||||
st.fields.iter().map(|&ty| type_of::type_of(cx, ty)).collect()
|
||||
}
|
||||
}
|
||||
|
||||
@ -700,7 +700,7 @@ fn struct_field_ptr(bcx: &Block, st: &Struct, val: ValueRef, ix: uint,
|
||||
let ccx = bcx.ccx();
|
||||
|
||||
let val = if needs_cast {
|
||||
let fields = st.fields.map(|&ty| type_of::type_of(ccx, ty));
|
||||
let fields = st.fields.iter().map(|&ty| type_of::type_of(ccx, ty)).collect::<Vec<_>>();
|
||||
let real_ty = Type::struct_(ccx, fields.as_slice(), st.packed);
|
||||
PointerCast(bcx, val, real_ty.ptr_to())
|
||||
} else {
|
||||
@ -773,11 +773,11 @@ pub fn trans_const(ccx: &CrateContext, r: &Repr, discr: Disr,
|
||||
vals).as_slice(),
|
||||
false)
|
||||
} else {
|
||||
let vals = nonnull.fields.map(|&ty| {
|
||||
let vals = nonnull.fields.iter().map(|&ty| {
|
||||
// Always use null even if it's not the `ptrfield`th
|
||||
// field; see #8506.
|
||||
C_null(type_of::sizing_type_of(ccx, ty))
|
||||
}).move_iter().collect::<Vec<ValueRef> >();
|
||||
}).collect::<Vec<ValueRef>>();
|
||||
C_struct(ccx, build_const_struct(ccx,
|
||||
nonnull,
|
||||
vals.as_slice()).as_slice(),
|
||||
|
@ -36,17 +36,17 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
|
||||
let temp_scope = fcx.push_custom_cleanup_scope();
|
||||
|
||||
// Prepare the output operands
|
||||
let outputs = ia.outputs.map(|&(ref c, out)| {
|
||||
let outputs = ia.outputs.iter().map(|&(ref c, out)| {
|
||||
constraints.push((*c).clone());
|
||||
|
||||
let out_datum = unpack_datum!(bcx, expr::trans(bcx, out));
|
||||
output_types.push(type_of::type_of(bcx.ccx(), out_datum.ty));
|
||||
out_datum.val
|
||||
|
||||
});
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
// Now the input operands
|
||||
let inputs = ia.inputs.map(|&(ref c, input)| {
|
||||
let inputs = ia.inputs.iter().map(|&(ref c, input)| {
|
||||
constraints.push((*c).clone());
|
||||
|
||||
let in_datum = unpack_datum!(bcx, expr::trans(bcx, input));
|
||||
@ -57,12 +57,15 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
|
||||
cleanup::CustomScope(temp_scope),
|
||||
callee::DontAutorefArg)
|
||||
})
|
||||
});
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
// no failure occurred preparing operands, no need to cleanup
|
||||
fcx.pop_custom_cleanup_scope(temp_scope);
|
||||
|
||||
let mut constraints = constraints.map(|s| s.get().to_str()).connect(",");
|
||||
let mut constraints = constraints.iter()
|
||||
.map(|s| s.get().to_str())
|
||||
.collect::<Vec<~str>>()
|
||||
.connect(",");
|
||||
|
||||
let mut clobbers = getClobbers();
|
||||
if !ia.clobbers.get().is_empty() && !clobbers.is_empty() {
|
||||
|
@ -121,7 +121,7 @@ pub fn Invoke(cx: &Block,
|
||||
terminate(cx, "Invoke");
|
||||
debug!("Invoke({} with arguments ({}))",
|
||||
cx.val_to_str(fn_),
|
||||
args.map(|a| cx.val_to_str(*a)).connect(", "));
|
||||
args.iter().map(|a| cx.val_to_str(*a)).collect::<Vec<~str>>().connect(", "));
|
||||
B(cx).invoke(fn_, args, then, catch, attributes)
|
||||
}
|
||||
|
||||
|
@ -780,13 +780,13 @@ pub fn inline_asm_call(&self, asm: *c_char, cons: *c_char,
|
||||
let alignstack = if alignstack { lib::llvm::True }
|
||||
else { lib::llvm::False };
|
||||
|
||||
let argtys = inputs.map(|v| {
|
||||
let argtys = inputs.iter().map(|v| {
|
||||
debug!("Asm Input Type: {:?}", self.ccx.tn.val_to_str(*v));
|
||||
val_ty(*v)
|
||||
});
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
debug!("Asm Output Type: {:?}", self.ccx.tn.type_to_str(output));
|
||||
let fty = Type::func(argtys, &output);
|
||||
let fty = Type::func(argtys.as_slice(), &output);
|
||||
unsafe {
|
||||
let v = llvm::LLVMInlineAsm(
|
||||
fty.to_ref(), asm, cons, volatile, alignstack, dia as c_uint);
|
||||
@ -800,7 +800,10 @@ pub fn call(&self, llfn: ValueRef, args: &[ValueRef],
|
||||
|
||||
debug!("Call {} with args ({})",
|
||||
self.ccx.tn.val_to_str(llfn),
|
||||
args.map(|&v| self.ccx.tn.val_to_str(v)).connect(", "));
|
||||
args.iter()
|
||||
.map(|&v| self.ccx.tn.val_to_str(v))
|
||||
.collect::<Vec<~str>>()
|
||||
.connect(", "));
|
||||
|
||||
unsafe {
|
||||
let v = llvm::LLVMBuildCall(self.llbuilder, llfn, args.as_ptr(),
|
||||
|
@ -816,7 +816,10 @@ pub fn node_id_type_params(bcx: &Block, node: ExprOrMethodCall) -> Vec<ty::t> {
|
||||
if !params.iter().all(|t| !ty::type_needs_infer(*t)) {
|
||||
bcx.sess().bug(
|
||||
format!("type parameters for node {:?} include inference types: {}",
|
||||
node, params.map(|t| bcx.ty_to_str(*t)).connect(",")));
|
||||
node, params.iter()
|
||||
.map(|t| bcx.ty_to_str(*t))
|
||||
.collect::<Vec<~str>>()
|
||||
.connect(",")));
|
||||
}
|
||||
|
||||
match bcx.fcx.param_substs {
|
||||
|
@ -733,7 +733,7 @@ pub fn create_function_debug_context(cx: &CrateContext,
|
||||
source_locations_enabled: Cell::new(false),
|
||||
};
|
||||
|
||||
let arg_pats = fn_decl.inputs.map(|arg_ref| arg_ref.pat);
|
||||
let arg_pats = fn_decl.inputs.iter().map(|arg_ref| arg_ref.pat).collect::<Vec<_>>();
|
||||
populate_scope_map(cx,
|
||||
arg_pats.as_slice(),
|
||||
top_level_block,
|
||||
@ -1218,7 +1218,7 @@ struct StructMemberDescriptionFactory {
|
||||
impl StructMemberDescriptionFactory {
|
||||
fn create_member_descriptions(&self, cx: &CrateContext)
|
||||
-> Vec<MemberDescription> {
|
||||
self.fields.map(|field| {
|
||||
self.fields.iter().map(|field| {
|
||||
let name = if field.ident.name == special_idents::unnamed_field.name {
|
||||
~""
|
||||
} else {
|
||||
@ -1231,7 +1231,7 @@ fn create_member_descriptions(&self, cx: &CrateContext)
|
||||
type_metadata: type_metadata(cx, field.mt.ty, self.span),
|
||||
offset: ComputedMemberOffset,
|
||||
}
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
|
||||
@ -1321,14 +1321,14 @@ struct TupleMemberDescriptionFactory {
|
||||
impl TupleMemberDescriptionFactory {
|
||||
fn create_member_descriptions(&self, cx: &CrateContext)
|
||||
-> Vec<MemberDescription> {
|
||||
self.component_types.map(|&component_type| {
|
||||
self.component_types.iter().map(|&component_type| {
|
||||
MemberDescription {
|
||||
name: ~"",
|
||||
llvm_type: type_of::type_of(cx, component_type),
|
||||
type_metadata: type_metadata(cx, component_type, self.span),
|
||||
offset: ComputedMemberOffset,
|
||||
}
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
|
||||
@ -1443,7 +1443,9 @@ fn describe_enum_variant(cx: &CrateContext,
|
||||
-> (DICompositeType, Type, MemberDescriptionFactory) {
|
||||
let variant_llvm_type =
|
||||
Type::struct_(cx, struct_def.fields
|
||||
.iter()
|
||||
.map(|&t| type_of::type_of(cx, t))
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice(),
|
||||
struct_def.packed);
|
||||
// Could some consistency checks here: size, align, field count, discr type
|
||||
@ -1464,11 +1466,11 @@ fn describe_enum_variant(cx: &CrateContext,
|
||||
variant_definition_span);
|
||||
|
||||
// Get the argument names from the enum variant info
|
||||
let mut arg_names = match variant_info.arg_names {
|
||||
let mut arg_names: Vec<_> = match variant_info.arg_names {
|
||||
Some(ref names) => {
|
||||
names.map(|ident| token::get_ident(*ident).get().to_str())
|
||||
names.iter().map(|ident| token::get_ident(*ident).get().to_str()).collect()
|
||||
}
|
||||
None => variant_info.args.map(|_| ~"")
|
||||
None => variant_info.args.iter().map(|_| ~"").collect()
|
||||
};
|
||||
|
||||
// If this is not a univariant enum, there is also the (unnamed) discriminant field
|
||||
|
@ -994,7 +994,7 @@ fn trans_rec_or_struct<'a>(
|
||||
with_field_tys(tcx, ty, Some(id), |discr, field_tys| {
|
||||
let mut need_base = slice::from_elem(field_tys.len(), true);
|
||||
|
||||
let numbered_fields = fields.map(|field| {
|
||||
let numbered_fields = fields.iter().map(|field| {
|
||||
let opt_pos =
|
||||
field_tys.iter().position(|field_ty|
|
||||
field_ty.ident.name == field.ident.node.name);
|
||||
@ -1008,7 +1008,7 @@ fn trans_rec_or_struct<'a>(
|
||||
"Couldn't find field in struct type")
|
||||
}
|
||||
}
|
||||
});
|
||||
}).collect::<Vec<_>>();
|
||||
let optbase = match base {
|
||||
Some(base_expr) => {
|
||||
let mut leftovers = Vec::new();
|
||||
@ -1029,7 +1029,7 @@ fn trans_rec_or_struct<'a>(
|
||||
};
|
||||
|
||||
let repr = adt::represent_type(bcx.ccx(), ty);
|
||||
trans_adt(bcx, repr, discr, numbered_fields, optbase, dest)
|
||||
trans_adt(bcx, repr, discr, numbered_fields.as_slice(), optbase, dest)
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -851,7 +851,7 @@ fn foreign_types_for_fn_ty(ccx: &CrateContext,
|
||||
ty.repr(ccx.tcx()),
|
||||
ccx.tn.types_to_str(llsig.llarg_tys.as_slice()),
|
||||
ccx.tn.type_to_str(llsig.llret_ty),
|
||||
ccx.tn.types_to_str(fn_ty.arg_tys.map(|t| t.ty).as_slice()),
|
||||
ccx.tn.types_to_str(fn_ty.arg_tys.iter().map(|t| t.ty).collect::<Vec<_>>().as_slice()),
|
||||
ccx.tn.type_to_str(fn_ty.ret_ty.ty),
|
||||
ret_def);
|
||||
|
||||
|
@ -18,7 +18,7 @@ pub trait LlvmRepr {
|
||||
|
||||
impl<'a, T:LlvmRepr> LlvmRepr for &'a [T] {
|
||||
fn llrepr(&self, ccx: &CrateContext) -> ~str {
|
||||
let reprs = self.map(|t| t.llrepr(ccx));
|
||||
let reprs: Vec<~str> = self.iter().map(|t| t.llrepr(ccx)).collect();
|
||||
format!("[{}]", reprs.connect(","))
|
||||
}
|
||||
}
|
||||
|
@ -541,7 +541,7 @@ fn emit_vtable_methods(bcx: &Block,
|
||||
ty::populate_implementations_for_trait_if_necessary(bcx.tcx(), trt_id);
|
||||
|
||||
let trait_method_def_ids = ty::trait_method_def_ids(tcx, trt_id);
|
||||
trait_method_def_ids.map(|method_def_id| {
|
||||
trait_method_def_ids.iter().map(|method_def_id| {
|
||||
let ident = ty::method(tcx, *method_def_id).ident;
|
||||
// The substitutions we have are on the impl, so we grab
|
||||
// the method type from the impl to substitute into.
|
||||
@ -558,7 +558,7 @@ fn emit_vtable_methods(bcx: &Block,
|
||||
} else {
|
||||
trans_fn_ref_with_vtables(bcx, m_id, ExprId(0), substs, Some(vtables))
|
||||
}
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
|
||||
pub fn trans_trait_cast<'a>(bcx: &'a Block<'a>,
|
||||
|
@ -298,7 +298,7 @@ pub fn make_mono_id(ccx: &CrateContext,
|
||||
vts.repr(ccx.tcx()), substs.tys.repr(ccx.tcx()));
|
||||
let vts_iter = substs.self_vtables.iter().chain(vts.iter());
|
||||
vts_iter.zip(substs_iter).map(|(vtable, subst)| {
|
||||
let v = vtable.map(|vt| meth::vtable_id(ccx, vt));
|
||||
let v = vtable.iter().map(|vt| meth::vtable_id(ccx, vt)).collect::<Vec<_>>();
|
||||
(*subst, if !v.is_empty() { Some(@v) } else { None })
|
||||
}).collect()
|
||||
}
|
||||
|
@ -1408,14 +1408,14 @@ pub fn mk_ctor_fn(cx: &ctxt,
|
||||
binder_id: ast::NodeId,
|
||||
input_tys: &[ty::t],
|
||||
output: ty::t) -> t {
|
||||
let input_args = input_tys.map(|t| *t);
|
||||
let input_args = input_tys.iter().map(|t| *t).collect();
|
||||
mk_bare_fn(cx,
|
||||
BareFnTy {
|
||||
purity: ast::ImpureFn,
|
||||
abis: AbiSet::Rust(),
|
||||
sig: FnSig {
|
||||
binder_id: binder_id,
|
||||
inputs: Vec::from_slice(input_args),
|
||||
inputs: input_args,
|
||||
output: output,
|
||||
variadic: false
|
||||
}
|
||||
@ -2880,7 +2880,7 @@ pub fn replace_closure_return_type(tcx: &ctxt, fn_type: t, ret_type: t) -> t {
|
||||
|
||||
// Returns a vec of all the input and output types of fty.
|
||||
pub fn tys_in_fn_sig(sig: &FnSig) -> Vec<t> {
|
||||
vec::append_one(sig.inputs.map(|a| *a), sig.output)
|
||||
vec::append_one(sig.inputs.iter().map(|a| *a).collect(), sig.output)
|
||||
}
|
||||
|
||||
// Type accessors for AST nodes
|
||||
@ -3432,7 +3432,7 @@ pub fn field_idx_strict(tcx: &ctxt, name: ast::Name, fields: &[field])
|
||||
tcx.sess.bug(format!(
|
||||
"no field named `{}` found in the list of fields `{:?}`",
|
||||
token::get_name(name),
|
||||
fields.map(|f| token::get_ident(f.ident).get().to_str())));
|
||||
fields.iter().map(|f| token::get_ident(f.ident).get().to_str()).collect::<Vec<~str>>()));
|
||||
}
|
||||
|
||||
pub fn method_idx(id: ast::Ident, meths: &[@Method]) -> Option<uint> {
|
||||
@ -3724,8 +3724,8 @@ pub fn trait_supertraits(cx: &ctxt, id: ast::DefId) -> @Vec<@TraitRef> {
|
||||
|
||||
pub fn trait_ref_supertraits(cx: &ctxt, trait_ref: &ty::TraitRef) -> Vec<@TraitRef> {
|
||||
let supertrait_refs = trait_supertraits(cx, trait_ref.def_id);
|
||||
supertrait_refs.map(
|
||||
|supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs))
|
||||
supertrait_refs.iter().map(
|
||||
|supertrait_ref| supertrait_ref.subst(cx, &trait_ref.substs)).collect()
|
||||
}
|
||||
|
||||
fn lookup_locally_or_in_crate_store<V:Clone>(
|
||||
@ -3768,7 +3768,7 @@ pub fn trait_methods(cx: &ctxt, trait_did: ast::DefId) -> @Vec<@Method> {
|
||||
Some(&methods) => methods,
|
||||
None => {
|
||||
let def_ids = ty::trait_method_def_ids(cx, trait_did);
|
||||
let methods = @def_ids.map(|d| ty::method(cx, *d));
|
||||
let methods = @def_ids.iter().map(|d| ty::method(cx, *d)).collect();
|
||||
trait_methods.insert(trait_did, methods);
|
||||
methods
|
||||
}
|
||||
@ -3876,7 +3876,7 @@ pub fn from_ast_variant(cx: &ctxt,
|
||||
match ast_variant.node.kind {
|
||||
ast::TupleVariantKind(ref args) => {
|
||||
let arg_tys = if args.len() > 0 {
|
||||
ty_fn_args(ctor_ty).map(|a| *a)
|
||||
ty_fn_args(ctor_ty).iter().map(|a| *a).collect()
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
@ -3897,7 +3897,7 @@ pub fn from_ast_variant(cx: &ctxt,
|
||||
|
||||
assert!(fields.len() > 0);
|
||||
|
||||
let arg_tys = ty_fn_args(ctor_ty).map(|a| *a);
|
||||
let arg_tys = ty_fn_args(ctor_ty).iter().map(|a| *a).collect();
|
||||
let arg_names = fields.iter().map(|field| {
|
||||
match field.node.kind {
|
||||
NamedField(ident, _) => ident,
|
||||
@ -4280,7 +4280,7 @@ fn struct_field_tys(fields: &[StructField]) -> Vec<field_ty> {
|
||||
// this. Takes a list of substs with which to instantiate field types.
|
||||
pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &substs)
|
||||
-> Vec<field> {
|
||||
lookup_struct_fields(cx, did).map(|f| {
|
||||
lookup_struct_fields(cx, did).iter().map(|f| {
|
||||
field {
|
||||
// FIXME #6993: change type of field to Name and get rid of new()
|
||||
ident: ast::Ident::new(f.name),
|
||||
@ -4289,7 +4289,7 @@ pub fn struct_fields(cx: &ctxt, did: ast::DefId, substs: &substs)
|
||||
mutbl: MutImmutable
|
||||
}
|
||||
}
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
|
||||
pub fn is_binopable(cx: &ctxt, ty: t, op: ast::BinOp) -> bool {
|
||||
|
@ -167,8 +167,8 @@ fn ast_path_substs<AC:AstConv,RS:RegionScope>(
|
||||
let expected_num_region_params = decl_generics.region_param_defs().len();
|
||||
let supplied_num_region_params = path.segments.last().unwrap().lifetimes.len();
|
||||
let regions = if expected_num_region_params == supplied_num_region_params {
|
||||
path.segments.last().unwrap().lifetimes.map(
|
||||
|l| ast_region_to_region(this.tcx(), l))
|
||||
path.segments.last().unwrap().lifetimes.iter().map(
|
||||
|l| ast_region_to_region(this.tcx(), l)).collect::<Vec<_>>()
|
||||
} else {
|
||||
let anon_regions =
|
||||
rscope.anon_regions(path.span, expected_num_region_params);
|
||||
|
@ -147,7 +147,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
|
||||
let vinfo =
|
||||
ty::enum_variant_with_id(tcx, enm, var);
|
||||
let var_tpt = ty::lookup_item_type(tcx, var);
|
||||
vinfo.args.map(|t| {
|
||||
vinfo.args.iter().map(|t| {
|
||||
if var_tpt.generics.type_param_defs().len() ==
|
||||
expected_substs.tps.len()
|
||||
{
|
||||
@ -157,7 +157,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
|
||||
*t // In this case, an error was already signaled
|
||||
// anyway
|
||||
}
|
||||
})
|
||||
}).collect()
|
||||
};
|
||||
|
||||
kind_name = "variant";
|
||||
@ -209,7 +209,7 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path,
|
||||
// Get the expected types of the arguments.
|
||||
let class_fields = ty::struct_fields(
|
||||
tcx, struct_def_id, expected_substs);
|
||||
arg_types = class_fields.map(|field| field.mt.ty);
|
||||
arg_types = class_fields.iter().map(|field| field.mt.ty).collect();
|
||||
|
||||
kind_name = "structure";
|
||||
}
|
||||
|
@ -659,7 +659,10 @@ fn push_candidates_from_impl(&mut self,
|
||||
debug!("push_candidates_from_impl: {} {} {}",
|
||||
token::get_name(self.m_name),
|
||||
impl_info.ident.repr(self.tcx()),
|
||||
impl_info.methods.map(|m| m.ident).repr(self.tcx()));
|
||||
impl_info.methods.iter()
|
||||
.map(|m| m.ident)
|
||||
.collect::<Vec<ast::Ident>>()
|
||||
.repr(self.tcx()));
|
||||
|
||||
let idx = {
|
||||
match impl_info.methods
|
||||
|
@ -449,7 +449,7 @@ fn check_fn<'a>(ccx: &'a CrateCtxt<'a>,
|
||||
let ret_ty = fn_sig.output;
|
||||
|
||||
debug!("check_fn(arg_tys={:?}, ret_ty={:?})",
|
||||
arg_tys.map(|&a| ppaux::ty_to_str(tcx, a)),
|
||||
arg_tys.iter().map(|&a| ppaux::ty_to_str(tcx, a)).collect::<Vec<~str>>(),
|
||||
ppaux::ty_to_str(tcx, ret_ty));
|
||||
|
||||
// Create the function context. This is either derived from scratch or,
|
||||
@ -1717,7 +1717,7 @@ fn check_argument_types(fcx: &FnCtxt,
|
||||
};
|
||||
|
||||
debug!("check_argument_types: formal_tys={:?}",
|
||||
formal_tys.map(|t| fcx.infcx().ty_to_str(*t)));
|
||||
formal_tys.iter().map(|t| fcx.infcx().ty_to_str(*t)).collect::<Vec<~str>>());
|
||||
|
||||
// Check the arguments.
|
||||
// We do this in a pretty awful way: first we typecheck any arguments
|
||||
@ -1886,10 +1886,10 @@ fn check_method_call(fcx: &FnCtxt,
|
||||
expr.span,
|
||||
fcx.expr_ty(rcvr));
|
||||
|
||||
let tps = tps.map(|&ast_ty| fcx.to_ty(ast_ty));
|
||||
let tps = tps.iter().map(|&ast_ty| fcx.to_ty(ast_ty)).collect::<Vec<_>>();
|
||||
let fn_ty = match method::lookup(fcx, expr, rcvr,
|
||||
method_name.name,
|
||||
expr_t, tps,
|
||||
expr_t, tps.as_slice(),
|
||||
DontDerefArgs,
|
||||
CheckTraitsAndInherentMethods,
|
||||
AutoderefReceiver) {
|
||||
@ -2235,7 +2235,7 @@ fn check_expr_fn(fcx: &FnCtxt,
|
||||
let fty = if error_happened {
|
||||
fty_sig = FnSig {
|
||||
binder_id: ast::CRATE_NODE_ID,
|
||||
inputs: fn_ty.sig.inputs.map(|_| ty::mk_err()),
|
||||
inputs: fn_ty.sig.inputs.iter().map(|_| ty::mk_err()).collect(),
|
||||
output: ty::mk_err(),
|
||||
variadic: false
|
||||
};
|
||||
@ -2938,11 +2938,11 @@ fn check_struct_enum_variant(fcx: &FnCtxt,
|
||||
}
|
||||
ast::ExprMethodCall(ident, ref tps, ref args) => {
|
||||
check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice());
|
||||
let arg_tys = args.map(|a| fcx.expr_ty(*a));
|
||||
let (args_bot, args_err) = arg_tys.iter().fold((false, false),
|
||||
let mut arg_tys = args.iter().map(|a| fcx.expr_ty(*a));
|
||||
let (args_bot, args_err) = arg_tys.fold((false, false),
|
||||
|(rest_bot, rest_err), a| {
|
||||
(rest_bot || ty::type_is_bot(*a),
|
||||
rest_err || ty::type_is_error(*a))});
|
||||
(rest_bot || ty::type_is_bot(a),
|
||||
rest_err || ty::type_is_error(a))});
|
||||
if args_err {
|
||||
fcx.write_error(id);
|
||||
} else if args_bot {
|
||||
@ -3686,8 +3686,8 @@ pub fn instantiate_path(fcx: &FnCtxt,
|
||||
let num_expected_regions = tpt.generics.region_param_defs().len();
|
||||
let num_supplied_regions = pth.segments.last().unwrap().lifetimes.len();
|
||||
let regions = if num_expected_regions == num_supplied_regions {
|
||||
OwnedSlice::from_vec(pth.segments.last().unwrap().lifetimes.map(
|
||||
|l| ast_region_to_region(fcx.tcx(), l)))
|
||||
OwnedSlice::from_vec(pth.segments.last().unwrap().lifetimes.iter().map(
|
||||
|l| ast_region_to_region(fcx.tcx(), l)).collect())
|
||||
} else {
|
||||
if num_supplied_regions != 0 {
|
||||
fcx.ccx.tcx.sess.span_err(
|
||||
|
@ -113,7 +113,7 @@ fn resolve_vtable_map_entry(fcx: &FnCtxt, sp: Span, vtable_key: MethodCall) {
|
||||
|
||||
fn resolve_origins(fcx: &FnCtxt, sp: Span,
|
||||
vtbls: vtable_res) -> vtable_res {
|
||||
@vtbls.map(|os| @os.map(|origin| {
|
||||
@vtbls.iter().map(|os| @os.iter().map(|origin| {
|
||||
match origin {
|
||||
&vtable_static(def_id, ref tys, origins) => {
|
||||
let r_tys = resolve_type_vars_in_types(fcx,
|
||||
@ -126,7 +126,7 @@ fn resolve_origins(fcx: &FnCtxt, sp: Span,
|
||||
vtable_param(n, b)
|
||||
}
|
||||
}
|
||||
}))
|
||||
}).collect()).collect()
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -152,7 +152,7 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt,
|
||||
let result_ty = match variant.node.kind {
|
||||
ast::TupleVariantKind(ref args) if args.len() > 0 => {
|
||||
let rs = ExplicitRscope;
|
||||
let input_tys = args.map(|va| ccx.to_ty(&rs, va.ty));
|
||||
let input_tys: Vec<_> = args.iter().map(|va| ccx.to_ty(&rs, va.ty)).collect();
|
||||
ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)
|
||||
}
|
||||
|
||||
@ -168,8 +168,8 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt,
|
||||
|
||||
convert_struct(ccx, struct_def, tpt, variant.node.id);
|
||||
|
||||
let input_tys = struct_def.fields.map(
|
||||
|f| ty::node_id_to_type(ccx.tcx, f.node.id));
|
||||
let input_tys: Vec<_> = struct_def.fields.iter().map(
|
||||
|f| ty::node_id_to_type(ccx.tcx, f.node.id)).collect();
|
||||
ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty)
|
||||
}
|
||||
};
|
||||
@ -222,7 +222,7 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
|
||||
}
|
||||
|
||||
// Add an entry mapping
|
||||
let method_def_ids = @ms.map(|m| {
|
||||
let method_def_ids = @ms.iter().map(|m| {
|
||||
match m {
|
||||
&ast::Required(ref ty_method) => {
|
||||
local_def(ty_method.id)
|
||||
@ -231,13 +231,11 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) {
|
||||
local_def(method.id)
|
||||
}
|
||||
}
|
||||
});
|
||||
}).collect();
|
||||
|
||||
let trait_def_id = local_def(trait_id);
|
||||
tcx.trait_method_def_ids.borrow_mut()
|
||||
.insert(trait_def_id, @method_def_ids.iter()
|
||||
.map(|x| *x)
|
||||
.collect());
|
||||
.insert(trait_def_id, method_def_ids);
|
||||
}
|
||||
_ => {} // Ignore things that aren't traits.
|
||||
}
|
||||
@ -697,9 +695,9 @@ pub fn convert_struct(ccx: &CrateCtxt,
|
||||
tcx.tcache.borrow_mut().insert(local_def(ctor_id), tpt);
|
||||
} else if struct_def.fields.get(0).node.kind.is_unnamed() {
|
||||
// Tuple-like.
|
||||
let inputs = struct_def.fields.map(
|
||||
let inputs: Vec<_> = struct_def.fields.iter().map(
|
||||
|field| tcx.tcache.borrow().get(
|
||||
&local_def(field.node.id)).ty);
|
||||
&local_def(field.node.id)).ty).collect();
|
||||
let ctor_fn_ty = ty::mk_ctor_fn(tcx,
|
||||
ctor_id,
|
||||
inputs.as_slice(),
|
||||
|
@ -683,7 +683,7 @@ pub fn ty_to_str(&self, t: ty::t) -> ~str {
|
||||
}
|
||||
|
||||
pub fn tys_to_str(&self, ts: &[ty::t]) -> ~str {
|
||||
let tstrs = ts.map(|t| self.ty_to_str(*t));
|
||||
let tstrs: Vec<~str> = ts.iter().map(|t| self.ty_to_str(*t)).collect();
|
||||
format!("({})", tstrs.connect(", "))
|
||||
}
|
||||
|
||||
|
@ -1164,8 +1164,14 @@ fn free_regions_first(a: &RegionAndOrigin,
|
||||
format!("collect_error_for_expanding_node() could not find error \
|
||||
for var {:?}, lower_bounds={}, upper_bounds={}",
|
||||
node_idx,
|
||||
lower_bounds.map(|x| x.region).repr(self.tcx),
|
||||
upper_bounds.map(|x| x.region).repr(self.tcx)));
|
||||
lower_bounds.iter()
|
||||
.map(|x| x.region)
|
||||
.collect::<Vec<ty::Region>>()
|
||||
.repr(self.tcx),
|
||||
upper_bounds.iter()
|
||||
.map(|x| x.region)
|
||||
.collect::<Vec<ty::Region>>()
|
||||
.repr(self.tcx)));
|
||||
}
|
||||
|
||||
fn collect_error_for_contracting_node(
|
||||
@ -1209,7 +1215,10 @@ fn collect_error_for_contracting_node(
|
||||
format!("collect_error_for_contracting_node() could not find error \
|
||||
for var {:?}, upper_bounds={}",
|
||||
node_idx,
|
||||
upper_bounds.map(|x| x.region).repr(self.tcx)));
|
||||
upper_bounds.iter()
|
||||
.map(|x| x.region)
|
||||
.collect::<Vec<ty::Region>>()
|
||||
.repr(self.tcx)));
|
||||
}
|
||||
|
||||
fn collect_concrete_regions(&self,
|
||||
|
@ -32,7 +32,7 @@ fn inf_str(&self, cx: &InferCtxt) -> ~str {
|
||||
impl InferStr for FnSig {
|
||||
fn inf_str(&self, cx: &InferCtxt) -> ~str {
|
||||
format!("({}) -> {}",
|
||||
self.inputs.map(|a| a.inf_str(cx)).connect(", "),
|
||||
self.inputs.iter().map(|a| a.inf_str(cx)).collect::<Vec<~str>>().connect(", "),
|
||||
self.output.inf_str(cx))
|
||||
}
|
||||
}
|
||||
|
@ -258,7 +258,7 @@ pub fn write_substs_to_tcx(tcx: &ty::ctxt,
|
||||
substs: Vec<ty::t> ) {
|
||||
if substs.len() > 0u {
|
||||
debug!("write_substs_to_tcx({}, {:?})", node_id,
|
||||
substs.map(|t| ppaux::ty_to_str(tcx, *t)));
|
||||
substs.iter().map(|t| ppaux::ty_to_str(tcx, *t)).collect::<Vec<~str>>());
|
||||
assert!(substs.iter().all(|t| !ty::type_needs_infer(*t)));
|
||||
|
||||
tcx.node_type_substs.borrow_mut().insert(node_id, substs);
|
||||
|
@ -67,7 +67,7 @@ pub fn indenter() -> _indenter {
|
||||
pub fn field_expr(f: ast::Field) -> @ast::Expr { return f.expr; }
|
||||
|
||||
pub fn field_exprs(fields: Vec<ast::Field> ) -> Vec<@ast::Expr> {
|
||||
fields.map(|f| f.expr)
|
||||
fields.move_iter().map(|f| f.expr).collect()
|
||||
}
|
||||
|
||||
struct LoopQueryVisitor<'a> {
|
||||
|
@ -276,7 +276,7 @@ pub fn vstore_ty_to_str(cx: &ctxt, mt: &mt, vs: ty::vstore) -> ~str {
|
||||
}
|
||||
|
||||
pub fn vec_map_to_str<T>(ts: &[T], f: |t: &T| -> ~str) -> ~str {
|
||||
let tstrs = ts.map(f);
|
||||
let tstrs = ts.iter().map(f).collect::<Vec<~str>>();
|
||||
format!("[{}]", tstrs.connect(", "))
|
||||
}
|
||||
|
||||
@ -405,7 +405,7 @@ fn push_sig_to_str(cx: &ctxt,
|
||||
ket: char,
|
||||
sig: &ty::FnSig) {
|
||||
s.push_char(bra);
|
||||
let strs = sig.inputs.map(|a| fn_input_to_str(cx, *a));
|
||||
let strs: Vec<~str> = sig.inputs.iter().map(|a| fn_input_to_str(cx, *a)).collect();
|
||||
s.push_str(strs.connect(", "));
|
||||
if sig.variadic {
|
||||
s.push_str(", ...");
|
||||
@ -447,7 +447,7 @@ fn push_sig_to_str(cx: &ctxt,
|
||||
}
|
||||
ty_unboxed_vec(ref tm) => { format!("unboxed_vec<{}>", mt_to_str(cx, tm)) }
|
||||
ty_tup(ref elems) => {
|
||||
let strs = elems.map(|elem| ty_to_str(cx, *elem));
|
||||
let strs: Vec<~str> = elems.iter().map(|elem| ty_to_str(cx, *elem)).collect();
|
||||
~"(" + strs.connect(",") + ")"
|
||||
}
|
||||
ty_closure(ref f) => {
|
||||
|
@ -314,14 +314,14 @@ pub fn unindent(s: &str) -> ~str {
|
||||
|
||||
if lines.len() >= 1 {
|
||||
let mut unindented = vec!( lines.get(0).trim() );
|
||||
unindented.push_all(lines.tail().map(|&line| {
|
||||
unindented.push_all(lines.tail().iter().map(|&line| {
|
||||
if line.is_whitespace() {
|
||||
line
|
||||
} else {
|
||||
assert!(line.len() >= min_indent);
|
||||
line.slice_from(min_indent)
|
||||
}
|
||||
}));
|
||||
}).collect::<Vec<_>>().as_slice());
|
||||
unindented.connect("\n")
|
||||
} else {
|
||||
s.to_owned()
|
||||
|
@ -1706,7 +1706,7 @@ fn to_json(&self) -> Json {
|
||||
}
|
||||
|
||||
impl<A:ToJson> ToJson for ~[A] {
|
||||
fn to_json(&self) -> Json { List(self.map(|elt| elt.to_json())) }
|
||||
fn to_json(&self) -> Json { List(self.iter().map(|elt| elt.to_json()).collect()) }
|
||||
}
|
||||
|
||||
impl<A:ToJson> ToJson for TreeMap<~str, A> {
|
||||
|
@ -285,12 +285,12 @@ fn as_str_ascii<'a>(&'a self) -> &'a str {
|
||||
|
||||
#[inline]
|
||||
fn to_lower(&self) -> ~[Ascii] {
|
||||
self.map(|a| a.to_lower())
|
||||
self.iter().map(|a| a.to_lower()).collect()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn to_upper(&self) -> ~[Ascii] {
|
||||
self.map(|a| a.to_upper())
|
||||
self.iter().map(|a| a.to_upper()).collect()
|
||||
}
|
||||
|
||||
#[inline]
|
||||
|
@ -19,11 +19,12 @@
|
||||
|
||||
#![allow(missing_doc)]
|
||||
|
||||
use iter::Iterator;
|
||||
use io::IoResult;
|
||||
use io::net::ip::{SocketAddr, IpAddr};
|
||||
use option::{Option, Some, None};
|
||||
use rt::rtio::{IoFactory, LocalIo};
|
||||
use slice::ImmutableVector;
|
||||
use slice::OwnedVector;
|
||||
|
||||
/// Hints to the types of sockets that are desired when looking up hosts
|
||||
pub enum SocketType {
|
||||
@ -73,7 +74,7 @@ pub struct Info {
|
||||
/// Easy name resolution. Given a hostname, returns the list of IP addresses for
|
||||
/// that hostname.
|
||||
pub fn get_host_addresses(host: &str) -> IoResult<~[IpAddr]> {
|
||||
lookup(Some(host), None, None).map(|a| a.map(|i| i.address.ip))
|
||||
lookup(Some(host), None, None).map(|a| a.move_iter().map(|i| i.address.ip).collect())
|
||||
}
|
||||
|
||||
/// Full-fleged resolution. This function will perform a synchronous call to
|
||||
|
@ -348,16 +348,6 @@ pub fn append_one<T>(lhs: ~[T], x: T) -> ~[T] {
|
||||
|
||||
// Functional utilities
|
||||
|
||||
/**
|
||||
* Apply a function to each element of a vector and return a concatenation
|
||||
* of each result vector
|
||||
*/
|
||||
pub fn flat_map<T, U>(v: &[T], f: |t: &T| -> ~[U]) -> ~[U] {
|
||||
let mut result = ~[];
|
||||
for elem in v.iter() { result.push_all_move(f(elem)); }
|
||||
result
|
||||
}
|
||||
|
||||
#[allow(missing_doc)]
|
||||
pub trait VectorVector<T> {
|
||||
// FIXME #5898: calling these .concat and .connect conflicts with
|
||||
@ -902,11 +892,7 @@ pub trait ImmutableVector<'a, T> {
|
||||
fn initn(&self, n: uint) -> &'a [T];
|
||||
/// Returns the last element of a vector, or `None` if it is empty.
|
||||
fn last(&self) -> Option<&'a T>;
|
||||
/**
|
||||
* Apply a function to each element of a vector and return a concatenation
|
||||
* of each result vector
|
||||
*/
|
||||
fn flat_map<U>(&self, f: |t: &T| -> ~[U]) -> ~[U];
|
||||
|
||||
/// Returns a pointer to the element at the given index, without doing
|
||||
/// bounds checking.
|
||||
unsafe fn unsafe_ref(self, index: uint) -> &'a T;
|
||||
@ -935,11 +921,6 @@ pub trait ImmutableVector<'a, T> {
|
||||
*/
|
||||
fn bsearch(&self, f: |&T| -> Ordering) -> Option<uint>;
|
||||
|
||||
/// Deprecated, use iterators where possible
|
||||
/// (`self.iter().map(f)`). Apply a function to each element
|
||||
/// of a vector and return the results.
|
||||
fn map<U>(&self, |t: &T| -> U) -> ~[U];
|
||||
|
||||
/**
|
||||
* Returns a mutable reference to the first element in this slice
|
||||
* and adjusts the slice in place so that it no longer contains
|
||||
@ -1094,11 +1075,6 @@ fn last(&self) -> Option<&'a T> {
|
||||
if self.len() == 0 { None } else { Some(&self[self.len() - 1]) }
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn flat_map<U>(&self, f: |t: &T| -> ~[U]) -> ~[U] {
|
||||
flat_map(*self, f)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
unsafe fn unsafe_ref(self, index: uint) -> &'a T {
|
||||
transmute(self.repr().data.offset(index as int))
|
||||
@ -1129,10 +1105,6 @@ fn bsearch(&self, f: |&T| -> Ordering) -> Option<uint> {
|
||||
return None;
|
||||
}
|
||||
|
||||
fn map<U>(&self, f: |t: &T| -> U) -> ~[U] {
|
||||
self.iter().map(f).collect()
|
||||
}
|
||||
|
||||
fn shift_ref(&mut self) -> Option<&'a T> {
|
||||
if self.len() == 0 { return None; }
|
||||
unsafe {
|
||||
@ -3329,27 +3301,6 @@ fn test_dedup_shared() {
|
||||
*/
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_map() {
|
||||
// Test on-stack map.
|
||||
let v = &[1u, 2u, 3u];
|
||||
let mut w = v.map(square_ref);
|
||||
assert_eq!(w.len(), 3u);
|
||||
assert_eq!(w[0], 1u);
|
||||
assert_eq!(w[1], 4u);
|
||||
assert_eq!(w[2], 9u);
|
||||
|
||||
// Test on-heap map.
|
||||
let v = ~[1u, 2u, 3u, 4u, 5u];
|
||||
w = v.map(square_ref);
|
||||
assert_eq!(w.len(), 5u);
|
||||
assert_eq!(w[0], 1u);
|
||||
assert_eq!(w[1], 4u);
|
||||
assert_eq!(w[2], 9u);
|
||||
assert_eq!(w[3], 16u);
|
||||
assert_eq!(w[4], 25u);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_retain() {
|
||||
let mut v = ~[1, 2, 3, 4, 5];
|
||||
@ -3730,36 +3681,6 @@ fn test_grow_fn_fail() {
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_fail]
|
||||
fn test_map_fail() {
|
||||
use rc::Rc;
|
||||
let v = [(~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0))];
|
||||
let mut i = 0;
|
||||
v.map(|_elt| {
|
||||
if i == 2 {
|
||||
fail!()
|
||||
}
|
||||
i += 1;
|
||||
~[(~0, Rc::new(0))]
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_fail]
|
||||
fn test_flat_map_fail() {
|
||||
use rc::Rc;
|
||||
let v = [(~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0)), (~0, Rc::new(0))];
|
||||
let mut i = 0;
|
||||
flat_map(v, |_elt| {
|
||||
if i == 2 {
|
||||
fail!()
|
||||
}
|
||||
i += 1;
|
||||
~[(~0, Rc::new(0))]
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_fail]
|
||||
fn test_permute_fail() {
|
||||
|
@ -902,13 +902,6 @@ pub fn remove(&mut self, index: uint) -> Option<T> {
|
||||
}
|
||||
}
|
||||
|
||||
///Apply a function to each element of a vector and return the results.
|
||||
#[inline]
|
||||
#[deprecated="Use `xs.iter().map(closure)` instead."]
|
||||
pub fn map<U>(&self, f: |t: &T| -> U) -> Vec<U> {
|
||||
self.iter().map(f).collect()
|
||||
}
|
||||
|
||||
/// Takes ownership of the vector `other`, moving all elements into
|
||||
/// the current vector. This does not copy any elements, and it is
|
||||
/// illegal to use the `other` vector after calling this method
|
||||
|
@ -25,9 +25,9 @@
|
||||
|
||||
pub fn path_name_i(idents: &[Ident]) -> ~str {
|
||||
// FIXME: Bad copies (#2543 -- same for everything else that says "bad")
|
||||
idents.map(|i| {
|
||||
idents.iter().map(|i| {
|
||||
token::get_ident(*i).get().to_str()
|
||||
}).connect("::")
|
||||
}).collect::<Vec<~str>>().connect("::")
|
||||
}
|
||||
|
||||
// totally scary function: ignores all but the last element, should have
|
||||
@ -717,13 +717,15 @@ fn ident_to_segment(id : &Ident) -> PathSegment {
|
||||
}
|
||||
|
||||
#[test] fn idents_name_eq_test() {
|
||||
assert!(segments_name_eq([Ident{name:3,ctxt:4},
|
||||
Ident{name:78,ctxt:82}].map(ident_to_segment),
|
||||
[Ident{name:3,ctxt:104},
|
||||
Ident{name:78,ctxt:182}].map(ident_to_segment)));
|
||||
assert!(!segments_name_eq([Ident{name:3,ctxt:4},
|
||||
Ident{name:78,ctxt:82}].map(ident_to_segment),
|
||||
[Ident{name:3,ctxt:104},
|
||||
Ident{name:77,ctxt:182}].map(ident_to_segment)));
|
||||
assert!(segments_name_eq(
|
||||
[Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
|
||||
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
|
||||
[Ident{name:3,ctxt:104}, Ident{name:78,ctxt:182}]
|
||||
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
|
||||
assert!(!segments_name_eq(
|
||||
[Ident{name:3,ctxt:4}, Ident{name:78,ctxt:82}]
|
||||
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice(),
|
||||
[Ident{name:3,ctxt:104}, Ident{name:77,ctxt:182}]
|
||||
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>().as_slice()));
|
||||
}
|
||||
}
|
||||
|
@ -746,7 +746,7 @@ fn lambda_fn_decl(&self, span: Span,
|
||||
}
|
||||
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> @ast::Expr {
|
||||
let fn_decl = self.fn_decl(
|
||||
ids.map(|id| self.arg(span, *id, self.ty_infer(span))),
|
||||
ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
|
||||
self.ty_infer(span));
|
||||
|
||||
self.expr(span, ast::ExprFnBlock(fn_decl, blk))
|
||||
@ -966,16 +966,14 @@ fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
|
||||
|
||||
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
|
||||
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem {
|
||||
let imports = imports.map(|id| {
|
||||
let imports = imports.iter().map(|id| {
|
||||
respan(sp, ast::PathListIdent_ { name: *id, id: ast::DUMMY_NODE_ID })
|
||||
});
|
||||
}).collect();
|
||||
|
||||
self.view_use(sp, vis,
|
||||
vec!(@respan(sp,
|
||||
ast::ViewPathList(self.path(sp, path),
|
||||
imports.iter()
|
||||
.map(|x| *x)
|
||||
.collect(),
|
||||
imports,
|
||||
ast::DUMMY_NODE_ID))))
|
||||
}
|
||||
|
||||
|
@ -71,11 +71,11 @@ fn cs_clone(
|
||||
|
||||
if all_fields.len() >= 1 && all_fields.get(0).name.is_none() {
|
||||
// enum-like
|
||||
let subcalls = all_fields.map(subcall);
|
||||
let subcalls = all_fields.iter().map(subcall).collect();
|
||||
cx.expr_call_ident(trait_span, ctor_ident, subcalls)
|
||||
} else {
|
||||
// struct-like
|
||||
let fields = all_fields.map(|field| {
|
||||
let fields = all_fields.iter().map(|field| {
|
||||
let ident = match field.name {
|
||||
Some(i) => i,
|
||||
None => cx.span_bug(trait_span,
|
||||
@ -83,7 +83,7 @@ fn cs_clone(
|
||||
name))
|
||||
};
|
||||
cx.field_imm(field.span, ident, subcall(field))
|
||||
});
|
||||
}).collect::<Vec<_>>();
|
||||
|
||||
if fields.is_empty() {
|
||||
// no fields, so construct like `None`
|
||||
|
@ -56,14 +56,14 @@ fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructur
|
||||
if fields.is_empty() {
|
||||
cx.expr_ident(trait_span, substr.type_ident)
|
||||
} else {
|
||||
let exprs = fields.map(|sp| default_call(*sp));
|
||||
let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
|
||||
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
|
||||
}
|
||||
}
|
||||
Named(ref fields) => {
|
||||
let default_fields = fields.map(|&(ident, span)| {
|
||||
let default_fields = fields.iter().map(|&(ident, span)| {
|
||||
cx.field_imm(span, ident, default_call(span))
|
||||
});
|
||||
}).collect();
|
||||
cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
|
||||
}
|
||||
}
|
||||
|
@ -371,12 +371,12 @@ fn create_derived_impl(&self,
|
||||
ty_params.extend(generics.ty_params.iter().map(|ty_param| {
|
||||
// I don't think this can be moved out of the loop, since
|
||||
// a TyParamBound requires an ast id
|
||||
let mut bounds =
|
||||
let mut bounds: Vec<_> =
|
||||
// extra restrictions on the generics parameters to the type being derived upon
|
||||
self.additional_bounds.map(|p| {
|
||||
self.additional_bounds.iter().map(|p| {
|
||||
cx.typarambound(p.to_path(cx, self.span,
|
||||
type_ident, generics))
|
||||
});
|
||||
}).collect();
|
||||
// require the current trait
|
||||
bounds.push(cx.typarambound(trait_path.clone()));
|
||||
|
||||
@ -413,7 +413,7 @@ fn create_derived_impl(&self,
|
||||
ident,
|
||||
vec::append(vec!(attr), self.attributes.as_slice()),
|
||||
ast::ItemImpl(trait_generics, opt_trait_ref,
|
||||
self_type, methods.map(|x| *x)))
|
||||
self_type, methods))
|
||||
}
|
||||
|
||||
fn expand_struct_def(&self,
|
||||
@ -421,7 +421,7 @@ fn expand_struct_def(&self,
|
||||
struct_def: &StructDef,
|
||||
type_ident: Ident,
|
||||
generics: &Generics) -> @ast::Item {
|
||||
let methods = self.methods.map(|method_def| {
|
||||
let methods = self.methods.iter().map(|method_def| {
|
||||
let (explicit_self, self_args, nonself_args, tys) =
|
||||
method_def.split_self_nonself_args(
|
||||
cx, self, type_ident, generics);
|
||||
@ -447,7 +447,7 @@ fn expand_struct_def(&self,
|
||||
type_ident, generics,
|
||||
explicit_self, tys,
|
||||
body)
|
||||
});
|
||||
}).collect();
|
||||
|
||||
self.create_derived_impl(cx, type_ident, generics, methods)
|
||||
}
|
||||
@ -457,7 +457,7 @@ fn expand_enum_def(&self,
|
||||
enum_def: &EnumDef,
|
||||
type_ident: Ident,
|
||||
generics: &Generics) -> @ast::Item {
|
||||
let methods = self.methods.map(|method_def| {
|
||||
let methods = self.methods.iter().map(|method_def| {
|
||||
let (explicit_self, self_args, nonself_args, tys) =
|
||||
method_def.split_self_nonself_args(cx, self,
|
||||
type_ident, generics);
|
||||
@ -483,7 +483,7 @@ fn expand_enum_def(&self,
|
||||
type_ident, generics,
|
||||
explicit_self, tys,
|
||||
body)
|
||||
});
|
||||
}).collect();
|
||||
|
||||
self.create_derived_impl(cx, type_ident, generics, methods)
|
||||
}
|
||||
@ -955,18 +955,18 @@ fn expand_static_enum_method_body(&self,
|
||||
self_args: &[@Expr],
|
||||
nonself_args: &[@Expr])
|
||||
-> @Expr {
|
||||
let summary = enum_def.variants.map(|v| {
|
||||
let summary = enum_def.variants.iter().map(|v| {
|
||||
let ident = v.node.name;
|
||||
let summary = match v.node.kind {
|
||||
ast::TupleVariantKind(ref args) => {
|
||||
Unnamed(args.map(|va| trait_.set_expn_info(cx, va.ty.span)))
|
||||
Unnamed(args.iter().map(|va| trait_.set_expn_info(cx, va.ty.span)).collect())
|
||||
}
|
||||
ast::StructVariantKind(struct_def) => {
|
||||
trait_.summarise_struct(cx, struct_def)
|
||||
}
|
||||
};
|
||||
(ident, v.span, summary)
|
||||
});
|
||||
}).collect();
|
||||
self.call_substructure_method(cx, trait_, type_ident,
|
||||
self_args, nonself_args,
|
||||
&StaticEnum(enum_def, summary))
|
||||
@ -1027,10 +1027,10 @@ fn create_subpatterns(&self,
|
||||
field_paths: Vec<ast::Path> ,
|
||||
mutbl: ast::Mutability)
|
||||
-> Vec<@ast::Pat> {
|
||||
field_paths.map(|path| {
|
||||
field_paths.iter().map(|path| {
|
||||
cx.pat(path.span,
|
||||
ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None))
|
||||
})
|
||||
}).collect()
|
||||
}
|
||||
|
||||
fn create_struct_pattern(&self,
|
||||
@ -1200,12 +1200,14 @@ pub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec<@Expr> | -> @Expr,
|
||||
match *substructure.fields {
|
||||
EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => {
|
||||
// call self_n.method(other_1_n, other_2_n, ...)
|
||||
let called = all_fields.map(|field| {
|
||||
let called = all_fields.iter().map(|field| {
|
||||
cx.expr_method_call(field.span,
|
||||
field.self_,
|
||||
substructure.method_ident,
|
||||
field.other.map(|e| cx.expr_addr_of(field.span, *e)))
|
||||
});
|
||||
field.other.iter()
|
||||
.map(|e| cx.expr_addr_of(field.span, *e))
|
||||
.collect())
|
||||
}).collect();
|
||||
|
||||
f(cx, trait_span, called)
|
||||
},
|
||||
|
@ -136,15 +136,15 @@ fn rand_thing(cx: &mut ExtCtxt,
|
||||
if fields.is_empty() {
|
||||
cx.expr_ident(trait_span, ctor_ident)
|
||||
} else {
|
||||
let exprs = fields.map(|span| rand_call(cx, *span));
|
||||
let exprs = fields.iter().map(|span| rand_call(cx, *span)).collect();
|
||||
cx.expr_call_ident(trait_span, ctor_ident, exprs)
|
||||
}
|
||||
}
|
||||
Named(ref fields) => {
|
||||
let rand_fields = fields.map(|&(ident, span)| {
|
||||
let rand_fields = fields.iter().map(|&(ident, span)| {
|
||||
let e = rand_call(cx, span);
|
||||
cx.field_imm(span, ident, e)
|
||||
});
|
||||
}).collect();
|
||||
cx.expr_struct_ident(trait_span, ctor_ident, rand_fields)
|
||||
}
|
||||
}
|
||||
|
@ -69,9 +69,9 @@ pub fn to_path(&self,
|
||||
self_ty: Ident,
|
||||
self_generics: &Generics)
|
||||
-> ast::Path {
|
||||
let idents = self.path.map(|s| cx.ident_of(*s) );
|
||||
let idents = self.path.iter().map(|s| cx.ident_of(*s)).collect();
|
||||
let lt = mk_lifetimes(cx, span, &self.lifetime);
|
||||
let tys = self.params.map(|t| t.to_ty(cx, span, self_ty, self_generics));
|
||||
let tys = self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect();
|
||||
|
||||
cx.path_all(span, self.global, idents, lt, tys)
|
||||
}
|
||||
@ -150,7 +150,9 @@ pub fn to_ty(&self,
|
||||
let ty = if fields.is_empty() {
|
||||
ast::TyNil
|
||||
} else {
|
||||
ast::TyTup(fields.map(|f| f.to_ty(cx, span, self_ty, self_generics)))
|
||||
ast::TyTup(fields.iter()
|
||||
.map(|f| f.to_ty(cx, span, self_ty, self_generics))
|
||||
.collect())
|
||||
};
|
||||
|
||||
cx.ty(span, ty)
|
||||
@ -219,10 +221,10 @@ pub fn to_generics(&self,
|
||||
self_ty: Ident,
|
||||
self_generics: &Generics)
|
||||
-> Generics {
|
||||
let lifetimes = self.lifetimes.map(|lt| {
|
||||
let lifetimes = self.lifetimes.iter().map(|lt| {
|
||||
cx.lifetime(span, cx.ident_of(*lt).name)
|
||||
});
|
||||
let ty_params = self.bounds.map(|t| {
|
||||
}).collect();
|
||||
let ty_params = self.bounds.iter().map(|t| {
|
||||
match t {
|
||||
&(ref name, ref bounds) => {
|
||||
mk_ty_param(cx,
|
||||
@ -233,7 +235,7 @@ pub fn to_generics(&self,
|
||||
self_generics)
|
||||
}
|
||||
}
|
||||
});
|
||||
}).collect();
|
||||
mk_generics(lifetimes, ty_params)
|
||||
}
|
||||
}
|
||||
|
@ -73,14 +73,14 @@ fn zero_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure)
|
||||
if fields.is_empty() {
|
||||
cx.expr_ident(trait_span, substr.type_ident)
|
||||
} else {
|
||||
let exprs = fields.map(|sp| zero_call(*sp));
|
||||
let exprs = fields.iter().map(|sp| zero_call(*sp)).collect();
|
||||
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
|
||||
}
|
||||
}
|
||||
Named(ref fields) => {
|
||||
let zero_fields = fields.map(|&(ident, span)| {
|
||||
let zero_fields = fields.iter().map(|&(ident, span)| {
|
||||
cx.field_imm(span, ident, zero_call(span))
|
||||
});
|
||||
}).collect();
|
||||
cx.expr_struct_ident(trait_span, substr.type_ident, zero_fields)
|
||||
}
|
||||
}
|
||||
|
@ -776,7 +776,7 @@ pub fn expand_block(blk: &Block, fld: &mut MacroExpander) -> P<Block> {
|
||||
|
||||
// expand the elements of a block.
|
||||
pub fn expand_block_elts(b: &Block, fld: &mut MacroExpander) -> P<Block> {
|
||||
let new_view_items = b.view_items.map(|x| fld.fold_view_item(x));
|
||||
let new_view_items = b.view_items.iter().map(|x| fld.fold_view_item(x)).collect();
|
||||
let new_stmts =
|
||||
b.stmts.iter().flat_map(|x| {
|
||||
let renamed_stmt = {
|
||||
|
@ -83,7 +83,7 @@ fn to_source(&self) -> ~str {
|
||||
|
||||
impl<'a> ToSource for &'a [@ast::Item] {
|
||||
fn to_source(&self) -> ~str {
|
||||
self.map(|i| i.to_source()).connect("\n\n")
|
||||
self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect("\n\n")
|
||||
}
|
||||
}
|
||||
|
||||
@ -95,7 +95,7 @@ fn to_source(&self) -> ~str {
|
||||
|
||||
impl<'a> ToSource for &'a [ast::Ty] {
|
||||
fn to_source(&self) -> ~str {
|
||||
self.map(|i| i.to_source()).connect(", ")
|
||||
self.iter().map(|i| i.to_source()).collect::<Vec<~str>>().connect(", ")
|
||||
}
|
||||
}
|
||||
|
||||
@ -339,7 +339,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt,
|
||||
}
|
||||
|
||||
fn ids_ext(strs: Vec<~str> ) -> Vec<ast::Ident> {
|
||||
strs.map(|str| str_to_ident(*str))
|
||||
strs.iter().map(|str| str_to_ident(*str)).collect()
|
||||
}
|
||||
|
||||
fn id_ext(str: &str) -> ast::Ident {
|
||||
|
@ -71,7 +71,9 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
|
||||
-> base::MacResult {
|
||||
base::check_zero_tts(cx, sp, tts, "module_path!");
|
||||
let string = cx.mod_path()
|
||||
.iter()
|
||||
.map(|x| token::get_ident(*x).get().to_str())
|
||||
.collect::<Vec<~str>>()
|
||||
.connect("::");
|
||||
base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string)))
|
||||
}
|
||||
|
@ -373,7 +373,7 @@ pub fn parse(sess: &ParseSess,
|
||||
} else {
|
||||
if (bb_eis.len() > 0u && next_eis.len() > 0u)
|
||||
|| bb_eis.len() > 1u {
|
||||
let nts = bb_eis.map(|ei| {
|
||||
let nts = bb_eis.iter().map(|ei| {
|
||||
match ei.elts.get(ei.idx).node {
|
||||
MatchNonterminal(bind, name, _) => {
|
||||
format!("{} ('{}')",
|
||||
@ -381,7 +381,7 @@ pub fn parse(sess: &ParseSess,
|
||||
token::get_ident(bind))
|
||||
}
|
||||
_ => fail!()
|
||||
} }).connect(" or ");
|
||||
} }).collect::<Vec<~str>>().connect(" or ");
|
||||
return Error(sp, format!(
|
||||
"local ambiguity: multiple parsing options: \
|
||||
built-in NTs {} or {} other options.",
|
||||
|
@ -41,7 +41,7 @@ fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
|
||||
}
|
||||
ViewPathList(ref path, ref path_list_idents, node_id) => {
|
||||
ViewPathList(self.fold_path(path),
|
||||
path_list_idents.map(|path_list_ident| {
|
||||
path_list_idents.iter().map(|path_list_ident| {
|
||||
let id = self.new_id(path_list_ident.node
|
||||
.id);
|
||||
Spanned {
|
||||
@ -54,7 +54,7 @@ fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> {
|
||||
span: self.new_span(
|
||||
path_list_ident.span)
|
||||
}
|
||||
}),
|
||||
}).collect(),
|
||||
self.new_id(node_id))
|
||||
}
|
||||
};
|
||||
@ -83,7 +83,7 @@ fn fold_struct_field(&mut self, sf: &StructField) -> StructField {
|
||||
kind: sf.node.kind,
|
||||
id: self.new_id(sf.node.id),
|
||||
ty: self.fold_ty(sf.node.ty),
|
||||
attrs: sf.node.attrs.map(|e| fold_attribute_(*e, self))
|
||||
attrs: sf.node.attrs.iter().map(|e| fold_attribute_(*e, self)).collect()
|
||||
},
|
||||
span: self.new_span(sf.span)
|
||||
}
|
||||
@ -115,7 +115,7 @@ fn fold_stmt(&mut self, s: &Stmt) -> SmallVector<@Stmt> {
|
||||
|
||||
fn fold_arm(&mut self, a: &Arm) -> Arm {
|
||||
Arm {
|
||||
pats: a.pats.map(|x| self.fold_pat(*x)),
|
||||
pats: a.pats.iter().map(|x| self.fold_pat(*x)).collect(),
|
||||
guard: a.guard.map(|x| self.fold_expr(x)),
|
||||
body: self.fold_expr(a.body),
|
||||
}
|
||||
@ -163,18 +163,18 @@ fn fold_ty(&mut self, t: P<Ty>) -> P<Ty> {
|
||||
onceness: f.onceness,
|
||||
bounds: fold_opt_bounds(&f.bounds, self),
|
||||
decl: self.fold_fn_decl(f.decl),
|
||||
lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
|
||||
lifetimes: f.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
|
||||
})
|
||||
}
|
||||
TyBareFn(ref f) => {
|
||||
TyBareFn(@BareFnTy {
|
||||
lifetimes: f.lifetimes.map(|l| fold_lifetime(l, self)),
|
||||
lifetimes: f.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
|
||||
purity: f.purity,
|
||||
abis: f.abis,
|
||||
decl: self.fold_fn_decl(f.decl)
|
||||
})
|
||||
}
|
||||
TyTup(ref tys) => TyTup(tys.map(|&ty| self.fold_ty(ty))),
|
||||
TyTup(ref tys) => TyTup(tys.iter().map(|&ty| self.fold_ty(ty)).collect()),
|
||||
TyPath(ref path, ref bounds, id) => {
|
||||
TyPath(self.fold_path(path),
|
||||
fold_opt_bounds(bounds, self),
|
||||
@ -214,8 +214,8 @@ fn fold_variant(&mut self, v: &Variant) -> P<Variant> {
|
||||
let kind;
|
||||
match v.node.kind {
|
||||
TupleVariantKind(ref variant_args) => {
|
||||
kind = TupleVariantKind(variant_args.map(|x|
|
||||
fold_variant_arg_(x, self)))
|
||||
kind = TupleVariantKind(variant_args.iter().map(|x|
|
||||
fold_variant_arg_(x, self)).collect())
|
||||
}
|
||||
StructVariantKind(ref struct_def) => {
|
||||
kind = StructVariantKind(@ast::StructDef {
|
||||
@ -226,7 +226,7 @@ fn fold_variant(&mut self, v: &Variant) -> P<Variant> {
|
||||
}
|
||||
}
|
||||
|
||||
let attrs = v.node.attrs.map(|x| fold_attribute_(*x, self));
|
||||
let attrs = v.node.attrs.iter().map(|x| fold_attribute_(*x, self)).collect();
|
||||
|
||||
let de = match v.node.disr_expr {
|
||||
Some(e) => Some(self.fold_expr(e)),
|
||||
@ -254,11 +254,11 @@ fn fold_path(&mut self, p: &Path) -> Path {
|
||||
ast::Path {
|
||||
span: self.new_span(p.span),
|
||||
global: p.global,
|
||||
segments: p.segments.map(|segment| ast::PathSegment {
|
||||
segments: p.segments.iter().map(|segment| ast::PathSegment {
|
||||
identifier: self.fold_ident(segment.identifier),
|
||||
lifetimes: segment.lifetimes.map(|l| fold_lifetime(l, self)),
|
||||
types: segment.types.map(|&typ| self.fold_ty(typ)),
|
||||
})
|
||||
lifetimes: segment.lifetimes.iter().map(|l| fold_lifetime(l, self)).collect(),
|
||||
types: segment.types.iter().map(|&typ| self.fold_ty(typ)).collect(),
|
||||
}).collect()
|
||||
}
|
||||
}
|
||||
|
||||
@ -323,7 +323,7 @@ fn fold_meta_item_<T: Folder>(mi: @MetaItem, fld: &mut T) -> @MetaItem {
|
||||
match mi.node {
|
||||
MetaWord(ref id) => MetaWord((*id).clone()),
|
||||
MetaList(ref id, ref mis) => {
|
||||
MetaList((*id).clone(), mis.map(|e| fold_meta_item_(*e, fld)))
|
||||
MetaList((*id).clone(), mis.iter().map(|e| fold_meta_item_(*e, fld)).collect())
|
||||
}
|
||||
MetaNameValue(ref id, ref s) => {
|
||||
MetaNameValue((*id).clone(), (*s).clone())
|
||||
@ -402,7 +402,7 @@ fn maybe_fold_ident<T: Folder>(t: &token::Token, fld: &mut T) -> token::Token {
|
||||
|
||||
pub fn noop_fold_fn_decl<T: Folder>(decl: &FnDecl, fld: &mut T) -> P<FnDecl> {
|
||||
P(FnDecl {
|
||||
inputs: decl.inputs.map(|x| fold_arg_(x, fld)), // bad copy
|
||||
inputs: decl.inputs.iter().map(|x| fold_arg_(x, fld)).collect(), // bad copy
|
||||
output: fld.fold_ty(decl.output),
|
||||
cf: decl.cf,
|
||||
variadic: decl.variadic
|
||||
@ -441,7 +441,7 @@ pub fn fold_lifetime<T: Folder>(l: &Lifetime, fld: &mut T) -> Lifetime {
|
||||
|
||||
pub fn fold_lifetimes<T: Folder>(lts: &Vec<Lifetime>, fld: &mut T)
|
||||
-> Vec<Lifetime> {
|
||||
lts.map(|l| fold_lifetime(l, fld))
|
||||
lts.iter().map(|l| fold_lifetime(l, fld)).collect()
|
||||
}
|
||||
|
||||
pub fn fold_opt_lifetime<T: Folder>(o_lt: &Option<Lifetime>, fld: &mut T)
|
||||
@ -456,7 +456,7 @@ pub fn fold_generics<T: Folder>(generics: &Generics, fld: &mut T) -> Generics {
|
||||
|
||||
fn fold_struct_def<T: Folder>(struct_def: @StructDef, fld: &mut T) -> @StructDef {
|
||||
@ast::StructDef {
|
||||
fields: struct_def.fields.map(|f| fold_struct_field(f, fld)),
|
||||
fields: struct_def.fields.iter().map(|f| fold_struct_field(f, fld)).collect(),
|
||||
ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(cid)),
|
||||
}
|
||||
}
|
||||
@ -474,7 +474,7 @@ fn fold_struct_field<T: Folder>(f: &StructField, fld: &mut T) -> StructField {
|
||||
kind: f.node.kind,
|
||||
id: fld.new_id(f.node.id),
|
||||
ty: fld.fold_ty(f.node.ty),
|
||||
attrs: f.node.attrs.map(|a| fold_attribute_(*a, fld)),
|
||||
attrs: f.node.attrs.iter().map(|a| fold_attribute_(*a, fld)).collect(),
|
||||
},
|
||||
span: fld.new_span(f.span),
|
||||
}
|
||||
@ -525,14 +525,14 @@ pub fn noop_fold_view_item<T: Folder>(vi: &ViewItem, folder: &mut T)
|
||||
};
|
||||
ViewItem {
|
||||
node: inner_view_item,
|
||||
attrs: vi.attrs.map(|a| fold_attribute_(*a, folder)),
|
||||
attrs: vi.attrs.iter().map(|a| fold_attribute_(*a, folder)).collect(),
|
||||
vis: vi.vis,
|
||||
span: folder.new_span(vi.span),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn noop_fold_block<T: Folder>(b: P<Block>, folder: &mut T) -> P<Block> {
|
||||
let view_items = b.view_items.map(|x| folder.fold_view_item(x));
|
||||
let view_items = b.view_items.iter().map(|x| folder.fold_view_item(x)).collect();
|
||||
let stmts = b.stmts.iter().flat_map(|s| folder.fold_stmt(*s).move_iter()).collect();
|
||||
P(Block {
|
||||
id: folder.new_id(b.id), // Needs to be first, for ast_map.
|
||||
@ -566,9 +566,9 @@ pub fn noop_fold_item_underscore<T: Folder>(i: &Item_, folder: &mut T) -> Item_
|
||||
ItemEnum(ref enum_definition, ref generics) => {
|
||||
ItemEnum(
|
||||
ast::EnumDef {
|
||||
variants: enum_definition.variants.map(|&x| {
|
||||
variants: enum_definition.variants.iter().map(|&x| {
|
||||
folder.fold_variant(x)
|
||||
}),
|
||||
}).collect(),
|
||||
},
|
||||
fold_generics(generics, folder))
|
||||
}
|
||||
@ -580,18 +580,18 @@ pub fn noop_fold_item_underscore<T: Folder>(i: &Item_, folder: &mut T) -> Item_
|
||||
ItemImpl(fold_generics(generics, folder),
|
||||
ifce.as_ref().map(|p| fold_trait_ref(p, folder)),
|
||||
folder.fold_ty(ty),
|
||||
methods.map(|x| folder.fold_method(*x))
|
||||
methods.iter().map(|x| folder.fold_method(*x)).collect()
|
||||
)
|
||||
}
|
||||
ItemTrait(ref generics, ref traits, ref methods) => {
|
||||
let methods = methods.map(|method| {
|
||||
let methods = methods.iter().map(|method| {
|
||||
match *method {
|
||||
Required(ref m) => Required(folder.fold_type_method(m)),
|
||||
Provided(method) => Provided(folder.fold_method(method))
|
||||
}
|
||||
});
|
||||
}).collect();
|
||||
ItemTrait(fold_generics(generics, folder),
|
||||
traits.map(|p| fold_trait_ref(p, folder)),
|
||||
traits.iter().map(|p| fold_trait_ref(p, folder)).collect(),
|
||||
methods)
|
||||
}
|
||||
ItemMac(ref m) => ItemMac(folder.fold_mac(m)),
|
||||
@ -602,7 +602,7 @@ pub fn noop_fold_type_method<T: Folder>(m: &TypeMethod, fld: &mut T) -> TypeMeth
|
||||
TypeMethod {
|
||||
id: fld.new_id(m.id), // Needs to be first, for ast_map.
|
||||
ident: fld.fold_ident(m.ident),
|
||||
attrs: m.attrs.map(|a| fold_attribute_(*a, fld)),
|
||||
attrs: m.attrs.iter().map(|a| fold_attribute_(*a, fld)).collect(),
|
||||
purity: m.purity,
|
||||
decl: fld.fold_fn_decl(m.decl),
|
||||
generics: fold_generics(&m.generics, fld),
|
||||
@ -623,8 +623,8 @@ pub fn noop_fold_mod<T: Folder>(m: &Mod, folder: &mut T) -> Mod {
|
||||
pub fn noop_fold_crate<T: Folder>(c: Crate, folder: &mut T) -> Crate {
|
||||
Crate {
|
||||
module: folder.fold_mod(&c.module),
|
||||
attrs: c.attrs.map(|x| fold_attribute_(*x, folder)),
|
||||
config: c.config.map(|x| fold_meta_item_(*x, folder)),
|
||||
attrs: c.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),
|
||||
config: c.config.iter().map(|x| fold_meta_item_(*x, folder)).collect(),
|
||||
span: folder.new_span(c.span),
|
||||
}
|
||||
}
|
||||
@ -643,7 +643,7 @@ pub fn noop_fold_item<T: Folder>(i: &Item, folder: &mut T) -> SmallVector<@Item>
|
||||
SmallVector::one(@Item {
|
||||
id: id,
|
||||
ident: folder.fold_ident(ident),
|
||||
attrs: i.attrs.map(|e| fold_attribute_(*e, folder)),
|
||||
attrs: i.attrs.iter().map(|e| fold_attribute_(*e, folder)).collect(),
|
||||
node: node,
|
||||
vis: i.vis,
|
||||
span: folder.new_span(i.span)
|
||||
@ -654,11 +654,11 @@ pub fn noop_fold_foreign_item<T: Folder>(ni: &ForeignItem, folder: &mut T) -> @F
|
||||
@ForeignItem {
|
||||
id: folder.new_id(ni.id), // Needs to be first, for ast_map.
|
||||
ident: folder.fold_ident(ni.ident),
|
||||
attrs: ni.attrs.map(|x| fold_attribute_(*x, folder)),
|
||||
attrs: ni.attrs.iter().map(|x| fold_attribute_(*x, folder)).collect(),
|
||||
node: match ni.node {
|
||||
ForeignItemFn(ref fdec, ref generics) => {
|
||||
ForeignItemFn(P(FnDecl {
|
||||
inputs: fdec.inputs.map(|a| fold_arg_(a, folder)),
|
||||
inputs: fdec.inputs.iter().map(|a| fold_arg_(a, folder)).collect(),
|
||||
output: folder.fold_ty(fdec.output),
|
||||
cf: fdec.cf,
|
||||
variadic: fdec.variadic
|
||||
@ -677,7 +677,7 @@ pub fn noop_fold_method<T: Folder>(m: &Method, folder: &mut T) -> @Method {
|
||||
@Method {
|
||||
id: folder.new_id(m.id), // Needs to be first, for ast_map.
|
||||
ident: folder.fold_ident(m.ident),
|
||||
attrs: m.attrs.map(|a| fold_attribute_(*a, folder)),
|
||||
attrs: m.attrs.iter().map(|a| fold_attribute_(*a, folder)).collect(),
|
||||
generics: fold_generics(&m.generics, folder),
|
||||
explicit_self: folder.fold_explicit_self(&m.explicit_self),
|
||||
purity: m.purity,
|
||||
@ -700,28 +700,28 @@ pub fn noop_fold_pat<T: Folder>(p: @Pat, folder: &mut T) -> @Pat {
|
||||
PatLit(e) => PatLit(folder.fold_expr(e)),
|
||||
PatEnum(ref pth, ref pats) => {
|
||||
PatEnum(folder.fold_path(pth),
|
||||
pats.as_ref().map(|pats| pats.map(|x| folder.fold_pat(*x))))
|
||||
pats.as_ref().map(|pats| pats.iter().map(|x| folder.fold_pat(*x)).collect()))
|
||||
}
|
||||
PatStruct(ref pth, ref fields, etc) => {
|
||||
let pth_ = folder.fold_path(pth);
|
||||
let fs = fields.map(|f| {
|
||||
let fs = fields.iter().map(|f| {
|
||||
ast::FieldPat {
|
||||
ident: f.ident,
|
||||
pat: folder.fold_pat(f.pat)
|
||||
}
|
||||
});
|
||||
}).collect();
|
||||
PatStruct(pth_, fs, etc)
|
||||
}
|
||||
PatTup(ref elts) => PatTup(elts.map(|x| folder.fold_pat(*x))),
|
||||
PatTup(ref elts) => PatTup(elts.iter().map(|x| folder.fold_pat(*x)).collect()),
|
||||
PatUniq(inner) => PatUniq(folder.fold_pat(inner)),
|
||||
PatRegion(inner) => PatRegion(folder.fold_pat(inner)),
|
||||
PatRange(e1, e2) => {
|
||||
PatRange(folder.fold_expr(e1), folder.fold_expr(e2))
|
||||
},
|
||||
PatVec(ref before, ref slice, ref after) => {
|
||||
PatVec(before.map(|x| folder.fold_pat(*x)),
|
||||
PatVec(before.iter().map(|x| folder.fold_pat(*x)).collect(),
|
||||
slice.map(|x| folder.fold_pat(x)),
|
||||
after.map(|x| folder.fold_pat(*x)))
|
||||
after.iter().map(|x| folder.fold_pat(*x)).collect())
|
||||
}
|
||||
};
|
||||
|
||||
@ -741,21 +741,21 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
|
||||
ExprBox(folder.fold_expr(p), folder.fold_expr(e))
|
||||
}
|
||||
ExprVec(ref exprs, mutt) => {
|
||||
ExprVec(exprs.map(|&x| folder.fold_expr(x)), mutt)
|
||||
ExprVec(exprs.iter().map(|&x| folder.fold_expr(x)).collect(), mutt)
|
||||
}
|
||||
ExprRepeat(expr, count, mutt) => {
|
||||
ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count), mutt)
|
||||
}
|
||||
ExprTup(ref elts) => ExprTup(elts.map(|x| folder.fold_expr(*x))),
|
||||
ExprTup(ref elts) => ExprTup(elts.iter().map(|x| folder.fold_expr(*x)).collect()),
|
||||
ExprCall(f, ref args) => {
|
||||
ExprCall(folder.fold_expr(f),
|
||||
args.map(|&x| folder.fold_expr(x)))
|
||||
args.iter().map(|&x| folder.fold_expr(x)).collect())
|
||||
}
|
||||
ExprMethodCall(i, ref tps, ref args) => {
|
||||
ExprMethodCall(
|
||||
folder.fold_ident(i),
|
||||
tps.map(|&x| folder.fold_ty(x)),
|
||||
args.map(|&x| folder.fold_expr(x)))
|
||||
tps.iter().map(|&x| folder.fold_ty(x)).collect(),
|
||||
args.iter().map(|&x| folder.fold_expr(x)).collect())
|
||||
}
|
||||
ExprBinary(binop, lhs, rhs) => {
|
||||
ExprBinary(binop,
|
||||
@ -790,7 +790,7 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
|
||||
}
|
||||
ExprMatch(expr, ref arms) => {
|
||||
ExprMatch(folder.fold_expr(expr),
|
||||
arms.map(|x| folder.fold_arm(x)))
|
||||
arms.iter().map(|x| folder.fold_arm(x)).collect())
|
||||
}
|
||||
ExprFnBlock(decl, body) => {
|
||||
ExprFnBlock(folder.fold_fn_decl(decl), folder.fold_block(body))
|
||||
@ -810,7 +810,7 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
|
||||
ExprField(el, id, ref tys) => {
|
||||
ExprField(folder.fold_expr(el),
|
||||
folder.fold_ident(id),
|
||||
tys.map(|&x| folder.fold_ty(x)))
|
||||
tys.iter().map(|&x| folder.fold_ty(x)).collect())
|
||||
}
|
||||
ExprIndex(el, er) => {
|
||||
ExprIndex(folder.fold_expr(el), folder.fold_expr(er))
|
||||
@ -823,19 +823,19 @@ pub fn noop_fold_expr<T: Folder>(e: @Expr, folder: &mut T) -> @Expr {
|
||||
}
|
||||
ExprInlineAsm(ref a) => {
|
||||
ExprInlineAsm(InlineAsm {
|
||||
inputs: a.inputs.map(|&(ref c, input)| {
|
||||
inputs: a.inputs.iter().map(|&(ref c, input)| {
|
||||
((*c).clone(), folder.fold_expr(input))
|
||||
}),
|
||||
outputs: a.outputs.map(|&(ref c, out)| {
|
||||
}).collect(),
|
||||
outputs: a.outputs.iter().map(|&(ref c, out)| {
|
||||
((*c).clone(), folder.fold_expr(out))
|
||||
}),
|
||||
}).collect(),
|
||||
.. (*a).clone()
|
||||
})
|
||||
}
|
||||
ExprMac(ref mac) => ExprMac(folder.fold_mac(mac)),
|
||||
ExprStruct(ref path, ref fields, maybe_expr) => {
|
||||
ExprStruct(folder.fold_path(path),
|
||||
fields.map(|x| fold_field_(*x, folder)),
|
||||
fields.iter().map(|x| fold_field_(*x, folder)).collect(),
|
||||
maybe_expr.map(|x| folder.fold_expr(x)))
|
||||
},
|
||||
ExprParen(ex) => ExprParen(folder.fold_expr(ex))
|
||||
|
@ -104,7 +104,7 @@ fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> {
|
||||
}
|
||||
|
||||
if can_trim {
|
||||
lines.map(|line| line.slice(i + 1, line.len()).to_owned())
|
||||
lines.iter().map(|line| line.slice(i + 1, line.len()).to_owned()).collect()
|
||||
} else {
|
||||
lines
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ pub fn string_to_pat(source_str: ~str) -> @ast::Pat {
|
||||
|
||||
// convert a vector of strings to a vector of ast::Ident's
|
||||
pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> {
|
||||
ids.map(|u| token::str_to_ident(*u))
|
||||
ids.iter().map(|u| token::str_to_ident(*u)).collect()
|
||||
}
|
||||
|
||||
// does the given string match the pattern? whitespace in the first string
|
||||
|
@ -10,5 +10,5 @@
|
||||
|
||||
|
||||
pub unsafe fn f(xs: Vec<int> ) {
|
||||
xs.map(|_x| { unsafe fn q() { fail!(); } });
|
||||
xs.iter().map(|_x| { unsafe fn q() { fail!(); } }).collect::<Vec<()>>();
|
||||
}
|
||||
|
@ -99,7 +99,9 @@ fn add_pt(&mut self, x: int, y: int) {
|
||||
impl fmt::Show for AsciiArt {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
// Convert each line into a string.
|
||||
let lines = self.lines.map(|line| str::from_chars(line.as_slice()));
|
||||
let lines = self.lines.iter()
|
||||
.map(|line| str::from_chars(line.as_slice()))
|
||||
.collect::<Vec<~str>>();
|
||||
|
||||
// Concatenate the lines together using a new-line.
|
||||
write!(f.buf, "{}", lines.connect("\n"))
|
||||
|
Loading…
Reference in New Issue
Block a user