auto merge of #8430 : erickt/rust/cleanup-iterators, r=erickt

This PR cleans up various APIs. The major change is that it merges `Iterator` and `IteratorUtil`, and renames functions like `transform` to `map`. I also merged `DoubleEndedIterator` and `DoubleEndedIteratorUtil`, and renamed the various `.consume*` functions to `.move_iter()`. This helps to implement part of #7887.
This commit is contained in:
bors 2013-08-10 13:17:19 -07:00
commit 8b9e1ce75a
109 changed files with 896 additions and 997 deletions

View File

@ -142,7 +142,7 @@ fn parse_check_line(line: &str) -> Option<~str> {
fn parse_exec_env(line: &str) -> Option<(~str, ~str)> {
do parse_name_value_directive(line, ~"exec-env").map |nv| {
// nv is either FOO or FOO=BAR
let mut strs: ~[~str] = nv.splitn_iter('=', 1).transform(|s| s.to_owned()).collect();
let mut strs: ~[~str] = nv.splitn_iter('=', 1).map(|s| s.to_owned()).collect();
match strs.len() {
1u => (strs.pop(), ~""),

View File

@ -350,13 +350,13 @@ fn check_expected_errors(expected_errors: ~[errors::ExpectedError],
fatal(~"process did not return an error status");
}
let prefixes = expected_errors.iter().transform(|ee| {
let prefixes = expected_errors.iter().map(|ee| {
fmt!("%s:%u:", testfile.to_str(), ee.line)
}).collect::<~[~str]>();
fn to_lower( s : &str ) -> ~str {
let i = s.iter();
let c : ~[char] = i.transform( |c| {
let c : ~[char] = i.map( |c| {
if c.is_ascii() {
c.to_ascii().to_lower().to_char()
} else {
@ -760,7 +760,7 @@ fn _arm_exec_compiled_test(config: &config, props: &TestProps,
let cmdline = make_cmdline("", args.prog, args.args);
// get bare program string
let mut tvec: ~[~str] = args.prog.split_iter('/').transform(|ts| ts.to_owned()).collect();
let mut tvec: ~[~str] = args.prog.split_iter('/').map(|ts| ts.to_owned()).collect();
let prog_short = tvec.pop();
// copy to target
@ -938,7 +938,7 @@ fn disassemble_extract(config: &config, _props: &TestProps,
fn count_extracted_lines(p: &Path) -> uint {
let x = io::read_whole_file_str(&p.with_filetype("ll")).unwrap();
x.line_iter().len_()
x.line_iter().len()
}

View File

@ -869,7 +869,7 @@ impl BitvSet {
let min = num::min(self.bitv.storage.len(), other.bitv.storage.len());
self.bitv.storage.slice(0, min).iter().enumerate()
.zip(Repeat::new(&other.bitv.storage))
.transform(|((i, &w), o_store)| (i * uint::bits, w, o_store[i]))
.map(|((i, &w), o_store)| (i * uint::bits, w, o_store[i]))
}
/// Visits each word in self or other that extends beyond the other. This
@ -888,11 +888,11 @@ impl BitvSet {
if olen < slen {
self.bitv.storage.slice_from(olen).iter().enumerate()
.zip(Repeat::new(olen))
.transform(|((i, &w), min)| (true, (i + min) * uint::bits, w))
.map(|((i, &w), min)| (true, (i + min) * uint::bits, w))
} else {
other.bitv.storage.slice_from(slen).iter().enumerate()
.zip(Repeat::new(slen))
.transform(|((i, &w), min)| (false, (i + min) * uint::bits, w))
.map(|((i, &w), min)| (false, (i + min) * uint::bits, w))
}
}
}

View File

@ -63,7 +63,7 @@ pub struct MutDListIterator<'self, T> {
/// DList consuming iterator
#[deriving(Clone)]
pub struct ConsumeIterator<T> {
pub struct MoveIterator<T> {
priv list: DList<T>
}
@ -391,14 +391,14 @@ impl<T> DList<T> {
/// Consume the list into an iterator yielding elements by value
#[inline]
pub fn consume_iter(self) -> ConsumeIterator<T> {
ConsumeIterator{list: self}
pub fn move_iter(self) -> MoveIterator<T> {
MoveIterator{list: self}
}
/// Consume the list into an iterator yielding elements by value, in reverse
#[inline]
pub fn consume_rev_iter(self) -> Invert<ConsumeIterator<T>> {
self.consume_iter().invert()
pub fn move_rev_iter(self) -> Invert<MoveIterator<T>> {
self.move_iter().invert()
}
}
@ -557,7 +557,7 @@ impl<'self, A> ListInsertion<A> for MutDListIterator<'self, A> {
}
}
impl<A> Iterator<A> for ConsumeIterator<A> {
impl<A> Iterator<A> for MoveIterator<A> {
#[inline]
fn next(&mut self) -> Option<A> { self.list.pop_front() }
@ -567,7 +567,7 @@ impl<A> Iterator<A> for ConsumeIterator<A> {
}
}
impl<A> DoubleEndedIterator<A> for ConsumeIterator<A> {
impl<A> DoubleEndedIterator<A> for MoveIterator<A> {
#[inline]
fn next_back(&mut self) -> Option<A> { self.list.pop_back() }
}
@ -600,7 +600,7 @@ impl<A: Eq> Eq for DList<A> {
impl<A: Clone> Clone for DList<A> {
fn clone(&self) -> DList<A> {
self.iter().transform(|x| x.clone()).collect()
self.iter().map(|x| x.clone()).collect()
}
}
@ -690,7 +690,7 @@ mod tests {
#[cfg(test)]
fn list_from<T: Clone>(v: &[T]) -> DList<T> {
v.iter().transform(|x| (*x).clone()).collect()
v.iter().map(|x| (*x).clone()).collect()
}
#[test]
@ -721,7 +721,7 @@ mod tests {
check_links(&m);
let sum = v + u;
assert_eq!(sum.len(), m.len());
for elt in sum.consume_iter() {
for elt in sum.move_iter() {
assert_eq!(m.pop_front(), Some(elt))
}
}
@ -745,7 +745,7 @@ mod tests {
check_links(&m);
let sum = u + v;
assert_eq!(sum.len(), m.len());
for elt in sum.consume_iter() {
for elt in sum.move_iter() {
assert_eq!(m.pop_front(), Some(elt))
}
}
@ -770,7 +770,7 @@ mod tests {
m.rotate_backward(); check_links(&m);
m.push_front(9); check_links(&m);
m.rotate_forward(); check_links(&m);
assert_eq!(~[3,9,5,1,2], m.consume_iter().collect());
assert_eq!(~[3,9,5,1,2], m.move_iter().collect());
}
#[test]
@ -900,7 +900,7 @@ mod tests {
}
check_links(&m);
assert_eq!(m.len(), 3 + len * 2);
assert_eq!(m.consume_iter().collect::<~[int]>(), ~[-2,0,1,2,3,4,5,6,7,8,9,0,1]);
assert_eq!(m.move_iter().collect::<~[int]>(), ~[-2,0,1,2,3,4,5,6,7,8,9,0,1]);
}
#[test]
@ -911,7 +911,7 @@ mod tests {
m.merge(n, |a, b| a <= b);
assert_eq!(m.len(), len);
check_links(&m);
let res = m.consume_iter().collect::<~[int]>();
let res = m.move_iter().collect::<~[int]>();
assert_eq!(res, ~[-1, 0, 0, 0, 1, 3, 5, 6, 7, 2, 7, 7, 9]);
}
@ -927,7 +927,7 @@ mod tests {
m.push_back(4);
m.insert_ordered(3);
check_links(&m);
assert_eq!(~[2,3,4], m.consume_iter().collect::<~[int]>());
assert_eq!(~[2,3,4], m.move_iter().collect::<~[int]>());
}
#[test]
@ -1003,7 +1003,7 @@ mod tests {
check_links(&m);
let mut i = 0u;
for (a, &b) in m.consume_iter().zip(v.iter()) {
for (a, &b) in m.move_iter().zip(v.iter()) {
i += 1;
assert_eq!(a, b);
}
@ -1014,7 +1014,7 @@ mod tests {
fn bench_collect_into(b: &mut test::BenchHarness) {
let v = &[0, ..64];
do b.iter {
let _: DList<int> = v.iter().transform(|x| *x).collect();
let _: DList<int> = v.iter().map(|x| *x).collect();
}
}
@ -1075,33 +1075,33 @@ mod tests {
#[bench]
fn bench_iter(b: &mut test::BenchHarness) {
let v = &[0, ..128];
let m: DList<int> = v.iter().transform(|&x|x).collect();
let m: DList<int> = v.iter().map(|&x|x).collect();
do b.iter {
assert!(m.iter().len_() == 128);
assert!(m.iter().len() == 128);
}
}
#[bench]
fn bench_iter_mut(b: &mut test::BenchHarness) {
let v = &[0, ..128];
let mut m: DList<int> = v.iter().transform(|&x|x).collect();
let mut m: DList<int> = v.iter().map(|&x|x).collect();
do b.iter {
assert!(m.mut_iter().len_() == 128);
assert!(m.mut_iter().len() == 128);
}
}
#[bench]
fn bench_iter_rev(b: &mut test::BenchHarness) {
let v = &[0, ..128];
let m: DList<int> = v.iter().transform(|&x|x).collect();
let m: DList<int> = v.iter().map(|&x|x).collect();
do b.iter {
assert!(m.rev_iter().len_() == 128);
assert!(m.rev_iter().len() == 128);
}
}
#[bench]
fn bench_iter_mut_rev(b: &mut test::BenchHarness) {
let v = &[0, ..128];
let mut m: DList<int> = v.iter().transform(|&x|x).collect();
let mut m: DList<int> = v.iter().map(|&x|x).collect();
do b.iter {
assert!(m.mut_rev_iter().len_() == 128);
assert!(m.mut_rev_iter().len() == 128);
}
}
}

View File

@ -353,7 +353,7 @@ a literal `-`.
*/
// XXX: stupid, unclear name
pub fn pathify(vec: &[~str], stdin_hyphen : bool) -> ~[Option<Path>] {
vec.iter().transform(|str| {
vec.iter().map(|str| {
if stdin_hyphen && "-" == *str {
None
} else {

View File

@ -647,7 +647,7 @@ pub mod groups {
let desc_sep = "\n" + " ".repeat(24);
let mut rows = opts.iter().transform(|optref| {
let mut rows = opts.iter().map(|optref| {
let OptGroup{short_name: short_name,
long_name: long_name,
hint: hint,

View File

@ -948,7 +948,7 @@ impl serialize::Decoder for Decoder {
let name = match self.stack.pop() {
String(s) => s,
List(list) => {
for v in list.consume_rev_iter() {
for v in list.move_rev_iter() {
self.stack.push(v);
}
match self.stack.pop() {
@ -1066,7 +1066,7 @@ impl serialize::Decoder for Decoder {
let len = match self.stack.pop() {
List(list) => {
let len = list.len();
for v in list.consume_rev_iter() {
for v in list.move_rev_iter() {
self.stack.push(v);
}
len
@ -1086,7 +1086,7 @@ impl serialize::Decoder for Decoder {
let len = match self.stack.pop() {
Object(obj) => {
let len = obj.len();
for (key, value) in obj.consume_iter() {
for (key, value) in obj.move_iter() {
self.stack.push(value);
self.stack.push(String(key));
}

View File

@ -287,7 +287,7 @@ impl Mul<BigUint, BigUint> for BigUint {
if n == 1 { return (*a).clone(); }
let mut carry = 0;
let mut prod = do a.data.iter().transform |ai| {
let mut prod = do a.data.iter().map |ai| {
let (hi, lo) = BigDigit::from_uint(
(*ai as uint) * (n as uint) + (carry as uint)
);
@ -625,7 +625,7 @@ impl BigUint {
if n_bits == 0 || self.is_zero() { return (*self).clone(); }
let mut carry = 0;
let mut shifted = do self.data.iter().transform |elem| {
let mut shifted = do self.data.iter().map |elem| {
let (hi, lo) = BigDigit::from_uint(
(*elem as uint) << n_bits | (carry as uint)
);

View File

@ -77,7 +77,7 @@ fn map_slices<A:Clone + Send,B:Clone + Send>(
info!("num_tasks: %?", (num_tasks, futures.len()));
assert_eq!(num_tasks, futures.len());
do futures.consume_iter().transform |ys| {
do futures.move_iter().map |ys| {
let mut ys = ys;
ys.get()
}.collect()
@ -90,7 +90,7 @@ pub fn map<A:Clone + Send,B:Clone + Send>(
vec::concat(map_slices(xs, || {
let f = fn_factory();
let result: ~fn(uint, &[A]) -> ~[B] =
|_, slice| slice.iter().transform(|x| f(x)).collect();
|_, slice| slice.iter().map(|x| f(x)).collect();
result
}))
}
@ -102,7 +102,7 @@ pub fn mapi<A:Clone + Send,B:Clone + Send>(
let slices = map_slices(xs, || {
let f = fn_factory();
let result: ~fn(uint, &[A]) -> ~[B] = |base, slice| {
slice.iter().enumerate().transform(|(i, x)| {
slice.iter().enumerate().map(|(i, x)| {
f(i + base, x)
}).collect()
};

View File

@ -367,7 +367,7 @@ mod tests {
fn test_from_iter() {
let xs = ~[9u, 8, 7, 6, 5, 4, 3, 2, 1];
let mut q: PriorityQueue<uint> = xs.rev_iter().transform(|&x| x).collect();
let mut q: PriorityQueue<uint> = xs.rev_iter().map(|&x| x).collect();
for &x in xs.iter() {
assert_eq!(q.pop(), x);

View File

@ -692,11 +692,11 @@ mod tests {
fn test_from_iterator() {
use std::iterator;
let v = ~[1,2,3,4,5,6,7];
let deq: RingBuf<int> = v.iter().transform(|&x| x).collect();
let u: ~[int] = deq.iter().transform(|&x| x).collect();
let deq: RingBuf<int> = v.iter().map(|&x| x).collect();
let u: ~[int] = deq.iter().map(|&x| x).collect();
assert_eq!(u, v);
let mut seq = iterator::count(0u, 2).take_(256);
let mut seq = iterator::count(0u, 2).take(256);
let deq: RingBuf<uint> = seq.collect();
for (i, &x) in deq.iter().enumerate() {
assert_eq!(2*i, x);

View File

@ -15,7 +15,7 @@
#[allow(missing_doc)];
use std::iterator::{Iterator, IteratorUtil, Enumerate, FilterMap, Invert};
use std::iterator::{Iterator, Enumerate, FilterMap, Invert};
use std::util::replace;
use std::vec::{VecIterator, VecMutIterator};
use std::vec;
@ -152,12 +152,12 @@ impl<V> SmallIntMap<V> {
}
/// Empties the hash map, moving all values into the specified closure
pub fn consume(&mut self)
pub fn move_iter(&mut self)
-> FilterMap<(uint, Option<V>), (uint, V),
Enumerate<vec::ConsumeIterator<Option<V>>>>
Enumerate<vec::MoveIterator<Option<V>>>>
{
let values = replace(&mut self.v, ~[]);
values.consume_iter().enumerate().filter_map(|(i, v)| {
values.move_iter().enumerate().filter_map(|(i, v)| {
v.map_move(|v| (i, v))
})
}
@ -452,11 +452,11 @@ mod test_map {
}
#[test]
fn test_consume() {
fn test_move_iter() {
let mut m = SmallIntMap::new();
m.insert(1, ~2);
let mut called = false;
for (k, v) in m.consume() {
for (k, v) in m.move_iter() {
assert!(!called);
called = true;
assert_eq!(k, 1);

View File

@ -893,7 +893,7 @@ mod tests {
fn ile(x: &(&'static str), y: &(&'static str)) -> bool
{
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary clone.
// to_ascii_move and to_str_move to not do a unnecessary clone.
// (Actually, could just remove the to_str_* call, but needs an deriving(Ord) on
// Ascii)
let x = x.to_ascii().to_lower().to_str_ascii();

View File

@ -12,7 +12,6 @@
use std::{char, vec, util};
use std::num::strconv::{SignNone,SignNeg,SignAll,int_to_str_bytes_common};
use std::iterator::IteratorUtil;
#[deriving(Eq)]
enum States {
@ -106,7 +105,7 @@ pub fn expand(cap: &[u8], params: &[Param], vars: &mut Variables)
*dst = (*src).clone();
}
for c in cap.iter().transform(|&x| x) {
for c in cap.iter().map(|&x| x) {
let cur = c as char;
let mut old_state = state;
match state {

View File

@ -214,7 +214,7 @@ pub fn parse(file: @Reader, longnames: bool) -> Result<~TermInfo, ~str> {
}
let names_str = str::from_bytes(file.read_bytes(names_bytes as uint - 1)); // don't read NUL
let term_names: ~[~str] = names_str.split_iter('|').transform(|s| s.to_owned()).collect();
let term_names: ~[~str] = names_str.split_iter('|').map(|s| s.to_owned()).collect();
file.read_byte(); // consume NUL

View File

@ -525,12 +525,11 @@ impl ConsoleTestState {
}
pub fn fmt_metrics(mm: &MetricMap) -> ~str {
use std::iterator::IteratorUtil;
let v : ~[~str] = mm.iter()
.transform(|(k,v)| fmt!("%s: %f (+/- %f)",
*k,
v.value as float,
v.noise as float))
.map(|(k,v)| fmt!("%s: %f (+/- %f)",
*k,
v.value as float,
v.noise as float))
.collect();
v.connect(", ")
}
@ -698,7 +697,7 @@ fn run_tests(opts: &TestOpts,
// All benchmarks run at the end, in serial.
// (this includes metric fns)
for b in filtered_benchs_and_metrics.consume_iter() {
for b in filtered_benchs_and_metrics.move_iter() {
callback(TeWait(b.desc.clone()));
run_test(!opts.run_benchmarks, b, ch.clone());
let (test, result) = p.recv();
@ -744,7 +743,7 @@ pub fn filter_tests(
}
}
filtered.consume_iter().filter_map(|x| filter_fn(x, filter_str)).collect()
filtered.move_iter().filter_map(|x| filter_fn(x, filter_str)).collect()
};
// Maybe pull out the ignored test and unignore them
@ -762,7 +761,7 @@ pub fn filter_tests(
None
}
};
filtered.consume_iter().filter_map(|x| filter(x)).collect()
filtered.move_iter().filter_map(|x| filter(x)).collect()
};
// Sort the tests alphabetically

View File

@ -213,13 +213,13 @@ impl<K: TotalOrd, V> TreeMap<K, V> {
}
/// Get a lazy iterator that consumes the treemap.
pub fn consume_iter(self) -> TreeMapConsumeIterator<K, V> {
pub fn move_iter(self) -> TreeMapMoveIterator<K, V> {
let TreeMap { root: root, length: length } = self;
let stk = match root {
None => ~[],
Some(~tn) => ~[tn]
};
TreeMapConsumeIterator {
TreeMapMoveIterator {
stack: stk,
remaining: length
}
@ -331,12 +331,12 @@ fn iter_traverse_complete<'a, K, V>(it: &mut TreeMapIterator<'a, K, V>) {
}
/// Lazy forward iterator over a map that consumes the map while iterating
pub struct TreeMapConsumeIterator<K, V> {
pub struct TreeMapMoveIterator<K, V> {
priv stack: ~[TreeNode<K, V>],
priv remaining: uint
}
impl<K, V> Iterator<(K, V)> for TreeMapConsumeIterator<K,V> {
impl<K, V> Iterator<(K, V)> for TreeMapMoveIterator<K,V> {
#[inline]
fn next(&mut self) -> Option<(K, V)> {
while !self.stack.is_empty() {
@ -1259,7 +1259,7 @@ mod test_treemap {
fn test_from_iter() {
let xs = ~[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: TreeMap<int, int> = xs.iter().transform(|&x| x).collect();
let map: TreeMap<int, int> = xs.iter().map(|&x| x).collect();
for &(k, v) in xs.iter() {
assert_eq!(map.find(&k), Some(&v));
@ -1558,7 +1558,7 @@ mod test_set {
fn test_from_iter() {
let xs = ~[1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: TreeSet<int> = xs.iter().transform(|&x| x).collect();
let set: TreeSet<int> = xs.iter().map(|&x| x).collect();
for x in xs.iter() {
assert!(set.contains(x));

View File

@ -128,7 +128,7 @@ fn rustc_help() {
}
fn find_cmd(command_string: &str) -> Option<Command> {
do COMMANDS.iter().find_ |command| {
do COMMANDS.iter().find |command| {
command.cmd == command_string
}.map_move(|x| *x)
}

View File

@ -935,7 +935,7 @@ pub fn link_args(sess: Session,
// Add all the link args for external crates.
do cstore::iter_crate_data(cstore) |crate_num, _| {
let link_args = csearch::get_link_args_for_crate(cstore, crate_num);
for link_arg in link_args.consume_iter() {
for link_arg in link_args.move_iter() {
args.push(link_arg);
}
}

View File

@ -14,7 +14,7 @@ use metadata::cstore;
use metadata::filesearch;
use std::hashmap::HashSet;
use std::{num, os, path, uint, util, vec};
use std::{os, util, vec};
fn not_win32(os: session::os) -> bool {
os != session::os_win32
@ -49,7 +49,7 @@ fn get_sysroot_absolute_rt_lib(sess: session::Session) -> Path {
}
pub fn rpaths_to_flags(rpaths: &[Path]) -> ~[~str] {
rpaths.iter().transform(|rpath| fmt!("-Wl,-rpath,%s",rpath.to_str())).collect()
rpaths.iter().map(|rpath| fmt!("-Wl,-rpath,%s",rpath.to_str())).collect()
}
fn get_rpaths(os: session::os,
@ -100,7 +100,7 @@ fn get_rpaths(os: session::os,
fn get_rpaths_relative_to_output(os: session::os,
output: &Path,
libs: &[Path]) -> ~[Path] {
libs.iter().transform(|a| get_rpath_relative_to_output(os, output, a)).collect()
libs.iter().map(|a| get_rpath_relative_to_output(os, output, a)).collect()
}
pub fn get_rpath_relative_to_output(os: session::os,
@ -123,7 +123,7 @@ pub fn get_rpath_relative_to_output(os: session::os,
}
fn get_absolute_rpaths(libs: &[Path]) -> ~[Path] {
libs.iter().transform(|a| get_absolute_rpath(a)).collect()
libs.iter().map(|a| get_absolute_rpath(a)).collect()
}
pub fn get_absolute_rpath(lib: &Path) -> Path {

View File

@ -120,7 +120,7 @@ pub fn build_configuration(sess: Session, argv0: @str, input: &input) ->
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
fn parse_cfgspecs(cfgspecs: ~[~str],
demitter: diagnostic::Emitter) -> ast::CrateConfig {
do cfgspecs.consume_iter().transform |s| {
do cfgspecs.move_iter().map |s| {
let sess = parse::new_parse_sess(Some(demitter));
parse::parse_meta_from_source_str(@"cfgspec", s.to_managed(), ~[], sess)
}.collect::<ast::CrateConfig>()
@ -631,7 +631,7 @@ pub fn build_session_options(binary: @str,
let level_name = lint::level_to_str(*level);
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary copy.
// to_ascii_move and to_str_move to not do a unnecessary copy.
let level_short = level_name.slice_chars(0, 1);
let level_short = level_short.to_ascii().to_upper().to_str_ascii();
let flags = vec::append(getopts::opt_strs(matches, level_short),
@ -726,7 +726,7 @@ pub fn build_session_options(binary: @str,
let addl_lib_search_paths = getopts::opt_strs(matches, "L").map(|s| Path(*s));
let linker = getopts::opt_maybe_str(matches, "linker");
let linker_args = getopts::opt_strs(matches, "link-args").flat_map( |a| {
a.split_iter(' ').transform(|arg| arg.to_owned()).collect()
a.split_iter(' ').map(|arg| arg.to_owned()).collect()
});
let cfg = parse_cfgspecs(getopts::opt_strs(matches, "cfg"), demitter);
@ -737,7 +737,7 @@ pub fn build_session_options(binary: @str,
let custom_passes = match getopts::opt_maybe_str(matches, "passes") {
None => ~[],
Some(s) => {
s.split_iter(|c: char| c == ' ' || c == ',').transform(|s| {
s.split_iter(|c: char| c == ' ' || c == ',').map(|s| {
s.trim().to_owned()
}).collect()
}
@ -946,7 +946,7 @@ pub fn build_output_filenames(input: &input,
let linkage_metas = attr::find_linkage_metas(attrs);
if !linkage_metas.is_empty() {
// But if a linkage meta is present, that overrides
let maybe_name = linkage_metas.iter().find_(|m| "name" == m.name());
let maybe_name = linkage_metas.iter().find(|m| "name" == m.name());
match maybe_name.chain(|m| m.value_str()) {
Some(s) => stem = s,
_ => ()

View File

@ -102,12 +102,12 @@ fn fold_item_underscore(cx: @Context, item: &ast::item_,
let item = match *item {
ast::item_impl(ref a, ref b, ref c, ref methods) => {
let methods = methods.iter().filter(|m| method_in_cfg(cx, **m))
.transform(|x| *x).collect();
.map(|x| *x).collect();
ast::item_impl((*a).clone(), (*b).clone(), (*c).clone(), methods)
}
ast::item_trait(ref a, ref b, ref methods) => {
let methods = methods.iter().filter(|m| trait_method_in_cfg(cx, *m) )
.transform(|x| (*x).clone()).collect();
.map(|x| (*x).clone()).collect();
ast::item_trait((*a).clone(), (*b).clone(), methods)
}
ref item => (*item).clone(),
@ -180,5 +180,5 @@ fn trait_method_in_cfg(cx: @Context, meth: &ast::trait_method) -> bool {
// Determine if an item should be translated in the current crate
// configuration based on the item's attributes
fn in_cfg(cfg: &[@ast::MetaItem], attrs: &[ast::Attribute]) -> bool {
attr::test_cfg(cfg, attrs.iter().transform(|x| *x))
attr::test_cfg(cfg, attrs.iter().map(|x| *x))
}

View File

@ -126,7 +126,7 @@ fn fold_mod(cx: @mut TestCtxt,
let mod_nomain = ast::_mod {
view_items: m.view_items.clone(),
items: m.items.iter().transform(|i| nomain(cx, *i)).collect(),
items: m.items.iter().map(|i| nomain(cx, *i)).collect(),
};
fold::noop_fold_mod(&mod_nomain, fld)
@ -236,7 +236,7 @@ fn is_ignored(cx: @mut TestCtxt, i: @ast::item) -> bool {
do i.attrs.iter().any |attr| {
// check ignore(cfg(foo, bar))
"ignore" == attr.name() && match attr.meta_item_list() {
Some(ref cfgs) => attr::test_cfg(cx.crate.config, cfgs.iter().transform(|x| *x)),
Some(ref cfgs) => attr::test_cfg(cx.crate.config, cfgs.iter().map(|x| *x)),
None => true
}
}

View File

@ -83,7 +83,7 @@ fn warn_if_multiple_versions(e: @mut Env,
*crate_cache[crate_cache.len() - 1].metas
);
let vec: ~[Either<cache_entry, cache_entry>] = crate_cache.iter().transform(|&entry| {
let vec: ~[Either<cache_entry, cache_entry>] = crate_cache.iter().map(|&entry| {
let othername = loader::crate_name_from_metas(*entry.metas);
if name == othername {
Left(entry)
@ -183,7 +183,7 @@ fn visit_item(e: &Env, i: @ast::item) {
match fm.sort {
ast::named => {
let link_name = i.attrs.iter()
.find_(|at| "link_name" == at.name())
.find(|at| "link_name" == at.name())
.chain(|at| at.value_str());
let foreign_name = match link_name {

View File

@ -11,7 +11,6 @@
use std::option;
use std::os;
use std::{result, str};
use std::hashmap::HashSet;
// A module for searching for libraries

View File

@ -1204,7 +1204,7 @@ fn decode_side_tables(xcx: @ExtendedDecodeContext,
}
c::tag_table_capture_map => {
let cvars =
at_vec::to_managed_consume(
at_vec::to_managed_move(
val_dsr.read_to_vec(
|val_dsr| val_dsr.read_capture_var(xcx)));
dcx.maps.capture_map.insert(id, cvars);

View File

@ -112,23 +112,23 @@ impl CFGBuilder {
ast::pat_enum(_, Some(ref subpats)) |
ast::pat_tup(ref subpats) => {
let pats_exit =
self.pats_all(subpats.iter().transform(|p| *p), pred);
self.pats_all(subpats.iter().map(|p| *p), pred);
self.add_node(pat.id, [pats_exit])
}
ast::pat_struct(_, ref subpats, _) => {
let pats_exit =
self.pats_all(subpats.iter().transform(|f| f.pat), pred);
self.pats_all(subpats.iter().map(|f| f.pat), pred);
self.add_node(pat.id, [pats_exit])
}
ast::pat_vec(ref pre, ref vec, ref post) => {
let pre_exit =
self.pats_all(pre.iter().transform(|p| *p), pred);
self.pats_all(pre.iter().map(|p| *p), pred);
let vec_exit =
self.pats_all(vec.iter().transform(|p| *p), pre_exit);
self.pats_all(vec.iter().map(|p| *p), pre_exit);
let post_exit =
self.pats_all(post.iter().transform(|p| *p), vec_exit);
self.pats_all(post.iter().map(|p| *p), vec_exit);
self.add_node(pat.id, [post_exit])
}
}
@ -376,7 +376,7 @@ impl CFGBuilder {
ast::expr_struct(_, ref fields, base) => {
let base_exit = self.opt_expr(base, pred);
let field_exprs: ~[@ast::expr] =
fields.iter().transform(|f| f.expr).collect();
fields.iter().map(|f| f.expr).collect();
self.straightline(expr, base_exit, field_exprs)
}

View File

@ -169,7 +169,7 @@ pub fn check_exhaustive(cx: &MatchCheckCtxt, sp: span, pats: ~[@pat]) {
};
let variants = ty::enum_variants(cx.tcx, id);
match variants.iter().find_(|v| v.id == vid) {
match variants.iter().find(|v| v.id == vid) {
Some(v) => Some(cx.tcx.sess.str_of(v.name)),
None => {
fail!("check_exhaustive: bad variant in ctor")
@ -222,7 +222,7 @@ pub enum ctor {
pub fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@pat]) -> useful {
if m.len() == 0u { return useful_; }
if m[0].len() == 0u { return not_useful; }
let real_pat = match m.iter().find_(|r| r[0].id != 0) {
let real_pat = match m.iter().find(|r| r[0].id != 0) {
Some(r) => r[0], None => v[0]
};
let left_ty = if real_pat.id == 0 { ty::mk_nil() }
@ -470,7 +470,7 @@ pub fn ctor_arity(cx: &MatchCheckCtxt, ctor: &ctor, ty: ty::t) -> uint {
ty::ty_enum(eid, _) => {
let id = match *ctor { variant(id) => id,
_ => fail!("impossible case") };
match ty::enum_variants(cx.tcx, eid).iter().find_(|v| v.id == id ) {
match ty::enum_variants(cx.tcx, eid).iter().find(|v| v.id == id ) {
Some(v) => v.args.len(),
None => fail!("impossible case")
}
@ -627,7 +627,7 @@ pub fn specialize(cx: &MatchCheckCtxt,
if variant(variant_id) == *ctor_id {
// FIXME #4731: Is this right? --pcw
let args = flds.map(|ty_field| {
match flds.iter().find_(|f|
match flds.iter().find(|f|
f.ident == ty_field.ident) {
Some(f) => f.pat,
_ => wild()
@ -657,8 +657,8 @@ pub fn specialize(cx: &MatchCheckCtxt,
ty_to_str(cx.tcx, left_ty)));
}
}
let args = class_fields.iter().transform(|class_field| {
match flds.iter().find_(|f|
let args = class_fields.iter().map(|class_field| {
match flds.iter().find(|f|
f.ident == class_field.ident) {
Some(f) => f.pat,
_ => wild()

View File

@ -102,7 +102,7 @@ pub fn classify(e: &expr,
ast::expr_tup(ref es) |
ast::expr_vec(ref es, ast::m_imm) => {
join_all(es.iter().transform(|e| classify(*e, tcx)))
join_all(es.iter().map(|e| classify(*e, tcx)))
}
ast::expr_vstore(e, vstore) => {
@ -116,7 +116,7 @@ pub fn classify(e: &expr,
}
ast::expr_struct(_, ref fs, None) => {
let cs = do fs.iter().transform |f| {
let cs = do fs.iter().map |f| {
classify(f.expr, tcx)
};
join_all(cs)

View File

@ -994,7 +994,7 @@ fn lint_session(cx: @mut Context) -> @visit::Visitor<()> {
match cx.tcx.sess.lints.pop(&id) {
None => {},
Some(l) => {
for (lint, span, msg) in l.consume_iter() {
for (lint, span, msg) in l.move_iter() {
cx.span_lint(lint, span, msg)
}
}

View File

@ -5366,7 +5366,7 @@ impl Resolver {
if idents.len() == 0 {
return ~"???";
}
return self.idents_to_str(idents.consume_rev_iter().collect::<~[ast::ident]>());
return self.idents_to_str(idents.move_rev_iter().collect::<~[ast::ident]>());
}
pub fn dump_module(@mut self, module_: @mut Module) {

View File

@ -588,7 +588,7 @@ fn enter_opt<'r>(bcx: @mut Block,
let mut reordered_patterns = ~[];
let r = ty::lookup_struct_fields(tcx, struct_id);
for field in r.iter() {
match field_pats.iter().find_(|p| p.ident == field.ident) {
match field_pats.iter().find(|p| p.ident == field.ident) {
None => reordered_patterns.push(dummy),
Some(fp) => reordered_patterns.push(fp.pat)
}
@ -648,7 +648,7 @@ fn enter_rec_or_struct<'r>(bcx: @mut Block,
ast::pat_struct(_, ref fpats, _) => {
let mut pats = ~[];
for fname in fields.iter() {
match fpats.iter().find_(|p| p.ident == *fname) {
match fpats.iter().find(|p| p.ident == *fname) {
None => pats.push(dummy),
Some(pat) => pats.push(pat.pat)
}

View File

@ -508,7 +508,7 @@ pub fn trans_const(ccx: &mut CrateContext, r: &Repr, discr: uint,
}
General(ref cases) => {
let case = &cases[discr];
let max_sz = cases.iter().transform(|x| x.size).max().unwrap();
let max_sz = cases.iter().map(|x| x.size).max().unwrap();
let discr_ty = C_uint(ccx, discr);
let contents = build_const_struct(ccx, case,
~[discr_ty] + vals);
@ -519,7 +519,7 @@ pub fn trans_const(ccx: &mut CrateContext, r: &Repr, discr: uint,
C_struct(build_const_struct(ccx, nonnull, vals))
} else {
assert_eq!(vals.len(), 0);
let vals = do nonnull.fields.iter().enumerate().transform |(i, &ty)| {
let vals = do nonnull.fields.iter().enumerate().map |(i, &ty)| {
let llty = type_of::sizing_type_of(ccx, ty);
if i == ptrfield { C_null(llty) } else { C_undef(llty) }
}.collect::<~[ValueRef]>();

View File

@ -1278,7 +1278,7 @@ pub fn cleanup_and_leave(bcx: @mut Block,
let mut skip = 0;
let mut dest = None;
{
let r = (*inf).cleanup_paths.rev_iter().find_(|cp| cp.target == leave);
let r = (*inf).cleanup_paths.rev_iter().find(|cp| cp.target == leave);
for cp in r.iter() {
if cp.size == inf.cleanups.len() {
Br(bcx, cp.dest);

View File

@ -29,7 +29,7 @@ impl BasicBlock {
pub fn pred_iter(self) -> PredIterator {
self.as_value().user_iter()
.filter(|user| user.is_a_terminator_inst())
.transform(|user| user.get_parent().unwrap())
.map(|user| user.get_parent().unwrap())
}
pub fn get_single_predecessor(self) -> Option<BasicBlock> {

View File

@ -521,7 +521,7 @@ impl Builder {
}
self.inbounds_gep(base, small_vec.slice(0, ixs.len()))
} else {
let v = do ixs.iter().transform |i| { C_i32(*i as i32) }.collect::<~[ValueRef]>();
let v = do ixs.iter().map |i| { C_i32(*i as i32) }.collect::<~[ValueRef]>();
self.count_insn("gepi");
self.inbounds_gep(base, v)
}

View File

@ -37,7 +37,7 @@ pub struct FnType {
impl FnType {
pub fn decl_fn(&self, decl: &fn(fnty: Type) -> ValueRef) -> ValueRef {
let atys = self.arg_tys.iter().transform(|t| t.ty).collect::<~[Type]>();
let atys = self.arg_tys.iter().map(|t| t.ty).collect::<~[Type]>();
let rty = self.ret_ty.ty;
let fnty = Type::func(atys, &rty);
let llfn = decl(fnty);

View File

@ -316,7 +316,7 @@ pub struct cleanup_path {
pub fn shrink_scope_clean(scope_info: &mut ScopeInfo, size: uint) {
scope_info.landing_pad = None;
scope_info.cleanup_paths = scope_info.cleanup_paths.iter()
.take_while(|&cu| cu.size <= size).transform(|&x|x).collect();
.take_while(|&cu| cu.size <= size).map(|&x|x).collect();
}
pub fn grow_scope_clean(scope_info: &mut ScopeInfo) {
@ -1000,7 +1000,7 @@ pub fn node_id_type_params(bcx: @mut Block, id: ast::NodeId) -> ~[ty::t] {
match bcx.fcx.param_substs {
Some(substs) => {
do params.iter().transform |t| {
do params.iter().map |t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
}.collect()
}
@ -1025,7 +1025,7 @@ pub fn resolve_vtables_under_param_substs(tcx: ty::ctxt,
param_substs: Option<@param_substs>,
vts: typeck::vtable_res)
-> typeck::vtable_res {
@vts.iter().transform(|ds|
@vts.iter().map(|ds|
resolve_param_vtables_under_param_substs(tcx,
param_substs,
*ds))
@ -1037,7 +1037,7 @@ pub fn resolve_param_vtables_under_param_substs(
param_substs: Option<@param_substs>,
ds: typeck::vtable_param_res)
-> typeck::vtable_param_res {
@ds.iter().transform(
@ds.iter().map(
|d| resolve_vtable_under_param_substs(tcx,
param_substs,
d))
@ -1063,7 +1063,7 @@ pub fn resolve_vtable_under_param_substs(tcx: ty::ctxt,
typeck::vtable_static(trait_id, ref tys, sub) => {
let tys = match param_substs {
Some(substs) => {
do tys.iter().transform |t| {
do tys.iter().map |t| {
ty::subst_tps(tcx, substs.tys, substs.self_ty, *t)
}.collect()
}

View File

@ -499,8 +499,8 @@ fn const_expr_unadjusted(cx: @mut CrateContext, e: &ast::expr) -> ValueRef {
do expr::with_field_tys(tcx, ety, Some(e.id))
|discr, field_tys| {
let cs: ~[ValueRef] = field_tys.iter().enumerate()
.transform(|(ix, &field_ty)| {
match fs.iter().find_(|f| field_ty.ident == f.ident) {
.map(|(ix, &field_ty)| {
match fs.iter().find(|f| field_ty.ident == f.ident) {
Some(f) => const_expr(cx, (*f).expr),
None => {
match base_val {

View File

@ -662,7 +662,7 @@ fn enum_metadata(cx: &mut CrateContext,
let enumerators_metadata: ~[DIDescriptor] = variants
.iter()
.transform(|v| {
.map(|v| {
let name: &str = cx.sess.str_of(v.name);
let discriminant_value = v.disr_val as c_ulonglong;
@ -709,7 +709,7 @@ fn enum_metadata(cx: &mut CrateContext,
let variants_member_metadata: ~[DIDescriptor] = do struct_defs
.iter()
.enumerate()
.transform |(i, struct_def)| {
.map |(i, struct_def)| {
let variant_type_metadata = adt_struct_metadata(
cx,
struct_def,
@ -766,7 +766,7 @@ fn enum_metadata(cx: &mut CrateContext,
{
let arg_llvm_types: ~[Type] = do struct_def.fields.map |&ty| { type_of::type_of(cx, ty) };
let arg_metadata: ~[DIType] = do struct_def.fields.iter().enumerate()
.transform |(i, &ty)| {
.map |(i, &ty)| {
match discriminant_type_metadata {
Some(metadata) if i == 0 => metadata,
_ => type_metadata(cx, ty, span)
@ -816,7 +816,7 @@ fn composite_type_metadata(cx: &mut CrateContext,
let member_metadata: ~[DIDescriptor] = member_llvm_types
.iter()
.enumerate()
.transform(|(i, &member_llvm_type)| {
.map(|(i, &member_llvm_type)| {
let (member_size, member_align) = size_and_align_of(cx, member_llvm_type);
let member_offset = machine::llelement_offset(cx, composite_llvm_type, i);
let member_name: &str = member_names[i];

View File

@ -582,7 +582,7 @@ fn trans_rvalue_dps_unadjusted(bcx: @mut Block, expr: @ast::expr,
ast::expr_tup(ref args) => {
let repr = adt::represent_type(bcx.ccx(), expr_ty(bcx, expr));
let numbered_fields: ~[(uint, @ast::expr)] =
args.iter().enumerate().transform(|(i, arg)| (i, *arg)).collect();
args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect();
return trans_adt(bcx, repr, 0, numbered_fields, None, dest);
}
ast::expr_lit(@codemap::spanned {node: ast::lit_str(s), _}) => {

View File

@ -288,7 +288,7 @@ pub fn method_with_name(ccx: &mut CrateContext,
let imp = ccx.tcx.impls.find(&impl_id)
.expect("could not find impl while translating");
let meth = imp.methods.iter().find_(|m| m.ident == name)
let meth = imp.methods.iter().find(|m| m.ident == name)
.expect("could not find method while translating");
ccx.impl_method_cache.insert((impl_id, name), meth.def_id);

View File

@ -245,7 +245,7 @@ pub fn monomorphic_fn(ccx: @mut CrateContext,
}
ast_map::node_variant(ref v, enum_item, _) => {
let tvs = ty::enum_variants(ccx.tcx, local_def(enum_item.id));
let this_tv = *tvs.iter().find_(|tv| { tv.id.node == fn_id.node}).unwrap();
let this_tv = *tvs.iter().find(|tv| { tv.id.node == fn_id.node}).unwrap();
let d = mk_lldecl();
set_inline_hint(d);
match v.node.kind {
@ -366,18 +366,18 @@ pub fn make_mono_id(ccx: @mut CrateContext,
param_uses: Option<@~[type_use::type_uses]>) -> mono_id {
// FIXME (possibly #5801): Need a lot of type hints to get
// .collect() to work.
let substs_iter = substs.self_ty.iter().chain_(substs.tys.iter());
let substs_iter = substs.self_ty.iter().chain(substs.tys.iter());
let precise_param_ids: ~[(ty::t, Option<@~[mono_id]>)] = match substs.vtables {
Some(vts) => {
debug!("make_mono_id vtables=%s substs=%s",
vts.repr(ccx.tcx), substs.tys.repr(ccx.tcx));
let vts_iter = substs.self_vtables.iter().chain_(vts.iter());
vts_iter.zip(substs_iter).transform(|(vtable, subst)| {
let vts_iter = substs.self_vtables.iter().chain(vts.iter());
vts_iter.zip(substs_iter).map(|(vtable, subst)| {
let v = vtable.map(|vt| meth::vtable_id(ccx, vt));
(*subst, if !v.is_empty() { Some(@v) } else { None })
}).collect()
}
None => substs_iter.transform(|subst| (*subst, None::<@~[mono_id]>)).collect()
None => substs_iter.map(|subst| (*subst, None::<@~[mono_id]>)).collect()
};
@ -387,9 +387,9 @@ pub fn make_mono_id(ccx: @mut CrateContext,
// We just say it is fully used.
let self_use =
substs.self_ty.map(|_| type_use::use_repr|type_use::use_tydesc);
let uses_iter = self_use.iter().chain_(uses.iter());
let uses_iter = self_use.iter().chain(uses.iter());
precise_param_ids.iter().zip(uses_iter).transform(|(id, uses)| {
precise_param_ids.iter().zip(uses_iter).map(|(id, uses)| {
if ccx.sess.no_monomorphic_collapse() {
match *id {
(a, b) => mono_precise(a, b)
@ -429,7 +429,7 @@ pub fn make_mono_id(ccx: @mut CrateContext,
}).collect()
}
None => {
precise_param_ids.iter().transform(|x| {
precise_param_ids.iter().map(|x| {
let (a, b) = *x;
mono_precise(a, b)
}).collect()

View File

@ -3791,9 +3791,9 @@ pub fn substd_enum_variants(cx: ctxt,
id: ast::def_id,
substs: &substs)
-> ~[@VariantInfo] {
do enum_variants(cx, id).iter().transform |variant_info| {
do enum_variants(cx, id).iter().map |variant_info| {
let substd_args = variant_info.args.iter()
.transform(|aty| subst(cx, substs, *aty)).collect();
.map(|aty| subst(cx, substs, *aty)).collect();
let substd_ctor_ty = subst(cx, substs, variant_info.ctor_ty);
@ -3935,7 +3935,7 @@ pub fn enum_variants(cx: ctxt, id: ast::def_id) -> @~[@VariantInfo] {
_
}, _) => {
let mut last_discriminant: Option<uint> = None;
@enum_definition.variants.iter().transform(|variant| {
@enum_definition.variants.iter().map(|variant| {
let mut discriminant = match last_discriminant {
Some(val) => val + 1,
@ -4117,7 +4117,7 @@ pub fn lookup_struct_field(cx: ctxt,
field_id: ast::def_id)
-> field_ty {
let r = lookup_struct_fields(cx, parent);
match r.iter().find_(
match r.iter().find(
|f| f.id.node == field_id.node) {
Some(t) => *t,
None => cx.sess.bug("struct ID not found in parent's fields")

View File

@ -724,7 +724,7 @@ pub fn ty_of_closure<AC:AstConv,RS:region_scope + Clone + 'static>(
in_binding_rscope(rscope,
RegionParamNames(bound_lifetime_names.clone()));
let input_tys = do decl.inputs.iter().enumerate().transform |(i, a)| {
let input_tys = do decl.inputs.iter().enumerate().map |(i, a)| {
let expected_arg_ty = do expected_sig.chain_ref |e| {
// no guarantee that the correct number of expected args
// were supplied

View File

@ -759,7 +759,7 @@ impl<'self> LookupContext<'self> {
-> Option<method_map_entry> {
// XXX(pcwalton): Do we need to clone here?
let relevant_candidates: ~[Candidate] =
candidates.iter().transform(|c| (*c).clone()).
candidates.iter().map(|c| (*c).clone()).
filter(|c| self.is_relevant(rcvr_ty, c)).collect();
let relevant_candidates = self.merge_candidates(relevant_candidates);

View File

@ -1122,7 +1122,7 @@ pub fn lookup_field_ty(tcx: ty::ctxt,
fieldname: ast::ident,
substs: &ty::substs) -> Option<ty::t> {
let o_field = items.iter().find_(|f| f.ident == fieldname);
let o_field = items.iter().find(|f| f.ident == fieldname);
do o_field.map() |f| {
ty::lookup_field_type(tcx, class_id, f.id, substs)
}
@ -1818,7 +1818,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
_ => ()
}
let tps : ~[ty::t] = tys.iter().transform(|ty| fcx.to_ty(ty)).collect();
let tps : ~[ty::t] = tys.iter().map(|ty| fcx.to_ty(ty)).collect();
match method::lookup(fcx,
expr,
base,
@ -2644,7 +2644,7 @@ pub fn check_expr_with_unifier(fcx: @mut FnCtxt,
let mut bot_field = false;
let mut err_field = false;
let elt_ts = do elts.iter().enumerate().transform |(i, e)| {
let elt_ts = do elts.iter().enumerate().map |(i, e)| {
let opt_hint = match flds {
Some(ref fs) if i < fs.len() => Some(fs[i]),
_ => None

View File

@ -100,7 +100,7 @@ fn lookup_vtables(vcx: &VtableContext,
let mut result =
substs.tps.rev_iter()
.zip(type_param_defs.rev_iter())
.transform(|(ty, def)|
.map(|(ty, def)|
lookup_vtables_for_param(vcx, location_info, Some(substs),
&*def.bounds, *ty, is_early))
.to_owned_vec();

View File

@ -182,7 +182,7 @@ impl CoherenceChecker {
item_impl(_, ref opt_trait, _, _) => {
let opt_trait : ~[trait_ref] =
opt_trait.iter()
.transform(|x| (*x).clone())
.map(|x| (*x).clone())
.collect();
self.check_implementation(item, opt_trait);
}

View File

@ -675,7 +675,7 @@ pub fn check_methods_against_trait(ccx: &CrateCtxt,
// we'll catch it in coherence
let trait_ms = ty::trait_methods(tcx, trait_ref.def_id);
for impl_m in impl_ms.iter() {
match trait_ms.iter().find_(|trait_m| trait_m.ident == impl_m.mty.ident) {
match trait_ms.iter().find(|trait_m| trait_m.ident == impl_m.mty.ident) {
Some(trait_m) => {
let num_impl_tps = generics.ty_params.len();
compare_impl_method(
@ -731,7 +731,7 @@ pub fn convert_methods(ccx: &CrateCtxt,
-> ~[ConvertedMethod]
{
let tcx = ccx.tcx;
return ms.iter().transform(|m| {
return ms.iter().map(|m| {
let num_rcvr_ty_params = rcvr_ty_generics.type_param_defs.len();
let m_ty_generics =
ty_generics(ccx, rcvr_ty_generics.region_param, &m.generics,

View File

@ -156,8 +156,8 @@ Available lint options:
");
let lint_dict = lint::get_lint_dict();
let mut lint_dict = lint_dict.consume()
.transform(|(k, v)| (v, k))
let mut lint_dict = lint_dict.move_iter()
.map(|(k, v)| (v, k))
.collect::<~[(lint::LintSpec, &'static str)]>();
lint_dict.qsort();
@ -173,7 +173,7 @@ Available lint options:
padded(max_key, "name"), "default", "meaning");
printfln!(" %s %7.7s %s\n",
padded(max_key, "----"), "-------", "-------");
for (spec, name) in lint_dict.consume_iter() {
for (spec, name) in lint_dict.move_iter() {
let name = name.replace("_", "-");
printfln!(" %s %7.7s %s",
padded(max_key, name),

View File

@ -27,7 +27,7 @@ pub struct CrateAttrs {
fn doc_metas(attrs: ~[ast::Attribute]) -> ~[@ast::MetaItem] {
attrs.iter()
.filter(|at| "doc" == at.name())
.transform(|at| at.desugar_doc().meta())
.map(|at| at.desugar_doc().meta())
.collect()
}
@ -41,7 +41,7 @@ pub fn parse_crate(attrs: ~[ast::Attribute]) -> CrateAttrs {
}
pub fn parse_desc(attrs: ~[ast::Attribute]) -> Option<~str> {
let doc_strs = do doc_metas(attrs).consume_iter().filter_map |meta| {
let doc_strs = do doc_metas(attrs).move_iter().filter_map |meta| {
meta.value_str()
}.collect::<~[@str]>();
if doc_strs.is_empty() {

View File

@ -123,7 +123,7 @@ fn fold_enum(
let doc = fold::default_seq_fold_enum(fold, doc);
doc::EnumDoc {
variants: do doc.variants.iter().transform |variant| {
variants: do doc.variants.iter().map |variant| {
let variant = (*variant).clone();
let desc = {
let variant = variant.clone();
@ -133,7 +133,7 @@ fn fold_enum(
node: ast::item_enum(ref enum_definition, _), _
}, _) => {
let ast_variant =
(*enum_definition.variants.iter().find_(|v| {
(*enum_definition.variants.iter().find(|v| {
to_str(v.node.name) == variant.name
}).unwrap()).clone();
@ -182,7 +182,7 @@ fn merge_method_attrs(
ast_map::node_item(@ast::item {
node: ast::item_trait(_, _, ref methods), _
}, _) => {
methods.iter().transform(|method| {
methods.iter().map(|method| {
match (*method).clone() {
ast::required(ty_m) => {
(to_str(ty_m.ident),
@ -197,7 +197,7 @@ fn merge_method_attrs(
ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref methods), _
}, _) => {
methods.iter().transform(|method| {
methods.iter().map(|method| {
(to_str(method.ident),
attr_parser::parse_desc(method.attrs.clone()))
}).collect()
@ -206,7 +206,7 @@ fn merge_method_attrs(
}
};
do docs.iter().zip(attrs.iter()).transform |(doc, attrs)| {
do docs.iter().zip(attrs.iter()).map |(doc, attrs)| {
assert!(doc.name == attrs.first());
let desc = attrs.second();

View File

@ -221,7 +221,7 @@ pub fn maybe_find_pandoc(
}
};
let pandoc = do possible_pandocs.iter().find_ |&pandoc| {
let pandoc = do possible_pandocs.iter().find |&pandoc| {
let output = process_output(*pandoc, [~"--version"]);
debug!("testing pandoc cmd %s: %?", *pandoc, output);
output.status == 0

View File

@ -185,7 +185,7 @@ fn enumdoc_from_enum(
fn variantdocs_from_variants(
variants: ~[ast::variant]
) -> ~[doc::VariantDoc] {
variants.iter().transform(variantdoc_from_variant).collect()
variants.iter().map(variantdoc_from_variant).collect()
}
fn variantdoc_from_variant(variant: &ast::variant) -> doc::VariantDoc {
@ -202,7 +202,7 @@ fn traitdoc_from_trait(
) -> doc::TraitDoc {
doc::TraitDoc {
item: itemdoc,
methods: do methods.iter().transform |method| {
methods: do methods.iter().map |method| {
match (*method).clone() {
ast::required(ty_m) => {
doc::MethodDoc {
@ -238,7 +238,7 @@ fn impldoc_from_impl(
bounds_str: None,
trait_types: ~[],
self_ty: None,
methods: do methods.iter().transform |method| {
methods: do methods.iter().map |method| {
doc::MethodDoc {
name: to_str(method.ident),
brief: None,

View File

@ -153,7 +153,7 @@ pub fn default_par_fold<T:Clone>(ctxt: T) -> Fold<T> {
pub fn default_seq_fold_doc<T>(fold: &Fold<T>, doc: doc::Doc) -> doc::Doc {
doc::Doc {
pages: do doc.pages.iter().transform |page| {
pages: do doc.pages.iter().map |page| {
match (*page).clone() {
doc::CratePage(doc) => {
doc::CratePage((fold.fold_crate)(fold, doc))
@ -189,7 +189,7 @@ pub fn default_any_fold_mod<T:Clone>(
) -> doc::ModDoc {
doc::ModDoc {
item: (fold.fold_item)(fold, doc.item.clone()),
items: doc.items.iter().transform(|ItemTag| {
items: doc.items.iter().map(|ItemTag| {
fold_ItemTag(fold, (*ItemTag).clone())
}).collect(),
.. doc
@ -202,7 +202,7 @@ pub fn default_seq_fold_mod<T>(
) -> doc::ModDoc {
doc::ModDoc {
item: (fold.fold_item)(fold, doc.item.clone()),
items: doc.items.iter().transform(|ItemTag| {
items: doc.items.iter().map(|ItemTag| {
fold_ItemTag(fold, (*ItemTag).clone())
}).collect(),
.. doc
@ -215,7 +215,7 @@ pub fn default_par_fold_mod<T:Clone>(
) -> doc::ModDoc {
doc::ModDoc {
item: (fold.fold_item)(fold, doc.item.clone()),
items: doc.items.iter().transform(|ItemTag| {
items: doc.items.iter().map(|ItemTag| {
fold_ItemTag(fold, (*ItemTag).clone())
}).collect(),
.. doc
@ -228,7 +228,7 @@ pub fn default_any_fold_nmod<T:Clone>(
) -> doc::NmodDoc {
doc::NmodDoc {
item: (fold.fold_item)(fold, doc.item.clone()),
fns: doc.fns.iter().transform(|FnDoc| {
fns: doc.fns.iter().map(|FnDoc| {
(fold.fold_fn)(fold, (*FnDoc).clone())
}).collect(),
.. doc
@ -241,7 +241,7 @@ pub fn default_seq_fold_nmod<T>(
) -> doc::NmodDoc {
doc::NmodDoc {
item: (fold.fold_item)(fold, doc.item.clone()),
fns: doc.fns.iter().transform(|FnDoc| {
fns: doc.fns.iter().map(|FnDoc| {
(fold.fold_fn)(fold, (*FnDoc).clone())
}).collect(),
.. doc
@ -254,7 +254,7 @@ pub fn default_par_fold_nmod<T:Clone>(
) -> doc::NmodDoc {
doc::NmodDoc {
item: (fold.fold_item)(fold, doc.item.clone()),
fns: doc.fns.iter().transform(|FnDoc| {
fns: doc.fns.iter().map(|FnDoc| {
(fold.fold_fn)(fold, (*FnDoc).clone())
}).collect(),
.. doc

View File

@ -123,7 +123,7 @@ fn strip_mod(doc: doc::ModDoc) -> doc::ModDoc {
doc::ModTag(_) | doc::NmodTag(_) => false,
_ => true
}
}.transform(|x| (*x).clone()).collect::<~[doc::ItemTag]>(),
}.map(|x| (*x).clone()).collect::<~[doc::ItemTag]>(),
.. doc.clone()
}
}

View File

@ -43,7 +43,7 @@ fn fold_mod(
doc::ModDoc {
items: do doc.items.iter().filter |item_tag| {
!is_hidden(fold.ctxt.clone(), item_tag.item())
}.transform(|x| (*x).clone()).collect(),
}.map(|x| (*x).clone()).collect(),
.. doc
}
}

View File

@ -81,7 +81,7 @@ fn strip_priv_methods(
item_vis: ast::visibility
) -> doc::ImplDoc {
let methods = do doc.methods.iter().filter |method| {
let ast_method = do methods.iter().find_ |m| {
let ast_method = do methods.iter().find |m| {
extract::to_str(m.ident) == method.name
};
assert!(ast_method.is_some());
@ -91,7 +91,7 @@ fn strip_priv_methods(
ast::private => false,
ast::inherited => item_vis == ast::public
}
}.transform(|x| (*x).clone()).collect();
}.map(|x| (*x).clone()).collect();
doc::ImplDoc {
methods: methods,
@ -126,7 +126,7 @@ fn fold_mod(
is_visible(fold.ctxt.clone(), item_tag.item())
}
}
}).transform(|x| (*x).clone()).collect(),
}).map(|x| (*x).clone()).collect(),
.. doc
}
}

View File

@ -124,7 +124,7 @@ fn fold_enum(
let srv = fold.ctxt.clone();
doc::EnumDoc {
variants: do doc.variants.iter().transform |variant| {
variants: do doc.variants.iter().map |variant| {
let sig = {
let variant = (*variant).clone();
do astsrv::exec(srv.clone()) |ctxt| {
@ -133,7 +133,7 @@ fn fold_enum(
node: ast::item_enum(ref enum_definition, _), _
}, _) => {
let ast_variant =
(*do enum_definition.variants.iter().find_ |v| {
(*do enum_definition.variants.iter().find |v| {
to_str(v.node.name) == variant.name
}.unwrap()).clone();
@ -169,7 +169,7 @@ fn merge_methods(
item_id: doc::AstId,
docs: ~[doc::MethodDoc]
) -> ~[doc::MethodDoc] {
do docs.iter().transform |doc| {
do docs.iter().map |doc| {
doc::MethodDoc {
sig: get_method_sig(srv.clone(), item_id, doc.name.clone()),
.. (*doc).clone()
@ -187,7 +187,7 @@ fn get_method_sig(
ast_map::node_item(@ast::item {
node: ast::item_trait(_, _, ref methods), _
}, _) => {
match methods.iter().find_(|&method| {
match methods.iter().find(|&method| {
match (*method).clone() {
ast::required(ty_m) => to_str(ty_m.ident) == method_name,
ast::provided(m) => to_str(m.ident) == method_name,
@ -223,7 +223,7 @@ fn get_method_sig(
ast_map::node_item(@ast::item {
node: ast::item_impl(_, _, _, ref methods), _
}, _) => {
match methods.iter().find_(|method| {
match methods.iter().find(|method| {
to_str(method.ident) == method_name
}) {
Some(method) => {

View File

@ -167,7 +167,7 @@ impl Program {
}
let newvars = util::replace(&mut self.newvars, HashMap::new());
for (name, var) in newvars.consume() {
for (name, var) in newvars.move_iter() {
self.local_vars.insert(name, var);
}
@ -233,7 +233,7 @@ impl Program {
pub fn consume_cache(&mut self) {
let map = local_data::pop(tls_key).expect("tls is empty");
let cons_map = util::replace(map, HashMap::new());
for (name, value) in cons_map.consume() {
for (name, value) in cons_map.move_iter() {
match self.local_vars.find_mut(&name) {
Some(v) => { v.data = (*value).clone(); }
None => { fail!("unknown variable %s", name) }
@ -345,7 +345,7 @@ impl Program {
// I'm not an @ pointer, so this has to be done outside.
let cons_newvars = util::replace(newvars, HashMap::new());
for (k, v) in cons_newvars.consume() {
for (k, v) in cons_newvars.move_iter() {
self.newvars.insert(k, v);
}

View File

@ -315,7 +315,7 @@ fn compile_crate(src_filename: ~str, binary: ~str) -> Option<bool> {
// file, skip compilation and return None.
let mut should_compile = true;
let dir = os::list_dir_path(&Path(outputs.out_filename.dirname()));
let maybe_lib_path = do dir.iter().find_ |file| {
let maybe_lib_path = do dir.iter().find |file| {
// The actual file's name has a hash value and version
// number in it which is unknown at this time, so looking
// for a file that matches out_filename won't work,
@ -453,7 +453,7 @@ pub fn run_line(repl: &mut Repl, input: @io::Reader, out: @io::Writer, line: ~st
if line.starts_with(":") {
// drop the : and the \n (one byte each)
let full = line.slice(1, line.len());
let split: ~[~str] = full.word_iter().transform(|s| s.to_owned()).collect();
let split: ~[~str] = full.word_iter().map(|s| s.to_owned()).collect();
let len = split.len();
if len > 0 {

View File

@ -55,7 +55,7 @@ impl PkgSrc {
let dir;
let dirs = pkgid_src_in_workspace(&self.id, &self.root);
debug!("Checking dirs: %?", dirs);
let path = dirs.iter().find_(|&d| os::path_exists(d));
let path = dirs.iter().find(|&d| os::path_exists(d));
match path {
Some(d) => dir = (*d).clone(),
None => dir = match self.fetch_git() {

View File

@ -158,7 +158,7 @@ impl<'self> PkgScript<'self> {
let output = run::process_output(exe.to_str(), [root.to_str(), ~"configs"]);
// Run the configs() function to get the configs
let cfgs = str::from_bytes_slice(output.output).word_iter()
.transform(|w| w.to_owned()).collect();
.map(|w| w.to_owned()).collect();
(cfgs, output.status)
}
}

View File

@ -200,7 +200,7 @@ fn try_parsing_version(s: &str) -> Option<Version> {
/// Just an approximation
fn is_url_like(p: &Path) -> bool {
let str = p.to_str();
str.split_iter('/').len_() > 2
str.split_iter('/').len() > 2
}
/// If s is of the form foo#bar, where bar is a valid version
@ -215,7 +215,7 @@ pub fn split_version_general<'a>(s: &'a str, sep: char) -> Option<(&'a str, Vers
for st in s.split_iter(sep) {
debug!("whole = %s part = %s", s, st);
}
if s.split_iter(sep).len_() > 2 {
if s.split_iter(sep).len() > 2 {
return None;
}
match s.rfind(sep) {

View File

@ -37,7 +37,7 @@ pub fn each_pkg_parent_workspace(pkgid: &PkgId, action: &fn(&Path) -> bool) -> b
}
pub fn pkg_parent_workspaces(pkgid: &PkgId) -> ~[Path] {
rust_path().consume_iter()
rust_path().move_iter()
.filter(|ws| workspace_contains_package_id(pkgid, ws))
.collect()
}

View File

@ -141,11 +141,11 @@ pub fn from_elem<T:Clone>(n_elts: uint, t: T) -> @[T] {
* Creates and initializes an immutable managed vector by moving all the
* elements from an owned vector.
*/
pub fn to_managed_consume<T>(v: ~[T]) -> @[T] {
pub fn to_managed_move<T>(v: ~[T]) -> @[T] {
let mut av = @[];
unsafe {
raw::reserve(&mut av, v.len());
for x in v.consume_iter() {
for x in v.move_iter() {
raw::push(&mut av, x);
}
av
@ -331,12 +331,12 @@ mod test {
}
#[test]
fn test_to_managed_consume() {
assert_eq!(to_managed_consume::<int>(~[]), @[]);
assert_eq!(to_managed_consume(~[true]), @[true]);
assert_eq!(to_managed_consume(~[1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]);
assert_eq!(to_managed_consume(~[~"abc", ~"123"]), @[~"abc", ~"123"]);
assert_eq!(to_managed_consume(~[~[42]]), @[~[42]]);
fn test_to_managed_move() {
assert_eq!(to_managed_move::<int>(~[]), @[]);
assert_eq!(to_managed_move(~[true]), @[true]);
assert_eq!(to_managed_move(~[1, 2, 3, 4, 5]), @[1, 2, 3, 4, 5]);
assert_eq!(to_managed_move(~[~"abc", ~"123"]), @[~"abc", ~"123"]);
assert_eq!(to_managed_move(~[~[42]]), @[~[42]]);
}
#[test]

View File

@ -150,7 +150,7 @@ pub fn rights<L, R: Clone>(eithers: &[Either<L, R>]) -> ~[R] {
pub fn partition<L, R>(eithers: ~[Either<L, R>]) -> (~[L], ~[R]) {
let mut lefts: ~[L] = ~[];
let mut rights: ~[R] = ~[];
for elt in eithers.consume_iter() {
for elt in eithers.move_iter() {
match elt {
Left(l) => lefts.push(l),
Right(r) => rights.push(r)

View File

@ -19,7 +19,7 @@ use container::{Container, Mutable, Map, MutableMap, Set, MutableSet};
use clone::Clone;
use cmp::{Eq, Equiv};
use hash::Hash;
use iterator::{Iterator, IteratorUtil, FromIterator, Extendable};
use iterator::{Iterator, FromIterator, Extendable};
use iterator::{FilterMap, Chain, Repeat, Zip};
use num;
use option::{None, Option, Some};
@ -159,8 +159,8 @@ impl<K:Hash + Eq,V> HashMap<K, V> {
vec::from_fn(new_capacity, |_| None));
self.size = 0;
// consume_rev_iter is more efficient
for bucket in old_buckets.consume_rev_iter() {
// move_rev_iter is more efficient
for bucket in old_buckets.move_rev_iter() {
self.insert_opt_bucket(bucket);
}
}
@ -470,9 +470,9 @@ impl<K: Hash + Eq, V> HashMap<K, V> {
/// Creates a consuming iterator, that is, one that moves each key-value
/// pair out of the map in arbitrary order. The map cannot be used after
/// calling this.
pub fn consume(self) -> HashMapConsumeIterator<K, V> {
// `consume_rev_iter` is more efficient than `consume_iter` for vectors
HashMapConsumeIterator {iter: self.buckets.consume_rev_iter()}
pub fn move_iter(self) -> HashMapMoveIterator<K, V> {
// `move_rev_iter` is more efficient than `move_iter` for vectors
HashMapMoveIterator {iter: self.buckets.move_rev_iter()}
}
}
@ -524,9 +524,9 @@ pub struct HashMapMutIterator<'self, K, V> {
priv iter: vec::VecMutIterator<'self, Option<Bucket<K, V>>>,
}
/// HashMap consume iterator
pub struct HashMapConsumeIterator<K, V> {
priv iter: vec::ConsumeRevIterator<Option<Bucket<K, V>>>,
/// HashMap move iterator
pub struct HashMapMoveIterator<K, V> {
priv iter: vec::MoveRevIterator<Option<Bucket<K, V>>>,
}
/// HashSet iterator
@ -535,9 +535,9 @@ pub struct HashSetIterator<'self, K> {
priv iter: vec::VecIterator<'self, Option<Bucket<K, ()>>>,
}
/// HashSet consume iterator
pub struct HashSetConsumeIterator<K> {
priv iter: vec::ConsumeRevIterator<Option<Bucket<K, ()>>>,
/// HashSet move iterator
pub struct HashSetMoveIterator<K> {
priv iter: vec::MoveRevIterator<Option<Bucket<K, ()>>>,
}
impl<'self, K, V> Iterator<(&'self K, &'self V)> for HashMapIterator<'self, K, V> {
@ -566,7 +566,7 @@ impl<'self, K, V> Iterator<(&'self K, &'self mut V)> for HashMapMutIterator<'sel
}
}
impl<K, V> Iterator<(K, V)> for HashMapConsumeIterator<K, V> {
impl<K, V> Iterator<(K, V)> for HashMapMoveIterator<K, V> {
#[inline]
fn next(&mut self) -> Option<(K, V)> {
for elt in self.iter {
@ -592,7 +592,7 @@ impl<'self, K> Iterator<&'self K> for HashSetIterator<'self, K> {
}
}
impl<K> Iterator<K> for HashSetConsumeIterator<K> {
impl<K> Iterator<K> for HashSetMoveIterator<K> {
#[inline]
fn next(&mut self) -> Option<K> {
for elt in self.iter {
@ -707,9 +707,9 @@ impl<T:Hash + Eq> HashSet<T> {
/// Creates a consuming iterator, that is, one that moves each value out
/// of the set in arbitrary order. The set cannot be used after calling
/// this.
pub fn consume(self) -> HashSetConsumeIterator<T> {
// `consume_rev_iter` is more efficient than `consume_iter` for vectors
HashSetConsumeIterator {iter: self.map.buckets.consume_rev_iter()}
pub fn move_iter(self) -> HashSetMoveIterator<T> {
// `move_rev_iter` is more efficient than `move_iter` for vectors
HashSetMoveIterator {iter: self.map.buckets.move_rev_iter()}
}
/// Visit the values representing the difference
@ -724,7 +724,7 @@ impl<T:Hash + Eq> HashSet<T> {
/// Visit the values representing the symmetric difference
pub fn symmetric_difference_iter<'a>(&'a self, other: &'a HashSet<T>)
-> Chain<SetAlgebraIter<'a, T>, SetAlgebraIter<'a, T>> {
self.difference_iter(other).chain_(other.difference_iter(self))
self.difference_iter(other).chain(other.difference_iter(self))
}
/// Visit the values representing the intersection
@ -740,7 +740,7 @@ impl<T:Hash + Eq> HashSet<T> {
/// Visit the values representing the union
pub fn union_iter<'a>(&'a self, other: &'a HashSet<T>)
-> Chain<HashSetIterator<'a, T>, SetAlgebraIter<'a, T>> {
self.iter().chain_(other.difference_iter(self))
self.iter().chain(other.difference_iter(self))
}
}
@ -881,7 +881,7 @@ mod test_map {
}
#[test]
fn test_consume() {
fn test_move_iter() {
let hm = {
let mut hm = HashMap::new();
@ -891,7 +891,7 @@ mod test_map {
hm
};
let v = hm.consume().collect::<~[(char, int)]>();
let v = hm.move_iter().collect::<~[(char, int)]>();
assert!([('a', 1), ('b', 2)] == v || [('b', 2), ('a', 1)] == v);
}
@ -977,7 +977,7 @@ mod test_map {
fn test_from_iter() {
let xs = ~[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: HashMap<int, int> = xs.iter().transform(|&x| x).collect();
let map: HashMap<int, int> = xs.iter().map(|&x| x).collect();
for &(k, v) in xs.iter() {
assert_eq!(map.find(&k), Some(&v));
@ -1169,7 +1169,7 @@ mod test_set {
fn test_from_iter() {
let xs = ~[1, 2, 3, 4, 5, 6, 7, 8, 9];
let set: HashSet<int> = xs.iter().transform(|&x| x).collect();
let set: HashSet<int> = xs.iter().map(|&x| x).collect();
for x in xs.iter() {
assert!(set.contains(x));
@ -1177,7 +1177,7 @@ mod test_set {
}
#[test]
fn test_consume() {
fn test_move_iter() {
let hs = {
let mut hs = HashSet::new();
@ -1187,7 +1187,7 @@ mod test_set {
hs
};
let v = hs.consume().collect::<~[char]>();
let v = hs.move_iter().collect::<~[char]>();
assert!(['a', 'b'] == v || ['b', 'a'] == v);
}
}

File diff suppressed because it is too large Load Diff

View File

@ -134,7 +134,7 @@ impl<T> Option<T> {
/// Return a consuming iterator over the possibly contained value
#[inline]
pub fn consume(self) -> OptionIterator<T> {
pub fn move_iter(self) -> OptionIterator<T> {
OptionIterator{opt: self}
}

View File

@ -32,7 +32,7 @@ use c_str::ToCStr;
use clone::Clone;
use container::Container;
use io;
use iterator::{IteratorUtil, range};
use iterator::range;
use libc;
use libc::{c_char, c_void, c_int, size_t};
use libc::FILE;
@ -765,7 +765,7 @@ pub fn list_dir(p: &Path) -> ~[~str] {
strings
}
}
do get_list(p).consume_iter().filter |filename| {
do get_list(p).move_iter().filter |filename| {
"." != *filename && ".." != *filename
}.collect()
}

View File

@ -21,7 +21,7 @@ use c_str;
use clone::Clone;
use cmp::Eq;
use container::Container;
use iterator::{Iterator, IteratorUtil, range};
use iterator::{Iterator, range};
use libc;
use num;
use option::{None, Option, Some};
@ -961,7 +961,7 @@ impl GenericPath for WindowsPath {
match self.filestem() {
Some(stem) => {
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary copy.
// to_ascii_move and to_str_move to not do a unnecessary copy.
match stem.to_ascii().to_lower().to_str_ascii() {
~"con" | ~"aux" | ~"com1" | ~"com2" | ~"com3" | ~"com4" |
~"lpt1" | ~"lpt2" | ~"lpt3" | ~"prn" | ~"nul" => true,
@ -1020,7 +1020,7 @@ impl GenericPath for WindowsPath {
None => None,
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary copy.
// to_ascii_move and to_str_move to not do a unnecessary copy.
Some(ref device) => Some(device.to_ascii().to_upper().to_str_ascii())
},
is_absolute: self.is_absolute,

View File

@ -51,7 +51,7 @@ pub use container::{Container, Mutable, Map, MutableMap, Set, MutableSet};
pub use hash::Hash;
pub use iter::Times;
pub use iterator::Extendable;
pub use iterator::{Iterator, IteratorUtil, DoubleEndedIterator, DoubleEndedIteratorUtil};
pub use iterator::{Iterator, DoubleEndedIterator};
pub use iterator::{ClonableIterator, OrdIterator};
pub use num::{Num, NumCast};
pub use num::{Orderable, Signed, Unsigned, Round};

View File

@ -94,7 +94,7 @@ impl<T, E: ToStr> Result<T, E> {
match *self {
Ok(ref t) => Some(t),
Err(*) => None,
}.consume()
}.move_iter()
}
/// Call a method based on a previous result
@ -108,7 +108,7 @@ impl<T, E: ToStr> Result<T, E> {
match *self {
Ok(*) => None,
Err(ref t) => Some(t),
}.consume()
}.move_iter()
}
/// Unwraps a result, yielding the content of an `Ok`.

View File

@ -239,7 +239,7 @@ impl BlockedTask {
};
// Even if the task was unkillable before, we use 'Killable' because
// multiple pipes will have handles. It does not really mean killable.
handles.consume_iter().transform(|x| Killable(x)).collect()
handles.move_iter().map(|x| Killable(x)).collect()
}
// This assertion has two flavours because the wake involves an atomic op.

View File

@ -63,7 +63,7 @@ Several modules in `core` are clients of `rt`:
use cell::Cell;
use clone::Clone;
use container::Container;
use iterator::{Iterator, IteratorUtil, range};
use iterator::{Iterator, range};
use option::{Some, None};
use ptr::RawPtr;
use rt::local::Local;
@ -391,7 +391,7 @@ fn run_(main: ~fn(), use_main_sched: bool) -> int {
rtdebug!("waiting for threads");
// Wait for schedulers
for thread in threads.consume_iter() {
for thread in threads.move_iter() {
thread.join();
}

View File

@ -54,7 +54,7 @@ pub fn select<A: Select>(ports: &mut [A]) -> uint {
let task_handles = task.make_selectable(ports.len());
for (index, (port, task_handle)) in
ports.mut_iter().zip(task_handles.consume_iter()).enumerate() {
ports.mut_iter().zip(task_handles.move_iter()).enumerate() {
// If one of the ports has data by now, it will wake the handle.
if port.block_on(sched, task_handle) {
ready_index = index;
@ -128,7 +128,7 @@ mod test {
let (ports, chans) = unzip(from_fn(num_ports, |_| oneshot::<()>()));
let mut dead_chans = ~[];
let mut ports = ports;
for (i, chan) in chans.consume_iter().enumerate() {
for (i, chan) in chans.move_iter().enumerate() {
if send_on_chans.contains(&i) {
chan.send(());
} else {
@ -145,7 +145,7 @@ mod test {
let (ports, chans) = unzip(from_fn(num_ports, |_| stream::<()>()));
let mut dead_chans = ~[];
let mut ports = ports;
for (i, chan) in chans.consume_iter().enumerate() {
for (i, chan) in chans.move_iter().enumerate() {
if send_on_chans.contains(&i) {
chan.send(());
} else {

View File

@ -232,7 +232,7 @@ pub fn run_in_mt_newsched_task(f: ~fn()) {
}
// Wait for schedulers
for thread in threads.consume_iter() {
for thread in threads.move_iter() {
thread.join();
}
}

View File

@ -10,7 +10,6 @@
use container::Container;
use from_str::FromStr;
use iterator::IteratorUtil;
use libc;
use option::{Some, None};
use os;

View File

@ -23,9 +23,9 @@ use char::Char;
use clone::Clone;
use container::{Container, Mutable};
use iter::Times;
use iterator::{Iterator, FromIterator, Extendable, IteratorUtil};
use iterator::{Iterator, FromIterator, Extendable};
use iterator::{Filter, AdditiveIterator, Map};
use iterator::{Invert, DoubleEndedIterator, DoubleEndedIteratorUtil};
use iterator::{Invert, DoubleEndedIterator};
use libc;
use num::Zero;
use option::{None, Option, Some};
@ -59,7 +59,7 @@ pub fn from_bytes(vv: &[u8]) -> ~str {
use str::not_utf8::cond;
if !is_utf8(vv) {
let first_bad_byte = *vv.iter().find_(|&b| !is_utf8([*b])).unwrap();
let first_bad_byte = *vv.iter().find(|&b| !is_utf8([*b])).unwrap();
cond.raise(fmt!("from_bytes: input is not UTF-8; first bad byte is %u",
first_bad_byte as uint))
} else {
@ -76,7 +76,7 @@ pub fn from_bytes_owned(vv: ~[u8]) -> ~str {
use str::not_utf8::cond;
if !is_utf8(vv) {
let first_bad_byte = *vv.iter().find_(|&b| !is_utf8([*b])).unwrap();
let first_bad_byte = *vv.iter().find(|&b| !is_utf8([*b])).unwrap();
cond.raise(fmt!("from_bytes: input is not UTF-8; first bad byte is %u",
first_bad_byte as uint))
} else {
@ -185,7 +185,7 @@ impl<'self, S: Str> StrVector for &'self [S] {
pub fn concat(&self) -> ~str {
if self.is_empty() { return ~""; }
let len = self.iter().transform(|s| s.as_slice().len()).sum();
let len = self.iter().map(|s| s.as_slice().len()).sum();
let mut s = with_capacity(len);
@ -210,7 +210,7 @@ impl<'self, S: Str> StrVector for &'self [S] {
pub fn concat(&self) -> ~str {
if self.is_empty() { return ~""; }
let len = self.iter().transform(|s| s.as_slice().len()).sum();
let len = self.iter().map(|s| s.as_slice().len()).sum();
let mut s = with_capacity(len);
@ -239,7 +239,7 @@ impl<'self, S: Str> StrVector for &'self [S] {
// this is wrong without the guarantee that `self` is non-empty
let len = sep.len() * (self.len() - 1)
+ self.iter().transform(|s| s.as_slice().len()).sum();
+ self.iter().map(|s| s.as_slice().len()).sum();
let mut s = ~"";
let mut first = true;
@ -280,7 +280,7 @@ impl<'self, S: Str> StrVector for &'self [S] {
// this is wrong without the guarantee that `self` is non-empty
let len = sep.len() * (self.len() - 1)
+ self.iter().transform(|s| s.as_slice().len()).sum();
+ self.iter().map(|s| s.as_slice().len()).sum();
let mut s = ~"";
let mut first = true;
@ -1051,7 +1051,7 @@ pub mod raw {
/// If end is greater than the length of the string.
#[cfg(not(stage0))]
#[inline]
pub unsafe fn slice_bytes(s: &str, begin: uint, end: uint) -> &str {
pub unsafe fn slice_bytes<'a>(s: &'a str, begin: uint, end: uint) -> &'a str {
do s.as_imm_buf |sbuf, n| {
assert!((begin <= end));
assert!((end <= n));
@ -1445,7 +1445,7 @@ impl<'self> StrSlice<'self> for &'self str {
/// ~~~
#[inline]
fn iter(&self) -> CharIterator<'self> {
self.char_offset_iter().transform(|(_, c)| c)
self.char_offset_iter().map(|(_, c)| c)
}
/// An iterator over the characters of `self`, in reverse order.
@ -1457,7 +1457,7 @@ impl<'self> StrSlice<'self> for &'self str {
/// An iterator over the bytes of `self`
#[inline]
fn byte_iter(&self) -> ByteIterator<'self> {
self.as_bytes().iter().transform(|&b| b)
self.as_bytes().iter().map(|&b| b)
}
/// An iterator over the bytes of `self`, in reverse order
@ -1565,7 +1565,7 @@ impl<'self> StrSlice<'self> for &'self str {
/// An iterator over the lines of a string, separated by either
/// `\n` or (`\r\n`).
fn any_line_iter(&self) -> AnyLineIterator<'self> {
do self.line_iter().transform |line| {
do self.line_iter().map |line| {
let l = line.len();
if l > 0 && line[l - 1] == '\r' as u8 { line.slice(0, l - 1) }
else { line }
@ -1593,7 +1593,7 @@ impl<'self> StrSlice<'self> for &'self str {
/// Returns the number of characters that a string holds
#[inline]
fn char_len(&self) -> uint { self.iter().len_() }
fn char_len(&self) -> uint { self.iter().len() }
/// Returns a slice of the given string from the byte range
/// [`begin`..`end`)
@ -2546,7 +2546,6 @@ impl Zero for @str {
#[cfg(test)]
mod tests {
use iterator::IteratorUtil;
use container::Container;
use option::Some;
use libc::c_char;
@ -3687,7 +3686,7 @@ mod tests {
#[test]
fn test_str_container() {
fn sum_len<S: Container>(v: &[S]) -> uint {
v.iter().transform(|x| x.len()).sum()
v.iter().map(|x| x.len()).sum()
}
let s = ~"01234";

View File

@ -17,7 +17,7 @@ use str::OwnedStr;
use container::Container;
use cast;
use ptr;
use iterator::{Iterator, IteratorUtil};
use iterator::Iterator;
use vec::{CopyableVector, ImmutableVector};
#[cfg(stage0)]
use vec::OwnedVector;

View File

@ -79,7 +79,7 @@ use cast;
use cell::Cell;
use container::MutableMap;
use comm::{Chan, GenericChan, oneshot};
use hashmap::{HashSet, HashSetConsumeIterator};
use hashmap::{HashSet, HashSetMoveIterator};
use local_data;
use task::{Failure, SingleThreaded};
use task::{Success, TaskOpts, TaskResult};
@ -141,8 +141,8 @@ impl TaskSet {
assert!(was_present);
}
#[inline]
fn consume(self) -> HashSetConsumeIterator<TaskHandle> {
(*self).consume()
fn move_iter(self) -> HashSetMoveIterator<TaskHandle> {
(*self).move_iter()
}
}
@ -460,13 +460,13 @@ fn kill_taskgroup(state: TaskGroupInner, me: &TaskHandle, is_main: bool) {
if newstate.is_some() {
let TaskGroupData { members: members, descendants: descendants } =
newstate.unwrap();
for sibling in members.consume() {
for sibling in members.move_iter() {
// Skip self - killing ourself won't do much good.
if &sibling != me {
RuntimeGlue::kill_task(sibling);
}
}
for child in descendants.consume() {
for child in descendants.move_iter() {
assert!(&child != me);
RuntimeGlue::kill_task(child);
}

View File

@ -17,7 +17,7 @@ The `ToBytes` and `IterBytes` traits
use cast;
use io;
use io::Writer;
use iterator::IteratorUtil;
use iterator::Iterator;
use option::{None, Option, Some};
use str::StrSlice;
use vec::ImmutableVector;

View File

@ -11,7 +11,7 @@
//! An ordered map and set for integer keys implemented as a radix trie
use prelude::*;
use iterator::{IteratorUtil, FromIterator, Extendable};
use iterator::{FromIterator, Extendable};
use uint;
use util::{swap, replace};
use vec;
@ -617,7 +617,7 @@ mod test_map {
fn test_from_iter() {
let xs = ~[(1u, 1i), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)];
let map: TrieMap<int> = xs.iter().transform(|&x| x).collect();
let map: TrieMap<int> = xs.iter().map(|&x| x).collect();
for &(k, v) in xs.iter() {
assert_eq!(map.find(&k), Some(&v));
@ -680,7 +680,7 @@ mod test_set {
fn test_from_iter() {
let xs = ~[9u, 8, 7, 6, 5, 4, 3, 2, 1];
let set: TrieSet = xs.iter().transform(|&x| x).collect();
let set: TrieSet = xs.iter().map(|&x| x).collect();
for x in xs.iter() {
assert!(set.contains(x));

View File

@ -15,7 +15,7 @@
use clone::Clone;
use vec;
use vec::ImmutableVector;
use iterator::IteratorUtil;
use iterator::Iterator;
pub use self::inner::*;
@ -102,7 +102,7 @@ impl<'self,
fn map<C>(&self, f: &fn(a: &A, b: &B) -> C) -> ~[C] {
match *self {
(ref a, ref b) => {
a.iter().zip(b.iter()).transform(|(aa, bb)| f(aa, bb)).collect()
a.iter().zip(b.iter()).map(|(aa, bb)| f(aa, bb)).collect()
}
}
}
@ -122,7 +122,7 @@ impl<A:Clone, B:Clone> ExtendedTupleOps<A,B> for (~[A], ~[B]) {
fn map<C>(&self, f: &fn(a: &A, b: &B) -> C) -> ~[C] {
match *self {
(ref a, ref b) => {
a.iter().zip(b.iter()).transform(|(aa, bb)| f(aa, bb)).collect()
a.iter().zip(b.iter()).map(|(aa, bb)| f(aa, bb)).collect()
}
}
}

View File

@ -526,7 +526,7 @@ pub mod rt {
TyHexLower => uint_to_str_prec(u, 16, prec),
// FIXME: #4318 Instead of to_ascii and to_str_ascii, could use
// to_ascii_consume and to_str_consume to not do a unnecessary copy.
// to_ascii_move and to_str_move to not do a unnecessary copy.
TyHexUpper => {
let s = uint_to_str_prec(u, 16, prec);
s.to_ascii().to_upper().to_str_ascii()

View File

@ -382,7 +382,7 @@ pub fn unzip_slice<T:Clone,U:Clone>(v: &[(T, U)]) -> (~[T], ~[U]) {
pub fn unzip<T,U>(v: ~[(T, U)]) -> (~[T], ~[U]) {
let mut ts = ~[];
let mut us = ~[];
for p in v.consume_iter() {
for p in v.move_iter() {
let (t, u) = p;
ts.push(t);
us.push(u);
@ -1068,10 +1068,10 @@ impl<'self,T> ImmutableVector<'self, T> for &'self [T] {
}
/// Deprecated, use iterators where possible
/// (`self.iter().transform(f)`). Apply a function to each element
/// (`self.iter().map(f)`). Apply a function to each element
/// of a vector and return the results.
fn map<U>(&self, f: &fn(t: &T) -> U) -> ~[U] {
self.iter().transform(f).collect()
self.iter().map(f).collect()
}
/**
@ -1174,8 +1174,8 @@ impl<'self,T:Clone> ImmutableCopyableVector<T> for &'self [T] {
#[allow(missing_doc)]
pub trait OwnedVector<T> {
fn consume_iter(self) -> ConsumeIterator<T>;
fn consume_rev_iter(self) -> ConsumeRevIterator<T>;
fn move_iter(self) -> MoveIterator<T>;
fn move_rev_iter(self) -> MoveRevIterator<T>;
fn reserve(&mut self, n: uint);
fn reserve_at_least(&mut self, n: uint);
@ -1204,26 +1204,26 @@ impl<T> OwnedVector<T> for ~[T] {
/// value out of the vector (from start to end). The vector cannot
/// be used after calling this.
///
/// Note that this performs O(n) swaps, and so `consume_rev_iter`
/// Note that this performs O(n) swaps, and so `move_rev_iter`
/// (which just calls `pop` repeatedly) is more efficient.
///
/// # Examples
///
/// ~~~ {.rust}
/// let v = ~[~"a", ~"b"];
/// for s in v.consume_iter() {
/// for s in v.move_iter() {
/// // s has type ~str, not &~str
/// println(s);
/// }
/// ~~~
fn consume_iter(self) -> ConsumeIterator<T> {
ConsumeIterator { v: self, idx: 0 }
fn move_iter(self) -> MoveIterator<T> {
MoveIterator { v: self, idx: 0 }
}
/// Creates a consuming iterator that moves out of the vector in
/// reverse order. Also see `consume_iter`, however note that this
/// reverse order. Also see `move_iter`, however note that this
/// is more efficient.
fn consume_rev_iter(self) -> ConsumeRevIterator<T> {
ConsumeRevIterator { v: self }
fn move_rev_iter(self) -> MoveRevIterator<T> {
MoveRevIterator { v: self }
}
/**
@ -1540,7 +1540,7 @@ impl<T> OwnedVector<T> for ~[T] {
let mut lefts = ~[];
let mut rights = ~[];
for elt in self.consume_iter() {
for elt in self.move_iter() {
if f(&elt) {
lefts.push(elt);
} else {
@ -2148,7 +2148,7 @@ pub mod bytes {
impl<A:Clone> Clone for ~[A] {
#[inline]
fn clone(&self) -> ~[A] {
self.iter().transform(|item| item.clone()).collect()
self.iter().map(|item| item.clone()).collect()
}
}
@ -2281,12 +2281,12 @@ pub type MutRevIterator<'self, T> = Invert<VecMutIterator<'self, T>>;
/// An iterator that moves out of a vector.
#[deriving(Clone)]
pub struct ConsumeIterator<T> {
pub struct MoveIterator<T> {
priv v: ~[T],
priv idx: uint,
}
impl<T> Iterator<T> for ConsumeIterator<T> {
impl<T> Iterator<T> for MoveIterator<T> {
fn next(&mut self) -> Option<T> {
// this is peculiar, but is required for safety with respect
// to dtors. It traverses the first half of the vec, and
@ -2308,11 +2308,11 @@ impl<T> Iterator<T> for ConsumeIterator<T> {
/// An iterator that moves out of a vector in reverse order.
#[deriving(Clone)]
pub struct ConsumeRevIterator<T> {
pub struct MoveRevIterator<T> {
priv v: ~[T]
}
impl<T> Iterator<T> for ConsumeRevIterator<T> {
impl<T> Iterator<T> for MoveRevIterator<T> {
fn next(&mut self) -> Option<T> {
self.v.pop_opt()
}
@ -3323,17 +3323,17 @@ mod tests {
}
#[test]
fn test_consume_iterator() {
fn test_move_iterator() {
use iterator::*;
let xs = ~[1u,2,3,4,5];
assert_eq!(xs.consume_iter().fold(0, |a: uint, b: uint| 10*a + b), 12345);
assert_eq!(xs.move_iter().fold(0, |a: uint, b: uint| 10*a + b), 12345);
}
#[test]
fn test_consume_rev_iterator() {
fn test_move_rev_iterator() {
use iterator::*;
let xs = ~[1u,2,3,4,5];
assert_eq!(xs.consume_rev_iter().fold(0, |a: uint, b: uint| 10*a + b), 54321);
assert_eq!(xs.move_rev_iter().fold(0, |a: uint, b: uint| 10*a + b), 54321);
}
#[test]
@ -3608,7 +3608,7 @@ mod tests {
}
assert_eq!(cnt, 8);
for f in v.consume_iter() {
for f in v.move_iter() {
assert!(f == Foo);
cnt += 1;
}

View File

@ -247,7 +247,7 @@ pub fn unguarded_pat(a: &arm) -> Option<~[@pat]> {
}
pub fn public_methods(ms: ~[@method]) -> ~[@method] {
do ms.consume_iter().filter |m| {
do ms.move_iter().filter |m| {
match m.vis {
public => true,
_ => false

View File

@ -186,13 +186,13 @@ pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
-> Option<@str> {
attrs.iter()
.find_(|at| name == at.name())
.find(|at| name == at.name())
.chain(|at| at.value_str())
}
pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str)
-> Option<@str> {
items.rev_iter().find_(|mi| name == mi.name()).chain(|i| i.value_str())
items.rev_iter().find(|mi| name == mi.name()).chain(|i| i.value_str())
}
/* Higher-level applications */
@ -201,7 +201,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
// This is sort of stupid here, but we need to sort by
// human-readable strings.
let mut v = items.iter()
.transform(|&mi| (mi.name(), mi))
.map(|&mi| (mi.name(), mi))
.collect::<~[(@str, @MetaItem)]>();
do extra::sort::quick_sort(v) |&(a, _), &(b, _)| {
@ -209,7 +209,7 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] {
}
// There doesn't seem to be a more optimal way to do this
do v.consume_iter().transform |(_, m)| {
do v.move_iter().map |(_, m)| {
match m.node {
MetaList(n, ref mis) => {
@spanned {

View File

@ -702,7 +702,7 @@ impl AstBuilder for @ExtCtxt {
}
fn variant(&self, span: span, name: ident, tys: ~[ast::Ty]) -> ast::variant {
let args = tys.consume_iter().transform(|ty| {
let args = tys.move_iter().map(|ty| {
ast::variant_arg { ty: ty, id: self.next_id() }
}).collect();

View File

@ -39,7 +39,7 @@ pub fn expand_cfg(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree]) -> base::MacR
// test_cfg searches for meta items looking like `cfg(foo, ...)`
let in_cfg = &[cx.meta_list(sp, @"cfg", cfgs)];
let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().transform(|&x| x));
let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x));
let e = cx.expr_bool(sp, matches_cfg);
MRExpr(e)
}

View File

@ -90,7 +90,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
}
}
Right(ref fields) => {
let fields = do fields.iter().enumerate().transform |(i, f)| {
let fields = do fields.iter().enumerate().map |(i, f)| {
cx.field_imm(span, *f, getarg(cx.str_of(*f), i))
}.collect();
cx.expr_struct_ident(span, substr.type_ident, fields)
@ -132,7 +132,7 @@ fn decodable_substructure(cx: @ExtCtxt, span: span,
}
}
Right(ref fields) => {
let fields = do fields.iter().enumerate().transform |(i, f)| {
let fields = do fields.iter().enumerate().map |(i, f)| {
cx.field_imm(span, *f, getarg(i))
}.collect();
cx.expr_struct_ident(span, name, fields)

View File

@ -590,7 +590,7 @@ impl<'self> MethodDef<'self> {
// transpose raw_fields
let fields = match raw_fields {
[ref self_arg, .. rest] => {
do self_arg.iter().enumerate().transform |(i, &(opt_id, field))| {
do self_arg.iter().enumerate().map |(i, &(opt_id, field))| {
let other_fields = do rest.map |l| {
match &l[i] {
&(_, ex) => ex
@ -750,7 +750,7 @@ impl<'self> MethodDef<'self> {
let field_tuples =
do self_vec.iter()
.zip(enum_matching_fields.iter())
.transform |(&(id, self_f), other)| {
.map |(&(id, self_f), other)| {
(id, self_f, (*other).clone())
}.collect();
substructure = EnumMatching(variant_index, variant, field_tuples);

View File

@ -95,7 +95,7 @@ fn rand_substructure(cx: @ExtCtxt, span: span, substr: &Substructure) -> @expr {
let rand_variant = cx.expr_binary(span, ast::rem,
rv_call, variant_count);
let mut arms = do variants.iter().enumerate().transform |(i, id_sum)| {
let mut arms = do variants.iter().enumerate().map |(i, id_sum)| {
let i_expr = cx.expr_uint(span, i);
let pat = cx.pat_lit(span, i_expr);

View File

@ -1075,7 +1075,6 @@ mod test {
use parse::token::{intern, get_ident_interner};
use print::pprust;
use util::parser_testing::{string_to_item, string_to_pat, strs_to_idents};
use oldvisit::{mk_vt};
// make sure that fail! is present
#[test] fn fail_exists_test () {

View File

@ -269,7 +269,7 @@ fn pieces_to_expr(cx: @ExtCtxt, sp: span,
corresponding function in std::unstable::extfmt. Each function takes a
buffer to insert data into along with the data being formatted. */
let npieces = pieces.len();
for (i, pc) in pieces.consume_iter().enumerate() {
for (i, pc) in pieces.move_iter().enumerate() {
match pc {
/* Raw strings get appended via str::push_str */
PieceString(s) => {

View File

@ -353,9 +353,9 @@ impl Context {
let trans_method = |method: &parse::Method| {
let method = match *method {
parse::Select(ref arms, ref default) => {
let arms = arms.iter().transform(|arm| {
let arms = arms.iter().map(|arm| {
let p = self.ecx.path_global(sp, rtpath("SelectArm"));
let result = arm.result.iter().transform(|p| {
let result = arm.result.iter().map(|p| {
self.trans_piece(p)
}).collect();
let s = arm.selector.to_managed();
@ -368,7 +368,7 @@ impl Context {
self.ecx.expr_vec_slice(sp, result)),
])
}).collect();
let default = default.iter().transform(|p| {
let default = default.iter().map(|p| {
self.trans_piece(p)
}).collect();
self.ecx.expr_call_global(sp, rtpath("Select"), ~[
@ -381,9 +381,9 @@ impl Context {
Some(i) => { some(self.ecx.expr_uint(sp, i)) }
None => { none() }
};
let arms = arms.iter().transform(|arm| {
let arms = arms.iter().map(|arm| {
let p = self.ecx.path_global(sp, rtpath("PluralArm"));
let result = arm.result.iter().transform(|p| {
let result = arm.result.iter().map(|p| {
self.trans_piece(p)
}).collect();
let (lr, selarg) = match arm.selector {
@ -408,7 +408,7 @@ impl Context {
self.ecx.expr_vec_slice(sp, result)),
])
}).collect();
let default = default.iter().transform(|p| {
let default = default.iter().map(|p| {
self.trans_piece(p)
}).collect();
self.ecx.expr_call_global(sp, rtpath("Plural"), ~[
@ -575,8 +575,8 @@ impl Context {
Some(self.format_arg(e.span, Right(name), lname));
}
let args = names.consume_iter().transform(|a| a.unwrap());
let mut args = locals.consume_iter().chain_(args);
let args = names.move_iter().map(|a| a.unwrap());
let mut args = locals.move_iter().chain(args);
// Next, build up the actual call to the sprintf function.
let result = self.ecx.expr_call_global(self.fmtsp, ~[
@ -685,7 +685,7 @@ pub fn expand_syntax_ext(ecx: @ExtCtxt, sp: span,
};
cx.fmtsp = efmt.span;
let fmt = expr_to_str(ecx, efmt,
~"first argument to ifmt! must be a string literal.");
"first argument to ifmt! must be a string literal.");
let mut err = false;
do parse::parse_error::cond.trap(|m| {

View File

@ -105,7 +105,7 @@ pub fn expand_include_bin(cx: @ExtCtxt, sp: span, tts: &[ast::token_tree])
let file = get_single_str_from_tts(cx, sp, tts, "include_bin!");
match io::read_whole_file(&res_rel_file(cx, sp, &Path(file))) {
result::Ok(src) => {
let u8_exprs: ~[@ast::expr] = src.iter().transform(|char| cx.expr_u8(sp, *char)).collect();
let u8_exprs: ~[@ast::expr] = src.iter().map(|char| cx.expr_u8(sp, *char)).collect();
base::MRExpr(cx.expr_vec(sp, u8_exprs))
}
result::Err(ref e) => {

View File

@ -176,7 +176,7 @@ pub fn fold_ty_param(tp: TyParam,
pub fn fold_ty_params(tps: &OptVec<TyParam>,
fld: @ast_fold) -> OptVec<TyParam> {
let tps = /*bad*/ (*tps).clone();
tps.map_consume(|tp| fold_ty_param(tp, fld))
tps.map_move(|tp| fold_ty_param(tp, fld))
}
pub fn fold_lifetime(l: &Lifetime,
@ -704,7 +704,7 @@ pub fn noop_fold_ty(t: &ty_, fld: @ast_fold) -> ty_ {
// ...nor do modules
pub fn noop_fold_mod(m: &_mod, fld: @ast_fold) -> _mod {
ast::_mod {
view_items: m.view_items.iter().transform(|x| fld.fold_view_item(x)).collect(),
view_items: m.view_items.iter().map(|x| fld.fold_view_item(x)).collect(),
items: m.items.iter().filter_map(|x| fld.fold_item(*x)).collect(),
}
}
@ -713,8 +713,8 @@ fn noop_fold_foreign_mod(nm: &foreign_mod, fld: @ast_fold) -> foreign_mod {
ast::foreign_mod {
sort: nm.sort,
abis: nm.abis,
view_items: nm.view_items.iter().transform(|x| fld.fold_view_item(x)).collect(),
items: nm.items.iter().transform(|x| fld.fold_foreign_item(*x)).collect(),
view_items: nm.view_items.iter().map(|x| fld.fold_view_item(x)).collect(),
items: nm.items.iter().map(|x| fld.fold_foreign_item(*x)).collect(),
}
}
@ -734,7 +734,7 @@ fn noop_fold_variant(v: &variant_, fld: @ast_fold) -> variant_ {
struct_variant_kind(ref struct_def) => {
kind = struct_variant_kind(@ast::struct_def {
fields: struct_def.fields.iter()
.transform(|f| fld.fold_struct_field(*f)).collect(),
.map(|f| fld.fold_struct_field(*f)).collect(),
ctor_id: struct_def.ctor_id.map(|c| fld.new_id(*c))
})
}
@ -828,7 +828,7 @@ impl ast_fold for AstFoldFns {
fn fold_view_item(@self, x: &view_item) -> view_item {
ast::view_item {
node: (self.fold_view_item)(&x.node, self as @ast_fold),
attrs: x.attrs.iter().transform(|a| fold_attribute_(*a, self as @ast_fold)).collect(),
attrs: x.attrs.iter().map(|a| fold_attribute_(*a, self as @ast_fold)).collect(),
vis: x.vis,
span: (self.new_span)(x.span),
}

Some files were not shown because too many files have changed in this diff Show More