Demode vec::push (and convert to method)
parent cd79e1d1b2
commit 67a8e7128a
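The change applied at every call site in the diff below is mechanical: the free-function form `vec::push(v, x)` (and its relatives `vec::push_all` and `vec::push_all_move`) becomes the method form `v.push(x)`. The diff also "demodes" these functions in `vec.rs`, replacing the by-mode parameter `&v: ~[T]` with an explicit `v: &mut ~[T]` borrowed pointer. As a hedged illustration only — modern Rust has no free `vec::push`, so the helper `push_free` below is an invented stand-in — the before/after shape looks like this:

```rust
// Hypothetical stand-in for the old free function `vec::push`;
// modern `std` has no such free function.
fn push_free<T>(v: &mut Vec<T>, value: T) {
    v.push(value);
}

fn main() {
    let mut acc: Vec<i32> = Vec::new();
    push_free(&mut acc, 1); // old style: free function, vector passed in explicitly
    acc.push(2);            // new style: method call on the vector itself
    assert_eq!(acc, vec![1, 2]);
}
```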
@@ -1016,7 +1016,7 @@ fn iter<T>(seq: ~[T], f: fn(T)) {
 }
 fn map<T, U>(seq: ~[T], f: fn(T) -> U) -> ~[U] {
     let mut acc = ~[];
-    for seq.each |elt| { vec::push(acc, f(elt)); }
+    for seq.each |elt| { acc.push(f(elt)); }
     acc
 }
 ~~~~
@@ -1651,7 +1651,7 @@ may be invoked on multiple types.
 fn map<T, U>(vector: &[T], function: fn(v: &T) -> U) -> ~[U] {
     let mut accumulator = ~[];
     for vec::each(vector) |element| {
-        vec::push(accumulator, function(element));
+        accumulator.push(function(element));
     }
     return accumulator;
 }
@@ -345,7 +345,7 @@ fn load_crate(filename: &Path) -> Option<Crate> {

                 match *ps.interner.get(attr_name) {
                     ~"std" | ~"core" => (),
-                    _ => vec::push(e.deps, query)
+                    _ => e.deps.push(query)
                 }
             }
             _ => ()
@@ -801,7 +801,7 @@ fn install_source(c: &Cargo, path: &Path) {
     let mut cratefiles = ~[];
     for os::walk_dir(&Path(".")) |p| {
         if p.filetype() == Some(~".rc") {
-            vec::push(cratefiles, *p);
+            cratefiles.push(*p);
         }
     }

@@ -141,7 +141,7 @@ fn make_tests(config: config) -> ~[test::TestDesc] {
         let file = copy *file;
         debug!("inspecting file %s", file.to_str());
         if is_test(config, file) {
-            vec::push(tests, make_test(config, file))
+            tests.push(make_test(config, file))
         }
     }
     return tests;
@@ -28,7 +28,7 @@ fn load_props(testfile: &Path) -> test_props {
     let mut pp_exact = option::None;
     for iter_header(testfile) |ln| {
         match parse_error_pattern(ln) {
-            option::Some(ep) => vec::push(error_patterns, ep),
+            option::Some(ep) => error_patterns.push(ep),
            option::None => ()
        };

@@ -41,11 +41,11 @@ fn load_props(testfile: &Path) -> test_props {
        }

        do parse_aux_build(ln).iter |ab| {
-            vec::push(aux_builds, ab);
+            aux_builds.push(ab);
        }

        do parse_exec_env(ln).iter |ee| {
-            vec::push(exec_env, ee);
+            exec_env.push(ee);
        }
    };
    return {
@@ -19,7 +19,7 @@ fn target_env(lib_path: ~str, prog: ~str) -> ~[(~str,~str)] {
         else { (k,v) }
     };
     if str::ends_with(prog, ~"rustc.exe") {
-        vec::push(env, (~"RUST_THREADS", ~"1"));
+        env.push((~"RUST_THREADS", ~"1"));
     }
     return env;
 }
@@ -121,7 +121,7 @@ fn run_pretty_test(config: config, props: test_props, testfile: &Path) {
                          procres);
        }

-        vec::push(srcs, procres.stdout);
+        srcs.push(procres.stdout);
        round += 1;
    }

@ -62,7 +62,7 @@ fn test_cycles(r : rand::rng, k: uint, n: uint)
|
||||
|
||||
// Create a graph with no edges
|
||||
range(0u, vlen) {|_i|
|
||||
vec::push(v, empty_pointy());
|
||||
v.push(empty_pointy());
|
||||
}
|
||||
|
||||
// Fill in the graph with random edges, with density k/n
|
||||
@ -77,7 +77,7 @@ fn test_cycles(r : rand::rng, k: uint, n: uint)
|
||||
// https://github.com/mozilla/rust/issues/1899
|
||||
|
||||
if (likelihood(r, k, n)) { v[i].m = [p(choice(r, v))]; }
|
||||
if (likelihood(r, k, n)) { vec::push(v[i].n, mut p(choice(r, v))); }
|
||||
if (likelihood(r, k, n)) { v[i].n.push(mut p(choice(r, v))); }
|
||||
if (likelihood(r, k, n)) { v[i].o = {x: 0, y: p(choice(r, v))}; }
|
||||
}
|
||||
|
||||
|
@ -30,7 +30,7 @@ fn contains(haystack: ~str, needle: ~str) -> bool {
|
||||
fn find_rust_files(files: &mut ~[Path], path: &Path) {
|
||||
if path.filetype() == Some(~".rs") && !contains(path.to_str(), ~"utf8") {
|
||||
// ignoring "utf8" tests because something is broken
|
||||
vec::push(*files, *path);
|
||||
files.push(*path);
|
||||
} else if os::path_is_dir(path)
|
||||
&& !contains(path.to_str(), ~"compile-fail")
|
||||
&& !contains(path.to_str(), ~"build") {
|
||||
@ -124,7 +124,7 @@ fn stash_ty_if(c: fn@(@ast::ty, test_mode)->bool,
|
||||
e: @ast::ty,
|
||||
tm: test_mode) {
|
||||
if c(e, tm) {
|
||||
vec::push(*es,*e);
|
||||
es.push(e);
|
||||
} else {/* now my indices are wrong :( */ }
|
||||
}
|
||||
|
||||
|
@ -55,11 +55,11 @@ fn vec_edits<T: copy>(v: ~[T], xs: ~[T]) -> ~[~[T]] {
|
||||
|
||||
if Lv != 1u {
|
||||
// When Lv == 1u, this is redundant with omit.
|
||||
vec::push(edits, ~[]);
|
||||
edits.push(~[]);
|
||||
}
|
||||
if Lv >= 3u {
|
||||
// When Lv == 2u, this is redundant with swap.
|
||||
vec::push(edits, vec::reversed(v));
|
||||
edits.push(vec::reversed(v));
|
||||
}
|
||||
ix(0u, 1u, Lv) {|i| edits += ~[vec_omit(v, i)]; }
|
||||
ix(0u, 1u, Lv) {|i| edits += ~[vec_dup(v, i)]; }
|
||||
@ -69,10 +69,10 @@ fn vec_edits<T: copy>(v: ~[T], xs: ~[T]) -> ~[~[T]] {
|
||||
|
||||
ix(0u, 1u, len(xs)) {|j|
|
||||
ix(0u, 1u, Lv) {|i|
|
||||
vec::push(edits, vec_poke(v, i, xs[j]));
|
||||
edits.push(vec_poke(v, i, xs[j]));
|
||||
}
|
||||
ix(0u, 0u, Lv) {|i|
|
||||
vec::push(edits, vec_insert(v, i, xs[j]));
|
||||
edits.push(vec_insert(v, i, xs[j]));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -61,7 +61,7 @@ fn weighted_vec<T: copy>(v : ~[weighted<T>]) -> ~[T] {
|
||||
for {weight: weight, item: item} in v {
|
||||
let i = 0u;
|
||||
while i < weight {
|
||||
vec::push(r, item);
|
||||
r.push(item);
|
||||
i += 1u;
|
||||
}
|
||||
}
|
||||
|
@ -17,6 +17,7 @@ use tuple::{TupleOps, ExtendedTupleOps};
|
||||
use str::{StrSlice, UniqueStr};
|
||||
use vec::{ConstVector, CopyableVector, ImmutableVector};
|
||||
use vec::{ImmutableEqVector, ImmutableCopyableVector};
|
||||
use vec::{MutableVector, MutableCopyableVector};
|
||||
use iter::{BaseIter, ExtendedIter, EqIter, CopyableIter};
|
||||
use iter::{CopyableOrderedIter, Times, TimesIx};
|
||||
use num::Num;
|
||||
@ -33,6 +34,7 @@ export Num, Times, TimesIx;
|
||||
export StrSlice, UniqueStr;
|
||||
export ConstVector, CopyableVector, ImmutableVector;
|
||||
export ImmutableEqVector, ImmutableCopyableVector, IterTraitExtensions;
|
||||
export MutableVector, MutableCopyableVector;
|
||||
export BaseIter, CopyableIter, CopyableOrderedIter, ExtendedIter, EqIter;
|
||||
export TupleOps, ExtendedTupleOps;
|
||||
export Ptr;
|
||||
|
@@ -172,7 +172,7 @@ impl<A> DVec<A> {
             if data_ptr.is_null() { fail ~"Recursive use of dvec"; }
             log(error, ~"a");
             self.data <- ~[move t];
-            vec::push_all_move(self.data, move data);
+            self.data.push_all_move(move data);
             log(error, ~"b");
         }
     }
@@ -180,7 +180,7 @@ impl<A> DVec<A> {
     /// Append a single item to the end of the list
     fn push(+t: A) {
         self.check_not_borrowed();
-        vec::push(self.data, move t);
+        self.data.push(move t);
     }

     /// Remove and return the first element
@ -240,7 +240,7 @@ impl<A: Copy> DVec<A> {
|
||||
vec::reserve(&mut v, new_len);
|
||||
let mut i = from_idx;
|
||||
while i < to_idx {
|
||||
vec::push(v, ts[i]);
|
||||
v.push(ts[i]);
|
||||
i += 1u;
|
||||
}
|
||||
move v
|
||||
@ -266,7 +266,7 @@ impl<A: Copy> DVec<A> {
|
||||
}
|
||||
};
|
||||
|
||||
for ts.each |t| { vec::push(v, *t) };
|
||||
for ts.each |t| { v.push(*t) };
|
||||
v
|
||||
}
|
||||
}
|
||||
|
@ -32,27 +32,27 @@ fn either<T, U, V>(f_left: fn((&T)) -> V,
|
||||
fn lefts<T: Copy, U>(eithers: &[Either<T, U>]) -> ~[T] {
|
||||
//! Extracts from a vector of either all the left values
|
||||
|
||||
let mut result: ~[T] = ~[];
|
||||
for vec::each(eithers) |elt| {
|
||||
match *elt {
|
||||
Left(l) => vec::push(result, l),
|
||||
_ => { /* fallthrough */ }
|
||||
do vec::build_sized(eithers.len()) |push| {
|
||||
for vec::each(eithers) |elt| {
|
||||
match *elt {
|
||||
Left(ref l) => { push(*l); }
|
||||
_ => { /* fallthrough */ }
|
||||
}
|
||||
}
|
||||
}
|
||||
move result
|
||||
}
|
||||
|
||||
fn rights<T, U: Copy>(eithers: &[Either<T, U>]) -> ~[U] {
|
||||
//! Extracts from a vector of either all the right values
|
||||
|
||||
let mut result: ~[U] = ~[];
|
||||
for vec::each(eithers) |elt| {
|
||||
match *elt {
|
||||
Right(r) => vec::push(result, r),
|
||||
_ => { /* fallthrough */ }
|
||||
do vec::build_sized(eithers.len()) |push| {
|
||||
for vec::each(eithers) |elt| {
|
||||
match *elt {
|
||||
Right(ref r) => { push(*r); }
|
||||
_ => { /* fallthrough */ }
|
||||
}
|
||||
}
|
||||
}
|
||||
move result
|
||||
}
|
||||
|
||||
fn partition<T: Copy, U: Copy>(eithers: &[Either<T, U>])
|
||||
@ -68,8 +68,8 @@ fn partition<T: Copy, U: Copy>(eithers: &[Either<T, U>])
|
||||
let mut rights: ~[U] = ~[];
|
||||
for vec::each(eithers) |elt| {
|
||||
match *elt {
|
||||
Left(l) => vec::push(lefts, l),
|
||||
Right(r) => vec::push(rights, r)
|
||||
Left(l) => lefts.push(l),
|
||||
Right(r) => rights.push(r)
|
||||
}
|
||||
}
|
||||
return {lefts: move lefts, rights: move rights};
|
||||
|
@ -90,7 +90,7 @@ mod ct {
|
||||
fn flush_buf(+buf: ~str, &pieces: ~[Piece]) -> ~str {
|
||||
if str::len(buf) > 0 {
|
||||
let piece = PieceString(move buf);
|
||||
vec::push(pieces, move piece);
|
||||
pieces.push(move piece);
|
||||
}
|
||||
return ~"";
|
||||
}
|
||||
@ -110,7 +110,7 @@ mod ct {
|
||||
} else {
|
||||
buf = flush_buf(move buf, pieces);
|
||||
let rs = parse_conversion(s, i, lim, error);
|
||||
vec::push(pieces, copy rs.piece);
|
||||
pieces.push(copy rs.piece);
|
||||
i = rs.next;
|
||||
}
|
||||
} else { buf += curr; i += size; }
|
||||
|
@ -71,12 +71,12 @@ fn test_flate_round_trip() {
|
||||
let r = rand::Rng();
|
||||
let mut words = ~[];
|
||||
for 20.times {
|
||||
vec::push(words, r.gen_bytes(r.gen_uint_range(1, 10)));
|
||||
words.push(r.gen_bytes(r.gen_uint_range(1, 10)));
|
||||
}
|
||||
for 20.times {
|
||||
let mut in = ~[];
|
||||
for 2000.times {
|
||||
vec::push_all(in, r.choose(words));
|
||||
in.push_all(r.choose(words));
|
||||
}
|
||||
debug!("de/inflate of %u bytes of random word-sequences",
|
||||
in.len());
|
||||
|
@ -143,7 +143,7 @@ fn to_str_common(num: float, digits: uint, exact: bool) -> ~str {
|
||||
// store the next digit
|
||||
frac *= 10.0;
|
||||
let digit = frac as uint;
|
||||
vec::push(fractionalParts, digit);
|
||||
fractionalParts.push(digit);
|
||||
|
||||
// calculate the next frac
|
||||
frac -= digit as float;
|
||||
|
@ -76,7 +76,7 @@ impl<T: Reader> T : ReaderUtil {
|
||||
loop {
|
||||
let ch = self.read_byte();
|
||||
if ch == -1 || ch == 10 { break; }
|
||||
vec::push(buf, ch as u8);
|
||||
buf.push(ch as u8);
|
||||
}
|
||||
str::from_bytes(buf)
|
||||
}
|
||||
@ -94,7 +94,7 @@ impl<T: Reader> T : ReaderUtil {
|
||||
i += 1;
|
||||
assert (w > 0);
|
||||
if w == 1 {
|
||||
vec::push(*chars, b0 as char);
|
||||
chars.push(b0 as char);
|
||||
loop;
|
||||
}
|
||||
// can't satisfy this char with the existing data
|
||||
@ -113,7 +113,7 @@ impl<T: Reader> T : ReaderUtil {
|
||||
// See str::char_at
|
||||
val += ((b0 << ((w + 1) as u8)) as uint)
|
||||
<< (w - 1) * 6 - w - 1u;
|
||||
vec::push(*chars, val as char);
|
||||
chars.push(val as char);
|
||||
}
|
||||
return (i, 0);
|
||||
}
|
||||
@ -128,7 +128,7 @@ impl<T: Reader> T : ReaderUtil {
|
||||
// we're split in a unicode char?
|
||||
break;
|
||||
}
|
||||
vec::push_all(buf, data);
|
||||
buf.push_all(data);
|
||||
let (offset, nbreq) = chars_from_bytes::<T>(&buf, &mut chars);
|
||||
let ncreq = n - chars.len();
|
||||
// again we either know we need a certain number of bytes
|
||||
@ -155,7 +155,7 @@ impl<T: Reader> T : ReaderUtil {
|
||||
let mut buf: ~[u8] = ~[];
|
||||
loop {
|
||||
let ch = self.read_byte();
|
||||
if ch < 1 { break; } else { vec::push(buf, ch as u8); }
|
||||
if ch < 1 { break; } else { buf.push(ch as u8); }
|
||||
}
|
||||
str::from_bytes(buf)
|
||||
}
|
||||
@ -190,7 +190,7 @@ impl<T: Reader> T : ReaderUtil {
|
||||
|
||||
fn read_whole_stream() -> ~[u8] {
|
||||
let mut buf: ~[u8] = ~[];
|
||||
while !self.eof() { vec::push_all(buf, self.read_bytes(2048u)); }
|
||||
while !self.eof() { buf.push_all(self.read_bytes(2048u)); }
|
||||
move buf
|
||||
}
|
||||
|
||||
@ -503,7 +503,7 @@ fn u64_to_le_bytes<T>(n: u64, size: uint, f: fn(v: &[u8]) -> T) -> T {
|
||||
|
||||
let mut bytes: ~[u8] = ~[], i = size, n = n;
|
||||
while i > 0u {
|
||||
vec::push(bytes, (n & 255_u64) as u8);
|
||||
bytes.push((n & 255_u64) as u8);
|
||||
n >>= 8_u64;
|
||||
i -= 1u;
|
||||
}
|
||||
@ -535,7 +535,7 @@ fn u64_to_be_bytes<T>(n: u64, size: uint, f: fn(v: &[u8]) -> T) -> T {
|
||||
let mut i = size;
|
||||
while i > 0u {
|
||||
let shift = ((i - 1u) * 8u) as u64;
|
||||
vec::push(bytes, (n >> shift) as u8);
|
||||
bytes.push((n >> shift) as u8);
|
||||
i -= 1u;
|
||||
}
|
||||
f(bytes)
|
||||
@ -737,7 +737,7 @@ fn with_str_writer(f: fn(Writer)) -> ~str {
|
||||
let mut v = with_bytes_writer(f);
|
||||
|
||||
// Make sure the vector has a trailing null and is proper utf8.
|
||||
vec::push(v, 0);
|
||||
v.push(0);
|
||||
assert str::is_utf8(v);
|
||||
|
||||
unsafe { move ::cast::transmute(v) }
|
||||
|
@ -219,7 +219,7 @@ mod global_env {
|
||||
for vec::each(rustrt::rust_env_pairs()) |p| {
|
||||
let vs = str::splitn_char(*p, '=', 1u);
|
||||
assert vec::len(vs) == 2u;
|
||||
vec::push(pairs, (copy vs[0], copy vs[1]));
|
||||
pairs.push((copy vs[0], copy vs[1]));
|
||||
}
|
||||
move pairs
|
||||
}
|
||||
|
@ -206,7 +206,7 @@ impl PosixPath : GenericPath {
|
||||
let mut ss = str::split_nonempty(
|
||||
*e,
|
||||
|c| windows::is_sep(c as u8));
|
||||
unsafe { vec::push_all_move(v, move ss); }
|
||||
unsafe { v.push_all_move(move ss); }
|
||||
}
|
||||
PosixPath { components: move v, ..self }
|
||||
}
|
||||
@ -214,7 +214,7 @@ impl PosixPath : GenericPath {
|
||||
pure fn push(s: &str) -> PosixPath {
|
||||
let mut v = copy self.components;
|
||||
let mut ss = str::split_nonempty(s, |c| windows::is_sep(c as u8));
|
||||
unsafe { vec::push_all_move(v, move ss); }
|
||||
unsafe { v.push_all_move(move ss); }
|
||||
PosixPath { components: move v, ..self }
|
||||
}
|
||||
|
||||
@ -400,7 +400,7 @@ impl WindowsPath : GenericPath {
|
||||
let mut ss = str::split_nonempty(
|
||||
*e,
|
||||
|c| windows::is_sep(c as u8));
|
||||
unsafe { vec::push_all_move(v, move ss); }
|
||||
unsafe { v.push_all_move(move ss); }
|
||||
}
|
||||
return WindowsPath { components: move v, ..self }
|
||||
}
|
||||
@ -408,7 +408,7 @@ impl WindowsPath : GenericPath {
|
||||
pure fn push(s: &str) -> WindowsPath {
|
||||
let mut v = copy self.components;
|
||||
let mut ss = str::split_nonempty(s, |c| windows::is_sep(c as u8));
|
||||
unsafe { vec::push_all_move(v, move ss); }
|
||||
unsafe { v.push_all_move(move ss); }
|
||||
return WindowsPath { components: move v, ..self }
|
||||
}
|
||||
|
||||
@ -440,7 +440,7 @@ pure fn normalize(components: &[~str]) -> ~[~str] {
|
||||
vec::pop(cs);
|
||||
loop;
|
||||
}
|
||||
vec::push(cs, copy *c);
|
||||
cs.push(copy *c);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1059,7 +1059,7 @@ pub fn PortSet<T: Send>() -> PortSet<T>{
|
||||
impl<T: Send> PortSet<T> : Recv<T> {
|
||||
|
||||
fn add(+port: pipes::Port<T>) {
|
||||
vec::push(self.ports, move port)
|
||||
self.ports.push(move port)
|
||||
}
|
||||
|
||||
fn chan() -> Chan<T> {
|
||||
|
@ -564,7 +564,7 @@ pub mod tests {
|
||||
|
||||
for uint::range(0u, num_tasks) |_i| {
|
||||
let total = total.clone();
|
||||
vec::push(futures, future::spawn(|| {
|
||||
futures.push(future::spawn(|| {
|
||||
for uint::range(0u, count) |_i| {
|
||||
do total.with |count| {
|
||||
**count += 1u;
|
||||
|
@ -215,7 +215,7 @@ impl Rng {
|
||||
let mut r = ~[];
|
||||
for v.each |item| {
|
||||
for uint::range(0u, item.weight) |_i| {
|
||||
vec::push(r, item.item);
|
||||
r.push(item.item);
|
||||
}
|
||||
}
|
||||
move r
|
||||
|
@ -280,7 +280,7 @@ fn map_vec<T,U:Copy,V:Copy>(
|
||||
let mut vs: ~[V] = vec::with_capacity(vec::len(ts));
|
||||
for vec::each(ts) |t| {
|
||||
match op(t) {
|
||||
Ok(v) => vec::push(vs, v),
|
||||
Ok(v) => vs.push(v),
|
||||
Err(u) => return Err(u)
|
||||
}
|
||||
}
|
||||
@ -317,7 +317,7 @@ fn map_vec2<S,T,U:Copy,V:Copy>(ss: &[S], ts: &[T],
|
||||
let mut i = 0u;
|
||||
while i < n {
|
||||
match op(&ss[i],&ts[i]) {
|
||||
Ok(v) => vec::push(vs, v),
|
||||
Ok(v) => vs.push(v),
|
||||
Err(u) => return Err(u)
|
||||
}
|
||||
i += 1u;
|
||||
|
@ -82,10 +82,10 @@ fn with_argv<T>(prog: &str, args: &[~str],
|
||||
let mut tmps = ~[];
|
||||
for vec::each(args) |arg| {
|
||||
let t = @copy *arg;
|
||||
vec::push(tmps, t);
|
||||
vec::push_all(argptrs, str::as_c_str(*t, |b| ~[b]));
|
||||
tmps.push(t);
|
||||
argptrs.push_all(str::as_c_str(*t, |b| ~[b]));
|
||||
}
|
||||
vec::push(argptrs, ptr::null());
|
||||
argptrs.push(ptr::null());
|
||||
vec::as_imm_buf(argptrs, |buf, _len| cb(buf))
|
||||
}
|
||||
|
||||
@ -102,10 +102,10 @@ fn with_envp<T>(env: &Option<~[(~str,~str)]>,
|
||||
for vec::each(es) |e| {
|
||||
let (k,v) = copy *e;
|
||||
let t = @(fmt!("%s=%s", k, v));
|
||||
vec::push(tmps, t);
|
||||
vec::push_all(ptrs, str::as_c_str(*t, |b| ~[b]));
|
||||
tmps.push(t);
|
||||
ptrs.push_all(str::as_c_str(*t, |b| ~[b]));
|
||||
}
|
||||
vec::push(ptrs, ptr::null());
|
||||
ptrs.push(ptr::null());
|
||||
vec::as_imm_buf(ptrs, |p, _len|
|
||||
unsafe { cb(::cast::reinterpret_cast(&p)) }
|
||||
)
|
||||
|
@ -283,18 +283,9 @@ pub mod linear {
|
||||
FoundEntry(idx) => {
|
||||
match self.buckets[idx] {
|
||||
Some(ref bkt) => {
|
||||
let ptr = unsafe {
|
||||
// FIXME(#3148)--region inference
|
||||
// fails to capture needed deps.
|
||||
// Here, the bucket value is known to
|
||||
// live as long as self, because self
|
||||
// is immutable. But the region
|
||||
// inference stupidly infers a
|
||||
// lifetime for `ref bkt` that is
|
||||
// shorter than it needs to be.
|
||||
cast::copy_lifetime(self, &bkt.value)
|
||||
};
|
||||
Some(ptr)
|
||||
// FIXME(#3148)---should be inferred
|
||||
let bkt: &self/Bucket<K,V> = bkt;
|
||||
Some(&bkt.value)
|
||||
}
|
||||
None => {
|
||||
fail ~"LinearMap::find: internal logic error"
|
||||
|
@ -468,7 +468,7 @@ pure fn chars(s: &str) -> ~[char] {
|
||||
let len = len(s);
|
||||
while i < len {
|
||||
let {ch, next} = char_range_at(s, i);
|
||||
unsafe { vec::push(buf, ch); }
|
||||
unsafe { buf.push(ch); }
|
||||
i = next;
|
||||
}
|
||||
move buf
|
||||
@ -537,8 +537,7 @@ pure fn split_char_inner(s: &str, sep: char, count: uint, allow_empty: bool)
|
||||
while i < l && done < count {
|
||||
if s[i] == b {
|
||||
if allow_empty || start < i unsafe {
|
||||
vec::push(result,
|
||||
unsafe { raw::slice_bytes(s, start, i) });
|
||||
result.push(unsafe { raw::slice_bytes(s, start, i) });
|
||||
}
|
||||
start = i + 1u;
|
||||
done += 1u;
|
||||
@ -546,7 +545,7 @@ pure fn split_char_inner(s: &str, sep: char, count: uint, allow_empty: bool)
|
||||
i += 1u;
|
||||
}
|
||||
if allow_empty || start < l {
|
||||
unsafe { vec::push(result, raw::slice_bytes(s, start, l) ) };
|
||||
unsafe { result.push(raw::slice_bytes(s, start, l) ) };
|
||||
}
|
||||
move result
|
||||
} else {
|
||||
@ -581,7 +580,7 @@ pure fn split_inner(s: &str, sepfn: fn(cc: char) -> bool, count: uint,
|
||||
let {ch, next} = char_range_at(s, i);
|
||||
if sepfn(ch) {
|
||||
if allow_empty || start < i unsafe {
|
||||
vec::push(result, unsafe { raw::slice_bytes(s, start, i)});
|
||||
result.push(unsafe { raw::slice_bytes(s, start, i)});
|
||||
}
|
||||
start = next;
|
||||
done += 1u;
|
||||
@ -589,7 +588,7 @@ pure fn split_inner(s: &str, sepfn: fn(cc: char) -> bool, count: uint,
|
||||
i = next;
|
||||
}
|
||||
if allow_empty || start < l unsafe {
|
||||
vec::push(result, unsafe { raw::slice_bytes(s, start, l) });
|
||||
result.push(unsafe { raw::slice_bytes(s, start, l) });
|
||||
}
|
||||
move result
|
||||
}
|
||||
@ -643,7 +642,7 @@ pure fn iter_between_matches(s: &a/str, sep: &b/str, f: fn(uint, uint)) {
|
||||
pure fn split_str(s: &a/str, sep: &b/str) -> ~[~str] {
|
||||
let mut result = ~[];
|
||||
do iter_between_matches(s, sep) |from, to| {
|
||||
unsafe { vec::push(result, raw::slice_bytes(s, from, to)); }
|
||||
unsafe { result.push(raw::slice_bytes(s, from, to)); }
|
||||
}
|
||||
move result
|
||||
}
|
||||
@ -652,7 +651,7 @@ pure fn split_str_nonempty(s: &a/str, sep: &b/str) -> ~[~str] {
|
||||
let mut result = ~[];
|
||||
do iter_between_matches(s, sep) |from, to| {
|
||||
if to > from {
|
||||
unsafe { vec::push(result, raw::slice_bytes(s, from, to)); }
|
||||
unsafe { result.push(raw::slice_bytes(s, from, to)); }
|
||||
}
|
||||
}
|
||||
move result
|
||||
@ -1535,14 +1534,14 @@ pure fn to_utf16(s: &str) -> ~[u16] {
|
||||
if (ch & 0xFFFF_u32) == ch unsafe {
|
||||
// The BMP falls through (assuming non-surrogate, as it should)
|
||||
assert ch <= 0xD7FF_u32 || ch >= 0xE000_u32;
|
||||
vec::push(u, ch as u16)
|
||||
u.push(ch as u16)
|
||||
} else unsafe {
|
||||
// Supplementary planes break into surrogates.
|
||||
assert ch >= 0x1_0000_u32 && ch <= 0x10_FFFF_u32;
|
||||
ch -= 0x1_0000_u32;
|
||||
let w1 = 0xD800_u16 | ((ch >> 10) as u16);
|
||||
let w2 = 0xDC00_u16 | ((ch as u16) & 0x3FF_u16);
|
||||
vec::push_all(u, ~[w1, w2])
|
||||
u.push_all(~[w1, w2])
|
||||
}
|
||||
}
|
||||
move u
|
||||
@ -2010,7 +2009,7 @@ mod raw {
|
||||
ptr::memcpy(vbuf, buf as *u8, len)
|
||||
});
|
||||
vec::raw::set_len(v, len);
|
||||
vec::push(v, 0u8);
|
||||
v.push(0u8);
|
||||
|
||||
assert is_utf8(v);
|
||||
return ::cast::transmute(move v);
|
||||
@ -2067,7 +2066,7 @@ mod raw {
|
||||
ptr::memcpy(vbuf, src, end - begin);
|
||||
}
|
||||
vec::raw::set_len(v, end - begin);
|
||||
vec::push(v, 0u8);
|
||||
v.push(0u8);
|
||||
::cast::transmute(move v)
|
||||
}
|
||||
}
|
||||
|
@ -92,6 +92,8 @@ export CopyableVector;
|
||||
export ImmutableVector;
|
||||
export ImmutableEqVector;
|
||||
export ImmutableCopyableVector;
|
||||
export MutableVector;
|
||||
export MutableCopyableVector;
|
||||
export IterTraitExtensions;
|
||||
export vec_concat;
|
||||
export traits;
|
||||
@ -238,7 +240,7 @@ pure fn with_capacity<T>(capacity: uint) -> ~[T] {
|
||||
pure fn build_sized<A>(size: uint,
|
||||
builder: fn(push: pure fn(+v: A))) -> ~[A] {
|
||||
let mut vec = with_capacity(size);
|
||||
builder(|+x| unsafe { push(vec, move x) });
|
||||
builder(|+x| unsafe { vec.push(move x) });
|
||||
move vec
|
||||
}
|
||||
|
||||
@ -330,7 +332,7 @@ pure fn slice<T: Copy>(v: &[const T], start: uint, end: uint) -> ~[T] {
|
||||
assert (end <= len(v));
|
||||
let mut result = ~[];
|
||||
unsafe {
|
||||
for uint::range(start, end) |i| { vec::push(result, v[i]) }
|
||||
for uint::range(start, end) |i| { result.push(v[i]) }
|
||||
}
|
||||
move result
|
||||
}
|
||||
@ -383,14 +385,14 @@ fn split<T: Copy>(v: &[T], f: fn(T) -> bool) -> ~[~[T]] {
|
||||
let mut result = ~[];
|
||||
while start < ln {
|
||||
match position_between(v, start, ln, f) {
|
||||
None => break,
|
||||
Some(i) => {
|
||||
push(result, slice(v, start, i));
|
||||
start = i + 1u;
|
||||
}
|
||||
None => break,
|
||||
Some(i) => {
|
||||
result.push(slice(v, start, i));
|
||||
start = i + 1u;
|
||||
}
|
||||
}
|
||||
}
|
||||
push(result, slice(v, start, ln));
|
||||
result.push(slice(v, start, ln));
|
||||
move result
|
||||
}
|
||||
|
||||
@ -407,16 +409,16 @@ fn splitn<T: Copy>(v: &[T], n: uint, f: fn(T) -> bool) -> ~[~[T]] {
|
||||
let mut result = ~[];
|
||||
while start < ln && count > 0u {
|
||||
match position_between(v, start, ln, f) {
|
||||
None => break,
|
||||
Some(i) => {
|
||||
push(result, slice(v, start, i));
|
||||
// Make sure to skip the separator.
|
||||
start = i + 1u;
|
||||
count -= 1u;
|
||||
}
|
||||
None => break,
|
||||
Some(i) => {
|
||||
result.push(slice(v, start, i));
|
||||
// Make sure to skip the separator.
|
||||
start = i + 1u;
|
||||
count -= 1u;
|
||||
}
|
||||
}
|
||||
}
|
||||
push(result, slice(v, start, ln));
|
||||
result.push(slice(v, start, ln));
|
||||
move result
|
||||
}
|
||||
|
||||
@ -432,14 +434,14 @@ fn rsplit<T: Copy>(v: &[T], f: fn(T) -> bool) -> ~[~[T]] {
|
||||
let mut result = ~[];
|
||||
while end > 0u {
|
||||
match rposition_between(v, 0u, end, f) {
|
||||
None => break,
|
||||
Some(i) => {
|
||||
push(result, slice(v, i + 1u, end));
|
||||
end = i;
|
||||
}
|
||||
None => break,
|
||||
Some(i) => {
|
||||
result.push(slice(v, i + 1u, end));
|
||||
end = i;
|
||||
}
|
||||
}
|
||||
}
|
||||
push(result, slice(v, 0u, end));
|
||||
result.push(slice(v, 0u, end));
|
||||
reverse(result);
|
||||
return move result;
|
||||
}
|
||||
@ -457,16 +459,16 @@ fn rsplitn<T: Copy>(v: &[T], n: uint, f: fn(T) -> bool) -> ~[~[T]] {
|
||||
let mut result = ~[];
|
||||
while end > 0u && count > 0u {
|
||||
match rposition_between(v, 0u, end, f) {
|
||||
None => break,
|
||||
Some(i) => {
|
||||
push(result, slice(v, i + 1u, end));
|
||||
// Make sure to skip the separator.
|
||||
end = i;
|
||||
count -= 1u;
|
||||
}
|
||||
None => break,
|
||||
Some(i) => {
|
||||
result.push(slice(v, i + 1u, end));
|
||||
// Make sure to skip the separator.
|
||||
end = i;
|
||||
count -= 1u;
|
||||
}
|
||||
}
|
||||
}
|
||||
push(result, slice(v, 0u, end));
|
||||
result.push(slice(v, 0u, end));
|
||||
reverse(result);
|
||||
move result
|
||||
}
|
||||
@ -489,7 +491,7 @@ fn shift<T>(&v: ~[T]) -> T {
|
||||
|
||||
for uint::range(1, ln) |i| {
|
||||
let r <- *ptr::offset(vv, i);
|
||||
push(v, move r);
|
||||
v.push(move r);
|
||||
}
|
||||
}
|
||||
raw::set_len(vv, 0);
|
||||
@ -503,7 +505,7 @@ fn unshift<T>(&v: ~[T], +x: T) {
|
||||
let mut vv = ~[move x];
|
||||
v <-> vv;
|
||||
while len(vv) > 0 {
|
||||
push(v, shift(vv));
|
||||
v.push(shift(vv));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -568,9 +570,9 @@ fn swap_remove<T>(&v: ~[const T], index: uint) -> T {

 /// Append an element to a vector
 #[inline(always)]
-fn push<T>(&v: ~[T], +initval: T) {
+fn push<T>(v: &mut ~[T], +initval: T) {
     unsafe {
-        let repr: **raw::VecRepr = ::cast::reinterpret_cast(&addr_of(v));
+        let repr: **raw::VecRepr = ::cast::transmute(copy v);
         let fill = (**repr).unboxed.fill;
         if (**repr).unboxed.alloc > fill {
             push_fast(v, move initval);
@@ -583,8 +585,8 @@ fn push<T>(&v: ~[T], +initval: T) {

 // This doesn't bother to make sure we have space.
 #[inline(always)] // really pretty please
-unsafe fn push_fast<T>(&v: ~[T], +initval: T) {
-    let repr: **raw::VecRepr = ::cast::reinterpret_cast(&addr_of(v));
+unsafe fn push_fast<T>(+v: &mut ~[T], +initval: T) {
+    let repr: **raw::VecRepr = ::cast::transmute(v);
     let fill = (**repr).unboxed.fill;
     (**repr).unboxed.fill += sys::size_of::<T>();
     let p = ptr::addr_of((**repr).unboxed.data);
@@ -593,14 +595,14 @@ unsafe fn push_fast<T>(&v: ~[T], +initval: T) {
 }

 #[inline(never)]
-fn push_slow<T>(&v: ~[T], +initval: T) {
-    reserve_at_least(&mut v, v.len() + 1u);
+fn push_slow<T>(+v: &mut ~[T], +initval: T) {
+    reserve_at_least(v, v.len() + 1u);
     unsafe { push_fast(v, move initval) }
 }

 #[inline(always)]
-fn push_all<T: Copy>(&v: ~[T], rhs: &[const T]) {
-    reserve(&mut v, v.len() + rhs.len());
+fn push_all<T: Copy>(+v: &mut ~[T], rhs: &[const T]) {
+    reserve(v, v.len() + rhs.len());

     for uint::range(0u, rhs.len()) |i| {
         push(v, unsafe { raw::get(rhs, i) })
@@ -608,8 +610,8 @@ fn push_all<T: Copy>(&v: ~[T], rhs: &[const T]) {
 }

 #[inline(always)]
-fn push_all_move<T>(&v: ~[T], -rhs: ~[const T]) {
-    reserve(&mut v, v.len() + rhs.len());
+fn push_all_move<T>(v: &mut ~[T], -rhs: ~[const T]) {
+    reserve(v, v.len() + rhs.len());
     unsafe {
         do as_imm_buf(rhs) |p, len| {
             for uint::range(0, len) |i| {
@ -675,7 +677,7 @@ fn dedup<T: Eq>(&v: ~[const T]) unsafe {
|
||||
pure fn append<T: Copy>(+lhs: ~[T], rhs: &[const T]) -> ~[T] {
|
||||
let mut v <- lhs;
|
||||
unsafe {
|
||||
push_all(v, rhs);
|
||||
v.push_all(rhs);
|
||||
}
|
||||
move v
|
||||
}
|
||||
@ -683,7 +685,7 @@ pure fn append<T: Copy>(+lhs: ~[T], rhs: &[const T]) -> ~[T] {
|
||||
#[inline(always)]
|
||||
pure fn append_one<T>(+lhs: ~[T], +x: T) -> ~[T] {
|
||||
let mut v <- lhs;
|
||||
unsafe { push(v, move x); }
|
||||
unsafe { v.push(move x); }
|
||||
move v
|
||||
}
|
||||
|
||||
@ -705,7 +707,10 @@ fn grow<T: Copy>(&v: ~[T], n: uint, initval: T) {
|
||||
reserve_at_least(&mut v, len(v) + n);
|
||||
let mut i: uint = 0u;
|
||||
|
||||
while i < n { push(v, initval); i += 1u; }
|
||||
while i < n {
|
||||
v.push(initval);
|
||||
i += 1u;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -724,7 +729,10 @@ fn grow<T: Copy>(&v: ~[T], n: uint, initval: T) {
|
||||
fn grow_fn<T>(&v: ~[T], n: uint, op: iter::InitOp<T>) {
|
||||
reserve_at_least(&mut v, len(v) + n);
|
||||
let mut i: uint = 0u;
|
||||
while i < n { push(v, op(i)); i += 1u; }
|
||||
while i < n {
|
||||
v.push(op(i));
|
||||
i += 1u;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@ -745,14 +753,18 @@ fn grow_set<T: Copy>(&v: ~[T], index: uint, initval: T, val: T) {
|
||||
/// Apply a function to each element of a vector and return the results
|
||||
pure fn map<T, U>(v: &[T], f: fn(v: &T) -> U) -> ~[U] {
|
||||
let mut result = with_capacity(len(v));
|
||||
for each(v) |elem| { unsafe { push(result, f(elem)); } }
|
||||
for each(v) |elem| {
|
||||
unsafe {
|
||||
result.push(f(elem));
|
||||
}
|
||||
}
|
||||
move result
|
||||
}
|
||||
|
||||
fn map_consume<T, U>(+v: ~[T], f: fn(+v: T) -> U) -> ~[U] {
|
||||
let mut result = ~[];
|
||||
do consume(move v) |_i, x| {
|
||||
vec::push(result, f(move x));
|
||||
result.push(f(move x));
|
||||
}
|
||||
move result
|
||||
}
|
||||
@ -772,7 +784,7 @@ pure fn mapi<T, U>(v: &[T], f: fn(uint, v: &T) -> U) -> ~[U] {
|
||||
*/
|
||||
pure fn flat_map<T, U>(v: &[T], f: fn(T) -> ~[U]) -> ~[U] {
|
||||
let mut result = ~[];
|
||||
for each(v) |elem| { unsafe{ push_all_move(result, f(*elem)); } }
|
||||
for each(v) |elem| { unsafe{ result.push_all_move(f(*elem)); } }
|
||||
move result
|
||||
}
|
||||
|
||||
@ -784,7 +796,7 @@ pure fn map2<T: Copy, U: Copy, V>(v0: &[T], v1: &[U],
|
||||
let mut u: ~[V] = ~[];
|
||||
let mut i = 0u;
|
||||
while i < v0_len {
|
||||
unsafe { push(u, f(copy v0[i], copy v1[i])) };
|
||||
unsafe { u.push(f(copy v0[i], copy v1[i])) };
|
||||
i += 1u;
|
||||
}
|
||||
move u
|
||||
@ -802,7 +814,7 @@ pure fn filter_map<T, U: Copy>(v: &[T], f: fn(T) -> Option<U>)
|
||||
for each(v) |elem| {
|
||||
match f(*elem) {
|
||||
None => {/* no-op */ }
|
||||
Some(result_elem) => unsafe { push(result, result_elem); }
|
||||
Some(result_elem) => unsafe { result.push(result_elem); }
|
||||
}
|
||||
}
|
||||
move result
|
||||
@ -818,7 +830,7 @@ pure fn filter_map<T, U: Copy>(v: &[T], f: fn(T) -> Option<U>)
|
||||
pure fn filter<T: Copy>(v: &[T], f: fn(T) -> bool) -> ~[T] {
|
||||
let mut result = ~[];
|
||||
for each(v) |elem| {
|
||||
if f(*elem) { unsafe { push(result, *elem); } }
|
||||
if f(*elem) { unsafe { result.push(*elem); } }
|
||||
}
|
||||
move result
|
||||
}
|
||||
@ -830,7 +842,7 @@ pure fn filter<T: Copy>(v: &[T], f: fn(T) -> bool) -> ~[T] {
|
||||
*/
|
||||
pure fn concat<T: Copy>(v: &[~[T]]) -> ~[T] {
|
||||
let mut r = ~[];
|
||||
for each(v) |inner| { unsafe { push_all(r, *inner); } }
|
||||
for each(v) |inner| { unsafe { r.push_all(*inner); } }
|
||||
move r
|
||||
}
|
||||
|
||||
@ -839,8 +851,8 @@ pure fn connect<T: Copy>(v: &[~[T]], sep: T) -> ~[T] {
|
||||
let mut r: ~[T] = ~[];
|
||||
let mut first = true;
|
||||
for each(v) |inner| {
|
||||
if first { first = false; } else { unsafe { push(r, sep); } }
|
||||
unsafe { push_all(r, *inner) };
|
||||
if first { first = false; } else { unsafe { r.push(sep); } }
|
||||
unsafe { r.push_all(*inner) };
|
||||
}
|
||||
move r
|
||||
}
|
||||
@ -1059,15 +1071,15 @@ pure fn rposition_between<T>(v: &[T], start: uint, end: uint,
|
||||
* Convert a vector of pairs into a pair of vectors, by reference. As unzip().
|
||||
*/
|
||||
pure fn unzip_slice<T: Copy, U: Copy>(v: &[(T, U)]) -> (~[T], ~[U]) {
|
||||
let mut as_ = ~[], bs = ~[];
|
||||
let mut ts = ~[], us = ~[];
|
||||
for each(v) |p| {
|
||||
let (a, b) = *p;
|
||||
let (t, u) = *p;
|
||||
unsafe {
|
||||
vec::push(as_, a);
|
||||
vec::push(bs, b);
|
||||
ts.push(t);
|
||||
us.push(u);
|
||||
}
|
||||
}
|
||||
return (move as_, move bs);
|
||||
return (move ts, move us);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -1082,9 +1094,9 @@ pure fn unzip<T,U>(+v: ~[(T, U)]) -> (~[T], ~[U]) {
|
||||
let mut ts = ~[], us = ~[];
|
||||
unsafe {
|
||||
do consume(move v) |_i, p| {
|
||||
let (a,b) = move p;
|
||||
push(ts, move a);
|
||||
push(us, move b);
|
||||
let (t, u) = move p;
|
||||
ts.push(move t);
|
||||
us.push(move u);
|
||||
}
|
||||
}
|
||||
(move ts, move us)
|
||||
@ -1099,7 +1111,7 @@ pure fn zip_slice<T: Copy, U: Copy>(v: &[const T], u: &[const U])
|
||||
let sz = len(v);
|
||||
let mut i = 0u;
|
||||
assert sz == len(u);
|
||||
while i < sz unsafe { vec::push(zipped, (v[i], u[i])); i += 1u; }
|
||||
while i < sz unsafe { zipped.push((v[i], u[i])); i += 1u; }
|
||||
move zipped
|
||||
}
|
||||
|
||||
@ -1114,7 +1126,7 @@ pure fn zip<T, U>(+v: ~[const T], +u: ~[const U]) -> ~[(T, U)] {
|
||||
assert i == len(u);
|
||||
let mut w = with_capacity(i);
|
||||
while i > 0 {
|
||||
unsafe { push(w, (pop(v),pop(u))); }
|
||||
unsafe { w.push((pop(v),pop(u))); }
|
||||
i -= 1;
|
||||
}
|
||||
unsafe { reverse(w); }
|
||||
@ -1147,8 +1159,8 @@ pure fn reversed<T: Copy>(v: &[const T]) -> ~[T] {
|
||||
let mut i = len::<T>(v);
|
||||
if i == 0 { return (move rs); } else { i -= 1; }
|
||||
unsafe {
|
||||
while i != 0 { vec::push(rs, v[i]); i -= 1; }
|
||||
vec::push(rs, v[0]);
|
||||
while i != 0 { rs.push(v[i]); i -= 1; }
|
||||
rs.push(v[0]);
|
||||
}
|
||||
move rs
|
||||
}
|
||||
@ -1283,7 +1295,7 @@ pure fn permute<T: Copy>(v: &[const T], put: fn(~[T])) {
|
||||
let elt = v[i];
|
||||
let mut rest = slice(v, 0u, i);
|
||||
unsafe {
|
||||
push_all(rest, const_view(v, i+1u, ln));
|
||||
rest.push_all(const_view(v, i+1u, ln));
|
||||
permute(rest, |permutation| {
|
||||
put(append(~[elt], permutation))
|
||||
})
|
||||
@ -1299,7 +1311,7 @@ pure fn windowed<TT: Copy>(nn: uint, xx: &[TT]) -> ~[~[TT]] {
|
||||
for vec::eachi (xx) |ii, _x| {
|
||||
let len = vec::len(xx);
|
||||
if ii+nn <= len unsafe {
|
||||
vec::push(ww, vec::slice(xx, ii, ii+nn));
|
||||
ww.push(vec::slice(xx, ii, ii+nn));
|
||||
}
|
||||
}
|
||||
move ww
|
||||
@ -1551,7 +1563,7 @@ impl<T> &[T]: ImmutableVector<T> {
|
||||
let mut r = ~[];
|
||||
let mut i = 0;
|
||||
while i < self.len() {
|
||||
push(r, f(&self[i]));
|
||||
r.push(f(&self[i]));
|
||||
i += 1;
|
||||
}
|
||||
move r
|
||||
@@ -1637,6 +1649,31 @@ impl<T: Copy> &[T]: ImmutableCopyableVector<T> {
     pure fn rfind(f: fn(T) -> bool) -> Option<T> { rfind(self, f) }
 }

+trait MutableVector<T> {
+    fn push(&mut self, +t: T);
+    fn push_all_move(&mut self, -rhs: ~[const T]);
+}
+
+trait MutableCopyableVector<T: Copy> {
+    fn push_all(&mut self, rhs: &[const T]);
+}
+
+impl<T> ~[T]: MutableVector<T> {
+    fn push(&mut self, +t: T) {
+        push(self, move t);
+    }
+
+    fn push_all_move(&mut self, -rhs: ~[const T]) {
+        push_all_move(self, move rhs);
+    }
+}
+
+impl<T: Copy> ~[T]: MutableCopyableVector<T> {
+    fn push_all(&mut self, rhs: &[const T]) {
+        push_all(self, rhs);
+    }
+}
+
 /// Unsafe operations
 mod raw {
     #[legacy_exports];
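The hunk above is the core of the conversion: `MutableVector` and `MutableCopyableVector` are thin wrapper traits whose methods take `&mut self` and forward to the existing free functions, which is what lets every call site in this commit switch to method syntax. A rough modern-Rust sketch of that wrapping technique — `push_free` and `push_item` are invented names for illustration, not standard-library items:

```rust
// Hypothetical free function standing in for the demoded `vec::push`.
fn push_free<T>(v: &mut Vec<T>, value: T) {
    v.push(value);
}

// A trait that re-exposes the free function as a method, mirroring the
// `MutableVector` trait added in the hunk above.
trait MutableVector<T> {
    fn push_item(&mut self, value: T);
}

impl<T> MutableVector<T> for Vec<T> {
    fn push_item(&mut self, value: T) {
        push_free(self, value); // the method body just forwards
    }
}

fn main() {
    let mut v = Vec::new();
    v.push_item(10);
    v.push_item(20);
    assert_eq!(v, vec![10, 20]);
}
```

Forwarding keeps a single implementation of the push logic while exposing the more ergonomic method call at every use site.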
@@ -2109,12 +2146,12 @@ mod tests {
     fn test_push() {
         // Test on-stack push().
         let mut v = ~[];
-        push(v, 1);
+        v.push(1);
         assert (len(v) == 1u);
         assert (v[0] == 1);

         // Test on-heap push().
-        push(v, 2);
+        v.push(2);
         assert (len(v) == 2u);
         assert (v[0] == 1);
         assert (v[1] == 2);
@ -2380,19 +2417,19 @@ mod tests {
|
||||
let mut results: ~[~[int]];
|
||||
|
||||
results = ~[];
|
||||
permute(~[], |v| vec::push(results, copy v));
|
||||
permute(~[], |v| results.push(copy v));
|
||||
assert results == ~[~[]];
|
||||
|
||||
results = ~[];
|
||||
permute(~[7], |v| push(results, copy v));
|
||||
permute(~[7], |v| results.push(copy v));
|
||||
assert results == ~[~[7]];
|
||||
|
||||
results = ~[];
|
||||
permute(~[1,1], |v| push(results, copy v));
|
||||
permute(~[1,1], |v| results.push(copy v));
|
||||
assert results == ~[~[1,1],~[1,1]];
|
||||
|
||||
results = ~[];
|
||||
permute(~[5,2,0], |v| push(results, copy v));
|
||||
permute(~[5,2,0], |v| results.push(copy v));
|
||||
assert results ==
|
||||
~[~[5,2,0],~[5,0,2],~[2,5,0],~[2,0,5],~[0,5,2],~[0,2,5]];
|
||||
}
|
||||
|
@ -648,7 +648,7 @@ mod tests {
|
||||
let mut children = ~[];
|
||||
for 5.times {
|
||||
let arc3 = ~arc.clone();
|
||||
do task::task().future_result(|+r| vec::push(children, r)).spawn {
|
||||
do task::task().future_result(|+r| children.push(r)).spawn {
|
||||
do arc3.read |num| {
|
||||
assert *num >= 0;
|
||||
}
|
||||
@ -676,7 +676,7 @@ mod tests {
|
||||
let mut reader_convos = ~[];
|
||||
for 10.times {
|
||||
let ((rc1,rp1),(rc2,rp2)) = (pipes::stream(),pipes::stream());
|
||||
vec::push(reader_convos, (rc1,rp2));
|
||||
reader_convos.push((rc1,rp2));
|
||||
let arcn = ~arc.clone();
|
||||
do task::spawn {
|
||||
rp1.recv(); // wait for downgrader to give go-ahead
|
||||
|
@ -102,12 +102,12 @@ impl ~[u8]: FromBase64 {
|
||||
} else if ch == '=' {
|
||||
match len - i {
|
||||
1u => {
|
||||
vec::push(r, ((n >> 16u) & 0xFFu) as u8);
|
||||
vec::push(r, ((n >> 8u ) & 0xFFu) as u8);
|
||||
r.push(((n >> 16u) & 0xFFu) as u8);
|
||||
r.push(((n >> 8u ) & 0xFFu) as u8);
|
||||
return copy r;
|
||||
}
|
||||
2u => {
|
||||
vec::push(r, ((n >> 10u) & 0xFFu) as u8);
|
||||
r.push(((n >> 10u) & 0xFFu) as u8);
|
||||
return copy r;
|
||||
}
|
||||
_ => fail ~"invalid base64 padding"
|
||||
@ -119,9 +119,9 @@ impl ~[u8]: FromBase64 {
|
||||
i += 1u;
|
||||
};
|
||||
|
||||
vec::push(r, ((n >> 16u) & 0xFFu) as u8);
|
||||
vec::push(r, ((n >> 8u ) & 0xFFu) as u8);
|
||||
vec::push(r, ((n ) & 0xFFu) as u8);
|
||||
r.push(((n >> 16u) & 0xFFu) as u8);
|
||||
r.push(((n >> 8u ) & 0xFFu) as u8);
|
||||
r.push(((n ) & 0xFFu) as u8);
|
||||
}
|
||||
|
||||
r
|
||||
|
@ -38,8 +38,8 @@ fn create<T: Copy>() -> Deque<T> {
|
||||
let nalloc = uint::next_power_of_two(nelts + 1u);
|
||||
while i < nalloc {
|
||||
if i < nelts {
|
||||
vec::push(rv, elts[(lo + i) % nelts]);
|
||||
} else { vec::push(rv, None); }
|
||||
rv.push(elts[(lo + i) % nelts]);
|
||||
} else { rv.push(None); }
|
||||
i += 1u;
|
||||
}
|
||||
|
||||
|
@ -211,7 +211,7 @@ impl Writer {
|
||||
write_vuint(self.writer, tag_id);
|
||||
|
||||
// Write a placeholder four-byte size.
|
||||
vec::push(self.size_positions, self.writer.tell());
|
||||
self.size_positions.push(self.writer.tell());
|
||||
let zeroes: &[u8] = &[0u8, 0u8, 0u8, 0u8];
|
||||
self.writer.write(zeroes);
|
||||
}
|
||||
|
@ -220,7 +220,7 @@ impl Serializer {
|
||||
write_vuint(self.writer, tag_id);
|
||||
|
||||
// Write a placeholder four-byte size.
|
||||
vec::push(self.size_positions, self.writer.tell());
|
||||
self.size_positions.push(self.writer.tell());
|
||||
let zeroes: &[u8] = &[0u8, 0u8, 0u8, 0u8];
|
||||
self.writer.write(zeroes);
|
||||
}
|
||||
|
@ -234,10 +234,10 @@ fn getopts(args: &[~str], opts: &[Opt]) -> Result unsafe {
|
||||
let cur = args[i];
|
||||
let curlen = str::len(cur);
|
||||
if !is_arg(cur) {
|
||||
vec::push(free, cur);
|
||||
free.push(cur);
|
||||
} else if cur == ~"--" {
|
||||
let mut j = i + 1u;
|
||||
while j < l { vec::push(free, args[j]); j += 1u; }
|
||||
while j < l { free.push(args[j]); j += 1u; }
|
||||
break;
|
||||
} else {
|
||||
let mut names;
|
||||
@ -287,7 +287,7 @@ fn getopts(args: &[~str], opts: &[Opt]) -> Result unsafe {
|
||||
}
|
||||
}
|
||||
}
|
||||
vec::push(names, opt);
|
||||
names.push(opt);
|
||||
j = range.next;
|
||||
}
|
||||
}
|
||||
@ -303,23 +303,22 @@ fn getopts(args: &[~str], opts: &[Opt]) -> Result unsafe {
|
||||
if !i_arg.is_none() {
|
||||
return Err(UnexpectedArgument(name_str(nm)));
|
||||
}
|
||||
vec::push(vals[optid], Given);
|
||||
vals[optid].push(Given);
|
||||
}
|
||||
Maybe => {
|
||||
if !i_arg.is_none() {
|
||||
vec::push(vals[optid], Val(i_arg.get()));
|
||||
vals[optid].push(Val(i_arg.get()));
|
||||
} else if name_pos < vec::len::<Name>(names) ||
|
||||
i + 1u == l || is_arg(args[i + 1u]) {
|
||||
vec::push(vals[optid], Given);
|
||||
} else { i += 1u; vec::push(vals[optid], Val(args[i])); }
|
||||
vals[optid].push(Given);
|
||||
} else { i += 1u; vals[optid].push(Val(args[i])); }
|
||||
}
|
||||
Yes => {
|
||||
if !i_arg.is_none() {
|
||||
vec::push(vals[optid],
|
||||
Val(i_arg.get()));
|
||||
vals[optid].push(Val(i_arg.get()));
|
||||
} else if i + 1u == l {
|
||||
return Err(ArgumentMissing(name_str(nm)));
|
||||
} else { i += 1u; vec::push(vals[optid], Val(args[i])); }
|
||||
} else { i += 1u; vals[optid].push(Val(args[i])); }
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -412,7 +411,7 @@ fn opts_str(+mm: Matches, names: &[~str]) -> ~str {
|
||||
fn opt_strs(+mm: Matches, nm: &str) -> ~[~str] {
|
||||
let mut acc: ~[~str] = ~[];
|
||||
for vec::each(opt_vals(mm, nm)) |v| {
|
||||
match *v { Val(s) => vec::push(acc, s), _ => () }
|
||||
match *v { Val(s) => acc.push(s), _ => () }
|
||||
}
|
||||
return acc;
|
||||
}
|
||||
|
@ -594,7 +594,7 @@ priv impl Parser {
|
||||
|
||||
loop {
|
||||
match move self.parse_value() {
|
||||
Ok(move v) => vec::push(values, v),
|
||||
Ok(move v) => values.push(v),
|
||||
Err(move e) => return Err(e)
|
||||
}
|
||||
|
||||
@ -690,13 +690,13 @@ pub fn Deserializer(rdr: io::Reader) -> Result<Deserializer, Error> {
|
||||
}
|
||||
|
||||
priv impl Deserializer {
|
||||
fn peek() -> &self/Json {
|
||||
if self.stack.len() == 0 { vec::push(self.stack, &self.json); }
|
||||
fn peek(&self) -> &self/Json {
|
||||
if self.stack.len() == 0 { self.stack.push(&self.json); }
|
||||
vec::last(self.stack)
|
||||
}
|
||||
|
||||
fn pop() -> &self/Json {
|
||||
if self.stack.len() == 0 { vec::push(self.stack, &self.json); }
|
||||
fn pop(&self) -> &self/Json {
|
||||
if self.stack.len() == 0 { self.stack.push(&self.json); }
|
||||
vec::pop(self.stack)
|
||||
}
|
||||
}
|
||||
@ -772,7 +772,7 @@ pub impl Deserializer: serialization2::Deserializer {
|
||||
fn read_vec<T>(&self, f: fn(uint) -> T) -> T {
|
||||
debug!("read_vec()");
|
||||
let len = match *self.peek() {
|
||||
List(list) => list.len(),
|
||||
List(ref list) => list.len(),
|
||||
_ => fail ~"not a list",
|
||||
};
|
||||
let res = f(len);
|
||||
@ -784,7 +784,10 @@ pub impl Deserializer: serialization2::Deserializer {
|
||||
debug!("read_vec_elt(idx=%u)", idx);
|
||||
match *self.peek() {
|
||||
List(ref list) => {
|
||||
vec::push(self.stack, &list[idx]);
|
||||
// FIXME(#3148)---should be inferred
|
||||
let list: &self/~[Json] = list;
|
||||
|
||||
self.stack.push(&list[idx]);
|
||||
f()
|
||||
}
|
||||
_ => fail ~"not a list",
|
||||
@ -820,7 +823,7 @@ pub impl Deserializer: serialization2::Deserializer {
|
||||
match obj.find_ref(&f_name) {
|
||||
None => fail fmt!("no such field: %s", f_name),
|
||||
Some(json) => {
|
||||
vec::push(self.stack, json);
|
||||
self.stack.push(json);
|
||||
f()
|
||||
}
|
||||
}
|
||||
@ -845,8 +848,10 @@ pub impl Deserializer: serialization2::Deserializer {
|
||||
fn read_tup_elt<T>(&self, idx: uint, f: fn() -> T) -> T {
|
||||
debug!("read_tup_elt(idx=%u)", idx);
|
||||
match *self.peek() {
|
||||
List(list) => {
|
||||
vec::push(self.stack, &list[idx]);
|
||||
List(ref list) => {
|
||||
// FIXME(#3148)---should be inferred
|
||||
let list: &self/~[Json] = list;
|
||||
self.stack.push(&list[idx]);
|
||||
f()
|
||||
}
|
||||
_ => fail ~"not a list"
|
||||
@ -939,12 +944,12 @@ impl Json : Ord {
|
||||
|
||||
// XXX: this is horribly inefficient...
|
||||
for d0.each |k, v| {
|
||||
vec::push(d0_flat, (@copy *k, @copy *v));
|
||||
d0_flat.push((@copy *k, @copy *v));
|
||||
}
|
||||
d0_flat.qsort();
|
||||
|
||||
for d1.each |k, v| {
|
||||
vec::push(d1_flat, (@copy *k, @copy *v));
|
||||
d1_flat.push((@copy *k, @copy *v));
|
||||
}
|
||||
d1_flat.qsort();
|
||||
|
||||
|
@ -11,14 +11,14 @@ fn md4(msg: &[u8]) -> {a: u32, b: u32, c: u32, d: u32} {
|
||||
let mut msg = vec::append(vec::from_slice(msg), ~[0x80u8]);
|
||||
let mut bitlen = orig_len + 8u64;
|
||||
while (bitlen + 64u64) % 512u64 > 0u64 {
|
||||
vec::push(msg, 0u8);
|
||||
msg.push(0u8);
|
||||
bitlen += 8u64;
|
||||
}
|
||||
|
||||
// append length
|
||||
let mut i = 0u64;
|
||||
while i < 8u64 {
|
||||
vec::push(msg, (orig_len >> (i * 8u64)) as u8);
|
||||
msg.push((orig_len >> (i * 8u64)) as u8);
|
||||
i += 1u64;
|
||||
}
|
||||
|
||||
|
@ -277,7 +277,7 @@ extern fn get_addr_cb(handle: *uv_getaddrinfo_t, status: libc::c_int,
|
||||
result::Err(GetAddrUnknownError));
|
||||
break;
|
||||
};
|
||||
vec::push(out_vec, move new_ip_addr);
|
||||
out_vec.push(move new_ip_addr);
|
||||
|
||||
let next_addr = ll::get_next_addrinfo(curr_addr);
|
||||
if next_addr == ptr::null::<addrinfo>() as *addrinfo {
|
||||
|
@ -779,7 +779,7 @@ impl TcpSocketBuf: io::Reader {
|
||||
}
|
||||
}
|
||||
else {
|
||||
vec::push_all(self.data.buf, result::unwrap(read_result));
|
||||
self.data.buf.push_all(result::unwrap(read_result));
|
||||
}
|
||||
}
|
||||
|
||||
@ -790,7 +790,7 @@ impl TcpSocketBuf: io::Reader {
|
||||
|
||||
vec::bytes::memcpy(buf, vec::view(data, 0, data.len()), count);
|
||||
|
||||
vec::push_all(self.data.buf, vec::view(data, count, data.len()));
|
||||
self.data.buf.push_all(vec::view(data, count, data.len()));
|
||||
|
||||
count
|
||||
}
|
||||
|
@ -329,7 +329,7 @@ fn query_from_str(rawquery: &str) -> Query {
|
||||
if str::len(rawquery) != 0 {
|
||||
for str::split_char(rawquery, '&').each |p| {
|
||||
let (k, v) = split_char_first(*p, '=');
|
||||
vec::push(query, (decode_component(k), decode_component(v)));
|
||||
query.push((decode_component(k), decode_component(v)));
|
||||
};
|
||||
}
|
||||
return query;
|
||||
|
@ -55,7 +55,7 @@ fn map_slices<A: Copy Send, B: Copy Send>(
|
||||
f(base, slice)
|
||||
}
|
||||
};
|
||||
vec::push(futures, move f);
|
||||
futures.push(move f);
|
||||
};
|
||||
base += items_per_task;
|
||||
}
|
||||
|
@ -869,7 +869,7 @@ mod node {
|
||||
loop {
|
||||
match (leaf_iterator::next(&it)) {
|
||||
option::None => break,
|
||||
option::Some(x) => vec::push(forest, @Leaf(x))
|
||||
option::Some(x) => forest.push(@Leaf(x))
|
||||
}
|
||||
}
|
||||
//2. Rebuild tree from forest
|
||||
|
@ -47,9 +47,9 @@ fn merge_sort<T: Copy>(le: Le<T>, v: &[const T]) -> ~[T] {
|
||||
let mut b_ix = 0u;
|
||||
while a_ix < a_len && b_ix < b_len {
|
||||
if le(&a[a_ix], &b[b_ix]) {
|
||||
vec::push(rs, a[a_ix]);
|
||||
rs.push(a[a_ix]);
|
||||
a_ix += 1u;
|
||||
} else { vec::push(rs, b[b_ix]); b_ix += 1u; }
|
||||
} else { rs.push(b[b_ix]); b_ix += 1u; }
|
||||
}
|
||||
rs = vec::append(rs, vec::slice(a, a_ix, a_len));
|
||||
rs = vec::append(rs, vec::slice(b, b_ix, b_len));
|
||||
|
@ -82,7 +82,7 @@ fn new_sem_and_signal(count: int, num_condvars: uint)
|
||||
-> Sem<~[mut Waitqueue]> {
|
||||
let mut queues = ~[];
|
||||
for num_condvars.times {
|
||||
vec::push(queues, new_waitqueue());
|
||||
queues.push(new_waitqueue());
|
||||
}
|
||||
new_sem(count, vec::to_mut(move queues))
|
||||
}
|
||||
@ -840,7 +840,7 @@ mod tests {
|
||||
for num_waiters.times {
|
||||
let mi = ~m.clone();
|
||||
let (chan, port) = pipes::stream();
|
||||
vec::push(ports, port);
|
||||
ports.push(port);
|
||||
do task::spawn {
|
||||
do mi.lock_cond |cond| {
|
||||
chan.send(());
|
||||
@ -930,7 +930,7 @@ mod tests {
|
||||
for 2.times {
|
||||
let (c,p) = pipes::stream();
|
||||
let c = ~mut Some(c);
|
||||
vec::push(sibling_convos, p);
|
||||
sibling_convos.push(p);
|
||||
let mi = ~m2.clone();
|
||||
// spawn sibling task
|
||||
do task::spawn { // linked
|
||||
@ -1194,7 +1194,7 @@ mod tests {
|
||||
for num_waiters.times {
|
||||
let xi = ~x.clone();
|
||||
let (chan, port) = pipes::stream();
|
||||
vec::push(ports, port);
|
||||
ports.push(port);
|
||||
do task::spawn {
|
||||
do lock_cond(xi, dg1) |cond| {
|
||||
chan.send(());
|
||||
|
@ -141,7 +141,7 @@ fn run_tests_console(opts: &TestOpts,
|
||||
st.failed += 1u;
|
||||
write_failed(st.out, st.use_color);
|
||||
st.out.write_line(~"");
|
||||
vec::push(st.failures, copy test);
|
||||
st.failures.push(copy test);
|
||||
}
|
||||
TrIgnored => {
|
||||
st.ignored += 1u;
|
||||
@ -545,7 +545,7 @@ mod tests {
|
||||
for vec::each(names) |name| {
|
||||
let test = {name: *name, testfn: copy testfn, ignore: false,
|
||||
should_fail: false};
|
||||
vec::push(tests, test);
|
||||
tests.push(test);
|
||||
}
|
||||
tests
|
||||
};
|
||||
|
@ -273,9 +273,9 @@ fn map_item(i: @item, cx: ctx, v: vt) {
|
||||
}
|
||||
match i.node {
|
||||
item_mod(_) | item_foreign_mod(_) => {
|
||||
vec::push(cx.path, path_mod(i.ident));
|
||||
cx.path.push(path_mod(i.ident));
|
||||
}
|
||||
_ => vec::push(cx.path, path_name(i.ident))
|
||||
_ => cx.path.push(path_name(i.ident))
|
||||
}
|
||||
visit::visit_item(i, cx, v);
|
||||
vec::pop(cx.path);
|
||||
|
@ -313,8 +313,8 @@ fn split_trait_methods(trait_methods: ~[trait_method])
|
||||
let mut reqd = ~[], provd = ~[];
|
||||
for trait_methods.each |trt_method| {
|
||||
match *trt_method {
|
||||
required(tm) => vec::push(reqd, tm),
|
||||
provided(m) => vec::push(provd, m)
|
||||
required(tm) => reqd.push(tm),
|
||||
provided(m) => provd.push(m)
|
||||
}
|
||||
};
|
||||
(reqd, provd)
|
||||
|
@ -91,7 +91,7 @@ fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
|
||||
// Get the meta_items from inside a vector of attributes
|
||||
fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
|
||||
let mut mitems = ~[];
|
||||
for attrs.each |a| { vec::push(mitems, attr_meta(*a)); }
|
||||
for attrs.each |a| { mitems.push(attr_meta(*a)); }
|
||||
return mitems;
|
||||
}
|
||||
|
||||
|
@ -84,7 +84,7 @@ fn mk_substr_filename(cm: codemap, sp: span) -> ~str
|
||||
}
|
||||
|
||||
fn next_line(file: filemap, chpos: uint, byte_pos: uint) {
|
||||
vec::push(file.lines, {ch: chpos, byte: byte_pos + file.start_pos.byte});
|
||||
file.lines.push({ch: chpos, byte: byte_pos + file.start_pos.byte});
|
||||
}
|
||||
|
||||
type lookup_fn = pure fn(file_pos) -> uint;
|
||||
@ -204,7 +204,7 @@ fn span_to_lines(sp: span, cm: codemap::codemap) -> @file_lines {
|
||||
let hi = lookup_char_pos(cm, sp.hi);
|
||||
let mut lines = ~[];
|
||||
for uint::range(lo.line - 1u, hi.line as uint) |i| {
|
||||
vec::push(lines, i);
|
||||
lines.push(i);
|
||||
};
|
||||
return @{file: lo.file, lines: lines};
|
||||
}
|
||||
|
@ -750,7 +750,7 @@ fn mk_enum_deser_body(
|
||||
body: cx.expr_blk(cx.expr(span, ast::expr_fail(None))),
|
||||
};
|
||||
|
||||
vec::push(arms, impossible_case);
|
||||
arms.push(impossible_case);
|
||||
|
||||
// ast for `|i| { match i { $(arms) } }`
|
||||
let expr_lambda = cx.expr(
|
||||
|
@ -160,7 +160,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
|
||||
fn cfg() -> ast::crate_cfg { self.cfg }
|
||||
fn print_backtrace() { }
|
||||
fn backtrace() -> expn_info { self.backtrace }
|
||||
fn mod_push(i: ast::ident) { vec::push(self.mod_path, i); }
|
||||
fn mod_push(i: ast::ident) { self.mod_path.push(i); }
|
||||
fn mod_pop() { vec::pop(self.mod_path); }
|
||||
fn mod_path() -> ~[ast::ident] { return self.mod_path; }
|
||||
fn bt_push(ei: codemap::expn_info_) {
|
||||
|
@ -96,7 +96,7 @@ fn mk_rec_e(cx: ext_ctxt, sp: span,
|
||||
let val = field.ex;
|
||||
let astfield =
|
||||
{node: {mutbl: ast::m_imm, ident: ident, expr: val}, span: sp};
|
||||
vec::push(astfields, astfield);
|
||||
astfields.push(astfield);
|
||||
}
|
||||
let recexpr = ast::expr_rec(astfields, option::None::<@ast::expr>);
|
||||
mk_expr(cx, sp, recexpr)
|
||||
|
@ -245,7 +245,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
|
||||
for pieces.each |pc| {
|
||||
match *pc {
|
||||
PieceString(s) => {
|
||||
vec::push(piece_exprs, mk_uniq_str(cx, fmt_sp, s))
|
||||
piece_exprs.push(mk_uniq_str(cx, fmt_sp, s))
|
||||
}
|
||||
PieceConv(conv) => {
|
||||
n += 1u;
|
||||
@ -258,7 +258,7 @@ fn pieces_to_expr(cx: ext_ctxt, sp: span,
|
||||
log_conv(conv);
|
||||
let arg_expr = args[n];
|
||||
let c_expr = make_new_conv(cx, fmt_sp, conv, arg_expr);
|
||||
vec::push(piece_exprs, c_expr);
|
||||
piece_exprs.push(c_expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -65,7 +65,7 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
|
||||
let mut self_live = ~[];
|
||||
for colive.eachi |i, bv| {
|
||||
if bv.get(i) {
|
||||
vec::push(self_live, proto.get_state_by_id(i))
|
||||
self_live.push(proto.get_state_by_id(i))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -226,7 +226,7 @@ impl state: to_type_decls {
|
||||
|
||||
let v = cx.variant(cx.ident_of(name), span, tys);
|
||||
|
||||
vec::push(items_msg, v);
|
||||
items_msg.push(v);
|
||||
}
|
||||
|
||||
~[cx.item_enum_poly(name,
|
||||
@ -245,44 +245,44 @@ impl state: to_type_decls {
|
||||
let mut items = ~[];
|
||||
for self.messages.each |m| {
|
||||
if dir == send {
|
||||
vec::push(items, m.gen_send(cx, true));
|
||||
vec::push(items, m.gen_send(cx, false));
|
||||
items.push(m.gen_send(cx, true));
|
||||
items.push(m.gen_send(cx, false));
|
||||
}
|
||||
}
|
||||
|
||||
if !self.proto.is_bounded() {
|
||||
vec::push(items,
|
||||
cx.item_ty_poly(
|
||||
self.data_name(),
|
||||
self.span,
|
||||
cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(~"pipes"),
|
||||
cx.ident_of(dir.to_str() + ~"Packet")],
|
||||
empty_span())
|
||||
.add_ty(cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(self.proto.name),
|
||||
self.data_name()],
|
||||
empty_span())
|
||||
.add_tys(cx.ty_vars(self.ty_params))))),
|
||||
self.ty_params));
|
||||
items.push(
|
||||
cx.item_ty_poly(
|
||||
self.data_name(),
|
||||
self.span,
|
||||
cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(~"pipes"),
|
||||
cx.ident_of(dir.to_str() + ~"Packet")],
|
||||
empty_span())
|
||||
.add_ty(cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(self.proto.name),
|
||||
self.data_name()],
|
||||
empty_span())
|
||||
.add_tys(cx.ty_vars(self.ty_params))))),
|
||||
self.ty_params));
|
||||
}
|
||||
else {
|
||||
vec::push(items,
|
||||
cx.item_ty_poly(
|
||||
self.data_name(),
|
||||
self.span,
|
||||
cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(~"pipes"),
|
||||
cx.ident_of(dir.to_str()
|
||||
+ ~"PacketBuffered")],
|
||||
empty_span())
|
||||
.add_tys(~[cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(self.proto.name),
|
||||
self.data_name()],
|
||||
empty_span())
|
||||
.add_tys(cx.ty_vars(self.ty_params))),
|
||||
self.proto.buffer_ty_path(cx)])),
|
||||
self.ty_params));
|
||||
items.push(
|
||||
cx.item_ty_poly(
|
||||
self.data_name(),
|
||||
self.span,
|
||||
cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(~"pipes"),
|
||||
cx.ident_of(dir.to_str()
|
||||
+ ~"PacketBuffered")],
|
||||
empty_span())
|
||||
.add_tys(~[cx.ty_path_ast_builder(
|
||||
path(~[cx.ident_of(self.proto.name),
|
||||
self.data_name()],
|
||||
empty_span())
|
||||
.add_tys(cx.ty_vars(self.ty_params))),
|
||||
self.proto.buffer_ty_path(cx)])),
|
||||
self.ty_params));
|
||||
};
|
||||
items
|
||||
}
|
||||
@ -367,7 +367,7 @@ impl protocol: gen_init {
|
||||
for (copy self.states).each |s| {
|
||||
for s.ty_params.each |tp| {
|
||||
match params.find(|tpp| tp.ident == tpp.ident) {
|
||||
None => vec::push(params, *tp),
|
||||
None => params.push(*tp),
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
@ -383,7 +383,7 @@ impl protocol: gen_init {
|
||||
let fields = do (copy self.states).map_to_vec |s| {
|
||||
for s.ty_params.each |tp| {
|
||||
match params.find(|tpp| tp.ident == tpp.ident) {
|
||||
None => vec::push(params, *tp),
|
||||
None => params.push(*tp),
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
@ -415,17 +415,15 @@ impl protocol: gen_init {
|
||||
}
|
||||
|
||||
if self.is_bounded() {
|
||||
vec::push(items, self.gen_buffer_type(cx))
|
||||
items.push(self.gen_buffer_type(cx))
|
||||
}
|
||||
|
||||
vec::push(items,
|
||||
cx.item_mod(cx.ident_of(~"client"),
|
||||
self.span,
|
||||
client_states));
|
||||
vec::push(items,
|
||||
cx.item_mod(cx.ident_of(~"server"),
|
||||
self.span,
|
||||
server_states));
|
||||
items.push(cx.item_mod(cx.ident_of(~"client"),
|
||||
self.span,
|
||||
client_states));
|
||||
items.push(cx.item_mod(cx.ident_of(~"server"),
|
||||
self.span,
|
||||
server_states));
|
||||
|
||||
cx.item_mod(cx.ident_of(self.name), self.span, items)
|
||||
}
|
||||
|
@ -94,7 +94,7 @@ fn option_flatten_map<T: Copy, U: Copy>(f: fn@(T) -> Option<U>, v: ~[T]) ->
for v.each |elem| {
match f(*elem) {
None => return None,
Some(fv) => vec::push(res, fv)
Some(fv) => res.push(fv)
}
}
return Some(res);
@ -305,8 +305,8 @@ fn transcribe_exprs(cx: ext_ctxt, b: bindings, idx_path: @mut ~[uint],
/* Whew, we now know how how many times to repeat */
let mut idx: uint = 0u;
while idx < rc {
vec::push(*idx_path, idx);
vec::push(res, recur(repeat_me)); // whew!
idx_path.push(idx);
res.push(recur(repeat_me)); // whew!
vec::pop(*idx_path);
idx += 1u;
}
@ -567,7 +567,7 @@ fn p_t_s_r_ellipses(cx: ext_ctxt, repeat_me: @expr, offset: uint, s: selector,
let mut elts = ~[];
let mut idx = offset;
while idx < vec::len(arg_elts) {
vec::push(elts, leaf(match_expr(arg_elts[idx])));
elts.push(leaf(match_expr(arg_elts[idx])));
idx += 1u;
}

@ -672,9 +672,8 @@ fn add_new_extension(cx: ext_ctxt, sp: span, arg: ast::mac_arg,
None => cx.span_fatal(mac.span,
~"macro must have arguments")
};
vec::push(clauses,
@{params: pattern_to_selectors(cx, arg),
body: elts[1u]});
clauses.push(@{params: pattern_to_selectors(cx, arg),
body: elts[1u]});

// FIXME (#2251): check duplicates (or just simplify
// the macro arg situation)

@ -208,7 +208,7 @@ fn parse_or_else(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader,
fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
-> parse_result {
let mut cur_eis = ~[];
vec::push(cur_eis, initial_matcher_pos(ms, None, rdr.peek().sp.lo));
cur_eis.push(initial_matcher_pos(ms, None, rdr.peek().sp.lo));

loop {
let mut bb_eis = ~[]; // black-box parsed by parser.rs
@ -256,7 +256,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
}

new_pos.idx += 1;
vec::push(cur_eis, move new_pos);
cur_eis.push(move new_pos);
}

// can we go around again?
@ -267,17 +267,17 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
if tok == t { //pass the separator
let ei_t <- ei;
ei_t.idx += 1;
vec::push(next_eis, move ei_t);
next_eis.push(move ei_t);
}
}
_ => { // we don't need a separator
let ei_t <- ei;
ei_t.idx = 0;
vec::push(cur_eis, move ei_t);
cur_eis.push(move ei_t);
}
}
} else {
vec::push(eof_eis, move ei);
eof_eis.push(move ei);
}
} else {
match copy ei.elts[idx].node {
@ -292,13 +292,13 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
new_ei.matches[idx].push(@matched_seq(~[], sp));
}

vec::push(cur_eis, move new_ei);
cur_eis.push(move new_ei);
}

let matches = vec::map(ei.matches, // fresh, same size:
|_m| DVec::<@named_match>());
let ei_t <- ei;
vec::push(cur_eis, ~{
cur_eis.push(~{
elts: matchers, sep: sep, mut idx: 0u,
mut up: matcher_pos_up(Some(move ei_t)),
matches: move matches,
@ -306,12 +306,12 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
sp_lo: sp.lo
});
}
match_nonterminal(_,_,_) => { vec::push(bb_eis, move ei) }
match_nonterminal(_,_,_) => { bb_eis.push(move ei) }
match_tok(t) => {
let ei_t <- ei;
if t == tok {
ei_t.idx += 1;
vec::push(next_eis, move ei_t);
next_eis.push(move ei_t);
}
}
}
@ -323,7 +323,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
if eof_eis.len() == 1u {
return success(
nameize(sess, ms,
vec::map(eof_eis[0u].matches, |dv| dv.pop())));
eof_eis[0u].matches.map(|dv| dv.pop())));
} else if eof_eis.len() > 1u {
return error(sp, ~"Ambiguity: multiple successful parses");
} else {
@ -350,7 +350,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
} else if (next_eis.len() > 0u) {
/* Now process the next token */
while(next_eis.len() > 0u) {
vec::push(cur_eis, vec::pop(next_eis));
cur_eis.push(vec::pop(next_eis));
}
rdr.next_token();
} else /* bb_eis.len() == 1 */ {
@ -365,7 +365,7 @@ fn parse(sess: parse_sess, cfg: ast::crate_cfg, rdr: reader, ms: ~[matcher])
}
_ => fail
}
vec::push(cur_eis, move ei);
cur_eis.push(move ei);

/* this would fail if zero-length tokens existed */
while rdr.peek().sp.lo < rust_parser.span.lo {

@ -205,8 +205,8 @@ fn tt_next_token(&&r: tt_reader) -> {tok: token, sp: span} {
r.cur.idx += 1u;
return tt_next_token(r);
} else {
vec::push(r.repeat_len, len);
vec::push(r.repeat_idx, 0u);
r.repeat_len.push(len);
r.repeat_idx.push(0u);
r.cur = @{readme: tts, mut idx: 0u, dotdotdoted: true,
sep: sep, up: tt_frame_up(option::Some(r.cur))};
}

@ -367,9 +367,8 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
pat_rec(fields, etc) => {
let mut fs = ~[];
for fields.each |f| {
vec::push(fs,
{ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
fs.push({ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
}
pat_rec(fs, etc)
}
@ -377,9 +376,8 @@ fn noop_fold_pat(p: pat_, fld: ast_fold) -> pat_ {
let pth_ = fld.fold_path(pth);
let mut fs = ~[];
for fields.each |f| {
vec::push(fs,
{ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
fs.push({ident: /* FIXME (#2543) */ copy f.ident,
pat: fld.fold_pat(f.pat)});
}
pat_struct(pth_, fs, etc)
}

@ -130,7 +130,7 @@ fn consume_non_eol_whitespace(rdr: string_reader) {
fn push_blank_line_comment(rdr: string_reader, &comments: ~[cmnt]) {
debug!(">>> blank-line comment");
let v: ~[~str] = ~[];
vec::push(comments, {style: blank_line, lines: v, pos: rdr.chpos});
comments.push({style: blank_line, lines: v, pos: rdr.chpos});
}

fn consume_whitespace_counting_blank_lines(rdr: string_reader,
@ -149,7 +149,7 @@ fn read_shebang_comment(rdr: string_reader, code_to_the_left: bool,
debug!(">>> shebang comment");
let p = rdr.chpos;
debug!("<<< shebang comment");
vec::push(comments, {
comments.push({
style: if code_to_the_left { trailing } else { isolated },
lines: ~[read_one_line_comment(rdr)],
pos: p
@ -167,12 +167,12 @@ fn read_line_comments(rdr: string_reader, code_to_the_left: bool,
if is_doc_comment(line) { // doc-comments are not put in comments
break;
}
vec::push(lines, line);
lines.push(line);
consume_non_eol_whitespace(rdr);
}
debug!("<<< line comments");
if !lines.is_empty() {
vec::push(comments, {
comments.push({
style: if code_to_the_left { trailing } else { isolated },
lines: lines,
pos: p
@ -198,7 +198,7 @@ fn trim_whitespace_prefix_and_push_line(&lines: ~[~str],
} else { s1 = ~""; }
} else { s1 = s; }
log(debug, ~"pushing line: " + s1);
vec::push(lines, s1);
lines.push(s1);
}

fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
@ -257,7 +257,7 @@ fn read_block_comment(rdr: string_reader, code_to_the_left: bool,
style = mixed;
}
debug!("<<< block comment");
vec::push(comments, {style: style, lines: lines, pos: p});
comments.push({style: style, lines: lines, pos: p});
}

fn peeking_at_comment(rdr: string_reader) -> bool {
@ -315,7 +315,7 @@ fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
let {tok: tok, sp: sp} = rdr.peek();
if token::is_lit(tok) {
let s = get_str_from(rdr, bstart);
vec::push(literals, {lit: s, pos: sp.lo});
literals.push({lit: s, pos: sp.lo});
log(debug, ~"tok lit: " + s);
} else {
log(debug, ~"tok: " + token::to_str(rdr.interner, tok));

@ -229,7 +229,7 @@ impl parser: parser_common {
}
_ => ()
}
vec::push(v, f(self));
v.push(f(self));
}

return v;
@ -274,7 +274,7 @@ impl parser: parser_common {
_ => ()
}
if sep.trailing_sep_allowed && self.token == ket { break; }
vec::push(v, f(self));
v.push(f(self));
}
return v;
}

@ -107,7 +107,7 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &Path,
// Thread defids, chpos and byte_pos through the parsers
cx.sess.chpos = r0.chpos;
cx.sess.byte_pos = cx.sess.byte_pos + r0.pos;
vec::push(items, i);
items.push(i);
}
ast::cdir_dir_mod(vis, id, cdirs, attrs) => {
let path = Path(cdir_path_opt(*cx.sess.interner.get(id), attrs));
@ -126,9 +126,9 @@ fn eval_crate_directive(cx: ctx, cdir: @ast::crate_directive, prefix: &Path,
vis: vis,
span: cdir.span};
cx.sess.next_id += 1;
vec::push(items, i);
items.push(i);
}
ast::cdir_view_item(vi) => vec::push(view_items, vi),
ast::cdir_view_item(vi) => view_items.push(vi),
ast::cdir_syntax(*) => ()
}
}

@ -496,7 +496,7 @@ impl parser {
let mut ts = ~[self.parse_ty(false)];
while self.token == token::COMMA {
self.bump();
vec::push(ts, self.parse_ty(false));
ts.push(self.parse_ty(false));
}
let t = if vec::len(ts) == 1u { ts[0].node }
else { ty_tup(ts) };
@ -771,10 +771,10 @@ impl parser {
&& self.look_ahead(1u) == token::MOD_SEP;

if is_not_last {
vec::push(ids, parse_ident(self));
ids.push(parse_ident(self));
self.expect(token::MOD_SEP);
} else {
vec::push(ids, parse_last_ident(self));
ids.push(parse_last_ident(self));
break;
}
}
@ -903,7 +903,7 @@ impl parser {
}
let mut es = ~[self.parse_expr()];
while self.token == token::COMMA {
self.bump(); vec::push(es, self.parse_expr());
self.bump(); es.push(self.parse_expr());
}
hi = self.span.hi;
self.expect(token::RPAREN);
@ -1049,7 +1049,7 @@ impl parser {
self.bump();
let mut fields = ~[];
let mut base = None;
vec::push(fields, self.parse_field(token::COLON));
fields.push(self.parse_field(token::COLON));
while self.token != token::RBRACE {

if self.try_parse_obsolete_with() {
@ -1067,7 +1067,7 @@ impl parser {
// Accept an optional trailing comma.
break;
}
vec::push(fields, self.parse_field(token::COLON));
fields.push(self.parse_field(token::COLON));
}

hi = pth.span.hi;
@ -1316,7 +1316,7 @@ impl parser {
while self.token != ket || lparens > 0u {
if self.token == token::LPAREN { lparens += 1u; }
if self.token == token::RPAREN { lparens -= 1u; }
vec::push(ret_val, self.parse_matcher(name_idx));
ret_val.push(self.parse_matcher(name_idx));
}

self.bump();
@ -1722,7 +1722,7 @@ impl parser {
// record ends by an optional trailing comma
break;
}
vec::push(fields, self.parse_field(token::COLON));
fields.push(self.parse_field(token::COLON));
}
self.expect(token::RBRACE);
return expr_rec(fields, base);
@ -1757,7 +1757,7 @@ impl parser {
rules: default_blk},
span: expr.span};

vec::push(arms, {pats: pats, guard: guard, body: blk});
arms.push({pats: pats, guard: guard, body: blk});
}
let mut hi = self.span.hi;
self.bump();
@ -1802,7 +1802,7 @@ impl parser {
fn parse_pats() -> ~[@pat] {
let mut pats = ~[];
loop {
vec::push(pats, self.parse_pat(true));
pats.push(self.parse_pat(true));
if self.token == token::BINOP(token::OR) { self.bump(); }
else { return pats; }
};
@ -1849,7 +1849,7 @@ impl parser {
span: self.last_span
};
}
vec::push(fields, {ident: fieldname, pat: subpat});
fields.push({ident: fieldname, pat: subpat});
}
return (fields, etc);
}
@ -1937,7 +1937,7 @@ impl parser {
let mut fields = ~[self.parse_pat(refutable)];
while self.token == token::COMMA {
self.bump();
vec::push(fields, self.parse_pat(refutable));
fields.push(self.parse_pat(refutable));
}
if vec::len(fields) == 1u { self.expect(token::COMMA); }
hi = self.span.hi;
@ -2126,7 +2126,7 @@ impl parser {
let lo = self.span.lo;
let mut locals = ~[self.parse_local(is_mutbl, true)];
while self.eat(token::COMMA) {
vec::push(locals, self.parse_local(is_mutbl, true));
locals.push(self.parse_local(is_mutbl, true));
}
return @spanned(lo, self.last_span.hi, decl_local(locals));
}
@ -2266,8 +2266,8 @@ impl parser {

for items.each |item| {
let decl = @spanned(item.span.lo, item.span.hi, decl_item(*item));
push(stmts, @spanned(item.span.lo, item.span.hi,
stmt_decl(decl, self.get_id())));
stmts.push(@spanned(item.span.lo, item.span.hi,
stmt_decl(decl, self.get_id())));
}

let mut initial_attrs = attrs_remaining;
@ -2278,43 +2278,43 @@ impl parser {

while self.token != token::RBRACE {
match self.token {
token::SEMI => {
self.bump(); // empty
}
_ => {
let stmt = self.parse_stmt(initial_attrs);
initial_attrs = ~[];
match stmt.node {
stmt_expr(e, stmt_id) => { // Expression without semicolon:
match self.token {
token::SEMI => {
self.bump();
push(stmts,
@{node: stmt_semi(e, stmt_id),.. *stmt});
}
token::RBRACE => {
expr = Some(e);
}
t => {
if classify::stmt_ends_with_semi(*stmt) {
self.fatal(~"expected `;` or `}` after \
expression but found `"
+ token_to_str(self.reader, t) + ~"`");
}
vec::push(stmts, stmt);
}
}
}

_ => { // All other kinds of statements:
vec::push(stmts, stmt);

if classify::stmt_ends_with_semi(*stmt) {
self.expect(token::SEMI);
}
}
token::SEMI => {
self.bump(); // empty
}
_ => {
let stmt = self.parse_stmt(initial_attrs);
initial_attrs = ~[];
match stmt.node {
stmt_expr(e, stmt_id) => { // Expression without semicolon:
match self.token {
token::SEMI => {
self.bump();
stmts.push(@{node: stmt_semi(e, stmt_id),
..*stmt});
}
token::RBRACE => {
expr = Some(e);
}
t => {
if classify::stmt_ends_with_semi(*stmt) {
self.fatal(~"expected `;` or `}` after \
expression but found `"
+ token_to_str(self.reader, t) + ~"`");
}
stmts.push(stmt);
}
}
}

_ => { // All other kinds of statements:
stmts.push(stmt);

if classify::stmt_ends_with_semi(*stmt) {
self.expect(token::SEMI);
}
}
}
}
}
}
}
let mut hi = self.span.hi;
@ -2356,16 +2356,16 @@ impl parser {
};

match maybe_bound {
Some(bound) => {
self.bump();
push(bounds, bound);
}
None => {
push(bounds, bound_trait(self.parse_ty(false)));
}
Some(bound) => {
self.bump();
bounds.push(bound);
}
None => {
bounds.push(bound_trait(self.parse_ty(false)));
}
}
} else {
push(bounds, bound_trait(self.parse_ty(false)));
bounds.push(bound_trait(self.parse_ty(false)));
}
}
}
@ -2636,7 +2636,7 @@ impl parser {
self.expect(token::LBRACE);
while !self.eat(token::RBRACE) {
let vis = self.parse_visibility();
vec::push(meths, self.parse_method(vis));
meths.push(self.parse_method(vis));
}
(ident, item_impl(tps, opt_trait, ty, meths), None)
}
@ -2722,9 +2722,9 @@ impl parser {
for mms.each |mm| {
match *mm {
@field_member(struct_field) =>
vec::push(fields, struct_field),
fields.push(struct_field),
@method_member(the_method_member) =>
vec::push(methods, the_method_member)
methods.push(the_method_member)
}
}
}
@ -2896,7 +2896,7 @@ impl parser {
debug!("parse_mod_items: parse_item_or_view_item(attrs=%?)",
attrs);
match self.parse_item_or_view_item(attrs, true) {
iovi_item(item) => vec::push(items, item),
iovi_item(item) => items.push(item),
iovi_view_item(view_item) => {
self.span_fatal(view_item.span, ~"view items must be \
declared at the top of the \
@ -3000,7 +3000,7 @@ impl parser {
let attrs = vec::append(initial_attrs,
self.parse_outer_attributes());
initial_attrs = ~[];
vec::push(items, self.parse_foreign_item(attrs));
items.push(self.parse_foreign_item(attrs));
}
return {sort: sort, view_items: view_items,
items: items};
@ -3113,9 +3113,9 @@ impl parser {
for mms.each |mm| {
match *mm {
@field_member(struct_field) =>
vec::push(fields, struct_field),
fields.push(struct_field),
@method_member(the_method_member) =>
vec::push(methods, the_method_member)
methods.push(the_method_member)
}
}
}
@ -3184,7 +3184,7 @@ impl parser {
seq_sep_trailing_disallowed(token::COMMA),
|p| p.parse_ty(false));
for arg_tys.each |ty| {
vec::push(args, {ty: *ty, id: self.get_id()});
args.push({ty: *ty, id: self.get_id()});
}
kind = tuple_variant_kind(args);
} else if self.eat(token::EQ) {
@ -3200,7 +3200,7 @@ impl parser {
let vr = {name: ident, attrs: variant_attrs,
kind: kind, id: self.get_id(),
disr_expr: disr_expr, vis: vis};
vec::push(variants, spanned(vlo, self.last_span.hi, vr));
variants.push(spanned(vlo, self.last_span.hi, vr));

if needs_comma && !self.eat(token::COMMA) { break; }
}
@ -3427,7 +3427,7 @@ impl parser {
while self.token == token::MOD_SEP {
self.bump();
let id = self.parse_ident();
vec::push(path, id);
path.push(id);
}
let path = @{span: mk_sp(lo, self.span.hi), global: false,
idents: path, rp: None, types: ~[]};
@ -3445,7 +3445,7 @@ impl parser {

token::IDENT(i, _) => {
self.bump();
vec::push(path, i);
path.push(i);
}

// foo::bar::{a,b,c}
@ -3488,7 +3488,7 @@ impl parser {
let mut vp = ~[self.parse_view_path()];
while self.token == token::COMMA {
self.bump();
vec::push(vp, self.parse_view_path());
vp.push(self.parse_view_path());
}
return vp;
}
@ -3662,7 +3662,7 @@ impl parser {
let mut first_outer_attr = first_outer_attr;
while self.token != term {
let cdir = @self.parse_crate_directive(first_outer_attr);
vec::push(cdirs, cdir);
cdirs.push(cdir);
first_outer_attr = ~[];
}
return cdirs;

@ -392,14 +392,14 @@ fn build_link_meta(sess: session, c: ast::crate, output: &Path,
if attr::get_meta_item_name(*meta) == ~"name" {
match attr::get_meta_item_value_str(*meta) {
Some(v) => { name = Some(v); }
None => vec::push(cmh_items, *meta)
None => cmh_items.push(*meta)
}
} else if attr::get_meta_item_name(*meta) == ~"vers" {
match attr::get_meta_item_value_str(*meta) {
Some(v) => { vers = Some(v); }
None => vec::push(cmh_items, *meta)
None => cmh_items.push(*meta)
}
} else { vec::push(cmh_items, *meta); }
} else { cmh_items.push(*meta); }
}
return {name: name, vers: vers, cmh_items: cmh_items};
}
@ -657,9 +657,9 @@ fn link_binary(sess: session,

let mut cc_args =
vec::append(~[stage], sess.targ_cfg.target_strs.cc_args);
vec::push(cc_args, ~"-o");
vec::push(cc_args, output.to_str());
vec::push(cc_args, obj_filename.to_str());
cc_args.push(~"-o");
cc_args.push(output.to_str());
cc_args.push(obj_filename.to_str());

let mut lib_cmd;
let os = sess.targ_cfg.os;
@ -674,17 +674,17 @@ fn link_binary(sess: session,
let cstore = sess.cstore;
for cstore::get_used_crate_files(cstore).each |cratepath| {
if cratepath.filetype() == Some(~".rlib") {
vec::push(cc_args, cratepath.to_str());
cc_args.push(cratepath.to_str());
loop;
}
let dir = cratepath.dirname();
if dir != ~"" { vec::push(cc_args, ~"-L" + dir); }
if dir != ~"" { cc_args.push(~"-L" + dir); }
let libarg = unlib(sess.targ_cfg, cratepath.filestem().get());
vec::push(cc_args, ~"-l" + libarg);
cc_args.push(~"-l" + libarg);
}

let ula = cstore::get_used_link_args(cstore);
for ula.each |arg| { vec::push(cc_args, *arg); }
for ula.each |arg| { cc_args.push(*arg); }

// # Extern library linking

@ -695,41 +695,41 @@ fn link_binary(sess: session,
// forces to make sure that library can be found at runtime.

let addl_paths = sess.opts.addl_lib_search_paths;
for addl_paths.each |path| { vec::push(cc_args, ~"-L" + path.to_str()); }
for addl_paths.each |path| { cc_args.push(~"-L" + path.to_str()); }

// The names of the extern libraries
let used_libs = cstore::get_used_libraries(cstore);
for used_libs.each |l| { vec::push(cc_args, ~"-l" + *l); }
for used_libs.each |l| { cc_args.push(~"-l" + *l); }

if sess.building_library {
vec::push(cc_args, lib_cmd);
cc_args.push(lib_cmd);

// On mac we need to tell the linker to let this library
// be rpathed
if sess.targ_cfg.os == session::os_macos {
vec::push(cc_args, ~"-Wl,-install_name,@rpath/"
cc_args.push(~"-Wl,-install_name,@rpath/"
+ output.filename().get());
}
}

if !sess.debugging_opt(session::no_rt) {
// Always want the runtime linked in
vec::push(cc_args, ~"-lrustrt");
cc_args.push(~"-lrustrt");
}

// On linux librt and libdl are an indirect dependencies via rustrt,
// and binutils 2.22+ won't add them automatically
if sess.targ_cfg.os == session::os_linux {
vec::push_all(cc_args, ~[~"-lrt", ~"-ldl"]);
cc_args.push_all(~[~"-lrt", ~"-ldl"]);

// LLVM implements the `frem` instruction as a call to `fmod`,
// which lives in libm. Similar to above, on some linuxes we
// have to be explicit about linking to it. See #2510
vec::push(cc_args, ~"-lm");
cc_args.push(~"-lm");
}

if sess.targ_cfg.os == session::os_freebsd {
vec::push_all(cc_args, ~[~"-pthread", ~"-lrt",
cc_args.push_all(~[~"-pthread", ~"-lrt",
~"-L/usr/local/lib", ~"-lexecinfo",
~"-L/usr/local/lib/gcc46",
~"-L/usr/local/lib/gcc44", ~"-lstdc++",
@ -743,15 +743,15 @@ fn link_binary(sess: session,
// understand how to unwind our __morestack frame, so we have to turn it
// off. This has impacted some other projects like GHC.
if sess.targ_cfg.os == session::os_macos {
vec::push(cc_args, ~"-Wl,-no_compact_unwind");
cc_args.push(~"-Wl,-no_compact_unwind");
}

// Stack growth requires statically linking a __morestack function
vec::push(cc_args, ~"-lmorestack");
cc_args.push(~"-lmorestack");

// FIXME (#2397): At some point we want to rpath our guesses as to where
// extern libraries might live, based on the addl_lib_search_paths
vec::push_all(cc_args, rpath::get_rpath_flags(sess, &output));
cc_args.push_all(rpath::get_rpath_flags(sess, &output));

debug!("%s link args: %s", cc_prog, str::connect(cc_args, ~" "));
// We run 'cc' here

@ -81,8 +81,8 @@ fn get_rpaths(os: session::os,
log_rpaths(~"fallback", fallback_rpaths);

let mut rpaths = rel_rpaths;
vec::push_all(rpaths, abs_rpaths);
vec::push_all(rpaths, fallback_rpaths);
rpaths.push_all(abs_rpaths);
rpaths.push_all(fallback_rpaths);

// Remove duplicates
let rpaths = minimize_rpaths(rpaths);
@ -136,9 +136,9 @@ fn get_relative_to(abs1: &Path, abs2: &Path) -> Path {
}

let mut path = ~[];
for uint::range(start_idx, len1 - 1) |_i| { vec::push(path, ~".."); };
for uint::range(start_idx, len1 - 1) |_i| { path.push(~".."); };

vec::push_all(path, vec::view(split2, start_idx, len2 - 1));
path.push_all(vec::view(split2, start_idx, len2 - 1));

if vec::is_not_empty(path) {
return Path("").push_many(path);
@ -172,7 +172,7 @@ fn minimize_rpaths(rpaths: &[Path]) -> ~[Path] {
for rpaths.each |rpath| {
let s = rpath.to_str();
if !set.contains_key(s) {
vec::push(minimized, *rpath);
minimized.push(*rpath);
set.insert(s, ());
}
}

@ -28,7 +28,7 @@ fn declare_upcalls(targ_cfg: @session::config,
tys: ~[TypeRef], rv: TypeRef) ->
ValueRef {
let mut arg_tys: ~[TypeRef] = ~[];
for tys.each |t| { vec::push(arg_tys, *t); }
for tys.each |t| { arg_tys.push(*t); }
let fn_ty = T_fn(arg_tys, rv);
return base::decl_cdecl_fn(llmod, prefix + name, fn_ty);
}

@ -94,7 +94,7 @@ fn parse_cfgspecs(cfgspecs: ~[~str]) -> ast::crate_cfg {
// meta_word variant.
let mut words = ~[];
for cfgspecs.each |s| {
vec::push(words, attr::mk_word_item(*s));
words.push(attr::mk_word_item(*s));
}
return words;
}
@ -466,7 +466,7 @@ fn build_session_options(binary: ~str,
level_name, lint_name));
}
Some(lint) => {
vec::push(lint_opts, (lint.lint, *level));
lint_opts.push((lint.lint, *level));
}
}
}

@ -99,7 +99,7 @@ fn fold_crate(cx: test_ctxt, c: ast::crate_, fld: fold::ast_fold) ->
fn fold_item(cx: test_ctxt, &&i: @ast::item, fld: fold::ast_fold) ->
Option<@ast::item> {

vec::push(cx.path, i.ident);
cx.path.push(i.ident);
debug!("current path: %s",
ast_util::path_name_i(cx.path, cx.sess.parse_sess.interner));

@ -286,7 +286,7 @@ fn mk_test_desc_vec(cx: test_ctxt) -> @ast::expr {
debug!("building test vector from %u tests", cx.testfns.len());
let mut descs = ~[];
for cx.testfns.each |test| {
vec::push(descs, mk_test_desc_rec(cx, *test));
descs.push(mk_test_desc_rec(cx, *test));
}

let inner_expr = @{id: cx.sess.next_node_id(),

@ -115,7 +115,7 @@ fn iter_crate_data(cstore: cstore, i: fn(ast::crate_num, crate_metadata)) {

fn add_used_crate_file(cstore: cstore, lib: &Path) {
if !vec::contains(p(cstore).used_crate_files, copy *lib) {
vec::push(p(cstore).used_crate_files, copy *lib);
p(cstore).used_crate_files.push(copy *lib);
}
}

@ -127,7 +127,7 @@ fn add_used_library(cstore: cstore, lib: ~str) -> bool {
assert lib != ~"";

if vec::contains(p(cstore).used_libraries, lib) { return false; }
vec::push(p(cstore).used_libraries, lib);
p(cstore).used_libraries.push(lib);
return true;
}

@ -136,7 +136,7 @@ fn get_used_libraries(cstore: cstore) -> ~[~str] {
}

fn add_used_link_args(cstore: cstore, args: ~str) {
vec::push_all(p(cstore).used_link_args, str::split_char(args, ' '));
p(cstore).used_link_args.push_all(str::split_char(args, ' '));
}

fn get_used_link_args(cstore: cstore) -> ~[~str] {
@ -163,7 +163,7 @@ fn get_dep_hashes(cstore: cstore) -> ~[~str] {
let cdata = cstore::get_crate_data(cstore, cnum);
let hash = decoder::get_crate_hash(cdata.data);
debug!("Add hash[%s]: %s", cdata.name, hash);
vec::push(result, {name: cdata.name, hash: hash});
result.push({name: cdata.name, hash: hash});
};
pure fn lteq(a: &crate_hash, b: &crate_hash) -> bool {a.name <= b.name}
let sorted = std::sort::merge_sort(lteq, result);

@ -227,7 +227,7 @@ fn item_type(item_id: ast::def_id, item: ebml::Doc,
fn item_impl_traits(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd) -> ~[ty::t] {
let mut results = ~[];
for ebml::tagged_docs(item, tag_impl_trait) |ity| {
vec::push(results, doc_type(ity, tcx, cdata));
results.push(doc_type(ity, tcx, cdata));
};
results
}
@ -239,7 +239,7 @@ fn item_ty_param_bounds(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd)
let bd = parse_bounds_data(p.data, p.start, cdata.cnum, tcx, |did| {
translate_def_id(cdata, did)
});
vec::push(bounds, bd);
bounds.push(bd);
}
@bounds
}
@ -263,7 +263,7 @@ fn enum_variant_ids(item: ebml::Doc, cdata: cmd) -> ~[ast::def_id] {
let v = tag_items_data_item_variant;
for ebml::tagged_docs(item, v) |p| {
let ext = ebml::with_doc_data(p, |d| parse_def_id(d));
vec::push(ids, {crate: cdata.cnum, node: ext.node});
ids.push({crate: cdata.cnum, node: ext.node});
};
return ids;
}
@ -278,10 +278,10 @@ fn item_path(intr: @ident_interner, item_doc: ebml::Doc) -> ast_map::path {
for ebml::docs(path_doc) |tag, elt_doc| {
if tag == tag_path_elt_mod {
let str = ebml::doc_as_str(elt_doc);
vec::push(result, ast_map::path_mod(intr.intern(@str)));
result.push(ast_map::path_mod(intr.intern(@str)));
} else if tag == tag_path_elt_name {
let str = ebml::doc_as_str(elt_doc);
vec::push(result, ast_map::path_name(intr.intern(@str)));
result.push(ast_map::path_name(intr.intern(@str)));
} else {
// ignore tag_path_len element
}
@ -584,7 +584,7 @@ fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::node_id,
let mut arg_tys: ~[ty::t] = ~[];
match ty::get(ctor_ty).sty {
ty::ty_fn(f) => {
for f.sig.inputs.each |a| { vec::push(arg_tys, a.ty); }
for f.sig.inputs.each |a| { arg_tys.push(a.ty); }
}
_ => { /* Nullary enum variant. */ }
}
@ -592,7 +592,7 @@ fn get_enum_variants(intr: @ident_interner, cdata: cmd, id: ast::node_id,
Some(val) => { disr_val = val; }
_ => { /* empty */ }
}
vec::push(infos, @{args: arg_tys, ctor_ty: ctor_ty, name: name,
infos.push(@{args: arg_tys, ctor_ty: ctor_ty, name: name,
id: *did, disr_val: disr_val});
disr_val += 1;
}
@ -645,7 +645,7 @@ fn item_impl_methods(intr: @ident_interner, cdata: cmd, item: ebml::Doc,
let m_did = ebml::with_doc_data(doc, |d| parse_def_id(d));
let mth_item = lookup_item(m_did.node, cdata.data);
let self_ty = get_self_ty(mth_item);
vec::push(rslt, @{did: translate_def_id(cdata, m_did),
rslt.push(@{did: translate_def_id(cdata, m_did),
/* FIXME (maybe #2323) tjc: take a look at this. */
n_tps: item_ty_param_count(mth_item) - base_tps,
ident: item_name(intr, mth_item),
@ -675,7 +675,7 @@ fn get_impls_for_mod(intr: @ident_interner, cdata: cmd,
let nm = item_name(intr, item);
if match name { Some(n) => { n == nm } None => { true } } {
let base_tps = item_ty_param_count(item);
vec::push(result, @{
result.push(@{
did: local_did, ident: nm,
methods: item_impl_methods(intr, impl_cdata, item, base_tps)
});
@ -701,7 +701,7 @@ fn get_trait_methods(intr: @ident_interner, cdata: cmd, id: ast::node_id,
~"get_trait_methods: id has non-function type");
} };
let self_ty = get_self_ty(mth);
vec::push(result, {ident: name, tps: bounds, fty: fty,
result.push({ident: name, tps: bounds, fty: fty,
self_ty: self_ty,
vis: ast::public});
}
@ -753,7 +753,7 @@ fn get_class_members(intr: @ident_interner, cdata: cmd, id: ast::node_id,
let name = item_name(intr, an_item);
let did = item_def_id(an_item, cdata);
let mt = field_mutability(an_item);
vec::push(result, {ident: name, id: did, vis:
result.push({ident: name, id: did, vis:
family_to_visibility(f), mutability: mt});
}
}
@ -835,7 +835,7 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::meta_item] {
for ebml::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd));
vec::push(items, attr::mk_word_item(n));
items.push(attr::mk_word_item(n));
};
for ebml::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
@ -844,13 +844,13 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::meta_item] {
let v = str::from_bytes(ebml::doc_data(vd));
// FIXME (#623): Should be able to decode meta_name_value variants,
// but currently the encoder just drops them
vec::push(items, attr::mk_name_value_item_str(n, v));
items.push(attr::mk_name_value_item_str(n, v));
};
for ebml::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
let nd = ebml::get_doc(meta_item_doc, tag_meta_item_name);
let n = str::from_bytes(ebml::doc_data(nd));
let subitems = get_meta_items(meta_item_doc);
vec::push(items, attr::mk_list_item(n, subitems));
items.push(attr::mk_list_item(n, subitems));
};
return items;
}
@ -865,10 +865,10 @@ fn get_attributes(md: ebml::Doc) -> ~[ast::attribute] {
// an attribute
assert (vec::len(meta_items) == 1u);
let meta_item = meta_items[0];
vec::push(attrs,
{node: {style: ast::attr_outer, value: *meta_item,
is_sugared_doc: false},
span: ast_util::dummy_sp()});
attrs.push(
{node: {style: ast::attr_outer, value: *meta_item,
is_sugared_doc: false},
span: ast_util::dummy_sp()});
};
}
option::None => ()
@ -910,7 +910,7 @@ fn get_crate_deps(intr: @ident_interner, data: @~[u8]) -> ~[crate_dep] {
str::from_bytes(ebml::doc_data(ebml::get_doc(doc, tag_)))
}
for ebml::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
vec::push(deps, {cnum: crate_num,
deps.push({cnum: crate_num,
name: intr.intern(@docstr(depdoc, tag_crate_dep_name)),
vers: docstr(depdoc, tag_crate_dep_vers),
hash: docstr(depdoc, tag_crate_dep_hash)});
@ -977,7 +977,7 @@ fn get_crate_module_paths(intr: @ident_interner, cdata: cmd)
// Collect everything by now. There might be multiple
// paths pointing to the same did. Those will be
// unified later by using the mods map
vec::push(res, (did, path));
res.push((did, path));
}
return do vec::filter(res) |x| {
let (_, xp) = x;

@ -118,12 +118,12 @@ type entry<T> = {val: T, pos: uint};
fn add_to_index(ecx: @encode_ctxt, ebml_w: ebml::Writer, path: &[ident],
&index: ~[entry<~str>], name: ident) {
let mut full_path = ~[];
vec::push_all(full_path, path);
vec::push(full_path, name);
vec::push(index,
{val: ast_util::path_name_i(full_path,
ecx.tcx.sess.parse_sess.interner),
pos: ebml_w.writer.tell()});
full_path.push_all(path);
full_path.push(name);
index.push(
{val: ast_util::path_name_i(full_path,
ecx.tcx.sess.parse_sess.interner),
pos: ebml_w.writer.tell()});
}

fn encode_trait_ref(ebml_w: ebml::Writer, ecx: @encode_ctxt, t: @trait_ref) {
@ -225,7 +225,7 @@ fn encode_enum_variant_info(ecx: @encode_ctxt, ebml_w: ebml::Writer,
let mut i = 0;
let vi = ty::enum_variants(ecx.tcx, {crate: local_crate, node: id});
for variants.each |variant| {
vec::push(*index, {val: variant.node.id, pos: ebml_w.writer.tell()});
index.push({val: variant.node.id, pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(variant.node.id));
encode_family(ebml_w, 'v');
@ -390,9 +390,9 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::Writer,
match field.node.kind {
named_field(nm, mt, vis) => {
let id = field.node.id;
vec::push(*index, {val: id, pos: ebml_w.writer.tell()});
vec::push(*global_index, {val: id,
pos: ebml_w.writer.tell()});
index.push({val: id, pos: ebml_w.writer.tell()});
global_index.push({val: id,
pos: ebml_w.writer.tell()});
ebml_w.start_tag(tag_items_data_item);
debug!("encode_info_for_class: doing %s %d",
tcx.sess.str_of(nm), id);
@ -411,9 +411,9 @@ fn encode_info_for_class(ecx: @encode_ctxt, ebml_w: ebml::Writer,
for methods.each |m| {
match m.vis {
public | inherited => {
vec::push(*index, {val: m.id, pos: ebml_w.writer.tell()});
vec::push(*global_index,
{val: m.id, pos: ebml_w.writer.tell()});
index.push({val: m.id, pos: ebml_w.writer.tell()});
global_index.push(
{val: m.id, pos: ebml_w.writer.tell()});
let impl_path = vec::append_one(path,
ast_map::path_name(m.ident));
debug!("encode_info_for_class: doing %s %d",
@ -519,7 +519,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item,

fn add_to_index_(item: @item, ebml_w: ebml::Writer,
index: @mut ~[entry<int>]) {
vec::push(*index, {val: item.id, pos: ebml_w.writer.tell()});
index.push({val: item.id, pos: ebml_w.writer.tell()});
}
let add_to_index = |copy ebml_w| add_to_index_(item, ebml_w, index);

@ -603,7 +603,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item,
index);
/* Encode the dtor */
do struct_def.dtor.iter |dtor| {
vec::push(*index, {val: dtor.node.id, pos: ebml_w.writer.tell()});
index.push({val: dtor.node.id, pos: ebml_w.writer.tell()});
encode_info_for_ctor(ecx, ebml_w, dtor.node.id,
ecx.tcx.sess.ident_of(
ecx.tcx.sess.str_of(item.ident) +
@ -688,7 +688,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item,
for struct_def.ctor.each |ctor| {
debug!("encoding info for ctor %s %d",
ecx.tcx.sess.str_of(item.ident), ctor.node.id);
vec::push(*index, {
index.push({
val: ctor.node.id,
pos: ebml_w.writer.tell()
});
@ -723,7 +723,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item,
let impl_path = vec::append_one(path,
ast_map::path_name(item.ident));
for methods.each |m| {
vec::push(*index, {val: m.id, pos: ebml_w.writer.tell()});
index.push({val: m.id, pos: ebml_w.writer.tell()});
encode_info_for_method(ecx, ebml_w, impl_path,
should_inline(m.attrs), item.id, *m,
vec::append(tps, m.tps));
@ -774,7 +774,7 @@ fn encode_info_for_item(ecx: @encode_ctxt, ebml_w: ebml::Writer, item: @item,
let ty_m = ast_util::trait_method_to_ty_method(*m);
if ty_m.self_ty.node != ast::sty_static { loop; }

vec::push(*index, {val: ty_m.id, pos: ebml_w.writer.tell()});
index.push({val: ty_m.id, pos: ebml_w.writer.tell()});

ebml_w.start_tag(tag_items_data_item);
encode_def_id(ebml_w, local_def(ty_m.id));
@ -799,7 +799,7 @@ fn encode_info_for_foreign_item(ecx: @encode_ctxt, ebml_w: ebml::Writer,
index: @mut ~[entry<int>],
path: ast_map::path, abi: foreign_abi) {
if !reachable(ecx, nitem.id) { return; }
vec::push(*index, {val: nitem.id, pos: ebml_w.writer.tell()});
index.push({val: nitem.id, pos: ebml_w.writer.tell()});

ebml_w.start_tag(tag_items_data_item);
match nitem.node {
@ -831,7 +831,7 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::Writer,
crate: @crate) -> ~[entry<int>] {
let index = @mut ~[];
ebml_w.start_tag(tag_items_data);
vec::push(*index, {val: crate_node_id, pos: ebml_w.writer.tell()});
index.push({val: crate_node_id, pos: ebml_w.writer.tell()});
encode_info_for_mod(ecx, ebml_w, crate.node.module,
crate_node_id, ~[],
syntax::parse::token::special_idents::invalid);
@ -869,15 +869,15 @@ fn encode_info_for_items(ecx: @encode_ctxt, ebml_w: ebml::Writer,
fn create_index<T: Copy Hash IterBytes>(index: ~[entry<T>]) ->
~[@~[entry<T>]] {
let mut buckets: ~[@mut ~[entry<T>]] = ~[];
for uint::range(0u, 256u) |_i| { vec::push(buckets, @mut ~[]); };
for uint::range(0u, 256u) |_i| { buckets.push(@mut ~[]); };
for index.each |elt| {
let h = elt.val.hash() as uint;
vec::push(*buckets[h % 256], *elt);
buckets[h % 256].push(*elt);
}

let mut buckets_frozen = ~[];
for buckets.each |bucket| {
vec::push(buckets_frozen, @**bucket);
buckets_frozen.push(@**bucket);
}
return buckets_frozen;
}
@ -889,7 +889,7 @@ fn encode_index<T>(ebml_w: ebml::Writer, buckets: ~[@~[entry<T>]],
let mut bucket_locs: ~[uint] = ~[];
ebml_w.start_tag(tag_index_buckets);
for buckets.each |bucket| {
vec::push(bucket_locs, ebml_w.writer.tell());
bucket_locs.push(ebml_w.writer.tell());
ebml_w.start_tag(tag_index_buckets_bucket);
for vec::each(**bucket) |elt| {
ebml_w.start_tag(tag_index_buckets_bucket_elt);
@ -996,8 +996,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] {
let mut attrs: ~[attribute] = ~[];
let mut found_link_attr = false;
for crate.node.attrs.each |attr| {
vec::push(
attrs,
attrs.push(
if attr::get_attr_name(*attr) != ~"link" {
*attr
} else {
@ -1011,7 +1010,7 @@ fn synthesize_crate_attrs(ecx: @encode_ctxt, crate: @crate) -> ~[attribute] {
});
}

if !found_link_attr { vec::push(attrs, synthesize_link_attr(ecx, ~[])); }
if !found_link_attr { attrs.push(synthesize_link_attr(ecx, ~[])); }

return attrs;
}
@ -1031,7 +1030,7 @@ fn encode_crate_deps(ecx: @encode_ctxt, ebml_w: ebml::Writer,
let dep = {cnum: key, name: ecx.tcx.sess.ident_of(val.name),
vers: decoder::get_crate_vers(val.data),
hash: decoder::get_crate_hash(val.data)};
vec::push(deps, dep);
deps.push(dep);
};

// Sort by cnum

@ -39,15 +39,15 @@ fn mk_filesearch(maybe_sysroot: Option<Path>,
fn lib_search_paths() -> ~[Path] {
let mut paths = self.addl_lib_search_paths;

vec::push(paths,
make_target_lib_path(&self.sysroot,
self.target_triple));
paths.push(
make_target_lib_path(&self.sysroot,
self.target_triple));
match get_cargo_lib_path_nearest() {
result::Ok(p) => vec::push(paths, p),
result::Ok(p) => paths.push(p),
result::Err(_) => ()
}
match get_cargo_lib_path() {
result::Ok(p) => vec::push(paths, p),
result::Ok(p) => paths.push(p),
result::Err(_) => ()
}
paths

@ -90,7 +90,7 @@ fn find_library_crate_aux(cx: ctxt,
option::None::<()>
} else {
debug!("found %s with matching metadata", path.to_str());
vec::push(matches, {ident: path.to_str(), data: cvec});
matches.push({ident: path.to_str(), data: cvec});
option::None::<()>
}
}

@ -84,7 +84,7 @@ fn parse_ret_ty(st: @pstate, conv: conv_did) -> (ast::ret_style, ty::t) {
fn parse_path(st: @pstate) -> @ast::path {
let mut idents: ~[ast::ident] = ~[];
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
vec::push(idents, parse_ident_(st, is_last));
idents.push(parse_ident_(st, is_last));
loop {
match peek(st) {
':' => { next(st); next(st); }
@ -93,7 +93,7 @@ fn parse_path(st: @pstate) -> @ast::path {
return @{span: ast_util::dummy_sp(),
global: false, idents: idents,
rp: None, types: ~[]};
} else { vec::push(idents, parse_ident_(st, is_last)); }
} else { idents.push(parse_ident_(st, is_last)); }
}
}
};
@ -136,7 +136,7 @@ fn parse_substs(st: @pstate, conv: conv_did) -> ty::substs {

assert next(st) == '[';
let mut params: ~[ty::t] = ~[];
while peek(st) != ']' { vec::push(params, parse_ty(st, conv)); }
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;

return {self_r: self_r,
@ -273,7 +273,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
let mut fields: ~[ty::field] = ~[];
while peek(st) != ']' {
let name = st.tcx.sess.ident_of(parse_str(st, '='));
vec::push(fields, {ident: name, mt: parse_mt(st, conv)});
fields.push({ident: name, mt: parse_mt(st, conv)});
}
st.pos = st.pos + 1u;
return ty::mk_rec(st.tcx, fields);
@ -281,7 +281,7 @@ fn parse_ty(st: @pstate, conv: conv_did) -> ty::t {
'T' => {
assert (next(st) == '[');
let mut params = ~[];
while peek(st) != ']' { vec::push(params, parse_ty(st, conv)); }
while peek(st) != ']' { params.push(parse_ty(st, conv)); }
st.pos = st.pos + 1u;
return ty::mk_tup(st.tcx, params);
}
@ -348,7 +348,7 @@ fn parse_mt(st: @pstate, conv: conv_did) -> ty::mt {

fn parse_def(st: @pstate, conv: conv_did) -> ast::def_id {
let mut def = ~[];
while peek(st) != '|' { vec::push(def, next_byte(st)); }
while peek(st) != '|' { def.push(next_byte(st)); }
st.pos = st.pos + 1u;
return conv(parse_def_id(def));
}
@ -412,7 +412,7 @@ fn parse_ty_fn(st: @pstate, conv: conv_did) -> ty::FnTy {
let mut inputs: ~[ty::arg] = ~[];
while peek(st) != ']' {
let mode = parse_mode(st);
vec::push(inputs, {mode: mode, ty: parse_ty(st, conv)});
inputs.push({mode: mode, ty: parse_ty(st, conv)});
}
st.pos += 1u; // eat the ']'
let (ret_style, ret_ty) = parse_ret_ty(st, conv);
@ -464,7 +464,7 @@ fn parse_bounds_data(data: @~[u8], start: uint,
fn parse_bounds(st: @pstate, conv: conv_did) -> @~[ty::param_bound] {
let mut bounds = ~[];
loop {
vec::push(bounds, match next(st) {
bounds.push(match next(st) {
'S' => ty::bound_send,
'C' => ty::bound_copy,
'K' => ty::bound_const,

@ -122,6 +122,6 @@ fn compute_capture_vars(tcx: ty::ctxt,
}

let mut result = ~[];
for cap_map.each_value |cap_var| { vec::push(result, cap_var); }
for cap_map.each_value |cap_var| { result.push(cap_var); }
return result;
}

@ -67,7 +67,7 @@ fn check_arms(tcx: ty::ctxt, arms: ~[arm]) {
}
_ => ()
}
if arm.guard.is_none() { vec::push(seen, v); }
if arm.guard.is_none() { seen.push(v); }
}
}
}
@ -269,7 +269,7 @@ fn missing_ctor(tcx: ty::ctxt, m: matrix, left_ty: ty::t) -> Option<ctor> {
let mut found = ~[];
for m.each |r| {
do option::iter(&pat_ctor_id(tcx, r[0])) |id| {
if !vec::contains(found, id) { vec::push(found, id); }
if !vec::contains(found, id) { found.push(id); }
}
}
let variants = ty::enum_variants(tcx, eid);

@ -63,7 +63,7 @@ fn collect_freevars(def_map: resolve::DefMap, blk: ast::blk)
if i == depth { // Made it to end of loop
let dnum = ast_util::def_id_of_def(def).node;
if !seen.contains_key(dnum) {
vec::push(*refs, @{def:def, span:expr.span});
refs.push(@{def:def, span:expr.span});
seen.insert(dnum, ());
}
}

@ -42,17 +42,17 @@ fn kind_to_str(k: kind) -> ~str {
let mut kinds = ~[];

if ty::kind_lteq(kind_const(), k) {
vec::push(kinds, ~"const");
kinds.push(~"const");
}

if ty::kind_can_be_copied(k) {
vec::push(kinds, ~"copy");
kinds.push(~"copy");
}

if ty::kind_can_be_sent(k) {
vec::push(kinds, ~"send");
kinds.push(~"send");
} else if ty::kind_is_owned(k) {
vec::push(kinds, ~"owned");
kinds.push(~"owned");
}

str::connect(kinds, ~" ")

@ -288,7 +288,7 @@ impl ctxt {
for metas.each |meta| {
match meta.node {
ast::meta_word(lintname) => {
vec::push(triples, (*meta, *level, lintname));
triples.push((*meta, *level, lintname));
}
_ => {
self.sess.span_err(

@ -302,7 +302,7 @@ fn IrMaps(tcx: ty::ctxt, method_map: typeck::method_map,
impl IrMaps {
fn add_live_node(lnk: LiveNodeKind) -> LiveNode {
let ln = LiveNode(self.num_live_nodes);
vec::push(self.lnks, lnk);
self.lnks.push(lnk);
self.num_live_nodes += 1u;

debug!("%s is of kind %?", ln.to_str(), lnk);
@ -319,7 +319,7 @@ impl IrMaps {

fn add_variable(vk: VarKind) -> Variable {
let v = Variable(self.num_vars);
vec::push(self.var_kinds, vk);
self.var_kinds.push(vk);
self.num_vars += 1u;

match vk {
@ -540,7 +540,7 @@ fn visit_expr(expr: @expr, &&self: @IrMaps, vt: vt<@IrMaps>) {
cap_move | cap_drop => true, // var must be dead afterwards
cap_copy | cap_ref => false // var can still be used
};
vec::push(call_caps, {ln: cv_ln, is_move: is_move, rv: rv});
call_caps.push({ln: cv_ln, is_move: is_move, rv: rv});
}
None => {}
}

@ -54,6 +54,6 @@ fn pat_bindings(dm: resolve::DefMap, pat: @pat,

fn pat_binding_ids(dm: resolve::DefMap, pat: @pat) -> ~[node_id] {
let mut found = ~[];
pat_bindings(dm, pat, |_bm, b_id, _sp, _pt| vec::push(found, b_id) );
pat_bindings(dm, pat, |_bm, b_id, _sp, _pt| found.push(b_id) );
return found;
}

@ -141,7 +141,7 @@ fn nearest_common_ancestor(region_map: region_map, scope_a: ast::node_id,
match region_map.find(scope) {
None => return result,
Some(superscope) => {
vec::push(result, superscope);
result.push(superscope);
scope = superscope;
}
}

@ -2897,7 +2897,7 @@ impl Resolver {
if reexport { ~"reexport" } else { ~"export"},
self.session.str_of(ident),
def_id_of_def(d.def));
vec::push(*exports2, Export2 {
exports2.push(Export2 {
reexport: reexport,
name: self.session.str_of(ident),
def_id: def_id_of_def(d.def)
@ -2949,7 +2949,7 @@ impl Resolver {
for %?",
self.session.str_of(name),
module_.def_id);
vec::push(*exports2, Export2 {
exports2.push(Export2 {
reexport: false,
name: self.session.str_of(name),
def_id: def_id_of_def(target_def)
@ -2960,7 +2960,7 @@ impl Resolver {
%?",
self.session.str_of(name),
module_.def_id);
vec::push(*exports2, Export2 {
exports2.push(Export2 {
reexport: true,
name: self.session.str_of(name),
def_id: def_id_of_def(target_def)

@ -305,7 +305,7 @@ fn enter_match(bcx: block, dm: DefMap, m: &[@Match/&r],
_ => {}
}

vec::push(result, @Match {pats: pats, data: br.data});
result.push(@Match {pats: pats, data: br.data});
}
None => ()
}
@ -398,8 +398,8 @@ fn enter_rec_or_struct(bcx: block, dm: DefMap, m: &[@Match/&r], col: uint,
let mut pats = ~[];
for vec::each(fields) |fname| {
match fpats.find(|p| p.ident == *fname) {
None => vec::push(pats, dummy),
Some(pat) => vec::push(pats, pat.pat)
None => pats.push(dummy),
Some(pat) => pats.push(pat.pat)
}
}
Some(pats)
@ -582,7 +582,7 @@ fn collect_record_or_struct_fields(m: &[@Match], col: uint) -> ~[ast::ident] {
for field_pats.each |field_pat| {
let field_ident = field_pat.ident;
if !vec::any(*idents, |x| x == field_ident) {
vec::push(*idents, field_ident);
idents.push(field_ident);
}
}
}
@ -1162,9 +1162,9 @@ fn trans_alt_inner(scope_cx: block,
let arm_data = @ArmData {bodycx: body,
arm: arm,
bindings_map: bindings_map};
vec::push(arm_datas, arm_data);
arm_datas.push(arm_data);
for vec::each(arm.pats) |p| {
vec::push(matches, @Match {pats: ~[*p], data: arm_data});
matches.push(@Match {pats: ~[*p], data: arm_data});
}
}


@ -76,7 +76,7 @@ impl @crate_ctxt: get_insn_ctxt {
fn insn_ctxt(s: &str) -> icx_popper {
debug!("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() {
vec::push(*self.stats.llvm_insn_ctxt, str::from_slice(s));
self.stats.llvm_insn_ctxt.push(str::from_slice(s));
}
icx_popper(self)
}
@ -98,7 +98,7 @@ fn log_fn_time(ccx: @crate_ctxt, name: ~str, start: time::Timespec,
end: time::Timespec) {
let elapsed = 1000 * ((end.sec - start.sec) as int) +
((end.nsec as int) - (start.nsec as int)) / 1000000;
vec::push(*ccx.stats.fn_times, {ident: name, time: elapsed});
ccx.stats.fn_times.push({ident: name, time: elapsed});
}

fn decl_fn(llmod: ModuleRef, name: ~str, cc: lib::llvm::CallConv,
@ -1153,7 +1153,7 @@ fn cleanup_and_leave(bcx: block, upto: Option<BasicBlockRef>,
}
let sub_cx = sub_block(bcx, ~"cleanup");
Br(bcx, sub_cx.llbb);
vec::push(inf.cleanup_paths, {target: leave, dest: sub_cx.llbb});
inf.cleanup_paths.push({target: leave, dest: sub_cx.llbb});
bcx = trans_block_cleanups_(sub_cx, block_cleanups(cur), is_lpad);
}
_ => ()
@ -2001,7 +2001,7 @@ fn create_main_wrapper(ccx: @crate_ctxt, sp: span, main_llfn: ValueRef,
let llenvarg = llvm::LLVMGetParam(llfdecl, 1 as c_uint);
let mut args = ~[lloutputarg, llenvarg];
if takes_argv {
vec::push(args, llvm::LLVMGetParam(llfdecl, 2 as c_uint));
args.push(llvm::LLVMGetParam(llfdecl, 2 as c_uint));
}
Call(bcx, main_llfn, args);

@ -2451,10 +2451,10 @@ fn create_module_map(ccx: @crate_ctxt) -> ValueRef {
for ccx.module_data.each |key, val| {
let elt = C_struct(~[p2i(ccx, C_cstr(ccx, key)),
p2i(ccx, val)]);
vec::push(elts, elt);
elts.push(elt);
}
let term = C_struct(~[C_int(ccx, 0), C_int(ccx, 0)]);
vec::push(elts, term);
elts.push(term);
llvm::LLVMSetInitializer(map, C_array(elttype, elts));
return map;
}
@ -2492,10 +2492,10 @@ fn fill_crate_map(ccx: @crate_ctxt, map: ValueRef) {
let cr = str::as_c_str(nm, |buf| {
llvm::LLVMAddGlobal(ccx.llmod, ccx.int_type, buf)
});
vec::push(subcrates, p2i(ccx, cr));
subcrates.push(p2i(ccx, cr));
i += 1;
}
vec::push(subcrates, C_int(ccx, 0));
subcrates.push(C_int(ccx, 0));

let llannihilatefn;
let annihilate_def_id = ccx.tcx.lang_items.annihilate_fn.get();

@ -435,7 +435,7 @@ fn GEP(cx: block, Pointer: ValueRef, Indices: ~[ValueRef]) -> ValueRef {
// XXX: Use a small-vector optimization to avoid allocations here.
fn GEPi(cx: block, base: ValueRef, ixs: &[uint]) -> ValueRef {
let mut v: ~[ValueRef] = ~[];
for vec::each(ixs) |i| { vec::push(v, C_i32(*i as i32)); }
for vec::each(ixs) |i| { v.push(C_i32(*i as i32)); }
count_insn(cx, "gepi");
return InBoundsGEP(cx, base, v);
}

@ -478,10 +478,10 @@ fn trans_args(cx: block, llenv: ValueRef, args: CallArgs, fn_ty: ty::t,
|
||||
}
|
||||
}
|
||||
};
|
||||
vec::push(llargs, llretslot);
|
||||
llargs.push(llretslot);
|
||||
|
||||
// Arg 1: Env (closure-bindings / self value)
|
||||
vec::push(llargs, llenv);
|
||||
llargs.push(llenv);
|
||||
|
||||
// ... then explicit args.
|
||||
|
||||
@ -497,11 +497,11 @@ fn trans_args(cx: block, llenv: ValueRef, args: CallArgs, fn_ty: ty::t,
|
||||
if i == last { ret_flag } else { None },
|
||||
autoref_arg)
|
||||
});
|
||||
vec::push(llargs, arg_val);
|
||||
llargs.push(arg_val);
|
||||
}
|
||||
}
|
||||
ArgVals(vs) => {
|
||||
vec::push_all(llargs, vs);
|
||||
llargs.push_all(vs);
|
||||
}
|
||||
}
|
||||
|
||||
@ -622,7 +622,7 @@ fn trans_arg_expr(bcx: block,
|
||||
// However, we must cleanup should we fail before the
|
||||
// callee is actually invoked.
|
||||
scratch.add_clean(bcx);
|
||||
vec::push(*temp_cleanups, scratch.val);
|
||||
temp_cleanups.push(scratch.val);
|
||||
|
||||
match arg_datum.appropriate_mode() {
|
||||
ByValue => {
|
||||
|
@ -259,16 +259,16 @@ fn build_closure(bcx0: block,
|
||||
match cap_var.mode {
|
||||
capture::cap_ref => {
|
||||
assert ck == ty::ck_block;
|
||||
vec::push(env_vals, EnvValue {action: EnvRef,
|
||||
datum: datum});
|
||||
env_vals.push(EnvValue {action: EnvRef,
|
||||
datum: datum});
|
||||
}
|
||||
capture::cap_copy => {
|
||||
vec::push(env_vals, EnvValue {action: EnvStore,
|
||||
datum: datum});
|
||||
env_vals.push(EnvValue {action: EnvStore,
|
||||
datum: datum});
|
||||
}
|
||||
capture::cap_move => {
|
||||
vec::push(env_vals, EnvValue {action: EnvMove,
|
||||
datum: datum});
|
||||
env_vals.push(EnvValue {action: EnvMove,
|
||||
datum: datum});
|
||||
}
|
||||
capture::cap_drop => {
|
||||
bcx = datum.drop_val(bcx);
|
||||
@ -283,8 +283,8 @@ fn build_closure(bcx0: block,
|
||||
// Flag indicating we have returned (a by-ref bool):
|
||||
let flag_datum = Datum {val: flagptr, ty: ty::mk_bool(tcx),
|
||||
mode: ByRef, source: FromLvalue};
|
||||
vec::push(env_vals, EnvValue {action: EnvRef,
|
||||
datum: flag_datum});
|
||||
env_vals.push(EnvValue {action: EnvRef,
|
||||
datum: flag_datum});
|
||||
|
||||
// Return value (we just pass a by-ref () and cast it later to
|
||||
// the right thing):
|
||||
@ -295,8 +295,8 @@ fn build_closure(bcx0: block,
|
||||
let ret_casted = PointerCast(bcx, ret_true, T_ptr(T_nil()));
|
||||
let ret_datum = Datum {val: ret_casted, ty: ty::mk_nil(tcx),
|
||||
mode: ByRef, source: FromLvalue};
|
||||
vec::push(env_vals, EnvValue {action: EnvRef,
|
||||
datum: ret_datum});
|
||||
env_vals.push(EnvValue {action: EnvRef,
|
||||
datum: ret_datum});
|
||||
}
|
||||
|
||||
return store_environment(bcx, env_vals, ck);
|
||||
|
@ -348,9 +348,9 @@ fn add_clean(bcx: block, val: ValueRef, t: ty::t) {
|
||||
let {root, rooted} = root_for_cleanup(bcx, val, t);
|
||||
let cleanup_type = cleanup_type(bcx.tcx(), t);
|
||||
do in_scope_cx(bcx) |info| {
|
||||
vec::push(info.cleanups,
|
||||
clean(|a| glue::drop_ty_root(a, root, rooted, t),
|
||||
cleanup_type));
|
||||
info.cleanups.push(
|
||||
clean(|a| glue::drop_ty_root(a, root, rooted, t),
|
||||
cleanup_type));
|
||||
scope_clean_changed(info);
|
||||
}
|
||||
}
|
||||
@ -362,9 +362,9 @@ fn add_clean_temp_immediate(cx: block, val: ValueRef, ty: ty::t) {
|
||||
ty_to_str(cx.ccx().tcx, ty));
|
||||
let cleanup_type = cleanup_type(cx.tcx(), ty);
|
||||
do in_scope_cx(cx) |info| {
|
||||
vec::push(info.cleanups,
|
||||
clean_temp(val, |a| glue::drop_ty_immediate(a, val, ty),
|
||||
cleanup_type));
|
||||
info.cleanups.push(
|
||||
clean_temp(val, |a| glue::drop_ty_immediate(a, val, ty),
|
||||
cleanup_type));
|
||||
scope_clean_changed(info);
|
||||
}
|
||||
}
|
||||
@ -376,9 +376,9 @@ fn add_clean_temp_mem(bcx: block, val: ValueRef, t: ty::t) {
|
||||
let {root, rooted} = root_for_cleanup(bcx, val, t);
|
||||
let cleanup_type = cleanup_type(bcx.tcx(), t);
|
||||
do in_scope_cx(bcx) |info| {
|
||||
vec::push(info.cleanups,
|
||||
clean_temp(val, |a| glue::drop_ty_root(a, root, rooted, t),
|
||||
cleanup_type));
|
||||
info.cleanups.push(
|
||||
clean_temp(val, |a| glue::drop_ty_root(a, root, rooted, t),
|
||||
cleanup_type));
|
||||
scope_clean_changed(info);
|
||||
}
|
||||
}
|
||||
@ -388,8 +388,8 @@ fn add_clean_free(cx: block, ptr: ValueRef, heap: heap) {
|
||||
heap_exchange => |a| glue::trans_unique_free(a, ptr)
|
||||
};
|
||||
do in_scope_cx(cx) |info| {
|
||||
vec::push(info.cleanups, clean_temp(ptr, free_fn,
|
||||
normal_exit_and_unwind));
|
||||
info.cleanups.push(clean_temp(ptr, free_fn,
|
||||
normal_exit_and_unwind));
|
||||
scope_clean_changed(info);
|
||||
}
|
||||
}
|
||||
@ -1050,7 +1050,7 @@ fn C_postr(s: ~str) -> ValueRef {
|
||||
fn C_zero_byte_arr(size: uint) -> ValueRef unsafe {
|
||||
let mut i = 0u;
|
||||
let mut elts: ~[ValueRef] = ~[];
|
||||
while i < size { vec::push(elts, C_u8(0u)); i += 1u; }
|
||||
while i < size { elts.push(C_u8(0u)); i += 1u; }
|
||||
return llvm::LLVMConstArray(T_i8(), vec::raw::to_ptr(elts),
|
||||
elts.len() as c_uint);
|
||||
}
|
||||
|
@ -383,7 +383,7 @@ fn create_derived_type(type_tag: int, file: ValueRef, name: ~str, line: int,
|
||||
|
||||
fn add_member(cx: @struct_ctxt, name: ~str, line: int, size: int, align: int,
|
||||
ty: ValueRef) {
|
||||
vec::push(cx.members, create_derived_type(MemberTag, cx.file, name, line,
|
||||
cx.members.push(create_derived_type(MemberTag, cx.file, name, line,
|
||||
size * 8, align * 8, cx.total_size,
|
||||
ty));
|
||||
cx.total_size += size * 8;
|
||||
@ -529,7 +529,7 @@ fn create_ty(_cx: @crate_ctxt, _t: ty::t, _ty: @ast::ty)
|
||||
ty::ty_rec(fields) {
|
||||
let fs = ~[];
|
||||
for field in fields {
|
||||
vec::push(fs, {node: {ident: field.ident,
|
||||
fs.push({node: {ident: field.ident,
|
||||
mt: {ty: t_to_ty(cx, field.mt.ty, span),
|
||||
mutbl: field.mt.mutbl}},
|
||||
span: span});
|
||||
|
@ -993,7 +993,7 @@ fn trans_rec_or_struct(bcx: block,
|
||||
let dest = GEPi(bcx, addr, struct_field(ix));
|
||||
bcx = trans_into(bcx, field.node.expr, SaveIn(dest));
|
||||
add_clean_temp_mem(bcx, dest, field_tys[ix].mt.ty);
|
||||
vec::push(temp_cleanups, dest);
|
||||
temp_cleanups.push(dest);
|
||||
}
|
||||
|
||||
// copy over any remaining fields from the base (for
|
||||
@ -1046,7 +1046,7 @@ fn trans_tup(bcx: block, elts: ~[@ast::expr], dest: Dest) -> block {
|
||||
let e_ty = expr_ty(bcx, *e);
|
||||
bcx = trans_into(bcx, *e, SaveIn(dest));
|
||||
add_clean_temp_mem(bcx, dest, e_ty);
|
||||
vec::push(temp_cleanups, dest);
|
||||
temp_cleanups.push(dest);
|
||||
}
|
||||
for vec::each(temp_cleanups) |cleanup| {
|
||||
revoke_clean(bcx, *cleanup);
|
||||
|
@ -297,21 +297,21 @@ fn llreg_ty(cls: ~[x86_64_reg_class]) -> TypeRef {
|
||||
while i < e {
|
||||
match cls[i] {
|
||||
integer_class => {
|
||||
vec::push(tys, T_i64());
|
||||
tys.push(T_i64());
|
||||
}
|
||||
sse_fv_class => {
|
||||
let vec_len = llvec_len(vec::tailn(cls, i + 1u)) * 2u;
|
||||
let vec_ty = llvm::LLVMVectorType(T_f32(),
|
||||
vec_len as c_uint);
|
||||
vec::push(tys, vec_ty);
|
||||
tys.push(vec_ty);
|
||||
i += vec_len;
|
||||
loop;
|
||||
}
|
||||
sse_fs_class => {
|
||||
vec::push(tys, T_f32());
|
||||
tys.push(T_f32());
|
||||
}
|
||||
sse_ds_class => {
|
||||
vec::push(tys, T_f64());
|
||||
tys.push(T_f64());
|
||||
}
|
||||
_ => fail ~"llregtype: unhandled class"
|
||||
}
|
||||
@ -378,8 +378,8 @@ fn x86_64_tys(atys: ~[TypeRef],
|
||||
let mut attrs = ~[];
|
||||
for vec::each(atys) |t| {
|
||||
let (ty, attr) = x86_64_ty(*t, is_pass_byval, ByValAttribute);
|
||||
vec::push(arg_tys, ty);
|
||||
vec::push(attrs, attr);
|
||||
arg_tys.push(ty);
|
||||
attrs.push(attr);
|
||||
}
|
||||
let mut (ret_ty, ret_attr) = x86_64_ty(rty, is_ret_bysret,
|
||||
StructRetAttribute);
|
||||
@ -619,7 +619,7 @@ fn trans_foreign_mod(ccx: @crate_ctxt,
|
||||
} else {
|
||||
load_inbounds(bcx, llargbundle, [0u, i])
|
||||
};
|
||||
vec::push(llargvals, llargval);
|
||||
llargvals.push(llargval);
|
||||
i += 1u;
|
||||
}
|
||||
}
|
||||
@ -627,7 +627,7 @@ fn trans_foreign_mod(ccx: @crate_ctxt,
|
||||
while i < n {
|
||||
let llargval = load_inbounds(bcx, llargbundle,
|
||||
[0u, i]);
|
||||
vec::push(llargvals, llargval);
|
||||
llargvals.push(llargval);
|
||||
i += 1u;
|
||||
}
|
||||
}
|
||||
@ -1041,12 +1041,12 @@ fn trans_foreign_fn(ccx: @crate_ctxt, path: ast_map::path, decl: ast::fn_decl,
|
||||
let mut i = 0u;
|
||||
let n = vec::len(tys.arg_tys);
|
||||
let llretptr = load_inbounds(bcx, llargbundle, ~[0u, n]);
|
||||
vec::push(llargvals, llretptr);
|
||||
llargvals.push(llretptr);
|
||||
let llenvptr = C_null(T_opaque_box_ptr(bcx.ccx()));
|
||||
vec::push(llargvals, llenvptr);
|
||||
llargvals.push(llenvptr);
|
||||
while i < n {
|
||||
let llargval = load_inbounds(bcx, llargbundle, ~[0u, i]);
|
||||
vec::push(llargvals, llargval);
|
||||
llargvals.push(llargval);
|
||||
i += 1u;
|
||||
}
|
||||
return llargvals;
|
||||
|
@ -246,7 +246,7 @@ fn make_mono_id(ccx: @crate_ctxt, item: ast::def_id, substs: ~[ty::t],
|
||||
for vec::each(*bounds) |bound| {
|
||||
match *bound {
|
||||
ty::bound_trait(_) => {
|
||||
vec::push(v, meth::vtable_id(ccx, vts[i]));
|
||||
v.push(meth::vtable_id(ccx, vts[i]));
|
||||
i += 1u;
|
||||
}
|
||||
_ => ()
|
||||
|
@ -332,7 +332,7 @@ fn write_content(bcx: block,
|
||||
bcx = expr::trans_into(bcx, *element,
|
||||
SaveIn(lleltptr));
|
||||
add_clean_temp_mem(bcx, lleltptr, vt.unit_ty);
|
||||
vec::push(temp_cleanups, lleltptr);
|
||||
temp_cleanups.push(lleltptr);
|
||||
}
|
||||
for vec::each(temp_cleanups) |cleanup| {
|
||||
revoke_clean(bcx, *cleanup);
|
||||
@ -369,7 +369,7 @@ fn write_content(bcx: block,
|
||||
bcx = tmpdatum.move_to(bcx, INIT, lleltptr);
|
||||
}
|
||||
add_clean_temp_mem(bcx, lleltptr, vt.unit_ty);
|
||||
vec::push(temp_cleanups, lleltptr);
|
||||
temp_cleanups.push(lleltptr);
|
||||
}
|
||||
|
||||
for vec::each(temp_cleanups) |cleanup| {
|
||||
|
@ -39,13 +39,13 @@ fn type_of_fn(cx: @crate_ctxt, inputs: ~[ty::arg],
|
||||
let mut atys: ~[TypeRef] = ~[];
|
||||
|
||||
// Arg 0: Output pointer.
|
||||
vec::push(atys, T_ptr(type_of(cx, output)));
|
||||
atys.push(T_ptr(type_of(cx, output)));
|
||||
|
||||
// Arg 1: Environment
|
||||
vec::push(atys, T_opaque_box_ptr(cx));
|
||||
atys.push(T_opaque_box_ptr(cx));
|
||||
|
||||
// ... then explicit args.
|
||||
vec::push_all(atys, type_of_explicit_args(cx, inputs));
|
||||
atys.push_all(type_of_explicit_args(cx, inputs));
|
||||
return T_fn(atys, llvm::LLVMVoidType());
|
||||
}
|
||||
|
||||
@ -151,7 +151,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
|
||||
let mut tys: ~[TypeRef] = ~[];
|
||||
for vec::each(fields) |f| {
|
||||
let mt_ty = f.mt.ty;
|
||||
vec::push(tys, type_of(cx, mt_ty));
|
||||
tys.push(type_of(cx, mt_ty));
|
||||
}
|
||||
|
||||
// n.b.: introduce an extra layer of indirection to match
|
||||
@ -164,7 +164,7 @@ fn type_of(cx: @crate_ctxt, t: ty::t) -> TypeRef {
|
||||
ty::ty_tup(elts) => {
|
||||
let mut tys = ~[];
|
||||
for vec::each(elts) |elt| {
|
||||
vec::push(tys, type_of(cx, *elt));
|
||||
tys.push(type_of(cx, *elt));
|
||||
}
|
||||
T_struct(tys)
|
||||
}
|
||||
|
@ -2243,10 +2243,10 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
|
||||
}
|
||||
|
||||
ty_class(did, ref substs) => {
|
||||
vec::push(*seen, did);
|
||||
let r = vec::any(class_items_as_fields(cx, did, substs),
|
||||
|f| type_requires(cx, seen, r_ty, f.mt.ty));
|
||||
vec::pop(*seen);
|
||||
seen.push(did);
|
||||
let r = vec::any(class_items_as_fields(cx, did, substs),
|
||||
|f| type_requires(cx, seen, r_ty, f.mt.ty));
|
||||
vec::pop(*seen);
|
||||
r
|
||||
}
|
||||
|
||||
@ -2258,18 +2258,18 @@ fn is_instantiable(cx: ctxt, r_ty: t) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
ty_enum(did, ref substs) => {
|
||||
vec::push(*seen, did);
|
||||
let vs = enum_variants(cx, did);
|
||||
let r = vec::len(*vs) > 0u && vec::all(*vs, |variant| {
|
||||
vec::any(variant.args, |aty| {
|
||||
let sty = subst(cx, substs, aty);
|
||||
type_requires(cx, seen, r_ty, sty)
|
||||
})
|
||||
});
|
||||
vec::pop(*seen);
|
||||
r
|
||||
}
|
||||
ty_enum(did, ref substs) => {
|
||||
seen.push(did);
|
||||
let vs = enum_variants(cx, did);
|
||||
let r = vec::len(*vs) > 0u && vec::all(*vs, |variant| {
|
||||
vec::any(variant.args, |aty| {
|
||||
let sty = subst(cx, substs, aty);
|
||||
type_requires(cx, seen, r_ty, sty)
|
||||
})
|
||||
});
|
||||
vec::pop(*seen);
|
||||
r
|
||||
}
|
||||
};
|
||||
|
||||
debug!("subtypes_require(%s, %s)? %b",
|
||||
@ -3036,7 +3036,7 @@ fn param_tys_in_type(ty: t) -> ~[param_ty] {
|
||||
do walk_ty(ty) |ty| {
|
||||
match get(ty).sty {
|
||||
ty_param(p) => {
|
||||
vec::push(rslt, p);
|
||||
rslt.push(p);
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
@ -3052,7 +3052,7 @@ fn occurs_check(tcx: ctxt, sp: span, vid: TyVid, rt: t) {
|
||||
let mut rslt = ~[];
|
||||
do walk_ty(ty) |ty| {
|
||||
match get(ty).sty {
|
||||
ty_infer(TyVar(v)) => vec::push(rslt, v),
|
||||
ty_infer(TyVar(v)) => rslt.push(v),
|
||||
_ => ()
|
||||
}
|
||||
}
|
||||
@ -3704,10 +3704,10 @@ fn class_field_tys(fields: ~[@struct_field]) -> ~[field_ty] {
|
||||
for fields.each |field| {
|
||||
match field.node.kind {
|
||||
named_field(ident, mutability, visibility) => {
|
||||
vec::push(rslt, {ident: ident,
|
||||
id: ast_util::local_def(field.node.id),
|
||||
vis: visibility,
|
||||
mutability: mutability});
|
||||
rslt.push({ident: ident,
|
||||
id: ast_util::local_def(field.node.id),
|
||||
vis: visibility,
|
||||
mutability: mutability});
|
||||
}
|
||||
unnamed_field => {}
|
||||
}
|
||||
@ -3747,7 +3747,7 @@ fn class_item_fields(cx:ctxt,
|
||||
for lookup_class_fields(cx, did).each |f| {
|
||||
// consider all instance vars mut, because the
|
||||
// constructor may mutate all vars
|
||||
vec::push(rslt, {ident: f.ident, mt:
|
||||
rslt.push({ident: f.ident, mt:
|
||||
{ty: lookup_field_type(cx, did, f.id, substs),
|
||||
mutbl: frob_mutability(f.mutability)}});
|
||||
}
|
||||
|
@ -818,7 +818,7 @@ fn do_autoderef(fcx: @fn_ctxt, sp: span, t: ty::t) -> (ty::t, uint) {
|
||||
if vec::contains(enum_dids, did) {
|
||||
return (t1, autoderefs);
|
||||
}
|
||||
vec::push(enum_dids, did);
|
||||
enum_dids.push(did);
|
||||
}
|
||||
_ => { /*ok*/ }
|
||||
}
|
||||
@ -2029,8 +2029,8 @@ fn check_expr_with_unifier(fcx: @fn_ctxt,
|
||||
let name = class_field.ident;
|
||||
let (_, seen) = class_field_map.get(name);
|
||||
if !seen {
|
||||
vec::push(missing_fields,
|
||||
~"`" + tcx.sess.str_of(name) + ~"`");
|
||||
missing_fields.push(
|
||||
~"`" + tcx.sess.str_of(name) + ~"`");
|
||||
}
|
||||
}
|
||||
|
||||
@ -2298,7 +2298,7 @@ fn check_enum_variants(ccx: @crate_ctxt,
|
||||
ccx.tcx.sess.span_err(v.span,
|
||||
~"discriminator value already exists");
|
||||
}
|
||||
vec::push(*disr_vals, *disr_val);
|
||||
disr_vals.push(*disr_val);
|
||||
let ctor_ty = ty::node_id_to_type(ccx.tcx, v.node.id);
|
||||
let arg_tys;
|
||||
|
||||
@ -2321,7 +2321,8 @@ fn check_enum_variants(ccx: @crate_ctxt,
|
||||
match arg_tys {
|
||||
None => {}
|
||||
Some(arg_tys) => {
|
||||
vec::push(*variants, @{args: arg_tys, ctor_ty: ctor_ty,
|
||||
variants.push(
|
||||
@{args: arg_tys, ctor_ty: ctor_ty,
|
||||
name: v.node.name, id: local_def(v.node.id),
|
||||
disr_val: this_disr_val});
|
||||
}
|
||||
|
@ -27,13 +27,13 @@ fn replace_bound_regions_in_fn_ty(
|
||||
let region = ty::re_bound(ty::br_self);
|
||||
let ty = ty::mk_rptr(tcx, region,
|
||||
{ ty: ty::mk_self(tcx), mutbl: m });
|
||||
vec::push(all_tys, ty);
|
||||
all_tys.push(ty);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
|
||||
for self_ty.each |t| { vec::push(all_tys, *t) }
|
||||
for self_ty.each |t| { all_tys.push(*t) }
|
||||
|
||||
debug!("replace_bound_regions_in_fn_ty(self_info.self_ty=%?, fn_ty=%s, \
|
||||
all_tys=%?)",
|
||||
|
@ -51,8 +51,8 @@ fn lookup_vtables(fcx: @fn_ctxt,
|
||||
match *bound {
|
||||
ty::bound_trait(i_ty) => {
|
||||
let i_ty = ty::subst(tcx, substs, i_ty);
|
||||
vec::push(result, lookup_vtable(fcx, expr, *ty, i_ty,
|
||||
allow_unsafe, is_early));
|
||||
result.push(lookup_vtable(fcx, expr, *ty, i_ty,
|
||||
allow_unsafe, is_early));
|
||||
}
|
||||
_ => ()
|
||||
}
|
||||
@ -331,9 +331,9 @@ fn lookup_vtable(fcx: @fn_ctxt,
|
||||
// the impl as well as the resolved list
|
||||
// of type substitutions for the target
|
||||
// trait.
|
||||
vec::push(found,
|
||||
vtable_static(im.did, substs_f.tps,
|
||||
subres));
|
||||
found.push(
|
||||
vtable_static(im.did, substs_f.tps,
|
||||
subres));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -94,7 +94,7 @@ fn resolve_type_vars_for_node(wbcx: wb_ctxt, sp: span, id: ast::node_id)
|
||||
let mut new_tps = ~[];
|
||||
for substs.tps.each |subst| {
|
||||
match resolve_type_vars_in_type(fcx, sp, *subst) {
|
||||
Some(t) => vec::push(new_tps, t),
|
||||
Some(t) => new_tps.push(t),
|
||||
None => { wbcx.success = false; return None; }
|
||||
}
|
||||
}
|
||||
|
@ -198,7 +198,7 @@ impl CoherenceChecker {
|
||||
existing trait",
|
||||
sess.str_of(mi.ident));
|
||||
let mut method_infos = mis;
|
||||
push(method_infos, mi);
|
||||
method_infos.push(mi);
|
||||
pmm.insert(item.id, method_infos);
|
||||
}
|
||||
None => {
|
||||
@ -547,7 +547,7 @@ impl CoherenceChecker {
|
||||
debug!(
|
||||
"(creating impl) adding provided method `%s` to impl",
|
||||
sess.str_of(provided_method.ident));
|
||||
push(methods, *provided_method);
|
||||
methods.push(*provided_method);
|
||||
}
|
||||
}
|
||||
|
||||
@ -559,8 +559,7 @@ impl CoherenceChecker {
|
||||
let mut methods = ~[];
|
||||
|
||||
for ast_methods.each |ast_method| {
|
||||
push(methods,
|
||||
method_to_MethodInfo(*ast_method));
|
||||
methods.push(method_to_MethodInfo(*ast_method));
|
||||
}
|
||||
|
||||
// For each trait that the impl implements, see what
|
||||
@ -619,7 +618,7 @@ impl CoherenceChecker {
|
||||
-> @Impl {
|
||||
let mut methods = ~[];
|
||||
for struct_def.methods.each |ast_method| {
|
||||
push(methods, @{
|
||||
methods.push(@{
|
||||
did: local_def(ast_method.id),
|
||||
n_tps: ast_method.tps.len(),
|
||||
ident: ast_method.ident,
|
||||
|
Some files were not shown because too many files have changed in this diff.
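Every hunk shown above applies the same mechanical rewrite named in the commit title: the demoded free functions vec::push(v, x) and vec::push_all(v, xs) (and the bare push import used in the coherence checker) become the method calls v.push(x) and v.push_all(xs) on a mutable vector. A minimal before/after sketch in the pre-1.0 dialect used throughout this diff; the identifiers and pushed values are illustrative and do not come from any file in the commit:

    // Before this commit: free functions that mutate their first argument.
    let mut names = ~[];
    vec::push(names, ~"alpha");
    vec::push_all(names, ~[~"beta", ~"gamma"]);

    // After this commit: the same operations invoked as methods on the vector.
    names.push(~"delta");
    names.push_all(~[~"epsilon"]);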