Convert impls to new syntax

parent e997948c8a
commit 6a0720b439
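The change is mechanical: the old named-impl header `impl <name> of <trait> for <type>` (or `impl <name> for <type>` for anonymous extension methods) becomes `impl <type>: <trait>` (or just `impl <type>`). Two pairs taken verbatim from the hunks below illustrate the pattern (file names were lost in this extract):

    // old syntax
    impl methods<T: send> for port<T> { ... }
    impl extensions<T: copy> of vec_concat<T> for ~[T] { ... }

    // new syntax
    impl<T: send> port<T> { ... }
    impl<T: copy> ~[T]: vec_concat<T> { ... }

Imports that existed only to name such impls (reader_util, extensions, methods, and so on) are dropped where the named impl no longer exists.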

@@ -1,5 +1,3 @@
import io::reader_util;
import common::config;
export load_errors;

@@ -1,6 +1,5 @@
import option;
import str;
import io::reader_util;
import common::config;

@@ -1,5 +1,5 @@
import run::spawn_process;
import io::{writer_util, reader_util};
import io::writer_util;
import libc::{c_int, pid_t};
import pipes::chan;

@@ -3,8 +3,6 @@
* share immutable data between tasks.
*/
import sys::methods;
export arc, get, clone;
export exclusive, methods;

@@ -96,7 +94,7 @@ fn exclusive<T:send >(-data: T) -> exclusive<T> {
}
}
impl methods<T: send> for exclusive<T> {
impl<T: send> exclusive<T> {
/// Duplicate an exclusive ARC. See arc::clone.
fn clone() -> exclusive<T> {
unsafe {

@@ -142,7 +140,6 @@ unsafe fn with<U>(f: fn(x: &mut T) -> U) -> U {
#[cfg(test)]
mod tests {
import comm::*;
import future::extensions;
#[test]
fn manually_share_arc() {

@@ -123,7 +123,7 @@ fn vec_reserve_shared_actual(++t: *sys::type_desc,
}
}
impl extensions<T: copy> of vec_concat<T> for @[T] {
impl<T: copy> @[T]: vec_concat<T> {
#[inline(always)]
pure fn +(rhs: &[const T]) -> @[T] {
append(self, rhs)

@@ -131,7 +131,7 @@ impl extensions<T: copy> of vec_concat<T> for @[T] {
}
#[cfg(notest)]
impl extensions<T: copy> of add<&[const T],@[T]> for @[T] {
impl<T: copy> @[T]: add<&[const T],@[T]> {
#[inline(always)]
pure fn add(rhs: &[const T]) -> @[T] {
append(self, rhs)

@@ -70,7 +70,7 @@ fn port<T: send>() -> port<T> {
port_t(@port_ptr(rustrt::new_port(sys::size_of::<T>() as size_t)))
}
impl methods<T: send> for port<T> {
impl<T: send> port<T> {
fn chan() -> chan<T> { chan(self) }
fn send(+v: T) { self.chan().send(v) }

@@ -79,7 +79,7 @@ fn peek() -> bool { peek(self) }
}
impl methods<T: send> for chan<T> {
impl<T: send> chan<T> {
fn chan() -> chan<T> { self }
fn send(+v: T) { send(self, v) }

@@ -5,31 +5,13 @@
import option::{some, none};
import option = option::option;
import path = path::path;
import tuple::{extensions, tuple_ops, extended_tuple_ops};
import str::{extensions, str_slice, unique_str};
import vec::extensions;
import tuple::{tuple_ops, extended_tuple_ops};
import str::{str_slice, unique_str};
import vec::{const_vector, copyable_vector, immutable_vector};
import vec::{immutable_copyable_vector, iter_trait_extensions, vec_concat};
import iter::{base_iter, extended_iter, copyable_iter, times, timesi};
import option::extensions;
import option_iter::extensions;
import ptr::{extensions, ptr};
import rand::extensions;
import result::extensions;
import int::{num, times, timesi};
import i8::{num, times, timesi};
import i16::{num, times, timesi};
import i32::{num, times, timesi};
import i64::{num, times, timesi};
import uint::{num, times, timesi};
import u8::{num, times, timesi};
import u16::{num, times, timesi};
import u32::{num, times, timesi};
import u64::{num, times, timesi};
import float::num;
import f32::num;
import f64::num;
import num::num;
import ptr::ptr;
export path, option, some, none, unreachable;
export extensions;

@@ -4,8 +4,6 @@
* Do not use ==, !=, <, etc on doubly-linked lists -- it may not terminate.
*/
import dlist_iter::extensions;
export dlist, dlist_node;
export new_dlist, from_elem, from_vec, extensions;

@@ -24,7 +22,7 @@ enum dlist<T> = @{
mut tl: dlist_link<T>,
};
impl private_methods<T> for dlist_node<T> {
priv impl<T> dlist_node<T> {
pure fn assert_links() {
match self.next {
some(neighbour) => match neighbour.prev {

@@ -47,7 +45,7 @@ impl private_methods<T> for dlist_node<T> {
}
}
impl extensions<T> for dlist_node<T> {
impl<T> dlist_node<T> {
/// Get the next node in the list, if there is one.
pure fn next_link() -> option<dlist_node<T>> {
self.assert_links();

@@ -109,7 +107,7 @@ fn concat<T>(lists: dlist<dlist<T>>) -> dlist<T> {
result
}
impl private_methods<T> for dlist<T> {
priv impl<T> dlist<T> {
pure fn new_link(-data: T) -> dlist_link<T> {
some(dlist_node(@{data: data, mut linked: true,
mut prev: none, mut next: none}))

@@ -184,7 +182,7 @@ fn insert_right(neighbour: dlist_node<T>, nobe: dlist_link<T>) {
}
}
impl extensions<T> for dlist<T> {
impl<T> dlist<T> {
/// Get the size of the list. O(1).
pure fn len() -> uint { self.size }
/// Returns true if the list is empty. O(1).

@@ -413,7 +411,7 @@ fn assert_consistent() {
}
}
impl extensions<T: copy> for dlist<T> {
impl<T: copy> dlist<T> {
/// Remove data from the head of the list. O(1).
fn pop() -> option<T> { self.pop_n().map (|nobe| nobe.data) }
/// Remove data from the tail of the list. O(1).

@@ -5,9 +5,8 @@
//
// Note that recursive use is not permitted.
import dvec_iter::extensions;
import unsafe::reinterpret_cast;
import ptr::{null, extensions};
import ptr::null;
export dvec;
export from_elem;

@@ -75,7 +74,7 @@ fn unwrap<A>(-d: dvec<A>) -> ~[mut A] {
return v;
}
impl private_methods<A> for dvec<A> {
priv impl<A> dvec<A> {
pure fn check_not_borrowed() {
unsafe {
let data: *() = unsafe::reinterpret_cast(self.data);

@@ -107,7 +106,7 @@ fn give_back(-data: ~[mut A]) {
// In theory, most everything should work with any A, but in practice
// almost nothing works without the copy bound due to limitations
// around closures.
impl extensions<A> for dvec<A> {
impl<A> dvec<A> {
/// Reserves space for N elements
fn reserve(count: uint) {
vec::reserve(self.data, count)

@@ -188,7 +187,7 @@ fn reverse() {
}
}
impl extensions<A:copy> for dvec<A> {
impl<A: copy> dvec<A> {
/**
* Append all elements of a vector to the end of the list
*

@@ -311,7 +310,7 @@ fn reachi(f: fn(uint, A) -> bool) {
}
}
impl extensions<A:copy> of index<uint,A> for dvec<A> {
impl<A:copy> dvec<A>: index<uint,A> {
pure fn index(&&idx: uint) -> A {
self.get_elt(idx)
}

@@ -167,7 +167,7 @@ mod consts {
return ln(n) / consts::ln_2;
}
impl num of num::num for f32 {
impl f32: num::num {
pure fn add(&&other: f32) -> f32 { return self + other; }
pure fn sub(&&other: f32) -> f32 { return self - other; }
pure fn mul(&&other: f32) -> f32 { return self * other; }

@@ -194,7 +194,7 @@ mod consts {
return ln(n) / consts::ln_2;
}
impl num of num::num for f64 {
impl f64: num::num {
pure fn add(&&other: f64) -> f64 { return self + other; }
pure fn sub(&&other: f64) -> f64 { return self - other; }
pure fn mul(&&other: f64) -> f64 { return self * other; }

@@ -409,7 +409,7 @@ fn pow_with_uint(base: uint, pow: uint) -> float {
pure fn cos(x: float) -> float { f64::cos(x as f64) as float }
pure fn tan(x: float) -> float { f64::tan(x as f64) as float }
impl num of num::num for float {
impl float: num::num {
pure fn add(&&other: float) -> float { return self + other; }
pure fn sub(&&other: float) -> float { return self - other; }
pure fn mul(&&other: float) -> float { return self * other; }

@@ -32,7 +32,7 @@ enum future<A> = {
};
/// Methods on the `future` type
impl extensions<A:copy send> for future<A> {
impl<A:copy send> future<A> {
fn get() -> A {
//! Get the value of the future

@@ -62,20 +62,20 @@ fn range(lo: T, hi: T, it: fn(T) -> bool) {
if is_negative(i) { -i } else { i }
}
impl ord of ord for T {
impl T: ord {
pure fn lt(&&other: T) -> bool {
return self < other;
}
}
impl eq of eq for T {
impl T: eq {
pure fn eq(&&other: T) -> bool {
return self == other;
}
}
impl num of num::num for T {
impl T: num::num {
pure fn add(&&other: T) -> T { return self + other; }
pure fn sub(&&other: T) -> T { return self - other; }
pure fn mul(&&other: T) -> T { return self * other; }

@@ -87,7 +87,7 @@ impl num of num::num for T {
pure fn from_int(n: int) -> T { return n as T; }
}
impl times of iter::times for T {
impl T: iter::times {
#[inline(always)]
#[doc = "A convenience form for basic iteration. Given a variable `x` \
of any numeric type, the expression `for x.times { /* anything */ }` \

@@ -107,7 +107,7 @@ fn times(it: fn() -> bool) {
}
}
impl timesi of iter::timesi for T {
impl T: iter::timesi {
#[inline(always)]
/// Like `times`, but provides an index
fn timesi(it: fn(uint) -> bool) {

@@ -6,7 +6,7 @@
import result::result;
import dvec::{dvec, extensions};
import dvec::dvec;
import libc::{c_int, c_long, c_uint, c_void, size_t, ssize_t};
import libc::consts::os::posix88::*;
import libc::consts::os::extra::*;

@@ -42,7 +42,7 @@ trait reader {
// Generic utility functions defined on readers
impl reader_util for reader {
impl reader {
fn read_bytes(len: uint) -> ~[u8] {
let mut buf = ~[mut];
vec::reserve(buf, len);

@@ -203,7 +203,7 @@ fn convert_whence(whence: seek_style) -> i32 {
};
}
impl of reader for *libc::FILE {
impl *libc::FILE: reader {
fn read(buf: &[mut u8], len: uint) -> uint {
do vec::as_buf(buf) |buf_p, buf_len| {
assert buf_len <= len;

@@ -227,7 +227,7 @@ fn seek(offset: int, whence: seek_style) {
// A forwarding impl of reader that also holds on to a resource for the
// duration of its lifetime.
// FIXME there really should be a better way to do this // #2004
impl <T: reader, C> of reader for {base: T, cleanup: C} {
impl<T: reader, C> {base: T, cleanup: C}: reader {
fn read(buf: &[mut u8], len: uint) -> uint { self.base.read(buf, len) }
fn read_byte() -> int { self.base.read_byte() }
fn unread_byte(byte: int) { self.base.unread_byte(byte); }

@@ -273,7 +273,7 @@ fn file_reader(path: ~str) -> result<reader, ~str> {
type byte_buf = {buf: ~[const u8], mut pos: uint, len: uint};
impl of reader for byte_buf {
impl byte_buf: reader {
fn read(buf: &[mut u8], len: uint) -> uint {
let count = uint::min(len, self.len - self.pos);

@@ -343,7 +343,7 @@ trait writer {
fn get_type() -> writer_type;
}
impl <T: writer, C> of writer for {base: T, cleanup: C} {
impl<T: writer, C> {base: T, cleanup: C}: writer {
fn write(bs: &[const u8]) { self.base.write(bs); }
fn seek(off: int, style: seek_style) { self.base.seek(off, style); }
fn tell() -> uint { self.base.tell() }

@@ -351,7 +351,7 @@ fn flush() -> int { self.base.flush() }
fn get_type() -> writer_type { file }
}
impl of writer for *libc::FILE {
impl *libc::FILE: writer {
fn write(v: &[const u8]) {
do vec::as_const_buf(v) |vbuf, len| {
let nout = libc::fwrite(vbuf as *c_void, len as size_t,

@@ -384,7 +384,7 @@ fn FILE_writer(f: *libc::FILE, cleanup: bool) -> writer {
}
}
impl of writer for fd_t {
impl fd_t: writer {
fn write(v: &[const u8]) {
let mut count = 0u;
do vec::as_const_buf(v) |vbuf, len| {

@@ -658,7 +658,7 @@ fn stderr() -> writer { fd_writer(libc::STDERR_FILENO as c_int, false) }
type mem_buffer = @{buf: dvec<u8>, mut pos: uint};
impl of writer for mem_buffer {
impl mem_buffer: writer {
fn write(v: &[const u8]) {
// Fast path.
let vlen = vec::len(v);

@@ -5,12 +5,12 @@
import inst::{IMPL_T, EACH, SIZE_HINT};
export extensions;
impl extensions<A> of iter::base_iter<A> for IMPL_T<A> {
impl<A> IMPL_T<A>: iter::base_iter<A> {
fn each(blk: fn(A) -> bool) { EACH(self, blk) }
fn size_hint() -> option<uint> { SIZE_HINT(self) }
}
impl extensions<A> of iter::extended_iter<A> for IMPL_T<A> {
impl<A> IMPL_T<A>: iter::extended_iter<A> {
fn eachi(blk: fn(uint, A) -> bool) { iter::eachi(self, blk) }
fn all(blk: fn(A) -> bool) -> bool { iter::all(self, blk) }
fn any(blk: fn(A) -> bool) -> bool { iter::any(self, blk) }

@@ -24,7 +24,7 @@ fn position(f: fn(A) -> bool) -> option<uint> {
}
}
impl extensions<A:copy> of iter::copyable_iter<A> for IMPL_T<A> {
impl<A: copy> IMPL_T<A>: iter::copyable_iter<A> {
fn filter_to_vec(pred: fn(A) -> bool) -> ~[A] {
iter::filter_to_vec(self, pred)
}

@@ -8,8 +8,6 @@
* node is forbidden.
*/
pure fn EACH<A>(self: IMPL_T<A>, f: fn(A) -> bool) {
import dlist::extensions;
let mut link = self.peek_n();
while option::is_some(link) {
let nobe = option::get(link);

@@ -31,6 +29,5 @@
}
fn SIZE_HINT<A>(self: IMPL_T<A>) -> option<uint> {
import dlist::extensions;
some(self.len())
}

@@ -6,11 +6,9 @@
* Attempts to access this dvec during iteration will fail.
*/
fn EACH<A>(self: IMPL_T<A>, f: fn(A) -> bool) {
import dvec::extensions;
self.swap(|v| { vec::each(v, f); v })
}
fn SIZE_HINT<A>(self: IMPL_T<A>) -> option<uint> {
import dvec::extensions;
some(self.len())
}

@@ -136,7 +136,7 @@ fn swap_unwrap<T>(opt: &mut option<T>) -> T {
unwrap(opt)
}
impl extensions<T> for option<T> {
impl<T> option<T> {
/**
* Update an optional value by optionally running its content through a
* function that returns an option.

@@ -155,7 +155,7 @@ impl extensions<T> for option<T> {
pure fn map<U>(f: fn(T) -> U) -> option<U> { map(self, f) }
}
impl extensions<T: copy> for option<T> {
impl<T: copy> option<T> {
/**
* Gets the value out of an option
*

@@ -61,7 +61,6 @@
import unsafe::{forget, reinterpret_cast, transmute};
import either::{either, left, right};
import option::unwrap;
import arc::methods;
// Things used by code generated by the pipe compiler.
export entangle, get_buffer, drop_buffer;

@@ -167,7 +166,7 @@ trait has_buffer {
fn set_buffer(b: *libc::c_void);
}
impl methods<T: send> of has_buffer for packet<T> {
impl<T: send> packet<T>: has_buffer {
fn set_buffer(b: *libc::c_void) {
self.header.buffer = b;
}

@@ -475,7 +474,7 @@ fn try_recv<T: send, Tbuffer: send>(-p: recv_packet_buffered<T, Tbuffer>)
}
}
impl peek<T: send, Tb: send> for recv_packet_buffered<T, Tb> {
impl<T: send, Tb: send> recv_packet_buffered<T, Tb> {
pure fn peek() -> bool {
peek(self)
}

@@ -637,7 +636,7 @@ trait selectable {
pure fn header() -> *packet_header;
}
impl of selectable for *packet_header {
impl *packet_header: selectable {
pure fn header() -> *packet_header { self }
}

@@ -923,7 +922,7 @@ fn stream<T:send>() -> (chan<T>, port<T>) {
(chan_({ mut endp: some(c) }), port_({ mut endp: some(s) }))
}
impl chan<T: send> of channel<T> for chan<T> {
impl<T: send> chan<T>: channel<T> {
fn send(+x: T) {
let mut endp = none;
endp <-> self.endp;

@@ -944,7 +943,7 @@ fn try_send(+x: T) -> bool {
}
}
impl port<T: send> of recv<T> for port<T> {
impl<T: send> port<T>: recv<T> {
fn recv() -> T {
let mut endp = none;
endp <-> self.endp;

@@ -1037,7 +1036,7 @@ fn recv() -> T {
}
}
impl<T: send> of selectable for port<T> {
impl<T: send> port<T>: selectable {
pure fn header() -> *packet_header unchecked {
match self.endp {
some(endp) => endp.header(),

@@ -1049,7 +1048,7 @@ impl<T: send> of selectable for port<T> {
/// A channel that can be shared between many senders.
type shared_chan<T: send> = arc::exclusive<chan<T>>;
impl chan<T: send> of channel<T> for shared_chan<T> {
impl<T: send> shared_chan<T>: channel<T> {
fn send(+x: T) {
let mut xx = some(x);
do self.with |chan| {

@@ -1083,7 +1082,7 @@ trait select2<T: send, U: send> {
}
impl<T: send, U: send, Left: selectable recv<T>, Right: selectable recv<U>>
of select2<T, U> for (Left, Right) {
(Left, Right): select2<T, U> {
fn select() -> either<T, U> {
match self {

@@ -156,7 +156,7 @@ trait ptr {
}
/// Extension methods for pointers
impl extensions<T> of ptr for *T {
impl<T> *T: ptr {
/// Returns true if the pointer is equal to the null pointer.
pure fn is_null() -> bool { is_null(self) }

@@ -24,7 +24,7 @@ trait rng {
type weighted<T> = { weight: uint, item: T };
/// Extension methods for random number generators
impl extensions for rng {
impl rng {
/// Return a random int
fn gen_int() -> int {

@@ -248,7 +248,7 @@ fn shuffle_mut<T>(&&values: ~[mut T]) {
drop { rustrt::rand_free(self.c); }
}
impl of rng for @rand_res {
impl @rand_res: rng {
fn next() -> u32 { return rustrt::rand_next((*self).c); }
}

@@ -279,7 +279,7 @@ fn seeded_rng(seed: ~[u8]) -> rng {
mut w: u32
};
impl of rng for xorshift_state {
impl xorshift_state: rng {
fn next() -> u32 {
let x = self.x;
let mut t = x ^ (x << 11);

@@ -180,7 +180,7 @@ fn map_err<T: copy, E, F: copy>(res: result<T, E>, op: fn(E) -> F)
}
}
impl extensions<T, E> for result<T, E> {
impl<T, E> result<T, E> {
fn is_ok() -> bool { is_ok(self) }
fn is_err() -> bool { is_err(self) }

@@ -200,7 +200,7 @@ fn iter_err(f: fn(E)) {
}
}
impl extensions<T:copy, E> for result<T, E> {
impl<T: copy, E> result<T, E> {
fn get() -> T { get(self) }
fn map_err<F:copy>(op: fn(E) -> F) -> result<T,F> {

@@ -211,7 +211,7 @@ fn map_err<F:copy>(op: fn(E) -> F) -> result<T,F> {
}
}
impl extensions<T, E:copy> for result<T, E> {
impl<T, E: copy> result<T, E> {
fn get_err() -> E { get_err(self) }
fn map<U:copy>(op: fn(T) -> U) -> result<U,E> {

@@ -222,7 +222,7 @@ fn map<U:copy>(op: fn(T) -> U) -> result<U,E> {
}
}
impl extensions<T:copy, E:copy> for result<T,E> {
impl<T: copy, E: copy> result<T, E> {
fn chain<U:copy>(op: fn(T) -> result<U,E>) -> result<U,E> {
chain(self, op)
}

@@ -1,7 +1,6 @@
//! Process spawning
import option::{some, none};
import libc::{pid_t, c_void, c_int};
import io::reader_util;
export program;
export run_program;

@@ -228,7 +227,7 @@ fn destroy_repr(r: prog_repr) {
drop { destroy_repr(self.r); }
}
impl of program for prog_res {
impl prog_res: program {
fn get_id() -> pid_t { return self.r.pid; }
fn input() -> io::writer { io::fd_writer(self.r.in_fd, false) }
fn output() -> io::reader { io::FILE_reader(self.r.out_file, false) }

@@ -64,7 +64,7 @@ unsafe fn borrow<K>(&&k: K) -> &K {
unsafe::reinterpret_cast(p)
}
impl private_methods<K,V> for &const linear_map<K,V> {
priv impl<K, V> &const linear_map<K,V> {
#[inline(always)]
pure fn to_bucket(h: uint) -> uint {
// FIXME(#3041) borrow a more sophisticated technique here from

@@ -126,7 +126,7 @@ impl private_methods<K,V> for &const linear_map<K,V> {
}
}
impl private_methods<K,V> for &mut linear_map<K,V> {
priv impl<K,V> &mut linear_map<K,V> {
/// Expands the capacity of the array and re-inserts each
/// of the existing buckets.
fn expand() {

@@ -175,7 +175,7 @@ fn insert_internal(hash: uint, +k: K, +v: V) -> bool {
}
}
impl public_methods<K,V> for &mut linear_map<K,V> {
impl<K,V> &mut linear_map<K,V> {
fn insert(+k: K, +v: V) -> bool {
if self.size >= self.resize_at {
// n.b.: We could also do this after searching, so

@@ -230,13 +230,13 @@ fn remove(k: &K) -> bool {
}
}
impl private_methods<K,V> for &linear_map<K,V> {
priv impl<K,V> &linear_map<K,V> {
fn search(hash: uint, op: fn(x: &option<bucket<K,V>>) -> bool) {
let _ = self.bucket_sequence(hash, |i| op(&self.buckets[i]));
}
}
impl public_methods<K,V> for &const linear_map<K,V> {
impl<K,V> &const linear_map<K,V> {
fn len() -> uint {
self.size
}

@@ -253,7 +253,7 @@ fn contains_key(k: &K) -> bool {
}
}
impl public_methods<K,V: copy> for &const linear_map<K,V> {
impl<K,V: copy> &const linear_map<K,V> {
fn find(k: &K) -> option<V> {
match self.bucket_for_key(self.buckets, k) {
found_entry(idx) => {

@@ -280,7 +280,7 @@ fn [](k: &K) -> V {
}
}
impl imm_methods<K,V> for &linear_map<K,V> {
impl<K,V> &linear_map<K,V> {
/*
FIXME --- #2979 must be fixed to typecheck this
fn find_ptr(k: K) -> option<&V> {

@@ -309,17 +309,17 @@ fn each_value_ref(blk: fn(v: &V) -> bool) {
}
}
impl public_methods<K: copy, V: copy> for &linear_map<K,V> {
impl<K: copy, V: copy> &linear_map<K,V> {
fn each(blk: fn(+K,+V) -> bool) {
self.each_ref(|k,v| blk(copy *k, copy *v));
}
}
impl public_methods<K: copy, V> for &linear_map<K,V> {
impl<K: copy, V> &linear_map<K,V> {
fn each_key(blk: fn(+K) -> bool) {
self.each_key_ref(|k| blk(copy *k));
}
}
impl public_methods<K, V: copy> for &linear_map<K,V> {
impl<K, V: copy> &linear_map<K,V> {
fn each_value(blk: fn(+V) -> bool) {
self.each_value_ref(|v| blk(copy *v));
}

@@ -1901,7 +1901,7 @@ trait unique_str {
}
/// Extension methods for strings
impl extensions of unique_str for ~str {
impl ~str: unique_str {
/// Returns a string with leading and trailing whitespace removed
#[inline]
fn trim() -> ~str { trim(self) }

@@ -1920,7 +1920,7 @@ fn trim_right() -> ~str { trim_right(self) }
}
#[cfg(notest)]
impl extensions of add<&str,~str> for ~str {
impl ~str: add<&str,~str> {
#[inline(always)]
pure fn add(rhs: &str) -> ~str {
append(self, rhs)

@@ -1956,7 +1956,7 @@ trait str_slice {
}
/// Extension methods for strings
impl extensions/& of str_slice for &str {
impl &str: str_slice {
/**
* Return true if a predicate matches all characters or if the string
* contains no characters

@@ -32,7 +32,7 @@ enum sem<Q: send> = exclusive<{
blocked: Q,
}>;
impl sem<Q: send> for &sem<Q> {
impl<Q: send> &sem<Q> {
fn acquire() {
let mut waiter_nobe = none;
unsafe {

@@ -72,14 +72,14 @@ fn release() {
}
}
// FIXME(#3154) move both copies of this into sem<Q>, and unify the 2 structs
impl sem_access for &sem<()> {
impl &sem<()> {
fn access<U>(blk: fn() -> U) -> U {
self.acquire();
let _x = sem_release(self);
blk()
}
}
impl sem_access for &sem<waitqueue> {
impl &sem<waitqueue> {
fn access<U>(blk: fn() -> U) -> U {
self.acquire();
let _x = sem_and_signal_release(self);

@@ -102,7 +102,7 @@ struct sem_and_signal_release {
/// A mechanism for atomic-unlock-and-deschedule blocking and signalling.
enum condvar = &sem<waitqueue>;
impl condvar for condvar {
impl condvar {
/// Atomically drop the associated lock, and block until a signal is sent.
fn wait() {
let (signal_end, wait_end) = pipes::stream();

@@ -158,7 +158,7 @@ fn broadcast() -> uint {
}
}
impl sem_and_signal for &sem<waitqueue> {
impl &sem<waitqueue> {
fn access_cond<U>(blk: fn(condvar) -> U) -> U {
do self.access { blk(condvar(self)) }
}

@@ -179,7 +179,7 @@ fn new_semaphore(count: int) -> semaphore {
blocked: () })))
}
impl semaphore for &semaphore {
impl &semaphore {
/// Create a new handle to the semaphore.
fn clone() -> semaphore { semaphore(sem((***self).clone())) }

@@ -218,7 +218,7 @@ fn new_mutex() -> mutex {
blocked: { head: block_head, tail: block_tail } })))
}
impl mutex for &mutex {
impl &mutex {
/// Create a new handle to the mutex.
fn clone() -> mutex { mutex(sem((***self).clone())) }

@@ -106,7 +106,7 @@ enum type_desc = {
drop { rustrt::rust_destroy_little_lock(self.l); }
}
impl methods for little_lock {
impl little_lock {
unsafe fn lock<T>(f: fn() -> T) -> T {
class unlock {
let l: rust_little_lock;

@@ -24,9 +24,6 @@
*/
import result::result;
import dvec::extensions;
import dvec_iter::extensions;
import arc::methods;
export task;
export task_result;

@@ -209,7 +206,7 @@ fn task() -> task_builder {
})
}
impl private_methods for task_builder {
priv impl task_builder {
fn consume() -> task_builder {
if self.consumed {
fail ~"Cannot copy a task_builder"; // Fake move mode on self

@@ -219,7 +216,7 @@ fn consume() -> task_builder {
}
}
impl task_builder for task_builder {
impl task_builder {
/**
* Decouple the child task's failure from the parent's. If either fails,
* the other will not be killed.

@@ -1246,7 +1243,7 @@ fn new_task_in_new_sched(opts: sched_opts) -> *rust_task {
type local_data_key<T: owned> = fn@(+@T);
trait local_data { }
impl<T: owned> of local_data for @T { }
impl<T: owned> @T: local_data { }
// We use dvec because it's the best data structure in core. If TLS is used
// heavily in future, this could be made more efficient with a proper map.

@@ -1955,7 +1952,6 @@ fn test_platform_thread() {
#[ignore(cfg(windows))]
#[should_fail]
fn test_unkillable() {
import comm::methods;
let po = comm::port();
let ch = po.chan();

@@ -1992,7 +1988,6 @@ fn test_unkillable() {
#[ignore(cfg(windows))]
#[should_fail]
fn test_unkillable_nested() {
import comm::methods;
let po = comm::port();
let ch = po.chan();

@@ -2,18 +2,18 @@ trait to_bytes {
fn to_bytes() -> ~[u8];
}
impl of to_bytes for ~[u8] {
impl ~[u8]: to_bytes {
fn to_bytes() -> ~[u8] { copy self }
}
impl of to_bytes for @~[u8] {
impl @~[u8]: to_bytes {
fn to_bytes() -> ~[u8] { copy *self }
}
impl of to_bytes for ~str {
impl ~str: to_bytes {
fn to_bytes() -> ~[u8] { str::bytes(self) }
}
impl of to_bytes for @(~str) {
impl @(~str): to_bytes {
fn to_bytes() -> ~[u8] { str::bytes(*self) }
}

@@ -1,62 +1,62 @@
trait to_str { fn to_str() -> ~str; }
impl of to_str for int {
impl int: to_str {
fn to_str() -> ~str { int::str(self) }
}
impl of to_str for i8 {
impl i8: to_str {
fn to_str() -> ~str { i8::str(self) }
}
impl of to_str for i16 {
impl i16: to_str {
fn to_str() -> ~str { i16::str(self) }
}
impl of to_str for i32 {
impl i32: to_str {
fn to_str() -> ~str { i32::str(self) }
}
impl of to_str for i64 {
impl i64: to_str {
fn to_str() -> ~str { i64::str(self) }
}
impl of to_str for uint {
impl uint: to_str {
fn to_str() -> ~str { uint::str(self) }
}
impl of to_str for u8 {
impl u8: to_str {
fn to_str() -> ~str { u8::str(self) }
}
impl of to_str for u16 {
impl u16: to_str {
fn to_str() -> ~str { u16::str(self) }
}
impl of to_str for u32 {
impl u32: to_str {
fn to_str() -> ~str { u32::str(self) }
}
impl of to_str for u64 {
impl u64: to_str {
fn to_str() -> ~str { u64::str(self) }
}
impl of to_str for float {
impl float: to_str {
fn to_str() -> ~str { float::to_str(self, 4u) }
}
impl of to_str for bool {
impl bool: to_str {
fn to_str() -> ~str { bool::to_str(self) }
}
impl of to_str for () {
impl (): to_str {
fn to_str() -> ~str { ~"()" }
}
impl of to_str for ~str {
impl ~str: to_str {
fn to_str() -> ~str { self }
}
impl <A: to_str copy, B: to_str copy> of to_str for (A, B) {
impl<A: to_str copy, B: to_str copy> (A, B): to_str {
fn to_str() -> ~str {
let (a, b) = self;
~"(" + a.to_str() + ~", " + b.to_str() + ~")"
}
}
impl <A: to_str copy, B: to_str copy, C: to_str copy> of to_str for (A, B, C){
impl<A: to_str copy, B: to_str copy, C: to_str copy> (A, B, C): to_str {
fn to_str() -> ~str {
let (a, b, c) = self;
~"(" + a.to_str() + ~", " + b.to_str() + ~", " + c.to_str() + ~")"
}
}
impl <A: to_str> of to_str for ~[A] {
impl<A: to_str> ~[A]: to_str {
fn to_str() -> ~str {
let mut acc = ~"[", first = true;
for vec::each(self) |elt| {

@@ -69,10 +69,10 @@ fn to_str() -> ~str {
}
}
impl <A: to_str> of to_str for @A {
impl<A: to_str> @A: to_str {
fn to_str() -> ~str { ~"@" + (*self).to_str() }
}
impl <A: to_str> of to_str for ~A {
impl<A: to_str> ~A: to_str {
fn to_str() -> ~str { ~"~" + (*self).to_str() }
}

@@ -6,7 +6,7 @@ trait tuple_ops<T,U> {
pure fn swap() -> (U, T);
}
impl extensions <T:copy, U:copy> of tuple_ops<T,U> for (T, U) {
impl<T: copy, U: copy> (T, U): tuple_ops<T,U> {
/// Return the first element of self
pure fn first() -> T {

@@ -33,8 +33,7 @@ trait extended_tuple_ops<A,B> {
fn map<C>(f: fn(A, B) -> C) -> ~[C];
}
impl extensions<A: copy, B: copy> of extended_tuple_ops<A,B>
for (&[A], &[B]) {
impl<A: copy, B: copy> (&[A], &[B]): extended_tuple_ops<A,B> {
fn zip() -> ~[(A, B)] {
let (a, b) = self;

@@ -47,8 +46,7 @@ fn map<C>(f: fn(A, B) -> C) -> ~[C] {
}
}
impl extensions<A: copy, B: copy> of extended_tuple_ops<A,B>
for (~[A], ~[B]) {
impl<A: copy, B: copy> (~[A], ~[B]): extended_tuple_ops<A,B> {
fn zip() -> ~[(A, B)] {
let (a, b) = self;

@@ -56,19 +56,19 @@
max_value ^ i
}
impl ord of ord for T {
impl T: ord {
pure fn lt(&&other: T) -> bool {
return self < other;
}
}
impl eq of eq for T {
impl T: eq {
pure fn eq(&&other: T) -> bool {
return self == other;
}
}
impl num of num::num for T {
impl T: num::num {
pure fn add(&&other: T) -> T { return self + other; }
pure fn sub(&&other: T) -> T { return self - other; }
pure fn mul(&&other: T) -> T { return self * other; }

@@ -80,7 +80,7 @@ impl num of num::num for T {
pure fn from_int(n: int) -> T { return n as T; }
}
impl times of iter::times for T {
impl T: iter::times {
#[inline(always)]
#[doc = "A convenience form for basic iteration. Given a variable `x` \
of any numeric type, the expression `for x.times { /* anything */ }` \

@@ -96,7 +96,7 @@ fn times(it: fn() -> bool) {
}
}
impl timesi of iter::timesi for T {
impl T: iter::timesi {
#[inline(always)]
/// Like `times`, but with an index, `eachi`-style.
fn timesi(it: fn(uint) -> bool) {

@@ -1287,7 +1287,7 @@ trait vec_concat<T> {
pure fn +(rhs: &[const T]) -> self;
}
impl extensions<T: copy> of vec_concat<T> for ~[T] {
impl<T: copy> ~[T]: vec_concat<T> {
#[inline(always)]
pure fn +(rhs: &[const T]) -> ~[T] {
append(self, rhs)

@@ -1295,21 +1295,21 @@ impl extensions<T: copy> of vec_concat<T> for ~[T] {
}
#[cfg(notest)]
impl extensions<T: copy> of add<&[const T],~[T]> for ~[T] {
impl<T: copy> ~[T]: add<&[const T],~[T]> {
#[inline(always)]
pure fn add(rhs: &[const T]) -> ~[T] {
append(self, rhs)
}
}
impl extensions<T: copy> of vec_concat<T> for ~[mut T] {
impl<T: copy> ~[mut T]: vec_concat<T> {
#[inline(always)]
pure fn +(rhs: &[const T]) -> ~[mut T] {
append_mut(self, rhs)
}
}
impl extensions<T: copy> of add<&[const T],~[mut T]> for ~[mut T] {
impl<T: copy> ~[mut T]: add<&[const T],~[mut T]> {
#[inline(always)]
pure fn add(rhs: &[const T]) -> ~[mut T] {
append_mut(self, rhs)

@@ -1323,7 +1323,7 @@ trait const_vector {
}
/// Extension methods for vectors
impl extensions/&<T> of const_vector for &[const T] {
impl<T> &[const T]: const_vector {
/// Returns true if a vector contains no elements
#[inline]
pure fn is_empty() -> bool { is_empty(self) }

@@ -1344,7 +1344,7 @@ trait copyable_vector<T> {
}
/// Extension methods for vectors
impl extensions/&<T: copy> of copyable_vector<T> for &[const T] {
impl<T: copy> &[const T]: copyable_vector<T> {
/// Returns the first element of a vector
#[inline]
pure fn head() -> T { head(self) }

@@ -1381,7 +1381,7 @@ trait immutable_vector<T> {
}
/// Extension methods for vectors
impl extensions/&<T> of immutable_vector<T> for &[T] {
impl<T> &[T]: immutable_vector<T> {
/// Reduce a vector from right to left
#[inline]
pure fn foldr<U: copy>(z: U, p: fn(T, U) -> U) -> U { foldr(self, z, p) }

@@ -1496,7 +1496,7 @@ trait immutable_copyable_vector<T> {
}
/// Extension methods for vectors
impl extensions/&<T: copy> of immutable_copyable_vector<T> for &[T] {
impl<T: copy> &[T]: immutable_copyable_vector<T> {
/**
* Construct a new vector from the elements of a vector for which some
* predicate holds.

@@ -1741,12 +1741,12 @@ fn memmove(dst: &[mut u8], src: &[const u8], count: uint) {
// This cannot be used with iter-trait.rs because of the region pointer
// required in the slice.
impl extensions/&<A> of iter::base_iter<A> for &[A] {
impl<A> &[A]: iter::base_iter<A> {
fn each(blk: fn(A) -> bool) { each(self, blk) }
fn size_hint() -> option<uint> { some(len(self)) }
}
impl extensions/&<A> of iter::extended_iter<A> for &[A] {
impl<A> &[A]: iter::extended_iter<A> {
fn eachi(blk: fn(uint, A) -> bool) { iter::eachi(self, blk) }
fn all(blk: fn(A) -> bool) -> bool { iter::all(self, blk) }
fn any(blk: fn(A) -> bool) -> bool { iter::any(self, blk) }

@@ -1766,7 +1766,7 @@ trait iter_trait_extensions<A> {
fn max() -> A;
}
impl extensions/&<A:copy> of iter_trait_extensions<A> for &[A] {
impl<A: copy> &[A]: iter_trait_extensions<A> {
fn filter_to_vec(pred: fn(A) -> bool) -> ~[A] {
iter::filter_to_vec(self, pred)
}

@@ -1,10 +1,10 @@
import io::{reader, reader_util};
import io::reader;
trait to_base64 {
fn to_base64() -> ~str;
}
impl of to_base64 for ~[u8] {
impl ~[u8]: to_base64 {
fn to_base64() -> ~str {
let chars = str::chars(
~"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"

@@ -52,7 +52,7 @@ fn to_base64() -> ~str {
}
}
impl of to_base64 for ~str {
impl ~str: to_base64 {
fn to_base64() -> ~str {
str::bytes(self).to_base64()
}

@@ -62,7 +62,7 @@ trait from_base64 {
fn from_base64() -> ~[u8];
}
impl of from_base64 for ~[u8] {
impl ~[u8]: from_base64 {
fn from_base64() -> ~[u8] {
if self.len() % 4u != 0u { fail ~"invalid base64 length"; }

@@ -124,7 +124,7 @@ fn from_base64() -> ~[u8] {
}
}
impl of from_base64 for ~str {
impl ~str: from_base64 {
fn from_base64() -> ~[u8] {
str::bytes(self).from_base64()
}

@@ -421,7 +421,7 @@ fn ones(f: fn(uint) -> bool) {
pure fn right(_w0: uint, w1: uint) -> uint { return w1; }
impl extensions of ops::index<uint,bool> for bitv {
impl bitv: ops::index<uint,bool> {
pure fn index(&&i: uint) -> bool {
self.get(i)
}

@@ -6,19 +6,19 @@ trait fuzzy_eq {
pure fn fuzzy_eq(&&other: self) -> bool;
}
impl fuzzy_eq of fuzzy_eq for float {
impl float: fuzzy_eq {
pure fn fuzzy_eq(&&other: float) -> bool {
return float::abs(self - other) < fuzzy_epsilon;
}
}
impl fuzzy_eq of fuzzy_eq for f32 {
impl f32: fuzzy_eq {
pure fn fuzzy_eq(&&other: f32) -> bool {
return f32::abs(self - other) < (fuzzy_epsilon as f32);
}
}
impl fuzzy_eq of fuzzy_eq for f64 {
impl f64: fuzzy_eq {
pure fn fuzzy_eq(&&other: f64) -> bool {
return f64::abs(self - other) < (fuzzy_epsilon as f64);
}

@@ -1,7 +1,7 @@
//! A deque. Untested as of yet. Likely buggy
import option::{some, none};
import dvec::{dvec, extensions};
import dvec::dvec;
trait t<T> {
fn size() -> uint;

@@ -49,7 +49,7 @@ fn get<T: copy>(elts: dvec<cell<T>>, i: uint) -> T {
mut hi: uint,
elts: dvec<cell<T>>};
impl <T: copy> of t<T> for repr<T> {
impl <T: copy> repr<T>: t<T> {
fn size() -> uint { return self.nelts; }
fn add_front(t: T) {
let oldlo: uint = self.lo;

@@ -46,13 +46,13 @@ trait get_doc {
fn [](tag: uint) -> doc;
}
impl extensions of get_doc for doc {
impl doc: get_doc {
fn [](tag: uint) -> doc {
get_doc(self, tag)
}
}
impl extensions of ops::index<uint,doc> for doc {
impl doc: ops::index<uint,doc> {
pure fn index(&&tag: uint) -> doc {
unchecked {
get_doc(self, tag)

@@ -214,7 +214,7 @@ fn writer(w: io::writer) -> writer {
}
// FIXME (#2741): Provide a function to write the standard ebml header.
impl writer for writer {
impl writer {
fn start_tag(tag_id: uint) {
debug!{"Start tag %u", tag_id};

@@ -339,7 +339,7 @@ trait serializer_priv {
fn _emit_label(label: ~str);
}
impl serializer of serializer_priv for ebml::writer {
impl ebml::writer: serializer_priv {
// used internally to emit things like the vector length and so on
fn _emit_tagged_uint(t: ebml_serializer_tag, v: uint) {
assert v <= 0xFFFF_FFFF_u;

@@ -357,7 +357,7 @@ fn _emit_label(label: ~str) {
}
}
impl serializer of serialization::serializer for ebml::writer {
impl ebml::writer: serialization::serializer {
fn emit_nil() {}
fn emit_uint(v: uint) { self.wr_tagged_u64(es_uint as uint, v as u64); }

@@ -424,7 +424,7 @@ fn ebml_deserializer(d: ebml::doc) -> ebml_deserializer {
ebml_deserializer_({mut parent: d, mut pos: d.start})
}
impl deserializer_priv for ebml_deserializer {
priv impl ebml_deserializer {
fn _check_label(lbl: ~str) {
if self.pos < self.parent.end {
let {tag: r_tag, doc: r_doc} =

@@ -479,7 +479,7 @@ fn _next_uint(exp_tag: ebml_serializer_tag) -> uint {
}
}
impl deserializer of serialization::deserializer for ebml_deserializer {
impl ebml_deserializer: serialization::deserializer {
fn read_nil() -> () { () }
fn read_u64() -> u64 { ebml::doc_as_u64(self.next_doc(es_u64)) }

@@ -5,11 +5,10 @@
import result::{result, ok, err};
import io;
import io::{reader_util, writer_util};
import io::writer_util;
import map;
import map::hashmap;
import map::map;
import core::vec::extensions;
export json;
export error;

@@ -120,7 +119,7 @@ enum parser {
parser_(parser_)
}
impl parser for parser {
impl parser {
fn eof() -> bool { self.ch == -1 as char }
fn bump() {

@@ -503,83 +502,83 @@ fn eq(value0: json, value1: json) -> bool {
trait to_json { fn to_json() -> json; }
impl of to_json for json {
impl json: to_json {
fn to_json() -> json { self }
}
impl of to_json for @json {
impl @json: to_json {
fn to_json() -> json { *self }
}
impl of to_json for int {
impl int: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for i8 {
impl i8: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for i16 {
impl i16: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for i32 {
impl i32: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for i64 {
impl i64: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for uint {
impl uint: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for u8 {
impl u8: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for u16 {
impl u16: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for u32 {
impl u32: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for u64 {
impl u64: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for float {
impl float: to_json {
fn to_json() -> json { num(self) }
}
impl of to_json for f32 {
impl f32: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for f64 {
impl f64: to_json {
fn to_json() -> json { num(self as float) }
}
impl of to_json for () {
impl (): to_json {
fn to_json() -> json { null }
}
impl of to_json for bool {
impl bool: to_json {
fn to_json() -> json { boolean(self) }
}
impl of to_json for ~str {
impl ~str: to_json {
fn to_json() -> json { string(@copy self) }
}
impl of to_json for @~str {
impl @~str: to_json {
fn to_json() -> json { string(self) }
}
impl <A: to_json, B: to_json> of to_json for (A, B) {
impl <A: to_json, B: to_json> (A, B): to_json {
fn to_json() -> json {
match self {
(a, b) => {

@@ -589,8 +588,8 @@ fn to_json() -> json {
}
}
impl <A: to_json, B: to_json, C: to_json>
of to_json for (A, B, C) {
impl <A: to_json, B: to_json, C: to_json> (A, B, C): to_json {
fn to_json() -> json {
match self {
(a, b, c) => {

@@ -600,11 +599,11 @@ fn to_json() -> json {
}
}
impl <A: to_json> of to_json for ~[A] {
impl <A: to_json> ~[A]: to_json {
fn to_json() -> json { list(@self.map(|elt| elt.to_json())) }
}
impl <A: to_json copy> of to_json for hashmap<~str, A> {
impl <A: to_json copy> hashmap<~str, A>: to_json {
fn to_json() -> json {
let d = map::str_hash();
for self.each() |key, value| {

@@ -614,7 +613,7 @@ fn to_json() -> json {
}
}
impl <A: to_json> of to_json for option<A> {
impl <A: to_json> option<A>: to_json {
fn to_json() -> json {
match self {
none => null,

@@ -623,11 +622,11 @@ fn to_json() -> json {
}
}
impl of to_str::to_str for json {
impl json: to_str::to_str {
fn to_str() -> ~str { to_str(self) }
}
impl of to_str::to_str for error {
impl error: to_str::to_str {
fn to_str() -> ~str {
fmt!{"%u:%u: %s", self.line, self.col, *self.msg}
}

@@ -2,7 +2,6 @@
#[warn(deprecated_mode)];
import chained::hashmap;
import io::writer_util;
import to_str::to_str;
export hashmap, hashfn, eqfn, set, map, chained, hashmap, str_hash;

@@ -127,7 +126,7 @@ enum search_result<K, V> {
found_after(@entry<K,V>, @entry<K,V>)
}
impl private_methods<K, V: copy> for t<K, V> {
priv impl<K, V: copy> t<K, V> {
fn search_rem(k: &K, h: uint, idx: uint,
e_root: @entry<K,V>) -> search_result<K,V> {
let mut e0 = e_root;

@@ -207,7 +206,7 @@ fn each_entry(blk: fn(@entry<K,V>) -> bool) {
}
}
impl hashmap<K: copy, V: copy> of map<K, V> for t<K, V> {
impl<K: copy, V: copy> t<K, V>: map<K, V> {
fn size() -> uint { self.count }
fn contains_key(+k: K) -> bool {

@@ -328,7 +327,7 @@ fn each_value_ref(blk: fn(value: &V) -> bool) {
}
}
impl hashmap<K: copy to_str, V: to_str copy> of to_str for t<K, V> {
impl<K: copy to_str, V: to_str copy> t<K, V>: to_str {
fn to_writer(wr: io::writer) {
if self.count == 0u {
wr.write_str(~"{}");

@@ -354,7 +353,7 @@ fn to_str() -> ~str {
}
}
impl hashmap<K: copy, V: copy> of ops::index<K, V> for t<K, V> {
impl<K: copy, V: copy> t<K, V>: ops::index<K, V> {
pure fn index(&&k: K) -> V {
unchecked {
self.get(k)

@@ -4,7 +4,6 @@
import uint;
import iotask = uv::iotask::iotask;
import interact = uv::iotask::interact;
import comm::methods;
import sockaddr_in = uv::ll::sockaddr_in;
import sockaddr_in6 = uv::ll::sockaddr_in6;

@@ -147,7 +146,7 @@ trait as_unsafe_u32 {
unsafe fn as_u32() -> u32;
}
impl x of as_unsafe_u32 for ipv4_rep {
impl ipv4_rep: as_unsafe_u32 {
// this is pretty dastardly, i know
unsafe fn as_u32() -> u32 {
*((ptr::addr_of(self)) as *u32)

@@ -3,15 +3,12 @@
import ip = net_ip;
import uv::iotask;
import uv::iotask::iotask;
import comm::methods;
import future_spawn = future::spawn;
import future::extensions;
// FIXME #1935
// should be able to, but can't atm, replace w/ result::{result, extensions};
import result::*;
import libc::size_t;
import str::extensions;
import io::{reader, reader_util, writer};
import io::{reader, writer};
// tcp interfaces
export tcp_socket;

@@ -726,7 +723,7 @@ fn socket_buf(-sock: tcp_socket) -> tcp_socket_buf {
}
/// Convenience methods extending `net::tcp::tcp_socket`
impl tcp_socket for tcp_socket {
impl tcp_socket {
fn read_start() -> result::result<comm::port<
result::result<~[u8], tcp_err_data>>, tcp_err_data> {
read_start(self)

@@ -755,7 +752,7 @@ fn write_future(raw_write_data: ~[u8])
}
/// Implementation of `io::reader` trait for a buffered `net::tcp::tcp_socket`
impl tcp_socket_buf of io::reader for @tcp_socket_buf {
impl @tcp_socket_buf: io::reader {
fn read(buf: &[mut u8], len: uint) -> uint {
// Loop until our buffer has enough data in it for us to read from.
while self.data.buf.len() < len {

@@ -808,7 +805,7 @@ fn tell() -> uint {
}
/// Implementation of `io::reader` trait for a buffered `net::tcp::tcp_socket`
impl tcp_socket_buf of io::writer for @tcp_socket_buf {
impl @tcp_socket_buf: io::writer {
fn write(data: &[const u8]) unsafe {
let socket_data_ptr =
ptr::addr_of(*((*(self.data)).sock).socket_data);

@@ -1067,7 +1064,7 @@ trait to_tcp_err {
fn to_tcp_err() -> tcp_err_data;
}
impl of to_tcp_err for uv::ll::uv_err_data {
impl uv::ll::uv_err_data: to_tcp_err {
fn to_tcp_err() -> tcp_err_data {
{ err_name: self.err_name, err_msg: self.err_msg }
}

@@ -2,8 +2,8 @@
import map;
import map::{hashmap, str_hash};
import io::{reader, reader_util};
import dvec::{dvec, extensions};
import io::reader;
import dvec::dvec;
export url, userinfo, query;
export from_str, to_str;

@@ -673,7 +673,7 @@ fn to_str(url: url) -> ~str {
fragment]);
}
impl of to_str::to_str for url {
impl url: to_str::to_str {
fn to_str() -> ~str {
to_str(self)
}

@@ -3,8 +3,6 @@
import comm::send;
import comm::recv;
import future_spawn = future::spawn;
import future::extensions;
import core::vec::extensions;
export map, mapi, alli, any, mapi_factory;

@@ -2,7 +2,7 @@
import io::writer_util;
import serialization::serializer;
impl of serializer for writer {
impl writer: serializer {
fn emit_nil() {
self.write_str(~"()")
}

@@ -105,7 +105,7 @@ trait serializer_helpers {
fn emit_from_vec<T>(v: ~[T], f: fn(T));
}
impl serializer_helpers<S: serializer> of serializer_helpers for S {
impl<S: serializer> S: serializer_helpers {
fn emit_from_vec<T>(v: ~[T], f: fn(T)) {
emit_from_vec(self, v, f)
}

@@ -115,7 +115,7 @@ trait deserializer_helpers {
fn read_to_vec<T: copy>(f: fn() -> T) -> ~[T];
}
impl deserializer_helpers<D: deserializer> of deserializer_helpers for D {
impl<D: deserializer> D: deserializer_helpers {
fn read_to_vec<T: copy>(f: fn() -> T) -> ~[T] {
read_to_vec(self, f)
}

@@ -217,7 +217,7 @@ fn pad_msg(st: sha1state) {
process_msg_block(st);
}
impl of sha1 for sha1state {
impl sha1state: sha1 {
fn reset() {
// FIXME: Should be typestate precondition (#2345)
assert (vec::len(self.h) == digest_buf_len);

@@ -4,7 +4,7 @@
*/
import core::option;
import core::option::{some, none};
import dvec::{dvec, extensions};
import dvec::dvec;
import map::map;
// FIXME (#2347): Should not be @; there's a bug somewhere in rustc that

@@ -63,7 +63,7 @@ fn contains_key<T: copy>(self: smallintmap<T>, key: uint) -> bool {
}
/// Implements the map::map interface for smallintmap
impl <V: copy> of map::map<uint, V> for smallintmap<V> {
impl<V: copy> smallintmap<V>: map::map<uint, V> {
fn size() -> uint {
let mut sz = 0u;
for self.v.each |item| {

@@ -134,7 +134,7 @@ fn each_value_ref(blk: fn(value: &V) -> bool) {
}
}
impl extensions<V: copy> of ops::index<uint, V> for smallintmap<V> {
impl<V: copy> smallintmap<V>: ops::index<uint, V> {
pure fn index(&&key: uint) -> V {
unchecked {
get(self, key)

@@ -3,7 +3,6 @@
import core::option;
import option::{none, some};
import rand;
import core::rand::extensions;
fn mkdtemp(prefix: ~str, suffix: ~str) -> option<~str> {
let r = rand::rng();

@@ -1,6 +1,6 @@
import libc::{c_char, c_int, c_long, size_t, time_t};
import io::{reader, reader_util};
import result::{result, ok, err, extensions};
import io::reader;
import result::{result, ok, err};
export
timespec,

@@ -730,7 +730,7 @@ fn parse_type(ch: char, tm: tm) -> ~str {
buf
}
impl tm for tm {
impl tm {
/// Convert time to the seconds from January 1, 1970
fn to_timespec() -> timespec {
let mut sec = 0i64;

@@ -7,7 +7,7 @@
import get_gl = get;
import iotask::{iotask, spawn_iotask};
import priv::{chan_from_global_ptr, weaken_task};
import comm::{port, chan, methods, select2, listen};
import comm::{port, chan, select2, listen};
import task::task_builder;
import either::{left, right};

@@ -12,7 +12,7 @@
import libc::c_void;
import ptr::addr_of;
import comm::{port, chan, methods, listen};
import comm::{port, chan, listen};
import task::task_builder;
import ll = uv_ll;

@@ -827,20 +827,20 @@ trait path_concat {
}
// Remove after snapshot!
impl methods of path_concat for ident {
impl ident: path_concat {
pure fn +(&&id: ident) -> @path {
simple_path(self, empty_span()) + id
}
}
impl methods of ops::add<ident,@path> for ident {
impl ident: ops::add<ident,@path> {
pure fn add(&&id: ident) -> @path {
simple_path(self, empty_span()) + id
}
}
// Remove after snapshot!
impl methods of path_concat for @path {
impl @path: path_concat {
pure fn +(&&id: ident) -> @path {
@{
idents: vec::append_one(self.idents, id)

@@ -849,7 +849,7 @@ impl methods of path_concat for @path {
}
}
impl methods of ops::add<ident,@path> for @path {
impl @path: ops::add<ident,@path> {
pure fn add(&&id: ident) -> @path {
@{
idents: vec::append_one(self.idents, id)

@@ -3,7 +3,6 @@
import ast::*;
import print::pprust;
import ast_util::path_to_ident;
import ast_util::inlined_item_methods;
import diagnostic::span_handler;
enum path_elt { path_mod(ident), path_name(ident) }

@@ -368,7 +368,7 @@ trait inlined_item_utils {
fn accept<E>(e: E, v: visit::vt<E>);
}
impl inlined_item_methods of inlined_item_utils for inlined_item {
impl inlined_item: inlined_item_utils {
fn ident() -> ident {
match self {
ii_item(i) => /* FIXME (#2543) */ copy i.ident,

@@ -1,4 +1,4 @@
import dvec::{dvec, extensions};
import dvec::dvec;
export filename;
export filemap;

@@ -46,7 +46,7 @@ trait handler {
cm: codemap::codemap
};
impl codemap_span_handler of span_handler for codemap_t {
impl codemap_t: span_handler {
fn span_fatal(sp: span, msg: ~str) -> ! {
self.handler.emit(some((self.cm, sp)), msg, fatal);
fail;

@@ -72,7 +72,7 @@ fn handler() -> handler {
}
}
impl codemap_handler of handler for handler_t {
impl handler_t: handler {
fn fatal(msg: ~str) -> ! {
self.emit(none, msg, fatal);
fail;
@ -150,7 +150,7 @@ fn ty_fn(span: span,
|
||||
fn at(span: span, expr: @ast::expr) -> @ast::expr;
|
||||
}
|
||||
|
||||
impl helpers of ext_ctxt_helpers for ext_ctxt {
|
||||
impl ext_ctxt: ext_ctxt_helpers {
|
||||
fn helper_path(base_path: @ast::path,
|
||||
helper_name: ~str) -> @ast::path {
|
||||
let head = vec::init(base_path.idents);
|
||||
|
@ -144,7 +144,7 @@ fn mk_ctxt(parse_sess: parse::parse_sess,
|
||||
cfg: ast::crate_cfg,
|
||||
mut backtrace: expn_info,
|
||||
mut mod_path: ~[ast::ident]};
|
||||
impl of ext_ctxt for ctxt_repr {
|
||||
impl ctxt_repr: ext_ctxt {
|
||||
fn codemap() -> codemap { self.parse_sess.cm }
|
||||
fn parse_sess() -> parse::parse_sess { self.parse_sess }
|
||||
fn cfg() -> ast::crate_cfg { self.cfg }
|
||||
@ -277,7 +277,7 @@ fn get_mac_body(cx: ext_ctxt, sp: span, args: ast::mac_body)
|
||||
fn tt_args_to_original_flavor(cx: ext_ctxt, sp: span, arg: ~[ast::token_tree])
|
||||
-> ast::mac_arg {
|
||||
import ast::{matcher, matcher_, match_tok, match_seq, match_nonterminal};
|
||||
import parse::lexer::{new_tt_reader, tt_reader_as_reader, reader};
|
||||
import parse::lexer::{new_tt_reader, reader};
|
||||
import tt::earley_parser::{parse_or_else, matched_seq,
|
||||
matched_nonterminal};
|
||||
|
||||
|
@ -36,15 +36,13 @@
import codemap::span;
import ext::base::ext_ctxt;
import ast::tt_delim;
import parse::lexer::{new_tt_reader, reader, tt_reader_as_reader};
import parse::lexer::{new_tt_reader, reader};
import parse::parser::{parser, SOURCE_FILE};
import parse::common::parser_common;

import pipes::parse_proto::proto_parser;

import pipes::pipec::compile;
import pipes::proto::{visit, protocol};
import pipes::check::proto_check;

fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident,
tt: ~[ast::token_tree]) -> base::mac_result

@ -36,7 +36,7 @@ trait append_types {
fn add_tys(+tys: ~[@ast::ty]) -> @ast::path;
}

impl methods of append_types for @ast::path {
impl @ast::path: append_types {
fn add_ty(ty: @ast::ty) -> @ast::path {
@{types: vec::append_one(self.types, ty)
with *self}

@ -89,7 +89,7 @@ fn item_ty_poly(name: ident,
fn ty_option(ty: @ast::ty) -> @ast::ty;
}

impl ast_builder of ext_ctxt_ast_builder for ext_ctxt {
impl ext_ctxt: ext_ctxt_ast_builder {
fn ty_option(ty: @ast::ty) -> @ast::ty {
self.ty_path_ast_builder(path(@~"option", self.empty_span())
.add_ty(ty))

@ -19,16 +19,14 @@

*/

import dvec::extensions;

import ext::base::ext_ctxt;

import ast::{ident};

import proto::{state, protocol, next_state, methods};
import proto::{state, protocol, next_state};
import ast_builder::empty_span;

impl proto_check of proto::visitor<(), (), ()> for ext_ctxt {
impl ext_ctxt: proto::visitor<(), (), ()> {
fn visit_proto(_proto: protocol,
_states: &[()]) { }

@ -27,11 +27,8 @@

*/

import dvec::extensions;

import std::bitv::{bitv};

import proto::methods;
import ast_builder::empty_span;

fn analyze(proto: protocol, _cx: ext_ctxt) {

@ -11,7 +11,7 @@ trait proto_parser {
fn parse_state(proto: protocol);
}

impl proto_parser of proto_parser for parser {
impl parser: proto_parser {
fn parse_proto(id: ident) -> protocol {
let proto = protocol(id, self.span);

@ -3,9 +3,6 @@
import to_str::to_str;

import dvec::dvec;
import dvec::extensions;

import tuple::extensions;

import ast::ident;
import util::interner;

@ -15,11 +12,8 @@
import parse;
import parse::*;
import proto::*;
import ast::methods;

import ast_builder::append_types;
import ast_builder::ast_builder;
import ast_builder::methods;
import ast_builder::path;

// Transitional reexports so qquote can find the paths it is looking for

@ -44,7 +38,7 @@ trait gen_init {
fn compile(cx: ext_ctxt) -> @ast::item;
}

impl compile of gen_send for message {
impl message: gen_send {
fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
debug!{"pipec: gen_send"};
match self {

@ -199,7 +193,7 @@ fn to_ty(cx: ext_ctxt) -> @ast::ty {
}
}

impl compile of to_type_decls for state {
impl state: to_type_decls {
fn to_type_decls(cx: ext_ctxt) -> ~[@ast::item] {
debug!{"pipec: to_type_decls"};
// This compiles into two different type declarations. Say the

@ -283,7 +277,7 @@ fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item] {
}
}

impl compile of gen_init for protocol {
impl protocol: gen_init {
fn gen_init(cx: ext_ctxt) -> @ast::item {
let ext_cx = cx;
@ -425,37 +419,37 @@ trait to_source {
fn to_source() -> ~str;
}

impl of to_source for @ast::item {
impl @ast::item: to_source {
fn to_source() -> ~str {
item_to_str(self)
}
}

impl of to_source for ~[@ast::item] {
impl ~[@ast::item]: to_source {
fn to_source() -> ~str {
str::connect(self.map(|i| i.to_source()), ~"\n\n")
}
}

impl of to_source for @ast::ty {
impl @ast::ty: to_source {
fn to_source() -> ~str {
ty_to_str(self)
}
}

impl of to_source for ~[@ast::ty] {
impl ~[@ast::ty]: to_source {
fn to_source() -> ~str {
str::connect(self.map(|i| i.to_source()), ~", ")
}
}

impl of to_source for ~[ast::ty_param] {
impl ~[ast::ty_param]: to_source {
fn to_source() -> ~str {
pprust::typarams_to_str(self)
}
}

impl of to_source for @ast::expr {
impl @ast::expr: to_source {
fn to_source() -> ~str {
pprust::expr_to_str(self)
}

@ -467,7 +461,7 @@ trait ext_ctxt_parse_utils {
fn parse_stmt(s: ~str) -> @ast::stmt;
}

impl parse_utils of ext_ctxt_parse_utils for ext_ctxt {
impl ext_ctxt: ext_ctxt_parse_utils {
fn parse_item(s: ~str) -> @ast::item {
let res = parse::parse_item_from_source_str(
~"***protocol expansion***",

@ -1,15 +1,15 @@
import to_str::to_str;
import dvec::{dvec, extensions};
import dvec::dvec;

import ast::{ident};

import ast_builder::{path, methods, ast_builder, append_types};
import ast_builder::{path, append_types};

enum direction {
send, recv
}

impl of to_str for direction {
impl direction: to_str {
fn to_str() -> ~str {
match self {
send => ~"send",

@ -18,7 +18,7 @@ fn to_str() -> ~str {
}
}

impl methods for direction {
impl direction {
fn reverse() -> direction {
match self {
send => recv,

@ -34,7 +34,7 @@ enum message {
message(ident, span, ~[@ast::ty], state, next_state)
}

impl methods for message {
impl message {
fn name() -> ident {
match self {
message(id, _, _, _, _) => id

@ -67,7 +67,7 @@ enum state {
}),
}

impl methods for state {
impl state {
fn add_message(name: ident, span: span,
+data: ~[@ast::ty], next: next_state) {
self.messages.push(message(name, span, data, self,

@ -161,7 +161,7 @@ fn is_bounded() -> bool {
}
}

impl methods for protocol {
impl protocol {
fn add_state(name: ident, dir: direction) -> state {
self.add_state_poly(name, dir, ~[])
}
@ -2,7 +2,7 @@
mac_aq, mac_var};
import parse::parser;
import parse::parser::parse_from_source_str;
import dvec::{dvec, extensions};
import dvec::dvec;

import fold::*;
import visit::*;

@ -34,7 +34,7 @@ trait qq_helper {
fn get_fold_fn() -> ~str;
}

impl of qq_helper for @ast::crate {
impl @ast::crate: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_crate(*self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}

@ -44,7 +44,7 @@ fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
}
fn get_fold_fn() -> ~str {~"fold_crate"}
}
impl of qq_helper for @ast::expr {
impl @ast::expr: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_expr(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {

@ -59,7 +59,7 @@ fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
}
fn get_fold_fn() -> ~str {~"fold_expr"}
}
impl of qq_helper for @ast::ty {
impl @ast::ty: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_ty(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {

@ -74,7 +74,7 @@ fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
}
fn get_fold_fn() -> ~str {~"fold_ty"}
}
impl of qq_helper for @ast::item {
impl @ast::item: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_item(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}

@ -84,7 +84,7 @@ fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
}
fn get_fold_fn() -> ~str {~"fold_item"}
}
impl of qq_helper for @ast::stmt {
impl @ast::stmt: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_stmt(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}

@ -94,7 +94,7 @@ fn mk_parse_fn(cx: ext_ctxt, sp: span) -> @ast::expr {
}
fn get_fold_fn() -> ~str {~"fold_stmt"}
}
impl of qq_helper for @ast::pat {
impl @ast::pat: qq_helper {
fn span() -> span {self.span}
fn visit(cx: aq_ctxt, v: vt<aq_ctxt>) {visit_pat(self, cx, v);}
fn extract_mac() -> option<ast::mac_> {fail}

@ -1,6 +1,6 @@
import codemap::span;
import std::map::{hashmap, str_hash, box_str_hash};
import dvec::{dvec, extensions};
import dvec::dvec;

import base::*;

@ -7,7 +7,7 @@
//import parse::common::parser_common;
import parse::common::*; //resolve bug?
import parse::parse_sess;
import dvec::{dvec, extensions};
import dvec::dvec;
import ast::{matcher, match_tok, match_seq, match_nonterminal, ident};
import ast_util::mk_sp;
import std::map::{hashmap, box_str_hash};

@ -2,7 +2,7 @@
import codemap::span;
import ast::{ident, matcher_, matcher, match_tok,
match_nonterminal, match_seq, tt_delim};
import parse::lexer::{new_tt_reader, tt_reader_as_reader, reader};
import parse::lexer::{new_tt_reader, reader};
import parse::token::{FAT_ARROW, SEMI, LBRACE, RBRACE, nt_matchers, nt_tt};
import parse::parser::{parser, SOURCE_FILE};
import earley_parser::{parse, parse_or_else, success, failure, named_match,

@ -652,7 +652,7 @@ fn default_ast_fold() -> ast_fold_precursor {
new_span: noop_span};
}

impl of ast_fold for ast_fold_precursor {
impl ast_fold_precursor: ast_fold {
/* naturally, a macro to write these would be nice */
fn fold_crate(c: crate) -> crate {
let (n, s) = self.fold_crate(c.node, c.span, self as ast_fold);

@ -763,7 +763,7 @@ fn new_span(span: span) -> span {
}
}

impl extensions for ast_fold {
impl ast_fold {
fn fold_attributes(attrs: ~[attribute]) -> ~[attribute] {
attrs.map(|x| fold_attribute_(x, self))
}
@ -1,5 +1,4 @@
//! The main parser interface
import dvec::extensions;

export parse_sess;
export new_parse_sess, new_parse_sess_special_handler;

@ -19,8 +18,7 @@
import ast::node_id;
import util::interner;
import diagnostic::{span_handler, mk_span_handler, mk_handler, emitter};
import lexer::{reader, string_reader, string_reader_as_reader};
import lexer::{tt_reader_as_reader};
import lexer::{reader, string_reader};

type parse_sess = @{
cm: codemap::codemap,

@ -23,7 +23,7 @@ fn parse_inner_attrs_and_next() ->
fn parse_optional_meta() -> ~[@ast::meta_item];
}

impl parser_attr of parser_attr for parser {
impl parser: parser_attr {

fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
-> attr_or_ext

@ -1,8 +1,7 @@
import io::reader_util;
import io::println;//XXXXXXXXxxx
import util::interner;
import lexer::{string_reader, bump, is_eof, nextch,
is_whitespace, get_str_from, string_reader_as_reader, reader};
is_whitespace, get_str_from, reader};

export cmnt;
export lit;

@ -59,7 +59,7 @@ fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> spanned<~[T]>;
}

impl parser_common of parser_common for parser {
impl parser: parser_common {
fn unexpected_last(t: token::token) -> ! {
self.span_fatal(
copy self.last_span,

@ -68,7 +68,7 @@ fn dup_string_reader(&&r: string_reader) -> string_reader {
mut peek_tok: r.peek_tok, mut peek_span: r.peek_span}
}

impl string_reader_as_reader of reader for string_reader {
impl string_reader: reader {
fn is_eof() -> bool { is_eof(self) }
fn next_token() -> {tok: token::token, sp: span} {
let ret_val = {tok: self.peek_tok, sp: self.peek_span};

@ -86,7 +86,7 @@ fn peek() -> {tok: token::token, sp: span} {
fn dup() -> reader { dup_string_reader(self) as reader }
}

impl tt_reader_as_reader of reader for tt_reader {
impl tt_reader: reader {
fn is_eof() -> bool { self.cur_tok == token::EOF }
fn next_token() -> {tok: token::token, sp: span} {
/* weird resolve bug: if the following `if`, or any of its

@ -8,12 +8,12 @@
import codemap::{span,fss_none};
import util::interner;
import ast_util::{spanned, respan, mk_sp, ident_to_path, operator_prec};
import lexer::{reader, tt_reader_as_reader};
import lexer::reader;
import prec::{as_prec, token_to_binop};
import attr::parser_attr;
import common::{seq_sep_trailing_disallowed, seq_sep_trailing_allowed,
seq_sep_none, token_to_str};
import dvec::{dvec, extensions};
import dvec::dvec;
import vec::{push};
import ast::{_mod, add, alt_check, alt_exhaustive, arg, arm, attribute,
bind_by_ref, bind_by_implicit_ref, bind_by_value,

@ -2424,9 +2424,6 @@ fn parse_item_trait() -> item_info {
}

// Parses four variants (with the region/type params always optional):
// impl /&<T: copy> of to_str for ~[T] { ... }
// impl name/&<T> of to_str for ~[T] { ... }
// impl name/&<T> for ~[T] { ... }
// impl<T> ~[T] : to_str { ... }
fn parse_item_impl() -> item_info {
fn wrap_path(p: parser, pt: @path) -> @ty {

@ -2466,6 +2463,7 @@ fn wrap_path(p: parser, pt: @path) -> @ty {
traits = ~[];
}
} else {
self.warn(~"old-style named impl?");
let mut ident_old;
if self.token == token::BINOP(token::SLASH) {
self.parse_region_param();
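The parser comment in the hunk above documents the conversion applied throughout this commit: `impl <name> of <trait> for <type>` (and `impl <name> for <type>` for plain method impls) becomes `impl <type>: <trait>` (and plain `impl <type>`), with any type parameters moving onto the `impl` keyword itself, as in `impl<T: send> port<T>`. A minimal sketch of the same shape, using hypothetical `ToStr`/`Celsius` names and the present-day `impl Trait for Type` spelling rather than either 2012 form, looks like this:

// A sketch only -- not part of this commit. The 2012-era forms converted
// above looked like:
//
//   old:  impl methods<T: copy> of to_str for ~[T] { ... }
//   new:  impl<T: copy> ~[T]: to_str { ... }
//
//   old (plain method impl):  impl methods for protocol { ... }
//   new:                      impl protocol { ... }
//
// Modern Rust spells the trait case `impl Trait for Type` and the plain
// case `impl Type`; ToStr and Celsius below are hypothetical names.
trait ToStr {
    fn to_str(&self) -> String;
}

struct Celsius(f64);

// Trait implementation (the descendant of `impl celsius: to_str`).
impl ToStr for Celsius {
    fn to_str(&self) -> String {
        format!("{} degrees C", self.0)
    }
}

// Inherent implementation (the descendant of plain `impl celsius`).
impl Celsius {
    fn freezing() -> Celsius {
        Celsius(0.0)
    }
}

fn main() {
    println!("{}", Celsius::freezing().to_str());
    println!("{}", Celsius(21.5).to_str());
}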
@ -1,5 +1,5 @@
import io::writer_util;
import dvec::{dvec, extensions};
import dvec::dvec;

/*
* This pretty-printer is a direct reimplementation of Philip Karlton's

@ -232,7 +232,7 @@ enum printer {
printer_(@printer_)
}

impl printer for printer {
impl printer {
fn last_token() -> token { self.token[self.right] }
// be very careful with this!
fn replace_last_token(t: token) { self.token[self.right] = t; }

@ -7,7 +7,7 @@
import diagnostic;
import ast::{required, provided};
import ast_util::{operator_prec};
import dvec::{dvec, extensions};
import dvec::dvec;
import parse::classify::*;
import util::interner;

@ -3,7 +3,7 @@
// type, and vice versa.
import std::map;
import std::map::{hashmap, hashfn, eqfn};
import dvec::{dvec, extensions};
import dvec::dvec;

type hash_interner<T: const> =
{map: hashmap<T, uint>,

@ -25,7 +25,7 @@ trait interner<T: const copy> {
fn len() -> uint;
}

impl <T: const copy> of interner<T> for hash_interner<T> {
impl <T: const copy> hash_interner<T>: interner<T> {
fn intern(val: T) -> uint {
match self.map.find(val) {
some(idx) => return idx,

@ -10,7 +10,7 @@
import back::link;
import result::{ok, err};
import std::getopts;
import io::{reader_util, writer_util};
import io::writer_util;
import getopts::{optopt, optmulti, optflag, optflagopt, opt_present};
import back::{x86, x86_64};
import std::map::hashmap;

@ -18,7 +18,6 @@
import syntax::diagnostic;
import rustc::driver::session;
import rustc::middle::lint;
import io::reader_util;

fn version(argv0: ~str) {
let mut vers = ~"unknown version";

@ -110,7 +110,7 @@ enum session {
session_(@session_)
}

impl session for session {
impl session {
fn span_fatal(sp: span, msg: ~str) -> ! {
self.span_diagnostic.span_fatal(sp, msg)
}

@ -9,7 +9,7 @@
import driver::session;
import session::session;
import syntax::attr;
import dvec::{dvec, extensions};
import dvec::dvec;

export modify_for_testing;

@ -9,7 +9,7 @@
import syntax::print::pprust;
import filesearch::filesearch;
import common::*;
import dvec::{dvec, extensions};
import dvec::dvec;

export read_crates;

@ -10,7 +10,7 @@
import syntax::diagnostic::expect;
import common::*;
import std::map::hashmap;
import dvec::{dvec, extensions};
import dvec::dvec;

export class_dtor;
export get_symbol;

@ -2,7 +2,7 @@

import std::{ebml, map};
import std::map::{hashmap, str_hash};
import dvec::{dvec, extensions};
import dvec::dvec;
import io::writer_util;
import syntax::{ast, ast_util};
import syntax::attr;

@ -13,7 +13,6 @@
import syntax::print::pprust;
import cmd=cstore::crate_metadata;
import util::ppaux::ty_to_str;
import ebml::deserializer;
import syntax::diagnostic::span_handler;
import common::*;

@ -36,7 +36,7 @@ fn mk_filesearch(maybe_sysroot: option<path>,
type filesearch_impl = {sysroot: path,
addl_lib_search_paths: ~[path],
target_triple: ~str};
impl of filesearch for filesearch_impl {
impl filesearch_impl: filesearch {
fn sysroot() -> path { self.sysroot }
fn lib_search_paths() -> ~[path] {
let mut paths = self.addl_lib_search_paths;
@ -1,19 +1,14 @@
import util::ppaux::ty_to_str;

import dvec::extensions;
import syntax::ast;
import syntax::fold;
import syntax::fold::*;
import syntax::visit;
import syntax::ast_map;
import syntax::ast_util;
import syntax::ast_util::inlined_item_methods;
import syntax::codemap::span;
import std::ebml;
import std::ebml::writer;
import std::ebml::serializer;
import std::ebml::deserializer;
import std::ebml::extensions;
import std::ebml::get_doc;
import std::map::hashmap;
import std::serialization::serializer;

@ -21,7 +16,6 @@
import std::serialization::serializer_helpers;
import std::serialization::deserializer_helpers;
import std::prettyprint::serializer;
import std::smallintmap::map;
import middle::{ty, typeck};
import middle::typeck::{method_origin, method_map_entry,
serialize_method_map_entry,

@ -155,7 +149,7 @@ fn reserve_id_range(sess: session,
return {min: to_id_min, max: to_id_min};
}

impl translation_routines for extended_decode_ctxt {
impl extended_decode_ctxt {
fn tr_id(id: ast::node_id) -> ast::node_id {
// from_id_range should be non-empty
assert !ast_util::empty(self.from_id_range);

@ -173,7 +167,7 @@ fn tr_span(_span: span) -> span {
}
}

impl of tr for ast::def_id {
impl ast::def_id: tr {
fn tr(xcx: extended_decode_ctxt) -> ast::def_id {
xcx.tr_def_id(self)
}

@ -182,7 +176,7 @@ fn tr_intern(xcx: extended_decode_ctxt) -> ast::def_id {
}
}

impl of tr for span {
impl span: tr {
fn tr(xcx: extended_decode_ctxt) -> span {
xcx.tr_span(self)
}

@ -192,7 +186,7 @@ trait def_id_serializer_helpers {
fn emit_def_id(did: ast::def_id);
}

impl serializer_helpers<S: serializer> of def_id_serializer_helpers for S {
impl<S: serializer> S: def_id_serializer_helpers {
fn emit_def_id(did: ast::def_id) {
ast::serialize_def_id(self, did)
}

@ -202,8 +196,7 @@ trait def_id_deserializer_helpers {
fn read_def_id(xcx: extended_decode_ctxt) -> ast::def_id;
}

impl deserializer_helpers<D: deserializer> of def_id_deserializer_helpers
for D {
impl<D: deserializer> D: def_id_deserializer_helpers {

fn read_def_id(xcx: extended_decode_ctxt) -> ast::def_id {
let did = ast::deserialize_def_id(self);

@ -350,7 +343,7 @@ fn decode_def(xcx: extended_decode_ctxt, doc: ebml::doc) -> ast::def {
def.tr(xcx)
}

impl of tr for ast::def {
impl ast::def: tr {
fn tr(xcx: extended_decode_ctxt) -> ast::def {
match self {
ast::def_fn(did, p) => { ast::def_fn(did.tr(xcx), p) }

@ -396,14 +389,14 @@ trait ebml_deserializer_helper {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry;
}

impl helper of ebml_deserializer_helper for ebml::ebml_deserializer {
impl ebml::ebml_deserializer: ebml_deserializer_helper {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry {
let fv = deserialize_freevar_entry(self);
fv.tr(xcx)
}
}

impl of tr for freevar_entry {
impl freevar_entry: tr {
fn tr(xcx: extended_decode_ctxt) -> freevar_entry {
{def: self.def.tr(xcx), span: self.span.tr(xcx)}
}

@ -416,14 +409,14 @@ trait read_method_map_entry_helper {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry;
}

impl helper of read_method_map_entry_helper for ebml::ebml_deserializer {
impl ebml::ebml_deserializer: read_method_map_entry_helper {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry {
let mme = deserialize_method_map_entry(self);
{derefs: mme.derefs, origin: mme.origin.tr(xcx)}
}
}

impl of tr for method_origin {
impl method_origin: tr {
fn tr(xcx: extended_decode_ctxt) -> method_origin {
match self {
typeck::method_static(did) => {

@ -502,7 +495,7 @@ trait vtable_deserialization_helpers {
fn read_vtable_origin(xcx: extended_decode_ctxt) -> typeck::vtable_origin;
}

impl helpers of vtable_deserialization_helpers for ebml::ebml_deserializer {
impl ebml::ebml_deserializer: vtable_deserialization_helpers {
fn read_vtable_res(xcx: extended_decode_ctxt) -> typeck::vtable_res {
@self.read_to_vec(|| self.read_vtable_origin(xcx) )
}

@ -558,7 +551,7 @@ trait get_ty_str_ctxt {
fn ty_str_ctxt() -> @tyencode::ctxt;
}

impl helpers of get_ty_str_ctxt for @e::encode_ctxt {
impl @e::encode_ctxt: get_ty_str_ctxt {
fn ty_str_ctxt() -> @tyencode::ctxt {
@{diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,

@ -575,7 +568,7 @@ trait ebml_writer_helpers {
fn emit_tpbt(ecx: @e::encode_ctxt, tpbt: ty::ty_param_bounds_and_ty);
}

impl helpers of ebml_writer_helpers for ebml::writer {
impl ebml::writer: ebml_writer_helpers {
fn emit_ty(ecx: @e::encode_ctxt, ty: ty::t) {
e::write_type(ecx, self, ty)
}

@ -612,7 +605,7 @@ trait write_tag_and_id {
fn id(id: ast::node_id);
}

impl writer of write_tag_and_id for ebml::writer {
impl ebml::writer: write_tag_and_id {
fn tag(tag_id: c::astencode_tag, f: fn()) {
do self.wr_tag(tag_id as uint) { f() }
}

@ -771,7 +764,7 @@ trait doc_decoder_helpers {
fn opt_child(tag: c::astencode_tag) -> option<ebml::doc>;
}

impl decoder of doc_decoder_helpers for ebml::doc {
impl ebml::doc: doc_decoder_helpers {
fn as_int() -> int { ebml::doc_as_u64(self) as int }
fn opt_child(tag: c::astencode_tag) -> option<ebml::doc> {
ebml::maybe_get_doc(self, tag as uint)

@ -786,8 +779,7 @@ fn read_ty_param_bounds_and_ty(xcx: extended_decode_ctxt)
-> ty::ty_param_bounds_and_ty;
}

impl decoder of ebml_deserializer_decoder_helpers
for ebml::ebml_deserializer {
impl ebml::ebml_deserializer: ebml_deserializer_decoder_helpers {

fn read_ty(xcx: extended_decode_ctxt) -> ty::t {
// Note: regions types embed local node ids. In principle, we

@ -922,7 +914,7 @@ trait fake_ext_ctxt {
type fake_session = ();

#[cfg(test)]
impl of fake_ext_ctxt for fake_session {
impl fake_session: fake_ext_ctxt {
fn cfg() -> ast::crate_cfg { ~[] }
fn parse_sess() -> parse::parse_sess { parse::new_parse_sess(none) }
}
@ -224,12 +224,12 @@
import std::map::{int_hash, hashmap, set};
import std::list;
import std::list::{list, cons, nil};
import result::{result, ok, err, extensions};
import result::{result, ok, err};
import syntax::print::pprust;
import util::common::indenter;
import ty::to_str;
import driver::session::session;
import dvec::{dvec, extensions};
import dvec::dvec;
import mem_categorization::*;

export check_crate, root_map, mutbl_map;

@ -7,7 +7,7 @@
// 3. assignments do not affect things loaned out as immutable
// 4. moves to dnot affect things loaned out in any way

import dvec::{dvec, extensions};
import dvec::dvec;

export check_loans;

@ -59,7 +59,7 @@ enum assignment_type {
at_mutbl_ref,
}

impl methods for assignment_type {
impl assignment_type {
fn checked_by_liveness() -> bool {
// the liveness pass guarantees that immutable local variables
// are only assigned once; but it doesn't consider &mut

@ -78,7 +78,7 @@ fn ing_form(desc: ~str) -> ~str {
}
}

impl methods for check_loan_ctxt {
impl check_loan_ctxt {
fn tcx() -> ty::ctxt { self.bccx.tcx }

fn purity(scope_id: ast::node_id) -> option<purity_cause> {

@ -7,8 +7,7 @@
// sure that all of these loans are honored.

import mem_categorization::{opt_deref_kind};
import loan::public_methods;
import preserve::{public_methods, preserve_condition, pc_ok, pc_if_pure};
import preserve::{preserve_condition, pc_ok, pc_if_pure};
import ty::ty_region;

export gather_loans;

@ -243,7 +242,7 @@ fn req_loans_in_expr(ex: @ast::expr,
self.root_ub = old_root_ub;
}

impl methods for gather_loan_ctxt {
impl gather_loan_ctxt {
fn tcx() -> ty::ctxt { self.bccx.tcx }

// guarantees that addr_of(cmt) will be valid for the duration of

@ -5,7 +5,7 @@
export public_methods;
import result::{result, ok, err};

impl public_methods for borrowck_ctxt {
impl borrowck_ctxt {
fn loan(cmt: cmt,
scope_region: ty::region,
mutbl: ast::mutability) -> bckres<@dvec<loan>> {

@ -33,7 +33,7 @@ enum loan_ctxt {
loan_ctxt_(@loan_ctxt_)
}

impl loan_methods for loan_ctxt {
impl loan_ctxt {
fn tcx() -> ty::ctxt { self.bccx.tcx }

fn ok_with_loan_of(cmt: cmt,

@ -10,7 +10,7 @@ enum preserve_condition {
pc_if_pure(bckerr)
}

impl public_methods for preserve_condition {
impl preserve_condition {
// combines two preservation conditions such that if either of
// them requires purity, the result requires purity
fn combine(pc: preserve_condition) -> preserve_condition {

@ -21,7 +21,7 @@ fn combine(pc: preserve_condition) -> preserve_condition {
}
}

impl public_methods for borrowck_ctxt {
impl borrowck_ctxt {
fn preserve(cmt: cmt,
scope_region: ty::region,
item_ub: ast::node_id,

@ -54,7 +54,7 @@ enum preserve_ctxt = {
};

impl private_methods for &preserve_ctxt {
priv impl &preserve_ctxt {
fn tcx() -> ty::ctxt { self.bccx.tcx }

fn preserve(cmt: cmt) -> bckres<preserve_condition> {

@ -2,7 +2,7 @@
import syntax::{visit, ast_util, ast_map};
import driver::session::session;
import std::map::hashmap;
import dvec::{dvec, extensions};
import dvec::dvec;

fn check_crate(sess: session, crate: @crate, ast_map: ast_map::map,
def_map: resolve3::DefMap,

@ -1,13 +1,12 @@
import syntax::{visit, ast_util};
import syntax::ast::*;
import syntax::codemap::span;
import ty::{kind, kind_copyable, kind_noncopyable, kind_const, operators};
import ty::{kind, kind_copyable, kind_noncopyable, kind_const};
import driver::session::session;
import std::map::hashmap;
import util::ppaux::{ty_to_str, tys_to_str};
import syntax::print::pprust::expr_to_str;
import freevars::freevar_entry;
import dvec::extensions;
import lint::{non_implicitly_copyable_typarams,implicit_copies};

// Kind analysis pass.

@ -205,7 +205,7 @@ enum ctxt {
ctxt_(ctxt_)
}

impl methods for ctxt {
impl ctxt {
fn get_level(lint: lint) -> level {
get_lint_level(self.curr, lint)
}

@ -100,7 +100,7 @@
* - `self_var`: a variable representing 'self'
*/

import dvec::{dvec, extensions};
import dvec::dvec;
import std::map::{hashmap, int_hash, str_hash, box_str_hash};
import syntax::{visit, ast_util};
import syntax::print::pprust::{expr_to_str};

@ -152,11 +152,11 @@ fn check_crate(tcx: ty::ctxt,
return last_use_map;
}

impl of to_str::to_str for live_node {
impl live_node: to_str::to_str {
fn to_str() -> ~str { fmt!{"ln(%u)", *self} }
}

impl of to_str::to_str for variable {
impl variable: to_str::to_str {
fn to_str() -> ~str { fmt!{"v(%u)", *self} }
}

@ -182,7 +182,7 @@ fn to_str() -> ~str { fmt!{"v(%u)", *self} }
// variable must not be assigned if there is some successor
// assignment. And so forth.

impl methods for live_node {
impl live_node {
pure fn is_valid() -> bool { *self != uint::max_value }
}

@ -1477,7 +1477,7 @@ enum read_kind {
moved_variable
}

impl check_methods for @liveness {
impl @liveness {
fn check_fields(sp: span, entry_ln: live_node) {
for self.ir.field_map.each |nm, var| {
match (*self).live_on_entry(entry_ln, var) {

@ -209,12 +209,12 @@ trait ast_node {
fn span() -> span;
}

impl of ast_node for @ast::expr {
impl @ast::expr: ast_node {
fn id() -> ast::node_id { self.id }
fn span() -> span { self.span }
}

impl of ast_node for @ast::pat {
impl @ast::pat: ast_node {
fn id() -> ast::node_id { self.id }
fn span() -> span { self.span }
}

@ -223,7 +223,7 @@ trait get_type_for_node {
fn ty<N: ast_node>(node: N) -> ty::t;
}

impl methods of get_type_for_node for ty::ctxt {
impl ty::ctxt: get_type_for_node {
fn ty<N: ast_node>(node: N) -> ty::t {
ty::node_id_to_type(self, node.id())
}
Some files were not shown because too many files have changed in this diff.