rustc: Implement and enforce instance coherence

This commit is contained in:
Patrick Walton 2012-07-11 15:00:40 -07:00
parent b5729bd600
commit db020ab63c
111 changed files with 1746 additions and 526 deletions

View File

@ -11,7 +11,7 @@ import result::{ok, err};
import io::writer_util;
import std::{map, json, tempfile, term, sort, getopts};
import map::hashmap;
import json::to_str;
import to_str::to_str;
import getopts::{optflag, optopt, opt_present};
type package = {
@ -535,7 +535,7 @@ fn load_one_source_package(src: source, p: map::hashmap<~str, json::json>) {
versions: ~[]
};
alt src.packages.position(|pkg| pkg.uuid == uuid ) {
alt vec::position(src.packages, |pkg| pkg.uuid == uuid) {
some(idx) {
src.packages[idx] = newpkg;
log(debug, ~" updated package: " + src.name + ~"/" + name);

View File

@ -5,12 +5,15 @@
import option::{some, none};
import option = option::option;
import path = path::path;
import str::extensions;
import tuple::extensions;
import str::{extensions, str_slice, unique_str};
import vec::extensions;
import vec::{const_vector, copyable_vector, immutable_vector};
import vec::{immutable_copyable_vector, iter_trait_extensions, vec_concat};
import iter::{base_iter, extended_iter, copyable_iter, times};
import option::extensions;
import option_iter::extensions;
import ptr::extensions;
import ptr::{extensions, ptr};
import rand::extensions;
import result::extensions;
import int::{num, times};
@ -26,11 +29,18 @@ import u64::{num, times};
import float::num;
import f32::num;
import f64::num;
import num::num;
export path, option, some, none, unreachable;
export extensions;
// The following exports are the extension impls for numeric types
export num, times;
// The following exports are the common traits
export str_slice, unique_str;
export const_vector, copyable_vector, immutable_vector;
export immutable_copyable_vector, iter_trait_extensions, vec_concat;
export base_iter, copyable_iter, extended_iter;
export ptr;
// Export the log levels as global constants. Higher levels mean
// more-verbosity. Error is the bottom level, default logging level is

View File

@ -233,6 +233,7 @@ fn test_ifaces() {
#[test]
fn test_times() {
import iter::times;
let ten = 10 as T;
let mut accum = 0;
for ten.times { accum += 1; }
@ -243,5 +244,6 @@ fn test_times() {
#[should_fail]
#[ignore(cfg(windows))]
fn test_times_negative() {
import iter::times;
for (-10).times { log(error, ~"nope!"); }
}

View File

@ -8,6 +8,9 @@ export extensions;
impl extensions<A> of iter::base_iter<A> for IMPL_T<A> {
fn each(blk: fn(A) -> bool) { EACH(self, blk) }
fn size_hint() -> option<uint> { SIZE_HINT(self) }
}
impl extensions<A> of iter::extended_iter<A> for IMPL_T<A> {
fn eachi(blk: fn(uint, A) -> bool) { iter::eachi(self, blk) }
fn all(blk: fn(A) -> bool) -> bool { iter::all(self, blk) }
fn any(blk: fn(A) -> bool) -> bool { iter::any(self, blk) }

View File

@ -3,6 +3,16 @@ iface base_iter<A> {
fn size_hint() -> option<uint>;
}
trait extended_iter<A> {
fn eachi(blk: fn(uint, A) -> bool);
fn all(blk: fn(A) -> bool) -> bool;
fn any(blk: fn(A) -> bool) -> bool;
fn foldl<B>(+b0: B, blk: fn(B, A) -> B) -> B;
fn contains(x: A) -> bool;
fn count(x: A) -> uint;
fn position(f: fn(A) -> bool) -> option<uint>;
}
iface times {
fn times(it: fn() -> bool);
}

View File

@ -92,6 +92,7 @@ pure fn iter<T>(opt: option<T>, f: fn(T)) {
alt opt { none { } some(t) { f(t); } }
}
#[inline(always)]
pure fn unwrap<T>(-opt: option<T>) -> T {
/*!
* Moves a value out of an option type and returns it.

View File

@ -578,7 +578,11 @@ impl<T: send> of selectable for pipes::port<T> {
type shared_chan<T: send> = arc::exclusive<pipes::chan<T>>;
impl chan<T: send> for shared_chan<T> {
trait send_on_shared_chan<T> {
fn send(+x: T);
}
impl chan<T: send> of send_on_shared_chan<T> for shared_chan<T> {
fn send(+x: T) {
let mut xx = some(x);
do self.with |_c, chan| {

View File

@ -14,6 +14,7 @@ export memset;
export buf_len;
export position;
export extensions;
export ptr;
import libc::{c_void, size_t};

View File

@ -106,7 +106,9 @@ export
escape_unicode,
unsafe,
extensions;
extensions,
str_slice,
unique_str;
#[abi = "cdecl"]
extern mod rustrt {

View File

@ -278,6 +278,7 @@ fn to_str_radix17() {
#[test]
fn test_times() {
import iter::times;
let ten = 10 as T;
let mut accum = 0;
for ten.times { accum += 1; }

View File

@ -85,6 +85,12 @@ export unpack_const_slice;
export unsafe;
export u8;
export extensions;
export const_vector;
export copyable_vector;
export immutable_vector;
export immutable_copyable_vector;
export iter_trait_extensions;
export vec_concat;
#[abi = "cdecl"]
extern mod rustrt {
@ -179,11 +185,12 @@ pure fn len<T>(&&v: &[const T]) -> uint {
* Creates an immutable vector of size `n_elts` and initializes the elements
* to the value returned by the function `op`.
*/
pure fn from_fn<T>(n_elts: uint, op: init_op<T>) -> ~[T] {
pure fn from_fn<T: copy>(n_elts: uint, op: init_op<T>) -> ~[T] {
let mut v = ~[];
unchecked{reserve(v, n_elts);}
let mut i: uint = 0u;
while i < n_elts unsafe { push(v, op(i)); i += 1u; }
while i < n_elts unsafe { ref_set(v, i, op(i)); i += 1u; }
unsafe { unsafe::set_len(v, n_elts); }
ret v;
}
@ -197,8 +204,9 @@ pure fn from_elem<T: copy>(n_elts: uint, t: T) -> ~[T] {
let mut v = ~[];
unchecked{reserve(v, n_elts)}
let mut i: uint = 0u;
unsafe { // because push is impure
while i < n_elts { push(v, t); i += 1u; }
unsafe { // because ref_set is unsafe
while i < n_elts { ref_set(v, i, t); i += 1u; }
unsafe { unsafe::set_len(v, n_elts); }
}
ret v;
}
@ -468,6 +476,16 @@ unsafe fn ref<T: copy>(v: &[const T], i: uint) -> T {
unpack_slice(v, |p, _len| *ptr::offset(p, i))
}
#[inline(always)]
unsafe fn ref_set<T: copy>(v: &[mut T], i: uint, +val: T) {
let mut box = some(val);
do unpack_mut_slice(v) |p, _len| {
let mut box2 = none;
box2 <-> box;
rusti::move_val_init(*ptr::mut_offset(p, i), option::unwrap(box2));
}
}
#[inline(always)]
fn push_all<T: copy>(&v: ~[const T], rhs: &[const T]) {
reserve(v, v.len() + rhs.len());
@ -1591,6 +1609,9 @@ mod u8 {
impl extensions/&<A> of iter::base_iter<A> for &[const A] {
fn each(blk: fn(A) -> bool) { each(self, blk) }
fn size_hint() -> option<uint> { some(len(self)) }
}
impl extensions/&<A> of iter::extended_iter<A> for &[const A] {
fn eachi(blk: fn(uint, A) -> bool) { iter::eachi(self, blk) }
fn all(blk: fn(A) -> bool) -> bool { iter::all(self, blk) }
fn any(blk: fn(A) -> bool) -> bool { iter::any(self, blk) }
@ -1599,6 +1620,7 @@ impl extensions/&<A> of iter::base_iter<A> for &[const A] {
}
fn contains(x: A) -> bool { iter::contains(self, x) }
fn count(x: A) -> uint { iter::count(self, x) }
fn position(f: fn(A) -> bool) -> option<uint> { iter::position(self, f) }
}
trait iter_trait_extensions<A> {

View File

@ -6,7 +6,12 @@ import list;
import list::{list, cons, nil};
type chunk = {data: ~[u8], mut fill: uint};
type arena = {mut chunks: @list<@chunk>};
type arena_ = {mut chunks: @list<@chunk>};
enum arena {
arena_(arena_)
}
fn chunk(size: uint) -> @chunk {
let mut v = ~[];
@ -15,7 +20,7 @@ fn chunk(size: uint) -> @chunk {
}
fn arena_with_size(initial_size: uint) -> arena {
ret {mut chunks: @cons(chunk(initial_size), @nil)};
ret arena_({mut chunks: @cons(chunk(initial_size), @nil)});
}
fn arena() -> arena {

View File

@ -155,7 +155,11 @@ fn doc_as_i32(d: doc) -> i32 { doc_as_u32(d) as i32 }
fn doc_as_i64(d: doc) -> i64 { doc_as_u64(d) as i64 }
// ebml writing
type writer = {writer: io::writer, mut size_positions: ~[uint]};
type writer_ = {writer: io::writer, mut size_positions: ~[uint]};
enum writer {
writer_(writer_)
}
fn write_sized_vuint(w: io::writer, n: uint, size: uint) {
alt size {
@ -187,7 +191,7 @@ fn write_vuint(w: io::writer, n: uint) {
fn writer(w: io::writer) -> writer {
let size_positions: ~[uint] = ~[];
ret {writer: w, mut size_positions: size_positions};
ret writer_({writer: w, mut size_positions: size_positions});
}
// FIXME (#2741): Provide a function to write the standard ebml header.
@ -311,9 +315,12 @@ enum ebml_serializer_tag {
es_label // Used only when debugging
}
impl serializer of serialization::serializer for ebml::writer {
fn emit_nil() {}
trait serializer_priv {
fn _emit_tagged_uint(t: ebml_serializer_tag, v: uint);
fn _emit_label(label: ~str);
}
impl serializer of serializer_priv for ebml::writer {
// used internally to emit things like the vector length and so on
fn _emit_tagged_uint(t: ebml_serializer_tag, v: uint) {
assert v <= 0xFFFF_FFFF_u;
@ -329,6 +336,10 @@ impl serializer of serialization::serializer for ebml::writer {
// try and check failures more quickly.
if debug { self.wr_tagged_str(es_label as uint, label) }
}
}
impl serializer of serialization::serializer for ebml::writer {
fn emit_nil() {}
fn emit_uint(v: uint) { self.wr_tagged_u64(es_uint as uint, v as u64); }
fn emit_u64(v: u64) { self.wr_tagged_u64(es_u64 as uint, v); }
@ -383,14 +394,18 @@ impl serializer of serialization::serializer for ebml::writer {
fn emit_tup_elt(_idx: uint, f: fn()) { f() }
}
type ebml_deserializer = {mut parent: ebml::doc,
mut pos: uint};
type ebml_deserializer_ = {mut parent: ebml::doc,
mut pos: uint};
fn ebml_deserializer(d: ebml::doc) -> ebml_deserializer {
{mut parent: d, mut pos: d.start}
enum ebml_deserializer {
ebml_deserializer_(ebml_deserializer_)
}
impl deserializer of serialization::deserializer for ebml_deserializer {
fn ebml_deserializer(d: ebml::doc) -> ebml_deserializer {
ebml_deserializer_({mut parent: d, mut pos: d.start})
}
impl deserializer_priv for ebml_deserializer {
fn _check_label(lbl: ~str) {
if self.pos < self.parent.end {
let {tag: r_tag, doc: r_doc} =
@ -443,7 +458,9 @@ impl deserializer of serialization::deserializer for ebml_deserializer {
#debug["_next_uint exp_tag=%? result=%?", exp_tag, r];
ret r as uint;
}
}
impl deserializer of serialization::deserializer for ebml_deserializer {
fn read_nil() -> () { () }
fn read_u64() -> u64 { ebml::doc_as_u64(self.next_doc(es_u64)) }

View File

@ -8,6 +8,7 @@ import io;
import io::{reader_util, writer_util};
import map;
import map::hashmap;
import map::map;
import core::vec::extensions;
export json;
@ -114,13 +115,17 @@ fn to_str(j: json) -> ~str {
io::with_str_writer(|wr| to_writer(wr, j))
}
type parser = {
type parser_ = {
rdr: io::reader,
mut ch: char,
mut line: uint,
mut col: uint,
};
enum parser {
parser_(parser_)
}
impl parser for parser {
fn eof() -> bool { self.ch == -1 as char }
@ -463,12 +468,12 @@ impl parser for parser {
/// Deserializes a json value from an io::reader
fn from_reader(rdr: io::reader) -> result<json, error> {
let parser = {
let parser = parser_({
rdr: rdr,
mut ch: rdr.read_char(),
mut line: 1u,
mut col: 1u,
};
});
parser.parse()
}

View File

@ -92,13 +92,19 @@ mod chained {
absent
}
type t<K, V> = @{
type hashmap__<K, V> = {
mut count: uint,
mut chains: ~[mut chain<K,V>],
hasher: hashfn<K>,
eqer: eqfn<K>
};
enum hashmap_<K, V> {
hashmap_(@hashmap__<K, V>)
}
type t<K, V> = hashmap_<K, V>;
enum search_result<K, V> {
not_found,
found_first(uint, @entry<K,V>),
@ -284,10 +290,10 @@ mod chained {
}
fn mk<K, V: copy>(hasher: hashfn<K>, eqer: eqfn<K>) -> t<K,V> {
let slf: t<K, V> = @{mut count: 0u,
mut chains: chains(initial_capacity),
hasher: hasher,
eqer: eqer};
let slf: t<K, V> = hashmap_(@{mut count: 0u,
mut chains: chains(initial_capacity),
hasher: hasher,
eqer: eqer});
slf
}
}

View File

@ -148,7 +148,12 @@ mod v4 {
// the simple, old style numberic representation of
// ipv4
type ipv4_rep = { a: u8, b: u8, c: u8, d:u8 };
impl x for ipv4_rep {
trait as_unsafe_u32 {
unsafe fn as_u32() -> u32;
}
impl x of as_unsafe_u32 for ipv4_rep {
// this is pretty dastardly, i know
unsafe fn as_u32() -> u32 {
*((ptr::addr_of(self)) as *u32)

View File

@ -11,6 +11,7 @@ import future::extensions;
import result::*;
import libc::size_t;
import str::extensions;
import io::{reader, writer};
// tcp interfaces
export tcp_socket;

View File

@ -101,13 +101,21 @@ fn read_to_vec<D: deserializer, T: copy>(d: D, f: fn() -> T) -> ~[T] {
}
}
impl serializer_helpers<S: serializer> for S {
trait serializer_helpers {
fn emit_from_vec<T>(v: ~[T], f: fn(T));
}
impl serializer_helpers<S: serializer> of serializer_helpers for S {
fn emit_from_vec<T>(v: ~[T], f: fn(T)) {
emit_from_vec(self, v, f)
}
}
impl deserializer_helpers<D: deserializer> for D {
trait deserializer_helpers {
fn read_to_vec<T: copy>(f: fn() -> T) -> ~[T];
}
impl deserializer_helpers<D: deserializer> of deserializer_helpers for D {
fn read_to_vec<T: copy>(f: fn() -> T) -> ~[T] {
read_to_vec(self, f)
}

View File

@ -5,14 +5,19 @@
import core::option;
import core::option::{some, none};
import dvec::{dvec, extensions};
import map::map;
// FIXME (#2347): Should not be @; there's a bug somewhere in rustc that
// requires this to be.
type smallintmap<T: copy> = @{v: dvec<option<T>>};
type smallintmap_<T: copy> = {v: dvec<option<T>>};
enum smallintmap<T:copy> {
smallintmap_(@smallintmap_<T>)
}
/// Create a smallintmap
fn mk<T: copy>() -> smallintmap<T> {
ret @{v: dvec()};
ret smallintmap_(@{v: dvec()});
}
/**

View File

@ -65,7 +65,7 @@ fn tzset() {
rustrt::rust_tzset();
}
type tm = {
type tm_ = {
tm_sec: i32, // seconds after the minute ~[0-60]
tm_min: i32, // minutes after the hour ~[0-59]
tm_hour: i32, // hours after midnight ~[0-23]
@ -80,8 +80,12 @@ type tm = {
tm_nsec: i32, // nanoseconds
};
enum tm {
tm_(tm_)
}
fn empty_tm() -> tm {
{
tm_({
tm_sec: 0_i32,
tm_min: 0_i32,
tm_hour: 0_i32,
@ -94,7 +98,7 @@ fn empty_tm() -> tm {
tm_gmtoff: 0_i32,
tm_zone: ~"",
tm_nsec: 0_i32,
}
})
}
/// Returns the specified time in UTC
@ -563,7 +567,7 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
}
if pos == len && rdr.eof() {
ok({
ok(tm_({
tm_sec: tm.tm_sec,
tm_min: tm.tm_min,
tm_hour: tm.tm_hour,
@ -576,7 +580,7 @@ fn strptime(s: ~str, format: ~str) -> result<tm, ~str> {
tm_gmtoff: tm.tm_gmtoff,
tm_zone: tm.tm_zone,
tm_nsec: tm.tm_nsec,
})
}))
} else { result }
}
}

View File

@ -306,7 +306,13 @@ pure fn class_member_visibility(ci: @class_member) -> visibility {
}
}
impl inlined_item_methods for inlined_item {
trait inlined_item_utils {
fn ident() -> ident;
fn id() -> ast::node_id;
fn accept<E>(e: E, v: visit::vt<E>);
}
impl inlined_item_methods of inlined_item_utils for inlined_item {
fn ident() -> ident {
alt self {
ii_item(i) { /* FIXME (#2543) */ copy i.ident }

View File

@ -123,7 +123,34 @@ fn expand(cx: ext_ctxt,
}
}
impl helpers for ext_ctxt {
trait ext_ctxt_helpers {
fn helper_path(base_path: @ast::path, helper_name: ~str) -> @ast::path;
fn path(span: span, strs: ~[ast::ident]) -> @ast::path;
fn path_tps(span: span, strs: ~[ast::ident],
tps: ~[@ast::ty]) -> @ast::path;
fn ty_path(span: span, strs: ~[ast::ident], tps: ~[@ast::ty]) -> @ast::ty;
fn ty_fn(span: span,
-input_tys: ~[@ast::ty],
-output: @ast::ty) -> @ast::ty;
fn ty_nil(span: span) -> @ast::ty;
fn expr(span: span, node: ast::expr_) -> @ast::expr;
fn var_ref(span: span, name: ast::ident) -> @ast::expr;
fn blk(span: span, stmts: ~[@ast::stmt]) -> ast::blk;
fn expr_blk(expr: @ast::expr) -> ast::blk;
fn binder_pat(span: span, nm: ast::ident) -> @ast::pat;
fn stmt(expr: @ast::expr) -> @ast::stmt;
fn alt_stmt(arms: ~[ast::arm], span: span, -v: @ast::expr) -> @ast::stmt;
fn lit_str(span: span, s: @~str) -> @ast::expr;
fn lit_uint(span: span, i: uint) -> @ast::expr;
fn lambda(blk: ast::blk) -> @ast::expr;
fn clone_folder() -> fold::ast_fold;
fn clone(v: @ast::expr) -> @ast::expr;
fn clone_ty(v: @ast::ty) -> @ast::ty;
fn clone_ty_param(v: ast::ty_param) -> ast::ty_param;
fn at(span: span, expr: @ast::expr) -> @ast::expr;
}
impl helpers of ext_ctxt_helpers for ext_ctxt {
fn helper_path(base_path: @ast::path,
helper_name: ~str) -> @ast::path {
let head = vec::init(base_path.idents);

View File

@ -28,18 +28,29 @@ fn path(id: ident) -> @ast::path {
types: ~[]}
}
impl methods for ident {
trait path_concat {
fn +(id: ident) -> @ast::path;
}
impl methods of path_concat for ident {
fn +(id: ident) -> @ast::path {
path(self) + id
}
}
impl methods for @ast::path {
impl methods of path_concat for @ast::path {
fn +(id: ident) -> @ast::path {
@{idents: vec::append_one(self.idents, id)
with *self}
}
}
trait append_types {
fn add_ty(ty: @ast::ty) -> @ast::path;
fn add_tys(+tys: ~[@ast::ty]) -> @ast::path;
}
impl methods of append_types for @ast::path {
fn add_ty(ty: @ast::ty) -> @ast::path {
@{types: vec::append_one(self.types, ty)
with *self}
@ -51,7 +62,38 @@ impl methods for @ast::path {
}
}
impl ast_builder for ext_ctxt {
trait ext_ctxt_ast_builder {
fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound])
-> ast::ty_param;
fn arg(name: ident, ty: @ast::ty) -> ast::arg;
fn arg_mode(name: ident, ty: @ast::ty, mode: ast::rmode) -> ast::arg;
fn expr_block(e: @ast::expr) -> ast::blk;
fn fn_decl(+inputs: ~[ast::arg], output: @ast::ty) -> ast::fn_decl;
fn item(name: ident, +node: ast::item_) -> @ast::item;
fn item_fn_poly(name: ident,
+inputs: ~[ast::arg],
output: @ast::ty,
+ty_params: ~[ast::ty_param],
+body: ast::blk) -> @ast::item;
fn item_fn(name: ident,
+inputs: ~[ast::arg],
output: @ast::ty,
+body: ast::blk) -> @ast::item;
fn item_enum_poly(name: ident,
+variants: ~[ast::variant],
+ty_params: ~[ast::ty_param]) -> @ast::item;
fn item_enum(name: ident, +variants: ~[ast::variant]) -> @ast::item;
fn variant(name: ident, +tys: ~[@ast::ty]) -> ast::variant;
fn item_mod(name: ident, +items: ~[@ast::item]) -> @ast::item;
fn ty_path_ast_builder(path: @ast::path) -> @ast::ty;
fn item_ty_poly(name: ident,
ty: @ast::ty,
+params: ~[ast::ty_param]) -> @ast::item;
fn item_ty(name: ident, ty: @ast::ty) -> @ast::item;
fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty];
}
impl ast_builder of ext_ctxt_ast_builder for ext_ctxt {
fn ty_param(id: ast::ident, +bounds: ~[ast::ty_param_bound])
-> ast::ty_param
{
@ -153,7 +195,7 @@ impl ast_builder for ext_ctxt {
items: items}))
}
fn ty_path(path: @ast::path) -> @ast::ty {
fn ty_path_ast_builder(path: @ast::path) -> @ast::ty {
// FIXME #2886: make sure the node ids are legal.
@{id: self.next_id(),
node: ast::ty_path(path, self.next_id()),
@ -177,6 +219,6 @@ impl ast_builder for ext_ctxt {
}
fn ty_vars(+ty_params: ~[ast::ty_param]) -> ~[@ast::ty] {
ty_params.map(|p| self.ty_path(path(p.ident)))
ty_params.map(|p| self.ty_path_ast_builder(path(p.ident)))
}
}

View File

@ -6,7 +6,12 @@ import parse::token;
import pipec::*;
impl proto_parser for parser {
trait proto_parser {
fn parse_proto(id: ident) -> protocol;
fn parse_state(proto: protocol);
}
impl proto_parser of proto_parser for parser {
fn parse_proto(id: ident) -> protocol {
let proto = protocol(id);

View File

@ -15,9 +15,9 @@ import pprust::{item_to_str, ty_to_str};
import ext::base::{mk_ctxt, ext_ctxt};
import parse;
import parse::*;
import proto::*;
import ast_builder::append_types;
import ast_builder::ast_builder;
import ast_builder::methods;
import ast_builder::path;
@ -38,7 +38,7 @@ impl compile for message {
let args_ast = vec::append(
~[cx.arg_mode(@~"pipe",
cx.ty_path(path(this.data_name())
cx.ty_path_ast_builder(path(this.data_name())
.add_tys(cx.ty_vars(this.ty_params))),
ast::by_copy)],
args_ast);
@ -64,7 +64,7 @@ impl compile for message {
cx.item_fn_poly(self.name(),
args_ast,
cx.ty_path(path(next.data_name())
cx.ty_path_ast_builder(path(next.data_name())
.add_tys(next_tys)),
self.get_params(),
cx.expr_block(body))
@ -110,6 +110,11 @@ impl compile for message {
}
}
}
fn to_ty(cx: ext_ctxt) -> @ast::ty {
cx.ty_path_ast_builder(path(self.name)
.add_tys(cx.ty_vars(self.ty_params)))
}
}
impl compile for state {
@ -169,9 +174,9 @@ impl compile for state {
vec::push(items,
cx.item_ty_poly(
self.data_name(),
cx.ty_path(
cx.ty_path_ast_builder(
(@~"pipes" + @(dir.to_str() + ~"_packet"))
.add_ty(cx.ty_path(
.add_ty(cx.ty_path_ast_builder(
(self.proto.name + self.data_name())
.add_tys(cx.ty_vars(self.ty_params))))),
self.ty_params));
@ -266,7 +271,12 @@ impl of to_source for @ast::expr {
}
}
impl parse_utils for ext_ctxt {
trait ext_ctxt_parse_utils {
fn parse_item(s: ~str) -> @ast::item;
fn parse_expr(s: ~str) -> @ast::expr;
}
impl parse_utils of ext_ctxt_parse_utils for ext_ctxt {
fn parse_item(s: ~str) -> @ast::item {
let res = parse::parse_item_from_source_str(
~"***protocol expansion***",
@ -292,3 +302,20 @@ impl parse_utils for ext_ctxt {
self.parse_sess())
}
}
trait two_vector_utils<A, B> {
fn zip() -> ~[(A, B)];
fn map<C>(f: fn(A, B) -> C) -> ~[C];
}
impl methods<A: copy, B: copy> of two_vector_utils<A, B> for (~[A], ~[B]) {
fn zip() -> ~[(A, B)] {
let (a, b) = self;
vec::zip(a, b)
}
fn map<C>(f: fn(A, B) -> C) -> ~[C] {
let (a, b) = self;
vec::map2(a, b, f)
}
}

View File

@ -60,6 +60,7 @@ fn count_names(ms: &[matcher]) -> uint {
}})
}
#[warn(no_non_implicitly_copyable_typarams)]
fn new_matcher_pos(ms: ~[matcher], sep: option<token>, lo: uint)
-> matcher_pos {
~{elts: ms, sep: sep, mut idx: 0u, mut up: matcher_pos_up(none),

View File

@ -9,7 +9,21 @@ export parser_attr;
// extensions, which both begin with token.POUND
type attr_or_ext = option<either<~[ast::attribute], @ast::expr>>;
impl parser_attr for parser {
trait parser_attr {
fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
-> attr_or_ext;
fn parse_outer_attributes() -> ~[ast::attribute];
fn parse_attribute(style: ast::attr_style) -> ast::attribute;
fn parse_attribute_naked(style: ast::attr_style, lo: uint) ->
ast::attribute;
fn parse_inner_attrs_and_next() ->
{inner: ~[ast::attribute], next: ~[ast::attribute]};
fn parse_meta_item() -> @ast::meta_item;
fn parse_meta_seq() -> ~[@ast::meta_item];
fn parse_optional_meta() -> ~[@ast::meta_item];
}
impl parser_attr of parser_attr for parser {
fn parse_outer_attrs_or_ext(first_item_attrs: ~[ast::attribute])
-> attr_or_ext

View File

@ -22,9 +22,44 @@ fn token_to_str(reader: reader, ++token: token::token) -> ~str {
token::to_str(*reader.interner(), token)
}
// This should be done with traits, once traits work
impl parser_common for parser {
trait parser_common {
fn unexpected_last(t: token::token) -> !;
fn unexpected() -> !;
fn expect(t: token::token);
fn parse_ident() -> ast::ident;
fn parse_path_list_ident() -> ast::path_list_ident;
fn parse_value_ident() -> ast::ident;
fn eat(tok: token::token) -> bool;
// A sanity check that the word we are asking for is a known keyword
fn require_keyword(word: ~str);
fn token_is_keyword(word: ~str, ++tok: token::token) -> bool;
fn is_keyword(word: ~str) -> bool;
fn is_any_keyword(tok: token::token) -> bool;
fn eat_keyword(word: ~str) -> bool;
fn expect_keyword(word: ~str);
fn is_restricted_keyword(word: ~str) -> bool;
fn check_restricted_keywords();
fn check_restricted_keywords_(w: ~str);
fn expect_gt();
fn parse_seq_to_before_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T) -> ~[T];
fn parse_seq_to_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T) -> ~[T];
fn parse_seq_lt_gt<T: copy>(sep: option<token::token>,
f: fn(parser) -> T) -> spanned<~[T]>;
fn parse_seq_to_end<T: copy>(ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> ~[T];
fn parse_seq_to_before_end<T: copy>(ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> ~[T];
fn parse_unspanned_seq<T: copy>(bra: token::token,
ket: token::token,
sep: seq_sep,
f: fn(parser) -> T) -> ~[T];
fn parse_seq<T: copy>(bra: token::token, ket: token::token, sep: seq_sep,
f: fn(parser) -> T) -> spanned<~[T]>;
}
impl parser_common of parser_common for parser {
fn unexpected_last(t: token::token) -> ! {
self.span_fatal(
copy self.last_span,

View File

@ -2188,14 +2188,23 @@ class parser {
}
}
fn token_is_pound_or_doc_comment(++tok: token::token) -> bool {
alt tok {
token::POUND | token::DOC_COMMENT(_) { true }
_ { false }
}
}
fn parse_single_class_item(vis: visibility)
-> @class_member {
if self.eat_keyword(~"let") {
if (self.eat_keyword(~"let") ||
self.token_is_keyword(~"mut", copy self.token) ||
!self.is_any_keyword(copy self.token)) &&
!self.token_is_pound_or_doc_comment(self.token) {
let a_var = self.parse_instance_var(vis);
self.expect(token::SEMI);
ret a_var;
}
else {
} else {
let m = self.parse_method(vis);
ret @{node: class_method(m), span: m.span};
}
@ -2510,7 +2519,7 @@ class parser {
self.parse_item_trait()
} else if self.eat_keyword(~"impl") {
self.parse_item_impl()
} else if self.eat_keyword(~"class") {
} else if self.eat_keyword(~"class") || self.eat_keyword(~"struct") {
self.parse_item_class()
} else if !self.is_any_keyword(copy self.token)
&& self.look_ahead(1) == token::NOT

View File

@ -313,8 +313,11 @@ fn restricted_keyword_table() -> hashmap<~str, ()> {
~"if", ~"iface", ~"impl", ~"import",
~"let", ~"log", ~"loop",
~"mod", ~"mut",
~"new", ~"owned",
~"pure", ~"ret",
~"new",
~"owned",
~"pure",
~"ret",
~"struct",
~"true", ~"trait", ~"type",
~"unchecked", ~"unsafe",
~"while"

View File

@ -103,22 +103,22 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer {
let token: ~[mut token] = vec::to_mut(vec::from_elem(n, EOF));
let size: ~[mut int] = vec::to_mut(vec::from_elem(n, 0));
let scan_stack: ~[mut uint] = vec::to_mut(vec::from_elem(n, 0u));
@{out: out,
buf_len: n,
mut margin: linewidth as int,
mut space: linewidth as int,
mut left: 0u,
mut right: 0u,
token: token,
size: size,
mut left_total: 0,
mut right_total: 0,
mut scan_stack: scan_stack,
mut scan_stack_empty: true,
mut top: 0u,
mut bottom: 0u,
print_stack: dvec(),
mut pending_indentation: 0}
printer_(@{out: out,
buf_len: n,
mut margin: linewidth as int,
mut space: linewidth as int,
mut left: 0u,
mut right: 0u,
token: token,
size: size,
mut left_total: 0,
mut right_total: 0,
mut scan_stack: scan_stack,
mut scan_stack_empty: true,
mut top: 0u,
mut bottom: 0u,
print_stack: dvec(),
mut pending_indentation: 0})
}
@ -199,7 +199,7 @@ fn mk_printer(out: io::writer, linewidth: uint) -> printer {
* the method called 'pretty_print', and the 'PRINT' process is the method
* called 'print'.
*/
type printer = @{
type printer_ = {
out: io::writer,
buf_len: uint,
mut margin: int, // width of lines we're constrained to
@ -226,6 +226,10 @@ type printer = @{
mut pending_indentation: int
};
enum printer {
printer_(@printer_)
}
impl printer for printer {
fn last_token() -> token { self.token[self.right] }
// be very careful with this!

View File

@ -1,6 +1,6 @@
// -*- rust -*-
import metadata::{creader, cstore, filesearch};
import session::session;
import session::{session, session_};
import syntax::parse;
import syntax::{ast, codemap};
import syntax::attr;
@ -168,7 +168,10 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
session::sess_os_to_meta_os(sess.targ_cfg.os),
sess.opts.static));
let { def_map: def_map, exp_map: exp_map, impl_map: impl_map } =
let { def_map: def_map,
exp_map: exp_map,
impl_map: impl_map,
trait_map: trait_map } =
time(time_passes, ~"resolution", ||
middle::resolve3::resolve_crate(sess, ast_map, crate));
@ -187,6 +190,7 @@ fn compile_upto(sess: session, cfg: ast::crate_cfg,
let (method_map, vtable_map) = time(time_passes, ~"typechecking", ||
typeck::check_crate(ty_cx,
impl_map,
trait_map,
crate));
time(time_passes, ~"const checking", ||
@ -516,11 +520,12 @@ fn build_session(sopts: @session::options,
build_session_(sopts, codemap, demitter, span_diagnostic_handler)
}
fn build_session_(
sopts: @session::options, cm: codemap::codemap,
demitter: diagnostic::emitter,
span_diagnostic_handler: diagnostic::span_handler
) -> session {
fn build_session_(sopts: @session::options,
cm: codemap::codemap,
demitter: diagnostic::emitter,
span_diagnostic_handler: diagnostic::span_handler)
-> session {
let target_cfg = build_target_config(sopts, demitter);
let cstore = cstore::mk_cstore();
let filesearch = filesearch::mk_filesearch(
@ -528,19 +533,19 @@ fn build_session_(
sopts.target_triple,
sopts.addl_lib_search_paths);
let warning_settings = lint::mk_warning_settings();
@{targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess:
session_(@{targ_cfg: target_cfg,
opts: sopts,
cstore: cstore,
parse_sess:
parse::new_parse_sess_special_handler(span_diagnostic_handler, cm),
codemap: cm,
// For a library crate, this is always none
mut main_fn: none,
span_diagnostic: span_diagnostic_handler,
filesearch: filesearch,
mut building_library: false,
working_dir: os::getcwd(),
warning_settings: warning_settings}
codemap: cm,
// For a library crate, this is always none
mut main_fn: none,
span_diagnostic: span_diagnostic_handler,
filesearch: filesearch,
mut building_library: false,
working_dir: os::getcwd(),
warning_settings: warning_settings})
}
fn parse_pretty(sess: session, &&name: ~str) -> pp_mode {

View File

@ -85,7 +85,7 @@ type options =
type crate_metadata = {name: ~str, data: ~[u8]};
type session = @{targ_cfg: @config,
type session_ = {targ_cfg: @config,
opts: @options,
cstore: metadata::cstore::cstore,
parse_sess: parse_sess,
@ -98,6 +98,10 @@ type session = @{targ_cfg: @config,
working_dir: ~str,
warning_settings: lint::warning_settings};
enum session {
session_(@session_)
}
impl session for session {
fn span_fatal(sp: span, msg: ~str) -> ! {
self.span_diagnostic.span_fatal(sp, msg)

View File

@ -10,6 +10,7 @@ import syntax::diagnostic::span_handler;
import syntax::diagnostic::expect;
import common::*;
import std::map::hashmap;
import dvec::{dvec, extensions};
export class_dtor;
export get_symbol;
@ -23,6 +24,7 @@ export lookup_method_purity;
export get_enum_variants;
export get_impls_for_mod;
export get_trait_methods;
export get_method_names_if_trait;
export each_path;
export get_type;
export get_impl_trait;
@ -140,6 +142,13 @@ fn get_trait_methods(tcx: ty::ctxt, def: ast::def_id) -> @~[ty::method] {
decoder::get_trait_methods(cdata, def.node, tcx)
}
fn get_method_names_if_trait(cstore: cstore::cstore, def: ast::def_id)
-> option<@dvec<@~str>> {
let cdata = cstore::get_crate_data(cstore, def.crate);
ret decoder::get_method_names_if_trait(cdata, def.node);
}
fn get_class_fields(tcx: ty::ctxt, def: ast::def_id) -> ~[ty::field_ty] {
let cstore = tcx.cstore;
let cdata = cstore::get_crate_data(cstore, def.crate);

View File

@ -2,6 +2,7 @@
import std::{ebml, map};
import std::map::{hashmap, str_hash};
import dvec::{dvec, extensions};
import io::writer_util;
import syntax::{ast, ast_util};
import syntax::attr;
@ -37,6 +38,7 @@ export get_crate_hash;
export get_crate_vers;
export get_impls_for_mod;
export get_trait_methods;
export get_method_names_if_trait;
export get_crate_module_paths;
export def_like;
export dl_def;
@ -640,6 +642,23 @@ fn get_trait_methods(cdata: cmd, id: ast::node_id, tcx: ty::ctxt)
@result
}
// If the item in question is a trait, returns its set of methods. Otherwise,
// returns none.
fn get_method_names_if_trait(cdata: cmd, node_id: ast::node_id)
-> option<@dvec<@~str>> {
let item = lookup_item(node_id, cdata.data);
if item_family(item) != 'I' {
ret none;
}
let resulting_method_names = @dvec();
do ebml::tagged_docs(item, tag_item_trait_method) |method| {
(*resulting_method_names).push(item_name(method));
}
ret some(resulting_method_names);
}
// Helper function that gets either fields or methods
fn get_class_members(cdata: cmd, id: ast::node_id,
p: fn(char) -> bool) -> ~[ty::field_ty] {

View File

@ -65,12 +65,16 @@ type decode_ctxt = @{
maps: maps
};
type extended_decode_ctxt = @{
type extended_decode_ctxt_ = {
dcx: decode_ctxt,
from_id_range: ast_util::id_range,
to_id_range: ast_util::id_range
};
enum extended_decode_ctxt {
extended_decode_ctxt_(@extended_decode_ctxt_)
}
iface tr {
fn tr(xcx: extended_decode_ctxt) -> self;
}
@ -112,9 +116,9 @@ fn decode_inlined_item(cdata: cstore::crate_metadata,
let ast_dsr = ebml::ebml_deserializer(ast_doc);
let from_id_range = ast_util::deserialize_id_range(ast_dsr);
let to_id_range = reserve_id_range(dcx.tcx.sess, from_id_range);
let xcx = @{dcx: dcx,
from_id_range: from_id_range,
to_id_range: to_id_range};
let xcx = extended_decode_ctxt_(@{dcx: dcx,
from_id_range: from_id_range,
to_id_range: to_id_range});
let raw_ii = decode_ast(ast_doc);
let ii = renumber_ast(xcx, raw_ii);
ast_map::map_decoded_item(tcx.sess.diagnostic(),
@ -182,13 +186,23 @@ impl of tr for span {
}
}
impl serializer_helpers<S: serializer> for S {
trait def_id_serializer_helpers {
fn emit_def_id(did: ast::def_id);
}
impl serializer_helpers<S: serializer> of def_id_serializer_helpers for S {
fn emit_def_id(did: ast::def_id) {
ast::serialize_def_id(self, did)
}
}
impl deserializer_helpers<D: deserializer> for D {
trait def_id_deserializer_helpers {
fn read_def_id(xcx: extended_decode_ctxt) -> ast::def_id;
}
impl deserializer_helpers<D: deserializer> of def_id_deserializer_helpers
for D {
fn read_def_id(xcx: extended_decode_ctxt) -> ast::def_id {
let did = ast::deserialize_def_id(self);
did.tr(xcx)
@ -370,7 +384,11 @@ fn encode_freevar_entry(ebml_w: ebml::writer, fv: freevar_entry) {
serialize_freevar_entry(ebml_w, fv)
}
impl helper for ebml::ebml_deserializer {
trait ebml_deserializer_helper {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry;
}
impl helper of ebml_deserializer_helper for ebml::ebml_deserializer {
fn read_freevar_entry(xcx: extended_decode_ctxt) -> freevar_entry {
let fv = deserialize_freevar_entry(self);
fv.tr(xcx)
@ -386,7 +404,11 @@ impl of tr for freevar_entry {
// ______________________________________________________________________
// Encoding and decoding of method_map_entry
impl helper for ebml::ebml_deserializer {
trait read_method_map_entry_helper {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry;
}
impl helper of read_method_map_entry_helper for ebml::ebml_deserializer {
fn read_method_map_entry(xcx: extended_decode_ctxt) -> method_map_entry {
let mme = deserialize_method_map_entry(self);
{derefs: mme.derefs, origin: mme.origin.tr(xcx)}
@ -412,7 +434,11 @@ impl of tr for method_origin {
// ______________________________________________________________________
// Encoding and decoding of borrow
impl helper for ebml::ebml_deserializer {
trait read_borrow_helper {
fn read_borrow(xcx: extended_decode_ctxt) -> ty::borrow;
}
impl helper of read_borrow_helper for ebml::ebml_deserializer {
fn read_borrow(xcx: extended_decode_ctxt) -> ty::borrow {
let borrow = ty::deserialize_borrow(self);
{scope_id: xcx.tr_id(borrow.scope_id),
@ -478,7 +504,12 @@ fn encode_vtable_origin(ecx: @e::encode_ctxt,
}
impl helpers for ebml::ebml_deserializer {
trait vtable_deserialization_helpers {
fn read_vtable_res(xcx: extended_decode_ctxt) -> typeck::vtable_res;
fn read_vtable_origin(xcx: extended_decode_ctxt) -> typeck::vtable_origin;
}
impl helpers of vtable_deserialization_helpers for ebml::ebml_deserializer {
fn read_vtable_res(xcx: extended_decode_ctxt) -> typeck::vtable_res {
@self.read_to_vec(|| self.read_vtable_origin(xcx) )
}
@ -530,7 +561,11 @@ impl helpers for ebml::ebml_deserializer {
// ______________________________________________________________________
// Encoding and decoding the side tables
impl helpers for @e::encode_ctxt {
trait get_ty_str_ctxt {
fn ty_str_ctxt() -> @tyencode::ctxt;
}
impl helpers of get_ty_str_ctxt for @e::encode_ctxt {
fn ty_str_ctxt() -> @tyencode::ctxt {
@{diag: self.tcx.sess.diagnostic(),
ds: e::def_to_str,
@ -540,7 +575,14 @@ impl helpers for @e::encode_ctxt {
}
}
impl helpers for ebml::writer {
trait ebml_writer_helpers {
fn emit_ty(ecx: @e::encode_ctxt, ty: ty::t);
fn emit_tys(ecx: @e::encode_ctxt, tys: ~[ty::t]);
fn emit_bounds(ecx: @e::encode_ctxt, bs: ty::param_bounds);
fn emit_tpbt(ecx: @e::encode_ctxt, tpbt: ty::ty_param_bounds_and_ty);
}
impl helpers of ebml_writer_helpers for ebml::writer {
fn emit_ty(ecx: @e::encode_ctxt, ty: ty::t) {
e::write_type(ecx, self, ty)
}
@ -572,7 +614,12 @@ impl helpers for ebml::writer {
}
}
impl writer for ebml::writer {
trait write_tag_and_id {
fn tag(tag_id: c::astencode_tag, f: fn());
fn id(id: ast::node_id);
}
impl writer of write_tag_and_id for ebml::writer {
fn tag(tag_id: c::astencode_tag, f: fn()) {
do self.wr_tag(tag_id as uint) { f() }
}
@ -724,7 +771,13 @@ fn encode_side_tables_for_id(ecx: @e::encode_ctxt,
}
}
impl decoder for ebml::doc {
trait doc_decoder_helpers {
fn as_int() -> int;
fn [](tag: c::astencode_tag) -> ebml::doc;
fn opt_child(tag: c::astencode_tag) -> option<ebml::doc>;
}
impl decoder of doc_decoder_helpers for ebml::doc {
fn as_int() -> int { ebml::doc_as_u64(self) as int }
fn [](tag: c::astencode_tag) -> ebml::doc {
ebml::get_doc(self, tag as uint)
@ -734,7 +787,17 @@ impl decoder for ebml::doc {
}
}
impl decoder for ebml::ebml_deserializer {
trait ebml_deserializer_decoder_helpers {
fn read_ty(xcx: extended_decode_ctxt) -> ty::t;
fn read_tys(xcx: extended_decode_ctxt) -> ~[ty::t];
fn read_bounds(xcx: extended_decode_ctxt) -> @~[ty::param_bound];
fn read_ty_param_bounds_and_ty(xcx: extended_decode_ctxt)
-> ty::ty_param_bounds_and_ty;
}
impl decoder of ebml_deserializer_decoder_helpers
for ebml::ebml_deserializer {
fn read_ty(xcx: extended_decode_ctxt) -> ty::t {
// Note: regions types embed local node ids. In principle, we
// should translate these node ids into the new decode

View File

@ -169,17 +169,18 @@ fn check_crate(tcx: ty::ctxt,
method_map: typeck::method_map,
last_use_map: liveness::last_use_map,
crate: @ast::crate) -> (root_map, mutbl_map) {
let bccx = @{tcx: tcx,
method_map: method_map,
last_use_map: last_use_map,
binding_map: int_hash(),
root_map: root_map(),
mutbl_map: int_hash(),
mut loaned_paths_same: 0,
mut loaned_paths_imm: 0,
mut stable_paths: 0,
mut req_pure_paths: 0,
mut guaranteed_paths: 0};
let bccx = borrowck_ctxt_(@{tcx: tcx,
method_map: method_map,
last_use_map: last_use_map,
binding_map: int_hash(),
root_map: root_map(),
mutbl_map: int_hash(),
mut loaned_paths_same: 0,
mut loaned_paths_imm: 0,
mut stable_paths: 0,
mut req_pure_paths: 0,
mut guaranteed_paths: 0});
let req_maps = gather_loans::gather_loans(bccx, crate);
check_loans::check_loans(bccx, req_maps, crate);
@ -210,7 +211,7 @@ fn check_crate(tcx: ty::ctxt,
// ----------------------------------------------------------------------
// Type definitions
type borrowck_ctxt = @{tcx: ty::ctxt,
type borrowck_ctxt_ = {tcx: ty::ctxt,
method_map: typeck::method_map,
last_use_map: liveness::last_use_map,
binding_map: binding_map,
@ -224,6 +225,10 @@ type borrowck_ctxt = @{tcx: ty::ctxt,
mut req_pure_paths: uint,
mut guaranteed_paths: uint};
// Single-variant enum wrapping @borrowck_ctxt_ so the borrow-check context
// has a nominal type instead of being a bare structural @-record.
enum borrowck_ctxt {
borrowck_ctxt_(@borrowck_ctxt_)
}
// a map mapping id's of expressions of gc'd type (@T, @[], etc) where
// the box needs to be kept live to the id of the scope for which they
// must stay live.
@ -365,7 +370,11 @@ impl of ast_node for @ast::pat {
fn span() -> span { self.span }
}
impl methods for ty::ctxt {
trait get_type_for_node {
fn ty<N: ast_node>(node: N) -> ty::t;
}
impl methods of get_type_for_node for ty::ctxt {
fn ty<N: ast_node>(node: N) -> ty::t {
ty::node_id_to_type(self, node.id())
}

View File

@ -6,17 +6,21 @@ export public_methods;
impl public_methods for borrowck_ctxt {
fn loan(cmt: cmt, mutbl: ast::mutability) -> @dvec<loan> {
let lc = @{bccx: self, loans: @dvec()};
let lc = loan_ctxt_(@{bccx: self, loans: @dvec()});
lc.loan(cmt, mutbl);
ret lc.loans;
}
}
type loan_ctxt = @{
type loan_ctxt_ = {
bccx: borrowck_ctxt,
loans: @dvec<loan>
};
// Single-variant enum wrapping @loan_ctxt_, making the loan context a
// nominal type rather than a bare structural @-record.
enum loan_ctxt {
loan_ctxt_(@loan_ctxt_)
}
impl loan_methods for loan_ctxt {
fn ok_with_loan_of(cmt: cmt,
mutbl: ast::mutability) {

View File

@ -162,14 +162,17 @@ fn get_warning_settings_level(settings: warning_settings,
// This is kind of unfortunate. It should be somewhere else, or we should use
// a persistent data structure...
fn clone_lint_modes(modes: lint_modes) -> lint_modes {
@{v: copy modes.v}
std::smallintmap::smallintmap_(@{v: copy modes.v})
}
type ctxt = {dict: lint_dict,
curr: lint_modes,
is_default: bool,
sess: session};
type ctxt_ = {dict: lint_dict,
curr: lint_modes,
is_default: bool,
sess: session};
enum ctxt {
ctxt_(ctxt_)
}
impl methods for ctxt {
fn get_level(lint: lint) -> level {
@ -216,9 +219,10 @@ impl methods for ctxt {
// we do multiple unneeded copies of the map
// if many attributes are set, but this shouldn't
// actually be a problem...
new_ctxt = {is_default: false,
curr: clone_lint_modes(new_ctxt.curr)
with new_ctxt};
new_ctxt =
ctxt_({is_default: false,
curr: clone_lint_modes(new_ctxt.curr)
with *new_ctxt});
new_ctxt.set_level(lint, new_level);
}
}
@ -271,10 +275,10 @@ fn build_settings_item(i: @ast::item, &&cx: ctxt, v: visit::vt<ctxt>) {
fn build_settings_crate(sess: session::session, crate: @ast::crate) {
let cx = {dict: get_lint_dict(),
curr: std::smallintmap::mk(),
is_default: true,
sess: sess};
let cx = ctxt_({dict: get_lint_dict(),
curr: std::smallintmap::mk(),
is_default: true,
sess: sess});
// Install defaults.
for cx.dict.each |_k, spec| { cx.set_level(spec.lint, spec.default); }
@ -291,7 +295,7 @@ fn build_settings_crate(sess: session::session, crate: @ast::crate) {
sess.warning_settings.default_settings.insert(k, v);
}
let cx = {is_default: true with cx};
let cx = ctxt_({is_default: true with *cx});
let visit = visit::mk_vt(@{
visit_item: build_settings_item

View File

@ -426,7 +426,7 @@ fn resolve_crate(sess: session, def_map: resolve::def_map, crate: @ast::crate)
type region_paramd_items = hashmap<ast::node_id, ()>;
type dep_map = hashmap<ast::node_id, @dvec<ast::node_id>>;
type determine_rp_ctxt = @{
type determine_rp_ctxt_ = {
sess: session,
ast_map: ast_map::map,
def_map: resolve::def_map,
@ -442,6 +442,10 @@ type determine_rp_ctxt = @{
mut anon_implies_rp: bool
};
enum determine_rp_ctxt {
determine_rp_ctxt_(@determine_rp_ctxt_)
}
impl methods for determine_rp_ctxt {
fn add_rp(id: ast::node_id) {
assert id != 0;
@ -608,14 +612,14 @@ fn determine_rp_in_crate(sess: session,
ast_map: ast_map::map,
def_map: resolve::def_map,
crate: @ast::crate) -> region_paramd_items {
let cx = @{sess: sess,
ast_map: ast_map,
def_map: def_map,
region_paramd_items: int_hash(),
dep_map: int_hash(),
worklist: dvec(),
mut item_id: 0,
mut anon_implies_rp: false};
let cx = determine_rp_ctxt_(@{sess: sess,
ast_map: ast_map,
def_map: def_map,
region_paramd_items: int_hash(),
dep_map: int_hash(),
worklist: dvec(),
mut item_id: 0,
mut anon_implies_rp: false});
// gather up the base set, worklist and dep_map:
let visitor = visit::mk_vt(@{

View File

@ -1,5 +1,6 @@
import driver::session::session;
import metadata::csearch::{each_path, get_impls_for_mod, lookup_defs};
import metadata::csearch::{each_path, get_impls_for_mod};
import metadata::csearch::{get_method_names_if_trait, lookup_defs};
import metadata::cstore::find_use_stmt_cnum;
import metadata::decoder::{def_like, dl_def, dl_field, dl_impl};
import middle::lint::{error, ignore, level, unused_imports, warn};
@ -59,6 +60,9 @@ type ImplScope = @~[@Impl];
type ImplScopes = @list<ImplScope>;
type ImplMap = hashmap<node_id,ImplScopes>;
// Trait method resolution
type TraitMap = @hashmap<node_id,@dvec<def_id>>;
// Export mapping
type Export = { reexp: bool, id: def_id };
type ExportMap = hashmap<node_id, ~[Export]>;
@ -599,6 +603,8 @@ class Resolver {
let unused_import_lint_level: level;
let trait_info: hashmap<def_id,@hashmap<Atom,()>>;
// The number of imports that are currently unresolved.
let mut unresolved_imports: uint;
@ -617,6 +623,9 @@ class Resolver {
// allowed to access private names of any module.
let mut xray_context: XrayFlag;
// The trait that the current context can refer to.
let mut current_trait_ref: option<def_id>;
// The atom for the keyword "self".
let self_atom: Atom;
@ -629,6 +638,7 @@ class Resolver {
let def_map: DefMap;
let impl_map: ImplMap;
let export_map: ExportMap;
let trait_map: TraitMap;
new(session: session, ast_map: ASTMap, crate: @crate) {
self.session = session;
@ -646,12 +656,16 @@ class Resolver {
self.unused_import_lint_level = unused_import_lint_level(session);
self.trait_info = new_def_hash();
self.unresolved_imports = 0u;
self.current_module = (*self.graph_root).get_module();
self.value_ribs = @dvec();
self.type_ribs = @dvec();
self.xray_context = NoXray;
self.current_trait_ref = none;
self.self_atom = (*self.atom_table).intern(@~"self");
self.primitive_type_table = @PrimitiveTypeTable(self.atom_table);
@ -661,6 +675,7 @@ class Resolver {
self.def_map = int_hash();
self.impl_map = int_hash();
self.export_map = int_hash();
self.trait_map = @int_hash();
}
/// The main name resolution procedure.
@ -930,14 +945,34 @@ class Resolver {
visit_item(item, new_parent, visitor);
}
item_trait(*) {
(*name_bindings).define_type(def_ty(local_def(item.id)));
item_trait(_, methods) {
// Add the names of all the methods to the trait info.
let method_names = @atom_hashmap();
for methods.each |method| {
let atom;
alt method {
required(required_method) {
atom = (*self.atom_table).intern
(required_method.ident);
}
provided(provided_method) {
atom = (*self.atom_table).intern
(provided_method.ident);
}
}
(*method_names).insert(atom, ());
}
let def_id = local_def(item.id);
self.trait_info.insert(def_id, method_names);
(*name_bindings).define_type(def_ty(def_id));
visit_item(item, new_parent, visitor);
}
item_mac(*) {
fail ~"item macros unimplemented"
}
item_mac(*) {
fail ~"item macros unimplemented"
}
}
}
@ -1300,6 +1335,34 @@ class Resolver {
def_ty(def_id) {
#debug("(building reduced graph for external \
crate) building type %s", final_ident);
// If this is a trait, add all the method names
// to the trait info.
alt get_method_names_if_trait(self.session.cstore,
def_id) {
none {
// Nothing to do.
}
some(method_names) {
let interned_method_names =
@atom_hashmap();
for method_names.each |method_name| {
#debug("(building reduced graph for \
external crate) ... adding \
trait method '%?'",
method_name);
let atom =
(*self.atom_table).intern
(method_name);
(*interned_method_names).insert(atom,
());
}
self.trait_info.insert
(def_id, interned_method_names);
}
}
(*child_name_bindings).define_type(def);
}
def_class(def_id) {
@ -2724,7 +2787,9 @@ class Resolver {
// Move down in the graph.
alt name {
none { /* Nothing to do. */ }
none {
// Nothing to do.
}
some(name) {
alt orig_module.children.find(name) {
none {
@ -2903,6 +2968,7 @@ class Resolver {
item_impl(type_parameters, interface_reference, self_type,
methods) {
self.resolve_implementation(item.id,
item.span,
type_parameters,
@ -2922,8 +2988,7 @@ class Resolver {
// Create a new rib for the interface-wide type parameters.
do self.with_type_parameter_rib
(HasTypeParameters(&type_parameters, item.id, 0u,
NormalRibKind))
|| {
NormalRibKind)) {
self.resolve_type_parameters(type_parameters, visitor);
@ -2939,8 +3004,7 @@ class Resolver {
(HasTypeParameters(&ty_m.tps,
item.id,
type_parameters.len(),
NormalRibKind))
|| {
NormalRibKind)) {
// Resolve the method-specific type
// parameters.
@ -3318,13 +3382,13 @@ class Resolver {
let borrowed_type_parameters: &~[ty_param] = &type_parameters;
do self.with_type_parameter_rib(HasTypeParameters
(borrowed_type_parameters, id, 0u,
NormalRibKind))
|| {
NormalRibKind)) {
// Resolve the type parameters.
self.resolve_type_parameters(type_parameters, visitor);
// Resolve the interface reference, if necessary.
let original_trait_ref = self.current_trait_ref;
alt interface_reference {
none {
// Nothing to do.
@ -3339,6 +3403,9 @@ class Resolver {
}
some(def) {
self.record_def(interface_reference.ref_id, def);
// Record the current trait reference.
self.current_trait_ref = some(def_id_of_def(def));
}
}
}
@ -3364,6 +3431,9 @@ class Resolver {
NoCaptureClause,
visitor);
}
// Restore the original trait reference.
self.current_trait_ref = original_trait_ref;
}
}
@ -3828,9 +3898,10 @@ class Resolver {
ret ImportNameDefinition(def);
}
none {
fail ~"target for namespace doesn't refer to \
bindings that contain a definition for \
that namespace!";
// This can happen with external impls, due to
// the imperfect way we read the metadata.
ret NoNameDefinition;
}
}
}
@ -4040,6 +4111,11 @@ class Resolver {
self.record_impls_for_expr_if_necessary(expr);
// Then record candidate traits for this expression if it could result
// in the invocation of a method call.
self.record_candidate_traits_for_expr_if_necessary(expr);
// Next, resolve the node.
alt expr.node {
// The interpretation of paths depends on whether the path has
@ -4101,6 +4177,109 @@ class Resolver {
}
}
// If this expression could turn out to be a method invocation, record in
// trait_map (keyed by the expression id) every trait in scope that
// declares a method with the relevant name. Typeck's method lookup reads
// trait_map to find extension-method candidates.
fn record_candidate_traits_for_expr_if_necessary(expr: @expr) {
alt expr.node {
expr_field(_, ident, _) {
// A field access `a.b` may actually be a method call, so
// gather all visible traits that declare a method named `b`.
let atom = (*self.atom_table).intern(ident);
let traits = self.search_for_traits_containing_method(atom);
self.trait_map.insert(expr.id, traits);
}
_ {
// Nothing to do.
//
// XXX: Handle more here... operator overloading, placement
// new, etc.
}
}
}
// Walks outward from the current module through its chain of parents,
// collecting the def_ids of every visible trait that declares a method
// named `name`. At each module level three sources are consulted: the
// trait currently being implemented (current_trait_ref, if any), traits
// defined directly as children of the module, and traits brought into
// scope by imports.
fn search_for_traits_containing_method(name: Atom) -> @dvec<def_id> {
let found_traits = @dvec();
let mut search_module = self.current_module;
loop {
// Look for the current trait.
alt copy self.current_trait_ref {
some(trait_def_id) {
self.add_trait_info_if_containing_method(found_traits,
trait_def_id,
name);
}
none {
// Nothing to do.
}
}
// Look for trait children.
for search_module.children.each |_name, child_name_bindings| {
alt child_name_bindings.def_for_namespace(TypeNS) {
some(def_ty(trait_def_id)) {
self.add_trait_info_if_containing_method(found_traits,
trait_def_id,
name);
}
some(_) | none {
// Continue.
}
}
}
// Look for imports.
for search_module.import_resolutions.each
|_atom, import_resolution| {
alt import_resolution.target_for_namespace(TypeNS) {
none {
// Continue.
}
some(target) {
// The import resolved to something in the type
// namespace; only trait definitions are of interest.
alt target.bindings.def_for_namespace(TypeNS) {
some(def_ty(trait_def_id)) {
self.add_trait_info_if_containing_method
(found_traits, trait_def_id, name);
}
some(_) | none {
// Continue.
}
}
}
}
}
// Move to the next parent.
alt search_module.parent_link {
NoParentLink {
// Reached the crate root; nothing further to search.
// Done.
break;
}
ModuleParentLink(parent_module, _) |
BlockParentLink(parent_module, _) {
search_module = parent_module;
}
}
}
ret found_traits;
}
// If trait_info records that the trait `trait_def_id` declares a method
// named `name`, push the trait's def_id onto `found_traits`; otherwise
// (no info for the trait, or no such method) do nothing.
fn add_trait_info_if_containing_method(found_traits: @dvec<def_id>,
trait_def_id: def_id,
name: Atom) {
alt self.trait_info.find(trait_def_id) {
some(trait_info) if trait_info.contains_key(name) {
#debug("(adding trait info if containing method) found trait \
%d:%d for method '%s'",
trait_def_id.crate,
trait_def_id.node,
*(*self.atom_table).atom_to_str(name));
(*found_traits).push(trait_def_id);
}
some(_) | none {
// Continue.
}
}
}
fn record_def(node_id: node_id, def: def) {
#debug("(recording def) recording %? for %?", def, node_id);
self.def_map.insert(node_id, def);
@ -4310,14 +4489,18 @@ class Resolver {
/// Entry point to crate resolution.
fn resolve_crate(session: session, ast_map: ASTMap, crate: @crate)
-> { def_map: DefMap, exp_map: ExportMap, impl_map: ImplMap } {
-> { def_map: DefMap,
exp_map: ExportMap,
impl_map: ImplMap,
trait_map: TraitMap } {
let resolver = @Resolver(session, ast_map, crate);
(*resolver).resolve(resolver);
ret {
def_map: resolver.def_map,
exp_map: resolver.export_map,
impl_map: resolver.impl_map
impl_map: resolver.impl_map,
trait_map: resolver.trait_map
};
}

View File

@ -94,7 +94,11 @@ class icx_popper {
}
}
impl ccx_icx for @crate_ctxt {
trait get_insn_ctxt {
fn insn_ctxt(s: ~str) -> icx_popper;
}
impl ccx_icx of get_insn_ctxt for @crate_ctxt {
fn insn_ctxt(s: ~str) -> icx_popper {
#debug("new insn_ctxt: %s", s);
if self.sess.count_llvm_insns() {
@ -104,13 +108,13 @@ impl ccx_icx for @crate_ctxt {
}
}
impl bcx_icx for block {
impl bcx_icx of get_insn_ctxt for block {
fn insn_ctxt(s: ~str) -> icx_popper {
self.ccx().insn_ctxt(s)
}
}
impl fcx_icx for fn_ctxt {
impl fcx_icx of get_insn_ctxt for fn_ctxt {
fn insn_ctxt(s: ~str) -> icx_popper {
self.ccx.insn_ctxt(s)
}

View File

@ -348,19 +348,23 @@ type scope_info = {
mut landing_pad: option<BasicBlockRef>,
};
impl node_info for @ast::expr {
trait get_node_info {
fn info() -> option<node_info>;
}
impl node_info of get_node_info for @ast::expr {
fn info() -> option<node_info> {
some({id: self.id, span: self.span})
}
}
impl node_info for ast::blk {
impl node_info of get_node_info for ast::blk {
fn info() -> option<node_info> {
some({id: self.node.id, span: self.span})
}
}
impl node_info for option<@ast::expr> {
impl node_info of get_node_info for option<@ast::expr> {
fn info() -> option<node_info> {
self.chain(|s| s.info())
}

View File

@ -165,6 +165,7 @@ export terr_sorts, terr_vec, terr_str, terr_record_size, terr_tuple_size;
export terr_regions_differ, terr_mutability, terr_purity_mismatch;
export terr_proto_mismatch;
export terr_ret_style_mismatch;
export purity_to_str;
// Data types
@ -441,7 +442,11 @@ impl of vid for region_vid {
fn to_str() -> ~str { #fmt["<R%u>", self.to_uint()] }
}
impl of to_str::to_str for purity {
trait purity_to_str {
fn to_str() -> ~str;
}
impl of purity_to_str for purity {
fn to_str() -> ~str {
purity_to_str(self)
}
@ -2359,7 +2364,8 @@ fn type_err_to_str(cx: ctxt, err: type_err) -> ~str {
~" function was expected";
}
terr_purity_mismatch(f1, f2) {
ret #fmt["expected %s fn but found %s fn", f1.to_str(), f2.to_str()];
ret #fmt["expected %s fn but found %s fn",
purity_to_str(f1), purity_to_str(f2)];
}
terr_proto_mismatch(e, a) {
ret #fmt["closure protocol mismatch (%s vs %s)",

View File

@ -154,10 +154,16 @@ type ty_param_substs_and_ty = {substs: ty::substs, ty: ty::t};
type ty_table = hashmap<ast::def_id, ty::t>;
type crate_ctxt = {impl_map: resolve::impl_map,
method_map: method_map,
vtable_map: vtable_map,
tcx: ty::ctxt};
type crate_ctxt_ = {impl_map: resolve::impl_map,
trait_map: resolve3::TraitMap,
method_map: method_map,
vtable_map: vtable_map,
coherence_info: @coherence::CoherenceInfo,
tcx: ty::ctxt};
enum crate_ctxt {
crate_ctxt_(crate_ctxt_)
}
// Functions that write types into the node type table
fn write_ty_to_tcx(tcx: ty::ctxt, node_id: ast::node_id, ty: ty::t) {
@ -284,17 +290,20 @@ fn check_for_main_fn(ccx: @crate_ctxt) {
}
}
fn check_crate(tcx: ty::ctxt, impl_map: resolve::impl_map,
crate: @ast::crate) -> (method_map, vtable_map) {
let ccx = @{impl_map: impl_map,
method_map: std::map::int_hash(),
vtable_map: std::map::int_hash(),
tcx: tcx};
collect::collect_item_types(ccx, crate);
fn check_crate(tcx: ty::ctxt,
impl_map: resolve::impl_map,
trait_map: resolve3::TraitMap,
crate: @ast::crate)
-> (method_map, vtable_map) {
if tcx.sess.coherence() {
coherence::check_coherence(ccx, crate);
}
let ccx = @crate_ctxt_({impl_map: impl_map,
trait_map: trait_map,
method_map: std::map::int_hash(),
vtable_map: std::map::int_hash(),
coherence_info: @coherence::CoherenceInfo(),
tcx: tcx});
collect::collect_item_types(ccx, crate);
coherence::check_coherence(ccx, crate);
check::check_item_types(ccx, crate);
check_for_main_fn(ccx);

View File

@ -76,7 +76,7 @@ import syntax::ast::ty_i;
import typeck::infer::{unify_methods}; // infcx.set()
import typeck::infer::{resolve_type, force_tvar};
type fn_ctxt =
type fn_ctxt_ =
// var_bindings, locals and next_var_id are shared
// with any nested functions that capture the environment
// (and with any functions whose environment is being captured).
@ -111,30 +111,39 @@ type fn_ctxt =
ccx: @crate_ctxt};
enum fn_ctxt {
fn_ctxt_(fn_ctxt_)
}
// Used by check_const and check_enum_variants
fn blank_fn_ctxt(ccx: @crate_ctxt, rty: ty::t,
region_bnd: ast::node_id) -> @fn_ctxt {
// It's kind of a kludge to manufacture a fake function context
// and statement context, but we might as well do write the code only once
@{self_ty: none,
ret_ty: rty,
indirect_ret_ty: none,
purity: ast::pure_fn,
infcx: infer::new_infer_ctxt(ccx.tcx),
locals: int_hash(),
mut region_lb: region_bnd,
mut region_ub: region_bnd,
in_scope_regions: @nil,
node_types: smallintmap::mk(),
node_type_substs: map::int_hash(),
ccx: ccx}
@fn_ctxt_({self_ty: none,
ret_ty: rty,
indirect_ret_ty: none,
purity: ast::pure_fn,
infcx: infer::new_infer_ctxt(ccx.tcx),
locals: int_hash(),
mut region_lb: region_bnd,
mut region_ub: region_bnd,
in_scope_regions: @nil,
node_types: smallintmap::mk(),
node_type_substs: map::int_hash(),
ccx: ccx})
}
// a list of mapping from in-scope-region-names ("isr") to the
// corresponding ty::region
type isr_alist = @list<(ty::bound_region, ty::region)>;
impl methods for isr_alist {
trait get_and_find_region {
fn get(br: ty::bound_region) -> ty::region;
fn find(br: ty::bound_region) -> option<ty::region>;
}
impl methods of get_and_find_region for isr_alist {
fn get(br: ty::bound_region) -> ty::region {
option::get(self.find(br))
}
@ -227,18 +236,18 @@ fn check_fn(ccx: @crate_ctxt,
}
} else { none };
@{self_ty: self_ty,
ret_ty: ret_ty,
indirect_ret_ty: indirect_ret_ty,
purity: purity,
infcx: infcx,
locals: locals,
mut region_lb: body.node.id,
mut region_ub: body.node.id,
in_scope_regions: isr,
node_types: node_types,
node_type_substs: node_type_substs,
ccx: ccx}
@fn_ctxt_({self_ty: self_ty,
ret_ty: ret_ty,
indirect_ret_ty: indirect_ret_ty,
purity: purity,
infcx: infcx,
locals: locals,
mut region_lb: body.node.id,
mut region_ub: body.node.id,
in_scope_regions: isr,
node_types: node_types,
node_type_substs: node_type_substs,
ccx: ccx})
};
gather_locals(fcx, decl, body, arg_tys);
@ -1787,8 +1796,8 @@ fn check_block_no_value(fcx: @fn_ctxt, blk: ast::blk) -> bool {
fn check_block(fcx0: @fn_ctxt, blk: ast::blk) -> bool {
let fcx = alt blk.node.rules {
ast::unchecked_blk { @{purity: ast::impure_fn with *fcx0} }
ast::unsafe_blk { @{purity: ast::unsafe_fn with *fcx0} }
ast::unchecked_blk { @fn_ctxt_({purity: ast::impure_fn with **fcx0}) }
ast::unsafe_blk { @fn_ctxt_({purity: ast::unsafe_fn with **fcx0}) }
ast::default_blk { fcx0 }
};
do fcx.with_region_lb(blk.node.id) {

View File

@ -1,9 +1,12 @@
/* Code to handle method lookups (which can be quite complex) */
import coherence::get_base_type_def_id;
import middle::resolve3::Impl;
import middle::typeck::infer::methods; // next_ty_vars
import syntax::ast::def_id;
import syntax::ast_map;
import syntax::ast_map::node_id_to_str;
import syntax::ast_util::new_def_hash;
import middle::typeck::infer::methods; // next_ty_vars
import dvec::{dvec, extensions};
type candidate = {
@ -55,11 +58,34 @@ class lookup {
// Entrypoint:
fn method() -> option<method_map_entry> {
#debug["method lookup(m_name=%s, self_ty=%s)",
*self.m_name, self.fcx.infcx.ty_to_str(self.self_ty)];
#debug["method lookup(m_name=%s, self_ty=%s, %?)",
*self.m_name, self.fcx.infcx.ty_to_str(self.self_ty),
ty::get(self.self_ty).struct];
// Determine if there are any inherent methods we can call.
let optional_inherent_methods;
alt get_base_type_def_id(self.fcx.infcx,
self.self_expr.span,
self.self_ty) {
none {
optional_inherent_methods = none;
}
some(base_type_def_id) {
#debug("(checking method) found base type");
optional_inherent_methods =
self.fcx.ccx.coherence_info.inherent_methods.find
(base_type_def_id);
if optional_inherent_methods.is_none() {
#debug("(checking method) ... no inherent methods found");
} else {
#debug("(checking method) ... inherent methods found");
}
}
}
loop {
// First, see whether this is an interface-bounded parameter
// First, see whether this is an interface-bounded parameter.
alt ty::get(self.self_ty).struct {
ty::ty_param(n, did) {
self.add_candidates_from_param(n, did);
@ -83,12 +109,20 @@ class lookup {
// would require doing an implicit borrow of the lhs.
self.add_candidates_from_scope(false);
// Look for inherent methods.
self.add_inherent_and_extension_candidates
(optional_inherent_methods, false);
// if we found anything, stop before trying borrows
if self.candidates.len() > 0u { break; }
// now look for impls in scope that might require a borrow
self.add_candidates_from_scope(true);
// Again, look for inherent methods.
self.add_inherent_and_extension_candidates
(optional_inherent_methods, true);
// if we found anything, stop before attempting auto-deref.
if self.candidates.len() > 0u { break; }
@ -296,6 +330,14 @@ class lookup {
}
fn add_candidates_from_scope(use_assignability: bool) {
// If we're using coherence and this is one of the method invocation
// forms it supports, don't use this method; it'll result in lots of
// multiple-methods-in-scope errors.
if self.fcx.ccx.trait_map.contains_key(self.expr.id) {
ret;
}
let impls_vecs = self.fcx.ccx.impl_map.get(self.expr.id);
let mut added_any = false;
@ -303,43 +345,8 @@ class lookup {
for list::each(impls_vecs) |impls| {
for vec::each(*impls) |im| {
// Check whether this impl has a method with the right name.
for im.methods.find(|m| m.ident == self.m_name).each |m| {
// determine the `self` of the impl with fresh
// variables for each parameter:
let {substs: impl_substs, ty: impl_ty} =
impl_self_ty(self.fcx, im.did);
// Depending on our argument, we find potential
// matches either by checking subtypability or
// type assignability. Collect the matches.
let matches = if use_assignability {
self.fcx.can_mk_assignty(
self.self_expr, self.borrow_lb,
self.self_ty, impl_ty)
} else {
self.fcx.can_mk_subty(self.self_ty, impl_ty)
};
#debug["matches = %?", matches];
alt matches {
result::err(_) { /* keep looking */ }
result::ok(_) {
if !self.candidate_impls.contains_key(im.did) {
let fty = self.ty_from_did(m.did);
self.candidates.push(
{self_ty: self.self_ty,
self_substs: impl_substs,
rcvr_ty: impl_ty,
n_tps_m: m.n_tps,
fty: fty,
entry: {derefs: self.derefs,
origin: method_static(m.did)}});
self.candidate_impls.insert(im.did, ());
added_any = true;
}
}
}
if self.add_candidates_from_impl(im, use_assignability) {
added_any = true;
}
}
@ -349,6 +356,53 @@ class lookup {
}
}
// Considers the single impl `im` as a source of method candidates: if it
// has a method matching self.m_name whose receiver type is compatible
// with self.self_ty, a candidate is pushed onto self.candidates.
// `use_assignability` selects the compatibility test: type assignability
// (which may involve an implicit borrow) versus plain subtyping.
// Returns true if any were added and false otherwise.
fn add_candidates_from_impl(im: @resolve3::Impl,
use_assignability: bool) -> bool {
let mut added_any = false;
// Check whether this impl has a method with the right name.
for im.methods.find(|m| m.ident == self.m_name).each |m| {
// determine the `self` of the impl with fresh
// variables for each parameter:
let {substs: impl_substs, ty: impl_ty} =
impl_self_ty(self.fcx, im.did);
// Depending on our argument, we find potential
// matches either by checking subtypability or
// type assignability. Collect the matches.
let matches = if use_assignability {
self.fcx.can_mk_assignty(self.self_expr, self.borrow_lb,
self.self_ty, impl_ty)
} else {
self.fcx.can_mk_subty(self.self_ty, impl_ty)
};
#debug["matches = %?", matches];
alt matches {
result::err(_) { /* keep looking */ }
result::ok(_) {
// candidate_impls guards against registering the same
// impl twice (it may be reachable through several paths).
if !self.candidate_impls.contains_key(im.did) {
let fty = self.ty_from_did(m.did);
self.candidates.push(
{self_ty: self.self_ty,
self_substs: impl_substs,
rcvr_ty: impl_ty,
n_tps_m: m.n_tps,
fty: fty,
entry: {derefs: self.derefs,
origin: method_static(m.did)}});
self.candidate_impls.insert(im.did, ());
added_any = true;
}
}
}
}
ret added_any;
}
fn add_candidates_from_m(self_substs: ty::substs,
m: ty::method,
origin: method_origin) {
@ -367,6 +421,58 @@ class lookup {
entry: {derefs: self.derefs, origin: origin}});
}
// Gathers method candidates from two coherence-tracked sources: the
// inherent methods recorded for the receiver's base type (passed in as
// `optional_inherent_methods`), and extension methods of any trait the
// resolve pass recorded for this expression in trait_map. Each matching
// impl is fed through add_candidates_from_impl; `use_assignability`
// is forwarded to control the receiver-compatibility test.
fn add_inherent_and_extension_candidates(optional_inherent_methods:
option<@dvec<@Impl>>,
use_assignability: bool) {
// Add inherent methods.
alt optional_inherent_methods {
none {
// Continue.
}
some(inherent_methods) {
#debug("(adding inherent and extension candidates) adding \
inherent candidates");
for inherent_methods.each |implementation| {
#debug("(adding inherent and extension candidates) \
adding candidates from impl: %s",
node_id_to_str(self.tcx().items,
implementation.did.node));
self.add_candidates_from_impl(implementation,
use_assignability);
}
}
}
// Add trait methods.
alt self.fcx.ccx.trait_map.find(self.expr.id) {
none {
// XXX: This particular operation is not yet trait-ified;
// leave it alone for now.
}
some(trait_ids) {
for (*trait_ids).each |trait_id| {
#debug("(adding inherent and extension candidates) \
trying trait: %s",
node_id_to_str(self.tcx().items, trait_id.node));
// Look up the impls registered for this trait by the
// coherence pass.
let coherence_info = self.fcx.ccx.coherence_info;
alt coherence_info.extension_methods.find(trait_id) {
none {
// Do nothing.
}
some(extension_methods) {
for extension_methods.each |implementation| {
self.add_candidates_from_impl
(implementation, use_assignability);
}
}
}
}
}
}
}
fn write_mty_from_candidate(cand: candidate) -> method_map_entry {
let tcx = self.fcx.ccx.tcx;

View File

@ -4,40 +4,121 @@
// has at most one implementation for each type. Then we build a mapping from
// each trait in the system to its implementations.
import middle::ty::{get, t, ty_box, ty_uniq, ty_ptr, ty_rptr, ty_enum};
import metadata::csearch::{each_path, get_impl_trait, get_impls_for_mod};
import metadata::cstore::{cstore, iter_crate_data};
import metadata::decoder::{dl_def, dl_field, dl_impl};
import middle::resolve3::Impl;
import middle::ty::{get, lookup_item_type, subst, t, ty_box};
import middle::ty::{ty_uniq, ty_ptr, ty_rptr, ty_enum};
import middle::ty::{ty_class, ty_nil, ty_bot, ty_bool, ty_int, ty_uint};
import middle::ty::{ty_float, ty_estr, ty_evec, ty_rec};
import middle::ty::{ty_fn, ty_trait, ty_tup, ty_var, ty_var_integral};
import middle::ty::{ty_param, ty_self, ty_type, ty_opaque_box};
import middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, new_ty_hash};
import middle::ty::{subst};
import middle::typeck::infer::{infer_ctxt, mk_subty, new_infer_ctxt};
import syntax::ast::{crate, def_id, item, item_class, item_const, item_enum};
import syntax::ast::{item_fn, item_foreign_mod, item_impl, item_mac};
import syntax::ast::{item_mod, item_trait, item_ty, local_crate, method};
import syntax::ast::{node_id, trait_ref};
import syntax::ast_util::{def_id_of_def, new_def_hash};
import middle::ty::{ty_opaque_closure_ptr, ty_unboxed_vec, type_is_var};
import middle::typeck::infer::{infer_ctxt, mk_subty};
import middle::typeck::infer::{new_infer_ctxt, resolve_ivar, resolve_type};
import syntax::ast::{crate, def_id, def_mod, item, item_class, item_const};
import syntax::ast::{item_enum, item_fn, item_foreign_mod, item_impl};
import syntax::ast::{item_mac, item_mod, item_trait, item_ty, local_crate};
import syntax::ast::{method, node_id, region_param, rp_none, rp_self};
import syntax::ast::{trait_ref};
import syntax::ast_map::node_item;
import syntax::ast_util::{def_id_of_def, dummy_sp, new_def_hash};
import syntax::codemap::span;
import syntax::visit::{default_simple_visitor, default_visitor};
import syntax::visit::{mk_simple_visitor, mk_vt, visit_crate, visit_item};
import syntax::visit::{visit_mod};
import util::ppaux::ty_to_str;
import dvec::{dvec, extensions};
import result::{extensions};
import result::{extensions, ok};
import std::map::{hashmap, int_hash};
import uint::range;
import vec::{len, push};
fn get_base_type(inference_context: infer_ctxt, span: span, original_type: t)
-> option<t> {
let resolved_type;
alt resolve_type(inference_context,
original_type,
resolve_ivar) {
ok(resulting_type) if !type_is_var(resulting_type) {
resolved_type = resulting_type;
}
_ {
inference_context.tcx.sess.span_fatal(span,
~"the type of this value \
must be known in order \
to determine the base \
type");
}
}
alt get(resolved_type).struct {
ty_box(base_mutability_and_type) |
ty_uniq(base_mutability_and_type) |
ty_ptr(base_mutability_and_type) |
ty_rptr(_, base_mutability_and_type) {
#debug("(getting base type) recurring");
get_base_type(inference_context, span,
base_mutability_and_type.ty)
}
ty_enum(*) | ty_trait(*) | ty_class(*) {
#debug("(getting base type) found base type");
some(resolved_type)
}
ty_nil | ty_bot | ty_bool | ty_int(*) | ty_uint(*) | ty_float(*) |
ty_estr(*) | ty_evec(*) | ty_rec(*) |
ty_fn(*) | ty_tup(*) | ty_var(*) | ty_var_integral(*) |
ty_param(*) | ty_self | ty_type | ty_opaque_box |
ty_opaque_closure_ptr(*) | ty_unboxed_vec(*) {
#debug("(getting base type) no base type; found %?",
get(original_type).struct);
none
}
}
}
// Returns the def ID of the base type, if there is one.
//
// Thin wrapper over get_base_type(): projects the nominal base type
// (enum, class, or trait) down to its def_id. Fails (compiler bug) if
// get_base_type() ever returns a non-nominal type, since by its own
// contract it only yields enums, classes, or traits.
fn get_base_type_def_id(inference_context: infer_ctxt,
span: span,
original_type: t)
-> option<def_id> {
alt get_base_type(inference_context, span, original_type) {
none {
ret none;
}
some(base_type) {
alt get(base_type).struct {
ty_enum(def_id, _) |
ty_class(def_id, _) |
ty_trait(def_id, _) {
ret some(def_id);
}
_ {
fail ~"get_base_type() returned a type that wasn't an \
enum, class, or trait";
}
}
}
}
}
class CoherenceInfo {
// Contains implementations of methods that are inherent to a type.
// Methods in these implementations don't need to be exported.
let inherent_methods: hashmap<t,@dvec<@item>>;
let inherent_methods: hashmap<def_id,@dvec<@Impl>>;
// Contains implementations of methods associated with a trait. For these,
// the associated trait must be imported at the call site.
let extension_methods: hashmap<def_id,@dvec<@item>>;
let extension_methods: hashmap<def_id,@dvec<@Impl>>;
new() {
self.inherent_methods = new_ty_hash();
self.inherent_methods = new_def_hash();
self.extension_methods = new_def_hash();
}
}
@ -45,11 +126,10 @@ class CoherenceInfo {
class CoherenceChecker {
let crate_context: @crate_ctxt;
let inference_context: infer_ctxt;
let info: @CoherenceInfo;
// A mapping from implementations to the corresponding base type
// definition ID.
let base_type_def_ids: hashmap<node_id,def_id>;
let base_type_def_ids: hashmap<def_id,def_id>;
// A set of implementations in privileged scopes; i.e. those
// implementations that are defined in the same scope as their base types.
@ -62,9 +142,8 @@ class CoherenceChecker {
new(crate_context: @crate_ctxt) {
self.crate_context = crate_context;
self.inference_context = new_infer_ctxt(crate_context.tcx);
self.info = @CoherenceInfo();
self.base_type_def_ids = int_hash();
self.base_type_def_ids = new_def_hash();
self.privileged_implementations = int_hash();
self.privileged_types = new_def_hash();
}
@ -88,12 +167,20 @@ class CoherenceChecker {
}));
// Check trait coherence.
for self.info.extension_methods.each |def_id, items| {
for self.crate_context.coherence_info.extension_methods.each
|def_id, items| {
self.check_implementation_coherence(def_id, items);
}
// Check whether traits with base types are in privileged scopes.
self.check_privileged_scopes(crate);
// Bring in external crates. It's fine for this to happen after the
// coherence checks, because we ensure by construction that no errors
// can happen at link time.
self.add_external_crates();
}
fn check_implementation(item: @item,
@ -102,111 +189,89 @@ class CoherenceChecker {
let self_type = self.crate_context.tcx.tcache.get(local_def(item.id));
alt optional_associated_trait {
none {
alt self.get_base_type(self_type.ty) {
alt get_base_type_def_id(self.inference_context,
item.span,
self_type.ty) {
none {
let session = self.crate_context.tcx.sess;
session.span_warn(item.span,
~"no base type found for inherent \
implementation; implement a trait \
instead");
session.span_err(item.span,
~"no base type found for inherent \
implementation; implement a \
trait instead");
}
some(base_type) {
let implementation_list;
alt self.info.inherent_methods.find(base_type) {
none {
implementation_list = @dvec();
}
some(existing_implementation_list) {
implementation_list =
existing_implementation_list;
}
}
implementation_list.push(item);
some(_) {
// Nothing to do.
}
}
}
some(associated_trait) {
let def =
self.crate_context.tcx.def_map.get(associated_trait.ref_id);
let def_id = def_id_of_def(def);
let implementation_list;
alt self.info.extension_methods.find(def_id) {
none {
implementation_list = @dvec();
}
some(existing_implementation_list) {
implementation_list = existing_implementation_list;
}
}
implementation_list.push(item);
let def = self.crate_context.tcx.def_map.get
(associated_trait.ref_id);
let implementation = self.create_impl_from_item(item);
self.add_trait_method(def_id_of_def(def), implementation);
}
}
// Add the implementation to the mapping from implementation to base
// type def ID, if there is a base type for this implementation.
alt self.get_base_type_def_id(self_type.ty) {
alt get_base_type_def_id(self.inference_context,
item.span,
self_type.ty) {
none {
// Nothing to do.
}
some(base_type_def_id) {
self.base_type_def_ids.insert(item.id, base_type_def_id);
let implementation = self.create_impl_from_item(item);
self.add_inherent_method(base_type_def_id, implementation);
self.base_type_def_ids.insert(local_def(item.id),
base_type_def_id);
}
}
}
fn get_base_type(original_type: t) -> option<t> {
alt get(original_type).struct {
ty_box(base_mutability_and_type) |
ty_uniq(base_mutability_and_type) |
ty_ptr(base_mutability_and_type) |
ty_rptr(_, base_mutability_and_type) {
self.get_base_type(base_mutability_and_type.ty)
}
fn add_inherent_method(base_def_id: def_id, implementation: @Impl) {
let implementation_list;
alt self.crate_context.coherence_info.inherent_methods
.find(base_def_id) {
ty_enum(*) | ty_trait(*) | ty_class(*) {
some(original_type)
}
ty_nil | ty_bot | ty_bool | ty_int(*) | ty_uint(*) | ty_float(*) |
ty_estr(*) | ty_evec(*) | ty_rec(*) |
ty_fn(*) | ty_tup(*) | ty_var(*) | ty_var_integral(*) |
ty_param(*) | ty_self | ty_type | ty_opaque_box |
ty_opaque_closure_ptr(*) | ty_unboxed_vec(*) {
none
}
}
}
// Returns the def ID of the base type.
fn get_base_type_def_id(original_type: t) -> option<def_id> {
alt self.get_base_type(original_type) {
none {
ret none;
implementation_list = @dvec();
self.crate_context.coherence_info.inherent_methods
.insert(base_def_id, implementation_list);
}
some(base_type) {
alt get(base_type).struct {
ty_enum(def_id, _) |
ty_class(def_id, _) |
ty_trait(def_id, _) {
ret some(def_id);
}
_ {
fail ~"get_base_type() returned a type that \
wasn't an enum, class, or trait";
}
}
some(existing_implementation_list) {
implementation_list = existing_implementation_list;
}
}
implementation_list.push(implementation);
}
// Registers `implementation` as an extension (trait) implementation of
// the trait named by `trait_id` in the crate's coherence info. The
// per-trait implementation list is created lazily on first use.
fn add_trait_method(trait_id: def_id, implementation: @Impl) {
let implementation_list;
alt self.crate_context.coherence_info.extension_methods
.find(trait_id) {
none {
// First impl seen for this trait: create and insert the list.
implementation_list = @dvec();
self.crate_context.coherence_info.extension_methods
.insert(trait_id, implementation_list);
}
some(existing_implementation_list) {
implementation_list = existing_implementation_list;
}
}
implementation_list.push(implementation);
}
fn check_implementation_coherence(_trait_def_id: def_id,
implementations: @dvec<@item>) {
implementations: @dvec<@Impl>) {
// Unify pairs of polytypes.
for implementations.eachi |i, implementation_a| {
for range(0, implementations.len()) |i| {
let implementation_a = implementations.get_elt(i);
let polytype_a =
self.get_self_type_for_implementation(implementation_a);
for range(i + 1, implementations.len()) |j| {
@ -216,12 +281,12 @@ class CoherenceChecker {
if self.polytypes_unify(polytype_a, polytype_b) {
let session = self.crate_context.tcx.sess;
session.span_err(implementation_b.span,
session.span_err(self.span_of_impl(implementation_b),
~"conflicting implementations for a \
trait");
session.span_note(
implementation_a.span,
~"note conflicting implementation here");
trait");
session.span_note(self.span_of_impl(implementation_a),
~"note conflicting implementation \
here");
}
}
}
@ -241,7 +306,7 @@ class CoherenceChecker {
// type variables.
fn universally_quantify_polytype(polytype: ty_param_bounds_and_ty) -> t {
let self_region =
if polytype.rp {none}
if !polytype.rp {none}
else {some(self.inference_context.next_region_var_nb())};
let bounds_count = polytype.bounds.len();
@ -257,25 +322,22 @@ class CoherenceChecker {
ret subst(self.crate_context.tcx, substitutions, polytype.ty);
}
fn get_self_type_for_implementation(implementation: @item)
fn get_self_type_for_implementation(implementation: @Impl)
-> ty_param_bounds_and_ty {
alt implementation.node {
item_impl(*) {
let def = local_def(implementation.id);
ret self.crate_context.tcx.tcache.get(def);
}
_ {
self.crate_context.tcx.sess.span_bug(
implementation.span,
~"not an implementation");
}
}
ret self.crate_context.tcx.tcache.get(implementation.did);
}
// Privileged scope checking
fn check_privileged_scopes(crate: @crate) {
// Gather up all privileged types.
let privileged_types =
self.gather_privileged_types(crate.node.module.items);
for privileged_types.each |privileged_type| {
self.privileged_types.insert(privileged_type, ());
}
visit_crate(*crate, (), mk_vt(@{
visit_item: |item, _context, visitor| {
alt item.node {
@ -301,7 +363,7 @@ class CoherenceChecker {
}
}
item_impl(_, optional_trait_ref, _, _) {
alt self.base_type_def_ids.find(item.id) {
alt self.base_type_def_ids.find(local_def(item.id)) {
none {
// Nothing to do.
}
@ -329,19 +391,18 @@ class CoherenceChecker {
let session =
self.crate_context.tcx.sess;
session.span_warn(item.span,
~"cannot \
implement \
inherent \
methods for a \
type outside \
the scope the \
type was \
defined in; \
define and \
implement a \
trait \
instead");
session.span_err(item.span,
~"cannot \
implement \
inherent \
methods for a \
type outside \
the scope the \
type was \
defined in; \
define and \
implement a \
trait instead");
}
some(trait_ref) {
// This is OK if and only if the
@ -357,13 +418,13 @@ class CoherenceChecker {
if trait_id.crate != local_crate {
let session = self
.crate_context.tcx.sess;
session.span_warn(item.span,
~"cannot \
session.span_err(item.span,
~"cannot \
provide \
an \
extension \
implement\
ation \
implementa\
tion \
for a \
trait not \
defined \
@ -405,6 +466,164 @@ class CoherenceChecker {
ret results;
}
// Converts an implementation in the AST to an Impl structure.
//
// Builds the method list from the impl's AST methods, recording each
// method's local def ID, type-parameter count, and name. Calling this
// on any item other than an item_impl is a compiler bug (span_bug).
fn create_impl_from_item(item: @item) -> @Impl {
alt item.node {
item_impl(ty_params, _, _, ast_methods) {
let mut methods = ~[];
for ast_methods.each |ast_method| {
push(methods, @{
did: local_def(ast_method.id),
n_tps: ast_method.tps.len(),
ident: ast_method.ident
});
}
ret @{
did: local_def(item.id),
ident: item.ident,
methods: methods
};
}
_ {
self.crate_context.tcx.sess.span_bug(item.span,
~"can't convert a \
non-impl to an impl");
}
}
}
// Returns the source span of a local impl by looking its node up in
// the AST map. Only valid for impls defined in the local crate
// (asserted); external impls have no span available here. Anything
// other than a node_item at that def ID is a compiler bug.
fn span_of_impl(implementation: @Impl) -> span {
assert implementation.did.crate == local_crate;
alt self.crate_context.tcx.items.find(implementation.did.node) {
some(node_item(item, _)) {
ret item.span;
}
_ {
self.crate_context.tcx.sess.bug(~"span_of_impl() called on \
something that wasn't an \
impl!");
}
}
}
// External crate handling
// Loads every impl exported by the external module `module_def_id`
// (via the crate metadata in `crate_store`) and registers each one in
// the coherence tables: trait impls go into extension_methods, and
// impls whose self type has a nominal base get recorded as inherent
// methods of that base type.
//
// impls_seen: dedup set shared across calls, since the same impl can
//     be reachable through more than one module path.
fn add_impls_for_module(impls_seen: hashmap<def_id,()>,
crate_store: cstore,
module_def_id: def_id) {
let implementations = get_impls_for_mod(crate_store,
module_def_id,
none);
for (*implementations).each |implementation| {
// Make sure we don't visit the same implementation
// multiple times.
alt impls_seen.find(implementation.did) {
none {
// Good. Continue.
impls_seen.insert(implementation.did, ());
}
some(_) {
// Skip this one.
again;
}
}
// Fetch the impl's self type and (optional) implemented trait
// from the external crate's metadata.
let self_type = lookup_item_type(self.crate_context.tcx,
implementation.did);
let optional_trait =
get_impl_trait(self.crate_context.tcx,
implementation.did);
alt optional_trait {
none {
// This is an inherent method. There should be
// no problems here, but perform a sanity check
// anyway.
alt get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
none {
let session = self.crate_context.tcx.sess;
session.bug(#fmt("no base type for \
external impl with no \
trait: %s (type %s)!",
*implementation.ident,
ty_to_str
(self.crate_context.tcx,
self_type.ty)));
}
some(_) {
// Nothing to do.
}
}
}
some(trait_type) {
// A trait impl: register it under the trait's def ID.
alt get(trait_type).struct {
ty_trait(trait_id, _) {
self.add_trait_method(trait_id,
implementation);
}
_ {
self.crate_context.tcx.sess
.bug(~"trait type returned is not a \
trait");
}
}
}
}
// Add the implementation to the mapping from
// implementation to base type def ID, if there is a base
// type for this implementation.
alt get_base_type_def_id(self.inference_context,
dummy_sp(),
self_type.ty) {
none {
// Nothing to do.
}
some(base_type_def_id) {
self.add_inherent_method(base_type_def_id,
implementation);
self.base_type_def_ids.insert(implementation.did,
base_type_def_id);
}
}
}
}
// Walks every linked external crate and registers all of its impls in
// the coherence tables. For each crate this visits the crate root
// (node 0) and then every module reachable through the crate's path
// table, delegating the per-module work to add_impls_for_module().
// The shared `impls_seen` set prevents double-registration of impls
// reachable via multiple paths.
fn add_external_crates() {
let impls_seen = new_def_hash();
let crate_store = self.crate_context.tcx.sess.cstore;
do iter_crate_data(crate_store) |crate_number, _crate_metadata| {
// The crate root module has node ID 0 by convention.
self.add_impls_for_module(impls_seen,
crate_store,
{ crate: crate_number, node: 0 });
for each_path(crate_store, crate_number) |path_entry| {
let module_def_id;
alt path_entry.def_like {
dl_def(def_mod(def_id)) {
module_def_id = def_id;
}
dl_def(_) | dl_impl(_) | dl_field {
// Skip this.
again;
}
}
self.add_impls_for_module(impls_seen,
crate_store,
module_def_id);
}
}
}
}
fn check_coherence(crate_context: @crate_ctxt, crate: @crate) {

View File

@ -437,14 +437,24 @@ fn resolve_borrowings(cx: infer_ctxt) {
}
}
impl methods for ures {
trait then {
fn then<T:copy>(f: fn() -> result<T,ty::type_err>)
-> result<T,ty::type_err>;
}
impl methods of then for ures {
fn then<T:copy>(f: fn() -> result<T,ty::type_err>)
-> result<T,ty::type_err> {
self.chain(|_i| f())
}
}
impl methods<T:copy> for cres<T> {
trait cres_helpers<T> {
fn to_ures() -> ures;
fn compare(t: T, f: fn() -> ty::type_err) -> cres<T>;
}
impl methods<T:copy> of cres_helpers<T> for cres<T> {
fn to_ures() -> ures {
alt self {
ok(_v) { ok(()) }
@ -1097,19 +1107,22 @@ const force_rvar: uint = 0b00100000;
const force_ivar: uint = 0b01000000;
const force_all: uint = 0b01110000;
type resolve_state = @{
type resolve_state_ = {
infcx: infer_ctxt,
modes: uint,
mut err: option<fixup_err>,
mut v_seen: ~[tv_vid]
};
fn resolver(infcx: infer_ctxt, modes: uint)
-> resolve_state {
@{infcx: infcx,
modes: modes,
mut err: none,
mut v_seen: ~[]}
enum resolve_state {
resolve_state_(@resolve_state_)
}
fn resolver(infcx: infer_ctxt, modes: uint) -> resolve_state {
resolve_state_(@{infcx: infcx,
modes: modes,
mut err: none,
mut v_seen: ~[]})
}
impl methods for resolve_state {

View File

@ -6,6 +6,7 @@
of the natural-language documentation for a crate."
)];
import doc::item_utils;
import syntax::ast;
import syntax::ast_map;
import std::map::hashmap;
@ -48,13 +49,13 @@ fn fold_crate(
};
{
topmod: {
topmod: doc::moddoc_({
item: {
name: option::get_default(attrs.name, doc.topmod.name())
with doc.topmod.item
}
with doc.topmod
}
with *doc.topmod
})
}
}

View File

@ -5,6 +5,8 @@
* is interpreted as the brief description.
*/
import doc::item_utils;
export mk_pass;
fn mk_pass() -> pass {

View File

@ -2,10 +2,14 @@
type ast_id = int;
type doc = {
type doc_ = {
pages: ~[page]
};
enum doc {
doc_(doc_)
}
enum page {
cratepage(cratedoc),
itempage(itemtag)
@ -59,12 +63,16 @@ type simpleitemdoc = {
sig: option<~str>
};
type moddoc = {
type moddoc_ = {
item: itemdoc,
items: ~[itemtag],
index: option<index>
};
enum moddoc {
moddoc_(moddoc_)
}
type nmoddoc = {
item: itemdoc,
fns: ~[fndoc],
@ -221,7 +229,18 @@ impl util for moddoc {
}
}
impl util for ~[page] {
trait page_utils {
fn mods() -> ~[moddoc];
fn nmods() -> ~[nmoddoc];
fn fns() -> ~[fndoc];
fn consts() -> ~[constdoc];
fn enums() -> ~[enumdoc];
fn traits() -> ~[traitdoc];
fn impls() -> ~[impldoc];
fn types() -> ~[tydoc];
}
impl util of page_utils for ~[page] {
fn mods() -> ~[moddoc] {
do vec::filter_map(self) |page| {
@ -339,7 +358,16 @@ impl of item for impldoc {
fn item() -> itemdoc { self.item }
}
impl util<A:item> for A {
trait item_utils {
fn id() -> ast_id;
fn name() -> ~str;
fn path() -> ~[~str];
fn brief() -> option<~str>;
fn desc() -> option<~str>;
fn sections() -> ~[section];
}
impl util<A:item> of item_utils for A {
fn id() -> ast_id {
self.item().id
}

View File

@ -1,6 +1,7 @@
//! Converts the Rust AST to the rustdoc document model
import syntax::ast;
import doc::item_utils;
export from_srv, extract;
@ -20,13 +21,13 @@ fn extract(
crate: @ast::crate,
default_name: ~str
) -> doc::doc {
{
doc::doc_({
pages: ~[
doc::cratepage({
topmod: top_moddoc_from_crate(crate, default_name),
})
]
}
})
}
fn top_moddoc_from_crate(
@ -53,7 +54,7 @@ fn moddoc_from_mod(
itemdoc: doc::itemdoc,
module: ast::_mod
) -> doc::moddoc {
{
doc::moddoc_({
item: itemdoc,
items: do vec::filter_map(module.items) |item| {
let itemdoc = mk_itemdoc(item.id, item.ident);
@ -104,7 +105,7 @@ fn moddoc_from_mod(
}
},
index: none
}
})
}
fn nmoddoc_from_mod(

View File

@ -132,7 +132,7 @@ fn default_par_fold<T:send copy>(ctxt: T) -> fold<T> {
}
fn default_seq_fold_doc<T>(fold: fold<T>, doc: doc::doc) -> doc::doc {
{
doc::doc_({
pages: do vec::map(doc.pages) |page| {
alt page {
doc::cratepage(doc) {
@ -143,8 +143,8 @@ fn default_seq_fold_doc<T>(fold: fold<T>, doc: doc::doc) -> doc::doc {
}
}
}
with doc
}
with *doc
})
}
fn default_seq_fold_crate<T>(
@ -167,39 +167,39 @@ fn default_any_fold_mod<T:send copy>(
fold: fold<T>,
doc: doc::moddoc
) -> doc::moddoc {
{
doc::moddoc_({
item: fold.fold_item(fold, doc.item),
items: par::map(doc.items, |itemtag, copy fold| {
fold_itemtag(fold, itemtag)
})
with doc
}
with *doc
})
}
fn default_seq_fold_mod<T>(
fold: fold<T>,
doc: doc::moddoc
) -> doc::moddoc {
{
doc::moddoc_({
item: fold.fold_item(fold, doc.item),
items: vec::map(doc.items, |itemtag| {
fold_itemtag(fold, itemtag)
})
with doc
}
with *doc
})
}
fn default_par_fold_mod<T:send copy>(
fold: fold<T>,
doc: doc::moddoc
) -> doc::moddoc {
{
doc::moddoc_({
item: fold.fold_item(fold, doc.item),
items: par::map(doc.items, |itemtag, copy fold| {
fold_itemtag(fold, itemtag)
})
with doc
}
with *doc
})
}
fn default_any_fold_nmod<T:send copy>(

View File

@ -1,5 +1,7 @@
//! Build indexes as appropriate for the markdown pass
import doc::item_utils;
export mk_pass;
fn mk_pass(config: config::config) -> pass {
@ -31,10 +33,10 @@ fn fold_mod(
let doc = fold::default_any_fold_mod(fold, doc);
{
doc::moddoc_({
index: some(build_mod_index(doc, fold.ctxt))
with doc
}
with *doc
})
}
fn fold_nmod(

View File

@ -1,7 +1,9 @@
//! Generate markdown from a document tree
import doc::item_utils;
import markdown_writer::writer;
import markdown_writer::writer_util;
import markdown_writer::writer_utils;
import markdown_writer::writer_factory;
export mk_pass;
@ -513,20 +515,20 @@ fn should_insert_blank_line_after_fn_signature() {
#[test]
fn should_correctly_indent_fn_signature() {
let doc = test::create_doc(~"fn a() { }");
let doc = {
let doc = doc::doc_({
pages: ~[
doc::cratepage({
topmod: {
topmod: doc::moddoc_({
items: ~[doc::fntag({
sig: some(~"line 1\nline 2")
with doc.cratemod().fns()[0]
})]
with doc.cratemod()
}
with *doc.cratemod()
})
with doc.cratedoc()
})
]
};
});
let markdown = test::write_markdown_str(doc);
assert str::contains(markdown, ~" line 1\n line 2");
}

View File

@ -1,7 +1,10 @@
import doc::item_utils;
export writeinstr;
export writer;
export writer_factory;
export writer_util;
export writer_utils;
export make_writer_factory;
export future_writer_factory;
export make_filename;
@ -14,7 +17,13 @@ enum writeinstr {
type writer = fn~(+writeinstr);
type writer_factory = fn~(page: doc::page) -> writer;
impl writer_util for writer {
trait writer_utils {
fn write_str(str: ~str);
fn write_line(str: ~str);
fn write_done();
}
impl writer_util of writer_utils for writer {
fn write_str(str: ~str) {
self(write(str));
}

View File

@ -5,6 +5,7 @@
* individual modules, pages for the crate, indexes, etc.
*/
import doc::{item_utils, page_utils};
import syntax::ast;
export mk_pass;
@ -51,9 +52,9 @@ fn make_doc_from_pages(page_port: page_port) -> doc::doc {
break;
}
}
{
doc::doc_({
pages: pages
}
})
}
fn find_pages(doc: doc::doc, page_chan: page_chan) {
@ -103,7 +104,7 @@ fn fold_mod(
}
fn strip_mod(doc: doc::moddoc) -> doc::moddoc {
{
doc::moddoc_({
items: do vec::filter(doc.items) |item| {
alt item {
doc::modtag(_) { false }
@ -111,8 +112,8 @@ fn strip_mod(doc: doc::moddoc) -> doc::moddoc {
_ { true }
}
}
with doc
}
with *doc
})
}
fn fold_nmod(

View File

@ -1,5 +1,6 @@
//! Records the full path to items
import doc::item_utils;
import syntax::ast;
export mk_pass;
@ -46,10 +47,10 @@ fn fold_mod(fold: fold::fold<ctxt>, doc: doc::moddoc) -> doc::moddoc {
let doc = fold::default_any_fold_mod(fold, doc);
if !is_topmod { vec::pop(fold.ctxt.path); }
{
doc::moddoc_({
item: fold.fold_item(fold, doc.item)
with doc
}
with *doc
})
}
fn fold_nmod(fold: fold::fold<ctxt>, doc: doc::nmoddoc) -> doc::nmoddoc {

View File

@ -1,5 +1,6 @@
//! Prunes things with the #[doc(hidden)] attribute
import doc::item_utils;
import std::map::hashmap;
export mk_pass;
@ -24,12 +25,12 @@ fn fold_mod(
) -> doc::moddoc {
let doc = fold::default_any_fold_mod(fold, doc);
{
doc::moddoc_({
items: vec::filter(doc.items, |itemtag| {
!is_hidden(fold.ctxt, itemtag.item())
})
with doc
}
with *doc
})
}
fn is_hidden(srv: astsrv::srv, doc: doc::itemdoc) -> bool {

View File

@ -1,5 +1,6 @@
//! Prunes branches of the tree that are not exported
import doc::item_utils;
import syntax::ast;
import syntax::ast_util;
import syntax::ast_map;
@ -24,10 +25,10 @@ fn run(srv: astsrv::srv, doc: doc::doc) -> doc::doc {
fn fold_mod(fold: fold::fold<astsrv::srv>, doc: doc::moddoc) -> doc::moddoc {
let doc = fold::default_any_fold_mod(fold, doc);
{
doc::moddoc_({
items: exported_items(fold.ctxt, doc)
with doc
}
with *doc
})
}
fn exported_items(srv: astsrv::srv, doc: doc::moddoc) -> ~[doc::itemtag] {

View File

@ -1,5 +1,6 @@
//! Finds docs for reexported items and duplicates them
import doc::item_utils;
import std::map;
import std::map::hashmap;
import std::list;
@ -331,10 +332,10 @@ fn merge_reexports(
let new_items = get_new_items(path, fold.ctxt);
#debug("merging into %?: %?", path, new_items);
{
doc::moddoc_({
items: (doc.items + new_items)
with doc
}
with *doc
})
}
fn get_new_items(path: ~[~str], path_map: path_map) -> ~[doc::itemtag] {
@ -352,11 +353,11 @@ fn merge_reexports(
fn reexport_doc(doc: doc::itemtag, name: ~str) -> doc::itemtag {
alt doc {
doc::modtag(doc @ {item, _}) {
doc::modtag({
doc::modtag(doc @ doc::moddoc_({item, _})) {
doc::modtag(doc::moddoc_({
item: reexport(item, name)
with doc
})
with *doc
}))
}
doc::nmodtag(_) { fail }
doc::consttag(doc @ {item, _}) {

View File

@ -1,4 +1,5 @@
// Some utility interfaces
import doc::item_utils;
import doc::item;
import doc::util;
@ -47,39 +48,39 @@ fn test_run_passes() {
_srv: astsrv::srv,
doc: doc::doc
) -> doc::doc {
{
doc::doc_({
pages: ~[
doc::cratepage({
topmod: {
topmod: doc::moddoc_({
item: {
name: doc.cratemod().name() + ~"two"
with doc.cratemod().item
},
items: ~[],
index: none
}
})
})
]
}
})
}
fn pass2(
_srv: astsrv::srv,
doc: doc::doc
) -> doc::doc {
{
doc::doc_({
pages: ~[
doc::cratepage({
topmod: {
topmod: doc::moddoc_({
item: {
name: doc.cratemod().name() + ~"three"
with doc.cratemod().item
},
items: ~[],
index: none
}
})
})
]
}
})
}
let source = ~"";
do astsrv::from_str(source) |srv| {

View File

@ -1,5 +1,7 @@
//! Breaks rustdocs into sections according to their headers
import doc::item_utils;
export mk_pass;
fn mk_pass() -> pass {

View File

@ -1,5 +1,6 @@
//! Sorts items by name
import doc::item_utils;
export mk_pass;
fn mk_pass() -> pass {

View File

@ -1,5 +1,7 @@
//! Sorts items by type
import doc::item_utils;
export mk_pass;
fn mk_pass() -> pass {

View File

@ -1,5 +1,6 @@
//! A general sorting pass
import doc::item_utils;
import std::sort;
export item_lteq, mk_pass;
@ -34,10 +35,10 @@ fn fold_mod(
doc: doc::moddoc
) -> doc::moddoc {
let doc = fold::default_any_fold_mod(fold, doc);
{
doc::moddoc_({
items: sort::merge_sort(fold.ctxt, doc.items)
with doc
}
with *doc
})
}
#[test]

View File

@ -1,5 +1,7 @@
//! Generic pass for performing an operation on all descriptions
import doc::item_utils;
export mk_pass;
fn mk_pass(name: ~str, +op: fn~(~str) -> ~str) -> pass {

View File

@ -5,6 +5,8 @@
* is interpreted as the brief description.
*/
import doc::item_utils;
export mk_pass;
fn mk_pass() -> pass {

View File

@ -1,5 +1,6 @@
//! Pulls type information out of the AST and attaches it to the document
import doc::item_utils;
import syntax::ast;
import syntax::print::pprust;
import syntax::ast_map;

View File

@ -1 +1,4 @@
impl methods1 for uint { fn me() -> uint { self } }
trait me {
fn me() -> uint;
}
impl methods1 of me for uint { fn me() -> uint { self } }

View File

@ -1,6 +1,10 @@
#[link(name="cci_impl_lib", vers="0.0")];
impl helpers for uint {
trait uint_helpers {
fn to(v: uint, f: fn(uint));
}
impl helpers of uint_helpers for uint {
#[inline]
fn to(v: uint, f: fn(uint)) {
let mut i = self;
@ -9,4 +13,4 @@ impl helpers for uint {
i += 1u;
}
}
}
}

View File

@ -2,13 +2,18 @@
export rust;
import name_pool::add;
import name_pool::methods;
mod name_pool {
type name_pool = ();
impl methods for name_pool {
trait add {
fn add(s: ~str);
}
impl methods of add for name_pool {
fn add(s: ~str) {
}
}
@ -16,12 +21,19 @@ mod name_pool {
mod rust {
import name_pool::add;
export add;
export rt;
export methods;
export cx;
type rt = @();
impl methods for rt {
trait cx {
fn cx();
}
impl methods of cx for rt {
fn cx() {
}
}

View File

@ -2,4 +2,12 @@
#[crate_type = "lib"];
type t1 = uint;
impl t2 for ~str { }
trait foo {
fn foo();
}
impl t2 of foo for ~str {
fn foo() {}
}

View File

@ -7,6 +7,7 @@ An implementation of the Graph500 Breadth First Search problem in Rust.
use std;
import std::time;
import std::map;
import std::map::map;
import std::map::hashmap;
import std::deque;
import std::deque::t;

View File

@ -19,7 +19,12 @@ import std::map::hashmap;
type cmplx = {re: f64, im: f64};
type line = {i: uint, b: ~[u8]};
impl arith for cmplx {
trait times_and_plus {
fn *(x: cmplx) -> cmplx;
fn +(x: cmplx) -> cmplx;
}
impl arith of times_and_plus for cmplx {
fn *(x: cmplx) -> cmplx {
{re: self.re*x.re - self.im*x.im, im: self.re*x.im + self.im*x.re}
}

View File

@ -1,3 +0,0 @@
impl methods1 for uint { fn me() -> uint { self } } //~ NOTE candidate #1 is `methods1::me`
impl methods2 for uint { fn me() -> uint { self } } //~ NOTE candidate #2 is `methods2::me`
fn main() { 1u.me(); } //~ ERROR multiple applicable methods in scope

View File

@ -2,6 +2,10 @@
// aux-build:ambig_impl_2_lib.rs
use ambig_impl_2_lib;
import ambig_impl_2_lib::methods1;
impl methods2 for uint { fn me() -> uint { self } } //~ NOTE is `methods2::me`
import ambig_impl_2_lib::me;
trait me {
fn me() -> uint;
}
impl methods2 of me for uint { fn me() -> uint { self } } //~ NOTE is `methods2::me`
fn main() { 1u.me(); } //~ ERROR multiple applicable methods in scope
//~^ NOTE is `ambig_impl_2_lib::methods1::me`

View File

@ -1,12 +1,16 @@
impl methods for ~[uint] {
trait foo {
fn foo() -> int;
}
impl methods of foo for ~[uint] {
fn foo() -> int {1} //~ NOTE candidate #1 is `methods::foo`
}
impl methods for ~[int] {
impl methods of foo for ~[int] {
fn foo() -> int {2} //~ NOTE candidate #2 is `methods::foo`
}
fn main() {
let x = ~[];
x.foo(); //~ ERROR multiple applicable methods in scope
}
}

View File

@ -2,7 +2,11 @@ fn foo<T>() {
1u.bar::<T>(); //~ ERROR: missing `copy`
}
impl methods for uint {
trait bar {
fn bar<T:copy>();
}
impl methods of bar for uint {
fn bar<T:copy>() {
}
}

View File

@ -1,6 +1,11 @@
type point = { x: int, y: int };
impl foo for point {
trait add_and_times {
pure fn +(z: int) -> int;
fn *(z: int) -> int;
}
impl foo of add_and_times for point {
pure fn +(z: int) -> int { self.x + self.y + z }
fn *(z: int) -> int { self.x * self.y * z }
}

View File

@ -1,6 +1,12 @@
type point = { x: int, y: int };
impl foo for point {
trait methods {
fn impurem();
fn blockm(f: fn());
pure fn purem();
}
impl foo of methods for point {
fn impurem() {
}

View File

@ -1,11 +1,9 @@
iface bar { fn dup() -> self; fn blah<X>(); }
impl of bar for int { fn dup() -> int { self } fn blah<X>() {} }
impl of bar for uint { fn dup() -> uint { self } fn blah<X>() {} }
impl of bar for uint { fn dup() -> uint { self } fn blah<X>() {} }
fn main() {
10.dup::<int>(); //~ ERROR does not take type parameters
10.blah::<int, int>(); //~ ERROR incorrect number of type parameters
10u.dup(); //~ ERROR multiple applicable methods
(10 as bar).dup(); //~ ERROR contains a self type
}

View File

@ -3,10 +3,14 @@
// of such a type could ever be constructed.
enum t = @t; //~ ERROR this type cannot be instantiated
trait to_str_2 {
fn to_str() -> ~str;
}
// I use an impl here because it will cause
// the compiler to attempt autoderef and then
// try to resolve the method.
impl methods for t {
impl methods of to_str_2 for t {
fn to_str() -> ~str { ~"t" }
}

View File

@ -1,4 +1,8 @@
impl monad<A> for ~[A] {
trait vec_monad<A> {
fn bind<B>(f: fn(A) -> ~[B]);
}
impl monad<A> of vec_monad<A> for ~[A] {
fn bind<B>(f: fn(A) -> ~[B]) {
let mut r = fail;
for self.each |elt| { r += f(elt); }
@ -8,4 +12,4 @@ impl monad<A> for ~[A] {
}
fn main() {
["hi"].bind({|x| [x] });
}
}

View File

@ -4,7 +4,11 @@ type parser = {
tokens: dvec<int>,
};
impl parser for parser {
trait parse {
fn parse() -> ~[mut int];
}
impl parser of parse for parser {
fn parse() -> ~[mut int] {
dvec::unwrap(self.tokens) //~ ERROR illegal move from self
}

View File

@ -2,7 +2,11 @@ import libc, unsafe;
enum malloc_pool = ();
impl methods for malloc_pool {
trait alloc {
fn alloc(sz: int, align: int) -> *();
}
impl methods of alloc for malloc_pool {
fn alloc(sz: int, align: int) -> *() {
fail;
}

View File

@ -8,11 +8,15 @@ pure fn modify_in_box(sum: @mut {f: int}) {
sum.f = 3; //~ ERROR assigning to mutable field prohibited in pure context
}
impl foo for int {
trait modify_in_box_rec {
pure fn modify_in_box_rec(sum: @{mut f: int});
}
impl foo of modify_in_box_rec for int {
pure fn modify_in_box_rec(sum: @{mut f: int}) {
sum.f = self; //~ ERROR assigning to mutable field prohibited in pure context
}
}
fn main() {
}
}

View File

@ -1,6 +1,13 @@
type point = { x: int, y: int };
impl foo for point {
trait operators {
pure fn +(z: int) -> int;
fn *(z: int) -> int;
fn [](z: int) -> int;
fn unary-() -> int;
}
impl foo of operators for point {
// expr_binary
pure fn +(z: int) -> int { self.x + self.y + z }
fn *(z: int) -> int { self.x * self.y * z }

View File

@ -5,7 +5,12 @@ type a = &int;
type b = @a;
type c = {f: @b};
impl methods for c {
trait set_f {
fn set_f_ok(b: @b/&self);
fn set_f_bad(b: @b);
}
impl methods of set_f for c {
fn set_f_ok(b: @b/&self) {
self.f = b;
}
@ -15,4 +20,4 @@ impl methods for c {
}
}
fn main() {}
fn main() {}

View File

@ -9,7 +9,11 @@ iface foo {
type with_foo = {mut f: foo};
impl methods for with_foo {
trait set_foo_foo {
fn set_foo(f: foo);
}
impl methods of set_foo_foo for with_foo {
fn set_foo(f: foo) {
self.f = f; //~ ERROR mismatched types: expected `foo/&self` but found `foo/&`
}
@ -23,10 +27,14 @@ iface bar {
type with_bar = {mut f: bar};
impl methods for with_bar {
trait set_foo_bar {
fn set_foo(f: bar);
}
impl methods of set_foo_bar for with_bar {
fn set_foo(f: bar) {
self.f = f;
}
}
fn main() {}
fn main() {}

View File

@ -1,4 +1,8 @@
impl methods for uint {
trait double {
fn double() -> uint;
}
impl methods of double for uint {
fn double() -> uint { self * 2u }
}

View File

@ -1,8 +1,12 @@
impl methods for uint {
trait double {
fn double() -> uint;
}
impl methods of double for uint {
fn double() -> uint { self }
}
impl methods for @uint {
impl methods of double for @uint {
fn double() -> uint { *self * 2u }
}

View File

@ -1,4 +1,8 @@
impl methods for @@uint {
trait double {
fn double() -> uint;
}
impl methods of double for @@uint {
fn double() -> uint { **self * 2u }
}

View File

@ -1,4 +1,8 @@
impl methods for uint {
trait double {
fn double() -> uint;
}
impl methods of double for uint {
fn double() -> uint { self * 2u }
}

View File

@ -1,4 +1,8 @@
impl methods for uint {
trait double {
fn double() -> uint;
}
impl methods of double for uint {
fn double() -> uint { self * 2u }
}

View File

@ -2,7 +2,11 @@ enum foo = {mut bar: baz};
enum baz = @{mut baz: int};
impl quuux for foo {
trait frob {
fn frob();
}
impl quuux of frob for foo {
fn frob() {
really_impure(self.bar);
}
@ -13,4 +17,4 @@ fn really_impure(++bar: baz) {
bar.baz = 3;
}
fn main() {}
fn main() {}

View File

@ -3,6 +3,7 @@
use cci_impl_lib;
import cci_impl_lib::helpers;
import cci_impl_lib::uint_helpers;
fn main() {
//let bt0 = sys::frame_address();

View File

@ -9,6 +9,8 @@ use crate_method_reexport_grrrrrrr2;
fn main() {
import crate_method_reexport_grrrrrrr2::rust::methods;
import crate_method_reexport_grrrrrrr2::rust::add;
import crate_method_reexport_grrrrrrr2::rust::cx;
let x = @();
x.cx();
let y = ();

View File

@ -1,6 +1,6 @@
fn main() {
let arr = [1,2,3]/3;
let struct = {a: 13u8, b: arr, c: 42};
let s = sys::log_str(struct);
let struc = {a: 13u8, b: arr, c: 42};
let s = sys::log_str(struc);
assert(s == ~"(13, [1, 2, 3]/3, 42)");
}

Some files were not shown because too many files have changed in this diff Show More