libsyntax: De-export a lot of libsyntax. rs=deëxporting
This commit is contained in:
parent 66b07f1e5d
commit 95b892c8a7
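The change is the same in every file touched below: module-level `export` lists (and `#[legacy_exports]` attributes) are removed, and the items they named are marked `pub` at their definitions instead. A minimal before/after sketch of that pattern, in the same pre-1.0 dialect as the diff (the item name is taken from the changes below and the body is elided, so the fragment is illustrative rather than compilable):

    // Before: visibility granted through a crate-internal export list.
    export binop_to_str;

    pure fn binop_to_str(op: binop) -> ~str { /* ... */ }

    // After: visibility declared directly on the item.
    pub pure fn binop_to_str(op: binop) -> ~str { /* ... */ }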
@@ -29,12 +29,12 @@ use std::map::HashMap;
 use std::map;
 use std;

-enum path_elt {
+pub enum path_elt {
 path_mod(ident),
 path_name(ident)
 }

-impl path_elt : cmp::Eq {
+pub impl path_elt : cmp::Eq {
 pure fn eq(&self, other: &path_elt) -> bool {
 match (*self) {
 path_mod(e0a) => {
@@ -54,10 +54,10 @@ impl path_elt : cmp::Eq {
 pure fn ne(&self, other: &path_elt) -> bool { !(*self).eq(other) }
 }

-type path = ~[path_elt];
+pub type path = ~[path_elt];

-fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner)
-    -> ~str {
+pub fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner)
+    -> ~str {
 let strs = do p.map |e| {
 match *e {
 path_mod(s) => *itr.get(s),
@@ -67,7 +67,7 @@ fn path_to_str_with_sep(p: &[path_elt], sep: ~str, itr: @ident_interner)
 str::connect(strs, sep)
 }

-fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str {
+pub fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str {
 if vec::is_empty(p) {
 //FIXME /* FIXME (#2543) */ copy *i
 *itr.get(i)
@@ -76,18 +76,18 @@ fn path_ident_to_str(p: path, i: ident, itr: @ident_interner) -> ~str {
 }
 }

-fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str {
+pub fn path_to_str(p: &[path_elt], itr: @ident_interner) -> ~str {
 path_to_str_with_sep(p, ~"::", itr)
 }

-fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str {
+pub fn path_elt_to_str(pe: path_elt, itr: @ident_interner) -> ~str {
 match pe {
 path_mod(s) => *itr.get(s),
 path_name(s) => *itr.get(s)
 }
 }

-enum ast_node {
+pub enum ast_node {
 node_item(@item, @path),
 node_foreign_item(@foreign_item, foreign_abi, @path),
 node_trait_method(@trait_method, def_id /* trait did */,
@@ -107,20 +107,20 @@ enum ast_node {
 node_struct_ctor(@struct_def, @item, @path),
 }

-type map = std::map::HashMap<node_id, ast_node>;
-struct ctx {
+pub type map = std::map::HashMap<node_id, ast_node>;
+pub struct ctx {
 map: map,
 mut path: path,
 mut local_id: uint,
 diag: span_handler,
 }
-type vt = visit::vt<ctx>;
+pub type vt = visit::vt<ctx>;

-fn extend(cx: ctx, +elt: ident) -> @path {
+pub fn extend(cx: ctx, +elt: ident) -> @path {
 @(vec::append(cx.path, ~[path_name(elt)]))
 }

-fn mk_ast_map_visitor() -> vt {
+pub fn mk_ast_map_visitor() -> vt {
 return visit::mk_vt(@visit::Visitor {
 visit_item: map_item,
 visit_expr: map_expr,
@@ -134,7 +134,7 @@ fn mk_ast_map_visitor() -> vt {
 });
 }

-fn map_crate(diag: span_handler, c: crate) -> map {
+pub fn map_crate(diag: span_handler, c: crate) -> map {
 let cx = ctx {
 map: std::map::HashMap(),
 mut path: ~[],
@@ -148,8 +148,8 @@ fn map_crate(diag: span_handler, c: crate) -> map {
 // Used for items loaded from external crate that are being inlined into this
 // crate. The `path` should be the path to the item but should not include
 // the item itself.
-fn map_decoded_item(diag: span_handler,
-map: map, path: path, ii: inlined_item) {
+pub fn map_decoded_item(diag: span_handler,
+map: map, path: path, ii: inlined_item) {
 // I believe it is ok for the local IDs of inlined items from other crates
 // to overlap with the local ids from this crate, so just generate the ids
 // starting from 0. (In particular, I think these ids are only used in
@@ -182,8 +182,8 @@ fn map_decoded_item(diag: span_handler,
 ii.accept(cx, v);
 }

-fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
-sp: codemap::span, id: node_id, cx: ctx, v: vt) {
+pub fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
+sp: codemap::span, id: node_id, cx: ctx, v: vt) {
 for decl.inputs.each |a| {
 cx.map.insert(a.id,
 node_arg(/* FIXME (#2543) */
@@ -210,12 +210,12 @@ fn map_fn(fk: visit::fn_kind, decl: fn_decl, body: blk,
 visit::visit_fn(fk, decl, body, sp, id, cx, v);
 }

-fn map_block(b: blk, cx: ctx, v: vt) {
+pub fn map_block(b: blk, cx: ctx, v: vt) {
 cx.map.insert(b.node.id, node_block(/* FIXME (#2543) */ copy b));
 visit::visit_block(b, cx, v);
 }

-fn number_pat(cx: ctx, pat: @pat) {
+pub fn number_pat(cx: ctx, pat: @pat) {
 do ast_util::walk_pat(pat) |p| {
 match p.node {
 pat_ident(*) => {
@@ -227,24 +227,24 @@ fn number_pat(cx: ctx, pat: @pat) {
 };
 }

-fn map_local(loc: @local, cx: ctx, v: vt) {
+pub fn map_local(loc: @local, cx: ctx, v: vt) {
 number_pat(cx, loc.node.pat);
 visit::visit_local(loc, cx, v);
 }

-fn map_arm(arm: arm, cx: ctx, v: vt) {
+pub fn map_arm(arm: arm, cx: ctx, v: vt) {
 number_pat(cx, arm.pats[0]);
 visit::visit_arm(arm, cx, v);
 }

-fn map_method(impl_did: def_id, impl_path: @path,
-m: @method, cx: ctx) {
+pub fn map_method(impl_did: def_id, impl_path: @path,
+m: @method, cx: ctx) {
 cx.map.insert(m.id, node_method(m, impl_did, impl_path));
 cx.map.insert(m.self_id, node_local(cx.local_id));
 cx.local_id += 1u;
 }

-fn map_item(i: @item, cx: ctx, v: vt) {
+pub fn map_item(i: @item, cx: ctx, v: vt) {
 let item_path = @/* FIXME (#2543) */ copy cx.path;
 cx.map.insert(i.id, node_item(i, item_path));
 match i.node {
@@ -306,8 +306,8 @@ fn map_item(i: @item, cx: ctx, v: vt) {
 cx.path.pop();
 }

-fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
-ident: ast::ident, cx: ctx, _v: vt) {
+pub fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
+ident: ast::ident, cx: ctx, _v: vt) {
 let p = extend(cx, ident);
 // If this is a tuple-like struct, register the constructor.
 match struct_def.ctor_id {
@@ -324,7 +324,7 @@ fn map_struct_def(struct_def: @ast::struct_def, parent_node: ast_node,
 }
 }

-fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
+pub fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
 match vi.node {
 view_item_export(vps) => for vps.each |vp| {
 let (id, name) = match vp.node {
@@ -341,17 +341,17 @@ fn map_view_item(vi: @view_item, cx: ctx, _v: vt) {
 }
 }

-fn map_expr(ex: @expr, cx: ctx, v: vt) {
+pub fn map_expr(ex: @expr, cx: ctx, v: vt) {
 cx.map.insert(ex.id, node_expr(ex));
 visit::visit_expr(ex, cx, v);
 }

-fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
+pub fn map_stmt(stmt: @stmt, cx: ctx, v: vt) {
 cx.map.insert(stmt_id(*stmt), node_stmt(stmt));
 visit::visit_stmt(stmt, cx, v);
 }

-fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
+pub fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
 match map.find(id) {
 None => {
 fmt!("unknown node (id=%d)", id)
@@ -419,7 +419,7 @@ fn node_id_to_str(map: map, id: node_id, itr: @ident_interner) -> ~str {
 }
 }

-fn node_item_query<Result>(items: map, id: node_id,
+pub fn node_item_query<Result>(items: map, id: node_id,
 query: fn(@item) -> Result,
 error_msg: ~str) -> Result {
 match items.find(id) {
@@ -24,43 +24,44 @@ use core::str;
 use core::to_bytes;
 use core::vec;

-pure fn spanned<T>(+lo: BytePos, +hi: BytePos, +t: T) -> spanned<T> {
+pub pure fn spanned<T>(+lo: BytePos, +hi: BytePos, +t: T) -> spanned<T> {
 respan(mk_sp(lo, hi), move t)
 }

-pure fn respan<T>(sp: span, +t: T) -> spanned<T> {
+pub pure fn respan<T>(sp: span, +t: T) -> spanned<T> {
 spanned {node: t, span: sp}
 }

-pure fn dummy_spanned<T>(+t: T) -> spanned<T> {
+pub pure fn dummy_spanned<T>(+t: T) -> spanned<T> {
 respan(dummy_sp(), move t)
 }

 /* assuming that we're not in macro expansion */
-pure fn mk_sp(+lo: BytePos, +hi: BytePos) -> span {
+pub pure fn mk_sp(+lo: BytePos, +hi: BytePos) -> span {
 span {lo: lo, hi: hi, expn_info: None}
 }

 // make this a const, once the compiler supports it
-pure fn dummy_sp() -> span { return mk_sp(BytePos(0), BytePos(0)); }
+pub pure fn dummy_sp() -> span { return mk_sp(BytePos(0), BytePos(0)); }



-pure fn path_name_i(idents: &[ident], intr: @token::ident_interner) -> ~str {
+pub pure fn path_name_i(idents: &[ident], intr: @token::ident_interner)
+    -> ~str {
 // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
 str::connect(idents.map(|i| *intr.get(*i)), ~"::")
 }


-pure fn path_to_ident(p: @path) -> ident { vec::last(p.idents) }
+pub pure fn path_to_ident(p: @path) -> ident { vec::last(p.idents) }

-pure fn local_def(id: node_id) -> def_id {
+pub pure fn local_def(id: node_id) -> def_id {
 ast::def_id { crate: local_crate, node: id }
 }

-pure fn is_local(did: ast::def_id) -> bool { did.crate == local_crate }
+pub pure fn is_local(did: ast::def_id) -> bool { did.crate == local_crate }

-pure fn stmt_id(s: stmt) -> node_id {
+pub pure fn stmt_id(s: stmt) -> node_id {
 match s.node {
 stmt_decl(_, id) => id,
 stmt_expr(_, id) => id,
@@ -69,7 +70,7 @@ pure fn stmt_id(s: stmt) -> node_id {
 }
 }

-fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
+pub fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
 match d {
 def_variant(enum_id, var_id) => {
 return {enm: enum_id, var: var_id}
@@ -78,7 +79,7 @@ fn variant_def_ids(d: def) -> {enm: def_id, var: def_id} {
 }
 }

-pure fn def_id_of_def(d: def) -> def_id {
+pub pure fn def_id_of_def(d: def) -> def_id {
 match d {
 def_fn(id, _) | def_static_method(id, _, _) | def_mod(id) |
 def_foreign_mod(id) | def_const(id) |
@@ -96,7 +97,7 @@ pure fn def_id_of_def(d: def) -> def_id {
 }
 }

-pure fn binop_to_str(op: binop) -> ~str {
+pub pure fn binop_to_str(op: binop) -> ~str {
 match op {
 add => return ~"+",
 subtract => return ~"-",
@@ -119,7 +120,7 @@ pure fn binop_to_str(op: binop) -> ~str {
 }
 }

-pure fn binop_to_method_name(op: binop) -> Option<~str> {
+pub pure fn binop_to_method_name(op: binop) -> Option<~str> {
 match op {
 add => return Some(~"add"),
 subtract => return Some(~"sub"),
@@ -141,7 +142,7 @@ pure fn binop_to_method_name(op: binop) -> Option<~str> {
 }
 }

-pure fn lazy_binop(b: binop) -> bool {
+pub pure fn lazy_binop(b: binop) -> bool {
 match b {
 and => true,
 or => true,
@@ -149,7 +150,7 @@ pure fn lazy_binop(b: binop) -> bool {
 }
 }

-pure fn is_shift_binop(b: binop) -> bool {
+pub pure fn is_shift_binop(b: binop) -> bool {
 match b {
 shl => true,
 shr => true,
@@ -157,7 +158,7 @@ pure fn is_shift_binop(b: binop) -> bool {
 }
 }

-pure fn unop_to_str(op: unop) -> ~str {
+pub pure fn unop_to_str(op: unop) -> ~str {
 match op {
 box(mt) => if mt == m_mutbl { ~"@mut " } else { ~"@" },
 uniq(mt) => if mt == m_mutbl { ~"~mut " } else { ~"~" },
@@ -167,11 +168,11 @@ pure fn unop_to_str(op: unop) -> ~str {
 }
 }

-pure fn is_path(e: @expr) -> bool {
+pub pure fn is_path(e: @expr) -> bool {
 return match e.node { expr_path(_) => true, _ => false };
 }

-pure fn int_ty_to_str(t: int_ty) -> ~str {
+pub pure fn int_ty_to_str(t: int_ty) -> ~str {
 match t {
 ty_char => ~"u8", // ???
 ty_i => ~"",
@@ -182,7 +183,7 @@ pure fn int_ty_to_str(t: int_ty) -> ~str {
 }
 }

-pure fn int_ty_max(t: int_ty) -> u64 {
+pub pure fn int_ty_max(t: int_ty) -> u64 {
 match t {
 ty_i8 => 0x80u64,
 ty_i16 => 0x8000u64,
@@ -191,7 +192,7 @@ pure fn int_ty_max(t: int_ty) -> u64 {
 }
 }

-pure fn uint_ty_to_str(t: uint_ty) -> ~str {
+pub pure fn uint_ty_to_str(t: uint_ty) -> ~str {
 match t {
 ty_u => ~"u",
 ty_u8 => ~"u8",
@@ -201,7 +202,7 @@ pure fn uint_ty_to_str(t: uint_ty) -> ~str {
 }
 }

-pure fn uint_ty_max(t: uint_ty) -> u64 {
+pub pure fn uint_ty_max(t: uint_ty) -> u64 {
 match t {
 ty_u8 => 0xffu64,
 ty_u16 => 0xffffu64,
@@ -210,11 +211,11 @@ pure fn uint_ty_max(t: uint_ty) -> u64 {
 }
 }

-pure fn float_ty_to_str(t: float_ty) -> ~str {
+pub pure fn float_ty_to_str(t: float_ty) -> ~str {
 match t { ty_f => ~"f", ty_f32 => ~"f32", ty_f64 => ~"f64" }
 }

-fn is_exported(i: ident, m: _mod) -> bool {
+pub fn is_exported(i: ident, m: _mod) -> bool {
 let mut local = false;
 let mut parent_enum : Option<ident> = None;
 for m.items.each |it| {
@@ -272,24 +273,24 @@ fn is_exported(i: ident, m: _mod) -> bool {
 return !has_explicit_exports && local;
 }

-pure fn is_call_expr(e: @expr) -> bool {
+pub pure fn is_call_expr(e: @expr) -> bool {
 match e.node { expr_call(_, _, _) => true, _ => false }
 }

 // This makes def_id hashable
-impl def_id : to_bytes::IterBytes {
+pub impl def_id : to_bytes::IterBytes {
 #[inline(always)]
 pure fn iter_bytes(&self, +lsb0: bool, f: to_bytes::Cb) {
 to_bytes::iter_bytes_2(&self.crate, &self.node, lsb0, f);
 }
 }

-fn block_from_expr(e: @expr) -> blk {
+pub fn block_from_expr(e: @expr) -> blk {
 let blk_ = default_block(~[], option::Some::<@expr>(e), e.id);
 return spanned {node: blk_, span: e.span};
 }

-fn default_block(
+pub fn default_block(
     +stmts1: ~[@stmt],
 expr1: Option<@expr>,
 id1: node_id
@@ -303,7 +304,7 @@ fn default_block(
 }
 }

-fn ident_to_path(s: span, +i: ident) -> @path {
+pub fn ident_to_path(s: span, +i: ident) -> @path {
 @ast::path { span: s,
 global: false,
 idents: ~[i],
@@ -311,24 +312,24 @@ fn ident_to_path(s: span, +i: ident) -> @path {
 types: ~[] }
 }

-fn ident_to_pat(id: node_id, s: span, +i: ident) -> @pat {
+pub fn ident_to_pat(id: node_id, s: span, +i: ident) -> @pat {
 @ast::pat { id: id,
 node: pat_ident(bind_by_value, ident_to_path(s, i), None),
 span: s }
 }

-pure fn is_unguarded(a: &arm) -> bool {
+pub pure fn is_unguarded(a: &arm) -> bool {
 match a.guard {
 None => true,
 _ => false
 }
 }

-pure fn unguarded_pat(a: &arm) -> Option<~[@pat]> {
+pub pure fn unguarded_pat(a: &arm) -> Option<~[@pat]> {
 if is_unguarded(a) { Some(/* FIXME (#2543) */ copy a.pats) } else { None }
 }

-fn public_methods(ms: ~[@method]) -> ~[@method] {
+pub fn public_methods(ms: ~[@method]) -> ~[@method] {
 do ms.filtered |m| {
 match m.vis {
 public => true,
@@ -339,7 +340,7 @@ fn public_methods(ms: ~[@method]) -> ~[@method] {

 // extract a ty_method from a trait_method. if the trait_method is
 // a default, pull out the useful fields to make a ty_method
-fn trait_method_to_ty_method(method: trait_method) -> ty_method {
+pub fn trait_method_to_ty_method(method: trait_method) -> ty_method {
 match method {
 required(ref m) => (*m),
 provided(m) => {
@@ -357,7 +358,7 @@ fn trait_method_to_ty_method(method: trait_method) -> ty_method {
 }
 }

-fn split_trait_methods(trait_methods: ~[trait_method])
+pub fn split_trait_methods(trait_methods: ~[trait_method])
     -> (~[ty_method], ~[@method]) {
 let mut reqd = ~[], provd = ~[];
 for trait_methods.each |trt_method| {
@@ -369,7 +370,7 @@ fn split_trait_methods(trait_methods: ~[trait_method])
 (reqd, provd)
 }

-pure fn struct_field_visibility(field: ast::struct_field) -> visibility {
+pub pure fn struct_field_visibility(field: ast::struct_field) -> visibility {
 match field.node.kind {
 ast::named_field(_, _, visibility) => visibility,
 ast::unnamed_field => ast::public
@@ -382,7 +383,7 @@ pub trait inlined_item_utils {
 fn accept<E>(e: E, v: visit::vt<E>);
 }

-impl inlined_item: inlined_item_utils {
+pub impl inlined_item: inlined_item_utils {
 fn ident() -> ident {
 match self {
 ii_item(i) => /* FIXME (#2543) */ copy i.ident,
@@ -415,7 +416,7 @@ impl inlined_item: inlined_item_utils {

 /* True if d is either a def_self, or a chain of def_upvars
 referring to a def_self */
-fn is_self(d: ast::def) -> bool {
+pub fn is_self(d: ast::def) -> bool {
 match d {
 def_self(*) => true,
 def_upvar(_, d, _, _) => is_self(*d),
@@ -424,7 +425,7 @@ fn is_self(d: ast::def) -> bool {
 }

 /// Maps a binary operator to its precedence
-fn operator_prec(op: ast::binop) -> uint {
+pub fn operator_prec(op: ast::binop) -> uint {
 match op {
 mul | div | rem => 12u,
 // 'as' sits between here with 11
@@ -440,9 +441,11 @@ fn operator_prec(op: ast::binop) -> uint {
 }
 }

-fn dtor_ty() -> @ast::Ty { @ast::Ty {id: 0, node: ty_nil, span: dummy_sp()} }
+pub fn dtor_ty() -> @ast::Ty {
+@ast::Ty {id: 0, node: ty_nil, span: dummy_sp()}
+}

-fn dtor_dec() -> fn_decl {
+pub fn dtor_dec() -> fn_decl {
 let nil_t = dtor_ty();
 // dtor has no args
 ast::fn_decl {
@@ -457,16 +460,16 @@ fn dtor_dec() -> fn_decl {

 #[auto_encode]
 #[auto_decode]
-struct id_range {
+pub struct id_range {
 min: node_id,
 max: node_id,
 }

-fn empty(range: id_range) -> bool {
+pub fn empty(range: id_range) -> bool {
 range.min >= range.max
 }

-fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
+pub fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
 visit::mk_simple_visitor(@visit::SimpleVisitor {
 visit_mod: |_m, _sp, id| vfn(id),

@@ -590,11 +593,11 @@ fn id_visitor(vfn: fn@(node_id)) -> visit::vt<()> {
 })
 }

-fn visit_ids_for_inlined_item(item: inlined_item, vfn: fn@(node_id)) {
+pub fn visit_ids_for_inlined_item(item: inlined_item, vfn: fn@(node_id)) {
 item.accept((), id_visitor(vfn));
 }

-fn compute_id_range(visit_ids_fn: fn(fn@(node_id))) -> id_range {
+pub fn compute_id_range(visit_ids_fn: fn(fn@(node_id))) -> id_range {
 let min = @mut int::max_value;
 let max = @mut int::min_value;
 do visit_ids_fn |id| {
@@ -604,18 +607,18 @@ fn compute_id_range(visit_ids_fn: fn(fn@(node_id))) -> id_range {
 id_range { min: *min, max: *max }
 }

-fn compute_id_range_for_inlined_item(item: inlined_item) -> id_range {
+pub fn compute_id_range_for_inlined_item(item: inlined_item) -> id_range {
 compute_id_range(|f| visit_ids_for_inlined_item(item, f))
 }

-pure fn is_item_impl(item: @ast::item) -> bool {
+pub pure fn is_item_impl(item: @ast::item) -> bool {
 match item.node {
 item_impl(*) => true,
 _ => false
 }
 }

-fn walk_pat(pat: @pat, it: fn(@pat)) {
+pub fn walk_pat(pat: @pat, it: fn(@pat)) {
 it(pat);
 match pat.node {
 pat_ident(_, _, Some(p)) => walk_pat(p, it),
@@ -646,7 +649,7 @@ fn walk_pat(pat: @pat, it: fn(@pat)) {
 }
 }

-fn view_path_id(p: @view_path) -> node_id {
+pub fn view_path_id(p: @view_path) -> node_id {
 match p.node {
 view_path_simple(_, _, _, id) | view_path_glob(_, id) |
 view_path_list(_, _, id) => id
@@ -655,13 +658,13 @@ fn view_path_id(p: @view_path) -> node_id {

 /// Returns true if the given struct def is tuple-like; i.e. that its fields
 /// are unnamed.
-fn struct_def_is_tuple_like(struct_def: @ast::struct_def) -> bool {
+pub fn struct_def_is_tuple_like(struct_def: @ast::struct_def) -> bool {
 struct_def.ctor_id.is_some()
 }


-fn visibility_to_privacy(visibility: visibility,
-legacy_exports: bool) -> Privacy {
+pub fn visibility_to_privacy(visibility: visibility,
+legacy_exports: bool) -> Privacy {
 if legacy_exports {
 match visibility {
 inherited | public => Public,
@@ -675,19 +678,19 @@ fn visibility_to_privacy(visibility: visibility,
 }
 }

-enum Privacy {
+pub enum Privacy {
 Private,
 Public
 }

-impl Privacy : cmp::Eq {
+pub impl Privacy : cmp::Eq {
 pure fn eq(&self, other: &Privacy) -> bool {
 ((*self) as uint) == ((*other) as uint)
 }
 pure fn ne(&self, other: &Privacy) -> bool { !(*self).eq(other) }
 }

-fn has_legacy_export_attr(attrs: &[attribute]) -> bool {
+pub fn has_legacy_export_attr(attrs: &[attribute]) -> bool {
 for attrs.each |attribute| {
 match attribute.node.value.node {
 meta_word(ref w) if (*w) == ~"legacy_exports" => {
@@ -28,76 +28,36 @@ use std::map::HashMap;
 use std::map;
 use std;

-// Constructors
-export mk_name_value_item_str;
-export mk_name_value_item;
-export mk_list_item;
-export mk_word_item;
-export mk_attr;
-export mk_sugared_doc_attr;
-
-// Conversion
-export attr_meta;
-export attr_metas;
-export desugar_doc_attr;
-
-// Accessors
-export get_attr_name;
-export get_meta_item_name;
-export get_meta_item_value_str;
-export get_meta_item_list;
-export get_name_value_str_pair;
-
-// Searching
-export find_attrs_by_name;
-export find_meta_items_by_name;
-export contains;
-export contains_name;
-export attrs_contains_name;
-export first_attr_value_str_by_name;
-export last_meta_item_value_str_by_name;
-export last_meta_item_list_by_name;
-
-// Higher-level applications
-export sort_meta_items;
-export remove_meta_items_by_name;
-export find_linkage_attrs;
-export find_linkage_metas;
-export foreign_abi;
-export inline_attr;
-export find_inline_attr;
-export require_unique_names;
-
 /* Constructors */

-fn mk_name_value_item_str(name: ~str, value: ~str) ->
-@ast::meta_item {
+pub fn mk_name_value_item_str(name: ~str, value: ~str)
+    -> @ast::meta_item {
 let value_lit = dummy_spanned(ast::lit_str(@value));
 return mk_name_value_item(name, value_lit);
 }

-fn mk_name_value_item(name: ~str, +value: ast::lit)
+pub fn mk_name_value_item(name: ~str, +value: ast::lit)
     -> @ast::meta_item {
 return @dummy_spanned(ast::meta_name_value(name, value));
 }

-fn mk_list_item(name: ~str, +items: ~[@ast::meta_item]) ->
+pub fn mk_list_item(name: ~str, +items: ~[@ast::meta_item]) ->
 @ast::meta_item {
 return @dummy_spanned(ast::meta_list(name, items));
 }

-fn mk_word_item(name: ~str) -> @ast::meta_item {
+pub fn mk_word_item(name: ~str) -> @ast::meta_item {
 return @dummy_spanned(ast::meta_word(name));
 }

-fn mk_attr(item: @ast::meta_item) -> ast::attribute {
+pub fn mk_attr(item: @ast::meta_item) -> ast::attribute {
 dummy_spanned(ast::attribute_ { style: ast::attr_inner,
 value: *item,
 is_sugared_doc: false })
 }

-fn mk_sugared_doc_attr(text: ~str,
-    +lo: BytePos, +hi: BytePos) -> ast::attribute {
+pub fn mk_sugared_doc_attr(text: ~str,
+    +lo: BytePos, +hi: BytePos) -> ast::attribute {
 let lit = spanned(lo, hi, ast::lit_str(@text));
 let attr = ast::attribute_ {
 style: doc_comment_style(text),
@@ -109,14 +69,16 @@ fn mk_sugared_doc_attr(text: ~str,

 /* Conversion */

-fn attr_meta(attr: ast::attribute) -> @ast::meta_item { @attr.node.value }
+pub fn attr_meta(attr: ast::attribute) -> @ast::meta_item {
+@attr.node.value
+}

 // Get the meta_items from inside a vector of attributes
-fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
+pub fn attr_metas(attrs: ~[ast::attribute]) -> ~[@ast::meta_item] {
 do attrs.map |a| { attr_meta(*a) }
 }

-fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
+pub fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {
 if attr.node.is_sugared_doc {
 let comment = get_meta_item_value_str(@attr.node.value).get();
 let meta = mk_name_value_item_str(~"doc",
@@ -129,11 +91,11 @@ fn desugar_doc_attr(attr: &ast::attribute) -> ast::attribute {

 /* Accessors */

-fn get_attr_name(attr: ast::attribute) -> ~str {
+pub fn get_attr_name(attr: ast::attribute) -> ~str {
 get_meta_item_name(@attr.node.value)
 }

-fn get_meta_item_name(meta: @ast::meta_item) -> ~str {
+pub fn get_meta_item_name(meta: @ast::meta_item) -> ~str {
 match meta.node {
 ast::meta_word(ref n) => (*n),
 ast::meta_name_value(ref n, _) => (*n),
@@ -145,7 +107,7 @@ fn get_meta_item_name(meta: @ast::meta_item) -> ~str {
 * Gets the string value if the meta_item is a meta_name_value variant
 * containing a string, otherwise none
 */
-fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<~str> {
+pub fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<~str> {
 match meta.node {
 ast::meta_name_value(_, v) => match v.node {
 ast::lit_str(s) => option::Some(*s),
@@ -156,7 +118,8 @@ fn get_meta_item_value_str(meta: @ast::meta_item) -> Option<~str> {
 }

 /// Gets a list of inner meta items from a list meta_item type
-fn get_meta_item_list(meta: @ast::meta_item) -> Option<~[@ast::meta_item]> {
+pub fn get_meta_item_list(meta: @ast::meta_item)
+    -> Option<~[@ast::meta_item]> {
 match meta.node {
 ast::meta_list(_, l) => option::Some(/* FIXME (#2543) */ copy l),
 _ => option::None
@@ -167,7 +130,8 @@ fn get_meta_item_list(meta: @ast::meta_item) -> Option<~[@ast::meta_item]> {
 * If the meta item is a nam-value type with a string value then returns
 * a tuple containing the name and string value, otherwise `none`
 */
-fn get_name_value_str_pair(item: @ast::meta_item) -> Option<(~str, ~str)> {
+pub fn get_name_value_str_pair(item: @ast::meta_item)
+    -> Option<(~str, ~str)> {
 match attr::get_meta_item_value_str(item) {
 Some(ref value) => {
 let name = attr::get_meta_item_name(item);
@@ -181,7 +145,7 @@ fn get_name_value_str_pair(item: @ast::meta_item) -> Option<(~str, ~str)> {
 /* Searching */

 /// Search a list of attributes and return only those with a specific name
-fn find_attrs_by_name(attrs: &[ast::attribute], name: &str) ->
+pub fn find_attrs_by_name(attrs: &[ast::attribute], name: &str) ->
 ~[ast::attribute] {
 let filter: &fn(a: &ast::attribute) -> Option<ast::attribute> = |a| {
 if name == get_attr_name(*a) {
@@ -194,7 +158,7 @@ fn find_attrs_by_name(attrs: &[ast::attribute], name: &str) ->
 }

 /// Search a list of meta items and return only those with a specific name
-fn find_meta_items_by_name(metas: &[@ast::meta_item], name: &str) ->
+pub fn find_meta_items_by_name(metas: &[@ast::meta_item], name: &str) ->
 ~[@ast::meta_item] {
 let mut rs = ~[];
 for metas.each |mi| {
@@ -209,7 +173,8 @@ fn find_meta_items_by_name(metas: &[@ast::meta_item], name: &str) ->
 * Returns true if a list of meta items contains another meta item. The
 * comparison is performed structurally.
 */
-fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item) -> bool {
+pub fn contains(haystack: ~[@ast::meta_item], needle: @ast::meta_item)
+    -> bool {
 for haystack.each |item| {
 if eq(*item, needle) { return true; }
 }
@@ -239,17 +204,17 @@ fn eq(a: @ast::meta_item, b: @ast::meta_item) -> bool {
 }
 }

-fn contains_name(metas: &[@ast::meta_item], name: &str) -> bool {
+pub fn contains_name(metas: &[@ast::meta_item], name: &str) -> bool {
 let matches = find_meta_items_by_name(metas, name);
 return vec::len(matches) > 0u;
 }

-fn attrs_contains_name(attrs: &[ast::attribute], name: &str) -> bool {
+pub fn attrs_contains_name(attrs: &[ast::attribute], name: &str) -> bool {
 !find_attrs_by_name(attrs, name).is_empty()
 }

-fn first_attr_value_str_by_name(attrs: ~[ast::attribute], name: ~str)
-    -> Option<~str> {
+pub fn first_attr_value_str_by_name(attrs: ~[ast::attribute], name: ~str)
+    -> Option<~str> {

 let mattrs = find_attrs_by_name(attrs, name);
 if vec::len(mattrs) > 0u {
@@ -265,8 +230,8 @@ fn last_meta_item_by_name(items: ~[@ast::meta_item], name: ~str)
 vec::last_opt(items)
 }

-fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str)
-    -> Option<~str> {
+pub fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str)
+    -> Option<~str> {

 match last_meta_item_by_name(items, name) {
 Some(item) => match attr::get_meta_item_value_str(item) {
@@ -277,7 +242,7 @@ fn last_meta_item_value_str_by_name(items: ~[@ast::meta_item], name: ~str)
 }
 }

-fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: ~str)
+pub fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: ~str)
     -> Option<~[@ast::meta_item]> {

 match last_meta_item_by_name(items, name) {
@@ -291,7 +256,7 @@ fn last_meta_item_list_by_name(items: ~[@ast::meta_item], name: ~str)

 // FIXME (#607): This needs to sort by meta_item variant in addition to
 // the item name (See [Fixme-sorting])
-fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
+pub fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
 pure fn lteq(ma: &@ast::meta_item, mb: &@ast::meta_item) -> bool {
 pure fn key(m: &ast::meta_item) -> ~str {
 match m.node {
@@ -309,7 +274,7 @@ fn sort_meta_items(+items: ~[@ast::meta_item]) -> ~[@ast::meta_item] {
 vec::cast_from_mut(move v)
 }

-fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) ->
+pub fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) ->
 ~[@ast::meta_item] {

 return vec::filter_map(items, |item| {
@@ -325,7 +290,7 @@ fn remove_meta_items_by_name(items: ~[@ast::meta_item], name: ~str) ->
 * From a list of crate attributes get only the meta_items that affect crate
 * linkage
 */
-fn find_linkage_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] {
+pub fn find_linkage_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] {
 do find_attrs_by_name(attrs, ~"link").flat_map |attr| {
 match attr.node.value.node {
 ast::meta_list(_, items) => /* FIXME (#2543) */ copy items,
@@ -334,7 +299,8 @@ fn find_linkage_metas(attrs: &[ast::attribute]) -> ~[@ast::meta_item] {
 }
 }

-fn foreign_abi(attrs: ~[ast::attribute]) -> Either<~str, ast::foreign_abi> {
+pub fn foreign_abi(attrs: ~[ast::attribute])
+    -> Either<~str, ast::foreign_abi> {
 return match attr::first_attr_value_str_by_name(attrs, ~"abi") {
 option::None => {
 either::Right(ast::foreign_abi_cdecl)
@@ -354,7 +320,7 @@ fn foreign_abi(attrs: ~[ast::attribute]) -> Either<~str, ast::foreign_abi> {
 };
 }

-enum inline_attr {
+pub enum inline_attr {
 ia_none,
 ia_hint,
 ia_always,
@@ -369,7 +335,7 @@ impl inline_attr : cmp::Eq {
 }

 /// True if something like #[inline] is found in the list of attrs.
-fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
+pub fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
 // FIXME (#2809)---validate the usage of #[inline] and #[inline(always)]
 do vec::foldl(ia_none, attrs) |ia,attr| {
 match attr.node.value.node {
@@ -390,8 +356,8 @@ fn find_inline_attr(attrs: &[ast::attribute]) -> inline_attr {
 }


-fn require_unique_names(diagnostic: span_handler,
-metas: &[@ast::meta_item]) {
+pub fn require_unique_names(diagnostic: span_handler,
+metas: &[@ast::meta_item]) {
 let map = map::HashMap();
 for metas.each |meta| {
 let name = get_meta_item_name(*meta);
@@ -48,71 +48,71 @@ pub enum CharPos = uint;
 // XXX: Lots of boilerplate in these impls, but so far my attempts to fix
 // have been unsuccessful

-impl BytePos: Pos {
+pub impl BytePos: Pos {
 static pure fn from_uint(n: uint) -> BytePos { BytePos(n) }
 pure fn to_uint(&self) -> uint { **self }
 }

-impl BytePos: cmp::Eq {
+pub impl BytePos: cmp::Eq {
 pure fn eq(&self, other: &BytePos) -> bool { **self == **other }
 pure fn ne(&self, other: &BytePos) -> bool { !(*self).eq(other) }
 }

-impl BytePos: cmp::Ord {
+pub impl BytePos: cmp::Ord {
 pure fn lt(&self, other: &BytePos) -> bool { **self < **other }
 pure fn le(&self, other: &BytePos) -> bool { **self <= **other }
 pure fn ge(&self, other: &BytePos) -> bool { **self >= **other }
 pure fn gt(&self, other: &BytePos) -> bool { **self > **other }
 }

-impl BytePos: Add<BytePos, BytePos> {
+pub impl BytePos: Add<BytePos, BytePos> {
 pure fn add(&self, rhs: &BytePos) -> BytePos {
 BytePos(**self + **rhs)
 }
 }

-impl BytePos: Sub<BytePos, BytePos> {
+pub impl BytePos: Sub<BytePos, BytePos> {
 pure fn sub(&self, rhs: &BytePos) -> BytePos {
 BytePos(**self - **rhs)
 }
 }

-impl BytePos: to_bytes::IterBytes {
+pub impl BytePos: to_bytes::IterBytes {
 pure fn iter_bytes(&self, +lsb0: bool, f: to_bytes::Cb) {
 (**self).iter_bytes(lsb0, f)
 }
 }

-impl CharPos: Pos {
+pub impl CharPos: Pos {
 static pure fn from_uint(n: uint) -> CharPos { CharPos(n) }
 pure fn to_uint(&self) -> uint { **self }
 }

-impl CharPos: cmp::Eq {
+pub impl CharPos: cmp::Eq {
 pure fn eq(&self, other: &CharPos) -> bool { **self == **other }
 pure fn ne(&self, other: &CharPos) -> bool { !(*self).eq(other) }
 }

-impl CharPos: cmp::Ord {
+pub impl CharPos: cmp::Ord {
 pure fn lt(&self, other: &CharPos) -> bool { **self < **other }
 pure fn le(&self, other: &CharPos) -> bool { **self <= **other }
 pure fn ge(&self, other: &CharPos) -> bool { **self >= **other }
 pure fn gt(&self, other: &CharPos) -> bool { **self > **other }
 }

-impl CharPos: to_bytes::IterBytes {
+pub impl CharPos: to_bytes::IterBytes {
 pure fn iter_bytes(&self, +lsb0: bool, f: to_bytes::Cb) {
 (**self).iter_bytes(lsb0, f)
 }
 }

-impl CharPos: Add<CharPos, CharPos> {
+pub impl CharPos: Add<CharPos, CharPos> {
 pure fn add(&self, rhs: &CharPos) -> CharPos {
 CharPos(**self + **rhs)
 }
 }

-impl CharPos: Sub<CharPos, CharPos> {
+pub impl CharPos: Sub<CharPos, CharPos> {
 pure fn sub(&self, rhs: &CharPos) -> CharPos {
 CharPos(**self - **rhs)
 }
@@ -130,19 +130,19 @@ pub struct span {
 expn_info: Option<@ExpnInfo>
 }

-impl span : cmp::Eq {
+pub impl span : cmp::Eq {
 pure fn eq(&self, other: &span) -> bool {
 return (*self).lo == (*other).lo && (*self).hi == (*other).hi;
 }
 pure fn ne(&self, other: &span) -> bool { !(*self).eq(other) }
 }

-impl<S: Encoder> span: Encodable<S> {
+pub impl<S: Encoder> span: Encodable<S> {
 /* Note #1972 -- spans are encoded but not decoded */
 fn encode(&self, _s: &S) { }
 }

-impl<D: Decoder> span: Decodable<D> {
+pub impl<D: Decoder> span: Decodable<D> {
 static fn decode(_d: &D) -> span {
 ast_util::dummy_sp()
 }
@@ -23,18 +23,11 @@ use core::dvec::DVec;

 use std::term;

-export emitter, collect, emit;
-export level, fatal, error, warning, note;
-export span_handler, handler, mk_span_handler, mk_handler;
-export codemap_span_handler, codemap_handler;
-export ice_msg;
-export expect;
-
-type emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>,
+pub type emitter = fn@(cmsp: Option<(@codemap::CodeMap, span)>,
 msg: &str, lvl: level);


-trait span_handler {
+pub trait span_handler {
 fn span_fatal(sp: span, msg: &str) -> !;
 fn span_err(sp: span, msg: &str);
 fn span_warn(sp: span, msg: &str);
@@ -44,7 +37,7 @@ trait span_handler {
 fn handler() -> handler;
 }

-trait handler {
+pub trait handler {
 fn fatal(msg: &str) -> !;
 fn err(msg: &str);
 fn bump_err_count();
@@ -133,15 +126,16 @@ impl handler_t: handler {
 }
 }

-fn ice_msg(msg: &str) -> ~str {
+pub fn ice_msg(msg: &str) -> ~str {
 fmt!("internal compiler error: %s", msg)
 }

-fn mk_span_handler(handler: handler, cm: @codemap::CodeMap) -> span_handler {
+pub fn mk_span_handler(handler: handler, cm: @codemap::CodeMap)
+    -> span_handler {
 @codemap_t { handler: handler, cm: cm } as span_handler
 }

-fn mk_handler(emitter: Option<emitter>) -> handler {
+pub fn mk_handler(emitter: Option<emitter>) -> handler {

 let emit = match emitter {
 Some(e) => e,
@@ -157,7 +151,7 @@ fn mk_handler(emitter: Option<emitter>) -> handler {
 @handler_t { mut err_count: 0, emit: emit } as handler
 }

-enum level {
+pub enum level {
 fatal,
 error,
 warning,
@@ -205,7 +199,7 @@ fn print_diagnostic(topic: ~str, lvl: level, msg: &str) {
 io::stderr().write_str(fmt!(" %s\n", msg));
 }

-fn collect(messages: @DVec<~str>)
+pub fn collect(messages: @DVec<~str>)
     -> fn@(Option<(@codemap::CodeMap, span)>, &str, level)
 {
 let f: @fn(Option<(@codemap::CodeMap, span)>, &str, level) =
@@ -213,7 +207,7 @@ fn collect(messages: @DVec<~str>)
 f
 }

-fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) {
+pub fn emit(cmsp: Option<(@codemap::CodeMap, span)>, msg: &str, lvl: level) {
 match cmsp {
 Some((cm, sp)) => {
 let sp = cm.adjust_span(sp);
@@ -296,8 +290,9 @@ fn print_macro_backtrace(cm: @codemap::CodeMap, sp: span) {
 }
 }

-fn expect<T: Copy>(diag: span_handler,
-opt: Option<T>, msg: fn() -> ~str) -> T {
+pub fn expect<T: Copy>(diag: span_handler,
+opt: Option<T>,
+msg: fn() -> ~str) -> T {
 match opt {
 Some(ref t) => (*t),
 None => diag.handler().bug(msg())
@@ -28,12 +28,11 @@ use core::vec;

 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
-#[legacy_exports];
 pub use ext;
 pub use parse;
 }

-fn path(ids: ~[ident], span: span) -> @ast::path {
+pub fn path(ids: ~[ident], span: span) -> @ast::path {
 @ast::path { span: span,
 global: false,
 idents: ids,
@@ -41,7 +40,7 @@ fn path(ids: ~[ident], span: span) -> @ast::path {
 types: ~[] }
 }

-fn path_global(ids: ~[ident], span: span) -> @ast::path {
+pub fn path_global(ids: ~[ident], span: span) -> @ast::path {
 @ast::path { span: span,
 global: true,
 idents: ids,
@@ -49,12 +48,12 @@ fn path_global(ids: ~[ident], span: span) -> @ast::path {
 types: ~[] }
 }

-trait append_types {
+pub trait append_types {
 fn add_ty(ty: @ast::Ty) -> @ast::path;
 fn add_tys(+tys: ~[@ast::Ty]) -> @ast::path;
 }

-impl @ast::path: append_types {
+pub impl @ast::path: append_types {
 fn add_ty(ty: @ast::Ty) -> @ast::path {
 @ast::path { types: vec::append_one(self.types, ty),
 .. *self}
@@ -113,7 +112,7 @@ pub trait ext_ctxt_ast_builder {
 fn strip_bounds(bounds: &[ast::ty_param]) -> ~[ast::ty_param];
 }

-impl ext_ctxt: ext_ctxt_ast_builder {
+pub impl ext_ctxt: ext_ctxt_ast_builder {
 fn ty_option(ty: @ast::Ty) -> @ast::Ty {
 self.ty_path_ast_builder(path_global(~[
 self.ident_of(~"core"),
@@ -37,7 +37,7 @@ use ext::base::ext_ctxt;
 use ext::pipes::proto::{state, protocol, next_state};
 use ext::pipes::proto;

-impl ext_ctxt: proto::visitor<(), (), ()> {
+pub impl ext_ctxt: proto::visitor<(), (), ()> {
 fn visit_proto(_proto: protocol,
 _states: &[()]) { }

@@ -82,3 +82,4 @@ impl ext_ctxt: proto::visitor<(), (), ()> {
 }
 }
 }

@@ -45,7 +45,7 @@ use ext::pipes::protocol;
 use core::str;
 use std::bitv::{Bitv};

-fn analyze(proto: protocol, _cx: ext_ctxt) {
+pub fn analyze(proto: protocol, _cx: ext_ctxt) {
 debug!("initializing colive analysis");
 let num_states = proto.num_states();
 let colive = do (copy proto.states).map_to_vec |state| {
@@ -103,3 +103,4 @@ fn analyze(proto: protocol, _cx: ext_ctxt) {
 proto.bounded = Some(true);
 }
 }

@@ -56,17 +56,11 @@ use parse::parser::Parser;

 use core::option::None;

-#[legacy_exports]
 mod ast_builder;
-#[legacy_exports]
 mod parse_proto;
-#[legacy_exports]
 mod pipec;
-#[legacy_exports]
 mod proto;
-#[legacy_exports]
 mod check;
-#[legacy_exports]
 mod liveness;


@@ -16,13 +16,13 @@ use parse::token;

 use core::prelude::*;

-trait proto_parser {
+pub trait proto_parser {
 fn parse_proto(id: ~str) -> protocol;
 fn parse_state(proto: protocol);
 fn parse_message(state: state);
 }

-impl parser::Parser: proto_parser {
+pub impl parser::Parser: proto_parser {
 fn parse_proto(id: ~str) -> protocol {
 let proto = protocol(id, self.span);

@@ -26,12 +26,12 @@ use core::str;
 use core::to_str::ToStr;
 use core::vec;

-trait gen_send {
+pub trait gen_send {
 fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item;
 fn to_ty(cx: ext_ctxt) -> @ast::Ty;
 }

-trait to_type_decls {
+pub trait to_type_decls {
 fn to_type_decls(cx: ext_ctxt) -> ~[@ast::item];
 fn to_endpoint_decls(cx: ext_ctxt, dir: direction) -> ~[@ast::item];
 }
@@ -45,7 +45,7 @@ pub trait gen_init {
 fn gen_init_bounded(ext_cx: ext_ctxt) -> @ast::expr;
 }

-impl message: gen_send {
+pub impl message: gen_send {
 fn gen_send(cx: ext_ctxt, try: bool) -> @ast::item {
 debug!("pipec: gen_send");
 match self {
@@ -201,7 +201,7 @@ impl message: gen_send {
 }
 }

-impl state: to_type_decls {
+pub impl state: to_type_decls {
 fn to_type_decls(cx: ext_ctxt) -> ~[@ast::item] {
 debug!("pipec: to_type_decls");
 // This compiles into two different type declarations. Say the
@@ -305,7 +305,7 @@ impl state: to_type_decls {
 }
 }

-impl protocol: gen_init {
+pub impl protocol: gen_init {

 fn gen_init(cx: ext_ctxt) -> @ast::item {
 let ext_cx = cx;
@@ -445,4 +445,4 @@ impl protocol: gen_init {

 cx.item_mod(cx.ident_of(self.name), self.span, items)
 }
 }
 }
@@ -19,9 +19,9 @@ use core::cmp;
 use core::dvec::DVec;
 use core::to_str::ToStr;

-enum direction { send, recv }
+pub enum direction { send, recv }

-impl direction : cmp::Eq {
+pub impl direction : cmp::Eq {
 pure fn eq(&self, other: &direction) -> bool {
 match ((*self), (*other)) {
 (send, send) => true,
@@ -33,7 +33,7 @@ impl direction : cmp::Eq {
 pure fn ne(&self, other: &direction) -> bool { !(*self).eq(other) }
 }

-impl direction: ToStr {
+pub impl direction: ToStr {
 pure fn to_str() -> ~str {
 match self {
 send => ~"Send",
@@ -42,7 +42,7 @@ impl direction: ToStr {
 }
 }

-impl direction {
+pub impl direction {
 fn reverse() -> direction {
 match self {
 send => recv,
@@ -51,17 +51,17 @@ impl direction {
 }
 }

-struct next_state {
+pub struct next_state {
 state: ~str,
 tys: ~[@ast::Ty],
 }

-enum message {
+pub enum message {
 // name, span, data, current state, next state
 message(~str, span, ~[@ast::Ty], state, Option<next_state>)
 }

-impl message {
+pub impl message {
 fn name() -> ~str {
 match self {
 message(ref id, _, _, _, _) => (*id)
@@ -82,7 +82,7 @@ impl message {
 }
 }

-enum state {
+pub enum state {
 state_(@{
 id: uint,
 name: ~str,
@@ -95,7 +95,7 @@ enum state {
 }),
 }

-impl state {
+pub impl state {
 fn add_message(name: ~str, span: span,
     +data: ~[@ast::Ty], next: Option<next_state>) {
 self.messages.push(message(name, span, data, self,
@@ -132,13 +132,13 @@ impl state {
 }
 }

-type protocol = @protocol_;
+pub type protocol = @protocol_;

-fn protocol(name: ~str, +span: span) -> protocol {
+pub fn protocol(name: ~str, +span: span) -> protocol {
 @protocol_(name, span)
 }

-fn protocol_(name: ~str, span: span) -> protocol_ {
+pub fn protocol_(name: ~str, span: span) -> protocol_ {
 protocol_ {
 name: name,
 span: span,
@@ -147,7 +147,7 @@ fn protocol_(name: ~str, span: span) -> protocol_ {
 }
 }

-struct protocol_ {
+pub struct protocol_ {
 name: ~str,
 span: span,
 states: DVec<state>,
@@ -155,7 +155,7 @@ struct protocol_ {
 mut bounded: Option<bool>,
 }

-impl protocol_ {
+pub impl protocol_ {

 /// Get a state.
 fn get_state(name: ~str) -> state {
@@ -195,7 +195,7 @@ impl protocol_ {
 }
 }

-impl protocol {
+pub impl protocol {
 fn add_state_poly(name: ~str, ident: ast::ident, dir: direction,
     +ty_params: ~[ast::ty_param]) -> state {
 let messages = DVec();
@@ -216,14 +216,14 @@ impl protocol {
 }
 }

-trait visitor<Tproto, Tstate, Tmessage> {
+pub trait visitor<Tproto, Tstate, Tmessage> {
 fn visit_proto(proto: protocol, st: &[Tstate]) -> Tproto;
 fn visit_state(state: state, m: &[Tmessage]) -> Tstate;
 fn visit_message(name: ~str, spane: span, tys: &[@ast::Ty],
 this: state, next: Option<next_state>) -> Tmessage;
 }

-fn visit<Tproto, Tstate, Tmessage, V: visitor<Tproto, Tstate, Tmessage>>(
+pub fn visit<Tproto, Tstate, Tmessage, V: visitor<Tproto, Tstate, Tmessage>>(
 proto: protocol, visitor: V) -> Tproto {

 // the copy keywords prevent recursive use of dvec
@@ -236,3 +236,4 @@ fn visit<Tproto, Tstate, Tmessage, V: visitor<Tproto, Tstate, Tmessage>>(
 };
 visitor.visit_proto(proto, states)
 }

@@ -189,13 +189,15 @@ fn nameize(p_s: parse_sess, ms: ~[matcher], res: ~[@named_match])
 fn n_rec(p_s: parse_sess, m: matcher, res: ~[@named_match],
 ret_val: HashMap<ident, @named_match>) {
 match m {
-spanned {node: match_tok(_), _} => (),
-spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
+ast::spanned {node: match_tok(_), _} => (),
+ast::spanned {node: match_seq(ref more_ms, _, _, _, _), _} => {
 for (*more_ms).each() |next_m| {
 n_rec(p_s, *next_m, res, ret_val)
 };
 }
-spanned {node: match_nonterminal(bind_name, _, idx), span: sp} => {
+ast::spanned {
+node: match_nonterminal(bind_name, _, idx), span: sp
+} => {
 if ret_val.contains_key(bind_name) {
 p_s.span_diagnostic.span_fatal(sp, ~"Duplicated bind name: "+
 *p_s.interner.get(bind_name))
@@ -18,9 +18,7 @@ use parse::token;

 use core::either::{Either, Left, Right};

-export parser_attr;
-
-trait parser_attr {
+pub trait parser_attr {
 fn parse_outer_attributes() -> ~[ast::attribute];
 fn parse_attribute(style: ast::attr_style) -> ast::attribute;
 fn parse_attribute_naked(style: ast::attr_style, lo: BytePos) ->
@@ -15,7 +15,7 @@
 use ast;
 use ast_util::operator_prec;

-fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
+pub fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
 match e.node {
 ast::expr_if(*)
 | ast::expr_match(*)
@@ -28,7 +28,7 @@ fn expr_requires_semi_to_be_stmt(e: @ast::expr) -> bool {
 }
 }

-fn expr_is_simple_block(e: @ast::expr) -> bool {
+pub fn expr_is_simple_block(e: @ast::expr) -> bool {
 match e.node {
 ast::expr_block(
 ast::spanned { node: ast::blk_ { rules: ast::default_blk, _ }, _ }
@@ -37,7 +37,7 @@ fn expr_is_simple_block(e: @ast::expr) -> bool {
 }
 }

-fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
+pub fn stmt_ends_with_semi(stmt: ast::stmt) -> bool {
 return match stmt.node {
 ast::stmt_decl(d, _) => {
 match d.node {
@@ -27,14 +27,7 @@ use core::str;
 use core::uint;
 use core::vec;

-export cmnt;
-export lit;
-export cmnt_style;
-export gather_comments_and_literals;
-export is_doc_comment, doc_comment_style, strip_doc_comment_decoration;
-export isolated, trailing, mixed, blank_line;
-
-enum cmnt_style {
+pub enum cmnt_style {
 isolated, // No code on either side of each line of the comment
 trailing, // Code exists to the left of the comment
 mixed, // Code before /* foo */ and after the comment
@@ -50,16 +43,16 @@ impl cmnt_style : cmp::Eq {
 }
 }

-type cmnt = {style: cmnt_style, lines: ~[~str], pos: BytePos};
+pub type cmnt = {style: cmnt_style, lines: ~[~str], pos: BytePos};

-fn is_doc_comment(s: ~str) -> bool {
+pub fn is_doc_comment(s: ~str) -> bool {
 s.starts_with(~"///") ||
 s.starts_with(~"//!") ||
 s.starts_with(~"/**") ||
 s.starts_with(~"/*!")
 }

-fn doc_comment_style(comment: ~str) -> ast::attr_style {
+pub fn doc_comment_style(comment: ~str) -> ast::attr_style {
 assert is_doc_comment(comment);
 if comment.starts_with(~"//!") || comment.starts_with(~"/*!") {
 ast::attr_inner
@@ -68,7 +61,7 @@ fn doc_comment_style(comment: ~str) -> ast::attr_style {
 }
 }

-fn strip_doc_comment_decoration(comment: ~str) -> ~str {
+pub fn strip_doc_comment_decoration(comment: ~str) -> ~str {

 /// remove whitespace-only lines from the start/end of lines
 fn vertical_trim(lines: ~[~str]) -> ~[~str] {
@@ -306,11 +299,11 @@ fn consume_comment(rdr: string_reader, code_to_the_left: bool,
 debug!("<<< consume comment");
 }

-type lit = {lit: ~str, pos: BytePos};
+pub type lit = {lit: ~str, pos: BytePos};

-fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
-path: ~str,
-srdr: io::Reader) ->
+pub fn gather_comments_and_literals(span_diagnostic: diagnostic::span_handler,
+path: ~str,
+srdr: io::Reader) ->
 {cmnts: ~[cmnt], lits: ~[lit]} {
 let src = @str::from_bytes(srdr.read_whole_stream());
 let itr = parse::token::mk_fake_ident_interner();
@@ -21,26 +21,26 @@ use core::option::{None, Option, Some};
 use core::option;
 use std::map::HashMap;

-type seq_sep = {
+pub type seq_sep = {
 sep: Option<token::Token>,
 trailing_sep_allowed: bool
 };

-fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep {
+pub fn seq_sep_trailing_disallowed(t: token::Token) -> seq_sep {
 return {sep: option::Some(t), trailing_sep_allowed: false};
 }
-fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep {
+pub fn seq_sep_trailing_allowed(t: token::Token) -> seq_sep {
 return {sep: option::Some(t), trailing_sep_allowed: true};
 }
-fn seq_sep_none() -> seq_sep {
+pub fn seq_sep_none() -> seq_sep {
 return {sep: option::None, trailing_sep_allowed: false};
 }

-fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
+pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str {
 token::to_str(reader.interner(), token)
 }

-impl Parser {
+pub impl Parser {
 fn unexpected_last(t: token::Token) -> ! {
 self.span_fatal(
 copy self.last_span,
@@ -229,7 +229,7 @@ impl Parser {
 }

 fn parse_seq_lt_gt<T: Copy>(sep: Option<token::Token>,
-f: fn(Parser) -> T) -> spanned<~[T]> {
+f: fn(Parser) -> T) -> ast::spanned<~[T]> {
 let lo = self.span.lo;
 self.expect(token::LT);
 let result = self.parse_seq_to_before_gt::<T>(sep, f);
@@ -277,7 +277,7 @@ impl Parser {
 // NB: Do not use this function unless you actually plan to place the
 // spanned list in the AST.
 fn parse_seq<T: Copy>(bra: token::Token, ket: token::Token, sep: seq_sep,
-f: fn(Parser) -> T) -> spanned<~[T]> {
+f: fn(Parser) -> T) -> ast::spanned<~[T]> {
 let lo = self.span.lo;
 self.expect(bra);
 let result = self.parse_seq_to_before_end::<T>(ket, sep, f);
@ -16,7 +16,7 @@ use codemap::{BytePos, CharPos, CodeMap, Pos, span};
use codemap;
use diagnostic::span_handler;
use ext::tt::transcribe::{tt_next_token};
use ext::tt::transcribe::{tt_reader, new_tt_reader, dup_tt_reader};
use ext::tt::transcribe::{dup_tt_reader};
use parse::token;

use core::char;
@ -24,14 +24,11 @@ use core::either;
use core::str;
use core::u64;

pub use ext::tt::transcribe::{tt_reader, new_tt_reader};

use std;

export reader, string_reader, new_string_reader, is_whitespace;
export tt_reader, new_tt_reader;
export nextch, is_eof, bump, get_str_from, new_low_level_string_reader;
export string_reader_as_reader, tt_reader_as_reader;

trait reader {
pub trait reader {
    fn is_eof() -> bool;
    fn next_token() -> {tok: token::Token, sp: span};
    fn fatal(~str) -> !;
@ -41,7 +38,7 @@ trait reader {
    fn dup() -> reader;
}

type string_reader = @{
pub type string_reader = @{
    span_diagnostic: span_handler,
    src: @~str,
    // The absolute offset within the codemap of the next character to read
@ -59,18 +56,18 @@ type string_reader = @{
    mut peek_span: span
};

fn new_string_reader(span_diagnostic: span_handler,
                     filemap: @codemap::FileMap,
                     itr: @token::ident_interner) -> string_reader {
pub fn new_string_reader(span_diagnostic: span_handler,
                         filemap: @codemap::FileMap,
                         itr: @token::ident_interner) -> string_reader {
    let r = new_low_level_string_reader(span_diagnostic, filemap, itr);
    string_advance_token(r); /* fill in peek_* */
    return r;
}

/* For comments.rs, which hackily pokes into 'pos' and 'curr' */
fn new_low_level_string_reader(span_diagnostic: span_handler,
                               filemap: @codemap::FileMap,
                               itr: @token::ident_interner)
pub fn new_low_level_string_reader(span_diagnostic: span_handler,
                                   filemap: @codemap::FileMap,
                                   itr: @token::ident_interner)
    -> string_reader {
    // Force the initial reader bump to start on a fresh line
    let initial_char = '\n';
@ -114,7 +111,7 @@ impl string_reader: reader {
    fn dup() -> reader { dup_string_reader(self) as reader }
}

impl tt_reader: reader {
pub impl tt_reader: reader {
    fn is_eof() -> bool { self.cur_tok == token::EOF }
    fn next_token() -> {tok: token::Token, sp: span} {
        /* weird resolve bug: if the following `if`, or any of its
@ -157,7 +154,7 @@ fn byte_offset(rdr: string_reader) -> BytePos {
    (rdr.pos - rdr.filemap.start_pos)
}

fn get_str_from(rdr: string_reader, start: BytePos) -> ~str {
pub fn get_str_from(rdr: string_reader, start: BytePos) -> ~str {
    unsafe {
        // I'm pretty skeptical about this subtraction. What if there's a
        // multi-byte character before the mark?
@ -166,7 +163,7 @@ fn get_str_from(rdr: string_reader, start: BytePos) -> ~str {
    }
}

fn bump(rdr: string_reader) {
pub fn bump(rdr: string_reader) {
    rdr.last_pos = rdr.pos;
    let current_byte_offset = byte_offset(rdr).to_uint();;
    if current_byte_offset < (*rdr.src).len() {
@ -190,10 +187,10 @@ fn bump(rdr: string_reader) {
        rdr.curr = -1 as char;
    }
}
fn is_eof(rdr: string_reader) -> bool {
pub fn is_eof(rdr: string_reader) -> bool {
    rdr.curr == -1 as char
}
fn nextch(rdr: string_reader) -> char {
pub fn nextch(rdr: string_reader) -> char {
    let offset = byte_offset(rdr).to_uint();
    if offset < (*rdr.src).len() {
        return str::char_at(*rdr.src, offset);
@ -211,7 +208,7 @@ fn hex_digit_val(c: char) -> int {

fn bin_digit_value(c: char) -> int { if c == '0' { return 0; } return 1; }

fn is_whitespace(c: char) -> bool {
pub fn is_whitespace(c: char) -> bool {
    return c == ' ' || c == '\t' || c == '\r' || c == '\n';
}

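The lexer.rs hunk above is the commit's basic pattern in miniature: the file-level `export` lists (reader, string_reader, nextch, is_eof, ...) are deleted, each exported item is marked `pub` at its definition, and the old re-export of tt_reader/new_tt_reader becomes an explicit `pub use`. A minimal before/after sketch in the same pre-1.0 dialect, using hypothetical names (`util`, `widget`, `new_widget`, `helper` are illustrative only, not items from this commit):

    // Before: visibility was granted by an export list at the top of the module:
    //     export helper;
    //     export widget, new_widget;
    // After: each item carries its own `pub`, and re-exports are spelled out.
    pub use util::{widget, new_widget};

    pub fn helper(x: int) -> int { x + 1 }
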
@ -27,32 +27,22 @@ use core::path::Path;
use core::result::{Err, Ok, Result};
use core::result;

#[legacy_exports]
pub mod lexer;
#[legacy_exports]
pub mod parser;
#[legacy_exports]
pub mod token;
#[legacy_exports]
pub mod comments;
#[legacy_exports]
pub mod attr;
#[legacy_exports]

/// Common routines shared by parser mods
#[legacy_exports]
pub mod common;

/// Functions dealing with operator precedence
#[legacy_exports]
pub mod prec;

/// Routines the parser uses to classify AST nodes
#[legacy_exports]
pub mod classify;

/// Reporting obsolete syntax
#[legacy_exports]
pub mod obsolete;

pub type parse_sess = @{
@ -50,14 +50,14 @@ pub enum ObsoleteSyntax {
    ObsoleteUnenforcedBound
}

impl ObsoleteSyntax: to_bytes::IterBytes {
pub impl ObsoleteSyntax: to_bytes::IterBytes {
    #[inline(always)]
    pure fn iter_bytes(&self, +lsb0: bool, f: to_bytes::Cb) {
        (*self as uint).iter_bytes(lsb0, f);
    }
}

impl Parser {
pub impl Parser {
    /// Reports an obsolete syntax non-fatal error.
    fn obsolete(sp: span, kind: ObsoleteSyntax) {
        let (kind_str, desc) = match kind {
@ -88,10 +88,6 @@ use core::vec::push;
use core::vec;
use std::map::HashMap;

export Parser;

export item_or_view_item, iovi_none, iovi_view_item, iovi_item;

enum restriction {
    UNRESTRICTED,
    RESTRICT_STMT_EXPR,
@ -108,7 +104,7 @@ enum class_contents { dtor_decl(blk, ~[attribute], codemap::span),
type arg_or_capture_item = Either<arg, capture_item>;
type item_info = (ident, item_, Option<~[attribute]>);

enum item_or_view_item {
pub enum item_or_view_item {
    iovi_none,
    iovi_item(@item),
    iovi_foreign_item(@foreign_item),
@ -180,8 +176,9 @@ pure fn maybe_append(+lhs: ~[attribute], rhs: Option<~[attribute]>)

/* ident is handled by common.rs */

fn Parser(sess: parse_sess, cfg: ast::crate_cfg,
          +rdr: reader) -> Parser {
pub fn Parser(sess: parse_sess,
              cfg: ast::crate_cfg,
              +rdr: reader) -> Parser {

    let tok0 = rdr.next_token();
    let span0 = tok0.sp;
@ -209,7 +206,7 @@ fn Parser(sess: parse_sess, cfg: ast::crate_cfg,
    }
}

struct Parser {
pub struct Parser {
    sess: parse_sess,
    cfg: crate_cfg,
    mut token: token::Token,
@ -235,7 +232,7 @@ struct Parser {
    drop {} /* do not copy the parser; its state is tied to outside state */
}

impl Parser {
pub impl Parser {
    fn bump() {
        self.last_span = self.span;
        let next = if self.buffer_start == self.buffer_end {
@ -813,7 +810,7 @@ impl Parser {
            self.bump();
            self.lit_from_token(tok)
        };
        spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }
        ast::spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }
    }

    fn parse_path_without_tps() -> @path {
@ -888,7 +885,7 @@ impl Parser {
                self.parse_seq_lt_gt(Some(token::COMMA),
                                     |p| p.parse_ty(false))
            } else {
                spanned {node: ~[], span: path.span}
                ast::spanned {node: ~[], span: path.span}
            }
        };

@ -930,15 +927,15 @@ impl Parser {
        @expr {
            id: self.get_id(),
            callee_id: self.get_id(),
            node: expr_mac(spanned {node: m, span: mk_sp(lo, hi)}),
            node: expr_mac(ast::spanned {node: m, span: mk_sp(lo, hi)}),
            span: mk_sp(lo, hi),
        }
    }

    fn mk_lit_u32(i: u32) -> @expr {
        let span = self.span;
        let lv_lit = @spanned { node: lit_uint(i as u64, ty_u32),
                                span: span };
        let lv_lit = @ast::spanned { node: lit_uint(i as u64, ty_u32),
                                     span: span };

        @expr {
            id: self.get_id(),
@ -1418,7 +1415,9 @@ impl Parser {
            hi = e.span.hi;
            // HACK: turn &[...] into a &-evec
            ex = match e.node {
              expr_vec(*) | expr_lit(@spanned {node: lit_str(_), span: _})
              expr_vec(*) | expr_lit(@ast::spanned {
                node: lit_str(_), span: _
              })
              if m == m_imm => {
                expr_vstore(e, expr_vstore_slice)
              }
@ -1441,7 +1440,8 @@ impl Parser {
              expr_vec(*) if m == m_mutbl =>
                expr_vstore(e, expr_vstore_mut_box),
              expr_vec(*) if m == m_imm => expr_vstore(e, expr_vstore_box),
              expr_lit(@spanned {node: lit_str(_), span: _}) if m == m_imm =>
              expr_lit(@ast::spanned {
                  node: lit_str(_), span: _}) if m == m_imm =>
                expr_vstore(e, expr_vstore_box),
              _ => expr_unary(box(m), e)
            };
@ -1453,7 +1453,8 @@ impl Parser {
            hi = e.span.hi;
            // HACK: turn ~[...] into a ~-evec
            ex = match e.node {
              expr_vec(*) | expr_lit(@spanned {node: lit_str(_), span: _})
              expr_vec(*) | expr_lit(@ast::spanned {
                node: lit_str(_), span: _})
              if m == m_imm => expr_vstore(e, expr_vstore_uniq),
              _ => expr_unary(uniq(m), e)
            };
@ -1808,7 +1809,7 @@ impl Parser {
            self.eat(token::COMMA);
        }

        let blk = spanned {
        let blk = ast::spanned {
            node: ast::blk_ {
                view_items: ~[],
                stmts: ~[],
@ -1957,7 +1958,7 @@ impl Parser {
            // HACK: parse @"..." as a literal of a vstore @str
            pat = match sub.node {
              pat_lit(e@@expr {
                node: expr_lit(@spanned {node: lit_str(_), span: _}), _
                node: expr_lit(@ast::spanned {node: lit_str(_), span: _}), _
              }) => {
                let vst = @expr {
                    id: self.get_id(),
@ -1977,7 +1978,7 @@ impl Parser {
            // HACK: parse ~"..." as a literal of a vstore ~str
            pat = match sub.node {
              pat_lit(e@@expr {
                node: expr_lit(@spanned {node: lit_str(_), span: _}), _
                node: expr_lit(@ast::spanned {node: lit_str(_), span: _}), _
              }) => {
                let vst = @expr {
                    id: self.get_id(),
@ -1999,7 +2000,8 @@ impl Parser {
            // HACK: parse &"..." as a literal of a borrowed str
            pat = match sub.node {
              pat_lit(e@@expr {
                node: expr_lit(@spanned {node: lit_str(_), span: _}), _
                node: expr_lit(@ast::spanned {
                    node: lit_str(_), span: _}), _
              }) => {
                let vst = @expr {
                    id: self.get_id(),
@ -2024,7 +2026,7 @@ impl Parser {
            if self.token == token::RPAREN {
                hi = self.span.hi;
                self.bump();
                let lit = @spanned {node: lit_nil, span: mk_sp(lo, hi)};
                let lit = @ast::spanned {node: lit_nil, span: mk_sp(lo, hi)};
                let expr = self.mk_expr(lo, hi, expr_lit(lit));
                pat = pat_lit(expr);
            } else {
@ -2400,7 +2402,7 @@ impl Parser {
                match self.token {
                    token::SEMI => {
                        self.bump();
                        stmts.push(@spanned {
                        stmts.push(@ast::spanned {
                            node: stmt_semi(e, stmt_id),
                            .. *stmt});
                    }
@ -2425,7 +2427,7 @@ impl Parser {
                        match self.token {
                            token::SEMI => {
                                self.bump();
                                stmts.push(@spanned {
                                stmts.push(@ast::spanned {
                                    node: stmt_mac((*m), true),
                                    .. *stmt});
                            }
@ -2967,10 +2969,10 @@ impl Parser {

        let actual_dtor = do the_dtor.map |dtor| {
            let (d_body, d_attrs, d_s) = *dtor;
            spanned { node: ast::struct_dtor_ { id: self.get_id(),
                                                attrs: d_attrs,
                                                self_id: self.get_id(),
                                                body: d_body},
            ast::spanned { node: ast::struct_dtor_ { id: self.get_id(),
                                                     attrs: d_attrs,
                                                     self_id: self.get_id(),
                                                     body: d_body},
                      span: d_s}};
        let _ = self.get_id(); // XXX: Workaround for crazy bug.
        let new_id = self.get_id();
@ -3472,10 +3474,10 @@ impl Parser {
        self.bump();
        let mut actual_dtor = do the_dtor.map |dtor| {
            let (d_body, d_attrs, d_s) = *dtor;
            spanned { node: ast::struct_dtor_ { id: self.get_id(),
                                                attrs: d_attrs,
                                                self_id: self.get_id(),
                                                body: d_body },
            ast::spanned { node: ast::struct_dtor_ { id: self.get_id(),
                                                     attrs: d_attrs,
                                                     self_id: self.get_id(),
                                                     body: d_body },
                           span: d_s }
        };

@ -3773,9 +3775,9 @@ impl Parser {
            _ => self.fatal(~"expected open delimiter")
        };
        let m = ast::mac_invoc_tt(pth, tts);
        let m: ast::mac = spanned { node: m,
                                    span: mk_sp(self.span.lo,
                                                self.span.hi) };
        let m: ast::mac = ast::spanned { node: m,
                                         span: mk_sp(self.span.lo,
                                                     self.span.hi) };
        let item_ = item_mac(m);
        return iovi_item(self.mk_item(lo, self.last_span.hi, id, item_,
                                      visibility, attrs));
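The parser.rs hunks show the other recurring change: with the old glob exports gone, record literals and patterns that used to read `spanned { ... }` now name `ast::spanned { ... }` explicitly. A minimal sketch of that qualification in the same pre-1.0 dialect, assuming a hypothetical helper (`mk_nil_lit` is illustrative only; `mk_sp` and `lit_nil` are the items used in the hunks above):

    // Before: @spanned {node: lit_nil, span: mk_sp(lo, hi)}
    fn mk_nil_lit(lo: BytePos, hi: BytePos) -> @ast::lit {
        @ast::spanned {node: ast::lit_nil, span: mk_sp(lo, hi)}
    }
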
@ -8,10 +8,6 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.

export as_prec;
export unop_prec;
export token_to_binop;

use ast::*;
use parse::token::*;
use parse::token::Token;
@ -19,19 +15,19 @@ use parse::token::Token;
use core::prelude::*;

/// Unary operators have higher precedence than binary
const unop_prec: uint = 100u;
pub const unop_prec: uint = 100u;

/**
 * Precedence of the `as` operator, which is a binary operator
 * but is not represented in the precedence table.
 */
const as_prec: uint = 11u;
pub const as_prec: uint = 11u;

/**
 * Maps a token to a record specifying the corresponding binary
 * operator and its precedence
 */
fn token_to_binop(tok: Token) -> Option<ast::binop> {
pub fn token_to_binop(tok: Token) -> Option<ast::binop> {
    match tok {
        BINOP(STAR) => Some(mul),
        BINOP(SLASH) => Some(div),
@ -25,7 +25,7 @@ use std::map::HashMap;

#[auto_encode]
#[auto_decode]
enum binop {
pub enum binop {
    PLUS,
    MINUS,
    STAR,
@ -40,7 +40,7 @@ enum binop {

#[auto_encode]
#[auto_decode]
enum Token {
pub enum Token {
    /* Expression-operator symbols. */
    EQ,
    LT,
@ -99,7 +99,7 @@ enum Token {
#[auto_encode]
#[auto_decode]
/// For interpolation during macro expansion.
enum nonterminal {
pub enum nonterminal {
    nt_item(@ast::item),
    nt_block(ast::blk),
    nt_stmt(@ast::stmt),
@ -112,7 +112,7 @@ enum nonterminal {
    nt_matchers(~[ast::matcher])
}

fn binop_to_str(o: binop) -> ~str {
pub fn binop_to_str(o: binop) -> ~str {
    match o {
      PLUS => ~"+",
      MINUS => ~"-",
@ -127,7 +127,7 @@ fn binop_to_str(o: binop) -> ~str {
    }
}

fn to_str(in: @ident_interner, t: Token) -> ~str {
pub fn to_str(in: @ident_interner, t: Token) -> ~str {
    match t {
      EQ => ~"=",
      LT => ~"<",
@ -222,7 +222,7 @@ fn to_str(in: @ident_interner, t: Token) -> ~str {
    }
}

pure fn can_begin_expr(t: Token) -> bool {
pub pure fn can_begin_expr(t: Token) -> bool {
    match t {
      LPAREN => true,
      LBRACE => true,
@ -254,7 +254,7 @@ pure fn can_begin_expr(t: Token) -> bool {
}

/// what's the opposite delimiter?
fn flip_delimiter(t: token::Token) -> token::Token {
pub fn flip_delimiter(t: token::Token) -> token::Token {
    match t {
      token::LPAREN => token::RPAREN,
      token::LBRACE => token::RBRACE,
@ -268,7 +268,7 @@ fn flip_delimiter(t: token::Token) -> token::Token {



fn is_lit(t: Token) -> bool {
pub fn is_lit(t: Token) -> bool {
    match t {
      LIT_INT(_, _) => true,
      LIT_UINT(_, _) => true,
@ -280,79 +280,80 @@ fn is_lit(t: Token) -> bool {
    }
}

pure fn is_ident(t: Token) -> bool {
pub pure fn is_ident(t: Token) -> bool {
    match t { IDENT(_, _) => true, _ => false }
}

pure fn is_ident_or_path(t: Token) -> bool {
pub pure fn is_ident_or_path(t: Token) -> bool {
    match t {
      IDENT(_, _) | INTERPOLATED(nt_path(*)) => true,
      _ => false
    }
}

pure fn is_plain_ident(t: Token) -> bool {
pub pure fn is_plain_ident(t: Token) -> bool {
    match t { IDENT(_, false) => true, _ => false }
}

pure fn is_bar(t: Token) -> bool {
pub pure fn is_bar(t: Token) -> bool {
    match t { BINOP(OR) | OROR => true, _ => false }
}

mod special_idents {
    #[legacy_exports];
pub mod special_idents {
    use ast::ident;
    const underscore : ident = ident { repr: 0u };
    const anon : ident = ident { repr: 1u };
    const dtor : ident = ident { repr: 2u }; // 'drop', but that's reserved
    const invalid : ident = ident { repr: 3u }; // ''
    const unary : ident = ident { repr: 4u };
    const not_fn : ident = ident { repr: 5u };
    const idx_fn : ident = ident { repr: 6u };
    const unary_minus_fn : ident = ident { repr: 7u };
    const clownshoes_extensions : ident = ident { repr: 8u };

    const self_ : ident = ident { repr: 9u }; // 'self'
    pub const underscore : ident = ident { repr: 0u };
    pub const anon : ident = ident { repr: 1u };
    pub const dtor : ident = ident { repr: 2u }; // 'drop', but that's
                                                 // reserved
    pub const invalid : ident = ident { repr: 3u }; // ''
    pub const unary : ident = ident { repr: 4u };
    pub const not_fn : ident = ident { repr: 5u };
    pub const idx_fn : ident = ident { repr: 6u };
    pub const unary_minus_fn : ident = ident { repr: 7u };
    pub const clownshoes_extensions : ident = ident { repr: 8u };

    pub const self_ : ident = ident { repr: 9u }; // 'self'

    /* for matcher NTs */
    const item : ident = ident { repr: 10u };
    const block : ident = ident { repr: 11u };
    const stmt : ident = ident { repr: 12u };
    const pat : ident = ident { repr: 13u };
    const expr : ident = ident { repr: 14u };
    const ty : ident = ident { repr: 15u };
    const ident : ident = ident { repr: 16u };
    const path : ident = ident { repr: 17u };
    const tt : ident = ident { repr: 18u };
    const matchers : ident = ident { repr: 19u };
    pub const item : ident = ident { repr: 10u };
    pub const block : ident = ident { repr: 11u };
    pub const stmt : ident = ident { repr: 12u };
    pub const pat : ident = ident { repr: 13u };
    pub const expr : ident = ident { repr: 14u };
    pub const ty : ident = ident { repr: 15u };
    pub const ident : ident = ident { repr: 16u };
    pub const path : ident = ident { repr: 17u };
    pub const tt : ident = ident { repr: 18u };
    pub const matchers : ident = ident { repr: 19u };

    const str : ident = ident { repr: 20u }; // for the type
    pub const str : ident = ident { repr: 20u }; // for the type

    /* outside of libsyntax */
    const ty_visitor : ident = ident { repr: 21u };
    const arg : ident = ident { repr: 22u };
    const descrim : ident = ident { repr: 23u };
    const clownshoe_abi : ident = ident { repr: 24u };
    const clownshoe_stack_shim : ident = ident { repr: 25u };
    const tydesc : ident = ident { repr: 26u };
    const literally_dtor : ident = ident { repr: 27u };
    const main : ident = ident { repr: 28u };
    const opaque : ident = ident { repr: 29u };
    const blk : ident = ident { repr: 30u };
    const static : ident = ident { repr: 31u };
    const intrinsic : ident = ident { repr: 32u };
    const clownshoes_foreign_mod: ident = ident { repr: 33 };
    const unnamed_field: ident = ident { repr: 34 };
    const c_abi: ident = ident { repr: 35 };
    const type_self: ident = ident { repr: 36 }; // `Self`
    pub const ty_visitor : ident = ident { repr: 21u };
    pub const arg : ident = ident { repr: 22u };
    pub const descrim : ident = ident { repr: 23u };
    pub const clownshoe_abi : ident = ident { repr: 24u };
    pub const clownshoe_stack_shim : ident = ident { repr: 25u };
    pub const tydesc : ident = ident { repr: 26u };
    pub const literally_dtor : ident = ident { repr: 27u };
    pub const main : ident = ident { repr: 28u };
    pub const opaque : ident = ident { repr: 29u };
    pub const blk : ident = ident { repr: 30u };
    pub const static : ident = ident { repr: 31u };
    pub const intrinsic : ident = ident { repr: 32u };
    pub const clownshoes_foreign_mod: ident = ident { repr: 33 };
    pub const unnamed_field: ident = ident { repr: 34 };
    pub const c_abi: ident = ident { repr: 35 };
    pub const type_self: ident = ident { repr: 36 }; // `Self`
}

struct ident_interner {
pub struct ident_interner {
    priv interner: Interner<@~str>,
}

impl ident_interner {
pub impl ident_interner {
    fn intern(val: @~str) -> ast::ident {
        ast::ident { repr: self.interner.intern(val) }
    }
@ -377,7 +378,7 @@ macro_rules! interner_key (
    (-3 as uint, 0u)))
)

fn mk_ident_interner() -> @ident_interner {
pub fn mk_ident_interner() -> @ident_interner {
    unsafe {
        match task::local_data::local_data_get(interner_key!()) {
            Some(interner) => *interner,
@ -438,7 +439,7 @@ fn mk_ident_interner() -> @ident_interner {

/* for when we don't care about the contents; doesn't interact with TLD or
   serialization */
fn mk_fake_ident_interner() -> @ident_interner {
pub fn mk_fake_ident_interner() -> @ident_interner {
    @ident_interner { interner: interner::mk::<@~str>() }
}

@ -451,7 +452,7 @@ fn mk_fake_ident_interner() -> @ident_interner {
 * appear as identifiers at all. Reserved keywords are not used anywhere in
 * the language and may not appear as identifiers.
 */
fn keyword_table() -> HashMap<~str, ()> {
pub fn keyword_table() -> HashMap<~str, ()> {
    let keywords = HashMap();
    for temporary_keyword_table().each_key |word| {
        keywords.insert(word, ());
@ -466,7 +467,7 @@ fn keyword_table() -> HashMap<~str, ()> {
}

/// Keywords that may be used as identifiers
fn temporary_keyword_table() -> HashMap<~str, ()> {
pub fn temporary_keyword_table() -> HashMap<~str, ()> {
    let words = HashMap();
    let keys = ~[
        ~"self", ~"static",
@ -478,7 +479,7 @@ fn temporary_keyword_table() -> HashMap<~str, ()> {
}

/// Full keywords. May not appear anywhere else.
fn strict_keyword_table() -> HashMap<~str, ()> {
pub fn strict_keyword_table() -> HashMap<~str, ()> {
    let words = HashMap();
    let keys = ~[
        ~"as", ~"assert",
@ -504,7 +505,7 @@ fn strict_keyword_table() -> HashMap<~str, ()> {
    words
}

fn reserved_keyword_table() -> HashMap<~str, ()> {
pub fn reserved_keyword_table() -> HashMap<~str, ()> {
    let words = HashMap();
    let keys = ~[
        ~"be"
@ -593,7 +593,7 @@ fn print_item(s: ps, &&item: @ast::item) {

fn print_enum_def(s: ps, enum_definition: ast::enum_def,
                  params: ~[ast::ty_param], ident: ast::ident,
                  span: ast::span, visibility: ast::visibility) {
                  span: codemap::span, visibility: ast::visibility) {
    let mut newtype =
        vec::len(enum_definition.variants) == 1u &&
        ident == enum_definition.variants[0].node.name;
@ -626,7 +626,7 @@ fn print_enum_def(s: ps, enum_definition: ast::enum_def,
    }
}

fn print_variants(s: ps, variants: ~[ast::variant], span: ast::span) {
fn print_variants(s: ps, variants: ~[ast::variant], span: codemap::span) {
    bopen(s);
    for variants.each |v| {
        space_if_not_bol(s);
@ -666,7 +666,7 @@ fn print_visibility(s: ps, vis: ast::visibility) {
}

fn print_struct(s: ps, struct_def: @ast::struct_def, tps: ~[ast::ty_param],
                ident: ast::ident, span: ast::span) {
                ident: ast::ident, span: codemap::span) {
    print_ident(s, ident);
    nbsp(s);
    print_type_params(s, tps);
@ -38,17 +38,11 @@ pub mod syntax {
    pub use parse;
}

#[legacy_exports]
mod attr;
#[legacy_exports]
mod diagnostic;
#[legacy_records]
mod codemap;
#[legacy_exports]
mod ast;
#[legacy_exports]
mod ast_util;
#[legacy_exports]
mod ast_map;
#[legacy_exports]
mod visit;