Change module dereference syntax from . to ::
This will need to be a snapshot.
This commit is contained in:
parent
dd9b6dccb4
commit
6510f1ce7c
@ -312,7 +312,7 @@ fn resolve_path(vec[ast.ident] path, vec[u8] data) -> resolve_result {
|
||||
fn eq_item(vec[u8] data, str s) -> bool {
|
||||
ret Str.eq(Str.unsafe_from_bytes(data), s);
|
||||
}
|
||||
auto s = Str.connect(path, ".");
|
||||
auto s = Str.connect(path, "::");
|
||||
auto md = EBML.new_doc(data);
|
||||
auto paths = EBML.get_doc(md, metadata.tag_paths);
|
||||
auto eqer = bind eq_item(_, s);
|
||||
|
@ -634,7 +634,6 @@ fn next_token(reader rdr) -> token.token {
|
||||
|
||||
alt (c) {
|
||||
// One-byte tokens.
|
||||
case (':') { rdr.bump(); ret token.COLON; }
|
||||
case ('?') { rdr.bump(); ret token.QUES; }
|
||||
case (';') { rdr.bump(); ret token.SEMI; }
|
||||
case (',') { rdr.bump(); ret token.COMMA; }
|
||||
@ -648,7 +647,16 @@ fn next_token(reader rdr) -> token.token {
|
||||
case ('@') { rdr.bump(); ret token.AT; }
|
||||
case ('#') { rdr.bump(); ret token.POUND; }
|
||||
case ('~') { rdr.bump(); ret token.TILDE; }
|
||||
|
||||
case (':') {
|
||||
rdr.bump();
|
||||
if (rdr.curr() == ':') {
|
||||
rdr.bump();
|
||||
ret token.MOD_SEP;
|
||||
}
|
||||
else {
|
||||
ret token.COLON;
|
||||
};
|
||||
}
|
||||
|
||||
// Multi-byte tokens.
|
||||
case ('=') {
|
||||
|
@ -322,7 +322,7 @@ fn parse_constr_arg(parser p) -> @ast.constr_arg {
|
||||
|
||||
fn parse_ty_constr(parser p) -> @ast.constr {
|
||||
auto lo = p.get_lo_pos();
|
||||
auto path = parse_path(p, GREEDY);
|
||||
auto path = parse_path(p);
|
||||
auto pf = parse_constr_arg;
|
||||
auto args = parse_seq[@ast.constr_arg](token.LPAREN,
|
||||
token.RPAREN,
|
||||
@ -472,7 +472,7 @@ fn parse_ty(parser p) -> @ast.ty {
|
||||
}
|
||||
|
||||
case (token.IDENT(_)) {
|
||||
auto path = parse_path(p, GREEDY);
|
||||
auto path = parse_path(p);
|
||||
t = ast.ty_path(path, p.get_ann());
|
||||
hi = path.span.hi;
|
||||
}
|
||||
@ -603,11 +603,6 @@ fn is_ident(token.token t) -> bool {
|
||||
ret false;
|
||||
}
|
||||
|
||||
tag greed {
|
||||
GREEDY;
|
||||
MINIMAL;
|
||||
}
|
||||
|
||||
fn parse_ty_args(parser p, uint hi) ->
|
||||
util.common.spanned[vec[@ast.ty]] {
|
||||
|
||||
@ -623,33 +618,23 @@ fn parse_ty_args(parser p, uint hi) ->
|
||||
ret spanned(hi, hi, v);
|
||||
}
|
||||
|
||||
fn parse_path(parser p, greed g) -> ast.path {
|
||||
fn parse_path(parser p) -> ast.path {
|
||||
|
||||
auto lo = p.get_lo_pos();
|
||||
auto hi = lo;
|
||||
|
||||
let vec[ast.ident] ids = vec();
|
||||
let bool more = true;
|
||||
while (more) {
|
||||
while (true) {
|
||||
alt (p.peek()) {
|
||||
case (token.IDENT(?i)) {
|
||||
hi = p.get_hi_pos();
|
||||
ids += vec(p.get_str(i));
|
||||
p.bump();
|
||||
if (p.peek() == token.DOT) {
|
||||
if (g == GREEDY) {
|
||||
p.bump();
|
||||
assert (is_ident(p.peek()));
|
||||
} else {
|
||||
more = false;
|
||||
}
|
||||
} else {
|
||||
more = false;
|
||||
}
|
||||
}
|
||||
case (_) {
|
||||
more = false;
|
||||
if (p.peek() == token.MOD_SEP) {
|
||||
p.bump();
|
||||
} else { break; }
|
||||
}
|
||||
case (_) { break; }
|
||||
}
|
||||
}
|
||||
|
||||
@ -690,7 +675,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
alt (p.peek()) {
|
||||
|
||||
case (token.IDENT(_)) {
|
||||
auto pth = parse_path(p, MINIMAL);
|
||||
auto pth = parse_path(p);
|
||||
hi = pth.span.hi;
|
||||
ex = ast.expr_path(pth, p.get_ann());
|
||||
}
|
||||
@ -804,7 +789,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
|
||||
case (token.POUND) {
|
||||
p.bump();
|
||||
auto pth = parse_path(p, GREEDY);
|
||||
auto pth = parse_path(p);
|
||||
auto pf = parse_expr;
|
||||
auto es = parse_seq[@ast.expr](token.LPAREN,
|
||||
token.RPAREN,
|
||||
@ -980,31 +965,6 @@ fn expand_syntax_ext(parser p, ast.span sp,
|
||||
}
|
||||
}
|
||||
|
||||
fn extend_expr_by_ident(parser p, uint lo, uint hi,
|
||||
@ast.expr e, ast.ident i) -> @ast.expr {
|
||||
auto e_ = e.node;
|
||||
alt (e.node) {
|
||||
case (ast.expr_path(?pth, ?ann)) {
|
||||
if (Vec.len[@ast.ty](pth.node.types) == 0u) {
|
||||
auto idents_ = pth.node.idents;
|
||||
idents_ += vec(i);
|
||||
auto tys = parse_ty_args(p, hi);
|
||||
auto pth_ = spanned(pth.span.lo, tys.span.hi,
|
||||
rec(idents=idents_,
|
||||
types=tys.node));
|
||||
e_ = ast.expr_path(pth_, ann);
|
||||
ret @spanned(pth_.span.lo, pth_.span.hi, e_);
|
||||
} else {
|
||||
e_ = ast.expr_field(e, i, ann);
|
||||
}
|
||||
}
|
||||
case (_) {
|
||||
e_ = ast.expr_field(e, i, p.get_ann());
|
||||
}
|
||||
}
|
||||
ret @spanned(lo, hi, e_);
|
||||
}
|
||||
|
||||
fn parse_self_method(parser p) -> @ast.expr {
|
||||
auto sp = p.get_span();
|
||||
let ast.ident f_name = parse_ident(p);
|
||||
@ -1042,7 +1002,9 @@ fn parse_dot_or_call_expr(parser p) -> @ast.expr {
|
||||
case (token.IDENT(?i)) {
|
||||
hi = p.get_hi_pos();
|
||||
p.bump();
|
||||
e = extend_expr_by_ident(p, lo, hi, e, p.get_str(i));
|
||||
auto e_ = ast.expr_field(e, p.get_str(i),
|
||||
p.get_ann());
|
||||
e = @spanned(lo, hi, e_);
|
||||
}
|
||||
|
||||
case (token.LPAREN) {
|
||||
@ -1404,7 +1366,7 @@ fn parse_spawn_expr(parser p) -> @ast.expr {
|
||||
expect(p, token.SPAWN);
|
||||
|
||||
// FIXME: Parse domain and name
|
||||
|
||||
// FIXME: why no full expr?
|
||||
auto fn_expr = parse_bottom_expr(p);
|
||||
auto pf = parse_expr;
|
||||
auto es = parse_seq[@ast.expr](token.LPAREN,
|
||||
@ -1509,7 +1471,7 @@ fn parse_pat(parser p) -> @ast.pat {
|
||||
}
|
||||
}
|
||||
case (token.IDENT(_)) {
|
||||
auto tag_path = parse_path(p, GREEDY);
|
||||
auto tag_path = parse_path(p);
|
||||
hi = tag_path.span.hi;
|
||||
|
||||
let vec[@ast.pat] args;
|
||||
@ -2249,7 +2211,7 @@ fn parse_rest_import_name(parser p, ast.ident first,
|
||||
auto lo = p.get_lo_pos();
|
||||
let vec[ast.ident] identifiers = vec(first);
|
||||
while (p.peek() != token.SEMI) {
|
||||
expect(p, token.DOT);
|
||||
expect(p, token.MOD_SEP);
|
||||
auto i = parse_ident(p);
|
||||
identifiers += vec(i);
|
||||
}
|
||||
@ -2377,7 +2339,7 @@ fn parse_crate_directive(parser p) -> ast.crate_directive
|
||||
alt (p.peek()) {
|
||||
case (token.AUTH) {
|
||||
p.bump();
|
||||
auto n = parse_path(p, GREEDY);
|
||||
auto n = parse_path(p);
|
||||
expect(p, token.EQ);
|
||||
auto a = parse_auth(p);
|
||||
auto hi = p.get_hi_pos();
|
||||
|
@ -47,6 +47,7 @@ tag token {
|
||||
COMMA;
|
||||
SEMI;
|
||||
COLON;
|
||||
MOD_SEP;
|
||||
QUES;
|
||||
RARROW;
|
||||
SEND;
|
||||
@ -218,6 +219,7 @@ fn to_str(lexer.reader r, token t) -> str {
|
||||
case (COMMA) { ret ","; }
|
||||
case (SEMI) { ret ";"; }
|
||||
case (COLON) { ret ":"; }
|
||||
case (MOD_SEP) { ret "::"; }
|
||||
case (QUES) { ret "?"; }
|
||||
case (RARROW) { ret "->"; }
|
||||
case (SEND) { ret "<|"; }
|
||||
|
@ -301,7 +301,7 @@ fn add_to_index(&EBML.writer ebml_w,
|
||||
&mutable vec[tup(str, uint)] index,
|
||||
&str name) {
|
||||
auto full_path = path + vec(name);
|
||||
index += vec(tup(Str.connect(full_path, "."), ebml_w.writer.tell()));
|
||||
index += vec(tup(Str.connect(full_path, "::"), ebml_w.writer.tell()));
|
||||
}
|
||||
|
||||
fn encode_native_module_item_paths(&EBML.writer ebml_w,
|
||||
|
@ -170,6 +170,7 @@ fn resolve_imports(&env e) {
|
||||
}
|
||||
}
|
||||
|
||||
// FIXME this should use walk (will need to add walk_arm)
|
||||
fn resolve_names(&@env e, &ast.crate c) -> @ast.crate {
|
||||
auto fld = @rec(fold_pat_tag = bind fold_pat_tag(e,_,_,_,_,_),
|
||||
fold_expr_path = bind fold_expr_path(e,_,_,_,_),
|
||||
@ -277,54 +278,11 @@ fn resolve_import(&env e, &@ast.view_item it, &list[scope] sc) {
|
||||
}
|
||||
}
|
||||
|
||||
// We received a path expression of the following form:
|
||||
//
|
||||
// a.b.c.d
|
||||
//
|
||||
// Somewhere along this path there might be a split from a path-expr
|
||||
// to a runtime field-expr. For example:
|
||||
//
|
||||
// 'a' could be the name of a variable in the local scope
|
||||
// and 'b.c.d' could be a field-sequence inside it.
|
||||
//
|
||||
// Or:
|
||||
//
|
||||
// 'a.b' could be a module path to a constant record, and 'c.d'
|
||||
// could be a field within it.
|
||||
//
|
||||
// Our job here is to figure out what the prefix of 'a.b.c.d' is that
|
||||
// corresponds to a static binding-name (a module or slot, with no type info)
|
||||
// and split that off as the 'primary' expr_path, with secondary expr_field
|
||||
// expressions tacked on the end.
|
||||
|
||||
fn fold_expr_path(@env e, &list[scope] sc, &span sp, &ast.path p, &ann a)
|
||||
-> @ast.expr {
|
||||
auto idents = p.node.idents;
|
||||
auto n_idents = Vec.len(idents);
|
||||
assert (n_idents != 0u);
|
||||
|
||||
auto dcur = lookup_in_scope_strict(*e, sc, sp, idents.(0), ns_value);
|
||||
auto i = 1u;
|
||||
while (i < n_idents) {
|
||||
if (!is_module(dcur)) { break; }
|
||||
dcur = lookup_in_mod_strict(*e, dcur, sp, idents.(i), ns_value,
|
||||
outside);
|
||||
i += 1u;
|
||||
}
|
||||
if (is_module(dcur)) {
|
||||
e.sess.span_err(sp, "can't refer to a module as a first-class value");
|
||||
}
|
||||
|
||||
p = rec(node=rec(idents=Vec.slice(idents, 0u, i) with p.node) with p);
|
||||
auto ex = @fold.respan(sp, ast.expr_path(p, a));
|
||||
e.def_map.insert(ast.ann_tag(a), dcur);
|
||||
// FIXME this duplicates the ann. Is that a problem? How will we deal with
|
||||
// splitting this into path and field exprs when we don't fold?
|
||||
while (i < n_idents) {
|
||||
ex = @fold.respan(sp, ast.expr_field(ex, idents.(i), a));
|
||||
i += 1u;
|
||||
}
|
||||
ret ex;
|
||||
auto df = lookup_path_strict(*e, sc, sp, p.node.idents, ns_value);
|
||||
e.def_map.insert(ast.ann_tag(a), df);
|
||||
ret @fold.respan(sp, ast.expr_path(p, a));
|
||||
}
|
||||
|
||||
|
||||
@ -337,7 +295,7 @@ fn fold_pat_tag(@env e, &list[scope] sc, &span sp, &ast.path p,
|
||||
}
|
||||
case (_) {
|
||||
e.sess.span_err(sp, "not a tag variant: " +
|
||||
Str.connect(p.node.idents, "."));
|
||||
Str.connect(p.node.idents, "::"));
|
||||
fail;
|
||||
}
|
||||
}
|
||||
@ -383,7 +341,7 @@ fn lookup_path_strict(&env e, &list[scope] sc, &span sp, vec[ident] idents,
|
||||
i += 1u;
|
||||
}
|
||||
if (is_module(dcur)) {
|
||||
e.sess.span_err(sp, Str.connect(idents, ".") +
|
||||
e.sess.span_err(sp, Str.connect(idents, "::") +
|
||||
" is a module, not a " + ns_name(ns));
|
||||
}
|
||||
ret dcur;
|
||||
|
@ -5439,7 +5439,7 @@ fn load_if_immediate(&@block_ctxt cx, ValueRef v, &ty.t t) -> ValueRef {
|
||||
|
||||
fn trans_log(int lvl, &@block_ctxt cx, &@ast.expr e) -> result {
|
||||
auto lcx = cx.fcx.lcx;
|
||||
auto modname = Str.connect(lcx.module_path, ".");
|
||||
auto modname = Str.connect(lcx.module_path, "::");
|
||||
auto global;
|
||||
if (lcx.ccx.module_data.contains_key(modname)) {
|
||||
global = lcx.ccx.module_data.get(modname);
|
||||
|
@ -484,7 +484,7 @@ fn cname(&ctxt cx, &t typ) -> Option.t[str] { ret cx.ts.others.(typ).cname; }
|
||||
// Stringification
|
||||
|
||||
fn path_to_str(&ast.path pth) -> str {
|
||||
auto result = Str.connect(pth.node.idents, ".");
|
||||
auto result = Str.connect(pth.node.idents, "::");
|
||||
if (Vec.len[@ast.ty](pth.node.types) > 0u) {
|
||||
auto f = pretty.pprust.ty_to_str;
|
||||
result += "[";
|
||||
|
@ -751,7 +751,7 @@ fn print_path(ps s, ast.path path) {
|
||||
auto first = true;
|
||||
for (str id in path.node.idents) {
|
||||
if (first) {first = false;}
|
||||
else {wrd(s.s, ".");}
|
||||
else {wrd(s.s, "::");}
|
||||
wrd(s.s, id);
|
||||
}
|
||||
if (Vec.len[@ast.ty](path.node.types) > 0u) {
|
||||
@ -856,7 +856,7 @@ fn print_view_item(ps s, @ast.view_item item) {
|
||||
auto first = true;
|
||||
for (str elt in ids) {
|
||||
if (first) {first = false;}
|
||||
else {wrd(s.s, ".");}
|
||||
else {wrd(s.s, "::");}
|
||||
wrd(s.s, elt);
|
||||
}
|
||||
}
|
||||
|
Loading…
x
Reference in New Issue
Block a user