rustc: Number everything with an annotation
parent 4b5b96c511
commit 10c9266525
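This commit replaces the nullary ast.ann_none constructor with ann_none(uint) and threads the parser through the #fmt and #env syntax extensions and through every expression, pattern, and item builder, so that each freshly constructed AST node obtains its annotation from p.get_ann(). That method draws on a new next_ann_var counter stored in the parser, giving every annotation a distinct number that later passes (typestate, typeck) can key off. Below is a minimal sketch of the numbering pattern, written in present-day Rust purely for illustration; the names Ann, Parser, get_ann, and next_ann_var mirror the diff, while everything else is an assumption rather than the compiler's real code.

// Illustrative sketch only (not the actual rustc code): the parser owns a
// counter and every call to get_ann() returns a fresh, uniquely numbered
// placeholder annotation, matching the ann_none(uint) change in this commit.

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Ann {
    // Was a bare `ann_none`; now it records the node's unique number.
    None(u32),
    // Placeholder for the typed annotation that later passes fill in.
    #[allow(dead_code)]
    Type(u32),
}

struct Parser {
    next_ann_var: u32,
}

impl Parser {
    fn new() -> Parser {
        Parser { next_ann_var: 0 }
    }

    // Counterpart of the diff's `fn get_ann() -> ast.ann`:
    // hand out ann_none(next_ann_var) and bump the counter.
    fn get_ann(&mut self) -> Ann {
        let ann = Ann::None(self.next_ann_var);
        self.next_ann_var += 1;
        ann
    }
}

fn main() {
    let mut p = Parser::new();
    // Every freshly built AST node asks the parser for its own annotation,
    // so no two nodes ever share a number.
    let a = p.get_ann();
    let b = p.get_ann();
    assert_ne!(a, b);
    println!("{:?} {:?}", a, b);
}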
@@ -22,7 +22,7 @@ type ty_param = ident;
// Annotations added during successive passes.
tag ann {
ann_none;
ann_none(uint);
ann_type(middle.ty.t,
Option.t[vec[middle.ty.t]], /* ty param substs */
Option.t[@ts_ann]); /* pre- and postcondition for typestate */
@ -29,10 +29,10 @@ fn expand_syntax_ext(parser.parser p,
auto var = expr_to_str(p, args.(0));
alt (GenericOS.getenv(var)) {
case (Option.none[str]) {
ret make_new_str(sp, "");
ret make_new_str(p, sp, "");
}
case (Option.some[str](?s)) {
ret make_new_str(sp, s);
ret make_new_str(p, sp, s);
}
}
}
@ -54,15 +54,15 @@ fn expr_to_str(parser.parser p,
fail;
}

fn make_new_lit(common.span sp, ast.lit_ lit) -> @ast.expr {
fn make_new_lit(parser.parser p, common.span sp, ast.lit_ lit) -> @ast.expr {
auto sp_lit = @rec(node=lit, span=sp);
auto expr = ast.expr_lit(sp_lit, ast.ann_none);
auto expr = ast.expr_lit(sp_lit, p.get_ann());
ret @rec(node=expr, span=sp);
}

fn make_new_str(common.span sp, str s) -> @ast.expr {
fn make_new_str(parser.parser p, common.span sp, str s) -> @ast.expr {
auto lit = ast.lit_str(s);
ret make_new_lit(sp, lit);
ret make_new_lit(p, sp, lit);
}

//
@ -4,6 +4,7 @@
|
||||
* compiler syntax extension plugin interface.
|
||||
*/
|
||||
|
||||
import front.parser.parser;
|
||||
import util.common;
|
||||
|
||||
import std.Str;
|
||||
@ -45,11 +46,12 @@ import std.ExtFmt.CT.parse_fmt_string;
|
||||
|
||||
export expand_syntax_ext;
|
||||
|
||||
// FIXME: Need to thread parser through here to handle errors correctly
|
||||
fn expand_syntax_ext(vec[@ast.expr] args,
|
||||
fn expand_syntax_ext(parser p,
|
||||
vec[@ast.expr] args,
|
||||
Option.t[str] body) -> @ast.expr {
|
||||
|
||||
if (Vec.len[@ast.expr](args) == 0u) {
|
||||
// FIXME: Handle error correctly.
|
||||
log_err "malformed #fmt call";
|
||||
fail;
|
||||
}
|
||||
@ -62,7 +64,7 @@ fn expand_syntax_ext(vec[@ast.expr] args,
|
||||
auto pieces = parse_fmt_string(fmt);
|
||||
auto args_len = Vec.len[@ast.expr](args);
|
||||
auto fmt_args = Vec.slice[@ast.expr](args, 1u, args_len - 1u);
|
||||
ret pieces_to_expr(pieces, args);
|
||||
ret pieces_to_expr(p, pieces, args);
|
||||
}
|
||||
|
||||
fn expr_to_str(@ast.expr expr) -> str {
|
||||
@ -75,6 +77,7 @@ fn expr_to_str(@ast.expr expr) -> str {
|
||||
}
|
||||
}
|
||||
}
|
||||
// FIXME: Handle error correctly.
|
||||
log_err "malformed #fmt call";
|
||||
fail;
|
||||
}
|
||||
@ -83,59 +86,62 @@ fn expr_to_str(@ast.expr expr) -> str {
|
||||
// be factored out in common with other code that builds expressions.
|
||||
// FIXME: Probably should be using the parser's span functions
|
||||
// FIXME: Cleanup the naming of these functions
|
||||
fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
fn pieces_to_expr(parser p, vec[piece] pieces, vec[@ast.expr] args)
|
||||
-> @ast.expr {
|
||||
|
||||
fn make_new_lit(common.span sp, ast.lit_ lit) -> @ast.expr {
|
||||
fn make_new_lit(parser p, common.span sp, ast.lit_ lit) -> @ast.expr {
|
||||
auto sp_lit = @rec(node=lit, span=sp);
|
||||
auto expr = ast.expr_lit(sp_lit, ast.ann_none);
|
||||
auto expr = ast.expr_lit(sp_lit, p.get_ann());
|
||||
ret @rec(node=expr, span=sp);
|
||||
}
|
||||
|
||||
fn make_new_str(common.span sp, str s) -> @ast.expr {
|
||||
fn make_new_str(parser p, common.span sp, str s) -> @ast.expr {
|
||||
auto lit = ast.lit_str(s);
|
||||
ret make_new_lit(sp, lit);
|
||||
ret make_new_lit(p, sp, lit);
|
||||
}
|
||||
|
||||
fn make_new_int(common.span sp, int i) -> @ast.expr {
|
||||
fn make_new_int(parser p, common.span sp, int i) -> @ast.expr {
|
||||
auto lit = ast.lit_int(i);
|
||||
ret make_new_lit(sp, lit);
|
||||
ret make_new_lit(p, sp, lit);
|
||||
}
|
||||
|
||||
fn make_new_uint(common.span sp, uint u) -> @ast.expr {
|
||||
fn make_new_uint(parser p, common.span sp, uint u) -> @ast.expr {
|
||||
auto lit = ast.lit_uint(u);
|
||||
ret make_new_lit(sp, lit);
|
||||
ret make_new_lit(p, sp, lit);
|
||||
}
|
||||
|
||||
fn make_add_expr(common.span sp,
|
||||
fn make_add_expr(parser p, common.span sp,
|
||||
@ast.expr lhs, @ast.expr rhs) -> @ast.expr {
|
||||
auto binexpr = ast.expr_binary(ast.add, lhs, rhs, ast.ann_none);
|
||||
auto binexpr = ast.expr_binary(ast.add, lhs, rhs, p.get_ann());
|
||||
ret @rec(node=binexpr, span=sp);
|
||||
}
|
||||
|
||||
fn make_path_expr(common.span sp, vec[ast.ident] idents) -> @ast.expr {
|
||||
fn make_path_expr(parser p, common.span sp, vec[ast.ident] idents)
|
||||
-> @ast.expr {
|
||||
let vec[@ast.ty] types = vec();
|
||||
auto path = rec(idents=idents, types=types);
|
||||
auto sp_path = rec(node=path, span=sp);
|
||||
auto pathexpr = ast.expr_path(sp_path, none[ast.def], ast.ann_none);
|
||||
auto pathexpr = ast.expr_path(sp_path, none[ast.def], p.get_ann());
|
||||
auto sp_pathexpr = @rec(node=pathexpr, span=sp);
|
||||
ret sp_pathexpr;
|
||||
}
|
||||
|
||||
fn make_vec_expr(common.span sp, vec[@ast.expr] exprs) -> @ast.expr {
|
||||
auto vecexpr = ast.expr_vec(exprs, ast.imm, ast.ann_none);
|
||||
fn make_vec_expr(parser p, common.span sp, vec[@ast.expr] exprs)
|
||||
-> @ast.expr {
|
||||
auto vecexpr = ast.expr_vec(exprs, ast.imm, p.get_ann());
|
||||
auto sp_vecexpr = @rec(node=vecexpr, span=sp);
|
||||
ret sp_vecexpr;
|
||||
}
|
||||
|
||||
fn make_call(common.span sp, vec[ast.ident] fn_path,
|
||||
fn make_call(parser p, common.span sp, vec[ast.ident] fn_path,
|
||||
vec[@ast.expr] args) -> @ast.expr {
|
||||
auto pathexpr = make_path_expr(sp, fn_path);
|
||||
auto callexpr = ast.expr_call(pathexpr, args, ast.ann_none);
|
||||
auto pathexpr = make_path_expr(p, sp, fn_path);
|
||||
auto callexpr = ast.expr_call(pathexpr, args, p.get_ann());
|
||||
auto sp_callexpr = @rec(node=callexpr, span=sp);
|
||||
ret sp_callexpr;
|
||||
}
|
||||
|
||||
fn make_rec_expr(common.span sp,
|
||||
fn make_rec_expr(parser p, common.span sp,
|
||||
vec[tup(ast.ident, @ast.expr)] fields) -> @ast.expr {
|
||||
let vec[ast.field] astfields = vec();
|
||||
for (tup(ast.ident, @ast.expr) field in fields) {
|
||||
@ -149,7 +155,7 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
|
||||
auto recexpr = ast.expr_rec(astfields,
|
||||
Option.none[@ast.expr],
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
auto sp_recexpr = @rec(node=recexpr, span=sp);
|
||||
ret sp_recexpr;
|
||||
}
|
||||
@ -160,16 +166,17 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
ret vec("std", "ExtFmt", "RT", ident);
|
||||
}
|
||||
|
||||
fn make_rt_path_expr(common.span sp, str ident) -> @ast.expr {
|
||||
fn make_rt_path_expr(parser p, common.span sp, str ident) -> @ast.expr {
|
||||
auto path = make_path_vec(ident);
|
||||
ret make_path_expr(sp, path);
|
||||
ret make_path_expr(p, sp, path);
|
||||
}
|
||||
|
||||
// Produces an AST expression that represents a RT.conv record,
|
||||
// which tells the RT.conv* functions how to perform the conversion
|
||||
fn make_rt_conv_expr(common.span sp, &conv cnv) -> @ast.expr {
|
||||
fn make_rt_conv_expr(parser p, common.span sp, &conv cnv) -> @ast.expr {
|
||||
|
||||
fn make_flags(common.span sp, vec[flag] flags) -> @ast.expr {
|
||||
fn make_flags(parser p, common.span sp, vec[flag] flags)
|
||||
-> @ast.expr {
|
||||
let vec[@ast.expr] flagexprs = vec();
|
||||
for (flag f in flags) {
|
||||
auto fstr;
|
||||
@ -190,29 +197,29 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
fstr = "flag_alternate";
|
||||
}
|
||||
}
|
||||
flagexprs += vec(make_rt_path_expr(sp, fstr));
|
||||
flagexprs += vec(make_rt_path_expr(p, sp, fstr));
|
||||
}
|
||||
|
||||
// FIXME: 0-length vectors can't have their type inferred
|
||||
// through the rec that these flags are a member of, so
|
||||
// this is a hack placeholder flag
|
||||
if (Vec.len[@ast.expr](flagexprs) == 0u) {
|
||||
flagexprs += vec(make_rt_path_expr(sp, "flag_none"));
|
||||
flagexprs += vec(make_rt_path_expr(p, sp, "flag_none"));
|
||||
}
|
||||
|
||||
ret make_vec_expr(sp, flagexprs);
|
||||
ret make_vec_expr(p, sp, flagexprs);
|
||||
}
|
||||
|
||||
fn make_count(common.span sp, &count cnt) -> @ast.expr {
|
||||
fn make_count(parser p, common.span sp, &count cnt) -> @ast.expr {
|
||||
alt (cnt) {
|
||||
case (count_implied) {
|
||||
ret make_rt_path_expr(sp, "count_implied");
|
||||
ret make_rt_path_expr(p, sp, "count_implied");
|
||||
}
|
||||
case (count_is(?c)) {
|
||||
auto count_lit = make_new_int(sp, c);
|
||||
auto count_lit = make_new_int(p, sp, c);
|
||||
auto count_is_path = make_path_vec("count_is");
|
||||
auto count_is_args = vec(count_lit);
|
||||
ret make_call(sp, count_is_path, count_is_args);
|
||||
ret make_call(p, sp, count_is_path, count_is_args);
|
||||
}
|
||||
case (_) {
|
||||
log_err "not implemented";
|
||||
@ -221,7 +228,7 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
}
|
||||
}
|
||||
|
||||
fn make_ty(common.span sp, &ty t) -> @ast.expr {
|
||||
fn make_ty(parser p, common.span sp, &ty t) -> @ast.expr {
|
||||
auto rt_type;
|
||||
alt (t) {
|
||||
case (ty_hex(?c)) {
|
||||
@ -245,41 +252,43 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
}
|
||||
}
|
||||
|
||||
ret make_rt_path_expr(sp, rt_type);
|
||||
ret make_rt_path_expr(p, sp, rt_type);
|
||||
}
|
||||
|
||||
fn make_conv_rec(common.span sp,
|
||||
fn make_conv_rec(parser p,
|
||||
common.span sp,
|
||||
@ast.expr flags_expr,
|
||||
@ast.expr width_expr,
|
||||
@ast.expr precision_expr,
|
||||
@ast.expr ty_expr) -> @ast.expr {
|
||||
ret make_rec_expr(sp, vec(tup("flags", flags_expr),
|
||||
tup("width", width_expr),
|
||||
tup("precision", precision_expr),
|
||||
tup("ty", ty_expr)));
|
||||
ret make_rec_expr(p, sp, vec(tup("flags", flags_expr),
|
||||
tup("width", width_expr),
|
||||
tup("precision", precision_expr),
|
||||
tup("ty", ty_expr)));
|
||||
}
|
||||
|
||||
auto rt_conv_flags = make_flags(sp, cnv.flags);
|
||||
auto rt_conv_width = make_count(sp, cnv.width);
|
||||
auto rt_conv_precision = make_count(sp, cnv.precision);
|
||||
auto rt_conv_ty = make_ty(sp, cnv.ty);
|
||||
ret make_conv_rec(sp,
|
||||
auto rt_conv_flags = make_flags(p, sp, cnv.flags);
|
||||
auto rt_conv_width = make_count(p, sp, cnv.width);
|
||||
auto rt_conv_precision = make_count(p, sp, cnv.precision);
|
||||
auto rt_conv_ty = make_ty(p, sp, cnv.ty);
|
||||
ret make_conv_rec(p,
|
||||
sp,
|
||||
rt_conv_flags,
|
||||
rt_conv_width,
|
||||
rt_conv_precision,
|
||||
rt_conv_ty);
|
||||
}
|
||||
|
||||
fn make_conv_call(common.span sp, str conv_type,
|
||||
fn make_conv_call(parser p, common.span sp, str conv_type,
|
||||
&conv cnv, @ast.expr arg) -> @ast.expr {
|
||||
auto fname = "conv_" + conv_type;
|
||||
auto path = make_path_vec(fname);
|
||||
auto cnv_expr = make_rt_conv_expr(sp, cnv);
|
||||
auto cnv_expr = make_rt_conv_expr(p, sp, cnv);
|
||||
auto args = vec(cnv_expr, arg);
|
||||
ret make_call(arg.span, path, args);
|
||||
ret make_call(p, arg.span, path, args);
|
||||
}
|
||||
|
||||
fn make_new_conv(conv cnv, @ast.expr arg) -> @ast.expr {
|
||||
fn make_new_conv(parser p, conv cnv, @ast.expr arg) -> @ast.expr {
|
||||
|
||||
// FIXME: Extract all this validation into ExtFmt.CT
|
||||
fn is_signed_type(conv cnv) -> bool {
|
||||
@ -361,32 +370,32 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
|
||||
alt (cnv.ty) {
|
||||
case (ty_str) {
|
||||
ret make_conv_call(arg.span, "str", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "str", cnv, arg);
|
||||
}
|
||||
case (ty_int(?sign)) {
|
||||
alt (sign) {
|
||||
case (signed) {
|
||||
ret make_conv_call(arg.span, "int", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "int", cnv, arg);
|
||||
}
|
||||
case (unsigned) {
|
||||
ret make_conv_call(arg.span, "uint", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "uint", cnv, arg);
|
||||
}
|
||||
}
|
||||
}
|
||||
case (ty_bool) {
|
||||
ret make_conv_call(arg.span, "bool", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "bool", cnv, arg);
|
||||
}
|
||||
case (ty_char) {
|
||||
ret make_conv_call(arg.span, "char", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "char", cnv, arg);
|
||||
}
|
||||
case (ty_hex(_)) {
|
||||
ret make_conv_call(arg.span, "uint", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "uint", cnv, arg);
|
||||
}
|
||||
case (ty_bits) {
|
||||
ret make_conv_call(arg.span, "uint", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "uint", cnv, arg);
|
||||
}
|
||||
case (ty_octal) {
|
||||
ret make_conv_call(arg.span, "uint", cnv, arg);
|
||||
ret make_conv_call(p, arg.span, "uint", cnv, arg);
|
||||
}
|
||||
case (_) {
|
||||
log_err unsupported;
|
||||
@ -489,13 +498,13 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
|
||||
auto sp = args.(0).span;
|
||||
auto n = 0u;
|
||||
auto tmp_expr = make_new_str(sp, "");
|
||||
auto tmp_expr = make_new_str(p, sp, "");
|
||||
|
||||
for (piece p in pieces) {
|
||||
alt (p) {
|
||||
for (piece pc in pieces) {
|
||||
alt (pc) {
|
||||
case (piece_string(?s)) {
|
||||
auto s_expr = make_new_str(sp, s);
|
||||
tmp_expr = make_add_expr(sp, tmp_expr, s_expr);
|
||||
auto s_expr = make_new_str(p, sp, s);
|
||||
tmp_expr = make_add_expr(p, sp, tmp_expr, s_expr);
|
||||
}
|
||||
case (piece_conv(?conv)) {
|
||||
if (n >= Vec.len[@ast.expr](args)) {
|
||||
@ -509,8 +518,8 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
|
||||
|
||||
n += 1u;
|
||||
auto arg_expr = args.(n);
|
||||
auto c_expr = make_new_conv(conv, arg_expr);
|
||||
tmp_expr = make_add_expr(sp, tmp_expr, c_expr);
|
||||
auto c_expr = make_new_conv(p, conv, arg_expr);
|
||||
tmp_expr = make_add_expr(p, sp, tmp_expr, c_expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -42,6 +42,7 @@ state type parser =
fn get_reader() -> lexer.reader;
fn get_filemap() -> codemap.filemap;
fn get_chpos() -> uint;
fn get_ann() -> ast.ann;
};

fn new_parser(session.session sess,
@ -58,7 +59,8 @@ fn new_parser(session.session sess,
mutable restriction res,
ast.crate_num crate,
lexer.reader rdr,
vec[op_spec] precs)
vec[op_spec] precs,
mutable uint next_ann_var)
{
fn peek() -> token.token {
ret tok;
@ -126,6 +128,12 @@ fn new_parser(session.session sess,
}

fn get_chpos() -> uint {ret rdr.get_chpos();}

fn get_ann() -> ast.ann {
auto rv = ast.ann_none(next_ann_var);
next_ann_var += 1u;
ret rv;
}
}
auto ftype = SOURCE_FILE;
if (Str.ends_with(path, ".rc")) {
@ -140,7 +148,7 @@ fn new_parser(session.session sess,
auto npos = rdr.get_chpos();
ret stdio_parser(sess, env, ftype, lexer.next_token(rdr),
npos, npos, initial_def._1, UNRESTRICTED, initial_def._0,
rdr, prec_table());
rdr, prec_table(), 0u);
}

fn unexpected(parser p, token.token t) {
@ -678,14 +686,14 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
// FIXME: can only remove this sort of thing when both typestate and
|
||||
// alt-exhaustive-match checking are co-operating.
|
||||
auto lit = @spanned(lo, hi, ast.lit_nil);
|
||||
let ast.expr_ ex = ast.expr_lit(lit, ast.ann_none);
|
||||
let ast.expr_ ex = ast.expr_lit(lit, p.get_ann());
|
||||
|
||||
alt (p.peek()) {
|
||||
|
||||
case (token.IDENT(_)) {
|
||||
auto pth = parse_path(p, MINIMAL);
|
||||
hi = pth.span.hi;
|
||||
ex = ast.expr_path(pth, none[ast.def], ast.ann_none);
|
||||
ex = ast.expr_path(pth, none[ast.def], p.get_ann());
|
||||
}
|
||||
|
||||
case (token.LPAREN) {
|
||||
@ -696,7 +704,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
p.bump();
|
||||
auto lit = @spanned(lo, hi, ast.lit_nil);
|
||||
ret @spanned(lo, hi,
|
||||
ast.expr_lit(lit, ast.ann_none));
|
||||
ast.expr_lit(lit, p.get_ann()));
|
||||
}
|
||||
case (_) { /* fall through */ }
|
||||
}
|
||||
@ -720,7 +728,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
some(token.COMMA),
|
||||
pf, p);
|
||||
hi = es.span.hi;
|
||||
ex = ast.expr_tup(es.node, ast.ann_none);
|
||||
ex = ast.expr_tup(es.node, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.VEC) {
|
||||
@ -733,7 +741,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
auto es = parse_seq_to_end[@ast.expr](token.RPAREN,
|
||||
some(token.COMMA),
|
||||
pf, hi, p);
|
||||
ex = ast.expr_vec(es, mut, ast.ann_none);
|
||||
ex = ast.expr_vec(es, mut, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.REC) {
|
||||
@ -768,7 +776,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
|
||||
}
|
||||
|
||||
ex = ast.expr_rec(fields, base, ast.ann_none);
|
||||
ex = ast.expr_rec(fields, base, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.BIND) {
|
||||
@ -792,7 +800,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
some(token.COMMA),
|
||||
pf, p);
|
||||
hi = es.span.hi;
|
||||
ex = ast.expr_bind(e, es.node, ast.ann_none);
|
||||
ex = ast.expr_bind(e, es.node, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.POUND) {
|
||||
@ -810,28 +818,28 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
|
||||
case (token.FAIL) {
|
||||
p.bump();
|
||||
ex = ast.expr_fail(ast.ann_none);
|
||||
ex = ast.expr_fail(p.get_ann());
|
||||
}
|
||||
|
||||
case (token.LOG) {
|
||||
p.bump();
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_log(1, e, ast.ann_none);
|
||||
ex = ast.expr_log(1, e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.LOG_ERR) {
|
||||
p.bump();
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_log(0, e, ast.ann_none);
|
||||
ex = ast.expr_log(0, e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.ASSERT) {
|
||||
p.bump();
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_assert(e, ast.ann_none);
|
||||
ex = ast.expr_assert(e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.CHECK) {
|
||||
@ -841,43 +849,43 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
but the typechecker enforces that. */
|
||||
auto e = parse_expr(p);
|
||||
auto hi = e.span.hi;
|
||||
ex = ast.expr_check(e, ast.ann_none);
|
||||
ex = ast.expr_check(e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.RET) {
|
||||
p.bump();
|
||||
alt (p.peek()) {
|
||||
case (token.SEMI) {
|
||||
ex = ast.expr_ret(none[@ast.expr], ast.ann_none);
|
||||
ex = ast.expr_ret(none[@ast.expr], p.get_ann());
|
||||
}
|
||||
case (_) {
|
||||
auto e = parse_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_ret(some[@ast.expr](e), ast.ann_none);
|
||||
ex = ast.expr_ret(some[@ast.expr](e), p.get_ann());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
case (token.BREAK) {
|
||||
p.bump();
|
||||
ex = ast.expr_break(ast.ann_none);
|
||||
ex = ast.expr_break(p.get_ann());
|
||||
}
|
||||
|
||||
case (token.CONT) {
|
||||
p.bump();
|
||||
ex = ast.expr_cont(ast.ann_none);
|
||||
ex = ast.expr_cont(p.get_ann());
|
||||
}
|
||||
|
||||
case (token.PUT) {
|
||||
p.bump();
|
||||
alt (p.peek()) {
|
||||
case (token.SEMI) {
|
||||
ex = ast.expr_put(none[@ast.expr], ast.ann_none);
|
||||
ex = ast.expr_put(none[@ast.expr], p.get_ann());
|
||||
}
|
||||
case (_) {
|
||||
auto e = parse_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_put(some[@ast.expr](e), ast.ann_none);
|
||||
ex = ast.expr_put(some[@ast.expr](e), p.get_ann());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -888,7 +896,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
// FIXME: Is this the right place for this check?
|
||||
if /*check*/ (ast.is_call_expr(e)) {
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_be(e, ast.ann_none);
|
||||
ex = ast.expr_be(e, p.get_ann());
|
||||
}
|
||||
else {
|
||||
p.err("Non-call expression in tail call");
|
||||
@ -900,7 +908,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
expect(p, token.LPAREN);
|
||||
expect(p, token.RPAREN);
|
||||
hi = p.get_hi_pos();
|
||||
ex = ast.expr_port(ast.ann_none);
|
||||
ex = ast.expr_port(p.get_ann());
|
||||
}
|
||||
|
||||
case (token.CHAN) {
|
||||
@ -909,7 +917,7 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
auto e = parse_expr(p);
|
||||
hi = e.span.hi;
|
||||
expect(p, token.RPAREN);
|
||||
ex = ast.expr_chan(e, ast.ann_none);
|
||||
ex = ast.expr_chan(e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.SELF) {
|
||||
@ -925,13 +933,13 @@ fn parse_bottom_expr(parser p) -> @ast.expr {
|
||||
some(token.COMMA),
|
||||
pf, p);
|
||||
hi = es.span.hi;
|
||||
ex = ast.expr_call(f, es.node, ast.ann_none);
|
||||
ex = ast.expr_call(f, es.node, p.get_ann());
|
||||
}
|
||||
|
||||
case (_) {
|
||||
auto lit = parse_lit(p);
|
||||
hi = lit.span.hi;
|
||||
ex = ast.expr_lit(@lit, ast.ann_none);
|
||||
ex = ast.expr_lit(@lit, p.get_ann());
|
||||
}
|
||||
}
|
||||
|
||||
@ -954,17 +962,17 @@ fn expand_syntax_ext(parser p, ast.span sp,
|
||||
assert (Vec.len[ast.ident](path.node.idents) > 0u);
|
||||
auto extname = path.node.idents.(0);
|
||||
if (Str.eq(extname, "fmt")) {
|
||||
auto expanded = extfmt.expand_syntax_ext(args, body);
|
||||
auto expanded = extfmt.expand_syntax_ext(p, args, body);
|
||||
auto newexpr = ast.expr_ext(path, args, body,
|
||||
expanded,
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
|
||||
ret newexpr;
|
||||
} else if (Str.eq(extname, "env")) {
|
||||
auto expanded = extenv.expand_syntax_ext(p, sp, args, body);
|
||||
auto newexpr = ast.expr_ext(path, args, body,
|
||||
expanded,
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
|
||||
ret newexpr;
|
||||
} else {
|
||||
@ -992,7 +1000,7 @@ fn extend_expr_by_ident(parser p, uint lo, uint hi,
|
||||
}
|
||||
}
|
||||
case (_) {
|
||||
e_ = ast.expr_field(e, i, ast.ann_none);
|
||||
e_ = ast.expr_field(e, i, p.get_ann());
|
||||
}
|
||||
}
|
||||
ret @spanned(lo, hi, e_);
|
||||
@ -1002,7 +1010,7 @@ fn parse_self_method(parser p) -> @ast.expr {
|
||||
auto sp = p.get_span();
|
||||
let ast.ident f_name = parse_ident(p);
|
||||
auto hi = p.get_span();
|
||||
ret @rec(node=ast.expr_self_method(f_name, ast.ann_none), span=sp);
|
||||
ret @rec(node=ast.expr_self_method(f_name, p.get_ann()), span=sp);
|
||||
}
|
||||
|
||||
fn parse_dot_or_call_expr(parser p) -> @ast.expr {
|
||||
@ -1023,7 +1031,7 @@ fn parse_dot_or_call_expr(parser p) -> @ast.expr {
|
||||
some(token.COMMA),
|
||||
pf, p);
|
||||
hi = es.span.hi;
|
||||
auto e_ = ast.expr_call(e, es.node, ast.ann_none);
|
||||
auto e_ = ast.expr_call(e, es.node, p.get_ann());
|
||||
e = @spanned(lo, hi, e_);
|
||||
}
|
||||
}
|
||||
@ -1043,7 +1051,7 @@ fn parse_dot_or_call_expr(parser p) -> @ast.expr {
|
||||
auto ix = parse_expr(p);
|
||||
hi = ix.span.hi;
|
||||
expect(p, token.RPAREN);
|
||||
auto e_ = ast.expr_index(e, ix, ast.ann_none);
|
||||
auto e_ = ast.expr_index(e, ix, p.get_ann());
|
||||
e = @spanned(lo, hi, e_);
|
||||
}
|
||||
|
||||
@ -1074,7 +1082,7 @@ fn parse_prefix_expr(parser p) -> @ast.expr {
|
||||
// FIXME: can only remove this sort of thing when both typestate and
|
||||
// alt-exhaustive-match checking are co-operating.
|
||||
auto lit = @spanned(lo, lo, ast.lit_nil);
|
||||
let ast.expr_ ex = ast.expr_lit(lit, ast.ann_none);
|
||||
let ast.expr_ ex = ast.expr_lit(lit, p.get_ann());
|
||||
|
||||
alt (p.peek()) {
|
||||
|
||||
@ -1082,14 +1090,14 @@ fn parse_prefix_expr(parser p) -> @ast.expr {
|
||||
p.bump();
|
||||
auto e = parse_prefix_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_unary(ast.not, e, ast.ann_none);
|
||||
ex = ast.expr_unary(ast.not, e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.TILDE) {
|
||||
p.bump();
|
||||
auto e = parse_prefix_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_unary(ast.bitnot, e, ast.ann_none);
|
||||
ex = ast.expr_unary(ast.bitnot, e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.BINOP(?b)) {
|
||||
@ -1098,14 +1106,14 @@ fn parse_prefix_expr(parser p) -> @ast.expr {
|
||||
p.bump();
|
||||
auto e = parse_prefix_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_unary(ast.neg, e, ast.ann_none);
|
||||
ex = ast.expr_unary(ast.neg, e, p.get_ann());
|
||||
}
|
||||
|
||||
case (token.STAR) {
|
||||
p.bump();
|
||||
auto e = parse_prefix_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_unary(ast.deref, e, ast.ann_none);
|
||||
ex = ast.expr_unary(ast.deref, e, p.get_ann());
|
||||
}
|
||||
|
||||
case (_) {
|
||||
@ -1119,7 +1127,7 @@ fn parse_prefix_expr(parser p) -> @ast.expr {
|
||||
auto m = parse_mutability(p);
|
||||
auto e = parse_prefix_expr(p);
|
||||
hi = e.span.hi;
|
||||
ex = ast.expr_unary(ast.box(m), e, ast.ann_none);
|
||||
ex = ast.expr_unary(ast.box(m), e, p.get_ann());
|
||||
}
|
||||
|
||||
case (_) {
|
||||
@ -1175,7 +1183,7 @@ fn parse_more_binops(parser p, @ast.expr lhs, int min_prec)
|
||||
alt (cur.tok) {
|
||||
case (token.AS) {
|
||||
auto rhs = parse_ty(p);
|
||||
auto _as = ast.expr_cast(lhs, rhs, ast.ann_none);
|
||||
auto _as = ast.expr_cast(lhs, rhs, p.get_ann());
|
||||
auto span = @spanned(lhs.span.lo, rhs.span.hi, _as);
|
||||
ret parse_more_binops(p, span, min_prec);
|
||||
}
|
||||
@ -1183,7 +1191,7 @@ fn parse_more_binops(parser p, @ast.expr lhs, int min_prec)
|
||||
auto rhs = parse_more_binops(p, parse_prefix_expr(p),
|
||||
cur.prec);
|
||||
auto bin = ast.expr_binary(cur.op, lhs, rhs,
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
auto span = @spanned(lhs.span.lo, rhs.span.hi, bin);
|
||||
ret parse_more_binops(p, span, min_prec);
|
||||
}
|
||||
@ -1201,7 +1209,7 @@ fn parse_assign_expr(parser p) -> @ast.expr {
|
||||
p.bump();
|
||||
auto rhs = parse_expr(p);
|
||||
ret @spanned(lo, rhs.span.hi,
|
||||
ast.expr_assign(lhs, rhs, ast.ann_none));
|
||||
ast.expr_assign(lhs, rhs, p.get_ann()));
|
||||
}
|
||||
case (token.BINOPEQ(?op)) {
|
||||
p.bump();
|
||||
@ -1221,19 +1229,19 @@ fn parse_assign_expr(parser p) -> @ast.expr {
|
||||
case (token.ASR) { aop = ast.asr; }
|
||||
}
|
||||
ret @spanned(lo, rhs.span.hi,
|
||||
ast.expr_assign_op(aop, lhs, rhs, ast.ann_none));
|
||||
ast.expr_assign_op(aop, lhs, rhs, p.get_ann()));
|
||||
}
|
||||
case (token.SEND) {
|
||||
p.bump();
|
||||
auto rhs = parse_expr(p);
|
||||
ret @spanned(lo, rhs.span.hi,
|
||||
ast.expr_send(lhs, rhs, ast.ann_none));
|
||||
ast.expr_send(lhs, rhs, p.get_ann()));
|
||||
}
|
||||
case (token.LARROW) {
|
||||
p.bump();
|
||||
auto rhs = parse_expr(p);
|
||||
ret @spanned(lo, rhs.span.hi,
|
||||
ast.expr_recv(lhs, rhs, ast.ann_none));
|
||||
ast.expr_recv(lhs, rhs, p.get_ann()));
|
||||
}
|
||||
case (_) { /* fall through */ }
|
||||
}
|
||||
@ -1259,7 +1267,7 @@ fn parse_if_expr(parser p) -> @ast.expr {
|
||||
case (_) { /* fall through */ }
|
||||
}
|
||||
|
||||
ret @spanned(lo, hi, ast.expr_if(cond, thn, els, ast.ann_none));
|
||||
ret @spanned(lo, hi, ast.expr_if(cond, thn, els, p.get_ann()));
|
||||
}
|
||||
|
||||
fn parse_else_expr(parser p) -> @ast.expr {
|
||||
@ -1271,7 +1279,7 @@ fn parse_else_expr(parser p) -> @ast.expr {
|
||||
case (_) {
|
||||
auto blk = parse_block(p);
|
||||
ret @spanned(blk.span.lo, blk.span.hi,
|
||||
ast.expr_block(blk, ast.ann_none));
|
||||
ast.expr_block(blk, p.get_ann()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1310,10 +1318,10 @@ fn parse_for_expr(parser p) -> @ast.expr {
|
||||
auto hi = body.span.hi;
|
||||
if (is_each) {
|
||||
ret @spanned(lo, hi, ast.expr_for_each(decl, seq, body,
|
||||
ast.ann_none));
|
||||
p.get_ann()));
|
||||
} else {
|
||||
ret @spanned(lo, hi, ast.expr_for(decl, seq, body,
|
||||
ast.ann_none));
|
||||
p.get_ann()));
|
||||
}
|
||||
}
|
||||
|
||||
@ -1327,7 +1335,7 @@ fn parse_while_expr(parser p) -> @ast.expr {
|
||||
expect(p, token.RPAREN);
|
||||
auto body = parse_block(p);
|
||||
auto hi = body.span.hi;
|
||||
ret @spanned(lo, hi, ast.expr_while(cond, body, ast.ann_none));
|
||||
ret @spanned(lo, hi, ast.expr_while(cond, body, p.get_ann()));
|
||||
}
|
||||
|
||||
fn parse_do_while_expr(parser p) -> @ast.expr {
|
||||
@ -1340,7 +1348,7 @@ fn parse_do_while_expr(parser p) -> @ast.expr {
|
||||
auto cond = parse_expr(p);
|
||||
expect(p, token.RPAREN);
|
||||
auto hi = cond.span.hi;
|
||||
ret @spanned(lo, hi, ast.expr_do_while(body, cond, ast.ann_none));
|
||||
ret @spanned(lo, hi, ast.expr_do_while(body, cond, p.get_ann()));
|
||||
}
|
||||
|
||||
fn parse_alt_expr(parser p) -> @ast.expr {
|
||||
@ -1375,7 +1383,7 @@ fn parse_alt_expr(parser p) -> @ast.expr {
|
||||
case (token.ELSE) {
|
||||
p.bump();
|
||||
auto hi = p.get_hi_pos();
|
||||
auto pat = @spanned(lo, hi, ast.pat_wild(ast.ann_none));
|
||||
auto pat = @spanned(lo, hi, ast.pat_wild(p.get_ann()));
|
||||
auto index = index_arm(pat);
|
||||
auto block = parse_block(p);
|
||||
arms += vec(rec(pat=pat, block=block, index=index));
|
||||
@ -1390,7 +1398,7 @@ fn parse_alt_expr(parser p) -> @ast.expr {
|
||||
auto hi = p.get_hi_pos();
|
||||
p.bump();
|
||||
|
||||
auto expr = ast.expr_alt(discriminant, arms, ast.ann_none);
|
||||
auto expr = ast.expr_alt(discriminant, arms, p.get_ann());
|
||||
ret @spanned(lo, hi, expr);
|
||||
}
|
||||
|
||||
@ -1411,7 +1419,7 @@ fn parse_spawn_expr(parser p) -> @ast.expr {
|
||||
Option.none[str],
|
||||
fn_expr,
|
||||
es.node,
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
ret @spanned(lo, hi, spawn_expr);
|
||||
}
|
||||
|
||||
@ -1432,7 +1440,7 @@ fn parse_expr_inner(parser p) -> @ast.expr {
|
||||
case (token.LBRACE) {
|
||||
auto blk = parse_block(p);
|
||||
ret @spanned(blk.span.lo, blk.span.hi,
|
||||
ast.expr_block(blk, ast.ann_none));
|
||||
ast.expr_block(blk, p.get_ann()));
|
||||
}
|
||||
case (token.IF) {
|
||||
ret parse_if_expr(p);
|
||||
@ -1485,7 +1493,7 @@ fn parse_pat(parser p) -> @ast.pat {
|
||||
alt (p.peek()) {
|
||||
case (token.UNDERSCORE) {
|
||||
p.bump();
|
||||
pat = ast.pat_wild(ast.ann_none);
|
||||
pat = ast.pat_wild(p.get_ann());
|
||||
}
|
||||
case (token.QUES) {
|
||||
p.bump();
|
||||
@ -1494,7 +1502,7 @@ fn parse_pat(parser p) -> @ast.pat {
|
||||
hi = p.get_hi_pos();
|
||||
p.bump();
|
||||
pat = ast.pat_bind(p.get_str(id), p.next_def_id(),
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
}
|
||||
case (?tok) {
|
||||
p.err("expected identifier after '?' in pattern but " +
|
||||
@ -1520,12 +1528,12 @@ fn parse_pat(parser p) -> @ast.pat {
|
||||
}
|
||||
|
||||
pat = ast.pat_tag(tag_path, args, none[ast.variant_def],
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
}
|
||||
case (_) {
|
||||
auto lit = parse_lit(p);
|
||||
hi = lit.span.hi;
|
||||
pat = ast.pat_lit(@lit, ast.ann_none);
|
||||
pat = ast.pat_lit(@lit, p.get_ann());
|
||||
}
|
||||
}
|
||||
|
||||
@ -1541,7 +1549,7 @@ fn parse_local_full(&Option.t[@ast.ty] tyopt,
|
||||
ident = ident,
|
||||
init = init,
|
||||
id = p.next_def_id(),
|
||||
ann = ast.ann_none);
|
||||
ann = p.get_ann());
|
||||
}
|
||||
|
||||
fn parse_typed_local(parser p) -> @ast.local {
|
||||
@ -1589,13 +1597,13 @@ fn parse_source_stmt(parser p) -> @ast.stmt {
|
||||
auto decl = parse_let(p);
|
||||
auto hi = p.get_span();
|
||||
ret @spanned
|
||||
(lo, decl.span.hi, ast.stmt_decl(decl, ast.ann_none));
|
||||
(lo, decl.span.hi, ast.stmt_decl(decl, p.get_ann()));
|
||||
}
|
||||
|
||||
case (token.AUTO) {
|
||||
auto decl = parse_auto(p);
|
||||
auto hi = p.get_span();
|
||||
ret @spanned(lo, decl.span.hi, ast.stmt_decl(decl, ast.ann_none));
|
||||
ret @spanned(lo, decl.span.hi, ast.stmt_decl(decl, p.get_ann()));
|
||||
}
|
||||
|
||||
case (_) {
|
||||
@ -1604,13 +1612,13 @@ fn parse_source_stmt(parser p) -> @ast.stmt {
|
||||
auto i = parse_item(p);
|
||||
auto hi = i.span.hi;
|
||||
auto decl = @spanned(lo, hi, ast.decl_item(i));
|
||||
ret @spanned(lo, hi, ast.stmt_decl(decl, ast.ann_none));
|
||||
ret @spanned(lo, hi, ast.stmt_decl(decl, p.get_ann()));
|
||||
|
||||
} else {
|
||||
// Remainder are line-expr stmts.
|
||||
auto e = parse_expr(p);
|
||||
auto hi = p.get_span();
|
||||
ret @spanned(lo, e.span.hi, ast.stmt_expr(e, ast.ann_none));
|
||||
ret @spanned(lo, e.span.hi, ast.stmt_expr(e, p.get_ann()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1618,12 +1626,13 @@ fn parse_source_stmt(parser p) -> @ast.stmt {
|
||||
fail;
|
||||
}
|
||||
|
||||
fn index_block(vec[@ast.stmt] stmts, Option.t[@ast.expr] expr) -> ast.block_ {
|
||||
fn index_block(parser p, vec[@ast.stmt] stmts, Option.t[@ast.expr] expr)
|
||||
-> ast.block_ {
|
||||
auto index = new_str_hash[ast.block_index_entry]();
|
||||
for (@ast.stmt s in stmts) {
|
||||
ast.index_stmt(index, s);
|
||||
}
|
||||
ret rec(stmts=stmts, expr=expr, index=index, a=ast.ann_none);
|
||||
ret rec(stmts=stmts, expr=expr, index=index, a=p.get_ann());
|
||||
}
|
||||
|
||||
fn index_arm(@ast.pat pat) -> hashmap[ast.ident,ast.def_id] {
|
||||
@ -1761,7 +1770,7 @@ fn parse_block(parser p) -> ast.block {
|
||||
auto hi = p.get_hi_pos();
|
||||
p.bump();
|
||||
|
||||
auto bloc = index_block(stmts, expr);
|
||||
auto bloc = index_block(p, stmts, expr);
|
||||
ret spanned[ast.block_](lo, hi, bloc);
|
||||
}
|
||||
|
||||
@ -1827,7 +1836,7 @@ fn parse_item_fn_or_iter(parser p, ast.purity purity) -> @ast.item {
|
||||
auto t = parse_fn_header(p);
|
||||
auto f = parse_fn(p, proto, purity);
|
||||
auto item = ast.item_fn(t._0, f, t._1,
|
||||
p.next_def_id(), ast.ann_none);
|
||||
p.next_def_id(), p.get_ann());
|
||||
ret @spanned(lo, f.body.span.hi, item);
|
||||
}
|
||||
|
||||
@ -1836,7 +1845,7 @@ fn parse_obj_field(parser p) -> ast.obj_field {
|
||||
auto mut = parse_mutability(p); // TODO: store this, use it in typeck
|
||||
auto ty = parse_ty(p);
|
||||
auto ident = parse_ident(p);
|
||||
ret rec(ty=ty, ident=ident, id=p.next_def_id(), ann=ast.ann_none);
|
||||
ret rec(ty=ty, ident=ident, id=p.next_def_id(), ann=p.get_ann());
|
||||
}
|
||||
|
||||
fn parse_method(parser p) -> @ast.method {
|
||||
@ -1845,7 +1854,7 @@ fn parse_method(parser p) -> @ast.method {
|
||||
auto ident = parse_ident(p);
|
||||
auto f = parse_fn(p, proto, ast.impure_fn);
|
||||
auto meth = rec(ident=ident, meth=f,
|
||||
id=p.next_def_id(), ann=ast.ann_none);
|
||||
id=p.next_def_id(), ann=p.get_ann());
|
||||
ret @spanned(lo, f.body.span.hi, meth);
|
||||
}
|
||||
|
||||
@ -1864,7 +1873,7 @@ fn parse_dtor(parser p) -> @ast.method {
|
||||
let ast.method_ m = rec(ident="drop",
|
||||
meth=f,
|
||||
id=p.next_def_id(),
|
||||
ann=ast.ann_none);
|
||||
ann=p.get_ann());
|
||||
ret @spanned(lo, f.body.span.hi, m);
|
||||
}
|
||||
|
||||
@ -1904,7 +1913,7 @@ fn parse_item_obj(parser p, ast.layer lyr) -> @ast.item {
|
||||
dtor=dtor);
|
||||
|
||||
auto odid = rec(ty=p.next_def_id(), ctor=p.next_def_id());
|
||||
auto item = ast.item_obj(ident, ob, ty_params, odid, ast.ann_none);
|
||||
auto item = ast.item_obj(ident, ob, ty_params, odid, p.get_ann());
|
||||
|
||||
ret @spanned(lo, hi, item);
|
||||
}
|
||||
@ -1932,7 +1941,7 @@ fn parse_item_const(parser p) -> @ast.item {
|
||||
auto e = parse_expr(p);
|
||||
auto hi = p.get_hi_pos();
|
||||
expect(p, token.SEMI);
|
||||
auto item = ast.item_const(id, ty, e, p.next_def_id(), ast.ann_none);
|
||||
auto item = ast.item_const(id, ty, e, p.next_def_id(), p.get_ann());
|
||||
ret @spanned(lo, hi, item);
|
||||
}
|
||||
|
||||
@ -1970,7 +1979,7 @@ fn parse_item_native_fn(parser p) -> @ast.native_item {
|
||||
expect(p, token.SEMI);
|
||||
auto item = ast.native_item_fn(t._0, link_name, decl,
|
||||
t._1, p.next_def_id(),
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
ret @spanned(lo, hi, item);
|
||||
}
|
||||
|
||||
@ -2075,7 +2084,7 @@ fn parse_item_type(parser p) -> @ast.item {
|
||||
auto ty = parse_ty(p);
|
||||
auto hi = p.get_hi_pos();
|
||||
expect(p, token.SEMI);
|
||||
auto item = ast.item_ty(t._1, ty, tps, p.next_def_id(), ast.ann_none);
|
||||
auto item = ast.item_ty(t._1, ty, tps, p.next_def_id(), p.get_ann());
|
||||
ret @spanned(t._0, hi, item);
|
||||
}
|
||||
|
||||
@ -2114,7 +2123,7 @@ fn parse_item_tag(parser p) -> @ast.item {
|
||||
|
||||
auto id = p.next_def_id();
|
||||
auto vr = rec(name=p.get_str(name), args=args,
|
||||
id=id, ann=ast.ann_none);
|
||||
id=id, ann=p.get_ann());
|
||||
variants += vec(spanned[ast.variant_](vlo, vhi, vr));
|
||||
}
|
||||
case (token.RBRACE) { /* empty */ }
|
||||
@ -2128,7 +2137,7 @@ fn parse_item_tag(parser p) -> @ast.item {
|
||||
p.bump();
|
||||
|
||||
auto item = ast.item_tag(id, variants, ty_params, p.next_def_id(),
|
||||
ast.ann_none);
|
||||
p.get_ann());
|
||||
ret @spanned(lo, hi, item);
|
||||
}
|
||||
|
||||
|
@ -3090,7 +3090,7 @@ fn node_ann_type(@crate_ctxt cx, &ast.ann a) -> ty.t {
|
||||
|
||||
fn node_ann_ty_params(&ast.ann a) -> vec[ty.t] {
|
||||
alt (a) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log_err "missing type annotation";
|
||||
fail;
|
||||
}
|
||||
@ -4106,7 +4106,7 @@ fn lval_generic_fn(@block_ctxt cx,
|
||||
auto monoty;
|
||||
let vec[ty.t] tys;
|
||||
alt (ann) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
cx.fcx.lcx.ccx.sess.bug("no type annotation for path!");
|
||||
fail;
|
||||
}
|
||||
|
@ -1557,7 +1557,7 @@ fn eq_ty(&t a, &t b) -> bool {
|
||||
|
||||
fn ann_to_type(&ast.ann ann) -> t {
|
||||
alt (ann) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log_err "ann_to_type() called on node with no type";
|
||||
fail;
|
||||
}
|
||||
@ -1569,7 +1569,7 @@ fn ann_to_type(&ast.ann ann) -> t {
|
||||
|
||||
fn ann_to_type_params(&ast.ann ann) -> vec[t] {
|
||||
alt (ann) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log_err "ann_to_type_params() called on node with no type params";
|
||||
fail;
|
||||
}
|
||||
@ -1591,7 +1591,7 @@ fn ann_to_monotype(ctxt cx, ast.ann a) -> t {
|
||||
// TODO: Refactor to use recursive pattern matching when we're more
|
||||
// confident that it works.
|
||||
alt (a) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log_err "ann_to_monotype() called on expression with no type!";
|
||||
fail;
|
||||
}
|
||||
@ -1905,7 +1905,7 @@ fn expr_ty_params_and_ty(&ctxt cx, &@ast.expr expr) -> tup(vec[t], t) {
|
||||
fn expr_has_ty_params(&@ast.expr expr) -> bool {
|
||||
// FIXME: Rewrite using complex patterns when they're trustworthy.
|
||||
alt (expr_ann(expr)) {
|
||||
case (ast.ann_none) { fail; }
|
||||
case (ast.ann_none(_)) { fail; }
|
||||
case (ast.ann_type(_, ?tps_opt, _)) {
|
||||
ret !Option.is_none[vec[t]](tps_opt);
|
||||
}
|
||||
|
@ -1396,7 +1396,7 @@ mod Pushdown {
|
||||
// provided by the programmer.
|
||||
auto ty_params_opt;
|
||||
alt (ann) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log_err "pushdown_expr(): no type annotation for " +
|
||||
"path expr; did you pass it to check_expr()?";
|
||||
fail;
|
||||
@ -1558,7 +1558,7 @@ fn resolve_local_types_in_annotation(&Option.t[@fn_ctxt] env, &ast.ann ann)
|
||||
|
||||
auto fcx = Option.get[@fn_ctxt](env);
|
||||
alt (ann) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log "warning: no type for expression";
|
||||
ret ann;
|
||||
}
|
||||
@ -2506,7 +2506,7 @@ fn check_expr(&@fn_ctxt fcx, &@ast.expr expr) -> @ast.expr {
|
||||
Vec.push[field](fields_t, rec(ident=f.ident, mt=expr_mt));
|
||||
}
|
||||
|
||||
auto ann = ast.ann_none;
|
||||
auto ann;
|
||||
|
||||
alt (base) {
|
||||
case (none[@ast.expr]) {
|
||||
@ -2717,7 +2717,7 @@ fn check_decl_local(&@fn_ctxt fcx, &@ast.decl decl) -> @ast.decl {
|
||||
|
||||
auto a_res = local.ann;
|
||||
alt (a_res) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
a_res = triv_ann(t);
|
||||
}
|
||||
case (_) {}
|
||||
|
@ -392,7 +392,7 @@ fn mk_f_to_fn_info(@ast.crate c) -> fn_info_map {
|
||||
/**** Helpers ****/
|
||||
fn ann_to_ts_ann(ann a, uint nv) -> ts_ann {
|
||||
alt (a) {
|
||||
case (ann_none) { ret empty_ann(nv); }
|
||||
case (ann_none(_)) { ret empty_ann(nv); }
|
||||
case (ann_type(_,_,?t)) {
|
||||
alt (t) {
|
||||
/* Kind of inconsistent. empty_ann()s everywhere
|
||||
@ -406,7 +406,7 @@ fn ann_to_ts_ann(ann a, uint nv) -> ts_ann {
|
||||
|
||||
fn ann_to_ts_ann_fail(ann a) -> Option.t[@ts_ann] {
|
||||
alt (a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("ann_to_ts_ann_fail: didn't expect ann_none here");
|
||||
fail;
|
||||
}
|
||||
@ -418,7 +418,7 @@ fn ann_to_ts_ann_fail(ann a) -> Option.t[@ts_ann] {
|
||||
|
||||
fn ann_to_ts_ann_fail_more(ann a) -> @ts_ann {
|
||||
alt (a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("ann_to_ts_ann_fail: didn't expect ann_none here");
|
||||
fail;
|
||||
}
|
||||
@ -450,7 +450,7 @@ fn stmt_to_ann(&stmt s) -> Option.t[@ts_ann] {
|
||||
/* fails if e has no annotation */
|
||||
fn expr_states(@expr e) -> pre_and_post_state {
|
||||
alt (expr_ann(e)) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log_err "expr_pp: the impossible happened (no annotation)";
|
||||
fail;
|
||||
}
|
||||
@ -471,7 +471,7 @@ fn expr_states(@expr e) -> pre_and_post_state {
|
||||
/* fails if e has no annotation */
|
||||
fn expr_pp(@expr e) -> pre_and_post {
|
||||
alt (expr_ann(e)) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log_err "expr_pp: the impossible happened (no annotation)";
|
||||
fail;
|
||||
}
|
||||
@ -505,7 +505,7 @@ fn stmt_pp(&stmt s) -> pre_and_post {
|
||||
/* FIXME: factor out code in the following two functions (block_ts_ann) */
|
||||
fn block_pp(&block b) -> pre_and_post {
|
||||
alt (b.node.a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log_err "block_pp: the impossible happened (no ann)";
|
||||
fail;
|
||||
}
|
||||
@ -525,7 +525,7 @@ fn block_pp(&block b) -> pre_and_post {
|
||||
|
||||
fn block_states(&block b) -> pre_and_post_state {
|
||||
alt (b.node.a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log_err "block_pp: the impossible happened (no ann)";
|
||||
fail;
|
||||
}
|
||||
@ -605,7 +605,7 @@ fn block_poststate(&block b) -> poststate {
|
||||
/* returns a new annotation where the pre_and_post is p */
|
||||
fn with_pp(ann a, pre_and_post p) -> ann {
|
||||
alt (a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("with_pp: the impossible happened");
|
||||
fail; /* shouldn't happen b/c code is typechecked */
|
||||
}
|
||||
@ -1300,7 +1300,7 @@ fn set_prestate_ann(@ann a, prestate pre) -> bool {
|
||||
assert (! is_none[@ts_ann](ts_a));
|
||||
ret set_prestate(get[@ts_ann](ts_a), pre);
|
||||
}
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("set_prestate_ann: expected an ann_type here");
|
||||
fail;
|
||||
}
|
||||
@ -1314,7 +1314,7 @@ fn extend_prestate_ann(ann a, prestate pre) -> bool {
|
||||
assert (! is_none[@ts_ann](ts_a));
|
||||
ret extend_prestate((get[@ts_ann](ts_a)).states.prestate, pre);
|
||||
}
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("set_prestate_ann: expected an ann_type here");
|
||||
fail;
|
||||
}
|
||||
@ -1327,7 +1327,7 @@ fn set_poststate_ann(ann a, poststate post) -> bool {
|
||||
assert (! is_none[@ts_ann](ts_a));
|
||||
ret set_poststate(get[@ts_ann](ts_a), post);
|
||||
}
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("set_poststate_ann: expected an ann_type here");
|
||||
fail;
|
||||
}
|
||||
@ -1340,7 +1340,7 @@ fn extend_poststate_ann(ann a, poststate post) -> bool {
|
||||
assert (! is_none[@ts_ann](ts_a));
|
||||
ret extend_poststate((*get[@ts_ann](ts_a)).states.poststate, post);
|
||||
}
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("set_poststate_ann: expected an ann_type here");
|
||||
fail;
|
||||
}
|
||||
@ -1360,7 +1360,7 @@ fn set_pre_and_post(&ann a, pre_and_post pp) -> () {
|
||||
set_precondition(t, pp.precondition);
|
||||
set_postcondition(t, pp.postcondition);
|
||||
}
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log_err("set_pre_and_post: expected an ann_type here");
|
||||
fail;
|
||||
}
|
||||
@ -2042,7 +2042,7 @@ fn check_obj_state(&fn_info_map f_info_map, &vec[obj_field] fields,
|
||||
|
||||
fn init_ann(&fn_info fi, &ann a) -> ann {
|
||||
alt (a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
// log("init_ann: shouldn't see ann_none");
|
||||
// fail;
|
||||
log("warning: init_ann: saw ann_none");
|
||||
@ -2058,7 +2058,7 @@ fn init_ann(&fn_info fi, &ann a) -> ann {
|
||||
|
||||
fn init_blank_ann(&() ignore, &ann a) -> ann {
|
||||
alt (a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
// log("init_blank_ann: shouldn't see ann_none");
|
||||
//fail;
|
||||
log("warning: init_blank_ann: saw ann_none");
|
||||
@ -2074,7 +2074,7 @@ fn init_block(&fn_info fi, &span sp, &block_ b) -> block {
|
||||
log("init_block:");
|
||||
log_block(respan(sp, b));
|
||||
alt(b.a) {
|
||||
case (ann_none) {
|
||||
case (ann_none(_)) {
|
||||
log("init_block: shouldn't see ann_none");
|
||||
fail;
|
||||
}
|
||||
|
@ -161,7 +161,7 @@ fn log_block_err(&ast.block b) -> () {
|
||||
|
||||
fn log_ann(&ast.ann a) -> () {
|
||||
alt (a) {
|
||||
case (ast.ann_none) {
|
||||
case (ast.ann_none(_)) {
|
||||
log("ann_none");
|
||||
}
|
||||
case (ast.ann_type(_,_,_)) {