Move to single-uint file-position representation.

This makes positions cheaper to pass around. There is now a table (see
front/codemap.rs) that is needed to translate such a uint back into an
actual filename/line/col location.

Also cleans up the span building in the parser a bit.
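
As an illustration of the new scheme (this sketch is not part of the diff
below; the file name and the character positions are made-up values), a span
now carries two uints and the codemap turns them back into locations:

    // Sketch only. Assumes `import front.codemap;` and `import std._vec;`.
    auto cm = codemap.new_codemap();
    auto fm = codemap.new_filemap("example.rs", 0u);
    _vec.push[codemap.filemap](cm.files, fm);
    // The lexer calls next_line() each time it passes a newline; pretend
    // line 2 of example.rs starts at character position 40.
    codemap.next_line(fm, 40u);
    // A span is now just rec(uint lo, uint hi) into the crate-wide
    // character space (see the util/common.rs hunk at the end of the diff).
    auto sp = rec(lo=55u, hi=60u);
    // Recovering a human-readable location goes through the codemap.
    auto loc = codemap.lookup_pos(cm, sp.lo);
    log #fmt("%s:%u:%u", loc.filename, loc.line, loc.col); // example.rs:2:15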
Marijn Haverbeke 2011-04-08 18:44:20 +02:00
parent 094d31f5e4
commit 1af3174fe3
11 changed files with 414 additions and 352 deletions


@ -62,7 +62,7 @@ impure fn compile_input(session.session sess,
bool parse_only,
vec[str] library_search_paths) {
auto def = tup(0, 0);
auto p = parser.new_parser(sess, env, def, input);
auto p = parser.new_parser(sess, env, def, input, 0u);
auto crate = parse_input(sess, p, input);
if (parse_only) {ret;}
crate = creader.read_crates(sess, crate, library_search_paths);
@ -79,7 +79,7 @@ impure fn pretty_print_input(session.session sess,
eval.env env,
str input) {
auto def = tup(0, 0);
auto p = front.parser.new_parser(sess, env, def, input);
auto p = front.parser.new_parser(sess, env, def, input, 0u);
auto crate = front.parser.parse_crate_from_source_file(p);
pretty.pprust.print_file(crate.node.module, input, std.io.stdout());
}
@ -125,7 +125,8 @@ impure fn main(vec[str] args) {
auto crate_cache = common.new_int_hash[session.crate_metadata]();
auto target_crate_num = 0;
auto sess = session.session(target_crate_num, target_cfg, crate_cache);
auto sess = session.session(target_crate_num, target_cfg, crate_cache,
front.codemap.new_codemap());
let option.t[str] input_file = none[str];
let option.t[str] output_file = none[str];


@ -1,4 +1,5 @@
import front.ast;
import front.codemap;
import util.common.span;
import util.common.ty_mach;
import std._uint;
@ -25,7 +26,8 @@ type cfg = rec(os os,
type crate_metadata = vec[u8];
obj session(ast.crate_num cnum, cfg targ,
map.hashmap[int, crate_metadata] crates) {
map.hashmap[int, crate_metadata] crates,
codemap.codemap cm) {
fn get_targ_cfg() -> cfg {
ret targ;
@ -36,10 +38,12 @@ obj session(ast.crate_num cnum, cfg targ,
}
fn span_err(span sp, str msg) {
auto lo = codemap.lookup_pos(cm, sp.lo);
auto hi = codemap.lookup_pos(cm, sp.hi);
log #fmt("%s:%u:%u:%u:%u: error: %s",
sp.filename,
sp.lo.line, sp.lo.col,
sp.hi.line, sp.hi.col,
lo.filename,
lo.line, lo.col,
hi.line, hi.col,
msg);
fail;
}
@ -50,10 +54,12 @@ obj session(ast.crate_num cnum, cfg targ,
}
fn span_warn(span sp, str msg) {
auto lo = codemap.lookup_pos(cm, sp.lo);
auto hi = codemap.lookup_pos(cm, sp.hi);
log #fmt("%s:%u:%u:%u:%u: warning: %s",
sp.filename,
sp.lo.line, sp.lo.col,
sp.hi.line, sp.hi.col,
lo.filename,
lo.line, lo.col,
hi.line, hi.col,
msg);
}
@ -63,10 +69,12 @@ obj session(ast.crate_num cnum, cfg targ,
}
fn span_unimpl(span sp, str msg) {
auto lo = codemap.lookup_pos(cm, sp.lo);
auto hi = codemap.lookup_pos(cm, sp.hi);
log #fmt("%s:%u:%u:%u:%u: error: unimplemented %s",
sp.filename,
sp.lo.line, sp.lo.col,
sp.hi.line, sp.hi.col,
lo.filename,
lo.line, lo.col,
hi.line, hi.col,
msg);
fail;
}
@ -87,6 +95,14 @@ obj session(ast.crate_num cnum, cfg targ,
fn has_external_crate(int num) -> bool {
ret crates.contains_key(num);
}
fn get_codemap() -> codemap.codemap {
ret cm;
}
fn lookup_pos(uint pos) -> codemap.loc {
ret codemap.lookup_pos(cm, pos);
}
}

src/comp/front/codemap.rs (new file, 65 lines)

@ -0,0 +1,65 @@
import std._vec;
/* A codemap is a thing that maps uints to file/line/column positions
* in a crate. This to make it possible to represent the positions
* with single-word things, rather than passing records all over the
* compiler.
*/
type filemap = @rec(str name,
uint start_pos,
mutable vec[uint] lines);
type codemap = @rec(mutable vec[filemap] files);
type loc = rec(str filename, uint line, uint col);
fn new_codemap() -> codemap {
let vec[filemap] files = vec();
ret @rec(mutable files=files);
}
fn new_filemap(str filename, uint start_pos) -> filemap {
let vec[uint] lines = vec();
ret @rec(name=filename,
start_pos=start_pos,
mutable lines=lines);
}
fn next_line(filemap file, uint pos) {
_vec.push[uint](file.lines, pos);
}
fn lookup_pos(codemap map, uint pos) -> loc {
for (filemap f in map.files) {
if (f.start_pos < pos) {
auto line_num = 1u;
auto line_start = 0u;
// FIXME this can be a binary search if we need to be faster
for (uint line_start_ in f.lines) {
// FIXME duplicate code due to lack of working break
if (line_start_ > pos) {
ret rec(filename=f.name,
line=line_num,
col=pos-line_start);
}
line_start = line_start_;
line_num += 1u;
}
ret rec(filename=f.name,
line=line_num,
col=pos-line_start);
}
}
log #fmt("Failed to find a location for character %u", pos);
fail;
}
//
// Local Variables:
// mode: rust
// fill-column: 78;
// indent-tabs-mode: nil
// c-basic-offset: 4
// buffer-file-coding-system: utf-8-unix
// compile-command: "make -k -C $RBUILD 2>&1 | sed -e 's/\\/x\\//x:\\//g'";
// End:
//


@ -25,6 +25,9 @@ tag val {
}
type env = vec[tup(ident, val)];
type ctx = @rec(parser p,
session.session sess,
mutable uint chpos);
fn mk_env() -> env {
let env e = vec();
@ -89,50 +92,50 @@ fn lookup(session.session sess, env e, span sp, ident i) -> val {
fail;
}
fn eval_lit(session.session sess, env e, span sp, @ast.lit lit) -> val {
fn eval_lit(ctx cx, span sp, @ast.lit lit) -> val {
alt (lit.node) {
case (ast.lit_bool(?b)) { ret val_bool(b); }
case (ast.lit_int(?i)) { ret val_int(i); }
case (ast.lit_str(?s)) { ret val_str(s); }
case (_) {
sess.span_err(sp, "evaluating unsupported literal");
cx.sess.span_err(sp, "evaluating unsupported literal");
}
}
fail;
}
fn eval_expr(session.session sess, env e, @ast.expr x) -> val {
fn eval_expr(ctx cx, env e, @ast.expr x) -> val {
alt (x.node) {
case (ast.expr_path(?pth, _, _)) {
if (_vec.len[ident](pth.node.idents) == 1u &&
_vec.len[@ast.ty](pth.node.types) == 0u) {
ret lookup(sess, e, x.span, pth.node.idents.(0));
ret lookup(cx.sess, e, x.span, pth.node.idents.(0));
}
sess.span_err(x.span, "evaluating structured path-name");
cx.sess.span_err(x.span, "evaluating structured path-name");
}
case (ast.expr_lit(?lit, _)) {
ret eval_lit(sess, e, x.span, lit);
ret eval_lit(cx, x.span, lit);
}
case (ast.expr_unary(?op, ?a, _)) {
auto av = eval_expr(sess, e, a);
auto av = eval_expr(cx, e, a);
alt (op) {
case (ast.not) {
if (val_is_bool(av)) {
ret val_bool(!val_as_bool(av));
}
sess.span_err(x.span, "bad types in '!' expression");
cx.sess.span_err(x.span, "bad types in '!' expression");
}
case (_) {
sess.span_err(x.span, "evaluating unsupported unop");
cx.sess.span_err(x.span, "evaluating unsupported unop");
}
}
}
case (ast.expr_binary(?op, ?a, ?b, _)) {
auto av = eval_expr(sess, e, a);
auto bv = eval_expr(sess, e, b);
auto av = eval_expr(cx, e, a);
auto bv = eval_expr(cx, e, b);
alt (op) {
case (ast.add) {
if (val_is_int(av) && val_is_int(bv)) {
@ -141,66 +144,66 @@ fn eval_expr(session.session sess, env e, @ast.expr x) -> val {
if (val_is_str(av) && val_is_str(bv)) {
ret val_str(val_as_str(av) + val_as_str(bv));
}
sess.span_err(x.span, "bad types in '+' expression");
cx.sess.span_err(x.span, "bad types in '+' expression");
}
case (ast.sub) {
if (val_is_int(av) && val_is_int(bv)) {
ret val_int(val_as_int(av) - val_as_int(bv));
}
sess.span_err(x.span, "bad types in '-' expression");
cx.sess.span_err(x.span, "bad types in '-' expression");
}
case (ast.mul) {
if (val_is_int(av) && val_is_int(bv)) {
ret val_int(val_as_int(av) * val_as_int(bv));
}
sess.span_err(x.span, "bad types in '*' expression");
cx.sess.span_err(x.span, "bad types in '*' expression");
}
case (ast.div) {
if (val_is_int(av) && val_is_int(bv)) {
ret val_int(val_as_int(av) / val_as_int(bv));
}
sess.span_err(x.span, "bad types in '/' expression");
cx.sess.span_err(x.span, "bad types in '/' expression");
}
case (ast.rem) {
if (val_is_int(av) && val_is_int(bv)) {
ret val_int(val_as_int(av) % val_as_int(bv));
}
sess.span_err(x.span, "bad types in '%' expression");
cx.sess.span_err(x.span, "bad types in '%' expression");
}
case (ast.and) {
if (val_is_bool(av) && val_is_bool(bv)) {
ret val_bool(val_as_bool(av) && val_as_bool(bv));
}
sess.span_err(x.span, "bad types in '&&' expression");
cx.sess.span_err(x.span, "bad types in '&&' expression");
}
case (ast.or) {
if (val_is_bool(av) && val_is_bool(bv)) {
ret val_bool(val_as_bool(av) || val_as_bool(bv));
}
sess.span_err(x.span, "bad types in '||' expression");
cx.sess.span_err(x.span, "bad types in '||' expression");
}
case (ast.eq) {
ret val_bool(val_eq(sess, x.span, av, bv));
ret val_bool(val_eq(cx.sess, x.span, av, bv));
}
case (ast.ne) {
ret val_bool(! val_eq(sess, x.span, av, bv));
ret val_bool(! val_eq(cx.sess, x.span, av, bv));
}
case (_) {
sess.span_err(x.span, "evaluating unsupported binop");
cx.sess.span_err(x.span, "evaluating unsupported binop");
}
}
}
case (_) {
sess.span_err(x.span, "evaluating unsupported expression");
cx.sess.span_err(x.span, "evaluating unsupported expression");
}
}
fail;
@ -221,7 +224,7 @@ fn val_eq(session.session sess, span sp, val av, val bv) -> bool {
fail;
}
impure fn eval_crate_directives(parser p,
impure fn eval_crate_directives(ctx cx,
env e,
vec[@ast.crate_directive] cdirs,
str prefix,
@ -231,28 +234,27 @@ impure fn eval_crate_directives(parser p,
ast.mod_index_entry] index) {
for (@ast.crate_directive sub_cdir in cdirs) {
eval_crate_directive(p, e, sub_cdir, prefix,
eval_crate_directive(cx, e, sub_cdir, prefix,
view_items, items, index);
}
}
impure fn eval_crate_directives_to_mod(parser p,
env e,
impure fn eval_crate_directives_to_mod(ctx cx, env e,
vec[@ast.crate_directive] cdirs,
str prefix) -> ast._mod {
let vec[@ast.view_item] view_items = vec();
let vec[@ast.item] items = vec();
auto index = new_str_hash[ast.mod_index_entry]();
eval_crate_directives(p, e, cdirs, prefix,
eval_crate_directives(cx, e, cdirs, prefix,
view_items, items, index);
ret rec(view_items=view_items, items=items, index=index);
}
impure fn eval_crate_directive_block(parser p,
impure fn eval_crate_directive_block(ctx cx,
env e,
&ast.block blk,
str prefix,
@ -264,19 +266,18 @@ impure fn eval_crate_directive_block(parser p,
for (@ast.stmt s in blk.node.stmts) {
alt (s.node) {
case (ast.stmt_crate_directive(?cdir)) {
eval_crate_directive(p, e, cdir, prefix,
eval_crate_directive(cx, e, cdir, prefix,
view_items, items, index);
}
case (_) {
auto sess = p.get_session();
sess.span_err(s.span,
"unsupported stmt in crate-directive block");
cx.sess.span_err(s.span,
"unsupported stmt in crate-directive block");
}
}
}
}
impure fn eval_crate_directive_expr(parser p,
impure fn eval_crate_directive_expr(ctx cx,
env e,
@ast.expr x,
str prefix,
@ -284,25 +285,23 @@ impure fn eval_crate_directive_expr(parser p,
&mutable vec[@ast.item] items,
hashmap[ast.ident,
ast.mod_index_entry] index) {
auto sess = p.get_session();
alt (x.node) {
case (ast.expr_if(?cond, ?thn, ?elopt, _)) {
auto cv = eval_expr(sess, e, cond);
auto cv = eval_expr(cx, e, cond);
if (!val_is_bool(cv)) {
sess.span_err(x.span, "bad cond type in 'if'");
cx.sess.span_err(x.span, "bad cond type in 'if'");
}
if (val_as_bool(cv)) {
ret eval_crate_directive_block(p, e, thn, prefix,
ret eval_crate_directive_block(cx, e, thn, prefix,
view_items, items,
index);
}
alt (elopt) {
case (some[@ast.expr](?els)) {
ret eval_crate_directive_expr(p, e, els, prefix,
ret eval_crate_directive_expr(cx, e, els, prefix,
view_items, items,
index);
}
@ -313,45 +312,44 @@ impure fn eval_crate_directive_expr(parser p,
}
case (ast.expr_alt(?v, ?arms, _)) {
auto vv = eval_expr(sess, e, v);
auto vv = eval_expr(cx, e, v);
for (ast.arm arm in arms) {
alt (arm.pat.node) {
case (ast.pat_lit(?lit, _)) {
auto pv = eval_lit(sess, e,
arm.pat.span, lit);
if (val_eq(sess, arm.pat.span, vv, pv)) {
auto pv = eval_lit(cx, arm.pat.span, lit);
if (val_eq(cx.sess, arm.pat.span, vv, pv)) {
ret eval_crate_directive_block
(p, e, arm.block, prefix,
(cx, e, arm.block, prefix,
view_items, items, index);
}
}
case (ast.pat_wild(_)) {
ret eval_crate_directive_block
(p, e, arm.block, prefix,
(cx, e, arm.block, prefix,
view_items, items, index);
}
case (_) {
sess.span_err(arm.pat.span,
"bad pattern type in 'alt'");
cx.sess.span_err(arm.pat.span,
"bad pattern type in 'alt'");
}
}
}
sess.span_err(x.span, "no cases matched in 'alt'");
cx.sess.span_err(x.span, "no cases matched in 'alt'");
}
case (ast.expr_block(?block, _)) {
ret eval_crate_directive_block(p, e, block, prefix,
ret eval_crate_directive_block(cx, e, block, prefix,
view_items, items,
index);
}
case (_) {
sess.span_err(x.span, "unsupported expr type");
cx.sess.span_err(x.span, "unsupported expr type");
}
}
}
impure fn eval_crate_directive(parser p,
impure fn eval_crate_directive(ctx cx,
env e,
@ast.crate_directive cdir,
str prefix,
@ -362,14 +360,14 @@ impure fn eval_crate_directive(parser p,
alt (cdir.node) {
case (ast.cdir_let(?id, ?x, ?cdirs)) {
auto v = eval_expr(p.get_session(), e, x);
auto v = eval_expr(cx, e, x);
auto e0 = vec(tup(id, v)) + e;
eval_crate_directives(p, e0, cdirs, prefix,
eval_crate_directives(cx, e0, cdirs, prefix,
view_items, items, index);
}
case (ast.cdir_expr(?x)) {
eval_crate_directive_expr(p, e, x, prefix,
eval_crate_directive_expr(cx, e, x, prefix,
view_items, items, index);
}
@ -385,13 +383,15 @@ impure fn eval_crate_directive(parser p,
auto full_path = prefix + std.fs.path_sep() + file_path;
auto start_id = p.next_def_id();
auto p0 = new_parser(p.get_session(), e, start_id, full_path);
auto start_id = cx.p.next_def_id();
auto p0 = new_parser(cx.sess, e, start_id, full_path, cx.chpos);
auto m0 = parse_mod_items(p0, token.EOF);
auto next_id = p0.next_def_id();
p.set_def(next_id._1);
// Thread defids and chpos through the parsers
cx.p.set_def(next_id._1);
cx.chpos = p0.get_chpos();
auto im = ast.item_mod(id, m0, next_id);
auto i = @spanned(cdir.span, cdir.span, im);
auto i = @spanned(cdir.span.lo, cdir.span.hi, im);
ast.index_item(index, i);
_vec.push[@ast.item](items, i);
}
@ -407,9 +407,9 @@ impure fn eval_crate_directive(parser p,
}
auto full_path = prefix + std.fs.path_sep() + path;
auto m0 = eval_crate_directives_to_mod(p, e, cdirs, full_path);
auto im = ast.item_mod(id, m0, p.next_def_id());
auto i = @spanned(cdir.span, cdir.span, im);
auto m0 = eval_crate_directives_to_mod(cx, e, cdirs, full_path);
auto im = ast.item_mod(id, m0, cx.p.next_def_id());
auto i = @spanned(cdir.span.lo, cdir.span.hi, im);
ast.index_item(index, i);
_vec.push[@ast.item](items, i);
}


@ -318,9 +318,9 @@ fn parse_type(str s, uint i, uint lim) -> tup(ty, uint) {
fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
fn make_new_lit(common.span sp, ast.lit_ lit) -> @ast.expr {
auto sp_lit = @parser.spanned[ast.lit_](sp, sp, lit);
auto sp_lit = @rec(node=lit, span=sp);
auto expr = ast.expr_lit(sp_lit, ast.ann_none);
ret @parser.spanned[ast.expr_](sp, sp, expr);
ret @rec(node=expr, span=sp);
}
fn make_new_str(common.span sp, str s) -> @ast.expr {
@ -336,7 +336,7 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
fn make_add_expr(common.span sp,
@ast.expr lhs, @ast.expr rhs) -> @ast.expr {
auto binexpr = ast.expr_binary(ast.add, lhs, rhs, ast.ann_none);
ret @parser.spanned[ast.expr_](sp, sp, binexpr);
ret @rec(node=binexpr, span=sp);
}
fn make_call(common.span sp, vec[ast.ident] fn_path,
@ -344,11 +344,11 @@ fn pieces_to_expr(vec[piece] pieces, vec[@ast.expr] args) -> @ast.expr {
let vec[ast.ident] path_idents = fn_path;
let vec[@ast.ty] path_types = vec();
auto path = rec(idents = path_idents, types = path_types);
auto sp_path = parser.spanned[ast.path_](sp, sp, path);
auto sp_path = rec(node=path, span=sp);
auto pathexpr = ast.expr_path(sp_path, none[ast.def], ast.ann_none);
auto sp_pathexpr = @parser.spanned[ast.expr_](sp, sp, pathexpr);
auto sp_pathexpr = @rec(node=pathexpr, span=sp);
auto callexpr = ast.expr_call(sp_pathexpr, args, ast.ann_none);
auto sp_callexpr = @parser.spanned[ast.expr_](sp, sp, callexpr);
auto sp_callexpr = @rec(node=callexpr, span=sp);
ret sp_callexpr;
}


@ -17,42 +17,32 @@ state type reader = state obj {
impure fn init();
impure fn bump();
fn mark();
fn get_filename() -> str;
fn get_mark_pos() -> common.pos;
fn get_curr_pos() -> common.pos;
fn get_mark_chpos() -> uint;
fn get_chpos() -> uint;
fn get_keywords() -> hashmap[str,token.token];
fn get_reserved() -> hashmap[str,()];
fn get_filemap() -> codemap.filemap;
};
impure fn new_reader(io.reader rdr, str filename) -> reader
{
impure fn new_reader(io.reader rdr, str filename, codemap.filemap filemap)
-> reader {
state obj reader(str file,
str filename,
uint len,
mutable uint pos,
mutable char ch,
mutable uint mark_line,
mutable uint mark_col,
mutable uint line,
mutable uint col,
mutable uint mark_chpos,
mutable uint chpos,
hashmap[str,token.token] keywords,
hashmap[str,()] reserved) {
hashmap[str,()] reserved,
codemap.filemap fm) {
fn is_eof() -> bool {
ret ch == -1 as char;
}
fn get_curr_pos() -> common.pos {
ret rec(line=line, col=col);
}
fn get_mark_pos() -> common.pos {
ret rec(line=mark_line, col=mark_col);
}
fn get_filename() -> str {
ret filename;
}
fn mark() { mark_chpos = chpos; }
fn get_mark_chpos() -> uint { ret mark_chpos; }
fn get_chpos() -> uint { ret chpos; }
fn curr() -> char {
ret ch;
@ -73,11 +63,9 @@ impure fn new_reader(io.reader rdr, str filename) -> reader
impure fn bump() {
if (pos < len) {
chpos += 1u;
if (ch == '\n') {
line += 1u;
col = 0u;
} else {
col += 1u;
codemap.next_line(fm, chpos);
}
auto next = _str.char_range_at(file, pos);
pos = next._1;
@ -87,11 +75,6 @@ impure fn new_reader(io.reader rdr, str filename) -> reader
}
}
fn mark() {
mark_line = line;
mark_col = col;
}
fn get_keywords() -> hashmap[str,token.token] {
ret keywords;
}
@ -99,8 +82,22 @@ impure fn new_reader(io.reader rdr, str filename) -> reader
fn get_reserved() -> hashmap[str,()] {
ret reserved;
}
}
fn get_filemap() -> codemap.filemap {
ret fm;
}
}
auto file = _str.unsafe_from_bytes(rdr.read_whole_stream());
auto rd = reader(file, _str.byte_len(file), 0u, -1 as char,
filemap.start_pos, filemap.start_pos,
keyword_table(),
reserved_word_table(),
filemap);
rd.init();
ret rd;
}
fn keyword_table() -> std.map.hashmap[str, token.token] {
auto keywords = new_str_hash[token.token]();
keywords.insert("mod", token.MOD);
@ -205,8 +202,11 @@ impure fn new_reader(io.reader rdr, str filename) -> reader
keywords.insert("f32", token.MACH(common.ty_f32));
keywords.insert("f64", token.MACH(common.ty_f64));
auto reserved = new_str_hash[()]();
ret keywords;
}
fn reserved_word_table() -> std.map.hashmap[str, ()] {
auto reserved = new_str_hash[()]();
reserved.insert("f16", ()); // IEEE 754-2008 'binary16' interchange fmt
reserved.insert("f80", ()); // IEEE 754-1985 'extended'
reserved.insert("f128", ()); // IEEE 754-2008 'binary128'
@ -214,15 +214,9 @@ impure fn new_reader(io.reader rdr, str filename) -> reader
reserved.insert("m64", ()); // IEEE 754-2008 'decimal64'
reserved.insert("m128", ()); // IEEE 754-2008 'decimal128'
reserved.insert("dec", ()); // One of m32, m64, m128
auto file = _str.unsafe_from_bytes(rdr.read_whole_stream());
auto rd = reader(file, filename, _str.byte_len(file), 0u, -1 as char,
1u, 0u, 1u, 0u, keywords, reserved);
rd.init();
ret rd;
ret reserved;
}
fn in_range(char c, char lo, char hi) -> bool {
ret lo <= c && c <= hi;
}
@ -797,7 +791,8 @@ tag cmnt_ {
cmnt_line(str);
cmnt_block(vec[str]);
}
type cmnt = rec(cmnt_ val, common.pos pos, bool space_after);
type cmnt = rec(cmnt_ val, uint pos, bool space_after);
impure fn consume_whitespace(reader rdr) -> uint {
auto lines = 0u;
@ -809,7 +804,7 @@ impure fn consume_whitespace(reader rdr) -> uint {
}
impure fn read_line_comment(reader rdr) -> cmnt {
auto p = rdr.get_curr_pos();
auto p = rdr.get_chpos();
rdr.bump(); rdr.bump();
while (rdr.curr() == ' ') {rdr.bump();}
auto val = "";
@ -823,7 +818,7 @@ impure fn read_line_comment(reader rdr) -> cmnt {
}
impure fn read_block_comment(reader rdr) -> cmnt {
auto p = rdr.get_curr_pos();
auto p = rdr.get_chpos();
rdr.bump(); rdr.bump();
while (rdr.curr() == ' ') {rdr.bump();}
let vec[str] lines = vec();
@ -857,7 +852,7 @@ impure fn read_block_comment(reader rdr) -> cmnt {
impure fn gather_comments(str path) -> vec[cmnt] {
auto srdr = io.file_reader(path);
auto rdr = new_reader(srdr, path);
auto rdr = new_reader(srdr, path, codemap.new_filemap(path, 0u));
let vec[cmnt] comments = vec();
while (!rdr.is_eof()) {
while (true) {

(One file's diff suppressed because it is too large.)


@ -4939,8 +4939,9 @@ fn trans_check_expr(@block_ctxt cx, @ast.expr e) -> result {
fn trans_fail(@block_ctxt cx, common.span sp, str fail_str) -> result {
auto V_fail_str = p2i(C_cstr(cx.fcx.ccx, fail_str));
auto V_filename = p2i(C_cstr(cx.fcx.ccx, sp.filename));
auto V_line = sp.lo.line as int;
auto loc = cx.fcx.ccx.sess.lookup_pos(sp.lo);
auto V_filename = p2i(C_cstr(cx.fcx.ccx, loc.filename));
auto V_line = loc.line as int;
auto args = vec(V_fail_str, V_filename, C_int(V_line));
auto sub = trans_upcall(cx, "upcall_fail", args);


@ -159,8 +159,7 @@ impure fn print_type(ps s, &@ast.ty ty) {
fn get_span(&ast.ty_field f) -> common.span {
// Try to reconstruct the span for this field
auto sp = f.mt.ty.span;
auto hi = rec(line=sp.hi.line,
col=sp.hi.col + _str.char_len(f.ident) + 1u);
auto hi = sp.hi + _str.char_len(f.ident) + 1u;
ret rec(hi=hi with sp);
}
auto f = print_field;
@ -329,14 +328,9 @@ impure fn print_item(ps s, @ast.item item) {
}
impure fn print_block(ps s, ast.block blk) {
auto cur_line = 0u;
maybe_print_comment(s, blk.span.lo);
bopen(s);
for (@ast.stmt st in blk.node.stmts) {
if (cur_line != 0u && st.span.lo.line > cur_line + 1u) {
line(s.s);
}
cur_line = st.span.hi.line;
maybe_print_comment(s, st.span.lo);
alt (st.node) {
case (ast.stmt_decl(?decl,_)) {print_decl(s, decl);}
@ -347,9 +341,6 @@ impure fn print_block(ps s, ast.block blk) {
}
alt (blk.node.expr) {
case (option.some[@ast.expr](?expr)) {
if (cur_line != 0u && expr.span.lo.line > cur_line + 1u) {
line(s.s);
}
print_expr(s, expr);
if (!maybe_print_line_comment(s, expr.span)) {line(s.s);}
}
@ -958,12 +949,11 @@ fn next_comment(ps s) -> option.t[lexer.cmnt] {
}
}
impure fn maybe_print_comment(ps s, common.pos pos) {
impure fn maybe_print_comment(ps s, uint pos) {
while (true) {
alt (next_comment(s)) {
case (option.some[lexer.cmnt](?cmnt)) {
if (cmnt.pos.line < pos.line ||
(cmnt.pos.line == pos.line && cmnt.pos.col < pos.col)) {
if (cmnt.pos < pos) {
print_comment(s, cmnt.val);
if (cmnt.space_after) {line(s.s);}
s.cur_cmnt += 1u;
@ -977,8 +967,7 @@ impure fn maybe_print_comment(ps s, common.pos pos) {
impure fn maybe_print_line_comment(ps s, common.span span) -> bool {
alt (next_comment(s)) {
case (option.some[lexer.cmnt](?cmnt)) {
if (span.hi.line == cmnt.pos.line &&
span.hi.col + 4u >= cmnt.pos.col) {
if (span.hi + 4u >= cmnt.pos) {
wrd(s.s, " ");
print_comment(s, cmnt.val);
s.cur_cmnt += 1u;


@ -7,6 +7,7 @@ mod front {
mod ast;
mod creader;
mod extfmt;
mod codemap;
mod lexer;
mod parser;
mod token;


@ -5,8 +5,7 @@ import front.ast;
type filename = str;
type pos = rec(uint line, uint col);
type span = rec(filename filename, pos lo, pos hi);
type span = rec(uint lo, uint hi);
type spanned[T] = rec(T node, span span);
tag ty_mach {