auto merge of #7004 : dotdash/rust/allocs, r=thestinger
This removes some unnecessary allocations in the lexer, the typechecker, and the metadata decoder. Reduces the time spent in the parsing and typechecking passes by about 10% for me.
This commit is contained in:
commit
878a9b92eb
@ -93,6 +93,14 @@ pub mod reader {
|
||||
pub fn get(&self, tag: uint) -> Doc {
|
||||
get_doc(*self, tag)
|
||||
}
|
||||
|
||||
pub fn as_str_slice<'a>(&'a self) -> &'a str {
|
||||
str::from_bytes_slice(self.data.slice(self.start, self.end))
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> ~str {
|
||||
self.as_str_slice().to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
struct Res {
|
||||
@ -239,15 +247,10 @@ pub mod reader {
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn doc_data(d: Doc) -> ~[u8] {
|
||||
vec::slice::<u8>(*d.data, d.start, d.end).to_vec()
|
||||
}
|
||||
|
||||
pub fn with_doc_data<T>(d: Doc, f: &fn(x: &[u8]) -> T) -> T {
|
||||
f(vec::slice(*d.data, d.start, d.end))
|
||||
}
|
||||
|
||||
pub fn doc_as_str(d: Doc) -> ~str { str::from_bytes(doc_data(d)) }
|
||||
|
||||
pub fn doc_as_u8(d: Doc) -> u8 {
|
||||
assert_eq!(d.end, d.start + 1u);
|
||||
@ -294,7 +297,7 @@ pub mod reader {
|
||||
|
||||
if r_tag == (EsLabel as uint) {
|
||||
self.pos = r_doc.end;
|
||||
let str = doc_as_str(r_doc);
|
||||
let str = r_doc.as_str_slice();
|
||||
if lbl != str {
|
||||
fail!("Expected label %s but found %s", lbl, str);
|
||||
}
|
||||
@ -415,7 +418,9 @@ pub mod reader {
|
||||
fn read_char(&mut self) -> char {
|
||||
doc_as_u32(self.next_doc(EsChar)) as char
|
||||
}
|
||||
fn read_str(&mut self) -> ~str { doc_as_str(self.next_doc(EsStr)) }
|
||||
fn read_str(&mut self) -> ~str {
|
||||
self.next_doc(EsStr).as_str()
|
||||
}
|
||||
|
||||
// Compound types:
|
||||
fn read_enum<T>(&mut self,
|
||||
|
@ -162,19 +162,18 @@ fn item_visibility(item: ebml::Doc) -> ast::visibility {
|
||||
|
||||
fn item_method_sort(item: ebml::Doc) -> char {
|
||||
for reader::tagged_docs(item, tag_item_trait_method_sort) |doc| {
|
||||
return str::from_bytes(reader::doc_data(doc))[0] as char;
|
||||
return doc.as_str_slice()[0] as char;
|
||||
}
|
||||
return 'r';
|
||||
}
|
||||
|
||||
fn item_symbol(item: ebml::Doc) -> ~str {
|
||||
let sym = reader::get_doc(item, tag_items_data_item_symbol);
|
||||
return str::from_bytes(reader::doc_data(sym));
|
||||
reader::get_doc(item, tag_items_data_item_symbol).as_str()
|
||||
}
|
||||
|
||||
fn item_parent_item(d: ebml::Doc) -> Option<ast::def_id> {
|
||||
for reader::tagged_docs(d, tag_items_data_parent_item) |did| {
|
||||
return Some(reader::with_doc_data(did, |d| parse_def_id(d)));
|
||||
return Some(reader::with_doc_data(did, parse_def_id));
|
||||
}
|
||||
None
|
||||
}
|
||||
@ -195,8 +194,7 @@ fn item_reqd_and_translated_parent_item(cnum: ast::crate_num,
|
||||
|
||||
fn item_def_id(d: ebml::Doc, cdata: cmd) -> ast::def_id {
|
||||
let tagdoc = reader::get_doc(d, tag_def_id);
|
||||
return translate_def_id(cdata, reader::with_doc_data(tagdoc,
|
||||
|d| parse_def_id(d)));
|
||||
return translate_def_id(cdata, reader::with_doc_data(tagdoc, parse_def_id));
|
||||
}
|
||||
|
||||
fn each_reexport(d: ebml::Doc, f: &fn(ebml::Doc) -> bool) -> bool {
|
||||
@ -210,19 +208,19 @@ fn each_reexport(d: ebml::Doc, f: &fn(ebml::Doc) -> bool) -> bool {
|
||||
|
||||
fn variant_disr_val(d: ebml::Doc) -> Option<int> {
|
||||
do reader::maybe_get_doc(d, tag_disr_val).chain |val_doc| {
|
||||
int::parse_bytes(reader::doc_data(val_doc), 10u)
|
||||
do reader::with_doc_data(val_doc) |data| { int::parse_bytes(data, 10u) }
|
||||
}
|
||||
}
|
||||
|
||||
fn doc_type(doc: ebml::Doc, tcx: ty::ctxt, cdata: cmd) -> ty::t {
|
||||
let tp = reader::get_doc(doc, tag_items_data_item_type);
|
||||
parse_ty_data(tp.data, cdata.cnum, tp.start, tcx,
|
||||
parse_ty_data(*tp.data, cdata.cnum, tp.start, tcx,
|
||||
|_, did| translate_def_id(cdata, did))
|
||||
}
|
||||
|
||||
fn doc_method_fty(doc: ebml::Doc, tcx: ty::ctxt, cdata: cmd) -> ty::BareFnTy {
|
||||
let tp = reader::get_doc(doc, tag_item_method_fty);
|
||||
parse_bare_fn_ty_data(tp.data, cdata.cnum, tp.start, tcx,
|
||||
parse_bare_fn_ty_data(*tp.data, cdata.cnum, tp.start, tcx,
|
||||
|_, did| translate_def_id(cdata, did))
|
||||
}
|
||||
|
||||
@ -231,7 +229,7 @@ fn doc_transformed_self_ty(doc: ebml::Doc,
|
||||
cdata: cmd) -> Option<ty::t>
|
||||
{
|
||||
do reader::maybe_get_doc(doc, tag_item_method_transformed_self_ty).map |tp| {
|
||||
parse_ty_data(tp.data, cdata.cnum, tp.start, tcx,
|
||||
parse_ty_data(*tp.data, cdata.cnum, tp.start, tcx,
|
||||
|_, did| translate_def_id(cdata, did))
|
||||
}
|
||||
}
|
||||
@ -242,7 +240,7 @@ pub fn item_type(_item_id: ast::def_id, item: ebml::Doc,
|
||||
}
|
||||
|
||||
fn doc_trait_ref(doc: ebml::Doc, tcx: ty::ctxt, cdata: cmd) -> ty::TraitRef {
|
||||
parse_trait_ref_data(doc.data, cdata.cnum, doc.start, tcx,
|
||||
parse_trait_ref_data(*doc.data, cdata.cnum, doc.start, tcx,
|
||||
|_, did| translate_def_id(cdata, did))
|
||||
}
|
||||
|
||||
@ -257,7 +255,7 @@ fn item_ty_param_defs(item: ebml::Doc, tcx: ty::ctxt, cdata: cmd,
|
||||
let mut bounds = ~[];
|
||||
for reader::tagged_docs(item, tag) |p| {
|
||||
let bd = parse_type_param_def_data(
|
||||
p.data, p.start, cdata.cnum, tcx,
|
||||
*p.data, p.start, cdata.cnum, tcx,
|
||||
|_, did| translate_def_id(cdata, did));
|
||||
bounds.push(bd);
|
||||
}
|
||||
@ -282,7 +280,7 @@ fn enum_variant_ids(item: ebml::Doc, cdata: cmd) -> ~[ast::def_id] {
|
||||
let mut ids: ~[ast::def_id] = ~[];
|
||||
let v = tag_items_data_item_variant;
|
||||
for reader::tagged_docs(item, v) |p| {
|
||||
let ext = reader::with_doc_data(p, |d| parse_def_id(d));
|
||||
let ext = reader::with_doc_data(p, parse_def_id);
|
||||
ids.push(ast::def_id { crate: cdata.cnum, node: ext.node });
|
||||
};
|
||||
return ids;
|
||||
@ -297,10 +295,10 @@ fn item_path(item_doc: ebml::Doc) -> ast_map::path {
|
||||
let mut result = vec::with_capacity(len);
|
||||
for reader::docs(path_doc) |tag, elt_doc| {
|
||||
if tag == tag_path_elt_mod {
|
||||
let str = reader::doc_as_str(elt_doc);
|
||||
let str = elt_doc.as_str_slice();
|
||||
result.push(ast_map::path_mod(token::str_to_ident(str)));
|
||||
} else if tag == tag_path_elt_name {
|
||||
let str = reader::doc_as_str(elt_doc);
|
||||
let str = elt_doc.as_str_slice();
|
||||
result.push(ast_map::path_name(token::str_to_ident(str)));
|
||||
} else {
|
||||
// ignore tag_path_len element
|
||||
@ -312,12 +310,10 @@ fn item_path(item_doc: ebml::Doc) -> ast_map::path {
|
||||
|
||||
fn item_name(intr: @ident_interner, item: ebml::Doc) -> ast::ident {
|
||||
let name = reader::get_doc(item, tag_paths_data_name);
|
||||
do reader::with_doc_data(name) |data| {
|
||||
let string = str::from_bytes_slice(data);
|
||||
match intr.find_equiv(&StringRef(string)) {
|
||||
None => token::str_to_ident(string),
|
||||
Some(val) => ast::new_ident(val),
|
||||
}
|
||||
let string = name.as_str_slice();
|
||||
match intr.find_equiv(&StringRef(string)) {
|
||||
None => token::str_to_ident(string),
|
||||
Some(val) => ast::new_ident(val),
|
||||
}
|
||||
}
|
||||
|
||||
@ -413,15 +409,9 @@ pub fn get_impl_trait(cdata: cmd,
|
||||
tcx: ty::ctxt) -> Option<@ty::TraitRef>
|
||||
{
|
||||
let item_doc = lookup_item(id, cdata.data);
|
||||
let mut result = None;
|
||||
for reader::tagged_docs(item_doc, tag_item_trait_ref) |tp| {
|
||||
let trait_ref =
|
||||
@parse_trait_ref_data(tp.data, cdata.cnum, tp.start, tcx,
|
||||
|_, did| translate_def_id(cdata, did));
|
||||
result = Some(trait_ref);
|
||||
break;
|
||||
};
|
||||
result
|
||||
do reader::maybe_get_doc(item_doc, tag_item_trait_ref).map |&tp| {
|
||||
@doc_trait_ref(tp, tcx, cdata)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_impl_method(intr: @ident_interner, cdata: cmd, id: ast::node_id,
|
||||
@ -430,7 +420,7 @@ pub fn get_impl_method(intr: @ident_interner, cdata: cmd, id: ast::node_id,
|
||||
let mut found = None;
|
||||
for reader::tagged_docs(find_item(id, items), tag_item_impl_method)
|
||||
|mid| {
|
||||
let m_did = reader::with_doc_data(mid, |d| parse_def_id(d));
|
||||
let m_did = reader::with_doc_data(mid, parse_def_id);
|
||||
if item_name(intr, find_item(m_did.node, items)) == name {
|
||||
found = Some(translate_def_id(cdata, m_did));
|
||||
}
|
||||
@ -513,19 +503,17 @@ pub fn each_path(intr: @ident_interner,
|
||||
let def_id_doc =
|
||||
reader::get_doc(reexport_doc,
|
||||
tag_items_data_item_reexport_def_id);
|
||||
let def_id =
|
||||
reader::with_doc_data(def_id_doc,
|
||||
|d| parse_def_id(d));
|
||||
let def_id = reader::with_doc_data(def_id_doc, parse_def_id);
|
||||
let def_id = translate_def_id(cdata, def_id);
|
||||
|
||||
let reexport_name_doc =
|
||||
reader::get_doc(reexport_doc,
|
||||
tag_items_data_item_reexport_name);
|
||||
let reexport_name = reader::doc_as_str(reexport_name_doc);
|
||||
let reexport_name = reexport_name_doc.as_str_slice();
|
||||
|
||||
let reexport_path;
|
||||
if path_is_empty {
|
||||
reexport_path = reexport_name;
|
||||
reexport_path = reexport_name.to_owned();
|
||||
} else {
|
||||
reexport_path = path + "::" + reexport_name;
|
||||
}
|
||||
@ -646,7 +634,7 @@ fn get_explicit_self(item: ebml::Doc) -> ast::explicit_self_ {
|
||||
}
|
||||
|
||||
let explicit_self_doc = reader::get_doc(item, tag_item_trait_method_explicit_self);
|
||||
let string = reader::doc_as_str(explicit_self_doc);
|
||||
let string = explicit_self_doc.as_str_slice();
|
||||
|
||||
let explicit_self_kind = string[0];
|
||||
match explicit_self_kind as char {
|
||||
@ -668,7 +656,7 @@ fn item_impl_methods(intr: @ident_interner, cdata: cmd, item: ebml::Doc,
|
||||
base_tps: uint) -> ~[@resolve::MethodInfo] {
|
||||
let mut rslt = ~[];
|
||||
for reader::tagged_docs(item, tag_item_impl_method) |doc| {
|
||||
let m_did = reader::with_doc_data(doc, |d| parse_def_id(d));
|
||||
let m_did = reader::with_doc_data(doc, parse_def_id);
|
||||
let mth_item = lookup_item(m_did.node, cdata.data);
|
||||
let explicit_self = get_explicit_self(mth_item);
|
||||
rslt.push(@resolve::MethodInfo {
|
||||
@ -690,7 +678,7 @@ pub fn get_impls_for_mod(intr: @ident_interner,
|
||||
let mod_item = lookup_item(m_id, data);
|
||||
let mut result = ~[];
|
||||
for reader::tagged_docs(mod_item, tag_mod_impl) |doc| {
|
||||
let did = reader::with_doc_data(doc, |d| parse_def_id(d));
|
||||
let did = reader::with_doc_data(doc, parse_def_id);
|
||||
let local_did = translate_def_id(cdata, did);
|
||||
debug!("(get impls for mod) getting did %? for '%?'",
|
||||
local_did, name);
|
||||
@ -831,7 +819,7 @@ pub fn get_type_name_if_impl(cdata: cmd,
|
||||
}
|
||||
|
||||
for reader::tagged_docs(item, tag_item_impl_type_basename) |doc| {
|
||||
return Some(token::str_to_ident(str::from_bytes(reader::doc_data(doc))));
|
||||
return Some(token::str_to_ident(doc.as_str_slice()));
|
||||
}
|
||||
|
||||
return None;
|
||||
@ -853,7 +841,7 @@ pub fn get_static_methods_if_impl(intr: @ident_interner,
|
||||
|
||||
let mut impl_method_ids = ~[];
|
||||
for reader::tagged_docs(item, tag_item_impl_method) |impl_method_doc| {
|
||||
impl_method_ids.push(parse_def_id(reader::doc_data(impl_method_doc)));
|
||||
impl_method_ids.push(reader::with_doc_data(impl_method_doc, parse_def_id));
|
||||
}
|
||||
|
||||
let mut static_impl_methods = ~[];
|
||||
@ -950,12 +938,13 @@ fn family_names_type(fam: Family) -> bool {
|
||||
}
|
||||
|
||||
fn read_path(d: ebml::Doc) -> (~str, uint) {
|
||||
let desc = reader::doc_data(d);
|
||||
let pos = io::u64_from_be_bytes(desc, 0u, 4u) as uint;
|
||||
let pathbytes = vec::slice::<u8>(desc, 4u, vec::len::<u8>(desc));
|
||||
let path = str::from_bytes(pathbytes);
|
||||
do reader::with_doc_data(d) |desc| {
|
||||
let pos = io::u64_from_be_bytes(desc, 0u, 4u) as uint;
|
||||
let pathbytes = desc.slice(4u, desc.len());
|
||||
let path = str::from_bytes(pathbytes);
|
||||
|
||||
(path, pos)
|
||||
(path, pos)
|
||||
}
|
||||
}
|
||||
|
||||
fn describe_def(items: ebml::Doc, id: ast::def_id) -> ~str {
|
||||
@ -996,21 +985,21 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::meta_item] {
|
||||
let mut items: ~[@ast::meta_item] = ~[];
|
||||
for reader::tagged_docs(md, tag_meta_item_word) |meta_item_doc| {
|
||||
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
|
||||
let n = str::from_bytes(reader::doc_data(nd));
|
||||
let n = nd.as_str();
|
||||
items.push(attr::mk_word_item(@n));
|
||||
};
|
||||
for reader::tagged_docs(md, tag_meta_item_name_value) |meta_item_doc| {
|
||||
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
|
||||
let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
|
||||
let n = str::from_bytes(reader::doc_data(nd));
|
||||
let v = str::from_bytes(reader::doc_data(vd));
|
||||
let n = nd.as_str();
|
||||
let v = vd.as_str();
|
||||
// FIXME (#623): Should be able to decode meta_name_value variants,
|
||||
// but currently the encoder just drops them
|
||||
items.push(attr::mk_name_value_item_str(@n, @v));
|
||||
};
|
||||
for reader::tagged_docs(md, tag_meta_item_list) |meta_item_doc| {
|
||||
let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
|
||||
let n = str::from_bytes(reader::doc_data(nd));
|
||||
let n = nd.as_str();
|
||||
let subitems = get_meta_items(meta_item_doc);
|
||||
items.push(attr::mk_list_item(@n, subitems));
|
||||
};
|
||||
@ -1079,7 +1068,7 @@ pub fn get_crate_deps(data: @~[u8]) -> ~[crate_dep] {
|
||||
let depsdoc = reader::get_doc(cratedoc, tag_crate_deps);
|
||||
let mut crate_num = 1;
|
||||
fn docstr(doc: ebml::Doc, tag_: uint) -> ~str {
|
||||
str::from_bytes(reader::doc_data(reader::get_doc(doc, tag_)))
|
||||
reader::get_doc(doc, tag_).as_str()
|
||||
}
|
||||
for reader::tagged_docs(depsdoc, tag_crate_dep) |depdoc| {
|
||||
deps.push(crate_dep {cnum: crate_num,
|
||||
@ -1106,7 +1095,7 @@ fn list_crate_deps(data: @~[u8], out: @io::Writer) {
|
||||
pub fn get_crate_hash(data: @~[u8]) -> @~str {
|
||||
let cratedoc = reader::Doc(data);
|
||||
let hashdoc = reader::get_doc(cratedoc, tag_crate_hash);
|
||||
@str::from_bytes(reader::doc_data(hashdoc))
|
||||
@hashdoc.as_str()
|
||||
}
|
||||
|
||||
pub fn get_crate_vers(data: @~[u8]) -> @~str {
|
||||
@ -1161,7 +1150,7 @@ pub fn get_link_args_for_crate(cdata: cmd) -> ~[~str] {
|
||||
let link_args = reader::get_doc(reader::Doc(cdata.data), tag_link_args);
|
||||
let mut result = ~[];
|
||||
for reader::tagged_docs(link_args, tag_link_args_arg) |arg_doc| {
|
||||
result.push(reader::doc_as_str(arg_doc));
|
||||
result.push(arg_doc.as_str());
|
||||
}
|
||||
result
|
||||
}
|
||||
|
@ -55,24 +55,24 @@ pub enum DefIdSource {
|
||||
type conv_did<'self> =
|
||||
&'self fn(source: DefIdSource, ast::def_id) -> ast::def_id;
|
||||
|
||||
pub struct PState {
|
||||
data: @~[u8],
|
||||
pub struct PState<'self> {
|
||||
data: &'self [u8],
|
||||
crate: int,
|
||||
pos: uint,
|
||||
tcx: ty::ctxt
|
||||
}
|
||||
|
||||
fn peek(st: @mut PState) -> char {
|
||||
fn peek(st: &PState) -> char {
|
||||
st.data[st.pos] as char
|
||||
}
|
||||
|
||||
fn next(st: @mut PState) -> char {
|
||||
fn next(st: &mut PState) -> char {
|
||||
let ch = st.data[st.pos] as char;
|
||||
st.pos = st.pos + 1u;
|
||||
return ch;
|
||||
}
|
||||
|
||||
fn next_byte(st: @mut PState) -> u8 {
|
||||
fn next_byte(st: &mut PState) -> u8 {
|
||||
let b = st.data[st.pos];
|
||||
st.pos = st.pos + 1u;
|
||||
return b;
|
||||
@ -92,20 +92,20 @@ fn scan<R>(st: &mut PState, is_last: &fn(char) -> bool,
|
||||
return op(st.data.slice(start_pos, end_pos));
|
||||
}
|
||||
|
||||
pub fn parse_ident(st: @mut PState, last: char) -> ast::ident {
|
||||
pub fn parse_ident(st: &mut PState, last: char) -> ast::ident {
|
||||
fn is_last(b: char, c: char) -> bool { return c == b; }
|
||||
return parse_ident_(st, |a| is_last(last, a) );
|
||||
}
|
||||
|
||||
fn parse_ident_(st: @mut PState, is_last: @fn(char) -> bool) ->
|
||||
fn parse_ident_(st: &mut PState, is_last: @fn(char) -> bool) ->
|
||||
ast::ident {
|
||||
let rslt = scan(st, is_last, str::from_bytes);
|
||||
return st.tcx.sess.ident_of(rslt);
|
||||
}
|
||||
|
||||
pub fn parse_state_from_data(data: @~[u8], crate_num: int,
|
||||
pos: uint, tcx: ty::ctxt) -> @mut PState {
|
||||
@mut PState {
|
||||
pub fn parse_state_from_data<'a>(data: &'a [u8], crate_num: int,
|
||||
pos: uint, tcx: ty::ctxt) -> PState<'a> {
|
||||
PState {
|
||||
data: data,
|
||||
crate: crate_num,
|
||||
pos: pos,
|
||||
@ -113,25 +113,25 @@ pub fn parse_state_from_data(data: @~[u8], crate_num: int,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
pub fn parse_ty_data(data: &[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
conv: conv_did) -> ty::t {
|
||||
let st = parse_state_from_data(data, crate_num, pos, tcx);
|
||||
parse_ty(st, conv)
|
||||
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
|
||||
parse_ty(&mut st, conv)
|
||||
}
|
||||
|
||||
pub fn parse_bare_fn_ty_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
pub fn parse_bare_fn_ty_data(data: &[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
conv: conv_did) -> ty::BareFnTy {
|
||||
let st = parse_state_from_data(data, crate_num, pos, tcx);
|
||||
parse_bare_fn_ty(st, conv)
|
||||
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
|
||||
parse_bare_fn_ty(&mut st, conv)
|
||||
}
|
||||
|
||||
pub fn parse_trait_ref_data(data: @~[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
pub fn parse_trait_ref_data(data: &[u8], crate_num: int, pos: uint, tcx: ty::ctxt,
|
||||
conv: conv_did) -> ty::TraitRef {
|
||||
let st = parse_state_from_data(data, crate_num, pos, tcx);
|
||||
parse_trait_ref(st, conv)
|
||||
let mut st = parse_state_from_data(data, crate_num, pos, tcx);
|
||||
parse_trait_ref(&mut st, conv)
|
||||
}
|
||||
|
||||
fn parse_path(st: @mut PState) -> @ast::Path {
|
||||
fn parse_path(st: &mut PState) -> @ast::Path {
|
||||
let mut idents: ~[ast::ident] = ~[];
|
||||
fn is_last(c: char) -> bool { return c == '(' || c == ':'; }
|
||||
idents.push(parse_ident_(st, is_last));
|
||||
@ -151,7 +151,7 @@ fn parse_path(st: @mut PState) -> @ast::Path {
|
||||
};
|
||||
}
|
||||
|
||||
fn parse_sigil(st: @mut PState) -> ast::Sigil {
|
||||
fn parse_sigil(st: &mut PState) -> ast::Sigil {
|
||||
match next(st) {
|
||||
'@' => ast::ManagedSigil,
|
||||
'~' => ast::OwnedSigil,
|
||||
@ -160,7 +160,7 @@ fn parse_sigil(st: @mut PState) -> ast::Sigil {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_vstore(st: @mut PState) -> ty::vstore {
|
||||
fn parse_vstore(st: &mut PState) -> ty::vstore {
|
||||
assert_eq!(next(st), '/');
|
||||
|
||||
let c = peek(st);
|
||||
@ -178,7 +178,7 @@ fn parse_vstore(st: @mut PState) -> ty::vstore {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_trait_store(st: @mut PState) -> ty::TraitStore {
|
||||
fn parse_trait_store(st: &mut PState) -> ty::TraitStore {
|
||||
match next(st) {
|
||||
'~' => ty::UniqTraitStore,
|
||||
'@' => ty::BoxTraitStore,
|
||||
@ -187,10 +187,10 @@ fn parse_trait_store(st: @mut PState) -> ty::TraitStore {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
|
||||
let self_r = parse_opt(st, || parse_region(st) );
|
||||
fn parse_substs(st: &mut PState, conv: conv_did) -> ty::substs {
|
||||
let self_r = parse_opt(st, |st| parse_region(st) );
|
||||
|
||||
let self_ty = parse_opt(st, || parse_ty(st, conv) );
|
||||
let self_ty = parse_opt(st, |st| parse_ty(st, conv) );
|
||||
|
||||
assert_eq!(next(st), '[');
|
||||
let mut params: ~[ty::t] = ~[];
|
||||
@ -204,7 +204,7 @@ fn parse_substs(st: @mut PState, conv: conv_did) -> ty::substs {
|
||||
};
|
||||
}
|
||||
|
||||
fn parse_bound_region(st: @mut PState) -> ty::bound_region {
|
||||
fn parse_bound_region(st: &mut PState) -> ty::bound_region {
|
||||
match next(st) {
|
||||
's' => ty::br_self,
|
||||
'a' => {
|
||||
@ -222,7 +222,7 @@ fn parse_bound_region(st: @mut PState) -> ty::bound_region {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_region(st: @mut PState) -> ty::Region {
|
||||
fn parse_region(st: &mut PState) -> ty::Region {
|
||||
match next(st) {
|
||||
'b' => {
|
||||
ty::re_bound(parse_bound_region(st))
|
||||
@ -251,15 +251,15 @@ fn parse_region(st: @mut PState) -> ty::Region {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_opt<T>(st: @mut PState, f: &fn() -> T) -> Option<T> {
|
||||
fn parse_opt<T>(st: &mut PState, f: &fn(&mut PState) -> T) -> Option<T> {
|
||||
match next(st) {
|
||||
'n' => None,
|
||||
's' => Some(f()),
|
||||
's' => Some(f(st)),
|
||||
_ => fail!("parse_opt: bad input")
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_str(st: @mut PState, term: char) -> ~str {
|
||||
fn parse_str(st: &mut PState, term: char) -> ~str {
|
||||
let mut result = ~"";
|
||||
while peek(st) != term {
|
||||
result += str::from_byte(next_byte(st));
|
||||
@ -268,13 +268,13 @@ fn parse_str(st: @mut PState, term: char) -> ~str {
|
||||
return result;
|
||||
}
|
||||
|
||||
fn parse_trait_ref(st: @mut PState, conv: conv_did) -> ty::TraitRef {
|
||||
fn parse_trait_ref(st: &mut PState, conv: conv_did) -> ty::TraitRef {
|
||||
let def = parse_def(st, NominalType, conv);
|
||||
let substs = parse_substs(st, conv);
|
||||
ty::TraitRef {def_id: def, substs: substs}
|
||||
}
|
||||
|
||||
fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
|
||||
fn parse_ty(st: &mut PState, conv: conv_did) -> ty::t {
|
||||
match next(st) {
|
||||
'n' => return ty::mk_nil(),
|
||||
'z' => return ty::mk_bot(),
|
||||
@ -370,8 +370,8 @@ fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
|
||||
match st.tcx.rcache.find(&key) {
|
||||
Some(&tt) => return tt,
|
||||
None => {
|
||||
let ps = @mut PState {pos: pos ,.. copy *st};
|
||||
let tt = parse_ty(ps, conv);
|
||||
let mut ps = PState {pos: pos ,.. copy *st};
|
||||
let tt = parse_ty(&mut ps, conv);
|
||||
st.tcx.rcache.insert(key, tt);
|
||||
return tt;
|
||||
}
|
||||
@ -394,7 +394,7 @@ fn parse_ty(st: @mut PState, conv: conv_did) -> ty::t {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_mutability(st: @mut PState) -> ast::mutability {
|
||||
fn parse_mutability(st: &mut PState) -> ast::mutability {
|
||||
match peek(st) {
|
||||
'm' => { next(st); ast::m_mutbl }
|
||||
'?' => { next(st); ast::m_const }
|
||||
@ -402,20 +402,17 @@ fn parse_mutability(st: @mut PState) -> ast::mutability {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_mt(st: @mut PState, conv: conv_did) -> ty::mt {
|
||||
fn parse_mt(st: &mut PState, conv: conv_did) -> ty::mt {
|
||||
let m = parse_mutability(st);
|
||||
ty::mt { ty: parse_ty(st, conv), mutbl: m }
|
||||
}
|
||||
|
||||
fn parse_def(st: @mut PState, source: DefIdSource,
|
||||
fn parse_def(st: &mut PState, source: DefIdSource,
|
||||
conv: conv_did) -> ast::def_id {
|
||||
let mut def = ~[];
|
||||
while peek(st) != '|' { def.push(next_byte(st)); }
|
||||
st.pos = st.pos + 1u;
|
||||
return conv(source, parse_def_id(def));
|
||||
return conv(source, scan(st, |c| { c == '|' }, parse_def_id));
|
||||
}
|
||||
|
||||
fn parse_uint(st: @mut PState) -> uint {
|
||||
fn parse_uint(st: &mut PState) -> uint {
|
||||
let mut n = 0;
|
||||
loop {
|
||||
let cur = peek(st);
|
||||
@ -426,7 +423,7 @@ fn parse_uint(st: @mut PState) -> uint {
|
||||
};
|
||||
}
|
||||
|
||||
fn parse_hex(st: @mut PState) -> uint {
|
||||
fn parse_hex(st: &mut PState) -> uint {
|
||||
let mut n = 0u;
|
||||
loop {
|
||||
let cur = peek(st);
|
||||
@ -449,7 +446,7 @@ fn parse_purity(c: char) -> purity {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_abi_set(st: @mut PState) -> AbiSet {
|
||||
fn parse_abi_set(st: &mut PState) -> AbiSet {
|
||||
assert_eq!(next(st), '[');
|
||||
let mut abis = AbiSet::empty();
|
||||
while peek(st) != ']' {
|
||||
@ -470,7 +467,7 @@ fn parse_onceness(c: char) -> ast::Onceness {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
|
||||
fn parse_closure_ty(st: &mut PState, conv: conv_did) -> ty::ClosureTy {
|
||||
let sigil = parse_sigil(st);
|
||||
let purity = parse_purity(next(st));
|
||||
let onceness = parse_onceness(next(st));
|
||||
@ -487,7 +484,7 @@ fn parse_closure_ty(st: @mut PState, conv: conv_did) -> ty::ClosureTy {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
|
||||
fn parse_bare_fn_ty(st: &mut PState, conv: conv_did) -> ty::BareFnTy {
|
||||
let purity = parse_purity(next(st));
|
||||
let abi = parse_abi_set(st);
|
||||
let sig = parse_sig(st, conv);
|
||||
@ -498,7 +495,7 @@ fn parse_bare_fn_ty(st: @mut PState, conv: conv_did) -> ty::BareFnTy {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_sig(st: @mut PState, conv: conv_did) -> ty::FnSig {
|
||||
fn parse_sig(st: &mut PState, conv: conv_did) -> ty::FnSig {
|
||||
assert_eq!(next(st), '[');
|
||||
let mut inputs = ~[];
|
||||
while peek(st) != ']' {
|
||||
@ -537,20 +534,20 @@ pub fn parse_def_id(buf: &[u8]) -> ast::def_id {
|
||||
ast::def_id { crate: crate_num, node: def_num }
|
||||
}
|
||||
|
||||
pub fn parse_type_param_def_data(data: @~[u8], start: uint,
|
||||
pub fn parse_type_param_def_data(data: &[u8], start: uint,
|
||||
crate_num: int, tcx: ty::ctxt,
|
||||
conv: conv_did) -> ty::TypeParameterDef
|
||||
{
|
||||
let st = parse_state_from_data(data, crate_num, start, tcx);
|
||||
parse_type_param_def(st, conv)
|
||||
let mut st = parse_state_from_data(data, crate_num, start, tcx);
|
||||
parse_type_param_def(&mut st, conv)
|
||||
}
|
||||
|
||||
fn parse_type_param_def(st: @mut PState, conv: conv_did) -> ty::TypeParameterDef {
|
||||
fn parse_type_param_def(st: &mut PState, conv: conv_did) -> ty::TypeParameterDef {
|
||||
ty::TypeParameterDef {def_id: parse_def(st, NominalType, conv),
|
||||
bounds: @parse_bounds(st, conv)}
|
||||
}
|
||||
|
||||
fn parse_bounds(st: @mut PState, conv: conv_did) -> ty::ParamBounds {
|
||||
fn parse_bounds(st: &mut PState, conv: conv_did) -> ty::ParamBounds {
|
||||
let mut param_bounds = ty::ParamBounds {
|
||||
builtin_bounds: ty::EmptyBuiltinBounds(),
|
||||
trait_bounds: ~[]
|
||||
|
@ -964,7 +964,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
|
||||
|
||||
return do self.read_opaque |this, doc| {
|
||||
let ty = tydecode::parse_ty_data(
|
||||
doc.data,
|
||||
*doc.data,
|
||||
xcx.dcx.cdata.cnum,
|
||||
doc.start,
|
||||
xcx.dcx.tcx,
|
||||
@ -994,7 +994,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder {
|
||||
-> ty::TypeParameterDef {
|
||||
do self.read_opaque |this, doc| {
|
||||
tydecode::parse_type_param_def_data(
|
||||
doc.data,
|
||||
*doc.data,
|
||||
doc.start,
|
||||
xcx.dcx.cdata.cnum,
|
||||
xcx.dcx.tcx,
|
||||
|
@ -434,12 +434,12 @@ pub fn super_fn_sigs<C:Combine>(
|
||||
pub fn super_tys<C:Combine>(
|
||||
this: &C, a: ty::t, b: ty::t) -> cres<ty::t> {
|
||||
let tcx = this.infcx().tcx;
|
||||
return match (/*bad*/copy ty::get(a).sty, /*bad*/copy ty::get(b).sty) {
|
||||
return match (&ty::get(a).sty, &ty::get(b).sty) {
|
||||
// The "subtype" ought to be handling cases involving bot or var:
|
||||
(ty::ty_bot, _) |
|
||||
(_, ty::ty_bot) |
|
||||
(ty::ty_infer(TyVar(_)), _) |
|
||||
(_, ty::ty_infer(TyVar(_))) => {
|
||||
(&ty::ty_bot, _) |
|
||||
(_, &ty::ty_bot) |
|
||||
(&ty::ty_infer(TyVar(_)), _) |
|
||||
(_, &ty::ty_infer(TyVar(_))) => {
|
||||
tcx.sess.bug(
|
||||
fmt!("%s: bot and var types should have been handled (%s,%s)",
|
||||
this.tag(),
|
||||
@ -448,46 +448,46 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
|
||||
// Relate integral variables to other types
|
||||
(ty::ty_infer(IntVar(a_id)), ty::ty_infer(IntVar(b_id))) => {
|
||||
(&ty::ty_infer(IntVar(a_id)), &ty::ty_infer(IntVar(b_id))) => {
|
||||
if_ok!(this.infcx().simple_vars(this.a_is_expected(),
|
||||
a_id, b_id));
|
||||
Ok(a)
|
||||
}
|
||||
(ty::ty_infer(IntVar(v_id)), ty::ty_int(v)) => {
|
||||
(&ty::ty_infer(IntVar(v_id)), &ty::ty_int(v)) => {
|
||||
unify_integral_variable(this, this.a_is_expected(),
|
||||
v_id, IntType(v))
|
||||
}
|
||||
(ty::ty_int(v), ty::ty_infer(IntVar(v_id))) => {
|
||||
(&ty::ty_int(v), &ty::ty_infer(IntVar(v_id))) => {
|
||||
unify_integral_variable(this, !this.a_is_expected(),
|
||||
v_id, IntType(v))
|
||||
}
|
||||
(ty::ty_infer(IntVar(v_id)), ty::ty_uint(v)) => {
|
||||
(&ty::ty_infer(IntVar(v_id)), &ty::ty_uint(v)) => {
|
||||
unify_integral_variable(this, this.a_is_expected(),
|
||||
v_id, UintType(v))
|
||||
}
|
||||
(ty::ty_uint(v), ty::ty_infer(IntVar(v_id))) => {
|
||||
(&ty::ty_uint(v), &ty::ty_infer(IntVar(v_id))) => {
|
||||
unify_integral_variable(this, !this.a_is_expected(),
|
||||
v_id, UintType(v))
|
||||
}
|
||||
|
||||
// Relate floating-point variables to other types
|
||||
(ty::ty_infer(FloatVar(a_id)), ty::ty_infer(FloatVar(b_id))) => {
|
||||
(&ty::ty_infer(FloatVar(a_id)), &ty::ty_infer(FloatVar(b_id))) => {
|
||||
if_ok!(this.infcx().simple_vars(this.a_is_expected(),
|
||||
a_id, b_id));
|
||||
Ok(a)
|
||||
}
|
||||
(ty::ty_infer(FloatVar(v_id)), ty::ty_float(v)) => {
|
||||
(&ty::ty_infer(FloatVar(v_id)), &ty::ty_float(v)) => {
|
||||
unify_float_variable(this, this.a_is_expected(), v_id, v)
|
||||
}
|
||||
(ty::ty_float(v), ty::ty_infer(FloatVar(v_id))) => {
|
||||
(&ty::ty_float(v), &ty::ty_infer(FloatVar(v_id))) => {
|
||||
unify_float_variable(this, !this.a_is_expected(), v_id, v)
|
||||
}
|
||||
|
||||
(ty::ty_nil, _) |
|
||||
(ty::ty_bool, _) |
|
||||
(ty::ty_int(_), _) |
|
||||
(ty::ty_uint(_), _) |
|
||||
(ty::ty_float(_), _) => {
|
||||
(&ty::ty_nil, _) |
|
||||
(&ty::ty_bool, _) |
|
||||
(&ty::ty_int(_), _) |
|
||||
(&ty::ty_uint(_), _) |
|
||||
(&ty::ty_float(_), _) => {
|
||||
if ty::get(a).sty == ty::get(b).sty {
|
||||
Ok(a)
|
||||
} else {
|
||||
@ -495,12 +495,12 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_param(ref a_p), ty::ty_param(ref b_p)) if a_p.idx == b_p.idx => {
|
||||
(&ty::ty_param(ref a_p), &ty::ty_param(ref b_p)) if a_p.idx == b_p.idx => {
|
||||
Ok(a)
|
||||
}
|
||||
|
||||
(ty::ty_enum(a_id, ref a_substs),
|
||||
ty::ty_enum(b_id, ref b_substs))
|
||||
(&ty::ty_enum(a_id, ref a_substs),
|
||||
&ty::ty_enum(b_id, ref b_substs))
|
||||
if a_id == b_id => {
|
||||
let type_def = ty::lookup_item_type(tcx, a_id);
|
||||
do this.substs(&type_def.generics, a_substs, b_substs).chain |substs| {
|
||||
@ -508,8 +508,8 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_trait(a_id, ref a_substs, a_store, a_mutbl),
|
||||
ty::ty_trait(b_id, ref b_substs, b_store, b_mutbl))
|
||||
(&ty::ty_trait(a_id, ref a_substs, a_store, a_mutbl),
|
||||
&ty::ty_trait(b_id, ref b_substs, b_store, b_mutbl))
|
||||
if a_id == b_id && a_mutbl == b_mutbl => {
|
||||
let trait_def = ty::lookup_trait_def(tcx, a_id);
|
||||
do this.substs(&trait_def.generics, a_substs, b_substs).chain |substs| {
|
||||
@ -519,7 +519,7 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_struct(a_id, ref a_substs), ty::ty_struct(b_id, ref b_substs))
|
||||
(&ty::ty_struct(a_id, ref a_substs), &ty::ty_struct(b_id, ref b_substs))
|
||||
if a_id == b_id => {
|
||||
let type_def = ty::lookup_item_type(tcx, a_id);
|
||||
do this.substs(&type_def.generics, a_substs, b_substs).chain |substs| {
|
||||
@ -527,31 +527,31 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_box(ref a_mt), ty::ty_box(ref b_mt)) => {
|
||||
(&ty::ty_box(ref a_mt), &ty::ty_box(ref b_mt)) => {
|
||||
do this.mts(a_mt, b_mt).chain |mt| {
|
||||
Ok(ty::mk_box(tcx, mt))
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_uniq(ref a_mt), ty::ty_uniq(ref b_mt)) => {
|
||||
(&ty::ty_uniq(ref a_mt), &ty::ty_uniq(ref b_mt)) => {
|
||||
do this.mts(a_mt, b_mt).chain |mt| {
|
||||
Ok(ty::mk_uniq(tcx, mt))
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_ptr(ref a_mt), ty::ty_ptr(ref b_mt)) => {
|
||||
(&ty::ty_ptr(ref a_mt), &ty::ty_ptr(ref b_mt)) => {
|
||||
do this.mts(a_mt, b_mt).chain |mt| {
|
||||
Ok(ty::mk_ptr(tcx, mt))
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_rptr(a_r, ref a_mt), ty::ty_rptr(b_r, ref b_mt)) => {
|
||||
(&ty::ty_rptr(a_r, ref a_mt), &ty::ty_rptr(b_r, ref b_mt)) => {
|
||||
let r = if_ok!(this.contraregions(a_r, b_r));
|
||||
let mt = if_ok!(this.mts(a_mt, b_mt));
|
||||
Ok(ty::mk_rptr(tcx, r, mt))
|
||||
}
|
||||
|
||||
(ty::ty_evec(ref a_mt, vs_a), ty::ty_evec(ref b_mt, vs_b)) => {
|
||||
(&ty::ty_evec(ref a_mt, vs_a), &ty::ty_evec(ref b_mt, vs_b)) => {
|
||||
do this.mts(a_mt, b_mt).chain |mt| {
|
||||
do this.vstores(ty::terr_vec, vs_a, vs_b).chain |vs| {
|
||||
Ok(ty::mk_evec(tcx, mt, vs))
|
||||
@ -559,13 +559,13 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_estr(vs_a), ty::ty_estr(vs_b)) => {
|
||||
(&ty::ty_estr(vs_a), &ty::ty_estr(vs_b)) => {
|
||||
do this.vstores(ty::terr_str, vs_a, vs_b).chain |vs| {
|
||||
Ok(ty::mk_estr(tcx,vs))
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_tup(ref as_), ty::ty_tup(ref bs)) => {
|
||||
(&ty::ty_tup(ref as_), &ty::ty_tup(ref bs)) => {
|
||||
if as_.len() == bs.len() {
|
||||
map_vec2(*as_, *bs, |a, b| this.tys(*a, *b) )
|
||||
.chain(|ts| Ok(ty::mk_tup(tcx, ts)) )
|
||||
@ -575,13 +575,13 @@ pub fn super_tys<C:Combine>(
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_bare_fn(ref a_fty), ty::ty_bare_fn(ref b_fty)) => {
|
||||
(&ty::ty_bare_fn(ref a_fty), &ty::ty_bare_fn(ref b_fty)) => {
|
||||
do this.bare_fn_tys(a_fty, b_fty).chain |fty| {
|
||||
Ok(ty::mk_bare_fn(tcx, fty))
|
||||
}
|
||||
}
|
||||
|
||||
(ty::ty_closure(ref a_fty), ty::ty_closure(ref b_fty)) => {
|
||||
(&ty::ty_closure(ref a_fty), &ty::ty_closure(ref b_fty)) => {
|
||||
do this.closure_tys(a_fty, b_fty).chain |fty| {
|
||||
Ok(ty::mk_closure(tcx, fty))
|
||||
}
|
||||
|
@ -13,7 +13,7 @@ use core::prelude::*;
|
||||
use ast;
|
||||
use codemap::{BytePos, CharPos, CodeMap, Pos};
|
||||
use diagnostic;
|
||||
use parse::lexer::{is_whitespace, get_str_from, reader};
|
||||
use parse::lexer::{is_whitespace, with_str_from, reader};
|
||||
use parse::lexer::{StringReader, bump, is_eof, nextch, TokenAndSpan};
|
||||
use parse::lexer::{is_line_non_doc_comment, is_block_non_doc_comment};
|
||||
use parse::lexer;
|
||||
@ -352,9 +352,10 @@ pub fn gather_comments_and_literals(span_diagnostic:
|
||||
//discard, and look ahead; we're working with internal state
|
||||
let TokenAndSpan {tok: tok, sp: sp} = rdr.peek();
|
||||
if token::is_lit(&tok) {
|
||||
let s = get_str_from(rdr, bstart);
|
||||
debug!("tok lit: %s", s);
|
||||
literals.push(lit {lit: s, pos: sp.lo});
|
||||
do with_str_from(rdr, bstart) |s| {
|
||||
debug!("tok lit: %s", s);
|
||||
literals.push(lit {lit: s.to_owned(), pos: sp.lo});
|
||||
}
|
||||
} else {
|
||||
debug!("tok: %s", token::to_str(get_ident_interner(), &tok));
|
||||
}
|
||||
|
@ -165,9 +165,10 @@ fn byte_offset(rdr: &StringReader, pos: BytePos) -> BytePos {
|
||||
(pos - rdr.filemap.start_pos)
|
||||
}
|
||||
|
||||
pub fn get_str_from(rdr: @mut StringReader, start: BytePos) -> ~str {
|
||||
return str::slice(*rdr.src, start.to_uint(),
|
||||
byte_offset(rdr, rdr.last_pos).to_uint()).to_owned();
|
||||
pub fn with_str_from<T>(rdr: @mut StringReader, start: BytePos, f: &fn(s: &str) -> T) -> T {
|
||||
f(rdr.src.slice(
|
||||
byte_offset(rdr, start).to_uint(),
|
||||
byte_offset(rdr, rdr.last_pos).to_uint()))
|
||||
}
|
||||
|
||||
// EFFECT: advance the StringReader by one character. If a newline is
|
||||
@ -259,18 +260,24 @@ fn consume_any_line_comment(rdr: @mut StringReader)
|
||||
bump(rdr);
|
||||
// line comments starting with "///" or "//!" are doc-comments
|
||||
if rdr.curr == '/' || rdr.curr == '!' {
|
||||
let start_bpos = rdr.pos - BytePos(2u);
|
||||
let mut acc = ~"//";
|
||||
let start_bpos = rdr.pos - BytePos(3u);
|
||||
while rdr.curr != '\n' && !is_eof(rdr) {
|
||||
str::push_char(&mut acc, rdr.curr);
|
||||
bump(rdr);
|
||||
}
|
||||
// but comments with only more "/"s are not
|
||||
if !is_line_non_doc_comment(acc) {
|
||||
return Some(TokenAndSpan{
|
||||
tok: token::DOC_COMMENT(str_to_ident(acc)),
|
||||
sp: codemap::mk_sp(start_bpos, rdr.pos)
|
||||
});
|
||||
let ret = do with_str_from(rdr, start_bpos) |string| {
|
||||
// but comments with only more "/"s are not
|
||||
if !is_line_non_doc_comment(string) {
|
||||
Some(TokenAndSpan{
|
||||
tok: token::DOC_COMMENT(str_to_ident(string)),
|
||||
sp: codemap::mk_sp(start_bpos, rdr.pos)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
if ret.is_some() {
|
||||
return ret;
|
||||
}
|
||||
} else {
|
||||
while rdr.curr != '\n' && !is_eof(rdr) { bump(rdr); }
|
||||
@ -306,25 +313,26 @@ pub fn is_block_non_doc_comment(s: &str) -> bool {
|
||||
fn consume_block_comment(rdr: @mut StringReader)
|
||||
-> Option<TokenAndSpan> {
|
||||
// block comments starting with "/**" or "/*!" are doc-comments
|
||||
if rdr.curr == '*' || rdr.curr == '!' {
|
||||
let start_bpos = rdr.pos - BytePos(2u);
|
||||
let mut acc = ~"/*";
|
||||
let res = if rdr.curr == '*' || rdr.curr == '!' {
|
||||
let start_bpos = rdr.pos - BytePos(3u);
|
||||
while !(rdr.curr == '*' && nextch(rdr) == '/') && !is_eof(rdr) {
|
||||
str::push_char(&mut acc, rdr.curr);
|
||||
bump(rdr);
|
||||
}
|
||||
if is_eof(rdr) {
|
||||
rdr.fatal(~"unterminated block doc-comment");
|
||||
} else {
|
||||
acc += "*/";
|
||||
bump(rdr);
|
||||
bump(rdr);
|
||||
// but comments with only "*"s between two "/"s are not
|
||||
if !is_block_non_doc_comment(acc) {
|
||||
return Some(TokenAndSpan{
|
||||
tok: token::DOC_COMMENT(str_to_ident(acc)),
|
||||
sp: codemap::mk_sp(start_bpos, rdr.pos)
|
||||
});
|
||||
do with_str_from(rdr, start_bpos) |string| {
|
||||
// but comments with only "*"s between two "/"s are not
|
||||
if !is_block_non_doc_comment(string) {
|
||||
Some(TokenAndSpan{
|
||||
tok: token::DOC_COMMENT(str_to_ident(string)),
|
||||
sp: codemap::mk_sp(start_bpos, rdr.pos)
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -338,10 +346,11 @@ fn consume_block_comment(rdr: @mut StringReader)
|
||||
bump(rdr);
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
};
|
||||
// restart whitespace munch.
|
||||
|
||||
return consume_whitespace_and_comments(rdr);
|
||||
if res.is_some() { res } else { consume_whitespace_and_comments(rdr) }
|
||||
}
|
||||
|
||||
fn scan_exponent(rdr: @mut StringReader) -> Option<~str> {
|
||||
@ -538,19 +547,23 @@ fn ident_continue(c: char) -> bool {
|
||||
// EFFECT: advances the input past that token
|
||||
// EFFECT: updates the interner
|
||||
fn next_token_inner(rdr: @mut StringReader) -> token::Token {
|
||||
let mut accum_str = ~"";
|
||||
let mut c = rdr.curr;
|
||||
if ident_start(c) {
|
||||
while ident_continue(c) {
|
||||
str::push_char(&mut accum_str, c);
|
||||
let start = rdr.last_pos;
|
||||
while ident_continue(rdr.curr) {
|
||||
bump(rdr);
|
||||
c = rdr.curr;
|
||||
}
|
||||
if accum_str == ~"_" { return token::UNDERSCORE; }
|
||||
let is_mod_name = c == ':' && nextch(rdr) == ':';
|
||||
|
||||
// FIXME: perform NFKC normalization here. (Issue #2253)
|
||||
return token::IDENT(str_to_ident(accum_str), is_mod_name);
|
||||
return do with_str_from(rdr, start) |string| {
|
||||
if string == "_" {
|
||||
token::UNDERSCORE
|
||||
} else {
|
||||
let is_mod_name = rdr.curr == ':' && nextch(rdr) == ':';
|
||||
|
||||
// FIXME: perform NFKC normalization here. (Issue #2253)
|
||||
token::IDENT(str_to_ident(string), is_mod_name)
|
||||
}
|
||||
}
|
||||
}
|
||||
if is_dec_digit(c) {
|
||||
return scan_number(c, rdr);
|
||||
@ -648,19 +661,19 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
|
||||
'\'' => {
|
||||
// Either a character constant 'a' OR a lifetime name 'abc
|
||||
bump(rdr);
|
||||
let start = rdr.last_pos;
|
||||
let mut c2 = rdr.curr;
|
||||
bump(rdr);
|
||||
|
||||
// If the character is an ident start not followed by another single
|
||||
// quote, then this is a lifetime name:
|
||||
if ident_start(c2) && rdr.curr != '\'' {
|
||||
let mut lifetime_name = ~"";
|
||||
lifetime_name.push_char(c2);
|
||||
while ident_continue(rdr.curr) {
|
||||
lifetime_name.push_char(rdr.curr);
|
||||
bump(rdr);
|
||||
}
|
||||
return token::LIFETIME(str_to_ident(lifetime_name));
|
||||
return do with_str_from(rdr, start) |lifetime_name| {
|
||||
token::LIFETIME(str_to_ident(lifetime_name))
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise it is a character constant:
|
||||
@ -690,12 +703,14 @@ fn next_token_inner(rdr: @mut StringReader) -> token::Token {
|
||||
return token::LIT_INT(c2 as i64, ast::ty_char);
|
||||
}
|
||||
'"' => {
|
||||
let n = byte_offset(rdr, rdr.last_pos);
|
||||
let mut accum_str = ~"";
|
||||
let n = rdr.last_pos;
|
||||
bump(rdr);
|
||||
while rdr.curr != '"' {
|
||||
if is_eof(rdr) {
|
||||
rdr.fatal(fmt!("unterminated double quote string: %s",
|
||||
get_str_from(rdr, n)));
|
||||
do with_str_from(rdr, n) |s| {
|
||||
rdr.fatal(fmt!("unterminated double quote string: %s", s));
|
||||
}
|
||||
}
|
||||
|
||||
let ch = rdr.curr;
|
||||
|
Loading…
x
Reference in New Issue
Block a user