auto merge of #5076 : pcwalton/rust/demuting, r=pcwalton
commit e7924ce18f
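The hunks below all follow the same "demuting" pattern: per-field `mut` qualifiers are dropped from the trans data structures, and fields that still have to change after construction become shared mutable boxes (`@mut bool`, `@mut uint`, `@HashMap(...)`, ...), so writes go through an explicit dereference such as `*ccx.uses_gc = true;`. `@mut` no longer exists in Rust; purely as a rough modern analogue (the names below are illustrative and not part of this commit), the same shape can be sketched with `Rc<Cell<T>>`:

use std::cell::Cell;
use std::rc::Rc;

// Illustrative stand-in for the old CrateContext: the struct itself stays
// immutable, and the flag that must still be flipped later lives in a
// shared Cell, much like `uses_gc: @mut bool` in the diff below.
struct CrateCtx {
    uses_gc: Rc<Cell<bool>>,
}

fn mark_uses_gc(ccx: &CrateCtx) {
    // Rough analogue of `*ccx.uses_gc = true;`.
    ccx.uses_gc.set(true);
}

fn main() {
    let ccx = CrateCtx { uses_gc: Rc::new(Cell::new(false)) };
    mark_uses_gc(&ccx);
    assert!(ccx.uses_gc.get());
}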
@@ -831,7 +831,7 @@ pub fn extract_variant_args(bcx: block,
                             -> ExtractedBlock {
     let (enm, evar) = vdefs;
     let _icx = bcx.insn_ctxt("match::extract_variant_args");
-    let ccx = bcx.fcx.ccx;
+    let ccx = *bcx.fcx.ccx;
     let enum_ty_substs = match ty::get(node_id_type(bcx, pat_id)).sty {
         ty::ty_enum(id, ref substs) => {
             assert id == enm;
@@ -1272,7 +1272,7 @@ pub fn compile_submatch(bcx: block,

     let vals_left = vec::append(vec::slice(vals, 0u, col).to_vec(),
                                 vec::slice(vals, col + 1u, vals.len()));
-    let ccx = bcx.fcx.ccx;
+    let ccx = *bcx.fcx.ccx;
     let mut pat_id = 0;
     for vec::each(m) |br| {
         // Find a real id (we're adding placeholder wildcard patterns, but
@@ -1710,7 +1710,7 @@ pub fn bind_irrefutable_pat(bcx: block,
                             binding_mode: IrrefutablePatternBindingMode)
                             -> block {
     let _icx = bcx.insn_ctxt("match::bind_irrefutable_pat");
-    let ccx = bcx.fcx.ccx;
+    let ccx = *bcx.fcx.ccx;
     let mut bcx = bcx;

     // Necessary since bind_irrefutable_pat is called outside trans_match
@@ -866,8 +866,8 @@ pub fn need_invoke(bcx: block) -> bool {
     // Walk the scopes to look for cleanups
     let mut cur = bcx;
     loop {
-        match cur.kind {
-          block_scope(ref inf) => {
+        match *cur.kind {
+          block_scope(ref mut inf) => {
             for vec::each((*inf).cleanups) |cleanup| {
                 match *cleanup {
                   clean(_, cleanup_type) | clean_temp(_, _, cleanup_type) => {
@@ -898,16 +898,21 @@ pub fn have_cached_lpad(bcx: block) -> bool {
     return res;
 }

-pub fn in_lpad_scope_cx(bcx: block, f: fn(scope_info)) {
+pub fn in_lpad_scope_cx(bcx: block, f: fn(&mut scope_info)) {
     let mut bcx = bcx;
     loop {
-        match bcx.kind {
-          block_scope(ref inf) => {
-            if (*inf).cleanups.len() > 0u || bcx.parent.is_none() {
-                f((*inf)); return;
+        {
+            // XXX: Borrow check bug workaround.
+            let kind: &mut block_kind = &mut *bcx.kind;
+            match *kind {
+                block_scope(ref mut inf) => {
+                    if inf.cleanups.len() > 0u || bcx.parent.is_none() {
+                        f(inf);
+                        return;
+                    }
+                }
+                _ => ()
             }
-          }
-          _ => ()
         }
         bcx = block_parent(bcx);
     }
@@ -1157,7 +1162,7 @@ pub fn trans_stmt(cx: block, s: ast::stmt) -> block {
                     }
                 }
             }
-            ast::decl_item(i) => trans_item(cx.fcx.ccx, *i)
+            ast::decl_item(i) => trans_item(*cx.fcx.ccx, *i)
         }
     }
     ast::stmt_mac(*) => cx.tcx().sess.bug(~"unexpanded macro")
@@ -1198,9 +1203,9 @@ pub fn simple_block_scope() -> block_kind {
     block_scope(scope_info {
         loop_break: None,
         loop_label: None,
-        mut cleanups: ~[],
-        mut cleanup_paths: ~[],
-        mut landing_pad: None
+        cleanups: ~[],
+        cleanup_paths: ~[],
+        landing_pad: None
     })
 }

@@ -1226,9 +1231,9 @@ pub fn loop_scope_block(bcx: block,
     return new_block(bcx.fcx, Some(bcx), block_scope(scope_info {
         loop_break: Some(loop_break),
         loop_label: loop_label,
-        mut cleanups: ~[],
-        mut cleanup_paths: ~[],
-        mut landing_pad: None
+        cleanups: ~[],
+        cleanup_paths: ~[],
+        landing_pad: None
     }), bcx.is_lpad, n, opt_node_info);
 }

@@ -1301,23 +1306,30 @@ pub fn cleanup_and_leave(bcx: block,
                      @fmt!("cleanup_and_leave(%s)", cur.to_str()));
         }

-        match cur.kind {
-          block_scope(ref inf) if !inf.cleanups.is_empty() => {
-            for vec::find((*inf).cleanup_paths,
-                          |cp| cp.target == leave).each |cp| {
-                Br(bcx, cp.dest);
-                return;
+        {
+            // XXX: Borrow check bug workaround.
+            let kind: &mut block_kind = &mut *cur.kind;
+            match *kind {
+                block_scope(ref mut inf) if !inf.cleanups.is_empty() => {
+                    for vec::find((*inf).cleanup_paths,
+                                  |cp| cp.target == leave).each |cp| {
+                        Br(bcx, cp.dest);
+                        return;
+                    }
+                    let sub_cx = sub_block(bcx, ~"cleanup");
+                    Br(bcx, sub_cx.llbb);
+                    inf.cleanup_paths.push(cleanup_path {
+                        target: leave,
+                        dest: sub_cx.llbb
+                    });
+                    bcx = trans_block_cleanups_(sub_cx,
+                                                block_cleanups(cur),
+                                                is_lpad);
+                }
+                _ => ()
             }
-            let sub_cx = sub_block(bcx, ~"cleanup");
-            Br(bcx, sub_cx.llbb);
-            (*inf).cleanup_paths.push(cleanup_path {
-                target: leave,
-                dest: sub_cx.llbb
-            });
-            bcx = trans_block_cleanups_(sub_cx, block_cleanups(cur), is_lpad);
-          }
-          _ => ()
         }

         match upto {
           Some(bb) => { if cur.llbb == bb { break; } }
           _ => ()
@@ -1572,25 +1584,25 @@ pub fn new_fn_ctxt_w_id(ccx: @CrateContext,
                         param_substs: Option<@param_substs>,
                         sp: Option<span>) -> fn_ctxt {
     let llbbs = mk_standard_basic_blocks(llfndecl);
-    return @fn_ctxt_ {
+    return @mut fn_ctxt_ {
         llfn: llfndecl,
         llenv: unsafe { llvm::LLVMGetParam(llfndecl, 1u as c_uint) },
         llretptr: unsafe { llvm::LLVMGetParam(llfndecl, 0u as c_uint) },
-        mut llstaticallocas: llbbs.sa,
-        mut llloadenv: None,
-        mut llreturn: llbbs.rt,
-        mut llself: None,
-        mut personality: None,
-        mut loop_ret: None,
-        llargs: HashMap(),
-        lllocals: HashMap(),
-        llupvars: HashMap(),
+        llstaticallocas: llbbs.sa,
+        llloadenv: None,
+        llreturn: llbbs.rt,
+        llself: None,
+        personality: None,
+        loop_ret: None,
+        llargs: @HashMap(),
+        lllocals: @HashMap(),
+        llupvars: @HashMap(),
         id: id,
         impl_id: impl_id,
         param_substs: param_substs,
         span: sp,
         path: path,
-        ccx: ccx
+        ccx: @ccx
     };
 }

@@ -1780,7 +1792,7 @@ pub fn trans_closure(ccx: @CrateContext,
                 llvm::LLVMSetGC(fcx.llfn, strategy);
            }
        }
-        ccx.uses_gc = true;
+        *ccx.uses_gc = true;
    }

    // Create the first basic block in the function and keep a handle on it to
@@ -2803,7 +2815,7 @@ pub fn trap(bcx: block) {
 }

 pub fn decl_gc_metadata(ccx: @CrateContext, llmod_id: ~str) {
-    if !ccx.sess.opts.gc || !ccx.uses_gc {
+    if !ccx.sess.opts.gc || !*ccx.uses_gc {
         return;
     }

@@ -3038,7 +3050,7 @@ pub fn trans_crate(sess: session::Session,
          discrims: HashMap(),
          discrim_symbols: HashMap(),
          tydescs: ty::new_ty_hash(),
-         mut finished_tydescs: false,
+         finished_tydescs: @mut false,
          external: HashMap(),
          monomorphized: HashMap(),
          monomorphizing: HashMap(),
@@ -3080,9 +3092,9 @@ pub fn trans_crate(sess: session::Session,
          builder: BuilderRef_res(unsafe { llvm::LLVMCreateBuilder() }),
          shape_cx: mk_ctxt(llmod),
          crate_map: crate_map,
-         mut uses_gc: false,
+         uses_gc: @mut false,
          dbg_cx: dbg_cx,
-         mut do_not_commit_warning_issued: false
+         do_not_commit_warning_issued: @mut false
     };

     {
@@ -175,7 +175,7 @@ pub struct CrateContext {
     tydescs: HashMap<ty::t, @mut tydesc_info>,
     // Set when running emit_tydescs to enforce that no more tydescs are
     // created.
-    mut finished_tydescs: bool,
+    finished_tydescs: @mut bool,
     // Track mapping of external ids to local items imported for inlining
     external: HashMap<ast::def_id, Option<ast::node_id>>,
     // Cache instances of monomorphized functions
@@ -224,9 +224,9 @@ pub struct CrateContext {
     // Set when at least one function uses GC. Needed so that
     // decl_gc_metadata knows whether to link to the module metadata, which
     // is not emitted by LLVM's GC pass when no functions use GC.
-    mut uses_gc: bool,
+    uses_gc: @mut bool,
     dbg_cx: Option<debuginfo::DebugContext>,
-    mut do_not_commit_warning_issued: bool
+    do_not_commit_warning_issued: @mut bool
 }

 // Types used for llself.
@@ -273,34 +273,34 @@ pub struct fn_ctxt_ {
     // the function, due to LLVM's quirks.
     // A block for all the function's static allocas, so that LLVM
     // will coalesce them into a single alloca call.
-    mut llstaticallocas: BasicBlockRef,
+    llstaticallocas: BasicBlockRef,
     // A block containing code that copies incoming arguments to space
     // already allocated by code in one of the llallocas blocks.
     // (LLVM requires that arguments be copied to local allocas before
     // allowing most any operation to be performed on them.)
-    mut llloadenv: Option<BasicBlockRef>,
-    mut llreturn: BasicBlockRef,
+    llloadenv: Option<BasicBlockRef>,
+    llreturn: BasicBlockRef,
     // The 'self' value currently in use in this function, if there
     // is one.
     //
     // NB: This is the type of the self *variable*, not the self *type*. The
     // self type is set only for default methods, while the self variable is
     // set for all methods.
-    mut llself: Option<ValSelfData>,
+    llself: Option<ValSelfData>,
     // The a value alloca'd for calls to upcalls.rust_personality. Used when
     // outputting the resume instruction.
-    mut personality: Option<ValueRef>,
+    personality: Option<ValueRef>,
     // If this is a for-loop body that returns, this holds the pointers needed
     // for that (flagptr, retptr)
-    mut loop_ret: Option<(ValueRef, ValueRef)>,
+    loop_ret: Option<(ValueRef, ValueRef)>,

     // Maps arguments to allocas created for them in llallocas.
-    llargs: HashMap<ast::node_id, local_val>,
+    llargs: @HashMap<ast::node_id, local_val>,
     // Maps the def_ids for local variables to the allocas created for
     // them in llallocas.
-    lllocals: HashMap<ast::node_id, local_val>,
+    lllocals: @HashMap<ast::node_id, local_val>,
     // Same as above, but for closure upvars
-    llupvars: HashMap<ast::node_id, ValueRef>,
+    llupvars: @HashMap<ast::node_id, ValueRef>,

     // The node_id of the function, or -1 if it doesn't correspond to
     // a user-defined function.
@@ -319,14 +319,14 @@ pub struct fn_ctxt_ {
     path: path,

     // This function's enclosing crate context.
-    ccx: @CrateContext
+    ccx: @@CrateContext
 }

-pub type fn_ctxt = @fn_ctxt_;
+pub type fn_ctxt = @mut fn_ctxt_;

 pub fn warn_not_to_commit(ccx: @CrateContext, msg: ~str) {
-    if !ccx.do_not_commit_warning_issued {
-        ccx.do_not_commit_warning_issued = true;
+    if !*ccx.do_not_commit_warning_issued {
+        *ccx.do_not_commit_warning_issued = true;
         ccx.sess.warn(msg + ~" -- do not commit like this!");
     }
 }
@@ -355,7 +355,7 @@ pub struct cleanup_path {
     dest: BasicBlockRef
 }

-pub fn scope_clean_changed(scope_info: scope_info) {
+pub fn scope_clean_changed(scope_info: &mut scope_info) {
     if scope_info.cleanup_paths.len() > 0u { scope_info.cleanup_paths = ~[]; }
     scope_info.landing_pad = None;
 }
@@ -498,9 +498,9 @@ pub fn revoke_clean(cx: block, val: ValueRef) {
 }

 pub fn block_cleanups(bcx: block) -> ~[cleanup] {
-    match bcx.kind {
+    match *bcx.kind {
       block_non_scope => ~[],
-      block_scope(ref inf) => /*bad*/copy inf.cleanups
+      block_scope(ref mut inf) => /*bad*/copy inf.cleanups
     }
 }

@@ -524,12 +524,12 @@ pub struct scope_info {
     // A list of functions that must be run at when leaving this
     // block, cleaning up any variables that were introduced in the
     // block.
-    mut cleanups: ~[cleanup],
+    cleanups: ~[cleanup],
     // Existing cleanup paths that may be reused, indexed by destination and
     // cleared when the set of cleanups changes.
-    mut cleanup_paths: ~[cleanup_path],
+    cleanup_paths: ~[cleanup_path],
     // Unwinding landing pad. Also cleared when cleanups change.
-    mut landing_pad: Option<BasicBlockRef>,
+    landing_pad: Option<BasicBlockRef>,
 }

 pub trait get_node_info {
@@ -574,11 +574,11 @@ pub struct block_ {
     // instructions into that block by way of this block context.
     // The block pointing to this one in the function's digraph.
     llbb: BasicBlockRef,
-    mut terminated: bool,
-    mut unreachable: bool,
+    terminated: bool,
+    unreachable: bool,
     parent: Option<block>,
     // The 'kind' of basic block this is.
-    kind: block_kind,
+    kind: @mut block_kind,
     // Is this block part of a landing pad?
     is_lpad: bool,
     // info about the AST node this block originated from, if any
@@ -597,21 +597,19 @@ pub fn block_(llbb: BasicBlockRef, parent: Option<block>, -kind: block_kind,
         terminated: false,
         unreachable: false,
         parent: parent,
-        kind: kind,
+        kind: @mut kind,
         is_lpad: is_lpad,
         node_info: node_info,
         fcx: fcx
     }
 }

-/* This must be enum and not type, or trans goes into an infinite loop (#2572)
- */
-pub enum block = @block_;
+pub type block = @mut block_;

 pub fn mk_block(llbb: BasicBlockRef, parent: Option<block>, -kind: block_kind,
                 is_lpad: bool, node_info: Option<NodeInfo>, fcx: fn_ctxt)
                 -> block {
-    block(@block_(llbb, parent, kind, is_lpad, node_info, fcx))
+    @mut block_(llbb, parent, kind, is_lpad, node_info, fcx)
 }

 // First two args are retptr, env
@@ -660,17 +658,21 @@ pub fn struct_elt(llstructty: TypeRef, n: uint) -> TypeRef {
     }
 }

-pub fn in_scope_cx(cx: block, f: fn(scope_info)) {
+pub fn in_scope_cx(cx: block, f: &fn(&mut scope_info)) {
     let mut cur = cx;
     loop {
-        match cur.kind {
-          block_scope(ref inf) => {
-            debug!("in_scope_cx: selected cur=%s (cx=%s)",
-                   cur.to_str(), cx.to_str());
-            f((*inf));
-            return;
-          }
-          _ => ()
+        {
+            // XXX: Borrow check bug workaround.
+            let kind: &mut block_kind = &mut *cur.kind;
+            match *kind {
+                block_scope(ref mut inf) => {
+                    debug!("in_scope_cx: selected cur=%s (cx=%s)",
+                           cur.to_str(), cx.to_str());
+                    f(inf);
+                    return;
+                }
+                _ => ()
+            }
         }
         cur = block_parent(cur);
     }
@@ -687,7 +689,7 @@ pub fn block_parent(cx: block) -> block {
 // Accessors

 pub impl block {
-    pure fn ccx() -> @CrateContext { self.fcx.ccx }
+    pure fn ccx() -> @CrateContext { *self.fcx.ccx }
     pure fn tcx() -> ty::ctxt { self.fcx.ccx.tcx }
     pure fn sess() -> Session { self.fcx.ccx.sess }

@@ -237,7 +237,7 @@ pub fn trans_break_cont(bcx: block,
     let mut unwind = bcx;
     let mut target;
     loop {
-        match unwind.kind {
+        match *unwind.kind {
           block_scope(scope_info {
             loop_break: Some(brk),
             loop_label: l,
@@ -778,7 +778,7 @@ pub fn create_local_var(bcx: block, local: @ast::local)
 pub fn create_arg(bcx: block, arg: ast::arg, sp: span)
     -> Option<@Metadata<ArgumentMetadata>> {
     unsafe {
-        let fcx = bcx.fcx, cx = fcx.ccx;
+        let fcx = bcx.fcx, cx = *fcx.ccx;
         let cache = get_cache(cx);
         let tg = ArgVariableTag;
         match cached_metadata::<@Metadata<ArgumentMetadata>>(
@@ -845,7 +845,7 @@ pub fn update_source_pos(cx: block, s: span) {
 }

 pub fn create_function(fcx: fn_ctxt) -> @Metadata<SubProgramMetadata> {
-    let cx = fcx.ccx;
+    let cx = *fcx.ccx;
     let dbg_cx = (/*bad*/copy cx.dbg_cx).get();

     debug!("~~");
@@ -944,10 +944,10 @@ pub fn trans_local_var(bcx: block, def: ast::def) -> Datum {
             }
         }
         ast::def_arg(nid, _, _) => {
-            take_local(bcx, bcx.fcx.llargs, nid)
+            take_local(bcx, *bcx.fcx.llargs, nid)
         }
         ast::def_local(nid, _) | ast::def_binding(nid, _) => {
-            take_local(bcx, bcx.fcx.lllocals, nid)
+            take_local(bcx, *bcx.fcx.lllocals, nid)
         }
         ast::def_self(nid, _) => {
             let self_info: ValSelfData = match bcx.fcx.llself {
@@ -654,7 +654,7 @@ pub fn declare_tydesc(ccx: @CrateContext, t: ty::t) -> @mut tydesc_info {
     let _icx = ccx.insn_ctxt("declare_tydesc");
     // If emit_tydescs already ran, then we shouldn't be creating any new
     // tydescs.
-    assert !ccx.finished_tydescs;
+    assert !*ccx.finished_tydescs;

     let llty = type_of(ccx, t);

@@ -761,7 +761,7 @@ pub fn make_generic_glue(ccx: @CrateContext, t: ty::t, llfn: ValueRef,
 pub fn emit_tydescs(ccx: @CrateContext) {
     let _icx = ccx.insn_ctxt("emit_tydescs");
     // As of this point, allow no more tydescs to be created.
-    ccx.finished_tydescs = true;
+    *ccx.finished_tydescs = true;
     for ccx.tydescs.each_value |&val| {
         let glue_fn_ty = T_ptr(T_generic_glue_fn(ccx));
         let ti = val;
@@ -230,7 +230,7 @@ pub type ctxt = @ctxt_;
 struct ctxt_ {
     diag: syntax::diagnostic::span_handler,
     interner: HashMap<intern_key, t_box>,
-    mut next_id: uint,
+    next_id: @mut uint,
     vecs_implicitly_copyable: bool,
     legacy_modes: bool,
     cstore: @mut metadata::cstore::CStore,
@@ -260,7 +260,7 @@ struct ctxt_ {
     short_names_cache: HashMap<t, @~str>,
     needs_drop_cache: HashMap<t, bool>,
     needs_unwind_cleanup_cache: HashMap<t, bool>,
-    mut tc_cache: LinearMap<uint, TypeContents>,
+    tc_cache: @mut LinearMap<uint, TypeContents>,
     ast_ty_to_ty_cache: HashMap<node_id, ast_ty_to_ty_cache_entry>,
     enum_var_cache: HashMap<def_id, @~[VariantInfo]>,
     trait_method_cache: HashMap<def_id, @~[method]>,
@@ -804,7 +804,7 @@ pub fn mk_ctxt(s: session::Session,
     @ctxt_ {
         diag: s.diagnostic(),
         interner: interner,
-        mut next_id: 0u,
+        next_id: @mut 0,
         vecs_implicitly_copyable: vecs_implicitly_copyable,
         legacy_modes: legacy_modes,
         cstore: s.cstore,
@@ -823,7 +823,7 @@ pub fn mk_ctxt(s: session::Session,
         short_names_cache: new_ty_hash(),
         needs_drop_cache: new_ty_hash(),
         needs_unwind_cleanup_cache: new_ty_hash(),
-        tc_cache: LinearMap::new(),
+        tc_cache: @mut LinearMap::new(),
         ast_ty_to_ty_cache: HashMap(),
         enum_var_cache: HashMap(),
         trait_method_cache: HashMap(),
@@ -912,7 +912,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t {

     let t = @t_box_ {
         sty: st,
-        id: cx.next_id,
+        id: *cx.next_id,
         flags: flags,
         o_def_id: o_def_id
     };
@@ -923,7 +923,7 @@ fn mk_t_with_id(cx: ctxt, +st: sty, o_def_id: Option<ast::def_id>) -> t {

     cx.interner.insert(key, t);

-    cx.next_id += 1u;
+    *cx.next_id += 1;
     unsafe { cast::reinterpret_cast(&t) }
 }

@@ -196,7 +196,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess,
 struct CtxtRepr {
     parse_sess: @mut parse::ParseSess,
     cfg: ast::crate_cfg,
-    backtrace: Option<@ExpnInfo>,
+    backtrace: @mut Option<@ExpnInfo>,
     mod_path: ~[ast::ident],
     trace_mac: bool
 }
@@ -205,33 +205,33 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess,
     fn parse_sess(@mut self) -> @mut parse::ParseSess { self.parse_sess }
     fn cfg(@mut self) -> ast::crate_cfg { self.cfg }
     fn call_site(@mut self) -> span {
-        match self.backtrace {
+        match *self.backtrace {
             Some(@ExpandedFrom(CallInfo {call_site: cs, _})) => cs,
             None => self.bug(~"missing top span")
         }
     }
     fn print_backtrace(@mut self) { }
-    fn backtrace(@mut self) -> Option<@ExpnInfo> { self.backtrace }
+    fn backtrace(@mut self) -> Option<@ExpnInfo> { *self.backtrace }
     fn mod_push(@mut self, i: ast::ident) { self.mod_path.push(i); }
     fn mod_pop(@mut self) { self.mod_path.pop(); }
     fn mod_path(@mut self) -> ~[ast::ident] { return self.mod_path; }
     fn bt_push(@mut self, ei: codemap::ExpnInfo) {
         match ei {
             ExpandedFrom(CallInfo {call_site: cs, callee: ref callee}) => {
-                self.backtrace =
+                *self.backtrace =
                     Some(@ExpandedFrom(CallInfo {
                         call_site: span {lo: cs.lo, hi: cs.hi,
-                                         expn_info: self.backtrace},
+                                         expn_info: *self.backtrace},
                         callee: (*callee)}));
             }
         }
     }
     fn bt_pop(@mut self) {
-        match self.backtrace {
+        match *self.backtrace {
             Some(@ExpandedFrom(CallInfo {
                 call_site: span {expn_info: prev, _}, _
             })) => {
-                self.backtrace = prev
+                *self.backtrace = prev
             }
             _ => self.bug(~"tried to pop without a push")
         }
@@ -280,7 +280,7 @@ pub fn mk_ctxt(parse_sess: @mut parse::ParseSess,
     let imp: @mut CtxtRepr = @mut CtxtRepr {
         parse_sess: parse_sess,
         cfg: cfg,
-        backtrace: None,
+        backtrace: @mut None,
         mod_path: ~[],
         trace_mac: false
     };
@@ -28,7 +28,7 @@ use std::oldmap::HashMap;
 `~` */
 ///an unzipping of `token_tree`s
 struct TtFrame {
-    readme: ~[ast::token_tree],
+    readme: @mut ~[ast::token_tree],
     idx: uint,
     dotdotdoted: bool,
     sep: Option<Token>,
@@ -60,7 +60,7 @@ pub fn new_tt_reader(sp_diag: span_handler,
         sp_diag: sp_diag,
         interner: itr,
         mut cur: @mut TtFrame {
-            readme: src,
+            readme: @mut src,
             idx: 0u,
             dotdotdoted: false,
             sep: None,
@@ -82,7 +82,7 @@ pub fn new_tt_reader(sp_diag: span_handler,

 pure fn dup_tt_frame(f: @mut TtFrame) -> @mut TtFrame {
     @mut TtFrame {
-        readme: f.readme,
+        readme: @mut (copy *f.readme),
         idx: f.idx,
         dotdotdoted: f.dotdotdoted,
         sep: f.sep,
@@ -199,9 +199,9 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
     loop { /* because it's easiest, this handles `tt_delim` not starting
               with a `tt_tok`, even though it won't happen */
         match r.cur.readme[r.cur.idx] {
-            tt_delim(copy tts) => {
+            tt_delim(tts) => {
                 r.cur = @mut TtFrame {
-                    readme: tts,
+                    readme: @mut copy tts,
                     idx: 0u,
                     dotdotdoted: false,
                     sep: None,
@@ -242,7 +242,7 @@ pub fn tt_next_token(r: @mut TtReader) -> TokenAndSpan {
                 r.repeat_len.push(len);
                 r.repeat_idx.push(0u);
                 r.cur = @mut TtFrame {
-                    readme: tts,
+                    readme: @mut copy tts,
                     idx: 0u,
                     dotdotdoted: true,
                     sep: sep,
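Several hunks above also wrap the old `match bcx.kind { ... }` in an extra block and pull out `let kind: &mut block_kind = &mut *bcx.kind;` before matching, flagged as "XXX: Borrow check bug workaround": once `kind` lives in an `@mut` box, the match wants one explicitly scoped mutable borrow. A loose modern sketch of that shape only (illustrative names, with `RefCell` standing in for `@mut`, not code from this commit):

use std::cell::RefCell;

enum BlockKind {
    Scope { cleanups: Vec<String> },
    NonScope,
}

struct Block {
    kind: RefCell<BlockKind>,
}

// Rough analogue of in_scope_cx: take the mutable borrow once, bind it to a
// local, then match on it and hand the scope's cleanup list to the callback.
fn in_scope<F: FnOnce(&mut Vec<String>)>(blk: &Block, f: F) {
    let mut kind = blk.kind.borrow_mut();
    match &mut *kind {
        BlockKind::Scope { cleanups } => f(cleanups),
        BlockKind::NonScope => {}
    }
}

fn main() {
    let blk = Block {
        kind: RefCell::new(BlockKind::Scope { cleanups: vec![] }),
    };
    in_scope(&blk, |c| c.push(String::from("drop temp")));
}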