Use a Vec instead of a HashMap for the scope hierarchy

This greatly improves regionck performance: type-checking time on
librustc decreased from 9.1s to 8.1s. Because of Amdahl's law,
overall compile time improves by only about 1.5% (LLVM wizards,
this is your opportunity to shine!).

before:
576.91user 4.26system 7:42.36elapsed 125%CPU (0avgtext+0avgdata 1142192maxresident)k
after:
566.50user 4.84system 7:36.84elapsed 125%CPU (0avgtext+0avgdata 1124304maxresident)k

I am somewhat worried; we really need to find out why we have this Red Queen's
race going on here. Originally I suspected it might be caused by RFC 1214's
warnings, but it seems to be an effect of other changes.

However, the increase seems to be mostly in LLVM's time, so I guess
it's the LLVM wizards' problem.
Ariel Ben-Yehuda 2015-08-20 01:46:28 +03:00 committed by Ariel Ben-Yehuda
parent 2bcc6d8ec7
commit fc304384e6
17 changed files with 579 additions and 558 deletions
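For orientation before the per-file diffs, here is a minimal, self-contained Rust sketch of the data-structure change the message describes: CodeExtent becomes a plain u32 handle into a Vec<CodeExtentData>, and the parent relation lives in a parallel Vec<CodeExtent> indexed by that handle instead of a HashMap keyed by the extent value. The names mirror the patch, but the types, fields, and the u32 stand-in for ast::NodeId are simplified assumptions, not the exact rustc definitions.

use std::collections::HashMap;

// Simplified stand-ins for the patch's types (illustrative only).
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct CodeExtent(u32); // a cheap index instead of a by-value enum key

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum CodeExtentData {
    Misc(u32),
    DestructionScope(u32),
}

// Slot 0 acts as the root of the hierarchy ("no parent").
const ROOT_CODE_EXTENT: CodeExtent = CodeExtent(0);

struct RegionMaps {
    // index -> data, so looking up an extent's data is a Vec index
    code_extents: Vec<CodeExtentData>,
    // data -> index, used only while building the tree
    interner: HashMap<CodeExtentData, CodeExtent>,
    // index -> parent index; replaces the old HashMap<CodeExtent, CodeExtent>
    scope_map: Vec<CodeExtent>,
}

impl RegionMaps {
    fn new() -> RegionMaps {
        let mut rm = RegionMaps {
            code_extents: vec![],
            interner: HashMap::new(),
            scope_map: vec![],
        };
        // reserve index 0 as the root extent
        rm.intern(CodeExtentData::DestructionScope(0), ROOT_CODE_EXTENT);
        rm
    }

    fn intern(&mut self, data: CodeExtentData, parent: CodeExtent) -> CodeExtent {
        if let Some(&idx) = self.interner.get(&data) {
            return idx;
        }
        let idx = CodeExtent(self.code_extents.len() as u32);
        self.code_extents.push(data);
        self.scope_map.push(parent);
        self.interner.insert(data, idx);
        idx
    }

    fn opt_encl_scope(&self, id: CodeExtent) -> Option<CodeExtent> {
        // the hot path: an array index instead of a hash lookup
        match self.scope_map[id.0 as usize] {
            ROOT_CODE_EXTENT => None,
            parent => Some(parent),
        }
    }
}

fn main() {
    let mut rm = RegionMaps::new();
    let block = rm.intern(CodeExtentData::Misc(1), ROOT_CODE_EXTENT);
    let stmt = rm.intern(CodeExtentData::Misc(2), block);
    assert_eq!(rm.opt_encl_scope(stmt), Some(block));
    assert_eq!(rm.opt_encl_scope(ROOT_CODE_EXTENT), None);
    println!("parent of {:?} is {:?}", stmt, rm.opt_encl_scope(stmt));
}

The point of this layout is that opt_encl_scope, which regionck calls constantly while walking the scope tree, becomes a bounds-checked array index; the HashMap survives only as a build-time interner.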


@ -207,6 +207,8 @@ pub fn parse_region(&mut self) -> ty::Region {
}
'B' => {
assert_eq!(self.next(), '[');
// this is totally wrong, but nobody relevant cares about
// this field - it will die soon(TM).
let node_id = self.parse_uint() as ast::NodeId;
assert_eq!(self.next(), '|');
let space = self.parse_param_space();
@ -246,24 +248,26 @@ pub fn parse_region(&mut self) -> ty::Region {
}
fn parse_scope(&mut self) -> region::CodeExtent {
match self.next() {
self.tcx.region_maps.bogus_code_extent(match self.next() {
// the scopes created here are totally bogus with their
// NodeIDs
'P' => {
assert_eq!(self.next(), '[');
let fn_id = self.parse_uint() as ast::NodeId;
assert_eq!(self.next(), '|');
let body_id = self.parse_uint() as ast::NodeId;
assert_eq!(self.next(), ']');
region::CodeExtent::ParameterScope {
region::CodeExtentData::ParameterScope {
fn_id: fn_id, body_id: body_id
}
}
'M' => {
let node_id = self.parse_uint() as ast::NodeId;
region::CodeExtent::Misc(node_id)
region::CodeExtentData::Misc(node_id)
}
'D' => {
let node_id = self.parse_uint() as ast::NodeId;
region::CodeExtent::DestructionScope(node_id)
region::CodeExtentData::DestructionScope(node_id)
}
'B' => {
assert_eq!(self.next(), '[');
@ -274,10 +278,10 @@ fn parse_scope(&mut self) -> region::CodeExtent {
let block_remainder = region::BlockRemainder {
block: node_id, first_statement_index: first_stmt_index,
};
region::CodeExtent::Remainder(block_remainder)
region::CodeExtentData::Remainder(block_remainder)
}
_ => panic!("parse_scope: bad input")
}
})
}
fn parse_destruction_scope_data(&mut self) -> region::DestructionScopeData {
@ -619,6 +623,33 @@ pub fn parse_type_param_def(&mut self) -> ty::TypeParameterDef<'tcx> {
}
}
pub fn parse_region_param_def(&mut self) -> ty::RegionParameterDef {
let name = self.parse_name(':');
let def_id = self.parse_def(NominalType);
let space = self.parse_param_space();
assert_eq!(self.next(), '|');
let index = self.parse_u32();
assert_eq!(self.next(), '|');
let mut bounds = vec![];
loop {
match self.next() {
'R' => bounds.push(self.parse_region()),
'.' => { break; }
c => {
panic!("parse_region_param_def: bad bounds ('{}')", c)
}
}
}
ty::RegionParameterDef {
name: name,
def_id: def_id,
space: space,
index: index,
bounds: bounds
}
}
fn parse_object_lifetime_default(&mut self) -> ty::ObjectLifetimeDefault {
match self.next() {
'a' => ty::ObjectLifetimeDefault::Ambiguous,


@ -278,14 +278,14 @@ pub fn enc_region(w: &mut Encoder, cx: &ctxt, r: ty::Region) {
}
}
fn enc_scope(w: &mut Encoder, _cx: &ctxt, scope: region::CodeExtent) {
match scope {
region::CodeExtent::ParameterScope {
fn enc_scope(w: &mut Encoder, cx: &ctxt, scope: region::CodeExtent) {
match cx.tcx.region_maps.code_extent_data(scope) {
region::CodeExtentData::ParameterScope {
fn_id, body_id } => mywrite!(w, "P[{}|{}]", fn_id, body_id),
region::CodeExtent::Misc(node_id) => mywrite!(w, "M{}", node_id),
region::CodeExtent::Remainder(region::BlockRemainder {
region::CodeExtentData::Misc(node_id) => mywrite!(w, "M{}", node_id),
region::CodeExtentData::Remainder(region::BlockRemainder {
block: b, first_statement_index: i }) => mywrite!(w, "B[{}|{}]", b, i),
region::CodeExtent::DestructionScope(node_id) => mywrite!(w, "D{}", node_id),
region::CodeExtentData::DestructionScope(node_id) => mywrite!(w, "D{}", node_id),
}
}
@ -396,17 +396,6 @@ pub fn enc_existential_bounds<'a,'tcx>(w: &mut Encoder,
mywrite!(w, ".");
}
pub fn enc_region_bounds<'a, 'tcx>(w: &mut Encoder,
cx: &ctxt<'a, 'tcx>,
rs: &[ty::Region]) {
for &r in rs {
mywrite!(w, "R");
enc_region(w, cx, r);
}
mywrite!(w, ".");
}
pub fn enc_type_param_def<'a, 'tcx>(w: &mut Encoder, cx: &ctxt<'a, 'tcx>,
v: &ty::TypeParameterDef<'tcx>) {
mywrite!(w, "{}:{}|{}|{}|{}|",
@ -416,6 +405,18 @@ pub fn enc_type_param_def<'a, 'tcx>(w: &mut Encoder, cx: &ctxt<'a, 'tcx>,
enc_object_lifetime_default(w, cx, v.object_lifetime_default);
}
pub fn enc_region_param_def(w: &mut Encoder, cx: &ctxt,
v: &ty::RegionParameterDef) {
mywrite!(w, "{}:{}|{}|{}|",
v.name, (cx.ds)(v.def_id),
v.space.to_uint(), v.index);
for &r in &v.bounds {
mywrite!(w, "R");
enc_region(w, cx, r);
}
mywrite!(w, ".");
}
fn enc_object_lifetime_default<'a, 'tcx>(w: &mut Encoder,
cx: &ctxt<'a, 'tcx>,
default: ty::ObjectLifetimeDefault)


@ -478,67 +478,6 @@ fn tr(&self, dcx: &DecodeContext) -> def::Def {
}
}
// ______________________________________________________________________
// Encoding and decoding of ancillary information
impl tr for ty::Region {
fn tr(&self, dcx: &DecodeContext) -> ty::Region {
match *self {
ty::ReLateBound(debruijn, br) => {
ty::ReLateBound(debruijn, br.tr(dcx))
}
ty::ReEarlyBound(data) => {
ty::ReEarlyBound(ty::EarlyBoundRegion {
param_id: dcx.tr_id(data.param_id),
space: data.space,
index: data.index,
name: data.name,
})
}
ty::ReScope(scope) => {
ty::ReScope(scope.tr(dcx))
}
ty::ReEmpty | ty::ReStatic | ty::ReVar(..) | ty::ReSkolemized(..) => {
*self
}
ty::ReFree(ref fr) => {
ty::ReFree(fr.tr(dcx))
}
}
}
}
impl tr for ty::FreeRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::FreeRegion {
ty::FreeRegion { scope: self.scope.tr(dcx),
bound_region: self.bound_region.tr(dcx) }
}
}
impl tr for region::CodeExtent {
fn tr(&self, dcx: &DecodeContext) -> region::CodeExtent {
self.map_id(|id| dcx.tr_id(id))
}
}
impl tr for region::DestructionScopeData {
fn tr(&self, dcx: &DecodeContext) -> region::DestructionScopeData {
region::DestructionScopeData { node_id: dcx.tr_id(self.node_id) }
}
}
impl tr for ty::BoundRegion {
fn tr(&self, dcx: &DecodeContext) -> ty::BoundRegion {
match *self {
ty::BrAnon(_) |
ty::BrFresh(_) |
ty::BrEnv => *self,
ty::BrNamed(id, ident) => ty::BrNamed(dcx.tr_def_id(id),
ident),
}
}
}
// ______________________________________________________________________
// Encoding and decoding of freevar information
@ -574,24 +513,6 @@ fn tr(&self, dcx: &DecodeContext) -> ty::Freevar {
}
}
impl tr for ty::UpvarBorrow {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarBorrow {
ty::UpvarBorrow {
kind: self.kind,
region: self.region.tr(dcx)
}
}
}
impl tr for ty::UpvarCapture {
fn tr(&self, dcx: &DecodeContext) -> ty::UpvarCapture {
match *self {
ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue,
ty::UpvarCapture::ByRef(ref data) => ty::UpvarCapture::ByRef(data.tr(dcx)),
}
}
}
// ______________________________________________________________________
// Encoding and decoding of MethodCallee
@ -703,10 +624,13 @@ fn ty_str_ctxt<'b>(&'b self) -> tyencode::ctxt<'b, 'tcx> {
trait rbml_writer_helpers<'tcx> {
fn emit_closure_type<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
closure_type: &ty::ClosureTy<'tcx>);
fn emit_region(&mut self, ecx: &e::EncodeContext, r: ty::Region);
fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>);
fn emit_tys<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, tys: &[Ty<'tcx>]);
fn emit_type_param_def<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
type_param_def: &ty::TypeParameterDef<'tcx>);
fn emit_region_param_def(&mut self, ecx: &e::EncodeContext,
region_param_def: &ty::RegionParameterDef);
fn emit_predicate<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
predicate: &ty::Predicate<'tcx>);
fn emit_trait_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
@ -718,9 +642,11 @@ fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
fn emit_existential_bounds<'b>(&mut self, ecx: &e::EncodeContext<'b,'tcx>,
bounds: &ty::ExistentialBounds<'tcx>);
fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBounds);
fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture);
fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
adj: &ty::AutoAdjustment<'tcx>);
fn emit_autoref<'a>(&mut self, autoref: &ty::AutoRef<'tcx>);
fn emit_autoref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
autoref: &ty::AutoRef<'tcx>);
fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>,
auto_deref_ref: &ty::AutoDerefRef<'tcx>);
}
@ -734,6 +660,10 @@ fn emit_closure_type<'b>(&mut self,
});
}
fn emit_region(&mut self, ecx: &e::EncodeContext, r: ty::Region) {
self.emit_opaque(|this| Ok(e::write_region(ecx, this, r)));
}
fn emit_ty<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, ty: Ty<'tcx>) {
self.emit_opaque(|this| Ok(e::write_type(ecx, this, ty)));
}
@ -755,7 +685,14 @@ fn emit_type_param_def<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
type_param_def))
});
}
fn emit_region_param_def(&mut self, ecx: &e::EncodeContext,
region_param_def: &ty::RegionParameterDef) {
self.emit_opaque(|this| {
Ok(tyencode::enc_region_param_def(this,
&ecx.ty_str_ctxt(),
region_param_def))
});
}
fn emit_predicate<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
predicate: &ty::Predicate<'tcx>) {
self.emit_opaque(|this| {
@ -781,7 +718,7 @@ fn emit_type_scheme<'b>(&mut self,
this.emit_struct_field("regions", 1, |this| {
Ok(encode_vec_per_param_space(
this, &type_scheme.generics.regions,
|this, def| def.encode(this).unwrap()))
|this, def| this.emit_region_param_def(ecx, def)))
})
})
});
@ -804,6 +741,26 @@ fn emit_builtin_bounds(&mut self, ecx: &e::EncodeContext, bounds: &ty::BuiltinBo
bounds)));
}
fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture) {
use serialize::Encoder;
self.emit_enum("UpvarCapture", |this| {
match *capture {
ty::UpvarCapture::ByValue => {
this.emit_enum_variant("ByValue", 1, 0, |_| Ok(()))
}
ty::UpvarCapture::ByRef(ty::UpvarBorrow { kind, region }) => {
this.emit_enum_variant("ByRef", 2, 0, |this| {
this.emit_enum_variant_arg(0,
|this| kind.encode(this));
this.emit_enum_variant_arg(1,
|this| Ok(this.emit_region(ecx, region)))
})
}
}
}).unwrap()
}
fn emit_substs<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
substs: &subst::Substs<'tcx>) {
self.emit_opaque(|this| Ok(tyencode::enc_substs(this,
@ -837,14 +794,16 @@ fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
});
}
fn emit_autoref<'b>(&mut self, autoref: &ty::AutoRef<'tcx>) {
fn emit_autoref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
autoref: &ty::AutoRef<'tcx>) {
use serialize::Encoder;
self.emit_enum("AutoRef", |this| {
match autoref {
&ty::AutoPtr(r, m) => {
this.emit_enum_variant("AutoPtr", 0, 2, |this| {
this.emit_enum_variant_arg(0, |this| r.encode(this));
this.emit_enum_variant_arg(0,
|this| Ok(this.emit_region(ecx, *r)));
this.emit_enum_variant_arg(1, |this| m.encode(this))
})
}
@ -868,7 +827,7 @@ fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>,
this.emit_option(|this| {
match auto_deref_ref.autoref {
None => this.emit_option_none(),
Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(a))),
Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a))),
}
})
});
@ -983,7 +942,7 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
.unwrap()
.clone();
var_id.encode(rbml_w);
upvar_capture.encode(rbml_w);
rbml_w.emit_upvar_capture(ecx, &upvar_capture);
})
}
}
@ -1080,6 +1039,7 @@ fn read_ty_encoded<'a, 'b, F, R>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>,
f: F) -> R
where F: for<'x> FnOnce(&mut tydecode::TyDecoder<'x, 'tcx>) -> R;
fn read_region(&mut self, dcx: &DecodeContext) -> ty::Region;
fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx>;
fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Vec<Ty<'tcx>>;
fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
@ -1088,6 +1048,8 @@ fn read_poly_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::PolyTraitRef<'tcx>;
fn read_type_param_def<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::TypeParameterDef<'tcx>;
fn read_region_param_def(&mut self, dcx: &DecodeContext)
-> ty::RegionParameterDef;
fn read_predicate<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::Predicate<'tcx>;
fn read_type_scheme<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
@ -1096,6 +1058,8 @@ fn read_existential_bounds<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::ExistentialBounds<'tcx>;
fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> subst::Substs<'tcx>;
fn read_upvar_capture(&mut self, dcx: &DecodeContext)
-> ty::UpvarCapture;
fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
-> ty::AutoAdjustment<'tcx>;
fn read_cast_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>)
@ -1180,13 +1144,14 @@ fn type_string(doc: rbml::Doc) -> String {
str
}
}
fn read_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> Ty<'tcx> {
fn read_region(&mut self, dcx: &DecodeContext) -> ty::Region {
// Note: region types embed local node ids. In principle, we
// should translate these node ids into the new decode
// context. However, we do not bother, because region types
// are not used during trans.
// are not used during trans. This also applies to read_ty.
return self.read_ty_encoded(dcx, |decoder| decoder.parse_region());
}
fn read_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> Ty<'tcx> {
return self.read_ty_encoded(dcx, |decoder| decoder.parse_ty());
}
@ -1209,7 +1174,10 @@ fn read_type_param_def<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::TypeParameterDef<'tcx> {
self.read_ty_encoded(dcx, |decoder| decoder.parse_type_param_def())
}
fn read_region_param_def(&mut self, dcx: &DecodeContext)
-> ty::RegionParameterDef {
self.read_ty_encoded(dcx, |decoder| decoder.parse_region_param_def())
}
fn read_predicate<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::Predicate<'tcx>
{
@ -1232,7 +1200,7 @@ fn read_type_scheme<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
regions:
this.read_struct_field("regions", 1, |this| {
Ok(this.read_vec_per_param_space(
|this| Decodable::decode(this).unwrap()))
|this| this.read_region_param_def(dcx)))
}).unwrap(),
})
})
@ -1258,7 +1226,23 @@ fn read_substs<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
.parse_substs())
}).unwrap()
}
fn read_upvar_capture(&mut self, dcx: &DecodeContext) -> ty::UpvarCapture {
self.read_enum("UpvarCapture", |this| {
let variants = ["ByValue", "ByRef"];
this.read_enum_variant(&variants, |this, i| {
Ok(match i {
1 => ty::UpvarCapture::ByValue,
2 => ty::UpvarCapture::ByRef(ty::UpvarBorrow {
kind: this.read_enum_variant_arg(0,
|this| Decodable::decode(this)).unwrap(),
region: this.read_enum_variant_arg(1,
|this| Ok(this.read_region(dcx))).unwrap()
}),
_ => panic!("bad enum variant for ty::UpvarCapture")
})
})
}).unwrap()
}
fn read_auto_adjustment<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
-> ty::AutoAdjustment<'tcx> {
self.read_enum("AutoAdjustment", |this| {
@ -1317,11 +1301,15 @@ fn read_autoref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
Ok(match i {
0 => {
let r: ty::Region =
this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap();
this.read_enum_variant_arg(0, |this| {
Ok(this.read_region(dcx))
}).unwrap();
let m: ast::Mutability =
this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap();
this.read_enum_variant_arg(1, |this| {
Decodable::decode(this)
}).unwrap();
ty::AutoPtr(dcx.tcx.mk_region(r.tr(dcx)), m)
ty::AutoPtr(dcx.tcx.mk_region(r), m)
}
1 => {
let m: ast::Mutability =
@ -1376,6 +1364,9 @@ fn read_closure_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>)
/// case. We translate them with `tr_def_id()` which will map
/// the crate numbers back to the original source crate.
///
/// Scopes will end up being totally bogus. This can actually
/// be fixed, though.
///
/// Unboxed closures are cloned along with the function being
/// inlined, and all side tables use interned node IDs, so we
/// translate their def IDs accordingly.
@ -1453,8 +1444,8 @@ fn decode_side_tables(dcx: &DecodeContext,
var_id: dcx.tr_id(var_id),
closure_expr_id: id
};
let ub: ty::UpvarCapture = Decodable::decode(val_dsr).unwrap();
dcx.tcx.tables.borrow_mut().upvar_capture_map.insert(upvar_id, ub.tr(dcx));
let ub = val_dsr.read_upvar_capture(dcx);
dcx.tcx.tables.borrow_mut().upvar_capture_map.insert(upvar_id, ub);
}
c::tag_table_tcache => {
let type_scheme = val_dsr.read_type_scheme(dcx);


@ -12,7 +12,6 @@
use middle::cfg::*;
use middle::def;
use middle::pat_util;
use middle::region::CodeExtent;
use middle::ty;
use syntax::ast;
use syntax::ast_util;
@ -585,11 +584,10 @@ fn add_exiting_edge(&mut self,
to_loop: LoopScope,
to_index: CFGIndex) {
let mut data = CFGEdgeData {exiting_scopes: vec!() };
let mut scope = CodeExtent::from_node_id(from_expr.id);
let target_scope = CodeExtent::from_node_id(to_loop.loop_id);
let mut scope = self.tcx.region_maps.node_extent(from_expr.id);
let target_scope = self.tcx.region_maps.node_extent(to_loop.loop_id);
while scope != target_scope {
data.exiting_scopes.push(scope.node_id());
data.exiting_scopes.push(scope.node_id(&self.tcx.region_maps));
scope = self.tcx.region_maps.encl_scope(scope);
}
self.graph.add_edge(from_index, to_index, data);


@ -20,7 +20,7 @@
use self::TrackMatchMode::*;
use self::OverloadedCallType::*;
use middle::{def, region, pat_util};
use middle::{def, pat_util};
use middle::def_id::{DefId};
use middle::infer;
use middle::mem_categorization as mc;
@ -296,7 +296,7 @@ fn walk_arg_patterns(&mut self,
for arg in &decl.inputs {
let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id));
let fn_body_scope = region::CodeExtent::from_node_id(body.id);
let fn_body_scope = self.tcx().region_maps.node_extent(body.id);
let arg_cmt = self.mc.cat_rvalue(
arg.id,
arg.pat.span,
@ -579,7 +579,7 @@ fn walk_callee(&mut self, call: &ast::Expr, callee: &ast::Expr) {
let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee));
debug!("walk_callee: callee={:?} callee_ty={:?}",
callee, callee_ty);
let call_scope = region::CodeExtent::from_node_id(call.id);
let call_scope = self.tcx().region_maps.node_extent(call.id);
match callee_ty.sty {
ty::TyBareFn(..) => {
self.consume_expr(callee);
@ -862,7 +862,7 @@ fn walk_autoref(&mut self,
// Converting from a &T to *T (or &mut T to *mut T) is
// treated as borrowing it for the enclosing temporary
// scope.
let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
self.delegate.borrow(expr.id,
expr.span,
@ -917,7 +917,7 @@ fn walk_overloaded_operator(&mut self,
// methods are implicitly autoref'd which sadly does not use
// adjustments, so we must hardcode the borrow here.
let r = ty::ReScope(region::CodeExtent::from_node_id(expr.id));
let r = ty::ReScope(self.tcx().region_maps.node_extent(expr.id));
let bk = ty::ImmBorrow;
for &arg in &rhs {


@ -135,7 +135,8 @@ pub fn is_subregion_of(&self,
tcx.region_maps.is_subscope_of(sub_scope, super_scope),
(ty::ReScope(sub_scope), ty::ReFree(fr)) =>
tcx.region_maps.is_subscope_of(sub_scope, fr.scope.to_code_extent()) ||
tcx.region_maps.is_subscope_of(sub_scope,
fr.scope.to_code_extent(&tcx.region_maps)) ||
self.is_static(fr),
(ty::ReFree(sub_fr), ty::ReFree(super_fr)) =>
@ -177,4 +178,3 @@ fn lub() {
map.relate_free_regions(frs[1], frs[2]);
assert_eq!(map.lub_free_regions(frs[0], frs[1]), ty::ReFree(frs[2]));
}


@ -121,11 +121,11 @@ fn explain_span(tcx: &ty::ctxt, heading: &str, span: Span)
format!("{}unknown scope: {:?}{}. Please report a bug.",
prefix, scope, suffix)
};
let span = match scope.span(&self.map) {
let span = match scope.span(&self.region_maps, &self.map) {
Some(s) => s,
None => return self.sess.note(&unknown_scope())
};
let tag = match self.map.find(scope.node_id()) {
let tag = match self.map.find(scope.node_id(&self.region_maps)) {
Some(ast_map::NodeBlock(_)) => "block",
Some(ast_map::NodeExpr(expr)) => match expr.node {
ast::ExprCall(..) => "call",
@ -142,16 +142,16 @@ fn explain_span(tcx: &ty::ctxt, heading: &str, span: Span)
return self.sess.span_note(span, &unknown_scope());
}
};
let scope_decorated_tag = match scope {
region::CodeExtent::Misc(_) => tag,
region::CodeExtent::ParameterScope { .. } => {
let scope_decorated_tag = match self.region_maps.code_extent_data(scope) {
region::CodeExtentData::Misc(_) => tag,
region::CodeExtentData::ParameterScope { .. } => {
"scope of parameters for function"
}
region::CodeExtent::DestructionScope(_) => {
region::CodeExtentData::DestructionScope(_) => {
new_string = format!("destruction scope surrounding {}", tag);
&new_string[..]
}
region::CodeExtent::Remainder(r) => {
region::CodeExtentData::Remainder(r) => {
new_string = format!("block suffix following statement {}",
r.first_statement_index);
&new_string[..]


@ -790,7 +790,7 @@ fn lub_concrete_regions(&self, free_regions: &FreeRegionMap, a: Region, b: Regio
// A "free" region can be interpreted as "some region
// at least as big as the block fr.scope_id". So, we can
// reasonably compare free regions and scopes:
let fr_scope = fr.scope.to_code_extent();
let fr_scope = fr.scope.to_code_extent(&self.tcx.region_maps);
let r_id = self.tcx.region_maps.nearest_common_ancestor(fr_scope, s_id);
if r_id == fr_scope {
@ -871,7 +871,7 @@ fn glb_concrete_regions(&self,
// than the scope `s_id`, then we can say that the GLB
// is the scope `s_id`. Otherwise, as we do not know
// big the free region is precisely, the GLB is undefined.
let fr_scope = fr.scope.to_code_extent();
let fr_scope = fr.scope.to_code_extent(&self.tcx.region_maps);
if self.tcx.region_maps.nearest_common_ancestor(fr_scope, s_id) == fr_scope ||
free_regions.is_static(fr) {
Ok(s)
@ -927,8 +927,8 @@ fn helper<'a, 'tcx>(this: &RegionVarBindings<'a, 'tcx>,
Ok(ty::ReFree(*b))
} else {
this.intersect_scopes(ty::ReFree(*a), ty::ReFree(*b),
a.scope.to_code_extent(),
b.scope.to_code_extent())
a.scope.to_code_extent(&this.tcx.region_maps),
b.scope.to_code_extent(&this.tcx.region_maps))
}
}
}


@ -20,16 +20,27 @@
use metadata::inline::InlinedItem;
use middle::ty::{self, Ty};
use session::Session;
use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap};
use util::nodemap::{FnvHashMap, NodeMap, NodeSet};
use std::cell::RefCell;
use std::collections::hash_map::Entry;
use std::mem;
use syntax::codemap::{self, Span};
use syntax::{ast, visit};
use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local};
use syntax::ast_util::stmt_id;
use syntax::ptr::P;
use syntax::visit::{Visitor, FnKind};
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
RustcDecodable, Debug, Copy)]
pub struct CodeExtent(u32);
/// The root of everything. I should be using NonZero or profiling
/// instead of this (probably).
pub const ROOT_CODE_EXTENT : CodeExtent = CodeExtent(0);
/// A placeholder used in trans to stand for real code extents
pub const DUMMY_CODE_EXTENT : CodeExtent = CodeExtent(1);
/// CodeExtent represents a statically-describable extent that can be
/// used to bound the lifetime/region for values.
///
@ -91,9 +102,8 @@
/// placate the same deriving in `ty::FreeRegion`, but we may want to
/// actually attach a more meaningful ordering to scopes than the one
/// generated via deriving here.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, RustcEncodable,
RustcDecodable, Debug, Copy)]
pub enum CodeExtent {
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Debug, Copy)]
pub enum CodeExtentData {
Misc(ast::NodeId),
// extent of parameters passed to a function or closure (they
@ -118,8 +128,9 @@ impl DestructionScopeData {
pub fn new(node_id: ast::NodeId) -> DestructionScopeData {
DestructionScopeData { node_id: node_id }
}
pub fn to_code_extent(&self) -> CodeExtent {
CodeExtent::DestructionScope(self.node_id)
pub fn to_code_extent(&self, region_maps: &RegionMaps) -> CodeExtent {
region_maps.lookup_code_extent(
CodeExtentData::DestructionScope(self.node_id))
}
}
@ -134,9 +145,9 @@ pub fn to_code_extent(&self) -> CodeExtent {
/// * the subscope with `first_statement_index == 0` is scope of both
/// `a` and `b`; it does not include EXPR_1, but does include
/// everything after that first `let`. (If you want a scope that
/// includes EXPR_1 as well, then do not use `CodeExtent::Remainder`,
/// includes EXPR_1 as well, then do not use `CodeExtentData::Remainder`,
/// but instead another `CodeExtent` that encompasses the whole block,
/// e.g. `CodeExtent::Misc`.
/// e.g. `CodeExtentData::Misc`.
///
/// * the subscope with `first_statement_index == 1` is scope of `c`,
/// and thus does not include EXPR_2, but covers the `...`.
@ -147,58 +158,41 @@ pub struct BlockRemainder {
pub first_statement_index: u32,
}
impl CodeExtent {
/// Creates a scope that represents the dynamic extent associated
/// with `node_id`.
pub fn from_node_id(node_id: ast::NodeId) -> CodeExtent {
CodeExtent::Misc(node_id)
}
impl CodeExtentData {
/// Returns a node id associated with this scope.
///
/// NB: likely to be replaced as API is refined; e.g. pnkfelix
/// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
pub fn node_id(&self) -> ast::NodeId {
match *self {
CodeExtent::Misc(node_id) => node_id,
CodeExtentData::Misc(node_id) => node_id,
// These cases all return rough approximations to the
// precise extent denoted by `self`.
CodeExtent::Remainder(br) => br.block,
CodeExtent::DestructionScope(node_id) => node_id,
CodeExtent::ParameterScope { fn_id: _, body_id } => body_id,
CodeExtentData::Remainder(br) => br.block,
CodeExtentData::DestructionScope(node_id) => node_id,
CodeExtentData::ParameterScope { fn_id: _, body_id } => body_id,
}
}
}
/// Maps this scope to a potentially new one according to the
/// NodeId transformer `f_id`.
pub fn map_id<F>(&self, mut f_id: F) -> CodeExtent where
F: FnMut(ast::NodeId) -> ast::NodeId,
{
match *self {
CodeExtent::Misc(node_id) => CodeExtent::Misc(f_id(node_id)),
CodeExtent::Remainder(br) =>
CodeExtent::Remainder(BlockRemainder {
block: f_id(br.block), first_statement_index: br.first_statement_index }),
CodeExtent::DestructionScope(node_id) =>
CodeExtent::DestructionScope(f_id(node_id)),
CodeExtent::ParameterScope { fn_id, body_id } =>
CodeExtent::ParameterScope { fn_id: f_id(fn_id), body_id: f_id(body_id) },
}
impl CodeExtent {
pub fn node_id(&self, region_maps: &RegionMaps) -> ast::NodeId {
region_maps.code_extent_data(*self).node_id()
}
/// Returns the span of this CodeExtent. Note that in general the
/// returned span may not correspond to the span of any node id in
/// the AST.
pub fn span(&self, ast_map: &ast_map::Map) -> Option<Span> {
match ast_map.find(self.node_id()) {
pub fn span(&self, region_maps: &RegionMaps, ast_map: &ast_map::Map) -> Option<Span> {
match ast_map.find(self.node_id(region_maps)) {
Some(ast_map::NodeBlock(ref blk)) => {
match *self {
CodeExtent::ParameterScope { .. } |
CodeExtent::Misc(_) |
CodeExtent::DestructionScope(_) => Some(blk.span),
match region_maps.code_extent_data(*self) {
CodeExtentData::ParameterScope { .. } |
CodeExtentData::Misc(_) |
CodeExtentData::DestructionScope(_) => Some(blk.span),
CodeExtent::Remainder(r) => {
CodeExtentData::Remainder(r) => {
assert_eq!(r.block, blk.id);
// Want span for extent starting after the
// indexed statement and ending at end of
@ -222,13 +216,15 @@ pub fn span(&self, ast_map: &ast_map::Map) -> Option<Span> {
/// The region maps encode information about region relationships.
pub struct RegionMaps {
code_extents: RefCell<Vec<CodeExtentData>>,
code_extent_interner: RefCell<FnvHashMap<CodeExtentData, CodeExtent>>,
/// `scope_map` maps from a scope id to the enclosing scope id;
/// this is usually corresponding to the lexical nesting, though
/// in the case of closures the parent scope is the innermost
/// conditional expression or repeating block. (Note that the
/// enclosing scope id for the block associated with a closure is
/// the closure itself.)
scope_map: RefCell<FnvHashMap<CodeExtent, CodeExtent>>,
scope_map: RefCell<Vec<CodeExtent>>,
/// `var_map` maps from a variable or binding id to the block in
/// which that variable is declared.
@ -242,6 +238,44 @@ pub struct RegionMaps {
/// block (see `terminating_scopes`).
rvalue_scopes: RefCell<NodeMap<CodeExtent>>,
/// Encodes the hierarchy of fn bodies. Every fn body (including
/// closures) forms its own distinct region hierarchy, rooted in
/// the block that is the fn body. This map points from the id of
/// that root block to the id of the root block for the enclosing
/// fn, if any. Thus the map structures the fn bodies into a
/// hierarchy based on their lexical mapping. This is used to
/// handle the relationships between regions in a fn and in a
/// closure defined by that fn. See the "Modeling closures"
/// section of the README in middle::infer::region_inference for
/// more details.
fn_tree: RefCell<NodeMap<ast::NodeId>>,
}
#[derive(Debug, Copy, Clone)]
pub struct Context {
/// the root of the current region tree. This is typically the id
/// of the innermost fn body. Each fn forms its own disjoint tree
/// in the region hierarchy. These fn bodies are themselves
/// arranged into a tree. See the "Modeling closures" section of
/// the README in middle::infer::region_inference for more
/// details.
root_id: Option<ast::NodeId>,
/// the scope that contains any new variables declared
var_parent: CodeExtent,
/// region parent of expressions etc
parent: CodeExtent
}
struct RegionResolutionVisitor<'a> {
sess: &'a Session,
// Generated maps:
region_maps: &'a RegionMaps,
cx: Context,
/// `terminating_scopes` is a set containing the ids of each
/// statement, or conditional/repeating expression. These scopes
/// are called "terminating scopes" because, when attempting to
@ -259,122 +293,75 @@ pub struct RegionMaps {
/// temporaries we would have to cleanup. Therefore we ensure that
/// the temporaries never outlast the conditional/repeating
/// expression, preventing the need for dynamic checks and/or
/// arbitrary amounts of stack space.
terminating_scopes: RefCell<FnvHashSet<CodeExtent>>,
/// Encodes the hierarchy of fn bodies. Every fn body (including
/// closures) forms its own distinct region hierarchy, rooted in
/// the block that is the fn body. This map points from the id of
/// that root block to the id of the root block for the enclosing
/// fn, if any. Thus the map structures the fn bodies into a
/// hierarchy based on their lexical mapping. This is used to
/// handle the relationships between regions in a fn and in a
/// closure defined by that fn. See the "Modeling closures"
/// section of the README in middle::infer::region_inference for
/// more details.
fn_tree: RefCell<NodeMap<ast::NodeId>>,
}
/// Carries the node id for the innermost block or match expression,
/// for building up the `var_map` which maps ids to the blocks in
/// which they were declared.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
enum InnermostDeclaringBlock {
None,
Block(ast::NodeId),
Statement(DeclaringStatementContext),
Match(ast::NodeId),
FnDecl { fn_id: ast::NodeId, body_id: ast::NodeId },
}
impl InnermostDeclaringBlock {
fn to_code_extent(&self) -> Option<CodeExtent> {
let extent = match *self {
InnermostDeclaringBlock::None => {
return Option::None;
}
InnermostDeclaringBlock::FnDecl { fn_id, body_id } =>
CodeExtent::ParameterScope { fn_id: fn_id, body_id: body_id },
InnermostDeclaringBlock::Block(id) |
InnermostDeclaringBlock::Match(id) => CodeExtent::from_node_id(id),
InnermostDeclaringBlock::Statement(s) => s.to_code_extent(),
};
Option::Some(extent)
}
}
/// Contextual information for declarations introduced by a statement
/// (i.e. `let`). It carries node-id's for statement and enclosing
/// block both, as well as the statement's index within the block.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
struct DeclaringStatementContext {
stmt_id: ast::NodeId,
block_id: ast::NodeId,
stmt_index: u32,
}
impl DeclaringStatementContext {
fn to_code_extent(&self) -> CodeExtent {
CodeExtent::Remainder(BlockRemainder {
block: self.block_id,
first_statement_index: self.stmt_index,
})
}
}
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
enum InnermostEnclosingExpr {
None,
Some(ast::NodeId),
Statement(DeclaringStatementContext),
}
impl InnermostEnclosingExpr {
fn to_code_extent(&self) -> Option<CodeExtent> {
let extent = match *self {
InnermostEnclosingExpr::None => {
return Option::None;
}
InnermostEnclosingExpr::Statement(s) =>
s.to_code_extent(),
InnermostEnclosingExpr::Some(parent_id) =>
CodeExtent::from_node_id(parent_id),
};
Some(extent)
}
}
#[derive(Debug, Copy, Clone)]
pub struct Context {
/// the root of the current region tree. This is typically the id
/// of the innermost fn body. Each fn forms its own disjoint tree
/// in the region hierarchy. These fn bodies are themselves
/// arranged into a tree. See the "Modeling closures" section of
/// the README in middle::infer::region_inference for more
/// details.
root_id: Option<ast::NodeId>,
/// the scope that contains any new variables declared
var_parent: InnermostDeclaringBlock,
/// region parent of expressions etc
parent: InnermostEnclosingExpr,
}
struct RegionResolutionVisitor<'a> {
sess: &'a Session,
// Generated maps:
region_maps: &'a RegionMaps,
cx: Context
/// arbitrary amounts of stack space. Terminating scopes end
/// up being contained in a DestructionScope that contains the
/// destructor's execution.
terminating_scopes: NodeSet
}
impl RegionMaps {
/// create a bogus code extent for the regions in astencode types. Nobody
/// really cares about the contents of these.
pub fn bogus_code_extent(&self, e: CodeExtentData) -> CodeExtent {
self.intern_code_extent(e, DUMMY_CODE_EXTENT)
}
pub fn lookup_code_extent(&self, e: CodeExtentData) -> CodeExtent {
self.code_extent_interner.borrow()[&e]
}
pub fn intern_code_extent(&self,
e: CodeExtentData,
parent: CodeExtent) -> CodeExtent {
match self.code_extent_interner.borrow_mut().entry(e) {
Entry::Occupied(o) => {
// this can happen when the bogus code extents from tydecode
// have (bogus) NodeId-s that overlap items created during
// inlining.
// We probably shouldn't be creating bogus code extents
// though.
let idx = *o.get();
if parent == DUMMY_CODE_EXTENT {
info!("CodeExtent({}) = {:?} [parent={}] BOGUS!",
idx.0, e, parent.0);
} else {
assert_eq!(self.scope_map.borrow()[idx.0 as usize],
DUMMY_CODE_EXTENT);
info!("CodeExtent({}) = {:?} [parent={}] RECLAIMED!",
idx.0, e, parent.0);
self.scope_map.borrow_mut()[idx.0 as usize] = parent;
}
idx
}
Entry::Vacant(v) => {
if self.code_extents.borrow().len() > 0xffffffffusize {
unreachable!() // should pass a sess,
// but this isn't the only place
}
let idx = CodeExtent(self.code_extents.borrow().len() as u32);
info!("CodeExtent({}) = {:?} [parent={}]", idx.0, e, parent.0);
self.code_extents.borrow_mut().push(e);
self.scope_map.borrow_mut().push(parent);
*v.insert(idx)
}
}
}
pub fn intern_node(&self,
n: ast::NodeId,
parent: CodeExtent) -> CodeExtent {
self.intern_code_extent(CodeExtentData::Misc(n), parent)
}
pub fn node_extent(&self, n: ast::NodeId) -> CodeExtent {
self.lookup_code_extent(CodeExtentData::Misc(n))
}
pub fn code_extent_data(&self, e: CodeExtent) -> CodeExtentData {
self.code_extents.borrow()[e.0 as usize]
}
pub fn each_encl_scope<E>(&self, mut e:E) where E: FnMut(&CodeExtent, &CodeExtent) {
for (child, parent) in self.scope_map.borrow().iter() {
e(child, parent)
for child_id in (1..self.code_extents.borrow().len()) {
let child = CodeExtent(child_id as u32);
if let Some(parent) = self.opt_encl_scope(child) {
e(&child, &parent)
}
}
}
pub fn each_var_scope<E>(&self, mut e:E) where E: FnMut(&ast::NodeId, &CodeExtent) {
@ -387,12 +374,6 @@ pub fn each_rvalue_scope<E>(&self, mut e:E) where E: FnMut(&ast::NodeId, &CodeEx
e(child, parent)
}
}
pub fn each_terminating_scope<E>(&self, mut e:E) where E: FnMut(&CodeExtent) {
for scope in self.terminating_scopes.borrow().iter() {
e(scope)
}
}
/// Records that `sub_fn` is defined within `sup_fn`. These ids
/// should be the id of the block that is the fn body, which is
/// also the root of the region hierarchy for that fn.
@ -414,44 +395,30 @@ fn fn_is_enclosed_by(&self, mut sub_fn: ast::NodeId, sup_fn: ast::NodeId) -> boo
}
}
pub fn record_encl_scope(&self, sub: CodeExtent, sup: CodeExtent) {
debug!("record_encl_scope(sub={:?}, sup={:?})", sub, sup);
assert!(sub != sup);
self.scope_map.borrow_mut().insert(sub, sup);
}
fn record_var_scope(&self, var: ast::NodeId, lifetime: CodeExtent) {
debug!("record_var_scope(sub={:?}, sup={:?})", var, lifetime);
assert!(var != lifetime.node_id());
assert!(var != lifetime.node_id(self));
self.var_map.borrow_mut().insert(var, lifetime);
}
fn record_rvalue_scope(&self, var: ast::NodeId, lifetime: CodeExtent) {
debug!("record_rvalue_scope(sub={:?}, sup={:?})", var, lifetime);
assert!(var != lifetime.node_id());
assert!(var != lifetime.node_id(self));
self.rvalue_scopes.borrow_mut().insert(var, lifetime);
}
/// Records that a scope is a TERMINATING SCOPE. Whenever we create automatic temporaries --
/// e.g. by an expression like `a().f` -- they will be freed within the innermost terminating
/// scope.
fn mark_as_terminating_scope(&self, scope_id: CodeExtent) {
debug!("record_terminating_scope(scope_id={:?})", scope_id);
self.terminating_scopes.borrow_mut().insert(scope_id);
}
pub fn opt_encl_scope(&self, id: CodeExtent) -> Option<CodeExtent> {
//! Returns the narrowest scope that encloses `id`, if any.
self.scope_map.borrow().get(&id).cloned()
match self.scope_map.borrow()[id.0 as usize] {
ROOT_CODE_EXTENT => None,
c => Some(c)
}
}
#[allow(dead_code)] // used in middle::cfg
pub fn encl_scope(&self, id: CodeExtent) -> CodeExtent {
//! Returns the narrowest scope that encloses `id`, if any.
match self.scope_map.borrow().get(&id) {
Some(&r) => r,
None => { panic!("no enclosing scope for id {:?}", id); }
}
self.opt_encl_scope(id).unwrap()
}
/// Returns the lifetime of the local variable `var_id`
@ -478,24 +445,28 @@ pub fn temporary_scope(&self, expr_id: ast::NodeId) -> Option<CodeExtent> {
// if there's one. Static items, for instance, won't
// have an enclosing scope, hence no scope will be
// returned.
let mut id = match self.opt_encl_scope(CodeExtent::from_node_id(expr_id)) {
// For some reason, the expr's scope itself is skipped here.
let mut id = match self.opt_encl_scope(self.node_extent(expr_id)) {
Some(i) => i,
None => { return None; }
};
while !self.terminating_scopes.borrow().contains(&id) {
match self.opt_encl_scope(id) {
Some(p) => {
id = p;
}
None => {
debug!("temporary_scope({:?}) = None", expr_id);
return None;
loop { match self.opt_encl_scope(id) {
Some(p) => {
match self.code_extent_data(p) {
CodeExtentData::DestructionScope(..) => {
debug!("temporary_scope({:?}) = {:?} [enclosing]",
expr_id, id);
return Some(id);
}
_ => id = p
}
}
}
debug!("temporary_scope({:?}) = {:?} [enclosing]", expr_id, id);
return Some(id);
None => {
debug!("temporary_scope({:?}) = None", expr_id);
return None;
}
} }
}
pub fn var_region(&self, id: ast::NodeId) -> ty::Region {
@ -519,15 +490,15 @@ pub fn is_subscope_of(&self,
superscope: CodeExtent)
-> bool {
let mut s = subscope;
debug!("is_subscope_of({:?}, {:?})", subscope, superscope);
while superscope != s {
match self.scope_map.borrow().get(&s) {
match self.opt_encl_scope(s) {
None => {
debug!("is_subscope_of({:?}, {:?}, s={:?})=false",
subscope, superscope, s);
return false;
}
Some(&scope) => s = scope
Some(scope) => s = scope
}
}
@ -545,8 +516,15 @@ pub fn nearest_common_ancestor(&self,
-> CodeExtent {
if scope_a == scope_b { return scope_a; }
let a_ancestors = ancestors_of(self, scope_a);
let b_ancestors = ancestors_of(self, scope_b);
let mut a_buf: [CodeExtent; 32] = [ROOT_CODE_EXTENT; 32];
let mut a_vec: Vec<CodeExtent> = vec![];
let mut b_buf: [CodeExtent; 32] = [ROOT_CODE_EXTENT; 32];
let mut b_vec: Vec<CodeExtent> = vec![];
let scope_map : &[CodeExtent] = &self.scope_map.borrow();
let a_ancestors = ancestors_of(scope_map,
scope_a, &mut a_buf, &mut a_vec);
let b_ancestors = ancestors_of(scope_map,
scope_b, &mut b_buf, &mut b_vec);
let mut a_index = a_ancestors.len() - 1;
let mut b_index = b_ancestors.len() - 1;
@ -564,11 +542,11 @@ pub fn nearest_common_ancestor(&self,
// nesting. The reasoning behind this is subtle. See the
// "Modeling closures" section of the README in
// middle::infer::region_inference for more details.
let a_root_scope = a_ancestors[a_index];
let b_root_scope = a_ancestors[a_index];
let a_root_scope = self.code_extent_data(a_ancestors[a_index]);
let b_root_scope = self.code_extent_data(a_ancestors[a_index]);
return match (a_root_scope, b_root_scope) {
(CodeExtent::DestructionScope(a_root_id),
CodeExtent::DestructionScope(b_root_id)) => {
(CodeExtentData::DestructionScope(a_root_id),
CodeExtentData::DestructionScope(b_root_id)) => {
if self.fn_is_enclosed_by(a_root_id, b_root_id) {
// `a` is enclosed by `b`, hence `b` is the ancestor of everything in `a`
scope_b
@ -599,47 +577,52 @@ pub fn nearest_common_ancestor(&self,
}
}
fn ancestors_of(this: &RegionMaps, scope: CodeExtent) -> Vec<CodeExtent> {
fn ancestors_of<'a>(scope_map: &[CodeExtent],
scope: CodeExtent,
buf: &'a mut [CodeExtent; 32],
vec: &'a mut Vec<CodeExtent>) -> &'a [CodeExtent] {
// debug!("ancestors_of(scope={:?})", scope);
let mut result = vec!(scope);
let mut scope = scope;
loop {
match this.scope_map.borrow().get(&scope) {
None => return result,
Some(&superscope) => {
result.push(superscope);
scope = superscope;
}
let mut i = 0;
while i < 32 {
buf[i] = scope;
let superscope = scope_map[scope.0 as usize];
if superscope == ROOT_CODE_EXTENT {
return &buf[..i+1];
} else {
scope = superscope;
}
i += 1;
}
*vec = Vec::with_capacity(64);
vec.extend((*buf).into_iter());
loop {
vec.push(scope);
let superscope = scope_map[scope.0 as usize];
if superscope == ROOT_CODE_EXTENT {
return &*vec;
} else {
scope = superscope;
}
// debug!("ancestors_of_loop(scope={:?})", scope);
}
}
}
}
/// Records the current parent (if any) as the parent of `child_scope`.
fn record_superlifetime(visitor: &mut RegionResolutionVisitor,
child_scope: CodeExtent,
_sp: Span) {
match visitor.cx.parent.to_code_extent() {
Some(parent_scope) =>
visitor.region_maps.record_encl_scope(child_scope, parent_scope),
None => {}
}
}
/// Records the lifetime of a local variable as `cx.var_parent`
fn record_var_lifetime(visitor: &mut RegionResolutionVisitor,
var_id: ast::NodeId,
_sp: Span) {
match visitor.cx.var_parent.to_code_extent() {
Some(parent_scope) =>
visitor.region_maps.record_var_scope(var_id, parent_scope),
None => {
match visitor.cx.var_parent {
ROOT_CODE_EXTENT => {
// this can happen in extern fn declarations like
//
// extern fn isalnum(c: c_int) -> c_int
}
parent_scope =>
visitor.region_maps.record_var_scope(var_id, parent_scope),
}
}
@ -647,21 +630,7 @@ fn resolve_block(visitor: &mut RegionResolutionVisitor, blk: &ast::Block) {
debug!("resolve_block(blk.id={:?})", blk.id);
let prev_cx = visitor.cx;
let blk_scope = CodeExtent::Misc(blk.id);
// If block was previously marked as a terminating scope during
// the recursive visit of its parent node in the AST, then we need
// to account for the destruction scope representing the extent of
// the destructors that run immediately after the block itself
// completes.
if visitor.region_maps.terminating_scopes.borrow().contains(&blk_scope) {
let dtor_scope = CodeExtent::DestructionScope(blk.id);
record_superlifetime(visitor, dtor_scope, blk.span);
visitor.region_maps.record_encl_scope(blk_scope, dtor_scope);
} else {
record_superlifetime(visitor, blk_scope, blk.span);
}
let block_extent = visitor.new_node_extent_with_dtor(blk.id);
// We treat the tail expression in the block (if any) somewhat
// differently from the statements. The issue has to do with
@ -690,35 +659,34 @@ fn resolve_block(visitor: &mut RegionResolutionVisitor, blk: &ast::Block) {
visitor.cx = Context {
root_id: prev_cx.root_id,
var_parent: InnermostDeclaringBlock::Block(blk.id),
parent: InnermostEnclosingExpr::Some(blk.id),
var_parent: block_extent,
parent: block_extent,
};
{
// This block should be kept approximately in sync with
// `visit::walk_block`. (We manually walk the block, rather
// than call `walk_block`, in order to maintain precise
// `InnermostDeclaringBlock` information.)
// index information.)
for (i, statement) in blk.stmts.iter().enumerate() {
if let ast::StmtDecl(_, stmt_id) = statement.node {
if let ast::StmtDecl(..) = statement.node {
// Each StmtDecl introduces a subscope for bindings
// introduced by the declaration; this subscope covers
// a suffix of the block. Each subscope in a block
// has the previous subscope in the block as a parent,
// except for the first such subscope, which has the
// block itself as a parent.
let declaring = DeclaringStatementContext {
stmt_id: stmt_id,
block_id: blk.id,
stmt_index: i as u32,
};
record_superlifetime(
visitor, declaring.to_code_extent(), statement.span);
let stmt_extent = visitor.new_code_extent(
CodeExtentData::Remainder(BlockRemainder {
block: blk.id,
first_statement_index: i as u32
})
);
visitor.cx = Context {
root_id: prev_cx.root_id,
var_parent: InnermostDeclaringBlock::Statement(declaring),
parent: InnermostEnclosingExpr::Statement(declaring),
var_parent: stmt_extent,
parent: stmt_extent,
};
}
visitor.visit_stmt(&**statement)
@ -730,22 +698,17 @@ fn resolve_block(visitor: &mut RegionResolutionVisitor, blk: &ast::Block) {
}
fn resolve_arm(visitor: &mut RegionResolutionVisitor, arm: &ast::Arm) {
let arm_body_scope = CodeExtent::from_node_id(arm.body.id);
visitor.region_maps.mark_as_terminating_scope(arm_body_scope);
visitor.terminating_scopes.insert(arm.body.id);
match arm.guard {
Some(ref expr) => {
let guard_scope = CodeExtent::from_node_id(expr.id);
visitor.region_maps.mark_as_terminating_scope(guard_scope);
}
None => { }
if let Some(ref expr) = arm.guard {
visitor.terminating_scopes.insert(expr.id);
}
visit::walk_arm(visitor, arm);
}
fn resolve_pat(visitor: &mut RegionResolutionVisitor, pat: &ast::Pat) {
record_superlifetime(visitor, CodeExtent::from_node_id(pat.id), pat.span);
visitor.new_node_extent(pat.id);
// If this is a binding (or maybe a binding, I'm too lazy to check
// the def map) then record the lifetime of that binding.
@ -763,20 +726,16 @@ fn resolve_stmt(visitor: &mut RegionResolutionVisitor, stmt: &ast::Stmt) {
let stmt_id = stmt_id(stmt);
debug!("resolve_stmt(stmt.id={:?})", stmt_id);
let stmt_scope = CodeExtent::from_node_id(stmt_id);
// Every statement will clean up the temporaries created during
// execution of that statement. Therefore each statement has an
// associated destruction scope that represents the extent of the
// statement plus its destructors, and thus the extent for which
// regions referenced by the destructors need to survive.
visitor.region_maps.mark_as_terminating_scope(stmt_scope);
let dtor_scope = CodeExtent::DestructionScope(stmt_id);
visitor.region_maps.record_encl_scope(stmt_scope, dtor_scope);
record_superlifetime(visitor, dtor_scope, stmt.span);
visitor.terminating_scopes.insert(stmt_id);
let stmt_extent = visitor.new_node_extent_with_dtor(stmt_id);
let prev_parent = visitor.cx.parent;
visitor.cx.parent = InnermostEnclosingExpr::Some(stmt_id);
visitor.cx.parent = stmt_extent;
visit::walk_stmt(visitor, stmt);
visitor.cx.parent = prev_parent;
}
@ -784,32 +743,14 @@ fn resolve_stmt(visitor: &mut RegionResolutionVisitor, stmt: &ast::Stmt) {
fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) {
debug!("resolve_expr(expr.id={:?})", expr.id);
let expr_scope = CodeExtent::Misc(expr.id);
// If expr was previously marked as a terminating scope during the
// recursive visit of its parent node in the AST, then we need to
// account for the destruction scope representing the extent of
// the destructors that run immediately after the expression
// itself completes.
if visitor.region_maps.terminating_scopes.borrow().contains(&expr_scope) {
let dtor_scope = CodeExtent::DestructionScope(expr.id);
record_superlifetime(visitor, dtor_scope, expr.span);
visitor.region_maps.record_encl_scope(expr_scope, dtor_scope);
} else {
record_superlifetime(visitor, expr_scope, expr.span);
}
let expr_extent = visitor.new_node_extent_with_dtor(expr.id);
let prev_cx = visitor.cx;
visitor.cx.parent = InnermostEnclosingExpr::Some(expr.id);
visitor.cx.parent = expr_extent;
{
let region_maps = &mut visitor.region_maps;
let terminating = |e: &P<ast::Expr>| {
let scope = CodeExtent::from_node_id(e.id);
region_maps.mark_as_terminating_scope(scope)
};
let terminating_block = |b: &P<ast::Block>| {
let scope = CodeExtent::from_node_id(b.id);
region_maps.mark_as_terminating_scope(scope)
let terminating_scopes = &mut visitor.terminating_scopes;
let mut terminating = |id: ast::NodeId| {
terminating_scopes.insert(id);
};
match expr.node {
// Conditional or repeating scopes are always terminating
@ -820,30 +761,30 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) {
ast::ExprBinary(codemap::Spanned { node: ast::BiOr, .. }, _, ref r) => {
// For shortcircuiting operators, mark the RHS as a terminating
// scope since it only executes conditionally.
terminating(r);
terminating(r.id);
}
ast::ExprIf(_, ref then, Some(ref otherwise)) => {
terminating_block(then);
terminating(otherwise);
terminating(then.id);
terminating(otherwise.id);
}
ast::ExprIf(ref expr, ref then, None) => {
terminating(expr);
terminating_block(then);
terminating(expr.id);
terminating(then.id);
}
ast::ExprLoop(ref body, _) => {
terminating_block(body);
terminating(body.id);
}
ast::ExprWhile(ref expr, ref body, _) => {
terminating(expr);
terminating_block(body);
terminating(expr.id);
terminating(body.id);
}
ast::ExprMatch(..) => {
visitor.cx.var_parent = InnermostDeclaringBlock::Match(expr.id);
visitor.cx.var_parent = expr_extent;
}
ast::ExprAssignOp(..) | ast::ExprIndex(..) |
@ -883,10 +824,8 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) {
// For convenience in trans, associate with the local-id the var
// scope that will be used for any bindings declared in this
// pattern.
let blk_scope = visitor.cx.var_parent.to_code_extent()
.unwrap_or_else(|| visitor.sess.span_bug(
local.span, "local without enclosing block"));
let blk_scope = visitor.cx.var_parent;
assert!(blk_scope != ROOT_CODE_EXTENT); // locals must be within a block
visitor.region_maps.record_var_scope(local.id, blk_scope);
// As an exception to the normal rules governing temporary
@ -1109,13 +1048,16 @@ fn record_rvalue_scope<'a>(visitor: &mut RegionResolutionVisitor,
fn resolve_item(visitor: &mut RegionResolutionVisitor, item: &ast::Item) {
// Items create a new outer block scope as far as we're concerned.
let prev_cx = visitor.cx;
let prev_ts = mem::replace(&mut visitor.terminating_scopes, NodeSet());
visitor.cx = Context {
root_id: None,
var_parent: InnermostDeclaringBlock::None,
parent: InnermostEnclosingExpr::None
var_parent: ROOT_CODE_EXTENT,
parent: ROOT_CODE_EXTENT
};
visit::walk_item(visitor, item);
visitor.create_item_scope_if_needed(item.id);
visitor.cx = prev_cx;
visitor.terminating_scopes = prev_ts;
}
fn resolve_fn(visitor: &mut RegionResolutionVisitor,
@ -1133,51 +1075,76 @@ fn resolve_fn(visitor: &mut RegionResolutionVisitor,
body.id,
visitor.cx.parent);
// This scope covers the function body, which includes the
// bindings introduced by let statements as well as temporaries
// created by the fn's tail expression (if any). It does *not*
// include the fn parameters (see below).
let body_scope = CodeExtent::from_node_id(body.id);
visitor.region_maps.mark_as_terminating_scope(body_scope);
let dtor_scope = CodeExtent::DestructionScope(body.id);
visitor.region_maps.record_encl_scope(body_scope, dtor_scope);
let fn_decl_scope = CodeExtent::ParameterScope { fn_id: id, body_id: body.id };
visitor.region_maps.record_encl_scope(dtor_scope, fn_decl_scope);
record_superlifetime(visitor, fn_decl_scope, body.span);
let fn_decl_scope = visitor.new_code_extent(
CodeExtentData::ParameterScope { fn_id: id, body_id: body.id });
if let Some(root_id) = visitor.cx.root_id {
visitor.region_maps.record_fn_parent(body.id, root_id);
}
let outer_cx = visitor.cx;
let outer_ts = mem::replace(&mut visitor.terminating_scopes, NodeSet());
visitor.terminating_scopes.insert(body.id);
// The arguments and `self` are parented to the fn.
visitor.cx = Context {
root_id: Some(body.id),
parent: InnermostEnclosingExpr::None,
var_parent: InnermostDeclaringBlock::FnDecl {
fn_id: id, body_id: body.id
},
parent: ROOT_CODE_EXTENT,
var_parent: fn_decl_scope,
};
visit::walk_fn_decl(visitor, decl);
// The body of every fn is a root scope.
visitor.cx = Context {
root_id: Some(body.id),
parent: InnermostEnclosingExpr::None,
var_parent: InnermostDeclaringBlock::None
parent: fn_decl_scope,
var_parent: fn_decl_scope
};
visitor.visit_block(body);
// Restore context we had at the start.
visitor.cx = outer_cx;
visitor.terminating_scopes = outer_ts;
}
impl<'a> RegionResolutionVisitor<'a> {
/// Records the current parent (if any) as the parent of `child_scope`.
fn new_code_extent(&mut self, child_scope: CodeExtentData) -> CodeExtent {
self.region_maps.intern_code_extent(child_scope, self.cx.parent)
}
fn new_node_extent(&mut self, child_scope: ast::NodeId) -> CodeExtent {
self.new_code_extent(CodeExtentData::Misc(child_scope))
}
fn new_node_extent_with_dtor(&mut self, id: ast::NodeId) -> CodeExtent {
// If node was previously marked as a terminating scope during the
// recursive visit of its parent node in the AST, then we need to
// account for the destruction scope representing the extent of
// the destructors that run immediately after it completes.
if self.terminating_scopes.contains(&id) {
let ds = self.new_code_extent(
CodeExtentData::DestructionScope(id));
self.region_maps.intern_node(id, ds)
} else {
self.new_node_extent(id)
}
}
fn create_item_scope_if_needed(&mut self, id: ast::NodeId) {
// create a region for the destruction scope - this is needed
// for constructing parameter environments based on the item.
// functions put their destruction scopes *inside* their parameter
// scopes.
let scope = CodeExtentData::DestructionScope(id);
if !self.region_maps.code_extent_interner.borrow().contains_key(&scope) {
self.region_maps.intern_code_extent(scope, ROOT_CODE_EXTENT);
}
}
}
impl<'a, 'v> Visitor<'v> for RegionResolutionVisitor<'a> {
fn visit_block(&mut self, b: &Block) {
resolve_block(self, b);
}
@ -1186,6 +1153,16 @@ fn visit_item(&mut self, i: &Item) {
resolve_item(self, i);
}
fn visit_impl_item(&mut self, ii: &ast::ImplItem) {
visit::walk_impl_item(self, ii);
self.create_item_scope_if_needed(ii.id);
}
fn visit_trait_item(&mut self, ti: &ast::TraitItem) {
visit::walk_trait_item(self, ti);
self.create_item_scope_if_needed(ti.id);
}
fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v FnDecl,
b: &'v Block, s: Span, n: NodeId) {
resolve_fn(self, fk, fd, b, s, n);
@ -1209,21 +1186,29 @@ fn visit_local(&mut self, l: &Local) {
pub fn resolve_crate(sess: &Session, krate: &ast::Crate) -> RegionMaps {
let maps = RegionMaps {
scope_map: RefCell::new(FnvHashMap()),
code_extents: RefCell::new(vec![]),
code_extent_interner: RefCell::new(FnvHashMap()),
scope_map: RefCell::new(vec![]),
var_map: RefCell::new(NodeMap()),
rvalue_scopes: RefCell::new(NodeMap()),
terminating_scopes: RefCell::new(FnvHashSet()),
fn_tree: RefCell::new(NodeMap()),
};
let root_extent = maps.bogus_code_extent(
CodeExtentData::DestructionScope(ast::DUMMY_NODE_ID));
assert_eq!(root_extent, ROOT_CODE_EXTENT);
let bogus_extent = maps.bogus_code_extent(
CodeExtentData::Misc(ast::DUMMY_NODE_ID));
assert_eq!(bogus_extent, DUMMY_CODE_EXTENT);
{
let mut visitor = RegionResolutionVisitor {
sess: sess,
region_maps: &maps,
cx: Context {
root_id: None,
parent: InnermostEnclosingExpr::None,
var_parent: InnermostDeclaringBlock::None,
}
parent: ROOT_CODE_EXTENT,
var_parent: ROOT_CODE_EXTENT
},
terminating_scopes: NodeSet()
};
visit::walk_crate(&mut visitor, krate);
}
@ -1238,9 +1223,10 @@ pub fn resolve_inlined_item(sess: &Session,
region_maps: region_maps,
cx: Context {
root_id: None,
parent: InnermostEnclosingExpr::None,
var_parent: InnermostDeclaringBlock::None
}
parent: ROOT_CODE_EXTENT,
var_parent: ROOT_CODE_EXTENT
},
terminating_scopes: NodeSet()
};
item.visit(&mut visitor);
}
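(Editor's note: resolve_crate above interns a bogus destruction scope and a bogus Misc extent before visiting anything, precisely so that they land on the fixed slots the rest of the compiler refers to as ROOT_CODE_EXTENT and DUMMY_CODE_EXTENT. A tiny standalone illustration of that ordering invariant; the concrete values 0 and 1 are an assumption made for the sketch.)

// Assumed slot assignments; the real constants live in region.rs and simply
// have to agree with the interning order used during bootstrap.
const ROOT_CODE_EXTENT: u32 = 0;
const DUMMY_CODE_EXTENT: u32 = 1;

fn main() {
    let mut extents: Vec<&'static str> = Vec::new();

    // First interned extent -> index 0 (the root destruction scope).
    extents.push("DestructionScope(DUMMY_NODE_ID)");
    let root = (extents.len() - 1) as u32;

    // Second interned extent -> index 1 (the dummy Misc extent).
    extents.push("Misc(DUMMY_NODE_ID)");
    let dummy = (extents.len() - 1) as u32;

    assert_eq!(root, ROOT_CODE_EXTENT);
    assert_eq!(dummy, DUMMY_CODE_EXTENT);
}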

View File

@ -1503,7 +1503,7 @@ pub struct DebruijnIndex {
}
/// Representation of regions:
#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Copy)]
#[derive(Clone, PartialEq, Eq, Hash, Copy)]
pub enum Region {
// Region bound in a type or fn declaration which will be
// substituted 'early' -- that is, at the same time when type
@ -1609,7 +1609,7 @@ pub enum BorrowKind {
/// Information describing the capture of an upvar. This is computed
/// during `typeck`, specifically by `regionck`.
#[derive(PartialEq, Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
#[derive(PartialEq, Clone, Debug, Copy)]
pub enum UpvarCapture {
/// Upvar is captured by value. This is always true when the
/// closure is labeled `move`, but can also be true in other cases
@ -1620,7 +1620,7 @@ pub enum UpvarCapture {
ByRef(UpvarBorrow),
}
#[derive(PartialEq, Clone, RustcEncodable, RustcDecodable, Copy)]
#[derive(PartialEq, Clone, Copy)]
pub struct UpvarBorrow {
/// The kind of borrow: by-ref upvars have access to shared
/// immutable borrows, which are not part of the normal language
@ -2271,7 +2271,7 @@ pub struct TypeParameterDef<'tcx> {
pub object_lifetime_default: ObjectLifetimeDefault,
}
#[derive(RustcEncodable, RustcDecodable, Clone, Debug)]
#[derive(Clone, Debug)]
pub struct RegionParameterDef {
pub name: ast::Name,
pub def_id: DefId,
@ -6673,7 +6673,8 @@ pub fn construct_parameter_environment<'a>(&'a self,
let unnormalized_env = ty::ParameterEnvironment {
tcx: self,
free_substs: free_substs,
implicit_region_bound: ty::ReScope(free_id_outlive.to_code_extent()),
implicit_region_bound: ty::ReScope(
free_id_outlive.to_code_extent(&self.region_maps)),
caller_bounds: predicates,
selection_cache: traits::SelectionCache::new(),
free_id: free_id,

View File

@ -144,7 +144,7 @@ fn borrow(&mut self,
None => { }
}
self.check_for_conflicting_loans(region::CodeExtent::from_node_id(borrow_id));
self.check_for_conflicting_loans(borrow_id);
}
fn mutate(&mut self,
@ -230,16 +230,16 @@ fn compatible_borrow_kinds(borrow_kind1: ty::BorrowKind,
impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> {
pub fn tcx(&self) -> &'a ty::ctxt<'tcx> { self.bccx.tcx }
pub fn each_issued_loan<F>(&self, scope: region::CodeExtent, mut op: F) -> bool where
pub fn each_issued_loan<F>(&self, node: ast::NodeId, mut op: F) -> bool where
F: FnMut(&Loan<'tcx>) -> bool,
{
//! Iterates over each loan that has been issued
//! on entrance to `scope`, regardless of whether it is
//! on entrance to `node`, regardless of whether it is
//! actually *in scope* at that point. Sometimes loans
//! are issued for future scopes and thus they may have been
//! *issued* but not yet be in effect.
self.dfcx_loans.each_bit_on_entry(scope.node_id(), |loan_index| {
self.dfcx_loans.each_bit_on_entry(node, |loan_index| {
let loan = &self.all_loans[loan_index];
op(loan)
})
@ -252,7 +252,7 @@ pub fn each_in_scope_loan<F>(&self, scope: region::CodeExtent, mut op: F) -> boo
//! currently in scope.
let tcx = self.tcx();
self.each_issued_loan(scope, |loan| {
self.each_issued_loan(scope.node_id(&tcx.region_maps), |loan| {
if tcx.region_maps.is_subscope_of(scope, loan.kill_scope) {
op(loan)
} else {
@ -336,33 +336,33 @@ fn each_in_scope_loan_affecting_path<F>(&self,
return true;
}
pub fn loans_generated_by(&self, scope: region::CodeExtent) -> Vec<usize> {
pub fn loans_generated_by(&self, node: ast::NodeId) -> Vec<usize> {
//! Returns a vector of the loans that are generated as
//! we enter `scope`.
//! we enter `node`.
let mut result = Vec::new();
self.dfcx_loans.each_gen_bit(scope.node_id(), |loan_index| {
self.dfcx_loans.each_gen_bit(node, |loan_index| {
result.push(loan_index);
true
});
return result;
}
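(Editor's note: the switch from region::CodeExtent to ast::NodeId in these dataflow helpers, and the scope.node_id(&tcx.region_maps) calls sprinkled through the rest of this file, follow from the same fact: a CodeExtent value is now just an index and no longer carries its NodeId directly, so recovering the id means looking the extent up in the maps. A rough standalone sketch of what such an accessor has to do; the types are simplified, only the extra &RegionMaps-style argument is taken from the diff itself, and which id a ParameterScope reports is a guess for the sketch.)

use std::cell::RefCell;

type NodeId = u32;

#[allow(dead_code)]
#[derive(Clone, Copy)]
enum ExtentData {
    Misc(NodeId),
    DestructionScope(NodeId),
    ParameterScope { fn_id: NodeId, body_id: NodeId },
}

#[derive(Clone, Copy)]
struct Extent(u32);

struct Maps {
    code_extents: RefCell<Vec<ExtentData>>,
}

impl Extent {
    // The extra `maps` parameter is why every call site in the diff changes
    // from scope.node_id() to scope.node_id(&tcx.region_maps).
    fn node_id(self, maps: &Maps) -> NodeId {
        match maps.code_extents.borrow()[self.0 as usize] {
            ExtentData::Misc(id) |
            ExtentData::DestructionScope(id) => id,
            // guessed mapping for the sketch; the real choice may differ
            ExtentData::ParameterScope { body_id, .. } => body_id,
        }
    }
}

fn main() {
    let maps = Maps { code_extents: RefCell::new(vec![ExtentData::Misc(42)]) };
    assert_eq!(Extent(0).node_id(&maps), 42);
}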
pub fn check_for_conflicting_loans(&self, scope: region::CodeExtent) {
pub fn check_for_conflicting_loans(&self, node: ast::NodeId) {
//! Checks to see whether any of the loans that are issued
//! on entrance to `scope` conflict with loans that have already been
//! issued when we enter `scope` (for example, we do not
//! on entrance to `node` conflict with loans that have already been
//! issued when we enter `node` (for example, we do not
//! permit two `&mut` borrows of the same variable).
//!
//! (Note that some loans can be *issued* without necessarily
//! taking effect yet.)
debug!("check_for_conflicting_loans(scope={:?})", scope);
debug!("check_for_conflicting_loans(node={:?})", node);
let new_loan_indices = self.loans_generated_by(scope);
let new_loan_indices = self.loans_generated_by(node);
debug!("new_loan_indices = {:?}", new_loan_indices);
self.each_issued_loan(scope, |issued_loan| {
self.each_issued_loan(node, |issued_loan| {
for &new_loan_index in &new_loan_indices {
let new_loan = &self.all_loans[new_loan_index];
self.report_error_if_loans_conflict(issued_loan, new_loan);
@ -557,7 +557,8 @@ pub fn report_error_if_loan_conflicts_with_restriction(&self,
old_loan.span,
&format!("{}; {}", borrow_summary, rule_summary));
let old_loan_span = self.tcx().map.span(old_loan.kill_scope.node_id());
let old_loan_span = self.tcx().map.span(
old_loan.kill_scope.node_id(&self.tcx().region_maps));
self.bccx.span_end_note(old_loan_span,
"previous borrow ends here");
@ -673,7 +674,7 @@ pub fn analyze_restrictions_on_use(&self,
let mut ret = UseOk;
self.each_in_scope_loan_affecting_path(
region::CodeExtent::from_node_id(expr_id), use_path, |loan| {
self.tcx().region_maps.node_extent(expr_id), use_path, |loan| {
if !compatible_borrow_kinds(loan.kind, borrow_kind) {
ret = UseWhileBorrowed(loan.loan_path.clone(), loan.span);
false
@ -787,7 +788,7 @@ fn check_assignment(&self,
// Check that we don't invalidate any outstanding loans
if let Some(loan_path) = opt_loan_path(&assignee_cmt) {
let scope = region::CodeExtent::from_node_id(assignment_id);
let scope = self.tcx().region_maps.node_extent(assignment_id);
self.each_in_scope_loan_affecting_path(scope, &*loan_path, |loan| {
self.report_illegal_mutation(assignment_span, &*loan_path, loan);
false

View File

@ -44,7 +44,7 @@ pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
let mut glcx = GatherLoanCtxt {
bccx: bccx,
all_loans: Vec::new(),
item_ub: region::CodeExtent::from_node_id(body.id),
item_ub: bccx.tcx.region_maps.node_extent(body.id),
move_data: MoveData::new(),
move_error_collector: move_error::MoveErrorCollector::new(),
};
@ -360,7 +360,9 @@ fn guarantee_valid(&mut self,
let loan_scope = match loan_region {
ty::ReScope(scope) => scope,
ty::ReFree(ref fr) => fr.scope.to_code_extent(),
ty::ReFree(ref fr) => {
fr.scope.to_code_extent(&self.tcx().region_maps)
}
ty::ReStatic => {
// If we get here, an error must have been
@ -387,7 +389,7 @@ fn guarantee_valid(&mut self,
};
debug!("loan_scope = {:?}", loan_scope);
let borrow_scope = region::CodeExtent::from_node_id(borrow_id);
let borrow_scope = self.tcx().region_maps.node_extent(borrow_id);
let gen_scope = self.compute_gen_scope(borrow_scope, loan_scope);
debug!("gen_scope = {:?}", gen_scope);

View File

@ -191,6 +191,7 @@ fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>,
-> AnalysisData<'a, 'tcx>
{
// Check the body of fn items.
let tcx = this.tcx;
let id_range = ast_util::compute_id_range_for_fn_body(fk, decl, body, sp, id);
let (all_loans, move_data) =
gather_loans::gather_loans_in_fn(this, id, decl, body);
@ -204,8 +205,9 @@ fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>,
id_range,
all_loans.len());
for (loan_idx, loan) in all_loans.iter().enumerate() {
loan_dfcx.add_gen(loan.gen_scope.node_id(), loan_idx);
loan_dfcx.add_kill(KillFrom::ScopeEnd, loan.kill_scope.node_id(), loan_idx);
loan_dfcx.add_gen(loan.gen_scope.node_id(&tcx.region_maps), loan_idx);
loan_dfcx.add_kill(KillFrom::ScopeEnd,
loan.kill_scope.node_id(&tcx.region_maps), loan_idx);
}
loan_dfcx.add_kills_from_flow_exits(cfg);
loan_dfcx.propagate(cfg, body);
@ -414,7 +416,7 @@ pub fn kill_scope(&self, tcx: &ty::ctxt<'tcx>) -> region::CodeExtent {
LpVar(local_id) => tcx.region_maps.var_scope(local_id),
LpUpvar(upvar_id) => {
let block_id = closure_to_block(upvar_id.closure_expr_id, tcx);
region::CodeExtent::from_node_id(block_id)
tcx.region_maps.node_extent(block_id)
}
LpDowncast(ref base, _) |
LpExtend(ref base, _, _) => base.kill_scope(tcx),
@ -1135,7 +1137,7 @@ pub fn cmt_to_path_or_string(&self, cmt: &mc::cmt<'tcx>) -> String {
fn statement_scope_span(tcx: &ty::ctxt, region: ty::Region) -> Option<Span> {
match region {
ty::ReScope(scope) => {
match tcx.map.find(scope.node_id()) {
match tcx.map.find(scope.node_id(&tcx.region_maps)) {
Some(ast_map::NodeStmt(stmt)) => Some(stmt.span),
_ => None
}

View File

@ -494,7 +494,7 @@ fn add_gen_kills(&self,
LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
let kill_scope = path.loan_path.kill_scope(tcx);
let path = *self.path_map.borrow().get(&path.loan_path).unwrap();
self.kill_moves(path, kill_scope.node_id(),
self.kill_moves(path, kill_scope.node_id(&tcx.region_maps),
KillFrom::ScopeEnd, dfcx_moves);
}
LpExtend(..) => {}
@ -509,7 +509,7 @@ fn add_gen_kills(&self,
LpVar(..) | LpUpvar(..) | LpDowncast(..) => {
let kill_scope = lp.kill_scope(tcx);
dfcx_assign.add_kill(KillFrom::ScopeEnd,
kill_scope.node_id(),
kill_scope.node_id(&tcx.region_maps),
assignment_index);
}
LpExtend(..) => {

View File

@ -18,6 +18,7 @@
use rustc_typeck::middle::lang_items;
use rustc_typeck::middle::free_region::FreeRegionMap;
use rustc_typeck::middle::region::{self, CodeExtent, DestructionScopeData};
use rustc_typeck::middle::region::CodeExtentData;
use rustc_typeck::middle::resolve_lifetime;
use rustc_typeck::middle::stability;
use rustc_typeck::middle::subst;
@ -153,24 +154,25 @@ pub fn tcx(&self) -> &ty::ctxt<'tcx> {
self.infcx.tcx
}
pub fn create_region_hierarchy(&self, rh: &RH) {
pub fn create_region_hierarchy(&self, rh: &RH, parent: CodeExtent) {
let me = self.infcx.tcx.region_maps.intern_node(rh.id, parent);
for child_rh in rh.sub {
self.create_region_hierarchy(child_rh);
self.infcx.tcx.region_maps.record_encl_scope(
CodeExtent::from_node_id(child_rh.id),
CodeExtent::from_node_id(rh.id));
self.create_region_hierarchy(child_rh, me);
}
}
pub fn create_simple_region_hierarchy(&self) {
// creates a region hierarchy where 1 is root, 10 and 11 are
// children of 1, etc
let dscope = self.infcx.tcx.region_maps.intern_code_extent(
CodeExtentData::DestructionScope(1), region::ROOT_CODE_EXTENT);
self.create_region_hierarchy(
&RH {id: 1,
sub: &[RH {id: 10,
sub: &[]},
RH {id: 11,
sub: &[]}]});
sub: &[]}]},
dscope);
}
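(Editor's note: with record_encl_scope gone, the test helper hands each node its parent extent explicitly, parent before child. For the 1 -> {10, 11} tree built above, the flat tables end up shaped roughly like this; indices and names are illustrative only.)

fn main() {
    // index 0: destruction scope of node 1 (interned first by the helper)
    // index 1: extent of node 1, child of index 0
    // index 2: extent of node 10, child of index 1
    // index 3: extent of node 11, child of index 1
    let node_ids: Vec<u32> = vec![1, 1, 10, 11];
    let parents: Vec<usize> = vec![0, 0, 1, 1];

    // Both leaves report node 1's extent as their enclosing scope, which is
    // the relationship the old record_encl_scope calls recorded in the map.
    assert_eq!(node_ids[parents[2]], 1);
    assert_eq!(node_ids[parents[3]], 1);
}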
#[allow(dead_code)] // this seems like it could be useful, even if we don't use it now
@ -321,7 +323,7 @@ pub fn t_rptr_late_bound_with_debruijn(&self,
}
pub fn t_rptr_scope(&self, id: ast::NodeId) -> Ty<'tcx> {
let r = ty::ReScope(CodeExtent::from_node_id(id));
let r = ty::ReScope(self.tcx().region_maps.node_extent(id));
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r),
self.tcx().types.isize)
}
@ -462,7 +464,8 @@ fn sub_free_bound_false() {
//! does NOT hold.
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
let t_rptr_free1 = env.t_rptr_free(0, 1);
env.create_simple_region_hierarchy();
let t_rptr_free1 = env.t_rptr_free(1, 1);
let t_rptr_bound1 = env.t_rptr_late_bound(1);
env.check_not_sub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
@ -478,8 +481,9 @@ fn sub_bound_free_true() {
//! DOES hold.
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
env.create_simple_region_hierarchy();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free1 = env.t_rptr_free(1, 1);
env.check_sub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
})
@ -512,9 +516,10 @@ fn lub_free_bound_infer() {
//! anyhow.
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
env.create_simple_region_hierarchy();
let t_infer1 = env.infcx.next_ty_var();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free1 = env.t_rptr_free(1, 1);
env.check_lub(env.t_fn(&[t_infer1], env.tcx().types.isize),
env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
@ -535,8 +540,9 @@ fn lub_bound_bound() {
#[test]
fn lub_bound_free() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
env.create_simple_region_hierarchy();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free1 = env.t_rptr_free(1, 1);
env.check_lub(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize));
@ -568,8 +574,9 @@ fn lub_bound_bound_inverse_order() {
#[test]
fn lub_free_free() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free2 = env.t_rptr_free(0, 2);
env.create_simple_region_hierarchy();
let t_rptr_free1 = env.t_rptr_free(1, 1);
let t_rptr_free2 = env.t_rptr_free(1, 2);
let t_rptr_static = env.t_rptr_static();
env.check_lub(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free2], env.tcx().types.isize),
@ -594,9 +601,10 @@ fn lub_returning_scope() {
#[test]
fn glb_free_free_with_common_scope() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free2 = env.t_rptr_free(0, 2);
let t_rptr_scope = env.t_rptr_scope(0);
env.create_simple_region_hierarchy();
let t_rptr_free1 = env.t_rptr_free(1, 1);
let t_rptr_free2 = env.t_rptr_free(1, 2);
let t_rptr_scope = env.t_rptr_scope(1);
env.check_glb(env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free2], env.tcx().types.isize),
env.t_fn(&[t_rptr_scope], env.tcx().types.isize));
@ -617,8 +625,9 @@ fn glb_bound_bound() {
#[test]
fn glb_bound_free() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
env.create_simple_region_hierarchy();
let t_rptr_bound1 = env.t_rptr_late_bound(1);
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free1 = env.t_rptr_free(1, 1);
env.check_glb(env.t_fn(&[t_rptr_bound1], env.tcx().types.isize),
env.t_fn(&[t_rptr_free1], env.tcx().types.isize),
env.t_fn(&[t_rptr_bound1], env.tcx().types.isize));
@ -738,10 +747,11 @@ fn escaping() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
// Situation:
// Theta = [A -> &'a foo]
env.create_simple_region_hierarchy();
assert!(!env.t_nil().has_escaping_regions());
let t_rptr_free1 = env.t_rptr_free(0, 1);
let t_rptr_free1 = env.t_rptr_free(1, 1);
assert!(!t_rptr_free1.has_escaping_regions());
let t_rptr_bound1 = env.t_rptr_late_bound_with_debruijn(1, ty::DebruijnIndex::new(1));

View File

@ -253,18 +253,16 @@ fn push_ast_cleanup_scope(&self, debug_loc: NodeIdAndSpan) {
// now we just say that if there is already an AST scope on the stack,
// this new AST scope had better be its immediate child.
let top_scope = self.top_ast_scope();
let region_maps = &self.ccx.tcx().region_maps;
if top_scope.is_some() {
assert!((self.ccx
.tcx()
.region_maps
.opt_encl_scope(region::CodeExtent::from_node_id(debug_loc.id))
.map(|s|s.node_id()) == top_scope)
assert!((region_maps
.opt_encl_scope(region_maps.node_extent(debug_loc.id))
.map(|s|s.node_id(region_maps)) == top_scope)
||
(self.ccx
.tcx()
.region_maps
.opt_encl_scope(region::CodeExtent::DestructionScope(debug_loc.id))
.map(|s|s.node_id()) == top_scope));
(region_maps
.opt_encl_scope(region_maps.lookup_code_extent(
region::CodeExtentData::DestructionScope(debug_loc.id)))
.map(|s|s.node_id(region_maps)) == top_scope));
}
self.push_scope(CleanupScope::new(AstScopeKind(debug_loc.id),
@ -1111,7 +1109,7 @@ pub fn temporary_scope(tcx: &ty::ctxt,
-> ScopeId {
match tcx.region_maps.temporary_scope(id) {
Some(scope) => {
let r = AstScope(scope.node_id());
let r = AstScope(scope.node_id(&tcx.region_maps));
debug!("temporary_scope({}) = {:?}", id, r);
r
}
@ -1125,7 +1123,7 @@ pub fn temporary_scope(tcx: &ty::ctxt,
pub fn var_scope(tcx: &ty::ctxt,
id: ast::NodeId)
-> ScopeId {
let r = AstScope(tcx.region_maps.var_scope(id).node_id());
let r = AstScope(tcx.region_maps.var_scope(id).node_id(&tcx.region_maps));
debug!("var_scope({}) = {:?}", id, r);
r
}

View File

@ -294,7 +294,9 @@ fn visit_fn_body(&mut self,
let old_body_id = self.set_body_id(body.id);
self.relate_free_regions(&fn_sig[..], body.id, span);
link_fn_args(self, CodeExtent::from_node_id(body.id), &fn_decl.inputs[..]);
link_fn_args(self,
self.tcx().region_maps.node_extent(body.id),
&fn_decl.inputs[..]);
self.visit_block(body);
self.visit_region_obligations(body.id);
@ -564,17 +566,15 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
// No matter what, the type of each expression must outlive the
// scope of that expression. This also guarantees basic WF.
let expr_ty = rcx.resolve_node_type(expr.id);
// the region corresponding to this expression
let expr_region = ty::ReScope(rcx.tcx().region_maps.node_extent(expr.id));
type_must_outlive(rcx, infer::ExprTypeIsNotInScope(expr_ty, expr.span),
expr_ty, ty::ReScope(CodeExtent::from_node_id(expr.id)));
expr_ty, expr_region);
let method_call = MethodCall::expr(expr.id);
let opt_method_callee = rcx.fcx.inh.tables.borrow().method_map.get(&method_call).cloned();
let has_method_map = opt_method_callee.is_some();
// the region corresponding to this expression
let expr_region = ty::ReScope(CodeExtent::from_node_id(expr.id));
// If we are calling a method (either explicitly or via an
// overloaded operator), check that all of the types provided as
// arguments for its type parameters are well-formed, and all the regions
@ -609,7 +609,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
// FIXME(#6268) remove to support nested method calls
type_of_node_must_outlive(
rcx, infer::AutoBorrow(expr.span),
expr.id, ty::ReScope(CodeExtent::from_node_id(expr.id)));
expr.id, expr_region);
}
}
/*
@ -726,7 +726,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
type_must_outlive(rcx,
infer::Operand(expr.span),
ty,
ty::ReScope(CodeExtent::from_node_id(expr.id)));
expr_region);
}
visit::walk_expr(rcx, expr);
}
@ -756,7 +756,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
};
if let ty::TyRef(r_ptr, _) = base_ty.sty {
mk_subregion_due_to_dereference(
rcx, expr.span, ty::ReScope(CodeExtent::from_node_id(expr.id)), *r_ptr);
rcx, expr.span, expr_region, *r_ptr);
}
visit::walk_expr(rcx, expr);
@ -789,8 +789,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
//
// FIXME(#6268) nested method calls requires that this rule change
let ty0 = rcx.resolve_node_type(expr.id);
type_must_outlive(rcx, infer::AddrOf(expr.span),
ty0, ty::ReScope(CodeExtent::from_node_id(expr.id)));
type_must_outlive(rcx, infer::AddrOf(expr.span), ty0, expr_region);
visit::walk_expr(rcx, expr);
}
@ -919,7 +918,7 @@ fn constrain_call<'a, I: Iterator<Item=&'a ast::Expr>>(rcx: &mut Rcx,
// call occurs.
//
// FIXME(#6268) to support nested method calls, should be callee_id
let callee_scope = CodeExtent::from_node_id(call_expr.id);
let callee_scope = rcx.tcx().region_maps.node_extent(call_expr.id);
let callee_region = ty::ReScope(callee_scope);
debug!("callee_region={:?}", callee_region);
@ -966,7 +965,8 @@ fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
derefs,
derefd_ty);
let r_deref_expr = ty::ReScope(CodeExtent::from_node_id(deref_expr.id));
let s_deref_expr = rcx.tcx().region_maps.node_extent(deref_expr.id);
let r_deref_expr = ty::ReScope(s_deref_expr);
for i in 0..derefs {
let method_call = MethodCall::autoderef(deref_expr.id, i as u32);
debug!("constrain_autoderefs: method_call={:?} (of {:?} total)", method_call, derefs);
@ -1083,7 +1083,7 @@ fn constrain_index<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
debug!("constrain_index(index_expr=?, indexed_ty={}",
rcx.fcx.infcx().ty_to_string(indexed_ty));
let r_index_expr = ty::ReScope(CodeExtent::from_node_id(index_expr.id));
let r_index_expr = ty::ReScope(rcx.tcx().region_maps.node_extent(index_expr.id));
if let ty::TyRef(r_ptr, mt) = indexed_ty.sty {
match mt.ty.sty {
ty::TySlice(_) | ty::TyStr => {
@ -1234,7 +1234,7 @@ fn link_autoref(rcx: &Rcx,
}
ty::AutoUnsafe(m) => {
let r = ty::ReScope(CodeExtent::from_node_id(expr.id));
let r = ty::ReScope(rcx.tcx().region_maps.node_extent(expr.id));
link_region(rcx, expr.span, &r, ty::BorrowKind::from_mutbl(m), expr_cmt);
}
}
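(Editor's note: most of the regionck churn above is the same substitution, ty::ReScope(CodeExtent::from_node_id(id)) becoming ty::ReScope(rcx.tcx().region_maps.node_extent(id)), computed once per expression where possible. A minimal sketch of what a node_extent lookup amounts to under the interned scheme; the types are hypothetical simplifications and the panic message is invented.)

use std::cell::RefCell;
use std::collections::HashMap;

type NodeId = u32;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum ExtentData {
    Misc(NodeId),
}

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Extent(u32);

struct Maps {
    interner: RefCell<HashMap<ExtentData, Extent>>,
}

impl Maps {
    // node_extent can only hand back an extent that region resolution has
    // already interned; a missing entry would indicate an upstream bug.
    fn node_extent(&self, id: NodeId) -> Extent {
        *self.interner.borrow()
            .get(&ExtentData::Misc(id))
            .expect("no code extent interned for node")
    }
}

fn main() {
    let maps = Maps { interner: RefCell::new(HashMap::new()) };
    maps.interner.borrow_mut().insert(ExtentData::Misc(5), Extent(3));
    assert_eq!(maps.node_extent(5), Extent(3));
}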