Auto merge of #29188 - nikomatsakis:remove-contraction, r=pnkfelix
This fixes #29048 (though I think adding better transactional support would be a better fix for that issue, that is more difficult). It also simplifies region inference and changes the model to a pure dataflow one, as discussed in [this internals thread](https://internals.rust-lang.org/t/rough-thoughts-on-the-impl-of-region-inference-mir-etc/2800). I am not 100% sure this PR is the right thing to do -- or at least not at this moment -- so thoughts on that would be appreciated. r? @pnkfelix cc @arielb1
Commit 427140f771
@@ -61,12 +61,16 @@ impl fmt::Debug for DefId {
// Unfortunately, there seems to be no way to attempt to print
// a path for a def-id, so I'll just make a best effort for now
// and otherwise fallback to just printing the crate/node pair
try!(ty::tls::with_opt(|opt_tcx| {
if let Some(tcx) = opt_tcx {
try!(write!(f, " => {}", tcx.item_path_str(*self)));
}
Ok(())
}));
if self.is_local() { // (1)
// (1) side-step fact that not all external things have paths at
// the moment, such as type parameters
try!(ty::tls::with_opt(|opt_tcx| {
if let Some(tcx) = opt_tcx {
try!(write!(f, " => {}", tcx.item_path_str(*self)));
}
Ok(())
}));
}

write!(f, " }}")
}
@@ -65,7 +65,6 @@ use super::ValuePairs;
use super::region_inference::RegionResolutionError;
use super::region_inference::ConcreteFailure;
use super::region_inference::SubSupConflict;
use super::region_inference::SupSupConflict;
use super::region_inference::GenericBoundFailure;
use super::region_inference::GenericKind;
use super::region_inference::ProcessedErrors;
@@ -258,13 +257,6 @@ pub trait ErrorReporting<'tcx> {
sup_origin: SubregionOrigin<'tcx>,
sup_region: Region);

fn report_sup_sup_conflict(&self,
var_origin: RegionVariableOrigin,
origin1: SubregionOrigin<'tcx>,
region1: Region,
origin2: SubregionOrigin<'tcx>,
region2: Region);

fn report_processed_errors(&self,
var_origin: &[RegionVariableOrigin],
trace_origin: &[(TypeTrace<'tcx>, TypeError<'tcx>)],
@@ -313,14 +305,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
sup_origin, sup_r);
}

SupSupConflict(var_origin,
origin1, r1,
origin2, r2) => {
self.report_sup_sup_conflict(var_origin,
origin1, r1,
origin2, r2);
}

ProcessedErrors(ref var_origins,
ref trace_origins,
ref same_regions) => {
@@ -376,7 +360,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
None => processed_errors.push((*error).clone()),
}
}
SupSupConflict(..) => processed_errors.push((*error).clone()),
_ => () // This shouldn't happen
}
}
@@ -933,29 +916,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
self.note_region_origin(&sub_origin);
}

fn report_sup_sup_conflict(&self,
var_origin: RegionVariableOrigin,
origin1: SubregionOrigin<'tcx>,
region1: Region,
origin2: SubregionOrigin<'tcx>,
region2: Region) {
self.report_inference_failure(var_origin);

self.tcx.note_and_explain_region(
"first, the lifetime must be contained by ",
region1,
"...");

self.note_region_origin(&origin1);

self.tcx.note_and_explain_region(
"but, the lifetime must also be contained by ",
region2,
"...");

self.note_region_origin(&origin2);
}

fn report_processed_errors(&self,
var_origins: &[RegionVariableOrigin],
trace_origins: &[(TypeTrace<'tcx>, TypeError<'tcx>)],
@@ -2,13 +2,12 @@ Region inference

# Terminology

Note that we use the terms region and lifetime interchangeably,
though the term `lifetime` is preferred.
Note that we use the terms region and lifetime interchangeably.

# Introduction

Region inference uses a somewhat more involved algorithm than type
inference. It is not the most efficient thing ever written though it
seems to work well enough in practice (famous last words). The reason
that we use a different algorithm is because, unlike with types, it is
impractical to hand-annotate with regions (in some cases, there aren't
@@ -25,22 +24,42 @@ once.

The constraints are always of one of three possible forms:

- ConstrainVarSubVar(R_i, R_j) states that region variable R_i
must be a subregion of R_j
- ConstrainRegSubVar(R, R_i) states that the concrete region R
(which must not be a variable) must be a subregion of the variable R_i
- ConstrainVarSubReg(R_i, R) is the inverse
- `ConstrainVarSubVar(Ri, Rj)` states that region variable Ri must be
a subregion of Rj
- `ConstrainRegSubVar(R, Ri)` states that the concrete region R (which
must not be a variable) must be a subregion of the variable Ri
- `ConstrainVarSubReg(Ri, R)` states that the variable Ri should be less
than the concrete region R. This is kind of deprecated and ought to
be replaced with a verify (they essentially play the same role).

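To make the three forms concrete, here is a minimal sketch of how they can be modeled. `RegionVid` and `Region` below are illustrative stand-ins (the real types live in `middle::ty`), though the variant shapes mirror the `Constraint` enum that appears later in this diff:

```rust
// Illustrative stand-ins only; the real types live in middle::ty.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct RegionVid(pub u32);

#[derive(Copy, Clone, Debug, PartialEq)]
pub enum Region { ReStatic, ReScope(u32), ReVar(RegionVid) }

/// The three constraint forms described above.
pub enum Constraint {
    ConstrainVarSubVar(RegionVid, RegionVid), // Ri <= Rj (both variables)
    ConstrainRegSubVar(Region, RegionVid),    // R  <= Ri (concrete <= variable)
    ConstrainVarSubReg(RegionVid, Region),    // Ri <= R  (variable <= concrete)
}
```
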
In addition to constraints, we also gather up a set of "verifys"
(what, you don't think Verify is a noun? Get used to it my
friend!). These represent relations that must hold but which don't
influence inference proper. These take the form of:

- `VerifyRegSubReg(Ri, Rj)` indicates that Ri <= Rj must hold,
where Rj is not an inference variable (and Ri may or may not contain
one). This doesn't influence inference because we will already have
inferred Ri to be as small as possible, so then we just test whether
that result was less than Rj or not.
- `VerifyGenericBound(R, Vb)` is a more complex expression which tests
that the region R must satisfy the bound `Vb`. The bounds themselves
may have structure like "must outlive one of the following regions"
or "must outlive ALL of the following regions". These bounds arise
from constraints like `T: 'a` -- if we know that `T: 'b` and `T: 'c`
(say, from where clauses), then we can conclude that `T: 'a` if `'b:
'a` *or* `'c: 'a`.

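A hedged sketch of how a generic bound like the `T: 'a` example might be decided once region relations are known; the function and the `outlives` helper are hypothetical, not the compiler's actual API:

```rust
// Illustrative only: `outlives(x, y)` is a hypothetical helper meaning
// "x: y", i.e. x outlives y.
fn generic_bound_holds<R: Copy>(outlives: impl Fn(R, R) -> bool,
                                a: R,               // the `'a` being tested
                                known_bounds: &[R], // e.g. `'b`, `'c` from where clauses
                                require_all: bool) -> bool {
    if require_all {
        // "must outlive ALL of the following regions"
        known_bounds.iter().all(|&b| outlives(b, a))
    } else {
        // "must outlive one of the following regions": `'b: 'a` *or* `'c: 'a`
        known_bounds.iter().any(|&b| outlives(b, a))
    }
}
```
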
# Building up the constraints

Variables and constraints are created using the following methods:

- `new_region_var()` creates a new, unconstrained region variable;
- `make_subregion(R_i, R_j)` states that R_i is a subregion of R_j
- `lub_regions(R_i, R_j) -> R_k` returns a region R_k which is
the smallest region that is greater than both R_i and R_j
- `glb_regions(R_i, R_j) -> R_k` returns a region R_k which is
the greatest region that is smaller than both R_i and R_j
- `make_subregion(Ri, Rj)` states that Ri is a subregion of Rj
- `lub_regions(Ri, Rj) -> Rk` returns a region Rk which is
the smallest region that is greater than both Ri and Rj
- `glb_regions(Ri, Rj) -> Rk` returns a region Rk which is
the greatest region that is smaller than both Ri and Rj

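A hypothetical usage sketch of that API; the trait below exists only to make the snippet self-contained, and the real methods (on `RegionVarBindings`) also take origin/span arguments that are omitted here:

```rust
// Illustrative only: origin/span arguments omitted.
trait RegionConstraints {
    type Var: Copy;
    fn new_region_var(&mut self) -> Self::Var;
    fn make_subregion(&mut self, sub: Self::Var, sup: Self::Var);
    fn lub_regions(&mut self, a: Self::Var, b: Self::Var) -> Self::Var;
    fn glb_regions(&mut self, a: Self::Var, b: Self::Var) -> Self::Var;
}

fn demo<C: RegionConstraints>(cx: &mut C) {
    let r1 = cx.new_region_var();
    let r2 = cx.new_region_var();
    cx.make_subregion(r1, r2);         // r1 <= r2
    let _lub = cx.lub_regions(r1, r2); // smallest region containing both
    let _glb = cx.glb_regions(r1, r2); // greatest region contained in both
}
```
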
The actual region resolution algorithm is not entirely
obvious, though it is also not overly complex.
@@ -54,14 +73,6 @@ Alternatively, you can call `commit()` which ends all snapshots.
Snapshots can be recursive---so you can start a snapshot when another
is in progress, but only the root snapshot can "commit".

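A minimal sketch of that snapshot discipline, assuming an undo-log representation; the names and types below are stand-ins rather than the real `RegionVarBindings` API:

```rust
// Illustrative only: names and types are stand-ins, not the real API.
struct Snapshot { undo_len: usize }

struct Bindings<T> {
    undo_log: Vec<T>,      // every mutation gets recorded here
    open_snapshots: usize, // current nesting depth
}

impl<T> Bindings<T> {
    fn start_snapshot(&mut self) -> Snapshot {
        self.open_snapshots += 1;
        Snapshot { undo_len: self.undo_log.len() }
    }
    fn rollback_to(&mut self, snap: Snapshot) {
        // forget the actions recorded since `snap` (a real implementation
        // would replay them in reverse to undo their effects)
        self.undo_log.truncate(snap.undo_len);
        self.open_snapshots -= 1;
    }
    fn commit(&mut self, _snap: Snapshot) {
        self.open_snapshots -= 1;
        // only when the root snapshot commits is the undo log discarded
        if self.open_snapshots == 0 { self.undo_log.clear(); }
    }
}
```
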
# Resolving constraints

The constraint resolution algorithm is not super complex but also not
entirely obvious. Here I describe the problem somewhat abstractly,
then describe how the current code works. There may be other, smarter
ways of doing this with which I am unfamiliar and can't be bothered to
research at the moment. - NDM

## The problem

Basically our input is a directed graph where nodes can be divided
@@ -83,31 +94,20 @@ Before resolution begins, we build up the constraints in a hashmap
that maps `Constraint` keys to spans. During resolution, we construct
the actual `Graph` structure that we describe here.

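As a rough illustration of that step, a toy "constraint map in, successor graph out" function; `Span` and `Constraint` here are simplified stand-ins, not the compiler's types:

```rust
use std::collections::HashMap;

// Simplified stand-ins for the compiler's types.
type Span = (u32, u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum Constraint {
    VarSubVar(usize, usize), // variable <= variable
    RegSubVar(usize),        // concrete <= variable
    VarSubReg(usize),        // variable <= concrete
}

// One edge per variable-to-variable constraint; node indices are variable indices.
fn build_graph(constraints: &HashMap<Constraint, Span>, num_vars: usize) -> Vec<Vec<usize>> {
    let mut successors = vec![Vec::new(); num_vars];
    for constraint in constraints.keys() {
        if let Constraint::VarSubVar(sub, sup) = *constraint {
            successors[sub].push(sup); // the value of `sub` flows into `sup`
        }
    }
    successors
}
```
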
## Our current algorithm
## Computing the values for region variables

We divide region variables into two groups: Expanding and Contracting.
Expanding region variables are those that have a concrete region
predecessor (direct or indirect). Contracting region variables are
all others.
The algorithm is a simple dataflow algorithm. Each region variable
begins as empty. We iterate over the constraints, and for each constraint
we grow the relevant region variable to be as big as it must be to meet all the
constraints. This means the region variables can grow to be `'static` if
necessary.

We first resolve the values of Expanding region variables and then
process Contracting ones. We currently use an iterative, fixed-point
procedure (but read on, I believe this could be replaced with a linear
walk). Basically we iterate over the edges in the graph, ensuring
that, if the source of the edge has a value, then this value is a
subregion of the target value. If the target does not yet have a
value, it takes the value from the source. If the target already had
a value, then the resulting value is Least Upper Bound of the old and
new values. When we are done, each Expanding node will have the
smallest region that it could possibly have and still satisfy the
constraints.
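A minimal sketch of that fixed-point growth, assuming a generic region value `R` and a `lub` function standing in for `lub_concrete_regions` (illustrative, not the compiler's actual code):

```rust
// Illustrative only: every variable starts at the empty region and only grows.
fn expand<R: Copy + PartialEq>(values: &mut [R],
                               var_sub_var: &[(usize, usize)], // (sub, sup) constraints
                               lub: impl Fn(R, R) -> R) {
    let mut changed = true;
    while changed {
        changed = false;
        for &(sub, sup) in var_sub_var {
            // grow the target until it is at least as big as the source
            let grown = lub(values[sub], values[sup]);
            if grown != values[sup] {
                values[sup] = grown;
                changed = true;
            }
        }
    }
}
```
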
## Verification

We next process the Contracting nodes. Here we again iterate over the
edges, only this time we move values from target to source (if the
source is a Contracting node). For each contracting node, we compute
its value as the GLB of all its successors. Basically contracting
nodes ensure that there is overlap between their successors; we will
ultimately infer the largest overlap possible.
After all constraints are fully propagated, we do a "verification"
step where we walk over the verify bounds and check that they are
satisfied. These bounds represent the "maximal" values that a region
variable can take on, basically.

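A hedged sketch of that final check, again with generic stand-ins rather than the compiler's types:

```rust
// Illustrative only: after expansion, values are final; verifys are only
// checked, never used to grow a variable.
fn verify<R: Copy>(values: &[R],
                   verifys: &[(usize, R)], // (variable, required upper bound)
                   is_subregion_of: impl Fn(R, R) -> bool) -> Result<(), usize> {
    for (i, &(var, bound)) in verifys.iter().enumerate() {
        if !is_subregion_of(values[var], bound) {
            return Err(i); // this verify failed; report an error for it
        }
    }
    Ok(())
}
```
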
# The Region Hierarchy

@@ -16,19 +16,16 @@ pub use self::UndoLogEntry::*;
pub use self::CombineMapType::*;
pub use self::RegionResolutionError::*;
pub use self::VarValue::*;
use self::Classification::*;

use super::{RegionVariableOrigin, SubregionOrigin, TypeTrace, MiscVariable};

use rustc_data_structures::graph::{self, Direction, NodeIndex};
use middle::free_region::FreeRegionMap;
use middle::region;
use middle::ty::{self, Ty};
use middle::ty::{BoundRegion, FreeRegion, Region, RegionVid};
use middle::ty::{ReEmpty, ReStatic, ReFree, ReEarlyBound};
use middle::ty::{ReLateBound, ReScope, ReVar, ReSkolemized, BrFresh};
use middle::ty::error::TypeError;
use middle::ty::relate::RelateResult;
use util::common::indenter;
use util::nodemap::{FnvHashMap, FnvHashSet};
@@ -50,6 +47,8 @@ pub enum Constraint {
ConstrainRegSubVar(Region, RegionVid),

// Region variable is subregion of concrete region
//
// FIXME(#29436) -- should be removed in favor of a Verify
ConstrainVarSubReg(RegionVid, Region),
}
@@ -144,15 +143,6 @@ pub enum RegionResolutionError<'tcx> {
SubregionOrigin<'tcx>, Region,
SubregionOrigin<'tcx>, Region),

/// `SupSupConflict(v, origin1, r1, origin2, r2)`:
///
/// Could not infer a value for `v` because `v <= r1` (due to
/// `origin1`) and `v <= r2` (due to `origin2`) and
/// `r1` and `r2` have no intersection.
SupSupConflict(RegionVariableOrigin,
SubregionOrigin<'tcx>, Region,
SubregionOrigin<'tcx>, Region),

/// For subsets of `ConcreteFailure` and `SubSupConflict`, we can derive
/// more specific error messages by suggesting to the user where they
/// should put a lifetime. In those cases we process and put those errors
@@ -824,147 +814,14 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
}
}
}

fn glb_concrete_regions(&self,
free_regions: &FreeRegionMap,
a: Region,
b: Region)
-> RelateResult<'tcx, Region>
{
debug!("glb_concrete_regions({:?}, {:?})", a, b);
match (a, b) {
(ReLateBound(..), _) |
(_, ReLateBound(..)) |
(ReEarlyBound(..), _) |
(_, ReEarlyBound(..)) => {
self.tcx.sess.bug(
&format!("cannot relate bound region: GLB({:?}, {:?})",
a,
b));
}

(ReStatic, r) | (r, ReStatic) => {
// static lives longer than everything else
Ok(r)
}

(ReEmpty, _) | (_, ReEmpty) => {
// nothing lives shorter than everything else
Ok(ReEmpty)
}

(ReVar(v_id), _) |
(_, ReVar(v_id)) => {
self.tcx.sess.span_bug(
(*self.var_origins.borrow())[v_id.index as usize].span(),
&format!("glb_concrete_regions invoked with \
non-concrete regions: {:?}, {:?}",
a,
b));
}

(ReFree(fr), ReScope(s_id)) |
(ReScope(s_id), ReFree(fr)) => {
let s = ReScope(s_id);
// Free region is something "at least as big as
// `fr.scope_id`." If we find that the scope `fr.scope_id` is bigger
// than the scope `s_id`, then we can say that the GLB
// is the scope `s_id`. Otherwise, as we do not know how
// big the free region is precisely, the GLB is undefined.
if self.tcx.region_maps.nearest_common_ancestor(fr.scope, s_id) == fr.scope ||
free_regions.is_static(fr) {
Ok(s)
} else {
Err(TypeError::RegionsNoOverlap(b, a))
}
}

(ReScope(a_id), ReScope(b_id)) => {
self.intersect_scopes(a, b, a_id, b_id)
}

(ReFree(ref a_fr), ReFree(ref b_fr)) => {
self.glb_free_regions(free_regions, a_fr, b_fr)
}

// For these types, we cannot define any additional
// relationship:
(ReSkolemized(..), _) |
(_, ReSkolemized(..)) => {
if a == b {
Ok(a)
} else {
Err(TypeError::RegionsNoOverlap(b, a))
}
}
}
}

/// Computes a region that is enclosed by both free region arguments, if any. Guarantees that
/// if the same two regions are given as argument, in any order, a consistent result is
/// returned.
fn glb_free_regions(&self,
free_regions: &FreeRegionMap,
a: &FreeRegion,
b: &FreeRegion)
-> RelateResult<'tcx, ty::Region>
{
return match a.cmp(b) {
Less => helper(self, free_regions, a, b),
Greater => helper(self, free_regions, b, a),
Equal => Ok(ty::ReFree(*a))
};

fn helper<'a, 'tcx>(this: &RegionVarBindings<'a, 'tcx>,
free_regions: &FreeRegionMap,
a: &FreeRegion,
b: &FreeRegion) -> RelateResult<'tcx, ty::Region>
{
if free_regions.sub_free_region(*a, *b) {
Ok(ty::ReFree(*a))
} else if free_regions.sub_free_region(*b, *a) {
Ok(ty::ReFree(*b))
} else {
this.intersect_scopes(ty::ReFree(*a), ty::ReFree(*b),
a.scope, b.scope)
}
}
}

fn intersect_scopes(&self,
region_a: ty::Region,
region_b: ty::Region,
scope_a: region::CodeExtent,
scope_b: region::CodeExtent)
-> RelateResult<'tcx, Region>
{
// We want to generate the intersection of two
// scopes or two free regions. So, if one of
// these scopes is a subscope of the other, return
// it. Otherwise fail.
debug!("intersect_scopes(scope_a={:?}, scope_b={:?}, region_a={:?}, region_b={:?})",
scope_a, scope_b, region_a, region_b);
let r_id = self.tcx.region_maps.nearest_common_ancestor(scope_a, scope_b);
if r_id == scope_a {
Ok(ReScope(scope_b))
} else if r_id == scope_b {
Ok(ReScope(scope_a))
} else {
Err(TypeError::RegionsNoOverlap(region_a, region_b))
}
}
}

// ______________________________________________________________________

#[derive(Copy, Clone, PartialEq, Debug)]
enum Classification { Expanding, Contracting }

#[derive(Copy, Clone, Debug)]
pub enum VarValue { NoValue, Value(Region), ErrorValue }
pub enum VarValue { Value(Region), ErrorValue }

struct VarData {
classification: Classification,
value: VarValue,
}

@@ -1005,12 +862,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
fn construct_var_data(&self) -> Vec<VarData> {
(0..self.num_vars() as usize).map(|_| {
VarData {
// All nodes are initially classified as contracting; during
// the expansion phase, we will shift the classification for
// those nodes that have a concrete region predecessor to
// Expanding.
classification: Contracting,
value: NoValue,
value: Value(ty::ReEmpty),
}
}).collect()
}
@@ -1062,11 +914,11 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
}
ConstrainVarSubVar(a_vid, b_vid) => {
match var_data[a_vid.index as usize].value {
NoValue | ErrorValue => false,
Value(a_region) => {
let b_node = &mut var_data[b_vid.index as usize];
self.expand_node(free_regions, a_region, b_vid, b_node)
}
ErrorValue => false,
Value(a_region) => {
let b_node = &mut var_data[b_vid.index as usize];
self.expand_node(free_regions, a_region, b_vid, b_node)
}
}
}
ConstrainVarSubReg(..) => {
@@ -1100,16 +952,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
_ => { }
}

b_data.classification = Expanding;
match b_data.value {
NoValue => {
debug!("Setting initial value of {:?} to {:?}",
b_vid, a_region);

b_data.value = Value(a_region);
return true;
}

Value(cur_region) => {
let lub = self.lub_concrete_regions(free_regions, a_region, cur_region);
if lub == cur_region {
@@ -1131,6 +974,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
}
}

// FIXME(#29436) -- this fn would just go away if we removed ConstrainVarSubReg
fn contraction(&self,
free_regions: &FreeRegionMap,
var_data: &mut [VarData]) {
@@ -1142,104 +986,31 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
.unwrap()
);
match *constraint {
ConstrainRegSubVar(..) => {
// This is an expansion constraint. Ignore.
false
}
ConstrainVarSubVar(a_vid, b_vid) => {
match var_data[b_vid.index as usize].value {
NoValue | ErrorValue => false,
Value(b_region) => {
ConstrainRegSubVar(..) |
ConstrainVarSubVar(..) => {
// Expansion will ensure that these constraints hold. Ignore.
}
ConstrainVarSubReg(a_vid, b_region) => {
let a_data = &mut var_data[a_vid.index as usize];
self.contract_node(free_regions, a_vid, a_data, b_region)
}
}
}
ConstrainVarSubReg(a_vid, b_region) => {
let a_data = &mut var_data[a_vid.index as usize];
self.contract_node(free_regions, a_vid, a_data, b_region)
}
}
})
}
debug!("contraction: {:?} == {:?}, {:?}", a_vid, a_data.value, b_region);

fn contract_node(&self,
free_regions: &FreeRegionMap,
a_vid: RegionVid,
a_data: &mut VarData,
b_region: Region)
-> bool {
debug!("contract_node({:?} == {:?}/{:?}, {:?})",
a_vid, a_data.value,
a_data.classification, b_region);
let a_region = match a_data.value {
ErrorValue => return false,
Value(a_region) => a_region,
};

return match a_data.value {
NoValue => {
assert_eq!(a_data.classification, Contracting);
a_data.value = Value(b_region);
true // changed
}

ErrorValue => false, // no change

Value(a_region) => {
match a_data.classification {
Expanding =>
check_node(self, free_regions, a_vid, a_data, a_region, b_region),
Contracting =>
adjust_node(self, free_regions, a_vid, a_data, a_region, b_region),
}
}
};

fn check_node(this: &RegionVarBindings,
free_regions: &FreeRegionMap,
a_vid: RegionVid,
a_data: &mut VarData,
a_region: Region,
b_region: Region)
-> bool
{
if !free_regions.is_subregion_of(this.tcx, a_region, b_region) {
debug!("Setting {:?} to ErrorValue: {:?} not subregion of {:?}",
a_vid,
a_region,
b_region);
a_data.value = ErrorValue;
}
false
}

fn adjust_node(this: &RegionVarBindings,
free_regions: &FreeRegionMap,
a_vid: RegionVid,
a_data: &mut VarData,
a_region: Region,
b_region: Region)
-> bool {
match this.glb_concrete_regions(free_regions, a_region, b_region) {
Ok(glb) => {
if glb == a_region {
false
} else {
debug!("Contracting value of {:?} from {:?} to {:?}",
a_vid,
a_region,
glb);
a_data.value = Value(glb);
true
if !free_regions.is_subregion_of(self.tcx, a_region, b_region) {
debug!("Setting {:?} to ErrorValue: {:?} not subregion of {:?}",
a_vid,
a_region,
b_region);
a_data.value = ErrorValue;
}
}
Err(_) => {
debug!("Setting {:?} to ErrorValue: no glb of {:?}, {:?}",
a_vid,
a_region,
b_region);
a_data.value = ErrorValue;
false
}
}
}

false
})
}

fn collect_concrete_region_errors(&self,
@@ -1308,12 +1079,6 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
Value(_) => {
/* Inference successful */
}
NoValue => {
/* Unconstrained inference: do not report an error
until the value of this variable is requested.
After all, sometimes we make region variables but never
really use their values. */
}
ErrorValue => {
/* Inference impossible, this value contains
inconsistent constraints.
@@ -1339,18 +1104,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
this portion of the code and think hard about it. =) */

let node_vid = RegionVid { index: idx as u32 };
match var_data[idx].classification {
Expanding => {
self.collect_error_for_expanding_node(
free_regions, graph, var_data, &mut dup_vec,
node_vid, errors);
}
Contracting => {
self.collect_error_for_contracting_node(
free_regions, graph, var_data, &mut dup_vec,
node_vid, errors);
}
}
self.collect_error_for_expanding_node(
free_regions, graph, &mut dup_vec, node_vid, errors);
}
}
}
@@ -1396,7 +1151,6 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
fn collect_error_for_expanding_node(&self,
free_regions: &FreeRegionMap,
graph: &RegionGraph,
var_data: &[VarData],
dup_vec: &mut [u32],
node_idx: RegionVid,
errors: &mut Vec<RegionResolutionError<'tcx>>)
@@ -1404,11 +1158,9 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
// Errors in expanding nodes result from a lower-bound that is
// not contained by an upper-bound.
let (mut lower_bounds, lower_dup) =
self.collect_concrete_regions(graph, var_data, node_idx,
graph::INCOMING, dup_vec);
self.collect_concrete_regions(graph, node_idx, graph::INCOMING, dup_vec);
let (mut upper_bounds, upper_dup) =
self.collect_concrete_regions(graph, var_data, node_idx,
graph::OUTGOING, dup_vec);
self.collect_concrete_regions(graph, node_idx, graph::OUTGOING, dup_vec);

if lower_dup || upper_dup {
return;
@@ -1459,59 +1211,8 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
upper_bounds));
}

fn collect_error_for_contracting_node(
&self,
free_regions: &FreeRegionMap,
graph: &RegionGraph,
var_data: &[VarData],
dup_vec: &mut [u32],
node_idx: RegionVid,
errors: &mut Vec<RegionResolutionError<'tcx>>)
{
// Errors in contracting nodes result from two upper-bounds
// that have no intersection.
let (upper_bounds, dup_found) =
self.collect_concrete_regions(graph, var_data, node_idx,
graph::OUTGOING, dup_vec);

if dup_found {
return;
}

for upper_bound_1 in &upper_bounds {
for upper_bound_2 in &upper_bounds {
match self.glb_concrete_regions(free_regions,
upper_bound_1.region,
upper_bound_2.region) {
Ok(_) => {}
Err(_) => {
let origin = (*self.var_origins.borrow())[node_idx.index as usize].clone();
debug!("region inference error at {:?} for {:?}: \
SupSupConflict sub: {:?} sup: {:?}",
origin, node_idx, upper_bound_1.region, upper_bound_2.region);
errors.push(SupSupConflict(
origin,
upper_bound_1.origin.clone(),
upper_bound_1.region,
upper_bound_2.origin.clone(),
upper_bound_2.region));
return;
}
}
}
}

self.tcx.sess.span_bug(
(*self.var_origins.borrow())[node_idx.index as usize].span(),
&format!("collect_error_for_contracting_node() could not find error \
for var {:?}, upper_bounds={:?}",
node_idx,
upper_bounds));
}

fn collect_concrete_regions(&self,
graph: &RegionGraph,
var_data: &[VarData],
orig_node_idx: RegionVid,
dir: Direction,
dup_vec: &mut [u32])
@@ -1536,7 +1237,6 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {

while !state.stack.is_empty() {
let node_idx = state.stack.pop().unwrap();
let classification = var_data[node_idx.index as usize].classification;

// check whether we've visited this node on some previous walk
if dup_vec[node_idx.index as usize] == u32::MAX {
@@ -1545,17 +1245,12 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> {
state.dup_found = true;
}

debug!("collect_concrete_regions(orig_node_idx={:?}, node_idx={:?}, \
classification={:?})",
orig_node_idx, node_idx, classification);
debug!("collect_concrete_regions(orig_node_idx={:?}, node_idx={:?})",
orig_node_idx, node_idx);

// figure out the direction from which this node takes its
// values, and search for concrete regions etc in that direction
let dir = match classification {
Expanding => graph::INCOMING,
Contracting => graph::OUTGOING,
};

let dir = graph::INCOMING;
process_edges(self, &mut state, graph, node_idx, dir);
}

@@ -1638,7 +1333,6 @@ fn normalize(values: &Vec<VarValue>, r: ty::Region) -> ty::Region {
fn lookup(values: &Vec<VarValue>, rid: ty::RegionVid) -> ty::Region {
match values[rid.index as usize] {
Value(r) => r,
NoValue => ReEmpty, // No constraints, return ty::ReEmpty
ErrorValue => ReStatic, // Previously reported error.
}
}
@@ -351,6 +351,11 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
self.tcx().types.isize)
}

pub fn t_rptr_empty(&self) -> Ty<'tcx> {
self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(ty::ReEmpty),
self.tcx().types.isize)
}

pub fn dummy_type_trace(&self) -> infer::TypeTrace<'tcx> {
infer::TypeTrace::dummy(self.tcx())
}
@@ -593,16 +598,15 @@ fn lub_free_free() {

#[test]
fn lub_returning_scope() {
test_env(EMPTY_SOURCE_STR,
errors(&["cannot infer an appropriate lifetime"]), |env| {
env.create_simple_region_hierarchy();
let t_rptr_scope10 = env.t_rptr_scope(10);
let t_rptr_scope11 = env.t_rptr_scope(11);

// this should generate an error when regions are resolved
env.make_lub_ty(env.t_fn(&[], t_rptr_scope10),
env.t_fn(&[], t_rptr_scope11));
})
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
env.create_simple_region_hierarchy();
let t_rptr_scope10 = env.t_rptr_scope(10);
let t_rptr_scope11 = env.t_rptr_scope(11);
let t_rptr_empty = env.t_rptr_empty();
env.check_lub(env.t_fn(&[t_rptr_scope10], env.tcx().types.isize),
env.t_fn(&[t_rptr_scope11], env.tcx().types.isize),
env.t_fn(&[t_rptr_empty], env.tcx().types.isize));
});
}

#[test]
@@ -259,17 +259,30 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
}
}
hir::PatRegion(ref inner, mutbl) => {
let inner_ty = fcx.infcx().next_ty_var();

let mt = ty::TypeAndMut { ty: inner_ty, mutbl: mutbl };
let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span));
let rptr_ty = tcx.mk_ref(tcx.mk_region(region), mt);

let expected = fcx.infcx().shallow_resolve(expected);
if check_dereferencable(pcx, pat.span, expected, &**inner) {
// `demand::subtype` would be good enough, but using
// `eqtype` turns out to be equally general. See (*)
// below for details.
demand::eqtype(fcx, pat.span, expected, rptr_ty);

// Take region, inner-type from expected type if we
// can, to avoid creating needless variables. This
// also helps with the bad interactions of the given
// hack detailed in (*) below.
let (rptr_ty, inner_ty) = match expected.sty {
ty::TyRef(_, mt) if mt.mutbl == mutbl => {
(expected, mt.ty)
}
_ => {
let inner_ty = fcx.infcx().next_ty_var();
let mt = ty::TypeAndMut { ty: inner_ty, mutbl: mutbl };
let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span));
let rptr_ty = tcx.mk_ref(tcx.mk_region(region), mt);
demand::eqtype(fcx, pat.span, expected, rptr_ty);
(rptr_ty, inner_ty)
}
};

fcx.write_ty(pat.id, rptr_ty);
check_pat(pcx, &**inner, inner_ty);
} else {
@@ -1182,9 +1182,10 @@ fn link_fn_args(rcx: &Rcx, body_scope: CodeExtent, args: &[hir::Arg]) {
let arg_ty = rcx.fcx.node_ty(arg.id);
let re_scope = ty::ReScope(body_scope);
let arg_cmt = mc.cat_rvalue(arg.id, arg.ty.span, re_scope, arg_ty);
debug!("arg_ty={:?} arg_cmt={:?}",
debug!("arg_ty={:?} arg_cmt={:?} arg={:?}",
arg_ty,
arg_cmt);
arg_cmt,
arg);
link_pattern(rcx, mc, arg_cmt, &*arg.pat);
}
}
@@ -1527,9 +1528,10 @@ pub fn type_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>,
{
let ty = rcx.resolve_type(ty);

debug!("type_must_outlive(ty={:?}, region={:?})",
debug!("type_must_outlive(ty={:?}, region={:?}, origin={:?})",
ty,
region);
region,
origin);

assert!(!ty.has_escaping_regions());

src/test/run-fail/issue-28934.rs (new file, 28 lines)
@@ -0,0 +1,28 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

// Regression test: issue had to do with "givens" in region inference,
// which were not being considered during the contraction phase.

// error-pattern:explicit panic

struct Parser<'i: 't, 't>(&'i u8, &'t u8);

impl<'i, 't> Parser<'i, 't> {
fn parse_nested_block<F, T>(&mut self, parse: F) -> Result<T, ()>
where for<'tt> F: FnOnce(&mut Parser<'i, 'tt>) -> T { panic!() }

fn expect_exhausted(&mut self) -> Result<(), ()> { Ok(()) }
}

fn main() {
let x = 0u8;
Parser(&x, &x).parse_nested_block(|input| input.expect_exhausted()).unwrap();
}
src/test/run-pass/issue-29048.rs (new file, 21 lines)
@@ -0,0 +1,21 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

pub struct Chan;
pub struct ChanSelect<'c, T> {
chans: Vec<(&'c Chan, T)>,
}
impl<'c, T> ChanSelect<'c, T> {
pub fn add_recv_ret(&mut self, chan: &'c Chan, ret: T)
{
self.chans.push((chan, ret));
}
}
fn main() {}