Reject specialized Drop impls.

See Issue 8142 for discussion.

This makes it illegal for a Drop impl to be more specialized than the
original item.

So for example, all of the following are now rejected (when they would
have been blindly accepted before):

```rust
struct S<A> { ... }
impl Drop for S<i8> { ... } // error: specialized to concrete type

struct T<'a> { ... }
impl Drop for T<'static> { ... } // error: specialized to concrete region

struct U<A> { ... }
impl<A:Clone> Drop for U<A> { ... } // error: added extra type requirement

struct V<'a,'b>;
impl<'a,'b:'a> Drop for V<'a,'b> { ... } // error: added extra region requirement
```

Due to examples like the above, this is a [breaking-change].

(The fix is either to remove the specialization from the `Drop` impl
or to transcribe the requirements into the struct/enum definition;
examples of both are shown in this PR's fixes to `libstd`.)
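
For example, here is a schematic sketch of the second fix applied to
the `U` case above (same `{ ... }` shorthand as the examples above;
the `Clone` bound is just the one from that example):

```rust
// Rejected (E0367): the Drop impl adds a `Clone` requirement that
// the struct definition does not carry.
struct U<A> { ... }
impl<A: Clone> Drop for U<A> { ... }

// Accepted: the requirement is transcribed onto the definition, so
// the impl is exactly as parametric as the item itself.
struct U<A: Clone> { ... }
impl<A: Clone> Drop for U<A> { ... }
```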

----

This is likely to be the last thing blocking the removal of the
`#[unsafe_destructor]` attribute.

Includes two new error codes for the new dropck check.

Update run-pass tests to accommodate new dropck pass.

Update tests and docs to reflect new destructor restriction.

----

Implementation notes:

We identify a specialized Drop impl as one whose self type is not as
parametric as the struct/enum definition; this is checked via
unification.

More specifically:

 1. Attempt unification of a skolemized instance of the struct/enum
    with an instance of the Drop impl's type expression where all of
    the impl's generics (i.e. the free variables of the type
    expression) have been replaced with unification variables.

 2. If unification fails, then reject Drop impl as specialized.

 3. If unification succeeds, check if any of the skolemized
    variables "leaked" into the constraint set for the inference
    context; if so, reject the Drop impl as specialized (see the
    sketch after this list).

 4. Otherwise, unification succeeded without leaking skolemized
    variables: accept the Drop impl.
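
As a concrete illustration of step 3 (a schematic sketch reusing
`T<'a>` from the examples above, with an illustrative field):
unifying a skolemized instance of `T` with the impl's `T<'static>`
does not fail outright; it can only succeed by relating the
skolemized region to `'static`, which the leak check then reports.

```rust
struct T<'a> { x: &'a u8 }

// Unifying T<'s_skol> (skolemized) with T<'static> records the
// region constraint `'s_skol == 'static`; the skolemized region has
// "leaked" into the constraint set, so step 3 rejects this impl
// with E0366.
impl Drop for T<'static> {
    fn drop(&mut self) { }
}
```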

We identify whether a Drop impl injects new predicates by simply
checking, for each predicate on the impl, whether it (after an
appropriate substitution) also appears on the struct/enum definition.
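
Schematically, with the `V` example from above (a sketch, not the
checker's actual code path): each predicate on the Drop impl is
mapped through the impl-to-item substitution and then looked up among
the predicates on the definition; it is accepted only when the
definition itself already carries it.

```rust
// Rejected (E0367): the `'b: 'a` requirement appears only on the
// Drop impl, not on the definition of V.
struct V<'a, 'b>;
impl<'a, 'b: 'a> Drop for V<'a, 'b> { ... }

// Accepted: the definition states the same requirement, so the
// substituted impl predicate is found among the item's predicates.
struct V<'a, 'b: 'a>;
impl<'a, 'b: 'a> Drop for V<'a, 'b> { ... }
```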

----

Felix S. Klock II, 2015-03-21 13:12:08 +01:00
parent 290c8de0a6, commit 5b2e8693e4
13 changed files with 341 additions and 18 deletions

----

@ -197,15 +197,16 @@ use std::ptr;
// Define a wrapper around the handle returned by the foreign code.
// Unique<T> has the same semantics as Box<T>
pub struct Unique<T> {
//
// NB: For simplicity and correctness, we require that T has kind Send
// (owned boxes relax this restriction).
pub struct Unique<T: Send> {
// It contains a single raw, mutable pointer to the object in question.
ptr: *mut T
}
// Implement methods for creating and using the values in the box.
// NB: For simplicity and correctness, we require that T has kind Send
// (owned boxes relax this restriction).
impl<T: Send> Unique<T> {
pub fn new(value: T) -> Unique<T> {
unsafe {
@ -239,11 +240,11 @@ impl<T: Send> Unique<T> {
// Unique<T>, making the struct manage the raw pointer: when the
// struct goes out of scope, it will automatically free the raw pointer.
//
// NB: This is an unsafe destructor, because rustc will not normally
// allow destructors to be associated with parameterized types, due to
// bad interaction with managed boxes. (With the Send restriction,
// we don't have this problem.) Note that the `#[unsafe_destructor]`
// feature gate is required to use unsafe destructors.
// NB: This is an unsafe destructor; rustc will not normally allow
// destructors to be associated with parameterized types (due to
// historically failing to check them soundly). Note that the
// `#[unsafe_destructor]` feature gate is currently required to use
// unsafe destructors.
#[unsafe_destructor]
impl<T: Send> Drop for Unique<T> {
fn drop(&mut self) {

----

@ -14,6 +14,7 @@
use super::{CombinedSnapshot, cres, InferCtxt, HigherRankedType, SkolemizationMap};
use super::combine::{Combine, Combineable};
use middle::subst;
use middle::ty::{self, Binder};
use middle::ty_fold::{self, TypeFoldable};
use syntax::codemap::Span;
@ -455,6 +456,63 @@ impl<'a,'tcx> InferCtxtExt for InferCtxt<'a,'tcx> {
}
}
/// Constructs and returns a substitution that, for a given type
/// scheme parameterized by `generics`, will replace every generic
/// parameter in the type with a skolemized type/region (which one can
/// think of as a "fresh constant", except at the type/region level of
/// reasoning).
///
/// Since we currently represent bound/free type parameters in the
/// same way, this only has an effect on regions.
///
/// (Note that unlike a substitution from `ty::construct_free_substs`,
/// this inserts skolemized regions rather than free regions; this
/// allows one to use `fn leak_check` to catch attempts to unify the
/// skolemized regions with e.g. the `'static` lifetime)
pub fn construct_skolemized_substs<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>,
generics: &ty::Generics<'tcx>,
snapshot: &CombinedSnapshot)
-> (subst::Substs<'tcx>, SkolemizationMap)
{
let mut map = FnvHashMap();
// map T => T
let mut types = subst::VecPerParamSpace::empty();
push_types_from_defs(infcx.tcx, &mut types, generics.types.as_slice());
// map early- or late-bound 'a => fresh 'a
let mut regions = subst::VecPerParamSpace::empty();
push_region_params(infcx, &mut map, &mut regions, generics.regions.as_slice(), snapshot);
let substs = subst::Substs { types: types,
regions: subst::NonerasedRegions(regions) };
return (substs, map);
fn push_region_params<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>,
map: &mut SkolemizationMap,
regions: &mut subst::VecPerParamSpace<ty::Region>,
region_params: &[ty::RegionParameterDef],
snapshot: &CombinedSnapshot)
{
for r in region_params {
let br = r.to_bound_region();
let skol_var = infcx.region_vars.new_skolemized(br, &snapshot.region_vars_snapshot);
let sanity_check = map.insert(br, skol_var);
assert!(sanity_check.is_none());
regions.push(r.space, skol_var);
}
}
fn push_types_from_defs<'tcx>(tcx: &ty::ctxt<'tcx>,
types: &mut subst::VecPerParamSpace<ty::Ty<'tcx>>,
defs: &[ty::TypeParameterDef<'tcx>]) {
for def in defs {
let ty = ty::mk_param_from_def(tcx, def);
types.push(def.space, ty);
}
}
}
pub fn skolemize_late_bound_regions<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>,
binder: &ty::Binder<T>,
snapshot: &CombinedSnapshot)

----

@ -726,6 +726,15 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
})
}
pub fn construct_skolemized_subst(&self,
generics: &ty::Generics<'tcx>,
snapshot: &CombinedSnapshot)
-> (subst::Substs<'tcx>, SkolemizationMap) {
/*! See `higher_ranked::construct_skolemized_substs` */
higher_ranked::construct_skolemized_substs(self, generics, snapshot)
}
pub fn skolemize_late_bound_regions<T>(&self,
value: &ty::Binder<T>,
snapshot: &CombinedSnapshot)

----

@ -1793,6 +1793,9 @@ impl RegionParameterDef {
pub fn to_early_bound_region(&self) -> ty::Region {
ty::ReEarlyBound(self.def_id.node, self.space, self.index, self.name)
}
pub fn to_bound_region(&self) -> ty::BoundRegion {
ty::BoundRegion::BrNamed(self.def_id, self.name)
}
}
/// Information about the formal type/lifetime parameters associated

----

@ -12,13 +12,249 @@ use check::regionck::{self, Rcx};
use middle::infer;
use middle::region;
use middle::subst;
use middle::subst::{self, Subst};
use middle::ty::{self, Ty};
use util::ppaux::{Repr, UserString};
use syntax::ast;
use syntax::codemap::Span;
use syntax::codemap::{self, Span};
/// check_drop_impl confirms that the Drop implementation identified by
/// `drop_impl_did` is not any more specialized than the type it is
/// attached to (Issue #8142).
///
/// This means:
///
/// 1. The self type must be nominal (this is already checked during
/// coherence),
///
/// 2. The generic region/type parameters of the impl's self-type must
/// all be parameters of the Drop impl itself (i.e. no
/// specialization like `impl Drop for Foo<i32>`), and,
///
/// 3. Any bounds on the generic parameters must be reflected in the
/// struct/enum definition for the nominal type itself (i.e.
/// cannot do `struct S<T>; impl<T:Clone> Drop for S<T> { ... }`).
///
pub fn check_drop_impl(tcx: &ty::ctxt, drop_impl_did: ast::DefId) -> Result<(), ()> {
let ty::TypeScheme { generics: ref dtor_generics,
ty: ref dtor_self_type } = ty::lookup_item_type(tcx, drop_impl_did);
let dtor_predicates = ty::lookup_predicates(tcx, drop_impl_did);
match dtor_self_type.sty {
ty::ty_enum(self_type_did, self_to_impl_substs) |
ty::ty_struct(self_type_did, self_to_impl_substs) |
ty::ty_closure(self_type_did, self_to_impl_substs) => {
try!(ensure_drop_params_and_item_params_correspond(tcx,
drop_impl_did,
dtor_generics,
dtor_self_type,
self_type_did));
ensure_drop_predicates_are_implied_by_item_defn(tcx,
drop_impl_did,
&dtor_predicates,
self_type_did,
self_to_impl_substs)
}
_ => {
// Destructors only work on nominal types. This was
// already checked by coherence, so we can panic here.
let span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP);
tcx.sess.span_bug(
span, &format!("should have been rejected by coherence check: {}",
dtor_self_type.repr(tcx)));
}
}
}
fn ensure_drop_params_and_item_params_correspond<'tcx>(
tcx: &ty::ctxt<'tcx>,
drop_impl_did: ast::DefId,
drop_impl_generics: &ty::Generics<'tcx>,
drop_impl_ty: &ty::Ty<'tcx>,
self_type_did: ast::DefId) -> Result<(), ()>
{
// New strategy based on review suggestion from nikomatsakis.
//
// (In the text and code below, "named" denotes "struct/enum", and
// "generic params" denotes "type and region params")
//
// 1. Create fresh skolemized type/region "constants" for each of
// the named type's generic params. Instantiate the named type
// with the fresh constants, yielding `named_skolem`.
//
// 2. Create unification variables for each of the Drop impl's
// generic params. Instantiate the impl's Self's type with the
// unification-vars, yielding `drop_unifier`.
//
// 3. Attempt to unify `named_skolem` with `drop_unifier`. If unification
// succeeds, continue (i.e. with the predicate checks).
let ty::TypeScheme { generics: ref named_type_generics,
ty: named_type } =
ty::lookup_item_type(tcx, self_type_did);
let infcx = infer::new_infer_ctxt(tcx);
infcx.try(|snapshot| {
let (named_type_to_skolem, skol_map) =
infcx.construct_skolemized_subst(named_type_generics, snapshot);
let named_type_skolem = named_type.subst(tcx, &named_type_to_skolem);
let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP);
let drop_to_unifier =
infcx.fresh_substs_for_generics(drop_impl_span, drop_impl_generics);
let drop_unifier = drop_impl_ty.subst(tcx, &drop_to_unifier);
if let Ok(()) = infer::mk_eqty(&infcx, true, infer::TypeOrigin::Misc(drop_impl_span),
named_type_skolem, drop_unifier) {
// Even if we did manage to equate the types, the process
// may have just gathered unsolvable region constraints
// like `R == 'static` (represented as a pair of subregion
// constraints) for some skolemization constant R.
//
// However, the leak_check method allows us to confirm
// that no skolemized regions escaped (i.e. were related
// to other regions in the constraint graph).
if let Ok(()) = infcx.leak_check(&skol_map, snapshot) {
return Ok(())
}
}
span_err!(tcx.sess, drop_impl_span, E0366,
"Implementations of Drop cannot be specialized");
let item_span = tcx.map.span(self_type_did.node);
tcx.sess.span_note(item_span,
"Use same sequence of generic type and region \
parameters that is on the struct/enum definition");
return Err(());
})
}
/// Confirms that every predicate imposed by dtor_predicates is
/// implied by assuming the predicates attached to self_type_did.
fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>(
tcx: &ty::ctxt<'tcx>,
drop_impl_did: ast::DefId,
dtor_predicates: &ty::GenericPredicates<'tcx>,
self_type_did: ast::DefId,
self_to_impl_substs: &subst::Substs<'tcx>) -> Result<(), ()> {
// Here is an example, analogous to that from
// `compare_impl_method`.
//
// Consider a struct type:
//
// struct Type<'c, 'b:'c, 'a> {
// x: &'a Contents // (contents are irrelevant;
// y: &'c Cell<&'b Contents>, // only the bounds matter for our purposes.)
// }
//
// and a Drop impl:
//
// impl<'z, 'y:'z, 'x:'y> Drop for Type<'z, 'y, 'x> {
// fn drop(&mut self) { self.y.set(self.x); } // (only legal if 'x: 'y)
// }
//
// We start out with self_to_impl_substs, that maps the generic
// parameters of Type to that of the Drop impl.
//
// self_to_impl_substs = {'c => 'z, 'b => 'y, 'a => 'x}
//
// Applying this to the predicates (i.e. assumptions) provided by the item
// definition yields the instantiated assumptions:
//
// ['y : 'z]
//
// We then check all of the predicates of the Drop impl:
//
// ['y:'z, 'x:'y]
//
// and ensure each is in the list of instantiated
// assumptions. Here, `'y:'z` is present, but `'x:'y` is
// absent. So we report an error that the Drop impl injected a
// predicate that is not present on the struct definition.
assert_eq!(self_type_did.krate, ast::LOCAL_CRATE);
let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP);
// We can assume the predicates attached to struct/enum definition
// hold.
let generic_assumptions = ty::lookup_predicates(tcx, self_type_did);
let assumptions_in_impl_context = generic_assumptions.instantiate(tcx, &self_to_impl_substs);
assert!(assumptions_in_impl_context.predicates.is_empty_in(subst::SelfSpace));
assert!(assumptions_in_impl_context.predicates.is_empty_in(subst::FnSpace));
let assumptions_in_impl_context =
assumptions_in_impl_context.predicates.get_slice(subst::TypeSpace);
// An earlier version of this code attempted to do this checking
// via the traits::fulfill machinery. However, it ran into trouble
// since the fulfill machinery merely turns outlives-predicates
// 'a:'b and T:'b into region inference constraints. It is simpler
// just to look for all the predicates directly.
assert!(dtor_predicates.predicates.is_empty_in(subst::SelfSpace));
assert!(dtor_predicates.predicates.is_empty_in(subst::FnSpace));
let predicates = dtor_predicates.predicates.get_slice(subst::TypeSpace);
for predicate in predicates {
// (We do not need to worry about deep analysis of type
// expressions etc because the Drop impls are already forced
// to take on a structure that is roughly an alpha-renaming of
// the generic parameters of the item definition.)
// This path now just checks *all* predicates via the direct
// lookup, rather than using fulfill machinery.
//
// However, it may be more efficient in the future to batch
// the analysis together via the fulfill machinery, rather than the
// repeated `contains` calls.
if !assumptions_in_impl_context.contains(&predicate) {
let item_span = tcx.map.span(self_type_did.node);
let req = predicate.user_string(tcx);
span_err!(tcx.sess, drop_impl_span, E0367,
"The requirement `{}` is added only by the Drop impl.", req);
tcx.sess.span_note(item_span,
"The same requirement must be part of \
the struct/enum definition");
}
}
if tcx.sess.has_errors() {
return Err(());
}
Ok(())
}
/// check_safety_of_destructor_if_necessary confirms that the type
/// expression `typ` conforms to the "Drop Check Rule" from the Sound
/// Generic Drop (RFC 769).
///
/// ----
///
/// The Drop Check Rule is the following:
///
/// Let `v` be some value (either temporary or named) and 'a be some
/// lifetime (scope). If the type of `v` owns data of type `D`, where
///
/// (1.) `D` has a lifetime- or type-parametric Drop implementation, and
/// (2.) the structure of `D` can reach a reference of type `&'a _`, and
/// (3.) either:
///
/// (A.) the Drop impl for `D` instantiates `D` at 'a directly,
/// i.e. `D<'a>`, or,
///
/// (B.) the Drop impl for `D` has some type parameter with a
/// trait bound `T` where `T` is a trait that has at least
/// one method,
///
/// then 'a must strictly outlive the scope of v.
///
/// ----
///
/// This function is meant to be applied to the type for every
/// expression in the program.
pub fn check_safety_of_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>,
typ: ty::Ty<'tcx>,
span: Span,

----

@ -478,6 +478,20 @@ pub fn check_item_types(ccx: &CrateCtxt) {
visit::walk_crate(&mut visit, krate);
ccx.tcx.sess.abort_if_errors();
for drop_method_did in ccx.tcx.destructors.borrow().iter() {
if drop_method_did.krate == ast::LOCAL_CRATE {
let drop_impl_did = ccx.tcx.map.get_parent_did(drop_method_did.node);
match dropck::check_drop_impl(ccx.tcx, drop_impl_did) {
Ok(()) => {}
Err(()) => {
assert!(ccx.tcx.sess.has_errors());
}
}
}
}
ccx.tcx.sess.abort_if_errors();
}
fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,

----

@ -177,7 +177,9 @@ register_diagnostics! {
E0319, // trait impls for defaulted traits allowed just for structs/enums
E0320, // recursive overflow during dropck
E0321, // extended coherence rules for defaulted traits violated
E0322 // cannot implement Sized explicitly
E0322, // cannot implement Sized explicitly
E0366, // dropck forbids specialization to concrete type or region
E0367 // dropck forbids specialization to predicate not in struct/enum
}
__build_diagnostic_array! { DIAGNOSTICS }

----

@ -366,7 +366,7 @@ mod test {
use sync::{Arc, Mutex, StaticMutex, MUTEX_INIT, Condvar};
use thread;
struct Packet<T>(Arc<(Mutex<T>, Condvar)>);
struct Packet<T: Send>(Arc<(Mutex<T>, Condvar)>);
unsafe impl<T: Send> Send for Packet<T> {}
unsafe impl<T> Sync for Packet<T> {}

----

@ -15,7 +15,7 @@
use std::marker;
struct arc_destruct<T> {
struct arc_destruct<T: Sync> {
_data: int,
_marker: marker::PhantomData<T>
}

----

@ -25,7 +25,7 @@ impl Bar for BarImpl {
}
struct Foo<B>(B);
struct Foo<B: Bar>(B);
#[unsafe_destructor]
impl<B: Bar> Drop for Foo<B> {

----

@ -18,7 +18,7 @@ use std::fmt;
use serialize::{Encoder, Encodable};
use serialize::json;
struct Foo<T> {
struct Foo<T: Encodable> {
v: T,
}

----

@ -162,7 +162,7 @@ pub mod pipes {
}
}
pub struct send_packet<T> {
pub struct send_packet<T:Send> {
p: Option<*const packet<T>>,
}
@ -192,7 +192,7 @@ pub mod pipes {
}
}
pub struct recv_packet<T> {
pub struct recv_packet<T:Send> {
p: Option<*const packet<T>>,
}

----

@ -21,7 +21,7 @@ trait X {
struct Y(int);
#[derive(Debug)]
struct Z<T> {
struct Z<T: X+std::fmt::Debug> {
x: T
}