move impl wf checks so they occur earlier
Needed to keep coherence from freaking out.
parent 34c361cfb2
commit c17be9ea11

@@ -1,46 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::hir;
-use rustc_data_structures::fx::FxHashMap;
-use std::collections::hash_map::Entry::{Occupied, Vacant};
-
-use CrateCtxt;
-
-/// Enforce that we do not have two items in an impl with the same name.
-pub fn enforce_impl_items_are_distinct<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
-                                                 impl_item_refs: &[hir::ImplItemRef])
-{
-    let tcx = ccx.tcx;
-    let mut seen_type_items = FxHashMap();
-    let mut seen_value_items = FxHashMap();
-    for &impl_item_ref in impl_item_refs {
-        let impl_item = tcx.map.impl_item(impl_item_ref.id);
-        let seen_items = match impl_item.node {
-            hir::ImplItemKind::Type(_) => &mut seen_type_items,
-            _ => &mut seen_value_items,
-        };
-        match seen_items.entry(impl_item.name) {
-            Occupied(entry) => {
-                let mut err = struct_span_err!(tcx.sess, impl_item.span, E0201,
-                                               "duplicate definitions with name `{}`:",
-                                               impl_item.name);
-                err.span_label(*entry.get(),
-                               &format!("previous definition of `{}` here",
-                                        impl_item.name));
-                err.span_label(impl_item.span, &format!("duplicate definition"));
-                err.emit();
-            }
-            Vacant(entry) => {
-                entry.insert(impl_item.span);
-            }
-        }
-    }
-}
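
For context, a minimal illustration (with invented names) of the situation this deleted helper rejects; the same check is re-added as a private function in the new impl_wf_check module further down:

    struct Counter;

    impl Counter {
        fn get(&self) -> u32 { 0 }
        // A second item with the same name in the same impl triggers
        // error E0201: duplicate definitions with name `get`.
        fn get(&self) -> u32 { 1 }
    }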

@@ -143,8 +143,6 @@
 mod callee;
 mod compare_method;
 mod intrinsic;
-mod impl_item_duplicate;
-mod impl_parameters_used;
 mod op;
 
 /// closures defined within the function. For example:

@@ -817,7 +815,7 @@ pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
                               it.id);
      }
      hir::ItemFn(..) => {} // entirely within check_item_body
-     hir::ItemImpl(_, _, ref hir_generics, _, _, ref impl_item_refs) => {
+     hir::ItemImpl(.., ref impl_item_refs) => {
        debug!("ItemImpl {} with id {}", it.name, it.id);
        let impl_def_id = ccx.tcx.map.local_def_id(it.id);
        if let Some(impl_trait_ref) = ccx.tcx.impl_trait_ref(impl_def_id) {
@@ -829,14 +827,6 @@
            let trait_def_id = impl_trait_ref.def_id;
            check_on_unimplemented(ccx, trait_def_id, it);
        }
-
-       impl_parameters_used::enforce_impl_params_are_constrained(ccx,
-                                                                 hir_generics,
-                                                                 impl_def_id,
-                                                                 impl_item_refs);
-
-       impl_item_duplicate::enforce_impl_items_are_distinct(ccx,
-                                                            impl_item_refs);
      }
      hir::ItemTrait(..) => {
        let def_id = ccx.tcx.map.local_def_id(it.id);

@@ -8,11 +8,24 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+//! This pass enforces various "well-formedness constraints" on impls.
+//! Logically, it is part of wfcheck -- but we do it early so that we
+//! can stop compilation afterwards, since part of the trait matching
+//! infrastructure gets very grumpy if these conditions don't hold. In
+//! particular, if there are type parameters that are not part of the
+//! impl, then coherence will report strange inference ambiguity
+//! errors; if impls have duplicate items, we get misleading
+//! specialization errors. These things can (and probably should) be
+//! fixed, but for the moment it's easier to do these checks early.
+
 use constrained_type_params as ctp;
+use rustc::dep_graph::DepNode;
 use rustc::hir;
+use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::hir::def_id::DefId;
 use rustc::ty;
-use rustc::util::nodemap::FxHashSet;
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
+use std::collections::hash_map::Entry::{Occupied, Vacant};
 
 use syntax_pos::Span;
 
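
To make the first failure mode from the doc comment concrete, here is a small illustrative impl (names invented) whose type parameter is not constrained by the trait reference, the self type, or any predicate; this is the case enforce_impl_params_are_constrained below reports, before coherence ever runs:

    trait Maker {
        fn make(&self) -> u32;
    }

    struct Widget;

    // `T` does not appear in `Maker`, in `Widget`, or in any where-clause,
    // so no caller could ever determine it; the impl wf check flags `T`
    // as an unconstrained type parameter.
    impl<T> Maker for Widget {
        fn make(&self) -> u32 { 0 }
    }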

@@ -48,22 +61,52 @@
 /// impl<'a> Trait<Foo> for Bar { type X = &'a i32; }
 ///      ^ 'a is unused and appears in assoc type, error
 /// ```
-pub fn enforce_impl_params_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
-                                                     impl_hir_generics: &hir::Generics,
-                                                     impl_def_id: DefId,
-                                                     impl_item_refs: &[hir::ImplItemRef])
+pub fn impl_wf_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>) {
+    // We will tag this as part of the WF check -- logically, it is,
+    // but it's one that we must perform earlier than the rest of
+    // WfCheck.
+    ccx.tcx.visit_all_item_likes_in_krate(DepNode::WfCheck, &mut ImplWfCheck { ccx: ccx });
+}
+
+struct ImplWfCheck<'a, 'tcx: 'a> {
+    ccx: &'a CrateCtxt<'a, 'tcx>,
+}
+
+impl<'a, 'tcx> ItemLikeVisitor<'tcx> for ImplWfCheck<'a, 'tcx> {
+    fn visit_item(&mut self, item: &'tcx hir::Item) {
+        match item.node {
+            hir::ItemImpl(.., ref generics, _, _, ref impl_item_refs) => {
+                let impl_def_id = self.ccx.tcx.map.local_def_id(item.id);
+                enforce_impl_params_are_constrained(self.ccx,
+                                                    generics,
+                                                    impl_def_id,
+                                                    impl_item_refs);
+                enforce_impl_items_are_distinct(self.ccx, impl_item_refs);
+            }
+            _ => { }
+        }
+    }
+
+    fn visit_impl_item(&mut self, _impl_item: &'tcx hir::ImplItem) { }
+}
+
+fn enforce_impl_params_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
+                                                 impl_hir_generics: &hir::Generics,
+                                                 impl_def_id: DefId,
+                                                 impl_item_refs: &[hir::ImplItemRef])
 {
     // Every lifetime used in an associated type must be constrained.
-    let impl_scheme = ccx.tcx.lookup_item_type(impl_def_id);
-    let impl_predicates = ccx.tcx.lookup_predicates(impl_def_id);
+    let impl_self_ty = ccx.tcx.item_type(impl_def_id);
+    let impl_generics = ccx.tcx.item_generics(impl_def_id);
+    let impl_predicates = ccx.tcx.item_predicates(impl_def_id);
     let impl_trait_ref = ccx.tcx.impl_trait_ref(impl_def_id);
 
-    let mut input_parameters = ctp::parameters_for_impl(impl_scheme.ty, impl_trait_ref);
+    let mut input_parameters = ctp::parameters_for_impl(impl_self_ty, impl_trait_ref);
     ctp::identify_constrained_type_params(
         &impl_predicates.predicates.as_slice(), impl_trait_ref, &mut input_parameters);
 
     // Disallow ANY unconstrained type parameters.
-    for (ty_param, param) in impl_scheme.generics.types.iter().zip(&impl_hir_generics.ty_params) {
+    for (ty_param, param) in impl_generics.types.iter().zip(&impl_hir_generics.ty_params) {
         let param_ty = ty::ParamTy::for_def(ty_param);
         if !input_parameters.contains(&ctp::Parameter::from(param_ty)) {
             report_unused_parameter(ccx, param.span, "type", &param_ty.to_string());
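
By contrast, a parameter that identify_constrained_type_params can reach through a projection in a where-clause counts as constrained. A hedged sketch (invented trait and names) of an impl that passes this check:

    trait Joiner {
        fn join(&self) -> String;
    }

    // `U` is absent from the trait reference and the self type, but the
    // `Item = U` binding ties it to `V`, so it is considered constrained
    // and the impl is accepted.
    impl<V, U> Joiner for V
        where V: Iterator<Item = U>
    {
        fn join(&self) -> String { String::new() }
    }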

@@ -78,9 +121,9 @@ pub fn enforce_impl_params_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
             item.kind == ty::AssociatedKind::Type && item.has_value
         })
         .flat_map(|def_id| {
-            ctp::parameters_for(&ccx.tcx.lookup_item_type(def_id).ty, true)
+            ctp::parameters_for(&ccx.tcx.item_type(def_id), true)
         }).collect();
-    for (ty_lifetime, lifetime) in impl_scheme.generics.regions.iter()
+    for (ty_lifetime, lifetime) in impl_generics.regions.iter()
         .zip(&impl_hir_generics.lifetimes)
     {
         let param = ctp::Parameter::from(ty_lifetime.to_early_bound_region_data());

@@ -127,3 +170,34 @@ impl trait, self type, or predicates",
         .span_label(span, &format!("unconstrained {} parameter", kind))
         .emit();
 }
+
+/// Enforce that we do not have two items in an impl with the same name.
+fn enforce_impl_items_are_distinct<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
+                                             impl_item_refs: &[hir::ImplItemRef])
+{
+    let tcx = ccx.tcx;
+    let mut seen_type_items = FxHashMap();
+    let mut seen_value_items = FxHashMap();
+    for impl_item_ref in impl_item_refs {
+        let impl_item = tcx.map.impl_item(impl_item_ref.id);
+        let seen_items = match impl_item.node {
+            hir::ImplItemKind::Type(_) => &mut seen_type_items,
+            _ => &mut seen_value_items,
+        };
+        match seen_items.entry(impl_item.name) {
+            Occupied(entry) => {
+                let mut err = struct_span_err!(tcx.sess, impl_item.span, E0201,
+                                               "duplicate definitions with name `{}`:",
+                                               impl_item.name);
+                err.span_label(*entry.get(),
+                               &format!("previous definition of `{}` here",
+                                        impl_item.name));
+                err.span_label(impl_item.span, &format!("duplicate definition"));
+                err.emit();
+            }
+            Vacant(entry) => {
+                entry.insert(impl_item.span);
+            }
+        }
+    }
+}
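
For the lifetime case shown in the doc comment above (`impl<'a> Trait<Foo> for Bar { type X = &'a i32; }`), the usual fix is to make `'a` appear in the self type or trait arguments; a hedged sketch with invented types:

    trait Trait<A> {
        type X;
    }

    struct Foo;

    // `'a` now occurs in the self type `&'a Foo`, so it is constrained
    // and may be used freely in the associated type.
    impl<'a> Trait<Foo> for &'a Foo {
        type X = &'a i32;
    }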

@@ -131,6 +131,7 @@
 mod astconv;
 pub mod collect;
 mod constrained_type_params;
+mod impl_wf_check;
 pub mod coherence;
 pub mod variance;
 
@@ -334,6 +335,11 @@ pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
     time(time_passes, "variance inference", ||
          variance::infer_variance(tcx));
 
+    tcx.sess.track_errors(|| {
+        time(time_passes, "impl wf inference", ||
+             impl_wf_check::impl_wf_check(&ccx));
+    })?;
+
     tcx.sess.track_errors(|| {
         time(time_passes, "coherence checking", ||
              coherence::check_coherence(&ccx));
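
The two track_errors blocks above are what enforce the new ordering: if the impl wf pass reports anything, the `?` returns early and coherence never runs. A toy model of that staging follows (this is not rustc's real Session API, only an illustration of the control flow; all names are invented):

    struct Session { err_count: usize }
    struct ErrorReported;

    impl Session {
        // Run one phase and fail fast if it reported any new errors.
        fn track_errors<T, F>(&mut self, f: F) -> Result<T, ErrorReported>
            where F: FnOnce(&mut Session) -> T
        {
            let before = self.err_count;
            let result = f(&mut *self);
            if self.err_count > before { Err(ErrorReported) } else { Ok(result) }
        }

        fn span_err(&mut self, msg: &str) {
            self.err_count += 1;
            eprintln!("error: {}", msg);
        }
    }

    fn check_crate(sess: &mut Session) -> Result<(), ErrorReported> {
        // The impl wf stage runs first; an error here aborts ...
        sess.track_errors(|s| s.span_err("unconstrained type parameter `T`"))?;
        // ... so this coherence stage only ever sees impls that passed it.
        sess.track_errors(|_s| ())?;
        Ok(())
    }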

@@ -15,7 +15,6 @@ struct foo {
 
     impl<T> Drop for foo<T> {
     //~^ ERROR wrong number of type arguments
-    //~^^ ERROR the type parameter `T` is not constrained
        fn drop(&mut self) {}
    }
 }