Merge github.com:rust-lang/rust

Alan Somers 2016-04-06 02:22:18 +00:00
commit 1e9ffb8991
72 changed files with 1692 additions and 935 deletions

View File

@ -157,6 +157,8 @@ else ifeq ($(findstring android, $(OSTYPE_$(1))), android)
# If the test suite passes, however, without symbol prefixes then we should be
# good to go!
JEMALLOC_ARGS_$(1) := --disable-tls --with-jemalloc-prefix=je_
else ifeq ($(findstring dragonfly, $(OSTYPE_$(1))), dragonfly)
JEMALLOC_ARGS_$(1) := --with-jemalloc-prefix=je_
endif
ifdef CFG_ENABLE_DEBUG_JEMALLOC

View File

@ -86,6 +86,8 @@ fn main() {
// should be good to go!
cmd.arg("--with-jemalloc-prefix=je_");
cmd.arg("--disable-tls");
} else if target.contains("dragonfly") {
cmd.arg("--with-jemalloc-prefix=je_");
}
if cfg!(feature = "debug-jemalloc") {

View File

@ -42,22 +42,27 @@ use libc::{c_int, c_void, size_t};
extern {}
// Note that the symbols here are prefixed by default on OSX (we don't
// explicitly request it), and on Android we explicitly request it as
// unprefixing causes segfaults (mismatches in allocators).
// explicitly request it), and on Android and DragonFly we explicitly request
// it as unprefixing causes segfaults (mismatches in allocators).
extern {
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios"),
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly"),
link_name = "je_mallocx")]
fn mallocx(size: size_t, flags: c_int) -> *mut c_void;
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios"),
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly"),
link_name = "je_rallocx")]
fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios"),
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly"),
link_name = "je_xallocx")]
fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios"),
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly"),
link_name = "je_sdallocx")]
fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios"),
#[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios",
target_os = "dragonfly"),
link_name = "je_nallocx")]
fn nallocx(size: size_t, flags: c_int) -> size_t;
}
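The attribute at work above is #[cfg_attr(..., link_name = "...")]: the Rust-side function name stays fixed while the linked C symbol is selected per target, which is how the je_-prefixed jemalloc symbols are picked up on macOS, iOS, Android, and now DragonFly. A minimal, self-contained illustration of link_name alone, using libc's puts as a stand-in symbol (nothing here is jemalloc-specific):

use std::os::raw::{c_char, c_int};

extern "C" {
    // The Rust-side name is `c_puts`, but the object-file symbol we link
    // against is `puts`. The bindings above do the same thing, except the
    // attribute is wrapped in `cfg_attr` so the renamed (`je_`-prefixed)
    // symbols are only requested on the targets listed there.
    #[link_name = "puts"]
    fn c_puts(s: *const c_char) -> c_int;
}

fn main() {
    unsafe {
        c_puts(b"hello via a renamed symbol\0".as_ptr() as *const c_char);
    }
}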

@ -1 +1 @@
Subproject commit fb2f0bbdab5aa79d684d1e15ccd755f8d37bc07e
Subproject commit 7265c17d1845354f979a39b4ceb3a6934025b2ab

View File

@ -37,7 +37,7 @@ use super::equate::Equate;
use super::glb::Glb;
use super::lub::Lub;
use super::sub::Sub;
use super::{InferCtxt};
use super::InferCtxt;
use super::{MiscVariable, TypeTrace};
use super::type_variable::{RelationDir, BiTo, EqTo, SubtypeOf, SupertypeOf};
@ -46,6 +46,7 @@ use ty::{self, Ty, TyCtxt};
use ty::error::TypeError;
use ty::fold::{TypeFolder, TypeFoldable};
use ty::relate::{Relate, RelateResult, TypeRelation};
use traits::PredicateObligations;
use syntax::ast;
use syntax::codemap::Span;
@ -56,6 +57,7 @@ pub struct CombineFields<'a, 'tcx: 'a> {
pub a_is_expected: bool,
pub trace: TypeTrace<'tcx>,
pub cause: Option<ty::relate::Cause>,
pub obligations: PredicateObligations<'tcx>,
}
pub fn super_combine_tys<'a,'tcx:'a,R>(infcx: &InferCtxt<'a, 'tcx>,
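The new obligations field on CombineFields is the accumulator that the relation types (Equate, Sub, Lub, Glb in the files below) hand back to the caller through their obligations() methods. A simplified sketch of that ownership flow, with stand-in types rather than the compiler's real ones:

// Stand-ins for PredicateObligations<'tcx> and the relation structs.
#[allow(dead_code)] // the payload is never read in this toy example
struct Obligation(&'static str);

struct Fields {
    obligations: Vec<Obligation>,
}

struct EquateLike {
    fields: Fields,
}

impl EquateLike {
    // Relating two types may produce side conditions; they are pushed onto
    // the buffer owned by the shared `fields`.
    fn relate(&mut self) {
        self.fields.obligations.push(Obligation("T: SomeTrait"));
    }

    // Consuming `self` releases the accumulated obligations to the caller,
    // mirroring the `obligations(self)` methods added below.
    fn obligations(self) -> Vec<Obligation> {
        self.fields.obligations
    }
}

fn main() {
    let mut eq = EquateLike { fields: Fields { obligations: Vec::new() } };
    eq.relate();
    assert_eq!(eq.obligations().len(), 1);
}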

View File

@ -16,6 +16,7 @@ use super::type_variable::{EqTo};
use ty::{self, Ty, TyCtxt};
use ty::TyVar;
use ty::relate::{Relate, RelateResult, TypeRelation};
use traits::PredicateObligations;
/// Ensures `a` is made equal to `b`. Returns `a` on success.
pub struct Equate<'a, 'tcx: 'a> {
@ -26,6 +27,10 @@ impl<'a, 'tcx> Equate<'a, 'tcx> {
pub fn new(fields: CombineFields<'a, 'tcx>) -> Equate<'a, 'tcx> {
Equate { fields: fields }
}
pub fn obligations(self) -> PredicateObligations<'tcx> {
self.fields.obligations
}
}
impl<'a, 'tcx> TypeRelation<'a,'tcx> for Equate<'a, 'tcx> {

View File

@ -16,6 +16,7 @@ use super::Subtype;
use ty::{self, Ty, TyCtxt};
use ty::relate::{Relate, RelateResult, TypeRelation};
use traits::PredicateObligations;
/// "Greatest lower bound" (common subtype)
pub struct Glb<'a, 'tcx: 'a> {
@ -26,6 +27,10 @@ impl<'a, 'tcx> Glb<'a, 'tcx> {
pub fn new(fields: CombineFields<'a, 'tcx>) -> Glb<'a, 'tcx> {
Glb { fields: fields }
}
pub fn obligations(self) -> PredicateObligations<'tcx> {
self.fields.obligations
}
}
impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Glb<'a, 'tcx> {

View File

@ -16,6 +16,7 @@ use super::Subtype;
use ty::{self, Ty, TyCtxt};
use ty::relate::{Relate, RelateResult, TypeRelation};
use traits::PredicateObligations;
/// "Least upper bound" (common supertype)
pub struct Lub<'a, 'tcx: 'a> {
@ -26,6 +27,10 @@ impl<'a, 'tcx> Lub<'a, 'tcx> {
pub fn new(fields: CombineFields<'a, 'tcx>) -> Lub<'a, 'tcx> {
Lub { fields: fields }
}
pub fn obligations(self) -> PredicateObligations<'tcx> {
self.fields.obligations
}
}
impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Lub<'a, 'tcx> {

View File

@ -27,13 +27,13 @@ use middle::region::CodeExtent;
use ty::subst;
use ty::subst::Substs;
use ty::subst::Subst;
use traits::{self, ProjectionMode};
use ty::adjustment;
use ty::{TyVid, IntVid, FloatVid};
use ty::{self, Ty, TyCtxt};
use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
use ty::fold::{TypeFolder, TypeFoldable};
use ty::relate::{Relate, RelateResult, TypeRelation};
use traits::{self, PredicateObligations, ProjectionMode};
use rustc_data_structures::unify::{self, UnificationTable};
use std::cell::{RefCell, Ref};
use std::fmt;
@ -63,6 +63,12 @@ pub mod sub;
pub mod type_variable;
pub mod unify_key;
pub struct InferOk<'tcx, T> {
pub value: T,
pub obligations: PredicateObligations<'tcx>,
}
pub type InferResult<'tcx, T> = Result<InferOk<'tcx, T>, TypeError<'tcx>>;
pub type Bound<T> = Option<T>;
pub type UnitResult<'tcx> = RelateResult<'tcx, ()>; // "unify result"
pub type FixupResult<T> = Result<T, FixupError>; // "fixup result"
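InferOk/InferResult replace UnitResult as the return type of the unification entry points below, so every caller now receives the obligations produced during unification alongside the value. A minimal sketch of the shape callers have to handle, with stub types rather than the real signatures:

// Stubs standing in for PredicateObligations<'tcx> and TypeError<'tcx>.
#[allow(dead_code)] // `value` is never read in this toy example
struct InferOk<T> {
    value: T,
    obligations: Vec<String>,
}
type InferResult<T> = Result<InferOk<T>, String>;

fn mk_eqty_like(a: u32, b: u32) -> InferResult<()> {
    if a == b {
        Ok(InferOk { value: (), obligations: Vec::new() })
    } else {
        Err(format!("expected {}, found {}", a, b))
    }
}

fn caller() -> Result<(), String> {
    // Until #32730 lands, the callers touched by this commit assert that the
    // returned obligation list is empty rather than propagating it.
    let InferOk { obligations, .. } = mk_eqty_like(1, 1)?;
    assert!(obligations.is_empty());
    Ok(())
}

fn main() {
    caller().unwrap();
}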
@ -391,16 +397,15 @@ pub fn mk_subty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
origin: TypeOrigin,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("mk_subty({:?} <: {:?})", a, b);
cx.sub_types(a_is_expected, origin, a, b)
}
pub fn can_mk_subty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> UnitResult<'tcx> {
pub fn can_mk_subty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, a: Ty<'tcx>, b: Ty<'tcx>)
-> UnitResult<'tcx>
{
debug!("can_mk_subty({:?} <: {:?})", a, b);
cx.probe(|_| {
let trace = TypeTrace {
@ -412,7 +417,7 @@ pub fn can_mk_subty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
}
pub fn can_mk_eqty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, a: Ty<'tcx>, b: Ty<'tcx>)
-> UnitResult<'tcx>
-> UnitResult<'tcx>
{
cx.can_equate(&a, &b)
}
@ -432,7 +437,7 @@ pub fn mk_eqty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
origin: TypeOrigin,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("mk_eqty({:?} <: {:?})", a, b);
cx.eq_types(a_is_expected, origin, a, b)
@ -443,7 +448,7 @@ pub fn mk_eq_trait_refs<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
origin: TypeOrigin,
a: ty::TraitRef<'tcx>,
b: ty::TraitRef<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("mk_eq_trait_refs({:?} = {:?})", a, b);
cx.eq_trait_refs(a_is_expected, origin, a, b)
@ -454,7 +459,7 @@ pub fn mk_sub_poly_trait_refs<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
origin: TypeOrigin,
a: ty::PolyTraitRef<'tcx>,
b: ty::PolyTraitRef<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("mk_sub_poly_trait_refs({:?} <: {:?})", a, b);
cx.sub_poly_trait_refs(a_is_expected, origin, a, b)
@ -465,7 +470,7 @@ pub fn mk_eq_impl_headers<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>,
origin: TypeOrigin,
a: &ty::ImplHeader<'tcx>,
b: &ty::ImplHeader<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("mk_eq_impl_header({:?} = {:?})", a, b);
match (a.trait_ref, b.trait_ref) {
@ -574,6 +579,12 @@ pub fn drain_fulfillment_cx<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>,
Ok(infcx.tcx.erase_regions(&result))
}
impl<'tcx, T> InferOk<'tcx, T> {
fn unit(self) -> InferOk<'tcx, ()> {
InferOk { value: (), obligations: self.obligations }
}
}
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn projection_mode(&self) -> ProjectionMode {
self.projection_mode
@ -661,39 +672,51 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
}
fn combine_fields(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>)
-> CombineFields<'a, 'tcx> {
CombineFields {infcx: self,
a_is_expected: a_is_expected,
trace: trace,
cause: None}
-> CombineFields<'a, 'tcx>
{
CombineFields {
infcx: self,
a_is_expected: a_is_expected,
trace: trace,
cause: None,
obligations: PredicateObligations::new(),
}
}
pub fn equate<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
-> RelateResult<'tcx, T>
-> InferResult<'tcx, T>
where T: Relate<'a, 'tcx>
{
self.combine_fields(a_is_expected, trace).equate().relate(a, b)
let mut equate = self.combine_fields(a_is_expected, trace).equate();
let result = equate.relate(a, b);
result.map(|t| InferOk { value: t, obligations: equate.obligations() })
}
pub fn sub<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
-> RelateResult<'tcx, T>
-> InferResult<'tcx, T>
where T: Relate<'a, 'tcx>
{
self.combine_fields(a_is_expected, trace).sub().relate(a, b)
let mut sub = self.combine_fields(a_is_expected, trace).sub();
let result = sub.relate(a, b);
result.map(|t| InferOk { value: t, obligations: sub.obligations() })
}
pub fn lub<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
-> RelateResult<'tcx, T>
-> InferResult<'tcx, T>
where T: Relate<'a, 'tcx>
{
self.combine_fields(a_is_expected, trace).lub().relate(a, b)
let mut lub = self.combine_fields(a_is_expected, trace).lub();
let result = lub.relate(a, b);
result.map(|t| InferOk { value: t, obligations: lub.obligations() })
}
pub fn glb<T>(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T)
-> RelateResult<'tcx, T>
-> InferResult<'tcx, T>
where T: Relate<'a, 'tcx>
{
self.combine_fields(a_is_expected, trace).glb().relate(a, b)
let mut glb = self.combine_fields(a_is_expected, trace).glb();
let result = glb.relate(a, b);
result.map(|t| InferOk { value: t, obligations: glb.obligations() })
}
fn start_snapshot(&self) -> CombinedSnapshot {
@ -829,12 +852,12 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
origin: TypeOrigin,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("sub_types({:?} <: {:?})", a, b);
self.commit_if_ok(|_| {
let trace = TypeTrace::types(origin, a_is_expected, a, b);
self.sub(a_is_expected, trace, &a, &b).map(|_| ())
self.sub(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
})
}
@ -843,11 +866,11 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
origin: TypeOrigin,
a: Ty<'tcx>,
b: Ty<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
self.commit_if_ok(|_| {
let trace = TypeTrace::types(origin, a_is_expected, a, b);
self.equate(a_is_expected, trace, &a, &b).map(|_| ())
self.equate(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
})
}
@ -856,7 +879,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
origin: TypeOrigin,
a: ty::TraitRef<'tcx>,
b: ty::TraitRef<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("eq_trait_refs({:?} <: {:?})",
a,
@ -866,7 +889,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
origin: origin,
values: TraitRefs(expected_found(a_is_expected, a.clone(), b.clone()))
};
self.equate(a_is_expected, trace, &a, &b).map(|_| ())
self.equate(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
})
}
@ -875,7 +898,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
origin: TypeOrigin,
a: ty::PolyTraitRef<'tcx>,
b: ty::PolyTraitRef<'tcx>)
-> UnitResult<'tcx>
-> InferResult<'tcx, ()>
{
debug!("sub_poly_trait_refs({:?} <: {:?})",
a,
@ -885,7 +908,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
origin: origin,
values: PolyTraitRefs(expected_found(a_is_expected, a.clone(), b.clone()))
};
self.sub(a_is_expected, trace, &a, &b).map(|_| ())
self.sub(a_is_expected, trace, &a, &b).map(|ok| ok.unit())
})
}
@ -928,20 +951,22 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
pub fn equality_predicate(&self,
span: Span,
predicate: &ty::PolyEquatePredicate<'tcx>)
-> UnitResult<'tcx> {
-> InferResult<'tcx, ()>
{
self.commit_if_ok(|snapshot| {
let (ty::EquatePredicate(a, b), skol_map) =
self.skolemize_late_bound_regions(predicate, snapshot);
let origin = TypeOrigin::EquatePredicate(span);
let () = mk_eqty(self, false, origin, a, b)?;
self.leak_check(&skol_map, snapshot)
let eqty_ok = mk_eqty(self, false, origin, a, b)?;
self.leak_check(&skol_map, snapshot).map(|_| eqty_ok.unit())
})
}
pub fn region_outlives_predicate(&self,
span: Span,
predicate: &ty::PolyRegionOutlivesPredicate)
-> UnitResult<'tcx> {
-> UnitResult<'tcx>
{
self.commit_if_ok(|snapshot| {
let (ty::OutlivesPredicate(r_a, r_b), skol_map) =
self.skolemize_late_bound_regions(predicate, snapshot);

View File

@ -16,6 +16,7 @@ use super::type_variable::{SubtypeOf, SupertypeOf};
use ty::{self, Ty, TyCtxt};
use ty::TyVar;
use ty::relate::{Cause, Relate, RelateResult, TypeRelation};
use traits::PredicateObligations;
use std::mem;
/// Ensures `a` is made a subtype of `b`. Returns `a` on success.
@ -27,6 +28,10 @@ impl<'a, 'tcx> Sub<'a, 'tcx> {
pub fn new(f: CombineFields<'a, 'tcx>) -> Sub<'a, 'tcx> {
Sub { fields: f }
}
pub fn obligations(self) -> PredicateObligations<'tcx> {
self.fields.obligations
}
}
impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Sub<'a, 'tcx> {

View File

@ -179,6 +179,12 @@ declare_lint! {
"lints that have been renamed or removed"
}
declare_lint! {
pub SUPER_OR_SELF_IN_GLOBAL_PATH,
Warn,
"detects super or self keywords at the beginning of global path"
}
/// Does nothing as a lint pass, but registers some `Lint`s
/// which are used by other parts of the compiler.
#[derive(Copy, Clone)]
@ -213,7 +219,8 @@ impl LintPass for HardwiredLints {
RAW_POINTER_DERIVE,
TRANSMUTE_FROM_FN_ITEM_TYPES,
OVERLAPPING_INHERENT_IMPLS,
RENAMED_AND_REMOVED_LINTS
RENAMED_AND_REMOVED_LINTS,
SUPER_OR_SELF_IN_GLOBAL_PATH
)
}
}
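For reference, the paths this lint matches are use paths written as global (leading ::) but starting with the self or super keyword. Illustrative snippets with hypothetical module names; as of this commit they are accepted with a warning, and the future-incompatibility entry registered later in this commit (PR #32403) tracks turning them into hard errors:

use ::self::some_module::Item;      // warns: expected identifier, found keyword `self`
use ::super::sibling_module::Item;  // warns: expected identifier, found keyword `super`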

View File

@ -1502,7 +1502,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
} else {
let ends_with_stmt = match body.expr {
None if !body.stmts.is_empty() =>
match body.stmts.first().unwrap().node {
match body.stmts.last().unwrap().node {
hir::StmtSemi(ref e, _) => {
self.ir.tcx.expr_ty(&e) == t_ret
},
@ -1515,7 +1515,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
E0269,
"not all control paths return a value");
if ends_with_stmt {
let last_stmt = body.stmts.first().unwrap();
let last_stmt = body.stmts.last().unwrap();
let original_span = original_sp(self.ir.tcx.sess.codemap(),
last_stmt.span, sp);
let span_semicolon = Span {
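The first() -> last() change matters because the heuristic asks whether the body ends with an expression statement whose type matches the return type, and the help span computed here (span_semicolon) must cover the semicolon of that last statement, not the first one. A deliberately non-compiling body of the kind it targets, as of this commit:

fn sum() -> i32 {
    let x = 1;  // first statement: irrelevant to the heuristic
    x + 1;      // last statement: its expression has the return type, so
                // E0269 ("not all control paths return a value") highlights
                // this trailing semicolon.
}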

View File

@ -9,7 +9,7 @@
// except according to those terms.
use dep_graph::DepGraph;
use infer::InferCtxt;
use infer::{InferCtxt, InferOk};
use ty::{self, Ty, TyCtxt, TypeFoldable, ToPolyTraitRef};
use rustc_data_structures::obligation_forest::{Backtrace, ObligationForest, Error};
use std::iter;
@ -320,103 +320,172 @@ impl<'tcx> FulfillmentContext<'tcx> {
fn process_predicate<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
tree_cache: &mut LocalFulfilledPredicates<'tcx>,
pending_obligation: &mut PendingPredicateObligation<'tcx>,
mut backtrace: Backtrace<PendingPredicateObligation<'tcx>>,
backtrace: Backtrace<PendingPredicateObligation<'tcx>>,
region_obligations: &mut NodeMap<Vec<RegionObligation<'tcx>>>)
-> Result<Option<Vec<PendingPredicateObligation<'tcx>>>,
FulfillmentErrorCode<'tcx>>
{
match process_predicate1(selcx, pending_obligation, backtrace.clone(), region_obligations) {
Ok(Some(v)) => {
// FIXME(#30977) The code below is designed to detect (and
// permit) DAGs, while still ensuring that the reasoning
// is acyclic. However, it does a few things
// suboptimally. For example, it refreshes type variables
// a lot, probably more than needed, but also less than
// you might want.
//
// - more than needed: I want to be very sure we don't
// accidentally treat a cycle as a DAG, so I am
// refreshing type variables as we walk the ancestors;
// but we are going to repeat this a lot, which is
// sort of silly, and it would be nicer to refresh
// them *in place* so that later predicate processing
// can benefit from the same work;
// - less than you might want: we only add items in the cache here,
// but maybe we learn more about type variables and could add them into
// the cache later on.
let tcx = selcx.tcx();
// Compute a little FnvHashSet for the ancestors. We only
// do this the first time that we care.
let mut cache = None;
let mut is_ancestor = |predicate: &ty::Predicate<'tcx>| {
if cache.is_none() {
let mut c = FnvHashSet();
for ancestor in backtrace.by_ref() {
// Ugh. This just feels ridiculously
// inefficient. But we need to compare
// predicates without being concerned about
// the vagaries of type inference, so for now
// just ensure that they are always
// up-to-date. (I suppose we could just use a
// snapshot and check if they are unifiable?)
let resolved_predicate =
selcx.infcx().resolve_type_vars_if_possible(
&ancestor.obligation.predicate);
c.insert(resolved_predicate);
}
cache = Some(c);
}
cache.as_ref().unwrap().contains(predicate)
};
let pending_predicate_obligations: Vec<_> =
v.into_iter()
.filter_map(|obligation| {
// Probably silly, but remove any inference
// variables. This is actually crucial to the
// ancestor check below, but it's not clear that
// it makes sense to ALWAYS do it.
let obligation = selcx.infcx().resolve_type_vars_if_possible(&obligation);
// Screen out obligations that we know globally
// are true. This should really be the DAG check
// mentioned above.
if tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate) {
return None;
}
// Check whether this obligation appears somewhere else in the tree.
if tree_cache.is_duplicate_or_add(&obligation.predicate) {
// If the obligation appears as a parent,
// allow it, because that is a cycle.
// Otherwise though we can just ignore
// it. Note that we have to be careful around
// inference variables here -- for the
// purposes of the ancestor check, we retain
// the invariant that all type variables are
// fully refreshed.
if !is_ancestor(&obligation.predicate) {
return None;
}
}
Some(PendingPredicateObligation {
obligation: obligation,
stalled_on: vec![]
})
})
.collect();
Ok(Some(pending_predicate_obligations))
}
match process_predicate1(selcx, pending_obligation, region_obligations) {
Ok(Some(v)) => process_child_obligations(selcx,
tree_cache,
&pending_obligation.obligation,
backtrace,
v),
Ok(None) => Ok(None),
Err(e) => Err(e)
}
}
fn process_child_obligations<'a,'tcx>(
selcx: &mut SelectionContext<'a,'tcx>,
tree_cache: &mut LocalFulfilledPredicates<'tcx>,
pending_obligation: &PredicateObligation<'tcx>,
backtrace: Backtrace<PendingPredicateObligation<'tcx>>,
child_obligations: Vec<PredicateObligation<'tcx>>)
-> Result<Option<Vec<PendingPredicateObligation<'tcx>>>,
FulfillmentErrorCode<'tcx>>
{
// FIXME(#30977) The code below is designed to detect (and
// permit) DAGs, while still ensuring that the reasoning
// is acyclic. However, it does a few things
// suboptimally. For example, it refreshes type variables
// a lot, probably more than needed, but also less than
// you might want.
//
// - more than needed: I want to be very sure we don't
// accidentally treat a cycle as a DAG, so I am
// refreshing type variables as we walk the ancestors;
// but we are going to repeat this a lot, which is
// sort of silly, and it would be nicer to refresh
// them *in place* so that later predicate processing
// can benefit from the same work;
// - less than you might want: we only add items in the cache here,
// but maybe we learn more about type variables and could add them into
// the cache later on.
let tcx = selcx.tcx();
let mut ancestor_set = AncestorSet::new(&backtrace);
let pending_predicate_obligations: Vec<_> =
child_obligations
.into_iter()
.filter_map(|obligation| {
// Probably silly, but remove any inference
// variables. This is actually crucial to the ancestor
// check marked (*) below, but it's not clear that it
// makes sense to ALWAYS do it.
let obligation = selcx.infcx().resolve_type_vars_if_possible(&obligation);
// Screen out obligations that we know globally
// are true.
if tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate) {
return None;
}
// Check whether this obligation appears
// somewhere else in the tree. If not, we have to
// process it for sure.
if !tree_cache.is_duplicate_or_add(&obligation.predicate) {
return Some(PendingPredicateObligation {
obligation: obligation,
stalled_on: vec![]
});
}
debug!("process_child_obligations: duplicate={:?}",
obligation.predicate);
// OK, the obligation appears elsewhere in the tree.
// This is either a fatal error or else something we can
// ignore. If the obligation appears in our *ancestors*
// (rather than some more distant relative), that
// indicates a cycle. Cycles are either considered
// resolved (if this is a coinductive case) or a fatal
// error.
if let Some(index) = ancestor_set.has(selcx.infcx(), &obligation.predicate) {
// ~~~ (*) see above
debug!("process_child_obligations: cycle index = {}", index);
let backtrace = backtrace.clone();
let cycle: Vec<_> =
iter::once(&obligation)
.chain(Some(pending_obligation))
.chain(backtrace.take(index + 1).map(|p| &p.obligation))
.cloned()
.collect();
if coinductive_match(selcx, &cycle) {
debug!("process_child_obligations: coinductive match");
None
} else {
report_overflow_error_cycle(selcx.infcx(), &cycle);
}
} else {
// Not a cycle. Just ignore this obligation then,
// we're already in the process of proving it.
debug!("process_child_obligations: not a cycle");
None
}
})
.collect();
Ok(Some(pending_predicate_obligations))
}
struct AncestorSet<'b, 'tcx: 'b> {
populated: bool,
cache: FnvHashMap<ty::Predicate<'tcx>, usize>,
backtrace: Backtrace<'b, PendingPredicateObligation<'tcx>>,
}
impl<'b, 'tcx> AncestorSet<'b, 'tcx> {
fn new(backtrace: &Backtrace<'b, PendingPredicateObligation<'tcx>>) -> Self {
AncestorSet {
populated: false,
cache: FnvHashMap(),
backtrace: backtrace.clone(),
}
}
/// Checks whether any of the ancestors in the backtrace are equal
/// to `predicate` (`predicate` is assumed to be fully
/// type-resolved). Returns `None` if not; otherwise, returns
/// `Some` with the index within the backtrace.
fn has<'a>(&mut self,
infcx: &InferCtxt<'a, 'tcx>,
predicate: &ty::Predicate<'tcx>)
-> Option<usize> {
// the first time, we have to populate the cache
if !self.populated {
let backtrace = self.backtrace.clone();
for (index, ancestor) in backtrace.enumerate() {
// Ugh. This just feels ridiculously
// inefficient. But we need to compare
// predicates without being concerned about
// the vagaries of type inference, so for now
// just ensure that they are always
// up-to-date. (I suppose we could just use a
// snapshot and check if they are unifiable?)
let resolved_predicate =
infcx.resolve_type_vars_if_possible(
&ancestor.obligation.predicate);
// Though we try to avoid it, it can happen that a
// cycle already exists in the predecessors. This
// happens if the type variables were not fully known
// at the time that the ancestors were pushed. We'll
// just ignore such cycles for now, on the premise
// that they will repeat themselves and we'll deal
// with them properly then.
self.cache.entry(resolved_predicate)
.or_insert(index);
}
self.populated = true;
}
self.cache.get(predicate).cloned()
}
}
/// Return the set of type variables contained in a trait ref
fn trait_ref_type_vars<'a, 'tcx>(selcx: &mut SelectionContext<'a, 'tcx>,
@ -438,7 +507,6 @@ fn trait_ref_type_vars<'a, 'tcx>(selcx: &mut SelectionContext<'a, 'tcx>,
/// - `Err` if the predicate does not hold
fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
pending_obligation: &mut PendingPredicateObligation<'tcx>,
backtrace: Backtrace<PendingPredicateObligation<'tcx>>,
region_obligations: &mut NodeMap<Vec<RegionObligation<'tcx>>>)
-> Result<Option<Vec<PredicateObligation<'tcx>>>,
FulfillmentErrorCode<'tcx>>
@ -461,16 +529,6 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
let obligation = &mut pending_obligation.obligation;
// If we exceed the recursion limit, take a moment to look for a
// cycle so we can give a better error report from here, where we
// have more context.
let recursion_limit = selcx.tcx().sess.recursion_limit.get();
if obligation.recursion_depth >= recursion_limit {
if let Some(cycle) = scan_for_cycle(obligation, &backtrace) {
report_overflow_error_cycle(selcx.infcx(), &cycle);
}
}
if obligation.predicate.has_infer_types() {
obligation.predicate = selcx.infcx().resolve_type_vars_if_possible(&obligation.predicate);
}
@ -481,10 +539,6 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
return Ok(Some(vec![]));
}
if coinductive_match(selcx, obligation, data, &backtrace) {
return Ok(Some(vec![]));
}
let trait_obligation = obligation.with(data.clone());
match selcx.select(&trait_obligation) {
Ok(Some(vtable)) => {
@ -526,7 +580,11 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
ty::Predicate::Equate(ref binder) => {
match selcx.infcx().equality_predicate(obligation.cause.span, binder) {
Ok(()) => Ok(Some(Vec::new())),
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
Ok(Some(Vec::new()))
},
Err(_) => Err(CodeSelectionError(Unimplemented)),
}
}
@ -609,63 +667,40 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
/// For defaulted traits, we use a co-inductive strategy to solve, so
/// that recursion is ok. This routine returns true if the top of the
/// stack (`top_obligation` and `top_data`):
/// stack (`cycle[0]`):
/// - is a defaulted trait, and
/// - it also appears in the backtrace at some position `X`; and,
/// - all the predicates at positions `X..` between `X` and the top are
/// also defaulted traits.
fn coinductive_match<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
top_obligation: &PredicateObligation<'tcx>,
top_data: &ty::PolyTraitPredicate<'tcx>,
backtrace: &Backtrace<PendingPredicateObligation<'tcx>>)
cycle: &[PredicateObligation<'tcx>])
-> bool
{
if selcx.tcx().trait_has_default_impl(top_data.def_id()) {
debug!("coinductive_match: top_data={:?}", top_data);
for bt_obligation in backtrace.clone() {
debug!("coinductive_match: bt_obligation={:?}", bt_obligation);
let len = cycle.len();
// *Everything* in the backtrace must be a defaulted trait.
match bt_obligation.obligation.predicate {
ty::Predicate::Trait(ref data) => {
if !selcx.tcx().trait_has_default_impl(data.def_id()) {
debug!("coinductive_match: trait does not have default impl");
break;
}
}
_ => { break; }
}
assert_eq!(cycle[0].predicate, cycle[len - 1].predicate);
// And we must find a recursive match.
if bt_obligation.obligation.predicate == top_obligation.predicate {
debug!("coinductive_match: found a match in the backtrace");
return true;
}
}
}
false
cycle[0..len-1]
.iter()
.all(|bt_obligation| {
let result = coinductive_obligation(selcx, bt_obligation);
debug!("coinductive_match: bt_obligation={:?} coinductive={}",
bt_obligation, result);
result
})
}
fn scan_for_cycle<'a,'tcx>(top_obligation: &PredicateObligation<'tcx>,
backtrace: &Backtrace<PendingPredicateObligation<'tcx>>)
-> Option<Vec<PredicateObligation<'tcx>>>
{
let mut map = FnvHashMap();
let all_obligations =
|| iter::once(top_obligation)
.chain(backtrace.clone()
.map(|p| &p.obligation));
for (index, bt_obligation) in all_obligations().enumerate() {
if let Some(&start) = map.get(&bt_obligation.predicate) {
// Found a cycle starting at position `start` and running
// until the current position (`index`).
return Some(all_obligations().skip(start).take(index - start + 1).cloned().collect());
} else {
map.insert(bt_obligation.predicate.clone(), index);
fn coinductive_obligation<'a, 'tcx>(selcx: &SelectionContext<'a, 'tcx>,
obligation: &PredicateObligation<'tcx>)
-> bool {
match obligation.predicate {
ty::Predicate::Trait(ref data) => {
selcx.tcx().trait_has_default_impl(data.def_id())
}
_ => {
false
}
}
None
}
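A concrete shape of cycle that the coinductive check accepts: Send is a defaulted (auto) trait, and proving it for a self-referential type revisits the same predicate.

// Proving `List: Send` requires `Box<List>: Send`, which in turn requires
// `List: Send` again. Every predicate in that cycle is a defaulted trait,
// so coinductive_match treats the cycle as satisfied instead of reporting
// an overflow/cycle error.
#[allow(dead_code)]
struct List {
    next: Option<Box<List>>,
}

fn assert_send<T: Send>() {}

fn main() {
    assert_send::<List>();
}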
fn register_region_obligation<'tcx>(t_a: Ty<'tcx>,

View File

@ -24,7 +24,7 @@ use super::VtableImplData;
use super::util;
use middle::def_id::DefId;
use infer::{self, TypeOrigin};
use infer::{self, InferOk, TypeOrigin};
use ty::subst::Subst;
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder};
@ -232,7 +232,11 @@ fn project_and_unify_type<'cx,'tcx>(
let infcx = selcx.infcx();
let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span);
match infer::mk_eqty(infcx, true, origin, normalized_ty, obligation.predicate.ty) {
Ok(()) => Ok(Some(obligations)),
Ok(InferOk { obligations: inferred_obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(inferred_obligations.is_empty());
Ok(Some(obligations))
},
Err(err) => Err(MismatchedProjectionTypes { err: err }),
}
}
@ -278,7 +282,10 @@ fn consider_unification_despite_ambiguity<'cx,'tcx>(selcx: &mut SelectionContext
let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span);
let obligation_ty = obligation.predicate.ty;
match infer::mk_eqty(infcx, true, origin, obligation_ty, ret_type) {
Ok(()) => { }
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
}
Err(_) => { /* ignore errors */ }
}
}
@ -829,7 +836,10 @@ fn assemble_candidates_from_predicates<'cx,'tcx,I>(
infcx.sub_poly_trait_refs(false,
origin,
data_poly_trait_ref,
obligation_poly_trait_ref).is_ok()
obligation_poly_trait_ref)
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
.is_ok()
});
debug!("assemble_candidates_from_predicates: candidate={:?} \
@ -1082,7 +1092,10 @@ fn confirm_param_env_candidate<'cx,'tcx>(
origin,
obligation.predicate.trait_ref.clone(),
projection.projection_ty.trait_ref.clone()) {
Ok(()) => { }
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
}
Err(e) => {
span_bug!(
obligation.cause.span,

View File

@ -38,7 +38,7 @@ use super::util;
use middle::def_id::DefId;
use infer;
use infer::{InferCtxt, TypeFreshener, TypeOrigin};
use infer::{InferCtxt, InferOk, TypeFreshener, TypeOrigin};
use ty::subst::{Subst, Substs, TypeSpace};
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
use traits;
@ -484,7 +484,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ty::Predicate::Equate(ref p) => {
// does this code ever run?
match self.infcx.equality_predicate(obligation.cause.span, p) {
Ok(()) => EvaluatedToOk,
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
EvaluatedToOk
},
Err(_) => EvaluatedToErr
}
}
@ -1185,7 +1189,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
origin,
trait_bound.clone(),
ty::Binder(skol_trait_ref.clone())) {
Ok(()) => { }
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
}
Err(_) => { return false; }
}
@ -2487,13 +2494,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let origin = TypeOrigin::RelateOutputImplTypes(obligation_cause.span);
let obligation_trait_ref = obligation_trait_ref.clone();
match self.infcx.sub_poly_trait_refs(false,
origin,
expected_trait_ref.clone(),
obligation_trait_ref.clone()) {
Ok(()) => Ok(()),
Err(e) => Err(OutputTypeParameterMismatch(expected_trait_ref, obligation_trait_ref, e))
}
self.infcx.sub_poly_trait_refs(false,
origin,
expected_trait_ref.clone(),
obligation_trait_ref.clone())
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
.map_err(|e| OutputTypeParameterMismatch(expected_trait_ref, obligation_trait_ref, e))
}
fn confirm_builtin_unsize_candidate(&mut self,
@ -2524,9 +2531,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let new_trait = tcx.mk_trait(data_a.principal.clone(), bounds);
let origin = TypeOrigin::Misc(obligation.cause.span);
if self.infcx.sub_types(false, origin, new_trait, target).is_err() {
return Err(Unimplemented);
}
let InferOk { obligations, .. } =
self.infcx.sub_types(false, origin, new_trait, target)
.map_err(|_| Unimplemented)?;
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
// Register one obligation for 'a: 'b.
let cause = ObligationCause::new(obligation.cause.span,
@ -2589,9 +2598,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// [T; n] -> [T].
(&ty::TyArray(a, _), &ty::TySlice(b)) => {
let origin = TypeOrigin::Misc(obligation.cause.span);
if self.infcx.sub_types(false, origin, a, b).is_err() {
return Err(Unimplemented);
}
let InferOk { obligations, .. } =
self.infcx.sub_types(false, origin, a, b)
.map_err(|_| Unimplemented)?;
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
}
// Struct<T> -> Struct<U>.
@ -2647,9 +2658,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
}
let new_struct = tcx.mk_struct(def, tcx.mk_substs(new_substs));
let origin = TypeOrigin::Misc(obligation.cause.span);
if self.infcx.sub_types(false, origin, new_struct, target).is_err() {
return Err(Unimplemented);
}
let InferOk { obligations, .. } =
self.infcx.sub_types(false, origin, new_struct, target)
.map_err(|_| Unimplemented)?;
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
// Construct the nested Field<T>: Unsize<Field<U>> predicate.
nested.push(util::predicate_for_trait_def(tcx,
@ -2734,13 +2747,17 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
skol_obligation_trait_ref);
let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span);
if let Err(e) = self.infcx.eq_trait_refs(false,
origin,
impl_trait_ref.value.clone(),
skol_obligation_trait_ref) {
debug!("match_impl: failed eq_trait_refs due to `{}`", e);
return Err(());
}
let InferOk { obligations, .. } =
self.infcx.eq_trait_refs(false,
origin,
impl_trait_ref.value.clone(),
skol_obligation_trait_ref)
.map_err(|e| {
debug!("match_impl: failed eq_trait_refs due to `{}`", e);
()
})?;
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
if let Err(e) = self.infcx.leak_check(&skol_map, snapshot) {
debug!("match_impl: failed leak check due to `{}`", e);
@ -2803,13 +2820,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
poly_trait_ref);
let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span);
match self.infcx.sub_poly_trait_refs(false,
origin,
poly_trait_ref,
obligation.predicate.to_poly_trait_ref()) {
Ok(()) => Ok(()),
Err(_) => Err(()),
}
self.infcx.sub_poly_trait_refs(false,
origin,
poly_trait_ref,
obligation.predicate.to_poly_trait_ref())
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
.map_err(|_| ())
}
///////////////////////////////////////////////////////////////////////////

View File

@ -116,21 +116,9 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone {
pub trait TypeFolder<'tcx> : Sized {
fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx>;
/// Invoked by the `super_*` routines when we enter a region
/// binding level (for example, when entering a function
/// signature). This is used by clients that want to track the
/// Debruijn index nesting level.
fn enter_region_binder(&mut self) { }
/// Invoked by the `super_*` routines when we exit a region
/// binding level. This is used by clients that want to
/// track the Debruijn index nesting level.
fn exit_region_binder(&mut self) { }
fn fold_binder<T>(&mut self, t: &Binder<T>) -> Binder<T>
where T : TypeFoldable<'tcx>
{
// FIXME(#20526) this should replace `enter_region_binder`/`exit_region_binder`.
t.super_fold_with(self)
}
@ -197,8 +185,9 @@ pub trait TypeFolder<'tcx> : Sized {
}
pub trait TypeVisitor<'tcx> : Sized {
fn enter_region_binder(&mut self) { }
fn exit_region_binder(&mut self) { }
fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
t.super_visit_with(self)
}
fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {
t.super_visit_with(self)
@ -296,12 +285,11 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionFolder<'a, 'tcx>
{
fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx }
fn enter_region_binder(&mut self) {
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.current_depth += 1;
}
fn exit_region_binder(&mut self) {
let t = t.super_fold_with(self);
self.current_depth -= 1;
t
}
fn fold_region(&mut self, r: ty::Region) -> ty::Region {
@ -438,12 +426,11 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx>
{
fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx }
fn enter_region_binder(&mut self) {
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.current_depth += 1;
}
fn exit_region_binder(&mut self) {
let t = t.super_fold_with(self);
self.current_depth -= 1;
t
}
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
@ -596,12 +583,11 @@ struct HasEscapingRegionsVisitor {
}
impl<'tcx> TypeVisitor<'tcx> for HasEscapingRegionsVisitor {
fn enter_region_binder(&mut self) {
fn visit_binder<T: TypeFoldable<'tcx>>(&mut self, t: &Binder<T>) -> bool {
self.depth += 1;
}
fn exit_region_binder(&mut self) {
let result = t.super_visit_with(self);
self.depth -= 1;
result
}
fn visit_ty(&mut self, t: Ty<'tcx>) -> bool {

View File

@ -190,10 +190,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Vec<T> {
impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
folder.enter_region_binder();
let result = ty::Binder(self.0.fold_with(folder));
folder.exit_region_binder();
result
ty::Binder(self.0.fold_with(folder))
}
fn fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
@ -201,10 +198,11 @@ impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder<T> {
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
visitor.enter_region_binder();
if self.0.visit_with(visitor) { return true }
visitor.exit_region_binder();
false
self.0.visit_with(visitor)
}
fn visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
visitor.visit_binder(self)
}
}
@ -220,39 +218,11 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for P<[T]> {
impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for VecPerParamSpace<T> {
fn super_fold_with<F: TypeFolder<'tcx>>(&self, folder: &mut F) -> Self {
// Things in the Fn space take place under an additional level
// of region binding relative to the other spaces. This is
// because those entries are attached to a method, and methods
// always introduce a level of region binding.
let result = self.map_enumerated(|(space, index, elem)| {
if space == subst::FnSpace && index == 0 {
// enter new level when/if we reach the first thing in fn space
folder.enter_region_binder();
}
elem.fold_with(folder)
});
if result.len(subst::FnSpace) > 0 {
// if there was anything in fn space, exit the region binding level
folder.exit_region_binder();
}
result
self.map(|elem| elem.fold_with(folder))
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
let mut entered_region_binder = false;
let result = self.iter_enumerated().any(|(space, index, t)| {
if space == subst::FnSpace && index == 0 {
visitor.enter_region_binder();
entered_region_binder = true;
}
t.visit_with(visitor)
});
if entered_region_binder {
visitor.exit_region_binder();
}
result
self.iter().any(|elem| elem.visit_with(visitor))
}
}

View File

@ -582,12 +582,11 @@ struct SubstFolder<'a, 'tcx: 'a> {
impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> {
fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx }
fn enter_region_binder(&mut self) {
fn fold_binder<T: TypeFoldable<'tcx>>(&mut self, t: &ty::Binder<T>) -> ty::Binder<T> {
self.region_binders_passed += 1;
}
fn exit_region_binder(&mut self) {
let t = t.super_fold_with(self);
self.region_binders_passed -= 1;
t
}
fn fold_region(&mut self, r: ty::Region) -> ty::Region {

View File

@ -343,10 +343,15 @@ pub fn eval_const_expr(tcx: &TyCtxt, e: &Expr) -> ConstVal {
match eval_const_expr_partial(tcx, e, ExprTypeChecked, None) {
Ok(r) => r,
// non-const path still needs to be a fatal error, because enums are funky
Err(ref s) if s.kind == NonConstPath => tcx.sess.span_fatal(s.span, &s.description()),
Err(s) => {
tcx.sess.span_err(s.span, &s.description());
Dummy
match s.kind {
NonConstPath |
UnimplementedConstVal(_) => tcx.sess.span_fatal(s.span, &s.description()),
_ => {
tcx.sess.span_err(s.span, &s.description());
Dummy
}
}
},
}
}
@ -607,6 +612,7 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>,
const_val => signal!(e, NotOn(const_val)),
}
}
hir::ExprUnary(hir::UnDeref, _) => signal!(e, UnimplementedConstVal("deref operation")),
hir::ExprBinary(op, ref a, ref b) => {
let b_ty = match op.node {
hir::BiShl | hir::BiShr => ty_hint.erase_hint(),
@ -745,7 +751,7 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>,
if let Some(const_expr) = lookup_variant_by_id(tcx, enum_def, variant_def) {
eval_const_expr_partial(tcx, const_expr, ty_hint, None)?
} else {
signal!(e, NonConstPath);
signal!(e, UnimplementedConstVal("enum variants"));
}
}
Def::Struct(..) => {
@ -768,6 +774,7 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>,
let callee_val = eval_const_expr_partial(tcx, callee, sub_ty_hint, fn_args)?;
let did = match callee_val {
Function(did) => did,
Struct(_) => signal!(e, UnimplementedConstVal("tuple struct constructors")),
callee => signal!(e, CallOn(callee)),
};
let (decl, result) = if let Some(fn_like) = lookup_const_fn_by_id(tcx, did) {
@ -798,7 +805,7 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>,
hir::ExprBlock(ref block) => {
match block.expr {
Some(ref expr) => eval_const_expr_partial(tcx, &expr, ty_hint, fn_args)?,
None => bug!(),
None => signal!(e, UnimplementedConstVal("empty block")),
}
}
hir::ExprType(ref e, _) => eval_const_expr_partial(tcx, &e, ty_hint, fn_args)?,
@ -840,7 +847,8 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>,
},
Str(ref s) if idx as usize >= s.len() => signal!(e, IndexOutOfBounds),
Str(_) => bug!("unimplemented"), // FIXME: return a const char
// FIXME: return a const char
Str(_) => signal!(e, UnimplementedConstVal("indexing into str")),
_ => signal!(e, IndexedNonVec),
}
}
@ -894,6 +902,7 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>,
signal!(base, ExpectedConstStruct);
}
}
hir::ExprAddrOf(..) => signal!(e, UnimplementedConstVal("address operator")),
_ => signal!(e, MiscCatchAll)
};
@ -1073,6 +1082,7 @@ fn cast_const_int<'tcx>(tcx: &TyCtxt<'tcx>, val: ConstInt, ty: ty::Ty) -> CastRe
Ok(Float(val as f64))
},
ty::TyFloat(ast::FloatTy::F32) => Ok(Float(val.to_u64().unwrap() as f32 as f64)),
ty::TyRawPtr(_) => Err(ErrKind::UnimplementedConstVal("casting an address to a raw ptr")),
_ => Err(CannotCast),
}
}
@ -1094,6 +1104,7 @@ fn cast_const<'tcx>(tcx: &TyCtxt<'tcx>, val: ConstVal, ty: ty::Ty) -> CastResult
Bool(b) => cast_const_int(tcx, Infer(b as u64), ty),
Float(f) => cast_const_float(tcx, f, ty),
Char(c) => cast_const_int(tcx, Infer(c as u64), ty),
Function(_) => Err(UnimplementedConstVal("casting fn pointers")),
_ => Err(CannotCast),
}
}
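For orientation, a few constant expressions (with made-up names) matching the shapes handled by the new arms; each is now reported as UnimplementedConstVal(...) by eval_const_expr_partial instead of hitting bug!() or the NonConstPath/CannotCast catch-alls:

const ADDR: &'static i32 = &1; // ExprAddrOf           -> "address operator"
const EMPTY: () = {};          // ExprBlock, no tail    -> "empty block" (was bug!())
// *some_ref                   // ExprUnary(UnDeref)    -> "deref operation"
// "abc"[i]                    // indexing into a Str   -> "indexing into str" (was bug!())

fn main() {
    let _ = (ADDR, EMPTY);
}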

View File

@ -24,8 +24,8 @@ use rustc::ty::subst;
use rustc::ty::subst::Subst;
use rustc::traits::ProjectionMode;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::relate::{TypeRelation, RelateResult};
use rustc::infer::{self, TypeOrigin};
use rustc::ty::relate::TypeRelation;
use rustc::infer::{self, InferOk, InferResult, TypeOrigin};
use rustc_metadata::cstore::CStore;
use rustc::front::map as hir_map;
use rustc::session::{self, config};
@ -355,17 +355,17 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
infer::TypeTrace::dummy(self.tcx())
}
pub fn sub(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
pub fn sub(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
let trace = self.dummy_type_trace();
self.infcx.sub(true, trace, t1, t2)
}
pub fn lub(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
pub fn lub(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
let trace = self.dummy_type_trace();
self.infcx.lub(true, trace, t1, t2)
}
pub fn glb(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> {
pub fn glb(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> {
let trace = self.dummy_type_trace();
self.infcx.glb(true, trace, t1, t2)
}
@ -374,7 +374,10 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
/// region checks).
pub fn check_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) {
match self.sub(&t1, &t2) {
Ok(_) => {}
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) once obligations are being propagated, assert the right thing.
assert!(obligations.is_empty());
}
Err(ref e) => {
panic!("unexpected error computing sub({:?},{:?}): {}", t1, t2, e);
}
@ -395,7 +398,10 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
/// Checks that `LUB(t1,t2) == t_lub`
pub fn check_lub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>, t_lub: Ty<'tcx>) {
match self.lub(&t1, &t2) {
Ok(t) => {
Ok(InferOk { obligations, value: t }) => {
// FIXME(#32730) once obligations are being propagated, assert the right thing.
assert!(obligations.is_empty());
self.assert_eq(t, t_lub);
}
Err(ref e) => {
@ -411,7 +417,10 @@ impl<'a, 'tcx> Env<'a, 'tcx> {
Err(e) => {
panic!("unexpected error computing LUB: {:?}", e)
}
Ok(t) => {
Ok(InferOk { obligations, value: t }) => {
// FIXME(#32730) once obligations are being propagated, assert the right thing.
assert!(obligations.is_empty());
self.assert_eq(t, t_glb);
// sanity check for good measure:

View File

@ -167,6 +167,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
id: LintId::of(INVALID_TYPE_PARAM_DEFAULT),
reference: "PR 30742 <https://github.com/rust-lang/rust/pull/30724>",
},
FutureIncompatibleInfo {
id: LintId::of(SUPER_OR_SELF_IN_GLOBAL_PATH),
reference: "PR #32403 <https://github.com/rust-lang/rust/pull/32403>",
},
FutureIncompatibleInfo {
id: LintId::of(MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT),
reference: "RFC 218 <https://github.com/rust-lang/rfcs/blob/\

View File

@ -12,7 +12,7 @@
#![allow(unreachable_code)]
use rustc::dep_graph::DepNode;
use rustc::infer::{self, InferCtxt};
use rustc::infer::{self, InferCtxt, InferOk};
use rustc::traits::{self, ProjectionMode};
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, Ty, TyCtxt};
@ -338,6 +338,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
{
infer::mk_subty(self.infcx, false, infer::TypeOrigin::Misc(span),
sup, sub)
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
}
fn mk_eqty(&self, span: Span, a: Ty<'tcx>, b: Ty<'tcx>)
@ -345,6 +347,8 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
{
infer::mk_eqty(self.infcx, false, infer::TypeOrigin::Misc(span),
a, b)
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
}
fn tcx(&self) -> &'a TyCtxt<'tcx> {

View File

@ -28,7 +28,7 @@ use rustc::dep_graph::DepNode;
use rustc::ty::cast::{CastKind};
use rustc_const_eval::{ConstEvalErr, lookup_const_fn_by_id, compare_lit_exprs};
use rustc_const_eval::{eval_const_expr_partial, lookup_const_by_id};
use rustc_const_eval::ErrKind::IndexOpFeatureGated;
use rustc_const_eval::ErrKind::{IndexOpFeatureGated, UnimplementedConstVal};
use rustc_const_eval::EvalHint::ExprTypeChecked;
use rustc::middle::def::Def;
use rustc::middle::def_id::DefId;
@ -110,6 +110,16 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
entry.insert(ConstQualif::empty());
}
}
if let Err(err) = eval_const_expr_partial(self.tcx, expr, ExprTypeChecked, None) {
match err.kind {
UnimplementedConstVal(_) => {},
IndexOpFeatureGated => {},
_ => self.tcx.sess.add_lint(CONST_ERR, expr.id, expr.span,
format!("constant evaluation error: {}. This will \
become a HARD ERROR in the future",
err.description())),
}
}
self.with_mode(mode, |this| {
this.with_euv(None, |euv| euv.consume_expr(expr));
this.visit_expr(expr);
@ -435,6 +445,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
match eval_const_expr_partial(
self.tcx, ex, ExprTypeChecked, None) {
Ok(_) => {}
Err(ConstEvalErr { kind: UnimplementedConstVal(_), ..}) |
Err(ConstEvalErr { kind: IndexOpFeatureGated, ..}) => {},
Err(msg) => {
self.tcx.sess.add_lint(CONST_ERR, ex.id,

View File

@ -14,8 +14,7 @@
//! any imports resolved.
use DefModifiers;
use resolve_imports::ImportDirective;
use resolve_imports::ImportDirectiveSubclass::{self, SingleImport, GlobImport};
use resolve_imports::ImportDirectiveSubclass::{self, GlobImport};
use Module;
use Namespace::{self, TypeNS, ValueNS};
use {NameBinding, NameBindingKind};
@ -24,13 +23,14 @@ use Resolver;
use {resolve_error, resolve_struct_error, ResolutionError};
use rustc::middle::cstore::{CrateStore, ChildItem, DlDef};
use rustc::lint;
use rustc::middle::def::*;
use rustc::middle::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::ty::VariantKind;
use syntax::ast::{Name, NodeId};
use syntax::ast::Name;
use syntax::attr::AttrMetaMethods;
use syntax::parse::token::special_idents;
use syntax::parse::token::{special_idents, SELF_KEYWORD_NAME, SUPER_KEYWORD_NAME};
use syntax::codemap::{Span, DUMMY_SP};
use rustc_front::hir;
@ -117,8 +117,10 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
// Extract and intern the module part of the path. For
// globs and lists, the path is found directly in the AST;
// for simple paths we have to munge the path a little.
let module_path = match view_path.node {
let is_global;
let module_path: Vec<Name> = match view_path.node {
ViewPathSimple(_, ref full_path) => {
is_global = full_path.global;
full_path.segments
.split_last()
.unwrap()
@ -130,6 +132,7 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
ViewPathGlob(ref module_ident_path) |
ViewPathList(ref module_ident_path, _) => {
is_global = module_ident_path.global;
module_ident_path.segments
.iter()
.map(|seg| seg.identifier.name)
@ -137,6 +140,18 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
}
};
// Check for special identifiers in the path:
// prevent `self` or `super` at the beginning of a global path
if is_global && (module_path.first() == Some(&SELF_KEYWORD_NAME) ||
module_path.first() == Some(&SUPER_KEYWORD_NAME)) {
self.session.add_lint(
lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH,
item.id,
item.span,
format!("expected identifier, found keyword `{}`",
module_path.first().unwrap().as_str()));
}
// Build up the import directives.
let is_prelude = item.attrs.iter().any(|attr| {
attr.name() == special_idents::prelude_import.name.as_str()
@ -152,8 +167,8 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
}
let subclass = ImportDirectiveSubclass::single(binding, source_name);
self.build_import_directive(parent,
module_path,
self.unresolved_imports += 1;
parent.add_import_directive(module_path,
subclass,
view_path.span,
item.id,
@ -203,8 +218,8 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
}
};
let subclass = ImportDirectiveSubclass::single(rename, name);
self.build_import_directive(parent,
module_path,
self.unresolved_imports += 1;
parent.add_import_directive(module_path,
subclass,
source_item.span,
source_item.node.id(),
@ -213,8 +228,8 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
}
}
ViewPathGlob(_) => {
self.build_import_directive(parent,
module_path,
self.unresolved_imports += 1;
parent.add_import_directive(module_path,
GlobImport,
view_path.span,
item.id,
@ -521,39 +536,6 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> {
}
}
/// Creates and adds an import directive to the given module.
fn build_import_directive(&mut self,
module_: Module<'b>,
module_path: Vec<Name>,
subclass: ImportDirectiveSubclass,
span: Span,
id: NodeId,
is_public: bool,
is_prelude: bool) {
// Bump the reference count on the name. Or, if this is a glob, set
// the appropriate flag.
match subclass {
SingleImport { target, .. } => {
module_.increment_outstanding_references_for(target, ValueNS, is_public);
module_.increment_outstanding_references_for(target, TypeNS, is_public);
}
GlobImport if !is_prelude => {
// Set the glob flag. This tells us that we don't know the
// module's exports ahead of time.
module_.inc_glob_count(is_public)
}
// Prelude imports are not included in the glob counts since they do not get added to
// `resolved_globs` -- they are handled separately in `resolve_imports`.
GlobImport => {}
}
let directive =
ImportDirective::new(module_path, subclass, span, id, is_public, is_prelude);
module_.add_import_directive(directive);
self.unresolved_imports += 1;
}
/// Ensures that the reduced graph rooted at the given external module
/// is built, building it if it is not.
pub fn populate_module_if_necessary(&mut self, module: Module<'b>) {

View File

@ -828,8 +828,8 @@ pub struct ModuleS<'a> {
// is the NodeId of the local `extern crate` item (otherwise, `extern_crate_id` is None).
extern_crate_id: Option<NodeId>,
resolutions: RefCell<HashMap<(Name, Namespace), NameResolution<'a>>>,
unresolved_imports: RefCell<Vec<&'a ImportDirective>>,
resolutions: RefCell<HashMap<(Name, Namespace), &'a RefCell<NameResolution<'a>>>>,
unresolved_imports: RefCell<Vec<&'a ImportDirective<'a>>>,
// The module children of this node, including normal modules and anonymous modules.
// Anonymous children are pseudo-modules that are implicitly created around items
@ -849,14 +849,8 @@ pub struct ModuleS<'a> {
prelude: RefCell<Option<Module<'a>>>,
glob_importers: RefCell<Vec<(Module<'a>, &'a ImportDirective)>>,
resolved_globs: RefCell<(Vec<Module<'a>> /* public */, Vec<Module<'a>> /* private */)>,
// The number of public glob imports in this module.
public_glob_count: Cell<usize>,
// The number of private glob imports in this module.
private_glob_count: Cell<usize>,
glob_importers: RefCell<Vec<(Module<'a>, &'a ImportDirective<'a>)>>,
globs: RefCell<Vec<&'a ImportDirective<'a>>>,
// Whether this module is populated. If not populated, any attempt to
// access the children must be preceded with a
@ -884,22 +878,15 @@ impl<'a> ModuleS<'a> {
module_children: RefCell::new(NodeMap()),
prelude: RefCell::new(None),
glob_importers: RefCell::new(Vec::new()),
resolved_globs: RefCell::new((Vec::new(), Vec::new())),
public_glob_count: Cell::new(0),
private_glob_count: Cell::new(0),
globs: RefCell::new((Vec::new())),
populated: Cell::new(!external),
arenas: arenas
}
}
fn add_import_directive(&self, import_directive: ImportDirective) {
let import_directive = self.arenas.alloc_import_directive(import_directive);
self.unresolved_imports.borrow_mut().push(import_directive);
}
fn for_each_child<F: FnMut(Name, Namespace, &'a NameBinding<'a>)>(&self, mut f: F) {
for (&(name, ns), name_resolution) in self.resolutions.borrow().iter() {
name_resolution.binding.map(|binding| f(name, ns, binding));
name_resolution.borrow().binding.map(|binding| f(name, ns, binding));
}
}
@ -929,11 +916,6 @@ impl<'a> ModuleS<'a> {
_ => false,
}
}
fn inc_glob_count(&self, is_public: bool) {
let glob_count = if is_public { &self.public_glob_count } else { &self.private_glob_count };
glob_count.set(glob_count.get() + 1);
}
}
impl<'a> fmt::Debug for ModuleS<'a> {
@ -1135,7 +1117,8 @@ pub struct Resolver<'a, 'tcx: 'a> {
struct ResolverArenas<'a> {
modules: arena::TypedArena<ModuleS<'a>>,
name_bindings: arena::TypedArena<NameBinding<'a>>,
import_directives: arena::TypedArena<ImportDirective>,
import_directives: arena::TypedArena<ImportDirective<'a>>,
name_resolutions: arena::TypedArena<RefCell<NameResolution<'a>>>,
}
impl<'a> ResolverArenas<'a> {
@ -1145,9 +1128,13 @@ impl<'a> ResolverArenas<'a> {
fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> {
self.name_bindings.alloc(name_binding)
}
fn alloc_import_directive(&'a self, import_directive: ImportDirective) -> &'a ImportDirective {
fn alloc_import_directive(&'a self, import_directive: ImportDirective<'a>)
-> &'a ImportDirective {
self.import_directives.alloc(import_directive)
}
fn alloc_name_resolution(&'a self) -> &'a RefCell<NameResolution<'a>> {
self.name_resolutions.alloc(Default::default())
}
}
#[derive(PartialEq)]
@ -1216,6 +1203,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
modules: arena::TypedArena::new(),
name_bindings: arena::TypedArena::new(),
import_directives: arena::TypedArena::new(),
name_resolutions: arena::TypedArena::new(),
}
}

View File

@ -30,7 +30,7 @@ use syntax::codemap::Span;
use syntax::util::lev_distance::find_best_match_for_name;
use std::mem::replace;
use std::cell::Cell;
use std::cell::{Cell, RefCell};
/// Contains data for specific types of import directives.
#[derive(Clone, Debug)]
@ -57,8 +57,9 @@ impl ImportDirectiveSubclass {
/// One import directive.
#[derive(Debug,Clone)]
pub struct ImportDirective {
pub struct ImportDirective<'a> {
module_path: Vec<Name>,
target_module: Cell<Option<Module<'a>>>, // the resolution of `module_path`
subclass: ImportDirectiveSubclass,
span: Span,
id: NodeId,
@ -66,29 +67,11 @@ pub struct ImportDirective {
is_prelude: bool,
}
impl ImportDirective {
pub fn new(module_path: Vec<Name>,
subclass: ImportDirectiveSubclass,
span: Span,
id: NodeId,
is_public: bool,
is_prelude: bool)
-> ImportDirective {
ImportDirective {
module_path: module_path,
subclass: subclass,
span: span,
id: id,
is_public: is_public,
is_prelude: is_prelude,
}
}
impl<'a> ImportDirective<'a> {
// Given the binding to which this directive resolves in a particular namespace,
// this returns the binding for the name this directive defines in that namespace.
fn import<'a>(&self,
binding: &'a NameBinding<'a>,
privacy_error: Option<Box<PrivacyError<'a>>>) -> NameBinding<'a> {
fn import(&self, binding: &'a NameBinding<'a>, privacy_error: Option<Box<PrivacyError<'a>>>)
-> NameBinding<'a> {
let mut modifiers = match self.is_public {
true => DefModifiers::PUBLIC | DefModifiers::IMPORTABLE,
false => DefModifiers::empty(),
@ -110,17 +93,52 @@ impl ImportDirective {
}
#[derive(Clone, Default)]
/// Records information about the resolution of a name in a module.
/// Records information about the resolution of a name in a namespace of a module.
pub struct NameResolution<'a> {
/// The number of unresolved single imports of any visibility that could define the name.
outstanding_references: u32,
/// The number of unresolved `pub` single imports that could define the name.
pub_outstanding_references: u32,
/// The single imports that define the name in the namespace.
single_imports: SingleImports<'a>,
/// The least shadowable known binding for this name, or None if there are no known bindings.
pub binding: Option<&'a NameBinding<'a>>,
duplicate_globs: Vec<&'a NameBinding<'a>>,
}
#[derive(Clone, Debug)]
enum SingleImports<'a> {
/// No single imports can define the name in the namespace.
None,
/// Only the given single import can define the name in the namespace.
MaybeOne(&'a ImportDirective<'a>),
/// At least one single import will define the name in the namespace.
AtLeastOne,
}
impl<'a> Default for SingleImports<'a> {
fn default() -> Self {
SingleImports::None
}
}
impl<'a> SingleImports<'a> {
fn add_directive(&mut self, directive: &'a ImportDirective<'a>) {
match *self {
SingleImports::None => *self = SingleImports::MaybeOne(directive),
// If two single imports can define the name in the namespace, we can assume that at
// least one of them will define it since otherwise both would have to define only one
// namespace, leading to a duplicate error.
SingleImports::MaybeOne(_) => *self = SingleImports::AtLeastOne,
SingleImports::AtLeastOne => {}
};
}
fn directive_failed(&mut self) {
match *self {
SingleImports::None => unreachable!(),
SingleImports::MaybeOne(_) => *self = SingleImports::None,
SingleImports::AtLeastOne => {}
}
}
}
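
For illustration, the three-state bookkeeping above can be exercised on its own. The following standalone sketch mirrors the transitions described in the comments; the `Directive` newtype and the `main` driver are invented stand-ins for the real `ImportDirective` and resolver plumbing.

// Standalone sketch of the SingleImports state machine described above.
// `Directive` is a stand-in for the real `ImportDirective`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Directive(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
enum SingleImports {
    /// No single import can define the name.
    None,
    /// Only this one single import might define the name.
    MaybeOne(Directive),
    /// Two or more single imports target the name, so at least one must define it
    /// (otherwise they would collide in the same namespace).
    AtLeastOne,
}

impl SingleImports {
    fn add_directive(&mut self, directive: Directive) {
        *self = match *self {
            SingleImports::None => SingleImports::MaybeOne(directive),
            SingleImports::MaybeOne(_) => SingleImports::AtLeastOne,
            SingleImports::AtLeastOne => SingleImports::AtLeastOne,
        };
    }

    fn directive_failed(&mut self) {
        *self = match *self {
            SingleImports::None => unreachable!(),
            SingleImports::MaybeOne(_) => SingleImports::None,
            SingleImports::AtLeastOne => SingleImports::AtLeastOne,
        };
    }
}

fn main() {
    let mut state = SingleImports::None;
    state.add_directive(Directive(0));
    assert_eq!(state, SingleImports::MaybeOne(Directive(0)));
    state.add_directive(Directive(1));
    assert_eq!(state, SingleImports::AtLeastOne);
    // Once two imports have been seen, a single failure no longer
    // clears the state back to `None`.
    state.directive_failed();
    assert_eq!(state, SingleImports::AtLeastOne);
}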
impl<'a> NameResolution<'a> {
fn try_define(&mut self, binding: &'a NameBinding<'a>) -> Result<(), &'a NameBinding<'a>> {
if let Some(old_binding) = self.binding {
@ -139,40 +157,54 @@ impl<'a> NameResolution<'a> {
Ok(())
}
// Returns the binding for the name if it is known or None if it not known.
fn binding(&self) -> Option<&'a NameBinding<'a>> {
self.binding.and_then(|binding| match self.single_imports {
SingleImports::None => Some(binding),
_ if !binding.defined_with(DefModifiers::GLOB_IMPORTED) => Some(binding),
_ => None, // The binding could be shadowed by a single import, so it is not known.
})
}
// Returns Some(the resolution of the name), or None if the resolution depends
// on whether more globs can define the name.
fn try_result(&self, allow_private_imports: bool)
fn try_result(&self, ns: Namespace, allow_private_imports: bool)
-> Option<ResolveResult<&'a NameBinding<'a>>> {
match self.binding {
Some(binding) if !binding.defined_with(DefModifiers::GLOB_IMPORTED) =>
Some(Success(binding)),
// If (1) we don't allow private imports, (2) no public single import can define the
// name, and (3) no public glob has defined the name, the resolution depends on globs.
_ if !allow_private_imports && self.pub_outstanding_references == 0 &&
!self.binding.map(NameBinding::is_public).unwrap_or(false) => None,
_ if self.outstanding_references > 0 => Some(Indeterminate),
Some(binding) => Some(Success(binding)),
None => None,
}
}
fn increment_outstanding_references(&mut self, is_public: bool) {
self.outstanding_references += 1;
if is_public {
self.pub_outstanding_references += 1;
}
}
fn decrement_outstanding_references(&mut self, is_public: bool) {
let decrement_references = |count: &mut _| {
assert!(*count > 0);
*count -= 1;
return Some(Success(binding)),
_ => {} // Items and single imports are not shadowable
};
decrement_references(&mut self.outstanding_references);
if is_public {
decrement_references(&mut self.pub_outstanding_references);
// Check if a single import can still define the name.
match self.single_imports {
SingleImports::None => {},
SingleImports::AtLeastOne => return Some(Indeterminate),
SingleImports::MaybeOne(directive) => {
// If (1) we don't allow private imports, (2) no public single import can define
// the name, and (3) no public glob has defined the name, the resolution depends
// on whether more globs can define the name.
if !allow_private_imports && !directive.is_public &&
!self.binding.map(NameBinding::is_public).unwrap_or(false) {
return None;
}
let target_module = match directive.target_module.get() {
Some(target_module) => target_module,
None => return Some(Indeterminate),
};
let name = match directive.subclass {
SingleImport { source, .. } => source,
GlobImport => unreachable!(),
};
match target_module.resolve_name(name, ns, false) {
Failed(_) => {}
_ => return Some(Indeterminate),
}
}
}
self.binding.map(Success)
}
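
The rewritten `try_result` distinguishes a known binding, an indeterminate answer, and the case where only unresolved globs could still define the name. A much-simplified sketch of that three-way decision follows; `Resolution`, `Outcome`, and the field names are illustrative placeholders, not the real resolver types.

// Simplified decision table mirroring try_result above.
#[derive(Debug, PartialEq)]
enum Outcome {
    Success(&'static str), // a binding is known
    Indeterminate,         // a single import may still define the name
    DependsOnGlobs,        // only unresolved globs could still define it
}

struct Resolution {
    item_binding: Option<&'static str>, // binding from an item or resolved single import
    pending_single_imports: usize,      // single imports whose outcome is unknown
}

fn try_result(res: &Resolution) -> Outcome {
    // Items and single imports are not shadowable, so a known binding wins.
    if let Some(binding) = res.item_binding {
        return Outcome::Success(binding);
    }
    // Otherwise, any pending single import keeps the answer unknown.
    if res.pending_single_imports > 0 {
        return Outcome::Indeterminate;
    }
    // With no binding and no pending single imports, only globs remain.
    Outcome::DependsOnGlobs
}

fn main() {
    assert_eq!(try_result(&Resolution { item_binding: Some("Vec"), pending_single_imports: 3 }),
               Outcome::Success("Vec"));
    assert_eq!(try_result(&Resolution { item_binding: None, pending_single_imports: 1 }),
               Outcome::Indeterminate);
    assert_eq!(try_result(&Resolution { item_binding: None, pending_single_imports: 0 }),
               Outcome::DependsOnGlobs);
}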
fn report_conflicts<F: FnMut(&NameBinding, &NameBinding)>(&self, mut report: F) {
@ -195,15 +227,20 @@ impl<'a> NameResolution<'a> {
}
impl<'a> ::ModuleS<'a> {
fn resolution(&self, name: Name, ns: Namespace) -> &'a RefCell<NameResolution<'a>> {
*self.resolutions.borrow_mut().entry((name, ns))
.or_insert_with(|| self.arenas.alloc_name_resolution())
}
pub fn resolve_name(&self, name: Name, ns: Namespace, allow_private_imports: bool)
-> ResolveResult<&'a NameBinding<'a>> {
let resolutions = match self.resolutions.borrow_state() {
::std::cell::BorrowState::Unused => self.resolutions.borrow(),
_ => return Failed(None), // This happens when there is a cycle of glob imports
let resolution = self.resolution(name, ns);
let resolution = match resolution.borrow_state() {
::std::cell::BorrowState::Unused => resolution.borrow_mut(),
_ => return Failed(None), // This happens when there is a cycle of imports
};
let resolution = resolutions.get(&(name, ns)).cloned().unwrap_or_default();
if let Some(result) = resolution.try_result(allow_private_imports) {
if let Some(result) = resolution.try_result(ns, allow_private_imports) {
// If the resolution doesn't depend on glob definability, check privacy and return.
return result.and_then(|binding| {
let allowed = allow_private_imports || !binding.is_import() || binding.is_public();
@ -211,29 +248,15 @@ impl<'a> ::ModuleS<'a> {
});
}
let (ref mut public_globs, ref mut private_globs) = *self.resolved_globs.borrow_mut();
// Check if the public globs are determined
if public_globs.len() < self.public_glob_count.get() {
return Indeterminate;
}
for module in public_globs.iter() {
if let Indeterminate = module.resolve_name(name, ns, false) {
return Indeterminate;
}
}
if !allow_private_imports {
return Failed(None);
}
// Check if the private globs are determined
if private_globs.len() < self.private_glob_count.get() {
return Indeterminate;
}
for module in private_globs.iter() {
if let Indeterminate = module.resolve_name(name, ns, false) {
return Indeterminate;
// Check if the globs are determined
for directive in self.globs.borrow().iter() {
if !allow_private_imports && !directive.is_public { continue }
match directive.target_module.get() {
None => return Indeterminate,
Some(target_module) => match target_module.resolve_name(name, ns, false) {
Indeterminate => return Indeterminate,
_ => {}
}
}
}
@ -243,7 +266,7 @@ impl<'a> ::ModuleS<'a> {
// Invariant: this may not be called until import resolution is complete.
pub fn resolve_name_in_lexical_scope(&self, name: Name, ns: Namespace)
-> Option<&'a NameBinding<'a>> {
self.resolutions.borrow().get(&(name, ns)).and_then(|resolution| resolution.binding)
self.resolution(name, ns).borrow().binding
.or_else(|| self.prelude.borrow().and_then(|prelude| {
prelude.resolve_name(name, ns, false).success()
}))
@ -258,9 +281,36 @@ impl<'a> ::ModuleS<'a> {
})
}
pub fn increment_outstanding_references_for(&self, name: Name, ns: Namespace, is_public: bool) {
self.resolutions.borrow_mut().entry((name, ns)).or_insert_with(Default::default)
.increment_outstanding_references(is_public);
pub fn add_import_directive(&self,
module_path: Vec<Name>,
subclass: ImportDirectiveSubclass,
span: Span,
id: NodeId,
is_public: bool,
is_prelude: bool) {
let directive = self.arenas.alloc_import_directive(ImportDirective {
module_path: module_path,
target_module: Cell::new(None),
subclass: subclass,
span: span,
id: id,
is_public: is_public,
is_prelude: is_prelude,
});
self.unresolved_imports.borrow_mut().push(directive);
match directive.subclass {
SingleImport { target, .. } => {
for &ns in &[ValueNS, TypeNS] {
self.resolution(target, ns).borrow_mut().single_imports
.add_directive(directive);
}
}
// We don't add prelude imports to the globs since they only affect lexical scopes,
// which are not relevant to import resolution.
GlobImport if directive.is_prelude => {}
GlobImport => self.globs.borrow_mut().push(directive),
}
}
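
Note that a single import is registered under both the value and the type namespace, since the resolver cannot yet know which namespace the source name lives in, while prelude globs are deliberately not tracked. A toy registration that mirrors this dispatch is sketched below; `Module`, `Subclass`, and the numeric directive ids are invented for the example.

// Toy registration mirroring add_import_directive above: single imports are
// recorded under both namespaces, non-prelude globs are collected separately.
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum Namespace { ValueNS, TypeNS }

enum Subclass { Single { target: String }, Glob { is_prelude: bool } }

#[derive(Default)]
struct Module {
    // (name, namespace) -> number of single imports that may define it
    single_imports: HashMap<(String, Namespace), usize>,
    globs: Vec<usize>, // indices of registered glob directives
}

impl Module {
    fn add_import_directive(&mut self, id: usize, subclass: Subclass) {
        match subclass {
            Subclass::Single { target } => {
                // We don't yet know whether `target` names a value or a type,
                // so register the import in both namespaces.
                for &ns in &[Namespace::ValueNS, Namespace::TypeNS] {
                    *self.single_imports.entry((target.clone(), ns)).or_insert(0) += 1;
                }
            }
            // Prelude globs only affect lexical scopes, not import resolution.
            Subclass::Glob { is_prelude: true } => {}
            Subclass::Glob { is_prelude: false } => self.globs.push(id),
        }
    }
}

fn main() {
    let mut m = Module::default();
    m.add_import_directive(0, Subclass::Single { target: "Vec".to_string() });
    m.add_import_directive(1, Subclass::Glob { is_prelude: false });
    m.add_import_directive(2, Subclass::Glob { is_prelude: true });
    assert_eq!(m.single_imports.len(), 2); // ("Vec", ValueNS) and ("Vec", TypeNS)
    assert_eq!(m.globs, vec![1]); // the prelude glob is not tracked
}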
// Use `update` to mutate the resolution for the name.
@ -268,13 +318,12 @@ impl<'a> ::ModuleS<'a> {
fn update_resolution<T, F>(&self, name: Name, ns: Namespace, update: F) -> T
where F: FnOnce(&mut NameResolution<'a>) -> T
{
let mut resolutions = self.resolutions.borrow_mut();
let resolution = resolutions.entry((name, ns)).or_insert_with(Default::default);
let was_success = resolution.try_result(false).and_then(ResolveResult::success).is_some();
let mut resolution = &mut *self.resolution(name, ns).borrow_mut();
let was_known = resolution.binding().is_some();
let t = update(resolution);
if !was_success {
if let Some(Success(binding)) = resolution.try_result(false) {
if !was_known {
if let Some(binding) = resolution.binding() {
self.define_in_glob_importers(name, ns, binding);
}
}
@ -292,7 +341,7 @@ impl<'a> ::ModuleS<'a> {
struct ImportResolvingError<'a> {
/// Module where the error happened
source_module: Module<'a>,
import_directive: &'a ImportDirective,
import_directive: &'a ImportDirective<'a>,
span: Span,
help: String,
}
@ -424,19 +473,23 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
/// don't know whether the name exists at the moment due to other
/// currently-unresolved imports, or success if we know the name exists.
/// If successful, the resolved bindings are written into the module.
fn resolve_import(&mut self, directive: &'b ImportDirective) -> ResolveResult<()> {
fn resolve_import(&mut self, directive: &'b ImportDirective<'b>) -> ResolveResult<()> {
debug!("(resolving import for module) resolving import `{}::...` in `{}`",
names_to_string(&directive.module_path),
module_to_string(self.resolver.current_module));
let target_module = match self.resolver.resolve_module_path(&directive.module_path,
DontUseLexicalScope,
directive.span) {
Success(module) => module,
Indeterminate => return Indeterminate,
Failed(err) => return Failed(err),
let target_module = match directive.target_module.get() {
Some(module) => module,
_ => match self.resolver.resolve_module_path(&directive.module_path,
DontUseLexicalScope,
directive.span) {
Success(module) => module,
Indeterminate => return Indeterminate,
Failed(err) => return Failed(err),
},
};
directive.target_module.set(Some(target_module));
let (source, target, value_determined, type_determined) = match directive.subclass {
SingleImport { source, target, ref value_determined, ref type_determined } =>
(source, target, value_determined, type_determined),
@ -444,26 +497,12 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
};
// We need to resolve both namespaces for this to succeed.
let module_ = self.resolver.current_module;
let (value_result, type_result) = {
let mut resolve_in_ns = |ns, determined: bool| {
// Temporarily count the directive as determined so that the resolution fails
// (as opposed to being indeterminate) when it can only be defined by the directive.
if !determined {
module_.resolutions.borrow_mut().get_mut(&(target, ns)).unwrap()
.decrement_outstanding_references(directive.is_public);
}
let result =
self.resolver.resolve_name_in_module(target_module, source, ns, false, true);
if !determined {
module_.increment_outstanding_references_for(target, ns, directive.is_public)
}
result
};
(resolve_in_ns(ValueNS, value_determined.get()),
resolve_in_ns(TypeNS, type_determined.get()))
};
let value_result =
self.resolver.resolve_name_in_module(target_module, source, ValueNS, false, true);
let type_result =
self.resolver.resolve_name_in_module(target_module, source, TypeNS, false, true);
let module_ = self.resolver.current_module;
for &(ns, result, determined) in &[(ValueNS, &value_result, value_determined),
(TypeNS, &type_result, type_determined)] {
if determined.get() { continue }
@ -488,18 +527,24 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
let binding = &directive.import(binding, None);
self.resolver.report_conflict(module_, target, ns, binding, old_binding);
}
} else {
module_.update_resolution(target, ns, |resolution| {
resolution.single_imports.directive_failed();
});
}
module_.update_resolution(target, ns, |resolution| {
resolution.decrement_outstanding_references(directive.is_public);
})
}
match (&value_result, &type_result) {
(&Indeterminate, _) | (_, &Indeterminate) => return Indeterminate,
(&Failed(_), &Failed(_)) => {
let children = target_module.resolutions.borrow();
let names = children.keys().map(|&(ref name, _)| name);
let resolutions = target_module.resolutions.borrow();
let names = resolutions.iter().filter_map(|(&(ref name, _), resolution)| {
match *resolution.borrow() {
NameResolution { binding: Some(_), .. } => Some(name),
NameResolution { single_imports: SingleImports::None, .. } => None,
_ => Some(name),
}
});
let lev_suggestion = match find_best_match_for_name(names, &source.as_str(), None) {
Some(name) => format!(". Did you mean to use `{}`?", name),
None => "".to_owned(),
@ -579,7 +624,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
// succeeds or bails out (as importing * from an empty module or a module
// that exports nothing is valid). target_module is the module we are
// actually importing, i.e., `foo` in `use foo::*`.
fn resolve_glob_import(&mut self, target_module: Module<'b>, directive: &'b ImportDirective)
fn resolve_glob_import(&mut self, target_module: Module<'b>, directive: &'b ImportDirective<'b>)
-> ResolveResult<()> {
if let Some(Def::Trait(_)) = target_module.def {
self.resolver.session.span_err(directive.span, "items in traits are not importable.");
@ -598,15 +643,11 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
return Success(());
}
// Add to target_module's glob_importers and module_'s resolved_globs
// Add to target_module's glob_importers
target_module.glob_importers.borrow_mut().push((module_, directive));
match *module_.resolved_globs.borrow_mut() {
(ref mut public_globs, _) if directive.is_public => public_globs.push(target_module),
(_, ref mut private_globs) => private_globs.push(target_module),
}
for (&(name, ns), resolution) in target_module.resolutions.borrow().iter() {
if let Some(Success(binding)) = resolution.try_result(false) {
if let Some(binding) = resolution.borrow().binding() {
if binding.defined_with(DefModifiers::IMPORTABLE | DefModifiers::PUBLIC) {
let _ = module_.try_define_child(name, ns, directive.import(binding, None));
}
@ -630,11 +671,11 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
// reporting conflicts, reporting the PRIVATE_IN_PUBLIC lint, and reporting unresolved imports.
fn finalize_resolutions(&mut self, module: Module<'b>, report_unresolved_imports: bool) {
// Since import resolution is finished, globs will not define any more names.
module.public_glob_count.set(0); module.private_glob_count.set(0);
*module.resolved_globs.borrow_mut() = (Vec::new(), Vec::new());
*module.globs.borrow_mut() = Vec::new();
let mut reexports = Vec::new();
for (&(name, ns), resolution) in module.resolutions.borrow().iter() {
let resolution = resolution.borrow();
resolution.report_conflicts(|b1, b2| {
self.resolver.report_conflict(module, name, ns, b1, b2)
});

View File

@ -541,14 +541,6 @@ fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
};
let llfn = declare::declare_fn(ccx, &sym, ty);
attributes::from_fn_attrs(ccx, attrs, llfn);
if let Some(id) = local_item {
// FIXME(eddyb) Doubt all extern fn should allow unwinding.
attributes::unwind(llfn, true);
ccx.item_symbols().borrow_mut().insert(id, sym);
}
// This is subtle and surprising, but sometimes we have to bitcast
// the resulting fn pointer. The reason has to do with external
// functions. If you have two crates that both bind the same C
@ -572,12 +564,32 @@ fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
// This can occur on either a crate-local or crate-external
// reference. It also occurs when testing libcore and in some
// other weird situations. Annoying.
let llptrty = type_of::type_of(ccx, fn_ptr_ty);
let llfn = if common::val_ty(llfn) != llptrty {
debug!("get_fn: casting {:?} to {:?}", llfn, llptrty);
consts::ptrcast(llfn, llptrty)
let llfn = if let Some(llfn) = declare::get_declared_value(ccx, &sym) {
if common::val_ty(llfn) != llptrty {
if local_item.is_some() {
bug!("symbol `{}` previously declared as {:?}, now wanted as {:?}",
sym, Value(llfn), llptrty);
}
debug!("get_fn: casting {:?} to {:?}", llfn, llptrty);
consts::ptrcast(llfn, llptrty)
} else {
debug!("get_fn: not casting pointer!");
llfn
}
} else {
let llfn = declare::declare_fn(ccx, &sym, ty);
assert_eq!(common::val_ty(llfn), llptrty);
debug!("get_fn: not casting pointer!");
attributes::from_fn_attrs(ccx, attrs, llfn);
if let Some(id) = local_item {
// FIXME(eddyb) Doubt all extern fn should allow unwinding.
attributes::unwind(llfn, true);
ccx.item_symbols().borrow_mut().insert(id, sym);
}
llfn
};
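
The rewritten `get_fn` first looks for an existing declaration and only bitcasts when the declared type differs; declaration and attribute setup happen on the first request only. The sketch below models that reuse-or-declare pattern with a plain `HashMap` in place of the LLVM module, so the types and the "cast" are stand-ins rather than real LLVM calls.

// Sketch of the reuse-or-declare pattern in the rewritten get_fn above.
use std::collections::HashMap;

#[derive(Debug, Clone, PartialEq)]
enum Decl {
    Exact(String),
    Cast { declared: String, wanted: String },
}

fn get_fn(module: &mut HashMap<String, String>, sym: &str, wanted_ty: &str) -> Decl {
    let existing = module.get(sym).cloned();
    match existing {
        Some(declared_ty) => {
            if declared_ty != wanted_ty {
                // Previously declared with a different type: reuse the declaration
                // but note the cast, as the real code does with consts::ptrcast.
                Decl::Cast { declared: declared_ty, wanted: wanted_ty.to_string() }
            } else {
                Decl::Exact(declared_ty)
            }
        }
        None => {
            // First request for this symbol: declare it and record its type.
            // (The real code also applies attributes and records local symbols here.)
            module.insert(sym.to_string(), wanted_ty.to_string());
            Decl::Exact(wanted_ty.to_string())
        }
    }
}

fn main() {
    let mut module = HashMap::new();
    let first = get_fn(&mut module, "memcmp", "fn(*const u8, *const u8, usize) -> i32");
    assert_eq!(first, Decl::Exact("fn(*const u8, *const u8, usize) -> i32".to_string()));
    // A second user asking for the same symbol with a different signature
    // gets the existing declaration, "cast" to the type it expects.
    match get_fn(&mut module, "memcmp", "fn(*const i8, *const i8, usize) -> i32") {
        Decl::Cast { .. } => {}
        other => panic!("expected a cast, got {:?}", other),
    }
}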

View File

@ -26,6 +26,7 @@ use abi::{Abi, FnType};
use attributes;
use context::CrateContext;
use type_::Type;
use value::Value;
use std::ffi::CString;
@ -146,27 +147,33 @@ pub fn define_internal_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
/// Get defined or externally defined (AvailableExternally linkage) value by
/// name.
pub fn get_defined_value(ccx: &CrateContext, name: &str) -> Option<ValueRef> {
debug!("get_defined_value(name={:?})", name);
/// Get declared value by name.
pub fn get_declared_value(ccx: &CrateContext, name: &str) -> Option<ValueRef> {
debug!("get_declared_value(name={:?})", name);
let namebuf = CString::new(name).unwrap_or_else(|_|{
bug!("name {:?} contains an interior null byte", name)
});
let val = unsafe { llvm::LLVMGetNamedValue(ccx.llmod(), namebuf.as_ptr()) };
if val.is_null() {
debug!("get_defined_value: {:?} value is null", name);
debug!("get_declared_value: {:?} value is null", name);
None
} else {
debug!("get_declared_value: {:?} => {:?}", name, Value(val));
Some(val)
}
}
/// Get defined or externally defined (AvailableExternally linkage) value by
/// name.
pub fn get_defined_value(ccx: &CrateContext, name: &str) -> Option<ValueRef> {
get_declared_value(ccx, name).and_then(|val|{
let declaration = unsafe {
llvm::LLVMIsDeclaration(val) != 0
};
debug!("get_defined_value: found {:?} value (declaration: {})",
name, declaration);
if !declaration {
Some(val)
} else {
None
}
}
})
}

View File

@ -752,33 +752,47 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
match split[1] {
"cxchg" | "cxchgweak" => {
let cmp = from_immediate(bcx, llargs[1]);
let src = from_immediate(bcx, llargs[2]);
let ptr = PointerCast(bcx, llargs[0], val_ty(src).ptr_to());
let weak = if split[1] == "cxchgweak" { llvm::True } else { llvm::False };
let val = AtomicCmpXchg(bcx, ptr, cmp, src, order, failorder, weak);
let result = ExtractValue(bcx, val, 0);
let success = ZExt(bcx, ExtractValue(bcx, val, 1), Type::bool(bcx.ccx()));
Store(bcx,
result,
PointerCast(bcx, StructGEP(bcx, llresult, 0), val_ty(src).ptr_to()));
Store(bcx, success, StructGEP(bcx, llresult, 1));
let sty = &substs.types.get(FnSpace, 0).sty;
if int_type_width_signed(sty, ccx).is_some() {
let weak = if split[1] == "cxchgweak" { llvm::True } else { llvm::False };
let val = AtomicCmpXchg(bcx, llargs[0], llargs[1], llargs[2],
order, failorder, weak);
let result = ExtractValue(bcx, val, 0);
let success = ZExt(bcx, ExtractValue(bcx, val, 1), Type::bool(bcx.ccx()));
Store(bcx, result, StructGEP(bcx, llresult, 0));
Store(bcx, success, StructGEP(bcx, llresult, 1));
} else {
span_invalid_monomorphization_error(
tcx.sess, span,
&format!("invalid monomorphization of `{}` intrinsic: \
expected basic integer type, found `{}`", name, sty));
}
C_nil(ccx)
}
"load" => {
let tp_ty = *substs.types.get(FnSpace, 0);
let mut ptr = llargs[0];
if let Some(ty) = fn_ty.ret.cast {
ptr = PointerCast(bcx, ptr, ty.ptr_to());
let sty = &substs.types.get(FnSpace, 0).sty;
if int_type_width_signed(sty, ccx).is_some() {
AtomicLoad(bcx, llargs[0], order)
} else {
span_invalid_monomorphization_error(
tcx.sess, span,
&format!("invalid monomorphization of `{}` intrinsic: \
expected basic integer type, found `{}`", name, sty));
C_nil(ccx)
}
to_immediate(bcx, AtomicLoad(bcx, ptr, order), tp_ty)
}
"store" => {
let val = from_immediate(bcx, llargs[1]);
let ptr = PointerCast(bcx, llargs[0], val_ty(val).ptr_to());
AtomicStore(bcx, val, ptr, order);
let sty = &substs.types.get(FnSpace, 0).sty;
if int_type_width_signed(sty, ccx).is_some() {
AtomicStore(bcx, llargs[1], llargs[0], order);
} else {
span_invalid_monomorphization_error(
tcx.sess, span,
&format!("invalid monomorphization of `{}` intrinsic: \
expected basic integer type, found `{}`", name, sty));
}
C_nil(ccx)
}
@ -809,9 +823,16 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
_ => ccx.sess().fatal("unknown atomic operation")
};
let val = from_immediate(bcx, llargs[1]);
let ptr = PointerCast(bcx, llargs[0], val_ty(val).ptr_to());
AtomicRMW(bcx, atom_op, ptr, val, order)
let sty = &substs.types.get(FnSpace, 0).sty;
if int_type_width_signed(sty, ccx).is_some() {
AtomicRMW(bcx, atom_op, llargs[0], llargs[1], order)
} else {
span_invalid_monomorphization_error(
tcx.sess, span,
&format!("invalid monomorphization of `{}` intrinsic: \
expected basic integer type, found `{}`", name, sty));
C_nil(ccx)
}
}
}
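
Each atomic intrinsic now checks that the monomorphized type is a basic integer before emitting the operation, and reports an invalid-monomorphization error otherwise. Below is a reduced sketch of that validate-then-translate shape, with placeholder types instead of rustc's `Ty` and trans machinery.

// Sketch of the validate-then-translate shape used for the atomic intrinsics above.
#[derive(Debug)]
enum Ty { Int(u32), Uint(u32), Float(u32) }

/// Stand-in for int_type_width_signed: Some(width, signed) for basic integer types only.
fn int_type_width_signed(ty: &Ty) -> Option<(u64, bool)> {
    match *ty {
        Ty::Int(w) => Some((w as u64, true)),
        Ty::Uint(w) => Some((w as u64, false)),
        Ty::Float(_) => None,
    }
}

fn trans_atomic(name: &str, ty: &Ty) -> Result<String, String> {
    if int_type_width_signed(ty).is_some() {
        // In the real code this is where AtomicLoad/AtomicStore/AtomicRMW
        // would be emitted directly on the (already correctly typed) operand.
        Ok(format!("atomic op on {:?}", ty))
    } else {
        Err(format!("invalid monomorphization of `{}` intrinsic: \
                     expected basic integer type, found `{:?}`", name, ty))
    }
}

fn main() {
    assert!(trans_atomic("atomic_load", &Ty::Uint(32)).is_ok());
    assert!(trans_atomic("atomic_xadd", &Ty::Int(64)).is_ok());
    assert!(trans_atomic("atomic_load", &Ty::Float(64)).is_err());
}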

View File

@ -9,7 +9,7 @@
// except according to those terms.
use middle::def::{self, Def};
use rustc::infer::{self, TypeOrigin};
use rustc::infer::{self, InferOk, TypeOrigin};
use middle::pat_util::{PatIdMap, pat_id_map, pat_is_binding};
use middle::pat_util::pat_is_resolved_const;
use rustc::ty::subst::Substs;
@ -531,7 +531,12 @@ pub fn check_match<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
};
let result = if is_if_let_fallback {
fcx.infcx().eq_types(true, origin, arm_ty, result_ty).map(|_| arm_ty)
fcx.infcx().eq_types(true, origin, arm_ty, result_ty)
.map(|InferOk { obligations, .. }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
arm_ty
})
} else if i == 0 {
// Special-case the first arm, as it has no "previous expressions".
coercion::try(fcx, &arm.body, coerce_first)

View File

@ -62,7 +62,7 @@
use check::{autoderef, FnCtxt, UnresolvedTypeAction};
use rustc::infer::{Coercion, TypeOrigin, TypeTrace};
use rustc::infer::{Coercion, InferOk, TypeOrigin, TypeTrace};
use rustc::traits::{self, ObligationCause};
use rustc::traits::{predicate_for_trait_def, report_selection_error};
use rustc::ty::adjustment::{AutoAdjustment, AutoDerefRef, AdjustDerefRef};
@ -118,8 +118,18 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> {
let trace = TypeTrace::types(self.origin, false, a, b);
if self.use_lub {
infcx.lub(false, trace, &a, &b)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})
} else {
infcx.sub(false, trace, &a, &b)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})
}
})
}
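
The same `InferOk { value, obligations }` handling recurs throughout these call sites: until issue #32730 is resolved, the obligations are asserted empty and only the value is kept. A toy version of that destructuring pattern follows, with a stand-in `InferOk` and `TypeError` in place of the rustc inference types.

// Stand-in for the InferOk { value, obligations } handling repeated above.
struct InferOk<T> {
    value: T,
    obligations: Vec<String>, // placeholder for PredicateObligations
}

#[derive(Debug, PartialEq)]
struct TypeError;

fn unify(a: u32, b: u32) -> Result<InferOk<u32>, TypeError> {
    if a == b {
        Ok(InferOk { value: a, obligations: Vec::new() })
    } else {
        Err(TypeError)
    }
}

fn main() {
    // The pattern used at the call sites: keep the value and, for now,
    // assert that no obligations need to be propagated.
    let result = unify(1, 1).map(|InferOk { value, obligations }| {
        assert!(obligations.is_empty());
        value
    });
    assert_eq!(result, Ok(1));
}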
@ -655,12 +665,22 @@ pub fn try_find_lub<'a, 'b, 'tcx, E, I>(fcx: &FnCtxt<'a, 'tcx>,
(&ty::TyFnDef(a_def_id, a_substs, a_fty),
&ty::TyFnDef(b_def_id, b_substs, b_fty)) => {
// The signature must always match.
let fty = fcx.infcx().lub(true, trace.clone(), a_fty, b_fty)?;
let fty = fcx.infcx().lub(true, trace.clone(), a_fty, b_fty)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})?;
if a_def_id == b_def_id {
// Same function, maybe the parameters match.
let substs = fcx.infcx().commit_if_ok(|_| {
fcx.infcx().lub(true, trace.clone(), a_substs, b_substs)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})
}).map(|s| fcx.tcx().mk_substs(s));
if let Ok(substs) = substs {
@ -724,6 +744,11 @@ pub fn try_find_lub<'a, 'b, 'tcx, E, I>(fcx: &FnCtxt<'a, 'tcx>,
if !noop {
return fcx.infcx().commit_if_ok(|_| {
fcx.infcx().lub(true, trace.clone(), &prev_ty, &new_ty)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})
});
}
}
@ -736,6 +761,11 @@ pub fn try_find_lub<'a, 'b, 'tcx, E, I>(fcx: &FnCtxt<'a, 'tcx>,
} else {
fcx.infcx().commit_if_ok(|_| {
fcx.infcx().lub(true, trace, &prev_ty, &new_ty)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})
})
}
}

View File

@ -9,7 +9,7 @@
// except according to those terms.
use middle::free_region::FreeRegionMap;
use rustc::infer::{self, TypeOrigin};
use rustc::infer::{self, InferOk, TypeOrigin};
use rustc::ty::{self, TyCtxt};
use rustc::traits::{self, ProjectionMode};
use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace};
@ -475,7 +475,10 @@ pub fn compare_const_impl<'tcx>(tcx: &TyCtxt<'tcx>,
});
match err {
Ok(()) => { }
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty())
}
Err(terr) => {
debug!("checking associated const for compatibility: impl ty {:?}, trait ty {:?}",
impl_ty,

View File

@ -11,7 +11,7 @@
use check::{coercion, FnCtxt};
use rustc::ty::Ty;
use rustc::infer::TypeOrigin;
use rustc::infer::{InferOk, TypeOrigin};
use syntax::codemap::Span;
use rustc_front::hir;
@ -21,16 +21,28 @@ use rustc_front::hir;
pub fn suptype<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, sp: Span,
expected: Ty<'tcx>, actual: Ty<'tcx>) {
let origin = TypeOrigin::Misc(sp);
if let Err(e) = fcx.infcx().sub_types(false, origin, actual, expected) {
fcx.infcx().report_mismatched_types(origin, expected, actual, e);
match fcx.infcx().sub_types(false, origin, actual, expected) {
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
},
Err(e) => {
fcx.infcx().report_mismatched_types(origin, expected, actual, e);
}
}
}
pub fn eqtype<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, sp: Span,
expected: Ty<'tcx>, actual: Ty<'tcx>) {
let origin = TypeOrigin::Misc(sp);
if let Err(e) = fcx.infcx().eq_types(false, origin, actual, expected) {
fcx.infcx().report_mismatched_types(origin, expected, actual, e);
match fcx.infcx().eq_types(false, origin, actual, expected) {
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
},
Err(e) => {
fcx.infcx().report_mismatched_types(origin, expected, actual, e);
}
}
}

View File

@ -21,8 +21,7 @@ use rustc::ty::subst;
use rustc::ty::subst::Subst;
use rustc::traits;
use rustc::ty::{self, NoPreference, Ty, TyCtxt, ToPolyTraitRef, TraitRef, TypeFoldable};
use rustc::infer;
use rustc::infer::{InferCtxt, TypeOrigin};
use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin};
use syntax::ast;
use syntax::codemap::{Span, DUMMY_SP};
use rustc_front::hir;
@ -1151,6 +1150,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> {
fn make_sub_ty(&self, sub: Ty<'tcx>, sup: Ty<'tcx>) -> infer::UnitResult<'tcx> {
self.infcx().sub_types(false, TypeOrigin::Misc(DUMMY_SP), sub, sup)
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
}
fn has_applicable_self(&self, item: &ty::ImplOrTraitItem) -> bool {

View File

@ -14,7 +14,7 @@
use CrateCtxt;
use astconv::AstConv;
use check::{self, FnCtxt};
use check::{self, FnCtxt, UnresolvedTypeAction, autoderef};
use front::map as hir_map;
use rustc::ty::{self, Ty, ToPolyTraitRef, ToPredicate, TypeFoldable};
use middle::cstore::{self, CrateStore};
@ -22,6 +22,7 @@ use middle::def::Def;
use middle::def_id::DefId;
use middle::lang_items::FnOnceTraitLangItem;
use rustc::ty::subst::Substs;
use rustc::ty::LvaluePreference;
use rustc::traits::{Obligation, SelectionContext};
use util::nodemap::{FnvHashSet};
@ -50,23 +51,37 @@ fn is_fn_ty<'a, 'tcx>(ty: &Ty<'tcx>, fcx: &FnCtxt<'a, 'tcx>, span: Span) -> bool
if let Ok(fn_once_trait_did) =
cx.lang_items.require(FnOnceTraitLangItem) {
let infcx = fcx.infcx();
infcx.probe(|_| {
let fn_once_substs =
Substs::new_trait(vec![infcx.next_ty_var()],
Vec::new(),
ty);
let trait_ref =
ty::TraitRef::new(fn_once_trait_did,
cx.mk_substs(fn_once_substs));
let poly_trait_ref = trait_ref.to_poly_trait_ref();
let obligation = Obligation::misc(span,
fcx.body_id,
poly_trait_ref
.to_predicate());
let mut selcx = SelectionContext::new(infcx);
let (_, _, opt_is_fn) = autoderef(fcx,
span,
ty,
|| None,
UnresolvedTypeAction::Ignore,
LvaluePreference::NoPreference,
|ty, _| {
infcx.probe(|_| {
let fn_once_substs =
Substs::new_trait(vec![infcx.next_ty_var()],
Vec::new(),
ty);
let trait_ref =
ty::TraitRef::new(fn_once_trait_did,
cx.mk_substs(fn_once_substs));
let poly_trait_ref = trait_ref.to_poly_trait_ref();
let obligation = Obligation::misc(span,
fcx.body_id,
poly_trait_ref
.to_predicate());
let mut selcx = SelectionContext::new(infcx);
return selcx.evaluate_obligation(&obligation)
})
if selcx.evaluate_obligation(&obligation) {
Some(())
} else {
None
}
})
});
opt_is_fn.is_some()
} else {
false
}

View File

@ -88,8 +88,7 @@ use middle::astconv_util::prohibit_type_params;
use middle::cstore::LOCAL_CRATE;
use middle::def::{self, Def};
use middle::def_id::DefId;
use rustc::infer;
use rustc::infer::{TypeOrigin, TypeTrace, type_variable};
use rustc::infer::{self, InferOk, TypeOrigin, TypeTrace, type_variable};
use middle::pat_util::{self, pat_id_map};
use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace};
use rustc::traits::{self, report_fulfillment_errors, ProjectionMode};
@ -1627,6 +1626,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
sup: Ty<'tcx>)
-> Result<(), TypeError<'tcx>> {
infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup)
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
}
pub fn mk_eqty(&self,
@ -1636,6 +1637,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
sup: Ty<'tcx>)
-> Result<(), TypeError<'tcx>> {
infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup)
// FIXME(#32730) propagate obligations
.map(|InferOk { obligations, .. }| assert!(obligations.is_empty()))
}
pub fn mk_subr(&self,
@ -1914,7 +1917,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match infer::mk_eqty(self.infcx(), false,
TypeOrigin::Misc(default.origin_span),
ty, default.ty) {
Ok(()) => {}
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty())
},
Err(_) => {
conflicts.push((*ty, default));
}
@ -2007,7 +2013,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
match infer::mk_eqty(self.infcx(), false,
TypeOrigin::Misc(default.origin_span),
ty, default.ty) {
Ok(()) => {}
// FIXME(#32730) propagate obligations
Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
Err(_) => {
result = Some(default);
}
@ -2773,8 +2780,10 @@ fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty);
// FIXME(#15760) can't use try! here, FromError doesn't default
// to identity so the resulting type is not constrained.
if let Err(e) = ures {
return Err(e);
match ures {
// FIXME(#32730) propagate obligations
Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()),
Err(e) => return Err(e),
}
// Record all the argument types, with the substitutions
@ -2902,13 +2911,23 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
fcx.infcx().commit_if_ok(|_| {
let trace = TypeTrace::types(origin, true, then_ty, else_ty);
fcx.infcx().lub(true, trace, &then_ty, &else_ty)
.map(|InferOk { value, obligations }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
value
})
})
};
(origin, then_ty, else_ty, result)
} else {
let origin = TypeOrigin::IfExpressionWithNoElse(sp);
(origin, unit, then_ty,
fcx.infcx().eq_types(true, origin, unit, then_ty).map(|_| unit))
fcx.infcx().eq_types(true, origin, unit, then_ty)
.map(|InferOk { obligations, .. }| {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
unit
}))
};
let if_ty = match result {

View File

@ -92,7 +92,7 @@ use middle::region::{self, CodeExtent};
use rustc::ty::subst::Substs;
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, MethodCall, TypeFoldable};
use rustc::infer::{self, GenericKind, InferCtxt, SubregionOrigin, TypeOrigin, VerifyBound};
use rustc::infer::{self, GenericKind, InferCtxt, InferOk, SubregionOrigin, TypeOrigin, VerifyBound};
use middle::pat_util;
use rustc::ty::adjustment;
use rustc::ty::wf::ImpliedBound;
@ -1841,7 +1841,11 @@ fn declared_projection_bounds_from_trait<'a,'tcx>(rcx: &Rcx<'a, 'tcx>,
// check whether this predicate applies to our current projection
match infer::mk_eqty(infcx, false, TypeOrigin::Misc(span), ty, outlives.0) {
Ok(()) => { Ok(outlives.1) }
Ok(InferOk { obligations, .. }) => {
// FIXME(#32730) propagate obligations
assert!(obligations.is_empty());
Ok(outlives.1)
}
Err(_) => { Err(()) }
}
});

View File

@ -14,7 +14,6 @@
pub use self::Type::*;
pub use self::PrimitiveType::*;
pub use self::TypeKind::*;
pub use self::StructField::*;
pub use self::VariantKind::*;
pub use self::Mutability::*;
pub use self::Import::*;
@ -53,6 +52,7 @@ use std::env::current_dir;
use core::DocContext;
use doctree;
use visit_ast;
use html::item_type::ItemType;
/// A stable identifier to the particular version of JSON output.
/// Increment this when the `Crate` and related structures change.
@ -273,36 +273,49 @@ impl Item {
}
pub fn is_crate(&self) -> bool {
match self.inner {
ModuleItem(Module { items: _, is_crate: true }) => true,
_ => false
StrippedItem(box ModuleItem(Module { is_crate: true, ..})) |
ModuleItem(Module { is_crate: true, ..}) => true,
_ => false,
}
}
pub fn is_mod(&self) -> bool {
match self.inner { ModuleItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::Module
}
pub fn is_trait(&self) -> bool {
match self.inner { TraitItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::Trait
}
pub fn is_struct(&self) -> bool {
match self.inner { StructItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::Struct
}
pub fn is_enum(&self) -> bool {
match self.inner { EnumItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::Enum
}
pub fn is_fn(&self) -> bool {
match self.inner { FunctionItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::Function
}
pub fn is_associated_type(&self) -> bool {
match self.inner { AssociatedTypeItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::AssociatedType
}
pub fn is_associated_const(&self) -> bool {
match self.inner { AssociatedConstItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::AssociatedConst
}
pub fn is_method(&self) -> bool {
match self.inner { MethodItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::Method
}
pub fn is_ty_method(&self) -> bool {
match self.inner { TyMethodItem(..) => true, _ => false }
ItemType::from_item(self) == ItemType::TyMethod
}
pub fn is_stripped(&self) -> bool {
match self.inner { StrippedItem(..) => true, _ => false }
}
pub fn has_stripped_fields(&self) -> Option<bool> {
match self.inner {
StructItem(ref _struct) => Some(_struct.fields_stripped),
VariantItem(Variant { kind: StructVariant(ref vstruct)} ) => {
Some(vstruct.fields_stripped)
},
_ => None,
}
}
pub fn stability_class(&self) -> String {
@ -341,7 +354,7 @@ pub enum ItemEnum {
TyMethodItem(TyMethod),
/// A method with a body.
MethodItem(Method),
StructFieldItem(StructField),
StructFieldItem(Type),
VariantItem(Variant),
/// `fn`s from an extern block
ForeignFunctionItem(Function),
@ -352,6 +365,8 @@ pub enum ItemEnum {
AssociatedConstItem(Type, Option<String>),
AssociatedTypeItem(Vec<TyParamBound>, Option<Type>),
DefaultImplItem(DefaultImpl),
/// An item that has been stripped by a rustdoc pass
StrippedItem(Box<ItemEnum>),
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
@ -1733,12 +1748,6 @@ impl<'tcx> Clean<Type> for ty::Ty<'tcx> {
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum StructField {
HiddenStructField, // inserted later by strip passes
TypedStructField(Type),
}
impl Clean<Item> for hir::StructField {
fn clean(&self, cx: &DocContext) -> Item {
Item {
@ -1749,7 +1758,7 @@ impl Clean<Item> for hir::StructField {
stability: get_stability(cx, cx.map.local_def_id(self.id)),
deprecation: get_deprecation(cx, cx.map.local_def_id(self.id)),
def_id: cx.map.local_def_id(self.id),
inner: StructFieldItem(TypedStructField(self.ty.clean(cx))),
inner: StructFieldItem(self.ty.clean(cx)),
}
}
}
@ -1766,7 +1775,7 @@ impl<'tcx> Clean<Item> for ty::FieldDefData<'tcx, 'static> {
stability: get_stability(cx, self.did),
deprecation: get_deprecation(cx, self.did),
def_id: self.did,
inner: StructFieldItem(TypedStructField(self.unsubst_ty().clean(cx))),
inner: StructFieldItem(self.unsubst_ty().clean(cx)),
}
}
}
@ -1897,9 +1906,7 @@ impl<'tcx> Clean<Item> for ty::VariantDefData<'tcx, 'static> {
def_id: field.did,
stability: get_stability(cx, field.did),
deprecation: get_deprecation(cx, field.did),
inner: StructFieldItem(
TypedStructField(field.unsubst_ty().clean(cx))
)
inner: StructFieldItem(field.unsubst_ty().clean(cx))
}
}).collect()
})

View File

@ -10,28 +10,50 @@
use clean::*;
pub enum FoldItem {
Retain(Item),
Strip(Item),
Erase,
}
impl FoldItem {
pub fn fold(self) -> Option<Item> {
match self {
FoldItem::Erase => None,
FoldItem::Retain(i) => Some(i),
FoldItem::Strip(item@ Item { inner: StrippedItem(..), .. } ) => Some(item),
FoldItem::Strip(mut i) => {
i.inner = StrippedItem(box i.inner);
Some(i)
}
}
}
}
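
Stripped items are wrapped rather than discarded, so later passes and the renderer can still look through them. The standalone sketch below shows the same wrap-instead-of-delete idea with plain enums in place of rustdoc's `clean` types and `Box::new` in place of the unstable `box` syntax.

// Wrap-instead-of-delete, as in FoldItem::fold above.
#[derive(Debug, PartialEq)]
enum ItemEnum {
    Module,
    Function,
    Stripped(Box<ItemEnum>),
}

enum FoldItem {
    Retain(ItemEnum),
    Strip(ItemEnum),
    Erase,
}

impl FoldItem {
    fn fold(self) -> Option<ItemEnum> {
        match self {
            FoldItem::Erase => None,
            FoldItem::Retain(i) => Some(i),
            // Already stripped: don't double-wrap.
            FoldItem::Strip(i @ ItemEnum::Stripped(..)) => Some(i),
            FoldItem::Strip(i) => Some(ItemEnum::Stripped(Box::new(i))),
        }
    }
}

fn main() {
    assert_eq!(FoldItem::Erase.fold(), None);
    assert_eq!(FoldItem::Retain(ItemEnum::Module).fold(), Some(ItemEnum::Module));
    assert_eq!(FoldItem::Strip(ItemEnum::Function).fold(),
               Some(ItemEnum::Stripped(Box::new(ItemEnum::Function))));
}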
pub trait DocFolder : Sized {
fn fold_item(&mut self, item: Item) -> Option<Item> {
self.fold_item_recur(item)
}
/// don't override!
fn fold_item_recur(&mut self, item: Item) -> Option<Item> {
let Item { attrs, name, source, visibility, def_id, inner, stability, deprecation } = item;
let inner = match inner {
fn fold_inner_recur(&mut self, inner: ItemEnum) -> ItemEnum {
match inner {
StrippedItem(..) => unreachable!(),
ModuleItem(i) => {
ModuleItem(self.fold_mod(i))
},
StructItem(mut i) => {
let num_fields = i.fields.len();
i.fields = i.fields.into_iter().filter_map(|x| self.fold_item(x)).collect();
i.fields_stripped |= num_fields != i.fields.len();
i.fields_stripped |= num_fields != i.fields.len() ||
i.fields.iter().any(|f| f.is_stripped());
StructItem(i)
},
ModuleItem(i) => {
ModuleItem(self.fold_mod(i))
},
EnumItem(mut i) => {
let num_variants = i.variants.len();
i.variants = i.variants.into_iter().filter_map(|x| self.fold_item(x)).collect();
i.variants_stripped |= num_variants != i.variants.len();
i.variants_stripped |= num_variants != i.variants.len() ||
i.variants.iter().any(|f| f.is_stripped());
EnumItem(i)
},
TraitItem(mut i) => {
@ -48,13 +70,24 @@ pub trait DocFolder : Sized {
StructVariant(mut j) => {
let num_fields = j.fields.len();
j.fields = j.fields.into_iter().filter_map(|x| self.fold_item(x)).collect();
j.fields_stripped |= num_fields != j.fields.len();
j.fields_stripped |= num_fields != j.fields.len() ||
j.fields.iter().any(|f| f.is_stripped());
VariantItem(Variant {kind: StructVariant(j), ..i2})
},
_ => VariantItem(i2)
}
},
x => x
}
}
/// don't override!
fn fold_item_recur(&mut self, item: Item) -> Option<Item> {
let Item { attrs, name, source, visibility, def_id, inner, stability, deprecation } = item;
let inner = match inner {
StrippedItem(box i) => StrippedItem(box self.fold_inner_recur(i)),
_ => self.fold_inner_recur(inner),
};
Some(Item { attrs: attrs, name: name, source: source, inner: inner,
@ -70,9 +103,8 @@ pub trait DocFolder : Sized {
}
fn fold_crate(&mut self, mut c: Crate) -> Crate {
c.module = c.module.and_then(|module| {
self.fold_item(module)
});
c.module = c.module.and_then(|module| self.fold_item(module));
c.external_traits = c.external_traits.into_iter().map(|(k, mut v)| {
v.items = v.items.into_iter().filter_map(|i| self.fold_item(i)).collect();
(k, v)

View File

@ -17,22 +17,36 @@ use html::escape::Escape;
use std::io;
use std::io::prelude::*;
use syntax::parse::lexer;
use syntax::parse::lexer::{self, Reader};
use syntax::parse::token;
use syntax::parse;
/// Highlights some source code, returning the HTML output.
pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String {
/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::ParseSess::new();
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
doit(&sess,
lexer::StringReader::new(&sess.span_diagnostic, fm),
class,
id,
&mut out).unwrap();
write_header(class, id, &mut out).unwrap();
write_source(&sess,
lexer::StringReader::new(&sess.span_diagnostic, fm),
&mut out).unwrap();
write_footer(&mut out).unwrap();
String::from_utf8_lossy(&out[..]).into_owned()
}
/// Highlights `src`, returning the HTML output. Returns only the inner html to
/// be inserted into an element. C.f., `render_with_highlighting` which includes
/// an enclosing `<pre>` block.
pub fn render_inner_with_highlighting(src: &str) -> String {
let sess = parse::ParseSess::new();
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
write_source(&sess,
lexer::StringReader::new(&sess.span_diagnostic, fm),
&mut out).unwrap();
String::from_utf8_lossy(&out[..]).into_owned()
}
@ -43,17 +57,10 @@ pub fn highlight(src: &str, class: Option<&str>, id: Option<&str>) -> String {
/// it's used. All source code emission is done as slices from the source map,
/// not from the tokens themselves, in order to stay true to the original
/// source.
fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
class: Option<&str>, id: Option<&str>,
out: &mut Write) -> io::Result<()> {
use syntax::parse::lexer::Reader;
write!(out, "<pre ")?;
match id {
Some(id) => write!(out, "id='{}' ", id)?,
None => {}
}
write!(out, "class='rust {}'>\n", class.unwrap_or(""))?;
fn write_source(sess: &parse::ParseSess,
mut lexer: lexer::StringReader,
out: &mut Write)
-> io::Result<()> {
let mut is_attribute = false;
let mut is_macro = false;
let mut is_macro_nonterminal = false;
@ -184,5 +191,21 @@ fn doit(sess: &parse::ParseSess, mut lexer: lexer::StringReader,
}
}
Ok(())
}
fn write_header(class: Option<&str>,
id: Option<&str>,
out: &mut Write)
-> io::Result<()> {
write!(out, "<pre ")?;
match id {
Some(id) => write!(out, "id='{}' ", id)?,
None => {}
}
write!(out, "class='rust {}'>\n", class.unwrap_or(""))
}
fn write_footer(out: &mut Write) -> io::Result<()> {
write!(out, "</pre>\n")
}
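
Splitting the highlighter into `write_header`, `write_source`, and `write_footer` lets the token-writing body be reused without the enclosing `<pre>` block. A much-reduced sketch of that composition follows; the body writer here just escapes `<` instead of walking lexer tokens.

// Reduced sketch of the header/body/footer split used above.
use std::io::{self, Write};

fn write_header(class: Option<&str>, id: Option<&str>, out: &mut dyn Write) -> io::Result<()> {
    write!(out, "<pre ")?;
    if let Some(id) = id {
        write!(out, "id='{}' ", id)?;
    }
    write!(out, "class='rust {}'>\n", class.unwrap_or(""))
}

fn write_source(src: &str, out: &mut dyn Write) -> io::Result<()> {
    // Placeholder for the token-based highlighting loop.
    write!(out, "{}", src.replace('<', "&lt;"))
}

fn write_footer(out: &mut dyn Write) -> io::Result<()> {
    write!(out, "</pre>\n")
}

/// Counterpart of render_with_highlighting: header + body + footer.
fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>) -> String {
    let mut out = Vec::new();
    write_header(class, id, &mut out).unwrap();
    write_source(src, &mut out).unwrap();
    write_footer(&mut out).unwrap();
    String::from_utf8_lossy(&out).into_owned()
}

/// Counterpart of render_inner_with_highlighting: body only, no <pre> wrapper.
fn render_inner_with_highlighting(src: &str) -> String {
    let mut out = Vec::new();
    write_source(src, &mut out).unwrap();
    String::from_utf8_lossy(&out).into_owned()
}

fn main() {
    let html = render_with_highlighting("fn main() {}", Some("rust-example-rendered"), None);
    assert!(html.starts_with("<pre ") && html.ends_with("</pre>\n"));
    assert_eq!(render_inner_with_highlighting("a < b"), "a &lt; b");
}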

View File

@ -44,7 +44,12 @@ pub enum ItemType {
impl ItemType {
pub fn from_item(item: &clean::Item) -> ItemType {
match item.inner {
let inner = match item.inner {
clean::StrippedItem(box ref item) => item,
ref inner@_ => inner,
};
match *inner {
clean::ModuleItem(..) => ItemType::Module,
clean::ExternCrateItem(..) => ItemType::ExternCrate,
clean::ImportItem(..) => ItemType::Import,
@ -67,6 +72,7 @@ impl ItemType {
clean::AssociatedConstItem(..) => ItemType::AssociatedConst,
clean::AssociatedTypeItem(..) => ItemType::AssociatedType,
clean::DefaultImplItem(..) => ItemType::Impl,
clean::StrippedItem(..) => unreachable!(),
}
}

View File

@ -262,9 +262,9 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result {
&Default::default());
s.push_str(&format!("<span class='rusttest'>{}</span>", Escape(&test)));
});
s.push_str(&highlight::highlight(&text,
Some("rust-example-rendered"),
None));
s.push_str(&highlight::render_with_highlighting(&text,
Some("rust-example-rendered"),
None));
let output = CString::new(s).unwrap();
hoedown_buffer_puts(ob, output.as_ptr());
})

View File

@ -91,12 +91,20 @@ pub struct Context {
/// String representation of how to get back to the root path of the 'doc/'
/// folder in terms of a relative URL.
pub root_path: String,
/// The path to the crate root source minus the file name.
/// Used for simplifying paths to the highlighted source code files.
pub src_root: PathBuf,
/// The current destination folder of where HTML artifacts should be placed.
/// This changes as the context descends into the module hierarchy.
pub dst: PathBuf,
/// A flag, which when `true`, will render pages which redirect to the
/// real location of an item. This is used to allow external links to
/// publicly reused items to redirect to the right location.
pub render_redirect_pages: bool,
pub shared: Arc<SharedContext>,
}
pub struct SharedContext {
/// The path to the crate root source minus the file name.
/// Used for simplifying paths to the highlighted source code files.
pub src_root: PathBuf,
/// This describes the layout of each page, and is not modified after
/// creation of the context (contains info like the favicon and added html).
pub layout: layout::Layout,
@ -106,10 +114,6 @@ pub struct Context {
pub include_sources: bool,
/// The local file sources we've emitted and their respective url-paths.
pub local_sources: HashMap<PathBuf, String>,
/// A flag, which when turned off, will render pages which redirect to the
/// real location of an item. This is used to allow external links to
/// publicly reused items to redirect to the right location.
pub render_redirect_pages: bool,
/// All the passes that were run on this crate.
pub passes: HashSet<String>,
/// The base-URL of the issue tracker for when an item has been tagged with
@ -245,8 +249,7 @@ pub struct Cache {
parent_stack: Vec<DefId>,
parent_is_trait_impl: bool,
search_index: Vec<IndexItem>,
privmod: bool,
remove_priv: bool,
stripped_mod: bool,
access_levels: AccessLevels<DefId>,
deref_trait_did: Option<DefId>,
@ -260,7 +263,7 @@ pub struct Cache {
/// Helper struct to render all source code to HTML pages
struct SourceCollector<'a> {
cx: &'a mut Context,
scx: &'a mut SharedContext,
/// Root destination to place all HTML output into
dst: PathBuf,
@ -413,12 +416,12 @@ pub fn run(mut krate: clean::Crate,
Some(p) => p.to_path_buf(),
None => PathBuf::new(),
};
let mut cx = Context {
dst: dst,
let mut scx = SharedContext {
src_root: src_root,
passes: passes,
current: Vec::new(),
root_path: String::new(),
include_sources: true,
local_sources: HashMap::new(),
issue_tracker_base_url: None,
layout: layout::Layout {
logo: "".to_string(),
favicon: "".to_string(),
@ -426,14 +429,8 @@ pub fn run(mut krate: clean::Crate,
krate: krate.name.clone(),
playground_url: "".to_string(),
},
include_sources: true,
local_sources: HashMap::new(),
render_redirect_pages: false,
issue_tracker_base_url: None,
};
try_err!(mkdir(&cx.dst), &cx.dst);
// Crawl the crate attributes looking for attributes which control how we're
// going to emit HTML
if let Some(attrs) = krate.module.as_ref().map(|m| m.attrs.list("doc")) {
@ -441,15 +438,15 @@ pub fn run(mut krate: clean::Crate,
match *attr {
clean::NameValue(ref x, ref s)
if "html_favicon_url" == *x => {
cx.layout.favicon = s.to_string();
scx.layout.favicon = s.to_string();
}
clean::NameValue(ref x, ref s)
if "html_logo_url" == *x => {
cx.layout.logo = s.to_string();
scx.layout.logo = s.to_string();
}
clean::NameValue(ref x, ref s)
if "html_playground_url" == *x => {
cx.layout.playground_url = s.to_string();
scx.layout.playground_url = s.to_string();
markdown::PLAYGROUND_KRATE.with(|slot| {
if slot.borrow().is_none() {
let name = krate.name.clone();
@ -459,16 +456,25 @@ pub fn run(mut krate: clean::Crate,
}
clean::NameValue(ref x, ref s)
if "issue_tracker_base_url" == *x => {
cx.issue_tracker_base_url = Some(s.to_string());
scx.issue_tracker_base_url = Some(s.to_string());
}
clean::Word(ref x)
if "html_no_source" == *x => {
cx.include_sources = false;
scx.include_sources = false;
}
_ => {}
}
}
}
try_err!(mkdir(&dst), &dst);
krate = render_sources(&dst, &mut scx, krate)?;
let cx = Context {
current: Vec::new(),
root_path: String::new(),
dst: dst,
render_redirect_pages: false,
shared: Arc::new(scx),
};
// Crawl the crate to build various caches used for the output
let analysis = ::ANALYSISKEY.with(|a| a.clone());
@ -492,8 +498,7 @@ pub fn run(mut krate: clean::Crate,
parent_is_trait_impl: false,
extern_locations: HashMap::new(),
primitive_locations: HashMap::new(),
remove_priv: cx.passes.contains("strip-private"),
privmod: false,
stripped_mod: false,
access_levels: access_levels,
orphan_methods: Vec::new(),
traits: mem::replace(&mut krate.external_traits, HashMap::new()),
@ -540,7 +545,6 @@ pub fn run(mut krate: clean::Crate,
CURRENT_LOCATION_KEY.with(|s| s.borrow_mut().clear());
write_shared(&cx, &krate, &*cache, index)?;
let krate = render_sources(&mut cx, krate)?;
// And finally render the whole crate's documentation
cx.krate(krate)
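
The refactor keeps per-module, cloneable state in `Context` and moves everything that is immutable after setup into a `SharedContext` behind an `Arc`. A minimal sketch of that split is below; the fields and the `recurse` helper are trimmed-down illustrations, not the real rustdoc definitions.

// Minimal sketch of the Context / SharedContext split above: mutable,
// per-module state is cloned cheaply, immutable setup is shared via Arc.
use std::path::PathBuf;
use std::sync::Arc;

/// Immutable after setup; shared by every cloned Context.
struct SharedContext {
    src_root: PathBuf,
    include_sources: bool,
}

/// Per-module rendering state; cheap to clone because the bulk is in `shared`.
#[derive(Clone)]
struct Context {
    current: Vec<String>, // module path currently being rendered
    dst: PathBuf,
    shared: Arc<SharedContext>,
}

impl Context {
    fn recurse(&self, module_name: &str) -> Context {
        let mut cx = self.clone();
        cx.current.push(module_name.to_string());
        cx.dst = cx.dst.join(module_name);
        cx // cloning `shared` is just an Arc refcount bump, not a deep copy
    }
}

fn main() {
    let shared = Arc::new(SharedContext {
        src_root: PathBuf::from("src"),
        include_sources: true,
    });
    let root = Context { current: Vec::new(), dst: PathBuf::from("doc"), shared: shared };
    let inner = root.recurse("collections").recurse("vec");
    assert_eq!(inner.current, vec!["collections".to_string(), "vec".to_string()]);
    assert_eq!(inner.dst, PathBuf::from("doc/collections/vec"));
    assert!(Arc::ptr_eq(&root.shared, &inner.shared));
    assert!(root.shared.include_sources);
    assert_eq!(root.shared.src_root, PathBuf::from("src"));
}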
@ -762,16 +766,16 @@ fn write_shared(cx: &Context,
Ok(())
}
fn render_sources(cx: &mut Context,
fn render_sources(dst: &Path, scx: &mut SharedContext,
krate: clean::Crate) -> Result<clean::Crate, Error> {
info!("emitting source files");
let dst = cx.dst.join("src");
let dst = dst.join("src");
try_err!(mkdir(&dst), &dst);
let dst = dst.join(&krate.name);
try_err!(mkdir(&dst), &dst);
let mut folder = SourceCollector {
dst: dst,
cx: cx,
scx: scx,
};
Ok(folder.fold_crate(krate))
}
@ -849,7 +853,7 @@ impl<'a> DocFolder for SourceCollector<'a> {
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
// If we're including source files, and we haven't seen this file yet,
// then we need to render it out to the filesystem
if self.cx.include_sources
if self.scx.include_sources
// skip all invalid spans
&& item.source.filename != ""
// macros from other libraries get special filenames which we can
@ -862,7 +866,7 @@ impl<'a> DocFolder for SourceCollector<'a> {
// something like that), so just don't include sources for the
// entire crate. The other option is maintaining this mapping on a
// per-file basis, but that's probably not worth it...
self.cx
self.scx
.include_sources = match self.emit_source(&item.source.filename) {
Ok(()) => true,
Err(e) => {
@ -874,7 +878,6 @@ impl<'a> DocFolder for SourceCollector<'a> {
}
};
}
self.fold_item_recur(item)
}
}
@ -883,7 +886,7 @@ impl<'a> SourceCollector<'a> {
/// Renders the given filename into its corresponding HTML source file.
fn emit_source(&mut self, filename: &str) -> io::Result<()> {
let p = PathBuf::from(filename);
if self.cx.local_sources.contains_key(&p) {
if self.scx.local_sources.contains_key(&p) {
// We've already emitted this source
return Ok(());
}
@ -904,7 +907,7 @@ impl<'a> SourceCollector<'a> {
let mut cur = self.dst.clone();
let mut root_path = String::from("../../");
let mut href = String::new();
clean_srcpath(&self.cx.src_root, &p, false, |component| {
clean_srcpath(&self.scx.src_root, &p, false, |component| {
cur.push(component);
mkdir(&cur).unwrap();
root_path.push_str("../");
@ -928,24 +931,25 @@ impl<'a> SourceCollector<'a> {
description: &desc,
keywords: BASIC_KEYWORDS,
};
layout::render(&mut w, &self.cx.layout,
layout::render(&mut w, &self.scx.layout,
&page, &(""), &Source(contents))?;
w.flush()?;
self.cx.local_sources.insert(p, href);
self.scx.local_sources.insert(p, href);
Ok(())
}
}
impl DocFolder for Cache {
fn fold_item(&mut self, item: clean::Item) -> Option<clean::Item> {
// If this is a private module, we don't want it in the search index.
let orig_privmod = match item.inner {
clean::ModuleItem(..) => {
let prev = self.privmod;
self.privmod = prev || (self.remove_priv && item.visibility != Some(hir::Public));
// If this is a stripped module,
// we don't want it or its children in the search index.
let orig_stripped_mod = match item.inner {
clean::StrippedItem(box clean::ModuleItem(..)) => {
let prev = self.stripped_mod;
self.stripped_mod = true;
prev
}
_ => self.privmod,
_ => self.stripped_mod,
};
// Register any generics to their corresponding string. This is used
@ -983,6 +987,7 @@ impl DocFolder for Cache {
// Index this method for searching later on
if let Some(ref s) = item.name {
let (parent, is_method) = match item.inner {
clean::StrippedItem(..) => ((None, None), false),
clean::AssociatedConstItem(..) |
clean::TypedefItem(_, true) if self.parent_is_trait_impl => {
// skip associated items in trait impls
@ -1021,13 +1026,9 @@ impl DocFolder for Cache {
}
_ => ((None, Some(&*self.stack)), false)
};
let hidden_field = match item.inner {
clean::StructFieldItem(clean::HiddenStructField) => true,
_ => false
};
match parent {
(parent, Some(path)) if is_method || (!self.privmod && !hidden_field) => {
(parent, Some(path)) if is_method || (!self.stripped_mod) => {
// Needed to determine `self` type.
let parent_basename = self.parent_stack.first().and_then(|parent| {
match self.paths.get(parent) {
@ -1035,6 +1036,7 @@ impl DocFolder for Cache {
_ => None
}
});
debug_assert!(!item.is_stripped());
// A crate has a module at its root, containing all items,
// which should not be indexed. The crate-item itself is
@ -1051,7 +1053,7 @@ impl DocFolder for Cache {
});
}
}
(Some(parent), None) if is_method || (!self.privmod && !hidden_field)=> {
(Some(parent), None) if is_method || (!self.stripped_mod)=> {
if parent.is_local() {
// We have a parent, but we don't know where they're
// defined yet. Wait for later to index this item.
@ -1075,7 +1077,7 @@ impl DocFolder for Cache {
clean::StructItem(..) | clean::EnumItem(..) |
clean::TypedefItem(..) | clean::TraitItem(..) |
clean::FunctionItem(..) | clean::ModuleItem(..) |
clean::ForeignFunctionItem(..) if !self.privmod => {
clean::ForeignFunctionItem(..) if !self.stripped_mod => {
// Reexported items mean that the same id can show up twice
// in the rustdoc ast that we're looking at. We know,
// however, that a reexported item doesn't show up in the
@ -1093,7 +1095,7 @@ impl DocFolder for Cache {
}
// link variants to their parent enum because pages aren't emitted
// for each variant
clean::VariantItem(..) if !self.privmod => {
clean::VariantItem(..) if !self.stripped_mod => {
let mut stack = self.stack.clone();
stack.pop();
self.paths.insert(item.def_id, (stack, ItemType::Enum));
@ -1176,7 +1178,7 @@ impl DocFolder for Cache {
if pushed { self.stack.pop().unwrap(); }
if parent_pushed { self.parent_stack.pop().unwrap(); }
self.privmod = orig_privmod;
self.stripped_mod = orig_stripped_mod;
self.parent_is_trait_impl = orig_parent_is_trait_impl;
return ret;
}
@ -1233,15 +1235,12 @@ impl Context {
// render the crate documentation
let mut work = vec!((self, item));
loop {
match work.pop() {
Some((mut cx, item)) => cx.item(item, |cx, item| {
work.push((cx.clone(), item));
})?,
None => break,
}
}
while let Some((mut cx, item)) = work.pop() {
cx.item(item, |cx, item| {
work.push((cx.clone(), item))
})?
}
Ok(())
}
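
The driver walks the item tree with an explicit work stack instead of recursion, and the `match work.pop()` loop becomes a `while let`. Below is a generic sketch of the same traversal shape, with an invented `Item` type standing in for the clean AST.

// Generic sketch of the explicit work-stack traversal used above:
// children discovered while processing an item are pushed back onto the stack.
#[derive(Clone)]
struct Item {
    name: String,
    children: Vec<Item>,
}

fn render_all(root: Item) -> Vec<String> {
    let mut rendered = Vec::new();
    let mut work = vec![root];
    while let Some(item) = work.pop() {
        rendered.push(item.name.clone());
        for child in item.children {
            work.push(child);
        }
    }
    rendered
}

fn main() {
    let krate = Item {
        name: "krate".to_string(),
        children: vec![
            Item { name: "module_a".to_string(), children: vec![] },
            Item { name: "module_b".to_string(), children: vec![] },
        ],
    };
    // Stack order: the most recently pushed child is rendered first.
    assert_eq!(render_all(krate), vec!["krate", "module_b", "module_a"]);
}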
@ -1272,10 +1271,10 @@ impl Context {
let tyname = shortty(it).to_static_str();
let desc = if it.is_crate() {
format!("API documentation for the Rust `{}` crate.",
cx.layout.krate)
cx.shared.layout.krate)
} else {
format!("API documentation for the Rust `{}` {} in crate `{}`.",
it.name.as_ref().unwrap(), tyname, cx.layout.krate)
it.name.as_ref().unwrap(), tyname, cx.shared.layout.krate)
};
let keywords = make_item_keywords(it);
let page = layout::Page {
@ -1293,82 +1292,75 @@ impl Context {
// write syscall all the time.
let mut writer = BufWriter::new(w);
if !cx.render_redirect_pages {
layout::render(&mut writer, &cx.layout, &page,
layout::render(&mut writer, &cx.shared.layout, &page,
&Sidebar{ cx: cx, item: it },
&Item{ cx: cx, item: it })?;
} else {
let mut url = repeat("../").take(cx.current.len())
.collect::<String>();
match cache().paths.get(&it.def_id) {
Some(&(ref names, _)) => {
for name in &names[..names.len() - 1] {
url.push_str(name);
url.push_str("/");
}
url.push_str(&item_path(it));
layout::redirect(&mut writer, &url)?;
if let Some(&(ref names, _)) = cache().paths.get(&it.def_id) {
for name in &names[..names.len() - 1] {
url.push_str(name);
url.push_str("/");
}
None => {}
url.push_str(&item_path(it));
layout::redirect(&mut writer, &url)?;
}
}
writer.flush()
}
// Private modules may survive the strip-private pass if they
// contain impls for public types. These modules can also
// Stripped modules survive the rustdoc passes (i.e. `strip-private`)
// if they contain impls for public types. These modules can also
// contain items such as publicly reexported structures.
//
// External crates will provide links to these structures, so
// these modules are recursed into, but not rendered normally (a
// flag on the context).
// these modules are recursed into, but not rendered normally
// (a flag on the context).
if !self.render_redirect_pages {
self.render_redirect_pages = self.ignore_private_item(&item);
self.render_redirect_pages = self.maybe_ignore_item(&item);
}
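        // For illustration (an assumed example, not from the patch itself; it
        // mirrors the `reexp_stripped` aux test added later in this commit):
        // a module such as
        //
        //     mod private {            // stripped by `strip-private`
        //         pub struct Hidden;
        //     }
        //     pub use private::Hidden; // publicly reexported, so docs must link to it
        //
        // is still recursed into so the reexport can resolve, but only redirect
        // pages are emitted for its items instead of normal documentation pages.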
match item.inner {
if item.is_mod() {
// modules are special because they add a namespace. We also need to
// recurse into the items of the module as well.
clean::ModuleItem(..) => {
let name = item.name.as_ref().unwrap().to_string();
let mut item = Some(item);
self.recurse(name, |this| {
let item = item.take().unwrap();
let joint_dst = this.dst.join("index.html");
let dst = try_err!(File::create(&joint_dst), &joint_dst);
try_err!(render(dst, this, &item, false), &joint_dst);
let m = match item.inner {
clean::ModuleItem(m) => m,
_ => unreachable!()
};
// render sidebar-items.js used throughout this module
{
let items = this.build_sidebar_items(&m);
let js_dst = this.dst.join("sidebar-items.js");
let mut js_out = BufWriter::new(try_err!(File::create(&js_dst), &js_dst));
try_err!(write!(&mut js_out, "initSidebarItems({});",
as_json(&items)), &js_dst);
}
for item in m.items {
f(this,item);
}
Ok(())
})
}
// Things which don't have names (like impls) don't get special
// pages dedicated to them.
_ if item.name.is_some() => {
let joint_dst = self.dst.join(&item_path(&item));
let name = item.name.as_ref().unwrap().to_string();
let mut item = Some(item);
self.recurse(name, |this| {
let item = item.take().unwrap();
let joint_dst = this.dst.join("index.html");
let dst = try_err!(File::create(&joint_dst), &joint_dst);
try_err!(render(dst, self, &item, true), &joint_dst);
Ok(())
}
try_err!(render(dst, this, &item, false), &joint_dst);
_ => Ok(())
let m = match item.inner {
clean::StrippedItem(box clean::ModuleItem(m)) |
clean::ModuleItem(m) => m,
_ => unreachable!()
};
// render sidebar-items.js used throughout this module
{
let items = this.build_sidebar_items(&m);
let js_dst = this.dst.join("sidebar-items.js");
let mut js_out = BufWriter::new(try_err!(File::create(&js_dst), &js_dst));
try_err!(write!(&mut js_out, "initSidebarItems({});",
as_json(&items)), &js_dst);
}
for item in m.items {
f(this,item);
}
Ok(())
})
} else if item.name.is_some() {
let joint_dst = self.dst.join(&item_path(&item));
let dst = try_err!(File::create(&joint_dst), &joint_dst);
try_err!(render(dst, self, &item, true), &joint_dst);
Ok(())
} else {
Ok(())
}
}
@ -1376,7 +1368,7 @@ impl Context {
// BTreeMap instead of HashMap to get a sorted output
let mut map = BTreeMap::new();
for item in &m.items {
if self.ignore_private_item(item) { continue }
if self.maybe_ignore_item(item) { continue }
let short = shortty(item).to_static_str();
let myname = match item.name {
@ -1394,27 +1386,18 @@ impl Context {
return map;
}
fn ignore_private_item(&self, it: &clean::Item) -> bool {
fn maybe_ignore_item(&self, it: &clean::Item) -> bool {
match it.inner {
clean::StrippedItem(..) => true,
clean::ModuleItem(ref m) => {
(m.items.is_empty() &&
it.doc_value().is_none() &&
it.visibility != Some(hir::Public)) ||
(self.passes.contains("strip-private") && it.visibility != Some(hir::Public))
}
clean::PrimitiveItem(..) => it.visibility != Some(hir::Public),
it.doc_value().is_none() && m.items.is_empty() && it.visibility != Some(hir::Public)
},
_ => false,
}
}
}
impl<'a> Item<'a> {
fn ismodule(&self) -> bool {
match self.item.inner {
clean::ModuleItem(..) => true, _ => false
}
}
/// Generate a url appropriate for an `href` attribute back to the source of
/// this item.
///
@ -1457,10 +1440,11 @@ impl<'a> Item<'a> {
// know the span, so we plow forward and generate a proper url. The url
// has anchors for the line numbers that we're linking to.
} else if self.item.def_id.is_local() {
self.cx.local_sources.get(&PathBuf::from(&self.item.source.filename)).map(|path| {
let path = PathBuf::from(&self.item.source.filename);
self.cx.shared.local_sources.get(&path).map(|path| {
format!("{root}src/{krate}/{path}#{href}",
root = self.cx.root_path,
krate = self.cx.layout.krate,
krate = self.cx.shared.layout.krate,
path = path,
href = href)
})
@ -1495,6 +1479,7 @@ impl<'a> Item<'a> {
impl<'a> fmt::Display for Item<'a> {
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
debug_assert!(!self.item.is_stripped());
// Write the breadcrumb trail header for the top
write!(fmt, "\n<h1 class='fqn'><span class='in-band'>")?;
match self.item.inner {
@ -1516,7 +1501,7 @@ impl<'a> fmt::Display for Item<'a> {
};
if !is_primitive {
let cur = &self.cx.current;
let amt = if self.ismodule() { cur.len() - 1 } else { cur.len() };
let amt = if self.item.is_mod() { cur.len() - 1 } else { cur.len() };
for (i, component) in cur.iter().enumerate().take(amt) {
write!(fmt, "<a href='{}index.html'>{}</a>::<wbr>",
repeat("../").take(cur.len() - i - 1)
@ -1542,7 +1527,7 @@ impl<'a> fmt::Display for Item<'a> {
// [src] link in the downstream documentation will actually come back to
// this page, and this link will be auto-clicked. The `id` attribute is
// used to find the link to auto-click.
if self.cx.include_sources && !is_primitive {
if self.cx.shared.include_sources && !is_primitive {
if let Some(l) = self.href() {
write!(fmt, "<a id='src-{}' class='srclink' \
href='{}' title='{}'>[src]</a>",
@ -1575,15 +1560,12 @@ impl<'a> fmt::Display for Item<'a> {
}
fn item_path(item: &clean::Item) -> String {
match item.inner {
clean::ModuleItem(..) => {
format!("{}/index.html", item.name.as_ref().unwrap())
}
_ => {
format!("{}.{}.html",
shortty(item).to_static_str(),
*item.name.as_ref().unwrap())
}
if item.is_mod() {
format!("{}/index.html", item.name.as_ref().unwrap())
} else {
format!("{}.{}.html",
shortty(item).to_static_str(),
*item.name.as_ref().unwrap())
}
}
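// For example (illustrative): a module `foo` becomes `foo/index.html`, while a
// struct `Bar` becomes `struct.Bar.html`.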
@ -1626,7 +1608,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
document(w, cx, item)?;
let mut indices = (0..items.len()).filter(|i| {
!cx.ignore_private_item(&items[*i])
!cx.maybe_ignore_item(&items[*i])
}).collect::<Vec<usize>>();
// the order of item types in the listing
@ -1670,6 +1652,9 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context,
let mut curty = None;
for &idx in &indices {
let myitem = &items[idx];
if myitem.is_stripped() {
continue;
}
let myty = Some(shortty(myitem));
if curty == Some(ItemType::ExternCrate) && myty == Some(ItemType::Import) {
@ -1774,7 +1759,7 @@ fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Optio
format!("Deprecated{}{}", since, Markdown(&reason))
} else if stab.level == stability::Unstable {
let unstable_extra = if show_reason {
match (!stab.feature.is_empty(), &cx.issue_tracker_base_url, stab.issue) {
match (!stab.feature.is_empty(), &cx.shared.issue_tracker_base_url, stab.issue) {
(true, &Some(ref tracker_url), Some(issue_no)) if issue_no > 0 =>
format!(" (<code>{}</code> <a href=\"{}{}\">#{}</a>)",
Escape(&stab.feature), tracker_url, issue_no, issue_no),
@ -2146,6 +2131,7 @@ fn render_assoc_item(w: &mut fmt::Formatter,
where_clause = WhereClause(g))
}
match item.inner {
clean::StrippedItem(..) => Ok(()),
clean::TyMethodItem(ref m) => {
method(w, item, m.unsafety, hir::Constness::NotConst,
m.abi, &m.generics, &m.self_, &m.decl, link)
@ -2182,8 +2168,7 @@ fn item_struct(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
document(w, cx, it)?;
let mut fields = s.fields.iter().filter(|f| {
match f.inner {
clean::StructFieldItem(clean::HiddenStructField) => false,
clean::StructFieldItem(clean::TypedStructField(..)) => true,
clean::StructFieldItem(..) => true,
_ => false,
}
}).peekable();
@ -2273,7 +2258,7 @@ fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
if let clean::VariantItem( Variant { kind: StructVariant(ref s) } ) = variant.inner {
let fields = s.fields.iter().filter(|f| {
match f.inner {
clean::StructFieldItem(clean::TypedStructField(..)) => true,
clean::StructFieldItem(..) => true,
_ => false,
}
});
@ -2332,24 +2317,17 @@ fn render_struct(w: &mut fmt::Formatter, it: &clean::Item,
match ty {
doctree::Plain => {
write!(w, " {{\n{}", tab)?;
let mut fields_stripped = false;
for field in fields {
match field.inner {
clean::StructFieldItem(clean::HiddenStructField) => {
fields_stripped = true;
}
clean::StructFieldItem(clean::TypedStructField(ref ty)) => {
write!(w, " {}{}: {},\n{}",
VisSpace(field.visibility),
field.name.as_ref().unwrap(),
*ty,
tab)?;
}
_ => unreachable!(),
};
if let clean::StructFieldItem(ref ty) = field.inner {
write!(w, " {}{}: {},\n{}",
VisSpace(field.visibility),
field.name.as_ref().unwrap(),
*ty,
tab)?;
}
}
if fields_stripped {
if it.has_stripped_fields().unwrap() {
write!(w, " // some fields omitted\n{}", tab)?;
}
write!(w, "}}")?;
@ -2361,10 +2339,10 @@ fn render_struct(w: &mut fmt::Formatter, it: &clean::Item,
write!(w, ", ")?;
}
match field.inner {
clean::StructFieldItem(clean::HiddenStructField) => {
clean::StrippedItem(box clean::StructFieldItem(..)) => {
write!(w, "_")?
}
clean::StructFieldItem(clean::TypedStructField(ref ty)) => {
clean::StructFieldItem(ref ty) => {
write!(w, "{}{}", VisSpace(field.visibility), *ty)?
}
_ => unreachable!()
@ -2540,6 +2518,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi
assoc_type(w, item, bounds, default.as_ref(), link)?;
write!(w, "</code></h4>\n")?;
}
clean::StrippedItem(..) => return Ok(()),
_ => panic!("can't make docs for trait item with name {:?}", item.name)
}
@ -2661,16 +2640,16 @@ impl<'a> fmt::Display for Source<'a> {
write!(fmt, "<span id=\"{0}\">{0:1$}</span>\n", i, cols)?;
}
write!(fmt, "</pre>")?;
write!(fmt, "{}", highlight::highlight(s, None, None))?;
write!(fmt, "{}", highlight::render_with_highlighting(s, None, None))?;
Ok(())
}
}
fn item_macro(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
t: &clean::Macro) -> fmt::Result {
w.write_str(&highlight::highlight(&t.source,
Some("macro"),
None))?;
w.write_str(&highlight::render_with_highlighting(&t.source,
Some("macro"),
None))?;
render_stability_since_raw(w, it.stable_since(), None)?;
document(w, cx, it)
}

View File

@ -21,6 +21,7 @@ use clean::Item;
use plugins;
use fold;
use fold::DocFolder;
use fold::FoldItem::Strip;
/// Strip items marked `#[doc(hidden)]`
pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult {
@ -39,18 +40,12 @@ pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult {
// use a dedicated hidden item for given item type if any
match i.inner {
clean::StructFieldItem(..) => {
return Some(clean::Item {
inner: clean::StructFieldItem(clean::HiddenStructField),
..i
});
}
_ => {
return None;
clean::StructFieldItem(..) | clean::ModuleItem(..) => {
return Strip(i).fold()
}
_ => return None,
}
}
self.fold_item_recur(i)
}
}
@ -125,12 +120,14 @@ struct Stripper<'a> {
impl<'a> fold::DocFolder for Stripper<'a> {
fn fold_item(&mut self, i: Item) -> Option<Item> {
match i.inner {
clean::StrippedItem(..) => return Some(i),
// These items can all get re-exported
clean::TypedefItem(..) | clean::StaticItem(..) |
clean::StructItem(..) | clean::EnumItem(..) |
clean::TraitItem(..) | clean::FunctionItem(..) |
clean::VariantItem(..) | clean::MethodItem(..) |
clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) => {
clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) |
clean::ConstantItem(..) => {
if i.def_id.is_local() {
if !self.access_levels.is_exported(i.def_id) {
return None;
@ -138,23 +135,17 @@ impl<'a> fold::DocFolder for Stripper<'a> {
}
}
clean::ConstantItem(..) => {
if i.def_id.is_local() && !self.access_levels.is_exported(i.def_id) {
return None;
}
}
clean::StructFieldItem(..) => {
if i.visibility != Some(hir::Public) {
return Some(clean::Item {
inner: clean::StructFieldItem(clean::HiddenStructField),
..i
})
return Strip(i).fold();
}
}
// handled below
clean::ModuleItem(..) => {}
clean::ModuleItem(..) => {
if i.def_id.is_local() && i.visibility != Some(hir::Public) {
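                    // Recurse first so the module's contents are stripped by
                    // their own rules, then wrap the module itself in a
                    // StrippedItem.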
return Strip(self.fold_item_recur(i).unwrap()).fold()
}
}
// trait impls for private items should be stripped
clean::ImplItem(clean::Impl{
@ -165,7 +156,7 @@ impl<'a> fold::DocFolder for Stripper<'a> {
}
}
// handled in the `strip-priv-imports` pass
clean::ExternCrateItem(..) | clean::ImportItem(_) => {}
clean::ExternCrateItem(..) | clean::ImportItem(..) => {}
clean::DefaultImplItem(..) | clean::ImplItem(..) => {}
@ -187,7 +178,6 @@ impl<'a> fold::DocFolder for Stripper<'a> {
// implementations of traits are always public.
clean::ImplItem(ref imp) if imp.trait_.is_some() => true,
// Struct variant fields have inherited visibility
clean::VariantItem(clean::Variant {
kind: clean::StructVariant(..)
@ -202,19 +192,17 @@ impl<'a> fold::DocFolder for Stripper<'a> {
self.fold_item_recur(i)
};
i.and_then(|i| {
match i.inner {
// emptied modules/impls have no need to exist
clean::ModuleItem(ref m)
if m.items.is_empty() &&
i.doc_value().is_none() => None,
clean::ImplItem(ref i) if i.items.is_empty() => None,
_ => {
self.retained.insert(i.def_id);
Some(i)
}
i.and_then(|i| { match i.inner {
// emptied modules/impls have no need to exist
clean::ModuleItem(ref m)
if m.items.is_empty() &&
i.doc_value().is_none() => None,
clean::ImplItem(ref i) if i.items.is_empty() => None,
_ => {
self.retained.insert(i.def_id);
Some(i)
}
})
}})
}
}

View File

@ -431,7 +431,7 @@ impl Collector {
// compiler failures are test failures
should_panic: testing::ShouldPanic::No,
},
testfn: testing::DynTestFn(Box::new(move|| {
testfn: testing::DynTestFn(box move|| {
runtest(&test,
&cratename,
cfgs,
@ -442,7 +442,7 @@ impl Collector {
as_test_harness,
compile_fail,
&opts);
}))
})
});
}

View File

@ -85,12 +85,46 @@ pub fn init() {
#[cfg(not(target_os = "nacl"))]
unsafe fn reset_sigpipe() {
assert!(libc::signal(libc::SIGPIPE, libc::SIG_IGN) != !0);
assert!(signal(libc::SIGPIPE, libc::SIG_IGN) != !0);
}
#[cfg(target_os = "nacl")]
unsafe fn reset_sigpipe() {}
}
// Currently the minimum supported Android version of the standard library is
// API level 18 (android-18). Back in those days [1] the `signal` function was
// just an inline wrapper around `bsd_signal`, but starting in API level
// android-20 the `signal` symbol was introduced [2]. Finally, in android-21
// the API `bsd_signal` was removed [3].
//
// Basically this means that if we want to be binary compatible with multiple
// Android releases (oldest being 18 and newest being 21) then we need to check
// for both symbols and not actually link against either.
//
// Note that if we're not on android we just link against the `signal` symbol
// itself.
//
// [1]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms
// /android-18/arch-arm/usr/include/signal.h
// [2]: https://chromium.googlesource.com/android_tools/+/fbd420/ndk_experimental
// /platforms/android-20/arch-arm
// /usr/include/signal.h
// [3]: https://chromium.googlesource.com/android_tools/+/20ee6d/ndk/platforms
// /android-21/arch-arm/usr/include/signal.h
#[cfg(target_os = "android")]
unsafe fn signal(signum: libc::c_int,
handler: libc::sighandler_t) -> libc::sighandler_t {
weak!(fn signal(libc::c_int, libc::sighandler_t) -> libc::sighandler_t);
weak!(fn bsd_signal(libc::c_int, libc::sighandler_t) -> libc::sighandler_t);
let f = signal.get().or_else(|| bsd_signal.get());
let f = f.expect("neither `signal` nor `bsd_signal` symbols found");
f(signum, handler)
}
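// In short: on android-18/19 only `bsd_signal` resolves, on android-20 both
// symbols exist, and from android-21 only `signal` does; the runtime lookup
// above covers all three cases without linking against either symbol.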
#[cfg(not(target_os = "android"))]
pub use libc::signal;
pub fn decode_error_kind(errno: i32) -> ErrorKind {
match errno as libc::c_int {
libc::ECONNREFUSED => ErrorKind::ConnectionRefused,

View File

@ -393,7 +393,7 @@ impl Command {
t!(cvt(libc::sigemptyset(&mut set)));
t!(cvt(libc::pthread_sigmask(libc::SIG_SETMASK, &set,
ptr::null_mut())));
let ret = libc::signal(libc::SIGPIPE, libc::SIG_DFL);
let ret = super::signal(libc::SIGPIPE, libc::SIG_DFL);
if ret == libc::SIG_ERR {
return io::Error::last_os_error()
}

View File

@ -75,11 +75,5 @@ unsafe fn fetch(name: &str) -> usize {
Ok(cstr) => cstr,
Err(..) => return 0,
};
let lib = libc::dlopen(0 as *const _, libc::RTLD_LAZY);
if lib.is_null() {
return 0
}
let ret = libc::dlsym(lib, name.as_ptr()) as usize;
libc::dlclose(lib);
return ret
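    // RTLD_DEFAULT searches the global symbol scope of the already-loaded
    // process image, so an explicit dlopen(NULL, RTLD_LAZY)/dlclose pair is
    // unnecessary.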
libc::dlsym(libc::RTLD_DEFAULT, name.as_ptr()) as usize
}

View File

@ -52,6 +52,7 @@ impl Duration {
    /// If the number of nanoseconds is one billion (the number of nanoseconds
    /// in a second) or more, the excess carries over into the seconds provided.
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
pub fn new(secs: u64, nanos: u32) -> Duration {
let secs = secs + (nanos / NANOS_PER_SEC) as u64;
let nanos = nanos % NANOS_PER_SEC;
@ -60,12 +61,14 @@ impl Duration {
/// Creates a new `Duration` from the specified number of seconds.
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
pub fn from_secs(secs: u64) -> Duration {
Duration { secs: secs, nanos: 0 }
}
/// Creates a new `Duration` from the specified number of milliseconds.
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
pub fn from_millis(millis: u64) -> Duration {
let secs = millis / MILLIS_PER_SEC;
let nanos = ((millis % MILLIS_PER_SEC) as u32) * NANOS_PER_MILLI;
@ -77,6 +80,7 @@ impl Duration {
/// The extra precision represented by this duration is ignored (e.g. extra
/// nanoseconds are not represented in the returned value).
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
pub fn as_secs(&self) -> u64 { self.secs }
/// Returns the nanosecond precision represented by this duration.
@ -85,6 +89,7 @@ impl Duration {
/// represented by nanoseconds. The returned number always represents a
/// fractional portion of a second (e.g. it is less than one billion).
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
pub fn subsec_nanos(&self) -> u32 { self.nanos }
}
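A minimal sketch of the carry-over behaviour documented on `Duration::new` above, using only the public API shown in this hunk (illustrative, not part of the diff):

use std::time::Duration;

fn main() {
    // 1_500_000_000 ns is one full second plus 500_000_000 ns, so the extra
    // second carries over into the seconds field.
    let d = Duration::new(0, 1_500_000_000);
    assert_eq!(d.as_secs(), 1);
    assert_eq!(d.subsec_nanos(), 500_000_000);
}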

View File

@ -20,7 +20,7 @@
// FIXME spec the JSON output properly.
use codemap::{Span, MultiSpan, CodeMap};
use codemap::{self, Span, MultiSpan, CodeMap};
use diagnostics::registry::Registry;
use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion};
use errors::emitter::Emitter;
@ -197,8 +197,8 @@ impl DiagnosticSpan {
fn from_render_span(rsp: &RenderSpan, je: &JsonEmitter) -> Vec<DiagnosticSpan> {
match *rsp {
// FIXME(#30701) handle Suggestion properly
RenderSpan::FullSpan(ref msp) |
// FIXME(#30701) handle Suggestion properly
RenderSpan::Suggestion(CodeSuggestion { ref msp, .. }) => {
DiagnosticSpan::from_multispan(msp, je)
}
@ -207,13 +207,13 @@ impl DiagnosticSpan {
let end = je.cm.lookup_char_pos(span.hi);
DiagnosticSpan {
file_name: end.file.name.clone(),
byte_start: span.lo.0,
byte_start: span.hi.0,
byte_end: span.hi.0,
line_start: 0,
line_start: end.line,
line_end: end.line,
column_start: 0,
column_start: end.col.0 + 1,
column_end: end.col.0 + 1,
text: DiagnosticSpanLine::from_span(span, je),
text: DiagnosticSpanLine::from_span_end(span, je),
}
}).collect()
}
@ -237,25 +237,70 @@ impl DiagnosticSpan {
}
}
impl DiagnosticSpanLine {
fn from_span(span: &Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
let lines = match je.cm.span_to_lines(*span) {
macro_rules! get_lines_for_span {
($span: ident, $je: ident) => {
match $je.cm.span_to_lines(*$span) {
Ok(lines) => lines,
Err(_) => {
debug!("unprintable span");
return Vec::new();
}
};
}
}
}
impl DiagnosticSpanLine {
fn line_from_filemap(fm: &codemap::FileMap,
index: usize,
h_start: usize,
h_end: usize)
-> DiagnosticSpanLine {
DiagnosticSpanLine {
text: fm.get_line(index).unwrap().to_owned(),
highlight_start: h_start,
highlight_end: h_end,
}
}
/// Create a list of DiagnosticSpanLines from span - each line with any part
/// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
/// `span` within the line.
fn from_span(span: &Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
let lines = get_lines_for_span!(span, je);
let mut result = Vec::new();
let fm = &*lines.file;
for line in &lines.lines {
result.push(DiagnosticSpanLine {
text: fm.get_line(line.line_index).unwrap().to_owned(),
highlight_start: line.start_col.0 + 1,
highlight_end: line.end_col.0 + 1,
});
result.push(DiagnosticSpanLine::line_from_filemap(fm,
line.line_index,
line.start_col.0 + 1,
line.end_col.0 + 1));
}
result
}
/// Create a list of DiagnosticSpanLines from span - the result covers all
/// of `span`, but the highlight is zero-length and at the end of `span`.
fn from_span_end(span: &Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
let lines = get_lines_for_span!(span, je);
let mut result = Vec::new();
let fm = &*lines.file;
for (i, line) in lines.lines.iter().enumerate() {
// Invariant - CodeMap::span_to_lines will not return extra context
// lines - the last line returned is the last line of `span`.
let highlight = if i == lines.lines.len() - 1 {
(line.end_col.0 + 1, line.end_col.0 + 1)
} else {
(0, 0)
};
result.push(DiagnosticSpanLine::line_from_filemap(fm,
line.line_index,
highlight.0,
highlight.1));
}
result
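        // For illustration (assumed column values, not from the patch): for a
        // span that ends at 0-based column 14, `from_span` highlights the
        // span's own columns on each line (1-based, i.e. start_col + 1 ..
        // end_col + 1), whereas `from_span_end` emits a zero-length highlight
        // (15, 15) on the last line and (0, 0) on every earlier line.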

View File

@ -6124,7 +6124,7 @@ impl<'a> Parser<'a> {
// Allow a leading :: because the paths are absolute either way.
// This occurs with "use $crate::..." in macros.
self.eat(&token::ModSep);
let is_global = self.eat(&token::ModSep);
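        // e.g. `use ::foo::bar;` or `use ::{foo, bar};`: the leading `::` now
        // marks the resulting `ast::Path` as global instead of being silently
        // discarded.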
if self.check(&token::OpenDelim(token::Brace)) {
// use {foo,bar}
@ -6135,7 +6135,7 @@ impl<'a> Parser<'a> {
|p| p.parse_path_list_item())?;
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
global: is_global,
segments: Vec::new()
};
return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents))));
@ -6164,7 +6164,7 @@ impl<'a> Parser<'a> {
)?;
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
global: is_global,
segments: path.into_iter().map(|identifier| {
ast::PathSegment {
identifier: identifier,
@ -6180,7 +6180,7 @@ impl<'a> Parser<'a> {
self.bump();
let path = ast::Path {
span: mk_sp(lo, self.span.hi),
global: false,
global: is_global,
segments: path.into_iter().map(|identifier| {
ast::PathSegment {
identifier: identifier,
@ -6203,7 +6203,7 @@ impl<'a> Parser<'a> {
let mut rename_to = path[path.len() - 1];
let path = ast::Path {
span: mk_sp(lo, self.last_span.hi),
global: false,
global: is_global,
segments: path.into_iter().map(|identifier| {
ast::PathSegment {
identifier: identifier,

View File

@ -514,7 +514,7 @@ macro_rules! declare_special_idents_and_keywords {(
// If the special idents get renumbered, remember to modify these two as appropriate
pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM);
const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM);
const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM);
pub const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM);
const SELF_TYPE_KEYWORD_NAME: ast::Name = ast::Name(SELF_TYPE_KEYWORD_NAME_NUM);
pub const SELF_KEYWORD_NAME_NUM: u32 = 1;

View File

@ -9,6 +9,7 @@ S 2016-03-18 235d774
freebsd-x86_64 390b9a9f60f3d0d6a52c04d939a0355f572d03b3
S 2016-02-17 4d3eebf
dragonfly-x86_64 765bb5820ad406e966ec0ac51c8070b656459b02
linux-i386 5f194aa7628c0703f0fd48adc4ec7f3cc64b98c7
linux-x86_64 d29b7607d13d64078b6324aec82926fb493f59ba
macos-i386 4c8e42dd649e247f3576bf9dfa273327b4907f9c
@ -16,6 +17,7 @@ S 2016-02-17 4d3eebf
winnt-i386 0c336d794a65f8e285c121866c7d59aa2dd0d1e1
winnt-x86_64 27e75b1bf99770b3564bcebd7f3230be01135a92
openbsd-x86_64 ac957c6b84de2bd67f01df085d9ea515f96e22f3
freebsd-i386 4e2af0b34eb335e173aebff543be693724a956c2
freebsd-x86_64 f38991fbb81c1cd8d0bbda396f98f13a55b42804
S 2015-12-18 3391630

View File

@ -0,0 +1,21 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use private::Quz;
pub use hidden::Bar;
mod private {
pub struct Quz;
}
#[doc(hidden)]
pub mod hidden {
pub struct Bar;
}

View File

@ -0,0 +1,23 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn f() -> String { //~ ERROR E0269
//~^ HELP detailed explanation
0u8;
"bla".to_string(); //~ HELP consider removing this semicolon
}
fn g() -> String { //~ ERROR E0269
//~^ HELP detailed explanation
"this won't work".to_string();
"removeme".to_string(); //~ HELP consider removing this semicolon
}
fn main() {}

View File

@ -0,0 +1,22 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(const_indexing)]
#![deny(const_err)]
pub const A: i8 = -std::i8::MIN; //~ ERROR attempted to negate with overflow
pub const B: u8 = 200u8 + 200u8; //~ ERROR attempted to add with overflow
pub const C: u8 = 200u8 * 4; //~ ERROR attempted to multiply with overflow
pub const D: u8 = 42u8 - (42u8 + 1); //~ ERROR attempted to subtract with overflow
pub const E: u8 = [5u8][1]; //~ ERROR index out of bounds
fn main() {
let _e = [6u8][1];
}

View File

@ -10,15 +10,11 @@
#![feature(rustc_attrs)]
#![allow(exceeding_bitshifts)]
#![deny(const_err)]
fn black_box<T>(_: T) {
unimplemented!()
}
const BLA: u8 = 200u8 + 200u8;
//~^ ERROR attempted to add with overflow
#[rustc_no_mir] // FIXME #29769 MIR overflow checking is TBD.
fn main() {
let a = -std::i8::MIN;
@ -30,7 +26,8 @@ fn main() {
//~^ WARN attempted to multiply with overflow
let d = 42u8 - (42u8 + 1);
//~^ WARN attempted to subtract with overflow
let _e = BLA;
let _e = [5u8][1];
//~^ ERROR const index-expr is out of bounds
black_box(a);
black_box(b);
black_box(c);

View File

@ -14,7 +14,7 @@
struct S(i32);
const CONSTANT: S = S(0);
//~^ ERROR: constant evaluation error: call on struct [E0080]
//~^ ERROR: unimplemented constant expression: tuple struct constructors [E0080]
enum E {
V = CONSTANT,

View File

@ -17,12 +17,12 @@ enum Cake {
use Cake::*;
const BOO: (Cake, Cake) = (Marmor, BlackForest);
//~^ ERROR: constant evaluation error: non-constant path in constant expression [E0471]
//~^ ERROR: constant evaluation error: unimplemented constant expression: enum variants [E0471]
const FOO: Cake = BOO.1;
const fn foo() -> Cake {
Marmor //~ ERROR: constant evaluation error: non-constant path in constant expression [E0471]
//~^ ERROR: non-constant path in constant expression
Marmor //~ ERROR: constant evaluation error: unimplemented constant expression: enum variants
//~^ ERROR: unimplemented constant expression: enum variants
}
const WORKS: Cake = Marmor;

View File

@ -17,7 +17,8 @@ impl std::ops::Neg for S {
}
const _MAX: usize = -1;
//~^ ERROR unary negation of unsigned integer
//~^ WARN unary negation of unsigned integer
//~| ERROR unary negation of unsigned integer
//~| HELP use a cast or the `!` operator
fn main() {

View File

@ -9,15 +9,20 @@
// except according to those terms.
#![feature(rustc_attrs)]
#![allow(unused_imports)]
pub type Type = i32;
pub type T = ();
mod foo { pub use super::T; }
mod bar { pub use super::T; }
mod one { use super::Type; }
pub use self::one::*;
pub use foo::*;
pub use bar::*;
mod two { use super::Type; }
pub use self::two::*;
mod baz {
pub type T = ();
mod foo { pub use super::T as S; }
mod bar { pub use super::foo::S as T; }
pub use self::bar::*;
}
#[rustc_error]
fn main() {} //~ ERROR compilation successful

View File

@ -0,0 +1,25 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Example {
example: Box<Fn(i32) -> i32>
}
fn main() {
let demo = Example {
example: Box::new(|x| {
x + 1
})
};
demo.example(1); //~ ERROR no method named `example`
//~^ NOTE use `(demo.example)(...)`
// (demo.example)(1);
}

View File

@ -0,0 +1,20 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Regression test for #32326. We ran out of memory because we
// attempted to expand this case up to the recursion limit, and 2^N is
// too big.
enum Expr { //~ ERROR E0072
Plus(Expr, Expr),
Literal(i64),
}
fn main() { }

View File

@ -15,5 +15,5 @@ enum State { ST_NULL, ST_WHITESPACE }
fn main() {
[State::ST_NULL; (State::ST_WHITESPACE as usize)];
//~^ ERROR expected constant integer for repeat count, but non-constant path
//~^ ERROR expected constant integer for repeat count, but unimplemented constant expression
}

View File

@ -0,0 +1,117 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(core_intrinsics, rustc_attrs)]
#![allow(warnings)]
use std::intrinsics;
#[derive(Copy, Clone)]
struct Foo(i64);
type Bar = &'static Fn();
type Quux = [u8; 100];
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_bool_load(p: &mut bool, v: bool) {
intrinsics::atomic_load(p);
//~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `bool`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_bool_store(p: &mut bool, v: bool) {
intrinsics::atomic_store(p, v);
//~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `bool`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_bool_xchg(p: &mut bool, v: bool) {
intrinsics::atomic_xchg(p, v);
//~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `bool`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_bool_cxchg(p: &mut bool, v: bool) {
intrinsics::atomic_cxchg(p, v, v);
//~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `bool`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Foo_load(p: &mut Foo, v: Foo) {
intrinsics::atomic_load(p);
//~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `Foo`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Foo_store(p: &mut Foo, v: Foo) {
intrinsics::atomic_store(p, v);
//~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `Foo`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Foo_xchg(p: &mut Foo, v: Foo) {
intrinsics::atomic_xchg(p, v);
//~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `Foo`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Foo_cxchg(p: &mut Foo, v: Foo) {
intrinsics::atomic_cxchg(p, v, v);
//~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `Foo`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Bar_load(p: &mut Bar, v: Bar) {
intrinsics::atomic_load(p);
//~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Bar_store(p: &mut Bar, v: Bar) {
intrinsics::atomic_store(p, v);
//~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Bar_xchg(p: &mut Bar, v: Bar) {
intrinsics::atomic_xchg(p, v);
//~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Bar_cxchg(p: &mut Bar, v: Bar) {
intrinsics::atomic_cxchg(p, v, v);
//~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Quux_load(p: &mut Quux, v: Quux) {
intrinsics::atomic_load(p);
//~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `[u8; 100]`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Quux_store(p: &mut Quux, v: Quux) {
intrinsics::atomic_store(p, v);
//~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `[u8; 100]`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Quux_xchg(p: &mut Quux, v: Quux) {
intrinsics::atomic_xchg(p, v);
//~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `[u8; 100]`
}
#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls.
unsafe fn test_Quux_cxchg(p: &mut Quux, v: Quux) {
intrinsics::atomic_cxchg(p, v, v);
//~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `[u8; 100]`
}
fn main() {}

View File

@ -20,11 +20,11 @@ struct Baz { q: Option<Foo> }
struct Foo { q: Option<Baz> }
//~^ ERROR recursive type `Foo` has infinite size
//~| type `Foo` is embedded within `std::option::Option<Foo>`...
//~| ...which in turn is embedded within `std::option::Option<Foo>`...
//~| ...which in turn is embedded within `Baz`...
//~| ...which in turn is embedded within `std::option::Option<Baz>`...
//~| ...which in turn is embedded within `Foo`, completing the cycle.
//~| NOTE type `Foo` is embedded within `std::option::Option<Foo>`...
//~| NOTE ...which in turn is embedded within `std::option::Option<Foo>`...
//~| NOTE ...which in turn is embedded within `Baz`...
//~| NOTE ...which in turn is embedded within `std::option::Option<Baz>`...
//~| NOTE ...which in turn is embedded within `Foo`, completing the cycle.
impl Foo { fn bar(&self) {} }

View File

@ -0,0 +1,32 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test for a potential corner case in the current impl where you have an
// auto trait (Magic1) that depends on a normal trait (Magic2) which
// in turn depends on the auto trait (Magic1). This was incorrectly
// being considered coinductive, but because of the normal trait
// interfering, it should not be.
#![feature(optin_builtin_traits)]
trait Magic1: Magic2 { }
impl Magic1 for .. {}
trait Magic2 { }
impl<T: Magic1> Magic2 for T { }
fn is_magic1<T: Magic1>() { }
#[derive(Debug)]
struct NoClone;
fn main() {
is_magic1::<NoClone>(); //~ ERROR E0275
}

View File

@ -0,0 +1,22 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(rustc_attrs)]
mod foo {
pub fn g() {
use ::super::main; //~ WARN expected identifier, found keyword `super`
//~^ WARN this was previously accepted by the compiler but is being phased out
main();
}
}
#[rustc_error]
fn main() { foo::g(); } //~ ERROR compilation successful

View File

@ -31,9 +31,20 @@ mod rustrt2 {
}
}
pub fn main() {
unsafe {
rustrt1::rust_get_test_int();
rustrt2::rust_get_test_int();
mod rustrt3 {
// Different type, but same ABI (on all supported platforms).
// Ensures that we don't ICE or trigger LLVM asserts when
// importing the same symbol under different types.
// See https://github.com/rust-lang/rust/issues/32740.
extern {
pub fn rust_get_test_int() -> *const u8;
}
}
pub fn main() {
unsafe {
let x = rustrt1::rust_get_test_int();
assert_eq!(x, rustrt2::rust_get_test_int());
assert_eq!(x as *const _, rustrt3::rust_get_test_int());
}
}

View File

@ -16,24 +16,16 @@ use std::intrinsics;
#[derive(Copy, Clone)]
struct Wrap(i64);
// These volatile and atomic intrinsics used to cause an ICE
// These volatile intrinsics used to cause an ICE
unsafe fn test_bool(p: &mut bool, v: bool) {
intrinsics::volatile_load(p);
intrinsics::volatile_store(p, v);
intrinsics::atomic_load(p);
intrinsics::atomic_cxchg(p, v, v);
intrinsics::atomic_store(p, v);
intrinsics::atomic_xchg(p, v);
}
unsafe fn test_immediate_fca(p: &mut Wrap, v: Wrap) {
intrinsics::volatile_load(p);
intrinsics::volatile_store(p, v);
intrinsics::atomic_load(p);
intrinsics::atomic_cxchg(p, v, v);
intrinsics::atomic_store(p, v);
intrinsics::atomic_xchg(p, v);
}
fn main() {}

View File

@ -0,0 +1,48 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:reexp_stripped.rs
// build-aux-docs
// ignore-cross-compile
extern crate reexp_stripped;
pub trait Foo {}
// @has redirect/index.html
// @has - '//code' 'pub use reexp_stripped::Bar'
// @has - '//code/a' 'Bar'
// @has reexp_stripped/hidden/struct.Bar.html
// @has - '//p/a' '../../reexp_stripped/struct.Bar.html'
// @has 'reexp_stripped/struct.Bar.html'
#[doc(no_inline)]
pub use reexp_stripped::Bar;
impl Foo for Bar {}
// @has redirect/index.html
// @has - '//code' 'pub use reexp_stripped::Quz'
// @has - '//code/a' 'Quz'
// @has reexp_stripped/private/struct.Quz.html
// @has - '//p/a' '../../reexp_stripped/struct.Quz.html'
// @has 'reexp_stripped/struct.Quz.html'
#[doc(no_inline)]
pub use reexp_stripped::Quz;
impl Foo for Quz {}
mod private_no_inline {
pub struct Qux;
impl ::Foo for Qux {}
}
// @has redirect/index.html
// @has - '//code' 'pub use private_no_inline::Qux'
// @!has - '//code/a' 'Qux'
#[doc(no_inline)]
pub use private_no_inline::Qux;

View File

@ -0,0 +1,44 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// @has structfields/struct.Foo.html
pub struct Foo {
// @has - //pre "pub a: ()"
pub a: (),
// @has - //pre "// some fields omitted"
// @!has - //pre "b: ()"
b: (),
// @!has - //pre "c: usize"
#[doc(hidden)]
c: usize,
// @has - //pre "pub d: usize"
pub d: usize,
}
// @has structfields/struct.Bar.html
pub struct Bar {
// @has - //pre "pub a: ()"
pub a: (),
// @!has - //pre "// some fields omitted"
}
// @has structfields/enum.Qux.html
pub enum Qux {
Quz {
// @has - //pre "a: ()"
a: (),
// @!has - //pre "b: ()"
#[doc(hidden)]
b: (),
// @has - //pre "c: usize"
c: usize,
// @has - //pre "// some fields omitted"
},
}