Remove some allocations in predicate evaluation

Esteban Küber 2020-04-20 10:42:12 -07:00
parent 6c921c2c5f
commit e7e3001557
2 changed files with 10 additions and 9 deletions

@@ -149,21 +149,20 @@ impl Elaborator<'tcx> {
 // Get predicates declared on the trait.
 let predicates = tcx.super_predicates_of(data.def_id());
-let obligations = predicates.predicates.iter().map(|(pred, span)| {
+let obligations = predicates.predicates.into_iter().map(|(pred, span)| {
     predicate_obligation(
         pred.subst_supertrait(tcx, &data.to_poly_trait_ref()),
         Some(*span),
     )
 });
-debug!("super_predicates: data={:?} predicates={:?}", data, &obligations);
+debug!("super_predicates: data={:?}", data);
 // Only keep those bounds that we haven't already seen.
 // This is necessary to prevent infinite recursion in some
 // cases. One common case is when people define
 // `trait Sized: Sized { }` rather than `trait Sized { }`.
 let visited = &mut self.visited;
-let obligations =
-    obligations.filter(|obligation| visited.insert(&obligation.predicate));
+let obligations = obligations.filter(|o| visited.insert(&o.predicate));
 self.stack.extend(obligations);
 }

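The Elaborator hunk above keeps the super-predicate obligations as a lazy iterator that is filtered against the visited set and fed directly into the work stack. Below is a minimal, self-contained sketch of that shape; `Predicate` and `Elaborator` here are hypothetical stand-ins, not the rustc types.

```rust
use std::collections::HashSet;

// Hypothetical stand-in for the rustc predicate type.
#[derive(Clone, Hash, PartialEq, Eq, Debug)]
struct Predicate(String);

// Hypothetical stand-in for the real Elaborator: a work stack plus the
// set of predicates that have already been pushed.
struct Elaborator {
    stack: Vec<Predicate>,
    visited: HashSet<Predicate>,
}

impl Elaborator {
    fn push_super_predicates(&mut self, preds: &[Predicate]) {
        let visited = &mut self.visited;
        // Filter lazily against the visited set and feed the iterator
        // straight into the stack; no intermediate Vec is built.
        let obligations = preds.iter().cloned().filter(|p| visited.insert(p.clone()));
        self.stack.extend(obligations);
    }
}

fn main() {
    let mut e = Elaborator { stack: Vec::new(), visited: HashSet::new() };
    // The duplicate predicate is dropped by the `visited` filter,
    // preventing the infinite-recursion case mentioned in the comments above.
    e.push_super_predicates(&[Predicate("Sized".into()), Predicate("Sized".into())]);
    assert_eq!(e.stack.len(), 1);
}
```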
@@ -281,7 +281,7 @@ impl AutoTraitFinder<'tcx> {
     },
 }));
-let mut computed_preds: FxHashSet<_> = param_env.caller_bounds.iter().cloned().collect();
+let computed_preds = param_env.caller_bounds.iter().cloned();
 let mut user_computed_preds: FxHashSet<_> =
     user_env.caller_bounds.iter().cloned().collect();
@@ -358,9 +358,11 @@ impl AutoTraitFinder<'tcx> {
     _ => panic!("Unexpected error for '{:?}': {:?}", ty, result),
 };
-computed_preds.extend(user_computed_preds.iter().cloned());
-let normalized_preds =
-    elaborate_predicates(tcx, computed_preds.iter().cloned()).map(|o| o.predicate);
+let normalized_preds = elaborate_predicates(
+    tcx,
+    computed_preds.clone().chain(user_computed_preds.iter().cloned()),
+)
+.map(|o| o.predicate);
 new_env =
     ty::ParamEnv::new(tcx.mk_predicates(normalized_preds), param_env.reveal, None);
 }
@@ -738,7 +740,7 @@ impl AutoTraitFinder<'tcx> {
 if p.ty().skip_binder().has_infer_types() {
     if !self.evaluate_nested_obligations(
         ty,
-        v.clone().iter().cloned(),
+        v.into_iter(),
         computed_preds,
         fresh_preds,
         predicates,
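
The AutoTraitFinder hunks drop the `FxHashSet` that `caller_bounds` used to be collected into: `computed_preds` stays a cloneable iterator and is `chain`ed with the user-computed predicates at the `elaborate_predicates` call site, and the last hunk likewise passes `v` by value via `into_iter()` instead of cloning it first. A rough sketch of that iterator-chaining pattern, with `elaborate` and plain strings as hypothetical stand-ins for `elaborate_predicates` and the predicate types:

```rust
// Hypothetical stand-in for `elaborate_predicates(...).map(|o| o.predicate)`:
// it only needs some iterator of predicates, not an owned collection.
fn elaborate<'a>(preds: impl Iterator<Item = &'a str>) -> Vec<String> {
    preds.map(|p| format!("elaborated({p})")).collect()
}

fn main() {
    let caller_bounds = vec!["A: Send", "B: Sync"];
    let user_computed = vec!["C: Unpin"];

    // A slice iterator is cheap to clone, so it can be cloned and chained
    // with the user-computed predicates on demand, instead of collecting
    // into a set and extending that set before each elaboration.
    let computed_preds = caller_bounds.iter().copied();
    let normalized =
        elaborate(computed_preds.clone().chain(user_computed.iter().copied()));
    assert_eq!(normalized.len(), 3);
}
```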