do not use the global solver cache for proof trees

Doing so requires overwriting global cache entries and
generally adds significant complexity to the solver. Proof
trees are also only ever built for root goals, so it feels
easier to wrap `evaluate_canonical_goal` in an ordinary
query if necessary.
commit 7b86c98068
parent 1d8f135b20
Author: lcnr
Date:   2024-07-23 14:21:52 +02:00

10 changed files with 74 additions and 209 deletions
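The core of the change, in isolation: the search graph now consults and populates the global cache only when the proof-tree builder is a no-op, rather than storing proof trees inside cache entries. Below is a minimal, self-contained sketch of that invariant; `ProofTreeBuilder`, `SearchGraph`, and `evaluate_goal` are illustrative stand-ins, not the rustc types.

```rust
use std::collections::HashMap;

// Stand-in for the solver's proof-tree builder: `None` models the no-op
// builder used when nobody asked for a proof tree.
struct ProofTreeBuilder {
    nodes: Option<Vec<String>>,
}

impl ProofTreeBuilder {
    fn is_noop(&self) -> bool {
        self.nodes.is_none()
    }
}

struct SearchGraph {
    global_cache: HashMap<&'static str, bool>,
}

impl SearchGraph {
    fn evaluate_goal(&mut self, goal: &'static str, inspect: &mut ProofTreeBuilder) -> bool {
        // Cache reads are skipped while a proof tree is recorded, so the
        // evaluation below always runs and yields a complete tree.
        if inspect.is_noop() {
            if let Some(&result) = self.global_cache.get(goal) {
                return result;
            }
        }
        let result = true; // stand-in for the actual evaluation
        if let Some(nodes) = &mut inspect.nodes {
            nodes.push(format!("evaluated {goal}"));
        }
        // Cache writes are skipped too, so entries never have to be
        // overwritten later just to attach a proof tree to them.
        if inspect.is_noop() {
            self.global_cache.insert(goal, result);
        }
        result
    }
}

fn main() {
    let mut graph = SearchGraph { global_cache: HashMap::new() };
    let mut noop = ProofTreeBuilder { nodes: None };
    let mut recording = ProofTreeBuilder { nodes: Some(Vec::new()) };
    graph.evaluate_goal("u32: Sized", &mut noop); // computed and cached
    graph.evaluate_goal("u32: Sized", &mut recording); // recomputed, not cached
    assert_eq!(recording.nodes.as_ref().unwrap().len(), 1);
}
```

Because proof trees are only ever built for root goals, skipping the cache this way only recomputes the root query.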

compiler/rustc_middle/src/arena.rs

@@ -61,10 +61,6 @@ macro_rules! arena_types {
             [] dtorck_constraint: rustc_middle::traits::query::DropckConstraint<'tcx>,
             [] candidate_step: rustc_middle::traits::query::CandidateStep<'tcx>,
             [] autoderef_bad_ty: rustc_middle::traits::query::MethodAutoderefBadTy<'tcx>,
-            [] canonical_goal_evaluation:
-                rustc_type_ir::solve::inspect::CanonicalGoalEvaluationStep<
-                    rustc_middle::ty::TyCtxt<'tcx>
-                >,
             [] query_region_constraints: rustc_middle::infer::canonical::QueryRegionConstraints<'tcx>,
             [] type_op_subtype:
                 rustc_middle::infer::canonical::Canonical<'tcx,

compiler/rustc_middle/src/ty/context.rs

@@ -107,8 +107,6 @@ fn mk_predefined_opaques_in_body(
         self.mk_predefined_opaques_in_body(data)
     }
     type DefiningOpaqueTypes = &'tcx ty::List<LocalDefId>;
-    type CanonicalGoalEvaluationStepRef =
-        &'tcx solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>>;
     type CanonicalVars = CanonicalVarInfos<'tcx>;
     fn mk_canonical_var_infos(self, infos: &[ty::CanonicalVarInfo<Self>]) -> Self::CanonicalVars {
         self.mk_canonical_var_infos(infos)
@@ -277,13 +275,6 @@ fn debug_assert_args_compatible(self, def_id: DefId, args: ty::GenericArgsRef<'t
         self.debug_assert_args_compatible(def_id, args);
     }

-    fn intern_canonical_goal_evaluation_step(
-        self,
-        step: solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>>,
-    ) -> &'tcx solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>> {
-        self.arena.alloc(step)
-    }
-
     fn mk_type_list_from_iter<I, T>(self, args: I) -> T::Output
     where
         I: Iterator<Item = T>,

compiler/rustc_next_trait_solver/src/solve/inspect/build.rs

@@ -5,11 +5,10 @@
 //! see the comment on [ProofTreeBuilder].

 use std::marker::PhantomData;
-use std::mem;

 use derive_where::derive_where;
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::{self as ty, search_graph, Interner};
+use rustc_type_ir::{self as ty, Interner};

 use crate::delegate::SolverDelegate;
 use crate::solve::eval_ctxt::canonical;
@@ -94,31 +93,10 @@ fn finalize(self) -> inspect::GoalEvaluation<I> {
     }
 }

-#[derive_where(PartialEq, Eq; I: Interner)]
-pub(in crate::solve) enum WipCanonicalGoalEvaluationKind<I: Interner> {
-    Overflow,
-    CycleInStack,
-    ProvisionalCacheHit,
-    Interned { final_revision: I::CanonicalGoalEvaluationStepRef },
-}
-
-impl<I: Interner> std::fmt::Debug for WipCanonicalGoalEvaluationKind<I> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Self::Overflow => write!(f, "Overflow"),
-            Self::CycleInStack => write!(f, "CycleInStack"),
-            Self::ProvisionalCacheHit => write!(f, "ProvisionalCacheHit"),
-            Self::Interned { final_revision: _ } => {
-                f.debug_struct("Interned").finish_non_exhaustive()
-            }
-        }
-    }
-}
-
 #[derive_where(PartialEq, Eq, Debug; I: Interner)]
 struct WipCanonicalGoalEvaluation<I: Interner> {
     goal: CanonicalInput<I>,
-    kind: Option<WipCanonicalGoalEvaluationKind<I>>,
+    encountered_overflow: bool,
     /// Only used for uncached goals. After we finished evaluating
     /// the goal, this is interned and moved into `kind`.
     final_revision: Option<WipCanonicalGoalEvaluationStep<I>>,
@@ -127,25 +105,17 @@ struct WipCanonicalGoalEvaluation<I: Interner> {
 impl<I: Interner> WipCanonicalGoalEvaluation<I> {
     fn finalize(self) -> inspect::CanonicalGoalEvaluation<I> {
-        // We've already interned the final revision in
-        // `fn finalize_canonical_goal_evaluation`.
-        assert!(self.final_revision.is_none());
-        let kind = match self.kind.unwrap() {
-            WipCanonicalGoalEvaluationKind::Overflow => {
+        inspect::CanonicalGoalEvaluation {
+            goal: self.goal,
+            kind: if self.encountered_overflow {
+                assert!(self.final_revision.is_none());
                 inspect::CanonicalGoalEvaluationKind::Overflow
-            }
-            WipCanonicalGoalEvaluationKind::CycleInStack => {
-                inspect::CanonicalGoalEvaluationKind::CycleInStack
-            }
-            WipCanonicalGoalEvaluationKind::ProvisionalCacheHit => {
-                inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit
-            }
-            WipCanonicalGoalEvaluationKind::Interned { final_revision } => {
+            } else {
+                let final_revision = self.final_revision.unwrap().finalize();
                 inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision }
-            }
-        };
-
-        inspect::CanonicalGoalEvaluation { goal: self.goal, kind, result: self.result.unwrap() }
+            },
+            result: self.result.unwrap(),
+        }
     }
 }
@@ -308,7 +278,7 @@ pub fn new_canonical_goal_evaluation(
     ) -> ProofTreeBuilder<D> {
         self.nested(|| WipCanonicalGoalEvaluation {
             goal,
-            kind: None,
+            encountered_overflow: false,
             final_revision: None,
             result: None,
         })
@@ -329,11 +299,11 @@ pub fn canonical_goal_evaluation(&mut self, canonical_goal_evaluation: ProofTree
         }
     }

-    pub fn canonical_goal_evaluation_kind(&mut self, kind: WipCanonicalGoalEvaluationKind<I>) {
+    pub fn canonical_goal_evaluation_overflow(&mut self) {
         if let Some(this) = self.as_mut() {
             match this {
                 DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => {
-                    assert_eq!(canonical_goal_evaluation.kind.replace(kind), None);
+                    canonical_goal_evaluation.encountered_overflow = true;
                 }
                 _ => unreachable!(),
             };
@@ -547,51 +517,3 @@ pub fn query_result(&mut self, result: QueryResult<I>) {
         }
     }
 }
-
-impl<D, I> search_graph::ProofTreeBuilder<I> for ProofTreeBuilder<D>
-where
-    D: SolverDelegate<Interner = I>,
-    I: Interner,
-{
-    fn try_apply_proof_tree(
-        &mut self,
-        proof_tree: Option<I::CanonicalGoalEvaluationStepRef>,
-    ) -> bool {
-        if !self.is_noop() {
-            if let Some(final_revision) = proof_tree {
-                let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision };
-                self.canonical_goal_evaluation_kind(kind);
-                true
-            } else {
-                false
-            }
-        } else {
-            true
-        }
-    }
-
-    fn on_provisional_cache_hit(&mut self) {
-        self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::ProvisionalCacheHit);
-    }
-
-    fn on_cycle_in_stack(&mut self) {
-        self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::CycleInStack);
-    }
-
-    fn finalize_canonical_goal_evaluation(
-        &mut self,
-        tcx: I,
-    ) -> Option<I::CanonicalGoalEvaluationStepRef> {
-        self.as_mut().map(|this| match this {
-            DebugSolver::CanonicalGoalEvaluation(evaluation) => {
-                let final_revision = mem::take(&mut evaluation.final_revision).unwrap();
-                let final_revision =
-                    tcx.intern_canonical_goal_evaluation_step(final_revision.finalize());
-                let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision };
-                assert_eq!(evaluation.kind.replace(kind), None);
-                final_revision
-            }
-            _ => unreachable!(),
-        })
-    }
-}
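With the `search_graph::ProofTreeBuilder` impl gone, the builder's per-goal state reduces to an `encountered_overflow` flag plus the recorded final revision, and `finalize` becomes a two-way branch. A simplified sketch of that shape, using plain stand-in types instead of the real `Interner`-generic ones:

```rust
#[derive(Debug)]
enum EvaluationKind {
    Overflow,
    Evaluation { final_revision: String },
}

#[derive(Default)]
struct WipEvaluation {
    encountered_overflow: bool,
    final_revision: Option<String>,
}

impl WipEvaluation {
    fn finalize(self) -> EvaluationKind {
        if self.encountered_overflow {
            // Overflow aborts evaluation, so no revision can have been recorded.
            assert!(self.final_revision.is_none());
            EvaluationKind::Overflow
        } else {
            // Cache hits and cycles no longer reach the builder, so a completed
            // evaluation always carries its final revision.
            EvaluationKind::Evaluation { final_revision: self.final_revision.unwrap() }
        }
    }
}
```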

compiler/rustc_next_trait_solver/src/solve/search_graph.rs

@@ -5,7 +5,7 @@
 use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult};
 use rustc_type_ir::Interner;

-use super::inspect::{self, ProofTreeBuilder};
+use super::inspect::ProofTreeBuilder;
 use super::FIXPOINT_STEP_LIMIT;
 use crate::delegate::SolverDelegate;
@@ -25,6 +25,9 @@ impl<D, I> search_graph::Delegate for SearchGraphDelegate<D>
     const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT;

     type ProofTreeBuilder = ProofTreeBuilder<D>;
+    fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool {
+        inspect.is_noop()
+    }

     fn recursion_limit(cx: I) -> usize {
         cx.recursion_limit()
@@ -68,7 +71,7 @@ fn on_stack_overflow(
         inspect: &mut ProofTreeBuilder<D>,
         input: CanonicalInput<I>,
     ) -> QueryResult<I> {
-        inspect.canonical_goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::Overflow);
+        inspect.canonical_goal_evaluation_overflow();
         response_no_constraints(cx, input, Certainty::overflow(true))
     }
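The delegate side of the new contract is small: `Delegate` no longer requires its `ProofTreeBuilder` to implement a search-graph trait, only to answer whether it is a no-op. A minimal sketch (the trait shape follows the diff; `ExampleDelegate` and its builder type are invented for illustration):

```rust
trait Delegate {
    type ProofTreeBuilder;
    fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool;
}

struct ExampleDelegate;

impl Delegate for ExampleDelegate {
    // `None` models the no-op builder used when nothing inspects the proof tree.
    type ProofTreeBuilder = Option<Vec<String>>;

    fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool {
        inspect.is_none()
    }
}
```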

compiler/rustc_trait_selection/src/solve/inspect/analyse.rs

@@ -332,13 +332,9 @@ fn candidates_recur(
     pub fn candidates(&'a self) -> Vec<InspectCandidate<'a, 'tcx>> {
         let mut candidates = vec![];
-        let last_eval_step = match self.evaluation_kind {
-            inspect::CanonicalGoalEvaluationKind::Overflow
-            | inspect::CanonicalGoalEvaluationKind::CycleInStack
-            | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => {
-                warn!("unexpected root evaluation: {:?}", self.evaluation_kind);
-                return vec![];
-            }
+        let last_eval_step = match &self.evaluation_kind {
+            // An annoying edge case in case the recursion limit is 0.
+            inspect::CanonicalGoalEvaluationKind::Overflow => return vec![],
             inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision } => final_revision,
         };

compiler/rustc_type_ir/src/interner.rs

@@ -11,7 +11,6 @@
 use crate::ir_print::IrPrint;
 use crate::lang_items::TraitSolverLangItem;
 use crate::relate::Relate;
-use crate::solve::inspect::CanonicalGoalEvaluationStep;
 use crate::solve::{
     CanonicalInput, ExternalConstraintsData, PredefinedOpaquesData, QueryResult, SolverMode,
 };
@@ -65,11 +64,6 @@ fn mk_predefined_opaques_in_body(
         + Eq
         + TypeVisitable<Self>
         + SliceLike<Item = Self::LocalDefId>;
-    type CanonicalGoalEvaluationStepRef: Copy
-        + Debug
-        + Hash
-        + Eq
-        + Deref<Target = CanonicalGoalEvaluationStep<Self>>;

     type CanonicalVars: Copy
         + Debug
@@ -177,11 +171,6 @@ fn mk_args_from_iter<I, T>(self, args: I) -> T::Output
     fn debug_assert_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs);

-    fn intern_canonical_goal_evaluation_step(
-        self,
-        step: CanonicalGoalEvaluationStep<Self>,
-    ) -> Self::CanonicalGoalEvaluationStepRef;
-
     fn mk_type_list_from_iter<I, T>(self, args: I) -> T::Output
     where
         I: Iterator<Item = T>,
@@ -390,7 +379,6 @@ fn collect_and_apply<I, F>(mut iter: I, f: F) -> Result<R, E>
 }

 impl<I: Interner> search_graph::Cx for I {
-    type ProofTree = Option<I::CanonicalGoalEvaluationStepRef>;
     type Input = CanonicalInput<I>;
     type Result = QueryResult<I>;

compiler/rustc_type_ir/src/search_graph/global_cache.rs

@@ -4,14 +4,8 @@
 use super::{AvailableDepth, Cx, StackDepth, StackEntry};
 use crate::data_structures::{HashMap, HashSet};

-#[derive_where(Debug, Clone, Copy; X: Cx)]
-struct QueryData<X: Cx> {
-    result: X::Result,
-    proof_tree: X::ProofTree,
-}
-
 struct Success<X: Cx> {
-    data: X::Tracked<QueryData<X>>,
+    result: X::Tracked<X::Result>,
     additional_depth: usize,
 }
@@ -28,13 +22,12 @@ struct CacheEntry<X: Cx> {
     /// See the doc comment of `StackEntry::cycle_participants` for more
     /// details.
     nested_goals: HashSet<X::Input>,
-    with_overflow: HashMap<usize, X::Tracked<QueryData<X>>>,
+    with_overflow: HashMap<usize, X::Tracked<X::Result>>,
 }

 #[derive_where(Debug; X: Cx)]
 pub(super) struct CacheData<'a, X: Cx> {
     pub(super) result: X::Result,
-    pub(super) proof_tree: X::ProofTree,
     pub(super) additional_depth: usize,
     pub(super) encountered_overflow: bool,
     // FIXME: This is currently unused, but impacts the design
@@ -55,20 +48,19 @@ pub(super) fn insert(
         input: X::Input,
         result: X::Result,
-        proof_tree: X::ProofTree,
         dep_node: X::DepNodeIndex,
         additional_depth: usize,
         encountered_overflow: bool,
         nested_goals: &HashSet<X::Input>,
     ) {
-        let data = cx.mk_tracked(QueryData { result, proof_tree }, dep_node);
+        let result = cx.mk_tracked(result, dep_node);
         let entry = self.map.entry(input).or_default();
         entry.nested_goals.extend(nested_goals);
         if encountered_overflow {
-            entry.with_overflow.insert(additional_depth, data);
+            entry.with_overflow.insert(additional_depth, result);
         } else {
-            entry.success = Some(Success { data, additional_depth });
+            entry.success = Some(Success { result, additional_depth });
         }
     }
@@ -90,10 +82,8 @@ pub(super) fn get<'a>(
         if let Some(ref success) = entry.success {
             if available_depth.cache_entry_is_applicable(success.additional_depth) {
-                let QueryData { result, proof_tree } = cx.get_tracked(&success.data);
                 return Some(CacheData {
-                    result,
-                    proof_tree,
+                    result: cx.get_tracked(&success.result),
                     additional_depth: success.additional_depth,
                     encountered_overflow: false,
                     nested_goals: &entry.nested_goals,
@@ -101,15 +91,11 @@ pub(super) fn get<'a>(
             }
         }

-        entry.with_overflow.get(&available_depth.0).map(|e| {
-            let QueryData { result, proof_tree } = cx.get_tracked(e);
-            CacheData {
-                result,
-                proof_tree,
-                additional_depth: available_depth.0,
-                encountered_overflow: true,
-                nested_goals: &entry.nested_goals,
-            }
+        entry.with_overflow.get(&available_depth.0).map(|e| CacheData {
+            result: cx.get_tracked(e),
+            additional_depth: available_depth.0,
+            encountered_overflow: true,
+            nested_goals: &entry.nested_goals,
         })
     }
 }
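The net effect on the cache layout: an entry now tracks only the query result plus depth bookkeeping, where it previously tracked a `QueryData` pair of result and proof tree. A toy, non-generic mirror of the slimmed-down entry (field names follow the diff, everything else is simplified):

```rust
use std::collections::HashMap;

struct Success<R> {
    result: R,
    additional_depth: usize,
}

struct CacheEntry<R> {
    success: Option<Success<R>>,
    // Results that hit the recursion limit, keyed by the depth that was
    // still available when they were computed.
    with_overflow: HashMap<usize, R>,
}

impl<R: Copy> CacheEntry<R> {
    fn get(&self, available_depth: usize) -> Option<(R, bool)> {
        // Prefer the overflow-free result if it fits within the available depth.
        if let Some(success) = &self.success {
            if success.additional_depth <= available_depth {
                return Some((success.result, false));
            }
        }
        // Otherwise fall back to a result computed under the same depth limit.
        self.with_overflow.get(&available_depth).map(|&r| (r, true))
    }
}
```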

compiler/rustc_type_ir/src/search_graph/mod.rs

@@ -22,7 +22,6 @@
 /// about `Input` and `Result` as they are implementation details
 /// of the search graph.
 pub trait Cx: Copy {
-    type ProofTree: Debug + Copy;
     type Input: Debug + Eq + Hash + Copy;
     type Result: Debug + Eq + Hash + Copy;
@@ -43,17 +42,12 @@ fn with_global_cache<R>(
     ) -> R;
 }

-pub trait ProofTreeBuilder<X: Cx> {
-    fn try_apply_proof_tree(&mut self, proof_tree: X::ProofTree) -> bool;
-    fn on_provisional_cache_hit(&mut self);
-    fn on_cycle_in_stack(&mut self);
-    fn finalize_canonical_goal_evaluation(&mut self, cx: X) -> X::ProofTree;
-}
-
 pub trait Delegate {
     type Cx: Cx;
     const FIXPOINT_STEP_LIMIT: usize;
-    type ProofTreeBuilder: ProofTreeBuilder<Self::Cx>;
+    type ProofTreeBuilder;
+    fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool;

     fn recursion_limit(cx: Self::Cx) -> usize;
@@ -362,8 +356,10 @@ pub fn with_new_goal(
             return D::on_stack_overflow(cx, inspect, input);
         };

-        if let Some(result) = self.lookup_global_cache(cx, input, available_depth, inspect) {
-            return result;
+        if D::inspect_is_noop(inspect) {
+            if let Some(result) = self.lookup_global_cache(cx, input, available_depth) {
+                return result;
+            }
         }

         // Check whether the goal is in the provisional cache.
@@ -386,7 +382,6 @@ pub fn with_new_goal(
             // We have a nested goal which is already in the provisional cache, use
             // its result. We do not provide any usage kind as that should have been
             // already set correctly while computing the cache entry.
-            inspect.on_provisional_cache_hit();
             Self::tag_cycle_participants(&mut self.stack, None, entry.head);
             return entry.result;
         } else if let Some(stack_depth) = cache_entry.stack_depth {
@@ -397,8 +392,6 @@ pub fn with_new_goal(
             //
             // Finally we can return either the provisional response or the initial response
             // in case we're in the first fixpoint iteration for this goal.
-            inspect.on_cycle_in_stack();
-
             let is_coinductive_cycle = Self::stack_coinductive_from(cx, &self.stack, stack_depth);
             let cycle_kind =
                 if is_coinductive_cycle { CycleKind::Coinductive } else { CycleKind::Inductive };
@@ -453,8 +446,6 @@ pub fn with_new_goal(
             (current_entry, result)
         });

-        let proof_tree = inspect.finalize_canonical_goal_evaluation(cx);
-
         self.update_parent_goal(final_entry.reached_depth, final_entry.encountered_overflow);

         // We're now done with this goal. In case this goal is involved in a larger cycle
@@ -471,28 +462,10 @@ pub fn with_new_goal(
                 entry.with_inductive_stack = Some(DetachedEntry { head, result });
             }
         } else {
-            // When encountering a cycle, both inductive and coinductive, we only
-            // move the root into the global cache. We also store all other cycle
-            // participants involved.
-            //
-            // We must not use the global cache entry of a root goal if a cycle
-            // participant is on the stack. This is necessary to prevent unstable
-            // results. See the comment of `StackEntry::nested_goals` for
-            // more details.
             self.provisional_cache.remove(&input);
-            let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len();
-            cx.with_global_cache(self.mode, |cache| {
-                cache.insert(
-                    cx,
-                    input,
-                    result,
-                    proof_tree,
-                    dep_node,
-                    additional_depth,
-                    final_entry.encountered_overflow,
-                    &final_entry.nested_goals,
-                )
-            })
+            if D::inspect_is_noop(inspect) {
+                self.insert_global_cache(cx, input, final_entry, result, dep_node)
+            }
         }

         self.check_invariants();
@@ -508,25 +481,15 @@ fn lookup_global_cache(
         cx: X,
         input: X::Input,
         available_depth: AvailableDepth,
-        inspect: &mut D::ProofTreeBuilder,
     ) -> Option<X::Result> {
         cx.with_global_cache(self.mode, |cache| {
             let CacheData {
                 result,
-                proof_tree,
                 additional_depth,
                 encountered_overflow,
                 nested_goals: _, // FIXME: consider nested goals here.
             } = cache.get(cx, input, &self.stack, available_depth)?;

-            // If we're building a proof tree and the current cache entry does not
-            // contain a proof tree, we do not use the entry but instead recompute
-            // the goal. We simply overwrite the existing entry once we're done,
-            // caching the proof tree.
-            if !inspect.try_apply_proof_tree(proof_tree) {
-                return None;
-            }
-
             // Update the reached depth of the current goal to make sure
             // its state is the same regardless of whether we've used the
             // global cache or not.
@@ -537,6 +500,36 @@ fn lookup_global_cache(
             Some(result)
         })
     }
+
+    /// When encountering a cycle, both inductive and coinductive, we only
+    /// move the root into the global cache. We also store all other cycle
+    /// participants involved.
+    ///
+    /// We must not use the global cache entry of a root goal if a cycle
+    /// participant is on the stack. This is necessary to prevent unstable
+    /// results. See the comment of `StackEntry::nested_goals` for
+    /// more details.
+    fn insert_global_cache(
+        &mut self,
+        cx: X,
+        input: X::Input,
+        final_entry: StackEntry<X>,
+        result: X::Result,
+        dep_node: X::DepNodeIndex,
+    ) {
+        let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len();
+        cx.with_global_cache(self.mode, |cache| {
+            cache.insert(
+                cx,
+                input,
+                result,
+                dep_node,
+                additional_depth,
+                final_entry.encountered_overflow,
+                &final_entry.nested_goals,
+            )
+        })
+    }
 }

 enum StepResult<X: Cx> {
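The doc comment moved onto `insert_global_cache` states the rule that makes root-only caching sound: a cached cycle root must not be used while one of its recorded cycle participants is still on the stack. An illustrative sketch of that applicability check, reduced to plain strings (not the rustc API):

```rust
use std::collections::HashSet;

// A cached cycle root records the other cycle participants (`nested_goals`)
// it was computed with. Reusing its entry while any of them is still on the
// stack could yield unstable results.
fn cache_entry_is_applicable(
    nested_goals: &HashSet<&'static str>,
    stack: &[&'static str],
) -> bool {
    stack.iter().all(|goal| !nested_goals.contains(goal))
}

fn main() {
    let nested: HashSet<_> = ["B", "C"].into_iter().collect();
    // The entry for cycle root `A` was computed together with `B` and `C`.
    assert!(!cache_entry_is_applicable(&nested, &["B"])); // `B` on the stack: skip
    assert!(cache_entry_is_applicable(&nested, &["D"])); // unrelated stack: usable
}
```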

compiler/rustc_type_ir/src/solve/inspect.rs

@@ -69,9 +69,7 @@ pub struct CanonicalGoalEvaluation<I: Interner> {
 #[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)]
 pub enum CanonicalGoalEvaluationKind<I: Interner> {
     Overflow,
-    CycleInStack,
-    ProvisionalCacheHit,
-    Evaluation { final_revision: I::CanonicalGoalEvaluationStepRef },
+    Evaluation { final_revision: CanonicalGoalEvaluationStep<I> },
 }

 #[derive_where(PartialEq, Eq, Hash, Debug; I: Interner)]
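Since steps are no longer interned, `final_revision` is now held by value: it never has to be a cheap `Copy` reference that the global cache can hand out. A minimal before/after model with invented stand-in types:

```rust
struct Step(String); // models `CanonicalGoalEvaluationStep<I>`

// Before: a `Copy` reference to an arena-interned step, so the global cache
// could hold and hand out the proof tree cheaply.
enum InternedKind<'a> {
    Overflow,
    Evaluation { final_revision: &'a Step },
}

// After: the proof tree owns its final revision outright.
enum OwnedKind {
    Overflow,
    Evaluation { final_revision: Step },
}
```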

compiler/rustc_type_ir/src/solve/mod.rs

@@ -340,11 +340,3 @@ fn unify_with(self, other: MaybeCause) -> MaybeCause {
         }
     }
 }
-
-#[derive_where(PartialEq, Eq, Debug; I: Interner)]
-pub struct CacheData<I: Interner> {
-    pub result: QueryResult<I>,
-    pub proof_tree: Option<I::CanonicalGoalEvaluationStepRef>,
-    pub additional_depth: usize,
-    pub encountered_overflow: bool,
-}