expand fuzzing support

This allows us to disable the global cache only some of the time, rather than always, when fuzzing.
This commit is contained in:
lcnr 2024-07-23 13:12:23 +02:00
parent 7b86c98068
commit 51338ca0eb
2 changed files with 56 additions and 9 deletions

View File

@ -1,3 +1,4 @@
use std::convert::Infallible;
use std::marker::PhantomData; use std::marker::PhantomData;
use rustc_type_ir::inherent::*; use rustc_type_ir::inherent::*;
@ -22,6 +23,14 @@ impl<D, I> search_graph::Delegate for SearchGraphDelegate<D>
{ {
type Cx = D::Interner; type Cx = D::Interner;
type ValidationScope = Infallible;
/// The solver-facing delegate never validates the global cache: always
/// return `None` so the cache stays enabled for every goal.
///
/// `ValidationScope` is `Infallible`, so a `Some` value cannot even be
/// constructed here — only the fuzzing/test delegate opts into validation.
fn enter_validation_scope(
_cx: Self::Cx,
_input: <Self::Cx as search_graph::Cx>::Input,
) -> Option<Self::ValidationScope> {
None
}
const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT; const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT;
type ProofTreeBuilder = ProofTreeBuilder<D>; type ProofTreeBuilder = ProofTreeBuilder<D>;

View File

@ -44,6 +44,19 @@ fn with_global_cache<R>(
pub trait Delegate { pub trait Delegate {
type Cx: Cx; type Cx: Cx;
type ValidationScope;
/// Returning `Some` disables the global cache for the current goal.
///
/// The `ValidationScope` is used when fuzzing the search graph to track
/// for which goals the global cache has been disabled. This is necessary
/// as we may otherwise ignore the global cache entry for some goal `G`
/// only to later use it, failing to detect a cycle goal and potentially
/// changing the result.
fn enter_validation_scope(
cx: Self::Cx,
input: <Self::Cx as Cx>::Input,
) -> Option<Self::ValidationScope>;
const FIXPOINT_STEP_LIMIT: usize; const FIXPOINT_STEP_LIMIT: usize;
type ProofTreeBuilder; type ProofTreeBuilder;
@ -356,11 +369,21 @@ pub fn with_new_goal(
return D::on_stack_overflow(cx, inspect, input); return D::on_stack_overflow(cx, inspect, input);
}; };
if D::inspect_is_noop(inspect) { let validate_cache = if !D::inspect_is_noop(inspect) {
if let Some(result) = self.lookup_global_cache(cx, input, available_depth) { None
return result; } else if let Some(scope) = D::enter_validation_scope(cx, input) {
} // When validating the global cache we need to track the goals for which the
} // global cache has been disabled as it may otherwise change the result for
// cyclic goals. We don't care about goals which are not on the current stack
// so it's fine to drop their scope eagerly.
self.lookup_global_cache_untracked(cx, input, available_depth)
.inspect(|expected| debug!(?expected, "validate cache entry"))
.map(|r| (scope, r))
} else if let Some(result) = self.lookup_global_cache(cx, input, available_depth) {
return result;
} else {
None
};
// Check whether the goal is in the provisional cache. // Check whether the goal is in the provisional cache.
// The provisional result may rely on the path to its cycle roots, // The provisional result may rely on the path to its cycle roots,
@ -452,6 +475,7 @@ pub fn with_new_goal(
// do not remove it from the provisional cache and update its provisional result. // do not remove it from the provisional cache and update its provisional result.
// We only add the root of cycles to the global cache. // We only add the root of cycles to the global cache.
if let Some(head) = final_entry.non_root_cycle_participant { if let Some(head) = final_entry.non_root_cycle_participant {
debug_assert!(validate_cache.is_none());
let coinductive_stack = Self::stack_coinductive_from(cx, &self.stack, head); let coinductive_stack = Self::stack_coinductive_from(cx, &self.stack, head);
let entry = self.provisional_cache.get_mut(&input).unwrap(); let entry = self.provisional_cache.get_mut(&input).unwrap();
@ -463,16 +487,29 @@ pub fn with_new_goal(
} }
} else { } else {
self.provisional_cache.remove(&input); self.provisional_cache.remove(&input);
if D::inspect_is_noop(inspect) { if let Some((_scope, expected)) = validate_cache {
// Do not try to move a goal into the cache again if we're testing
// the global cache.
assert_eq!(result, expected, "input={input:?}");
} else if D::inspect_is_noop(inspect) {
self.insert_global_cache(cx, input, final_entry, result, dep_node) self.insert_global_cache(cx, input, final_entry, result, dep_node)
} }
} }
self.check_invariants();
result result
} }
/// Fetch a previously computed result from the global cache *without*
/// applying it: no dep-graph tracking and no update of the parent goal's
/// reached depth, unlike `lookup_global_cache`.
///
/// Used while validating the global cache: the returned result is only
/// compared against the freshly recomputed one (see the
/// `assert_eq!(result, expected, ...)` at the validation site), so the
/// usual cache-hit bookkeeping must not run.
fn lookup_global_cache_untracked(
&self,
cx: X,
input: X::Input,
available_depth: AvailableDepth,
) -> Option<X::Result> {
cx.with_global_cache(self.mode, |cache| {
// NOTE(review): `cache.get` presumably also checks the current stack
// and available depth for applicability — confirm in `global_cache.rs`.
cache.get(cx, input, &self.stack, available_depth).map(|c| c.result)
})
}
/// Try to fetch a previously computed result from the global cache, /// Try to fetch a previously computed result from the global cache,
/// making sure to only do so if it would match the result of reevaluating /// making sure to only do so if it would match the result of reevaluating
/// this goal. /// this goal.
@ -496,7 +533,7 @@ fn lookup_global_cache(
let reached_depth = self.stack.next_index().plus(additional_depth); let reached_depth = self.stack.next_index().plus(additional_depth);
self.update_parent_goal(reached_depth, encountered_overflow); self.update_parent_goal(reached_depth, encountered_overflow);
debug!("global cache hit"); debug!(?additional_depth, "global cache hit");
Some(result) Some(result)
}) })
} }
@ -518,6 +555,7 @@ fn insert_global_cache(
dep_node: X::DepNodeIndex, dep_node: X::DepNodeIndex,
) { ) {
let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len(); let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len();
debug!(?final_entry, ?result, "insert global cache");
cx.with_global_cache(self.mode, |cache| { cx.with_global_cache(self.mode, |cache| {
cache.insert( cache.insert(
cx, cx,