From 41b624d40c969b5fc6eebd6c798cff522a5d3947 Mon Sep 17 00:00:00 2001 From: lcnr Date: Mon, 8 Jan 2024 10:19:15 +0100 Subject: [PATCH 1/7] use doc comments --- compiler/rustc_trait_selection/src/solve/search_graph.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index 2a161c2d956..31a778263f0 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -22,10 +22,10 @@ pub struct StackDepth {} struct StackEntry<'tcx> { input: CanonicalInput<'tcx>, available_depth: Limit, - // The maximum depth reached by this stack entry, only up-to date - // for the top of the stack and lazily updated for the rest. + /// The maximum depth reached by this stack entry, only up-to date + /// for the top of the stack and lazily updated for the rest. reached_depth: StackDepth, - // In case of a cycle, the depth of the root. + /// In case of a cycle, the depth of the root. cycle_root_depth: StackDepth, encountered_overflow: bool, From 61d6b20cd87ac6bacc854482921f0a1ed80351e0 Mon Sep 17 00:00:00 2001 From: lcnr Date: Mon, 8 Jan 2024 10:37:21 +0100 Subject: [PATCH 2/7] do not track root depth of cycles results in slightly cleaner logic while making the following commit easier --- .../src/solve/search_graph.rs | 69 +++++++++---------- 1 file changed, 32 insertions(+), 37 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index 31a778263f0..8a8221f85a4 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -12,6 +12,7 @@ use rustc_middle::ty::TyCtxt; use rustc_session::Limit; use std::collections::hash_map::Entry; +use std::mem; rustc_index::newtype_index! { #[orderable] @@ -25,23 +26,17 @@ struct StackEntry<'tcx> { /// The maximum depth reached by this stack entry, only up-to date /// for the top of the stack and lazily updated for the rest. reached_depth: StackDepth, - /// In case of a cycle, the depth of the root. - cycle_root_depth: StackDepth, + /// Whether this entry is a cycle participant which is not a root. + /// + /// If so, it must not be moved to the global cache. See + /// [SearchGraph::cycle_participants] for more details. + non_root_cycle_participant: bool, encountered_overflow: bool, has_been_used: bool, /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. provisional_result: Option>, - - /// We put only the root goal of a coinductive cycle into the global cache. - /// - /// If we were to use that result when later trying to prove another cycle - /// participant, we can end up with unstable query results. - /// - /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for - /// an example of where this is needed. - cycle_participants: FxHashSet>, } pub(super) struct SearchGraph<'tcx> { @@ -52,6 +47,14 @@ pub(super) struct SearchGraph<'tcx> { /// An element is *deeper* in the stack if its index is *lower*. stack: IndexVec>, stack_entries: FxHashMap, StackDepth>, + /// We put only the root goal of a coinductive cycle into the global cache. + /// + /// If we were to use that result when later trying to prove another cycle + /// participant, we can end up with unstable query results. 
+ /// + /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for + /// an example of where this is needed. + cycle_participants: FxHashSet>, } impl<'tcx> SearchGraph<'tcx> { @@ -61,6 +64,7 @@ pub(super) fn new(tcx: TyCtxt<'tcx>, mode: SolverMode) -> SearchGraph<'tcx> { local_overflow_limit: tcx.recursion_limit().0.checked_ilog2().unwrap_or(0) as usize, stack: Default::default(), stack_entries: Default::default(), + cycle_participants: Default::default(), } } @@ -109,7 +113,13 @@ pub(super) fn global_cache(&self, tcx: TyCtxt<'tcx>) -> &'tcx EvaluationCache<'t } pub(super) fn is_empty(&self) -> bool { - self.stack.is_empty() + if self.stack.is_empty() { + debug_assert!(self.stack_entries.is_empty()); + debug_assert!(self.cycle_participants.is_empty()); + true + } else { + false + } } pub(super) fn current_goal_is_normalizes_to(&self) -> bool { @@ -209,11 +219,10 @@ pub(super) fn with_new_goal( input, available_depth, reached_depth: depth, - cycle_root_depth: depth, + non_root_cycle_participant: false, encountered_overflow: false, has_been_used: false, provisional_result: None, - cycle_participants: Default::default(), }; assert_eq!(self.stack.push(entry), depth); v.insert(depth); @@ -232,26 +241,11 @@ pub(super) fn with_new_goal( let stack_depth = *entry.get(); debug!("encountered cycle with depth {stack_depth:?}"); - // We start by updating the root depth of all cycle participants, and - // add all cycle participants to the root. - let root_depth = self.stack[stack_depth].cycle_root_depth; - let (prev, participants) = self.stack.raw.split_at_mut(stack_depth.as_usize() + 1); - let root = &mut prev[root_depth.as_usize()]; + // We start by tagging all non-root cycle participants. + let participants = self.stack.raw.iter_mut().skip(stack_depth.as_usize() + 1); for entry in participants { - debug_assert!(entry.cycle_root_depth >= root_depth); - entry.cycle_root_depth = root_depth; - root.cycle_participants.insert(entry.input); - // FIXME(@lcnr): I believe that this line is needed as we could - // otherwise access a cache entry for the root of a cycle while - // computing the result for a cycle participant. This can result - // in unstable results due to incompleteness. - // - // However, a test for this would be an even more complex version of - // tests/ui/traits/next-solver/coinduction/incompleteness-unstable-result.rs. - // I did not bother to write such a test and we have no regression test - // for this. It would be good to have such a test :) - #[allow(rustc::potential_query_instability)] - root.cycle_participants.extend(entry.cycle_participants.drain()); + entry.non_root_cycle_participant = true; + self.cycle_participants.insert(entry.input); } // If we're in a cycle, we have to retry proving the cycle head @@ -325,23 +319,24 @@ pub(super) fn with_new_goal( // // It is not possible for any nested goal to depend on something deeper on the // stack, as this would have also updated the depth of the current goal. - if final_entry.cycle_root_depth == self.stack.next_index() { + if !final_entry.non_root_cycle_participant { // When encountering a cycle, both inductive and coinductive, we only // move the root into the global cache. We also store all other cycle // participants involved. // - // We disable the global cache entry of the root goal if a cycle + // We must not use the global cache entry of a root goal if a cycle // participant is on the stack. This is necessary to prevent unstable - // results. 
See the comment of `StackEntry::cycle_participants` for + // results. See the comment of `SearchGraph::cycle_participants` for // more details. let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len(); + let cycle_participants = mem::take(&mut self.cycle_participants); self.global_cache(tcx).insert( tcx, input, proof_tree, reached_depth, final_entry.encountered_overflow, - final_entry.cycle_participants, + cycle_participants, dep_node, result, ) From eb4d7c7adf55a7480a9ef3e3b990d015ef3ea07c Mon Sep 17 00:00:00 2001 From: lcnr Date: Mon, 8 Jan 2024 10:41:55 +0100 Subject: [PATCH 3/7] `all` to `any` don't really know why, but it is a lot easier for me to think about cycles that way. --- .../rustc_trait_selection/src/solve/search_graph.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index 8a8221f85a4..523025cabe2 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -260,13 +260,13 @@ pub(super) fn with_new_goal( } else { // If we don't have a provisional result yet we're in the first iteration, // so we start with no constraints. - let is_coinductive = self.stack.raw[stack_depth.index()..] + let is_inductive = self.stack.raw[stack_depth.index()..] .iter() - .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx)); - if is_coinductive { - Self::response_no_constraints(tcx, input, Certainty::Yes) - } else { + .any(|entry| !entry.input.value.goal.predicate.is_coinductive(tcx)); + if is_inductive { Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) + } else { + Self::response_no_constraints(tcx, input, Certainty::Yes) } }; } From 118453c7e161b81ec4cae5cc91b61b7ec59d3747 Mon Sep 17 00:00:00 2001 From: lcnr Date: Mon, 8 Jan 2024 11:13:50 +0100 Subject: [PATCH 4/7] readd the provisional cache --- .../rustc_middle/src/traits/solve/inspect.rs | 1 + .../src/traits/solve/inspect/format.rs | 3 + compiler/rustc_trait_selection/src/lib.rs | 1 + .../src/solve/inspect/analyse.rs | 3 +- .../src/solve/inspect/build.rs | 5 + .../src/solve/search_graph.rs | 215 +++++++++++++----- 6 files changed, 166 insertions(+), 62 deletions(-) diff --git a/compiler/rustc_middle/src/traits/solve/inspect.rs b/compiler/rustc_middle/src/traits/solve/inspect.rs index 77d112d0afc..e94ad4aa539 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect.rs @@ -73,6 +73,7 @@ pub struct CanonicalGoalEvaluation<'tcx> { pub enum CanonicalGoalEvaluationKind<'tcx> { Overflow, CycleInStack, + ProvisionalCacheHit, Evaluation { revisions: &'tcx [GoalEvaluationStep<'tcx>] }, } impl Debug for GoalEvaluation<'_> { diff --git a/compiler/rustc_middle/src/traits/solve/inspect/format.rs b/compiler/rustc_middle/src/traits/solve/inspect/format.rs index 4e2207ed523..54db8dbd336 100644 --- a/compiler/rustc_middle/src/traits/solve/inspect/format.rs +++ b/compiler/rustc_middle/src/traits/solve/inspect/format.rs @@ -77,6 +77,9 @@ pub(super) fn format_canonical_goal_evaluation( CanonicalGoalEvaluationKind::CycleInStack => { writeln!(self.f, "CYCLE IN STACK: {:?}", eval.result) } + CanonicalGoalEvaluationKind::ProvisionalCacheHit => { + writeln!(self.f, "PROVISIONAL CACHE HIT: {:?}", eval.result) + } CanonicalGoalEvaluationKind::Evaluation { revisions } => { for (n, step) in revisions.iter().enumerate() { writeln!(self.f, "REVISION 
{n}")?; diff --git a/compiler/rustc_trait_selection/src/lib.rs b/compiler/rustc_trait_selection/src/lib.rs index de2577cca49..552c28c0586 100644 --- a/compiler/rustc_trait_selection/src/lib.rs +++ b/compiler/rustc_trait_selection/src/lib.rs @@ -19,6 +19,7 @@ #![feature(control_flow_enum)] #![feature(extract_if)] #![feature(let_chains)] +#![feature(option_take_if)] #![feature(if_let_guard)] #![feature(never_type)] #![feature(type_alias_impl_trait)] diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs index 6db53d6ddc4..f33d0f397ce 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs @@ -171,7 +171,8 @@ pub fn candidates(&'a self) -> Vec> { let mut candidates = vec![]; let last_eval_step = match self.evaluation.evaluation.kind { inspect::CanonicalGoalEvaluationKind::Overflow - | inspect::CanonicalGoalEvaluationKind::CycleInStack => { + | inspect::CanonicalGoalEvaluationKind::CycleInStack + | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => { warn!("unexpected root evaluation: {:?}", self.evaluation); return vec![]; } diff --git a/compiler/rustc_trait_selection/src/solve/inspect/build.rs b/compiler/rustc_trait_selection/src/solve/inspect/build.rs index d8caef5b03f..b587a93b24c 100644 --- a/compiler/rustc_trait_selection/src/solve/inspect/build.rs +++ b/compiler/rustc_trait_selection/src/solve/inspect/build.rs @@ -118,6 +118,7 @@ pub(in crate::solve) enum WipGoalEvaluationKind<'tcx> { pub(in crate::solve) enum WipCanonicalGoalEvaluationKind<'tcx> { Overflow, CycleInStack, + ProvisionalCacheHit, Interned { revisions: &'tcx [inspect::GoalEvaluationStep<'tcx>] }, } @@ -126,6 +127,7 @@ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Overflow => write!(f, "Overflow"), Self::CycleInStack => write!(f, "CycleInStack"), + Self::ProvisionalCacheHit => write!(f, "ProvisionalCacheHit"), Self::Interned { revisions: _ } => f.debug_struct("Interned").finish_non_exhaustive(), } } @@ -151,6 +153,9 @@ fn finalize(self) -> inspect::CanonicalGoalEvaluation<'tcx> { WipCanonicalGoalEvaluationKind::CycleInStack => { inspect::CanonicalGoalEvaluationKind::CycleInStack } + WipCanonicalGoalEvaluationKind::ProvisionalCacheHit => { + inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit + } WipCanonicalGoalEvaluationKind::Interned { revisions } => { inspect::CanonicalGoalEvaluationKind::Evaluation { revisions } } diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index 523025cabe2..ead4ab5723d 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -11,7 +11,6 @@ use rustc_middle::ty; use rustc_middle::ty::TyCtxt; use rustc_session::Limit; -use std::collections::hash_map::Entry; use std::mem; rustc_index::newtype_index! { @@ -30,7 +29,7 @@ struct StackEntry<'tcx> { /// /// If so, it must not be moved to the global cache. See /// [SearchGraph::cycle_participants] for more details. - non_root_cycle_participant: bool, + non_root_cycle_participant: Option, encountered_overflow: bool, has_been_used: bool, @@ -39,6 +38,34 @@ struct StackEntry<'tcx> { provisional_result: Option>, } +struct DetachedEntry<'tcx> { + /// The head of the smallest non-trivial cycle involving this entry. 
+ /// + /// Given the following rules, when proving `A` the head for + /// the provisional entry of `C` would be `B`. + /// + /// A :- B + /// B :- C + /// C :- A + B + C + head: StackDepth, + result: QueryResult<'tcx>, +} + +#[derive(Default)] +struct ProvisionalCacheEntry<'tcx> { + stack_depth: Option, + with_inductive_stack: Option>, + with_coinductive_stack: Option>, +} + +impl<'tcx> ProvisionalCacheEntry<'tcx> { + fn is_empty(&self) -> bool { + self.stack_depth.is_none() + && self.with_inductive_stack.is_none() + && self.with_coinductive_stack.is_none() + } +} + pub(super) struct SearchGraph<'tcx> { mode: SolverMode, local_overflow_limit: usize, @@ -46,7 +73,7 @@ pub(super) struct SearchGraph<'tcx> { /// /// An element is *deeper* in the stack if its index is *lower*. stack: IndexVec>, - stack_entries: FxHashMap, StackDepth>, + provisional_cache: FxHashMap, ProvisionalCacheEntry<'tcx>>, /// We put only the root goal of a coinductive cycle into the global cache. /// /// If we were to use that result when later trying to prove another cycle @@ -63,7 +90,7 @@ pub(super) fn new(tcx: TyCtxt<'tcx>, mode: SolverMode) -> SearchGraph<'tcx> { mode, local_overflow_limit: tcx.recursion_limit().0.checked_ilog2().unwrap_or(0) as usize, stack: Default::default(), - stack_entries: Default::default(), + provisional_cache: Default::default(), cycle_participants: Default::default(), } } @@ -93,7 +120,6 @@ fn on_cache_hit(&mut self, additional_depth: usize, encountered_overflow: bool) /// would cause us to not track overflow and recursion depth correctly. fn pop_stack(&mut self) -> StackEntry<'tcx> { let elem = self.stack.pop().unwrap(); - assert!(self.stack_entries.remove(&elem.input).is_some()); if let Some(last) = self.stack.raw.last_mut() { last.reached_depth = last.reached_depth.max(elem.reached_depth); last.encountered_overflow |= elem.encountered_overflow; @@ -114,7 +140,7 @@ pub(super) fn global_cache(&self, tcx: TyCtxt<'tcx>) -> &'tcx EvaluationCache<'t pub(super) fn is_empty(&self) -> bool { if self.stack.is_empty() { - debug_assert!(self.stack_entries.is_empty()); + debug_assert!(self.provisional_cache.is_empty()); debug_assert!(self.cycle_participants.is_empty()); true } else { @@ -156,6 +182,40 @@ fn allowed_depth_for_nested( } } + fn stack_coinductive_from( + tcx: TyCtxt<'tcx>, + stack: &IndexVec>, + head: StackDepth, + ) -> bool { + stack.raw[head.index()..] + .iter() + .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx)) + } + + fn tag_cycle_participants( + stack: &mut IndexVec>, + cycle_participants: &mut FxHashSet>, + head: StackDepth, + ) { + stack[head].has_been_used = true; + for entry in &mut stack.raw[head.index() + 1..] { + entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head)); + cycle_participants.insert(entry.input); + } + } + + fn clear_dependent_provisional_results( + provisional_cache: &mut FxHashMap, ProvisionalCacheEntry<'tcx>>, + head: StackDepth, + ) { + #[allow(rustc::potential_query_instability)] + provisional_cache.retain(|_, entry| { + entry.with_coinductive_stack.take_if(|p| p.head == head); + entry.with_inductive_stack.take_if(|p| p.head == head); + !entry.is_empty() + }); + } + /// Probably the most involved method of the whole solver. /// /// Given some goal which is proven via the `prove_goal` closure, this @@ -210,23 +270,36 @@ pub(super) fn with_new_goal( return result; } - // Check whether we're in a cycle. - match self.stack_entries.entry(input) { - // No entry, we push this goal on the stack and try to prove it. 
- Entry::Vacant(v) => { - let depth = self.stack.next_index(); - let entry = StackEntry { - input, - available_depth, - reached_depth: depth, - non_root_cycle_participant: false, - encountered_overflow: false, - has_been_used: false, - provisional_result: None, - }; - assert_eq!(self.stack.push(entry), depth); - v.insert(depth); - } + // Check whether the goal is in the provisional cache. + let cache_entry = self.provisional_cache.entry(input).or_default(); + if let Some(with_coinductive_stack) = &mut cache_entry.with_coinductive_stack + && Self::stack_coinductive_from(tcx, &self.stack, with_coinductive_stack.head) + { + // We have a nested goal which is already in the provisional cache, use + // its result. + inspect + .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); + Self::tag_cycle_participants( + &mut self.stack, + &mut self.cycle_participants, + with_coinductive_stack.head, + ); + return with_coinductive_stack.result; + } else if let Some(with_inductive_stack) = &mut cache_entry.with_inductive_stack + && !Self::stack_coinductive_from(tcx, &self.stack, with_inductive_stack.head) + { + // We have a nested goal which is already in the provisional cache, use + // its result. + inspect + .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); + Self::tag_cycle_participants( + &mut self.stack, + &mut self.cycle_participants, + with_inductive_stack.head, + ); + return with_inductive_stack.result; + } else if let Some(stack_depth) = cache_entry.stack_depth { + debug!("encountered cycle with depth {stack_depth:?}"); // We have a nested goal which relies on a goal `root` deeper in the stack. // // We first store that we may have to reprove `root` in case the provisional @@ -236,40 +309,37 @@ pub(super) fn with_new_goal( // // Finally we can return either the provisional response for that goal if we have a // coinductive cycle or an ambiguous result if the cycle is inductive. - Entry::Occupied(entry) => { - inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack); - - let stack_depth = *entry.get(); - debug!("encountered cycle with depth {stack_depth:?}"); - // We start by tagging all non-root cycle participants. - let participants = self.stack.raw.iter_mut().skip(stack_depth.as_usize() + 1); - for entry in participants { - entry.non_root_cycle_participant = true; - self.cycle_participants.insert(entry.input); - } - - // If we're in a cycle, we have to retry proving the cycle head - // until we reach a fixpoint. It is not enough to simply retry the - // `root` goal of this cycle. - // - // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs - // for an example. - self.stack[stack_depth].has_been_used = true; - return if let Some(result) = self.stack[stack_depth].provisional_result { - result + inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack); + Self::tag_cycle_participants( + &mut self.stack, + &mut self.cycle_participants, + stack_depth, + ); + return if let Some(result) = self.stack[stack_depth].provisional_result { + result + } else { + // If we don't have a provisional result yet we're in the first iteration, + // so we start with no constraints. + if Self::stack_coinductive_from(tcx, &self.stack, stack_depth) { + Self::response_no_constraints(tcx, input, Certainty::Yes) } else { - // If we don't have a provisional result yet we're in the first iteration, - // so we start with no constraints. 
- let is_inductive = self.stack.raw[stack_depth.index()..] - .iter() - .any(|entry| !entry.input.value.goal.predicate.is_coinductive(tcx)); - if is_inductive { - Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) - } else { - Self::response_no_constraints(tcx, input, Certainty::Yes) - } - }; - } + Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) + } + }; + } else { + // No entry, we push this goal on the stack and try to prove it. + let depth = self.stack.next_index(); + let entry = StackEntry { + input, + available_depth, + reached_depth: depth, + non_root_cycle_participant: None, + encountered_overflow: false, + has_been_used: false, + provisional_result: None, + }; + assert_eq!(self.stack.push(entry), depth); + cache_entry.stack_depth = Some(depth); } // This is for global caching, so we properly track query dependencies. @@ -285,11 +355,22 @@ pub(super) fn with_new_goal( for _ in 0..self.local_overflow_limit() { let result = prove_goal(self, inspect); - // Check whether the current goal is the root of a cycle and whether - // we have to rerun because its provisional result differed from the - // final result. + // Check whether the current goal is the root of a cycle. + // If so, we have to retry proving the cycle head + // until its result reaches a fixpoint. We need to do so for + // all cycle heads, not only for the root. + // + // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs + // for an example. let stack_entry = self.pop_stack(); debug_assert_eq!(stack_entry.input, input); + if stack_entry.has_been_used { + Self::clear_dependent_provisional_results( + &mut self.provisional_cache, + self.stack.next_index(), + ); + } + if stack_entry.has_been_used && stack_entry.provisional_result.map_or(true, |r| r != result) { @@ -299,7 +380,7 @@ pub(super) fn with_new_goal( provisional_result: Some(result), ..stack_entry }); - assert_eq!(self.stack_entries.insert(input, depth), None); + debug_assert_eq!(self.provisional_cache[&input].stack_depth, Some(depth)); } else { return (stack_entry, result); } @@ -307,6 +388,7 @@ pub(super) fn with_new_goal( debug!("canonical cycle overflow"); let current_entry = self.pop_stack(); + debug_assert!(!current_entry.has_been_used); let result = Self::response_no_constraints(tcx, input, Certainty::OVERFLOW); (current_entry, result) }); @@ -319,7 +401,17 @@ pub(super) fn with_new_goal( // // It is not possible for any nested goal to depend on something deeper on the // stack, as this would have also updated the depth of the current goal. - if !final_entry.non_root_cycle_participant { + if let Some(head) = final_entry.non_root_cycle_participant { + let coinductive_stack = Self::stack_coinductive_from(tcx, &self.stack, head); + + let entry = self.provisional_cache.get_mut(&input).unwrap(); + entry.stack_depth = None; + if coinductive_stack { + entry.with_coinductive_stack = Some(DetachedEntry { head, result }); + } else { + entry.with_inductive_stack = Some(DetachedEntry { head, result }); + } + } else { // When encountering a cycle, both inductive and coinductive, we only // move the root into the global cache. We also store all other cycle // participants involved. @@ -328,6 +420,7 @@ pub(super) fn with_new_goal( // participant is on the stack. This is necessary to prevent unstable // results. See the comment of `SearchGraph::cycle_participants` for // more details. 
+ self.provisional_cache.remove(&input); let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len(); let cycle_participants = mem::take(&mut self.cycle_participants); self.global_cache(tcx).insert( From 88271deac281d2d99b5e3cac0635d02a51bc2a2d Mon Sep 17 00:00:00 2001 From: lcnr Date: Tue, 9 Jan 2024 10:01:50 +0100 Subject: [PATCH 5/7] avoid always rerunning in case of a cycle --- Cargo.lock | 1 + compiler/rustc_trait_selection/Cargo.toml | 1 + .../src/solve/search_graph.rs | 105 ++++++++++++------ 3 files changed, 73 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1948e6c2e5e..09b47c5de7e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4563,6 +4563,7 @@ checksum = "8ba09476327c4b70ccefb6180f046ef588c26a24cf5d269a9feba316eb4f029f" name = "rustc_trait_selection" version = "0.0.0" dependencies = [ + "bitflags 2.4.1", "itertools", "rustc_ast", "rustc_attr", diff --git a/compiler/rustc_trait_selection/Cargo.toml b/compiler/rustc_trait_selection/Cargo.toml index 1883099d345..00ce9fbe758 100644 --- a/compiler/rustc_trait_selection/Cargo.toml +++ b/compiler/rustc_trait_selection/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" [dependencies] # tidy-alphabetical-start +bitflags = "2.4.1" itertools = "0.11.0" rustc_ast = { path = "../rustc_ast" } rustc_attr = { path = "../rustc_attr" } diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index ead4ab5723d..d4f5a992d1f 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -18,6 +18,14 @@ pub struct StackDepth {} } +bitflags::bitflags! { + #[derive(Debug, Clone, Copy, PartialEq, Eq)] + struct HasBeenUsed: u8 { + const INDUCTIVE_CYCLE = 1 << 0; + const COINDUCTIVE_CYCLE = 1 << 1; + } +} + #[derive(Debug)] struct StackEntry<'tcx> { input: CanonicalInput<'tcx>, @@ -32,7 +40,7 @@ struct StackEntry<'tcx> { non_root_cycle_participant: Option, encountered_overflow: bool, - has_been_used: bool, + has_been_used: HasBeenUsed, /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. provisional_result: Option>, @@ -195,9 +203,11 @@ fn stack_coinductive_from( fn tag_cycle_participants( stack: &mut IndexVec>, cycle_participants: &mut FxHashSet>, + usage_kind: HasBeenUsed, head: StackDepth, ) { - stack[head].has_been_used = true; + stack[head].has_been_used |= usage_kind; + debug_assert!(!stack[head].has_been_used.is_empty()); for entry in &mut stack.raw[head.index() + 1..] { entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head)); cycle_participants.insert(entry.input); @@ -272,29 +282,33 @@ pub(super) fn with_new_goal( // Check whether the goal is in the provisional cache. let cache_entry = self.provisional_cache.entry(input).or_default(); - if let Some(with_coinductive_stack) = &mut cache_entry.with_coinductive_stack + if let Some(with_coinductive_stack) = &cache_entry.with_coinductive_stack && Self::stack_coinductive_from(tcx, &self.stack, with_coinductive_stack.head) { // We have a nested goal which is already in the provisional cache, use - // its result. + // its result. We do not provide any usage kind as that should have been + // already set correctly while computing the cache entry. 
inspect .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); Self::tag_cycle_participants( &mut self.stack, &mut self.cycle_participants, + HasBeenUsed::empty(), with_coinductive_stack.head, ); return with_coinductive_stack.result; - } else if let Some(with_inductive_stack) = &mut cache_entry.with_inductive_stack + } else if let Some(with_inductive_stack) = &cache_entry.with_inductive_stack && !Self::stack_coinductive_from(tcx, &self.stack, with_inductive_stack.head) { // We have a nested goal which is already in the provisional cache, use - // its result. + // its result. We do not provide any usage kind as that should have been + // already set correctly while computing the cache entry. inspect .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); Self::tag_cycle_participants( &mut self.stack, &mut self.cycle_participants, + HasBeenUsed::empty(), with_inductive_stack.head, ); return with_inductive_stack.result; @@ -310,21 +324,27 @@ pub(super) fn with_new_goal( // Finally we can return either the provisional response for that goal if we have a // coinductive cycle or an ambiguous result if the cycle is inductive. inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack); + let is_coinductive_cycle = Self::stack_coinductive_from(tcx, &self.stack, stack_depth); + let usage_kind = if is_coinductive_cycle { + HasBeenUsed::COINDUCTIVE_CYCLE + } else { + HasBeenUsed::INDUCTIVE_CYCLE + }; Self::tag_cycle_participants( &mut self.stack, &mut self.cycle_participants, + usage_kind, stack_depth, ); + + // Return the provisional result or, if we're in the first iteration, + // start with no constraints. return if let Some(result) = self.stack[stack_depth].provisional_result { result + } else if is_coinductive_cycle { + Self::response_no_constraints(tcx, input, Certainty::Yes) } else { - // If we don't have a provisional result yet we're in the first iteration, - // so we start with no constraints. - if Self::stack_coinductive_from(tcx, &self.stack, stack_depth) { - Self::response_no_constraints(tcx, input, Certainty::Yes) - } else { - Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) - } + Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) }; } else { // No entry, we push this goal on the stack and try to prove it. @@ -335,7 +355,7 @@ pub(super) fn with_new_goal( reached_depth: depth, non_root_cycle_participant: None, encountered_overflow: false, - has_been_used: false, + has_been_used: HasBeenUsed::empty(), provisional_result: None, }; assert_eq!(self.stack.push(entry), depth); @@ -354,41 +374,58 @@ pub(super) fn with_new_goal( // point we are done. for _ in 0..self.local_overflow_limit() { let result = prove_goal(self, inspect); + let stack_entry = self.pop_stack(); + debug_assert_eq!(stack_entry.input, input); - // Check whether the current goal is the root of a cycle. - // If so, we have to retry proving the cycle head - // until its result reaches a fixpoint. We need to do so for - // all cycle heads, not only for the root. + // If the current goal is not the root of a cycle, we are done. + if stack_entry.has_been_used.is_empty() { + return (stack_entry, result); + } + + // If it is a cycle head, we have to keep trying to prove it until + // we reach a fixpoint. We need to do so for all cycle heads, + // not only for the root. // // See tests/ui/traits/next-solver/cycles/fixpoint-rerun-all-cycle-heads.rs // for an example. 
- let stack_entry = self.pop_stack(); - debug_assert_eq!(stack_entry.input, input); - if stack_entry.has_been_used { - Self::clear_dependent_provisional_results( - &mut self.provisional_cache, - self.stack.next_index(), - ); - } - if stack_entry.has_been_used - && stack_entry.provisional_result.map_or(true, |r| r != result) - { - // If so, update its provisional result and reevaluate it. + // Start by clearing all provisional cache entries which depend on this + // the current goal. + Self::clear_dependent_provisional_results( + &mut self.provisional_cache, + self.stack.next_index(), + ); + + // Check whether we reached a fixpoint, either because the final result + // is equal to the provisional result of the previous iteration, or because + // this was only the root of either coinductive or inductive cycles, and the + // final result is equal to the initial response for that case. + let reached_fixpoint = if let Some(r) = stack_entry.provisional_result { + r == result + } else if stack_entry.has_been_used == HasBeenUsed::COINDUCTIVE_CYCLE { + Self::response_no_constraints(tcx, input, Certainty::Yes) == result + } else if stack_entry.has_been_used == HasBeenUsed::INDUCTIVE_CYCLE { + Self::response_no_constraints(tcx, input, Certainty::OVERFLOW) == result + } else { + false + }; + + if reached_fixpoint { + return (stack_entry, result); + } else { + // Did not reach a fixpoint, update the provisional result and reevaluate. let depth = self.stack.push(StackEntry { - has_been_used: false, + has_been_used: HasBeenUsed::empty(), provisional_result: Some(result), ..stack_entry }); debug_assert_eq!(self.provisional_cache[&input].stack_depth, Some(depth)); - } else { - return (stack_entry, result); } } debug!("canonical cycle overflow"); let current_entry = self.pop_stack(); - debug_assert!(!current_entry.has_been_used); + debug_assert!(current_entry.has_been_used.is_empty()); let result = Self::response_no_constraints(tcx, input, Certainty::OVERFLOW); (current_entry, result) }); From 242633fe52b3bc032d7ecf0eb548c4ae225fb2a1 Mon Sep 17 00:00:00 2001 From: lcnr Date: Tue, 9 Jan 2024 11:19:24 +0100 Subject: [PATCH 6/7] add comments and tests --- .../src/solve/search_graph.rs | 94 ++++++++++++------- .../next-solver/cycles/mixed-cycles-1.rs | 39 ++++++++ .../next-solver/cycles/mixed-cycles-1.stderr | 16 ++++ .../next-solver/cycles/mixed-cycles-2.rs | 32 +++++++ .../next-solver/cycles/mixed-cycles-2.stderr | 16 ++++ .../provisional-cache-impacts-behavior.rs | 70 ++++++++++++++ 6 files changed, 231 insertions(+), 36 deletions(-) create mode 100644 tests/ui/traits/next-solver/cycles/mixed-cycles-1.rs create mode 100644 tests/ui/traits/next-solver/cycles/mixed-cycles-1.stderr create mode 100644 tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs create mode 100644 tests/ui/traits/next-solver/cycles/mixed-cycles-2.stderr create mode 100644 tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index d4f5a992d1f..c33b302c33a 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -19,6 +19,9 @@ pub struct StackDepth {} } bitflags::bitflags! { + /// Whether and how this goal has been used as the root of a + /// cycle. We track the kind of cycle as we're otherwise forced + /// to always rerun at least once. 
#[derive(Debug, Clone, Copy, PartialEq, Eq)] struct HasBeenUsed: u8 { const INDUCTIVE_CYCLE = 1 << 0; @@ -29,23 +32,30 @@ struct HasBeenUsed: u8 { #[derive(Debug)] struct StackEntry<'tcx> { input: CanonicalInput<'tcx>, + available_depth: Limit, + /// The maximum depth reached by this stack entry, only up-to date /// for the top of the stack and lazily updated for the rest. reached_depth: StackDepth, - /// Whether this entry is a cycle participant which is not a root. + + /// Whether this entry is a non-root cycle participant. /// - /// If so, it must not be moved to the global cache. See - /// [SearchGraph::cycle_participants] for more details. + /// We must not move the result of non-root cycle participants to the + /// global cache. See [SearchGraph::cycle_participants] for more details. + /// We store the highest stack depth of a head of a cycle this goal is involved + /// in. This necessary to soundly cache its provisional result. non_root_cycle_participant: Option, encountered_overflow: bool, + has_been_used: HasBeenUsed, /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. provisional_result: Option>, } +/// The provisional result for a goal which is not on the stack. struct DetachedEntry<'tcx> { /// The head of the smallest non-trivial cycle involving this entry. /// @@ -59,6 +69,18 @@ struct DetachedEntry<'tcx> { result: QueryResult<'tcx>, } +/// Stores the stack depth of a currently evaluated goal *and* already +/// computed results for goals which depend on other goals still on the stack. +/// +/// The provisional result may depend on whether the stack above it is inductive +/// or coinductive. Because of this, we store separate provisional results for +/// each case. If an provisional entry is not applicable, it may be the case +/// that we already have provisional result while computing a goal. In this case +/// we prefer the provisional result to potentially avoid fixpoint iterations. +/// See tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs for an example. +/// +/// The provisional cache can theoretically result in changes to the observable behavior, +/// see tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs. #[derive(Default)] struct ProvisionalCacheEntry<'tcx> { stack_depth: Option, @@ -200,6 +222,16 @@ fn stack_coinductive_from( .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx)) } + // When encountering a solver cycle, the result of the current goal + // depends on goals lower on the stack. + // + // We have to therefore be careful when caching goals. Only the final result + // of the cycle root, i.e. the lowest goal on the stack involved in this cycle, + // is moved to the global cache while all others are stored in a provisional cache. + // + // We update both the head of this cycle to rerun its evaluation until + // we reach a fixpoint and all other cycle participants to make sure that + // their result does not get moved to the global cache. fn tag_cycle_participants( stack: &mut IndexVec>, cycle_participants: &mut FxHashSet>, @@ -281,9 +313,20 @@ pub(super) fn with_new_goal( } // Check whether the goal is in the provisional cache. + // The provisional result may rely on the path to its cycle roots, + // so we have to check the path of the current goal matches that of + // the cache entry. 
let cache_entry = self.provisional_cache.entry(input).or_default(); - if let Some(with_coinductive_stack) = &cache_entry.with_coinductive_stack - && Self::stack_coinductive_from(tcx, &self.stack, with_coinductive_stack.head) + if let Some(entry) = cache_entry + .with_coinductive_stack + .as_ref() + .filter(|p| Self::stack_coinductive_from(tcx, &self.stack, p.head)) + .or_else(|| { + cache_entry + .with_inductive_stack + .as_ref() + .filter(|p| !Self::stack_coinductive_from(tcx, &self.stack, p.head)) + }) { // We have a nested goal which is already in the provisional cache, use // its result. We do not provide any usage kind as that should have been @@ -294,35 +337,17 @@ pub(super) fn with_new_goal( &mut self.stack, &mut self.cycle_participants, HasBeenUsed::empty(), - with_coinductive_stack.head, + entry.head, ); - return with_coinductive_stack.result; - } else if let Some(with_inductive_stack) = &cache_entry.with_inductive_stack - && !Self::stack_coinductive_from(tcx, &self.stack, with_inductive_stack.head) - { - // We have a nested goal which is already in the provisional cache, use - // its result. We do not provide any usage kind as that should have been - // already set correctly while computing the cache entry. - inspect - .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit); - Self::tag_cycle_participants( - &mut self.stack, - &mut self.cycle_participants, - HasBeenUsed::empty(), - with_inductive_stack.head, - ); - return with_inductive_stack.result; + return entry.result; } else if let Some(stack_depth) = cache_entry.stack_depth { debug!("encountered cycle with depth {stack_depth:?}"); - // We have a nested goal which relies on a goal `root` deeper in the stack. + // We have a nested goal which directly relies on a goal deeper in the stack. // - // We first store that we may have to reprove `root` in case the provisional - // response is not equal to the final response. We also update the depth of all - // goals which recursively depend on our current goal to depend on `root` - // instead. + // We start by tagging all cycle participants, as that's necessary for caching. // - // Finally we can return either the provisional response for that goal if we have a - // coinductive cycle or an ambiguous result if the cycle is inductive. + // Finally we can return either the provisional response or the initial response + // in case we're in the first fixpoint iteration for this goal. inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack); let is_coinductive_cycle = Self::stack_coinductive_from(tcx, &self.stack, stack_depth); let usage_kind = if is_coinductive_cycle { @@ -410,10 +435,10 @@ pub(super) fn with_new_goal( false }; + // If we did not reach a fixpoint, update the provisional result and reevaluate. if reached_fixpoint { return (stack_entry, result); } else { - // Did not reach a fixpoint, update the provisional result and reevaluate. let depth = self.stack.push(StackEntry { has_been_used: HasBeenUsed::empty(), provisional_result: Some(result), @@ -435,9 +460,6 @@ pub(super) fn with_new_goal( // We're now done with this goal. In case this goal is involved in a larger cycle // do not remove it from the provisional cache and update its provisional result. // We only add the root of cycles to the global cache. - // - // It is not possible for any nested goal to depend on something deeper on the - // stack, as this would have also updated the depth of the current goal. 
if let Some(head) = final_entry.non_root_cycle_participant { let coinductive_stack = Self::stack_coinductive_from(tcx, &self.stack, head); @@ -449,6 +471,9 @@ pub(super) fn with_new_goal( entry.with_inductive_stack = Some(DetachedEntry { head, result }); } } else { + self.provisional_cache.remove(&input); + let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len(); + let cycle_participants = mem::take(&mut self.cycle_participants); // When encountering a cycle, both inductive and coinductive, we only // move the root into the global cache. We also store all other cycle // participants involved. @@ -457,9 +482,6 @@ pub(super) fn with_new_goal( // participant is on the stack. This is necessary to prevent unstable // results. See the comment of `SearchGraph::cycle_participants` for // more details. - self.provisional_cache.remove(&input); - let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len(); - let cycle_participants = mem::take(&mut self.cycle_participants); self.global_cache(tcx).insert( tcx, input, diff --git a/tests/ui/traits/next-solver/cycles/mixed-cycles-1.rs b/tests/ui/traits/next-solver/cycles/mixed-cycles-1.rs new file mode 100644 index 00000000000..424508dd9d9 --- /dev/null +++ b/tests/ui/traits/next-solver/cycles/mixed-cycles-1.rs @@ -0,0 +1,39 @@ +// compile-flags: -Znext-solver +#![feature(rustc_attrs)] + +// A test intended to check how we handle provisional results +// for a goal computed with an inductive and a coinductive stack. +// +// Unfortunately this doesn't really detect whether we've done +// something wrong but instead only showcases that we thought of +// this. +// +// FIXME(-Znext-solver=coinductive): With the new coinduction approach +// the same goal stack can be both inductive and coinductive, depending +// on why we're proving a specific nested goal. Rewrite this test +// at that point instead of relying on `BInd`. + + +#[rustc_coinductive] +trait A {} + +#[rustc_coinductive] +trait B {} +trait BInd {} +impl BInd for T {} + +#[rustc_coinductive] +trait C {} +trait CInd {} +impl CInd for T {} + +impl A for T {} +impl B for T {} +impl C for T {} + +fn impls_a() {} + +fn main() { + impls_a::<()>(); + //~^ ERROR overflow evaluating the requirement `(): A` +} diff --git a/tests/ui/traits/next-solver/cycles/mixed-cycles-1.stderr b/tests/ui/traits/next-solver/cycles/mixed-cycles-1.stderr new file mode 100644 index 00000000000..e828bdeb16b --- /dev/null +++ b/tests/ui/traits/next-solver/cycles/mixed-cycles-1.stderr @@ -0,0 +1,16 @@ +error[E0275]: overflow evaluating the requirement `(): A` + --> $DIR/mixed-cycles-1.rs:37:15 + | +LL | impls_a::<()>(); + | ^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`mixed_cycles_1`) +note: required by a bound in `impls_a` + --> $DIR/mixed-cycles-1.rs:34:15 + | +LL | fn impls_a() {} + | ^ required by this bound in `impls_a` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0275`. diff --git a/tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs b/tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs new file mode 100644 index 00000000000..300f30ecad2 --- /dev/null +++ b/tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs @@ -0,0 +1,32 @@ +// compile-flags: -Znext-solver +#![feature(rustc_attrs)] + +// A test showcasing that the solver may need to +// compute a goal which is already in the provisional +// cache. 
+// +// However, given that `(): BInd` and `(): B` are currently distinct +// goals, this is actually not possible right now. +// +// FIXME(-Znext-solver=coinductive): With the new coinduction approach +// the same goal stack can be both inductive and coinductive, depending +// on why we're proving a specific nested goal. Rewrite this test +// at that point. + +#[rustc_coinductive] +trait A {} + +#[rustc_coinductive] +trait B {} +trait BInd {} +impl BInd for T {} + +impl A for T {} +impl B for T {} + +fn impls_a() {} + +fn main() { + impls_a::<()>(); + //~^ ERROR overflow evaluating the requirement `(): A` +} diff --git a/tests/ui/traits/next-solver/cycles/mixed-cycles-2.stderr b/tests/ui/traits/next-solver/cycles/mixed-cycles-2.stderr new file mode 100644 index 00000000000..ec13093f707 --- /dev/null +++ b/tests/ui/traits/next-solver/cycles/mixed-cycles-2.stderr @@ -0,0 +1,16 @@ +error[E0275]: overflow evaluating the requirement `(): A` + --> $DIR/mixed-cycles-2.rs:30:15 + | +LL | impls_a::<()>(); + | ^^ + | + = help: consider increasing the recursion limit by adding a `#![recursion_limit = "256"]` attribute to your crate (`mixed_cycles_2`) +note: required by a bound in `impls_a` + --> $DIR/mixed-cycles-2.rs:27:15 + | +LL | fn impls_a() {} + | ^ required by this bound in `impls_a` + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0275`. diff --git a/tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs b/tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs new file mode 100644 index 00000000000..ab7c4c7601b --- /dev/null +++ b/tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs @@ -0,0 +1,70 @@ +// compile-flags: -Znext-solver +// check-pass +#![feature(rustc_attrs)] + +// A test showcasing that using a provisional cache can differ +// from only tracking stack entries. +// +// Without a provisional cache, we have the following proof tree: +// +// - (): A +// - (): B +// - (): A (coinductive cycle) +// - (): C +// - (): B (coinductive cycle) +// - (): C +// - (): B +// - (): A (coinductive cycle) +// - (): C (coinductive cycle) +// +// While with the current provisional cache implementation we get: +// +// - (): A +// - (): B +// - (): A (coinductive cycle) +// - (): C +// - (): B (coinductive cycle) +// - (): C +// - (): B (provisional cache hit) +// +// Note that if even if we were to expand the provisional cache hit, +// the proof tree would still be different: +// +// - (): A +// - (): B +// - (): A (coinductive cycle) +// - (): C +// - (): B (coinductive cycle) +// - (): C +// - (): B (provisional cache hit, expanded) +// - (): A (coinductive cycle) +// - (): C +// - (): B (coinductive cycle) +// +// Theoretically, this can result in observable behavior differences +// due to incompleteness. However, this would require a very convoluted +// example and would still be sound. The difference is determinstic +// and can not be observed outside of the cycle itself as we don't move +// non-root cycle participants into the global cache. 
+// +// For an example of how incompleteness could impact the observable behavior here, see +// +// tests/ui/traits/next-solver/cycles/coinduction/incompleteness-unstable-result.rs +#[rustc_coinductive] +trait A {} + +#[rustc_coinductive] +trait B {} + +#[rustc_coinductive] +trait C {} + +impl A for T {} +impl B for T {} +impl C for T {} + +fn impls_a() {} + +fn main() { + impls_a::<()>(); +} From 13aa90042fca87b4209e81fbd6b7d4d03285b4a4 Mon Sep 17 00:00:00 2001 From: lcnr Date: Tue, 9 Jan 2024 13:10:37 +0100 Subject: [PATCH 7/7] this is not a rust code snippet --- compiler/rustc_trait_selection/src/solve/search_graph.rs | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_trait_selection/src/solve/search_graph.rs b/compiler/rustc_trait_selection/src/solve/search_graph.rs index c33b302c33a..bede94a2e43 100644 --- a/compiler/rustc_trait_selection/src/solve/search_graph.rs +++ b/compiler/rustc_trait_selection/src/solve/search_graph.rs @@ -61,10 +61,11 @@ struct DetachedEntry<'tcx> { /// /// Given the following rules, when proving `A` the head for /// the provisional entry of `C` would be `B`. - /// - /// A :- B - /// B :- C - /// C :- A + B + C + /// ```plain + /// A :- B + /// B :- C + /// C :- A + B + C + /// ``` head: StackDepth, result: QueryResult<'tcx>, }
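Taken together, PATCH 5 and PATCH 6 exist to avoid pointless fixpoint iterations: a cycle head only has to be rerun when its final result differs from what goals depending on it were allowed to assume. The standalone sketch below is not part of the series and only illustrates that check in isolation; the `HasBeenUsed` flags mirror the patch, while `Response`, `reached_fixpoint`, and its stand-in constructors are simplified placeholders for rustc's `QueryResult<'tcx>` machinery. It assumes the same `bitflags = "2.4.1"` dependency the series adds to `Cargo.toml`.

```rust
// Illustrative sketch only; not part of the patch series. It mirrors the
// fixpoint shortcut from PATCH 5 with a stand-in `Response` enum instead of
// rustc's `QueryResult<'tcx>`.
use bitflags::bitflags;

bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct HasBeenUsed: u8 {
        const INDUCTIVE_CYCLE = 1 << 0;
        const COINDUCTIVE_CYCLE = 1 << 1;
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Response {
    NoConstraintsYes,      // stand-in for `response_no_constraints(.., Certainty::Yes)`
    NoConstraintsOverflow, // stand-in for `response_no_constraints(.., Certainty::OVERFLOW)`
    Constrained,           // any other query result
}

// Rerunning the cycle head is unnecessary if the new result matches the
// previous provisional result, or if the goal was only ever used as the head
// of one kind of cycle and the result equals the initial response assumed
// for that kind of cycle.
fn reached_fixpoint(
    provisional_result: Option<Response>,
    has_been_used: HasBeenUsed,
    result: Response,
) -> bool {
    if let Some(r) = provisional_result {
        r == result
    } else if has_been_used == HasBeenUsed::COINDUCTIVE_CYCLE {
        result == Response::NoConstraintsYes
    } else if has_been_used == HasBeenUsed::INDUCTIVE_CYCLE {
        result == Response::NoConstraintsOverflow
    } else {
        // A mixed cycle (or an empty usage set) always forces another iteration.
        false
    }
}

fn main() {
    // Purely coinductive cycle whose first iteration already produced the
    // trivial "yes" response: no rerun needed.
    assert!(reached_fixpoint(None, HasBeenUsed::COINDUCTIVE_CYCLE, Response::NoConstraintsYes));

    // A later iteration whose result differs from the provisional one: rerun.
    assert!(!reached_fixpoint(
        Some(Response::Constrained),
        HasBeenUsed::COINDUCTIVE_CYCLE,
        Response::NoConstraintsYes,
    ));

    // Mixed inductive/coinductive cycle head: rerun at least once.
    let mixed = HasBeenUsed::INDUCTIVE_CYCLE | HasBeenUsed::COINDUCTIVE_CYCLE;
    assert!(!reached_fixpoint(None, mixed, Response::NoConstraintsYes));
}
```

A head that was used both inductively and coinductively can never match either initial response, which is why such goals are always rerun at least once before the overflow fallback applies; that is also the behavior the new `mixed-cycles-*.rs` tests pin down.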