@@ -47,20 +47,39 @@ struct StackEntry<I: Interner> {
     /// Whether this entry is a non-root cycle participant.
     ///
     /// We must not move the result of non-root cycle participants to the
-    /// global cache. See [SearchGraph::cycle_participants] for more details.
-    /// We store the highest stack depth of a head of a cycle this goal is involved
-    /// in. This necessary to soundly cache its provisional result.
+    /// global cache. We store the highest stack depth of a head of a cycle
+    /// this goal is involved in. This is necessary to soundly cache its
+    /// provisional result.
     non_root_cycle_participant: Option<StackDepth>,
 
     encountered_overflow: bool,
 
     has_been_used: HasBeenUsed,
+
+    /// We put only the root goal of a coinductive cycle into the global cache.
+    ///
+    /// If we were to use that result when later trying to prove another cycle
+    /// participant, we can end up with unstable query results.
+    ///
+    /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for
+    /// an example of where this is needed.
+    ///
+    /// There can be multiple roots on the same stack, so we need to track
+    /// cycle participants per root:
+    /// ```plain
+    /// A :- B
+    /// B :- A, C
+    /// C :- D
+    /// D :- C
+    /// ```
+    cycle_participants: FxHashSet<CanonicalInput<I>>,
     /// Starts out as `None` and gets set when rerunning this
     /// goal in case we encounter a cycle.
     provisional_result: Option<QueryResult<I>>,
 }
 
 /// The provisional result for a goal which is not on the stack.
+#[derive(Debug)]
 struct DetachedEntry<I: Interner> {
     /// The head of the smallest non-trivial cycle involving this entry.
     ///
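
The per-root tracking documented on the new `StackEntry::cycle_participants` field is the core of this change: participants are now recorded on the stack entry of each cycle root instead of in a single `SearchGraph`-wide set. The following standalone sketch (a made-up `Entry` type with plain strings instead of canonical goals and indices instead of `StackDepth`, not the solver's real types) replays the `A :- B`, `B :- A, C`, `C :- D`, `D :- C` example from the doc comment and shows how each root ends up with its own participant set:

```rust
use std::collections::HashSet;

#[derive(Default)]
struct Entry {
    name: &'static str,
    /// Highest head of a cycle this goal participates in, mirroring
    /// `non_root_cycle_participant` (a plain index instead of `StackDepth`).
    non_root_cycle_participant: Option<usize>,
    /// Participants recorded on this entry if it is the root of a cycle.
    cycle_participants: HashSet<&'static str>,
}

/// Simplified model of `tag_cycle_participants`: walk up to the current
/// root, then record every entry above `head` in that root's participant set.
fn tag(stack: &mut Vec<Entry>, head: usize) {
    let mut root = head;
    while let Some(parent) = stack[root].non_root_cycle_participant {
        root = parent;
    }
    let (lower, participants) = stack.split_at_mut(head + 1);
    for entry in participants {
        entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head));
        lower[root].cycle_participants.insert(entry.name);
        lower[root].cycle_participants.extend(std::mem::take(&mut entry.cycle_participants));
    }
}

fn main() {
    let entry = |name| Entry { name, ..Default::default() };
    // Proving `A` pushes `B` onto the stack.
    let mut stack = vec![entry("A"), entry("B")];
    tag(&mut stack, 0); // `B :- A` hits the in-progress goal `A` at depth 0.
    stack.push(entry("C")); // `B :- C`
    stack.push(entry("D")); // `C :- D`
    tag(&mut stack, 2); // `D :- C` hits the in-progress goal `C` at depth 2.
    assert_eq!(stack[0].cycle_participants, HashSet::from(["B"]));
    assert_eq!(stack[2].cycle_participants, HashSet::from(["D"]));
    assert_eq!(stack[1].non_root_cycle_participant, Some(0));
    assert_eq!(stack[3].non_root_cycle_participant, Some(2));
}
```

With the previous single `SearchGraph`-wide set, the participants of both cycles would have ended up mixed together; tracking them per root keeps the `A`/`B` and `C`/`D` cycles separate.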
@@ -110,24 +129,11 @@ pub(super) struct SearchGraph<I: Interner> {
     /// An element is *deeper* in the stack if its index is *lower*.
     stack: IndexVec<StackDepth, StackEntry<I>>,
     provisional_cache: FxHashMap<CanonicalInput<I>, ProvisionalCacheEntry<I>>,
-    /// We put only the root goal of a coinductive cycle into the global cache.
-    ///
-    /// If we were to use that result when later trying to prove another cycle
-    /// participant, we can end up with unstable query results.
-    ///
-    /// See tests/ui/next-solver/coinduction/incompleteness-unstable-result.rs for
-    /// an example of where this is needed.
-    cycle_participants: FxHashSet<CanonicalInput<I>>,
 }
 
 impl<I: Interner> SearchGraph<I> {
     pub(super) fn new(mode: SolverMode) -> SearchGraph<I> {
-        Self {
-            mode,
-            stack: Default::default(),
-            provisional_cache: Default::default(),
-            cycle_participants: Default::default(),
-        }
+        Self { mode, stack: Default::default(), provisional_cache: Default::default() }
     }
 
     pub(super) fn solver_mode(&self) -> SolverMode {
@@ -149,13 +155,7 @@ impl<I: Interner> SearchGraph<I> {
     }
 
     pub(super) fn is_empty(&self) -> bool {
-        if self.stack.is_empty() {
-            debug_assert!(self.provisional_cache.is_empty());
-            debug_assert!(self.cycle_participants.is_empty());
-            true
-        } else {
-            false
-        }
+        self.stack.is_empty()
     }
 
     /// Returns the remaining depth allowed for nested goals.
@@ -205,15 +205,26 @@ impl<I: Interner> SearchGraph<I> {
     // their result does not get moved to the global cache.
     fn tag_cycle_participants(
         stack: &mut IndexVec<StackDepth, StackEntry<I>>,
-        cycle_participants: &mut FxHashSet<CanonicalInput<I>>,
         usage_kind: HasBeenUsed,
         head: StackDepth,
     ) {
         stack[head].has_been_used |= usage_kind;
         debug_assert!(!stack[head].has_been_used.is_empty());
-        for entry in &mut stack.raw[head.index() + 1..] {
+
+        // The current root of these cycles. Note that this may not be the final
+        // root in case a later goal depends on a goal higher up the stack.
+        let mut current_root = head;
+        while let Some(parent) = stack[current_root].non_root_cycle_participant {
+            current_root = parent;
+            debug_assert!(!stack[current_root].has_been_used.is_empty());
+        }
+
+        let (stack, cycle_participants) = stack.raw.split_at_mut(head.index() + 1);
+        let current_cycle_root = &mut stack[current_root.as_usize()];
+        for entry in cycle_participants {
             entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head));
-            cycle_participants.insert(entry.input);
+            current_cycle_root.cycle_participants.insert(entry.input);
+            current_cycle_root.cycle_participants.extend(mem::take(&mut entry.cycle_participants));
         }
     }
 
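
The root walk at the start of the new loop matters when the head of a freshly discovered cycle is itself a non-root participant of a larger cycle: its participants have to be recorded on that larger cycle's root, because only root entries ever reach the global cache. A small follow-up to the sketch above (it reuses that block's hypothetical `Entry` and `tag` definitions and its `HashSet` import, so it is not self-contained on its own) illustrates this:

```rust
// Reuses `Entry`, `tag`, and the `HashSet` import from the previous sketch.
fn nested_cycle_records_on_the_root() {
    let entry = |name| Entry { name, ..Default::default() };
    let mut stack = vec![entry("A"), entry("B")];
    tag(&mut stack, 0); // `B :- A`: B becomes a participant of root A.
    stack.push(entry("C"));
    tag(&mut stack, 1); // `C :- B`: the head is B, but the root walk ends at A.
    assert_eq!(stack[0].cycle_participants, HashSet::from(["B", "C"]));
    assert!(stack[1].cycle_participants.is_empty()); // B is not a root and records nothing.
    assert_eq!(stack[2].non_root_cycle_participant, Some(1)); // C still remembers its head B.
}
```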
@@ -256,6 +267,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             &mut ProofTreeBuilder<TyCtxt<'tcx>>,
         ) -> QueryResult<TyCtxt<'tcx>>,
     ) -> QueryResult<TyCtxt<'tcx>> {
+        self.check_invariants();
         // Check for overflow.
         let Some(available_depth) = Self::allowed_depth_for_nested(tcx, &self.stack) else {
             if let Some(last) = self.stack.raw.last_mut() {
@@ -292,12 +304,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             // already set correctly while computing the cache entry.
             inspect
                 .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit);
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                &mut self.cycle_participants,
-                HasBeenUsed::empty(),
-                entry.head,
-            );
+            Self::tag_cycle_participants(&mut self.stack, HasBeenUsed::empty(), entry.head);
             return entry.result;
         } else if let Some(stack_depth) = cache_entry.stack_depth {
             debug!("encountered cycle with depth {stack_depth:?}");
@@ -314,12 +321,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             } else {
                 HasBeenUsed::INDUCTIVE_CYCLE
             };
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                &mut self.cycle_participants,
-                usage_kind,
-                stack_depth,
-            );
+            Self::tag_cycle_participants(&mut self.stack, usage_kind, stack_depth);
 
             // Return the provisional result or, if we're in the first iteration,
             // start with no constraints.
@@ -340,6 +342,7 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
             non_root_cycle_participant: None,
             encountered_overflow: false,
             has_been_used: HasBeenUsed::empty(),
+            cycle_participants: Default::default(),
             provisional_result: None,
         };
         assert_eq!(self.stack.push(entry), depth);
@@ -386,27 +389,28 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
         } else {
             self.provisional_cache.remove(&input);
             let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len();
-            let cycle_participants = mem::take(&mut self.cycle_participants);
             // When encountering a cycle, both inductive and coinductive, we only
             // move the root into the global cache. We also store all other cycle
             // participants involved.
             //
             // We must not use the global cache entry of a root goal if a cycle
             // participant is on the stack. This is necessary to prevent unstable
-            // results. See the comment of `SearchGraph::cycle_participants` for
+            // results. See the comment of `StackEntry::cycle_participants` for
             // more details.
             self.global_cache(tcx).insert(
                 tcx,
                 input,
                 proof_tree,
                 reached_depth,
                 final_entry.encountered_overflow,
-                cycle_participants,
+                final_entry.cycle_participants,
                 dep_node,
                 result,
             )
         }
 
+        self.check_invariants();
+
         result
     }
 
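
The comment block above states the rule all of this relies on: a root's global cache entry must not be reused while one of its recorded cycle participants is still on the stack. In this file that check happens inside the global cache lookup, which is handed an iterator over the stack's inputs (see `lookup_global_cache` below). The following standalone sketch, with plain strings standing in for canonical goals and a hypothetical `CachedRoot` type, captures just that rule:

```rust
use std::collections::HashSet;

/// Hypothetical stand-in for a global cache entry: the cached result of a
/// cycle root together with the participants recorded for it.
struct CachedRoot {
    participants: HashSet<&'static str>,
}

/// The cached result may only be reused if none of the goals currently on
/// the stack is one of the root's recorded cycle participants.
fn can_use_cached_root(cached: &CachedRoot, stack: &[&'static str]) -> bool {
    stack.iter().all(|goal| !cached.participants.contains(goal))
}

fn main() {
    let cached = CachedRoot { participants: HashSet::from(["B"]) };
    // `B` is currently being proven, so the root's cached result is unusable.
    assert!(!can_use_cached_root(&cached, &["B"]));
    // No participant on the stack: the cached result may be used.
    assert!(can_use_cached_root(&cached, &["C"]));
}
```

The real lookup also accounts for the available depth and overflow, which this sketch leaves out.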
@@ -416,10 +420,10 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
     fn lookup_global_cache(
         &mut self,
         tcx: TyCtxt<'tcx>,
-        input: CanonicalInput<'tcx>,
+        input: CanonicalInput<TyCtxt<'tcx>>,
         available_depth: Limit,
         inspect: &mut ProofTreeBuilder<TyCtxt<'tcx>>,
-    ) -> Option<QueryResult<'tcx>> {
+    ) -> Option<QueryResult<TyCtxt<'tcx>>> {
         let CacheData { result, proof_tree, additional_depth, encountered_overflow } = self
             .global_cache(tcx)
             .get(tcx, input, self.stack.iter().map(|e| e.input), available_depth)?;
@@ -450,12 +454,12 @@ impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
     }
 }
 
-enum StepResult<'tcx> {
-    Done(StackEntry<'tcx>, QueryResult<'tcx>),
+enum StepResult<I: Interner> {
+    Done(StackEntry<I>, QueryResult<I>),
     HasChanged,
 }
 
-impl<'tcx> SearchGraph<'tcx> {
+impl<'tcx> SearchGraph<TyCtxt<'tcx>> {
     /// When we encounter a coinductive cycle, we have to fetch the
     /// result of that cycle while we are still computing it. Because
     /// of this we continuously recompute the cycle until the result
@@ -464,12 +468,12 @@ impl<'tcx> SearchGraph<'tcx> {
     fn fixpoint_step_in_task<F>(
         &mut self,
         tcx: TyCtxt<'tcx>,
-        input: CanonicalInput<'tcx>,
+        input: CanonicalInput<TyCtxt<'tcx>>,
         inspect: &mut ProofTreeBuilder<TyCtxt<'tcx>>,
         prove_goal: &mut F,
-    ) -> StepResult<'tcx>
+    ) -> StepResult<TyCtxt<'tcx>>
     where
-        F: FnMut(&mut Self, &mut ProofTreeBuilder<TyCtxt<'tcx>>) -> QueryResult<'tcx>,
+        F: FnMut(&mut Self, &mut ProofTreeBuilder<TyCtxt<'tcx>>) -> QueryResult<TyCtxt<'tcx>>,
     {
         let result = prove_goal(self, inspect);
         let stack_entry = self.pop_stack();
@@ -530,3 +534,77 @@ impl<'tcx> SearchGraph<'tcx> {
         Ok(super::response_no_constraints_raw(tcx, goal.max_universe, goal.variables, certainty))
     }
 }
+
+impl<I: Interner> SearchGraph<I> {
+    #[allow(rustc::potential_query_instability)]
+    fn check_invariants(&self) {
+        if !cfg!(debug_assertions) {
+            return;
+        }
+
+        let SearchGraph { mode: _, stack, provisional_cache } = self;
+        if stack.is_empty() {
+            assert!(provisional_cache.is_empty());
+        }
+
+        for (depth, entry) in stack.iter_enumerated() {
+            let StackEntry {
+                input,
+                available_depth: _,
+                reached_depth: _,
+                non_root_cycle_participant,
+                encountered_overflow: _,
+                has_been_used,
+                ref cycle_participants,
+                provisional_result,
+            } = *entry;
+            let cache_entry = provisional_cache.get(&entry.input).unwrap();
+            assert_eq!(cache_entry.stack_depth, Some(depth));
+            if let Some(head) = non_root_cycle_participant {
+                assert!(head < depth);
+                assert!(cycle_participants.is_empty());
+                assert_ne!(stack[head].has_been_used, HasBeenUsed::empty());
+
+                let mut current_root = head;
+                while let Some(parent) = stack[current_root].non_root_cycle_participant {
+                    current_root = parent;
+                }
+                assert!(stack[current_root].cycle_participants.contains(&input));
+            }
+
+            if !cycle_participants.is_empty() {
+                assert!(provisional_result.is_some() || !has_been_used.is_empty());
+                for entry in stack.iter().take(depth.as_usize()) {
+                    assert_eq!(cycle_participants.get(&entry.input), None);
+                }
+            }
+        }
+
+        for (&input, entry) in &self.provisional_cache {
+            let ProvisionalCacheEntry { stack_depth, with_coinductive_stack, with_inductive_stack } =
+                entry;
+            assert!(
+                stack_depth.is_some()
+                    || with_coinductive_stack.is_some()
+                    || with_inductive_stack.is_some()
+            );
+
+            if let &Some(stack_depth) = stack_depth {
+                assert_eq!(stack[stack_depth].input, input);
+            }
+
+            let check_detached = |detached_entry: &DetachedEntry<I>| {
+                let DetachedEntry { head, result: _ } = *detached_entry;
+                assert_ne!(stack[head].has_been_used, HasBeenUsed::empty());
+            };
+
+            if let Some(with_coinductive_stack) = with_coinductive_stack {
+                check_detached(with_coinductive_stack);
+            }
+
+            if let Some(with_inductive_stack) = with_inductive_stack {
+                check_detached(with_inductive_stack);
+            }
+        }
+    }
+}