55 | 55 | // straightforward translation from JIT IR into the SCEV IR. Creating the add
56 | 56 | // recurrences requires paying attention to the structure of PHIs, and
57 | 57 | // disambiguating the values coming from outside the loop and the values coming
58 | | -// from the backedges. Currently only simplistic add recurrences that do not
59 | | -// require recursive analysis are supported. These simplistic add recurrences
60 | | -// are always of the form i = i + k.
| 58 | +// from the backedges.
61 | 59 | //
62 | 60 |
63 | 61 | #include "jitpch.h"
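To make the header comment concrete, here is a sketch of the simplest shape the analysis recognizes; the loop and the names i, n, and k are illustrative only, and the notation matches the <L, start, step> form used in the comments of this file:

    // The PHI for i merges 0, the value coming from outside the loop, with
    // i + k, the value coming around the backedge; the analysis summarizes
    // this as the add recurrence <L, 0, k>.
    int i = 0;
    while (i < n)
    {
        i = i + k;
    }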
@@ -208,7 +206,7 @@ void Scev::Dump(Compiler* comp) |
208 | 206 | // ResetForLoop. |
209 | 207 | // |
210 | 208 | ScalarEvolutionContext::ScalarEvolutionContext(Compiler* comp) |
211 | | - : m_comp(comp), m_cache(comp->getAllocator(CMK_LoopIVOpts)) |
| 209 | + : m_comp(comp), m_cache(comp->getAllocator(CMK_LoopIVOpts)), m_ephemeralCache(comp->getAllocator(CMK_LoopIVOpts)) |
212 | 210 | { |
213 | 211 | } |
214 | 212 |
@@ -471,34 +469,34 @@ Scev* ScalarEvolutionContext::AnalyzeNew(BasicBlock* block, GenTree* tree, int d |
471 | 469 |
472 | 470 | assert(ssaDsc->GetBlock() != nullptr); |
473 | 471 |
474 | | - // We currently do not handle complicated addrecs. We can do this |
475 | | - // by inserting a symbolic node in the cache and analyzing while it |
476 | | - // is part of the cache. It would allow us to model |
477 | | - // |
478 | | - // int i = 0; |
479 | | - // while (i < n) |
480 | | - // { |
481 | | - // int j = i + 1; |
482 | | - // ... |
483 | | - // i = j; |
484 | | - // } |
485 | | - // => <L, 0, 1> |
486 | | - // |
487 | | - // and chains of recurrences, such as |
488 | | - // |
489 | | - // int i = 0; |
490 | | - // int j = 0; |
491 | | - // while (i < n) |
492 | | - // { |
493 | | - // j++; |
494 | | - // i += j; |
495 | | - // } |
496 | | - // => <L, 0, <L, 1, 1>> |
497 | | - // |
498 | | - // The main issue is that it requires cache invalidation afterwards |
499 | | - // and turning the recursive result into an addrec. |
500 | | - // |
501 | | - return CreateSimpleAddRec(store, enterScev, ssaDsc->GetBlock(), ssaDsc->GetDefNode()->Data()); |
| 472 | + Scev* simpleAddRec = CreateSimpleAddRec(store, enterScev, ssaDsc->GetBlock(), ssaDsc->GetDefNode()->Data()); |
| 473 | + if (simpleAddRec != nullptr) |
| 474 | + { |
| 475 | + return simpleAddRec; |
| 476 | + } |
| 477 | + |
| 478 | + ScevConstant* symbolicAddRec = NewConstant(data->TypeGet(), 0xdeadbeef); |
| 479 | + m_ephemeralCache.Emplace(store, symbolicAddRec); |
| 480 | + |
| 481 | + Scev* result; |
| 482 | + if (m_usingEphemeralCache) |
| 483 | + { |
| 484 | + result = Analyze(ssaDsc->GetBlock(), ssaDsc->GetDefNode()->Data(), depth + 1); |
| 485 | + } |
| 486 | + else |
| 487 | + { |
| 488 | + m_usingEphemeralCache = true; |
| 489 | + result = Analyze(ssaDsc->GetBlock(), ssaDsc->GetDefNode()->Data(), depth + 1); |
| 490 | + m_usingEphemeralCache = false; |
| 491 | + m_ephemeralCache.RemoveAll(); |
| 492 | + } |
| 493 | + |
| 494 | + if (result == nullptr) |
| 495 | + { |
| 496 | + return nullptr; |
| 497 | + } |
| 498 | + |
| 499 | + return MakeAddRecFromRecursiveScev(enterScev, result, symbolicAddRec); |
502 | 500 | } |
503 | 501 | case GT_CAST: |
504 | 502 | { |
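The two loop shapes that the removed comment called out as requiring recursive analysis are exactly what the symbolic placeholder above is meant to recover. Restated from that comment, with illustrative names:

    // The value of i coming around the backedge is defined through the
    // intermediate local j, so analyzing i requires recursing through j
    // while i itself is still under analysis. With the placeholder this
    // resolves to <L, 0, 1>.
    int i = 0;
    while (i < n)
    {
        int j = i + 1;
        i = j;
    }

    // A chain of recurrences: the value of j added to i in each iteration
    // evolves as <L, 1, 1>, so i becomes the nested add recurrence
    // <L, 0, <L, 1, 1>>.
    int i = 0;
    int j = 0;
    while (i < n)
    {
        j++;
        i += j;
    }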
@@ -611,6 +609,138 @@ Scev* ScalarEvolutionContext::CreateSimpleAddRec(GenTreeLclVarCommon* headerStor |
611 | 609 | return NewAddRec(enterScev, stepScev); |
612 | 610 | } |
613 | 611 |
| 612 | +//------------------------------------------------------------------------ |
| 613 | +// ExtractAddOperands: Extract all operands of potentially nested add |
| 614 | +// operations. |
| 615 | +// |
| 616 | +// Parameters: |
| 617 | +// binop - The binop representing an add |
| 618 | +// operands - Array stack to add the operands to |
| 619 | +// |
| 620 | +void ScalarEvolutionContext::ExtractAddOperands(ScevBinop* binop, ArrayStack<Scev*>& operands) |
| 621 | +{ |
| 622 | + assert(binop->OperIs(ScevOper::Add)); |
| 623 | + |
| 624 | + if (binop->Op1->OperIs(ScevOper::Add)) |
| 625 | + { |
| 626 | + ExtractAddOperands(static_cast<ScevBinop*>(binop->Op1), operands); |
| 627 | + } |
| 628 | + else |
| 629 | + { |
| 630 | + operands.Push(binop->Op1); |
| 631 | + } |
| 632 | + |
| 633 | + if (binop->Op2->OperIs(ScevOper::Add)) |
| 634 | + { |
| 635 | + ExtractAddOperands(static_cast<ScevBinop*>(binop->Op2), operands); |
| 636 | + } |
| 637 | + else |
| 638 | + { |
| 639 | + operands.Push(binop->Op2); |
| 640 | + } |
| 641 | +} |
| 642 | + |
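As a quick illustration of the flattening: a hypothetical SCEV sumScev shaped as ((a + b) + c) has all three leaves collected in order (sumScev, a, b, and c are made-up names, and the allocator call mirrors the one used below):

    ArrayStack<Scev*> operands(m_comp->getAllocator(CMK_LoopIVOpts));
    ExtractAddOperands(static_cast<ScevBinop*>(sumScev), operands);
    // Any operand that is itself an add is recursed into rather than pushed,
    // so "operands" now holds a, b, c from bottom to top.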
| 643 | +//------------------------------------------------------------------------ |
| 644 | +// MakeAddRecFromRecursiveScev: Given a recursive SCEV and a symbolic SCEV |
| 645 | +// whose appearances represent occurrences of the full SCEV, create a
| 646 | +// non-recursive addrec from it.
| 647 | +// |
| 648 | +// Parameters: |
| 649 | +// startScev - The start value of the addrec |
| 650 | +// scev - The scev |
| 651 | +// recursiveScev - A symbolic node whose appearance represents the value of "scev" |
| 652 | +// |
| 653 | +// Returns: |
| 654 | +// A non-recursive addrec |
| 655 | +// |
| 656 | +Scev* ScalarEvolutionContext::MakeAddRecFromRecursiveScev(Scev* startScev, Scev* scev, Scev* recursiveScev) |
| 657 | +{ |
| 658 | + if (!scev->OperIs(ScevOper::Add)) |
| 659 | + { |
| 660 | + return nullptr; |
| 661 | + } |
| 662 | + |
| 663 | + ArrayStack<Scev*> addOperands(m_comp->getAllocator(CMK_LoopIVOpts)); |
| 664 | + ExtractAddOperands(static_cast<ScevBinop*>(scev), addOperands); |
| 665 | + |
| 666 | + assert(addOperands.Height() >= 2); |
| 667 | + |
| 668 | + int numAppearances = 0; |
| 669 | + for (int i = 0; i < addOperands.Height(); i++) |
| 670 | + { |
| 671 | + Scev* addOperand = addOperands.Bottom(i); |
| 672 | + if (addOperand == recursiveScev) |
| 673 | + { |
| 674 | + numAppearances++; |
| 675 | + } |
| 676 | + else |
| 677 | + { |
| 678 | + ScevVisit result = addOperand->Visit([=](Scev* node) { |
| 679 | + if (node == recursiveScev) |
| 680 | + { |
| 681 | + return ScevVisit::Abort; |
| 682 | + } |
| 683 | + |
| 684 | + return ScevVisit::Continue; |
| 685 | + }); |
| 686 | + |
| 687 | + if (result == ScevVisit::Abort) |
| 688 | + { |
| 689 | + // We do not handle nested occurrences. Some of these may be representable, some won't. |
| 690 | + return nullptr; |
| 691 | + } |
| 692 | + } |
| 693 | + } |
| 694 | + |
| 695 | + if (numAppearances == 0) |
| 696 | + { |
| 697 | + // TODO-CQ: We currently cannot handle cases like |
| 698 | + // i = arr.Length; |
| 699 | + // j = i - 1; |
| 700 | + // i = j; |
| 701 | + // while (true) { ...; j = i - 1; i = j; } |
| 702 | + // |
| 703 | + // These cases can arise from loop structures like "for (int i = |
| 704 | + // arr.Length; --i >= 0;)" when Roslyn emits a "sub; dup; stloc" |
| 705 | + // sequence, and local prop + loop inversion converts the duplicated |
| 706 | + // local into a fully fledged IV. |
| 707 | + // In this case we see that i = <L, [i from outside loop], -1>, but for |
| 708 | + // j we will see <L, [i from outside loop], -1> + (-1) in this function |
| 709 | + // as the value coming around the backedge, and we cannot reconcile |
| 710 | + // this. |
| 711 | + // |
| 712 | + return nullptr; |
| 713 | + } |
| 714 | + |
| 715 | + if (numAppearances > 1) |
| 716 | + { |
| 717 | + // Multiple occurrences -- cannot be represented as an addrec |
| 718 | + // (corresponds to a geometric progression). |
| 719 | + return nullptr; |
| 720 | + } |
| 721 | + |
| 722 | + Scev* step = nullptr; |
| 723 | + for (int i = 0; i < addOperands.Height(); i++) |
| 724 | + { |
| 725 | + Scev* addOperand = addOperands.Bottom(i); |
| 726 | + if (addOperand == recursiveScev) |
| 727 | + { |
| 728 | + continue; |
| 729 | + } |
| 730 | + |
| 731 | + if (step == nullptr) |
| 732 | + { |
| 733 | + step = addOperand; |
| 734 | + } |
| 735 | + else |
| 736 | + { |
| 737 | + step = NewBinop(ScevOper::Add, step, addOperand); |
| 738 | + } |
| 739 | + } |
| 740 | + |
| 741 | + return NewAddRec(startScev, step); |
| 742 | +} |
| 743 | + |
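Tying the pieces together with made-up names: for the "j = i + 1; i = j" loop sketched earlier, the recursive analysis with the placeholder in the ephemeral cache yields roughly "symbolicAddRec + 1" as the backedge value, and this helper turns that back into an addrec:

    // The add is flattened into [symbolicAddRec, 1]; the placeholder appears
    // exactly once and is not nested inside any other operand, so the
    // remaining operand becomes the step and the result is <L, startScev, 1>.
    Scev* addRec = MakeAddRecFromRecursiveScev(startScev, backedgeScev, symbolicAddRec);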
614 | 744 | //------------------------------------------------------------------------ |
615 | 745 | // Analyze: Analyze the specified tree in the specified block. |
616 | 746 | // |
@@ -653,15 +783,23 @@ const int SCALAR_EVOLUTION_ANALYSIS_MAX_DEPTH = 64; |
653 | 783 | Scev* ScalarEvolutionContext::Analyze(BasicBlock* block, GenTree* tree, int depth) |
654 | 784 | { |
655 | 785 | Scev* result; |
656 | | - if (!m_cache.Lookup(tree, &result)) |
| 786 | + if (!m_cache.Lookup(tree, &result) && (!m_usingEphemeralCache || !m_ephemeralCache.Lookup(tree, &result))) |
657 | 787 | { |
658 | 788 | if (depth >= SCALAR_EVOLUTION_ANALYSIS_MAX_DEPTH) |
659 | 789 | { |
660 | 790 | return nullptr; |
661 | 791 | } |
662 | 792 |
663 | 793 | result = AnalyzeNew(block, tree, depth); |
664 | | - m_cache.Set(tree, result); |
| 794 | + |
| 795 | + if (m_usingEphemeralCache) |
| 796 | + { |
| 797 | + m_ephemeralCache.Set(tree, result, ScalarEvolutionMap::Overwrite); |
| 798 | + } |
| 799 | + else |
| 800 | + { |
| 801 | + m_cache.Set(tree, result); |
| 802 | + } |
665 | 803 | } |
666 | 804 |
667 | 805 | return result; |
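None of this changes how the analysis is driven from the outside; a typical caller still looks roughly like the sketch below (assuming the existing ResetForLoop and public Analyze entry points, with comp, loop, and tree supplied by the caller). SCEVs produced while a placeholder is live only ever land in m_ephemeralCache, which is discarded once the recursive analysis finishes, so the persistent m_cache should never end up holding a result that depends on a placeholder.

    ScalarEvolutionContext scevContext(comp);
    scevContext.ResetForLoop(loop);

    // Analyze consults m_cache first and, during recursive analysis of a PHI,
    // the ephemeral cache as well.
    Scev* scev = scevContext.Analyze(loop->GetHeader(), tree);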