JIT: build pred lists first (#81448)
author Andy Ayers <andya@microsoft.com>
Thu, 2 Feb 2023 00:33:29 +0000 (16:33 -0800)
committer GitHub <noreply@github.com>
Thu, 2 Feb 2023 00:33:29 +0000 (16:33 -0800)
Move pred list building before importation. It now runs as the first phase in
the phase list.

* Split up some unions in block.h as some things can't share storage anymore
(may revisit this later).
* Modify the importer not to rely on cheap preds. Most of the work here is in
`impImportLeave` (see the retargeting sketch after this list).
* Adjust OSR protection strategy for the method entry. In particular, watch
for the degenerate case where the OSR entry is the method entry.
* Ensure we don't double-decrement some ref counts when blocks with degenerate
or folded flow are re-imported.
* Update spill clique discovery to use the actual pred lists.
* Add new method `impFixPredLists` for the importer to use at the end of
importation. This adds pred list entries for finally returns, which can't be
done until all `BBJ_LEAVE` blocks are processed.
* Trigger `BADCODE` instead of `NO_WAY` inside `fgComputePreds` for malformed
endfinally blocks.
* Fix an issue with `STRESS_CATCH_ARG`.
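
A minimal sketch of the bookkeeping pattern the importer now follows (it uses the
existing `fgRemoveRefPred`/`fgAddRefPred` helpers; the variable names here are
illustrative, and the real call sites are in `impImportLeave` in the diff below):

    // Pred lists are now live during importation, so whenever the importer
    // retargets a branch it must keep bbRefs/bbPreds accurate.
    if (step->bbJumpDest != nullptr)
    {
        fgRemoveRefPred(step->bbJumpDest, step); // drop the edge to the old target
    }
    step->bbJumpDest = newTarget;                // retarget the branch
    fgAddRefPred(newTarget, step);               // record the edge to the new target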

Contributes to #80193.

src/coreclr/jit/block.h
src/coreclr/jit/compiler.cpp
src/coreclr/jit/compiler.h
src/coreclr/jit/fgbasic.cpp
src/coreclr/jit/fgdiagnostic.cpp
src/coreclr/jit/fgflow.cpp
src/coreclr/jit/flowgraph.cpp
src/coreclr/jit/importer.cpp
src/coreclr/jit/importercalls.cpp
src/coreclr/jit/jiteh.cpp
src/coreclr/jit/morph.cpp

index ad33295..b27e122 100644 (file)
@@ -902,10 +902,7 @@ struct BasicBlock : private LIR::Range
         m_firstNode = tree;
     }
 
-    union {
-        EntryState* bbEntryState; // verifier tracked state of all entries in stack.
-        flowList*   bbLastPred;   // last pred list entry
-    };
+    EntryState* bbEntryState; // verifier tracked state of all entries in stack.
 
 #define NO_BASE_TMP UINT_MAX // base# to use when we have none
 
@@ -1091,10 +1088,14 @@ struct BasicBlock : private LIR::Range
     BlockSet bbReach; // Set of all blocks that can reach this one
 
     union {
-        BasicBlock* bbIDom;      // Represent the closest dominator to this block (called the Immediate
-                                 // Dominator) used to compute the dominance tree.
-        void* bbSparseProbeList; // Used early on by fgInstrument
+        BasicBlock* bbIDom;   // Represent the closest dominator to this block (called the Immediate
+                              // Dominator) used to compute the dominance tree.
+        flowList* bbLastPred; // Used early on by fgComputePreds
+    };
+
+    union {
         void* bbSparseCountInfo; // Used early on by fgIncorporateEdgeCounts
+        void* bbSparseProbeList; // Used early on by fgInstrument
     };
 
     unsigned bbPostOrderNum; // the block's post order number in the graph.
index 917fbcf..6e6468a 100644 (file)
@@ -4347,6 +4347,17 @@ void Compiler::EndPhase(Phases phase)
 //
 void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFlags* compileFlags)
 {
+    compFunctionTraceStart();
+
+    // Compute bbRefs and bbPreds
+    //
+    auto computePredsPhase = [this]() {
+        fgComputePreds();
+        // Enable flow graph checks
+        activePhaseChecks |= PhaseChecks::CHECK_FG;
+    };
+    DoPhase(this, PHASE_COMPUTE_PREDS, computePredsPhase);
+
     // Prepare for importation
     //
     auto preImportPhase = [this]() {
@@ -4375,8 +4386,6 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
     };
     DoPhase(this, PHASE_PRE_IMPORT, preImportPhase);
 
-    compFunctionTraceStart();
-
     // Incorporate profile data.
     //
     // Note: the importer is sensitive to block weights, so this has
@@ -4401,8 +4410,8 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
 
     // Enable the post-phase checks that use internal logic to decide when checking makes sense.
     //
-    activePhaseChecks = PhaseChecks::CHECK_EH | PhaseChecks::CHECK_LOOPS | PhaseChecks::CHECK_UNIQUE |
-                        PhaseChecks::CHECK_PROFILE | PhaseChecks::CHECK_LINKED_LOCALS;
+    activePhaseChecks |= PhaseChecks::CHECK_EH | PhaseChecks::CHECK_LOOPS | PhaseChecks::CHECK_UNIQUE |
+                         PhaseChecks::CHECK_PROFILE | PhaseChecks::CHECK_LINKED_LOCALS;
 
     // Import: convert the instrs in each basic block to a tree based intermediate representation
     //
@@ -4425,23 +4434,6 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
         return;
     }
 
-    // Compute bbNum, bbRefs and bbPreds
-    //
-    // This is the first time full (not cheap) preds will be computed.
-    // And, if we have profile data, we can now check integrity.
-    //
-    // From this point on the flowgraph information such as bbNum,
-    // bbRefs or bbPreds has to be kept updated.
-    //
-    auto computePredsPhase = [this]() {
-        JITDUMP("\nRenumbering the basic blocks for fgComputePred\n");
-        fgRenumberBlocks();
-        fgComputePreds();
-        // Enable flow graph checks
-        activePhaseChecks |= PhaseChecks::CHECK_FG;
-    };
-    DoPhase(this, PHASE_COMPUTE_PREDS, computePredsPhase);
-
     // If instrumenting, add block and class probes.
     //
     if (compileFlags->IsSet(JitFlags::JIT_FLAG_BBINSTR))
index 0bfe717..2632ab5 100644 (file)
@@ -3687,6 +3687,7 @@ private:
 public:
     void impInit();
     void impImport();
+    void impFixPredLists();
 
     CORINFO_CLASS_HANDLE impGetRefAnyClass();
     CORINFO_CLASS_HANDLE impGetRuntimeArgumentHandle();
@@ -4397,6 +4398,7 @@ public:
     DomTreeNode* fgSsaDomTree;
 
     bool fgBBVarSetsInited;
+    bool fgOSROriginalEntryBBProtected;
 
     // Allocate array like T* a = new T[fgBBNumMax + 1];
     // Using helper so we don't keep forgetting +1.
index dff9583..60a4b6f 100644 (file)
@@ -46,11 +46,12 @@ void Compiler::fgInit()
 
     /* Initialize the basic block list */
 
-    fgFirstBB        = nullptr;
-    fgLastBB         = nullptr;
-    fgFirstColdBlock = nullptr;
-    fgEntryBB        = nullptr;
-    fgOSREntryBB     = nullptr;
+    fgFirstBB                     = nullptr;
+    fgLastBB                      = nullptr;
+    fgFirstColdBlock              = nullptr;
+    fgEntryBB                     = nullptr;
+    fgOSREntryBB                  = nullptr;
+    fgOSROriginalEntryBBProtected = false;
 
 #if defined(FEATURE_EH_FUNCLETS)
     fgFirstFuncletBB  = nullptr;
@@ -3922,6 +3923,10 @@ void Compiler::fgCheckForLoopsInHandlers()
 //    the middle of the try. But we defer that until after importation.
 //    See fgPostImportationCleanup.
 //
+//    Also protect the original method entry, if it was imported, since
+//    we may decide to branch there during morph as part of the tail recursion
+//    to loop optimization.
+//
 void Compiler::fgFixEntryFlowForOSR()
 {
     // Ensure lookup IL->BB lookup table is valid
@@ -3944,6 +3949,8 @@ void Compiler::fgFixEntryFlowForOSR()
     // Now branch from method start to the right spot.
     //
     fgEnsureFirstBBisScratch();
+    assert(fgFirstBB->bbJumpKind == BBJ_NONE);
+    fgRemoveRefPred(fgFirstBB->bbNext, fgFirstBB);
     fgFirstBB->bbJumpKind = BBJ_ALWAYS;
     fgFirstBB->bbJumpDest = osrEntry;
     fgAddRefPred(osrEntry, fgFirstBB);
index 7022e3c..3773fe5 100644 (file)
@@ -785,7 +785,7 @@ bool Compiler::fgDumpFlowGraph(Phases phase, PhasePosition pos)
         return false;
     }
 
-    JITDUMP("Dumping flow graph %s phase %s\n", (pos == PhasePosition::PrePhase) ? "before" : "after",
+    JITDUMP("Writing out flow graph %s phase %s\n", (pos == PhasePosition::PrePhase) ? "before" : "after",
             PhaseNames[phase]);
 
     bool        validWeights  = fgHaveValidEdgeWeights;
@@ -2664,8 +2664,8 @@ bool BBPredsChecker::CheckJump(BasicBlock* blockPred, BasicBlock* block)
             break;
 
         case BBJ_LEAVE:
-            // We may see BBJ_LEAVE preds in unimported blocks if we haven't done cleanup yet.
-            if (!comp->compPostImportationCleanupDone && ((blockPred->bbFlags & BBF_IMPORTED) == 0))
+            // We may see BBJ_LEAVE preds if we haven't done cleanup yet.
+            if (!comp->compPostImportationCleanupDone)
             {
                 return true;
             }
@@ -2907,7 +2907,7 @@ void Compiler::fgDebugCheckBBlist(bool checkBBNum /* = false */, bool checkBBRef
 
         // Under OSR, if we also are keeping the original method entry around,
         // mark that as implicitly referenced as well.
-        if (opts.IsOSR() && (block == fgEntryBB) && ((block->bbFlags & BBF_IMPORTED) != 0))
+        if (opts.IsOSR() && (block == fgEntryBB) && fgOSROriginalEntryBBProtected)
         {
             blockRefs += 1;
         }
index 3592c54..6b67696 100644 (file)
@@ -693,14 +693,6 @@ void Compiler::fgComputePreds()
     // the first block is always reachable
     fgFirstBB->bbRefs = 1;
 
-    // Under OSR, we may need to specially protect the original method entry.
-    //
-    if (opts.IsOSR() && (fgEntryBB != nullptr) && (fgEntryBB->bbFlags & BBF_IMPORTED))
-    {
-        JITDUMP("OSR: protecting original method entry " FMT_BB "\n", fgEntryBB->bbNum);
-        fgEntryBB->bbRefs = 1;
-    }
-
     for (BasicBlock* const block : Blocks())
     {
         switch (block->bbJumpKind)
@@ -760,7 +752,7 @@ void Compiler::fgComputePreds()
 
                 if (!block->hasHndIndex())
                 {
-                    NO_WAY("endfinally outside a finally/fault block.");
+                    BADCODE("endfinally outside a finally/fault block.");
                 }
 
                 unsigned  hndIndex = block->getHndIndex();
@@ -768,7 +760,7 @@ void Compiler::fgComputePreds()
 
                 if (!ehDsc->HasFinallyOrFaultHandler())
                 {
-                    NO_WAY("endfinally outside a finally/fault block.");
+                    BADCODE("endfinally outside a finally/fault block.");
                 }
 
                 if (ehDsc->HasFinallyHandler())
index 40c72bf..f5e84d5 100644 (file)
@@ -616,14 +616,6 @@ PhaseStatus Compiler::fgImport()
         compInlineResult->SetImportedILSize(info.compILImportSize);
     }
 
-    // Full preds are only used later on
-    assert(!fgComputePredsDone);
-    if (fgCheapPredsValid)
-    {
-        // Cheap predecessors are only used during importation
-        fgRemovePreds();
-    }
-
     return PhaseStatus::MODIFIED_EVERYTHING;
 }
 
index c691c64..34db37c 100644 (file)
@@ -2198,15 +2198,21 @@ void Compiler::impSpillLclRefs(unsigned lclNum, unsigned chkLevel)
     }
 }
 
-/*****************************************************************************
- *
- *  Push catch arg onto the stack.
- *  If there are jumps to the beginning of the handler, insert basic block
- *  and spill catch arg to a temp. Update the handler block if necessary.
- *
- *  Returns the basic block of the actual handler.
- */
-
+//------------------------------------------------------------------------
+// impPushCatchArgOnStack: Push catch arg onto the stack.
+//
+// Arguments:
+//   hndBlk - first block of the catch handler
+//   clsHnd - type being caught
+//   isSingleBlockFilter - true if catch has a single block filter
+//
+// Returns:
+//   the basic block of the actual handler.
+//
+// Notes:
+//  If there are jumps to the beginning of the handler, insert basic block
+//  and spill catch arg to a temp. Update the handler block if necessary.
+//
 BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_HANDLE clsHnd, bool isSingleBlockFilter)
 {
     // Do not inject the basic block twice on reimport. This should be
@@ -2251,22 +2257,21 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H
     const bool forceInsertNewBlock = compStressCompile(STRESS_CATCH_ARG, 5);
 #endif // defined(JIT32_GCENCODER)
 
-    /* Spill GT_CATCH_ARG to a temp if there are jumps to the beginning of the handler */
-    if (hndBlk->bbRefs > 1 || forceInsertNewBlock)
+    // Spill GT_CATCH_ARG to a temp if there are jumps to the beginning of the handler.
+    //
+    // For typical normal handlers we expect ref count to be 2 here (one artificial, one for
+    // the edge from the xxx...)
+    //
+    if ((hndBlk->bbRefs > 2) || forceInsertNewBlock)
     {
-        if (hndBlk->bbRefs == 1)
-        {
-            hndBlk->bbRefs++;
-        }
-
-        /* Create extra basic block for the spill */
+        // Create extra basic block for the spill
+        //
         BasicBlock* newBlk = fgNewBBbefore(BBJ_NONE, hndBlk, /* extendRegion */ true);
         newBlk->bbFlags |= BBF_IMPORTED | BBF_DONT_REMOVE;
         newBlk->inheritWeight(hndBlk);
         newBlk->bbCodeOffs = hndBlk->bbCodeOffs;
 
-        /* Account for the new link we are about to create */
-        hndBlk->bbRefs++;
+        fgAddRefPred(hndBlk, newBlk);
 
         // Spill into a temp.
         unsigned tempNum         = lvaGrabTemp(false DEBUGARG("SpillCatchArg"));
@@ -4632,18 +4637,25 @@ GenTree* Compiler::impFixupStructReturnType(GenTree* op)
     return op;
 }
 
-/*****************************************************************************
-   CEE_LEAVE may be jumping out of a protected block, viz, a catch or a
-   finally-protected try. We find the finally blocks protecting the current
-   offset (in order) by walking over the complete exception table and
-   finding enclosing clauses. This assumes that the table is sorted.
-   This will create a series of BBJ_CALLFINALLY -> BBJ_CALLFINALLY ... -> BBJ_ALWAYS.
-
-   If we are leaving a catch handler, we need to attach the
-   CPX_ENDCATCHes to the correct BBJ_CALLFINALLY blocks.
-
-   After this function, the BBJ_LEAVE block has been converted to a different type.
- */
+//------------------------------------------------------------------------
+// impImportLeave: canonicalize flow when leaving a protected region
+//
+// Arguments:
+//   block - block with BBJ_LEAVE jump kind to canonicalize
+//
+// Notes:
+//
+//   CEE_LEAVE may be jumping out of a protected block, viz, a catch or a
+//   finally-protected try. We find the finally blocks protecting the current
+//   offset (in order) by walking over the complete exception table and
+//   finding enclosing clauses. This assumes that the table is sorted.
+//   This will create a series of BBJ_CALLFINALLY -> BBJ_CALLFINALLY ... -> BBJ_ALWAYS.
+//
+//   If we are leaving a catch handler, we need to attach the
+//   CPX_ENDCATCHes to the correct BBJ_CALLFINALLY blocks.
+//
+//   After this function, the BBJ_LEAVE block has been converted to a different type.
+//
 
 #if !defined(FEATURE_EH_FUNCLETS)
 
@@ -4658,10 +4670,9 @@ void Compiler::impImportLeave(BasicBlock* block)
     }
 #endif // DEBUG
 
-    bool        invalidatePreds = false; // If we create new blocks, invalidate the predecessor lists (if created)
-    unsigned    blkAddr         = block->bbCodeOffs;
-    BasicBlock* leaveTarget     = block->bbJumpDest;
-    unsigned    jmpAddr         = leaveTarget->bbCodeOffs;
+    unsigned const    blkAddr     = block->bbCodeOffs;
+    BasicBlock* const leaveTarget = block->bbJumpDest;
+    unsigned const    jmpAddr     = leaveTarget->bbCodeOffs;
 
     // LEAVE clears the stack, spill side effects, and set stack to 0
 
@@ -4703,9 +4714,13 @@ void Compiler::impImportLeave(BasicBlock* block)
 
             // Make a list of all the currently pending endCatches
             if (endCatches)
+            {
                 endCatches = gtNewOperNode(GT_COMMA, TYP_VOID, endCatches, endCatch);
+            }
             else
+            {
                 endCatches = endCatch;
+            }
 
 #ifdef DEBUG
             if (verbose)
@@ -4739,7 +4754,9 @@ void Compiler::impImportLeave(BasicBlock* block)
                 callBlock->bbJumpKind = BBJ_CALLFINALLY; // convert the BBJ_LEAVE to BBJ_CALLFINALLY
 
                 if (endCatches)
+                {
                     impAppendTree(endCatches, CHECK_SPILL_NONE, impCurStmtDI);
+                }
 
 #ifdef DEBUG
                 if (verbose)
@@ -4757,9 +4774,13 @@ void Compiler::impImportLeave(BasicBlock* block)
                 /* Calling the finally block */
                 callBlock = fgNewBBinRegion(BBJ_CALLFINALLY, XTnum + 1, 0, step);
                 assert(step->bbJumpKind == BBJ_ALWAYS);
+                if (step->bbJumpDest != nullptr)
+                {
+                    fgRemoveRefPred(step->bbJumpDest, step);
+                }
                 step->bbJumpDest = callBlock; // the previous call to a finally returns to this call (to the next
                                               // finally in the chain)
-                step->bbJumpDest->bbRefs++;
+                fgAddRefPred(callBlock, step);
 
                 /* The new block will inherit this block's weight */
                 callBlock->inheritWeight(block);
@@ -4805,14 +4826,18 @@ void Compiler::impImportLeave(BasicBlock* block)
             unsigned finallyNesting = compHndBBtab[XTnum].ebdHandlerNestingLevel;
             assert(finallyNesting <= compHndBBtabCount);
 
+            if (callBlock->bbJumpDest != nullptr)
+            {
+                fgRemoveRefPred(callBlock->bbJumpDest, callBlock);
+            }
             callBlock->bbJumpDest = HBtab->ebdHndBeg; // This callBlock will call the "finally" handler.
-            GenTree* endLFin      = new (this, GT_END_LFIN) GenTreeVal(GT_END_LFIN, TYP_VOID, finallyNesting);
-            endLFinStmt           = gtNewStmt(endLFin);
-            endCatches            = NULL;
+            fgAddRefPred(HBtab->ebdHndBeg, callBlock);
 
-            encFinallies++;
+            GenTree* endLFin = new (this, GT_END_LFIN) GenTreeVal(GT_END_LFIN, TYP_VOID, finallyNesting);
+            endLFinStmt      = gtNewStmt(endLFin);
+            endCatches       = NULL;
 
-            invalidatePreds = true;
+            encFinallies++;
         }
     }
 
@@ -4826,7 +4851,9 @@ void Compiler::impImportLeave(BasicBlock* block)
         block->bbJumpKind = BBJ_ALWAYS; // convert the BBJ_LEAVE to a BBJ_ALWAYS
 
         if (endCatches)
+        {
             impAppendTree(endCatches, CHECK_SPILL_NONE, impCurStmtDI);
+        }
 
 #ifdef DEBUG
         if (verbose)
@@ -4849,7 +4876,12 @@ void Compiler::impImportLeave(BasicBlock* block)
         // depending on which is the inner region.
         BasicBlock* finalStep = fgNewBBinRegion(BBJ_ALWAYS, tryIndex, leaveTarget->bbHndIndex, step);
         finalStep->bbFlags |= BBF_KEEP_BBJ_ALWAYS;
+        if (step->bbJumpDest != nullptr)
+        {
+            fgRemoveRefPred(step->bbJumpDest, step);
+        }
         step->bbJumpDest = finalStep;
+        fgAddRefPred(finalStep, step);
 
         /* The new block will inherit this block's weight */
         finalStep->inheritWeight(block);
@@ -4878,18 +4910,11 @@ void Compiler::impImportLeave(BasicBlock* block)
         impEndTreeList(finalStep, endLFinStmt, lastStmt);
 
         finalStep->bbJumpDest = leaveTarget; // this is the ultimate destination of the LEAVE
+        fgAddRefPred(leaveTarget, finalStep);
 
         // Queue up the jump target for importing
 
         impImportBlockPending(leaveTarget);
-
-        invalidatePreds = true;
-    }
-
-    if (invalidatePreds && fgComputePredsDone)
-    {
-        JITDUMP("\n**** impImportLeave - Removing preds after creating new blocks\n");
-        fgRemovePreds();
     }
 
 #ifdef DEBUG
@@ -4918,10 +4943,9 @@ void Compiler::impImportLeave(BasicBlock* block)
     }
 #endif // DEBUG
 
-    bool        invalidatePreds = false; // If we create new blocks, invalidate the predecessor lists (if created)
-    unsigned    blkAddr         = block->bbCodeOffs;
-    BasicBlock* leaveTarget     = block->bbJumpDest;
-    unsigned    jmpAddr         = leaveTarget->bbCodeOffs;
+    unsigned    blkAddr     = block->bbCodeOffs;
+    BasicBlock* leaveTarget = block->bbJumpDest;
+    unsigned    jmpAddr     = leaveTarget->bbCodeOffs;
 
     // LEAVE clears the stack, spill side effects, and set stack to 0
 
@@ -5000,9 +5024,13 @@ void Compiler::impImportLeave(BasicBlock* block)
                 exitBlock = fgNewBBinRegion(BBJ_EHCATCHRET, 0, XTnum + 1, step);
 
                 assert(step->KindIs(BBJ_ALWAYS, BBJ_EHCATCHRET));
+                if (step->bbJumpDest != nullptr)
+                {
+                    fgRemoveRefPred(step->bbJumpDest, step);
+                }
                 step->bbJumpDest = exitBlock; // the previous step (maybe a call to a nested finally, or a nested catch
                                               // exit) returns to this block
-                step->bbJumpDest->bbRefs++;
+                fgAddRefPred(exitBlock, step);
 
 #if defined(TARGET_ARM)
                 if (stepType == ST_FinallyReturn)
@@ -5021,8 +5049,6 @@ void Compiler::impImportLeave(BasicBlock* block)
                 step     = exitBlock;
                 stepType = ST_Catch;
 
-                invalidatePreds = true;
-
 #ifdef DEBUG
                 if (verbose)
                 {
@@ -5055,8 +5081,9 @@ void Compiler::impImportLeave(BasicBlock* block)
                 // which might be in the middle of the "try". In most cases, the BBJ_ALWAYS will jump to the
                 // next block, and flow optimizations will remove it.
                 block->bbJumpKind = BBJ_ALWAYS;
+                fgRemoveRefPred(block->bbJumpDest, block);
                 block->bbJumpDest = callBlock;
-                block->bbJumpDest->bbRefs++;
+                fgAddRefPred(callBlock, block);
 
                 /* The new block will inherit this block's weight */
                 callBlock->inheritWeight(block);
@@ -5115,8 +5142,12 @@ void Compiler::impImportLeave(BasicBlock* block)
                     // Need to create another step block in the 'try' region that will actually branch to the
                     // call-to-finally thunk.
                     BasicBlock* step2 = fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step);
-                    step->bbJumpDest  = step2;
-                    step->bbJumpDest->bbRefs++;
+                    if (step->bbJumpDest != nullptr)
+                    {
+                        fgRemoveRefPred(step->bbJumpDest, step);
+                    }
+                    step->bbJumpDest = step2;
+                    fgAddRefPred(step2, step);
                     step2->inheritWeight(block);
                     step2->bbFlags |= (block->bbFlags & BBF_RUN_RARELY) | BBF_IMPORTED;
 
@@ -5144,10 +5175,14 @@ void Compiler::impImportLeave(BasicBlock* block)
                 unsigned callFinallyHndIndex = 0; // don't care
 #endif // !FEATURE_EH_CALLFINALLY_THUNKS
 
-                callBlock        = fgNewBBinRegion(BBJ_CALLFINALLY, callFinallyTryIndex, callFinallyHndIndex, step);
+                callBlock = fgNewBBinRegion(BBJ_CALLFINALLY, callFinallyTryIndex, callFinallyHndIndex, step);
+                if (step->bbJumpDest != nullptr)
+                {
+                    fgRemoveRefPred(step->bbJumpDest, step);
+                }
                 step->bbJumpDest = callBlock; // the previous call to a finally returns to this call (to the next
                                               // finally in the chain)
-                step->bbJumpDest->bbRefs++;
+                fgAddRefPred(callBlock, step);
 
 #if defined(TARGET_ARM)
                 if (stepType == ST_FinallyReturn)
@@ -5188,9 +5223,12 @@ void Compiler::impImportLeave(BasicBlock* block)
             }
 #endif
 
+            if (callBlock->bbJumpDest != nullptr)
+            {
+                fgRemoveRefPred(callBlock->bbJumpDest, callBlock);
+            }
             callBlock->bbJumpDest = HBtab->ebdHndBeg; // This callBlock will call the "finally" handler.
-
-            invalidatePreds = true;
+            fgAddRefPred(HBtab->ebdHndBeg, callBlock);
         }
         else if (HBtab->HasCatchHandler() && jitIsBetween(blkAddr, tryBeg, tryEnd) &&
                  !jitIsBetween(jmpAddr, tryBeg, tryEnd))
@@ -5250,9 +5288,14 @@ void Compiler::impImportLeave(BasicBlock* block)
                 }
 
                 /* Create a new exit block in the try region for the existing step block to jump to in this scope */
-                catchStep        = fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step);
+                catchStep = fgNewBBinRegion(BBJ_ALWAYS, XTnum + 1, 0, step);
+
+                if (step->bbJumpDest != nullptr)
+                {
+                    fgRemoveRefPred(step->bbJumpDest, step);
+                }
                 step->bbJumpDest = catchStep;
-                step->bbJumpDest->bbRefs++;
+                fgAddRefPred(catchStep, step);
 
 #if defined(TARGET_ARM)
                 if (stepType == ST_FinallyReturn)
@@ -5288,8 +5331,6 @@ void Compiler::impImportLeave(BasicBlock* block)
                 /* This block is the new step */
                 step     = catchStep;
                 stepType = ST_Try;
-
-                invalidatePreds = true;
             }
         }
     }
@@ -5309,7 +5350,12 @@ void Compiler::impImportLeave(BasicBlock* block)
     }
     else
     {
+        if (step->bbJumpDest != nullptr)
+        {
+            fgRemoveRefPred(step->bbJumpDest, step);
+        }
         step->bbJumpDest = leaveTarget; // this is the ultimate destination of the LEAVE
+        fgAddRefPred(leaveTarget, step);
 
 #if defined(TARGET_ARM)
         if (stepType == ST_FinallyReturn)
@@ -5332,12 +5378,6 @@ void Compiler::impImportLeave(BasicBlock* block)
         impImportBlockPending(leaveTarget);
     }
 
-    if (invalidatePreds && fgComputePredsDone)
-    {
-        JITDUMP("\n**** impImportLeave - Removing preds after creating new blocks\n");
-        fgRemovePreds();
-    }
-
 #ifdef DEBUG
     fgVerifyHandlerTab();
 
@@ -5386,6 +5426,7 @@ void Compiler::impResetLeaveBlock(BasicBlock* block, unsigned jmpAddr)
         BasicBlock* dupBlock = bbNewBasicBlock(block->bbJumpKind);
         dupBlock->bbFlags    = block->bbFlags;
         dupBlock->bbJumpDest = block->bbJumpDest;
+        fgAddRefPred(dupBlock->bbJumpDest, dupBlock);
         dupBlock->copyEHRegion(block);
         dupBlock->bbCatchTyp = block->bbCatchTyp;
 
@@ -5414,7 +5455,10 @@ void Compiler::impResetLeaveBlock(BasicBlock* block, unsigned jmpAddr)
 
     block->bbJumpKind = BBJ_LEAVE;
     fgInitBBLookup();
+
+    fgRemoveRefPred(block->bbJumpDest, block);
     block->bbJumpDest = fgLookupBB(jmpAddr);
+    fgAddRefPred(block->bbJumpDest, block);
 
     // We will leave the BBJ_ALWAYS block we introduced. When it's reimported
     // the BBJ_ALWAYS block will be unreachable, and will be removed after. The
@@ -6321,6 +6365,13 @@ void Compiler::impImportBlockCode(BasicBlock* block)
             block->bbFlags |= BBF_PARTIAL_COMPILATION_PATCHPOINT;
             setMethodHasPartialCompilationPatchpoint();
 
+            // Block will no longer flow to any of its successors.
+            //
+            for (BasicBlock* const succ : block->Succs())
+            {
+                fgRemoveRefPred(succ, block);
+            }
+
             // Change block to BBJ_THROW so we won't trigger importation of successors.
             //
             block->bbJumpKind = BBJ_THROW;
@@ -7626,7 +7677,19 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
                 if (opts.OptimizationEnabled() && (block->bbJumpDest == block->bbNext))
                 {
-                    block->bbJumpKind = BBJ_NONE;
+                    // We may have already modified `block`'s jump kind, if this is a re-importation.
+                    //
+                    if (block->bbJumpKind == BBJ_COND)
+                    {
+                        JITDUMP(FMT_BB " both branches and falls through to " FMT_BB ", changing to BBJ_NONE\n",
+                                block->bbNum, block->bbNext->bbNum);
+                        fgRemoveRefPred(block->bbJumpDest, block);
+                        block->bbJumpKind = BBJ_NONE;
+                    }
+                    else
+                    {
+                        assert(block->bbJumpKind == BBJ_NONE);
+                    }
 
                     if (op1->gtFlags & GTF_GLOB_EFFECT)
                     {
@@ -7681,21 +7744,22 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                               || (block->bbJumpKind == foldedJumpKind)); // this can happen if we are reimporting the
                                                                          // block for the second time
 
-                    block->bbJumpKind = foldedJumpKind;
-#ifdef DEBUG
-                    if (verbose)
+                    if (block->bbJumpKind == BBJ_COND)
                     {
-                        if (op1->AsIntCon()->gtIconVal)
+                        if (foldedJumpKind == BBJ_NONE)
                         {
-                            printf("\nThe conditional jump becomes an unconditional jump to " FMT_BB "\n",
-                                   block->bbJumpDest->bbNum);
+                            JITDUMP("\nThe block falls through into the next " FMT_BB "\n", block->bbNext->bbNum);
+                            fgRemoveRefPred(block->bbJumpDest, block);
                         }
                         else
                         {
-                            printf("\nThe block falls through into the next " FMT_BB "\n", block->bbNext->bbNum);
+                            JITDUMP("\nThe conditional jump becomes an unconditional jump to " FMT_BB "\n",
+                                    block->bbJumpDest->bbNum);
+                            fgRemoveRefPred(block->bbNext, block);
                         }
+                        block->bbJumpKind = foldedJumpKind;
                     }
-#endif
+
                     break;
                 }
 
@@ -7859,7 +7923,19 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 
                 if (opts.OptimizationEnabled() && (block->bbJumpDest == block->bbNext))
                 {
-                    block->bbJumpKind = BBJ_NONE;
+                    // We may have already modified `block`'s jump kind, if this is a re-importation.
+                    //
+                    if (block->bbJumpKind == BBJ_COND)
+                    {
+                        JITDUMP(FMT_BB " both branches and falls through to " FMT_BB ", changing to BBJ_NONE\n",
+                                block->bbNum, block->bbNext->bbNum);
+                        fgRemoveRefPred(block->bbJumpDest, block);
+                        block->bbJumpKind = BBJ_NONE;
+                    }
+                    else
+                    {
+                        assert(block->bbJumpKind == BBJ_NONE);
+                    }
 
                     if (op1->gtFlags & GTF_GLOB_EFFECT)
                     {
@@ -11944,21 +12020,22 @@ SPILLSTACK:
 #pragma warning(pop)
 #endif
 
-/*****************************************************************************/
+//------------------------------------------------------------------------
+// impImportBlockPending: ensure that block will be imported
+//
+// Arguments:
+//    block - block that should be imported.
+//
+// Notes:
+//   Ensures that "block" is a member of the list of BBs waiting to be imported, pushing it on the list if
+//   necessary (and ensures that it is a member of the set of BB's on the list, by setting its byte in
+//   impPendingBlockMembers).  Does *NOT* change the existing "pre-state" of the block.
+//
+//   Merges the current verification state into the verification state of "block" (its "pre-state").
 //
-// Ensures that "block" is a member of the list of BBs waiting to be imported, pushing it on the list if
-// necessary (and ensures that it is a member of the set of BB's on the list, by setting its byte in
-// impPendingBlockMembers).  Merges the current verification state into the verification state of "block"
-// (its "pre-state").
-
 void Compiler::impImportBlockPending(BasicBlock* block)
 {
-#ifdef DEBUG
-    if (verbose)
-    {
-        printf("\nimpImportBlockPending for " FMT_BB "\n", block->bbNum);
-    }
-#endif
+    JITDUMP("\nimpImportBlockPending for " FMT_BB "\n", block->bbNum);
 
     // We will add a block to the pending set if it has not already been imported (or needs to be re-imported),
     // or if it has, but merging in a predecessor's post-state changes the block's pre-state.
@@ -12150,12 +12227,6 @@ void Compiler::impWalkSpillCliqueFromPred(BasicBlock* block, SpillCliqueWalker*
 {
     bool toDo = true;
 
-    noway_assert(!fgComputePredsDone);
-    if (!fgCheapPredsValid)
-    {
-        fgComputeCheapPreds();
-    }
-
     BlockListNode* succCliqueToDo = nullptr;
     BlockListNode* predCliqueToDo = new (this) BlockListNode(block);
     while (toDo)
@@ -12190,9 +12261,8 @@ void Compiler::impWalkSpillCliqueFromPred(BasicBlock* block, SpillCliqueWalker*
             BasicBlock* blk     = node->m_blk;
             FreeBlockListNode(node);
 
-            for (BasicBlockList* pred = blk->bbCheapPreds; pred != nullptr; pred = pred->next)
+            for (BasicBlock* predBlock : blk->PredBlocks())
             {
-                BasicBlock* predBlock = pred->block;
                 // If it's not already in the clique, add it, and also add it
                 // as a member of the predecessor "toDo" set.
                 if (impSpillCliqueGetMember(SpillCliquePred, predBlock) == 0)
@@ -12458,22 +12528,20 @@ void Compiler::impSpillCliqueSetMember(SpillCliqueDir predOrSucc, BasicBlock* bl
     }
 }
 
-/*****************************************************************************
- *
- *  Convert the instrs ("import") into our internal format (trees). The
- *  basic flowgraph has already been constructed and is passed in.
- */
-
+//------------------------------------------------------------------------
+// impImport: convert IL into jit IR
+//
+// Notes:
+//
+// The basic flowgraph has already been constructed. Blocks are filled in
+// by the importer as they are discovered to be reachable.
+//
+// Blocks may be added to provide the right structure for various EH
+// constructs (notably LEAVEs from catches and finallies).
+//
 void Compiler::impImport()
 {
-#ifdef DEBUG
-    if (verbose)
-    {
-        printf("*************** In impImport() for %s\n", info.compFullName);
-    }
-#endif
-
-    Compiler* inlineRoot = impInlineRoot();
+    Compiler* const inlineRoot = impInlineRoot();
 
     if (info.compMaxStack <= SMALL_STACK_SIZE)
     {
@@ -12608,20 +12676,75 @@ void Compiler::impImport()
         }
     }
 
-#ifdef DEBUG
-    if (verbose && info.compXcptnsCount)
+    // If the method had EH, we may be missing some pred edges
+    // (notably those from BBJ_EHFINALLYRET blocks). Add them.
+    // Only needed for the root method, since inlinees can't have EH.
+    //
+    if (!compIsForInlining() && (info.compXcptnsCount > 0))
     {
-        printf("\nAfter impImport() added block for try,catch,finally");
-        fgDispBasicBlocks();
-        printf("\n");
+        impFixPredLists();
+        JITDUMP("\nAfter impImport() added blocks for try,catch,finally");
+        JITDUMPEXEC(fgDispBasicBlocks());
     }
+}
 
-    // Used in impImportBlockPending() for STRESS_CHK_REIMPORT
-    for (BasicBlock* const block : Blocks())
+//------------------------------------------------------------------------
+// impFixPredLists: add pred edges from finally returns to their continuations
+//
+// Notes:
+//   These edges were not added during the initial pred list computation,
+//   because the initial flow graph does not contain the callfinally/always
+//   block pairs; those blocks are added during importation.
+//
+//   We rely on handler blocks being lexically contiguous between begin and last.
+//
+void Compiler::impFixPredLists()
+{
+    unsigned XTnum = 0;
+    bool     added = false;
+
+    for (EHblkDsc *HBtab = compHndBBtab; XTnum < compHndBBtabCount; XTnum++, HBtab++)
     {
-        block->bbFlags &= ~BBF_VISITED;
+        if (HBtab->HasFinallyHandler())
+        {
+            BasicBlock* const finallyBegBlock  = HBtab->ebdHndBeg;
+            BasicBlock* const finallyLastBlock = HBtab->ebdHndLast;
+
+            for (BasicBlock* const finallyBlock : BasicBlockRangeList(finallyBegBlock, finallyLastBlock))
+            {
+                if (finallyBlock->getHndIndex() != XTnum)
+                {
+                    // Must be a nested handler... we could skip to its last
+                    //
+                    continue;
+                }
+
+                if (finallyBlock->bbJumpKind != BBJ_EHFINALLYRET)
+                {
+                    continue;
+                }
+
+                for (BasicBlock* const predBlock : finallyBegBlock->PredBlocks())
+                {
+                    // We only care about preds that are callfinallies.
+                    //
+                    if (!predBlock->isBBCallAlwaysPair())
+                    {
+                        continue;
+                    }
+
+                    BasicBlock* const continuation = predBlock->bbNext;
+                    fgAddRefPred(continuation, finallyBlock);
+
+                    if (!added)
+                    {
+                        JITDUMP("\nAdding pred edges from BBJ_EHFINALLYRET blocks\n");
+                        added = true;
+                    }
+                }
+            }
+        }
     }
-#endif
 }
 
 //------------------------------------------------------------------------
index 5445915..b2a2d46 100644 (file)
@@ -1304,14 +1304,24 @@ DONE:
             const bool         mustImportEntryBlock = gtIsRecursiveCall(methHnd) || actualCall->IsInlineCandidate() ||
                                               actualCall->IsGuardedDevirtualizationCandidate();
 
-            // Only schedule importation if we're not currently importing.
+            // Only schedule importation if we're not currently importing the entry BB.
             //
             if (opts.IsOSR() && mustImportEntryBlock && (compCurBB != fgEntryBB))
             {
-                JITDUMP("\ninlineable or recursive tail call [%06u] in the method, so scheduling " FMT_BB
+                JITDUMP("\nOSR: inlineable or recursive tail call [%06u] in the method, so scheduling " FMT_BB
                         " for importation\n",
                         dspTreeID(call), fgEntryBB->bbNum);
                 impImportBlockPending(fgEntryBB);
+
+                if (!fgOSROriginalEntryBBProtected && (fgEntryBB != fgFirstBB))
+                {
+                    // Protect fgEntryBB from deletion, since it may not have any
+                    // explicit flow references until morph.
+                    //
+                    fgEntryBB->bbRefs += 1;
+                    fgOSROriginalEntryBBProtected = true;
+                    JITDUMP("   also protecting original method entry " FMT_BB "\n", fgEntryBB->bbNum);
+                }
             }
         }
     }
index ce404c6..e5a7072 100644 (file)
@@ -4555,7 +4555,9 @@ void Compiler::fgExtendEHRegionBefore(BasicBlock* block)
                 }
 #endif // DEBUG
                 // Change the bbJumpDest for bFilterLast from the old first 'block' to the new first 'bPrev'
+                fgRemoveRefPred(bFilterLast->bbJumpDest, bFilterLast);
                 bFilterLast->bbJumpDest = bPrev;
+                fgAddRefPred(bPrev, bFilterLast);
             }
         }
 
index 3d97c19..77453e0 100644 (file)
@@ -13687,12 +13687,16 @@ void Compiler::fgMorphBlocks()
 
     // Under OSR, we no longer need to specially protect the original method entry
     //
-    if (opts.IsOSR() && (fgEntryBB != nullptr) && (fgEntryBB->bbFlags & BBF_IMPORTED))
+    if (opts.IsOSR() && (fgEntryBB != nullptr))
     {
-        JITDUMP("OSR: un-protecting original method entry " FMT_BB "\n", fgEntryBB->bbNum);
-        assert(fgEntryBB->bbRefs > 0);
-        fgEntryBB->bbRefs--;
-        // We don't need to remember this block anymore.
+        if (fgOSROriginalEntryBBProtected)
+        {
+            JITDUMP("OSR: un-protecting original method entry " FMT_BB "\n", fgEntryBB->bbNum);
+            assert(fgEntryBB->bbRefs > 0);
+            fgEntryBB->bbRefs--;
+        }
+
+        // And we don't need to remember this block anymore.
         fgEntryBB = nullptr;
     }