JIT: add concept of edge likelihood (#81738)
authorAndy Ayers <andya@microsoft.com>
Thu, 9 Feb 2023 02:41:40 +0000 (18:41 -0800)
committerGitHub <noreply@github.com>
Thu, 9 Feb 2023 02:41:40 +0000 (18:41 -0800)
Now that FlowEdges are created early and persist, decorate them with likelihood
information early, if we have edge-based PGO data.

We don't use the likelihood for anything yet, but I need to get it in circulation
so I can start working on refining both initial and subsequent consistency of
the data.

Also add a diagnostic checker for likelihood, and a way to enable it. All of
this is off by default.

src/coreclr/jit/block.h
src/coreclr/jit/compiler.cpp
src/coreclr/jit/fgbasic.cpp
src/coreclr/jit/fgdiagnostic.cpp
src/coreclr/jit/fgprofile.cpp
src/coreclr/jit/jitconfigvalues.h
src/coreclr/jit/optimizer.cpp

index 9e30168..a83823c 100644 (file)
@@ -1800,12 +1800,25 @@ private:
     weight_t m_edgeWeightMin;
     weight_t m_edgeWeightMax;
 
+    // Likelihood that m_sourceBlock transfers control along this edge.
+    // Values in range [0..1]
+    weight_t m_likelihood;
+
     // The count of duplicate "edges" (used for switch stmts or degenerate branches)
     unsigned m_dupCount;
 
+#ifdef DEBUG
+    bool m_likelihoodSet;
+#endif
+
 public:
     FlowEdge(BasicBlock* block, FlowEdge* rest)
-        : m_nextPredEdge(rest), m_sourceBlock(block), m_edgeWeightMin(0), m_edgeWeightMax(0), m_dupCount(0)
+        : m_nextPredEdge(rest)
+        , m_sourceBlock(block)
+        , m_edgeWeightMin(0)
+        , m_edgeWeightMax(0)
+        , m_likelihood(0)
+        , m_dupCount(0) DEBUGARG(m_likelihoodSet(false))
     {
     }
 
@@ -1852,6 +1865,32 @@ public:
     bool setEdgeWeightMaxChecked(weight_t newWeight, BasicBlock* bDst, weight_t slop, bool* wbUsedSlop);
     void setEdgeWeights(weight_t newMinWeight, weight_t newMaxWeight, BasicBlock* bDst);
 
+    weight_t getLikelihood() const
+    {
+        return m_likelihood;
+    }
+
+    void setLikelihood(weight_t likelihood)
+    {
+        assert(likelihood >= 0.0);
+        assert(likelihood <= 1.0);
+        INDEBUG(m_likelihoodSet = true);
+        m_likelihood = likelihood;
+    }
+
+#ifdef DEBUG
+    bool hasLikelihood() const
+    {
+        return m_likelihoodSet;
+    }
+#endif
+
+    weight_t getLikelyWeight() const
+    {
+        assert(m_likelihoodSet);
+        return m_likelihood * m_sourceBlock->bbWeight;
+    }
+
     unsigned getDupCount() const
     {
         return m_dupCount;
index 6a042eb..257da74 100644 (file)
@@ -4434,6 +4434,7 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
     // Note: the importer is sensitive to block weights, so this has
     // to happen before importation.
     //
+    activePhaseChecks |= PhaseChecks::CHECK_PROFILE;
     DoPhase(this, PHASE_INCPROFILE, &Compiler::fgIncorporateProfileData);
 
     // If we're going to instrument code, we may need to prepare before
@@ -4453,8 +4454,8 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
 
     // Enable the post-phase checks that use internal logic to decide when checking makes sense.
     //
-    activePhaseChecks |= PhaseChecks::CHECK_EH | PhaseChecks::CHECK_LOOPS | PhaseChecks::CHECK_UNIQUE |
-                         PhaseChecks::CHECK_PROFILE | PhaseChecks::CHECK_LINKED_LOCALS;
+    activePhaseChecks |=
+        PhaseChecks::CHECK_EH | PhaseChecks::CHECK_LOOPS | PhaseChecks::CHECK_UNIQUE | PhaseChecks::CHECK_LINKED_LOCALS;
 
     // Import: convert the instrs in each basic block to a tree based intermediate representation
     //
index 54de2b5..17ece97 100644 (file)
@@ -488,7 +488,7 @@ void Compiler::fgReplaceSwitchJumpTarget(BasicBlock* blockSwitch, BasicBlock* ne
             //
             FlowEdge* const newEdge = fgAddRefPred(newTarget, blockSwitch);
 
-            // Now set the correct value of newEdge's lDupCount
+            // Now set the correct value of newEdge's DupCount
             // and replace any other jumps in jumpTab[] that go to oldTarget.
             //
             i++;
index 0919e21..d1ea965 100644 (file)
@@ -3635,7 +3635,7 @@ void Compiler::fgDebugCheckBlockLinks()
             {
                 // Create a set with all the successors. Don't use BlockSet, so we don't need to worry
                 // about the BlockSet epoch.
-                BitVecTraits bitVecTraits(fgBBNumMax + 1, this);
+                BitVecTraits bitVecTraits(impInlineRoot()->fgBBNumMax + 1, this);
                 BitVec       succBlocks(BitVecOps::MakeEmpty(&bitVecTraits));
                 for (BasicBlock* const bTarget : block->SwitchTargets())
                 {
index fb40c77..f411bb3 100644 (file)
@@ -732,6 +732,7 @@ public:
     {
         Unknown,
         PostdominatesSource,
+        Pseudo,
         DominatesTarget,
         CriticalEdge,
         Deleted,
@@ -869,7 +870,7 @@ void Compiler::WalkSpanningTree(SpanningTreeVisitor* visitor)
                 //
                 BasicBlock* const target = fgFirstBB;
                 assert(BlockSetOps::IsMember(comp, marked, target->bbNum));
-                visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::PostdominatesSource);
+                visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::Pseudo);
             }
             break;
 
@@ -925,7 +926,7 @@ void Compiler::WalkSpanningTree(SpanningTreeVisitor* visitor)
                     //
                     BasicBlock* const target = dsc->ebdHndBeg;
                     assert(BlockSetOps::IsMember(comp, marked, target->bbNum));
-                    visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::PostdominatesSource);
+                    visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::Pseudo);
                 }
             }
             break;
@@ -1266,6 +1267,7 @@ public:
         switch (kind)
         {
             case EdgeKind::PostdominatesSource:
+            case EdgeKind::Pseudo:
                 NewSourceProbe(source, target);
                 break;
             case EdgeKind::DominatesTarget:
@@ -2664,6 +2666,7 @@ private:
         Edge*       m_nextOutgoingEdge;
         Edge*       m_nextIncomingEdge;
         bool        m_weightKnown;
+        bool        m_isPseudoEdge;
 
         Edge(BasicBlock* source, BasicBlock* target)
             : m_weight(BB_ZERO_WEIGHT)
@@ -2672,6 +2675,7 @@ private:
             , m_nextOutgoingEdge(nullptr)
             , m_nextIncomingEdge(nullptr)
             , m_weightKnown(false)
+            , m_isPseudoEdge(false)
         {
         }
     };
@@ -2817,30 +2821,34 @@ public:
     {
         // We may have this edge in the schema, and so already added this edge to the map.
         //
-        // If not, assume we have a partial schema. We could add a zero count edge,
-        // but such edges don't impact the solving algorithm, so we can omit them.
-        //
         EdgeKey key(source, target);
         Edge*   edge = nullptr;
 
-        if (m_edgeKeyToEdgeMap.Lookup(key, &edge))
-        {
-            BlockInfo* const sourceInfo = BlockToInfo(source);
-            edge->m_nextOutgoingEdge    = sourceInfo->m_outgoingEdges;
-            sourceInfo->m_outgoingEdges = edge;
+        BlockInfo* const sourceInfo = BlockToInfo(source);
 
-            BlockInfo* const targetInfo = BlockToInfo(target);
-            edge->m_nextIncomingEdge    = targetInfo->m_incomingEdges;
-            targetInfo->m_incomingEdges = edge;
-        }
-        else
+        if (!m_edgeKeyToEdgeMap.Lookup(key, &edge))
         {
-            // Because the count is zero, we can just pretend this edge doesn't exist.
+            // If the edge is missing, assume it is zero.
             //
             JITDUMP("Schema is missing non-tree edge " FMT_BB " -> " FMT_BB ", will presume zero\n", source->bbNum,
                     target->bbNum);
+            edge = new (m_allocator) Edge(source, target);
+            m_edges++;
             m_zeroEdges++;
+
+            edge->m_weightKnown = true;
+            edge->m_weight      = 0;
         }
+
+        edge->m_nextOutgoingEdge    = sourceInfo->m_outgoingEdges;
+        sourceInfo->m_outgoingEdges = edge;
+
+        BlockInfo* const targetInfo = BlockToInfo(target);
+        edge->m_nextIncomingEdge    = targetInfo->m_incomingEdges;
+        targetInfo->m_incomingEdges = edge;
+
+        edge->m_isPseudoEdge = (kind == EdgeKind::Pseudo);
+        JITDUMP(" ... pseudo  edge " FMT_BB " -> " FMT_BB "\n", source->bbNum, target->bbNum);
     }
 };
 
@@ -2896,7 +2904,7 @@ void EfficientEdgeCountReconstructor::Prepare()
 
                 if (!m_keyToBlockMap.Lookup(schemaEntry.ILOffset, &sourceBlock))
                 {
-                    JITDUMP("Could not find source block for schema entry %d (IL offset/key %08x\n", iSchema,
+                    JITDUMP("Could not find source block for schema entry %d (IL offset/key %08x)\n", iSchema,
                             schemaEntry.ILOffset);
                 }
 
@@ -2904,7 +2912,7 @@ void EfficientEdgeCountReconstructor::Prepare()
 
                 if (!m_keyToBlockMap.Lookup(schemaEntry.Other, &targetBlock))
                 {
-                    JITDUMP("Could not find target block for schema entry %d (IL offset/key %08x\n", iSchema,
+                    JITDUMP("Could not find target block for schema entry %d (IL offset/key %08x)\n", iSchema,
                             schemaEntry.ILOffset);
                 }
 
@@ -3016,8 +3024,8 @@ void EfficientEdgeCountReconstructor::Solve()
     unsigned       nPasses = 0;
     unsigned const nLimit  = 10;
 
-    JITDUMP("\nSolver: %u blocks, %u unknown; %u edges, %u unknown, %u zero (and so ignored)\n", m_blocks,
-            m_unknownBlocks, m_edges, m_unknownEdges, m_zeroEdges);
+    JITDUMP("\nSolver: %u blocks, %u unknown; %u edges, %u unknown, %u zero\n", m_blocks, m_unknownBlocks, m_edges,
+            m_unknownEdges, m_zeroEdges);
 
     while ((m_unknownBlocks > 0) && (nPasses < nLimit))
     {
@@ -3280,19 +3288,157 @@ void EfficientEdgeCountReconstructor::Propagate()
         return;
     }
 
-    // Set weight on all blocks.
+    // Set weight on all blocks and edges.
     //
     for (BasicBlock* const block : m_comp->Blocks())
     {
         BlockInfo* const info = BlockToInfo(block);
         assert(info->m_weightKnown);
-
         m_comp->fgSetProfileWeight(block, info->m_weight);
 
         // Mark blocks that might be worth optimizing further, given
         // what we know about the PGO data.
         //
+        // TODO: defer until we've figured out edge likelihoods?
+        //
         MarkInterestingBlocks(block, info);
+
+        const unsigned nSucc = block->NumSucc(m_comp);
+        if (nSucc == 0)
+        {
+            // No edges to worry about.
+            //
+            continue;
+        }
+
+        // Else there is at least one FlowEdge.
+        //
+        // Check the reconstruction graph edges. If we have any pseudo-edges
+        // there should be only one pseudo-edge, and no regular edges.
+        //
+        Edge*    pseudoEdge = nullptr;
+        unsigned nEdges     = 0;
+
+        for (Edge* edge = info->m_outgoingEdges; edge != nullptr; edge = edge->m_nextOutgoingEdge)
+        {
+            if (edge->m_isPseudoEdge)
+            {
+                pseudoEdge = edge;
+                continue;
+            }
+
+            assert(pseudoEdge == nullptr);
+            nEdges++;
+        }
+
+        // If we found a pseudo edge there should be only one successor
+        // for block. The flow from block to successor will not represent
+        // real flow. We set likelihood anyways so we can assert later
+        // that all flow edges have known likelihood.
+        //
+        // Note the flowEdge target may not be the same as the pseudo edge target.
+        //
+        if (pseudoEdge != nullptr)
+        {
+            assert(nSucc == 1);
+            assert(block == pseudoEdge->m_sourceBlock);
+            assert(block->bbJumpDest != nullptr);
+            FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(block->bbJumpDest, block);
+            assert(flowEdge != nullptr);
+            flowEdge->setLikelihood(1.0);
+            continue;
+        }
+
+        // We may not have have the same number of model edges and flow edges.
+        //
+        // This can happen because some BBJ_LEAVE blocks may have been missed during
+        // our spanning tree walk since we don't know where all the finallies can return
+        // to just yet (specifically, in WalkSpanningTree, we may not add the bbJumpDest of
+        // a BBJ_LEAVE to the worklist).
+        //
+        // Worst case those missed blocks dominate other blocks so we can't limit
+        // the screening here to specific BBJ kinds.
+        //
+        // Handle those specially by just assuming equally likely successors.
+        //
+        // Do likewise, if the block weight is zero, since examination of edge weights
+        // shouldn't tell us anything about edge likelihoods.
+        //
+        // (TODO: use synthesis here)
+        //
+        if ((nEdges != nSucc) || (info->m_weight == BB_ZERO_WEIGHT))
+        {
+            JITDUMP(FMT_BB " %s , setting outgoing likelihoods heuristically\n", block->bbNum,
+                    (nEdges != nSucc) ? "has inaccurate flow model" : "has zero weight");
+
+            weight_t equalLikelihood = 1.0 / nSucc;
+
+            for (BasicBlock* succ : block->Succs(m_comp))
+            {
+                FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(succ, block);
+                JITDUMP("Setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT " (heur)\n", block->bbNum,
+                        succ->bbNum, equalLikelihood);
+                flowEdge->setLikelihood(equalLikelihood);
+            }
+            continue;
+        }
+
+        // Transfer model edge weight onto the FlowEdges as likelihoods.
+        //
+        assert(nEdges == nSucc);
+        weight_t totalLikelihood = 0;
+
+        for (Edge* edge = info->m_outgoingEdges; edge != nullptr; edge = edge->m_nextOutgoingEdge)
+        {
+            assert(block == edge->m_sourceBlock);
+            FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(edge->m_targetBlock, block);
+            assert(flowEdge != nullptr);
+            assert(!flowEdge->hasLikelihood());
+            weight_t likelihood = 0;
+
+            if (nEdges == 1)
+            {
+                assert(nSucc == 1);
+
+                // Conceptually we could assert(edge->m_weight == info->m_weight);
+                // but we can have inconsistencies.
+                //
+                // Go with what we know for sure, edge should be 100% likely.
+                //
+                likelihood = 1.0;
+                JITDUMP("Setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT " (uniq)\n", block->bbNum,
+                        edge->m_targetBlock->bbNum, likelihood);
+                flowEdge->setLikelihood(likelihood);
+                totalLikelihood += likelihood;
+                break;
+            }
+
+            assert(info->m_weight != BB_ZERO_WEIGHT);
+
+            // We may see nonsensical weights here, cap likelihood.
+            //
+            bool capped = false;
+            if (edge->m_weight > info->m_weight)
+            {
+                capped     = true;
+                likelihood = 1.0;
+            }
+            else
+            {
+                likelihood = edge->m_weight / info->m_weight;
+            }
+            JITDUMP("Setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT " (%s)\n", block->bbNum,
+                    edge->m_targetBlock->bbNum, likelihood, capped ? "pgo -- capped" : "pgo");
+            flowEdge->setLikelihood(likelihood);
+            totalLikelihood += likelihood;
+        }
+
+        if (totalLikelihood != 1.0)
+        {
+            // Consider what to do here... flag this method as needing immediate profile repairs?
+            //
+            JITDUMP(FMT_BB "total outgoing likelihood inaccurate: " FMT_WT "\n", block->bbNum, totalLikelihood);
+        }
     }
 }
 
@@ -4473,9 +4619,15 @@ void Compiler::fgDebugCheckProfileWeights()
         return;
     }
 
-    // We can't check before we have computed edge weights.
+    // We can check classic (min/max, late computed) weights
+    //   and/or
+    // new likelihood based weights.
     //
-    if (!fgEdgeWeightsComputed)
+    const bool verifyClassicWeights = fgEdgeWeightsComputed && (JitConfig.JitProfileChecks() & 0x1) == 0x1;
+    const bool verifyLikelyWeights  = (JitConfig.JitProfileChecks() & 0x2) == 0x2;
+    const bool assertOnFailure      = (JitConfig.JitProfileChecks() & 0x4) == 0x4;
+
+    if (!(verifyClassicWeights || verifyLikelyWeights))
     {
         return;
     }
@@ -4599,9 +4751,9 @@ void Compiler::fgDebugCheckProfileWeights()
         JITDUMP("Profile is NOT self-consistent, found %d problems (%d profiled blocks, %d unprofiled)\n",
                 problemBlocks, profiledBlocks, unprofiledBlocks);
 
-        if (JitConfig.JitProfileChecks() == 2)
+        if (assertOnFailure)
         {
-            assert(!"Inconsistent profile");
+            assert(!"Inconsistent profile data");
         }
     }
 }
@@ -4614,54 +4766,76 @@ void Compiler::fgDebugCheckProfileWeights()
 //   block - block to check
 //
 // Returns:
-//   true if counts consistent, false otherwise.
+//   true if counts consistent or checking disabled, false otherwise.
 //
 // Notes:
 //   Only useful to call on blocks with predecessors.
 //
 bool Compiler::fgDebugCheckIncomingProfileData(BasicBlock* block)
 {
-    weight_t const blockWeight       = block->bbWeight;
-    weight_t       incomingWeightMin = 0;
-    weight_t       incomingWeightMax = 0;
-    bool           foundPreds        = false;
+    const bool verifyClassicWeights = fgEdgeWeightsComputed && (JitConfig.JitProfileChecks() & 0x1) == 0x1;
+    const bool verifyLikelyWeights  = (JitConfig.JitProfileChecks() & 0x2) == 0x2;
+
+    if (!(verifyClassicWeights || verifyLikelyWeights))
+    {
+        return true;
+    }
+
+    weight_t const blockWeight          = block->bbWeight;
+    weight_t       incomingWeightMin    = 0;
+    weight_t       incomingWeightMax    = 0;
+    weight_t       incomingLikelyWeight = 0;
+    bool           foundPreds           = false;
 
     for (FlowEdge* const predEdge : block->PredEdges())
     {
         incomingWeightMin += predEdge->edgeWeightMin();
         incomingWeightMax += predEdge->edgeWeightMax();
+        incomingLikelyWeight += predEdge->getLikelyWeight();
         foundPreds = true;
     }
 
-    if (!foundPreds)
-    {
-        // Assume this is ok.
-        //
-        return true;
-    }
+    bool classicWeightsValid = true;
+    bool likelyWeightsValid  = true;
 
-    if (!fgProfileWeightsConsistent(incomingWeightMin, incomingWeightMax))
+    if (foundPreds)
     {
-        JITDUMP("  " FMT_BB " - incoming min " FMT_WT " inconsistent with incoming max " FMT_WT "\n", block->bbNum,
-                incomingWeightMin, incomingWeightMax);
-        return false;
-    }
+        if (verifyClassicWeights)
+        {
+            if (!fgProfileWeightsConsistent(incomingWeightMin, incomingWeightMax))
+            {
+                JITDUMP("  " FMT_BB " - incoming min " FMT_WT " inconsistent with incoming max " FMT_WT "\n",
+                        block->bbNum, incomingWeightMin, incomingWeightMax);
+                classicWeightsValid = false;
+            }
 
-    if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMin))
-    {
-        JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with incoming min " FMT_WT "\n", block->bbNum,
-                blockWeight, incomingWeightMin);
-        return false;
-    }
+            if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMin))
+            {
+                JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with incoming min " FMT_WT "\n",
+                        block->bbNum, blockWeight, incomingWeightMin);
+                classicWeightsValid = false;
+            }
 
-    if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMax))
-    {
-        JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with incoming max " FMT_WT "\n", block->bbNum,
-                blockWeight, incomingWeightMax);
-        return false;
+            if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMax))
+            {
+                JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with incoming max " FMT_WT "\n",
+                        block->bbNum, blockWeight, incomingWeightMax);
+                classicWeightsValid = false;
+            }
+        }
+
+        if (verifyLikelyWeights)
+        {
+            if (!fgProfileWeightsConsistent(blockWeight, incomingLikelyWeight))
+            {
+                JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with incoming likely weight " FMT_WT "\n",
+                        block->bbNum, blockWeight, incomingLikelyWeight);
+                likelyWeightsValid = false;
+            }
+        }
     }
 
-    return true;
+    return classicWeightsValid && likelyWeightsValid;
 }
 
 //------------------------------------------------------------------------
@@ -4672,82 +4846,114 @@ bool Compiler::fgDebugCheckIncomingProfileData(BasicBlock* block)
 //   block - block to check
 //
 // Returns:
-//   true if counts consistent, false otherwise.
+//   true if counts consistent or checking disabled, false otherwise.
 //
 // Notes:
 //   Only useful to call on blocks with successors.
 //
 bool Compiler::fgDebugCheckOutgoingProfileData(BasicBlock* block)
 {
-    // We want switch targets unified, but not EH edges.
-    //
-    const unsigned numSuccs = block->NumSucc(this);
+    const bool verifyClassicWeights = fgEdgeWeightsComputed && (JitConfig.JitProfileChecks() & 0x1) == 0x1;
+    const bool verifyLikelyWeights  = (JitConfig.JitProfileChecks() & 0x2) == 0x2;
 
-    if (numSuccs == 0)
+    if (!(verifyClassicWeights || verifyLikelyWeights))
     {
-        // Assume this is ok.
-        //
         return true;
     }
 
-    // We won't check finally or filter returns (for now).
+    bool classicWeightsValid = true;
+    bool likelyWeightsValid  = true;
+
+    // We want switch targets unified, but not EH edges.
     //
-    if (block->KindIs(BBJ_EHFINALLYRET, BBJ_EHFILTERRET))
+    const unsigned numSuccs = block->NumSucc(this);
+
+    if ((numSuccs > 0) && !block->KindIs(BBJ_EHFINALLYRET, BBJ_EHFILTERRET))
     {
-        return true;
-    }
+        weight_t const blockWeight        = block->bbWeight;
+        weight_t       outgoingWeightMin  = 0;
+        weight_t       outgoingWeightMax  = 0;
+        weight_t       outgoingLikelihood = 0;
 
-    weight_t const blockWeight       = block->bbWeight;
-    weight_t       outgoingWeightMin = 0;
-    weight_t       outgoingWeightMax = 0;
+        // Walk successor edges and add up flow counts.
+        //
+        unsigned missingEdges      = 0;
+        unsigned missingLikelihood = 0;
 
-    // Walk successor edges and add up flow counts.
-    //
-    int missingEdges = 0;
+        for (unsigned i = 0; i < numSuccs; i++)
+        {
+            BasicBlock* succBlock = block->GetSucc(i, this);
+            FlowEdge*   succEdge  = fgGetPredForBlock(succBlock, block);
 
-    for (unsigned i = 0; i < numSuccs; i++)
-    {
-        BasicBlock* succBlock = block->GetSucc(i, this);
-        FlowEdge*   succEdge  = fgGetPredForBlock(succBlock, block);
+            if (succEdge == nullptr)
+            {
+                missingEdges++;
+                JITDUMP("  " FMT_BB " can't find successor edge to " FMT_BB "\n", block->bbNum, succBlock->bbNum);
+                continue;
+            }
+
+            outgoingWeightMin += succEdge->edgeWeightMin();
+            outgoingWeightMax += succEdge->edgeWeightMax();
+
+            if (succEdge->hasLikelihood())
+            {
+                outgoingLikelihood += succEdge->getLikelihood();
+            }
+            else
+            {
+                missingLikelihood++;
+            }
+        }
 
-        if (succEdge == nullptr)
+        if (missingEdges > 0)
         {
-            missingEdges++;
-            JITDUMP("  " FMT_BB " can't find successor edge to " FMT_BB "\n", block->bbNum, succBlock->bbNum);
-            continue;
+            JITDUMP("  " FMT_BB " - missing %d successor edges\n", block->bbNum, missingEdges);
+            classicWeightsValid = false;
+            likelyWeightsValid  = false;
         }
 
-        outgoingWeightMin += succEdge->edgeWeightMin();
-        outgoingWeightMax += succEdge->edgeWeightMax();
-    }
+        if (verifyClassicWeights)
+        {
+            if (!fgProfileWeightsConsistent(outgoingWeightMin, outgoingWeightMax))
+            {
+                JITDUMP("  " FMT_BB " - outgoing min " FMT_WT " inconsistent with outgoing max " FMT_WT "\n",
+                        block->bbNum, outgoingWeightMin, outgoingWeightMax);
+                classicWeightsValid = false;
+            }
 
-    if (missingEdges > 0)
-    {
-        JITDUMP("  " FMT_BB " - missing %d successor edges\n", block->bbNum, missingEdges);
-    }
+            if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMin))
+            {
+                JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing min " FMT_WT "\n",
+                        block->bbNum, blockWeight, outgoingWeightMin);
+                classicWeightsValid = false;
+            }
 
-    if (!fgProfileWeightsConsistent(outgoingWeightMin, outgoingWeightMax))
-    {
-        JITDUMP("  " FMT_BB " - outgoing min " FMT_WT " inconsistent with outgoing max " FMT_WT "\n", block->bbNum,
-                outgoingWeightMin, outgoingWeightMax);
-        return false;
-    }
+            if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMax))
+            {
+                JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing max " FMT_WT "\n",
+                        block->bbNum, blockWeight, outgoingWeightMax);
+                classicWeightsValid = false;
+            }
+        }
 
-    if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMin))
-    {
-        JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing min " FMT_WT "\n", block->bbNum,
-                blockWeight, outgoingWeightMin);
-        return false;
-    }
+        if (verifyLikelyWeights)
+        {
+            if (missingLikelihood > 0)
+            {
+                JITDUMP("  " FMT_BB " - missing likelihood on %d successor edges\n", block->bbNum, missingLikelihood);
+                likelyWeightsValid = false;
+            }
 
-    if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMax))
-    {
-        JITDUMP("  " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing max " FMT_WT "\n", block->bbNum,
-                blockWeight, outgoingWeightMax);
-        return false;
+            if (!fgProfileWeightsConsistent(outgoingLikelihood, 1.0))
+            {
+                JITDUMP("  " FMT_BB " - outgoing likelihood " FMT_WT " should be 1.0\n", blockWeight,
+                        outgoingLikelihood);
+                likelyWeightsValid = false;
+            }
+        }
     }
 
-    return missingEdges == 0;
+    return classicWeightsValid && likelyWeightsValid;
 }
 
 //------------------------------------------------------------------------------
index edc9ccd..14d5620 100644 (file)
@@ -149,7 +149,8 @@ CONFIG_INTEGER(JitPrintInlinedMethodsVerbose, W("JitPrintInlinedMethodsVerboseLe
 CONFIG_METHODSET(JitPrintInlinedMethods, W("JitPrintInlinedMethods"))
 
 CONFIG_METHODSET(JitPrintDevirtualizedMethods, W("JitPrintDevirtualizedMethods"))
-CONFIG_INTEGER(JitProfileChecks, W("JitProfileChecks"), 0) // 1 enable in dumps, 2 assert if issues found
+CONFIG_INTEGER(JitProfileChecks, W("JitProfileChecks"), 0) // Bitflag: 0x1 check classic, 0x2 check likely, 0x4 enable
+                                                           // asserts
 CONFIG_INTEGER(JitRequired, W("JITRequired"), -1)
 CONFIG_INTEGER(JitRoundFloat, W("JITRoundFloat"), DEFAULT_ROUND_LEVEL)
 CONFIG_INTEGER(JitStackAllocToLocalSize, W("JitStackAllocToLocalSize"), DEFAULT_MAX_LOCALLOC_TO_LOCAL_SIZE)
index 4adb72e..da63679 100644 (file)
@@ -5236,8 +5236,12 @@ bool Compiler::optInvertWhileLoop(BasicBlock* block)
 #ifdef DEBUG
         // Verify profile for the two target blocks is consistent.
         //
-        fgDebugCheckIncomingProfileData(bNewCond->bbNext);
-        fgDebugCheckIncomingProfileData(bNewCond->bbJumpDest);
+        const bool profileOk =
+            fgDebugCheckIncomingProfileData(bNewCond->bbNext) && fgDebugCheckIncomingProfileData(bNewCond->bbJumpDest);
+        if ((JitConfig.JitProfileChecks() & 0x4) == 0x4)
+        {
+            assert(profileOk);
+        }
 #endif // DEBUG
     }