{
Unknown,
PostdominatesSource,
+ Pseudo,
DominatesTarget,
CriticalEdge,
Deleted,
//
BasicBlock* const target = fgFirstBB;
assert(BlockSetOps::IsMember(comp, marked, target->bbNum));
- visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::PostdominatesSource);
+ visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::Pseudo);
}
break;
//
BasicBlock* const target = dsc->ebdHndBeg;
assert(BlockSetOps::IsMember(comp, marked, target->bbNum));
- visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::PostdominatesSource);
+ visitor->VisitNonTreeEdge(block, target, SpanningTreeVisitor::EdgeKind::Pseudo);
}
}
break;
switch (kind)
{
case EdgeKind::PostdominatesSource:
+ case EdgeKind::Pseudo:
NewSourceProbe(source, target);
break;
case EdgeKind::DominatesTarget:
Edge* m_nextOutgoingEdge;
Edge* m_nextIncomingEdge;
bool m_weightKnown;
+ bool m_isPseudoEdge;
Edge(BasicBlock* source, BasicBlock* target)
: m_weight(BB_ZERO_WEIGHT)
, m_nextOutgoingEdge(nullptr)
, m_nextIncomingEdge(nullptr)
, m_weightKnown(false)
+ , m_isPseudoEdge(false)
{
}
};
{
// We may have this edge in the schema, and so already added this edge to the map.
//
- // If not, assume we have a partial schema. We could add a zero count edge,
- // but such edges don't impact the solving algorithm, so we can omit them.
- //
EdgeKey key(source, target);
Edge* edge = nullptr;
- if (m_edgeKeyToEdgeMap.Lookup(key, &edge))
- {
- BlockInfo* const sourceInfo = BlockToInfo(source);
- edge->m_nextOutgoingEdge = sourceInfo->m_outgoingEdges;
- sourceInfo->m_outgoingEdges = edge;
+ BlockInfo* const sourceInfo = BlockToInfo(source);
- BlockInfo* const targetInfo = BlockToInfo(target);
- edge->m_nextIncomingEdge = targetInfo->m_incomingEdges;
- targetInfo->m_incomingEdges = edge;
- }
- else
+ if (!m_edgeKeyToEdgeMap.Lookup(key, &edge))
{
- // Because the count is zero, we can just pretend this edge doesn't exist.
+ // If the edge is missing, assume it is zero.
//
JITDUMP("Schema is missing non-tree edge " FMT_BB " -> " FMT_BB ", will presume zero\n", source->bbNum,
target->bbNum);
+ edge = new (m_allocator) Edge(source, target);
+ m_edges++;
m_zeroEdges++;
+
+ edge->m_weightKnown = true;
+ edge->m_weight = 0;
}
+
+ edge->m_nextOutgoingEdge = sourceInfo->m_outgoingEdges;
+ sourceInfo->m_outgoingEdges = edge;
+
+ BlockInfo* const targetInfo = BlockToInfo(target);
+ edge->m_nextIncomingEdge = targetInfo->m_incomingEdges;
+ targetInfo->m_incomingEdges = edge;
+
+ edge->m_isPseudoEdge = (kind == EdgeKind::Pseudo);
+ JITDUMP(" ... %s edge " FMT_BB " -> " FMT_BB "\n", edge->m_isPseudoEdge ? "pseudo" : "non-tree", source->bbNum, target->bbNum);
}
};
if (!m_keyToBlockMap.Lookup(schemaEntry.ILOffset, &sourceBlock))
{
- JITDUMP("Could not find source block for schema entry %d (IL offset/key %08x\n", iSchema,
+ JITDUMP("Could not find source block for schema entry %d (IL offset/key %08x)\n", iSchema,
schemaEntry.ILOffset);
}
if (!m_keyToBlockMap.Lookup(schemaEntry.Other, &targetBlock))
{
- JITDUMP("Could not find target block for schema entry %d (IL offset/key %08x\n", iSchema,
+ JITDUMP("Could not find target block for schema entry %d (IL offset/key %08x)\n", iSchema,
schemaEntry.ILOffset);
}
unsigned nPasses = 0;
unsigned const nLimit = 10;
- JITDUMP("\nSolver: %u blocks, %u unknown; %u edges, %u unknown, %u zero (and so ignored)\n", m_blocks,
- m_unknownBlocks, m_edges, m_unknownEdges, m_zeroEdges);
+ JITDUMP("\nSolver: %u blocks, %u unknown; %u edges, %u unknown, %u zero\n", m_blocks, m_unknownBlocks, m_edges,
+ m_unknownEdges, m_zeroEdges);
while ((m_unknownBlocks > 0) && (nPasses < nLimit))
{
return;
}
- // Set weight on all blocks.
+ // Set weight on all blocks and edges.
//
for (BasicBlock* const block : m_comp->Blocks())
{
BlockInfo* const info = BlockToInfo(block);
assert(info->m_weightKnown);
-
m_comp->fgSetProfileWeight(block, info->m_weight);
// Mark blocks that might be worth optimizing further, given
// what we know about the PGO data.
//
+ // TODO: defer until we've figured out edge likelihoods?
+ //
MarkInterestingBlocks(block, info);
+
+ const unsigned nSucc = block->NumSucc(m_comp);
+ if (nSucc == 0)
+ {
+ // No edges to worry about.
+ //
+ continue;
+ }
+
+ // Else there is at least one FlowEdge.
+ //
+ // Check the reconstruction graph edges. If we have any pseudo-edges
+ // there should be only one pseudo-edge, and no regular edges.
+ //
+ Edge* pseudoEdge = nullptr;
+ unsigned nEdges = 0;
+
+ for (Edge* edge = info->m_outgoingEdges; edge != nullptr; edge = edge->m_nextOutgoingEdge)
+ {
+ if (edge->m_isPseudoEdge)
+ {
+ pseudoEdge = edge;
+ continue;
+ }
+
+ assert(pseudoEdge == nullptr);
+ nEdges++;
+ }
+
+ // If we found a pseudo edge there should be only one successor
+ // for block. The flow from block to successor will not represent
+ // real flow. We set likelihood anyway so we can assert later
+ // that all flow edges have known likelihood.
+ //
+ // Note the flowEdge target may not be the same as the pseudo edge target.
+ //
+ if (pseudoEdge != nullptr)
+ {
+ assert(nSucc == 1);
+ assert(block == pseudoEdge->m_sourceBlock);
+ assert(block->bbJumpDest != nullptr);
+ FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(block->bbJumpDest, block);
+ assert(flowEdge != nullptr);
+ flowEdge->setLikelihood(1.0);
+ continue;
+ }
+
+ // We may not have the same number of model edges and flow edges.
+ //
+ // This can happen because some BBJ_LEAVE blocks may have been missed during
+ // our spanning tree walk since we don't know where all the finallies can return
+ // to just yet (specifically, in WalkSpanningTree, we may not add the bbJumpDest of
+ // a BBJ_LEAVE to the worklist).
+ //
+ // Worst case those missed blocks dominate other blocks so we can't limit
+ // the screening here to specific BBJ kinds.
+ //
+ // Handle those specially by just assuming equally likely successors.
+ //
+ // Do likewise, if the block weight is zero, since examination of edge weights
+ // shouldn't tell us anything about edge likelihoods.
+ //
+ // (TODO: use synthesis here)
+ //
+ if ((nEdges != nSucc) || (info->m_weight == BB_ZERO_WEIGHT))
+ {
+ JITDUMP(FMT_BB " %s, setting outgoing likelihoods heuristically\n", block->bbNum,
+ (nEdges != nSucc) ? "has inaccurate flow model" : "has zero weight");
+
+ weight_t equalLikelihood = 1.0 / nSucc;
+
+ for (BasicBlock* succ : block->Succs(m_comp))
+ {
+ FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(succ, block);
+ JITDUMP("Setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT " (heur)\n", block->bbNum,
+ succ->bbNum, equalLikelihood);
+ flowEdge->setLikelihood(equalLikelihood);
+ }
+ continue;
+ }
+
+ // Transfer model edge weight onto the FlowEdges as likelihoods.
+ //
+ assert(nEdges == nSucc);
+ weight_t totalLikelihood = 0;
+
+ for (Edge* edge = info->m_outgoingEdges; edge != nullptr; edge = edge->m_nextOutgoingEdge)
+ {
+ assert(block == edge->m_sourceBlock);
+ FlowEdge* const flowEdge = m_comp->fgGetPredForBlock(edge->m_targetBlock, block);
+ assert(flowEdge != nullptr);
+ assert(!flowEdge->hasLikelihood());
+ weight_t likelihood = 0;
+
+ if (nEdges == 1)
+ {
+ assert(nSucc == 1);
+
+ // Conceptually we could assert(edge->m_weight == info->m_weight);
+ // but we can have inconsistencies.
+ //
+ // Go with what we know for sure, edge should be 100% likely.
+ //
+ likelihood = 1.0;
+ JITDUMP("Setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT " (uniq)\n", block->bbNum,
+ edge->m_targetBlock->bbNum, likelihood);
+ flowEdge->setLikelihood(likelihood);
+ totalLikelihood += likelihood;
+ break;
+ }
+
+ assert(info->m_weight != BB_ZERO_WEIGHT);
+
+ // We may see nonsensical weights here, cap likelihood.
+ //
+ bool capped = false;
+ if (edge->m_weight > info->m_weight)
+ {
+ capped = true;
+ likelihood = 1.0;
+ }
+ else
+ {
+ likelihood = edge->m_weight / info->m_weight;
+ }
+ JITDUMP("Setting likelihood of " FMT_BB " -> " FMT_BB " to " FMT_WT " (%s)\n", block->bbNum,
+ edge->m_targetBlock->bbNum, likelihood, capped ? "pgo -- capped" : "pgo");
+ flowEdge->setLikelihood(likelihood);
+ totalLikelihood += likelihood;
+ }
+
+ if (totalLikelihood != 1.0)
+ {
+ // Consider what to do here... flag this method as needing immediate profile repairs?
+ //
+ JITDUMP(FMT_BB " total outgoing likelihood inaccurate: " FMT_WT "\n", block->bbNum, totalLikelihood);
+ }
}
}
return;
}
- // We can't check before we have computed edge weights.
+ // We can check classic (min/max, late computed) weights
+ // and/or
+ // new likelihood based weights.
//
- if (!fgEdgeWeightsComputed)
+ const bool verifyClassicWeights = fgEdgeWeightsComputed && (JitConfig.JitProfileChecks() & 0x1) == 0x1;
+ const bool verifyLikelyWeights = (JitConfig.JitProfileChecks() & 0x2) == 0x2;
+ const bool assertOnFailure = (JitConfig.JitProfileChecks() & 0x4) == 0x4;
+
+ if (!(verifyClassicWeights || verifyLikelyWeights))
{
return;
}
JITDUMP("Profile is NOT self-consistent, found %d problems (%d profiled blocks, %d unprofiled)\n",
problemBlocks, profiledBlocks, unprofiledBlocks);
- if (JitConfig.JitProfileChecks() == 2)
+ if (assertOnFailure)
{
- assert(!"Inconsistent profile");
+ assert(!"Inconsistent profile data");
}
}
}
// block - block to check
//
// Returns:
-// true if counts consistent, false otherwise.
+// true if counts consistent or checking disabled, false otherwise.
//
// Notes:
// Only useful to call on blocks with predecessors.
//
bool Compiler::fgDebugCheckIncomingProfileData(BasicBlock* block)
{
- weight_t const blockWeight = block->bbWeight;
- weight_t incomingWeightMin = 0;
- weight_t incomingWeightMax = 0;
- bool foundPreds = false;
+ const bool verifyClassicWeights = fgEdgeWeightsComputed && (JitConfig.JitProfileChecks() & 0x1) == 0x1;
+ const bool verifyLikelyWeights = (JitConfig.JitProfileChecks() & 0x2) == 0x2;
+
+ if (!(verifyClassicWeights || verifyLikelyWeights))
+ {
+ return true;
+ }
+
+ weight_t const blockWeight = block->bbWeight;
+ weight_t incomingWeightMin = 0;
+ weight_t incomingWeightMax = 0;
+ weight_t incomingLikelyWeight = 0;
+ bool foundPreds = false;
for (FlowEdge* const predEdge : block->PredEdges())
{
incomingWeightMin += predEdge->edgeWeightMin();
incomingWeightMax += predEdge->edgeWeightMax();
+ incomingLikelyWeight += predEdge->getLikelyWeight();
foundPreds = true;
}
- if (!foundPreds)
- {
- // Assume this is ok.
- //
- return true;
- }
+ bool classicWeightsValid = true;
+ bool likelyWeightsValid = true;
- if (!fgProfileWeightsConsistent(incomingWeightMin, incomingWeightMax))
+ if (foundPreds)
{
- JITDUMP(" " FMT_BB " - incoming min " FMT_WT " inconsistent with incoming max " FMT_WT "\n", block->bbNum,
- incomingWeightMin, incomingWeightMax);
- return false;
- }
+ if (verifyClassicWeights)
+ {
+ if (!fgProfileWeightsConsistent(incomingWeightMin, incomingWeightMax))
+ {
+ JITDUMP(" " FMT_BB " - incoming min " FMT_WT " inconsistent with incoming max " FMT_WT "\n",
+ block->bbNum, incomingWeightMin, incomingWeightMax);
+ classicWeightsValid = false;
+ }
- if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMin))
- {
- JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with incoming min " FMT_WT "\n", block->bbNum,
- blockWeight, incomingWeightMin);
- return false;
- }
+ if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMin))
+ {
+ JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with incoming min " FMT_WT "\n",
+ block->bbNum, blockWeight, incomingWeightMin);
+ classicWeightsValid = false;
+ }
- if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMax))
- {
- JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with incoming max " FMT_WT "\n", block->bbNum,
- blockWeight, incomingWeightMax);
- return false;
+ if (!fgProfileWeightsConsistent(blockWeight, incomingWeightMax))
+ {
+ JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with incoming max " FMT_WT "\n",
+ block->bbNum, blockWeight, incomingWeightMax);
+ classicWeightsValid = false;
+ }
+ }
+
+ if (verifyLikelyWeights)
+ {
+ if (!fgProfileWeightsConsistent(blockWeight, incomingLikelyWeight))
+ {
+ JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with incoming likely weight " FMT_WT "\n",
+ block->bbNum, blockWeight, incomingLikelyWeight);
+ likelyWeightsValid = false;
+ }
+ }
}
- return true;
+ return classicWeightsValid && likelyWeightsValid;
}
//------------------------------------------------------------------------
// block - block to check
//
// Returns:
-// true if counts consistent, false otherwise.
+// true if counts consistent or checking disabled, false otherwise.
//
// Notes:
// Only useful to call on blocks with successors.
//
bool Compiler::fgDebugCheckOutgoingProfileData(BasicBlock* block)
{
- // We want switch targets unified, but not EH edges.
- //
- const unsigned numSuccs = block->NumSucc(this);
+ const bool verifyClassicWeights = fgEdgeWeightsComputed && (JitConfig.JitProfileChecks() & 0x1) == 0x1;
+ const bool verifyLikelyWeights = (JitConfig.JitProfileChecks() & 0x2) == 0x2;
- if (numSuccs == 0)
+ if (!(verifyClassicWeights || verifyLikelyWeights))
{
- // Assume this is ok.
- //
return true;
}
- // We won't check finally or filter returns (for now).
+ bool classicWeightsValid = true;
+ bool likelyWeightsValid = true;
+
+ // We want switch targets unified, but not EH edges.
//
- if (block->KindIs(BBJ_EHFINALLYRET, BBJ_EHFILTERRET))
+ const unsigned numSuccs = block->NumSucc(this);
+
+ if ((numSuccs > 0) && !block->KindIs(BBJ_EHFINALLYRET, BBJ_EHFILTERRET))
{
- return true;
- }
+ weight_t const blockWeight = block->bbWeight;
+ weight_t outgoingWeightMin = 0;
+ weight_t outgoingWeightMax = 0;
+ weight_t outgoingLikelihood = 0;
- weight_t const blockWeight = block->bbWeight;
- weight_t outgoingWeightMin = 0;
- weight_t outgoingWeightMax = 0;
+ // Walk successor edges and add up flow counts.
+ //
+ unsigned missingEdges = 0;
+ unsigned missingLikelihood = 0;
- // Walk successor edges and add up flow counts.
- //
- int missingEdges = 0;
+ for (unsigned i = 0; i < numSuccs; i++)
+ {
+ BasicBlock* succBlock = block->GetSucc(i, this);
+ FlowEdge* succEdge = fgGetPredForBlock(succBlock, block);
- for (unsigned i = 0; i < numSuccs; i++)
- {
- BasicBlock* succBlock = block->GetSucc(i, this);
- FlowEdge* succEdge = fgGetPredForBlock(succBlock, block);
+ if (succEdge == nullptr)
+ {
+ missingEdges++;
+ JITDUMP(" " FMT_BB " can't find successor edge to " FMT_BB "\n", block->bbNum, succBlock->bbNum);
+ continue;
+ }
+
+ outgoingWeightMin += succEdge->edgeWeightMin();
+ outgoingWeightMax += succEdge->edgeWeightMax();
+
+ if (succEdge->hasLikelihood())
+ {
+ outgoingLikelihood += succEdge->getLikelihood();
+ }
+ else
+ {
+ missingLikelihood++;
+ }
+ }
- if (succEdge == nullptr)
+ if (missingEdges > 0)
{
- missingEdges++;
- JITDUMP(" " FMT_BB " can't find successor edge to " FMT_BB "\n", block->bbNum, succBlock->bbNum);
- continue;
+ JITDUMP(" " FMT_BB " - missing %d successor edges\n", block->bbNum, missingEdges);
+ classicWeightsValid = false;
+ likelyWeightsValid = false;
}
- outgoingWeightMin += succEdge->edgeWeightMin();
- outgoingWeightMax += succEdge->edgeWeightMax();
- }
+ if (verifyClassicWeights)
+ {
+ if (!fgProfileWeightsConsistent(outgoingWeightMin, outgoingWeightMax))
+ {
+ JITDUMP(" " FMT_BB " - outgoing min " FMT_WT " inconsistent with outgoing max " FMT_WT "\n",
+ block->bbNum, outgoingWeightMin, outgoingWeightMax);
+ classicWeightsValid = false;
+ }
- if (missingEdges > 0)
- {
- JITDUMP(" " FMT_BB " - missing %d successor edges\n", block->bbNum, missingEdges);
- }
+ if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMin))
+ {
+ JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing min " FMT_WT "\n",
+ block->bbNum, blockWeight, outgoingWeightMin);
+ classicWeightsValid = false;
+ }
- if (!fgProfileWeightsConsistent(outgoingWeightMin, outgoingWeightMax))
- {
- JITDUMP(" " FMT_BB " - outgoing min " FMT_WT " inconsistent with outgoing max " FMT_WT "\n", block->bbNum,
- outgoingWeightMin, outgoingWeightMax);
- return false;
- }
+ if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMax))
+ {
+ JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing max " FMT_WT "\n",
+ block->bbNum, blockWeight, outgoingWeightMax);
+ classicWeightsValid = false;
+ }
+ }
- if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMin))
- {
- JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing min " FMT_WT "\n", block->bbNum,
- blockWeight, outgoingWeightMin);
- return false;
- }
+ if (verifyLikelyWeights)
+ {
+ if (missingLikelihood > 0)
+ {
+ JITDUMP(" " FMT_BB " - missing likelihood on %d successor edges\n", block->bbNum, missingLikelihood);
+ likelyWeightsValid = false;
+ }
- if (!fgProfileWeightsConsistent(blockWeight, outgoingWeightMax))
- {
- JITDUMP(" " FMT_BB " - block weight " FMT_WT " inconsistent with outgoing max " FMT_WT "\n", block->bbNum,
- blockWeight, outgoingWeightMax);
- return false;
+ if (!fgProfileWeightsConsistent(outgoingLikelihood, 1.0))
+ {
+ JITDUMP(" " FMT_BB " - outgoing likelihood " FMT_WT " should be 1.0\n", block->bbNum,
+ outgoingLikelihood);
+ likelyWeightsValid = false;
+ }
+ }
}
- return missingEdges == 0;
+ return classicWeightsValid && likelyWeightsValid;
}
//------------------------------------------------------------------------------