Overview (doLinearScan):
- Walk all blocks, building intervals and RefPositions (buildIntervals)
- Traverse the RefPositions, marking last uses (setLastUses)
  - Note that this is necessary because the execution order doesn't accurately reflect use order.
    There is a "TODO-Throughput" to eliminate this.
- Allocate registers (allocateRegisters)
- Annotate nodes with register assignments (resolveRegisters)
- Add move nodes as needed to resolve conflicting register assignments
applyCalleeSaveHeuristics(rp);
- // Ensure that we have consistent def/use on SDSU temps.
- // However, in the case of a non-commutative rmw def, we must avoid over-constraining
- // the def, so don't propagate a single-register restriction from the consumer to the producer
+ if (theInterval->isLocalVar)
+ {
+ if (RefTypeIsUse(rp->refType))
+ {
+ RefPosition* const prevRP = theInterval->recentRefPosition;
+ if ((prevRP != nullptr) && (prevRP->bbNum == rp->bbNum))
+ {
+ prevRP->lastUse = false;
+ }
+ }
- if (RefTypeIsUse(rp->refType) && !theInterval->isLocalVar)
+ rp->lastUse = (rp->refType != RefTypeExpUse) && (rp->refType != RefTypeParamDef) && (rp->refType != RefTypeZeroInit);
+ }
+ else if (RefTypeIsUse(rp->refType))
{
+ // Ensure that we have consistent def/use on SDSU temps.
+ // However, in the case of a non-commutative rmw def, we must avoid over-constraining
+ // the def, so don't propagate a single-register restriction from the consumer to the producer
RefPosition* prevRefPosition = theInterval->recentRefPosition;
assert(prevRefPosition != nullptr && theInterval->firstRefPosition == prevRefPosition);
regMaskTP prevAssignment = prevRefPosition->registerAssignment;
{
theInterval->hasConflictingDefUse = true;
}
+
+ rp->lastUse = true;
}
}
// being set by dataflow analysis. It is necessary to do it this way only because the execution
// order wasn't strictly correct.
-void LinearScan::setLastUses(BasicBlock* block)
-{
#ifdef DEBUG
+void LinearScan::checkLastUses(BasicBlock* block)
+{
if (VERBOSE)
{
- JITDUMP("\n\nCALCULATING LAST USES for block %u, liveout=", block->bbNum);
+ JITDUMP("\n\nCHECKING LAST USES for block %u, liveout=", block->bbNum);
dumpConvertedVarSet(compiler, block->bbLiveOut);
JITDUMP("\n==============================\n");
}
-#endif // DEBUG
unsigned keepAliveVarNum = BAD_VAR_NUM;
if (compiler->lvaKeepAliveAndReportThis())
VARSET_TP VARSET_INIT(compiler, temp, block->bbLiveOut);
+ bool foundDiff = false;
auto currentRefPosition = refPositions.rbegin();
-
while (currentRefPosition->refType != RefTypeBB)
{
// We should never see ParamDefs or ZeroInits within a basic block.
{
unsigned varNum = currentRefPosition->getInterval()->varNum;
unsigned varIndex = currentRefPosition->getInterval()->getVarIndex(compiler);
+
+ LsraLocation loc = currentRefPosition->nodeLocation;
+
// We should always have a tree node for a localVar, except for the "special" RefPositions.
GenTreePtr tree = currentRefPosition->treeNode;
assert(tree != nullptr || currentRefPosition->refType == RefTypeExpUse ||
currentRefPosition->refType == RefTypeDummyDef);
+
if (!VarSetOps::IsMember(compiler, temp, varIndex) && varNum != keepAliveVarNum)
{
- // There was no exposed use, so this is a
- // "last use" (and we mark it thus even if it's a def)
-
- if (tree != nullptr)
- {
- tree->gtFlags |= GTF_VAR_DEATH;
- }
- LsraLocation loc = currentRefPosition->nodeLocation;
-#ifdef DEBUG
- if (getLsraExtendLifeTimes())
- {
- JITDUMP("last use of V%02u @%u (not marked as last use for LSRA due to extendLifetimes stress "
- "option)\n",
- compiler->lvaTrackedToVarNum[varIndex], loc);
- }
- else
-#endif // DEBUG
+ // There was no exposed use, so this is a "last use" (and we mark it thus even if it's a def)
+ if (!currentRefPosition->lastUse)
{
- JITDUMP("last use of V%02u @%u\n", compiler->lvaTrackedToVarNum[varIndex], loc);
- currentRefPosition->lastUse = true;
+ JITDUMP("missing expected last use of V%02u @%u\n", compiler->lvaTrackedToVarNum[varIndex], loc);
+ foundDiff = true;
}
VarSetOps::AddElemD(compiler, temp, varIndex);
}
- else
+ else if (currentRefPosition->lastUse)
{
- currentRefPosition->lastUse = false;
- if (tree != nullptr)
- {
- tree->gtFlags &= ~GTF_VAR_DEATH;
- }
+ JITDUMP("unexpected last use of V%02u @%u\n", compiler->lvaTrackedToVarNum[varIndex], loc);
+ foundDiff = true;
}
if (currentRefPosition->refType == RefTypeDef || currentRefPosition->refType == RefTypeDummyDef)
VarSetOps::RemoveElemD(compiler, temp, varIndex);
}
}
+
assert(currentRefPosition != refPositions.rend());
++currentRefPosition;
}
-#ifdef DEBUG
VARSET_TP VARSET_INIT(compiler, temp2, block->bbLiveIn);
VarSetOps::DiffD(compiler, temp2, temp);
VarSetOps::DiffD(compiler, temp, block->bbLiveIn);
- bool foundDiff = false;
{
VARSET_ITER_INIT(compiler, iter, temp, varIndex);
}
assert(!foundDiff);
-#endif // DEBUG
}
+#endif // DEBUG
void LinearScan::addRefsForPhysRegMask(regMaskTP mask, LsraLocation currentLoc, RefType refType, bool isLastUse)
{
{
RefPosition* newest = newRefPosition(defs[i]->getInterval(), currentLoc, RefTypeUse, tree, mask,
0 DEBUG_ARG(minRegCandidateCount));
- newest->lastUse = true;
if (tree->gtLsraInfo.isInternalRegDelayFree)
{
}
RefPosition* pos = newRefPosition(interval, currentLoc, RefTypeUse, tree, candidates);
pos->isLocalDefUse = true;
- bool isLastUse = ((tree->gtFlags & GTF_VAR_DEATH) != 0);
- pos->lastUse = isLastUse;
pos->setAllocateIfProfitable(tree->IsRegOptional());
DBEXEC(VERBOSE, pos->dump());
return;
}
#endif // FEATURE_SIMD
- bool delayRegFree = (hasDelayFreeSrc && useNode->gtLsraInfo.isDelayFree);
if (useNode->gtLsraInfo.isTgtPref)
{
prefSrcInterval = i;
}
- bool regOptionalAtUse = useNode->IsRegOptional();
- bool isLastUse = true;
- if (isCandidateLocalRef(useNode))
+ regMaskTP fixedAssignment = fixedCandidateMask(type, candidates);
+ if (fixedAssignment != RBM_NONE)
{
- isLastUse = ((useNode->gtFlags & GTF_VAR_DEATH) != 0);
+ candidates = fixedAssignment;
}
- else
+
+ const bool regOptionalAtUse = useNode->IsRegOptional();
+ const bool delayRegFree = (hasDelayFreeSrc && useNode->gtLsraInfo.isDelayFree);
+
+ assert(isCandidateLocalRef(useNode) == i->isLocalVar);
+ if (!i->isLocalVar)
{
// For non-localVar uses we record nothing,
// as nothing needs to be written back to the tree.
useNode = nullptr;
}
- regMaskTP fixedAssignment = fixedCandidateMask(type, candidates);
- if (fixedAssignment != RBM_NONE)
- {
- candidates = fixedAssignment;
- }
-
#ifdef DEBUG
// If delayRegFree, then Use will interfere with the destination of
// the consuming node. Therefore, we also need add the kill set of
pos->delayRegFree = true;
}
- if (isLastUse)
- {
- pos->lastUse = true;
- }
-
if (regOptionalAtUse)
{
pos->setAllocateIfProfitable(1);
JITDUMP("\n");
}
- // Identify the last uses of each variable, except in the case of MinOpts, where all vars
- // are kept live everywhere.
-
- if (!compiler->opts.MinOpts())
+ // Clear the "last use" flag on any vars that are live-out from this block.
{
- setLastUses(block);
+ VARSET_ITER_INIT(compiler, iter, block->bbLiveOut, varIndex);
+ while (iter.NextElem(compiler, &varIndex))
+ {
+ unsigned varNum = compiler->lvaTrackedToVarNum[varIndex];
+ LclVarDsc* const varDsc = &compiler->lvaTable[varNum];
+ if (isCandidateVar(varDsc))
+ {
+ RefPosition* const lastRP = getIntervalForLocalVar(varNum)->lastRefPosition;
+ if ((lastRP != nullptr) && (lastRP->bbNum == block->bbNum))
+ {
+ lastRP->lastUse = false;
+ }
+ }
+ }
}
#ifdef DEBUG
+ checkLastUses(block);
+
if (VERBOSE)
{
printf("use: ");
interval->recentRefPosition = currentRefPosition;
LclVarDsc* varDsc = interval->getLocalVar(compiler);
+ if ((treeNode != nullptr))
+ {
+ if (currentRefPosition->lastUse)
+ {
+ treeNode->gtFlags |= GTF_VAR_DEATH;
+ }
+ else
+ {
+ treeNode->gtFlags &= ~GTF_VAR_DEATH;
+ }
+ }
+
if (currentRefPosition->registerAssignment == RBM_NONE)
{
assert(!currentRefPosition->RequiresRegister());