1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 // =================================================================================
6 // Code that works with liveness and related concepts (interference, debug scope)
7 // =================================================================================
14 #if !defined(_TARGET_64BIT_)
15 #include "decomposelongs.h"
17 #include "lower.h" // for LowerRange()
19 /*****************************************************************************
21 * Helper for Compiler::fgPerBlockLocalVarLiveness().
22 * The goal is to compute the USE and DEF sets for a basic block.
//------------------------------------------------------------------------
// fgMarkUseDef: Record a local variable reference in the current block's
//    use/def sets (fgCurUseSet / fgCurDefSet), and reflect any effect on
//    ByrefExposed memory for address-exposed locals.
//
// Arguments:
//    tree - a local var/field (or local address) node; GT_PHI_ARG is excluded.
//
// Notes:
//    "Use" here means use-before-def within the block: a use is only added
//    if the variable is not already in fgCurDefSet. For promoted structs,
//    the tracked field locals are marked instead of the parent.
24 void Compiler::fgMarkUseDef(GenTreeLclVarCommon* tree)
26 assert((tree->OperIsLocal() && (tree->OperGet() != GT_PHI_ARG)) || tree->OperIsLocalAddr());
28 const unsigned lclNum = tree->gtLclNum;
29 assert(lclNum < lvaCount);
31 LclVarDsc* const varDsc = &lvaTable[lclNum];
33 // We should never encounter a reference to a lclVar that has a zero refCnt.
34 if (varDsc->lvRefCnt() == 0 && (!varTypeIsPromotable(varDsc) || !varDsc->lvPromoted))
36 JITDUMP("Found reference to V%02u with zero refCnt.\n", lclNum);
37 assert(!"We should never encounter a reference to a lclVar that has a zero refCnt.");
// Recover in release builds by giving the local a nonzero ref count.
38 varDsc->setLvRefCnt(1);
// A GTF_VAR_USEASG def (an "update" such as a partial-field store) also reads
// the variable, so it counts as a use as well as a def.
41 const bool isDef = (tree->gtFlags & GTF_VAR_DEF) != 0;
42 const bool isUse = !isDef || ((tree->gtFlags & GTF_VAR_USEASG) != 0);
44 if (varDsc->lvTracked)
46 assert(varDsc->lvVarIndex < lvaTrackedCount);
48 // We don't treat stores to tracked locals as modifications of ByrefExposed memory;
49 // Make sure no tracked local is addr-exposed, to make sure we don't incorrectly CSE byref
50 // loads aliasing it across a store to it.
51 assert(!varDsc->lvAddrExposed);
53 if (isUse && !VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
55 // This is an exposed use; add it to the set of uses.
56 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
61 // This is a def, add it to the set of defs.
62 VarSetOps::AddElemD(this, fgCurDefSet, varDsc->lvVarIndex);
// Untracked path: an address-exposed local is modeled as ByrefExposed memory.
67 if (varDsc->lvAddrExposed)
69 // Reflect the effect on ByrefExposed memory
73 fgCurMemoryUse |= memoryKindSet(ByrefExposed);
77 fgCurMemoryDef |= memoryKindSet(ByrefExposed);
79 // We've found a store that modifies ByrefExposed
80 // memory but not GcHeap memory, so track their
// states separately from here on.
82 byrefStatesMatchGcHeapStates = false;
86 if (varTypeIsStruct(varDsc))
88 lvaPromotionType promotionType = lvaGetPromotionType(varDsc);
90 if (promotionType != PROMOTION_TYPE_NONE)
// Collect the tracked field locals of the promoted struct into a bit mask.
92 VARSET_TP bitMask(VarSetOps::MakeEmpty(this));
94 for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
96 noway_assert(lvaTable[i].lvIsStructField);
97 if (lvaTable[i].lvTracked)
99 noway_assert(lvaTable[i].lvVarIndex < lvaTrackedCount);
100 VarSetOps::AddElemD(this, bitMask, lvaTable[i].lvVarIndex);
104 // For pure defs (i.e. not an "update" def which is also a use), add to the (all) def set.
108 VarSetOps::UnionD(this, fgCurDefSet, bitMask);
110 else if (!VarSetOps::IsSubset(this, bitMask, fgCurDefSet))
112 // Mark as used any struct fields that are not yet defined.
113 VarSetOps::UnionD(this, fgCurUseSet, bitMask);
120 /*****************************************************************************/
//------------------------------------------------------------------------
// fgLocalVarLiveness: Driver for local variable liveness.
//    Initializes liveness data structures, computes per-block use/def info,
//    then iterates inter-block dataflow until no statements are removed and
//    liveness stops changing.
121 void Compiler::fgLocalVarLiveness()
126 printf("*************** In fgLocalVarLiveness()\n");
128 if (compRationalIRForm)
135 // Init liveness data structures.
136 fgLocalVarLivenessInit();
138 EndPhase(PHASE_LCLVARLIVENESS_INIT);
140 // Make sure we haven't noted any partial last uses of promoted structs.
141 ClearPromotedStructDeathVars();
143 // Initialize the per-block var sets.
144 fgInitBlockVarSets();
146 fgLocalVarLivenessChanged = false;
149 /* Figure out use/def info for all basic blocks */
150 fgPerBlockLocalVarLiveness();
151 EndPhase(PHASE_LCLVARLIVENESS_PERBLOCK);
153 /* Live variable analysis. */
155 fgStmtRemoved = false;
156 fgInterBlockLocalVarLiveness();
// Re-run while dead-statement removal keeps changing liveness.
157 } while (fgStmtRemoved && fgLocalVarLivenessChanged);
159 EndPhase(PHASE_LCLVARLIVENESS_INTERBLOCK);
162 /*****************************************************************************/
//------------------------------------------------------------------------
// fgLocalVarLivenessInit: Prepare for a liveness pass.
//    Sorts locals when optimizing, and clears lvMustInit on every local
//    so that stale must-init state from a prior liveness pass cannot
//    trash a register shared with another variable (see bug note below).
163 void Compiler::fgLocalVarLivenessInit()
165 JITDUMP("In fgLocalVarLivenessInit\n");
167 // Sort locals first, if we're optimizing
168 if (opts.OptimizationEnabled())
173 // We mark a lcl as must-init in a first pass of local variable
174 // liveness (Liveness1), then assertion prop eliminates the
175 // uninit-use of a variable Vk, asserting it will be init'ed to
176 // null. Then, in a second local-var liveness (Liveness2), the
177 // variable Vk is no longer live on entry to the method, since its
178 // uses have been replaced via constant propagation.
180 // This leads to a bug: since Vk is no longer live on entry, the
181 // register allocator sees Vk and an argument Vj as having
182 // disjoint lifetimes, and allocates them to the same register.
183 // But Vk is still marked "must-init", and this initialization (of
184 // the register) trashes the value in Vj.
186 // Therefore, initialize must-init to false for all variables in
187 // each liveness phase.
188 for (unsigned lclNum = 0; lclNum < lvaCount; ++lclNum)
190 lvaTable[lclNum].lvMustInit = false;
194 //------------------------------------------------------------------------
195 // fgPerNodeLocalVarLiveness:
196 // Set fgCurMemoryUse and fgCurMemoryDef when memory is read or updated
197 // Call fgMarkUseDef for any Local variables encountered
200 // tree - The current node.
//------------------------------------------------------------------------
// fgPerNodeLocalVarLiveness: Accumulate the liveness effect of one node.
//    Updates fgCurMemoryUse/fgCurMemoryDef/fgCurMemoryHavoc for memory
//    reads/writes, and calls fgMarkUseDef for local variable references.
//
// Arguments:
//    tree - The current node.
202 void Compiler::fgPerNodeLocalVarLiveness(GenTree* tree)
204 assert(tree != nullptr);
206 switch (tree->gtOper)
210 // We never should encounter a GT_QMARK or GT_COLON node
211 noway_assert(!"unexpected GT_QMARK/GT_COLON");
216 case GT_LCL_VAR_ADDR:
217 case GT_LCL_FLD_ADDR:
218 case GT_STORE_LCL_VAR:
219 case GT_STORE_LCL_FLD:
220 fgMarkUseDef(tree->AsLclVarCommon());
224 // For Volatile indirection, first mutate GcHeap/ByrefExposed.
225 // See comments in ValueNum.cpp (under case GT_CLS_VAR)
226 // This models Volatile reads as def-then-use of memory
227 // and allows for a CSE of a subsequent non-volatile read.
228 if ((tree->gtFlags & GTF_FLD_VOLATILE) != 0)
230 // For any Volatile indirection, we must handle it as a
231 // definition of GcHeap/ByrefExposed
232 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
234 // If the GT_CLS_VAR is the lhs of an assignment, we'll handle it as a GcHeap/ByrefExposed def, when we get
235 // to the assignment.
236 // Otherwise, we treat it as a use here.
237 if ((tree->gtFlags & GTF_CLS_VAR_ASG_LHS) == 0)
239 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
244 // For Volatile indirection, first mutate GcHeap/ByrefExposed
245 // see comments in ValueNum.cpp (under case GT_CLS_VAR)
246 // This models Volatile reads as def-then-use of memory.
247 // and allows for a CSE of a subsequent non-volatile read
248 if ((tree->gtFlags & GTF_IND_VOLATILE) != 0)
250 // For any Volatile indirection, we must handle it as a
251 // definition of the GcHeap/ByrefExposed
252 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
255 // If the GT_IND is the lhs of an assignment, we'll handle it
256 // as a memory def, when we get to assignment.
257 // Otherwise, we treat it as a use here.
258 if ((tree->gtFlags & GTF_IND_ASG_LHS) == 0)
260 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
261 bool dummyIsEntire = false;
262 GenTree* addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
263 if (!addrArg->DefinesLocalAddr(this, /*width doesn't matter*/ 0, &dummyLclVarTree, &dummyIsEntire))
265 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
269 // Defines a local addr
270 assert(dummyLclVarTree != nullptr);
271 fgMarkUseDef(dummyLclVarTree->AsLclVarCommon());
276 // These should have been morphed away to become GT_INDs:
282 // We'll assume these are use-then-defs of memory.
287 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
288 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
289 fgCurMemoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
292 case GT_MEMORYBARRIER:
293 // Simliar to any Volatile indirection, we must handle this as a definition of GcHeap/ByrefExposed
294 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
297 #ifdef FEATURE_HW_INTRINSICS
300 GenTreeHWIntrinsic* hwIntrinsicNode = tree->AsHWIntrinsic();
302 // We can't call fgMutateGcHeap unless the block has recorded a MemoryDef
304 if (hwIntrinsicNode->OperIsMemoryStore())
306 // We currently handle this like a Volatile store, so it counts as a definition of GcHeap/ByrefExposed
307 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
309 if (hwIntrinsicNode->OperIsMemoryLoad())
311 // This instruction loads from memory and we need to record this information
312 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
318 // For now, all calls read/write GcHeap/ByrefExposed, writes in their entirety. Might tighten this case later.
321 GenTreeCall* call = tree->AsCall();
323 if (call->gtCallType == CT_HELPER)
325 CorInfoHelpFunc helpFunc = eeGetHelperNum(call->gtCallMethHnd);
// Helpers that neither mutate the heap nor can run a cctor are exempt
// from the conservative heap use/def/havoc below.
327 if (!s_helperCallProperties.MutatesHeap(helpFunc) && !s_helperCallProperties.MayRunCctor(helpFunc))
334 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
335 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
336 fgCurMemoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
340 // If this is a p/invoke unmanaged call or if this is a tail-call
341 // and we have an unmanaged p/invoke call in the method,
342 // then we're going to run the p/invoke epilog.
343 // So we mark the FrameRoot as used by this instruction.
344 // This ensures that the block->bbVarUse will contain
345 // the FrameRoot local var if is it a tracked variable.
347 if ((tree->gtCall.IsUnmanaged() || (tree->gtCall.IsTailCall() && info.compCallUnmanaged)))
349 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
350 if (!opts.ShouldUsePInvokeHelpers())
352 /* Get the TCB local and mark it as used */
354 noway_assert(info.compLvFrameListRoot < lvaCount);
356 LclVarDsc* varDsc = &lvaTable[info.compLvFrameListRoot];
358 if (varDsc->lvTracked)
// Only an exposed use (not already defined in this block) is recorded.
360 if (!VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
362 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
372 // Determine what memory locations it defines.
373 if (tree->OperIs(GT_ASG) || tree->OperIsBlkOp())
375 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
376 if (tree->DefinesLocal(this, &dummyLclVarTree))
378 if (lvaVarAddrExposed(dummyLclVarTree->gtLclNum))
380 fgCurMemoryDef |= memoryKindSet(ByrefExposed);
382 // We've found a store that modifies ByrefExposed
383 // memory but not GcHeap memory, so track their
384 // states separately.
385 byrefStatesMatchGcHeapStates = false;
390 // If it doesn't define a local, then it might update GcHeap/ByrefExposed.
391 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
398 /*****************************************************************************/
//------------------------------------------------------------------------
// fgPerBlockLocalVarLiveness: Compute bbVarUse/bbVarDef and memory use/def
//    sets for every basic block.
//
// Notes:
//    When the backend does not require accurate local var lifetimes, every
//    tracked var is simply marked live everywhere. Otherwise each block's
//    nodes are walked (LIR range or statement list) via
//    fgPerNodeLocalVarLiveness to build the sets.
399 void Compiler::fgPerBlockLocalVarLiveness()
404 printf("*************** In fgPerBlockLocalVarLiveness()\n");
// Used to assert that the set of tracked vars doesn't change during this phase.
408 unsigned livenessVarEpoch = GetCurLVEpoch();
412 // If we don't require accurate local var lifetimes, things are simple.
413 if (!backendRequiresLocalVarLifetimes())
418 VARSET_TP liveAll(VarSetOps::MakeEmpty(this));
420 /* We simply make everything live everywhere */
422 for (lclNum = 0, varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
424 if (varDsc->lvTracked)
426 VarSetOps::AddElemD(this, liveAll, varDsc->lvVarIndex);
430 for (block = fgFirstBB; block; block = block->bbNext)
432 // Strictly speaking, the assignments for the "Def" cases aren't necessary here.
433 // The empty set would do as well. Use means "use-before-def", so as long as that's
434 // "all", this has the right effect.
435 VarSetOps::Assign(this, block->bbVarUse, liveAll);
436 VarSetOps::Assign(this, block->bbVarDef, liveAll);
437 VarSetOps::Assign(this, block->bbLiveIn, liveAll);
438 block->bbMemoryUse = fullMemoryKindSet;
439 block->bbMemoryDef = fullMemoryKindSet;
440 block->bbMemoryLiveIn = fullMemoryKindSet;
441 block->bbMemoryLiveOut = fullMemoryKindSet;
443 switch (block->bbJumpKind)
445 case BBJ_EHFINALLYRET:
// Nothing is live out of these terminators.
448 VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
451 VarSetOps::Assign(this, block->bbLiveOut, liveAll);
456 // In minopts, we don't explicitly build SSA or value-number; GcHeap and
457 // ByrefExposed implicitly (conservatively) change state at each instr.
458 byrefStatesMatchGcHeapStates = true;
463 // Avoid allocations in the long case.
464 VarSetOps::AssignNoCopy(this, fgCurUseSet, VarSetOps::MakeEmpty(this));
465 VarSetOps::AssignNoCopy(this, fgCurDefSet, VarSetOps::MakeEmpty(this));
467 // GC Heap and ByrefExposed can share states unless we see a def of byref-exposed
468 // memory that is not a GC Heap def.
469 byrefStatesMatchGcHeapStates = true;
471 for (block = fgFirstBB; block; block = block->bbNext)
// Reset the per-block accumulators before walking this block's nodes.
473 VarSetOps::ClearD(this, fgCurUseSet);
474 VarSetOps::ClearD(this, fgCurDefSet);
476 fgCurMemoryUse = emptyMemoryKindSet;
477 fgCurMemoryDef = emptyMemoryKindSet;
478 fgCurMemoryHavoc = emptyMemoryKindSet;
// LIR form: walk the block's non-phi node range directly.
483 for (GenTree* node : LIR::AsRange(block).NonPhiNodes())
485 fgPerNodeLocalVarLiveness(node);
// HIR form: walk each statement's linearized node list.
490 for (GenTreeStmt* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNextStmt)
493 for (GenTree* node = stmt->gtStmtList; node != nullptr; node = node->gtNext)
495 fgPerNodeLocalVarLiveness(node);
500 /* Get the TCB local and mark it as used */
502 if (block->bbJumpKind == BBJ_RETURN && info.compCallUnmanaged)
504 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
505 if (!opts.ShouldUsePInvokeHelpers())
507 noway_assert(info.compLvFrameListRoot < lvaCount);
509 LclVarDsc* varDsc = &lvaTable[info.compLvFrameListRoot];
511 if (varDsc->lvTracked)
513 if (!VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
515 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
// Dump the computed per-block sets (debug only).
524 VARSET_TP allVars(VarSetOps::Union(this, fgCurUseSet, fgCurDefSet));
525 printf(FMT_BB, block->bbNum);
526 printf(" USE(%d)=", VarSetOps::Count(this, fgCurUseSet));
527 lvaDispVarSet(fgCurUseSet, allVars);
528 for (MemoryKind memoryKind : allMemoryKinds())
530 if ((fgCurMemoryUse & memoryKindSet(memoryKind)) != 0)
532 printf(" + %s", memoryKindNames[memoryKind]);
535 printf("\n DEF(%d)=", VarSetOps::Count(this, fgCurDefSet));
536 lvaDispVarSet(fgCurDefSet, allVars);
537 for (MemoryKind memoryKind : allMemoryKinds())
539 if ((fgCurMemoryDef & memoryKindSet(memoryKind)) != 0)
541 printf(" + %s", memoryKindNames[memoryKind]);
543 if ((fgCurMemoryHavoc & memoryKindSet(memoryKind)) != 0)
// Publish the accumulated sets into the block.
552 VarSetOps::Assign(this, block->bbVarUse, fgCurUseSet);
553 VarSetOps::Assign(this, block->bbVarDef, fgCurDefSet);
554 block->bbMemoryUse = fgCurMemoryUse;
555 block->bbMemoryDef = fgCurMemoryDef;
556 block->bbMemoryHavoc = fgCurMemoryHavoc;
558 /* also initialize the IN set, just in case we will do multiple DFAs */
560 VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
561 block->bbMemoryLiveIn = emptyMemoryKindSet;
564 noway_assert(livenessVarEpoch == GetCurLVEpoch());
568 printf("** Memory liveness computed, GcHeap states and ByrefExposed states %s\n",
569 (byrefStatesMatchGcHeapStates ? "match" : "diverge"));
574 // Helper functions to mark variables live over their entire scope
//------------------------------------------------------------------------
// fgBeginScopeLife: Add a variable entering its debug scope to `inScope`
//    (only if the variable is tracked).
576 void Compiler::fgBeginScopeLife(VARSET_TP* inScope, VarScopeDsc* var)
580 LclVarDsc* lclVarDsc1 = &lvaTable[var->vsdVarNum];
582 if (lclVarDsc1->lvTracked)
584 VarSetOps::AddElemD(this, *inScope, lclVarDsc1->lvVarIndex);
//------------------------------------------------------------------------
// fgEndScopeLife: Remove a variable leaving its debug scope from `inScope`
//    (only if the variable is tracked).
588 void Compiler::fgEndScopeLife(VARSET_TP* inScope, VarScopeDsc* var)
592 LclVarDsc* lclVarDsc1 = &lvaTable[var->vsdVarNum];
594 if (lclVarDsc1->lvTracked)
596 VarSetOps::RemoveElemD(this, *inScope, lclVarDsc1->lvVarIndex);
600 /*****************************************************************************/
//------------------------------------------------------------------------
// fgMarkInScope: Record `inScope` as the block's debug-scope set and
//    artificially mark those vars as used and live in/out of the block,
//    so a later fgLiveVarAnalysis() keeps them alive for the debugger.
602 void Compiler::fgMarkInScope(BasicBlock* block, VARSET_VALARG_TP inScope)
607 printf("Scope info: block " FMT_BB " marking in scope: ", block->bbNum);
608 dumpConvertedVarSet(this, inScope);
613 /* Record which vars are artifically kept alive for debugging */
615 VarSetOps::Assign(this, block->bbScope, inScope);
617 /* Being in scope implies a use of the variable. Add the var to bbVarUse
618 so that redoing fgLiveVarAnalysis() will work correctly */
620 VarSetOps::UnionD(this, block->bbVarUse, inScope);
622 /* Artifically mark all vars in scope as alive */
624 VarSetOps::UnionD(this, block->bbLiveIn, inScope);
625 VarSetOps::UnionD(this, block->bbLiveOut, inScope);
//------------------------------------------------------------------------
// fgUnmarkInScope: Undo fgMarkInScope for the vars in `unmarkScope`,
//    removing them from the block's scope, use, and live-in/out sets.
//    `unmarkScope` must be a subset of the block's current bbScope.
628 void Compiler::fgUnmarkInScope(BasicBlock* block, VARSET_VALARG_TP unmarkScope)
633 printf("Scope info: block " FMT_BB " UNmarking in scope: ", block->bbNum);
634 dumpConvertedVarSet(this, unmarkScope);
639 assert(VarSetOps::IsSubset(this, unmarkScope, block->bbScope));
641 VarSetOps::DiffD(this, block->bbScope, unmarkScope);
642 VarSetOps::DiffD(this, block->bbVarUse, unmarkScope);
643 VarSetOps::DiffD(this, block->bbLiveIn, unmarkScope);
644 VarSetOps::DiffD(this, block->bbLiveOut, unmarkScope);
//------------------------------------------------------------------------
// fgDispDebugScopes: Dump each block's debug-scope variable set (debug aid).
649 void Compiler::fgDispDebugScopes()
651 printf("\nDebug scopes:\n");
654 for (block = fgFirstBB; block; block = block->bbNext)
656 printf(FMT_BB ": ", block->bbNum);
657 dumpConvertedVarSet(this, block->bbScope);
664 /*****************************************************************************
666 * Mark variables live across their entire scope.
669 #if FEATURE_EH_FUNCLETS
//------------------------------------------------------------------------
// fgExtendDbgScopes (funclet version): Mark all tracked locals live over
//    their entire reported scope. Walks blocks in order, maintaining the
//    current in-scope set from the scope lists; restarts the scope lists
//    at each funclet since funclet blocks are out of IL-offset order.
671 void Compiler::fgExtendDbgScopes()
673 compResetScopeLists();
678 printf("\nMarking vars alive over their entire scope :\n\n");
683 compDispScopeLists();
687 VARSET_TP inScope(VarSetOps::MakeEmpty(this));
689 // Mark all tracked LocalVars live over their scope - walk the blocks
690 // keeping track of the current life, and assign it to the blocks.
692 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
694 // If we get to a funclet, reset the scope lists and start again, since the block
695 // offsets will be out of order compared to the previous block.
697 if (block->bbFlags & BBF_FUNCLET_BEG)
699 compResetScopeLists();
700 VarSetOps::ClearD(this, inScope);
703 // Process all scopes up to the current offset
705 if (block->bbCodeOffs != BAD_IL_OFFSET)
707 compProcessScopesUntil(block->bbCodeOffs, &inScope, &Compiler::fgBeginScopeLife, &Compiler::fgEndScopeLife);
710 // Assign the current set of variables that are in scope to the block variables tracking this.
712 fgMarkInScope(block, inScope);
723 #else // !FEATURE_EH_FUNCLETS
//------------------------------------------------------------------------
// fgExtendDbgScopes (non-funclet version): Mark all tracked locals live
//    over their entire reported scope. Walks blocks in IL-offset order,
//    opening scopes at block starts (catching up over any gaps in the
//    IL offsets) and closing them at block ends.
725 void Compiler::fgExtendDbgScopes()
727 compResetScopeLists();
732 printf("\nMarking vars alive over their entire scope :\n\n");
733 compDispScopeLists();
737 VARSET_TP inScope(VarSetOps::MakeEmpty(this));
// Open any scopes that begin at IL offset 0 before walking the blocks.
738 compProcessScopesUntil(0, &inScope, &Compiler::fgBeginScopeLife, &Compiler::fgEndScopeLife);
740 IL_OFFSET lastEndOffs = 0;
742 // Mark all tracked LocalVars live over their scope - walk the blocks
743 // keeping track of the current life, and assign it to the blocks.
746 for (block = fgFirstBB; block; block = block->bbNext)
748 // Find scopes becoming alive. If there is a gap in the instr
749 // sequence, we need to process any scopes on those missing offsets.
751 if (block->bbCodeOffs != BAD_IL_OFFSET)
753 if (lastEndOffs != block->bbCodeOffs)
755 noway_assert(lastEndOffs < block->bbCodeOffs)
757 compProcessScopesUntil(block->bbCodeOffs, &inScope, &Compiler::fgBeginScopeLife,
758 &Compiler::fgEndScopeLife);
762 while (VarScopeDsc* varScope = compGetNextEnterScope(block->bbCodeOffs))
764 fgBeginScopeLife(&inScope, varScope);
769 // Assign the current set of variables that are in scope to the block variables tracking this.
771 fgMarkInScope(block, inScope);
773 // Find scopes going dead.
775 if (block->bbCodeOffsEnd != BAD_IL_OFFSET)
777 VarScopeDsc* varScope;
778 while ((varScope = compGetNextExitScope(block->bbCodeOffsEnd)) != nullptr)
780 fgEndScopeLife(&inScope, varScope);
783 lastEndOffs = block->bbCodeOffsEnd;
787 /* Everything should be out of scope by the end of the method. But if the
788 last BB got removed, then inScope may not be empty. */
790 noway_assert(VarSetOps::IsEmpty(this, inScope) || lastEndOffs < info.compILCodeSize);
793 #endif // !FEATURE_EH_FUNCLETS
795 /*****************************************************************************
797 * For debuggable code, we allow redundant assignments to vars
798 * by marking them live over their entire scope.
//------------------------------------------------------------------------
// fgExtendDbgLifetimes: For debuggable code, keep variables alive over
//    their entire scope, and zero-initialize any variable that is made
//    artificially live but is not live-in to the block (otherwise it
//    would contain garbage until the IL initializes it).
//
// Notes:
//    Requires opts.compDbgCode and at least one reported var scope.
//    Initializers are appended as statements (HIR) or as lowered
//    store-local nodes inserted before the block terminator (LIR).
801 void Compiler::fgExtendDbgLifetimes()
806 printf("*************** In fgExtendDbgLifetimes()\n");
810 noway_assert(opts.compDbgCode && (info.compVarScopesCount > 0));
812 /*-------------------------------------------------------------------------
813 * Extend the lifetimes over the entire reported scope of the variable.
818 /*-------------------------------------------------------------------------
819 * Partly update liveness info so that we handle any funky BBF_INTERNAL
820 * blocks inserted out of sequence.
830 fgLiveVarAnalysis(true);
832 /* For compDbgCode, we prepend an empty BB which will hold the
833 initializations of variables which are in scope at IL offset 0 (but
834 not initialized by the IL code). Since they will currently be
835 marked as live on entry to fgFirstBB, unmark the liveness so that
836 the following code will know to add the initializations. */
838 assert(fgFirstBBisScratch());
// Collect tracked args (or their single promoted field) so we don't unmark
// them in the scratch block: args are initialized by the caller.
840 VARSET_TP trackedArgs(VarSetOps::MakeEmpty(this));
842 for (unsigned argNum = 0; argNum < info.compArgsCount; argNum++)
844 LclVarDsc* argDsc = lvaTable + argNum;
845 if (argDsc->lvPromoted)
847 lvaPromotionType promotionType = lvaGetPromotionType(argDsc);
849 if (promotionType == PROMOTION_TYPE_INDEPENDENT)
851 noway_assert(argDsc->lvFieldCnt == 1); // We only handle one field here
853 unsigned fieldVarNum = argDsc->lvFieldLclStart;
854 argDsc = lvaTable + fieldVarNum;
857 noway_assert(argDsc->lvIsParam);
858 if (argDsc->lvTracked)
860 noway_assert(!VarSetOps::IsMember(this, trackedArgs, argDsc->lvVarIndex)); // Each arg should define a
862 VarSetOps::AddElemD(this, trackedArgs, argDsc->lvVarIndex);
866 // Don't unmark struct locals, either.
867 VARSET_TP noUnmarkVars(trackedArgs);
869 for (unsigned i = 0; i < lvaCount; i++)
871 LclVarDsc* varDsc = &lvaTable[i];
872 if (varTypeIsStruct(varDsc) && varDsc->lvTracked)
874 VarSetOps::AddElemD(this, noUnmarkVars, varDsc->lvVarIndex);
877 fgUnmarkInScope(fgFirstBB, VarSetOps::Diff(this, fgFirstBB->bbScope, noUnmarkVars));
879 /*-------------------------------------------------------------------------
880 * As we keep variables artifically alive over their entire scope,
881 * we need to also artificially initialize them if the scope does
882 * not exactly match the real lifetimes, or they will contain
883 * garbage until they are initialized by the IL code.
886 VARSET_TP initVars(VarSetOps::MakeEmpty(this)); // Vars which are artificially made alive
888 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
890 VarSetOps::ClearD(this, initVars);
// Gather the scope sets of all successors reachable from this block's
// jump kind; those vars must be initialized here if not already live.
892 switch (block->bbJumpKind)
895 PREFIX_ASSUME(block->bbNext != nullptr);
896 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
901 case BBJ_EHFILTERRET:
902 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
905 case BBJ_CALLFINALLY:
906 if (!(block->bbFlags & BBF_RETLESS_CALL))
908 assert(block->isBBCallAlwaysPair());
909 PREFIX_ASSUME(block->bbNext != nullptr);
910 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
912 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
916 PREFIX_ASSUME(block->bbNext != nullptr);
917 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
918 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
926 jmpCnt = block->bbJumpSwt->bbsCount;
927 jmpTab = block->bbJumpSwt->bbsDstTab;
931 VarSetOps::UnionD(this, initVars, (*jmpTab)->bbScope);
932 } while (++jmpTab, --jmpCnt);
936 case BBJ_EHFINALLYRET:
941 /* We don't have to do anything as we mark
942 * all vars live on entry to a catch handler as
948 noway_assert(!"Unexpected bbJumpKind");
952 /* If the var is already live on entry to the current BB,
953 we would have already initialized it. So ignore bbLiveIn */
955 VarSetOps::DiffD(this, initVars, block->bbLiveIn);
957 /* Add statements initializing the vars, if there are any to initialize */
958 unsigned blockWeight = block->getBBWeight(this);
960 VarSetOps::Iter iter(this, initVars);
961 unsigned varIndex = 0;
962 while (iter.NextElem(&varIndex))
964 /* Create initialization tree */
966 unsigned varNum = lvaTrackedToVarNum[varIndex];
967 LclVarDsc* varDsc = &lvaTable[varNum];
968 var_types type = varDsc->TypeGet();
970 // Don't extend struct lifetimes -- they aren't enregistered, anyway.
971 if (type == TYP_STRUCT)
976 // If we haven't already done this ...
977 if (!fgLocalVarLivenessDone)
979 // Create a "zero" node
980 GenTree* zero = gtNewZeroConNode(genActualType(type));
982 // Create initialization node
// HIR path: build "varNum = 0" as an assignment statement and append it.
985 GenTree* varNode = gtNewLclvNode(varNum, type);
986 GenTree* initNode = gtNewAssignNode(varNode, zero);
988 // Create a statement for the initializer, sequence it, and append it to the current BB.
989 GenTree* initStmt = gtNewStmt(initNode);
990 gtSetStmtInfo(initStmt);
991 fgSetStmtSeq(initStmt);
992 fgInsertStmtNearEnd(block, initStmt);
// LIR path: build a STORE_LCL_VAR of zero, decompose (32-bit longs),
// lower it, and insert before the block terminator.
996 GenTree* store = new (this, GT_STORE_LCL_VAR) GenTreeLclVar(GT_STORE_LCL_VAR, type, varNum);
997 store->gtOp.gtOp1 = zero;
998 store->gtFlags |= (GTF_VAR_DEF | GTF_ASG);
1000 LIR::Range initRange = LIR::EmptyRange();
1001 initRange.InsertBefore(nullptr, zero, store);
1003 #if !defined(_TARGET_64BIT_)
1004 DecomposeLongs::DecomposeRange(this, blockWeight, initRange);
1005 #endif // !defined(_TARGET_64BIT_)
1006 m_pLowering->LowerRange(block, initRange);
1008 // Naively inserting the initializer at the end of the block may add code after the block's
1009 // terminator, in which case the inserted code will never be executed (and the IR for the
1010 // block will be invalid). Use `LIR::InsertBeforeTerminator` to avoid this problem.
1011 LIR::InsertBeforeTerminator(block, std::move(initRange));
1017 printf("Created zero-init of V%02u in " FMT_BB "\n", varNum, block->bbNum);
1020 block->bbFlags |= BBF_CHANGED; // indicates that the contents of the block have changed.
1023 /* Update liveness information so that redoing fgLiveVarAnalysis()
1024 will work correctly if needed */
1026 VarSetOps::AddElemD(this, block->bbVarDef, varIndex);
1027 VarSetOps::AddElemD(this, block->bbLiveOut, varIndex);
1031 // raMarkStkVars() reserves stack space for unused variables (which
1032 // needs to be initialized). However, arguments don't need to be initialized.
1033 // So just ensure that they don't have a 0 ref cnt
1035 unsigned lclNum = 0;
1036 for (LclVarDsc *varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
1038 if (lclNum >= info.compArgsCount)
1040 break; // early exit for loop
1043 if (varDsc->lvIsRegArg)
1045 varDsc->lvImplicitlyReferenced = true;
1052 printf("\nBB liveness after fgExtendDbgLifetimes():\n\n");
1059 //------------------------------------------------------------------------
1060 // fgGetHandlerLiveVars: determine set of locals live because of implicit
1061 // exception flow from a block.
1064 // block - the block in question
1067 // Additional set of locals to be considered live throughout the block.
1070 // Assumes caller has screened candidate blocks to only those with
1071 // exception flow, via `ehBlockHasExnFlowDsc`.
1073 // Exception flow can arise because of a newly raised exception (for
1074 // blocks within try regions) or because of an actively propagating exception
1075 // (for filter blocks). This flow effectively creates additional successor
1076 // edges in the flow graph that the jit does not model. This method computes
1077 // the net contribution from all the missing successor edges.
1079 // For example, with the following C# source, during EH processing of the throw,
1080 // the outer filter will execute in pass1, before the inner handler executes
1081 // in pass2, and so the filter blocks should show the inner handler's local is live.
1085 // using (AllocateObject()) // ==> try-finally; handler calls Dispose
1087 // throw new Exception();
1090 // catch (Exception e1) when (IsExpectedException(e1))
1092 // Console.WriteLine("In catch 1");
// (See the function-header comment above for the full contract: returns the
// additional set of locals live throughout `block` because of implicit
// exception flow; caller must have checked ehBlockHasExnFlowDsc.)
1095 VARSET_VALRET_TP Compiler::fgGetHandlerLiveVars(BasicBlock* block)
1097 noway_assert(block);
1098 noway_assert(ehBlockHasExnFlowDsc(block));
1100 VARSET_TP liveVars(VarSetOps::MakeEmpty(this));
1101 EHblkDsc* HBtab = ehGetBlockExnFlowDsc(block);
1105 /* Either we enter the filter first or the catch/finally */
1106 if (HBtab->HasFilter())
1108 VarSetOps::UnionD(this, liveVars, HBtab->ebdFilter->bbLiveIn);
1109 #if FEATURE_EH_FUNCLETS
1110 // The EH subsystem can trigger a stack walk after the filter
1111 // has returned, but before invoking the handler, and the only
1112 // IP address reported from this method will be the original
1113 // faulting instruction, thus everything in the try body
1114 // must report as live any variables live-out of the filter
1115 // (which is the same as those live-in to the handler)
1116 VarSetOps::UnionD(this, liveVars, HBtab->ebdHndBeg->bbLiveIn);
1117 #endif // FEATURE_EH_FUNCLETS
1121 VarSetOps::UnionD(this, liveVars, HBtab->ebdHndBeg->bbLiveIn);
1124 /* If we have nested try's edbEnclosing will provide them */
1125 noway_assert((HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) ||
1126 (HBtab->ebdEnclosingTryIndex > ehGetIndex(HBtab)));
// Walk outward through enclosing try regions, accumulating handler live-ins.
1128 unsigned outerIndex = HBtab->ebdEnclosingTryIndex;
1129 if (outerIndex == EHblkDsc::NO_ENCLOSING_INDEX)
1133 HBtab = ehGetDsc(outerIndex);
1137 // If this block is within a filter, we also need to report as live
1138 // any vars live into enclosed finally or fault handlers, since the
1139 // filter will run during the first EH pass, and enclosed or enclosing
1140 // handlers will run during the second EH pass. So all these handlers
1141 // are "exception flow" successors of the filter.
1143 // Note we are relying on ehBlockHasExnFlowDsc to return true
1144 // for any filter block that we should examine here.
1145 if (block->hasHndIndex())
1147 const unsigned thisHndIndex = block->getHndIndex();
1148 EHblkDsc* enclosingHBtab = ehGetDsc(thisHndIndex);
1150 if (enclosingHBtab->InFilterRegionBBRange(block))
1152 assert(enclosingHBtab->HasFilter());
1154 // Search the EH table for enclosed regions.
1156 // All the enclosed regions will be lower numbered and
1157 // immediately prior to and contiguous with the enclosing
1158 // region in the EH tab.
1159 unsigned index = thisHndIndex;
1164 unsigned enclosingIndex = ehGetEnclosingTryIndex(index);
1165 bool isEnclosed = false;
1167 // To verify this is an enclosed region, search up
1168 // through the enclosing regions until we find the
1169 // region associated with the filter.
1170 while (enclosingIndex != EHblkDsc::NO_ENCLOSING_INDEX)
1172 if (enclosingIndex == thisHndIndex)
1178 enclosingIndex = ehGetEnclosingTryIndex(enclosingIndex);
1181 // If we found an enclosed region, check if the region
1182 // is a try fault or try finally, and if so, add any
1183 // locals live into the enclosed region's handler into this
1184 // block's live-in set.
1187 EHblkDsc* enclosedHBtab = ehGetDsc(index);
1189 if (enclosedHBtab->HasFinallyOrFaultHandler())
1191 VarSetOps::UnionD(this, liveVars, enclosedHBtab->ebdHndBeg->bbLiveIn);
1194 // Once we run across a non-enclosed region, we can stop searching.
// LiveVarAnalysis: classic iterative backward dataflow for tracked-local and
// memory-kind liveness. Walks blocks last-to-first, computing bbLiveIn/bbLiveOut
// (and bbMemoryLiveIn/bbMemoryLiveOut), iterating to a fixed point whenever a
// possible backedge (or EH-induced loop) is observed.
class LiveVarAnalysis
Compiler* m_compiler;
// Set when any successor has a bbNum <= the current block's, or when implicit
// EH flow is seen; forces another full pass to reach closure.
bool m_hasPossibleBackEdge;
// Memory-kind liveness is a small bitset of memory kinds, not a VARSET.
unsigned m_memoryLiveIn;
unsigned m_memoryLiveOut;
// Scratch live-out set for the block currently being analyzed.
// NOTE(review): the matching m_liveIn member declaration is initialized below
// but its declaration line is not visible in this view.
VARSET_TP m_liveOut;
LiveVarAnalysis(Compiler* compiler)
: m_compiler(compiler)
, m_hasPossibleBackEdge(false)
, m_memoryLiveIn(emptyMemoryKindSet)
, m_memoryLiveOut(emptyMemoryKindSet)
, m_liveIn(VarSetOps::MakeEmpty(compiler))
, m_liveOut(VarSetOps::MakeEmpty(compiler))
// PerBlockAnalysis: recompute live-in/live-out for one block from its
// successors' live-in sets. Returns true if the block's recorded liveness
// changed (i.e. the fixed point has not yet been reached).
bool PerBlockAnalysis(BasicBlock* block, bool updateInternalOnly, bool keepAliveThis)
/* Compute the 'liveOut' set */
VarSetOps::ClearD(m_compiler, m_liveOut);
m_memoryLiveOut = emptyMemoryKindSet;
if (block->endsWithJmpMethod(m_compiler))
// A JMP uses all the arguments, so mark them all
// as live at the JMP instruction
const LclVarDsc* varDscEndParams = m_compiler->lvaTable + m_compiler->info.compArgsCount;
for (LclVarDsc* varDsc = m_compiler->lvaTable; varDsc < varDscEndParams; varDsc++)
noway_assert(!varDsc->lvPromoted);
if (varDsc->lvTracked)
VarSetOps::AddElemD(m_compiler, m_liveOut, varDsc->lvVarIndex);
// Additionally, union in all the live-in tracked vars of successors.
for (BasicBlock* succ : block->GetAllSuccs(m_compiler))
VarSetOps::UnionD(m_compiler, m_liveOut, succ->bbLiveIn);
m_memoryLiveOut |= succ->bbMemoryLiveIn;
if (succ->bbNum <= block->bbNum)
m_hasPossibleBackEdge = true;
/* For lvaKeepAliveAndReportThis methods, "this" has to be kept alive everywhere
Note that a function may end in a throw on an infinite loop (as opposed to a return).
"this" has to be alive everywhere even in such methods. */
VarSetOps::AddElemD(m_compiler, m_liveOut, m_compiler->lvaTable[m_compiler->info.compThisArg].lvVarIndex);
/* Compute the 'm_liveIn' set */
// liveIn = use | (liveOut & ~def), computed by LivenessD.
VarSetOps::LivenessD(m_compiler, m_liveIn, block->bbVarDef, block->bbVarUse, m_liveOut);
// Even if block->bbMemoryDef is set, we must assume that it doesn't kill memory liveness from m_memoryLiveOut,
// since (without proof otherwise) the use and def may touch different memory at run-time.
m_memoryLiveIn = m_memoryLiveOut | block->bbMemoryUse;
// Does this block have implicit exception flow to a filter or handler?
// If so, include the effects of that flow.
if (m_compiler->ehBlockHasExnFlowDsc(block))
const VARSET_TP& liveVars(m_compiler->fgGetHandlerLiveVars(block));
VarSetOps::UnionD(m_compiler, m_liveIn, liveVars);
VarSetOps::UnionD(m_compiler, m_liveOut, liveVars);
// Implicit eh edges can induce loop-like behavior,
// so make sure we iterate to closure.
m_hasPossibleBackEdge = true;
/* Has there been any change in either live set? */
bool liveInChanged = !VarSetOps::Equal(m_compiler, block->bbLiveIn, m_liveIn);
if (liveInChanged || !VarSetOps::Equal(m_compiler, block->bbLiveOut, m_liveOut))
if (updateInternalOnly)
// Only "extend" liveness over BBF_INTERNAL blocks
noway_assert(block->bbFlags & BBF_INTERNAL);
// For internal blocks, grow-only: a change counts only if the new set
// is not already a subset of the recorded one.
liveInChanged = !VarSetOps::IsSubset(m_compiler, m_liveIn, block->bbLiveIn);
if (liveInChanged || !VarSetOps::IsSubset(m_compiler, m_liveOut, block->bbLiveOut))
if (m_compiler->verbose)
printf("Scope info: block " FMT_BB " LiveIn+ ", block->bbNum);
dumpConvertedVarSet(m_compiler, VarSetOps::Diff(m_compiler, m_liveIn, block->bbLiveIn));
printf(", LiveOut+ ");
dumpConvertedVarSet(m_compiler, VarSetOps::Diff(m_compiler, m_liveOut, block->bbLiveOut));
// Internal blocks: union into the existing sets (extend, never shrink).
VarSetOps::UnionD(m_compiler, block->bbLiveIn, m_liveIn);
VarSetOps::UnionD(m_compiler, block->bbLiveOut, m_liveOut);
// Normal blocks: overwrite with the freshly computed sets.
VarSetOps::Assign(m_compiler, block->bbLiveIn, m_liveIn);
VarSetOps::Assign(m_compiler, block->bbLiveOut, m_liveOut);
const bool memoryLiveInChanged = (block->bbMemoryLiveIn != m_memoryLiveIn);
if (memoryLiveInChanged || (block->bbMemoryLiveOut != m_memoryLiveOut))
block->bbMemoryLiveIn = m_memoryLiveIn;
block->bbMemoryLiveOut = m_memoryLiveOut;
return liveInChanged || memoryLiveInChanged;
// Run: drive the backward dataflow over all blocks until no block's
// liveness changes (or until one pass suffices when no backedge exists).
void Run(bool updateInternalOnly)
const bool keepAliveThis =
m_compiler->lvaKeepAliveAndReportThis() && m_compiler->lvaTable[m_compiler->info.compThisArg].lvTracked;
/* Live Variable Analysis - Backward dataflow */
/* Visit all blocks and compute new data flow values */
VarSetOps::ClearD(m_compiler, m_liveIn);
VarSetOps::ClearD(m_compiler, m_liveOut);
m_memoryLiveIn = emptyMemoryKindSet;
m_memoryLiveOut = emptyMemoryKindSet;
for (BasicBlock* block = m_compiler->fgLastBB; block; block = block->bbPrev)
// sometimes block numbers are not monotonically increasing which
// would cause us not to identify backedges
if (block->bbNext && block->bbNext->bbNum <= block->bbNum)
m_hasPossibleBackEdge = true;
if (updateInternalOnly)
/* Only update BBF_INTERNAL blocks as they may be
syntactically out of sequence. */
noway_assert(m_compiler->opts.compDbgCode && (m_compiler->info.compVarScopesCount > 0));
if (!(block->bbFlags & BBF_INTERNAL))
if (PerBlockAnalysis(block, updateInternalOnly, keepAliveThis))
// if there is no way we could have processed a block without seeing all of its predecessors
// then there is no need to iterate
if (!m_hasPossibleBackEdge)
// Static entry point: construct the analysis and run it once.
static void Run(Compiler* compiler, bool updateInternalOnly)
LiveVarAnalysis analysis(compiler);
analysis.Run(updateInternalOnly);
1397 /*****************************************************************************
1399 * This is the classic algorithm for Live Variable Analysis.
1400 * If updateInternalOnly==true, only update BBF_INTERNAL blocks.
// fgLiveVarAnalysis: compute per-block live-in/live-out sets via LiveVarAnalysis.
// No-op when the backend does not consume local-var lifetimes (e.g. MinOpts).
void Compiler::fgLiveVarAnalysis(bool updateInternalOnly)
if (!backendRequiresLocalVarLifetimes())
LiveVarAnalysis::Run(this, updateInternalOnly);
// Dump the resulting block liveness, except during the BBF_INTERNAL-only
// debug-scope update where the full sets were not recomputed.
if (verbose && !updateInternalOnly)
printf("\nBB liveness after fgLiveVarAnalysis():\n\n");
1421 /*****************************************************************************
1422 * For updating liveset during traversal AFTER fgComputeLife has completed
// fgUpdateLiveSet: given the live set just after 'tree' (in execution order)
// and the GTF_VAR_DEATH/GTF_VAR_DEF flags fgComputeLife left on it, return the
// live set adjusted for 'tree'. Only valid after liveness has completed.
VARSET_VALRET_TP Compiler::fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tree)
VARSET_TP newLiveSet(VarSetOps::MakeCopy(this, liveSet));
assert(fgLocalVarLivenessDone == true);
GenTree* lclVarTree = tree; // After the tests below, "lclVarTree" will be the local variable.
// Handle direct local refs as well as indirections of a local's address.
if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_FLD ||
(lclVarTree = fgIsIndirOfAddrOfLocal(tree)) != nullptr)
const VARSET_TP& varBits(fgGetVarBits(lclVarTree));
if (!VarSetOps::IsEmpty(this, varBits))
if (tree->gtFlags & GTF_VAR_DEATH)
// We'd like to be able to assert the following, however if we are walking
// through a qmark/colon tree, we may encounter multiple last-use nodes.
// assert (VarSetOps::IsSubset(this, varBits, newLiveSet));
// We maintain the invariant that if the lclVarTree is a promoted struct, but the
// the lookup fails, then all the field vars (i.e., "varBits") are dying.
VARSET_TP* deadVarBits = nullptr;
if (varTypeIsStruct(lclVarTree) && LookupPromotedStructDeathVars(lclVarTree, &deadVarBits))
// Only the recorded subset of field vars dies here.
VarSetOps::DiffD(this, newLiveSet, *deadVarBits);
// All bits for this local (or all its fields) die here.
VarSetOps::DiffD(this, newLiveSet, varBits);
else if ((tree->gtFlags & GTF_VAR_DEF) != 0 && (tree->gtFlags & GTF_VAR_USEASG) == 0)
// A pure def (no use component): the variable becomes live going forward.
assert(tree == lclVarTree); // LDOBJ case should only be a use.
// This shouldn't be in newLiveSet, unless this is debug code, in which
// case we keep vars live everywhere, OR it is address-exposed, OR this block
// is part of a try block, in which case it may be live at the handler
// Could add a check that, if it's in the newLiveSet, that it's also in
// fgGetHandlerLiveVars(compCurBB), but seems excessive
assert(VarSetOps::IsEmptyIntersection(this, newLiveSet, varBits) || opts.compDbgCode ||
lvaTable[tree->gtLclVarCommon.gtLclNum].lvAddrExposed ||
(compCurBB != nullptr && ehBlockHasExnFlowDsc(compCurBB)));
VarSetOps::UnionD(this, newLiveSet, varBits);
1475 //------------------------------------------------------------------------
1476 // Compiler::fgComputeLifeCall: compute the changes to local var liveness
1477 // due to a GT_CALL node.
1480 // life - The live set that is being computed.
1481 // call - The call node in question.
// fgComputeLifeCall: update 'life' for a GT_CALL during the backward liveness
// walk. The only local affected is the P/Invoke frame-list root (TCB local),
// which must be live across unmanaged calls and tail calls in methods that
// contain P/Invokes.
void Compiler::fgComputeLifeCall(VARSET_TP& life, GenTreeCall* call)
assert(call != nullptr);
// If this is a tail-call and we have any unmanaged p/invoke calls in
// the method then we're going to run the p/invoke epilog
// So we mark the FrameRoot as used by this instruction.
// This ensure that this variable is kept alive at the tail-call
if (call->IsTailCall() && info.compCallUnmanaged)
assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
if (!opts.ShouldUsePInvokeHelpers())
/* Get the TCB local and make it live */
noway_assert(info.compLvFrameListRoot < lvaCount);
LclVarDsc* frameVarDsc = &lvaTable[info.compLvFrameListRoot];
if (frameVarDsc->lvTracked)
VarSetOps::AddElemD(this, life, frameVarDsc->lvVarIndex);
// TODO: we should generate the code for saving to/restoring
// from the inlined N/Direct frame instead.
/* Is this call to unmanaged code? */
if (call->IsUnmanaged())
/* Get the TCB local and make it live */
assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
if (!opts.ShouldUsePInvokeHelpers())
noway_assert(info.compLvFrameListRoot < lvaCount);
LclVarDsc* frameVarDsc = &lvaTable[info.compLvFrameListRoot];
if (frameVarDsc->lvTracked)
unsigned varIndex = frameVarDsc->lvVarIndex;
noway_assert(varIndex < lvaTrackedCount);
// Is the variable already known to be alive?
if (VarSetOps::IsMember(this, life, varIndex))
// Since we may call this multiple times, clear the GTF_CALL_M_FRAME_VAR_DEATH if set.
call->gtCallMoreFlags &= ~GTF_CALL_M_FRAME_VAR_DEATH;
// The variable is just coming to life
// Since this is a backwards walk of the trees
// that makes this change in liveness a 'last-use'
VarSetOps::AddElemD(this, life, varIndex);
call->gtCallMoreFlags |= GTF_CALL_M_FRAME_VAR_DEATH;
1550 //------------------------------------------------------------------------
1551 // Compiler::fgComputeLifeTrackedLocalUse:
1552 // Compute the changes to local var liveness due to a use of a tracked local var.
1555 // life - The live set that is being computed.
1556 // varDsc - The LclVar descriptor for the variable being used or defined.
1557 // node - The node that is defining the lclVar.
// fgComputeLifeTrackedLocalUse: update 'life' for a use (not a def) of a
// tracked local. In the backward walk, a use of a currently-dead variable is
// its last use in forward order, so it gets GTF_VAR_DEATH.
void Compiler::fgComputeLifeTrackedLocalUse(VARSET_TP& life, LclVarDsc& varDsc, GenTreeLclVarCommon* node)
assert(node != nullptr);
assert((node->gtFlags & GTF_VAR_DEF) == 0);
assert(varDsc.lvTracked);
const unsigned varIndex = varDsc.lvVarIndex;
// Is the variable already known to be alive?
if (VarSetOps::IsMember(this, life, varIndex))
// Since we may do liveness analysis multiple times, clear the GTF_VAR_DEATH if set.
node->gtFlags &= ~GTF_VAR_DEATH;
printf("Ref V%02u,T%02u] at ", node->gtLclNum, varIndex);
printf(" life %s -> %s\n", VarSetOps::ToString(this, life),
VarSetOps::ToString(this, VarSetOps::AddElem(this, life, varIndex)));
// The variable is being used, and it is not currently live.
// So the variable is just coming to life
node->gtFlags |= GTF_VAR_DEATH;
VarSetOps::AddElemD(this, life, varIndex);
1590 //------------------------------------------------------------------------
1591 // Compiler::fgComputeLifeTrackedLocalDef:
1592 // Compute the changes to local var liveness due to a def of a tracked local var and return `true` if the def is a
1596 // life - The live set that is being computed.
1597 // keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1598 // varDsc - The LclVar descriptor for the variable being used or defined.
1599 // node - The node that is defining the lclVar.
1602 // `true` if the def is a dead store; `false` otherwise.
// fgComputeLifeTrackedLocalDef: update 'life' for a def of a tracked local.
// Returns true when the def is a dead store (variable not live after the def
// and safe to remove); callers use this to drive dead-store elimination.
bool Compiler::fgComputeLifeTrackedLocalDef(VARSET_TP& life,
VARSET_VALARG_TP keepAliveVars,
GenTreeLclVarCommon* node)
assert(node != nullptr);
assert((node->gtFlags & GTF_VAR_DEF) != 0);
assert(varDsc.lvTracked);
const unsigned varIndex = varDsc.lvVarIndex;
if (VarSetOps::IsMember(this, life, varIndex))
// The variable is live
if ((node->gtFlags & GTF_VAR_USEASG) == 0)
// A pure def kills the variable in the backward walk.
// Remove the variable from the live set if it is not in the keepalive set.
if (!VarSetOps::IsMember(this, keepAliveVars, varIndex))
VarSetOps::RemoveElemD(this, life, varIndex);
printf("Def V%02u,T%02u at ", node->gtLclNum, varIndex);
printf(" life %s -> %s\n",
VarSetOps::ToString(this,
VarSetOps::Union(this, life, VarSetOps::MakeSingleton(this, varIndex))),
VarSetOps::ToString(this, life));
// Variable not live after the def: mark the def itself as a death.
node->gtFlags |= GTF_VAR_DEATH;
if (!opts.MinOpts())
// keepAliveVars always stay alive
noway_assert(!VarSetOps::IsMember(this, keepAliveVars, varIndex));
// Do not consider this store dead if the target local variable represents
// a promoted struct field of an address exposed local or if the address
// of the variable has been exposed. Improved alias analysis could allow
// stores to these sorts of variables to be removed at the cost of compile
return !varDsc.lvAddrExposed && !(varDsc.lvIsStructField && lvaTable[varDsc.lvParentLcl].lvAddrExposed);
1658 //------------------------------------------------------------------------
1659 // Compiler::fgComputeLifeUntrackedLocal:
1660 // Compute the changes to local var liveness due to a use or a def of an untracked local var.
1663 // It may seem a bit counter-intuitive that a change to an untracked lclVar could affect the liveness of tracked
1664 // lclVars. In theory, this could happen with promoted (especially dependently-promoted) structs: in these cases,
1665 // a use or def of the untracked struct var is treated as a use or def of any of its component fields that are
1669 // life - The live set that is being computed.
1670 // keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1671 // varDsc - The LclVar descriptor for the variable being used or defined.
1672 // lclVarNode - The node that corresponds to the local var def or use.
// fgComputeLifeUntrackedLocal: update 'life' for a use or def of an untracked
// local. Only promoted structs matter here: the ref is treated as touching
// every tracked field of the struct ('varBit' below); non-promoted untracked
// locals change nothing and return early.
void Compiler::fgComputeLifeUntrackedLocal(VARSET_TP& life,
VARSET_VALARG_TP keepAliveVars,
GenTreeLclVarCommon* lclVarNode)
assert(lclVarNode != nullptr);
// Fast out: nothing to do unless this is a promoted struct local.
if (!varTypeIsStruct(varDsc.lvType) || (lvaGetPromotionType(&varDsc) == PROMOTION_TYPE_NONE))
// Collect the tracked-var indices of all the struct's field locals.
VARSET_TP varBit(VarSetOps::MakeEmpty(this));
for (unsigned i = varDsc.lvFieldLclStart; i < varDsc.lvFieldLclStart + varDsc.lvFieldCnt; ++i)
#if !defined(_TARGET_64BIT_)
// On 32-bit targets, skip promoted-long fields (they are themselves promoted).
if (!varTypeIsLong(lvaTable[i].lvType) || !lvaTable[i].lvPromoted)
#endif // !defined(_TARGET_64BIT_)
noway_assert(lvaTable[i].lvIsStructField);
if (lvaTable[i].lvTracked)
const unsigned varIndex = lvaTable[i].lvVarIndex;
noway_assert(varIndex < lvaTrackedCount);
VarSetOps::AddElemD(this, varBit, varIndex);
if (lclVarNode->gtFlags & GTF_VAR_DEF)
// A def kills all the (non-keepalive) field vars in the backward walk.
VarSetOps::DiffD(this, varBit, keepAliveVars);
VarSetOps::DiffD(this, life, varBit);
// Are the variables already known to be alive?
if (VarSetOps::IsSubset(this, varBit, life))
lclVarNode->gtFlags &= ~GTF_VAR_DEATH; // Since we may now call this multiple times, reset if live.
// Some variables are being used, and they are not currently live.
// So they are just coming to life, in the backwards traversal; in a forwards
// traversal, one or more are dying. Mark this.
lclVarNode->gtFlags |= GTF_VAR_DEATH;
// Are all the variables becoming alive (in the backwards traversal), or just a subset?
if (!VarSetOps::IsEmptyIntersection(this, varBit, life))
// Only a subset of the variables are become live; we must record that subset.
// (Lack of an entry for "lclVarNode" will be considered to imply all become dead in the
// forward traversal.)
VARSET_TP* deadVarSet = new (this, CMK_bitset) VARSET_TP;
VarSetOps::AssignNoCopy(this, *deadVarSet, VarSetOps::Diff(this, varBit, life));
GetPromotedStructDeathVars()->Set(lclVarNode, deadVarSet);
// In any case, all the field vars are now live (in the backwards traversal).
VarSetOps::UnionD(this, life, varBit);
1738 //------------------------------------------------------------------------
1739 // Compiler::fgComputeLifeLocal:
1740 // Compute the changes to local var liveness due to a use or a def of a local var and indicates whether the use/def
1744 // life - The live set that is being computed.
1745 // keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1746 // lclVarNode - The node that corresponds to the local var def or use.
1749 // `true` if the local var node corresponds to a dead store; `false` otherwise.
// fgComputeLifeLocal: dispatch a local-var use/def to the tracked or untracked
// liveness helper. Returns true only for a tracked-local def that is a dead
// store; uses and untracked locals always return false (via the fall-through).
bool Compiler::fgComputeLifeLocal(VARSET_TP& life, VARSET_VALARG_TP keepAliveVars, GenTree* lclVarNode)
unsigned lclNum = lclVarNode->gtLclVarCommon.gtLclNum;
assert(lclNum < lvaCount);
LclVarDsc& varDsc = lvaTable[lclNum];
// Is this a tracked variable?
if (varDsc.lvTracked)
/* Is this a definition or use? */
if (lclVarNode->gtFlags & GTF_VAR_DEF)
return fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode->AsLclVarCommon());
fgComputeLifeTrackedLocalUse(life, varDsc, lclVarNode->AsLclVarCommon());
fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode->AsLclVarCommon());
1777 /*****************************************************************************
1779 * Compute the set of live variables at each node in a given statement
1780 * or subtree of a statement moving backward from startNode to endNode
// fgComputeLife: backward walk over the linked nodes of a statement (from
// startNode back toward endNode), updating 'life' at each call/local node and
// removing dead stores via fgRemoveDeadStore. Used for the non-LIR (tree) form;
// fgComputeLifeLIR is the LIR counterpart.
void Compiler::fgComputeLife(VARSET_TP& life,
VARSET_VALARG_TP volatileVars,
bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
// Don't kill vars in scope
VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, compCurBB->bbScope));
noway_assert(VarSetOps::IsSubset(this, keepAliveVars, life));
noway_assert(compCurStmt->gtOper == GT_STMT);
noway_assert(endNode || (startNode == compCurStmt->gtStmt.gtStmtExpr));
// NOTE: Live variable analysis will not work if you try
// to use the result of an assignment node directly!
for (tree = startNode; tree != endNode; tree = tree->gtPrev)
// QMARKs should have been expanded before liveness runs here.
assert(tree->OperGet() != GT_QMARK);
if (tree->gtOper == GT_CALL)
fgComputeLifeCall(life, tree->AsCall());
else if (tree->OperIsNonPhiLocal() || tree->OperIsLocalAddr())
bool isDeadStore = fgComputeLifeLocal(life, keepAliveVars, tree);
LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
bool doAgain = false;
// fgRemoveDeadStore may rewrite the statement; 'tree' is updated through
// the pointer, and a true return means skip the rest of this statement.
if (fgRemoveDeadStore(&tree, varDsc, life, &doAgain, pStmtInfoDirty DEBUGARG(treeModf)))
// fgComputeLifeLIR: backward walk over a block's LIR range, updating 'life'
// and performing dead-code elimination node-by-node. Dead nodes are removed
// and their operands marked unused so DCE cascades on later iterations of
// this same backward walk.
void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALARG_TP volatileVars)
// Don't kill volatile vars and vars in scope.
VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, block->bbScope));
noway_assert(VarSetOps::IsSubset(this, keepAliveVars, life));
LIR::Range& blockRange = LIR::AsRange(block);
GenTree* firstNonPhiNode = blockRange.FirstNonPhiNode();
if (firstNonPhiNode == nullptr)
// Capture gtPrev before any removal so the walk survives node deletion.
for (GenTree *node = blockRange.LastNode(), *next = nullptr, *end = firstNonPhiNode->gtPrev; node != end;
next = node->gtPrev;
switch (node->OperGet())
GenTreeCall* const call = node->AsCall();
// A void or unused-value call with no side effects is removable.
if (((call->TypeGet() == TYP_VOID) || call->IsUnusedValue()) && !call->HasSideEffects(this))
JITDUMP("Removing dead call:\n");
node->VisitOperands([](GenTree* operand) -> GenTree::VisitResult {
if (operand->IsValue())
operand->SetUnusedValue();
// Special-case PUTARG_STK: since this operator is not considered a value, DCE will not remove
if (operand->OperIs(GT_PUTARG_STK))
operand->AsPutArgStk()->gtOp1->SetUnusedValue();
operand->gtBashToNOP();
return GenTree::VisitResult::Continue;
blockRange.Remove(node);
// Removing a call does not affect liveness unless it is a tail call in a nethod with P/Invokes or
// is itself a P/Invoke, in which case it may affect the liveness of the frame root variable.
if (!opts.MinOpts() && !opts.ShouldUsePInvokeHelpers() &&
((call->IsTailCall() && info.compCallUnmanaged) || call->IsUnmanaged()) &&
lvaTable[info.compLvFrameListRoot].lvTracked)
fgStmtRemoved = true;
// Live call: account for the frame-root local it may use.
fgComputeLifeCall(life, call);
GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon();
LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum];
if (node->IsUnusedValue())
JITDUMP("Removing dead LclVar use:\n");
DISPNODE(lclVarNode);
blockRange.Delete(this, block, node);
if (varDsc.lvTracked && !opts.MinOpts())
fgStmtRemoved = true;
else if (varDsc.lvTracked)
fgComputeLifeTrackedLocalUse(life, varDsc, lclVarNode);
fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode);
case GT_LCL_VAR_ADDR:
case GT_LCL_FLD_ADDR:
if (node->IsUnusedValue())
JITDUMP("Removing dead LclVar address:\n");
// Read lvTracked before Delete, which frees the node.
const bool isTracked = lvaTable[node->AsLclVarCommon()->gtLclNum].lvTracked;
blockRange.Delete(this, block, node);
if (isTracked && !opts.MinOpts())
fgStmtRemoved = true;
isDeadStore = fgComputeLifeLocal(life, keepAliveVars, node);
// Dead store through an address: the STOREIND user dies with it.
if (blockRange.TryGetUse(node, &addrUse) && (addrUse.User()->OperGet() == GT_STOREIND))
// Remove the store. DCE will iteratively clean up any ununsed operands.
GenTreeStoreInd* const store = addrUse.User()->AsStoreInd();
JITDUMP("Removing dead indirect store:\n");
assert(store->Addr() == node);
blockRange.Delete(this, block, node);
store->Data()->SetUnusedValue();
blockRange.Remove(store);
assert(!opts.MinOpts());
fgStmtRemoved = true;
case GT_STORE_LCL_VAR:
case GT_STORE_LCL_FLD:
GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon();
LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum];
if (varDsc.lvTracked)
isDeadStore = fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode);
JITDUMP("Removing dead store:\n");
DISPNODE(lclVarNode);
// Remove the store. DCE will iteratively clean up any ununsed operands.
lclVarNode->gtOp1->SetUnusedValue();
// If the store is marked as a late argument, it is referenced by a call. Instead of removing
// it, bash it to a NOP.
if ((node->gtFlags & GTF_LATE_ARG) != 0)
JITDUMP("node is a late arg; replacing with NOP\n");
node->gtBashToNOP();
// NOTE: this is a bit of a hack. We need to keep these nodes around as they are
// referenced by the call, but they're considered side-effect-free non-value-producing
// nodes, so they will be removed if we don't do this.
node->gtFlags |= GTF_ORDER_SIDEEFF;
blockRange.Remove(node);
assert(!opts.MinOpts());
fgStmtRemoved = true;
fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode);
case GT_CLS_VAR_ADDR:
// These are all side-effect-free leaf nodes.
if (node->IsUnusedValue())
JITDUMP("Removing dead node:\n");
blockRange.Remove(node);
case GT_MEMORYBARRIER:
case GT_ARR_BOUNDS_CHECK:
case GT_STORE_DYN_BLK:
#if defined(FEATURE_SIMD)
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
case GT_HW_INTRINSIC_CHK:
#endif // FEATURE_HW_INTRINSICS
case GT_START_NONGC:
case GT_START_PREEMPTGC:
#if !FEATURE_EH_FUNCLETS
#endif // !FEATURE_EH_FUNCLETS
case GT_SWITCH_TABLE:
case GT_PINVOKE_PROLOG:
case GT_PINVOKE_EPILOG:
#ifdef FEATURE_HW_INTRINSICS
case GT_HWIntrinsic:
#endif // FEATURE_HW_INTRINSICS
// Never remove these nodes, as they are always side-effecting.
// NOTE: the only side-effect of some of these nodes (GT_CMP, GT_SUB_HI) is a write to the flags
// Properly modeling this would allow these nodes to be removed.
// NOTE: we need to keep some NOPs around because they are referenced by calls. See the dead store
// removal code above (case GT_STORE_LCL_VAR) for more explanation.
if ((node->gtFlags & GTF_ORDER_SIDEEFF) != 0)
// Default: generic value node. Locals were all handled by cases above.
assert(!node->OperIsLocal());
if (!node->IsValue() || node->IsUnusedValue())
// We are only interested in avoiding the removal of nodes with direct side-effects
// (as opposed to side effects of their children).
// This default case should never include calls or assignments.
assert(!node->OperRequiresAsgFlag() && !node->OperIs(GT_CALL));
if (!node->gtSetFlags() && !node->OperMayThrow(this))
JITDUMP("Removing dead node:\n");
node->VisitOperands([](GenTree* operand) -> GenTree::VisitResult {
operand->SetUnusedValue();
return GenTree::VisitResult::Continue;
blockRange.Remove(node);
2110 // fgRemoveDeadStore - remove a store to a local which has no exposed uses.
2112 // pTree - GenTree** to local, including store-form local or local addr (post-rationalize)
2113 // varDsc - var that is being stored to
2114 // life - current live tracked vars (maintained as we walk backwards)
2115 // doAgain - out parameter, true if we should restart the statement
2116 // pStmtInfoDirty - should defer the cost computation to the point after the reverse walk is completed?
2118 // Returns: true if we should skip the rest of the statement, false if we should continue
// fgRemoveDeadStore: remove a store to a local that liveness has proven dead
// (non-LIR form only). Locates the assignment that uses *pTree, then either
// removes the whole statement, replaces it with its extracted side effects,
// or bashes the interior assignment to a NOP/COMMA of side effects.
// Returns true if the caller should skip the rest of the statement.
bool Compiler::fgRemoveDeadStore(GenTree** pTree,
VARSET_VALARG_TP life,
bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
assert(!compRationalIRForm);
// Vars should have already been checked for address exposure by this point.
assert(!varDsc->lvIsStructField || !lvaTable[varDsc->lvParentLcl].lvAddrExposed);
assert(!varDsc->lvAddrExposed);
GenTree* asgNode = nullptr;
GenTree* rhsNode = nullptr;
GenTree* addrNode = nullptr;
GenTree* const tree = *pTree;
GenTree* nextNode = tree->gtNext;
// First, characterize the lclVarTree and see if we are taking its address.
if (tree->OperIsLocalStore())
rhsNode = tree->gtOp.gtOp1;
else if (tree->OperIsLocal())
if (nextNode == nullptr)
if (nextNode->OperGet() == GT_ADDR)
addrNode = nextNode;
nextNode = nextNode->gtNext;
assert(tree->OperIsLocalAddr());
// Next, find the assignment.
if (asgNode == nullptr)
if (addrNode == nullptr)
else if (asgNode == nullptr)
// This may be followed by GT_IND/assign or GT_STOREIND.
if (nextNode == nullptr)
if (nextNode->OperIsIndir())
// This must be a non-nullcheck form of indir, or it would not be a def.
assert(nextNode->OperGet() != GT_NULLCHECK);
if (nextNode->OperIsStore())
if (asgNode->OperIsBlk())
rhsNode = asgNode->AsBlk()->Data();
// TODO-1stClassStructs: There should be an else clause here to handle
// the non-block forms of store ops (GT_STORE_LCL_VAR, etc.) for which
// rhsNode is op1. (This isn't really a 1stClassStructs item, but the
// above was added to catch what used to be dead block ops, and that
// made this omission apparent.)
asgNode = nextNode->gtNext;
if (asgNode == nullptr)
if (asgNode->OperIs(GT_ASG))
rhsNode = asgNode->gtGetOp2();
else if (rhsNode == nullptr)
// Only proceed when we found a genuine assignment with a RHS.
if (asgNode && (asgNode->gtFlags & GTF_ASG))
noway_assert(rhsNode);
noway_assert(tree->gtFlags & GTF_VAR_DEF);
assert(asgNode->OperIs(GT_ASG));
// Do not remove if this local variable represents
// a promoted struct field of an address exposed local.
if (varDsc->lvIsStructField && lvaTable[varDsc->lvParentLcl].lvAddrExposed)
// Do not remove if the address of the variable has been exposed.
if (varDsc->lvAddrExposed)
/* Test for interior statement */
if (asgNode->gtNext == nullptr)
/* This is a "NORMAL" statement with the
* assignment node hanging from the GT_STMT node */
noway_assert(compCurStmt->gtStmt.gtStmtExpr == asgNode);
JITDUMP("top level assign\n");
/* Check for side effects */
if (rhsNode->gtFlags & GTF_SIDE_EFFECT)
EXTRACT_SIDE_EFFECTS:
/* Extract the side effects */
GenTree* sideEffList = nullptr;
printf(FMT_BB " - Dead assignment has side effects...\n", compCurBB->bbNum);
gtDispTree(asgNode);
if (rhsNode->TypeGet() == TYP_STRUCT)
// This is a block assignment. An indirection of the rhs is not considered to
// happen until the assignment, so we will extract the side effects from only
if (rhsNode->OperIsIndir())
assert(rhsNode->OperGet() != GT_NULLCHECK);
rhsNode = rhsNode->AsIndir()->Addr();
gtExtractSideEffList(rhsNode, &sideEffList);
noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
printf("Extracted side effects list...\n");
gtDispTree(sideEffList);
/* Replace the assignment statement with the list of side effects */
noway_assert(sideEffList->gtOper != GT_STMT);
*pTree = compCurStmt->gtStmt.gtStmtExpr = sideEffList;
/* Update ordering, costs, FP levels, etc. */
gtSetStmtInfo(compCurStmt);
/* Re-link the nodes for this statement */
fgSetStmtSeq(compCurStmt);
// Since the whole statement gets replaced it is safe to
// re-thread and update order. No need to compute costs again.
*pStmtInfoDirty = false;
/* Compute the live set for the new statement */
/* No side effects, most likely we forgot to reset some flags */
fgRemoveStmt(compCurBB, compCurStmt);
/* If this is GT_CATCH_ARG saved to a local var don't bother */
JITDUMP("removing stmt with no side effects\n");
if (asgNode->gtFlags & GTF_ORDER_SIDEEFF)
// GT_CATCH_ARG carries only an ordering side effect; still must extract it.
if (rhsNode->gtOper == GT_CATCH_ARG)
goto EXTRACT_SIDE_EFFECTS;
/* No side effects - remove the whole statement from the block->bbTreeList */
fgRemoveStmt(compCurBB, compCurStmt);
/* Since we removed it do not process the rest (i.e. RHS) of the statement
* variables in the RHS will not be marked as live, so we get the benefit of
* propagating dead variables up the chain */
/* This is an INTERIOR STATEMENT with a dead assignment - remove it */
noway_assert(!VarSetOps::IsMember(this, life, varDsc->lvVarIndex));
if (rhsNode->gtFlags & GTF_SIDE_EFFECT)
/* :-( we have side effects */
GenTree* sideEffList = nullptr;
printf(FMT_BB " - INTERIOR dead assignment has side effects...\n", compCurBB->bbNum);
gtDispTree(asgNode);
gtExtractSideEffList(rhsNode, &sideEffList);
goto NO_SIDE_EFFECTS;
noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
printf("Extracted side effects list from condition...\n");
gtDispTree(sideEffList);
// If the extracted list has the same oper, reuse the asgNode in place.
if (sideEffList->gtOper == asgNode->gtOper)
asgNode->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
asgNode->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
asgNode->gtType = sideEffList->gtType;
/* Change the node to a GT_COMMA holding the side effect list */
asgNode->gtBashToNOP();
asgNode->ChangeOper(GT_COMMA);
asgNode->gtFlags |= sideEffList->gtFlags & GTF_ALL_EFFECT;
if (sideEffList->gtOper == GT_COMMA)
asgNode->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
asgNode->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
asgNode->gtOp.gtOp1 = sideEffList;
asgNode->gtOp.gtOp2 = gtNewNothingNode();
printf("\nRemoving tree ");
printTreeID(asgNode);
printf(" in " FMT_BB " as useless\n", compCurBB->bbNum);
gtDispTree(asgNode);
/* No side effects - Change the assignment to a GT_NOP node */
asgNode->gtBashToNOP();
/* Re-link the nodes for this statement - Do not update ordering! */
// Do not update costs by calling gtSetStmtInfo. fgSetStmtSeq modifies
// the tree threading based on the new costs. Removing nodes could
// cause a subtree to get evaluated first (earlier second) during the
// liveness walk. Instead just set a flag that costs are dirty and
// caller has to call gtSetStmtInfo.
*pStmtInfoDirty = true;
fgSetStmtSeq(compCurStmt);
/* Continue analysis from this node */
2450 /*****************************************************************************
2452 * Iterative data flow for live variable info and availability of range
2453 * check index expressions.
// fgInterBlockLocalVarLiveness:
//   Inter-block liveness driver. Runs the global live-variable dataflow
//   (fgLiveVarAnalysis), optionally extends lifetimes for debuggable code,
//   marks EH-exposed locals (DoNotEnregister / lvMustInit), and then walks
//   each block's statements backwards with fgComputeLife, which can remove
//   dead stores and thereby shrink bbLiveIn.
//
// NOTE(review): this extraction is elided — the leading per-line numbers and
// the missing brace/#ifdef lines are artifacts of the extraction, not code.
2455 void Compiler::fgInterBlockLocalVarLiveness()
2460 printf("*************** In fgInterBlockLocalVarLiveness()\n");
2464 /* This global flag is set whenever we remove a statement */
2466 fgStmtRemoved = false;
2468 // keep track if a bbLiveIn changed due to dead store removal
2469 fgLocalVarLivenessChanged = false;
2471 /* Compute the IN and OUT sets for tracked variables */
2473 fgLiveVarAnalysis();
2475 /* For debuggable code, we mark vars as live over their entire
2476 * reported scope, so that it will be visible over the entire scope
2479 if (opts.compDbgCode && (info.compVarScopesCount > 0))
2481 fgExtendDbgLifetimes();
2484 // Nothing more to be done if the backend does not require accurate local var lifetimes.
2485 if (!backendRequiresLocalVarLifetimes())
2487 fgLocalVarLivenessDone = true;
2491 /*-------------------------------------------------------------------------
2492 * Variables involved in exception-handlers and finally blocks need
2493 * to be specially marked
// Pass 1 over the flow graph: collect the union of variables that are live
// across EH boundaries, split by how they are exposed (handler entry,
// filter exit, finally exit).
2497 VARSET_TP exceptVars(VarSetOps::MakeEmpty(this)); // vars live on entry to a handler
2498 VARSET_TP finallyVars(VarSetOps::MakeEmpty(this)); // vars live on exit of a 'finally' block
2499 VARSET_TP filterVars(VarSetOps::MakeEmpty(this)); // vars live on exit from a 'filter'
2501 for (block = fgFirstBB; block; block = block->bbNext)
2503 if (block->bbCatchTyp != BBCT_NONE)
2505 /* Note the set of variables live on entry to exception handler */
2506 VarSetOps::UnionD(this, exceptVars, block->bbLiveIn);
2509 if (block->bbJumpKind == BBJ_EHFILTERRET)
2511 /* Get the set of live variables on exit from a 'filter' */
2512 VarSetOps::UnionD(this, filterVars, block->bbLiveOut);
2514 else if (block->bbJumpKind == BBJ_EHFINALLYRET)
2516 /* Get the set of live variables on exit from a 'finally' block */
2518 VarSetOps::UnionD(this, finallyVars, block->bbLiveOut);
2520 #if FEATURE_EH_FUNCLETS
2521 // Funclets are called and returned from, as such we can only count on the frame
2522 // pointer being restored, and thus everything live in or live out must be on the
2524 if (block->bbFlags & BBF_FUNCLET_BEG)
2526 VarSetOps::UnionD(this, exceptVars, block->bbLiveIn);
2528 if ((block->bbJumpKind == BBJ_EHFINALLYRET) || (block->bbJumpKind == BBJ_EHFILTERRET) ||
2529 (block->bbJumpKind == BBJ_EHCATCHRET))
2531 VarSetOps::UnionD(this, exceptVars, block->bbLiveOut);
2533 #endif // FEATURE_EH_FUNCLETS
// Pass 2 over the local-variable table: apply the EH sets to each tracked
// local (untracked and dependently-promoted-field locals are skipped —
// the elided lines presumably 'continue' for those cases).
2539 for (varNum = 0, varDsc = lvaTable; varNum < lvaCount; varNum++, varDsc++)
2541 /* Ignore the variable if it's not tracked */
2543 if (!varDsc->lvTracked)
2548 if (lvaIsFieldOfDependentlyPromotedStruct(varDsc))
2553 /* Un-init locals may need auto-initialization. Note that the
2554 liveness of such locals will bubble to the top (fgFirstBB)
2555 in fgInterBlockLocalVarLiveness() */
// A non-parameter local live on entry to the first block is live before any
// def: force zero-init when .initlocals is set or it holds a GC reference.
2557 if (!varDsc->lvIsParam && VarSetOps::IsMember(this, fgFirstBB->bbLiveIn, varDsc->lvVarIndex) &&
2558 (info.compInitMem || varTypeIsGC(varDsc->TypeGet())))
2560 varDsc->lvMustInit = true;
2563 // Mark all variables that are live on entry to an exception handler
2564 // or on exit from a filter handler or finally as DoNotEnregister */
2566 if (VarSetOps::IsMember(this, exceptVars, varDsc->lvVarIndex) ||
2567 VarSetOps::IsMember(this, filterVars, varDsc->lvVarIndex))
2569 /* Mark the variable appropriately */
2570 lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LiveInOutOfHandler));
2573 /* Mark all pointer variables live on exit from a 'finally'
2574 block as either volatile for non-GC ref types or as
2575 'explicitly initialized' (volatile and must-init) for GC-ref types */
2577 if (VarSetOps::IsMember(this, finallyVars, varDsc->lvVarIndex))
2579 lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LiveInOutOfHandler));
2581 /* Don't set lvMustInit unless we have a non-arg, GC pointer */
// The two early-outs below skip parameters and non-GC types, so only
// non-parameter GC refs live out of a 'finally' reach the lvMustInit store.
2583 if (varDsc->lvIsParam)
2588 if (!varTypeIsGC(varDsc->TypeGet()))
2594 varDsc->lvMustInit = true;
2598 /*-------------------------------------------------------------------------
2599 * Now fill in liveness info within each basic block - Backward DataFlow
// Pass 3: per-block backward walk. 'life' starts as bbLiveOut and is updated
// tree-by-tree; dead stores found along the way may be removed.
2602 for (block = fgFirstBB; block; block = block->bbNext)
2604 /* Tell everyone what block we're working on */
2608 /* Remember those vars live on entry to exception handlers */
2609 /* if we are part of a try block */
2611 VARSET_TP volatileVars(VarSetOps::MakeEmpty(this));
2613 if (ehBlockHasExnFlowDsc(block))
2615 VarSetOps::Assign(this, volatileVars, fgGetHandlerLiveVars(block));
2617 // volatileVars is a subset of exceptVars
2618 noway_assert(VarSetOps::IsSubset(this, volatileVars, exceptVars));
2621 /* Start with the variables live on exit from the block */
2623 VARSET_TP life(VarSetOps::MakeCopy(this, block->bbLiveOut));
2625 /* Mark any interference we might have at the end of the block */
// LIR-form blocks are handled by fgComputeLifeLIR — presumably guarded by an
// elided block->IsLIR() test that bypasses the statement walk below; confirm
// against the full source.
2629 fgComputeLifeLIR(life, block, volatileVars);
2633 /* Get the first statement in the block */
2635 GenTree* firstStmt = block->FirstNonPhiDef();
2637 if (firstStmt == nullptr)
2642 /* Walk all the statements of the block backwards - Get the LAST stmt */
2644 GenTree* nextStmt = block->bbTreeList->gtPrev;
2649 bool treeModf = false;
2651 noway_assert(nextStmt);
2652 noway_assert(nextStmt->gtOper == GT_STMT);
// Iterate from the last statement back to firstStmt (do/while terminated by
// the 'compCurStmt != firstStmt' test below); bbTreeList->gtPrev is the
// circular-list shortcut to the last statement.
2654 compCurStmt = nextStmt;
2655 nextStmt = nextStmt->gtPrev;
2657 /* Compute the liveness for each tree node in the statement */
2658 bool stmtInfoDirty = false;
2660 fgComputeLife(life, compCurStmt->gtStmt.gtStmtExpr, nullptr, volatileVars,
2661 &stmtInfoDirty DEBUGARG(&treeModf));
// If fgComputeLife changed the statement (stmtInfoDirty), recompute costs,
// re-thread the nodes, and refresh side-effect flags.
2665 gtSetStmtInfo(compCurStmt);
2666 fgSetStmtSeq(compCurStmt);
2667 gtUpdateStmtSideEffects(compCurStmt);
2671 if (verbose && treeModf)
2673 printf("\nfgComputeLife modified tree:\n");
2674 gtDispTree(compCurStmt->gtStmt.gtStmtExpr);
2678 } while (compCurStmt != firstStmt);
2681 /* Done with the current block - if we removed any statements, some
2682 * variables may have become dead at the beginning of the block
2683 * -> have to update bbLiveIn */
2685 if (!VarSetOps::Equal(this, life, block->bbLiveIn))
2687 /* some variables have become dead all across the block
2688 So life should be a subset of block->bbLiveIn */
2690 // We changed the liveIn of the block, which may affect liveOut of others,
2691 // which may expose more dead stores.
2692 fgLocalVarLivenessChanged = true;
2694 noway_assert(VarSetOps::IsSubset(this, life, block->bbLiveIn));
2696 /* set the new bbLiveIn */
2698 VarSetOps::Assign(this, block->bbLiveIn, life);
2700 /* compute the new bbLiveOut for all the predecessors of this block */
2703 noway_assert(compCurBB == block);
2705 compCurBB = nullptr;
2709 fgLocalVarLivenessDone = true;
2714 /*****************************************************************************/
// fgDispBBLiveness: Debug dump of one block's liveness.
//   Prints the block number, then the live-in set with its count, then the
//   live-out set with its count; after each variable set it appends " + <kind>"
//   for every memory kind present in bbMemoryLiveIn / bbMemoryLiveOut.
//   'allVars' (liveIn U liveOut) is passed to lvaDispVarSet — presumably so
//   both rows print the sets aligned over the same variable columns; confirm
//   against lvaDispVarSet.
2716 void Compiler::fgDispBBLiveness(BasicBlock* block)
2718 VARSET_TP allVars(VarSetOps::Union(this, block->bbLiveIn, block->bbLiveOut));
2719 printf(FMT_BB, block->bbNum);
2720 printf(" IN (%d)=", VarSetOps::Count(this, block->bbLiveIn));
2721 lvaDispVarSet(block->bbLiveIn, allVars);
2722 for (MemoryKind memoryKind : allMemoryKinds())
2724 if ((block->bbMemoryLiveIn & memoryKindSet(memoryKind)) != 0)
2726 printf(" + %s", memoryKindNames[memoryKind]);
2729 printf("\n OUT(%d)=", VarSetOps::Count(this, block->bbLiveOut));
2730 lvaDispVarSet(block->bbLiveOut, allVars);
2731 for (MemoryKind memoryKind : allMemoryKinds())
2733 if ((block->bbMemoryLiveOut & memoryKindSet(memoryKind)) != 0)
2735 printf(" + %s", memoryKindNames[memoryKind]);
2741 void Compiler::fgDispBBLiveness()
2743 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
2745 fgDispBBLiveness(block);