1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 // =================================================================================
6 // Code that works with liveness and related concepts (interference, debug scope)
7 // =================================================================================
14 #if !defined(_TARGET_64BIT_)
15 #include "decomposelongs.h"
17 #ifndef LEGACY_BACKEND
18 #include "lower.h" // for LowerRange()
21 /*****************************************************************************
23 * Helper for Compiler::fgPerBlockLocalVarLiveness().
24 * The goal is to compute the USE and DEF sets for a basic block.
26 void Compiler::fgMarkUseDef(GenTreeLclVarCommon* tree)
28 assert((tree->OperIsLocal() && (tree->OperGet() != GT_PHI_ARG)) || tree->OperIsLocalAddr());
30 const unsigned lclNum = tree->gtLclNum;
31 assert(lclNum < lvaCount);
33 LclVarDsc* const varDsc = &lvaTable[lclNum];
35 // We should never encounter a reference to a lclVar that has a zero refCnt.
36 if (varDsc->lvRefCnt == 0 && (!varTypeIsPromotable(varDsc) || !varDsc->lvPromoted))
38 JITDUMP("Found reference to V%02u with zero refCnt.\n", lclNum);
39 assert(!"We should never encounter a reference to a lclVar that has a zero refCnt.");
43 const bool isDef = (tree->gtFlags & GTF_VAR_DEF) != 0;
44 const bool isUse = !isDef || ((tree->gtFlags & GTF_VAR_USEASG) != 0);
46 if (varDsc->lvTracked)
48 assert(varDsc->lvVarIndex < lvaTrackedCount);
50 // We don't treat stores to tracked locals as modifications of ByrefExposed memory;
51 // Make sure no tracked local is addr-exposed, to make sure we don't incorrectly CSE byref
52 // loads aliasing it across a store to it.
53 assert(!varDsc->lvAddrExposed);
55 if (isUse && !VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
57 // This is an exposed use; add it to the set of uses.
58 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
63 // This is a def, add it to the set of defs.
64 VarSetOps::AddElemD(this, fgCurDefSet, varDsc->lvVarIndex);
69 if (varDsc->lvAddrExposed)
71 // Reflect the effect on ByrefExposed memory
75 fgCurMemoryUse |= memoryKindSet(ByrefExposed);
79 fgCurMemoryDef |= memoryKindSet(ByrefExposed);
81 // We've found a store that modifies ByrefExposed
82 // memory but not GcHeap memory, so track their
84 byrefStatesMatchGcHeapStates = false;
88 if (varTypeIsStruct(varDsc))
90 lvaPromotionType promotionType = lvaGetPromotionType(varDsc);
92 if (promotionType != PROMOTION_TYPE_NONE)
94 VARSET_TP bitMask(VarSetOps::MakeEmpty(this));
96 for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++i)
98 noway_assert(lvaTable[i].lvIsStructField);
99 if (lvaTable[i].lvTracked)
101 noway_assert(lvaTable[i].lvVarIndex < lvaTrackedCount);
102 VarSetOps::AddElemD(this, bitMask, lvaTable[i].lvVarIndex);
106 // For pure defs (i.e. not an "update" def which is also a use), add to the (all) def set.
110 VarSetOps::UnionD(this, fgCurDefSet, bitMask);
112 else if (!VarSetOps::IsSubset(this, bitMask, fgCurDefSet))
114 // Mark as used any struct fields that are not yet defined.
115 VarSetOps::UnionD(this, fgCurUseSet, bitMask);
122 /*****************************************************************************/
//------------------------------------------------------------------------
// fgLocalVarLiveness: driver for local-variable liveness analysis.
//    Initializes liveness data structures, computes per-block USE/DEF
//    sets, then iterates inter-block dataflow until no more statements
//    are removed and liveness stops changing.
//
// Notes:
//    NOTE(review): lines are elided in this extract (e.g. the 'do {'
//    that pairs with the '} while' below); comments describe only the
//    visible statements.
123 void Compiler::fgLocalVarLiveness()
128 printf("*************** In fgLocalVarLiveness()\n");
130 #ifndef LEGACY_BACKEND
131 if (compRationalIRForm)
135 #endif // !LEGACY_BACKEND
139 // Init liveness data structures.
140 fgLocalVarLivenessInit();
141 assert(lvaSortAgain == false); // Set to false by lvaSortOnly()
143 EndPhase(PHASE_LCLVARLIVENESS_INIT);
145 // Make sure we haven't noted any partial last uses of promoted structs.
146 GetPromotedStructDeathVars()->RemoveAll();
148 // Initialize the per-block var sets.
149 fgInitBlockVarSets();
151 fgLocalVarLivenessChanged = false;
154 /* Figure out use/def info for all basic blocks */
155 fgPerBlockLocalVarLiveness();
156 EndPhase(PHASE_LCLVARLIVENESS_PERBLOCK);
158 /* Live variable analysis. */
160 fgStmtRemoved = false;
161 fgInterBlockLocalVarLiveness();
// Repeat while dead-code removal keeps changing liveness (fixed-point loop).
162 } while (fgStmtRemoved && fgLocalVarLivenessChanged);
164 // If we removed any dead code we will have set 'lvaSortAgain' via decRefCnts
167 JITDUMP("In fgLocalVarLiveness, setting lvaSortAgain back to false (set during dead-code removal)\n");
168 lvaSortAgain = false; // We don't re-Sort because we just performed LclVar liveness.
171 EndPhase(PHASE_LCLVARLIVENESS_INTERBLOCK);
174 /*****************************************************************************/
//------------------------------------------------------------------------
// fgLocalVarLivenessInit: one-time setup before a liveness pass.
//    Re-sorts the local-variable table if needed, (LEGACY_BACKEND only)
//    resets interference sets, and clears all lvMustInit flags.
//
// Notes:
//    NOTE(review): conditional/brace lines are elided in this extract;
//    comments describe only the visible statements.
175 void Compiler::fgLocalVarLivenessInit()
177 // If necessary, re-sort the variable table by ref-count...before creating any varsets using this sorting.
180 JITDUMP("In fgLocalVarLivenessInit, sorting locals\n")
182 assert(lvaSortAgain == false); // Set to false by lvaSortOnly()
185 #ifdef LEGACY_BACKEND // RyuJIT backend does not use interference info
// Clear the per-tracked-var interference sets.
187 for (unsigned i = 0; i < lclMAX_TRACKED; i++)
189 VarSetOps::AssignNoCopy(this, lvaVarIntf[i], VarSetOps::MakeEmpty(this));
192 /* If we're not optimizing at all, things are simple */
// Conservatively mark every tracked var as interfering with every other.
195 VARSET_TP allOnes(VarSetOps::MakeFull(this));
196 for (unsigned i = 0; i < lvaTrackedCount; i++)
198 VarSetOps::Assign(this, lvaVarIntf[i], allOnes);
202 #endif // LEGACY_BACKEND
204 // We mark a lcl as must-init in a first pass of local variable
205 // liveness (Liveness1), then assertion prop eliminates the
206 // uninit-use of a variable Vk, asserting it will be init'ed to
207 // null. Then, in a second local-var liveness (Liveness2), the
208 // variable Vk is no longer live on entry to the method, since its
209 // uses have been replaced via constant propagation.
211 // This leads to a bug: since Vk is no longer live on entry, the
212 // register allocator sees Vk and an argument Vj as having
213 // disjoint lifetimes, and allocates them to the same register.
214 // But Vk is still marked "must-init", and this initialization (of
215 // the register) trashes the value in Vj.
217 // Therefore, initialize must-init to false for all variables in
218 // each liveness phase.
219 for (unsigned lclNum = 0; lclNum < lvaCount; ++lclNum)
221 lvaTable[lclNum].lvMustInit = false;
225 // Note that for the LEGACY_BACKEND this method is replaced with
226 // fgLegacyPerStatementLocalVarLiveness and it lives in codegenlegacy.cpp
228 #ifndef LEGACY_BACKEND
229 //------------------------------------------------------------------------
230 // fgPerNodeLocalVarLiveness:
231 // Set fgCurMemoryUse and fgCurMemoryDef when memory is read or updated
232 // Call fgMarkUseDef for any Local variables encountered
235 // tree - The current node.
//------------------------------------------------------------------------
// fgPerNodeLocalVarLiveness:
//    Per-node worker for fgPerBlockLocalVarLiveness: updates the current
//    memory use/def/havoc sets (fgCurMemoryUse/Def/Havoc) when 'tree'
//    reads or writes memory, and calls fgMarkUseDef for local variables.
//
// Arguments:
//    tree - The current node.
//
// Notes:
//    NOTE(review): several switch-case labels and braces are elided in
//    this extract (e.g. the GT_QMARK/GT_COLON, GT_CLS_VAR, GT_IND and
//    GT_CALL case labels are inferred from the surrounding comments but
//    not visible); comments describe only the visible statements.
237 void Compiler::fgPerNodeLocalVarLiveness(GenTree* tree)
239 assert(tree != nullptr);
241 switch (tree->gtOper)
245 // We never should encounter a GT_QMARK or GT_COLON node
246 noway_assert(!"unexpected GT_QMARK/GT_COLON");
// Direct local references: delegate to fgMarkUseDef.
251 case GT_LCL_VAR_ADDR:
252 case GT_LCL_FLD_ADDR:
253 case GT_STORE_LCL_VAR:
254 case GT_STORE_LCL_FLD:
255 fgMarkUseDef(tree->AsLclVarCommon());
259 // For Volatile indirection, first mutate GcHeap/ByrefExposed.
260 // See comments in ValueNum.cpp (under case GT_CLS_VAR)
261 // This models Volatile reads as def-then-use of memory
262 // and allows for a CSE of a subsequent non-volatile read.
263 if ((tree->gtFlags & GTF_FLD_VOLATILE) != 0)
265 // For any Volatile indirection, we must handle it as a
266 // definition of GcHeap/ByrefExposed
267 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
269 // If the GT_CLS_VAR is the lhs of an assignment, we'll handle it as a GcHeap/ByrefExposed def, when we get
270 // to the assignment.
271 // Otherwise, we treat it as a use here.
272 if ((tree->gtFlags & GTF_CLS_VAR_ASG_LHS) == 0)
274 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
279 // For Volatile indirection, first mutate GcHeap/ByrefExposed
280 // see comments in ValueNum.cpp (under case GT_CLS_VAR)
281 // This models Volatile reads as def-then-use of memory.
282 // and allows for a CSE of a subsequent non-volatile read
283 if ((tree->gtFlags & GTF_IND_VOLATILE) != 0)
285 // For any Volatile indirection, we must handle it as a
286 // definition of the GcHeap/ByrefExposed
287 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
290 // If the GT_IND is the lhs of an assignment, we'll handle it
291 // as a memory def, when we get to assignment.
292 // Otherwise, we treat it as a use here.
293 if ((tree->gtFlags & GTF_IND_ASG_LHS) == 0)
295 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
296 bool dummyIsEntire = false;
// Skip over any leading COMMAs to find the actual address operand.
297 GenTree* addrArg = tree->gtOp.gtOp1->gtEffectiveVal(/*commaOnly*/ true);
298 if (!addrArg->DefinesLocalAddr(this, /*width doesn't matter*/ 0, &dummyLclVarTree, &dummyIsEntire))
300 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
304 // Defines a local addr
// An indirection through a local's address is really a local use/def.
305 assert(dummyLclVarTree != nullptr);
306 fgMarkUseDef(dummyLclVarTree->AsLclVarCommon());
311 // These should have been morphed away to become GT_INDs:
317 // We'll assume these are use-then-defs of memory.
322 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
323 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
324 fgCurMemoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
327 case GT_MEMORYBARRIER:
328 // Simliar to any Volatile indirection, we must handle this as a definition of GcHeap/ByrefExposed
329 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
332 #ifdef FEATURE_HW_INTRINSICS
335 GenTreeHWIntrinsic* hwIntrinsicNode = tree->AsHWIntrinsic();
337 // We can't call fgMutateGcHeap unless the block has recorded a MemoryDef
339 if (hwIntrinsicNode->OperIsMemoryStore())
341 // We currently handle this like a Volatile store, so it counts as a definition of GcHeap/ByrefExposed
342 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
344 if (hwIntrinsicNode->OperIsMemoryLoad())
346 // This instruction loads from memory and we need to record this information
347 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
353 // For now, all calls read/write GcHeap/ByrefExposed, writes in their entirety. Might tighten this case later.
356 GenTreeCall* call = tree->AsCall();
// Pure helper calls (no heap mutation, no cctor side effects) are exempted
// from the conservative heap-havoc treatment below.
358 if (call->gtCallType == CT_HELPER)
360 CorInfoHelpFunc helpFunc = eeGetHelperNum(call->gtCallMethHnd);
362 if (!s_helperCallProperties.MutatesHeap(helpFunc) && !s_helperCallProperties.MayRunCctor(helpFunc))
369 fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed);
370 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
371 fgCurMemoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
375 // If this is a p/invoke unmanaged call or if this is a tail-call
376 // and we have an unmanaged p/invoke call in the method,
377 // then we're going to run the p/invoke epilog.
378 // So we mark the FrameRoot as used by this instruction.
379 // This ensures that the block->bbVarUse will contain
380 // the FrameRoot local var if is it a tracked variable.
382 if ((tree->gtCall.IsUnmanaged() || (tree->gtCall.IsTailCall() && info.compCallUnmanaged)))
384 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
385 if (!opts.ShouldUsePInvokeHelpers())
387 /* Get the TCB local and mark it as used */
389 noway_assert(info.compLvFrameListRoot < lvaCount);
391 LclVarDsc* varDsc = &lvaTable[info.compLvFrameListRoot];
393 if (varDsc->lvTracked)
// Only an upward-exposed use: don't add if already defined in this block.
395 if (!VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
397 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
407 // Determine what memory locations it defines.
408 if (tree->OperIsAssignment() || tree->OperIsBlkOp())
410 GenTreeLclVarCommon* dummyLclVarTree = nullptr;
411 if (tree->DefinesLocal(this, &dummyLclVarTree))
412 if (lvaVarAddrExposed(dummyLclVarTree->gtLclNum))
415 fgCurMemoryDef |= memoryKindSet(ByrefExposed);
417 // We've found a store that modifies ByrefExposed
418 // memory but not GcHeap memory, so track their
419 // states separately.
420 byrefStatesMatchGcHeapStates = false;
425 // If it doesn't define a local, then it might update GcHeap/ByrefExposed.
426 fgCurMemoryDef |= memoryKindSet(GcHeap, ByrefExposed);
433 #endif // !LEGACY_BACKEND
435 /*****************************************************************************/
//------------------------------------------------------------------------
// fgPerBlockLocalVarLiveness: compute bbVarUse/bbVarDef (and the memory
//    use/def/havoc sets) for every basic block.
//
//    If the backend does not require accurate lifetimes, every tracked
//    var is simply marked live everywhere. Otherwise each block's nodes
//    are walked via fgPerNodeLocalVarLiveness (or the legacy per-statement
//    walker) to build the sets.
//
// Notes:
//    NOTE(review): braces, some declarations (e.g. 'block', 'lclNum',
//    'varDsc') and dump-only lines are elided in this extract; comments
//    describe only the visible statements.
436 void Compiler::fgPerBlockLocalVarLiveness()
441 printf("*************** In fgPerBlockLocalVarLiveness()\n");
// Remember the tracked-variable epoch so we can assert it didn't change mid-pass.
445 unsigned livenessVarEpoch = GetCurLVEpoch();
449 // If we don't require accurate local var lifetimes, things are simple.
450 if (!backendRequiresLocalVarLifetimes())
455 VARSET_TP liveAll(VarSetOps::MakeEmpty(this));
457 /* We simply make everything live everywhere */
459 for (lclNum = 0, varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
461 if (varDsc->lvTracked)
463 VarSetOps::AddElemD(this, liveAll, varDsc->lvVarIndex);
467 for (block = fgFirstBB; block; block = block->bbNext)
469 // Strictly speaking, the assignments for the "Def" cases aren't necessary here.
470 // The empty set would do as well. Use means "use-before-def", so as long as that's
471 // "all", this has the right effect.
472 VarSetOps::Assign(this, block->bbVarUse, liveAll);
473 VarSetOps::Assign(this, block->bbVarDef, liveAll);
474 VarSetOps::Assign(this, block->bbLiveIn, liveAll);
475 block->bbMemoryUse = fullMemoryKindSet;
476 block->bbMemoryDef = fullMemoryKindSet;
477 block->bbMemoryLiveIn = fullMemoryKindSet;
478 block->bbMemoryLiveOut = fullMemoryKindSet;
480 switch (block->bbJumpKind)
// Blocks with no ordinary successors get an empty live-out set.
482 case BBJ_EHFINALLYRET:
485 VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
488 VarSetOps::Assign(this, block->bbLiveOut, liveAll);
493 // In minopts, we don't explicitly build SSA or value-number; GcHeap and
494 // ByrefExposed implicitly (conservatively) change state at each instr.
495 byrefStatesMatchGcHeapStates = true;
500 // Avoid allocations in the long case.
501 VarSetOps::AssignNoCopy(this, fgCurUseSet, VarSetOps::MakeEmpty(this));
502 VarSetOps::AssignNoCopy(this, fgCurDefSet, VarSetOps::MakeEmpty(this));
504 // GC Heap and ByrefExposed can share states unless we see a def of byref-exposed
505 // memory that is not a GC Heap def.
506 byrefStatesMatchGcHeapStates = true;
// Main path: walk every block and accumulate its use/def info into the
// fgCur* sets, then copy those into the block.
508 for (block = fgFirstBB; block; block = block->bbNext)
510 VarSetOps::ClearD(this, fgCurUseSet);
511 VarSetOps::ClearD(this, fgCurDefSet);
513 fgCurMemoryUse = emptyMemoryKindSet;
514 fgCurMemoryDef = emptyMemoryKindSet;
515 fgCurMemoryHavoc = emptyMemoryKindSet;
// HIR (statement-form) blocks: walk each statement's linked node list.
520 for (GenTreeStmt* stmt = block->FirstNonPhiDef(); stmt; stmt = stmt->gtNextStmt)
524 #ifdef LEGACY_BACKEND
525 GenTree* tree = fgLegacyPerStatementLocalVarLiveness(stmt->gtStmtList, nullptr);
526 assert(tree == nullptr);
527 #else // !LEGACY_BACKEND
528 for (GenTree* node = stmt->gtStmtList; node != nullptr; node = node->gtNext)
530 fgPerNodeLocalVarLiveness(node);
532 #endif // !LEGACY_BACKEND
// LIR blocks: walk the block's node range directly (skipping PHIs).
537 #ifdef LEGACY_BACKEND
539 #else // !LEGACY_BACKEND
540 for (GenTree* node : LIR::AsRange(block).NonPhiNodes())
542 fgPerNodeLocalVarLiveness(node);
544 #endif // !LEGACY_BACKEND
547 /* Get the TCB local and mark it as used */
// A return from a method with unmanaged calls runs the p/invoke epilog,
// which reads the frame-list-root (TCB) local.
549 if (block->bbJumpKind == BBJ_RETURN && info.compCallUnmanaged)
551 assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
552 if (!opts.ShouldUsePInvokeHelpers())
554 noway_assert(info.compLvFrameListRoot < lvaCount);
556 LclVarDsc* varDsc = &lvaTable[info.compLvFrameListRoot];
558 if (varDsc->lvTracked)
560 if (!VarSetOps::IsMember(this, fgCurDefSet, varDsc->lvVarIndex))
562 VarSetOps::AddElemD(this, fgCurUseSet, varDsc->lvVarIndex);
// Dump the computed sets (debug only).
571 VARSET_TP allVars(VarSetOps::Union(this, fgCurUseSet, fgCurDefSet));
572 printf("BB%02u", block->bbNum);
573 printf(" USE(%d)=", VarSetOps::Count(this, fgCurUseSet));
574 lvaDispVarSet(fgCurUseSet, allVars);
575 for (MemoryKind memoryKind : allMemoryKinds())
577 if ((fgCurMemoryUse & memoryKindSet(memoryKind)) != 0)
579 printf(" + %s", memoryKindNames[memoryKind]);
582 printf("\n DEF(%d)=", VarSetOps::Count(this, fgCurDefSet));
583 lvaDispVarSet(fgCurDefSet, allVars);
584 for (MemoryKind memoryKind : allMemoryKinds())
586 if ((fgCurMemoryDef & memoryKindSet(memoryKind)) != 0)
588 printf(" + %s", memoryKindNames[memoryKind]);
590 if ((fgCurMemoryHavoc & memoryKindSet(memoryKind)) != 0)
// Publish the accumulated per-block sets into the block itself.
599 VarSetOps::Assign(this, block->bbVarUse, fgCurUseSet);
600 VarSetOps::Assign(this, block->bbVarDef, fgCurDefSet);
601 block->bbMemoryUse = fgCurMemoryUse;
602 block->bbMemoryDef = fgCurMemoryDef;
603 block->bbMemoryHavoc = fgCurMemoryHavoc;
605 /* also initialize the IN set, just in case we will do multiple DFAs */
607 VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
608 block->bbMemoryLiveIn = emptyMemoryKindSet;
611 noway_assert(livenessVarEpoch == GetCurLVEpoch());
615 printf("** Memory liveness computed, GcHeap states and ByrefExposed states %s\n",
616 (byrefStatesMatchGcHeapStates ? "match" : "diverge"));
621 // Helper functions to mark variables live over their entire scope
//------------------------------------------------------------------------
// fgBeginScopeLife: a debug scope for 'var' has opened — if the variable
//    is tracked, add it to the 'inScope' set.
623 void Compiler::fgBeginScopeLife(VARSET_TP* inScope, VarScopeDsc* var)
627 LclVarDsc* lclVarDsc1 = &lvaTable[var->vsdVarNum];
629 if (lclVarDsc1->lvTracked)
631 VarSetOps::AddElemD(this, *inScope, lclVarDsc1->lvVarIndex);
//------------------------------------------------------------------------
// fgEndScopeLife: a debug scope for 'var' has closed — if the variable
//    is tracked, remove it from the 'inScope' set.
635 void Compiler::fgEndScopeLife(VARSET_TP* inScope, VarScopeDsc* var)
639 LclVarDsc* lclVarDsc1 = &lvaTable[var->vsdVarNum];
641 if (lclVarDsc1->lvTracked)
643 VarSetOps::RemoveElemD(this, *inScope, lclVarDsc1->lvVarIndex);
647 /*****************************************************************************/
//------------------------------------------------------------------------
// fgMarkInScope: record 'inScope' as the block's debug-scope set and
//    artificially mark those variables as used and live through the block
//    so debug code keeps them alive over their entire reported scope.
649 void Compiler::fgMarkInScope(BasicBlock* block, VARSET_VALARG_TP inScope)
654 printf("Scope info: block BB%02u marking in scope: ", block->bbNum);
655 dumpConvertedVarSet(this, inScope);
660 /* Record which vars are artifically kept alive for debugging */
662 VarSetOps::Assign(this, block->bbScope, inScope);
664 /* Being in scope implies a use of the variable. Add the var to bbVarUse
665 so that redoing fgLiveVarAnalysis() will work correctly */
667 VarSetOps::UnionD(this, block->bbVarUse, inScope);
669 /* Artifically mark all vars in scope as alive */
671 VarSetOps::UnionD(this, block->bbLiveIn, inScope);
672 VarSetOps::UnionD(this, block->bbLiveOut, inScope);
//------------------------------------------------------------------------
// fgUnmarkInScope: inverse of fgMarkInScope — remove 'unmarkScope' (which
//    must be a subset of the block's current scope set) from the block's
//    scope, use, live-in and live-out sets.
675 void Compiler::fgUnmarkInScope(BasicBlock* block, VARSET_VALARG_TP unmarkScope)
680 printf("Scope info: block BB%02u UNmarking in scope: ", block->bbNum);
681 dumpConvertedVarSet(this, unmarkScope);
686 assert(VarSetOps::IsSubset(this, unmarkScope, block->bbScope));
688 VarSetOps::DiffD(this, block->bbScope, unmarkScope);
689 VarSetOps::DiffD(this, block->bbVarUse, unmarkScope);
690 VarSetOps::DiffD(this, block->bbLiveIn, unmarkScope);
691 VarSetOps::DiffD(this, block->bbLiveOut, unmarkScope);
//------------------------------------------------------------------------
// fgDispDebugScopes: dump each block's debug-scope variable set.
//    (Debug/dump helper; 'block' is declared in an elided line.)
696 void Compiler::fgDispDebugScopes()
698 printf("\nDebug scopes:\n");
701 for (block = fgFirstBB; block; block = block->bbNext)
703 printf("BB%02u: ", block->bbNum);
704 dumpConvertedVarSet(this, block->bbScope);
711 /*****************************************************************************
713 * Mark variables live across their entire scope.
716 #if FEATURE_EH_FUNCLETS
//------------------------------------------------------------------------
// fgExtendDbgScopes (funclet variant): mark all tracked locals as live
//    over their entire reported scope. Walks the blocks in order, keeping
//    a running 'inScope' set that is advanced by IL offset and assigned
//    to each block via fgMarkInScope.
718 void Compiler::fgExtendDbgScopes()
720 compResetScopeLists();
725 printf("\nMarking vars alive over their entire scope :\n\n");
730 compDispScopeLists();
734 VARSET_TP inScope(VarSetOps::MakeEmpty(this));
736 // Mark all tracked LocalVars live over their scope - walk the blocks
737 // keeping track of the current life, and assign it to the blocks.
739 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
741 // If we get to a funclet, reset the scope lists and start again, since the block
742 // offsets will be out of order compared to the previous block.
744 if (block->bbFlags & BBF_FUNCLET_BEG)
746 compResetScopeLists();
747 VarSetOps::ClearD(this, inScope);
750 // Process all scopes up to the current offset
// Blocks with no IL offset (BAD_IL_OFFSET) keep the previous scope set.
752 if (block->bbCodeOffs != BAD_IL_OFFSET)
754 compProcessScopesUntil(block->bbCodeOffs, &inScope, &Compiler::fgBeginScopeLife, &Compiler::fgEndScopeLife);
757 // Assign the current set of variables that are in scope to the block variables tracking this.
759 fgMarkInScope(block, inScope);
770 #else // !FEATURE_EH_FUNCLETS
//------------------------------------------------------------------------
// fgExtendDbgScopes (non-funclet variant): mark all tracked locals as
//    live over their entire reported scope. Walks blocks in IL order,
//    entering/exiting scopes at block begin/end offsets and assigning the
//    running 'inScope' set to each block via fgMarkInScope.
772 void Compiler::fgExtendDbgScopes()
774 compResetScopeLists();
779 printf("\nMarking vars alive over their entire scope :\n\n");
780 compDispScopeLists();
784 VARSET_TP inScope(VarSetOps::MakeEmpty(this));
// Open any scopes that begin at IL offset 0 before the first block.
785 compProcessScopesUntil(0, &inScope, &Compiler::fgBeginScopeLife, &Compiler::fgEndScopeLife);
787 IL_OFFSET lastEndOffs = 0;
789 // Mark all tracked LocalVars live over their scope - walk the blocks
790 // keeping track of the current life, and assign it to the blocks.
793 for (block = fgFirstBB; block; block = block->bbNext)
795 // Find scopes becoming alive. If there is a gap in the instr
796 // sequence, we need to process any scopes on those missing offsets.
798 if (block->bbCodeOffs != BAD_IL_OFFSET)
800 if (lastEndOffs != block->bbCodeOffs)
802 noway_assert(lastEndOffs < block->bbCodeOffs);
804 compProcessScopesUntil(block->bbCodeOffs, &inScope, &Compiler::fgBeginScopeLife,
805 &Compiler::fgEndScopeLife);
// No gap: just open the scopes that start exactly at this block's offset.
809 while (VarScopeDsc* varScope = compGetNextEnterScope(block->bbCodeOffs))
811 fgBeginScopeLife(&inScope, varScope);
816 // Assign the current set of variables that are in scope to the block variables tracking this.
818 fgMarkInScope(block, inScope);
820 // Find scopes going dead.
822 if (block->bbCodeOffsEnd != BAD_IL_OFFSET)
824 VarScopeDsc* varScope;
825 while ((varScope = compGetNextExitScope(block->bbCodeOffsEnd)) != nullptr)
827 fgEndScopeLife(&inScope, varScope);
830 lastEndOffs = block->bbCodeOffsEnd;
834 /* Everything should be out of scope by the end of the method. But if the
835 last BB got removed, then inScope may not be empty. */
837 noway_assert(VarSetOps::IsEmpty(this, inScope) || lastEndOffs < info.compILCodeSize);
840 #endif // !FEATURE_EH_FUNCLETS
842 /*****************************************************************************
844 * For debuggable code, we allow redundant assignments to vars
845 * by marking them live over their entire scope.
//------------------------------------------------------------------------
// fgExtendDbgLifetimes: for debuggable code, keep variables alive over
//    their entire reported scope, and insert zero-initializations for
//    variables that become artificially live before the IL initializes
//    them (so the debugger never sees garbage).
//
// Notes:
//    NOTE(review): braces, some case labels (e.g. the switch appears to
//    cover BBJ_NONE/BBJ_COND/BBJ_SWITCH among the visible ones) and other
//    lines are elided in this extract; comments describe only the visible
//    statements.
848 void Compiler::fgExtendDbgLifetimes()
853 printf("*************** In fgExtendDbgLifetimes()\n");
857 noway_assert(opts.compDbgCode && (info.compVarScopesCount > 0));
859 /*-------------------------------------------------------------------------
860 * Extend the lifetimes over the entire reported scope of the variable.
865 /*-------------------------------------------------------------------------
866 * Partly update liveness info so that we handle any funky BBF_INTERNAL
867 * blocks inserted out of sequence.
877 fgLiveVarAnalysis(true);
879 /* For compDbgCode, we prepend an empty BB which will hold the
880 initializations of variables which are in scope at IL offset 0 (but
881 not initialized by the IL code). Since they will currently be
882 marked as live on entry to fgFirstBB, unmark the liveness so that
883 the following code will know to add the initializations. */
885 assert(fgFirstBBisScratch());
// Collect the tracked parameters (or their promoted field, for independently
// promoted args) — these must NOT be unmarked, since args are live on entry.
887 VARSET_TP trackedArgs(VarSetOps::MakeEmpty(this));
889 for (unsigned argNum = 0; argNum < info.compArgsCount; argNum++)
891 LclVarDsc* argDsc = lvaTable + argNum;
892 if (argDsc->lvPromoted)
894 lvaPromotionType promotionType = lvaGetPromotionType(argDsc);
896 if (promotionType == PROMOTION_TYPE_INDEPENDENT)
898 noway_assert(argDsc->lvFieldCnt == 1); // We only handle one field here
900 unsigned fieldVarNum = argDsc->lvFieldLclStart;
901 argDsc = lvaTable + fieldVarNum;
904 noway_assert(argDsc->lvIsParam);
905 if (argDsc->lvTracked)
907 noway_assert(!VarSetOps::IsMember(this, trackedArgs, argDsc->lvVarIndex)); // Each arg should define a
909 VarSetOps::AddElemD(this, trackedArgs, argDsc->lvVarIndex);
913 // Don't unmark struct locals, either.
914 VARSET_TP noUnmarkVars(trackedArgs);
916 for (unsigned i = 0; i < lvaCount; i++)
918 LclVarDsc* varDsc = &lvaTable[i];
919 if (varTypeIsStruct(varDsc) && varDsc->lvTracked)
921 VarSetOps::AddElemD(this, noUnmarkVars, varDsc->lvVarIndex);
// Unmark everything in the scratch block's scope except args/struct locals.
924 fgUnmarkInScope(fgFirstBB, VarSetOps::Diff(this, fgFirstBB->bbScope, noUnmarkVars));
926 /*-------------------------------------------------------------------------
927 * As we keep variables artifically alive over their entire scope,
928 * we need to also artificially initialize them if the scope does
929 * not exactly match the real lifetimes, or they will contain
930 * garbage until they are initialized by the IL code.
933 VARSET_TP initVars(VarSetOps::MakeEmpty(this)); // Vars which are artificially made alive
935 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
937 VarSetOps::ClearD(this, initVars);
// Gather the scope sets of all successors reachable from this block,
// depending on how the block ends.
939 switch (block->bbJumpKind)
942 PREFIX_ASSUME(block->bbNext != nullptr);
943 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
948 case BBJ_EHFILTERRET:
949 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
952 case BBJ_CALLFINALLY:
953 if (!(block->bbFlags & BBF_RETLESS_CALL))
955 assert(block->isBBCallAlwaysPair());
956 PREFIX_ASSUME(block->bbNext != nullptr);
957 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
959 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
963 PREFIX_ASSUME(block->bbNext != nullptr);
964 VarSetOps::UnionD(this, initVars, block->bbNext->bbScope);
965 VarSetOps::UnionD(this, initVars, block->bbJumpDest->bbScope);
973 jmpCnt = block->bbJumpSwt->bbsCount;
974 jmpTab = block->bbJumpSwt->bbsDstTab;
978 VarSetOps::UnionD(this, initVars, (*jmpTab)->bbScope);
979 } while (++jmpTab, --jmpCnt);
983 case BBJ_EHFINALLYRET:
988 /* We don't have to do anything as we mark
989 * all vars live on entry to a catch handler as
// (used — per the surrounding comment; closing text elided.)
995 noway_assert(!"Unexpected bbJumpKind");
999 /* If the var is already live on entry to the current BB,
1000 we would have already initialized it. So ignore bbLiveIn */
1002 VarSetOps::DiffD(this, initVars, block->bbLiveIn);
1004 /* Add statements initializing the vars, if there are any to initialize */
1005 unsigned blockWeight = block->getBBWeight(this);
1007 VarSetOps::Iter iter(this, initVars);
1008 unsigned varIndex = 0;
1009 while (iter.NextElem(&varIndex))
1011 /* Create initialization tree */
1013 unsigned varNum = lvaTrackedToVarNum[varIndex];
1014 LclVarDsc* varDsc = &lvaTable[varNum];
1015 var_types type = varDsc->TypeGet();
1017 // Don't extend struct lifetimes -- they aren't enregistered, anyway.
1018 if (type == TYP_STRUCT)
1023 // If we haven't already done this ...
1024 if (!fgLocalVarLivenessDone)
1026 // Create a "zero" node
1027 GenTree* zero = gtNewZeroConNode(genActualType(type));
1029 // Create initialization node
// HIR path: build "lcl = 0" as a statement and append near the block end.
1030 if (!block->IsLIR())
1032 GenTree* varNode = gtNewLclvNode(varNum, type);
1033 GenTree* initNode = gtNewAssignNode(varNode, zero);
1035 // Create a statement for the initializer, sequence it, and append it to the current BB.
1036 GenTree* initStmt = gtNewStmt(initNode);
1037 gtSetStmtInfo(initStmt);
1038 fgSetStmtSeq(initStmt);
1039 fgInsertStmtNearEnd(block, initStmt);
// LIR path: build a GT_STORE_LCL_VAR, decompose (32-bit) and lower it,
// then insert it before the block terminator.
1044 new (this, GT_STORE_LCL_VAR) GenTreeLclVar(GT_STORE_LCL_VAR, type, varNum, BAD_IL_OFFSET);
1045 store->gtOp.gtOp1 = zero;
1046 store->gtFlags |= (GTF_VAR_DEF | GTF_ASG);
1048 LIR::Range initRange = LIR::EmptyRange();
1049 initRange.InsertBefore(nullptr, zero, store);
1051 #ifndef LEGACY_BACKEND
1052 #if !defined(_TARGET_64BIT_)
1053 DecomposeLongs::DecomposeRange(this, blockWeight, initRange);
1054 #endif // !defined(_TARGET_64BIT_)
1055 m_pLowering->LowerRange(block, initRange);
1056 #endif // !LEGACY_BACKEND
1058 // Naively inserting the initializer at the end of the block may add code after the block's
1059 // terminator, in which case the inserted code will never be executed (and the IR for the
1060 // block will be invalid). Use `LIR::InsertBeforeTerminator` to avoid this problem.
1061 LIR::InsertBeforeTerminator(block, std::move(initRange));
1067 printf("Created zero-init of V%02u in BB%02u\n", varNum, block->bbNum);
1071 varDsc->incRefCnts(block->getBBWeight(this), this);
1073 block->bbFlags |= BBF_CHANGED; // indicates that the contents of the block have changed.
1076 /* Update liveness information so that redoing fgLiveVarAnalysis()
1077 will work correctly if needed */
1079 VarSetOps::AddElemD(this, block->bbVarDef, varIndex);
1080 VarSetOps::AddElemD(this, block->bbLiveOut, varIndex);
1084 // raMarkStkVars() reserves stack space for unused variables (which
1085 // needs to be initialized). However, arguments don't need to be initialized.
1086 // So just ensure that they don't have a 0 ref cnt
1088 unsigned lclNum = 0;
1089 for (LclVarDsc *varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
1091 if (varDsc->lvRefCnt == 0 && varDsc->lvIsRegArg)
1093 varDsc->lvRefCnt = 1;
1100 printf("\nBB liveness after fgExtendDbgLifetimes():\n\n");
//------------------------------------------------------------------------
// fgGetHandlerLiveVars: return the set of variables live on entry to any
//    exception handler reachable from 'block', walking outward through
//    enclosing try regions.
//
// Arguments:
//    block - a block with exception flow (asserted below).
//
// Notes:
//    NOTE(review): loop braces are elided in this extract; the walk over
//    enclosing handlers (via ebdEnclosingTryIndex) is visible but its
//    loop construct is not.
1107 VARSET_VALRET_TP Compiler::fgGetHandlerLiveVars(BasicBlock* block)
1109 noway_assert(block);
1110 noway_assert(ehBlockHasExnFlowDsc(block));
1112 VARSET_TP liveVars(VarSetOps::MakeEmpty(this));
1113 EHblkDsc* HBtab = ehGetBlockExnFlowDsc(block);
1117 /* Either we enter the filter first or the catch/finally */
1119 if (HBtab->HasFilter())
1121 VarSetOps::UnionD(this, liveVars, HBtab->ebdFilter->bbLiveIn);
1122 #if FEATURE_EH_FUNCLETS
1123 // The EH subsystem can trigger a stack walk after the filter
1124 // has returned, but before invoking the handler, and the only
1125 // IP address reported from this method will be the original
1126 // faulting instruction, thus everything in the try body
1127 // must report as live any variables live-out of the filter
1128 // (which is the same as those live-in to the handler)
1129 VarSetOps::UnionD(this, liveVars, HBtab->ebdHndBeg->bbLiveIn);
1130 #endif // FEATURE_EH_FUNCLETS
1134 VarSetOps::UnionD(this, liveVars, HBtab->ebdHndBeg->bbLiveIn);
1137 /* If we have nested try's edbEnclosing will provide them */
1138 noway_assert((HBtab->ebdEnclosingTryIndex == EHblkDsc::NO_ENCLOSING_INDEX) ||
1139 (HBtab->ebdEnclosingTryIndex > ehGetIndex(HBtab)));
1141 unsigned outerIndex = HBtab->ebdEnclosingTryIndex;
1142 if (outerIndex == EHblkDsc::NO_ENCLOSING_INDEX)
1146 HBtab = ehGetDsc(outerIndex);
1153 class LiveVarAnalysis
1155 Compiler* m_compiler;
1157 bool m_hasPossibleBackEdge;
1159 unsigned m_memoryLiveIn;
1160 unsigned m_memoryLiveOut;
1162 VARSET_TP m_liveOut;
// Constructor: captures the compiler instance and initializes the scratch
// live-in/live-out sets and memory-liveness accumulators to empty.
1164 LiveVarAnalysis(Compiler* compiler)
1165 : m_compiler(compiler)
1166 , m_hasPossibleBackEdge(false)
1167 , m_memoryLiveIn(emptyMemoryKindSet)
1168 , m_memoryLiveOut(emptyMemoryKindSet)
1169 , m_liveIn(VarSetOps::MakeEmpty(compiler))
1170 , m_liveOut(VarSetOps::MakeEmpty(compiler))
//------------------------------------------------------------------------
// PerBlockAnalysis: one backward-dataflow step for a single block.
//    Computes liveOut as the union of successors' liveIn (plus special
//    cases), derives liveIn, and merges the results into the block.
//
// Arguments:
//    block              - the block to process
//    updateInternalOnly - only extend (never shrink) liveness, and only
//                         for BBF_INTERNAL blocks
//    keepAliveThis      - force the 'this' arg live everywhere
//
// Return Value:
//    true if the block's live-in (variable or memory) changed.
//
// Notes:
//    NOTE(review): some braces/lines are elided in this extract;
//    comments describe only the visible statements.
1174 bool PerBlockAnalysis(BasicBlock* block, bool updateInternalOnly, bool keepAliveThis)
1176 /* Compute the 'liveOut' set */
1177 VarSetOps::ClearD(m_compiler, m_liveOut);
1178 m_memoryLiveOut = emptyMemoryKindSet;
1179 if (block->endsWithJmpMethod(m_compiler))
1181 // A JMP uses all the arguments, so mark them all
1182 // as live at the JMP instruction
1184 const LclVarDsc* varDscEndParams = m_compiler->lvaTable + m_compiler->info.compArgsCount;
1185 for (LclVarDsc* varDsc = m_compiler->lvaTable; varDsc < varDscEndParams; varDsc++)
1187 noway_assert(!varDsc->lvPromoted);
1188 if (varDsc->lvTracked)
1190 VarSetOps::AddElemD(m_compiler, m_liveOut, varDsc->lvVarIndex);
1195 // Additionally, union in all the live-in tracked vars of successors.
1196 for (BasicBlock* succ : block->GetAllSuccs(m_compiler))
1198 VarSetOps::UnionD(m_compiler, m_liveOut, succ->bbLiveIn);
1199 m_memoryLiveOut |= succ->bbMemoryLiveIn;
// A successor numbered at or before this block implies a potential back
// edge, so the caller's fixed-point loop must iterate again.
1200 if (succ->bbNum <= block->bbNum)
1202 m_hasPossibleBackEdge = true;
1206 /* For lvaKeepAliveAndReportThis methods, "this" has to be kept alive everywhere
1207 Note that a function may end in a throw on an infinite loop (as opposed to a return).
1208 "this" has to be alive everywhere even in such methods. */
1212 VarSetOps::AddElemD(m_compiler, m_liveOut, m_compiler->lvaTable[m_compiler->info.compThisArg].lvVarIndex);
1215 /* Compute the 'm_liveIn' set */
// liveIn = use | (liveOut & ~def)  (standard backward liveness transfer).
1216 VarSetOps::LivenessD(m_compiler, m_liveIn, block->bbVarDef, block->bbVarUse, m_liveOut);
1218 // Even if block->bbMemoryDef is set, we must assume that it doesn't kill memory liveness from m_memoryLiveOut,
1219 // since (without proof otherwise) the use and def may touch different memory at run-time.
1220 m_memoryLiveIn = m_memoryLiveOut | block->bbMemoryUse;
1222 /* Can exceptions from this block be handled (in this function)? */
1224 if (m_compiler->ehBlockHasExnFlowDsc(block))
1226 const VARSET_TP& liveVars(m_compiler->fgGetHandlerLiveVars(block));
// Vars live into a reachable handler must stay live throughout this block.
1228 VarSetOps::UnionD(m_compiler, m_liveIn, liveVars);
1229 VarSetOps::UnionD(m_compiler, m_liveOut, liveVars);
1232 /* Has there been any change in either live set? */
1234 bool liveInChanged = !VarSetOps::Equal(m_compiler, block->bbLiveIn, m_liveIn);
1235 if (liveInChanged || !VarSetOps::Equal(m_compiler, block->bbLiveOut, m_liveOut))
1237 if (updateInternalOnly)
1239 // Only "extend" liveness over BBF_INTERNAL blocks
1241 noway_assert(block->bbFlags & BBF_INTERNAL);
// In extend-only mode, "changed" means the new sets are not subsets of
// the existing ones, and the update is a union rather than an assign.
1243 liveInChanged = !VarSetOps::IsSubset(m_compiler, m_liveIn, block->bbLiveIn);
1244 if (liveInChanged || !VarSetOps::IsSubset(m_compiler, m_liveOut, block->bbLiveOut))
1247 if (m_compiler->verbose)
1249 printf("Scope info: block BB%02u LiveIn+ ", block->bbNum);
1250 dumpConvertedVarSet(m_compiler, VarSetOps::Diff(m_compiler, m_liveIn, block->bbLiveIn));
1251 printf(", LiveOut+ ");
1252 dumpConvertedVarSet(m_compiler, VarSetOps::Diff(m_compiler, m_liveOut, block->bbLiveOut));
1257 VarSetOps::UnionD(m_compiler, block->bbLiveIn, m_liveIn);
1258 VarSetOps::UnionD(m_compiler, block->bbLiveOut, m_liveOut);
// Normal mode: overwrite the block's sets with the recomputed values.
1263 VarSetOps::Assign(m_compiler, block->bbLiveIn, m_liveIn);
1264 VarSetOps::Assign(m_compiler, block->bbLiveOut, m_liveOut);
1268 const bool memoryLiveInChanged = (block->bbMemoryLiveIn != m_memoryLiveIn);
1269 if (memoryLiveInChanged || (block->bbMemoryLiveOut != m_memoryLiveOut))
1271 block->bbMemoryLiveIn = m_memoryLiveIn;
1272 block->bbMemoryLiveOut = m_memoryLiveOut;
1275 return liveInChanged || memoryLiveInChanged;
1278 void Run(bool updateInternalOnly)
1280 const bool keepAliveThis =
1281 m_compiler->lvaKeepAliveAndReportThis() && m_compiler->lvaTable[m_compiler->info.compThisArg].lvTracked;
1283 /* Live Variable Analysis - Backward dataflow */
1289 /* Visit all blocks and compute new data flow values */
1291 VarSetOps::ClearD(m_compiler, m_liveIn);
1292 VarSetOps::ClearD(m_compiler, m_liveOut);
1294 m_memoryLiveIn = emptyMemoryKindSet;
1295 m_memoryLiveOut = emptyMemoryKindSet;
1297 for (BasicBlock* block = m_compiler->fgLastBB; block; block = block->bbPrev)
1299 // sometimes block numbers are not monotonically increasing which
1300 // would cause us not to identify backedges
1301 if (block->bbNext && block->bbNext->bbNum <= block->bbNum)
1303 m_hasPossibleBackEdge = true;
1306 if (updateInternalOnly)
1308 /* Only update BBF_INTERNAL blocks as they may be
1309 syntactically out of sequence. */
1311 noway_assert(m_compiler->opts.compDbgCode && (m_compiler->info.compVarScopesCount > 0));
1313 if (!(block->bbFlags & BBF_INTERNAL))
1319 if (PerBlockAnalysis(block, updateInternalOnly, keepAliveThis))
1324 // if there is no way we could have processed a block without seeing all of its predecessors
1325 // then there is no need to iterate
1326 if (!m_hasPossibleBackEdge)
1334 static void Run(Compiler* compiler, bool updateInternalOnly)
1336 LiveVarAnalysis analysis(compiler);
1337 analysis.Run(updateInternalOnly);
1341 /*****************************************************************************
1343  *  This is the classic algorithm for Live Variable Analysis.
1344  *  If updateInternalOnly==true, only update BBF_INTERNAL blocks.
 *
 *  Thin wrapper over LiveVarAnalysis::Run; a no-op for backends that do not
 *  require local-variable lifetime information.
1347 void Compiler::fgLiveVarAnalysis(bool updateInternalOnly)
1349     if (!backendRequiresLocalVarLifetimes())
1354     LiveVarAnalysis::Run(this, updateInternalOnly);
1357     if (verbose && !updateInternalOnly)
1359         printf("\nBB liveness after fgLiveVarAnalysis():\n\n");
1365 //------------------------------------------------------------------------
1366 // Compiler::fgMarkIntf:
1367 //    Mark any variables in varSet1 as interfering with any variables
1368 //    specified in varSet2.
1370 //    We ensure that the interference graph is reflective: if T_x
1371 //    interferes with T_y, then T_y interferes with T_x.
1373 //    Note that this function is a no-op when targeting the RyuJIT
1374 //    backend, as it does not require the interference graph.
1377 //    varSet1 - The first set of variables.
1378 //    varSet2 - The second set of variables.
1381 //    True if any new interferences were recorded; false otherwise.
1383 bool Compiler::fgMarkIntf(VARSET_VALARG_TP varSet1, VARSET_VALARG_TP varSet2)
1385 #ifdef LEGACY_BACKEND
1386     /* If either set has no bits set (or we are not optimizing), take an early out */
1387     if (opts.MinOpts() || VarSetOps::IsEmpty(this, varSet2) || VarSetOps::IsEmpty(this, varSet1))
1392     bool addedIntf = false; // This is set to true if we add any new interferences
    // Iterate over the union of the two sets so each tracked index is visited once.
1394     VarSetOps::Assign(this, fgMarkIntfUnionVS, varSet1);
1395     VarSetOps::UnionD(this, fgMarkIntfUnionVS, varSet2);
1397     VarSetOps::Iter iter(this, fgMarkIntfUnionVS);
1398     unsigned       refIndex = 0;
1399     while (iter.NextElem(&refIndex))
1401         // if varSet1 has this bit set then it interferes with varSet2
1402         if (VarSetOps::IsMember(this, varSet1, refIndex))
1404             // Calculate the set of new interference to add
1405             VARSET_TP newIntf(VarSetOps::Diff(this, varSet2, lvaVarIntf[refIndex]));
1406             if (!VarSetOps::IsEmpty(this, newIntf))
1409                 VarSetOps::UnionD(this, lvaVarIntf[refIndex], newIntf);
1413         // if varSet2 has this bit set then it interferes with varSet1
1414         if (VarSetOps::IsMember(this, varSet2, refIndex))
1416             // Calculate the set of new interference to add
1417             VARSET_TP newIntf(VarSetOps::Diff(this, varSet1, lvaVarIntf[refIndex]));
1418             if (!VarSetOps::IsEmpty(this, newIntf))
1421                 VarSetOps::UnionD(this, lvaVarIntf[refIndex], newIntf);
1432 //------------------------------------------------------------------------
1433 // Compiler::fgMarkIntf:
1434 //    Mark any variables in varSet as interfering with the variable
1435 //    specified by varIndex.
1437 //    We ensure that the interference graph is reflective: if T_x
1438 //    interferes with T_y, then T_y interferes with T_x.
1440 //    Note that this function is a no-op when targeting the RyuJIT
1441 //    backend, as it does not require the interference graph.
1444 //    varSet   - The set of variables.
1445 //    varIndex - The second variable.
1448 //    True if any new interferences were recorded; false otherwise.
1450 bool Compiler::fgMarkIntf(VARSET_VALARG_TP varSet, unsigned varIndex)
1452 #ifdef LEGACY_BACKEND
1453     // If the input set has no bits set (or we are not optimizing), take an early out
1454     if (opts.MinOpts() || VarSetOps::IsEmpty(this, varSet))
1459     bool addedIntf = false; // This is set to true if we add any new interferences
    // Iterate over varSet plus varIndex so the single variable's row is updated too.
1461     VarSetOps::Assign(this, fgMarkIntfUnionVS, varSet);
1462     VarSetOps::AddElemD(this, fgMarkIntfUnionVS, varIndex);
1464     VarSetOps::Iter iter(this, fgMarkIntfUnionVS);
1465     unsigned       refIndex = 0;
1466     while (iter.NextElem(&refIndex))
1468         // if varSet has this bit set then it interferes with varIndex
1469         if (VarSetOps::IsMember(this, varSet, refIndex))
1471             // Calculate the set of new interference to add
1472             if (!VarSetOps::IsMember(this, lvaVarIntf[refIndex], varIndex))
1475                 VarSetOps::AddElemD(this, lvaVarIntf[refIndex], varIndex);
1479         // if this bit is the same as varIndex then it interferes with varSet
1480         if (refIndex == varIndex)
1482             // Calculate the set of new interference to add
1483             VARSET_TP newIntf(VarSetOps::Diff(this, varSet, lvaVarIntf[refIndex]));
1484             if (!VarSetOps::IsEmpty(this, newIntf))
1487                 VarSetOps::UnionD(this, lvaVarIntf[refIndex], newIntf);
1498 /*****************************************************************************
1500  * Mark any variables in varSet as interfering with each other,
1501  * This is a specialized version of the above, when both args are the same
1502  * We ensure that the interference graph is reflective:
1503  * (if T11 interferes with T16, then T16 interferes with T11)
1504  * This function returns true if any new interferences were added
1505  * and returns false if no new interferences were added
 *
 * Legacy-backend only; a no-op for the RyuJIT backend.
1508 bool Compiler::fgMarkIntf(VARSET_VALARG_TP varSet)
1510 #ifdef LEGACY_BACKEND
1511     /* No bits set or we are not optimizing, take an early out */
1512     if (VarSetOps::IsEmpty(this, varSet) || opts.MinOpts())
1515     bool addedIntf = false; // This is set to true if we add any new interferences
1517     VarSetOps::Iter iter(this, varSet);
1518     unsigned       refIndex = 0;
1519     while (iter.NextElem(&refIndex))
        // For each member, union the whole set into its interference row.
1521         // Calculate the set of new interference to add
1522         VARSET_TP newIntf(VarSetOps::Diff(this, varSet, lvaVarIntf[refIndex]));
1523         if (!VarSetOps::IsEmpty(this, newIntf))
1526             VarSetOps::UnionD(this, lvaVarIntf[refIndex], newIntf);
1531 #else  // !LEGACY_BACKEND
1533 #endif // !LEGACY_BACKEND
1536 /*****************************************************************************
1537  *  For updating liveset during traversal AFTER fgComputeLife has completed
 *
 *  Returns a copy of liveSet updated for the effect of 'tree': last-use nodes
 *  (GTF_VAR_DEATH) remove their variable bits; pure defs add them back (this
 *  routine models a FORWARD walk over flags computed by the backward pass).
1540 VARSET_VALRET_TP Compiler::fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tree)
1542     VARSET_TP newLiveSet(VarSetOps::MakeCopy(this, liveSet));
1543     assert(fgLocalVarLivenessDone == true);
1544     GenTree* lclVarTree = tree; // After the tests below, "lclVarTree" will be the local variable.
1545     if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_FLD || tree->gtOper == GT_REG_VAR ||
1546         (lclVarTree = fgIsIndirOfAddrOfLocal(tree)) != nullptr)
1548         const VARSET_TP& varBits(fgGetVarBits(lclVarTree));
1550         if (!VarSetOps::IsEmpty(this, varBits))
1552             if (tree->gtFlags & GTF_VAR_DEATH)
1554                 // We'd like to be able to assert the following, however if we are walking
1555                 // through a qmark/colon tree, we may encounter multiple last-use nodes.
1556                 // assert (VarSetOps::IsSubset(this, varBits, newLiveSet));
1558                 // We maintain the invariant that if the lclVarTree is a promoted struct,
1559                 // but the lookup fails, then all the field vars (i.e., "varBits") are dying.
1560                 VARSET_TP* deadVarBits = nullptr;
1561                 if (varTypeIsStruct(lclVarTree) && GetPromotedStructDeathVars()->Lookup(lclVarTree, &deadVarBits))
1563                     VarSetOps::DiffD(this, newLiveSet, *deadVarBits);
1567                     VarSetOps::DiffD(this, newLiveSet, varBits);
1570             else if ((tree->gtFlags & GTF_VAR_DEF) != 0 && (tree->gtFlags & GTF_VAR_USEASG) == 0)
1572                 assert(tree == lclVarTree); // LDOBJ case should only be a use.
1574                 // This shouldn't be in newLiveSet, unless this is debug code, in which
1575                 // case we keep vars live everywhere, OR it is address-exposed, OR this block
1576                 // is part of a try block, in which case it may be live at the handler
1577                 // Could add a check that, if it's in the newLiveSet, that it's also in
1578                 // fgGetHandlerLiveVars(compCurBB), but seems excessive
1580                 assert(VarSetOps::IsEmptyIntersection(this, newLiveSet, varBits) || opts.compDbgCode ||
1581                        lvaTable[tree->gtLclVarCommon.gtLclNum].lvAddrExposed ||
1582                        (compCurBB != nullptr && ehBlockHasExnFlowDsc(compCurBB)));
1583                 VarSetOps::UnionD(this, newLiveSet, varBits);
1590 //------------------------------------------------------------------------
1591 // Compiler::fgComputeLifeCall: compute the changes to local var liveness
1592 //                              due to a GT_CALL node.
1595 //    life - The live set that is being computed.
1596 //    call - The call node in question.
//
// Marks the P/Invoke frame-root variable live across tail calls and unmanaged
// calls; on the legacy backend it additionally prevents enregistration of
// GC-ref locals that are live across an unmanaged call.
1598 void Compiler::fgComputeLifeCall(VARSET_TP& life, GenTreeCall* call)
1600     assert(call != nullptr);
1602     // If this is a tail-call and we have any unmanaged p/invoke calls in
1603     // the method then we're going to run the p/invoke epilog
1604     // So we mark the FrameRoot as used by this instruction.
1605     // This ensures that this variable is kept alive at the tail-call
1606     if (call->IsTailCall() && info.compCallUnmanaged)
1608         assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
1609         if (!opts.ShouldUsePInvokeHelpers())
1611             /* Get the TCB local and make it live */
1613             noway_assert(info.compLvFrameListRoot < lvaCount);
1615             LclVarDsc* frameVarDsc = &lvaTable[info.compLvFrameListRoot];
1617             if (frameVarDsc->lvTracked)
1619                 VarSetOps::AddElemD(this, life, frameVarDsc->lvVarIndex);
1621                 // Record interference with other live variables
1622                 fgMarkIntf(life, frameVarDsc->lvVarIndex);
1627     // TODO: we should generate the code for saving to/restoring
1628     //       from the inlined N/Direct frame instead.
1630     /* Is this call to unmanaged code? */
1631     if (call->IsUnmanaged())
1633         /* Get the TCB local and make it live */
1634         assert((!opts.ShouldUsePInvokeHelpers()) || (info.compLvFrameListRoot == BAD_VAR_NUM));
1635         if (!opts.ShouldUsePInvokeHelpers())
1637             noway_assert(info.compLvFrameListRoot < lvaCount);
1639             LclVarDsc* frameVarDsc = &lvaTable[info.compLvFrameListRoot];
1641             if (frameVarDsc->lvTracked)
1643                 unsigned varIndex = frameVarDsc->lvVarIndex;
1644                 noway_assert(varIndex < lvaTrackedCount);
1646                 // Is the variable already known to be alive?
1648                 if (VarSetOps::IsMember(this, life, varIndex))
1650                     // Since we may call this multiple times, clear the GTF_CALL_M_FRAME_VAR_DEATH if set.
1652                     call->gtCallMoreFlags &= ~GTF_CALL_M_FRAME_VAR_DEATH;
1656                     // The variable is just coming to life
1657                     // Since this is a backwards walk of the trees
1658                     // that makes this change in liveness a 'last-use'
1660                     VarSetOps::AddElemD(this, life, varIndex);
1661                     call->gtCallMoreFlags |= GTF_CALL_M_FRAME_VAR_DEATH;
1664                 // Record an interference with the other live variables
1665                 fgMarkIntf(life, varIndex);
1669 #ifdef LEGACY_BACKEND
1670         /* Do we have any live variables? */
1671         if (!VarSetOps::IsEmpty(this, life))
1673             // For each live variable if it is a GC-ref type, mark it volatile to prevent it from being enregistered
1674             // across the unmanaged call.
1676             // Note that this is not necessary when targeting the RyuJIT backend, as its RA handles these kills itself.
1680             for (lclNum = 0, varDsc = lvaTable; lclNum < lvaCount; lclNum++, varDsc++)
1682                 /* Ignore the variable if it's not tracked */
1684                 if (!varDsc->lvTracked)
1689                 unsigned varNum = varDsc->lvVarIndex;
1691                 /* Ignore the variable if it's not live here */
1693                 if (!VarSetOps::IsMember(this, life, varDsc->lvVarIndex))
1698                 // If it is a GC-ref type then mark it DoNotEnregister.
1699                 if (varTypeIsGC(varDsc->TypeGet()))
1701                     lvaSetVarDoNotEnregister(lclNum DEBUGARG(DNER_LiveAcrossUnmanagedCall));
1705 #endif // LEGACY_BACKEND
1709 //------------------------------------------------------------------------
1710 // Compiler::fgComputeLifeTrackedLocalUse:
1711 //    Compute the changes to local var liveness due to a use of a tracked local var.
1714 //    life   - The live set that is being computed.
1715 //    varDsc - The LclVar descriptor for the variable being used.
1716 //    node   - The node that is using the lclVar.
//
// In the backward walk, a use of a not-yet-live variable is a last use:
// GTF_VAR_DEATH is set and the variable is added to 'life'.
1717 void Compiler::fgComputeLifeTrackedLocalUse(VARSET_TP& life, LclVarDsc& varDsc, GenTreeLclVarCommon* node)
1719     assert(node != nullptr);
1720     assert((node->gtFlags & GTF_VAR_DEF) == 0);
1721     assert(varDsc.lvTracked);
1723     const unsigned varIndex = varDsc.lvVarIndex;
1725     // Is the variable already known to be alive?
1726     if (VarSetOps::IsMember(this, life, varIndex))
1728         // Since we may do liveness analysis multiple times, clear the GTF_VAR_DEATH if set.
1729         node->gtFlags &= ~GTF_VAR_DEATH;
1736         printf("Ref V%02u,T%02u] at ", node->gtLclNum, varIndex);
1738         printf(" life %s -> %s\n", VarSetOps::ToString(this, life),
1739                VarSetOps::ToString(this, VarSetOps::AddElem(this, life, varIndex)));
1743     // The variable is being used, and it is not currently live.
1744     // So the variable is just coming to life
1745     node->gtFlags |= GTF_VAR_DEATH;
1746     VarSetOps::AddElemD(this, life, varIndex);
1748     // Record interference with other live variables
1749     fgMarkIntf(life, varIndex);
1752 //------------------------------------------------------------------------
1753 // Compiler::fgComputeLifeTrackedLocalDef:
1754 //    Compute the changes to local var liveness due to a def of a tracked local var and return `true` if the def is a
1758 //    life          - The live set that is being computed.
1759 //    keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1760 //    varDsc        - The LclVar descriptor for the variable being used or defined.
1761 //    node          - The node that is defining the lclVar.
1764 //    `true` if the def is a dead store; `false` otherwise.
1765 bool Compiler::fgComputeLifeTrackedLocalDef(VARSET_TP& life,
1766                                             VARSET_VALARG_TP keepAliveVars,
1768                                             GenTreeLclVarCommon* node)
1770     assert(node != nullptr);
1771     assert((node->gtFlags & GTF_VAR_DEF) != 0);
1772     assert(varDsc.lvTracked);
1774     const unsigned varIndex = varDsc.lvVarIndex;
1775     if (VarSetOps::IsMember(this, life, varIndex))
1777         // The variable is live
1778         if ((node->gtFlags & GTF_VAR_USEASG) == 0)
            // Pure def (no USEASG): in the backward walk the variable stops
            // being live above this point, unless it must be kept alive.
1780             // Remove the variable from the live set if it is not in the keepalive set.
1781             if (!VarSetOps::IsMember(this, keepAliveVars, varIndex))
1783                 VarSetOps::RemoveElemD(this, life, varIndex);
1788                 printf("Def V%02u,T%02u at ", node->gtLclNum, varIndex);
1790                 printf(" life %s -> %s\n",
1791                        VarSetOps::ToString(this,
1792                                            VarSetOps::Union(this, life, VarSetOps::MakeSingleton(this, varIndex))),
1793                        VarSetOps::ToString(this, life));
    // The variable is NOT live after the def: candidate dead store.
1801     node->gtFlags |= GTF_VAR_DEATH;
1803     if (!opts.MinOpts())
1805         // keepAliveVars always stay alive
1806         noway_assert(!VarSetOps::IsMember(this, keepAliveVars, varIndex));
1808         // Do not consider this store dead if the target local variable represents
1809         // a promoted struct field of an address exposed local or if the address
1810         // of the variable has been exposed. Improved alias analysis could allow
1811         // stores to these sorts of variables to be removed at the cost of compile
1813         return !varDsc.lvAddrExposed && !(varDsc.lvIsStructField && lvaTable[varDsc.lvParentLcl].lvAddrExposed);
1820 //------------------------------------------------------------------------
1821 // Compiler::fgComputeLifeUntrackedLocal:
1822 //    Compute the changes to local var liveness due to a use or a def of an untracked local var.
1825 //    It may seem a bit counter-intuitive that a change to an untracked lclVar could affect the liveness of tracked
1826 //    lclVars. In theory, this could happen with promoted (especially dependently-promoted) structs: in these cases,
1827 //    a use or def of the untracked struct var is treated as a use or def of any of its component fields that are
1831 //    life          - The live set that is being computed.
1832 //    keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1833 //    varDsc        - The LclVar descriptor for the variable being used or defined.
1834 //    lclVarNode    - The node that corresponds to the local var def or use. Only differs from `node` when targeting
1835 //                    the legacy backend.
1836 //    node          - The actual tree node being processed.
1837 void Compiler::fgComputeLifeUntrackedLocal(
1838     VARSET_TP& life, VARSET_VALARG_TP keepAliveVars, LclVarDsc& varDsc, GenTreeLclVarCommon* lclVarNode, GenTree* node)
1840     assert(lclVarNode != nullptr);
1841     assert(node != nullptr);
    // Non-promoted untracked locals cannot affect the tracked live set; bail.
1843     if (!varTypeIsStruct(varDsc.lvType) || (lvaGetPromotionType(&varDsc) == PROMOTION_TYPE_NONE))
    // Collect the tracked field vars of this promoted struct into 'varBit'.
1848     VARSET_TP varBit(VarSetOps::MakeEmpty(this));
1850     for (unsigned i = varDsc.lvFieldLclStart; i < varDsc.lvFieldLclStart + varDsc.lvFieldCnt; ++i)
1852 #if !defined(_TARGET_64BIT_) && !defined(LEGACY_BACKEND)
1853         if (!varTypeIsLong(lvaTable[i].lvType) || !lvaTable[i].lvPromoted)
1854 #endif // !defined(_TARGET_64BIT_) && !defined(LEGACY_BACKEND)
1856             noway_assert(lvaTable[i].lvIsStructField);
1858         if (lvaTable[i].lvTracked)
1860             const unsigned varIndex = lvaTable[i].lvVarIndex;
1861             noway_assert(varIndex < lvaTrackedCount);
1862             VarSetOps::AddElemD(this, varBit, varIndex);
1865     if (node->gtFlags & GTF_VAR_DEF)
        // A def of the struct kills all its (non-keepalive) tracked fields.
1867         VarSetOps::DiffD(this, varBit, keepAliveVars);
1868         VarSetOps::DiffD(this, life, varBit);
1873     // Are the variables already known to be alive?
1874     if (VarSetOps::IsSubset(this, varBit, life))
1876         node->gtFlags &= ~GTF_VAR_DEATH; // Since we may now call this multiple times, reset if live.
1880     // Some variables are being used, and they are not currently live.
1881     // So they are just coming to life, in the backwards traversal; in a forwards
1882     // traversal, one or more are dying.  Mark this.
1884     node->gtFlags |= GTF_VAR_DEATH;
1886     // Are all the variables becoming alive (in the backwards traversal), or just a subset?
1887     if (!VarSetOps::IsEmptyIntersection(this, varBit, life))
1889         // Only a subset of the variables become live; we must record that subset.
1890         // (Lack of an entry for "lclVarNode" will be considered to imply all become dead in the
1891         // forward traversal.)
1892         VARSET_TP* deadVarSet = new (this, CMK_bitset) VARSET_TP;
1893         VarSetOps::AssignNoCopy(this, *deadVarSet, VarSetOps::Diff(this, varBit, life));
1894         GetPromotedStructDeathVars()->Set(lclVarNode, deadVarSet);
1897     // In any case, all the field vars are now live (in the backwards traversal).
1898     VarSetOps::UnionD(this, life, varBit);
1900     // Record interference with other live variables
1901     fgMarkIntf(life, varBit);
1904 //------------------------------------------------------------------------
1905 // Compiler::fgComputeLifeLocal:
1906 //    Compute the changes to local var liveness due to a use or a def of a local var and indicates whether the use/def
1910 //    life          - The live set that is being computed.
1911 //    keepAliveVars - The current set of variables to keep alive regardless of their actual lifetime.
1912 //    lclVarNode    - The node that corresponds to the local var def or use. Only differs from `node` when targeting
1913 //                    the legacy backend.
1914 //    node          - The actual tree node being processed.
1917 //    `true` if the local var node corresponds to a dead store; `false` otherwise.
//
// Dispatcher: routes to the tracked def/use helpers or the untracked
// (promoted-struct) helper based on the variable's tracking state.
1918 bool Compiler::fgComputeLifeLocal(VARSET_TP& life, VARSET_VALARG_TP keepAliveVars, GenTree* lclVarNode, GenTree* node)
1920     unsigned lclNum = lclVarNode->gtLclVarCommon.gtLclNum;
1922     assert(lclNum < lvaCount);
1923     LclVarDsc& varDsc = lvaTable[lclNum];
1925     // Is this a tracked variable?
1926     if (varDsc.lvTracked)
1928         /* Is this a definition or use? */
1929         if (lclVarNode->gtFlags & GTF_VAR_DEF)
1931             return fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode->AsLclVarCommon());
1935             fgComputeLifeTrackedLocalUse(life, varDsc, lclVarNode->AsLclVarCommon());
1940         fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode->AsLclVarCommon(), node);
1945 /*****************************************************************************
1947  *  Compute the set of live variables at each node in a given statement
1948  *  or subtree of a statement moving backward from startNode to endNode
 *
 *  RyuJIT (statement-based) form. Walks the linked node order in reverse,
 *  updating 'life' for calls and local references, and removing dead stores
 *  via fgRemoveDeadStore.
1951 #ifndef LEGACY_BACKEND
1952 void Compiler::fgComputeLife(VARSET_TP& life,
1955                              VARSET_VALARG_TP volatileVars,
1956                              bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
1960     // Don't kill vars in scope
1961     VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, compCurBB->bbScope));
1963     noway_assert(VarSetOps::IsSubset(this, keepAliveVars, life));
1964     noway_assert(compCurStmt->gtOper == GT_STMT);
1965     noway_assert(endNode || (startNode == compCurStmt->gtStmt.gtStmtExpr));
1967     // NOTE: Live variable analysis will not work if you try
1968     // to use the result of an assignment node directly!
1969     for (tree = startNode; tree != endNode; tree = tree->gtPrev)
1972         assert(tree->OperGet() != GT_QMARK);
1974         if (tree->gtOper == GT_CALL)
1976             fgComputeLifeCall(life, tree->AsCall());
1978         else if (tree->OperIsNonPhiLocal() || tree->OperIsLocalAddr())
1980             bool isDeadStore = fgComputeLifeLocal(life, keepAliveVars, tree, tree);
1983                 LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum];
1985                 bool doAgain = false;
1986                 if (fgRemoveDeadStore(&tree, varDsc, life, &doAgain, pStmtInfoDirty DEBUGARG(treeModf)))
// fgComputeLifeLIR: LIR (backend) form of the per-block liveness walk.
// Traverses the block's LIR range backward (skipping leading PHIs), updating
// 'life' at each node and performing dead-code elimination as it goes: dead
// calls, dead local uses/stores, dead indirect stores, and dead side-effect-free
// nodes are removed, with their operands marked unused so DCE iterates.
2001 void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALARG_TP volatileVars)
2003     // Don't kill volatile vars and vars in scope.
2004     VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, block->bbScope));
2006     noway_assert(VarSetOps::IsSubset(this, keepAliveVars, life));
2008     LIR::Range& blockRange      = LIR::AsRange(block);
2009     GenTree*    firstNonPhiNode = blockRange.FirstNonPhiNode();
2010     if (firstNonPhiNode == nullptr)
    // Capture 'next' up front because the current node may be removed below.
2014     for (GenTree *node = blockRange.LastNode(), *next = nullptr, *end = firstNonPhiNode->gtPrev; node != end;
2017         next = node->gtPrev;
2020         switch (node->OperGet())
2024             GenTreeCall* const call = node->AsCall();
2025             if (((call->TypeGet() == TYP_VOID) || call->IsUnusedValue()) && !call->HasSideEffects(this))
2027                 JITDUMP("Removing dead call:\n");
2030                 node->VisitOperands([](GenTree* operand) -> GenTree::VisitResult {
2031                     if (operand->IsValue())
2033                         operand->SetUnusedValue();
2036                     // Special-case PUTARG_STK: since this operator is not considered a value, DCE will not remove
2038                     if (operand->OperIs(GT_PUTARG_STK))
2040                         operand->AsPutArgStk()->gtOp1->SetUnusedValue();
2041                         operand->gtBashToNOP();
2044                     return GenTree::VisitResult::Continue;
2047                 blockRange.Remove(node);
2049                 // Removing a call does not affect liveness unless it is a tail call in a method with P/Invokes or
2050                 // is itself a P/Invoke, in which case it may affect the liveness of the frame root variable.
2051                 if (!opts.MinOpts() && !opts.ShouldUsePInvokeHelpers() &&
2052                     ((call->IsTailCall() && info.compCallUnmanaged) || call->IsUnmanaged()) &&
2053                     lvaTable[info.compLvFrameListRoot].lvTracked)
2055                 fgStmtRemoved = true;
2060                 fgComputeLifeCall(life, call);
2068             GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon();
2069             LclVarDsc&                 varDsc     = lvaTable[lclVarNode->gtLclNum];
2071             if (node->IsUnusedValue())
2073                 JITDUMP("Removing dead LclVar use:\n");
2074                 DISPNODE(lclVarNode);
2076                 blockRange.Delete(this, block, node);
2077                 if (varDsc.lvTracked && !opts.MinOpts())
2079                 fgStmtRemoved = true;
2082             else if (varDsc.lvTracked)
2084                 fgComputeLifeTrackedLocalUse(life, varDsc, lclVarNode);
2088                 fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode, node);
2093         case GT_LCL_VAR_ADDR:
2094         case GT_LCL_FLD_ADDR:
2095             if (node->IsUnusedValue())
2097                 JITDUMP("Removing dead LclVar address:\n");
2100                 const bool isTracked = lvaTable[node->AsLclVarCommon()->gtLclNum].lvTracked;
2101                 blockRange.Delete(this, block, node);
2102                 if (isTracked && !opts.MinOpts())
2104                 fgStmtRemoved = true;
            // A live local address: treat as a use/def of the local, and if its
            // sole user is a dead STOREIND, remove that store too.
2109                 isDeadStore = fgComputeLifeLocal(life, keepAliveVars, node, node);
2113                     if (blockRange.TryGetUse(node, &addrUse) && (addrUse.User()->OperGet() == GT_STOREIND))
2115                         // Remove the store. DCE will iteratively clean up any unused operands.
2116                         GenTreeStoreInd* const store = addrUse.User()->AsStoreInd();
2118                         JITDUMP("Removing dead indirect store:\n");
2121                         assert(store->Addr() == node);
2122                         blockRange.Delete(this, block, node);
2124                         store->Data()->SetUnusedValue();
2126                         blockRange.Remove(store);
2128                         assert(!opts.MinOpts());
2129                         fgStmtRemoved = true;
2135         case GT_STORE_LCL_VAR:
2136         case GT_STORE_LCL_FLD:
2138             GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon();
2140             LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum];
2141             if (varDsc.lvTracked)
2143                 isDeadStore = fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode);
2146                     JITDUMP("Removing dead store:\n");
2147                     DISPNODE(lclVarNode);
2149                     // Remove the store. DCE will iteratively clean up any unused operands.
2150                     lclVarNode->gtOp1->SetUnusedValue();
2152                     lvaDecRefCnts(block, node);
2154                     // If the store is marked as a late argument, it is referenced by a call. Instead of removing
2155                     // it, bash it to a NOP.
2156                     if ((node->gtFlags & GTF_LATE_ARG) != 0)
2158                         JITDUMP("node is a late arg; replacing with NOP\n");
2159                         node->gtBashToNOP();
2161                         // NOTE: this is a bit of a hack. We need to keep these nodes around as they are
2162                         // referenced by the call, but they're considered side-effect-free non-value-producing
2163                         // nodes, so they will be removed if we don't do this.
2164                         node->gtFlags |= GTF_ORDER_SIDEEFF;
2168                         blockRange.Remove(node);
2171                     assert(!opts.MinOpts());
2172                     fgStmtRemoved = true;
2177                 fgComputeLifeUntrackedLocal(life, keepAliveVars, varDsc, lclVarNode, node);
2188         case GT_CLS_VAR_ADDR:
2190             // These are all side-effect-free leaf nodes.
2191             if (node->IsUnusedValue())
2193                 JITDUMP("Removing dead node:\n");
2196                 blockRange.Remove(node);
2204         case GT_MEMORYBARRIER:
2207         case GT_ARR_BOUNDS_CHECK:
2210         case GT_STORE_DYN_BLK:
2211 #if defined(FEATURE_SIMD)
2213 #endif // FEATURE_SIMD
2214 #ifdef FEATURE_HW_INTRINSICS
2215         case GT_HW_INTRINSIC_CHK:
2216 #endif // FEATURE_HW_INTRINSICS
2224         case GT_START_NONGC:
2226 #if !FEATURE_EH_FUNCLETS
2228 #endif // !FEATURE_EH_FUNCLETS
2229         case GT_SWITCH_TABLE:
2230         case GT_PINVOKE_PROLOG:
2231         case GT_PINVOKE_EPILOG:
2235 #ifdef FEATURE_HW_INTRINSICS
2236         case GT_HWIntrinsic:
2237 #endif // FEATURE_HW_INTRINSICS
2238             // Never remove these nodes, as they are always side-effecting.
2240             // NOTE: the only side-effect of some of these nodes (GT_CMP, GT_SUB_HI) is a write to the flags
2242             // Properly modeling this would allow these nodes to be removed.
2246             // NOTE: we need to keep some NOPs around because they are referenced by calls. See the dead store
2247             // removal code above (case GT_STORE_LCL_VAR) for more explanation.
2248             if ((node->gtFlags & GTF_ORDER_SIDEEFF) != 0)
        // Default case: generic value node with no direct side effects.
2255             assert(!node->OperIsLocal());
2256             if (!node->IsValue() || node->IsUnusedValue())
2258                 // We are only interested in avoiding the removal of nodes with direct side-effects
2259                 // (as opposed to side effects of their children).
2260                 // This default case should never include calls or assignments.
2261                 assert(!node->OperRequiresAsgFlag() && !node->OperIs(GT_CALL));
2262                 if (!node->gtSetFlags() && !node->OperMayThrow(this))
2264                     JITDUMP("Removing dead node:\n");
2267                     node->VisitOperands([](GenTree* operand) -> GenTree::VisitResult {
2268                         operand->SetUnusedValue();
2269                         return GenTree::VisitResult::Continue;
2272                     blockRange.Remove(node);
2280 #else // LEGACY_BACKEND
2283 #pragma warning(push)
2284 #pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
// fgComputeLife: walk the trees of the current statement BACKWARDS (from
// startNode down to endNode via gtPrev), updating the live-variable set 'life'
// in place. Along the way it tracks ?: (QMARK/COLON) branch liveness, removes
// QMARK trees whose branches were optimized away, and calls fgRemoveDeadStore
// for dead definitions of locals. Variables in keepAliveVars (volatileVars
// plus vars in the current debug scope) are never killed.
// NOTE(review): this listing elides brace/blank lines (the embedded original
// line numbers are non-contiguous) — block boundaries below must be confirmed
// against the full source before any behavioral change.
2287 void Compiler::fgComputeLife(VARSET_TP& life,
2290 VARSET_VALARG_TP volatileVars,
2291 bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
2296 GenTree* gtQMark = NULL; // current GT_QMARK node (walking the trees backwards)
2297 GenTree* nextColonExit = 0; // gtQMark->gtOp.gtOp2 while walking the 'else' branch.
2298 // gtQMark->gtOp.gtOp1 while walking the 'then' branch
2300 // TBD: This used to be an initialization to VARSET_NOT_ACCEPTABLE. Try to figure out what's going on here.
2301 VARSET_TP entryLiveSet(VarSetOps::MakeFull(this)); // liveness when we see gtQMark
2302 VARSET_TP gtColonLiveSet(VarSetOps::MakeFull(this)); // liveness when we see gtColon
2303 GenTree* gtColon = NULL;
2305 VARSET_TP keepAliveVars(VarSetOps::Union(this, volatileVars, compCurBB->bbScope)); /* Dont kill vars in scope */
2307 noway_assert(VarSetOps::Equal(this, VarSetOps::Intersection(this, keepAliveVars, life), keepAliveVars));
2308 noway_assert(compCurStmt->gtOper == GT_STMT);
2309 noway_assert(endNode || (startNode == compCurStmt->gtStmt.gtStmtExpr));
2311 /* NOTE: Live variable analysis will not work if you try
2312 * to use the result of an assignment node directly */
2314 for (tree = startNode; tree != endNode; tree = tree->gtPrev)
2317 /* For ?: nodes if we're done with the then branch, remember
2319 if (gtQMark && (tree == gtColon))
2321 VarSetOps::Assign(this, gtColonLiveSet, life);
2322 VarSetOps::Assign(this, gtQMark->gtQmark.gtThenLiveSet, gtColonLiveSet);
2325 /* For ?: nodes if we're done with the else branch
2326 * then set the correct life as the union of the two branches */
2328 if (gtQMark && (tree == gtQMark->gtOp.gtOp1))
2330 noway_assert(tree->gtFlags & GTF_RELOP_QMARK);
2331 noway_assert(gtQMark->gtOp.gtOp2->gtOper == GT_COLON);
2333 GenTree* thenNode = gtColon->AsColon()->ThenNode();
2334 GenTree* elseNode = gtColon->AsColon()->ElseNode();
2336 noway_assert(thenNode && elseNode);
2338 VarSetOps::Assign(this, gtQMark->gtQmark.gtElseLiveSet, life);
2340 /* Check if we optimized away the ?: */
2342 if (elseNode->IsNothingNode())
2344 if (thenNode->IsNothingNode())
2346 /* This can only happen for VOID ?: */
2347 noway_assert(gtColon->gtType == TYP_VOID);
2352 printf("BB%02u - Removing dead QMark - Colon ...\n", compCurBB->bbNum);
2353 gtDispTree(gtQMark);
2358 /* Remove the '?:' - keep the side effects in the condition */
2360 noway_assert(tree->OperKind() & GTK_RELOP);
2362 /* Change the node to a NOP */
2364 gtQMark->gtBashToNOP();
2369 /* Extract and keep the side effects */
2371 if (tree->gtFlags & GTF_SIDE_EFFECT)
2373 GenTree* sideEffList = NULL;
2375 gtExtractSideEffList(tree, &sideEffList);
2379 noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
2383 printf("Extracted side effects list from condition...\n");
2384 gtDispTree(sideEffList);
2388 fgUpdateRefCntForExtract(tree, sideEffList);
2390 /* The NOP node becomes a GT_COMMA holding the side effect list */
2392 gtQMark->ChangeOper(GT_COMMA);
2393 gtQMark->gtFlags |= sideEffList->gtFlags & GTF_ALL_EFFECT;
2395 if (sideEffList->gtOper == GT_COMMA)
2397 gtQMark->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
2398 gtQMark->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
2402 gtQMark->gtOp.gtOp1 = sideEffList;
2403 gtQMark->gtOp.gtOp2 = gtNewNothingNode();
2411 printf("\nRemoving tree ");
2413 printf(" in BB%02u as useless\n", compCurBB->bbNum);
2418 fgUpdateRefCntForExtract(tree, NULL);
2422 /* If top node without side effects remove it */
2424 if ((gtQMark == compCurStmt->gtStmt.gtStmtExpr) && gtQMark->IsNothingNode())
2426 fgRemoveStmt(compCurBB, compCurStmt);
2430 /* Re-link the nodes for this statement */
2432 fgSetStmtSeq(compCurStmt);
2434 /* Continue analysis from this node */
2438 /* As the 'then' and 'else' branches are emtpy, liveness
2439 should not have changed */
2441 noway_assert(VarSetOps::Equal(this, life, entryLiveSet));
2446 // The 'else' branch is empty and the 'then' branch is non-empty
2447 // so swap the two branches and reverse the condition. If one is
2448 // non-empty, we want it to be the 'else'
2450 GenTree* tmp = thenNode;
2452 gtColon->AsColon()->ThenNode() = thenNode = elseNode;
2453 gtColon->AsColon()->ElseNode() = elseNode = tmp;
2454 noway_assert(tree == gtQMark->gtOp.gtOp1);
2455 gtReverseCond(tree);
2457 // Remember to also swap the live sets of the two branches.
2458 const VARSET_TP& tmpVS(gtQMark->gtQmark.gtElseLiveSet);
2459 VarSetOps::AssignNoCopy(this, gtQMark->gtQmark.gtElseLiveSet, gtQMark->gtQmark.gtThenLiveSet);
2460 VarSetOps::AssignNoCopy(this, gtQMark->gtQmark.gtThenLiveSet, tmpVS);
2462 /* Re-link the nodes for this statement */
2464 fgSetStmtSeq(compCurStmt);
2468 /* Variables in the two branches that are live at the split
2469 * must interfere with each other */
2471 fgMarkIntf(life, gtColonLiveSet);
2473 /* The live set at the split is the union of the two branches */
2475 VarSetOps::UnionD(this, life, gtColonLiveSet);
2479 /* We are out of the parallel branches, the rest is sequential */
// Calls are handled by their own helper, which updates 'life' for the call's
// uses/defs (see fgComputeLifeCall).
2484 if (tree->gtOper == GT_CALL)
2486 fgComputeLifeCall(life, tree->AsCall());
2490 // Is this a use/def of a local variable?
2491 // Generally, the last use information is associated with the lclVar node.
2492 // However, for LEGACY_BACKEND, the information must be associated
2493 // with the OBJ itself for promoted structs.
2494 // In that case, the LDOBJ may be require an implementation that might itself allocate registers,
2495 // so the variable(s) should stay live until the end of the LDOBJ.
2496 // Note that for promoted structs lvTracked is false.
2498 GenTree* lclVarTree = nullptr;
2499 if (tree->gtOper == GT_OBJ)
2501 // fgIsIndirOfAddrOfLocal returns nullptr if the tree is
2502 // not an indir(addr(local)), in which case we will set lclVarTree
2503 // back to the original tree, and not handle it as a use/def.
2504 lclVarTree = fgIsIndirOfAddrOfLocal(tree);
// Tracked locals are handled at their lclVar node, not at the OBJ; only
// untracked (e.g. promoted-struct) locals are handled here.
2505 if ((lclVarTree != nullptr) && lvaTable[lclVarTree->gtLclVarCommon.gtLclNum].lvTracked)
2507 lclVarTree = nullptr;
2510 if (lclVarTree == nullptr)
2515 if (lclVarTree->OperIsNonPhiLocal() || lclVarTree->OperIsLocalAddr())
2517 bool isDeadStore = fgComputeLifeLocal(life, keepAliveVars, lclVarTree, tree);
2520 LclVarDsc* varDsc = &lvaTable[lclVarTree->gtLclVarCommon.gtLclNum];
2522 bool doAgain = false;
2523 if (fgRemoveDeadStore(&tree, varDsc, life, &doAgain, pStmtInfoDirty DEBUGARG(treeModf)))
2537 if (tree->gtOper == GT_QMARK && tree->gtOp.gtOp1)
2539 /* Special cases - "? :" operators.
2541 The trees are threaded as shown below with nodes 1 to 11 linked
2542 by gtNext. Both GT_<cond>->gtLiveSet and GT_COLON->gtLiveSet are
2543 the union of the liveness on entry to thenTree and elseTree.
2545 +--------------------+
2547 +----------+---------+
2553 +---------------------+ +--------------------+
2554 | GT_<cond> 3 | | GT_COLON 7 |
2555 | w/ GTF_RELOP_QMARK | | w/ GTF_COLON_COND |
2556 +----------+----------+ +---------+----------+
2562 2 1 thenTree 6 elseTree 10
2565 +----------------+ / / \ / \
2566 |prevExpr->gtNext+------/ / \ / \
2567 +----------------+ / \ / \
2572 noway_assert(tree->gtOp.gtOp1->OperKind() & GTK_RELOP);
2573 noway_assert(tree->gtOp.gtOp1->gtFlags & GTF_RELOP_QMARK);
2574 noway_assert(tree->gtOp.gtOp2->gtOper == GT_COLON);
2578 /* This is a nested QMARK sequence - we need to use recursion.
2579 * Compute the liveness for each node of the COLON branches
2580 * The new computation starts from the GT_QMARK node and ends
2581 * when the COLON branch of the enclosing QMARK ends */
2583 noway_assert(nextColonExit &&
2584 (nextColonExit == gtQMark->gtOp.gtOp1 || nextColonExit == gtQMark->gtOp.gtOp2));
2586 fgComputeLife(life, tree, nextColonExit, volatileVars, pStmtInfoDirty DEBUGARG(treeModf));
2588 /* Continue with exit node (the last node in the enclosing colon branch) */
2590 tree = nextColonExit;
// First (outermost) QMARK seen on this walk: record entry liveness and arm
// the COLON node as the next exit point.
2596 VarSetOps::Assign(this, entryLiveSet, life);
2597 gtColon = gtQMark->gtOp.gtOp2;
2598 nextColonExit = gtColon;
2602 /* If found the GT_COLON, start the new branch with the original life */
2604 if (gtQMark && tree == gtQMark->gtOp.gtOp2)
2606 /* The node better be a COLON. */
2607 noway_assert(tree->gtOper == GT_COLON);
2609 VarSetOps::Assign(this, life, entryLiveSet);
2610 nextColonExit = gtQMark->gtOp.gtOp1;
2615 /* Return the set of live variables out of this statement */
2619 #pragma warning(pop)
2622 #endif // !LEGACY_BACKEND
2624 // fgRemoveDeadStore - remove a store to a local which has no exposed uses.
2626 // pTree - GenTree** to local, including store-form local or local addr (post-rationalize)
2627 // varDsc - var that is being stored to
2628 // life - current live tracked vars (maintained as we walk backwards)
2629 // doAgain - out parameter, true if we should restart the statement
2630 // pStmtInfoDirty - should defer the cost computation to the point after the reverse walk is completed?
2632 // Returns: true if we should skip the rest of the statement, false if we should continue
// fgRemoveDeadStore: given a definition of a local ('*pTree') found dead by the
// liveness walk, locate the enclosing assignment and remove it, preserving any
// side effects of the RHS (extracted via gtExtractSideEffList). Top-level dead
// assignments may remove the whole statement; interior ones are bashed to
// NOP/COMMA in place. May set *pStmtInfoDirty so the caller recomputes stmt
// costs, or request a restart of the walk via the doAgain out-parameter.
2634 bool Compiler::fgRemoveDeadStore(GenTree** pTree,
2636 VARSET_VALARG_TP life,
2638 bool* pStmtInfoDirty DEBUGARG(bool* treeModf))
2640 assert(!compRationalIRForm);
2642 // Vars should have already been checked for address exposure by this point.
2643 assert(!varDsc->lvIsStructField || !lvaTable[varDsc->lvParentLcl].lvAddrExposed);
2644 assert(!varDsc->lvAddrExposed);
2646 GenTree* asgNode = nullptr;
2647 GenTree* rhsNode = nullptr;
2648 GenTree* addrNode = nullptr;
2649 GenTree* const tree = *pTree;
2651 GenTree* nextNode = tree->gtNext;
2653 // First, characterize the lclVarTree and see if we are taking its address.
2654 if (tree->OperIsLocalStore())
2656 rhsNode = tree->gtOp.gtOp1;
2659 else if (tree->OperIsLocal())
2661 if (nextNode == nullptr)
2665 if (nextNode->OperGet() == GT_ADDR)
2667 addrNode = nextNode;
2668 nextNode = nextNode->gtNext;
2673 assert(tree->OperIsLocalAddr());
2677 // Next, find the assignment.
2678 if (asgNode == nullptr)
2680 if (addrNode == nullptr)
2684 else if (asgNode == nullptr)
2686 // This may be followed by GT_IND/assign or GT_STOREIND.
2687 if (nextNode == nullptr)
2691 if (nextNode->OperIsIndir())
2693 // This must be a non-nullcheck form of indir, or it would not be a def.
2694 assert(nextNode->OperGet() != GT_NULLCHECK);
2695 if (nextNode->OperIsStore())
2698 if (asgNode->OperIsBlk())
2700 rhsNode = asgNode->AsBlk()->Data();
2702 // TODO-1stClassStructs: There should be an else clause here to handle
2703 // the non-block forms of store ops (GT_STORE_LCL_VAR, etc.) for which
2704 // rhsNode is op1. (This isn't really a 1stClassStructs item, but the
2705 // above was added to catch what used to be dead block ops, and that
2706 // made this omission apparent.)
2710 asgNode = nextNode->gtNext;
2716 if (asgNode == nullptr)
2721 if (asgNode->OperIsAssignment())
2723 rhsNode = asgNode->gtGetOp2();
2725 else if (rhsNode == nullptr)
2730 if (asgNode && (asgNode->gtFlags & GTF_ASG))
2732 noway_assert(rhsNode);
2733 noway_assert(tree->gtFlags & GTF_VAR_DEF);
2735 #ifndef LEGACY_BACKEND
2736 assert(asgNode->OperIs(GT_ASG));
// NOTE(review): this listing appears to elide an '#else // LEGACY_BACKEND'
// between the assert above and the 'if' below — the overflow-assign rework
// that follows reads as legacy-backend-only; confirm against the full source.
2738 if (asgNode->gtOper != GT_ASG && asgNode->gtOverflowEx())
2740 // asgNode may be <op_ovf>= (with GTF_OVERFLOW). In that case, we need to keep the <op_ovf>
2742 // Dead <OpOvf>= assignment. We change it to the right operation (taking out the assignment),
2743 // update the flags, update order of statement, as we have changed the order of the operation
2744 // and we start computing life again from the op_ovf node (we go backwards). Note that we
2745 // don't need to update ref counts because we don't change them, we're only changing the
2747 CLANG_FORMAT_COMMENT_ANCHOR;
2752 printf("\nChanging dead <asgop> ovf to <op> ovf...\n");
2756 switch (asgNode->gtOper)
2759 asgNode->SetOperRaw(GT_ADD);
2762 asgNode->SetOperRaw(GT_SUB);
2765 // Only add and sub allowed, we don't have ASG_MUL and ASG_DIV for ints, and
2766 // floats don't allow OVF forms.
2767 noway_assert(!"Unexpected ASG_OP");
2770 asgNode->gtFlags &= ~GTF_REVERSE_OPS;
2771 if (!((asgNode->gtOp.gtOp1->gtFlags | rhsNode->gtFlags) & GTF_ASG))
2773 asgNode->gtFlags &= ~GTF_ASG;
2775 asgNode->gtOp.gtOp1->gtFlags &= ~(GTF_VAR_DEF | GTF_VAR_USEASG);
2781 // Make sure no previous cousin subtree rooted at a common ancestor has
2782 // asked to defer the recomputation of costs.
2783 if (!*pStmtInfoDirty)
2785 /* Update ordering, costs, FP levels, etc. */
2786 gtSetStmtInfo(compCurStmt);
2788 /* Re-link the nodes for this statement */
2789 fgSetStmtSeq(compCurStmt);
2791 // Start from the old assign node, as we have changed the order of its operands.
2792 // No need to update liveness, as nothing has changed (the target of the asgNode
2793 // either goes dead here, in which case the whole expression is now dead, or it
2794 // was already live).
2796 // TODO-Throughput: Redo this so that the graph is modified BEFORE traversing it!
2797 // We can determine this case when we first see the asgNode
2806 // Do not remove if this local variable represents
2807 // a promoted struct field of an address exposed local.
2808 if (varDsc->lvIsStructField && lvaTable[varDsc->lvParentLcl].lvAddrExposed)
2813 // Do not remove if the address of the variable has been exposed.
2814 if (varDsc->lvAddrExposed)
2819 /* Test for interior statement */
2821 if (asgNode->gtNext == nullptr)
2823 /* This is a "NORMAL" statement with the
2824 * assignment node hanging from the GT_STMT node */
2826 noway_assert(compCurStmt->gtStmt.gtStmtExpr == asgNode);
2827 JITDUMP("top level assign\n");
2829 /* Check for side effects */
2831 if (rhsNode->gtFlags & GTF_SIDE_EFFECT)
2833 EXTRACT_SIDE_EFFECTS:
2834 /* Extract the side effects */
2836 GenTree* sideEffList = nullptr;
2840 printf("BB%02u - Dead assignment has side effects...\n", compCurBB->bbNum);
2841 gtDispTree(asgNode);
2845 if (rhsNode->TypeGet() == TYP_STRUCT)
2847 // This is a block assignment. An indirection of the rhs is not considered to
2848 // happen until the assignment, so we will extract the side effects from only
2850 if (rhsNode->OperIsIndir())
2852 assert(rhsNode->OperGet() != GT_NULLCHECK);
2853 rhsNode = rhsNode->AsIndir()->Addr();
2856 gtExtractSideEffList(rhsNode, &sideEffList);
2860 noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
2864 printf("Extracted side effects list...\n");
2865 gtDispTree(sideEffList);
2869 fgUpdateRefCntForExtract(asgNode, sideEffList);
2871 /* Replace the assignment statement with the list of side effects */
2872 noway_assert(sideEffList->gtOper != GT_STMT);
2874 *pTree = compCurStmt->gtStmt.gtStmtExpr = sideEffList;
2878 /* Update ordering, costs, FP levels, etc. */
2879 gtSetStmtInfo(compCurStmt);
2881 /* Re-link the nodes for this statement */
2882 fgSetStmtSeq(compCurStmt);
2884 // Since the whole statement gets replaced it is safe to
2885 // re-thread and update order. No need to compute costs again.
2886 *pStmtInfoDirty = false;
2888 /* Compute the live set for the new statement */
2894 /* No side effects, most likely we forgot to reset some flags */
2895 fgRemoveStmt(compCurBB, compCurStmt);
2902 /* If this is GT_CATCH_ARG saved to a local var don't bother */
2904 JITDUMP("removing stmt with no side effects\n");
// GT_CATCH_ARG with an order side effect must still be preserved — route it
// through the side-effect extraction path above.
2906 if (asgNode->gtFlags & GTF_ORDER_SIDEEFF)
2908 if (rhsNode->gtOper == GT_CATCH_ARG)
2910 goto EXTRACT_SIDE_EFFECTS;
2914 /* No side effects - remove the whole statement from the block->bbTreeList */
2916 fgRemoveStmt(compCurBB, compCurStmt);
2918 /* Since we removed it do not process the rest (i.e. RHS) of the statement
2919 * variables in the RHS will not be marked as live, so we get the benefit of
2920 * propagating dead variables up the chain */
2927 /* This is an INTERIOR STATEMENT with a dead assignment - remove it */
2929 noway_assert(!VarSetOps::IsMember(this, life, varDsc->lvVarIndex));
2931 if (rhsNode->gtFlags & GTF_SIDE_EFFECT)
2933 /* :-( we have side effects */
2935 GenTree* sideEffList = nullptr;
2939 printf("BB%02u - INTERIOR dead assignment has side effects...\n", compCurBB->bbNum);
2940 gtDispTree(asgNode);
2944 gtExtractSideEffList(rhsNode, &sideEffList);
2948 goto NO_SIDE_EFFECTS;
2951 noway_assert(sideEffList->gtFlags & GTF_SIDE_EFFECT);
2955 printf("Extracted side effects list from condition...\n");
2956 gtDispTree(sideEffList);
// If the extracted list has the same oper as the assignment node, reuse the
// node in place by copying the list's operands and type into it.
2960 if (sideEffList->gtOper == asgNode->gtOper)
2962 fgUpdateRefCntForExtract(asgNode, sideEffList);
2966 asgNode->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
2967 asgNode->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
2968 asgNode->gtType = sideEffList->gtType;
2972 fgUpdateRefCntForExtract(asgNode, sideEffList);
2976 /* Change the node to a GT_COMMA holding the side effect list */
2977 asgNode->gtBashToNOP();
2979 asgNode->ChangeOper(GT_COMMA);
2980 asgNode->gtFlags |= sideEffList->gtFlags & GTF_ALL_EFFECT;
2982 if (sideEffList->gtOper == GT_COMMA)
2984 asgNode->gtOp.gtOp1 = sideEffList->gtOp.gtOp1;
2985 asgNode->gtOp.gtOp2 = sideEffList->gtOp.gtOp2;
2989 asgNode->gtOp.gtOp1 = sideEffList;
2990 asgNode->gtOp.gtOp2 = gtNewNothingNode();
3000 printf("\nRemoving tree ");
3001 printTreeID(asgNode);
3002 printf(" in BB%02u as useless\n", compCurBB->bbNum);
3003 gtDispTree(asgNode);
3007 /* No side effects - Remove the interior statement */
3008 fgUpdateRefCntForExtract(asgNode, nullptr);
3010 /* Change the assignment to a GT_NOP node */
3012 asgNode->gtBashToNOP();
3019 /* Re-link the nodes for this statement - Do not update ordering! */
3021 // Do not update costs by calling gtSetStmtInfo. fgSetStmtSeq modifies
3022 // the tree threading based on the new costs. Removing nodes could
3023 // cause a subtree to get evaluated first (earlier second) during the
3024 // liveness walk. Instead just set a flag that costs are dirty and
3025 // caller has to call gtSetStmtInfo.
3026 *pStmtInfoDirty = true;
3028 fgSetStmtSeq(compCurStmt);
3030 /* Continue analysis from this node */
3040 /*****************************************************************************
3042 * Iterative data flow for live variable info and availability of range
3043 * check index expressions.
// fgInterBlockLocalVarLiveness: driver for global (inter-block) liveness.
// Runs the iterative dataflow (fgLiveVarAnalysis), extends lifetimes for
// debuggable code, marks EH-live variables as do-not-enregister / must-init,
// then walks every block backwards computing per-node liveness (fgComputeLife
// for statement-form blocks, fgComputeLifeLIR for LIR blocks), removing dead
// stores and updating bbLiveIn when liveness shrinks.
3045 void Compiler::fgInterBlockLocalVarLiveness()
3050 printf("*************** In fgInterBlockLocalVarLiveness()\n");
3054 /* This global flag is set whenever we remove a statement */
3056 fgStmtRemoved = false;
3058 // keep track if a bbLiveIn changed due to dead store removal
3059 fgLocalVarLivenessChanged = false;
3061 /* Compute the IN and OUT sets for tracked variables */
3063 fgLiveVarAnalysis();
3065 /* For debuggable code, we mark vars as live over their entire
3066 * reported scope, so that it will be visible over the entire scope
3069 if (opts.compDbgCode && (info.compVarScopesCount > 0))
3071 fgExtendDbgLifetimes();
3074 // Nothing more to be done if the backend does not require accurate local var lifetimes.
3075 if (!backendRequiresLocalVarLifetimes())
3077 fgLocalVarLivenessDone = true;
3081 /*-------------------------------------------------------------------------
3082 * Variables involved in exception-handlers and finally blocks need
3083 * to be specially marked
3087 VARSET_TP exceptVars(VarSetOps::MakeEmpty(this)); // vars live on entry to a handler
3088 VARSET_TP finallyVars(VarSetOps::MakeEmpty(this)); // vars live on exit of a 'finally' block
3089 VARSET_TP filterVars(VarSetOps::MakeEmpty(this)); // vars live on exit from a 'filter'
3091 for (block = fgFirstBB; block; block = block->bbNext)
3093 if (block->bbCatchTyp != BBCT_NONE)
3095 /* Note the set of variables live on entry to exception handler */
3097 VarSetOps::UnionD(this, exceptVars, block->bbLiveIn)
3100 if (block->bbJumpKind == BBJ_EHFILTERRET)
3102 /* Get the set of live variables on exit from a 'filter' */
3103 VarSetOps::UnionD(this, filterVars, block->bbLiveOut);
3105 else if (block->bbJumpKind == BBJ_EHFINALLYRET)
3107 /* Get the set of live variables on exit from a 'finally' block */
3109 VarSetOps::UnionD(this, finallyVars, block->bbLiveOut);
3111 #if FEATURE_EH_FUNCLETS
3112 // Funclets are called and returned from, as such we can only count on the frame
3113 // pointer being restored, and thus everything live in or live out must be on the
3115 if (block->bbFlags & BBF_FUNCLET_BEG)
3117 VarSetOps::UnionD(this, exceptVars, block->bbLiveIn);
3119 if ((block->bbJumpKind == BBJ_EHFINALLYRET) || (block->bbJumpKind == BBJ_EHFILTERRET) ||
3120 (block->bbJumpKind == BBJ_EHCATCHRET))
3122 VarSetOps::UnionD(this, exceptVars, block->bbLiveOut);
3124 #endif // FEATURE_EH_FUNCLETS
3130 for (varNum = 0, varDsc = lvaTable; varNum < lvaCount; varNum++, varDsc++)
3132 /* Ignore the variable if it's not tracked */
3134 if (!varDsc->lvTracked)
3139 if (lvaIsFieldOfDependentlyPromotedStruct(varDsc))
3144 /* Un-init locals may need auto-initialization. Note that the
3145 liveness of such locals will bubble to the top (fgFirstBB)
3146 in fgInterBlockLocalVarLiveness() */
3148 if (!varDsc->lvIsParam && VarSetOps::IsMember(this, fgFirstBB->bbLiveIn, varDsc->lvVarIndex) &&
3149 (info.compInitMem || varTypeIsGC(varDsc->TypeGet())))
3151 varDsc->lvMustInit = true;
3154 // Mark all variables that are live on entry to an exception handler
3155 // or on exit from a filter handler or finally as DoNotEnregister */
3157 if (VarSetOps::IsMember(this, exceptVars, varDsc->lvVarIndex) ||
3158 VarSetOps::IsMember(this, filterVars, varDsc->lvVarIndex))
3160 /* Mark the variable appropriately */
3161 lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LiveInOutOfHandler));
3164 /* Mark all pointer variables live on exit from a 'finally'
3165 block as either volatile for non-GC ref types or as
3166 'explicitly initialized' (volatile and must-init) for GC-ref types */
3168 if (VarSetOps::IsMember(this, finallyVars, varDsc->lvVarIndex))
3170 lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LiveInOutOfHandler));
3172 /* Don't set lvMustInit unless we have a non-arg, GC pointer */
3174 if (varDsc->lvIsParam)
3179 if (!varTypeIsGC(varDsc->TypeGet()))
3185 varDsc->lvMustInit = true;
3189 /*-------------------------------------------------------------------------
3190 * Now fill in liveness info within each basic block - Backward DataFlow
3193 // This is used in the liveness computation, as a temporary.
3194 VarSetOps::AssignNoCopy(this, fgMarkIntfUnionVS, VarSetOps::MakeEmpty(this));
3196 for (block = fgFirstBB; block; block = block->bbNext)
3198 /* Tell everyone what block we're working on */
3202 /* Remember those vars live on entry to exception handlers */
3203 /* if we are part of a try block */
3205 VARSET_TP volatileVars(VarSetOps::MakeEmpty(this));
3207 if (ehBlockHasExnFlowDsc(block))
3209 VarSetOps::Assign(this, volatileVars, fgGetHandlerLiveVars(block));
3211 // volatileVars is a subset of exceptVars
3212 noway_assert(VarSetOps::IsSubset(this, volatileVars, exceptVars));
3215 /* Start with the variables live on exit from the block */
3217 VARSET_TP life(VarSetOps::MakeCopy(this, block->bbLiveOut));
3219 /* Mark any interference we might have at the end of the block */
// Statement-form (HIR) blocks are walked statement-by-statement backwards;
// LIR blocks are handled by fgComputeLifeLIR in the #else path below.
3223 if (!block->IsLIR())
3225 /* Get the first statement in the block */
3227 GenTree* firstStmt = block->FirstNonPhiDef();
3234 /* Walk all the statements of the block backwards - Get the LAST stmt */
3236 GenTree* nextStmt = block->bbTreeList->gtPrev;
3241 bool treeModf = false;
3243 noway_assert(nextStmt);
3244 noway_assert(nextStmt->gtOper == GT_STMT);
3246 compCurStmt = nextStmt;
3247 nextStmt = nextStmt->gtPrev;
3249 /* Compute the liveness for each tree node in the statement */
3250 bool stmtInfoDirty = false;
3252 fgComputeLife(life, compCurStmt->gtStmt.gtStmtExpr, nullptr, volatileVars,
3253 &stmtInfoDirty DEBUGARG(&treeModf));
// fgComputeLife deferred cost recomputation; do it now that the statement's
// backward walk is complete (see *pStmtInfoDirty in fgRemoveDeadStore).
3257 gtSetStmtInfo(compCurStmt);
3258 fgSetStmtSeq(compCurStmt);
3259 gtUpdateStmtSideEffects(compCurStmt);
3263 if (verbose && treeModf)
3265 printf("\nfgComputeLife modified tree:\n");
3266 gtDispTree(compCurStmt->gtStmt.gtStmtExpr);
3270 } while (compCurStmt != firstStmt);
3274 #ifdef LEGACY_BACKEND
3276 #else // !LEGACY_BACKEND
3277 fgComputeLifeLIR(life, block, volatileVars);
3278 #endif // !LEGACY_BACKEND
3281 /* Done with the current block - if we removed any statements, some
3282 * variables may have become dead at the beginning of the block
3283 * -> have to update bbLiveIn */
3285 if (!VarSetOps::Equal(this, life, block->bbLiveIn))
3287 /* some variables have become dead all across the block
3288 So life should be a subset of block->bbLiveIn */
3290 // We changed the liveIn of the block, which may affect liveOut of others,
3291 // which may expose more dead stores.
3292 fgLocalVarLivenessChanged = true;
3294 noway_assert(VarSetOps::IsSubset(this, life, block->bbLiveIn));
3296 /* set the new bbLiveIn */
3298 VarSetOps::Assign(this, block->bbLiveIn, life);
3300 /* compute the new bbLiveOut for all the predecessors of this block */
3303 noway_assert(compCurBB == block);
3305 compCurBB = nullptr;
3309 fgLocalVarLivenessDone = true;
3314 /*****************************************************************************/
// fgDispBBLiveness: dump (via printf) the live-in and live-out tracked-variable
// sets for 'block', each followed by any live memory kinds (e.g. GcHeap).
3316 void Compiler::fgDispBBLiveness(BasicBlock* block)
3318 VARSET_TP allVars(VarSetOps::Union(this, block->bbLiveIn, block->bbLiveOut));
3319 printf("BB%02u", block->bbNum);
3320 printf(" IN (%d)=", VarSetOps::Count(this, block->bbLiveIn));
3321 lvaDispVarSet(block->bbLiveIn, allVars);
// Append the memory kinds that are live into the block.
3322 for (MemoryKind memoryKind : allMemoryKinds())
3324 if ((block->bbMemoryLiveIn & memoryKindSet(memoryKind)) != 0)
3326 printf(" + %s", memoryKindNames[memoryKind]);
3329 printf("\n OUT(%d)=", VarSetOps::Count(this, block->bbLiveOut));
3330 lvaDispVarSet(block->bbLiveOut, allVars);
// Append the memory kinds that are live out of the block.
3331 for (MemoryKind memoryKind : allMemoryKinds())
3333 if ((block->bbMemoryLiveOut & memoryKindSet(memoryKind)) != 0)
3335 printf(" + %s", memoryKindNames[memoryKind]);
// fgDispBBLiveness (no-arg overload): dump the liveness sets of every basic
// block in the flowgraph, in bbNext order.
3341 void Compiler::fgDispBBLiveness()
3343 for (BasicBlock* block = fgFirstBB; block; block = block->bbNext)
3345 fgDispBBLiveness(block);