1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
12 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
19 #if MEASURE_BLOCK_SIZE
// Instrumentation counters (compiled in only when MEASURE_BLOCK_SIZE is set):
// total bytes and total count of allocated BasicBlocks.
21 size_t BasicBlock::s_Size;
23 size_t BasicBlock::s_Count;
24 #endif // MEASURE_BLOCK_SIZE
27 // The max # of tree nodes in any BB
29 unsigned BasicBlock::s_nMaxTrees;
// ShuffleHelper: pseudo-randomly reorder a flowList based on 'hash', used for
// SSA stress testing (see SsaStressHashHelper). Returns the (possibly new) head.
// NOTE(review): this listing appears elided (gaps in line numbering); the loop
// body below is incomplete — 'head' is declared in lines not shown here.
33 flowList* ShuffleHelper(unsigned hash, flowList* res)
36 for (flowList *prev = nullptr; res != nullptr; prev = res, res = res->flNext)
// Mix the hash with the block number to get a per-node pseudo-random bit.
38 unsigned blkHash = (hash ^ (res->flBlock->bbNum << 16) ^ res->flBlock->bbNum);
39 if (((blkHash % 1879) & 1) && prev != nullptr)
41 // Swap res with head.
43 jitstd::swap(head->flNext, res->flNext);
44 jitstd::swap(head, res);
// SsaStressHashHelper: returns the SSA stress hash configured via JitConfig.
// NOTE(review): the branches selecting between the three modes are elided in
// this listing; only the mode comment and the return expressions are visible.
50 unsigned SsaStressHashHelper()
52 // hash = 0: turned off, hash = 1: use method hash, hash = *: use custom hash.
53 unsigned hash = JitConfig.JitSsaStress();
// Mode 1: use the current method's hash.
61 return JitTls::GetCompiler()->info.compMethodHash();
// Custom hash: if the high 16 bits are zero, replicate the low 16 bits into them.
63 return ((hash >> 16) == 0) ? ((hash << 16) | hash) : hash;
// EHSuccessorIter constructor: positions the iterator at the first EH successor
// of 'block' — either an exception-flow try region enclosing the block itself,
// or (failing that) a try region started by one of its regular successors.
67 EHSuccessorIter::EHSuccessorIter(Compiler* comp, BasicBlock* block)
70 , m_curRegSucc(nullptr)
71 , m_curTry(comp->ehGetBlockExnFlowDsc(block))
72 , m_remainingRegSuccs(block->NumSucc(comp))
74 // If "block" is a "leave helper" block (the empty BBJ_ALWAYS block that pairs with a
75 // preceding BBJ_CALLFINALLY block to implement a "leave" IL instruction), then no exceptions
76 // can occur within it, so clear m_curTry if it's non-null.
77 if (m_curTry != nullptr)
79 BasicBlock* beforeBlock = block->bbPrev;
80 if (beforeBlock != nullptr && beforeBlock->isBBCallAlwaysPair())
// No enclosing exception-flow try: fall back to try regions begun by successors.
86 if (m_curTry == nullptr && m_remainingRegSuccs > 0)
88 // Examine the successors to see if any are the start of try blocks.
// FindNextRegSuccTry: advance over the remaining regular successors of m_block,
// stopping at the first one that begins a try region whose handler has not
// already been yielded (i.e. a try that does not contain m_block).
93 void EHSuccessorIter::FindNextRegSuccTry()
95 assert(m_curTry == nullptr);
97 // Must now consider the next regular successor, if any.
98 while (m_remainingRegSuccs > 0)
// Successors are consumed from the back: decrement, then index.
100 m_remainingRegSuccs--;
101 m_curRegSucc = m_block->GetSucc(m_remainingRegSuccs, m_comp);
102 if (m_comp->bbIsTryBeg(m_curRegSucc))
104 assert(m_curRegSucc->hasTryIndex()); // Since it is a try begin.
105 unsigned newTryIndex = m_curRegSucc->getTryIndex();
107 // If the try region started by "m_curRegSucc" (represented by newTryIndex) contains m_block,
108 // we've already yielded its handler, as one of the EH handler successors of m_block itself.
109 if (m_comp->bbInExnFlowRegions(newTryIndex, m_block))
114 // Otherwise, consider this try.
115 m_curTry = m_comp->ehGetDsc(newTryIndex);
// operator++: advance to the next EH successor — first walk outward through
// enclosing try regions, then move on to try regions begun by the remaining
// regular successors (via FindNextRegSuccTry).
121 void EHSuccessorIter::operator++(void)
123 assert(m_curTry != nullptr);
124 if (m_curTry->ebdEnclosingTryIndex != EHblkDsc::NO_ENCLOSING_INDEX)
126 m_curTry = m_comp->ehGetDsc(m_curTry->ebdEnclosingTryIndex);
128 // If we've gone over into considering try's containing successors,
129 // then the enclosing try must have the successor as its first block.
130 if (m_curRegSucc == nullptr || m_curTry->ebdTryBeg == m_curRegSucc)
135 // Otherwise, give up, try the next regular successor.
143 // We've exhausted all try blocks.
144 // See if there are any remaining regular successors that start try blocks.
145 FindNextRegSuccTry();
148 BasicBlock* EHSuccessorIter::operator*()
150 assert(m_curTry != nullptr);
151 return m_curTry->ExFlowBlock();
// BlockPredsWithEH: returns the predecessor list of 'blk' including EH flow
// predecessors. Results are memoized in the BlockToEHPreds map. For a block
// that is an exception-flow target, the preds are the preds of the try's first
// block plus every block in the try's exception-flow regions (excluding the
// paired BBJ_ALWAYS halves of BBJ_CALLFINALLY/BBJ_ALWAYS pairs).
// NOTE(review): several closing braces / early-return lines are elided in this
// listing; code is kept byte-identical.
154 flowList* Compiler::BlockPredsWithEH(BasicBlock* blk)
156 BlockToFlowListMap* ehPreds = GetBlockToEHPreds();
// Cache hit: return the previously computed list.
158 if (ehPreds->Lookup(blk, &res))
165 if (bbIsExFlowBlock(blk, &tryIndex))
167 // Find the first block of the try.
168 EHblkDsc* ehblk = ehGetDsc(tryIndex);
169 BasicBlock* tryStart = ehblk->ebdTryBeg;
170 for (flowList* tryStartPreds = tryStart->bbPreds; tryStartPreds != nullptr;
171 tryStartPreds = tryStartPreds->flNext)
173 res = new (this, CMK_FlowList) flowList(tryStartPreds->flBlock, res);
175 #if MEASURE_BLOCK_SIZE
177 genFlowNodeSize += sizeof(flowList);
178 #endif // MEASURE_BLOCK_SIZE
181 // Now add all blocks handled by this handler (except for second blocks of BBJ_CALLFINALLY/BBJ_ALWAYS pairs;
182 // these cannot cause transfer to the handler...)
183 BasicBlock* prevBB = nullptr;
185 // TODO-Throughput: It would be nice if we could iterate just over the blocks in the try, via
187 // for (BasicBlock* bb = ehblk->ebdTryBeg; bb != ehblk->ebdTryLast->bbNext; bb = bb->bbNext)
188 // (plus adding in any filter blocks outside the try whose exceptions are handled here).
189 // That doesn't work, however: funclets have caused us to sometimes split the body of a try into
190 // more than one sequence of contiguous blocks. We need to find a better way to do this.
191 for (BasicBlock *bb = fgFirstBB; bb != nullptr; prevBB = bb, bb = bb->bbNext)
193 if (bbInExnFlowRegions(tryIndex, bb) && (prevBB == nullptr || !prevBB->isBBCallAlwaysPair()))
195 res = new (this, CMK_FlowList) flowList(bb, res);
197 #if MEASURE_BLOCK_SIZE
199 genFlowNodeSize += sizeof(flowList);
200 #endif // MEASURE_BLOCK_SIZE
// SSA stress: optionally shuffle the list so SSA construction order varies.
205 unsigned hash = SsaStressHashHelper();
208 res = ShuffleHelper(hash, res);
// Memoize the computed list before returning.
212 ehPreds->Set(blk, res);
219 //------------------------------------------------------------------------
220 // dspBlockILRange(): Display the block's IL range as [XXX...YYY), where XXX and YYY might be "???" for BAD_IL_OFFSET.
// (Debug-dump helper; the "???" printing branches are elided in this listing.)
222 void BasicBlock::dspBlockILRange()
224 if (bbCodeOffs != BAD_IL_OFFSET)
226 printf("[%03X..", bbCodeOffs);
234 if (bbCodeOffsEnd != BAD_IL_OFFSET)
236 // brace-matching editor workaround for following line: (
237 printf("%03X)", bbCodeOffsEnd);
241 // brace-matching editor workaround for following line: (
247 //------------------------------------------------------------------------
248 // dspFlags: Print out the block's flags
// Debug-dump helper: one test per BBF_* flag; the printf for each flag's
// mnemonic is elided in this listing (only the conditions are visible).
250 void BasicBlock::dspFlags()
252 if (bbFlags & BBF_VISITED)
256 if (bbFlags & BBF_MARKED)
260 if (bbFlags & BBF_CHANGED)
264 if (bbFlags & BBF_REMOVED)
268 if (bbFlags & BBF_DONT_REMOVE)
272 if (bbFlags & BBF_IMPORTED)
276 if (bbFlags & BBF_INTERNAL)
280 if (bbFlags & BBF_FAILED_VERIFICATION)
284 if (bbFlags & BBF_TRY_BEG)
288 if (bbFlags & BBF_NEEDS_GCPOLL)
292 if (bbFlags & BBF_RUN_RARELY)
296 if (bbFlags & BBF_LOOP_HEAD)
300 if (bbFlags & BBF_LOOP_CALL0)
304 if (bbFlags & BBF_LOOP_CALL1)
308 if (bbFlags & BBF_HAS_LABEL)
312 if (bbFlags & BBF_JMP_TARGET)
316 if (bbFlags & BBF_HAS_JMP)
320 if (bbFlags & BBF_GC_SAFE_POINT)
324 if (bbFlags & BBF_FUNCLET_BEG)
328 if (bbFlags & BBF_HAS_IDX_LEN)
332 if (bbFlags & BBF_HAS_NEWARRAY)
336 if (bbFlags & BBF_HAS_NEWOBJ)
// BBF_FINALLY_TARGET exists only for ARM funclet model.
340 #if FEATURE_EH_FUNCLETS && defined(_TARGET_ARM_)
341 if (bbFlags & BBF_FINALLY_TARGET)
345 #endif // FEATURE_EH_FUNCLETS && defined(_TARGET_ARM_)
346 if (bbFlags & BBF_BACKWARD_JUMP)
350 if (bbFlags & BBF_RETLESS_CALL)
354 if (bbFlags & BBF_LOOP_PREHEADER)
358 if (bbFlags & BBF_COLD)
362 if (bbFlags & BBF_PROF_WEIGHT)
// Flag meaning differs between the legacy and Ryu backends.
366 #ifdef LEGACY_BACKEND
367 if (bbFlags & BBF_FORWARD_SWITCH)
371 #else // !LEGACY_BACKEND
372 if (bbFlags & BBF_IS_LIR)
376 #endif // LEGACY_BACKEND
377 if (bbFlags & BBF_KEEP_BBJ_ALWAYS)
381 if (bbFlags & BBF_CLONED_FINALLY_BEGIN)
385 if (bbFlags & BBF_CLONED_FINALLY_END)
391 /*****************************************************************************
393 * Display the bbPreds basic block list (the block predecessors).
394 * Returns the number of characters printed.
// NOTE(review): the 'count' accumulation lines are elided in this listing.
397 unsigned BasicBlock::dspPreds()
400 for (flowList* pred = bbPreds; pred != nullptr; pred = pred->flNext)
407 printf("BB%02u", pred->flBlock->bbNum);
410 // Account for %02u only handling 2 digits, but we can display more than that.
411 unsigned digits = CountDigits(pred->flBlock->bbNum);
417 // Does this predecessor have an interesting dup count? If so, display it.
418 if (pred->flDupCount > 1)
420 printf("(%u)", pred->flDupCount);
421 count += 2 + CountDigits(pred->flDupCount);
427 /*****************************************************************************
429 * Display the bbCheapPreds basic block list (the block predecessors).
430 * Returns the number of characters printed.
// Same shape as dspPreds() but walks the "cheap preds" (BasicBlockList) form.
433 unsigned BasicBlock::dspCheapPreds()
436 for (BasicBlockList* pred = bbCheapPreds; pred != nullptr; pred = pred->next)
443 printf("BB%02u", pred->block->bbNum);
446 // Account for %02u only handling 2 digits, but we can display more than that.
447 unsigned digits = CountDigits(pred->block->bbNum);
456 /*****************************************************************************
458 * Display the basic block successors.
459 * Returns the count of successors.
462 unsigned BasicBlock::dspSuccs(Compiler* compiler)
464 unsigned numSuccs = NumSucc(compiler);
466 for (unsigned i = 0; i < numSuccs; i++)
// Comma-separate after the first successor.
468 printf("%s", (count == 0) ? "" : ",");
469 printf("BB%02u", GetSucc(i, compiler)->bbNum);
475 // Display a compact representation of the bbJumpKind, that is, where this block branches.
476 // This is similar to code in Compiler::fgTableDispBasicBlock(), but doesn't have that code's requirements to align
// NOTE(review): the switch header and several case labels/prints are elided in
// this listing; only a subset of the cases is visible below.
478 void BasicBlock::dspJumpKind()
482 case BBJ_EHFINALLYRET:
486 case BBJ_EHFILTERRET:
491 printf(" -> BB%02u (cret)", bbJumpDest->bbNum);
503 // For fall-through blocks, print nothing.
// BBJ_ALWAYS: distinguish blocks that must be kept (paired with CALLFINALLY).
507 if (bbFlags & BBF_KEEP_BBJ_ALWAYS)
509 printf(" -> BB%02u (ALWAYS)", bbJumpDest->bbNum);
513 printf(" -> BB%02u (always)", bbJumpDest->bbNum);
518 printf(" -> BB%02u (leave)", bbJumpDest->bbNum);
521 case BBJ_CALLFINALLY:
522 printf(" -> BB%02u (callf)", bbJumpDest->bbNum);
526 printf(" -> BB%02u (cond)", bbJumpDest->bbNum);
// Switch: print the whole jump table, space before first entry, commas after.
533 jumpCnt = bbJumpSwt->bbsCount;
534 BasicBlock** jumpTab;
535 jumpTab = bbJumpSwt->bbsDstTab;
538 printf("%cBB%02u", (jumpTab == bbJumpSwt->bbsDstTab) ? ' ' : ',', (*jumpTab)->bbNum);
539 } while (++jumpTab, --jumpCnt);
// dspBlockHeader: print a one-line debug header for the block: number, and
// optionally its jump kind, flags, and predecessor list (cheap or full form,
// depending on which is currently valid). Body largely elided in this listing.
550 void BasicBlock::dspBlockHeader(Compiler* compiler,
551 bool showKind /*= true*/,
552 bool showFlags /*= false*/,
553 bool showPreds /*= true*/)
555 printf("BB%02u ", bbNum);
// Choose which predecessor representation to dump.
564 if (compiler->fgCheapPredsValid)
// bbFlags is 64 bits; print as high.low 32-bit halves.
578 const unsigned lowFlags = (unsigned)bbFlags;
579 const unsigned highFlags = (unsigned)(bbFlags >> 32);
580 printf(" flags=0x%08x.%08x: ", highFlags, lowFlags);
586 const char* BasicBlock::dspToString(int blockNumPadding /* = 2*/)
588 static char buffers[3][64]; // static array of 3 to allow 3 concurrent calls in one printf()
589 static int nextBufferIndex = 0;
591 auto& buffer = buffers[nextBufferIndex];
592 nextBufferIndex = (nextBufferIndex + 1) % _countof(buffers);
593 _snprintf_s(buffer, _countof(buffer), _countof(buffer), "BB%02u%*s [%04u]", bbNum, blockNumPadding, "", bbID);
599 // Allocation function for MemoryPhiArg.
600 void* BasicBlock::MemoryPhiArg::operator new(size_t sz, Compiler* comp)
602 return comp->compGetMem(sz, CMK_MemoryPhiArg);
605 //------------------------------------------------------------------------
606 // CloneBlockState: Try to populate `to` block with a copy of `from` block's statements, replacing
607 // uses of local `varNum` with IntCns `varVal`.
// Arguments:
610 // compiler - Jit compiler instance
611 // to - New/empty block to copy statements into
612 // from - Block to copy statements from
613 // varNum - lclVar uses with lclNum `varNum` will be replaced; can be ~0 to indicate no replacement.
614 // varVal - If replacing uses of `varNum`, replace them with int constants with value `varVal`.
// Return Value:
617 // Cloning may fail because this routine uses `gtCloneExpr` for cloning and it can't handle all
618 // IR nodes. If cloning of any statement fails, `false` will be returned and block `to` may be
619 // partially populated. If cloning of all statements succeeds, `true` will be returned and
620 // block `to` will be fully populated.
622 bool BasicBlock::CloneBlockState(
623 Compiler* compiler, BasicBlock* to, const BasicBlock* from, unsigned varNum, int varVal)
625 assert(to->bbTreeList == nullptr);
// Copy the per-block bookkeeping state wholesale before cloning statements.
627 to->bbFlags = from->bbFlags;
628 to->bbWeight = from->bbWeight;
629 BlockSetOps::AssignAllowUninitRhs(compiler, to->bbReach, from->bbReach);
630 to->copyEHRegion(from);
631 to->bbCatchTyp = from->bbCatchTyp;
632 to->bbRefs = from->bbRefs;
633 to->bbStkTempsIn = from->bbStkTempsIn;
634 to->bbStkTempsOut = from->bbStkTempsOut;
635 to->bbStkDepth = from->bbStkDepth;
636 to->bbCodeOffs = from->bbCodeOffs;
637 to->bbCodeOffsEnd = from->bbCodeOffsEnd;
638 VarSetOps::AssignAllowUninitRhs(compiler, to->bbScope, from->bbScope);
639 #if FEATURE_STACK_FP_X87
640 to->bbFPStateX87 = from->bbFPStateX87;
641 #endif // FEATURE_STACK_FP_X87
642 to->bbNatLoopNum = from->bbNatLoopNum;
644 to->bbLoopNum = from->bbLoopNum;
645 to->bbTgtStkDepth = from->bbTgtStkDepth;
// Clone each statement in order, substituting varNum uses with varVal.
648 for (GenTree* fromStmt = from->bbTreeList; fromStmt != nullptr; fromStmt = fromStmt->gtNext)
650 auto newExpr = compiler->gtCloneExpr(fromStmt->gtStmt.gtStmtExpr, 0, varNum, varVal);
653 // gtCloneExpr doesn't handle all opcodes, so may fail to clone a statement.
654 // When that happens, it returns nullptr; abandon the rest of this block and
655 // return `false` to the caller to indicate that cloning was unsuccessful.
658 compiler->fgInsertStmtAtEnd(to, compiler->fgNewStmtFromTree(newExpr));
// MakeLIR: switch this block to LIR form, recording its first/last nodes and
// setting BBF_IS_LIR. Not available under the legacy backend (that arm of the
// #ifdef is elided in this listing).
664 void BasicBlock::MakeLIR(GenTree* firstNode, GenTree* lastNode)
666 #ifdef LEGACY_BACKEND
668 #else // !LEGACY_BACKEND
// Either both endpoints are null (empty range) or both are set and ordered.
670 assert((firstNode == nullptr) == (lastNode == nullptr));
671 assert((firstNode == lastNode) || firstNode->Precedes(lastNode));
673 m_firstNode = firstNode;
674 m_lastNode = lastNode;
675 bbFlags |= BBF_IS_LIR;
676 #endif // LEGACY_BACKEND
// IsLIR: returns whether this block is in LIR form (BBF_IS_LIR set). The flag
// is cross-checked against the representation of bbTreeList. The legacy-backend
// arm and the return statement are elided in this listing.
679 bool BasicBlock::IsLIR()
681 #ifdef LEGACY_BACKEND
683 #else // !LEGACY_BACKEND
684 const bool isLIR = (bbFlags & BBF_IS_LIR) != 0;
// Sanity: an LIR block's list must not be statement-form, and vice versa.
685 assert((bbTreeList == nullptr) || ((isLIR) == !bbTreeList->IsStatement()));
687 #endif // LEGACY_BACKEND
690 //------------------------------------------------------------------------
691 // firstStmt: Returns the first statement in the block
697 // The first statement in the block's bbTreeList.
699 GenTreeStmt* BasicBlock::firstStmt() const
701 if (bbTreeList == nullptr)
706 return bbTreeList->AsStmt();
709 //------------------------------------------------------------------------
710 // lastStmt: Returns the last statement in the block
716 // The last statement in the block's bbTreeList.
718 GenTreeStmt* BasicBlock::lastStmt() const
720 if (bbTreeList == nullptr)
725 GenTree* result = bbTreeList->gtPrev;
726 assert(result && result->gtNext == nullptr);
727 return result->AsStmt();
730 //------------------------------------------------------------------------
731 // BasicBlock::firstNode: Returns the first node in the block.
733 GenTree* BasicBlock::firstNode()
735 return IsLIR() ? bbTreeList : Compiler::fgGetFirstNode(firstStmt()->gtStmtExpr);
738 //------------------------------------------------------------------------
739 // BasicBlock::lastNode: Returns the last node in the block.
741 GenTree* BasicBlock::lastNode()
743 return IsLIR() ? m_lastNode : lastStmt()->gtStmtExpr;
746 //------------------------------------------------------------------------
747 // GetUniquePred: Returns the unique predecessor of a block, if one exists.
748 // The predecessor lists must be accurate.
754 // The unique predecessor of a block, or nullptr if there is no unique predecessor.
757 // If the first block has a predecessor (which it may have, if it is the target of
758 // a backedge), we never want to consider it "unique" because the prolog is an
759 // implicit predecessor.
761 BasicBlock* BasicBlock::GetUniquePred(Compiler* compiler)
763 if ((bbPreds == nullptr) || (bbPreds->flNext != nullptr) || (this == compiler->fgFirstBB))
769 return bbPreds->flBlock;
773 //------------------------------------------------------------------------
774 // GetUniqueSucc: Returns the unique successor of a block, if one exists.
775 // Only considers BBJ_ALWAYS and BBJ_NONE block types.
781 // The unique successor of a block, or nullptr if there is no unique successor.
783 BasicBlock* BasicBlock::GetUniqueSucc()
785 if (bbJumpKind == BBJ_ALWAYS)
789 else if (bbJumpKind == BBJ_NONE)
// Sentinel (non-dereferenceable) pointer value marking an "empty" memory phi
// definition; distinct from nullptr and from any real MemoryPhiArg address.
800 BasicBlock::MemoryPhiArg* BasicBlock::EmptyMemoryPhiDef = (BasicBlock::MemoryPhiArg*)0x1;
// GetHashCode: hash a BasicBlock key. When SSA stress hashing is enabled, the
// block number is mixed with the stress hash; the surrounding condition and
// the default (non-stress) return are elided in this listing.
802 unsigned JitPtrKeyFuncs<BasicBlock>::GetHashCode(const BasicBlock* ptr)
805 unsigned hash = SsaStressHashHelper();
808 return (hash ^ (ptr->bbNum << 16) ^ ptr->bbNum);
// isEmpty: returns whether the block holds no "real" code. Statement-form
// blocks are empty when they have no non-phi statements; LIR blocks are empty
// when every non-phi node is a GT_IL_OFFSET marker. The branch structure and
// return statements are partly elided in this listing.
814 bool BasicBlock::isEmpty()
818 return (this->FirstNonPhiDef() == nullptr);
821 for (GenTree* node : LIR::AsRange(this).NonPhiNodes())
823 if (node->OperGet() != GT_IL_OFFSET)
// FirstNonPhiDef: returns the first statement that is not a phi definition
// (GT_ASG of a GT_PHI, or GT_STORE_LCL_VAR of a GT_PHI), or nullptr when the
// block has only phi definitions. Null-checks / loop-advance lines are elided
// in this listing.
832 GenTreeStmt* BasicBlock::FirstNonPhiDef()
834 GenTree* stmt = bbTreeList;
839 GenTree* tree = stmt->gtStmt.gtStmtExpr;
840 while ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_PHI) ||
841 (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_PHI))
848 tree = stmt->gtStmt.gtStmtExpr;
850 return stmt->AsStmt();
// FirstNonPhiDefOrCatchArgAsg: like FirstNonPhiDef, but additionally skips a
// statement that stores the GT_CATCH_ARG (exception object) into a local.
// The skip/return lines are elided in this listing.
853 GenTree* BasicBlock::FirstNonPhiDefOrCatchArgAsg()
855 GenTree* stmt = FirstNonPhiDef();
860 GenTree* tree = stmt->gtStmt.gtStmtExpr;
861 if ((tree->OperGet() == GT_ASG && tree->gtOp.gtOp2->OperGet() == GT_CATCH_ARG) ||
862 (tree->OperGet() == GT_STORE_LCL_VAR && tree->gtOp.gtOp1->OperGet() == GT_CATCH_ARG))
869 /*****************************************************************************
871 * Mark a block as rarely run, we also don't want to have a loop in a
872 * rarely run block, and we set it's weight to zero.
875 void BasicBlock::bbSetRunRarely()
877 setBBWeight(BB_ZERO_WEIGHT);
878 if (bbWeight == BB_ZERO_WEIGHT)
880 bbFlags |= BBF_RUN_RARELY; // This block is never/rarely run
884 /*****************************************************************************
886 * Can a BasicBlock be inserted after this without altering the flowgraph
// Switch on bbJumpKind; most case labels and their returns are elided in this
// listing — only the EH-return group, CALLFINALLY, and the default are visible.
889 bool BasicBlock::bbFallsThrough()
895 case BBJ_EHFINALLYRET:
896 case BBJ_EHFILTERRET:
908 case BBJ_CALLFINALLY:
// A retless BBJ_CALLFINALLY does not fall through to its paired BBJ_ALWAYS.
909 return ((bbFlags & BBF_RETLESS_CALL) == 0);
912 assert(!"Unknown bbJumpKind in bbFallsThrough()");
917 //------------------------------------------------------------------------
918 // NumSucc: Returns the count of block successors. See the declaration comment for details.
// Return Value:
924 // Count of block successors.
// (No-Compiler overload: treats EH-return kinds specially; most case bodies
// are elided in this listing.)
926 unsigned BasicBlock::NumSucc()
932 case BBJ_EHFINALLYRET:
933 case BBJ_EHFILTERRET:
936 case BBJ_CALLFINALLY:
// BBJ_COND where the jump target equals the fall-through has one distinct successor.
944 if (bbJumpDest == bbNext)
954 return bbJumpSwt->bbsCount;
961 //------------------------------------------------------------------------
962 // GetSucc: Returns the requested block successor. See the declaration comment for details.
// Arguments:
965 // i - index of successor to return. 0 <= i <= NumSucc().
// Return Value:
968 // Requested successor block
// (No-Compiler overload; most switch cases are elided in this listing.)
970 BasicBlock* BasicBlock::GetSucc(unsigned i)
972 assert(i < NumSucc()); // Index bounds check.
975 case BBJ_CALLFINALLY:
996 return bbJumpSwt->bbsDstTab[i];
1003 //------------------------------------------------------------------------
1004 // NumSucc: Returns the count of block successors. See the declaration comment for details.
// Arguments:
1007 // comp - Compiler instance
// Return Value:
1010 // Count of block successors.
// (Compiler-aware overload: resolves EH successors and deduplicates switch
// targets; several case bodies/returns are elided in this listing.)
1012 unsigned BasicBlock::NumSucc(Compiler* comp)
1014 assert(comp != nullptr);
1022 case BBJ_EHFINALLYRET:
1024 // The first block of the handler is labelled with the catch type.
1025 BasicBlock* hndBeg = comp->fgFirstBlockOfHandler(this);
1026 if (hndBeg->bbCatchTyp == BBCT_FINALLY)
1028 return comp->fgNSuccsOfFinallyRet(this);
1032 assert(hndBeg->bbCatchTyp == BBCT_FAULT); // We can only BBJ_EHFINALLYRET from FINALLY and FAULT.
1033 // A FAULT block has no successors.
1038 case BBJ_CALLFINALLY:
1040 case BBJ_EHCATCHRET:
1041 case BBJ_EHFILTERRET:
// BBJ_COND collapsing when target equals fall-through, as in NumSucc().
1047 if (bbJumpDest == bbNext)
// Switch: count only the distinct targets via the switch descriptor.
1058 Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
1059 return sd.numDistinctSuccs;
1067 //------------------------------------------------------------------------
1068 // GetSucc: Returns the requested block successor. See the declaration comment for details.
// Arguments:
1071 // i - index of successor to return. 0 <= i <= NumSucc(comp).
1072 // comp - Compiler instance
// Return Value:
1075 // Requested successor block
// (Compiler-aware overload, matching NumSucc(Compiler*); several case
// bodies/returns are elided in this listing.)
1077 BasicBlock* BasicBlock::GetSucc(unsigned i, Compiler* comp)
1079 assert(comp != nullptr);
1081 assert(i < NumSucc(comp)); // Index bounds check.
1084 case BBJ_EHFILTERRET:
1086 // Handler is the (sole) normal successor of the filter.
1087 assert(comp->fgFirstBlockOfHandler(this) == bbJumpDest);
1091 case BBJ_EHFINALLYRET:
1092 // Note: the following call is expensive.
1093 return comp->fgSuccOfFinallyRet(this, i);
1095 case BBJ_CALLFINALLY:
1097 case BBJ_EHCATCHRET:
// Switch: index into the deduplicated successor table.
1117 Compiler::SwitchUniqueSuccSet sd = comp->GetDescriptorForSwitch(this);
1118 assert(i < sd.numDistinctSuccs); // Range check.
1119 return sd.nonDuplicates[i];
1127 void BasicBlock::InitVarSets(Compiler* comp)
1129 VarSetOps::AssignNoCopy(comp, bbVarUse, VarSetOps::MakeEmpty(comp));
1130 VarSetOps::AssignNoCopy(comp, bbVarDef, VarSetOps::MakeEmpty(comp));
1131 VarSetOps::AssignNoCopy(comp, bbLiveIn, VarSetOps::MakeEmpty(comp));
1132 VarSetOps::AssignNoCopy(comp, bbLiveOut, VarSetOps::MakeEmpty(comp));
1133 VarSetOps::AssignNoCopy(comp, bbScope, VarSetOps::MakeEmpty(comp));
1135 bbMemoryUse = emptyMemoryKindSet;
1136 bbMemoryDef = emptyMemoryKindSet;
1137 bbMemoryLiveIn = emptyMemoryKindSet;
1138 bbMemoryLiveOut = emptyMemoryKindSet;
1141 // Returns true if the basic block ends with GT_JMP
1142 bool BasicBlock::endsWithJmpMethod(Compiler* comp)
1144 if (comp->compJmpOpUsed && (bbJumpKind == BBJ_RETURN) && (bbFlags & BBF_HAS_JMP))
1146 GenTree* lastNode = this->lastNode();
1147 assert(lastNode != nullptr);
1148 return lastNode->OperGet() == GT_JMP;
1154 // Returns true if the basic block ends with either
1156 // ii) tail call (implicit or explicit)
1159 // comp - Compiler instance
1160 // fastTailCallsOnly - Only consider fast tail calls excluding tail calls via helper.
1162 bool BasicBlock::endsWithTailCallOrJmp(Compiler* comp, bool fastTailCallsOnly /*=false*/)
1164 GenTree* tailCall = nullptr;
1165 bool tailCallsConvertibleToLoopOnly = false;
1166 return endsWithJmpMethod(comp) ||
1167 endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, &tailCall);
1170 //------------------------------------------------------------------------------
1171 // endsWithTailCall : Check if the block ends with a tail call.
// Arguments:
1174 // comp - compiler instance
1175 // fastTailCallsOnly - check for fast tail calls only
1176 // tailCallsConvertibleToLoopOnly - check for tail calls convertible to loop only
1177 // tailCall - a pointer to a tree that will be set to the call tree if the block
1178 // ends with a tail call and will be set to nullptr otherwise.
// Return Value:
1181 // true if the block ends with a tail call; false otherwise.
// Notes:
1184 // At most one of fastTailCallsOnly and tailCallsConvertibleToLoopOnly flags can be true.
// NOTE(review): the final *tailCall assignment and return are elided in this listing.
1186 bool BasicBlock::endsWithTailCall(Compiler* comp,
1187 bool fastTailCallsOnly,
1188 bool tailCallsConvertibleToLoopOnly,
1191 assert(!fastTailCallsOnly || !tailCallsConvertibleToLoopOnly);
1192 *tailCall = nullptr;
1193 bool result = false;
1195 // Is this a tail call?
1196 // The reason for keeping this under RyuJIT is so as not to impact existing Jit32 x86 and arm
1198 if (comp->compTailCallUsed)
1200 if (fastTailCallsOnly || tailCallsConvertibleToLoopOnly)
1202 // Only fast tail calls or only tail calls convertible to loops
1203 result = (bbFlags & BBF_HAS_JMP) && (bbJumpKind == BBJ_RETURN)
1207 // Fast tail calls, tail calls convertible to loops, and tails calls dispatched via helper
1208 result = (bbJumpKind == BBJ_THROW) || ((bbFlags & BBF_HAS_JMP) && (bbJumpKind == BBJ_RETURN));
// Candidate block shape found: now inspect the actual last node.
1213 GenTree* lastNode = this->lastNode();
1214 if (lastNode->OperGet() == GT_CALL)
1216 GenTreeCall* call = lastNode->AsCall();
1217 if (tailCallsConvertibleToLoopOnly)
1219 result = call->IsTailCallConvertibleToLoop();
1221 else if (fastTailCallsOnly)
1223 result = call->IsFastTailCall();
1227 result = call->IsTailCall();
1245 //------------------------------------------------------------------------------
1246 // endsWithTailCallConvertibleToLoop : Check if the block ends with a tail call convertible to loop.
1249 // comp - compiler instance
1250 // tailCall - a pointer to a tree that will be set to the call tree if the block
1251 // ends with a tail call convertible to loop and will be set to nullptr otherwise.
1254 // true if the block ends with a tail call convertible to loop.
1256 bool BasicBlock::endsWithTailCallConvertibleToLoop(Compiler* comp, GenTree** tailCall)
1258 bool fastTailCallsOnly = false;
1259 bool tailCallsConvertibleToLoopOnly = true;
1260 return endsWithTailCall(comp, fastTailCallsOnly, tailCallsConvertibleToLoopOnly, tailCall);
1263 /*****************************************************************************
1265 * Allocate a basic block but don't append it to the current BB list.
1268 BasicBlock* Compiler::bbNewBasicBlock(BBjumpKinds jumpKind)
1272 /* Allocate the block descriptor and zero it out */
1273 assert(fgSafeBasicBlockCreation);
1275 block = new (this, CMK_BasicBlock) BasicBlock;
1277 #if MEASURE_BLOCK_SIZE
1278 BasicBlock::s_Count += 1;
1279 BasicBlock::s_Size += sizeof(*block);
1283 // fgLookupBB() is invalid until fgInitBBLookup() is called again.
1284 fgBBs = (BasicBlock**)0xCDCD;
1287 // TODO-Throughput: The following memset is pretty expensive - do something else?
1288 // Note that some fields have to be initialized to 0 (like bbFPStateX87)
1289 memset(block, 0, sizeof(*block));
1291 // scopeInfo needs to be able to differentiate between blocks which
1292 // correspond to some instrs (and so may have some LocalVarInfo
1293 // boundaries), or have been inserted by the JIT
1294 block->bbCodeOffs = BAD_IL_OFFSET;
1295 block->bbCodeOffsEnd = BAD_IL_OFFSET;
1298 block->bbID = compBasicBlockID++;
1301 /* Give the block a number, set the ancestor count and weight */
1305 if (compIsForInlining())
1307 block->bbNum = ++impInlineInfo->InlinerCompiler->fgBBNumMax;
1311 block->bbNum = ++fgBBNumMax;
1314 #ifndef LEGACY_BACKEND
1315 if (compRationalIRForm)
1317 block->bbFlags |= BBF_IS_LIR;
1319 #endif // !LEGACY_BACKEND
1322 block->bbWeight = BB_UNITY_WEIGHT;
1324 block->bbStkTempsIn = NO_BASE_TMP;
1325 block->bbStkTempsOut = NO_BASE_TMP;
1327 block->bbEntryState = nullptr;
1329 /* Record the jump kind in the block */
1331 block->bbJumpKind = jumpKind;
1333 if (jumpKind == BBJ_THROW)
1335 block->bbSetRunRarely();
1341 printf("New Basic Block %s created.\n", block->dspToString());
1345 // We will give all the blocks var sets after the number of tracked variables
1346 // is determined and frozen. After that, if we dynamically create a basic block,
1347 // we will initialize its var sets.
1348 if (fgBBVarSetsInited)
1350 VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::MakeEmpty(this));
1351 VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::MakeEmpty(this));
1352 VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::MakeEmpty(this));
1353 VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::MakeEmpty(this));
1354 VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::MakeEmpty(this));
1358 VarSetOps::AssignNoCopy(this, block->bbVarUse, VarSetOps::UninitVal());
1359 VarSetOps::AssignNoCopy(this, block->bbVarDef, VarSetOps::UninitVal());
1360 VarSetOps::AssignNoCopy(this, block->bbLiveIn, VarSetOps::UninitVal());
1361 VarSetOps::AssignNoCopy(this, block->bbLiveOut, VarSetOps::UninitVal());
1362 VarSetOps::AssignNoCopy(this, block->bbScope, VarSetOps::UninitVal());
1365 block->bbMemoryUse = emptyMemoryKindSet;
1366 block->bbMemoryDef = emptyMemoryKindSet;
1367 block->bbMemoryLiveIn = emptyMemoryKindSet;
1368 block->bbMemoryLiveOut = emptyMemoryKindSet;
1370 for (MemoryKind memoryKind : allMemoryKinds())
1372 block->bbMemorySsaPhiFunc[memoryKind] = nullptr;
1373 block->bbMemorySsaNumIn[memoryKind] = 0;
1374 block->bbMemorySsaNumOut[memoryKind] = 0;
1377 // Make sure we reserve a NOT_IN_LOOP value that isn't a legal table index.
1378 static_assert_no_msg(MAX_LOOP_NUM < BasicBlock::NOT_IN_LOOP);
1380 block->bbNatLoopNum = BasicBlock::NOT_IN_LOOP;