1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
20 #include "sideeffects.h"
// Lowering: the compiler phase that rewrites the IR into the backend-ready
// (LIR) form consumed by the register allocator (LSRA) and code generator.
22 class Lowering : public Phase
// Constructor: registers this phase as PHASE_LOWERING and marks the
// vtable-call temp as not yet allocated (BAD_VAR_NUM).
25 inline Lowering(Compiler* compiler, LinearScanInterface* lsra)
26 : Phase(compiler, "Lowering", PHASE_LOWERING), vtableCallTemp(BAD_VAR_NUM)
// NOTE(review): the cast assumes LinearScan is the only implementation of
// LinearScanInterface handed to this phase -- confirm against lsra.h.
28 m_lsra = (LinearScan*)lsra;
// Phase framework entry point (overrides Phase::DoPhase).
31 virtual void DoPhase() override;
// Describes a cast for lowering purposes (filled in by getCastDescription).
33 // If requiresOverflowCheck is false, all other values will be unset
36 bool requiresOverflowCheck; // Will the cast require an overflow check
37 bool unsignedSource; // Is the source unsigned
38 bool unsignedDest; // is the dest unsigned
40 // All other fields are only meaningful if requiresOverflowCheck is set.
42 ssize_t typeMin; // Lowest storable value of the dest type
43 ssize_t typeMax; // Highest storable value of the dest type
44 ssize_t typeMask; // For converting from/to unsigned
45 bool signCheckOnly; // For converting between unsigned/signed int
// Populates '*castInfo' from the cast node 'treeNode'.
48 static void getCastDescription(GenTree* treeNode, CastInfo* castInfo);
50 // This variant of LowerRange is called from outside of the main Lowering pass,
51 // so it creates its own instance of Lowering to do so.
52 void LowerRange(BasicBlock* block, LIR::ReadOnlyRange& range)
// A fresh Lowering instance is used so the caller's per-pass state
// (notably m_block) is not disturbed by this out-of-band invocation.
54 Lowering lowerer(comp, m_lsra);
55 lowerer.m_block = block;
57 lowerer.LowerRange(range);
61 // LowerRange handles new code that is introduced by or after Lowering.
62 void LowerRange(LIR::ReadOnlyRange& range)
// Lowers each node of the (already linearized) range in order.
64 for (GenTree* newNode : range)
// Convenience overload: lowers the inclusive node span [firstNode, lastNode].
69 void LowerRange(GenTree* firstNode, GenTree* lastNode)
71 LIR::ReadOnlyRange range(firstNode, lastNode);
75 // ContainCheckRange handles new code that is introduced by or after Lowering,
76 // and that is known to be already in Lowered form.
77 void ContainCheckRange(LIR::ReadOnlyRange& range)
// Runs containment analysis on every node in the range.
79 for (GenTree* newNode : range)
81 ContainCheckNode(newNode);
// Convenience overload: containment-checks the inclusive node span
// [firstNode, lastNode].
84 void ContainCheckRange(GenTree* firstNode, GenTree* lastNode)
86 LIR::ReadOnlyRange range(firstNode, lastNode);
87 ContainCheckRange(range);
// Sequences 'tree' into LIR order, runs containment analysis over the new
// nodes, and splices them into the block's range just before 'insertionPoint'.
90 void InsertTreeBeforeAndContainCheck(GenTree* insertionPoint, GenTree* tree)
92 LIR::Range range = LIR::SeqTree(comp, tree);
93 ContainCheckRange(range);
// std::move: InsertBefore takes ownership of the sequenced range.
94 BlockRange().InsertBefore(insertionPoint, std::move(range));
// Dispatches to the per-opcode ContainCheck* routine appropriate for 'node'.
97 void ContainCheckNode(GenTree* node);
// Per-opcode containment analysis. Each routine decides which operands of
// the given node may be marked as contained (folded into the parent's
// instruction rather than evaluated to a register).
99 void ContainCheckDivOrMod(GenTreeOp* node);
100 void ContainCheckReturnTrap(GenTreeOp* node);
101 void ContainCheckArrOffset(GenTreeArrOffs* node);
102 void ContainCheckLclHeap(GenTreeOp* node);
103 void ContainCheckRet(GenTreeOp* node);
104 void ContainCheckJTrue(GenTreeOp* node);
106 void ContainCheckCallOperands(GenTreeCall* call);
107 void ContainCheckIndir(GenTreeIndir* indirNode);
108 void ContainCheckStoreIndir(GenTreeIndir* indirNode);
109 void ContainCheckMul(GenTreeOp* node);
110 void ContainCheckShiftRotate(GenTreeOp* node);
111 void ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc);
112 void ContainCheckCast(GenTreeCast* node);
113 void ContainCheckCompare(GenTreeOp* node);
114 void ContainCheckBinary(GenTreeOp* node);
115 void ContainCheckBoundsChk(GenTreeBoundsChk* node);
// xarch-only: float binops and math intrinsics can take memory operands.
116 #ifdef _TARGET_XARCH_
117 void ContainCheckFloatBinary(GenTreeOp* node);
118 void ContainCheckIntrinsic(GenTreeOp* node);
119 #endif // _TARGET_XARCH_
121 void ContainCheckSIMD(GenTreeSIMD* simdNode);
122 #endif // FEATURE_SIMD
123 #ifdef FEATURE_HW_INTRINSICS
124 void ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node);
125 #endif // FEATURE_HW_INTRINSICS
// Static consistency checkers over args, calls, nodes and whole blocks
// (presumably debug-only validation -- confirm against the .cpp).
128 static void CheckCallArg(GenTree* arg);
129 static void CheckCall(GenTreeCall* call);
130 static void CheckNode(Compiler* compiler, GenTree* node);
131 static bool CheckBlock(Compiler* compiler, BasicBlock* block);
// Lowers every node in 'block'; LowerNode lowers one node and returns the
// next node to process.
134 void LowerBlock(BasicBlock* block);
135 GenTree* LowerNode(GenTree* node);
// NOTE(review): "VS quirk" padding -- semantics not visible here; see the
// implementation for the compatibility rationale.
137 void CheckVSQuirkStackPaddingNeeded(GenTreeCall* call);
139 // ------------------------------
141 // ------------------------------
// Call lowering: rewrites GT_CALL nodes (control expression, args, tail
// calls, PInvoke transitions) into their LIR form.
142 void LowerCall(GenTree* call);
143 #ifndef _TARGET_64BIT_
// 32-bit targets only: long compares must be decomposed into int operations.
144 GenTree* DecomposeLongCompare(GenTree* cmp);
146 GenTree* OptimizeConstCompare(GenTree* cmp);
147 GenTree* LowerCompare(GenTree* cmp);
148 GenTree* LowerJTrue(GenTreeOp* jtrue);
149 void LowerJmpMethod(GenTree* jmp);
150 void LowerRet(GenTree* ret);
// Per-call-kind lowering of the call target / dispatch sequence.
151 GenTree* LowerDelegateInvoke(GenTreeCall* call);
152 GenTree* LowerIndirectNonvirtCall(GenTreeCall* call);
153 GenTree* LowerDirectCall(GenTreeCall* call);
154 GenTree* LowerNonvirtPinvokeCall(GenTreeCall* call);
155 GenTree* LowerTailCallViaHelper(GenTreeCall* callNode, GenTree* callTarget);
156 void LowerFastTailCall(GenTreeCall* callNode);
157 void InsertProfTailCallHook(GenTreeCall* callNode, GenTree* insertionPoint);
158 GenTree* LowerVirtualVtableCall(GenTreeCall* call);
159 GenTree* LowerVirtualStubCall(GenTreeCall* call);
// Argument lowering: materializes PUTARG_* nodes for each call argument.
160 void LowerArgsForCall(GenTreeCall* call);
161 void ReplaceArgWithPutArgOrBitcast(GenTree** ppChild, GenTree* newNode);
162 GenTree* NewPutArg(GenTreeCall* call, GenTree* arg, fgArgTabEntry* info, var_types type);
163 void LowerArg(GenTreeCall* call, GenTree** ppTree);
164 #ifdef _TARGET_ARMARCH_
// ARM/ARM64 only: float args may need to be passed in integer registers.
165 GenTree* LowerFloatArg(GenTree** pArg, fgArgTabEntry* info);
166 GenTree* LowerFloatArgReg(GenTree* arg, regNumber regNum);
// PInvoke support: inserts the managed/native transition code around
// PInvoke calls and at method entry/exit.
169 void InsertPInvokeCallProlog(GenTreeCall* call);
170 void InsertPInvokeCallEpilog(GenTreeCall* call);
171 void InsertPInvokeMethodProlog();
172 void InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr));
// Builds the IR snippets used by the PInvoke transitions.
173 GenTree* SetGCState(int cns);
174 GenTree* CreateReturnTrapSeq();
180 GenTree* CreateFrameLinkUpdate(FrameLinkAction);
// Creates a constant-address node for 'addr'.
181 GenTree* AddrGen(ssize_t addr);
182 GenTree* AddrGen(void* addr);
// Creates a TYP_I_IMPL indirection (GT_IND) of 'tree'.
184 GenTree* Ind(GenTree* tree)
186 return comp->gtNewOperNode(GT_IND, TYP_I_IMPL, tree);
// Creates a physical-register node for 'reg' (defaults to pointer-sized).
189 GenTree* PhysReg(regNumber reg, var_types type = TYP_I_IMPL)
191 return comp->gtNewPhysRegNode(reg, type);
// Creates a TYP_REF read of the register that carries the 'this' argument
// for 'call'.
194 GenTree* ThisReg(GenTreeCall* call)
196 return PhysReg(comp->codeGen->genGetThisArgReg(call), TYP_REF);
// Creates a GT_LEA of 'base' plus a constant 'offset'. A TYP_REF base
// yields a TYP_BYREF result (interior pointer); otherwise the base's type
// is preserved.
199 GenTree* Offset(GenTree* base, unsigned offset)
201 var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
202 return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, nullptr, 0, offset);
// Creates a GT_LEA of 'base + index' (scale 0, offset 0).
205 GenTree* OffsetByIndex(GenTree* base, GenTree* index)
207 var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
208 return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
// Creates a GT_LEA of 'base + index * scale' (offset 0).
211 GenTree* OffsetByIndexWithScale(GenTree* base, GenTree* index, unsigned scale)
213 var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
214 return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, scale, 0);
217 // Replace the definition of the given use with a lclVar, allocating a new temp
218 // if 'tempNum' is BAD_VAR_NUM.
219 unsigned ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
221 GenTree* oldUseNode = use.Def();
// Only introduce a temp if the def is not already a lclVar, or if the
// caller asked for a specific temp number.
222 if ((oldUseNode->gtOper != GT_LCL_VAR) || (tempNum != BAD_VAR_NUM))
224 unsigned newLclNum = use.ReplaceWithLclVar(comp, m_block->getBBWeight(comp), tempNum);
225 GenTree* newUseNode = use.Def();
// Containment-check the store/load nodes created between the old def and
// the new use of the temp.
226 ContainCheckRange(oldUseNode->gtNext, newUseNode);
// Def was already a lclVar: return its existing local number.
229 return oldUseNode->AsLclVarCommon()->gtLclNum;
232 // return true if this call target is within range of a pc-rel call on the machine
233 bool IsCallTargetInRange(void* addr);
235 #if defined(_TARGET_XARCH_)
// Chooses which operand of 'tree' is the better candidate to be marked
// reg-optional when both are eligible.
236 GenTree* PreferredRegOptionalOperand(GenTree* tree);
238 // ------------------------------------------------------------------
239 // SetRegOptionalBinOp - Indicates which of the operands of a bin-op
240 // register requirement is optional. Xarch instruction set allows
241 // either of op1 or op2 of binary operation (e.g. add, mul etc) to be
242 // a memory operand. This routine provides info to register allocator
243 // which of its operands optionally require a register. Lsra might not
244 // allocate a register to RefTypeUse positions of such operands if it
245 // is beneficial. In such a case codegen will treat them as memory
249 // tree - Gentree of a binary operation.
254 // Note: On xarch at most only one of the operands will be marked as
255 // reg optional, even when both operands could be considered register
257 void SetRegOptionalForBinOp(GenTree* tree)
259 assert(GenTree::OperIsBinary(tree->OperGet()));
261 GenTree* const op1 = tree->gtGetOp1();
262 GenTree* const op2 = tree->gtGetOp2();
// An operand may only be a memory op if its size matches the operation's
// size (a smaller/larger memory access would read the wrong number of bytes).
264 const unsigned operatorSize = genTypeSize(tree->TypeGet());
// op1 can only be the memory operand if the operation is commutative
// (the instruction form puts the memory operand in op2's position).
266 const bool op1Legal = tree->OperIsCommutative() && (operatorSize == genTypeSize(op1->TypeGet()));
267 const bool op2Legal = operatorSize == genTypeSize(op2->TypeGet());
269 GenTree* regOptionalOperand = nullptr;
// Both legal: defer to the preference heuristic; only op1 legal: pick op1.
272 regOptionalOperand = op2Legal ? PreferredRegOptionalOperand(tree) : op1;
276 regOptionalOperand = op2;
278 if (regOptionalOperand != nullptr)
280 regOptionalOperand->SetRegOptional();
283 #endif // defined(_TARGET_XARCH_)
285 // Per tree node member functions
286 void LowerStoreIndir(GenTreeIndir* node);
287 GenTree* LowerAdd(GenTree* node);
// Division/modulus lowering, including strength reduction of divisions by
// constants (the bool/GenTree* returns indicate whether/what was rewritten).
288 bool LowerUnsignedDivOrMod(GenTreeOp* divMod);
289 GenTree* LowerConstIntDivOrMod(GenTree* node);
290 GenTree* LowerSignedDivOrMod(GenTree* node);
291 void LowerBlockStore(GenTreeBlk* blkNode);
292 void LowerPutArgStk(GenTreePutArgStk* tree);
// Attempts to fold an address computation into a GT_LEA addressing mode;
// 'isIndir' distinguishes address-of-indirection from standalone addresses.
294 GenTree* TryCreateAddrMode(LIR::Use&& use, bool isIndir);
295 void AddrModeCleanupHelper(GenTreeAddrMode* addrMode, GenTree* node);
297 GenTree* LowerSwitch(GenTree* node);
// Attempts to replace a small switch with a bit-test; returns false when
// the pattern does not apply.
298 bool TryLowerSwitchToBitTest(
299 BasicBlock* jumpTable[], unsigned jumpCount, unsigned targetCount, BasicBlock* bbSwitch, GenTree* switchValue);
301 void LowerCast(GenTree* node);
// Read-modify-write recognition is only meaningful on architectures whose
// instructions can operate directly on memory.
303 #if !CPU_LOAD_STORE_ARCH
304 bool IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd);
305 bool IsBinOpInRMWStoreInd(GenTree* tree);
306 bool IsRMWMemOpRootedAtStoreInd(GenTree* storeIndTree, GenTree** indirCandidate, GenTree** indirOpSource);
307 bool LowerRMWMemOp(GenTreeIndir* storeInd);
310 void WidenSIMD12IfNecessary(GenTreeLclVarCommon* node);
311 void LowerStoreLoc(GenTreeLclVarCommon* tree);
312 GenTree* LowerArrElem(GenTree* node);
313 void LowerRotate(GenTree* tree);
314 void LowerShift(GenTreeOp* shift);
316 void LowerSIMD(GenTreeSIMD* simdNode);
317 #endif // FEATURE_SIMD
318 #ifdef FEATURE_HW_INTRINSICS
319 void LowerHWIntrinsic(GenTreeHWIntrinsic* node);
320 #endif // FEATURE_HW_INTRINSICS
// Rewrites a block operation into a helper call (see the .cpp for details).
323 void MorphBlkIntoHelperCall(GenTree* pTree, GenTree* treeStmt);
// Returns true if the two indirections address the same location.
326 static bool IndirsAreEquivalent(GenTree* pTreeA, GenTree* pTreeB);
328 // return true if 'childNode' is an immediate that can be contained
329 // by the 'parentNode' (i.e. folded into an instruction)
330 // for example small enough and non-relocatable
331 bool IsContainableImmed(GenTree* parentNode, GenTree* childNode);
333 // Return true if 'node' is a containable memory op.
334 bool IsContainableMemoryOp(GenTree* node)
// Delegates to LSRA, which owns the target-specific memory-op rules.
336 return m_lsra->isContainableMemoryOp(node);
339 #ifdef FEATURE_HW_INTRINSICS
340 // Return true if 'node' is a containable HWIntrinsic op.
341 bool IsContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode, GenTree* node);
342 #endif // FEATURE_HW_INTRINSICS
// Returns true if the two leaf nodes denote the same value.
345 static bool NodesAreEquivalentLeaves(GenTree* candidate, GenTree* storeInd);
// Returns true if any local read through 'addr'/'base'/'index' might be
// modified between its use sites (unsafe for address-mode folding).
347 bool AreSourcesPossiblyModifiedLocals(GenTree* addr, GenTree* base, GenTree* index);
349 // Makes 'childNode' contained in the 'parentNode'
350 void MakeSrcContained(GenTree* parentNode, GenTree* childNode);
352 // Checks and makes 'childNode' contained in the 'parentNode'
// Returns whether containment was actually performed.
353 bool CheckImmedAndMakeContained(GenTree* parentNode, GenTree* childNode);
355 // Checks for memory conflicts in the instructions between childNode and parentNode, and returns true if childNode
357 bool IsSafeToContainMem(GenTree* parentNode, GenTree* childNode);
// Returns the LIR range view of the block currently being lowered (m_block).
359 inline LIR::Range& BlockRange() const
361 return LIR::AsRange(m_block);
365 unsigned vtableCallTemp; // local variable we use as a temp for vtable calls
366 SideEffectSet m_scratchSideEffects; // SideEffectSet used for IsSafeToContainMem and isRMWIndirCandidate