// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX                                                                           XX
XX                               Lower                                       XX
XX                                                                           XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/

#ifndef _LOWER_H_
#define _LOWER_H_

#include "compiler.h"
#include "phase.h"
#include "lsra.h"
#include "sideeffects.h"

class Lowering : public Phase
{
public:
    inline Lowering(Compiler* compiler, LinearScanInterface* lsra)
        : Phase(compiler, "Lowering", PHASE_LOWERING), vtableCallTemp(BAD_VAR_NUM)
    {
        m_lsra = (LinearScan*)lsra;
        assert(m_lsra);
    }
    virtual void DoPhase() override;
    // If requiresOverflowCheck is false, all other values will be unset
    struct CastInfo
    {
        bool requiresOverflowCheck; // Will the cast require an overflow check
        bool unsignedSource;        // Is the source unsigned
        bool unsignedDest;          // Is the dest unsigned

        // All other fields are only meaningful if requiresOverflowCheck is set.

        ssize_t typeMin;       // Lowest storable value of the dest type
        ssize_t typeMax;       // Highest storable value of the dest type
        ssize_t typeMask;      // For converting from/to unsigned
        bool    signCheckOnly; // For converting between unsigned/signed int
    };

    static void getCastDescription(GenTree* treeNode, CastInfo* castInfo);
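
    // Illustrative sketch (not a call site in this file; 'castNode' is a hypothetical
    // GT_CAST node): a caller that needs the overflow-check details of a cast could
    // query them like so, using only the fields defined in CastInfo above:
    //
    //     CastInfo castInfo;
    //     getCastDescription(castNode, &castInfo);
    //     if (castInfo.requiresOverflowCheck)
    //     {
    //         // ... use castInfo.typeMin, castInfo.typeMax, and castInfo.typeMask ...
    //     }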

    // This variant of LowerRange is called from outside of the main Lowering pass,
    // so it creates its own instance of Lowering to do so.
    void LowerRange(BasicBlock* block, LIR::ReadOnlyRange& range)
    {
        Lowering lowerer(comp, m_lsra);
        lowerer.m_block = block;

        lowerer.LowerRange(range);
    }
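    // Illustrative use from outside the Lowering phase (hypothetical caller), assuming
    // 'lowering' is a Lowering* and 'newRange' is a LIR::ReadOnlyRange covering freshly
    // inserted nodes in 'block':
    //
    //     lowering->LowerRange(block, newRange);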

private:
    // LowerRange handles new code that is introduced by or after Lowering.
    void LowerRange(LIR::ReadOnlyRange& range)
    {
        for (GenTree* newNode : range)
        {
            LowerNode(newNode);
        }
    }
    void LowerRange(GenTree* firstNode, GenTree* lastNode)
    {
        LIR::ReadOnlyRange range(firstNode, lastNode);
        LowerRange(range);
    }

    // ContainCheckRange handles new code that is introduced by or after Lowering,
    // and that is known to be already in Lowered form.
    void ContainCheckRange(LIR::ReadOnlyRange& range)
    {
        for (GenTree* newNode : range)
        {
            ContainCheckNode(newNode);
        }
    }
    void ContainCheckRange(GenTree* firstNode, GenTree* lastNode)
    {
        LIR::ReadOnlyRange range(firstNode, lastNode);
        ContainCheckRange(range);
    }

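    // InsertTreeBeforeAndContainCheck: sequence 'tree' into LIR form, run containment
    // analysis over the resulting range, and insert it before 'insertionPoint' in the
    // current block's range.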
    void InsertTreeBeforeAndContainCheck(GenTree* insertionPoint, GenTree* tree)
    {
        LIR::Range range = LIR::SeqTree(comp, tree);
        ContainCheckRange(range);
        BlockRange().InsertBefore(insertionPoint, std::move(range));
    }

    void ContainCheckNode(GenTree* node);

    void ContainCheckDivOrMod(GenTreeOp* node);
    void ContainCheckReturnTrap(GenTreeOp* node);
    void ContainCheckArrOffset(GenTreeArrOffs* node);
    void ContainCheckLclHeap(GenTreeOp* node);
    void ContainCheckRet(GenTreeOp* node);
    void ContainCheckJTrue(GenTreeOp* node);

    void ContainCheckCallOperands(GenTreeCall* call);
    void ContainCheckIndir(GenTreeIndir* indirNode);
    void ContainCheckStoreIndir(GenTreeIndir* indirNode);
    void ContainCheckMul(GenTreeOp* node);
    void ContainCheckShiftRotate(GenTreeOp* node);
    void ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc);
    void ContainCheckCast(GenTreeCast* node);
    void ContainCheckCompare(GenTreeOp* node);
    void ContainCheckBinary(GenTreeOp* node);
    void ContainCheckBoundsChk(GenTreeBoundsChk* node);
#ifdef _TARGET_XARCH_
    void ContainCheckFloatBinary(GenTreeOp* node);
    void ContainCheckIntrinsic(GenTreeOp* node);
#endif // _TARGET_XARCH_
#ifdef FEATURE_SIMD
    void ContainCheckSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    void ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node);
#endif // FEATURE_HW_INTRINSICS

#ifdef DEBUG
    static void CheckCallArg(GenTree* arg);
    static void CheckCall(GenTreeCall* call);
    static void CheckNode(Compiler* compiler, GenTree* node);
    static bool CheckBlock(Compiler* compiler, BasicBlock* block);
#endif // DEBUG

    void LowerBlock(BasicBlock* block);
    GenTree* LowerNode(GenTree* node);

    void CheckVSQuirkStackPaddingNeeded(GenTreeCall* call);

    // ------------------------------
    // Call Lowering
    // ------------------------------
    void LowerCall(GenTree* call);
#ifndef _TARGET_64BIT_
    GenTree* DecomposeLongCompare(GenTree* cmp);
#endif
    GenTree* OptimizeConstCompare(GenTree* cmp);
    GenTree* LowerCompare(GenTree* cmp);
    GenTree* LowerJTrue(GenTreeOp* jtrue);
    void LowerJmpMethod(GenTree* jmp);
    void LowerRet(GenTree* ret);
    GenTree* LowerDelegateInvoke(GenTreeCall* call);
    GenTree* LowerIndirectNonvirtCall(GenTreeCall* call);
    GenTree* LowerDirectCall(GenTreeCall* call);
    GenTree* LowerNonvirtPinvokeCall(GenTreeCall* call);
    GenTree* LowerTailCallViaHelper(GenTreeCall* callNode, GenTree* callTarget);
    void LowerFastTailCall(GenTreeCall* callNode);
    void InsertProfTailCallHook(GenTreeCall* callNode, GenTree* insertionPoint);
    GenTree* LowerVirtualVtableCall(GenTreeCall* call);
    GenTree* LowerVirtualStubCall(GenTreeCall* call);
    void LowerArgsForCall(GenTreeCall* call);
    void ReplaceArgWithPutArgOrBitcast(GenTree** ppChild, GenTree* newNode);
    GenTree* NewPutArg(GenTreeCall* call, GenTree* arg, fgArgTabEntry* info, var_types type);
    void LowerArg(GenTreeCall* call, GenTree** ppTree);
#ifdef _TARGET_ARMARCH_
    GenTree* LowerFloatArg(GenTree** pArg, fgArgTabEntry* info);
    GenTree* LowerFloatArgReg(GenTree* arg, regNumber regNum);
#endif

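    // Helpers used when lowering P/Invoke calls: inserting the method-level and
    // call-site prologs/epilogs, switching the GC state, creating the return trap
    // sequence, and pushing/popping the P/Invoke frame link.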
    void InsertPInvokeCallProlog(GenTreeCall* call);
    void InsertPInvokeCallEpilog(GenTreeCall* call);
    void InsertPInvokeMethodProlog();
    void InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr));
    GenTree* SetGCState(int cns);
    GenTree* CreateReturnTrapSeq();
    enum FrameLinkAction
    {
        PushFrame,
        PopFrame
    };
    GenTree* CreateFrameLinkUpdate(FrameLinkAction);
    GenTree* AddrGen(ssize_t addr);
    GenTree* AddrGen(void* addr);

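    // Small helpers for constructing commonly used node shapes during lowering.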
    GenTree* Ind(GenTree* tree)
    {
        return comp->gtNewOperNode(GT_IND, TYP_I_IMPL, tree);
    }

    GenTree* PhysReg(regNumber reg, var_types type = TYP_I_IMPL)
    {
        return comp->gtNewPhysRegNode(reg, type);
    }

    GenTree* ThisReg(GenTreeCall* call)
    {
        return PhysReg(comp->codeGen->genGetThisArgReg(call), TYP_REF);
    }

    GenTree* Offset(GenTree* base, unsigned offset)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, nullptr, 0, offset);
    }

    GenTree* OffsetByIndex(GenTree* base, GenTree* index)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
    }

    // Replace the definition of the given use with a lclVar, allocating a new temp
    // if 'tempNum' is BAD_VAR_NUM.
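    // Returns the local number of the lclVar that now defines the use; if new nodes
    // are created, containment analysis is run over them.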
    unsigned ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
    {
        GenTree* oldUseNode = use.Def();
        if ((oldUseNode->gtOper != GT_LCL_VAR) || (tempNum != BAD_VAR_NUM))
        {
            unsigned newLclNum  = use.ReplaceWithLclVar(comp, m_block->getBBWeight(comp), tempNum);
            GenTree* newUseNode = use.Def();
            ContainCheckRange(oldUseNode->gtNext, newUseNode);
            return newLclNum;
        }
        return oldUseNode->AsLclVarCommon()->gtLclNum;
    }
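
    // Illustrative sketch (hypothetical call site, not code in this file): to spill the
    // first operand of 'node' to a new temp and use that lclVar in its place:
    //
    //     LIR::Use op1Use(BlockRange(), &node->gtOp.gtOp1, node);
    //     unsigned lclNum = ReplaceWithLclVar(op1Use);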

    // Return true if this call target is within range of a pc-relative call on the machine.
    bool IsCallTargetInRange(void* addr);

#if defined(_TARGET_XARCH_)
    GenTree* PreferredRegOptionalOperand(GenTree* tree);

    // ------------------------------------------------------------------
    // SetRegOptionalForBinOp - Indicates which operand of a binary op may
    // have its register requirement treated as optional. The xarch
    // instruction set allows either op1 or op2 of a binary operation
    // (e.g. add, mul) to be a memory operand. This routine tells the
    // register allocator which operand only optionally requires a
    // register. LSRA may choose not to allocate a register to the
    // RefTypeUse position of such an operand when that is beneficial;
    // in that case codegen will treat it as a memory operand.
    //
    // Arguments:
    //     tree  -  GenTree of a binary operation.
    //
    // Return Value:
    //     None.
    //
    // Note: On xarch, at most one of the operands will be marked as
    // reg optional, even when both operands could be considered register
    // optional.
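    //
    // For example (illustrative): for a commutative, same-size integer add, both operands
    // are legal candidates and PreferredRegOptionalOperand picks one of them; for a
    // subtraction, which is not commutative, only op2 can be marked reg optional.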
    void SetRegOptionalForBinOp(GenTree* tree)
    {
        assert(GenTree::OperIsBinary(tree->OperGet()));

        GenTree* const op1 = tree->gtGetOp1();
        GenTree* const op2 = tree->gtGetOp2();

        const unsigned operatorSize = genTypeSize(tree->TypeGet());

        const bool op1Legal = tree->OperIsCommutative() && (operatorSize == genTypeSize(op1->TypeGet()));
        const bool op2Legal = operatorSize == genTypeSize(op2->TypeGet());

        GenTree* regOptionalOperand = nullptr;
        if (op1Legal)
        {
            regOptionalOperand = op2Legal ? PreferredRegOptionalOperand(tree) : op1;
        }
        else if (op2Legal)
        {
            regOptionalOperand = op2;
        }
        if (regOptionalOperand != nullptr)
        {
            regOptionalOperand->SetRegOptional();
        }
    }
#endif // defined(_TARGET_XARCH_)

    // Per tree node member functions
    void LowerStoreIndir(GenTreeIndir* node);
    GenTree* LowerAdd(GenTree* node);
    bool LowerUnsignedDivOrMod(GenTreeOp* divMod);
    GenTree* LowerConstIntDivOrMod(GenTree* node);
    GenTree* LowerSignedDivOrMod(GenTree* node);
    void LowerBlockStore(GenTreeBlk* blkNode);
    void LowerPutArgStk(GenTreePutArgStk* tree);

    GenTree* TryCreateAddrMode(LIR::Use&& use, bool isIndir);
    void AddrModeCleanupHelper(GenTreeAddrMode* addrMode, GenTree* node);

    GenTree* LowerSwitch(GenTree* node);
    bool TryLowerSwitchToBitTest(
        BasicBlock* jumpTable[], unsigned jumpCount, unsigned targetCount, BasicBlock* bbSwitch, GenTree* switchValue);

    void LowerCast(GenTree* node);

#if !CPU_LOAD_STORE_ARCH
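    // Helpers for recognizing and lowering read-modify-write (RMW) memory operations
    // (e.g. a store-indirect of the form *addr = *addr <binop> value) on targets that
    // allow memory operands.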
    bool IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd);
    bool IsBinOpInRMWStoreInd(GenTree* tree);
    bool IsRMWMemOpRootedAtStoreInd(GenTree* storeIndTree, GenTree** indirCandidate, GenTree** indirOpSource);
    bool LowerRMWMemOp(GenTreeIndir* storeInd);
#endif

    void WidenSIMD12IfNecessary(GenTreeLclVarCommon* node);
    void LowerStoreLoc(GenTreeLclVarCommon* tree);
    GenTree* LowerArrElem(GenTree* node);
    void LowerRotate(GenTree* tree);
    void LowerShift(GenTreeOp* shift);
#ifdef FEATURE_SIMD
    void LowerSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    void LowerHWIntrinsic(GenTreeHWIntrinsic* node);
#endif // FEATURE_HW_INTRINSICS

    // Utility functions
    void MorphBlkIntoHelperCall(GenTree* pTree, GenTree* treeStmt);

public:
    static bool IndirsAreEquivalent(GenTree* pTreeA, GenTree* pTreeB);

    // Return true if 'childNode' is an immediate that can be contained by 'parentNode'
    // (i.e. folded into an instruction), for example because it is small enough and
    // non-relocatable.
    bool IsContainableImmed(GenTree* parentNode, GenTree* childNode);

    // Return true if 'node' is a containable memory op.
    bool IsContainableMemoryOp(GenTree* node)
    {
        return m_lsra->isContainableMemoryOp(node);
    }

#ifdef FEATURE_HW_INTRINSICS
    // Return true if 'node' is a containable HWIntrinsic op.
    bool IsContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode, GenTree* node);
#endif // FEATURE_HW_INTRINSICS

private:
    static bool NodesAreEquivalentLeaves(GenTree* candidate, GenTree* storeInd);

    bool AreSourcesPossiblyModifiedLocals(GenTree* addr, GenTree* base, GenTree* index);

    // Makes 'childNode' contained in the 'parentNode'
    void MakeSrcContained(GenTree* parentNode, GenTree* childNode);

    // Checks whether 'childNode' is a containable immediate and, if so, makes it
    // contained in 'parentNode'.
    bool CheckImmedAndMakeContained(GenTree* parentNode, GenTree* childNode);

    // Checks for memory conflicts in the instructions between childNode and parentNode, and returns true if childNode
    // can be contained.
    bool IsSafeToContainMem(GenTree* parentNode, GenTree* childNode);

    inline LIR::Range& BlockRange() const
    {
        return LIR::AsRange(m_block);
    }

    LinearScan*   m_lsra;
    unsigned      vtableCallTemp;       // local variable we use as a temp for vtable calls
    SideEffectSet m_scratchSideEffects; // SideEffectSet used for IsSafeToContainMem and IsRMWIndirCandidate
    BasicBlock*   m_block;
};

#endif // _LOWER_H_