Remove relocations for vtable chunks (#17147)
[platform/upstream/coreclr.git] / src/jit/lower.h
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

/*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XX                                                                           XX
XX                               Lower                                       XX
XX                                                                           XX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
*/

#ifndef _LOWER_H_
#define _LOWER_H_

#include "compiler.h"
#include "phase.h"
#include "lsra.h"
#include "sideeffects.h"

class Lowering : public Phase
{
public:
    inline Lowering(Compiler* compiler, LinearScanInterface* lsra)
        : Phase(compiler, "Lowering", PHASE_LOWERING), vtableCallTemp(BAD_VAR_NUM)
    {
        m_lsra = (LinearScan*)lsra;
        assert(m_lsra);
    }
    virtual void DoPhase() override;

    // If requiresOverflowCheck is false, all other values will be unset
    struct CastInfo
    {
        bool requiresOverflowCheck; // Will the cast require an overflow check
        bool unsignedSource;        // Is the source unsigned
        bool unsignedDest;          // Is the dest unsigned

        // All other fields are only meaningful if requiresOverflowCheck is set.

        ssize_t typeMin;       // Lowest storable value of the dest type
        ssize_t typeMax;       // Highest storable value of the dest type
        ssize_t typeMask;      // For converting from/to unsigned
        bool    signCheckOnly; // For converting between unsigned/signed int

    };

    static void getCastDescription(GenTree* treeNode, CastInfo* castInfo);

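    // A minimal usage sketch (not taken from this file) of how a caller might
    // consume CastInfo; 'castNode' is a hypothetical GT_CAST node:
    //
    //   CastInfo castInfo;
    //   getCastDescription(castNode, &castInfo);
    //   if (castInfo.requiresOverflowCheck)
    //   {
    //       // Compare the source against castInfo.typeMin/typeMax (or perform
    //       // only a sign check when castInfo.signCheckOnly is set) and jump
    //       // to the overflow throw block on failure.
    //   }
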
    // This variant of LowerRange is called from outside of the main Lowering pass,
    // so it creates its own Lowering instance to lower the given range.
    void LowerRange(BasicBlock* block, LIR::ReadOnlyRange& range)
    {
        Lowering lowerer(comp, m_lsra);
        lowerer.m_block = block;

        lowerer.LowerRange(range);
    }

private:
    // LowerRange handles new code that is introduced by or after Lowering.
    void LowerRange(LIR::ReadOnlyRange& range)
    {
        for (GenTree* newNode : range)
        {
            LowerNode(newNode);
        }
    }
    void LowerRange(GenTree* firstNode, GenTree* lastNode)
    {
        LIR::ReadOnlyRange range(firstNode, lastNode);
        LowerRange(range);
    }

    // ContainCheckRange handles new code that is introduced by or after Lowering,
    // and that is known to be already in Lowered form.
    void ContainCheckRange(LIR::ReadOnlyRange& range)
    {
        for (GenTree* newNode : range)
        {
            ContainCheckNode(newNode);
        }
    }
    void ContainCheckRange(GenTree* firstNode, GenTree* lastNode)
    {
        LIR::ReadOnlyRange range(firstNode, lastNode);
        ContainCheckRange(range);
    }

    void InsertTreeBeforeAndContainCheck(GenTree* insertionPoint, GenTree* tree)
    {
        LIR::Range range = LIR::SeqTree(comp, tree);
        ContainCheckRange(range);
        BlockRange().InsertBefore(insertionPoint, std::move(range));
    }

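    // Usage sketch (with assumed values): build a small tree, sequence it, and
    // splice it in ahead of an existing node, containing operands where legal.
    //
    //   GenTree* addr = AddrGen(pfnTarget);               // 'pfnTarget' is an assumed address
    //   GenTree* load = Ind(addr);
    //   InsertTreeBeforeAndContainCheck(callNode, load);  // 'callNode' is an assumed insertion point
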
    void ContainCheckNode(GenTree* node);

    void ContainCheckDivOrMod(GenTreeOp* node);
    void ContainCheckReturnTrap(GenTreeOp* node);
    void ContainCheckArrOffset(GenTreeArrOffs* node);
    void ContainCheckLclHeap(GenTreeOp* node);
    void ContainCheckRet(GenTreeOp* node);
    void ContainCheckJTrue(GenTreeOp* node);

    void ContainCheckCallOperands(GenTreeCall* call);
    void ContainCheckIndir(GenTreeIndir* indirNode);
    void ContainCheckStoreIndir(GenTreeIndir* indirNode);
    void ContainCheckMul(GenTreeOp* node);
    void ContainCheckShiftRotate(GenTreeOp* node);
    void ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc);
    void ContainCheckCast(GenTreeCast* node);
    void ContainCheckCompare(GenTreeOp* node);
    void ContainCheckBinary(GenTreeOp* node);
    void ContainCheckBoundsChk(GenTreeBoundsChk* node);
#ifdef _TARGET_XARCH_
    void ContainCheckFloatBinary(GenTreeOp* node);
    void ContainCheckIntrinsic(GenTreeOp* node);
#endif // _TARGET_XARCH_
#ifdef FEATURE_SIMD
    void ContainCheckSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    void ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node);
#endif // FEATURE_HW_INTRINSICS

#ifdef DEBUG
    static void CheckCallArg(GenTree* arg);
    static void CheckCall(GenTreeCall* call);
    static void CheckNode(Compiler* compiler, GenTree* node);
    static bool CheckBlock(Compiler* compiler, BasicBlock* block);
#endif // DEBUG

    void LowerBlock(BasicBlock* block);
    GenTree* LowerNode(GenTree* node);

    void CheckVSQuirkStackPaddingNeeded(GenTreeCall* call);

    // ------------------------------
    // Call Lowering
    // ------------------------------
    void LowerCall(GenTree* call);
#ifndef _TARGET_64BIT_
    GenTree* DecomposeLongCompare(GenTree* cmp);
#endif
    GenTree* OptimizeConstCompare(GenTree* cmp);
    GenTree* LowerCompare(GenTree* cmp);
    GenTree* LowerJTrue(GenTreeOp* jtrue);
    void LowerJmpMethod(GenTree* jmp);
    void LowerRet(GenTree* ret);
    GenTree* LowerDelegateInvoke(GenTreeCall* call);
    GenTree* LowerIndirectNonvirtCall(GenTreeCall* call);
    GenTree* LowerDirectCall(GenTreeCall* call);
    GenTree* LowerNonvirtPinvokeCall(GenTreeCall* call);
    GenTree* LowerTailCallViaHelper(GenTreeCall* callNode, GenTree* callTarget);
    void LowerFastTailCall(GenTreeCall* callNode);
    void InsertProfTailCallHook(GenTreeCall* callNode, GenTree* insertionPoint);
    GenTree* LowerVirtualVtableCall(GenTreeCall* call);
    GenTree* LowerVirtualStubCall(GenTreeCall* call);
    void LowerArgsForCall(GenTreeCall* call);
    void ReplaceArgWithPutArgOrBitcast(GenTree** ppChild, GenTree* newNode);
    GenTree* NewPutArg(GenTreeCall* call, GenTree* arg, fgArgTabEntry* info, var_types type);
    void LowerArg(GenTreeCall* call, GenTree** ppTree);
#ifdef _TARGET_ARMARCH_
    GenTree* LowerFloatArg(GenTree** pArg, fgArgTabEntry* info);
    GenTree* LowerFloatArgReg(GenTree* arg, regNumber regNum);
#endif
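
    // Rough sketch (not the exact code sequence) of what LowerVirtualVtableCall
    // above produces; the precise shape depends on the runtime's vtable layout,
    // e.g. whether vtable chunk offsets are reported as absolute or relative:
    //
    //   tmp = [this + VPTR_OFFS]                // load the method table
    //   tmp = [tmp + vtabOffsOfIndirection]     // load the vtable chunk
    //   tgt = [tmp + vtabOffsAfterIndirection]  // load the target slot
    //   call tgt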

    void InsertPInvokeCallProlog(GenTreeCall* call);
    void InsertPInvokeCallEpilog(GenTreeCall* call);
    void InsertPInvokeMethodProlog();
    void InsertPInvokeMethodEpilog(BasicBlock* returnBB DEBUGARG(GenTree* lastExpr));
    GenTree* SetGCState(int cns);
    GenTree* CreateReturnTrapSeq();
    enum FrameLinkAction
    {
        PushFrame,
        PopFrame
    };
    GenTree* CreateFrameLinkUpdate(FrameLinkAction);
    GenTree* AddrGen(ssize_t addr);
    GenTree* AddrGen(void* addr);

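    // Conceptual sketch (simplified, not the literal emitted IR) of how the
    // PInvoke helpers above combine around an inlined unmanaged call:
    //
    //   InsertPInvokeCallProlog:  record call-site info in the inlined frame,
    //                             then SetGCState(0)    // switch to preemptive GC
    //   call unmanaged target
    //   InsertPInvokeCallEpilog:  SetGCState(1),        // back to cooperative GC
    //                             then CreateReturnTrapSeq() to poll for a
    //                             pending GC suspension request
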
    GenTree* Ind(GenTree* tree)
    {
        return comp->gtNewOperNode(GT_IND, TYP_I_IMPL, tree);
    }

    GenTree* PhysReg(regNumber reg, var_types type = TYP_I_IMPL)
    {
        return comp->gtNewPhysRegNode(reg, type);
    }

    GenTree* ThisReg(GenTreeCall* call)
    {
        return PhysReg(comp->codeGen->genGetThisArgReg(call), TYP_REF);
    }

    GenTree* Offset(GenTree* base, unsigned offset)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, nullptr, 0, offset);
    }

    GenTree* OffsetByIndex(GenTree* base, GenTree* index)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, 0, 0);
    }

    GenTree* OffsetByIndexWithScale(GenTree* base, GenTree* index, unsigned scale)
    {
        var_types resultType = (base->TypeGet() == TYP_REF) ? TYP_BYREF : base->TypeGet();
        return new (comp, GT_LEA) GenTreeAddrMode(resultType, base, index, scale, 0);
    }

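    // These helpers compose naturally. For example (a sketch with assumed nodes
    // 'call' and 'index'):
    //
    //   Ind(Offset(ThisReg(call), 0x20));                     // load [this + 0x20]
    //   Ind(OffsetByIndexWithScale(ThisReg(call), index, 8)); // load [this + index * 8]
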
    // Replace the definition of the given use with a lclVar, allocating a new temp
    // if 'tempNum' is BAD_VAR_NUM.
    unsigned ReplaceWithLclVar(LIR::Use& use, unsigned tempNum = BAD_VAR_NUM)
    {
        GenTree* oldUseNode = use.Def();
        if ((oldUseNode->gtOper != GT_LCL_VAR) || (tempNum != BAD_VAR_NUM))
        {
            unsigned newLclNum  = use.ReplaceWithLclVar(comp, m_block->getBBWeight(comp), tempNum);
            GenTree* newUseNode = use.Def();
            ContainCheckRange(oldUseNode->gtNext, newUseNode);
            return newLclNum;
        }
        return oldUseNode->AsLclVarCommon()->gtLclNum;
    }

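    // Usage sketch (hypothetical): spill a call's address expression to a temp so
    // later lowering can refer to the value without re-evaluating its side effects.
    //
    //   LIR::Use addrUse(BlockRange(), &call->gtCallAddr, call);
    //   unsigned lclNum = ReplaceWithLclVar(addrUse);
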
    // Return true if this call target is within range of a pc-relative call on the machine.
    bool IsCallTargetInRange(void* addr);

#if defined(_TARGET_XARCH_)
    GenTree* PreferredRegOptionalOperand(GenTree* tree);

    // ------------------------------------------------------------------
    // SetRegOptionalForBinOp - Indicates which operand of a binary op has
    // an optional register requirement. The xarch instruction set allows
    // either op1 or op2 of a binary operation (e.g. add, mul) to be a
    // memory operand, so this routine tells the register allocator which
    // operand may optionally stay in memory. LSRA might then not allocate
    // a register to the RefTypeUse position of such an operand if that is
    // beneficial, in which case codegen will treat it as a memory operand.
    //
    // Arguments:
    //     tree  -  GenTree of a binary operation.
    //
    // Returns
    //     None.
    //
    // Note: On xarch, at most one of the operands will be marked as reg
    // optional, even when both operands could be considered register
    // optional.
    void SetRegOptionalForBinOp(GenTree* tree)
    {
        assert(GenTree::OperIsBinary(tree->OperGet()));

        GenTree* const op1 = tree->gtGetOp1();
        GenTree* const op2 = tree->gtGetOp2();

        const unsigned operatorSize = genTypeSize(tree->TypeGet());

        const bool op1Legal = tree->OperIsCommutative() && (operatorSize == genTypeSize(op1->TypeGet()));
        const bool op2Legal = operatorSize == genTypeSize(op2->TypeGet());

        GenTree* regOptionalOperand = nullptr;
        if (op1Legal)
        {
            regOptionalOperand = op2Legal ? PreferredRegOptionalOperand(tree) : op1;
        }
        else if (op2Legal)
        {
            regOptionalOperand = op2;
        }
        if (regOptionalOperand != nullptr)
        {
            regOptionalOperand->SetRegOptional();
        }
    }
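
    // For example: for a commutative, same-size ADD(op1, op2), both operands are
    // legal candidates and PreferredRegOptionalOperand picks one; for a
    // non-commutative op such as SUB, only op2 can be marked reg-optional, so
    // codegen may emit it directly as the instruction's memory operand.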
#endif // defined(_TARGET_XARCH_)

    // Per tree node member functions
    void LowerStoreIndir(GenTreeIndir* node);
    GenTree* LowerAdd(GenTree* node);
    bool LowerUnsignedDivOrMod(GenTreeOp* divMod);
    GenTree* LowerConstIntDivOrMod(GenTree* node);
    GenTree* LowerSignedDivOrMod(GenTree* node);
    void LowerBlockStore(GenTreeBlk* blkNode);
    void LowerPutArgStk(GenTreePutArgStk* tree);

    GenTree* TryCreateAddrMode(LIR::Use&& use, bool isIndir);
    void AddrModeCleanupHelper(GenTreeAddrMode* addrMode, GenTree* node);

    GenTree* LowerSwitch(GenTree* node);
    bool TryLowerSwitchToBitTest(
        BasicBlock* jumpTable[], unsigned jumpCount, unsigned targetCount, BasicBlock* bbSwitch, GenTree* switchValue);

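    // Sketch of the transformation TryLowerSwitchToBitTest looks for: when every
    // switch case jumps to one of only two blocks and the case values fit in a
    // machine word, the jump table can be replaced by a bit-mask test, roughly:
    //
    //   switch (x) { case 0: case 2: case 5: goto B1; default: goto B2; }
    //     ==>  test bit 'x' of the precomputed mask 0b100101 and branch to
    //          B1 or B2 based on the result.
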
    void LowerCast(GenTree* node);

#if !CPU_LOAD_STORE_ARCH
    bool IsRMWIndirCandidate(GenTree* operand, GenTree* storeInd);
    bool IsBinOpInRMWStoreInd(GenTree* tree);
    bool IsRMWMemOpRootedAtStoreInd(GenTree* storeIndTree, GenTree** indirCandidate, GenTree** indirOpSource);
    bool LowerRMWMemOp(GenTreeIndir* storeInd);
#endif

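    // The RMW (read-modify-write) helpers above recognize stores of the form
    // *addr = *addr <op> value on non-load/store architectures, so that codegen
    // can use a single instruction with a memory destination. A sketch:
    //
    //   STOREIND(addr, ADD(IND(addr), 1))   ==>   add dword ptr [addr], 1   (xarch)
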
    void WidenSIMD12IfNecessary(GenTreeLclVarCommon* node);
    void LowerStoreLoc(GenTreeLclVarCommon* tree);
    GenTree* LowerArrElem(GenTree* node);
    void LowerRotate(GenTree* tree);
    void LowerShift(GenTreeOp* shift);
#ifdef FEATURE_SIMD
    void LowerSIMD(GenTreeSIMD* simdNode);
#endif // FEATURE_SIMD
#ifdef FEATURE_HW_INTRINSICS
    void LowerHWIntrinsic(GenTreeHWIntrinsic* node);
#endif // FEATURE_HW_INTRINSICS

    // Utility functions
    void MorphBlkIntoHelperCall(GenTree* pTree, GenTree* treeStmt);

public:
    static bool IndirsAreEquivalent(GenTree* pTreeA, GenTree* pTreeB);

    // Return true if 'childNode' is an immediate that can be contained by the
    // 'parentNode' (i.e. folded into an instruction), for example because it is
    // small enough and non-relocatable.
    bool IsContainableImmed(GenTree* parentNode, GenTree* childNode);

    // Return true if 'node' is a containable memory op.
    bool IsContainableMemoryOp(GenTree* node)
    {
        return m_lsra->isContainableMemoryOp(node);
    }

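    // Containment here means the operand is evaluated as part of its parent's
    // instruction rather than into its own register. For example (a sketch,
    // xarch-flavored): if op2 of an add is a containable memory op or a small
    // non-relocatable immediate, codegen can emit
    //   add reg, [mem]   or   add reg, imm
    // instead of first loading op2 into a register.
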
#ifdef FEATURE_HW_INTRINSICS
    // Return true if 'node' is a containable HWIntrinsic op.
    bool IsContainableHWIntrinsicOp(GenTreeHWIntrinsic* containingNode, GenTree* node);
#endif // FEATURE_HW_INTRINSICS

private:
    static bool NodesAreEquivalentLeaves(GenTree* candidate, GenTree* storeInd);

    bool AreSourcesPossiblyModifiedLocals(GenTree* addr, GenTree* base, GenTree* index);

    // Makes 'childNode' contained in the 'parentNode'
    void MakeSrcContained(GenTree* parentNode, GenTree* childNode);

    // Checks whether 'childNode' is a containable immediate and, if so, makes it
    // contained in 'parentNode'
    bool CheckImmedAndMakeContained(GenTree* parentNode, GenTree* childNode);

    // Checks for memory conflicts in the instructions between childNode and parentNode, and returns true if childNode
    // can be contained.
    bool IsSafeToContainMem(GenTree* parentNode, GenTree* childNode);

    inline LIR::Range& BlockRange() const
    {
        return LIR::AsRange(m_block);
    }

    LinearScan*   m_lsra;
    unsigned      vtableCallTemp;       // local variable we use as a temp for vtable calls
    SideEffectSet m_scratchSideEffects; // SideEffectSet used for IsSafeToContainMem and isRMWIndirCandidate
    BasicBlock*   m_block;
};

#endif // _LOWER_H_