Merge pull request #15948 from fiigii/fix-ifdef
[platform/upstream/coreclr.git] / src/jit/codegenlinear.h
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.

//
// This file contains the members of CodeGen that are defined and used
// only by the RyuJIT backend.  It is included by CodeGen.h in the
// definition of the CodeGen class.
//

#ifndef LEGACY_BACKEND // Not necessary (it's this way in the #include location), but helpful to IntelliSense

void genSetRegToConst(regNumber targetReg, var_types targetType, GenTreePtr tree);
void genCodeForTreeNode(GenTreePtr treeNode);
void genCodeForBinary(GenTreePtr treeNode);

#if defined(_TARGET_X86_)
void genCodeForLongUMod(GenTreeOp* node);
#endif // _TARGET_X86_

void genCodeForDivMod(GenTreeOp* treeNode);
void genCodeForMul(GenTreeOp* treeNode);
void genCodeForMulHi(GenTreeOp* treeNode);
void genLeaInstruction(GenTreeAddrMode* lea);
void genSetRegToCond(regNumber dstReg, GenTreePtr tree);

#if defined(_TARGET_ARMARCH_)
void genScaledAdd(emitAttr attr, regNumber targetReg, regNumber baseReg, regNumber indexReg, int scale);
#endif // _TARGET_ARMARCH_

#if defined(_TARGET_ARM_)
void genCodeForMulLong(GenTreeMultiRegOp* treeNode);
#endif // _TARGET_ARM_

#if !defined(_TARGET_64BIT_)
void genLongToIntCast(GenTreePtr treeNode);
#endif

void genIntToIntCast(GenTreePtr treeNode);
void genFloatToFloatCast(GenTreePtr treeNode);
void genFloatToIntCast(GenTreePtr treeNode);
void genIntToFloatCast(GenTreePtr treeNode);
void genCkfinite(GenTreePtr treeNode);
void genCodeForCompare(GenTreeOp* tree);
void genIntrinsic(GenTreePtr treeNode);
void genPutArgStk(GenTreePutArgStk* treeNode);
void genPutArgReg(GenTreeOp* tree);
#ifdef _TARGET_ARM_
void genPutArgSplit(GenTreePutArgSplit* treeNode);
#endif

#if defined(_TARGET_XARCH_)
unsigned getBaseVarForPutArgStk(GenTreePtr treeNode);
#endif // _TARGET_XARCH_

unsigned getFirstArgWithStackSlot();

void genCompareFloat(GenTreePtr treeNode);
void genCompareInt(GenTreePtr treeNode);

#ifdef FEATURE_SIMD
enum SIMDScalarMoveType
{
    SMT_ZeroInitUpper,                  // zero initialize target upper bits
    SMT_ZeroInitUpper_SrcHasUpperZeros, // zero initialize target upper bits; source upper bits are known to be zero
    SMT_PreserveUpper                   // preserve target upper bits
};

#ifdef _TARGET_ARM64_
insOpts genGetSimdInsOpt(bool is16B, var_types elementType);
#endif
instruction getOpForSIMDIntrinsic(SIMDIntrinsicID intrinsicId, var_types baseType, unsigned* ival = nullptr);
void genSIMDScalarMove(
    var_types targetType, var_types type, regNumber target, regNumber src, SIMDScalarMoveType moveType);
void genSIMDZero(var_types targetType, var_types baseType, regNumber targetReg);
void genSIMDIntrinsicInit(GenTreeSIMD* simdNode);
void genSIMDIntrinsicInitN(GenTreeSIMD* simdNode);
void genSIMDIntrinsicInitArray(GenTreeSIMD* simdNode);
void genSIMDIntrinsicUnOp(GenTreeSIMD* simdNode);
void genSIMDIntrinsicBinOp(GenTreeSIMD* simdNode);
void genSIMDIntrinsicRelOp(GenTreeSIMD* simdNode);
void genSIMDIntrinsicDotProduct(GenTreeSIMD* simdNode);
void genSIMDIntrinsicSetItem(GenTreeSIMD* simdNode);
void genSIMDIntrinsicGetItem(GenTreeSIMD* simdNode);
void genSIMDIntrinsicShuffleSSE2(GenTreeSIMD* simdNode);
void genSIMDIntrinsicUpperSave(GenTreeSIMD* simdNode);
void genSIMDIntrinsicUpperRestore(GenTreeSIMD* simdNode);
void genSIMDLo64BitConvert(SIMDIntrinsicID intrinsicID,
                           var_types       simdType,
                           var_types       baseType,
                           regNumber       tmpReg,
                           regNumber       tmpIntReg,
                           regNumber       targetReg);
void genSIMDIntrinsic32BitConvert(GenTreeSIMD* simdNode);
void genSIMDIntrinsic64BitConvert(GenTreeSIMD* simdNode);
void genSIMDIntrinsicNarrow(GenTreeSIMD* simdNode);
void genSIMDExtractUpperHalf(GenTreeSIMD* simdNode, regNumber srcReg, regNumber tgtReg);
void genSIMDIntrinsicWiden(GenTreeSIMD* simdNode);
void genSIMDIntrinsic(GenTreeSIMD* simdNode);
void genSIMDCheck(GenTree* treeNode);

// TYP_SIMD12 (i.e. Vector3 of size 12 bytes) is not a hardware supported size and requires
// two reads/writes on 64-bit targets. These routines abstract reading/writing of Vector3
// values through an indirection. Note that Vector3 locals allocated on the stack would have
// their size rounded to TARGET_POINTER_SIZE (which is 8 bytes on 64-bit targets) and hence
// Vector3 locals could be treated as TYP_SIMD16 while reading/writing.
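//
// For illustration only (this helper is hypothetical and not part of the JIT): on a 64-bit
// target, a 12-byte copy through an indirection amounts to an 8-byte move for the first two
// elements followed by a 4-byte move for the third, roughly:
//
//     void CopySimd12(void* dst, const void* src) // assumes both point to 12 valid bytes
//     {
//         memcpy(dst, src, 8);                             // X and Y: one 8-byte load/store
//         memcpy((char*)dst + 8, (const char*)src + 8, 4); // Z: one 4-byte load/store
//     }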
void genStoreIndTypeSIMD12(GenTree* treeNode);
void genLoadIndTypeSIMD12(GenTree* treeNode);
void genStoreLclTypeSIMD12(GenTree* treeNode);
void genLoadLclTypeSIMD12(GenTree* treeNode);
#ifdef _TARGET_X86_
void genStoreSIMD12ToStack(regNumber operandReg, regNumber tmpReg);
void genPutArgStkSIMD12(GenTree* treeNode);
#endif // _TARGET_X86_
#endif // FEATURE_SIMD

#if defined(FEATURE_HW_INTRINSICS) && defined(_TARGET_XARCH_)
void genHWIntrinsic(GenTreeHWIntrinsic* node);
void genHWIntrinsic_R_R_RM(GenTreeHWIntrinsic* node, instruction ins);
void genHWIntrinsic_R_R_RM_I(GenTreeHWIntrinsic* node, instruction ins);
void genSSEIntrinsic(GenTreeHWIntrinsic* node);
void genSSE2Intrinsic(GenTreeHWIntrinsic* node);
void genSSE3Intrinsic(GenTreeHWIntrinsic* node);
void genSSSE3Intrinsic(GenTreeHWIntrinsic* node);
void genSSE41Intrinsic(GenTreeHWIntrinsic* node);
void genSSE42Intrinsic(GenTreeHWIntrinsic* node);
void genAVXIntrinsic(GenTreeHWIntrinsic* node);
void genAVX2Intrinsic(GenTreeHWIntrinsic* node);
void genAESIntrinsic(GenTreeHWIntrinsic* node);
void genBMI1Intrinsic(GenTreeHWIntrinsic* node);
void genBMI2Intrinsic(GenTreeHWIntrinsic* node);
void genFMAIntrinsic(GenTreeHWIntrinsic* node);
void genLZCNTIntrinsic(GenTreeHWIntrinsic* node);
void genPCLMULQDQIntrinsic(GenTreeHWIntrinsic* node);
void genPOPCNTIntrinsic(GenTreeHWIntrinsic* node);
#endif // defined(FEATURE_HW_INTRINSICS) && defined(_TARGET_XARCH_)

#if !defined(_TARGET_64BIT_)

// CodeGen for Long Ints

void genStoreLongLclVar(GenTree* treeNode);

#endif // !defined(_TARGET_64BIT_)

void genProduceReg(GenTree* tree);
void genUnspillRegIfNeeded(GenTree* tree);
regNumber genConsumeReg(GenTree* tree);
void genCopyRegIfNeeded(GenTree* tree, regNumber needReg);
void genConsumeRegAndCopy(GenTree* tree, regNumber needReg);

void genConsumeIfReg(GenTreePtr tree)
{
    if (!tree->isContained())
    {
        (void)genConsumeReg(tree);
    }
}

void genRegCopy(GenTreePtr tree);
void genTransferRegGCState(regNumber dst, regNumber src);
void genConsumeAddress(GenTree* addr);
void genConsumeAddrMode(GenTreeAddrMode* mode);
void genSetBlockSize(GenTreeBlk* blkNode, regNumber sizeReg);
void genConsumeBlockSrc(GenTreeBlk* blkNode);
void genSetBlockSrc(GenTreeBlk* blkNode, regNumber srcReg);
void genConsumeBlockOp(GenTreeBlk* blkNode, regNumber dstReg, regNumber srcReg, regNumber sizeReg);

#ifdef FEATURE_PUT_STRUCT_ARG_STK
void genConsumePutStructArgStk(GenTreePutArgStk* putArgStkNode, regNumber dstReg, regNumber srcReg, regNumber sizeReg);
#endif // FEATURE_PUT_STRUCT_ARG_STK
#ifdef _TARGET_ARM_
void genConsumeArgSplitStruct(GenTreePutArgSplit* putArgNode);
#endif

void genConsumeRegs(GenTree* tree);
void genConsumeOperands(GenTreeOp* tree);
void genEmitGSCookieCheck(bool pushReg);
void genSetRegToIcon(regNumber reg, ssize_t val, var_types type = TYP_INT, insFlags flags = INS_FLAGS_DONT_CARE);
void genCodeForShift(GenTreePtr tree);

#if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
void genCodeForShiftLong(GenTreePtr tree);
#endif

#ifdef _TARGET_XARCH_
void genCodeForShiftRMW(GenTreeStoreInd* storeInd);
void genCodeForBT(GenTreeOp* bt);
#endif // _TARGET_XARCH_

void genCodeForCast(GenTreeOp* tree);
void genCodeForLclAddr(GenTree* tree);
void genCodeForIndexAddr(GenTreeIndexAddr* tree);
void genCodeForIndir(GenTreeIndir* tree);
void genCodeForNegNot(GenTree* tree);
void genCodeForLclVar(GenTreeLclVar* tree);
void genCodeForLclFld(GenTreeLclFld* tree);
void genCodeForStoreLclFld(GenTreeLclFld* tree);
void genCodeForStoreLclVar(GenTreeLclVar* tree);
void genCodeForReturnTrap(GenTreeOp* tree);
void genCodeForJcc(GenTreeCC* tree);
void genCodeForSetcc(GenTreeCC* setcc);
void genCodeForStoreInd(GenTreeStoreInd* tree);
void genCodeForSwap(GenTreeOp* tree);
void genCodeForCpObj(GenTreeObj* cpObjNode);
void genCodeForCpBlk(GenTreeBlk* cpBlkNode);
void genCodeForCpBlkRepMovs(GenTreeBlk* cpBlkNode);
void genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode);
void genCodeForPhysReg(GenTreePhysReg* tree);
void genCodeForNullCheck(GenTreeOp* tree);
void genCodeForCmpXchg(GenTreeCmpXchg* tree);

void genAlignStackBeforeCall(GenTreePutArgStk* putArgStk);
void genAlignStackBeforeCall(GenTreeCall* call);
void genRemoveAlignmentAfterCall(GenTreeCall* call, unsigned bias = 0);

#if defined(UNIX_X86_ABI)

unsigned curNestedAlignment; // Keep track of alignment adjustment required during codegen.
unsigned maxNestedAlignment; // The maximum amount of alignment adjustment required.

void SubtractNestedAlignment(unsigned adjustment)
{
    assert(curNestedAlignment >= adjustment);
    unsigned newNestedAlignment = curNestedAlignment - adjustment;
    if (curNestedAlignment != newNestedAlignment)
    {
        JITDUMP("Adjusting stack nested alignment from %d to %d\n", curNestedAlignment, newNestedAlignment);
    }
    curNestedAlignment = newNestedAlignment;
}

void AddNestedAlignment(unsigned adjustment)
{
    unsigned newNestedAlignment = curNestedAlignment + adjustment;
    if (curNestedAlignment != newNestedAlignment)
    {
        JITDUMP("Adjusting stack nested alignment from %d to %d\n", curNestedAlignment, newNestedAlignment);
    }
    curNestedAlignment = newNestedAlignment;

    if (curNestedAlignment > maxNestedAlignment)
    {
        JITDUMP("Max stack nested alignment changed from %d to %d\n", maxNestedAlignment, curNestedAlignment);
        maxNestedAlignment = curNestedAlignment;
    }
}
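
// Illustrative usage only (the real call sites live in the codegen .cpp files and are not
// shown here): alignment padding added before a call is tracked and later removed, e.g.
//
//     AddNestedAlignment(padding);      // stack padded by 'padding' bytes ahead of a call
//     // ... emit the call ...
//     SubtractNestedAlignment(padding); // padding removed once the call site is cleaned up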

#endif

#ifdef FEATURE_PUT_STRUCT_ARG_STK
#ifdef _TARGET_X86_
bool genAdjustStackForPutArgStk(GenTreePutArgStk* putArgStk);
void genPushReg(var_types type, regNumber srcReg);
void genPutArgStkFieldList(GenTreePutArgStk* putArgStk);
#endif // _TARGET_X86_

void genPutStructArgStk(GenTreePutArgStk* treeNode);

unsigned genMove8IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
unsigned genMove4IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
unsigned genMove2IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
unsigned genMove1IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
void genStructPutArgRepMovs(GenTreePutArgStk* putArgStkNode);
void genStructPutArgUnroll(GenTreePutArgStk* putArgStkNode);
void genStoreRegToStackArg(var_types type, regNumber reg, int offset);
#endif // FEATURE_PUT_STRUCT_ARG_STK

void genCodeForLoadOffset(instruction ins, emitAttr size, regNumber dst, GenTree* base, unsigned offset);
void genCodeForStoreOffset(instruction ins, emitAttr size, regNumber src, GenTree* base, unsigned offset);

#ifdef _TARGET_ARM64_
void genCodeForLoadPairOffset(regNumber dst, regNumber dst2, GenTree* base, unsigned offset);
void genCodeForStorePairOffset(regNumber src, regNumber src2, GenTree* base, unsigned offset);
#endif // _TARGET_ARM64_

void genCodeForStoreBlk(GenTreeBlk* storeBlkNode);
void genCodeForInitBlk(GenTreeBlk* initBlkNode);
void genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode);
void genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode);
void genJumpTable(GenTree* tree);
void genTableBasedSwitch(GenTree* tree);
void genCodeForArrIndex(GenTreeArrIndex* treeNode);
void genCodeForArrOffset(GenTreeArrOffs* treeNode);
instruction genGetInsForOper(genTreeOps oper, var_types type);
bool genEmitOptimizedGCWriteBarrier(GCInfo::WriteBarrierForm writeBarrierForm, GenTree* addr, GenTree* data);
void genCallInstruction(GenTreeCall* call);
void genJmpMethod(GenTreePtr jmp);
BasicBlock* genCallFinally(BasicBlock* block);
void genCodeForJumpTrue(GenTreePtr tree);
#ifdef _TARGET_ARM64_
void genCodeForJumpCompare(GenTreeOp* tree);
#endif // _TARGET_ARM64_

#if FEATURE_EH_FUNCLETS
void genEHCatchRet(BasicBlock* block);
#else  // !FEATURE_EH_FUNCLETS
void genEHFinallyOrFilterRet(BasicBlock* block);
#endif // !FEATURE_EH_FUNCLETS

void genMultiRegCallStoreToLocal(GenTreePtr treeNode);

// Deals with codegen for multi-register struct returns.
bool isStructReturn(GenTreePtr treeNode);
void genStructReturn(GenTreePtr treeNode);

void genReturn(GenTreePtr treeNode);

void genLclHeap(GenTreePtr tree);

bool genIsRegCandidateLocal(GenTreePtr tree)
{
    if (!tree->IsLocal())
    {
        return false;
    }
    const LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclVarCommon.gtLclNum];
    return (varDsc->lvIsRegCandidate());
}

#ifdef FEATURE_PUT_STRUCT_ARG_STK
#ifdef _TARGET_X86_
bool m_pushStkArg;
#else  // !_TARGET_X86_
unsigned m_stkArgVarNum;
unsigned m_stkArgOffset;
#endif // !_TARGET_X86_
#endif // FEATURE_PUT_STRUCT_ARG_STK

#ifdef DEBUG
GenTree* lastConsumedNode;
void genNumberOperandUse(GenTree* const operand, int& useNum) const;
void genCheckConsumeNode(GenTree* const node);
#else  // !DEBUG
inline void genCheckConsumeNode(GenTree* treeNode)
{
}
#endif // DEBUG

#endif // !LEGACY_BACKEND