// Fix reading Time zone rules using Julian days (#17672)
// [platform/upstream/coreclr.git] / src / jit / codegenlinear.h
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 //
6 // This file contains the members of CodeGen that are defined and used
7 // only by the RyuJIT backend.  It is included by CodeGen.h in the
8 // definition of the CodeGen class.
9 //
10
11 #ifndef LEGACY_BACKEND // Not necessary (it's this way in the #include location), but helpful to IntelliSense
12
13 void genSetRegToConst(regNumber targetReg, var_types targetType, GenTree* tree);
14 void genCodeForTreeNode(GenTree* treeNode);
15 void genCodeForBinary(GenTree* treeNode);
16
17 #if defined(_TARGET_X86_)
18 void genCodeForLongUMod(GenTreeOp* node);
19 #endif // _TARGET_X86_
20
21 void genCodeForDivMod(GenTreeOp* treeNode);
22 void genCodeForMul(GenTreeOp* treeNode);
23 void genCodeForMulHi(GenTreeOp* treeNode);
24 void genLeaInstruction(GenTreeAddrMode* lea);
25 void genSetRegToCond(regNumber dstReg, GenTree* tree);
26
27 #if defined(_TARGET_ARMARCH_)
28 void genScaledAdd(emitAttr attr, regNumber targetReg, regNumber baseReg, regNumber indexReg, int scale);
29 #endif // _TARGET_ARMARCH_
30
31 #if defined(_TARGET_ARM_)
32 void genCodeForMulLong(GenTreeMultiRegOp* treeNode);
33 #endif // _TARGET_ARM_
34
35 #if !defined(_TARGET_64BIT_)
36 void genLongToIntCast(GenTree* treeNode);
37 #endif
38
39 void genIntToIntCast(GenTree* treeNode);
40 void genFloatToFloatCast(GenTree* treeNode);
41 void genFloatToIntCast(GenTree* treeNode);
42 void genIntToFloatCast(GenTree* treeNode);
43 void genCkfinite(GenTree* treeNode);
44 void genCodeForCompare(GenTreeOp* tree);
45 void genIntrinsic(GenTree* treeNode);
46 void genPutArgStk(GenTreePutArgStk* treeNode);
47 void genPutArgReg(GenTreeOp* tree);
48 #ifdef _TARGET_ARM_
49 void genPutArgSplit(GenTreePutArgSplit* treeNode);
50 #endif
51
52 #if defined(_TARGET_XARCH_)
53 unsigned getBaseVarForPutArgStk(GenTree* treeNode);
54 #endif // _TARGET_XARCH_
55
56 unsigned getFirstArgWithStackSlot();
57
58 void genCompareFloat(GenTree* treeNode);
59 void genCompareInt(GenTree* treeNode);
60
61 #ifdef FEATURE_SIMD
// Describes how the upper (non-scalar) bits of the target SIMD register are handled
// when moving a scalar value into it (see genSIMDScalarMove below).
62 enum SIMDScalarMoveType
63 {
64     SMT_ZeroInitUpper,                  // zero initialize target upper bits
65     SMT_ZeroInitUpper_SrcHasUpperZeros, // zero initialize target upper bits; source upper bits are known to be zero
66     SMT_PreserveUpper                   // preserve target upper bits
67 };
68
69 #ifdef _TARGET_ARM64_
70 insOpts genGetSimdInsOpt(emitAttr size, var_types elementType);
71 #endif
72 instruction getOpForSIMDIntrinsic(SIMDIntrinsicID intrinsicId, var_types baseType, unsigned* ival = nullptr);
73 void genSIMDScalarMove(
74     var_types targetType, var_types type, regNumber target, regNumber src, SIMDScalarMoveType moveType);
75 void genSIMDZero(var_types targetType, var_types baseType, regNumber targetReg);
76 void genSIMDIntrinsicInit(GenTreeSIMD* simdNode);
77 void genSIMDIntrinsicInitN(GenTreeSIMD* simdNode);
78 void genSIMDIntrinsicInitArray(GenTreeSIMD* simdNode);
79 void genSIMDIntrinsicUnOp(GenTreeSIMD* simdNode);
80 void genSIMDIntrinsicBinOp(GenTreeSIMD* simdNode);
81 void genSIMDIntrinsicRelOp(GenTreeSIMD* simdNode);
82 void genSIMDIntrinsicDotProduct(GenTreeSIMD* simdNode);
83 void genSIMDIntrinsicSetItem(GenTreeSIMD* simdNode);
84 void genSIMDIntrinsicGetItem(GenTreeSIMD* simdNode);
85 void genSIMDIntrinsicShuffleSSE2(GenTreeSIMD* simdNode);
86 void genSIMDIntrinsicUpperSave(GenTreeSIMD* simdNode);
87 void genSIMDIntrinsicUpperRestore(GenTreeSIMD* simdNode);
88 void genSIMDLo64BitConvert(SIMDIntrinsicID intrinsicID,
89                            var_types       simdType,
90                            var_types       baseType,
91                            regNumber       tmpReg,
92                            regNumber       tmpIntReg,
93                            regNumber       targetReg);
94 void genSIMDIntrinsic32BitConvert(GenTreeSIMD* simdNode);
95 void genSIMDIntrinsic64BitConvert(GenTreeSIMD* simdNode);
96 void genSIMDIntrinsicNarrow(GenTreeSIMD* simdNode);
97 void genSIMDExtractUpperHalf(GenTreeSIMD* simdNode, regNumber srcReg, regNumber tgtReg);
98 void genSIMDIntrinsicWiden(GenTreeSIMD* simdNode);
99 void genSIMDIntrinsic(GenTreeSIMD* simdNode);
100 void genSIMDCheck(GenTree* treeNode);
101
102 // TYP_SIMD12 (i.e Vector3 of size 12 bytes) is not a hardware supported size and requires
103 // two reads/writes on 64-bit targets. These routines abstract reading/writing of Vector3
104 // values through an indirection. Note that Vector3 locals allocated on stack would have
105 // their size rounded to TARGET_POINTER_SIZE (which is 8 bytes on 64-bit targets) and hence
106 // Vector3 locals could be treated as TYP_SIMD16 while reading/writing.
107 void genStoreIndTypeSIMD12(GenTree* treeNode);
108 void genLoadIndTypeSIMD12(GenTree* treeNode);
109 void genStoreLclTypeSIMD12(GenTree* treeNode);
110 void genLoadLclTypeSIMD12(GenTree* treeNode);
111 #ifdef _TARGET_X86_
112 void genStoreSIMD12ToStack(regNumber operandReg, regNumber tmpReg);
113 void genPutArgStkSIMD12(GenTree* treeNode);
114 #endif // _TARGET_X86_
115 #endif // FEATURE_SIMD
116
117 #ifdef FEATURE_HW_INTRINSICS
118 void genHWIntrinsic(GenTreeHWIntrinsic* node);
119 #if defined(_TARGET_XARCH_)
120 void genHWIntrinsic_R_R_RM(GenTreeHWIntrinsic* node, instruction ins);
121 void genHWIntrinsic_R_R_RM_I(GenTreeHWIntrinsic* node, instruction ins);
122 void genSSEIntrinsic(GenTreeHWIntrinsic* node);
123 void genSSE2Intrinsic(GenTreeHWIntrinsic* node);
124 void genSSE41Intrinsic(GenTreeHWIntrinsic* node);
125 void genSSE42Intrinsic(GenTreeHWIntrinsic* node);
126 void genAvxOrAvx2Intrinsic(GenTreeHWIntrinsic* node);
127 void genAESIntrinsic(GenTreeHWIntrinsic* node);
128 void genBMI1Intrinsic(GenTreeHWIntrinsic* node);
129 void genBMI2Intrinsic(GenTreeHWIntrinsic* node);
130 void genFMAIntrinsic(GenTreeHWIntrinsic* node);
131 void genLZCNTIntrinsic(GenTreeHWIntrinsic* node);
132 void genPCLMULQDQIntrinsic(GenTreeHWIntrinsic* node);
133 void genPOPCNTIntrinsic(GenTreeHWIntrinsic* node);
134 template <typename HWIntrinsicSwitchCaseBody>
135 void genHWIntrinsicJumpTableFallback(NamedIntrinsic            intrinsic,
136                                      regNumber                 nonConstImmReg,
137                                      regNumber                 baseReg,
138                                      regNumber                 offsReg,
139                                      HWIntrinsicSwitchCaseBody emitSwCase);
140 #endif // defined(_TARGET_XARCH_)
141 #if defined(_TARGET_ARM64_)
142 instruction getOpForHWIntrinsic(GenTreeHWIntrinsic* node, var_types instrType);
143 void genHWIntrinsicUnaryOp(GenTreeHWIntrinsic* node);
144 void genHWIntrinsicCrcOp(GenTreeHWIntrinsic* node);
145 void genHWIntrinsicSimdBinaryOp(GenTreeHWIntrinsic* node);
146 void genHWIntrinsicSimdExtractOp(GenTreeHWIntrinsic* node);
147 void genHWIntrinsicSimdInsertOp(GenTreeHWIntrinsic* node);
148 void genHWIntrinsicSimdSelectOp(GenTreeHWIntrinsic* node);
149 void genHWIntrinsicSimdSetAllOp(GenTreeHWIntrinsic* node);
150 void genHWIntrinsicSimdUnaryOp(GenTreeHWIntrinsic* node);
151 void genHWIntrinsicSimdBinaryRMWOp(GenTreeHWIntrinsic* node);
152 void genHWIntrinsicSimdTernaryRMWOp(GenTreeHWIntrinsic* node);
153 void genHWIntrinsicShaHashOp(GenTreeHWIntrinsic* node);
154 void genHWIntrinsicShaRotateOp(GenTreeHWIntrinsic* node);
155 template <typename HWIntrinsicSwitchCaseBody>
156 void genHWIntrinsicSwitchTable(regNumber swReg, regNumber tmpReg, int swMax, HWIntrinsicSwitchCaseBody emitSwCase);
157 #endif // defined(_TARGET_ARM64_)
158 #endif // FEATURE_HW_INTRINSICS
159
160 #if !defined(_TARGET_64BIT_)
161
162 // CodeGen for Long Ints
163
164 void genStoreLongLclVar(GenTree* treeNode);
165
166 #endif // !defined(_TARGET_64BIT_)
167
168 void genProduceReg(GenTree* tree);
169 void genUnspillRegIfNeeded(GenTree* tree);
170 regNumber genConsumeReg(GenTree* tree);
171 void genCopyRegIfNeeded(GenTree* tree, regNumber needReg);
172 void genConsumeRegAndCopy(GenTree* tree, regNumber needReg);
173
174 void genConsumeIfReg(GenTree* tree)
175 {
176     if (!tree->isContained())
177     {
178         (void)genConsumeReg(tree);
179     }
180 }
181
182 void genRegCopy(GenTree* tree);
183 void genTransferRegGCState(regNumber dst, regNumber src);
184 void genConsumeAddress(GenTree* addr);
185 void genConsumeAddrMode(GenTreeAddrMode* mode);
186 void genSetBlockSize(GenTreeBlk* blkNode, regNumber sizeReg);
187 void genConsumeBlockSrc(GenTreeBlk* blkNode);
188 void genSetBlockSrc(GenTreeBlk* blkNode, regNumber srcReg);
189 void genConsumeBlockOp(GenTreeBlk* blkNode, regNumber dstReg, regNumber srcReg, regNumber sizeReg);
190
191 #ifdef FEATURE_PUT_STRUCT_ARG_STK
192 void genConsumePutStructArgStk(GenTreePutArgStk* putArgStkNode, regNumber dstReg, regNumber srcReg, regNumber sizeReg);
193 #endif // FEATURE_PUT_STRUCT_ARG_STK
194 #ifdef _TARGET_ARM_
195 void genConsumeArgSplitStruct(GenTreePutArgSplit* putArgNode);
196 #endif
197
198 void genConsumeRegs(GenTree* tree);
199 void genConsumeOperands(GenTreeOp* tree);
200 void genEmitGSCookieCheck(bool pushReg);
201 void genSetRegToIcon(regNumber reg, ssize_t val, var_types type = TYP_INT, insFlags flags = INS_FLAGS_DONT_CARE);
202 void genCodeForShift(GenTree* tree);
203
204 #if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
205 void genCodeForShiftLong(GenTree* tree);
206 #endif
207
208 #ifdef _TARGET_XARCH_
209 void genCodeForShiftRMW(GenTreeStoreInd* storeInd);
210 void genCodeForBT(GenTreeOp* bt);
211 #endif // _TARGET_XARCH_
212
213 void genCodeForCast(GenTreeOp* tree);
214 void genCodeForLclAddr(GenTree* tree);
215 void genCodeForIndexAddr(GenTreeIndexAddr* tree);
216 void genCodeForIndir(GenTreeIndir* tree);
217 void genCodeForNegNot(GenTree* tree);
218 void genCodeForLclVar(GenTreeLclVar* tree);
219 void genCodeForLclFld(GenTreeLclFld* tree);
220 void genCodeForStoreLclFld(GenTreeLclFld* tree);
221 void genCodeForStoreLclVar(GenTreeLclVar* tree);
222 void genCodeForReturnTrap(GenTreeOp* tree);
223 void genCodeForJcc(GenTreeCC* tree);
224 void genCodeForSetcc(GenTreeCC* setcc);
225 void genCodeForStoreInd(GenTreeStoreInd* tree);
226 void genCodeForSwap(GenTreeOp* tree);
227 void genCodeForCpObj(GenTreeObj* cpObjNode);
228 void genCodeForCpBlk(GenTreeBlk* cpBlkNode);
229 void genCodeForCpBlkRepMovs(GenTreeBlk* cpBlkNode);
230 void genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode);
231 void genCodeForPhysReg(GenTreePhysReg* tree);
232 void genCodeForNullCheck(GenTreeOp* tree);
233 void genCodeForCmpXchg(GenTreeCmpXchg* tree);
234
235 void genAlignStackBeforeCall(GenTreePutArgStk* putArgStk);
236 void genAlignStackBeforeCall(GenTreeCall* call);
237 void genRemoveAlignmentAfterCall(GenTreeCall* call, unsigned bias = 0);
238
239 #if defined(UNIX_X86_ABI)
240
241 unsigned curNestedAlignment; // Keep track of alignment adjustment required during codegen.
242 unsigned maxNestedAlignment; // The maximum amount of alignment adjustment required.
243
244 void SubtractNestedAlignment(unsigned adjustment)
245 {
246     assert(curNestedAlignment >= adjustment);
247     unsigned newNestedAlignment = curNestedAlignment - adjustment;
248     if (curNestedAlignment != newNestedAlignment)
249     {
250         JITDUMP("Adjusting stack nested alignment from %d to %d\n", curNestedAlignment, newNestedAlignment);
251     }
252     curNestedAlignment = newNestedAlignment;
253 }
254
255 void AddNestedAlignment(unsigned adjustment)
256 {
257     unsigned newNestedAlignment = curNestedAlignment + adjustment;
258     if (curNestedAlignment != newNestedAlignment)
259     {
260         JITDUMP("Adjusting stack nested alignment from %d to %d\n", curNestedAlignment, newNestedAlignment);
261     }
262     curNestedAlignment = newNestedAlignment;
263
264     if (curNestedAlignment > maxNestedAlignment)
265     {
266         JITDUMP("Max stack nested alignment changed from %d to %d\n", maxNestedAlignment, curNestedAlignment);
267         maxNestedAlignment = curNestedAlignment;
268     }
269 }
270
271 #endif
272
273 #ifdef FEATURE_PUT_STRUCT_ARG_STK
274 #ifdef _TARGET_X86_
275 bool genAdjustStackForPutArgStk(GenTreePutArgStk* putArgStk);
276 void genPushReg(var_types type, regNumber srcReg);
277 void genPutArgStkFieldList(GenTreePutArgStk* putArgStk);
278 #endif // _TARGET_X86_
279
280 void genPutStructArgStk(GenTreePutArgStk* treeNode);
281
282 unsigned genMove8IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
283 unsigned genMove4IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
284 unsigned genMove2IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
285 unsigned genMove1IfNeeded(unsigned size, regNumber tmpReg, GenTree* srcAddr, unsigned offset);
286 void genStructPutArgRepMovs(GenTreePutArgStk* putArgStkNode);
287 void genStructPutArgUnroll(GenTreePutArgStk* putArgStkNode);
288 void genStoreRegToStackArg(var_types type, regNumber reg, int offset);
289 #endif // FEATURE_PUT_STRUCT_ARG_STK
290
291 void genCodeForLoadOffset(instruction ins, emitAttr size, regNumber dst, GenTree* base, unsigned offset);
292 void genCodeForStoreOffset(instruction ins, emitAttr size, regNumber src, GenTree* base, unsigned offset);
293
294 #ifdef _TARGET_ARM64_
295 void genCodeForLoadPairOffset(regNumber dst, regNumber dst2, GenTree* base, unsigned offset);
296 void genCodeForStorePairOffset(regNumber src, regNumber src2, GenTree* base, unsigned offset);
297 #endif // _TARGET_ARM64_
298
299 void genCodeForStoreBlk(GenTreeBlk* storeBlkNode);
300 void genCodeForInitBlk(GenTreeBlk* initBlkNode);
301 void genCodeForInitBlkRepStos(GenTreeBlk* initBlkNode);
302 void genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode);
303 void genJumpTable(GenTree* tree);
304 void genTableBasedSwitch(GenTree* tree);
305 void genCodeForArrIndex(GenTreeArrIndex* treeNode);
306 void genCodeForArrOffset(GenTreeArrOffs* treeNode);
307 instruction genGetInsForOper(genTreeOps oper, var_types type);
308 bool genEmitOptimizedGCWriteBarrier(GCInfo::WriteBarrierForm writeBarrierForm, GenTree* addr, GenTree* data);
309 void genCallInstruction(GenTreeCall* call);
310 void genJmpMethod(GenTree* jmp);
311 BasicBlock* genCallFinally(BasicBlock* block);
312 void genCodeForJumpTrue(GenTree* tree);
313 #ifdef _TARGET_ARM64_
314 void genCodeForJumpCompare(GenTreeOp* tree);
315 #endif // _TARGET_ARM64_
316
317 #if FEATURE_EH_FUNCLETS
318 void genEHCatchRet(BasicBlock* block);
319 #else  // !FEATURE_EH_FUNCLETS
320 void genEHFinallyOrFilterRet(BasicBlock* block);
321 #endif // !FEATURE_EH_FUNCLETS
322
323 void genMultiRegCallStoreToLocal(GenTree* treeNode);
324
325 // Deals with codegen for muti-register struct returns.
326 bool isStructReturn(GenTree* treeNode);
327 void genStructReturn(GenTree* treeNode);
328
329 #if defined(_TARGET_X86_) || defined(_TARGET_ARM_)
330 void genLongReturn(GenTree* treeNode);
331 #endif // _TARGET_X86_ ||  _TARGET_ARM_
332
333 #if defined(_TARGET_X86_)
334 void genFloatReturn(GenTree* treeNode);
335 #endif // _TARGET_X86_
336
337 #if defined(_TARGET_ARM64_)
338 void genSimpleReturn(GenTree* treeNode);
339 #endif // _TARGET_ARM64_
340
341 void genReturn(GenTree* treeNode);
342
343 void genLclHeap(GenTree* tree);
344
345 bool genIsRegCandidateLocal(GenTree* tree)
346 {
347     if (!tree->IsLocal())
348     {
349         return false;
350     }
351     const LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclVarCommon.gtLclNum];
352     return (varDsc->lvIsRegCandidate());
353 }
354
// Per-call state used while generating PutArgStk nodes.
// NOTE(review): on x86 arguments appear to be pushed (m_pushStkArg), while other
// targets store into an outgoing-arg area addressed by var/offset — confirm in
// the codegen*.cpp implementations.
355 #ifdef FEATURE_PUT_STRUCT_ARG_STK
356 #ifdef _TARGET_X86_
357 bool m_pushStkArg;
358 #else  // !_TARGET_X86_
359 unsigned m_stkArgVarNum;
360 unsigned m_stkArgOffset;
361 #endif // !_TARGET_X86_
362 #endif // FEATURE_PUT_STRUCT_ARG_STK
363
364 #ifdef DEBUG
365 GenTree* lastConsumedNode;
366 void genNumberOperandUse(GenTree* const operand, int& useNum) const;
367 void genCheckConsumeNode(GenTree* const node);
368 #else  // !DEBUG
// In release builds the consume-node check is deliberately a no-op that the
// compiler optimizes away entirely.
369 inline void genCheckConsumeNode(GenTree* treeNode)
370 {
371 }
372 #endif // DEBUG
373
374 #endif // !LEGACY_BACKEND