Fix reading Time zone rules using Julian days (#17672)
[platform/upstream/coreclr.git] / src / jit / rationalize.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5 #include "jitpch.h"
6 #ifdef _MSC_VER
7 #pragma hdrstop
8 #endif
9
10 #ifndef LEGACY_BACKEND
11 // return op that is the store equivalent of the given load opcode
12 genTreeOps storeForm(genTreeOps loadForm)
13 {
14     switch (loadForm)
15     {
16         case GT_LCL_VAR:
17             return GT_STORE_LCL_VAR;
18         case GT_LCL_FLD:
19             return GT_STORE_LCL_FLD;
20         case GT_REG_VAR:
21             noway_assert(!"reg vars only supported in classic backend\n");
22             unreached();
23         default:
24             noway_assert(!"not a data load opcode\n");
25             unreached();
26     }
27 }
28
29 // return op that is the addr equivalent of the given load opcode
30 genTreeOps addrForm(genTreeOps loadForm)
31 {
32     switch (loadForm)
33     {
34         case GT_LCL_VAR:
35             return GT_LCL_VAR_ADDR;
36         case GT_LCL_FLD:
37             return GT_LCL_FLD_ADDR;
38         default:
39             noway_assert(!"not a data load opcode\n");
40             unreached();
41     }
42 }
43
44 // return op that is the load equivalent of the given addr opcode
45 genTreeOps loadForm(genTreeOps addrForm)
46 {
47     switch (addrForm)
48     {
49         case GT_LCL_VAR_ADDR:
50             return GT_LCL_VAR;
51         case GT_LCL_FLD_ADDR:
52             return GT_LCL_FLD;
53         default:
54             noway_assert(!"not a local address opcode\n");
55             unreached();
56     }
57 }
58
59 // copy the flags determined by mask from src to dst
60 void copyFlags(GenTree* dst, GenTree* src, unsigned mask)
61 {
62     dst->gtFlags &= ~mask;
63     dst->gtFlags |= (src->gtFlags & mask);
64 }
65
// Rewrite a SIMD indirection as GT_IND(GT_LEA(obj.op1)), or as a simple
// lclVar if possible.
//
// Arguments:
//    use      - A use reference for a block node
//    keepBlk  - True if this should remain a block node if it is not a lclVar
//
// Return Value:
//    None.
//
// TODO-1stClassStructs: These should be eliminated earlier, once we can handle
// lclVars in all the places that used to have GT_OBJ.
//
void Rationalizer::RewriteSIMDOperand(LIR::Use& use, bool keepBlk)
{
#ifdef FEATURE_SIMD
    // No lowering is needed for non-SIMD nodes, so early out if featureSIMD is not enabled.
    if (!comp->featureSIMD)
    {
        return;
    }

    // Only indirections whose result type is a SIMD type are of interest here.
    GenTree* tree = use.Def();
    if (!tree->OperIsIndir())
    {
        return;
    }
    var_types simdType = tree->TypeGet();

    if (!varTypeIsSIMD(simdType))
    {
        return;
    }

    // If we have GT_IND(GT_LCL_VAR_ADDR) and the GT_LCL_VAR_ADDR is TYP_BYREF/TYP_I_IMPL,
    // and the var is a SIMD type, replace the expression by GT_LCL_VAR.
    GenTree* addr = tree->AsIndir()->Addr();
    if (addr->OperIsLocalAddr() && comp->isAddrOfSIMDType(addr))
    {
        // Drop the indirection and turn the address node itself into the load,
        // retyping it with the SIMD type of the indirection.
        BlockRange().Remove(tree);

        addr->SetOper(loadForm(addr->OperGet()));
        addr->gtType = simdType;
        use.ReplaceWith(comp, addr);
    }
    else if ((addr->OperGet() == GT_ADDR) && (addr->gtGetOp1()->OperIsSIMDorSimdHWintrinsic()))
    {
        // if we have GT_IND(GT_ADDR(GT_SIMD)), remove the GT_IND(GT_ADDR()), leaving just the GT_SIMD.
        BlockRange().Remove(tree);
        BlockRange().Remove(addr);

        use.ReplaceWith(comp, addr->gtGetOp1());
    }
    else if (!keepBlk)
    {
        // Caller allows demotion: turn the block node into a plain indirection.
        tree->SetOper(GT_IND);
        tree->gtType = simdType;
    }
#endif // FEATURE_SIMD
}
126
// RewriteNodeAsCall : Replace the given tree node by a GT_CALL.
//
// Arguments:
//    use         - A pointer to the use edge of the node being replaced
//    parents     - The stack of ancestors of the node being replaced
//    callHnd     - The method handle of the call to be generated
//    entryPoint  - The method entrypoint of the call to be generated
//    args        - The argument list of the call to be generated
//
// Return Value:
//    None.
//

void Rationalizer::RewriteNodeAsCall(GenTree**             use,
                                     ArrayStack<GenTree*>& parents,
                                     CORINFO_METHOD_HANDLE callHnd,
#ifdef FEATURE_READYTORUN_COMPILER
                                     CORINFO_CONST_LOOKUP entryPoint,
#endif
                                     GenTreeArgList* args)
{
    GenTree* const tree           = *use;
    GenTree* const treeFirstNode  = comp->fgGetFirstNode(tree);
    // Remember the node just before the replaced tree's linear range so the
    // new call's nodes can be spliced into the same position below.
    GenTree* const insertionPoint = treeFirstNode->gtPrev;

    BlockRange().Remove(treeFirstNode, tree);

    // Create the call node
    GenTreeCall* call = comp->gtNewCallNode(CT_USER_FUNC, callHnd, tree->gtType, args);

#if DEBUG
    // The generated call must produce the same type as the node it replaces.
    CORINFO_SIG_INFO sig;
    comp->eeGetMethodSig(callHnd, &sig);
    assert(JITtype2varType(sig.retType) == tree->gtType);
#endif // DEBUG

#ifdef FEATURE_READYTORUN_COMPILER
    call->gtCall.setEntryPoint(entryPoint);
#endif

    call = comp->fgMorphArgs(call);
    // Determine if this call has changed any codegen requirements.
    comp->fgCheckArgCnt();

    // Replace "tree" with "call"
    if (parents.Height() > 1)
    {
        parents.Index(1)->ReplaceOperand(use, call);
    }
    else
    {
        // If there's no parent, the tree being replaced is the root of the
        // statement (and no special handling is necessary).
        *use = call;
    }

    // Re-sequence the call's operands and insert the resulting linear range
    // where the replaced tree used to be.
    comp->gtSetEvalOrder(call);
    BlockRange().InsertAfter(insertionPoint, LIR::Range(comp->fgSetTreeSeq(call), call));

    // Propagate flags of "call" to its parents.
    // 0 is current node, so start at 1
    for (int i = 1; i < parents.Height(); i++)
    {
        parents.Index(i)->gtFlags |= (call->gtFlags & GTF_ALL_EFFECT) | GTF_CALL;
    }

    // Since "tree" is replaced with "call", pop "tree" node (i.e the current node)
    // and replace it with "call" on parent stack.
    assert(parents.Top() == tree);
    (void)parents.Pop();
    parents.Push(call);
}
199
200 // RewriteIntrinsicAsUserCall : Rewrite an intrinsic operator as a GT_CALL to the original method.
201 //
202 // Arguments:
203 //    ppTree      - A pointer-to-a-pointer for the intrinsic node
204 //    fgWalkData  - A pointer to tree walk data providing the context
205 //
206 // Return Value:
207 //    None.
208 //
209 // Some intrinsics, such as operation Sqrt, are rewritten back to calls, and some are not.
210 // The ones that are not being rewritten here must be handled in Codegen.
211 // Conceptually, the lower is the right place to do the rewrite. Keeping it in rationalization is
212 // mainly for throughput issue.
213
214 void Rationalizer::RewriteIntrinsicAsUserCall(GenTree** use, ArrayStack<GenTree*>& parents)
215 {
216     GenTreeIntrinsic* intrinsic = (*use)->AsIntrinsic();
217
218     GenTreeArgList* args;
219     if (intrinsic->gtOp.gtOp2 == nullptr)
220     {
221         args = comp->gtNewArgList(intrinsic->gtGetOp1());
222     }
223     else
224     {
225         args = comp->gtNewArgList(intrinsic->gtGetOp1(), intrinsic->gtGetOp2());
226     }
227
228     RewriteNodeAsCall(use, parents, intrinsic->gtMethodHandle,
229 #ifdef FEATURE_READYTORUN_COMPILER
230                       intrinsic->gtEntryPoint,
231 #endif
232                       args);
233 }
234
// FixupIfSIMDLocal: Fixup the type of a lclVar tree, as needed, if it is a SIMD type vector.
//
// Arguments:
//    node      - the GenTreeLclVarCommon tree to be fixed up.
//
// Return Value:
//    None.
//
// TODO-1stClassStructs: This is now only here to preserve existing behavior. It is actually not
// desirable to change the lclFld nodes back to TYP_SIMD (it will cause them to be loaded
// into a vector register, and then moved to an int register).

void Rationalizer::FixupIfSIMDLocal(GenTreeLclVarCommon* node)
{
#ifdef FEATURE_SIMD
    if (!comp->featureSIMD)
    {
        return;
    }

    LclVarDsc* varDsc = &(comp->lvaTable[node->gtLclNum]);

    // Don't mark byref of SIMD vector as a SIMD type.
    // Note that struct args though marked as lvIsSIMD=true,
    // the tree node representing such an arg should not be
    // marked as a SIMD type, since it is a byref of a SIMD type.
    if (!varTypeIsSIMD(varDsc))
    {
        return;
    }
    switch (node->OperGet())
    {
        default:
            // Nothing to do for most tree nodes.
            break;

        case GT_LCL_FLD:
            // We may see a lclFld used for pointer-sized structs that have been morphed, in which
            // case we can change it to GT_LCL_VAR.
            // However, we may also see a lclFld with FieldSeqStore::NotAField() for structs that can't
            // be analyzed, e.g. those with overlapping fields such as the IL implementation of Vector<T>.
            if ((node->AsLclFld()->gtFieldSeq == FieldSeqStore::NotAField()) && (node->AsLclFld()->gtLclOffs == 0) &&
                (node->gtType == TYP_I_IMPL) && (varDsc->lvExactSize == TARGET_POINTER_SIZE))
            {
                node->SetOper(GT_LCL_VAR);
                node->gtFlags &= ~(GTF_VAR_USEASG);
            }
            else
            {
                // If we access a field of a SIMD lclVar via GT_LCL_FLD, it cannot have been
                // independently promoted.
                // Return early: the node keeps its non-SIMD field type and must not be retyped below.
                assert(comp->lvaGetPromotionType(varDsc) != Compiler::PROMOTION_TYPE_INDEPENDENT);
                return;
            }
            break;
        case GT_STORE_LCL_FLD:
            assert(node->gtType == TYP_I_IMPL);
            node->SetOper(GT_STORE_LCL_VAR);
            node->gtFlags &= ~(GTF_VAR_USEASG);
            break;
    }
    // Retype the node to the SIMD type matching the local's (pointer-size-rounded) size.
    unsigned simdSize = (unsigned int)roundUp(varDsc->lvExactSize, TARGET_POINTER_SIZE);
    node->gtType      = comp->getSIMDTypeForSize(simdSize);
#endif // FEATURE_SIMD
}
301
302 #ifdef DEBUG
303
// Debug-only: verify that the given statement's node links are consistent
// within its basic block.
//
// Arguments:
//    tree   - the GT_STMT node to validate
//    block  - the basic block containing the statement
void Rationalizer::ValidateStatement(GenTree* tree, BasicBlock* block)
{
    assert(tree->gtOper == GT_STMT);
    DBEXEC(TRUE, JitTls::GetCompiler()->fgDebugCheckNodeLinks(block, tree));
}
309
// sanity checks that apply to all kinds of IR
void Rationalizer::SanityCheck()
{
    // TODO: assert(!IsLIR());
    BasicBlock* block;
    foreach_block(comp, block)
    {
        // Walk every statement in the block, then every node in each statement
        // in linear (execution) order.
        for (GenTree* statement = block->bbTreeList; statement != nullptr; statement = statement->gtNext)
        {
            ValidateStatement(statement, block);

            for (GenTree* tree = statement->gtStmt.gtStmtList; tree; tree = tree->gtNext)
            {
                // QMARK nodes should have been removed before this phase.
                assert(tree->OperGet() != GT_QMARK);

                if (tree->OperGet() == GT_ASG)
                {
                    // A lclVar on the LHS of an assignment must be marked as a
                    // definition; one on the RHS must not be.
                    if (tree->gtGetOp1()->OperGet() == GT_LCL_VAR)
                    {
                        assert(tree->gtGetOp1()->gtFlags & GTF_VAR_DEF);
                    }
                    else if (tree->gtGetOp2()->OperGet() == GT_LCL_VAR)
                    {
                        assert(!(tree->gtGetOp2()->gtFlags & GTF_VAR_DEF));
                    }
                }
            }
        }
    }
}
341
// Debug-only: sanity checks specific to rational-form IR. Currently this only
// runs the generic checks; rational-form-specific validation is still a TODO.
void Rationalizer::SanityCheckRational()
{
    // TODO-Cleanup : check that the tree is rational here
    // then do normal checks
    SanityCheck();
}
348
349 #endif // DEBUG
350
351 static void RewriteAssignmentIntoStoreLclCore(GenTreeOp* assignment,
352                                               GenTree*   location,
353                                               GenTree*   value,
354                                               genTreeOps locationOp)
355 {
356     assert(assignment != nullptr);
357     assert(assignment->OperGet() == GT_ASG);
358     assert(location != nullptr);
359     assert(value != nullptr);
360
361     genTreeOps storeOp = storeForm(locationOp);
362
363 #ifdef DEBUG
364     JITDUMP("rewriting asg(%s, X) to %s(X)\n", GenTree::OpName(locationOp), GenTree::OpName(storeOp));
365 #endif // DEBUG
366
367     assignment->SetOper(storeOp);
368     GenTreeLclVarCommon* store = assignment->AsLclVarCommon();
369
370     GenTreeLclVarCommon* var = location->AsLclVarCommon();
371     store->SetLclNum(var->gtLclNum);
372     store->SetSsaNum(var->gtSsaNum);
373
374     if (locationOp == GT_LCL_FLD)
375     {
376         store->gtLclFld.gtLclOffs  = var->gtLclFld.gtLclOffs;
377         store->gtLclFld.gtFieldSeq = var->gtLclFld.gtFieldSeq;
378     }
379
380     copyFlags(store, var, GTF_LIVENESS_MASK);
381     store->gtFlags &= ~GTF_REVERSE_OPS;
382
383     store->gtType = var->TypeGet();
384     store->gtOp1  = value;
385
386     DISPNODE(store);
387     JITDUMP("\n");
388 }
389
390 void Rationalizer::RewriteAssignmentIntoStoreLcl(GenTreeOp* assignment)
391 {
392     assert(assignment != nullptr);
393     assert(assignment->OperGet() == GT_ASG);
394
395     GenTree* location = assignment->gtGetOp1();
396     GenTree* value    = assignment->gtGetOp2();
397
398     RewriteAssignmentIntoStoreLclCore(assignment, location, value, location->OperGet());
399 }
400
// Rewrite a GT_ASG node into the corresponding store node form used by LIR:
// local stores become GT_STORE_LCL_VAR/FLD, indirect stores become GT_STOREIND,
// and block stores become GT_STORE_BLK/OBJ/DYN_BLK.
//
// Arguments:
//    use - the use reference for the GT_ASG node to be rewritten
void Rationalizer::RewriteAssignment(LIR::Use& use)
{
    assert(use.IsInitialized());

    GenTreeOp* assignment = use.Def()->AsOp();
    assert(assignment->OperGet() == GT_ASG);

    GenTree* location = assignment->gtGetOp1();
    GenTree* value    = assignment->gtGetOp2();

    genTreeOps locationOp = location->OperGet();

    if (assignment->OperIsBlkOp())
    {
#ifdef FEATURE_SIMD
        // An initblk of a SIMD-typed lclVar with a known base type is rewritten
        // as a SIMDIntrinsicInit so it can be handled as a SIMD store below.
        if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
        {
            if (location->OperGet() == GT_LCL_VAR)
            {
                var_types simdType = location->TypeGet();
                GenTree*  initVal  = assignment->gtOp.gtOp2;
                var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
                if (baseType != TYP_UNKNOWN)
                {
                    GenTreeSIMD* simdTree = new (comp, GT_SIMD)
                        GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
                    assignment->gtOp.gtOp2 = simdTree;
                    value                  = simdTree;
                    // Manually thread the new SIMD node into the linear order
                    // between the init value and the location node.
                    initVal->gtNext        = simdTree;
                    simdTree->gtPrev       = initVal;

                    simdTree->gtNext = location;
                    location->gtPrev = simdTree;
                }
            }
        }
#endif // FEATURE_SIMD
        // A TYP_STRUCT store to a lclVar (other than phi defs and multireg call
        // returns) is turned into an explicit block store through the local's address.
        if ((location->TypeGet() == TYP_STRUCT) && !assignment->IsPhiDefn() && !value->IsMultiRegCall())
        {
            if ((location->OperGet() == GT_LCL_VAR))
            {
                // We need to construct a block node for the location.
                // Modify lcl to be the address form.
                location->SetOper(addrForm(locationOp));
                LclVarDsc* varDsc     = &(comp->lvaTable[location->AsLclVarCommon()->gtLclNum]);
                location->gtType      = TYP_BYREF;
                GenTreeBlk*  storeBlk = nullptr;
                unsigned int size     = varDsc->lvExactSize;

                if (varDsc->lvStructGcCount != 0)
                {
                    // GC-ref-containing structs need a GT_STORE_OBJ that carries
                    // the GC layout so write barriers can be emitted correctly.
                    CORINFO_CLASS_HANDLE structHnd = varDsc->lvVerTypeInfo.GetClassHandle();
                    GenTreeObj*          objNode   = comp->gtNewObjNode(structHnd, location)->AsObj();
                    unsigned int         slots = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);

                    objNode->SetGCInfo(varDsc->lvGcLayout, varDsc->lvStructGcCount, slots);
                    objNode->ChangeOper(GT_STORE_OBJ);
                    objNode->SetData(value);
                    comp->fgMorphUnsafeBlk(objNode);
                    storeBlk = objNode;
                }
                else
                {
                    storeBlk = new (comp, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, location, value, size);
                }
                storeBlk->gtFlags |= GTF_ASG;
                storeBlk->gtFlags |= ((location->gtFlags | value->gtFlags) & GTF_ALL_EFFECT);

                // Insert the new store where the address node ends and retire the GT_ASG.
                GenTree* insertionPoint = location->gtNext;
                BlockRange().InsertBefore(insertionPoint, storeBlk);
                use.ReplaceWith(comp, storeBlk);
                BlockRange().Remove(assignment);
                JITDUMP("After transforming local struct assignment into a block op:\n");
                DISPTREERANGE(BlockRange(), use.Def());
                JITDUMP("\n");
                return;
            }
            else
            {
                assert(location->OperIsBlk());
            }
        }
    }

    // Non-block (or fall-through) cases: dispatch on the destination's oper.
    switch (locationOp)
    {
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_REG_VAR:
        case GT_PHI_ARG:
            // Local destination: rewrite GT_ASG in place as a local store and
            // remove the now-redundant location node from the linear order.
            RewriteAssignmentIntoStoreLclCore(assignment, location, value, locationOp);
            BlockRange().Remove(location);
            break;

        case GT_IND:
        {
            // Indirect destination: replace GT_ASG(GT_IND(addr), value) with
            // a fresh GT_STOREIND(addr, value) node.
            GenTreeStoreInd* store =
                new (comp, GT_STOREIND) GenTreeStoreInd(location->TypeGet(), location->gtGetOp1(), value);

            copyFlags(store, assignment, GTF_ALL_EFFECT);
            copyFlags(store, location, GTF_IND_FLAGS);

            // TODO: JIT dump

            // Remove the GT_IND node and replace the assignment node with the store
            BlockRange().Remove(location);
            BlockRange().InsertBefore(assignment, store);
            use.ReplaceWith(comp, store);
            BlockRange().Remove(assignment);
        }
        break;

        case GT_CLS_VAR:
        {
            // Static field destination: retype the location as its address and
            // turn the assignment into a store through that address.
            location->SetOper(GT_CLS_VAR_ADDR);
            location->gtType = TYP_BYREF;

            assignment->SetOper(GT_STOREIND);
            assignment->AsStoreInd()->SetRMWStatusDefault();

            // TODO: JIT dump
        }
        break;

        case GT_BLK:
        case GT_OBJ:
        case GT_DYN_BLK:
        {
            // Block destination: convert the block node in place to its store
            // form and attach the value as its data operand.
            assert(varTypeIsStruct(location));
            GenTreeBlk* storeBlk = location->AsBlk();
            genTreeOps  storeOper;
            switch (location->gtOper)
            {
                case GT_BLK:
                    storeOper = GT_STORE_BLK;
                    break;
                case GT_OBJ:
                    storeOper = GT_STORE_OBJ;
                    break;
                case GT_DYN_BLK:
                    storeOper                             = GT_STORE_DYN_BLK;
                    storeBlk->AsDynBlk()->gtEvalSizeFirst = false;
                    break;
                default:
                    unreached();
            }
            JITDUMP("Rewriting GT_ASG(%s(X), Y) to %s(X,Y):\n", GenTree::OpName(location->gtOper),
                    GenTree::OpName(storeOper));
            storeBlk->SetOperRaw(storeOper);
            storeBlk->gtFlags &= ~GTF_DONT_CSE;
            storeBlk->gtFlags |=
                (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_BLK_VOLATILE | GTF_BLK_UNALIGNED | GTF_DONT_CSE));
            storeBlk->gtBlk.Data() = value;

            // Replace the assignment node with the store
            use.ReplaceWith(comp, storeBlk);
            BlockRange().Remove(assignment);
            DISPTREERANGE(BlockRange(), use.Def());
            JITDUMP("\n");
        }
        break;

        default:
            unreached();
            break;
    }
}
568
// Rewrite a GT_ADDR node into the LIR address form of its operand:
// GT_ADDR(lclVar/lclFld) becomes GT_LCL_VAR_ADDR/GT_LCL_FLD_ADDR,
// GT_ADDR(GT_CLS_VAR) becomes GT_CLS_VAR_ADDR, and GT_ADDR(indir(X))
// collapses to X.
//
// Arguments:
//    use - the use reference for the GT_ADDR node to be rewritten
void Rationalizer::RewriteAddress(LIR::Use& use)
{
    assert(use.IsInitialized());

    GenTreeUnOp* address = use.Def()->AsUnOp();
    assert(address->OperGet() == GT_ADDR);

    GenTree*   location   = address->gtGetOp1();
    genTreeOps locationOp = location->OperGet();

    if (location->IsLocal())
    {
// We are changing the child from GT_LCL_VAR TO GT_LCL_VAR_ADDR.
// Therefore gtType of the child needs to be changed to a TYP_BYREF
#ifdef DEBUG
        if (locationOp == GT_LCL_VAR)
        {
            JITDUMP("Rewriting GT_ADDR(GT_LCL_VAR) to GT_LCL_VAR_ADDR:\n");
        }
        else
        {
            assert(locationOp == GT_LCL_FLD);
            JITDUMP("Rewriting GT_ADDR(GT_LCL_FLD) to GT_LCL_FLD_ADDR:\n");
        }
#endif // DEBUG

        // Turn the local node itself into the address form and discard the GT_ADDR.
        location->SetOper(addrForm(locationOp));
        location->gtType = TYP_BYREF;
        copyFlags(location, address, GTF_ALL_EFFECT);

        use.ReplaceWith(comp, location);
        BlockRange().Remove(address);
    }
    else if (locationOp == GT_CLS_VAR)
    {
        // Same transformation for static fields: the class var becomes its own address.
        location->SetOper(GT_CLS_VAR_ADDR);
        location->gtType = TYP_BYREF;
        copyFlags(location, address, GTF_ALL_EFFECT);

        use.ReplaceWith(comp, location);
        BlockRange().Remove(address);

        JITDUMP("Rewriting GT_ADDR(GT_CLS_VAR) to GT_CLS_VAR_ADDR:\n");
    }
    else if (location->OperIsIndir())
    {
        // GT_ADDR(indir(X)) cancels out: both nodes are removed, leaving X.
        use.ReplaceWith(comp, location->gtGetOp1());
        BlockRange().Remove(location);
        BlockRange().Remove(address);

        JITDUMP("Rewriting GT_ADDR(GT_IND(X)) to X:\n");
    }

    DISPTREERANGE(BlockRange(), use.Def());
    JITDUMP("\n");
}
625
626 Compiler::fgWalkResult Rationalizer::RewriteNode(GenTree** useEdge, ArrayStack<GenTree*>& parentStack)
627 {
628     assert(useEdge != nullptr);
629
630     GenTree* node = *useEdge;
631     assert(node != nullptr);
632
633 #ifdef DEBUG
634     const bool isLateArg = (node->gtFlags & GTF_LATE_ARG) != 0;
635 #endif
636
637     // First, remove any preceeding list nodes, which are not otherwise visited by the tree walk.
638     //
639     // NOTE: GT_FIELD_LIST head nodes, and GT_LIST nodes used by phi nodes will in fact be visited.
640     for (GenTree* prev = node->gtPrev; prev != nullptr && prev->OperIsAnyList() && !(prev->OperIsFieldListHead());
641          prev          = node->gtPrev)
642     {
643         prev->gtFlags &= ~GTF_REVERSE_OPS;
644         BlockRange().Remove(prev);
645     }
646
647     // Now clear the REVERSE_OPS flag on the current node.
648     node->gtFlags &= ~GTF_REVERSE_OPS;
649
650     // In addition, remove the current node if it is a GT_LIST node that is not an aggregate.
651     if (node->OperIsAnyList())
652     {
653         GenTreeArgList* list = node->AsArgList();
654         if (!list->OperIsFieldListHead())
655         {
656             BlockRange().Remove(list);
657         }
658         return Compiler::WALK_CONTINUE;
659     }
660
661     LIR::Use use;
662     if (parentStack.Height() < 2)
663     {
664         use = LIR::Use::GetDummyUse(BlockRange(), *useEdge);
665     }
666     else
667     {
668         use = LIR::Use(BlockRange(), useEdge, parentStack.Index(1));
669     }
670
671     assert(node == use.Def());
672     switch (node->OperGet())
673     {
674         case GT_ASG:
675             RewriteAssignment(use);
676             break;
677
678         case GT_BOX:
679             // GT_BOX at this level just passes through so get rid of it
680             use.ReplaceWith(comp, node->gtGetOp1());
681             BlockRange().Remove(node);
682             break;
683
684         case GT_ADDR:
685             RewriteAddress(use);
686             break;
687
688         case GT_IND:
689             // Clear the `GTF_IND_ASG_LHS` flag, which overlaps with `GTF_IND_REQ_ADDR_IN_REG`.
690             node->gtFlags &= ~GTF_IND_ASG_LHS;
691
692             if (varTypeIsSIMD(node))
693             {
694                 RewriteSIMDOperand(use, false);
695             }
696             else
697             {
698                 // Due to promotion of structs containing fields of type struct with a
699                 // single scalar type field, we could potentially see IR nodes of the
700                 // form GT_IND(GT_ADD(lclvarAddr, 0)) where 0 is an offset representing
701                 // a field-seq. These get folded here.
702                 //
703                 // TODO: This code can be removed once JIT implements recursive struct
704                 // promotion instead of lying about the type of struct field as the type
705                 // of its single scalar field.
706                 GenTree* addr = node->AsIndir()->Addr();
707                 if (addr->OperGet() == GT_ADD && addr->gtGetOp1()->OperGet() == GT_LCL_VAR_ADDR &&
708                     addr->gtGetOp2()->IsIntegralConst(0))
709                 {
710                     GenTreeLclVarCommon* lclVarNode = addr->gtGetOp1()->AsLclVarCommon();
711                     unsigned             lclNum     = lclVarNode->GetLclNum();
712                     LclVarDsc*           varDsc     = comp->lvaTable + lclNum;
713                     if (node->TypeGet() == varDsc->TypeGet())
714                     {
715                         JITDUMP("Rewriting GT_IND(GT_ADD(LCL_VAR_ADDR,0)) to LCL_VAR\n");
716                         lclVarNode->SetOper(GT_LCL_VAR);
717                         lclVarNode->gtType = node->TypeGet();
718                         use.ReplaceWith(comp, lclVarNode);
719                         BlockRange().Remove(addr);
720                         BlockRange().Remove(addr->gtGetOp2());
721                         BlockRange().Remove(node);
722                     }
723                 }
724             }
725             break;
726
727         case GT_NOP:
728             // fgMorph sometimes inserts NOP nodes between defs and uses
729             // supposedly 'to prevent constant folding'. In this case, remove the
730             // NOP.
731             if (node->gtGetOp1() != nullptr)
732             {
733                 use.ReplaceWith(comp, node->gtGetOp1());
734                 BlockRange().Remove(node);
735                 node = node->gtGetOp1();
736             }
737             break;
738
739         case GT_COMMA:
740         {
741             GenTree*           op1         = node->gtGetOp1();
742             bool               isClosed    = false;
743             unsigned           sideEffects = 0;
744             LIR::ReadOnlyRange lhsRange    = BlockRange().GetTreeRange(op1, &isClosed, &sideEffects);
745
746             if ((sideEffects & GTF_ALL_EFFECT) == 0)
747             {
748                 // The LHS has no side effects. Remove it.
749                 // None of the transforms performed herein violate tree order, so isClosed
750                 // should always be true.
751                 assert(isClosed);
752
753                 BlockRange().Delete(comp, m_block, std::move(lhsRange));
754             }
755             else if (op1->IsValue())
756             {
757                 op1->SetUnusedValue();
758             }
759
760             BlockRange().Remove(node);
761
762             GenTree* replacement = node->gtGetOp2();
763             if (!use.IsDummyUse())
764             {
765                 use.ReplaceWith(comp, replacement);
766                 node = replacement;
767             }
768             else
769             {
770                 // This is a top-level comma. If the RHS has no side effects we can remove
771                 // it as well.
772                 bool               isClosed    = false;
773                 unsigned           sideEffects = 0;
774                 LIR::ReadOnlyRange rhsRange    = BlockRange().GetTreeRange(replacement, &isClosed, &sideEffects);
775
776                 if ((sideEffects & GTF_ALL_EFFECT) == 0)
777                 {
778                     // None of the transforms performed herein violate tree order, so isClosed
779                     // should always be true.
780                     assert(isClosed);
781
782                     BlockRange().Delete(comp, m_block, std::move(rhsRange));
783                 }
784                 else
785                 {
786                     node = replacement;
787                 }
788             }
789         }
790         break;
791
792         case GT_ARGPLACE:
793             // Remove argplace and list nodes from the execution order.
794             //
795             // TODO: remove phi args and phi nodes as well?
796             BlockRange().Remove(node);
797             break;
798
799 #if defined(_TARGET_XARCH_) || defined(_TARGET_ARM_)
800         case GT_CLS_VAR:
801         {
802             // Class vars that are the target of an assignment will get rewritten into
803             // GT_STOREIND(GT_CLS_VAR_ADDR, val) by RewriteAssignment. This check is
804             // not strictly necessary--the GT_IND(GT_CLS_VAR_ADDR) pattern that would
805             // otherwise be generated would also be picked up by RewriteAssignment--but
806             // skipping the rewrite here saves an allocation and a bit of extra work.
807             const bool isLHSOfAssignment = (use.User()->OperGet() == GT_ASG) && (use.User()->gtGetOp1() == node);
808             if (!isLHSOfAssignment)
809             {
810                 GenTree* ind = comp->gtNewOperNode(GT_IND, node->TypeGet(), node);
811
812                 node->SetOper(GT_CLS_VAR_ADDR);
813                 node->gtType = TYP_BYREF;
814
815                 BlockRange().InsertAfter(node, ind);
816                 use.ReplaceWith(comp, ind);
817
818                 // TODO: JIT dump
819             }
820         }
821         break;
822 #endif // _TARGET_XARCH_
823
824         case GT_INTRINSIC:
825             // Non-target intrinsics should have already been rewritten back into user calls.
826             assert(comp->IsTargetIntrinsic(node->gtIntrinsic.gtIntrinsicId));
827             break;
828
829 #ifdef FEATURE_SIMD
830         case GT_BLK:
831         case GT_OBJ:
832         {
833             // TODO-1stClassStructs: These should have been transformed to GT_INDs, but in order
834             // to preserve existing behavior, we will keep this as a block node if this is the
835             // lhs of a block assignment, and either:
836             // - It is a "generic" TYP_STRUCT assignment, OR
837             // - It is an initblk, OR
838             // - Neither the lhs or rhs are known to be of SIMD type.
839
840             GenTree* parent  = use.User();
841             bool     keepBlk = false;
842             if ((parent->OperGet() == GT_ASG) && (node == parent->gtGetOp1()))
843             {
844                 if ((node->TypeGet() == TYP_STRUCT) || parent->OperIsInitBlkOp())
845                 {
846                     keepBlk = true;
847                 }
848                 else if (!comp->isAddrOfSIMDType(node->AsBlk()->Addr()))
849                 {
850                     GenTree* dataSrc = parent->gtGetOp2();
851                     if (!dataSrc->IsLocal() && (dataSrc->OperGet() != GT_SIMD) && (!dataSrc->OperIsHWIntrinsic()))
852                     {
853                         noway_assert(dataSrc->OperIsIndir());
854                         keepBlk = !comp->isAddrOfSIMDType(dataSrc->AsIndir()->Addr());
855                     }
856                 }
857             }
858             RewriteSIMDOperand(use, keepBlk);
859         }
860         break;
861
862         case GT_LCL_FLD:
863         case GT_STORE_LCL_FLD:
864             // TODO-1stClassStructs: Eliminate this.
865             FixupIfSIMDLocal(node->AsLclVarCommon());
866             break;
867
868         case GT_SIMD:
869         {
870             noway_assert(comp->featureSIMD);
871             GenTreeSIMD* simdNode = node->AsSIMD();
872             unsigned     simdSize = simdNode->gtSIMDSize;
873             var_types    simdType = comp->getSIMDTypeForSize(simdSize);
874
875             // TODO-1stClassStructs: This should be handled more generally for enregistered or promoted
876             // structs that are passed or returned in a different register type than their enregistered
877             // type(s).
878             if (simdNode->gtType == TYP_I_IMPL && simdNode->gtSIMDSize == TARGET_POINTER_SIZE)
879             {
880                 // This happens when it is consumed by a GT_RET_EXPR.
881                 // It can only be a Vector2f or Vector2i.
882                 assert(genTypeSize(simdNode->gtSIMDBaseType) == 4);
883                 simdNode->gtType = TYP_SIMD8;
884             }
885             // Certain SIMD trees require rationalizing.
886             if (simdNode->gtSIMD.gtSIMDIntrinsicID == SIMDIntrinsicInitArray)
887             {
888                 // Rewrite this as an explicit load.
889                 JITDUMP("Rewriting GT_SIMD array init as an explicit load:\n");
890                 unsigned int baseTypeSize = genTypeSize(simdNode->gtSIMDBaseType);
891                 GenTree*     address = new (comp, GT_LEA) GenTreeAddrMode(TYP_BYREF, simdNode->gtOp1, simdNode->gtOp2,
892                                                                       baseTypeSize, offsetof(CORINFO_Array, u1Elems));
893                 GenTree* ind = comp->gtNewOperNode(GT_IND, simdType, address);
894
895                 BlockRange().InsertBefore(simdNode, address, ind);
896                 use.ReplaceWith(comp, ind);
897                 BlockRange().Remove(simdNode);
898
899                 DISPTREERANGE(BlockRange(), use.Def());
900                 JITDUMP("\n");
901             }
902             else
903             {
904                 // This code depends on the fact that NONE of the SIMD intrinsics take vector operands
905                 // of a different width.  If that assumption changes, we will EITHER have to make these type
906                 // transformations during importation, and plumb the types all the way through the JIT,
907                 // OR add a lot of special handling here.
908                 GenTree* op1 = simdNode->gtGetOp1();
909                 if (op1 != nullptr && op1->gtType == TYP_STRUCT)
910                 {
911                     op1->gtType = simdType;
912                 }
913
914                 GenTree* op2 = simdNode->gtGetOp2IfPresent();
915                 if (op2 != nullptr && op2->gtType == TYP_STRUCT)
916                 {
917                     op2->gtType = simdType;
918                 }
919             }
920         }
921         break;
922 #endif // FEATURE_SIMD
923
924         default:
925             // JCMP, CMP, SETCC and JCC nodes should not be present in HIR.
926             assert(!node->OperIs(GT_CMP, GT_SETCC, GT_JCC, GT_JCMP));
927             break;
928     }
929
930     // Do some extra processing on top-level nodes to remove unused local reads.
931     if (node->OperIsLocalRead())
932     {
933         if (use.IsDummyUse())
934         {
935             comp->lvaDecRefCnts(node);
936             BlockRange().Remove(node);
937         }
938         else
939         {
940             // Local reads are side-effect-free; clear any flags leftover from frontend transformations.
941             node->gtFlags &= ~GTF_ALL_EFFECT;
942         }
943     }
944     else
945     {
946         if (!node->OperIsStore())
947         {
948             // Clear the GTF_ASG flag for all nodes but stores
949             node->gtFlags &= ~GTF_ASG;
950         }
951
952         if (!node->IsCall())
953         {
954             // Clear the GTF_CALL flag for all nodes but calls
955             node->gtFlags &= ~GTF_CALL;
956         }
957
958         if (node->IsValue() && use.IsDummyUse())
959         {
960             node->SetUnusedValue();
961         }
962
963         if (node->TypeGet() == TYP_LONG)
964         {
965             comp->compLongUsed = true;
966         }
967     }
968
969     assert(isLateArg == ((use.Def()->gtFlags & GTF_LATE_ARG) != 0));
970
971     return Compiler::WALK_CONTINUE;
972 }
973
974 void Rationalizer::DoPhase()
975 {
976     class RationalizeVisitor final : public GenTreeVisitor<RationalizeVisitor>
977     {
978         Rationalizer& m_rationalizer;
979
980     public:
981         enum
982         {
983             ComputeStack      = true,
984             DoPreOrder        = true,
985             DoPostOrder       = true,
986             UseExecutionOrder = true,
987         };
988
989         RationalizeVisitor(Rationalizer& rationalizer)
990             : GenTreeVisitor<RationalizeVisitor>(rationalizer.comp), m_rationalizer(rationalizer)
991         {
992         }
993
994         // Rewrite intrinsics that are not supported by the target back into user calls.
995         // This needs to be done before the transition to LIR because it relies on the use
996         // of fgMorphArgs, which is designed to operate on HIR. Once this is done for a
997         // particular statement, link that statement's nodes into the current basic block.
998         fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
999         {
1000             GenTree* const node = *use;
1001             if (node->OperGet() == GT_INTRINSIC &&
1002                 m_rationalizer.comp->IsIntrinsicImplementedByUserCall(node->gtIntrinsic.gtIntrinsicId))
1003             {
1004                 m_rationalizer.RewriteIntrinsicAsUserCall(use, this->m_ancestors);
1005             }
1006
1007             return Compiler::WALK_CONTINUE;
1008         }
1009
1010         // Rewrite HIR nodes into LIR nodes.
1011         fgWalkResult PostOrderVisit(GenTree** use, GenTree* user)
1012         {
1013             return m_rationalizer.RewriteNode(use, this->m_ancestors);
1014         }
1015     };
1016
1017     DBEXEC(TRUE, SanityCheck());
1018
1019     comp->compCurBB = nullptr;
1020     comp->fgOrder   = Compiler::FGOrderLinear;
1021
1022     RationalizeVisitor visitor(*this);
1023     for (BasicBlock* block = comp->fgFirstBB; block != nullptr; block = block->bbNext)
1024     {
1025         comp->compCurBB = block;
1026         m_block         = block;
1027
1028         GenTreeStmt* firstStatement = block->firstStmt();
1029         block->MakeLIR(nullptr, nullptr);
1030
1031         // Establish the first and last nodes for the block. This is necessary in order for the LIR
1032         // utilities that hang off the BasicBlock type to work correctly.
1033         if (firstStatement == nullptr)
1034         {
1035             // No statements in this block; skip it.
1036             continue;
1037         }
1038
1039         for (GenTreeStmt *statement = firstStatement, *nextStatement; statement != nullptr; statement = nextStatement)
1040         {
1041             assert(statement->gtStmtList != nullptr);
1042             assert(statement->gtStmtList->gtPrev == nullptr);
1043             assert(statement->gtStmtExpr != nullptr);
1044             assert(statement->gtStmtExpr->gtNext == nullptr);
1045
1046             BlockRange().InsertAtEnd(LIR::Range(statement->gtStmtList, statement->gtStmtExpr));
1047
1048             nextStatement     = statement->getNextStmt();
1049             statement->gtNext = nullptr;
1050             statement->gtPrev = nullptr;
1051
1052             // If this statement has correct offset information, change it into an IL offset
1053             // node and insert it into the LIR.
1054             if (statement->gtStmtILoffsx != BAD_IL_OFFSET)
1055             {
1056                 assert(!statement->IsPhiDefnStmt());
1057                 statement->SetOper(GT_IL_OFFSET);
1058
1059                 BlockRange().InsertBefore(statement->gtStmtList, statement);
1060             }
1061
1062             m_block = block;
1063             visitor.WalkTree(&statement->gtStmtExpr, nullptr);
1064         }
1065
1066         assert(BlockRange().CheckLIR(comp, true));
1067     }
1068
1069     comp->compRationalIRForm = true;
1070 }
1071 #endif // LEGACY_BACKEND