// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#include "jitpch.h"
#ifdef _MSC_VER
#pragma hdrstop
#endif

#ifndef LEGACY_BACKEND
// return op that is the store equivalent of the given load opcode
genTreeOps storeForm(genTreeOps loadForm)
{
    switch (loadForm)
    {
        case GT_LCL_VAR:
            return GT_STORE_LCL_VAR;
        case GT_LCL_FLD:
            return GT_STORE_LCL_FLD;
        case GT_REG_VAR:
            noway_assert(!"reg vars only supported in classic backend\n");
            unreached();
        default:
            noway_assert(!"not a data load opcode\n");
            unreached();
    }
}
// return op that is the addr equivalent of the given load opcode
genTreeOps addrForm(genTreeOps loadForm)
{
    switch (loadForm)
    {
        case GT_LCL_VAR:
            return GT_LCL_VAR_ADDR;
        case GT_LCL_FLD:
            return GT_LCL_FLD_ADDR;
        default:
            noway_assert(!"not a data load opcode\n");
            unreached();
    }
}
// return op that is the load equivalent of the given addr opcode
genTreeOps loadForm(genTreeOps addrForm)
{
    switch (addrForm)
    {
        case GT_LCL_VAR_ADDR:
            return GT_LCL_VAR;
        case GT_LCL_FLD_ADDR:
            return GT_LCL_FLD;
        default:
            noway_assert(!"not a local address opcode\n");
            unreached();
    }
}
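
// For example, for a simple local: addrForm(GT_LCL_VAR) == GT_LCL_VAR_ADDR,
// loadForm(GT_LCL_VAR_ADDR) == GT_LCL_VAR, and storeForm(GT_LCL_VAR) == GT_STORE_LCL_VAR.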
// copy the flags determined by mask from src to dst
void copyFlags(GenTree* dst, GenTree* src, unsigned mask)
{
    dst->gtFlags &= ~mask;
    dst->gtFlags |= (src->gtFlags & mask);
}
// RewriteSIMDOperand: Rewrite a SIMD indirection as GT_IND(GT_LEA(obj.op1)), or as a simple
//                     lclVar if possible.
//
// Arguments:
//    use     - A use reference for a block node
//    keepBlk - True if this should remain a block node if it is not a lclVar
//
// Return Value:
//    None.
//
// TODO-1stClassStructs: These should be eliminated earlier, once we can handle
// lclVars in all the places that used to have GT_OBJ.
//
void Rationalizer::RewriteSIMDOperand(LIR::Use& use, bool keepBlk)
{
#ifdef FEATURE_SIMD
    // No lowering is needed for non-SIMD nodes, so early out if featureSIMD is not enabled.
    if (!comp->featureSIMD)
    {
        return;
    }

    GenTree* tree = use.Def();
    if (!tree->OperIsIndir())
    {
        return;
    }

    var_types simdType = tree->TypeGet();
    if (!varTypeIsSIMD(simdType))
    {
        return;
    }
    // If we have GT_IND(GT_LCL_VAR_ADDR) and the GT_LCL_VAR_ADDR is TYP_BYREF/TYP_I_IMPL,
    // and the var is a SIMD type, replace the expression by GT_LCL_VAR.
    GenTree* addr = tree->AsIndir()->Addr();
    if (addr->OperIsLocalAddr() && comp->isAddrOfSIMDType(addr))
    {
        BlockRange().Remove(tree);

        addr->SetOper(loadForm(addr->OperGet()));
        addr->gtType = simdType;
        use.ReplaceWith(comp, addr);
    }
    else if ((addr->OperGet() == GT_ADDR) && (addr->gtGetOp1()->OperIsSIMDorSimdHWintrinsic()))
    {
        // If we have GT_IND(GT_ADDR(GT_SIMD)), remove the GT_IND(GT_ADDR()), leaving just the GT_SIMD.
        BlockRange().Remove(tree);
        BlockRange().Remove(addr);

        use.ReplaceWith(comp, addr->gtGetOp1());
    }
    else if (!keepBlk)
    {
        tree->SetOper(GT_IND);
        tree->gtType = simdType;
    }
#endif // FEATURE_SIMD
}
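
// For example (illustrative IR shapes, not verbatim dumps):
//   GT_IND(GT_LCL_VAR_ADDR V01)  => GT_LCL_VAR<simd16> V01
//   GT_IND(GT_ADDR(GT_SIMD ...)) => GT_SIMD ...
//   otherwise, when !keepBlk, the indirection is simply retyped as a plain GT_IND of the SIMD type.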
// RewriteNodeAsCall : Replace the given tree node by a GT_CALL.
//
// Arguments:
//    use        - A pointer-to-a-pointer for the tree node
//    parents    - A reference to the ancestor stack providing the context
//    callHnd    - The method handle of the call to be generated
//    entryPoint - The method entrypoint of the call to be generated
//    args       - The argument list of the call to be generated
//
// Return Value:
//    None.
//
void Rationalizer::RewriteNodeAsCall(GenTree**             use,
                                     ArrayStack<GenTree*>& parents,
                                     CORINFO_METHOD_HANDLE callHnd,
#ifdef FEATURE_READYTORUN_COMPILER
                                     CORINFO_CONST_LOOKUP entryPoint,
#endif
                                     GenTreeArgList* args)
{
    GenTree* const tree           = *use;
    GenTree* const treeFirstNode  = comp->fgGetFirstNode(tree);
    GenTree* const insertionPoint = treeFirstNode->gtPrev;

    BlockRange().Remove(treeFirstNode, tree);
    // Create the call node
    GenTreeCall* call = comp->gtNewCallNode(CT_USER_FUNC, callHnd, tree->gtType, args);

#ifdef DEBUG
    CORINFO_SIG_INFO sig;
    comp->eeGetMethodSig(callHnd, &sig);
    assert(JITtype2varType(sig.retType) == tree->gtType);
#endif // DEBUG
#ifdef FEATURE_READYTORUN_COMPILER
    call->gtCall.setEntryPoint(entryPoint);
#endif

    call = comp->fgMorphArgs(call);

    // Determine if this call has changed any codegen requirements.
    comp->fgCheckArgCnt();
    // Replace "tree" with "call"
    if (parents.Height() > 1)
    {
        parents.Index(1)->ReplaceOperand(use, call);
    }
    else
    {
        // If there's no parent, the tree being replaced is the root of the
        // statement (and no special handling is necessary).
        *use = call;
    }
    comp->gtSetEvalOrder(call);
    BlockRange().InsertAfter(insertionPoint, LIR::Range(comp->fgSetTreeSeq(call), call));

    // Propagate flags of "call" to its parents.
    // 0 is the current node, so start at 1.
    for (int i = 1; i < parents.Height(); i++)
    {
        parents.Index(i)->gtFlags |= (call->gtFlags & GTF_ALL_EFFECT) | GTF_CALL;
    }
    // Since "tree" is replaced with "call", pop the "tree" node (i.e. the current node)
    // and replace it with "call" on the parent stack.
    assert(parents.Top() == tree);
    (void)parents.Pop();
    parents.Push(call);
}
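
// Note: the helper above unlinks the entire subtree rooted at "tree" from the block's linear
// order, morphs the new call's arguments, and splices the resequenced call back in at the
// subtree's original position, so the surrounding statement remains correctly threaded.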
// RewriteIntrinsicAsUserCall : Rewrite an intrinsic operator as a GT_CALL to the original method.
//
// Arguments:
//    use     - A pointer-to-a-pointer for the intrinsic node
//    parents - A reference to the ancestor stack providing the context
//
// Return Value:
//    None.
//
// Notes:
//    Some intrinsics, such as Sqrt, are rewritten back to calls, and some are not.
//    The ones that are not rewritten here must be handled in Codegen.
//    Conceptually, Lowering would be the right place for this rewrite; it is kept in
//    rationalization mainly for throughput reasons.
//
void Rationalizer::RewriteIntrinsicAsUserCall(GenTree** use, ArrayStack<GenTree*>& parents)
{
    GenTreeIntrinsic* intrinsic = (*use)->AsIntrinsic();

    GenTreeArgList* args;
    if (intrinsic->gtOp.gtOp2 == nullptr)
    {
        args = comp->gtNewArgList(intrinsic->gtGetOp1());
    }
    else
    {
        args = comp->gtNewArgList(intrinsic->gtGetOp1(), intrinsic->gtGetOp2());
    }
    RewriteNodeAsCall(use, parents, intrinsic->gtMethodHandle,
#ifdef FEATURE_READYTORUN_COMPILER
                      intrinsic->gtEntryPoint,
#endif
                      args);
}
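
// For example, a GT_INTRINSIC node for Math.Sqrt whose intrinsic ID the target does not
// implement directly is rewritten here into an ordinary GT_CALL to Math.Sqrt itself.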
// FixupIfSIMDLocal: Fixup the type of a lclVar tree, as needed, if it is a SIMD type vector.
//
// Arguments:
//    node - the GenTreeLclVarCommon tree to be fixed up.
//
// Return Value:
//    None.
//
// TODO-1stClassStructs: This is now only here to preserve existing behavior. It is actually not
// desirable to change the lclFld nodes back to TYP_SIMD (it will cause them to be loaded
// into a vector register, and then moved to an int register).
//
void Rationalizer::FixupIfSIMDLocal(GenTreeLclVarCommon* node)
{
#ifdef FEATURE_SIMD
    if (!comp->featureSIMD)
    {
        return;
    }

    LclVarDsc* varDsc = &(comp->lvaTable[node->gtLclNum]);

    // Don't mark a byref of a SIMD vector as a SIMD type.
    // Note that although struct args are marked as lvIsSIMD=true,
    // the tree node representing such an arg should not be
    // marked as a SIMD type, since it is a byref of a SIMD type.
    if (!varTypeIsSIMD(varDsc))
    {
        return;
    }
    switch (node->OperGet())
    {
        default:
            // Nothing to do for most tree nodes.
            break;

        case GT_LCL_FLD:
            // We may see a lclFld used for pointer-sized structs that have been morphed, in which
            // case we can change it to GT_LCL_VAR.
            // However, we may also see a lclFld with FieldSeqStore::NotAField() for structs that can't
            // be analyzed, e.g. those with overlapping fields such as the IL implementation of Vector<T>.
            if ((node->AsLclFld()->gtFieldSeq == FieldSeqStore::NotAField()) && (node->AsLclFld()->gtLclOffs == 0) &&
                (node->gtType == TYP_I_IMPL) && (varDsc->lvExactSize == TARGET_POINTER_SIZE))
            {
                node->SetOper(GT_LCL_VAR);
                node->gtFlags &= ~(GTF_VAR_USEASG);
            }
            else
            {
                // If we access a field of a SIMD lclVar via GT_LCL_FLD, it cannot have been
                // independently promoted.
                assert(comp->lvaGetPromotionType(varDsc) != Compiler::PROMOTION_TYPE_INDEPENDENT);
                return;
            }
            break;
        case GT_STORE_LCL_FLD:
            assert(node->gtType == TYP_I_IMPL);
            node->SetOper(GT_STORE_LCL_VAR);
            node->gtFlags &= ~(GTF_VAR_USEASG);
            break;
    }

    unsigned simdSize = (unsigned int)roundUp(varDsc->lvExactSize, TARGET_POINTER_SIZE);
    node->gtType      = comp->getSIMDTypeForSize(simdSize);
#endif // FEATURE_SIMD
}
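
// For example, on a 64-bit target, a TYP_I_IMPL GT_LCL_FLD at offset 0 of an 8-byte SIMD
// local (e.g. a Vector2) is turned back into a GT_LCL_VAR and retyped as TYP_SIMD8 here.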
#ifdef DEBUG

void Rationalizer::ValidateStatement(GenTree* tree, BasicBlock* block)
{
    assert(tree->gtOper == GT_STMT);
    DBEXEC(TRUE, JitTls::GetCompiler()->fgDebugCheckNodeLinks(block, tree));
}
// sanity checks that apply to all kinds of IR
void Rationalizer::SanityCheck()
{
    // TODO: assert(!IsLIR());

    BasicBlock* block;
    foreach_block(comp, block)
    {
        for (GenTree* statement = block->bbTreeList; statement != nullptr; statement = statement->gtNext)
        {
            ValidateStatement(statement, block);

            for (GenTree* tree = statement->gtStmt.gtStmtList; tree; tree = tree->gtNext)
            {
                // QMARK nodes should have been removed before this phase.
                assert(tree->OperGet() != GT_QMARK);

                if (tree->OperGet() == GT_ASG)
                {
                    if (tree->gtGetOp1()->OperGet() == GT_LCL_VAR)
                    {
                        assert(tree->gtGetOp1()->gtFlags & GTF_VAR_DEF);
                    }
                    else if (tree->gtGetOp2()->OperGet() == GT_LCL_VAR)
                    {
                        assert(!(tree->gtGetOp2()->gtFlags & GTF_VAR_DEF));
                    }
                }
            }
        }
    }
}
void Rationalizer::SanityCheckRational()
{
    // TODO-Cleanup: check that the tree is rational here,
    // then do the normal checks.
    SanityCheck();
}

#endif // DEBUG
static void RewriteAssignmentIntoStoreLclCore(GenTreeOp* assignment,
                                              GenTree*   location,
                                              GenTree*   value,
                                              genTreeOps locationOp)
{
    assert(assignment != nullptr);
    assert(assignment->OperGet() == GT_ASG);
    assert(location != nullptr);
    assert(value != nullptr);

    genTreeOps storeOp = storeForm(locationOp);

    JITDUMP("rewriting asg(%s, X) to %s(X)\n", GenTree::OpName(locationOp), GenTree::OpName(storeOp));
    assignment->SetOper(storeOp);
    GenTreeLclVarCommon* store = assignment->AsLclVarCommon();

    GenTreeLclVarCommon* var = location->AsLclVarCommon();
    store->SetLclNum(var->gtLclNum);
    store->SetSsaNum(var->gtSsaNum);

    if (locationOp == GT_LCL_FLD)
    {
        store->gtLclFld.gtLclOffs  = var->gtLclFld.gtLclOffs;
        store->gtLclFld.gtFieldSeq = var->gtLclFld.gtFieldSeq;
    }

    copyFlags(store, var, GTF_LIVENESS_MASK);
    store->gtFlags &= ~GTF_REVERSE_OPS;

    store->gtType = var->TypeGet();
    store->gtOp1  = value;
}
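
// For example, GT_ASG(GT_LCL_VAR V02, X) becomes GT_STORE_LCL_VAR V02 with X as its sole
// operand: the assignment node itself is mutated in place into the store node.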
void Rationalizer::RewriteAssignmentIntoStoreLcl(GenTreeOp* assignment)
{
    assert(assignment != nullptr);
    assert(assignment->OperGet() == GT_ASG);

    GenTree* location = assignment->gtGetOp1();
    GenTree* value    = assignment->gtGetOp2();

    RewriteAssignmentIntoStoreLclCore(assignment, location, value, location->OperGet());
}
void Rationalizer::RewriteAssignment(LIR::Use& use)
{
    assert(use.IsInitialized());

    GenTreeOp* assignment = use.Def()->AsOp();
    assert(assignment->OperGet() == GT_ASG);

    GenTree* location = assignment->gtGetOp1();
    GenTree* value    = assignment->gtGetOp2();

    genTreeOps locationOp = location->OperGet();
    if (assignment->OperIsBlkOp())
    {
#ifdef FEATURE_SIMD
        if (varTypeIsSIMD(location) && assignment->OperIsInitBlkOp())
        {
            if (location->OperGet() == GT_LCL_VAR)
            {
                var_types simdType = location->TypeGet();
                GenTree*  initVal  = assignment->gtOp.gtOp2;
                var_types baseType = comp->getBaseTypeOfSIMDLocal(location);
                if (baseType != TYP_UNKNOWN)
                {
                    GenTreeSIMD* simdTree = new (comp, GT_SIMD)
                        GenTreeSIMD(simdType, initVal, SIMDIntrinsicInit, baseType, genTypeSize(simdType));
                    assignment->gtOp.gtOp2 = simdTree;
                    value                  = simdTree;

                    // Link the new SIMD node into execution order between initVal and location.
                    initVal->gtNext  = simdTree;
                    simdTree->gtPrev = initVal;

                    simdTree->gtNext = location;
                    location->gtPrev = simdTree;
                }
            }
        }
#endif // FEATURE_SIMD
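
        // For example, an initblk of a SIMD local, GT_ASG(GT_LCL_VAR<simd16> V03, initVal),
        // becomes GT_ASG(GT_LCL_VAR<simd16> V03, GT_SIMD Init(initVal)); the switch below
        // then rewrites the assignment itself into a GT_STORE_LCL_VAR.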
        if ((location->TypeGet() == TYP_STRUCT) && !assignment->IsPhiDefn() && !value->IsMultiRegCall())
        {
            if (location->OperGet() == GT_LCL_VAR)
            {
                // We need to construct a block node for the location.
                // Modify lcl to be the address form.
                location->SetOper(addrForm(locationOp));
                LclVarDsc* varDsc = &(comp->lvaTable[location->AsLclVarCommon()->gtLclNum]);
                location->gtType  = TYP_BYREF;

                GenTreeBlk*  storeBlk = nullptr;
                unsigned int size     = varDsc->lvExactSize;
                if (varDsc->lvStructGcCount != 0)
                {
                    CORINFO_CLASS_HANDLE structHnd = varDsc->lvVerTypeInfo.GetClassHandle();
                    GenTreeObj*          objNode   = comp->gtNewObjNode(structHnd, location)->AsObj();
                    unsigned int         slots = (unsigned)(roundUp(size, TARGET_POINTER_SIZE) / TARGET_POINTER_SIZE);

                    objNode->SetGCInfo(varDsc->lvGcLayout, varDsc->lvStructGcCount, slots);
                    objNode->ChangeOper(GT_STORE_OBJ);
                    objNode->SetData(value);
                    comp->fgMorphUnsafeBlk(objNode);
                    storeBlk = objNode;
                }
                else
                {
                    storeBlk = new (comp, GT_STORE_BLK) GenTreeBlk(GT_STORE_BLK, TYP_STRUCT, location, value, size);
                }

                storeBlk->gtFlags |= GTF_ASG;
                storeBlk->gtFlags |= ((location->gtFlags | value->gtFlags) & GTF_ALL_EFFECT);

                GenTree* insertionPoint = location->gtNext;
                BlockRange().InsertBefore(insertionPoint, storeBlk);
                use.ReplaceWith(comp, storeBlk);
                BlockRange().Remove(assignment);

                JITDUMP("After transforming local struct assignment into a block op:\n");
                DISPTREERANGE(BlockRange(), use.Def());
                JITDUMP("\n");
                return;
            }
            else
            {
                assert(location->OperIsBlk());
            }
        }
    }

    switch (locationOp)
    {
        case GT_LCL_VAR:
        case GT_LCL_FLD:
        case GT_REG_VAR:
        case GT_PHI_ARG:
            RewriteAssignmentIntoStoreLclCore(assignment, location, value, locationOp);
            BlockRange().Remove(location);
            break;
        case GT_IND:
        {
            GenTreeStoreInd* store =
                new (comp, GT_STOREIND) GenTreeStoreInd(location->TypeGet(), location->gtGetOp1(), value);

            copyFlags(store, assignment, GTF_ALL_EFFECT);
            copyFlags(store, location, GTF_IND_FLAGS);
            // Remove the GT_IND node and replace the assignment node with the store.
            BlockRange().Remove(location);
            BlockRange().InsertBefore(assignment, store);
            use.ReplaceWith(comp, store);
            BlockRange().Remove(assignment);
        }
        break;
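
        // The GT_IND case above rewrites, e.g., GT_ASG(GT_IND(addr), val) into
        // GT_STOREIND(addr, val), reusing the indirection's address operand directly.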
        case GT_CLS_VAR:
        {
            location->SetOper(GT_CLS_VAR_ADDR);
            location->gtType = TYP_BYREF;

            assignment->SetOper(GT_STOREIND);
            assignment->AsStoreInd()->SetRMWStatusDefault();
        }
        break;
        case GT_BLK:
        case GT_OBJ:
        case GT_DYN_BLK:
        {
            assert(varTypeIsStruct(location));
            GenTreeBlk* storeBlk = location->AsBlk();
            genTreeOps  storeOper;
            switch (location->gtOper)
            {
                case GT_BLK:
                    storeOper = GT_STORE_BLK;
                    break;
                case GT_OBJ:
                    storeOper = GT_STORE_OBJ;
                    break;
                case GT_DYN_BLK:
                    storeOper                             = GT_STORE_DYN_BLK;
                    storeBlk->AsDynBlk()->gtEvalSizeFirst = false;
                    break;
                default:
                    unreached();
            }
            JITDUMP("Rewriting GT_ASG(%s(X), Y) to %s(X,Y):\n", GenTree::OpName(location->gtOper),
                    GenTree::OpName(storeOper));
            storeBlk->SetOperRaw(storeOper);
            storeBlk->gtFlags &= ~GTF_DONT_CSE;
            storeBlk->gtFlags |=
                (assignment->gtFlags & (GTF_ALL_EFFECT | GTF_BLK_VOLATILE | GTF_BLK_UNALIGNED | GTF_DONT_CSE));
            storeBlk->gtBlk.Data() = value;

            // Replace the assignment node with the store.
            use.ReplaceWith(comp, storeBlk);
            BlockRange().Remove(assignment);
            DISPTREERANGE(BlockRange(), use.Def());
            JITDUMP("\n");
        }
        break;

        default:
            unreached();
    }
}
void Rationalizer::RewriteAddress(LIR::Use& use)
{
    assert(use.IsInitialized());

    GenTreeUnOp* address = use.Def()->AsUnOp();
    assert(address->OperGet() == GT_ADDR);

    GenTree*   location   = address->gtGetOp1();
    genTreeOps locationOp = location->OperGet();
    if (location->IsLocal())
    {
        // We are changing the child from GT_LCL_VAR to GT_LCL_VAR_ADDR.
        // Therefore the gtType of the child needs to be changed to TYP_BYREF.
#ifdef DEBUG
        if (locationOp == GT_LCL_VAR)
        {
            JITDUMP("Rewriting GT_ADDR(GT_LCL_VAR) to GT_LCL_VAR_ADDR:\n");
        }
        else
        {
            assert(locationOp == GT_LCL_FLD);
            JITDUMP("Rewriting GT_ADDR(GT_LCL_FLD) to GT_LCL_FLD_ADDR:\n");
        }
#endif // DEBUG

        location->SetOper(addrForm(locationOp));
        location->gtType = TYP_BYREF;
        copyFlags(location, address, GTF_ALL_EFFECT);

        use.ReplaceWith(comp, location);
        BlockRange().Remove(address);
    }
    else if (locationOp == GT_CLS_VAR)
    {
        location->SetOper(GT_CLS_VAR_ADDR);
        location->gtType = TYP_BYREF;
        copyFlags(location, address, GTF_ALL_EFFECT);

        use.ReplaceWith(comp, location);
        BlockRange().Remove(address);

        JITDUMP("Rewriting GT_ADDR(GT_CLS_VAR) to GT_CLS_VAR_ADDR:\n");
    }
    else if (location->OperIsIndir())
    {
        use.ReplaceWith(comp, location->gtGetOp1());
        BlockRange().Remove(location);
        BlockRange().Remove(address);

        JITDUMP("Rewriting GT_ADDR(GT_IND(X)) to X:\n");
    }

    DISPTREERANGE(BlockRange(), use.Def());
    JITDUMP("\n");
}
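
// For example, RewriteAddress turns GT_ADDR(GT_LCL_VAR V04) into a single TYP_BYREF
// GT_LCL_VAR_ADDR V04 node, and a GT_ADDR(GT_IND(X)) pair simply cancels out to X.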
Compiler::fgWalkResult Rationalizer::RewriteNode(GenTree** useEdge, ArrayStack<GenTree*>& parentStack)
{
    assert(useEdge != nullptr);

    GenTree* node = *useEdge;
    assert(node != nullptr);

#ifdef DEBUG
    const bool isLateArg = (node->gtFlags & GTF_LATE_ARG) != 0;
#endif
    // First, remove any preceding list nodes, which are not otherwise visited by the tree walk.
    //
    // NOTE: GT_FIELD_LIST head nodes, and GT_LIST nodes used by phi nodes will in fact be visited.
    for (GenTree* prev = node->gtPrev; prev != nullptr && prev->OperIsAnyList() && !(prev->OperIsFieldListHead());
         prev          = node->gtPrev)
    {
        prev->gtFlags &= ~GTF_REVERSE_OPS;
        BlockRange().Remove(prev);
    }
    // Now clear the REVERSE_OPS flag on the current node.
    node->gtFlags &= ~GTF_REVERSE_OPS;

    // In addition, remove the current node if it is a GT_LIST node that is not a field list head.
    if (node->OperIsAnyList())
    {
        GenTreeArgList* list = node->AsArgList();
        if (!list->OperIsFieldListHead())
        {
            BlockRange().Remove(list);
        }

        return Compiler::WALK_CONTINUE;
    }
    LIR::Use use;
    if (parentStack.Height() < 2)
    {
        use = LIR::Use::GetDummyUse(BlockRange(), *useEdge);
    }
    else
    {
        use = LIR::Use(BlockRange(), useEdge, parentStack.Index(1));
    }

    assert(node == use.Def());
    switch (node->OperGet())
    {
        case GT_ASG:
            RewriteAssignment(use);
            break;

        case GT_BOX:
            // GT_BOX at this level just passes through, so get rid of it.
            use.ReplaceWith(comp, node->gtGetOp1());
            BlockRange().Remove(node);
            break;

        case GT_ADDR:
            RewriteAddress(use);
            break;
        case GT_IND:
            // Clear the `GTF_IND_ASG_LHS` flag, which overlaps with `GTF_IND_REQ_ADDR_IN_REG`.
            node->gtFlags &= ~GTF_IND_ASG_LHS;

            if (varTypeIsSIMD(node))
            {
                RewriteSIMDOperand(use, false);
            }
            else
            {
                // Due to promotion of structs containing fields of type struct with a
                // single scalar type field, we could potentially see IR nodes of the
                // form GT_IND(GT_ADD(lclvarAddr, 0)) where 0 is an offset representing
                // a field-seq. These get folded here.
                //
                // TODO: This code can be removed once the JIT implements recursive struct
                // promotion instead of lying about the type of a struct field by giving it
                // the type of its single scalar field.
                GenTree* addr = node->AsIndir()->Addr();
                if (addr->OperGet() == GT_ADD && addr->gtGetOp1()->OperGet() == GT_LCL_VAR_ADDR &&
                    addr->gtGetOp2()->IsIntegralConst(0))
                {
                    GenTreeLclVarCommon* lclVarNode = addr->gtGetOp1()->AsLclVarCommon();
                    unsigned             lclNum     = lclVarNode->GetLclNum();
                    LclVarDsc*           varDsc     = comp->lvaTable + lclNum;
                    if (node->TypeGet() == varDsc->TypeGet())
                    {
                        JITDUMP("Rewriting GT_IND(GT_ADD(LCL_VAR_ADDR,0)) to LCL_VAR\n");
                        lclVarNode->SetOper(GT_LCL_VAR);
                        lclVarNode->gtType = node->TypeGet();
                        use.ReplaceWith(comp, lclVarNode);
                        BlockRange().Remove(addr);
                        BlockRange().Remove(addr->gtGetOp2());
                        BlockRange().Remove(node);
                    }
                }
            }
            break;
        case GT_NOP:
            // fgMorph sometimes inserts NOP nodes between defs and uses,
            // supposedly 'to prevent constant folding'. In this case, remove the NOP.
            if (node->gtGetOp1() != nullptr)
            {
                use.ReplaceWith(comp, node->gtGetOp1());
                BlockRange().Remove(node);
                node = node->gtGetOp1();
            }
            break;
        case GT_COMMA:
        {
            GenTree*           op1         = node->gtGetOp1();
            bool               isClosed    = false;
            unsigned           sideEffects = 0;
            LIR::ReadOnlyRange lhsRange    = BlockRange().GetTreeRange(op1, &isClosed, &sideEffects);

            if ((sideEffects & GTF_ALL_EFFECT) == 0)
            {
                // The LHS has no side effects. Remove it.
                // None of the transforms performed herein violate tree order, so isClosed
                // should always be true.
                assert(isClosed);

                BlockRange().Delete(comp, m_block, std::move(lhsRange));
            }
            else if (op1->IsValue())
            {
                op1->SetUnusedValue();
            }

            BlockRange().Remove(node);

            GenTree* replacement = node->gtGetOp2();
            if (!use.IsDummyUse())
            {
                use.ReplaceWith(comp, replacement);
                node = replacement;
            }
            else
            {
                // This is a top-level comma. If the RHS has no side effects we can remove
                // it as well.
                bool               isClosed    = false;
                unsigned           sideEffects = 0;
                LIR::ReadOnlyRange rhsRange    = BlockRange().GetTreeRange(replacement, &isClosed, &sideEffects);

                if ((sideEffects & GTF_ALL_EFFECT) == 0)
                {
                    // None of the transforms performed herein violate tree order, so isClosed
                    // should always be true.
                    assert(isClosed);

                    BlockRange().Delete(comp, m_block, std::move(rhsRange));
                }
                else
                {
                    node = replacement;
                }
            }
        }
        break;
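
        // The GT_COMMA case above splits GT_COMMA(LHS, RHS) apart: a side-effect-free LHS is
        // deleted outright (otherwise it is marked as an unused value), and the comma's use is
        // redirected to RHS; an unused, side-effect-free RHS of a top-level comma is deleted too.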
        case GT_ARGPLACE:
            // Remove argplace and list nodes from the execution order.
            //
            // TODO: remove phi args and phi nodes as well?
            BlockRange().Remove(node);
            break;
#if defined(_TARGET_XARCH_) || defined(_TARGET_ARM_)
        case GT_CLS_VAR:
        {
            // Class vars that are the target of an assignment will get rewritten into
            // GT_STOREIND(GT_CLS_VAR_ADDR, val) by RewriteAssignment. This check is
            // not strictly necessary--the GT_IND(GT_CLS_VAR_ADDR) pattern that would
            // otherwise be generated would also be picked up by RewriteAssignment--but
            // skipping the rewrite here saves an allocation and a bit of extra work.
            const bool isLHSOfAssignment = (use.User()->OperGet() == GT_ASG) && (use.User()->gtGetOp1() == node);
            if (!isLHSOfAssignment)
            {
                GenTree* ind = comp->gtNewOperNode(GT_IND, node->TypeGet(), node);

                node->SetOper(GT_CLS_VAR_ADDR);
                node->gtType = TYP_BYREF;

                BlockRange().InsertAfter(node, ind);
                use.ReplaceWith(comp, ind);
            }
        }
        break;
#endif // _TARGET_XARCH_ || _TARGET_ARM_
        case GT_INTRINSIC:
            // Non-target intrinsics should have already been rewritten back into user calls.
            assert(comp->IsTargetIntrinsic(node->gtIntrinsic.gtIntrinsicId));
            break;
#ifdef FEATURE_SIMD
        case GT_BLK:
        case GT_OBJ:
        {
            // TODO-1stClassStructs: These should have been transformed to GT_INDs, but in order
            // to preserve existing behavior, we will keep this as a block node if this is the
            // lhs of a block assignment, and either:
            // - It is a "generic" TYP_STRUCT assignment, OR
            // - It is an initblk, OR
            // - Neither the lhs nor the rhs are known to be of SIMD type.

            GenTree* parent  = use.User();
            bool     keepBlk = false;
            if ((parent->OperGet() == GT_ASG) && (node == parent->gtGetOp1()))
            {
                if ((node->TypeGet() == TYP_STRUCT) || parent->OperIsInitBlkOp())
                {
                    keepBlk = true;
                }
                else if (!comp->isAddrOfSIMDType(node->AsBlk()->Addr()))
                {
                    GenTree* dataSrc = parent->gtGetOp2();
                    if (!dataSrc->IsLocal() && (dataSrc->OperGet() != GT_SIMD) && !dataSrc->OperIsHWIntrinsic())
                    {
                        noway_assert(dataSrc->OperIsIndir());
                        keepBlk = !comp->isAddrOfSIMDType(dataSrc->AsIndir()->Addr());
                    }
                }
            }
            RewriteSIMDOperand(use, keepBlk);
        }
        break;
        case GT_LCL_FLD:
        case GT_STORE_LCL_FLD:
            // TODO-1stClassStructs: Eliminate this.
            FixupIfSIMDLocal(node->AsLclVarCommon());
            break;
        case GT_SIMD:
        {
            noway_assert(comp->featureSIMD);
            GenTreeSIMD* simdNode = node->AsSIMD();
            unsigned     simdSize = simdNode->gtSIMDSize;
            var_types    simdType = comp->getSIMDTypeForSize(simdSize);

            // TODO-1stClassStructs: This should be handled more generally for enregistered or promoted
            // structs that are passed or returned in a different register type than their enregistered
            // type(s).
            if (simdNode->gtType == TYP_I_IMPL && simdNode->gtSIMDSize == TARGET_POINTER_SIZE)
            {
                // This happens when it is consumed by a GT_RET_EXPR.
                // It can only be a Vector2f or Vector2i.
                assert(genTypeSize(simdNode->gtSIMDBaseType) == 4);
                simdNode->gtType = TYP_SIMD8;
            }
            // Certain SIMD trees require rationalizing.
            if (simdNode->gtSIMD.gtSIMDIntrinsicID == SIMDIntrinsicInitArray)
            {
                // Rewrite this as an explicit load.
                JITDUMP("Rewriting GT_SIMD array init as an explicit load:\n");
                unsigned int baseTypeSize = genTypeSize(simdNode->gtSIMDBaseType);
                GenTree*     address = new (comp, GT_LEA) GenTreeAddrMode(TYP_BYREF, simdNode->gtOp1, simdNode->gtOp2,
                                                                      baseTypeSize, offsetof(CORINFO_Array, u1Elems));
                GenTree* ind = comp->gtNewOperNode(GT_IND, simdType, address);

                BlockRange().InsertBefore(simdNode, address, ind);
                use.ReplaceWith(comp, ind);
                BlockRange().Remove(simdNode);

                DISPTREERANGE(BlockRange(), use.Def());
                JITDUMP("\n");
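
                // The GT_LEA above computes the address of the first element to load:
                //   arrayRef + offsetof(CORINFO_Array, u1Elems) + index * baseTypeSize
                // (the index term only when an index operand is present).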
            }
            else
            {
                // This code depends on the fact that NONE of the SIMD intrinsics take vector operands
                // of a different width. If that assumption changes, we will EITHER have to make these type
                // transformations during importation, and plumb the types all the way through the JIT,
                // OR add a lot of special handling here.
                GenTree* op1 = simdNode->gtGetOp1();
                if (op1 != nullptr && op1->gtType == TYP_STRUCT)
                {
                    op1->gtType = simdType;
                }

                GenTree* op2 = simdNode->gtGetOp2IfPresent();
                if (op2 != nullptr && op2->gtType == TYP_STRUCT)
                {
                    op2->gtType = simdType;
                }
            }
        }
        break;
#endif // FEATURE_SIMD
        default:
            // JCMP, CMP, SETCC and JCC nodes should not be present in HIR.
            assert(!node->OperIs(GT_CMP, GT_SETCC, GT_JCC, GT_JCMP));
            break;
    }
    // Do some extra processing on top-level nodes to remove unused local reads.
    if (node->OperIsLocalRead())
    {
        if (use.IsDummyUse())
        {
            comp->lvaDecRefCnts(node);
            BlockRange().Remove(node);
        }
        else
        {
            // Local reads are side-effect-free; clear any flags leftover from frontend transformations.
            node->gtFlags &= ~GTF_ALL_EFFECT;
        }
    }
    else
    {
        if (!node->OperIsStore())
        {
            // Clear the GTF_ASG flag for all nodes but stores.
            node->gtFlags &= ~GTF_ASG;
        }

        if (!node->IsCall())
        {
            // Clear the GTF_CALL flag for all nodes but calls.
            node->gtFlags &= ~GTF_CALL;
        }
        if (node->IsValue() && use.IsDummyUse())
        {
            node->SetUnusedValue();
        }

        if (node->TypeGet() == TYP_LONG)
        {
            comp->compLongUsed = true;
        }
    }
    assert(isLateArg == ((use.Def()->gtFlags & GTF_LATE_ARG) != 0));

    return Compiler::WALK_CONTINUE;
}
void Rationalizer::DoPhase()
{
    class RationalizeVisitor final : public GenTreeVisitor<RationalizeVisitor>
    {
        Rationalizer& m_rationalizer;

    public:
        enum
        {
            ComputeStack      = true,
            DoPreOrder        = true,
            DoPostOrder       = true,
            UseExecutionOrder = true,
        };
        RationalizeVisitor(Rationalizer& rationalizer)
            : GenTreeVisitor<RationalizeVisitor>(rationalizer.comp), m_rationalizer(rationalizer)
        {
        }
        // Rewrite intrinsics that are not supported by the target back into user calls.
        // This needs to be done before the transition to LIR because it relies on the use
        // of fgMorphArgs, which is designed to operate on HIR. Once this is done for a
        // particular statement, link that statement's nodes into the current basic block.
        fgWalkResult PreOrderVisit(GenTree** use, GenTree* user)
        {
            GenTree* const node = *use;
            if (node->OperGet() == GT_INTRINSIC &&
                m_rationalizer.comp->IsIntrinsicImplementedByUserCall(node->gtIntrinsic.gtIntrinsicId))
            {
                m_rationalizer.RewriteIntrinsicAsUserCall(use, this->m_ancestors);
            }

            return Compiler::WALK_CONTINUE;
        }
        // Rewrite HIR nodes into LIR nodes.
        fgWalkResult PostOrderVisit(GenTree** use, GenTree* user)
        {
            return m_rationalizer.RewriteNode(use, this->m_ancestors);
        }
    };
    DBEXEC(TRUE, SanityCheck());

    comp->compCurBB = nullptr;
    comp->fgOrder   = Compiler::FGOrderLinear;

    RationalizeVisitor visitor(*this);
    for (BasicBlock* block = comp->fgFirstBB; block != nullptr; block = block->bbNext)
    {
        comp->compCurBB = block;
        m_block         = block;

        GenTreeStmt* firstStatement = block->firstStmt();
        block->MakeLIR(nullptr, nullptr);
        // Establish the first and last nodes for the block. This is necessary in order for the LIR
        // utilities that hang off the BasicBlock type to work correctly.
        if (firstStatement == nullptr)
        {
            // No statements in this block; skip it.
            continue;
        }
        for (GenTreeStmt *statement = firstStatement, *nextStatement; statement != nullptr; statement = nextStatement)
        {
            assert(statement->gtStmtList != nullptr);
            assert(statement->gtStmtList->gtPrev == nullptr);
            assert(statement->gtStmtExpr != nullptr);
            assert(statement->gtStmtExpr->gtNext == nullptr);

            BlockRange().InsertAtEnd(LIR::Range(statement->gtStmtList, statement->gtStmtExpr));
            nextStatement     = statement->getNextStmt();
            statement->gtNext = nullptr;
            statement->gtPrev = nullptr;
            // If this statement has correct offset information, change it into an IL offset
            // node and insert it into the LIR.
            if (statement->gtStmtILoffsx != BAD_IL_OFFSET)
            {
                assert(!statement->IsPhiDefnStmt());
                statement->SetOper(GT_IL_OFFSET);

                BlockRange().InsertBefore(statement->gtStmtList, statement);
            }
            visitor.WalkTree(&statement->gtStmtExpr, nullptr);
        }

        assert(BlockRange().CheckLIR(comp, true));
    }
    comp->compRationalIRForm = true;
}

#endif // LEGACY_BACKEND