1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
4 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
5 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
7 XX Lowering for RISCV64 common code XX
9 XX This encapsulates common logic for lowering trees for the RISCV64 XX
10 XX architectures. For a more detailed view of what is lowering, please XX
11 XX take a look at Lower.cpp XX
13 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
22 #ifdef TARGET_RISCV64 // This file is ONLY used for RISCV64 architectures
25 #include "sideeffects.h"
29 #ifdef FEATURE_HW_INTRINSICS
30 #include "hwintrinsic.h"
33 //------------------------------------------------------------------------
34 // IsCallTargetInRange: Can a call target address be encoded in-place?
//
// Arguments:
//    addr - the call target address to test
//
// Return Value:
37 //    True if the addr fits into the range.
//
39 bool Lowering::IsCallTargetInRange(void* addr)
41 // TODO-RISCV64: using B/BL for optimization.
45 //------------------------------------------------------------------------
46 // IsContainableImmed: Is an immediate encodable in-place?
//
// Arguments:
//    parentNode - the node that would consume the immediate
//    childNode  - the candidate immediate operand
//
// Return Value:
49 //    True if the immediate can be folded into an instruction,
50 //    for example small enough and non-relocatable.
//
52 bool Lowering::IsContainableImmed(GenTree* parentNode, GenTree* childNode) const
// Floating-point instructions cannot take integer immediate operands.
54 if (!varTypeIsFloating(parentNode->TypeGet()))
56 // Make sure we have an actual immediate
57 if (!childNode->IsCnsIntOrI())
// A relocatable immediate is patched later, so its final value is not known here.
59 if (childNode->AsIntCon()->ImmedValNeedsReloc(comp))
62 // TODO-CrossBitness: we wouldn't need the cast below if GenTreeIntCon::gtIconVal had target_ssize_t type.
63 target_ssize_t immVal = (target_ssize_t)childNode->AsIntCon()->gtIconVal;
// The encodable range depends on which instruction will consume the immediate.
65 switch (parentNode->OperGet())
// isValidSimm12: signed 12-bit immediate — the range of RISC-V I-type instructions.
74 return emitter::isValidSimm12(immVal);
78 return emitter::isValidUimm11(immVal);
82 case GT_STORE_LCL_FLD:
83 case GT_STORE_LCL_VAR:
96 //------------------------------------------------------------------------
97 // LowerMul: Lower a GT_MUL/GT_MULHI/GT_MUL_LONG node.
//
99 // Performs containment checks.
//
101 // TODO-RISCV64-CQ: recognize GT_MULs that can be turned into MUL_LONGs,
102 // as those are cheaper.
//
// Arguments:
105 //    mul - The node to lower
//
// Return Value:
108 //    The next node to lower.
//
110 GenTree* Lowering::LowerMul(GenTreeOp* mul)
112 assert(mul->OperIsMul());
114 ContainCheckMul(mul);
119 //------------------------------------------------------------------------
120 // Lowering::LowerJTrue: Lowers a JTRUE node.
//
// Arguments:
123 //    jtrue - the JTRUE node
//
// Return Value:
126 //    The next node to lower (usually nullptr).
//
// Notes:
//    The JTRUE and its comparison operand are fused into a single GT_JCMP node,
//    matching RISCV64's compare-and-branch instructions.
//
128 GenTree* Lowering::LowerJTrue(GenTreeOp* jtrue)
130 GenTree* op = jtrue->gtGetOp1();
// An integer relop can be folded directly into the branch; floating-point
// compares are excluded here and take the compare-against-zero path below.
135 if (op->OperIsCompare() && !varTypeIsFloating(op->gtGetOp1()))
137 // We do not expect any other relops on RISCV64
138 assert(op->OperIs(GT_EQ, GT_NE, GT_LT, GT_LE, GT_GE, GT_GT))
140 cond = GenCondition::FromRelop(op);
142 cmpOp1 = op->gtGetOp1();
143 cmpOp2 = op->gtGetOp2();
145 // We will fall through and turn this into a JCMP(op1, op2, kind), but need to remove the relop here.
146 BlockRange().Remove(op);
// Otherwise branch on the truth value itself: compare it against a fresh zero constant.
150 cond = GenCondition(GenCondition::NE);
153 cmpOp2 = comp->gtNewZeroConNode(cmpOp1->TypeGet());
155 BlockRange().InsertBefore(jtrue, cmpOp2);
157 // Fall through and turn this into a JCMP(op1, 0, NE).
160 // for RISCV64's compare and condition-branch instructions,
161 // it's very similar to the IL instructions.
162 jtrue->ChangeOper(GT_JCMP);
163 jtrue->gtOp1 = cmpOp1;
164 jtrue->gtOp2 = cmpOp2;
165 jtrue->AsOpCC()->gtCondition = cond;
// A constant second operand can be encoded in the branch, so contain it.
167 if (cmpOp2->IsCnsIntOrI())
169 cmpOp2->SetContained();
172 return jtrue->gtNext;
175 //------------------------------------------------------------------------
176 // LowerBinaryArithmetic: lowers the given binary arithmetic node.
//
// Arguments:
179 //    node - the arithmetic node to lower
//
// Return Value:
182 //    The next node to lower.
//
184 GenTree* Lowering::LowerBinaryArithmetic(GenTreeOp* binOp)
// Only containment analysis is needed on RISCV64.
186 ContainCheckBinary(binOp);
188 return binOp->gtNext;
191 //------------------------------------------------------------------------
192 // LowerStoreLoc: Lower a store of a lclVar
//
// Arguments:
195 //    storeLoc - the local store (GT_STORE_LCL_FLD or GT_STORE_LCL_VAR)
//
// Notes:
199 //    - Widening operations of unsigneds.
//
201 void Lowering::LowerStoreLoc(GenTreeLclVarCommon* storeLoc)
203 if (storeLoc->OperIs(GT_STORE_LCL_FLD))
205 // We should only encounter this for lclVars that are lvDoNotEnregister.
206 verifyLclFldDoNotEnregister(storeLoc->GetLclNum());
208 ContainCheckStoreLoc(storeLoc);
211 //------------------------------------------------------------------------
212 // LowerStoreIndir: Determine addressing mode for an indirection, and whether operands are contained.
//
// Arguments:
215 //    node - The indirect store node (GT_STORE_IND) of interest
//
220 void Lowering::LowerStoreIndir(GenTreeStoreInd* node)
// Only containment analysis is needed on RISCV64.
222 ContainCheckStoreIndir(node);
225 //------------------------------------------------------------------------
226 // LowerBlockStore: Set block store type
//
// Arguments:
229 //    blkNode - The block store node of interest
//
// Notes:
//    Picks an expansion strategy (inline unroll vs. helper call) based on the
//    block size and the kind of store (init vs. copy, GC pointers or not).
//
234 void Lowering::LowerBlockStore(GenTreeBlk* blkNode)
236 GenTree* dstAddr = blkNode->Addr();
237 GenTree* src = blkNode->Data();
238 unsigned size = blkNode->Size();
240 if (blkNode->OperIsInitBlkOp())
// Look through GT_INIT_VAL to reach the raw fill-value node.
242 if (src->OperIs(GT_INIT_VAL))
245 src = src->AsUnOp()->gtGetOp1();
// A small init block of known size with a constant fill can be unrolled inline.
248 if (!blkNode->OperIs(GT_STORE_DYN_BLK) && (size <= comp->getUnrollThreshold(Compiler::UnrollKind::Memset)) &&
249 src->OperIs(GT_CNS_INT))
251 blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindUnroll;
253 // The fill value of an initblk is interpreted to hold a
254 // value of (unsigned int8) however a constant of any size
255 // may practically reside on the evaluation stack. So extract
256 // the lower byte out of the initVal constant and replicate
257 // it to a larger constant whose size is sufficient to support
258 // the largest width store of the desired inline expansion.
260 ssize_t fill = src->AsIntCon()->IconValue() & 0xFF;
265 else if (size >= REGSIZE_BYTES)
// Replicate the byte across all 8 bytes for full-register-width stores.
267 fill *= 0x0101010101010101LL;
268 src->gtType = TYP_LONG;
274 src->AsIntCon()->SetIconValue(fill);
276 ContainBlockStoreAddress(blkNode, size, dstAddr, nullptr);
// Too large, non-constant fill, or dynamic size: fall back to the helper call.
280 blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindHelper;
// Copy block: the source must be an indirection or a local.
285 assert(src->OperIs(GT_IND, GT_LCL_VAR, GT_LCL_FLD));
288 if (src->OperIs(GT_LCL_VAR))
290 // TODO-1stClassStructs: for now we can't work with STORE_BLOCK source in register.
291 const unsigned srcLclNum = src->AsLclVar()->GetLclNum();
292 comp->lvaSetVarDoNotEnregister(srcLclNum DEBUGARG(DoNotEnregisterReason::BlockOp));
295 ClassLayout* layout = blkNode->GetLayout();
// Layouts containing GC pointers need CpObj-style handling.
296 bool doCpObj = !blkNode->OperIs(GT_STORE_DYN_BLK) && layout->HasGCPtr();
297 unsigned copyBlockUnrollLimit = comp->getUnrollThreshold(Compiler::UnrollKind::Memcpy);
299 if (doCpObj && (size <= copyBlockUnrollLimit))
301 // No write barriers are needed on the stack.
302 // If the layout contains a byref, then we know it must live on the stack.
303 if (dstAddr->OperIs(GT_LCL_ADDR) || layout->HasGCByRef())
305 // If the size is small enough to unroll then we need to mark the block as non-interruptible
306 // to actually allow unrolling. The generated code does not report GC references loaded in the
307 // temporary register(s) used for copying.
309 blkNode->gtBlkOpGcUnsafe = true;
313 // CopyObj or CopyBlk
316 assert((dstAddr->TypeGet() == TYP_BYREF) || (dstAddr->TypeGet() == TYP_I_IMPL));
317 blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindCpObjUnroll;
319 else if (blkNode->OperIs(GT_STORE_BLK) && (size <= copyBlockUnrollLimit))
321 blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindUnroll;
// Try to fold the source/destination address computations into the unrolled accesses.
323 if (src->OperIs(GT_IND))
325 ContainBlockStoreAddress(blkNode, size, src->AsIndir()->Addr(), src->AsIndir());
328 ContainBlockStoreAddress(blkNode, size, dstAddr, nullptr);
// Otherwise fall back to the memcpy helper.
332 assert(blkNode->OperIs(GT_STORE_BLK, GT_STORE_DYN_BLK));
334 blkNode->gtBlkOpKind = GenTreeBlk::BlkOpKindHelper;
339 //------------------------------------------------------------------------
340 // ContainBlockStoreAddress: Attempt to contain an address used by an unrolled block store.
//
// Arguments:
343 //    blkNode - the block store node
344 //    size - the block size
345 //    addr - the address node to try to contain
346 //    addrParent - the parent of addr, in case this is checking containment of the source address.
//
348 void Lowering::ContainBlockStoreAddress(GenTreeBlk* blkNode, unsigned size, GenTree* addr, GenTree* addrParent)
350 assert(blkNode->OperIs(GT_STORE_BLK) && (blkNode->gtBlkOpKind == GenTreeBlk::BlkOpKindUnroll));
351 assert(size < INT32_MAX);
// A local address folds directly into the frame-based addressing mode.
353 if (addr->OperIs(GT_LCL_ADDR))
355 addr->SetContained();
// Only a non-overflowing "base + constant offset" shape can be folded further.
359 if (!addr->OperIs(GT_ADD) || addr->gtOverflow() || !addr->AsOp()->gtGetOp2()->OperIs(GT_CNS_INT))
364 GenTreeIntCon* offsetNode = addr->AsOp()->gtGetOp2()->AsIntCon();
365 ssize_t offset = offsetNode->IconValue();
367 // TODO-RISCV64: not including the ldptr and SIMD offset which not used right now.
// Every access the unrolled copy emits, from offset up to offset + size, must
// fit within the simm12 displacement of RISC-V load/store instructions.
368 if (!emitter::isValidSimm12(offset) || !emitter::isValidSimm12(offset + static_cast<int>(size)))
// Containment must not move the address computation past interfering nodes.
373 if (!IsSafeToContainMem(blkNode, addrParent, addr))
378 BlockRange().Remove(offsetNode);
// Rewrite ADD(base, const) as a contained LEA [base + offset].
380 addr->ChangeOper(GT_LEA);
381 addr->AsAddrMode()->SetIndex(nullptr);
382 addr->AsAddrMode()->SetScale(0);
383 addr->AsAddrMode()->SetOffset(static_cast<int>(offset));
384 addr->SetContained();
387 //------------------------------------------------------------------------
388 // LowerPutArgStkOrSplit: Lower a GT_PUTARG_STK/GT_PUTARG_SPLIT.
//
// Arguments:
391 //    putArgNode - The node to lower
//
393 void Lowering::LowerPutArgStkOrSplit(GenTreePutArgStk* putArgNode)
395 GenTree* src = putArgNode->Data();
397 if (src->TypeIs(TYP_STRUCT))
399 // STRUCT args (FIELD_LIST / BLK / LCL_VAR / LCL_FLD) will always be contained.
400 MakeSrcContained(putArgNode, src);
402 if (src->OperIs(GT_LCL_VAR))
404 // TODO-1stClassStructs: support struct enregistration here by retyping "src" to its register type for
405 // the non-split case.
406 comp->lvaSetVarDoNotEnregister(src->AsLclVar()->GetLclNum() DEBUGARG(DoNotEnregisterReason::IsStructArg));
411 //------------------------------------------------------------------------
412 // LowerCast: Lower GT_CAST(srcType, DstType) nodes.
//
// Arguments:
415 //    tree - GT_CAST node to be lowered
//
// Notes:
421 //    Casts from float/double to a smaller int type are transformed as follows:
422 //    GT_CAST(float/double, byte)   = GT_CAST(GT_CAST(float/double, int32), byte)
423 //    GT_CAST(float/double, sbyte)  = GT_CAST(GT_CAST(float/double, int32), sbyte)
424 //    GT_CAST(float/double, int16)  = GT_CAST(GT_CAST(float/double, int32), int16)
425 //    GT_CAST(float/double, uint16) = GT_CAST(GT_CAST(float/double, int32), uint16)
//
427 //    Note that for the overflow conversions we still depend on helper calls and
428 //    don't expect to see them here.
429 //    i) GT_CAST(float/double, int type with overflow detection)
//
432 void Lowering::LowerCast(GenTree* tree)
434 assert(tree->OperGet() == GT_CAST);
436 JITDUMP("LowerCast for: ");
440 GenTree* op1 = tree->AsOp()->gtOp1;
441 var_types dstType = tree->CastToType();
442 var_types srcType = genActualType(op1->TypeGet());
444 if (varTypeIsFloating(srcType))
// Overflow-checking float-to-int casts were already replaced by helper calls
// in morph, and small-typed destinations were split into intermediate casts.
446 noway_assert(!tree->gtOverflow());
447 assert(!varTypeIsSmall(dstType)); // fgMorphCast creates intermediate casts when converting from float to small
451 assert(!varTypeIsSmall(srcType));
453 // Now determine if we have operands that should be contained.
454 ContainCheckCast(tree->AsCast());
457 //------------------------------------------------------------------------
458 // LowerRotate: Lower GT_ROL and GT_ROR nodes.
//
// Arguments:
461 //    tree - the node to lower
//
466 void Lowering::LowerRotate(GenTree* tree)
// Only containment analysis (of the rotate amount) is needed on RISCV64.
468 ContainCheckShiftRotate(tree->AsOp());
472 //----------------------------------------------------------------------------------------------
473 // Lowering::LowerSIMD: Perform containment analysis for a SIMD intrinsic node.
//
// Arguments:
476 //    simdNode - The SIMD intrinsic node.
//
478 void Lowering::LowerSIMD(GenTreeSIMD* simdNode)
// SIMD lowering is not yet implemented for RISCV64.
480 NYI_RISCV64("LowerSIMD");
482 #endif // FEATURE_SIMD
484 #ifdef FEATURE_HW_INTRINSICS
485 //----------------------------------------------------------------------------------------------
486 // Lowering::LowerHWIntrinsic: Perform containment analysis for a hardware intrinsic node.
//
// Arguments:
489 //    node - The hardware intrinsic node.
//
491 void Lowering::LowerHWIntrinsic(GenTreeHWIntrinsic* node)
// Hardware intrinsics are not yet implemented for RISCV64.
493 NYI_RISCV64("LowerHWIntrinsic");
496 //----------------------------------------------------------------------------------------------
497 // Lowering::IsValidConstForMovImm: Determines if the given node can be replaced by a mov/fmov immediate instruction
//
// Arguments:
500 //    node - The hardware intrinsic node.
//
// Return Value:
503 //    true if the node can be replaced by a mov/fmov immediate instruction; otherwise, false
//
// Notes:
506 //    This check may end up modifying node->gtOp1 if it is a cast node that can be removed
//
507 bool Lowering::IsValidConstForMovImm(GenTreeHWIntrinsic* node)
// Hardware intrinsics are not yet implemented for RISCV64.
509 NYI_RISCV64("IsValidConstForMovImm");
513 //----------------------------------------------------------------------------------------------
514 // Lowering::LowerHWIntrinsicCmpOp: Lowers a Vector128 or Vector256 comparison intrinsic
//
// Arguments:
517 //    node - The hardware intrinsic node.
518 //    cmpOp - The comparison operation, currently must be GT_EQ or GT_NE
//
520 void Lowering::LowerHWIntrinsicCmpOp(GenTreeHWIntrinsic* node, genTreeOps cmpOp)
// Hardware intrinsics are not yet implemented for RISCV64.
522 NYI_RISCV64("LowerHWIntrinsicCmpOp");
525 //----------------------------------------------------------------------------------------------
526 // Lowering::LowerHWIntrinsicCreate: Lowers a Vector64 or Vector128 Create call
//
// Arguments:
529 //    node - The hardware intrinsic node.
//
531 void Lowering::LowerHWIntrinsicCreate(GenTreeHWIntrinsic* node)
// Hardware intrinsics are not yet implemented for RISCV64.
533 NYI_RISCV64("LowerHWIntrinsicCreate");
536 //----------------------------------------------------------------------------------------------
537 // Lowering::LowerHWIntrinsicDot: Lowers a Vector64 or Vector128 Dot call
//
// Arguments:
540 //    node - The hardware intrinsic node.
//
542 void Lowering::LowerHWIntrinsicDot(GenTreeHWIntrinsic* node)
// Hardware intrinsics are not yet implemented for RISCV64.
544 NYI_RISCV64("LowerHWIntrinsicDot");
547 #endif // FEATURE_HW_INTRINSICS
549 //------------------------------------------------------------------------
550 // Containment analysis
551 //------------------------------------------------------------------------
553 //------------------------------------------------------------------------
554 // ContainCheckCallOperands: Determine whether operands of a call should be contained.
//
// Arguments:
557 //    call - The call node of interest
//
562 void Lowering::ContainCheckCallOperands(GenTreeCall* call)
564 // There are no contained operands for RISCV64.
567 //------------------------------------------------------------------------
568 // ContainCheckStoreIndir: determine whether the sources of a STOREIND node should be contained.
//
// Arguments:
571 //    node - pointer to the node
//
573 void Lowering::ContainCheckStoreIndir(GenTreeStoreInd* node)
575 GenTree* src = node->Data();
576 if (!varTypeIsFloating(src->TypeGet()) && src->IsIntegralConst(0))
578 // an integer zero for 'src' can be contained.
579 MakeSrcContained(node, src);
// Also check the address operand, shared with load indirections.
582 ContainCheckIndir(node);
585 //------------------------------------------------------------------------
586 // ContainCheckIndir: Determine whether operands of an indir should be contained.
//
// Arguments:
589 //    indirNode - The indirection node of interest
//
// Notes:
592 //    This is called for both store and load indirections.
//
597 void Lowering::ContainCheckIndir(GenTreeIndir* indirNode)
599 // If this is the rhs of a block copy it will be handled when we handle the store.
600 if (indirNode->TypeGet() == TYP_STRUCT)
606 NYI_RISCV64("ContainCheckIndir-SIMD");
607 #endif // FEATURE_SIMD
609 GenTree* addr = indirNode->Addr();
// A LEA folds into the indirection's addressing mode when no interfering
// node sits between the address and its use.
610 if ((addr->OperGet() == GT_LEA) && IsSafeToContainMem(indirNode, addr))
612 MakeSrcContained(indirNode, addr);
614 else if (addr->OperIs(GT_LCL_ADDR))
616 // These nodes go into an addr mode:
617 // - GT_LCL_ADDR is a stack addr mode.
618 MakeSrcContained(indirNode, addr);
620 else if (addr->OperIs(GT_CLS_VAR_ADDR))
622 // These nodes go into an addr mode:
623 // - GT_CLS_VAR_ADDR turns into a constant.
624 // make this contained, it turns into a constant that goes into an addr mode
625 MakeSrcContained(indirNode, addr);
629 //------------------------------------------------------------------------
630 // ContainCheckBinary: Determine whether a binary op's operands should be contained.
//
// Arguments:
633 //    node - the node we care about
//
635 void Lowering::ContainCheckBinary(GenTreeOp* node)
637 // Check and make op2 contained (if it is a containable immediate)
638 CheckImmedAndMakeContained(node, node->gtOp2);
641 //------------------------------------------------------------------------
642 // ContainCheckMul: Determine whether a mul op's operands should be contained.
//
// Arguments:
645 //    node - the node we care about
//
647 void Lowering::ContainCheckMul(GenTreeOp* node)
// Multiplies share the generic binary-op containment rules.
649 ContainCheckBinary(node);
652 //------------------------------------------------------------------------
653 // ContainCheckDivOrMod: determine which operands of a div/mod should be contained.
//
// Arguments:
656 //    node - the node we care about
//
658 void Lowering::ContainCheckDivOrMod(GenTreeOp* node)
// No operands are contained for div/mod on RISCV64; only validate the oper.
660 assert(node->OperIs(GT_MOD, GT_UMOD, GT_DIV, GT_UDIV));
663 //------------------------------------------------------------------------
664 // ContainCheckShiftRotate: Determine whether a shift/rotate op's operands should be contained.
//
// Arguments:
667 //    node - the node we care about
//
669 void Lowering::ContainCheckShiftRotate(GenTreeOp* node)
671 GenTree* shiftBy = node->gtOp2;
672 assert(node->OperIsShiftOrRotate());
// A constant shift amount can be encoded in the instruction itself.
674 if (shiftBy->IsCnsIntOrI())
676 MakeSrcContained(node, shiftBy);
680 //------------------------------------------------------------------------
681 // ContainCheckStoreLoc: determine whether the source of a STORE_LCL* should be contained.
//
// Arguments:
684 //    node - pointer to the node
//
686 void Lowering::ContainCheckStoreLoc(GenTreeLclVarCommon* storeLoc) const
688 assert(storeLoc->OperIsLocalStore());
689 GenTree* op1 = storeLoc->gtGetOp1();
691 if (op1->OperIs(GT_BITCAST))
693 // If we know that the source of the bitcast will be in a register, then we can make
694 // the bitcast itself contained. This will allow us to store directly from the other
695 // type if this node doesn't get a register.
696 GenTree* bitCastSrc = op1->gtGetOp1();
697 if (!bitCastSrc->isContained() && !bitCastSrc->IsRegOptional())
704 const LclVarDsc* varDsc = comp->lvaGetDesc(storeLoc);
707 if (storeLoc->TypeIs(TYP_SIMD8, TYP_SIMD12))
709 // If this is a store to memory, we can initialize a zero vector in memory from REG_ZR.
710 if ((op1->IsIntegralConst(0) || op1->IsVectorZero()) && varDsc->lvDoNotEnregister)
712 // For an InitBlk we want op1 to be contained
713 MakeSrcContained(storeLoc, op1);
717 #endif // FEATURE_SIMD
719 if (IsContainableImmed(storeLoc, op1))
721 MakeSrcContained(storeLoc, op1);
724 // If the source is a containable immediate, make it contained, unless it is
725 // an int-size or larger store of zero to memory, because we can generate smaller code
726 // by zeroing a register and then storing it.
727 var_types type = varDsc->GetRegisterType(storeLoc);
728 if (IsContainableImmed(storeLoc, op1) && (!op1->IsIntegralConst(0) || varTypeIsSmall(type)))
730 MakeSrcContained(storeLoc, op1);
734 //------------------------------------------------------------------------
735 // ContainCheckCast: determine whether the source of a CAST node should be contained.
//
// Arguments:
738 //    node - pointer to the node
//
740 void Lowering::ContainCheckCast(GenTreeCast* node)
742 // There are no contained operands for RISCV64.
745 //------------------------------------------------------------------------
746 // ContainCheckCompare: determine whether the sources of a compare node should be contained.
//
// Arguments:
749 //    cmp - pointer to the node
//
751 void Lowering::ContainCheckCompare(GenTreeOp* cmp)
// The second operand can be contained if it is an encodable immediate.
753 CheckImmedAndMakeContained(cmp, cmp->gtOp2);
756 //------------------------------------------------------------------------
757 // ContainCheckSelect : determine whether the source of a select should be contained.
//
// Arguments:
760 //    node - pointer to the node
//
762 void Lowering::ContainCheckSelect(GenTreeOp* node)
// GT_SELECT is never generated for this target, so reaching here is a bug.
764 noway_assert(!"GT_SELECT nodes are not supported on riscv64");
767 //------------------------------------------------------------------------
768 // ContainCheckBoundsChk: determine whether any source of a bounds check node should be contained.
//
// Arguments:
771 //    node - pointer to the node
//
773 void Lowering::ContainCheckBoundsChk(GenTreeBoundsChk* node)
775 assert(node->OperIs(GT_BOUNDS_CHECK));
// At most one of the two operands may be contained as an immediate;
// prefer the index, then fall back to the length.
776 if (!CheckImmedAndMakeContained(node, node->GetIndex()))
778 CheckImmedAndMakeContained(node, node->GetArrayLength());
783 //----------------------------------------------------------------------------------------------
784 // ContainCheckSIMD: Perform containment analysis for a SIMD intrinsic node.
//
// Arguments:
787 //    simdNode - The SIMD intrinsic node.
//
789 void Lowering::ContainCheckSIMD(GenTreeSIMD* simdNode)
// SIMD is not yet implemented for RISCV64.
791 NYI_RISCV64("ContainCheckSIMD");
793 #endif // FEATURE_SIMD
795 #ifdef FEATURE_HW_INTRINSICS
796 //----------------------------------------------------------------------------------------------
797 // ContainCheckHWIntrinsic: Perform containment analysis for a hardware intrinsic node.
//
// Arguments:
800 //    node - The hardware intrinsic node.
//
802 void Lowering::ContainCheckHWIntrinsic(GenTreeHWIntrinsic* node)
// Hardware intrinsics are not yet implemented for RISCV64.
804 NYI_RISCV64("ContainCheckHWIntrinsic");
806 #endif // FEATURE_HW_INTRINSICS
808 #endif // TARGET_RISCV64