01364735749222936cf6a04154243598b93a6367
[platform/upstream/dotnet/runtime.git] / src / coreclr / jit / lsrariscv64.cpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3
4 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
5 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XX                                                                           XX
7 XX                    Register Requirements for RISCV64                      XX
8 XX                                                                           XX
9 XX  This encapsulates all the logic for setting register requirements for    XX
10 XX  the RISCV64 architecture.                                                XX
11 XX                                                                           XX
12 XX                                                                           XX
13 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
15 */
16
17 #include "jitpch.h"
18 #ifdef _MSC_VER
19 #pragma hdrstop
20 #endif
21
22 #ifdef TARGET_RISCV64
23
24 #include "jit.h"
25 #include "sideeffects.h"
26 #include "lower.h"
27 #include "codegen.h"
28
29 //------------------------------------------------------------------------
30 // BuildNode: Build the RefPositions for a node
31 //
32 // Arguments:
33 //    treeNode - the node of interest
34 //
35 // Return Value:
36 //    The number of sources consumed by this node.
37 //
38 // Notes:
39 // Preconditions:
40 //    LSRA Has been initialized.
41 //
42 // Postconditions:
43 //    RefPositions have been built for all the register defs and uses required
44 //    for this node.
45 //
int LinearScan::BuildNode(GenTree* tree)
{
    assert(!tree->isContained());
    int       srcCount      = 0;
    int       dstCount      = 0;
    regMaskTP dstCandidates = RBM_NONE;
    regMaskTP killMask      = RBM_NONE;
    bool      isLocalDefUse = false;

    // Reset the build-related members of LinearScan.
    clearBuildState();

    // Set the default dstCount. This may be modified below.
    if (tree->IsValue())
    {
        dstCount = 1;
        if (tree->IsUnusedValue())
        {
            isLocalDefUse = true;
        }
    }
    else
    {
        dstCount = 0;
    }

    switch (tree->OperGet())
    {
        default:
            srcCount = BuildSimple(tree);
            break;

        case GT_LCL_VAR:
            // We make a final determination about whether a GT_LCL_VAR is a candidate or contained
            // after liveness. In either case we don't build any uses or defs. Otherwise, this is a
            // load of a stack-based local into a register and we'll fall through to the general
            // local case below.
            if (checkContainedOrCandidateLclVar(tree->AsLclVar()))
            {
                return 0;
            }
            FALLTHROUGH;
        case GT_LCL_FLD:
        {
            srcCount = 0;
#ifdef FEATURE_SIMD
            // Need an additional register to read upper 4 bytes of Vector3.
            if (tree->TypeGet() == TYP_SIMD12)
            {
                // We need an internal register different from targetReg in which 'tree' produces its result
                // because both targetReg and internal reg will be in use at the same time.
                buildInternalFloatRegisterDefForNode(tree, allSIMDRegs());
                setInternalRegsDelayFree = true;
                buildInternalRegisterUses();
            }
#endif
            BuildDef(tree);
        }
        break;

        case GT_STORE_LCL_VAR:
            // A multi-reg store defines one register per promoted field.
            if (tree->IsMultiRegLclVar() && isCandidateMultiRegLclVar(tree->AsLclVar()))
            {
                dstCount = compiler->lvaGetDesc(tree->AsLclVar())->lvFieldCnt;
            }
            FALLTHROUGH;

        case GT_STORE_LCL_FLD:
            srcCount = BuildStoreLoc(tree->AsLclVarCommon());
            break;

        case GT_FIELD_LIST:
            // These should always be contained. We don't correctly allocate or
            // generate code for a non-contained GT_FIELD_LIST.
            noway_assert(!"Non-contained GT_FIELD_LIST");
            srcCount = 0;
            break;

        case GT_NO_OP:
        case GT_START_NONGC:
            srcCount = 0;
            assert(dstCount == 0);
            break;

        case GT_PROF_HOOK:
            // No uses/defs, but the profiler hook call clobbers its kill set.
            srcCount = 0;
            assert(dstCount == 0);
            killMask = getKillSetForProfilerHook();
            BuildDefsWithKills(tree, 0, RBM_NONE, killMask);
            break;

        case GT_START_PREEMPTGC:
            // This kills GC refs in callee save regs
            srcCount = 0;
            assert(dstCount == 0);
            BuildDefsWithKills(tree, 0, RBM_NONE, RBM_NONE);
            break;

        case GT_CNS_DBL:
        {
            // There is no instruction for loading float/double imm directly into FPR.
            // Reserve int to load constant from memory (IF_LARGELDC)
            buildInternalIntRegisterDefForNode(tree);
            buildInternalRegisterUses();
        }
            FALLTHROUGH;

        case GT_CNS_INT:
        {
            srcCount = 0;
            assert(dstCount == 1);
            // Mark the def's interval as a constant so LSRA can rematerialize it
            // instead of spilling.
            RefPosition* def               = BuildDef(tree);
            def->getInterval()->isConstant = true;
        }
        break;

        case GT_BOX:
        case GT_COMMA:
        case GT_QMARK:
        case GT_COLON:
            // These are all eliminated before this phase; reaching here is a JIT bug.
            srcCount = 0;
            assert(dstCount == 0);
            unreached();
            break;

        case GT_RETURN:
            srcCount = BuildReturn(tree);
            killMask = getKillSetForReturn();
            BuildDefsWithKills(tree, 0, RBM_NONE, killMask);
            break;

        case GT_RETFILT:
            assert(dstCount == 0);
            if (tree->TypeGet() == TYP_VOID)
            {
                srcCount = 0;
            }
            else
            {
                // A filter returns its result in the integer return register.
                assert(tree->TypeGet() == TYP_INT);
                srcCount = 1;
                BuildUse(tree->gtGetOp1(), RBM_INTRET);
            }
            break;

        case GT_NOP:
            // A GT_NOP is either a passthrough (if it is void, or if it has
            // a child), but must be considered to produce a dummy value if it
            // has a type but no child.
            srcCount = 0;
            if (tree->TypeGet() != TYP_VOID && tree->gtGetOp1() == nullptr)
            {
                assert(dstCount == 1);
                BuildDef(tree);
            }
            else
            {
                assert(dstCount == 0);
            }
            break;

        case GT_KEEPALIVE:
            assert(dstCount == 0);
            srcCount = BuildOperandUses(tree->gtGetOp1());
            break;

        case GT_JTRUE:
            srcCount = 0;
            assert(dstCount == 0);
            break;

        case GT_JMP:
            srcCount = 0;
            assert(dstCount == 0);
            break;

        case GT_SWITCH:
            // This should never occur since switch nodes must not be visible at this
            // point in the JIT.
            srcCount = 0;
            noway_assert(!"Switch must be lowered at this point");
            break;

        case GT_JMPTABLE:
            srcCount = 0;
            assert(dstCount == 1);
            BuildDef(tree);
            break;

        case GT_SWITCH_TABLE:
            // Needs a temp register for the jump-table dispatch sequence.
            buildInternalIntRegisterDefForNode(tree);
            srcCount = BuildBinaryUses(tree->AsOp());
            assert(dstCount == 0);
            break;

        case GT_ADD:
        case GT_SUB:
            if (varTypeIsFloating(tree->TypeGet()))
            {
                // overflow operations aren't supported on float/double types.
                assert(!tree->gtOverflow());

                // No implicit conversions at this stage as the expectation is that
                // everything is made explicit by adding casts.
                assert(tree->gtGetOp1()->TypeGet() == tree->gtGetOp2()->TypeGet());
            }
            else if (tree->gtOverflow())
            {
                // Need a register different from target reg to check for overflow.
                // Signed overflow checks need a second temp.
                buildInternalIntRegisterDefForNode(tree);
                if ((tree->gtFlags & GTF_UNSIGNED) == 0)
                    buildInternalIntRegisterDefForNode(tree);
                setInternalRegsDelayFree = true;
            }
            FALLTHROUGH;

        case GT_AND:
        case GT_AND_NOT:
        case GT_OR:
        case GT_XOR:
        case GT_LSH:
        case GT_RSH:
        case GT_RSZ:
        case GT_ROR:
        case GT_ROL:
            // Rotates are synthesized from shifts on RISCV64 and need a temp.
            if (tree->OperIs(GT_ROR, GT_ROL))
                buildInternalIntRegisterDefForNode(tree);
            srcCount = BuildBinaryUses(tree->AsOp());
            buildInternalRegisterUses();
            assert(dstCount == 1);
            BuildDef(tree);
            break;

        case GT_RETURNTRAP:
            // this just turns into a compare of its child with an int
            // + a conditional call
            BuildUse(tree->gtGetOp1());
            srcCount = 1;
            assert(dstCount == 0);
            killMask = compiler->compHelperCallKillSet(CORINFO_HELP_STOP_FOR_GC);
            BuildDefsWithKills(tree, 0, RBM_NONE, killMask);
            break;

        case GT_MUL:
            if (tree->gtOverflow())
            {
                // Need a register different from target reg to check for overflow.
                // Signed overflow checks need a second temp.
                buildInternalIntRegisterDefForNode(tree);
                if ((tree->gtFlags & GTF_UNSIGNED) == 0)
                    buildInternalIntRegisterDefForNode(tree);
                setInternalRegsDelayFree = true;
            }
            FALLTHROUGH;

        case GT_MOD:
        case GT_UMOD:
        case GT_DIV:
        case GT_UDIV:
        {
            srcCount = BuildBinaryUses(tree->AsOp());

            GenTree* divisorOp = tree->gtGetOp2();

            ExceptionSetFlags exceptions = tree->OperExceptions(compiler);

            // A temp is needed for the exception checks unless this is a float op,
            // or the divide-by-zero case is statically known (const-zero divisor or
            // divisor already in the zero register).
            if (!varTypeIsFloating(tree->TypeGet()) &&
                !((exceptions & ExceptionSetFlags::DivideByZeroException) != ExceptionSetFlags::None &&
                  (divisorOp->IsIntegralConst(0) || divisorOp->GetRegNum() == REG_ZERO)))
            {
                bool needTemp = false;
                if (divisorOp->isContainedIntOrIImmed())
                {
                    if (!emitter::isGeneralRegister(divisorOp->GetRegNum()))
                        needTemp = true;
                }

                // Signed DIV/MOD may also need a temp for the INT_MIN / -1 overflow check.
                if (!needTemp && (tree->gtOper == GT_DIV || tree->gtOper == GT_MOD))
                {
                    if ((exceptions & ExceptionSetFlags::ArithmeticException) != ExceptionSetFlags::None)
                        needTemp = true;
                }

                if (needTemp)
                    buildInternalIntRegisterDefForNode(tree);
            }
            buildInternalRegisterUses();
            assert(dstCount == 1);
            BuildDef(tree);
        }
        break;

        case GT_MULHI:
        {
            srcCount = BuildBinaryUses(tree->AsOp());

            // Narrow (non-8-byte) unsigned MULHI needs a temp register.
            emitAttr attr = emitActualTypeSize(tree->AsOp());
            if (EA_SIZE(attr) != EA_8BYTE)
            {
                if ((tree->AsOp()->gtFlags & GTF_UNSIGNED) != 0)
                    buildInternalIntRegisterDefForNode(tree);
            }

            buildInternalRegisterUses();
            assert(dstCount == 1);
            BuildDef(tree);
        }
        break;

        case GT_INTRINSIC:
        {
            noway_assert((tree->AsIntrinsic()->gtIntrinsicName == NI_System_Math_Abs) ||
                         (tree->AsIntrinsic()->gtIntrinsicName == NI_System_Math_Ceiling) ||
                         (tree->AsIntrinsic()->gtIntrinsicName == NI_System_Math_Floor) ||
                         (tree->AsIntrinsic()->gtIntrinsicName == NI_System_Math_Round) ||
                         (tree->AsIntrinsic()->gtIntrinsicName == NI_System_Math_Sqrt));

            // Both operand and its result must be of the same floating point type.
            GenTree* op1 = tree->gtGetOp1();
            assert(varTypeIsFloating(op1));
            assert(op1->TypeGet() == tree->TypeGet());

            BuildUse(op1);
            srcCount = 1;
            assert(dstCount == 1);
            BuildDef(tree);
        }
        break;

#ifdef FEATURE_SIMD
        case GT_SIMD:
            srcCount = BuildSIMD(tree->AsSIMD());
            break;
#endif // FEATURE_SIMD

#ifdef FEATURE_HW_INTRINSICS
        case GT_HWINTRINSIC:
            srcCount = BuildHWIntrinsic(tree->AsHWIntrinsic(), &dstCount);
            break;
#endif // FEATURE_HW_INTRINSICS

        case GT_CAST:
            assert(dstCount == 1);
            srcCount = BuildCast(tree->AsCast());
            break;

        case GT_NEG:
        case GT_NOT:
            BuildUse(tree->gtGetOp1());
            srcCount = 1;
            assert(dstCount == 1);
            BuildDef(tree);
            break;

        case GT_EQ:
        case GT_NE:
        case GT_LT:
        case GT_LE:
        case GT_GE:
        case GT_GT:
        {
            var_types op1Type = genActualType(tree->gtGetOp1()->TypeGet());
            if (varTypeIsFloating(op1Type))
            {
                // FP compares: unordered-EQ and ordered-NE are synthesized from the
                // inverse condition and need a temp for the inversion.
                bool isUnordered = (tree->gtFlags & GTF_RELOP_NAN_UN) != 0;
                if (isUnordered)
                {
                    if (tree->OperIs(GT_EQ))
                        buildInternalIntRegisterDefForNode(tree);
                }
                else
                {
                    if (tree->OperIs(GT_NE))
                        buildInternalIntRegisterDefForNode(tree);
                }
            }
            else
            {
                // 32-bit integer compares may need a temp to widen/zero-extend operands.
                emitAttr cmpSize = EA_ATTR(genTypeSize(op1Type));
                if (tree->gtGetOp2()->isContainedIntOrIImmed())
                {
                    bool isUnsigned = (tree->gtFlags & GTF_UNSIGNED) != 0;
                    if (cmpSize == EA_4BYTE && isUnsigned)
                        buildInternalIntRegisterDefForNode(tree);
                }
                else
                {
                    if (cmpSize == EA_4BYTE)
                        buildInternalIntRegisterDefForNode(tree);
                }
            }
            buildInternalRegisterUses();
        }
            FALLTHROUGH;

        case GT_JCMP:
            srcCount = BuildCmp(tree);
            break;

        case GT_CKFINITE:
            srcCount = 1;
            assert(dstCount == 1);
            // Temp register to inspect the exponent bits for NaN/infinity.
            buildInternalIntRegisterDefForNode(tree);
            BuildUse(tree->gtGetOp1());
            BuildDef(tree);
            buildInternalRegisterUses();
            break;

        case GT_CMPXCHG:
        {
            GenTreeCmpXchg* cas = tree->AsCmpXchg();
            assert(!cas->gtOpComparand->isContained());
            srcCount = 3;
            assert(dstCount == 1);

            buildInternalIntRegisterDefForNode(tree); // temp reg for store conditional error
            // Extend lifetimes of argument regs because they may be reused during retries
            setDelayFree(BuildUse(cas->gtOpLocation));
            setDelayFree(BuildUse(cas->gtOpValue));
            setDelayFree(BuildUse(cas->gtOpComparand));

            // Internals may not collide with target
            setInternalRegsDelayFree = true;
            buildInternalRegisterUses();
            BuildDef(tree);
        }
        break;

        case GT_LOCKADD:
            assert(!"-----unimplemented on RISCV64----");
            break;

        case GT_XORR:
        case GT_XAND:
        case GT_XADD:
        case GT_XCHG:
        {
            // Atomic RMW ops: a def only exists when the old value is consumed
            // (non-void type).
            assert(dstCount == (tree->TypeIs(TYP_VOID) ? 0 : 1));
            GenTree* addr = tree->gtGetOp1();
            GenTree* data = tree->gtGetOp2();
            assert(!addr->isContained() && !data->isContained());
            srcCount = 2;

            BuildUse(addr);
            BuildUse(data);
            if (dstCount == 1)
            {
                BuildDef(tree);
            }
        }
        break;

        case GT_PUTARG_SPLIT:
            srcCount = BuildPutArgSplit(tree->AsPutArgSplit());
            dstCount = tree->AsPutArgSplit()->gtNumRegs;
            break;

        case GT_PUTARG_STK:
            srcCount = BuildPutArgStk(tree->AsPutArgStk());
            break;

        case GT_PUTARG_REG:
            srcCount = BuildPutArgReg(tree->AsUnOp());
            break;

        case GT_CALL:
            srcCount = BuildCall(tree->AsCall());
            if (tree->AsCall()->HasMultiRegRetVal())
            {
                dstCount = tree->AsCall()->GetReturnTypeDesc()->GetReturnRegCount();
            }
            break;

        case GT_BLK:
            // These should all be eliminated prior to Lowering.
            assert(!"Non-store block node in Lowering");
            srcCount = 0;
            break;

        case GT_STORE_BLK:
        case GT_STORE_DYN_BLK:
            srcCount = BuildBlockStore(tree->AsBlk());
            break;

        case GT_INIT_VAL:
            // Always a passthrough of its child's value.
            assert(!"INIT_VAL should always be contained");
            srcCount = 0;
            break;

        case GT_LCLHEAP:
        {
            assert(dstCount == 1);

            // Need a variable number of temp regs (see genLclHeap() in codegenrisv64.cpp):
            // Here '-' means don't care.
            //
            //  Size?                   Init Memory?    # temp regs
            //   0                          -               0
            //   const and <=UnrollLimit    -               0
            //   const and <PageSize        No              0
            //   >UnrollLimit               Yes             0
            //   Non-const                  Yes             0
            //   Non-const                  No              2
            //

            // An extra temp is needed whenever the outgoing arg space must be
            // re-established after the allocation.
            bool needExtraTemp = (compiler->lvaOutgoingArgSpaceSize > 0);

            GenTree* size = tree->gtGetOp1();
            if (size->IsCnsIntOrI())
            {
                assert(size->isContained());
                srcCount = 0;

                size_t sizeVal = size->AsIntCon()->gtIconVal;

                if (sizeVal != 0)
                {
                    // Compute the amount of memory to properly STACK_ALIGN.
                    // Note: The Gentree node is not updated here as it is cheap to recompute stack aligned size.
                    // This should also help in debugging as we can examine the original size specified with
                    // localloc.
                    sizeVal = AlignUp(sizeVal, STACK_ALIGN);

                    // For small allocations up to 4 'st' instructions (i.e. 16 to 64 bytes of localloc)
                    if (sizeVal <= (REGSIZE_BYTES * 2 * 4))
                    {
                        // Need no internal registers
                    }
                    else if (!compiler->info.compInitMem)
                    {
                        // No need to initialize allocated stack space.
                        if (sizeVal < compiler->eeGetPageSize())
                        {
                            // Needs a temp only if -sizeVal doesn't fit in a 12-bit immediate.
                            ssize_t imm = -(ssize_t)sizeVal;
                            needExtraTemp |= !emitter::isValidSimm12(imm);
                        }
                        else
                        {
                            // We need two registers: regCnt and RegTmp
                            buildInternalIntRegisterDefForNode(tree);
                            buildInternalIntRegisterDefForNode(tree);
                            needExtraTemp = true;
                        }
                    }
                }
            }
            else
            {
                srcCount = 1;
                if (!compiler->info.compInitMem)
                {
                    buildInternalIntRegisterDefForNode(tree);
                    buildInternalIntRegisterDefForNode(tree);
                    needExtraTemp = true;
                }
            }

            if (needExtraTemp)
                buildInternalIntRegisterDefForNode(tree); // tempReg

            if (!size->isContained())
            {
                BuildUse(size);
            }
            buildInternalRegisterUses();
            BuildDef(tree);
        }
        break;

        case GT_BOUNDS_CHECK:
        {
            // 32-bit operands each need a temp to zero-extend before the compare.
            GenTreeBoundsChk* node = tree->AsBoundsChk();
            if (genActualType(node->GetArrayLength()) == TYP_INT)
            {
                buildInternalIntRegisterDefForNode(tree);
            }
            if (genActualType(node->GetIndex()) == TYP_INT)
            {
                buildInternalIntRegisterDefForNode(tree);
            }
            buildInternalRegisterUses();
            // Consumes arrLen & index - has no result
            assert(dstCount == 0);
            srcCount = BuildOperandUses(node->GetIndex());
            srcCount += BuildOperandUses(node->GetArrayLength());
        }
        break;

        case GT_ARR_ELEM:
            // These must have been lowered
            noway_assert(!"We should never see a GT_ARR_ELEM in lowering");
            srcCount = 0;
            assert(dstCount == 0);
            break;

        case GT_LEA:
        {
            GenTreeAddrMode* lea = tree->AsAddrMode();

            GenTree* base  = lea->Base();
            GenTree* index = lea->Index();
            int      cns   = lea->Offset();

            // This LEA is instantiating an address, so we set up the srcCount here.
            srcCount = 0;
            if (base != nullptr)
            {
                srcCount++;
                BuildUse(base);
            }
            if (index != nullptr)
            {
                srcCount++;
                BuildUse(index);
            }
            assert(dstCount == 1);

            // A scaled index needs a temp to hold the shifted index.
            if ((base != nullptr) && (index != nullptr))
            {
                DWORD scale;
                BitScanForward(&scale, lea->gtScale);
                if (scale > 0)
                    buildInternalIntRegisterDefForNode(tree); // scaleTempReg
            }

            // On RISCV64 we may need a single internal register
            // (when both conditions are true then we still only need a single internal register)
            if ((index != nullptr) && (cns != 0))
            {
                // RISCV64 does not support both Index and offset so we need an internal register
                buildInternalIntRegisterDefForNode(tree);
            }
            else if (!emitter::isValidSimm12(cns))
            {
                // This offset can't be contained in the add instruction, so we need an internal register
                buildInternalIntRegisterDefForNode(tree);
            }
            buildInternalRegisterUses();
            BuildDef(tree);
        }
        break;

        case GT_STOREIND:
        {
            assert(dstCount == 0);

            // GC write barriers have fixed register requirements handled separately.
            if (compiler->codeGen->gcInfo.gcIsWriteBarrierStoreIndNode(tree->AsStoreInd()))
            {
                srcCount = BuildGCWriteBarrier(tree);
                break;
            }

            srcCount = BuildIndir(tree->AsIndir());
            if (!tree->gtGetOp2()->isContained())
            {
                BuildUse(tree->gtGetOp2());
                srcCount++;
            }
        }
        break;

        case GT_NULLCHECK:
        case GT_IND:
            assert(dstCount == (tree->OperIs(GT_NULLCHECK) ? 0 : 1));
            srcCount = BuildIndir(tree->AsIndir());
            break;

        case GT_CATCH_ARG:
            // The exception object arrives in a fixed register.
            srcCount = 0;
            assert(dstCount == 1);
            BuildDef(tree, RBM_EXCEPTION_OBJECT);
            break;

        case GT_INDEX_ADDR:
            assert(dstCount == 1);
            srcCount = BuildBinaryUses(tree->AsOp());
            buildInternalIntRegisterDefForNode(tree);
            buildInternalRegisterUses();
            BuildDef(tree);
            break;

    } // end switch (tree->OperGet())

    if (tree->IsUnusedValue() && (dstCount != 0))
    {
        isLocalDefUse = true;
    }
    // We need to be sure that we've set srcCount and dstCount appropriately
    assert((dstCount < 2) || tree->IsMultiRegNode());
    assert(isLocalDefUse == (tree->IsValue() && tree->IsUnusedValue()));
    assert(!tree->IsUnusedValue() || (dstCount != 0));
    assert(dstCount == tree->GetRegisterDstCount(compiler));
    return srcCount;
}
740
741 #ifdef FEATURE_SIMD
742 //------------------------------------------------------------------------
743 // BuildSIMD: Set the NodeInfo for a GT_SIMD tree.
744 //
745 // Arguments:
746 //    tree       - The GT_SIMD node of interest
747 //
748 // Return Value:
749 //    The number of sources consumed by this node.
750 //
int LinearScan::BuildSIMD(GenTreeSIMD* simdTree)
{
    // SIMD register allocation is not implemented for RISCV64 yet;
    // NYI_RISCV64 reports the unimplemented feature if this is ever reached.
    NYI_RISCV64("-----unimplemented on RISCV64 yet----");
    return 0;
}
756 #endif // FEATURE_SIMD
757
758 #ifdef FEATURE_HW_INTRINSICS
759 #include "hwintrinsic.h"
760 //------------------------------------------------------------------------
761 // BuildHWIntrinsic: Set the NodeInfo for a GT_HWINTRINSIC tree.
762 //
763 // Arguments:
764 //    tree       - The GT_HWINTRINSIC node of interest
765 //
766 // Return Value:
767 //    The number of sources consumed by this node.
768 //
int LinearScan::BuildHWIntrinsic(GenTreeHWIntrinsic* intrinsicTree)
{
    // Hardware intrinsics are not implemented for RISCV64 yet;
    // NYI_RISCV64 reports the unimplemented feature if this is ever reached.
    NYI_RISCV64("-----unimplemented on RISCV64 yet----");
    return 0;
}
774 #endif
775
776 //------------------------------------------------------------------------
777 // BuildIndir: Specify register requirements for address expression
778 //                       of an indirection operation.
779 //
780 // Arguments:
781 //    indirTree - GT_IND, GT_STOREIND or block gentree node
782 //
783 // Return Value:
784 //    The number of sources consumed by this node.
785 //
int LinearScan::BuildIndir(GenTreeIndir* indirTree)
{
    // struct typed indirs are expected only on rhs of a block copy,
    // but in this case they must be contained.
    assert(indirTree->TypeGet() != TYP_STRUCT);

    GenTree* addr  = indirTree->Addr();
    GenTree* index = nullptr;
    int      cns   = 0;

    // A contained address may still require an internal register to form the
    // effective address, since RISCV64 load/store addressing is base + simm12 only.
    if (addr->isContained())
    {
        if (addr->OperGet() == GT_LEA)
        {
            GenTreeAddrMode* lea = addr->AsAddrMode();
            index                = lea->Index();
            cns                  = lea->Offset();

            // On RISCV64 we may need a single internal register
            // (when both conditions are true then we still only need a single internal register)
            if ((index != nullptr) && (cns != 0))
            {
                // RISCV64 does not support both Index and offset so we need an internal register
                buildInternalIntRegisterDefForNode(indirTree);
            }
            else if (!emitter::isValidSimm12(cns))
            {
                // This offset can't be contained in the ldr/str instruction, so we need an internal register
                buildInternalIntRegisterDefForNode(indirTree);
            }
        }
        else if (addr->OperGet() == GT_CLS_VAR_ADDR)
        {
            // Reserve int to load constant from memory (IF_LARGELDC)
            buildInternalIntRegisterDefForNode(indirTree);
        }
    }

#ifdef FEATURE_SIMD
    if (indirTree->TypeGet() == TYP_SIMD12)
    {
        // If indirTree is of TYP_SIMD12, addr is not contained. See comment in LowerIndir().
        assert(!addr->isContained());

        // Vector3 is read/written as two reads/writes: 8 byte and 4 byte.
        // To assemble the vector properly we would need an additional int register
        buildInternalIntRegisterDefForNode(indirTree);
    }
#endif // FEATURE_SIMD

    int srcCount = BuildIndirUses(indirTree);
    buildInternalRegisterUses();

    // Stores and null checks produce no value; only loads define a register.
    if (!indirTree->OperIs(GT_STOREIND, GT_NULLCHECK))
    {
        BuildDef(indirTree);
    }
    return srcCount;
}
845
846 //------------------------------------------------------------------------
847 // BuildCall: Set the NodeInfo for a call.
848 //
849 // Arguments:
850 //    call - The call node of interest
851 //
852 // Return Value:
853 //    The number of sources consumed by this node.
854 //
int LinearScan::BuildCall(GenTreeCall* call)
{
    bool                  hasMultiRegRetVal = false;
    const ReturnTypeDesc* retTypeDesc       = nullptr;
    regMaskTP             dstCandidates     = RBM_NONE;

    int srcCount = 0;
    int dstCount = 0;
    if (call->TypeGet() != TYP_VOID)
    {
        hasMultiRegRetVal = call->HasMultiRegRetVal();
        if (hasMultiRegRetVal)
        {
            // dst count = number of registers in which the value is returned by call
            retTypeDesc = call->GetReturnTypeDesc();
            dstCount    = retTypeDesc->GetReturnRegCount();
        }
        else
        {
            dstCount = 1;
        }
    }

    // For an indirect call the target address comes from either gtControlExpr
    // or gtCallAddr; normalize both cases into 'ctrlExpr'.
    GenTree*  ctrlExpr           = call->gtControlExpr;
    regMaskTP ctrlExprCandidates = RBM_NONE;
    if (call->gtCallType == CT_INDIRECT)
    {
        // either gtControlExpr != null or gtCallAddr != null.
        // Both cannot be non-null at the same time.
        assert(ctrlExpr == nullptr);
        assert(call->gtCallAddr != nullptr);
        ctrlExpr = call->gtCallAddr;
    }

    // set reg requirements on call target represented as control sequence.
    if (ctrlExpr != nullptr)
    {
        // we should never see a gtControlExpr whose type is void.
        assert(ctrlExpr->TypeGet() != TYP_VOID);

        // In case of fast tail implemented as jmp, make sure that gtControlExpr is
        // computed into a register.
        if (call->IsFastTailCall())
        {
            // Fast tail call - make sure that call target is always computed in volatile registers
            // that will not be overridden by epilog sequence.
            ctrlExprCandidates = allRegs(TYP_INT) & RBM_INT_CALLEE_TRASH;
            if (compiler->getNeedsGSSecurityCookie())
            {
                // Keep the GS-cookie scratch registers free: the epilog's cookie
                // check uses them before the tail-call jump.
                ctrlExprCandidates &= ~(genRegMask(REG_GSCOOKIE_TMP_0) | genRegMask(REG_GSCOOKIE_TMP_1));
            }
            assert(ctrlExprCandidates != RBM_NONE);
        }
    }
    else if (call->IsR2ROrVirtualStubRelativeIndir())
    {
        // For R2R and VSD we have stub address in REG_R2R_INDIRECT_PARAM
        // and will load call address into the temp register from this register.
        regMaskTP candidates = RBM_NONE;
        if (call->IsFastTailCall())
        {
            candidates = allRegs(TYP_INT) & RBM_INT_CALLEE_TRASH;
            assert(candidates != RBM_NONE);
        }

        buildInternalIntRegisterDefForNode(call, candidates);
    }

    RegisterType registerType = call->TypeGet();

    // Set destination candidates for return value of the call.

    if (hasMultiRegRetVal)
    {
        assert(retTypeDesc != nullptr);
        dstCandidates = retTypeDesc->GetABIReturnRegs();
    }
    else if (varTypeUsesFloatArgReg(registerType))
    {
        dstCandidates = RBM_FLOATRET;
    }
    else if (registerType == TYP_LONG)
    {
        dstCandidates = RBM_LNGRET;
    }
    else
    {
        dstCandidates = RBM_INTRET;
    }

    // First, count reg args
    // Each register argument corresponds to one source.
    bool callHasFloatRegArgs = false;

    for (CallArg& arg : call->gtArgs.LateArgs())
    {
        CallArgABIInformation& abiInfo = arg.AbiInfo;
        GenTree*               argNode = arg.GetLateNode();

#ifdef DEBUG
        regNumber argReg = abiInfo.GetRegNum();
#endif

        if (argNode->gtOper == GT_PUTARG_STK)
        {
            // late arg that is not passed in a register
            assert(abiInfo.GetRegNum() == REG_STK);
            // These should never be contained.
            assert(!argNode->isContained());
            continue;
        }

        // A GT_FIELD_LIST has a TYP_VOID, but is used to represent a multireg struct
        if (argNode->OperGet() == GT_FIELD_LIST)
        {
            assert(argNode->isContained());

            // There could be up to 2 PUTARG_REGs in the list.
            // Each field use is pinned to the register the PUTARG_REG was assigned.
            for (GenTreeFieldList::Use& use : argNode->AsFieldList()->Uses())
            {
#ifdef DEBUG
                assert(use.GetNode()->OperIs(GT_PUTARG_REG));
#endif
                BuildUse(use.GetNode(), genRegMask(use.GetNode()->GetRegNum()));
                srcCount++;
            }
        }
        else if (argNode->OperGet() == GT_PUTARG_SPLIT)
        {
            // A split arg defines several registers; build one use per register,
            // each pinned to the register recorded on the node.
            unsigned regCount = argNode->AsPutArgSplit()->gtNumRegs;
            assert(regCount == abiInfo.NumRegs);
            for (unsigned int i = 0; i < regCount; i++)
            {
                BuildUse(argNode, genRegMask(argNode->AsPutArgSplit()->GetRegNumByIdx(i)), i);
            }
            srcCount += regCount;
        }
        else
        {
            assert(argNode->OperIs(GT_PUTARG_REG));
            assert(argNode->GetRegNum() == argReg);
            HandleFloatVarArgs(call, argNode, &callHasFloatRegArgs);
            {
                BuildUse(argNode, genRegMask(argNode->GetRegNum()));
                srcCount++;
            }
        }
    }

#ifdef DEBUG
    // Now, count stack args
    // Note that these need to be computed into a register, but then
    // they're just stored to the stack - so the reg doesn't
    // need to remain live until the call.  In fact, it must not
    // because the code generator doesn't actually consider it live,
    // so it can't be spilled.

    for (CallArg& arg : call->gtArgs.EarlyArgs())
    {
        GenTree* argNode = arg.GetEarlyNode();

        // Skip arguments that have been moved to the Late Arg list
        if (arg.GetLateNode() == nullptr)
        {
            // PUTARG_SPLIT nodes must be in the gtCallLateArgs list, since they
            // define registers used by the call.
            assert(argNode->OperGet() != GT_PUTARG_SPLIT);
            if (argNode->gtOper == GT_PUTARG_STK)
            {
                assert(arg.AbiInfo.GetRegNum() == REG_STK);
            }
            else
            {
                assert(!argNode->IsValue() || argNode->IsUnusedValue());
            }
        }
    }
#endif // DEBUG

    // If it is a fast tail call, the target's candidates were already restricted
    // to callee-trash registers above, so there is no need to set source
    // candidates on the call target again.
    if (call->IsVarargs() && callHasFloatRegArgs && !call->IsFastTailCall() && (ctrlExpr != nullptr))
    {
        // Don't assign the call target to any of the argument registers because
        // we will use them to also pass floating point arguments as required
        // by RISCV64 ABI.
        ctrlExprCandidates = allRegs(TYP_INT) & ~(RBM_ARG_REGS);
    }

    if (ctrlExpr != nullptr)
    {
        BuildUse(ctrlExpr, ctrlExprCandidates);
        srcCount++;
    }

    buildInternalRegisterUses();

    // Now generate defs and kills.
    regMaskTP killMask = getKillSetForCall(call);
    BuildDefsWithKills(call, dstCount, dstCandidates, killMask);

    // No args are placed in registers anymore.
    placedArgRegs      = RBM_NONE;
    numPlacedArgLocals = 0;
    return srcCount;
}
1061
1062 //------------------------------------------------------------------------
1063 // BuildPutArgStk: Set the NodeInfo for a GT_PUTARG_STK node
1064 //
1065 // Arguments:
1066 //    argNode - a GT_PUTARG_STK node
1067 //
1068 // Return Value:
1069 //    The number of sources consumed by this node.
1070 //
1071 // Notes:
1072 //    Set the child node(s) to be contained when we have a multireg arg
1073 //
1074 int LinearScan::BuildPutArgStk(GenTreePutArgStk* argNode)
1075 {
1076     assert(argNode->gtOper == GT_PUTARG_STK);
1077
1078     GenTree* src = argNode->gtGetOp1();
1079
1080     int srcCount = 0;
1081
1082     // Do we have a TYP_STRUCT argument (or a GT_FIELD_LIST), if so it must be a multireg pass-by-value struct
1083     if (src->TypeIs(TYP_STRUCT))
1084     {
1085         // We will use store instructions that each write a register sized value
1086
1087         if (src->OperIs(GT_FIELD_LIST))
1088         {
1089             assert(src->isContained());
1090             // We consume all of the items in the GT_FIELD_LIST
1091             for (GenTreeFieldList::Use& use : src->AsFieldList()->Uses())
1092             {
1093                 BuildUse(use.GetNode());
1094                 srcCount++;
1095             }
1096         }
1097         else
1098         {
1099             // We can use a ld/st sequence so we need two internal registers for RISCV64.
1100             buildInternalIntRegisterDefForNode(argNode);
1101             buildInternalIntRegisterDefForNode(argNode);
1102
1103             assert(src->isContained());
1104
1105             if (src->OperGet() == GT_BLK)
1106             {
1107                 srcCount = BuildOperandUses(src->AsBlk()->Addr());
1108             }
1109             else
1110             {
1111                 // No source registers.
1112                 assert(src->OperIs(GT_LCL_VAR, GT_LCL_FLD));
1113             }
1114         }
1115     }
1116     else
1117     {
1118         assert(!src->isContained());
1119         srcCount = BuildOperandUses(src);
1120     }
1121     buildInternalRegisterUses();
1122     return srcCount;
1123 }
1124
1125 //------------------------------------------------------------------------
1126 // BuildPutArgSplit: Set the NodeInfo for a GT_PUTARG_SPLIT node
1127 //
1128 // Arguments:
1129 //    argNode - a GT_PUTARG_SPLIT node
1130 //
1131 // Return Value:
1132 //    The number of sources consumed by this node.
1133 //
1134 // Notes:
1135 //    Set the child node(s) to be contained
1136 //
int LinearScan::BuildPutArgSplit(GenTreePutArgSplit* argNode)
{
    int srcCount = 0;
    assert(argNode->gtOper == GT_PUTARG_SPLIT);

    GenTree* src = argNode->gtGetOp1();

    // Registers for split argument corresponds to source
    int dstCount = argNode->gtNumRegs;

    // Record the consecutive argument registers occupied by this split arg,
    // starting at the node's first assigned register, and build their mask.
    regNumber argReg  = argNode->GetRegNum();
    regMaskTP argMask = RBM_NONE;
    for (unsigned i = 0; i < argNode->gtNumRegs; i++)
    {
        regNumber thisArgReg = (regNumber)((unsigned)argReg + i);
        argMask |= genRegMask(thisArgReg);
        argNode->SetRegNumByIdx(thisArgReg, i);
    }

    if (src->OperGet() == GT_FIELD_LIST)
    {
        // Generated code:
        // 1. Consume all of the items in the GT_FIELD_LIST (source)
        // 2. Store to target slot and move to target registers (destination) from source
        //
        unsigned sourceRegCount = 0;

        // To avoid redundant moves, have the argument operand computed in the
        // register in which the argument is passed to the call.

        for (GenTreeFieldList::Use& use : src->AsFieldList()->Uses())
        {
            GenTree* node = use.GetNode();
            assert(!node->isContained());
            // The only multi-reg nodes we should see are OperIsMultiRegOp()
            unsigned currentRegCount = 1;
            assert(!node->IsMultiRegNode());

            // Consume all the registers, setting the appropriate register mask for the ones that
            // go into registers. Fields beyond the register count spill to the
            // stack slot and get an unconstrained (RBM_NONE) use.
            for (unsigned regIndex = 0; regIndex < currentRegCount; regIndex++)
            {
                regMaskTP sourceMask = RBM_NONE;
                if (sourceRegCount < argNode->gtNumRegs)
                {
                    sourceMask = genRegMask((regNumber)((unsigned)argReg + sourceRegCount));
                }
                sourceRegCount++;
                BuildUse(node, sourceMask, regIndex);
            }
        }
        srcCount += sourceRegCount;
        assert(src->isContained());
    }
    else
    {
        assert(src->TypeIs(TYP_STRUCT) && src->isContained());

        if (src->OperIs(GT_BLK))
        {
            // If the PUTARG_SPLIT clobbers only one register we may need an
            // extra internal register in case there is a conflict between the
            // source address register and target register.
            if (argNode->gtNumRegs == 1)
            {
                // We can use a ld/st sequence so we need an internal register
                // that is guaranteed not to overlap the target arg register.
                buildInternalIntRegisterDefForNode(argNode, allRegs(TYP_INT) & ~argMask);
            }

            // We will generate code that loads from the BLK's address, which must be in a register.
            srcCount = BuildOperandUses(src->AsBlk()->Addr());
        }
        else
        {
            // We will generate all of the code for the GT_PUTARG_SPLIT and LCL_VAR/LCL_FLD as one contained operation.
            assert(src->OperIsLocalRead());
        }
    }
    buildInternalRegisterUses();
    BuildDefs(argNode, dstCount, argMask);
    return srcCount;
}
1219
1220 //------------------------------------------------------------------------
1221 // BuildBlockStore: Build the RefPositions for a block store node.
1222 //
1223 // Arguments:
1224 //    blkNode       - The block store node of interest
1225 //
1226 // Return Value:
1227 //    The number of sources consumed by this node.
1228 //
int LinearScan::BuildBlockStore(GenTreeBlk* blkNode)
{
    GenTree* dstAddr = blkNode->Addr();
    GenTree* src     = blkNode->Data();
    unsigned size    = blkNode->Size();

    // For init blocks this is the fill value; for copy blocks, the source address.
    GenTree* srcAddrOrFill = nullptr;

    regMaskTP dstAddrRegMask = RBM_NONE;
    regMaskTP srcRegMask     = RBM_NONE;
    regMaskTP sizeRegMask    = RBM_NONE;

    if (blkNode->OperIsInitBlkOp())
    {
        if (src->OperIs(GT_INIT_VAL))
        {
            // Unwrap the INIT_VAL to get the actual fill value operand.
            assert(src->isContained());
            src = src->AsUnOp()->gtGetOp1();
        }

        srcAddrOrFill = src;

        switch (blkNode->gtBlkOpKind)
        {
            case GenTreeBlk::BlkOpKindUnroll:
            {
                if (dstAddr->isContained())
                {
                    // Since the dstAddr is contained the address will be computed in CodeGen.
                    // This might require an integer register to store the value.
                    buildInternalIntRegisterDefForNode(blkNode);
                }

                const bool isDstRegAddrAlignmentKnown = dstAddr->OperIs(GT_LCL_ADDR);

                if (isDstRegAddrAlignmentKnown && (size > FP_REGSIZE_BYTES))
                {
                    // TODO-RISCV64: For larger block sizes CodeGen can choose to use 16-byte SIMD instructions.
                    // here just used a temp register.
                    buildInternalIntRegisterDefForNode(blkNode);
                }
            }
            break;

            case GenTreeBlk::BlkOpKindHelper:
                // Helper call: operands go in the standard argument registers.
                assert(!src->isContained());
                dstAddrRegMask = RBM_ARG_0;
                srcRegMask     = RBM_ARG_1;
                sizeRegMask    = RBM_ARG_2;
                break;

            default:
                unreached();
        }
    }
    else
    {
        if (src->OperIs(GT_IND))
        {
            assert(src->isContained());
            srcAddrOrFill = src->AsIndir()->Addr();
        }

        switch (blkNode->gtBlkOpKind)
        {
            case GenTreeBlk::BlkOpKindCpObjUnroll:
            {
                // We don't need to materialize the struct size but we still need
                // a temporary register to perform the sequence of loads and stores.
                // We can't use the special Write Barrier registers, so exclude them from the mask
                regMaskTP internalIntCandidates =
                    allRegs(TYP_INT) & ~(RBM_WRITE_BARRIER_DST_BYREF | RBM_WRITE_BARRIER_SRC_BYREF);
                buildInternalIntRegisterDefForNode(blkNode, internalIntCandidates);

                if (size >= 2 * REGSIZE_BYTES)
                {
                    // TODO-RISCV64: We will use ld/st paired to reduce code size and improve performance
                    // so we need to reserve an extra internal register.
                    buildInternalIntRegisterDefForNode(blkNode, internalIntCandidates);
                }

                // If we have a dest address we want it in RBM_WRITE_BARRIER_DST_BYREF.
                dstAddrRegMask = RBM_WRITE_BARRIER_DST_BYREF;

                // If we have a source address we want it in REG_WRITE_BARRIER_SRC_BYREF.
                // Otherwise, if it is a local, codegen will put its address in REG_WRITE_BARRIER_SRC_BYREF,
                // which is killed by a StoreObj (and thus needn't be reserved).
                if (srcAddrOrFill != nullptr)
                {
                    assert(!srcAddrOrFill->isContained());
                    srcRegMask = RBM_WRITE_BARRIER_SRC_BYREF;
                }
            }
            break;

            case GenTreeBlk::BlkOpKindUnroll:
                // One temp register to shuttle values between the load and store.
                buildInternalIntRegisterDefForNode(blkNode);
                break;

            case GenTreeBlk::BlkOpKindHelper:
                // Helper call: operands go in the standard argument registers.
                dstAddrRegMask = RBM_ARG_0;
                if (srcAddrOrFill != nullptr)
                {
                    assert(!srcAddrOrFill->isContained());
                    srcRegMask = RBM_ARG_1;
                }
                sizeRegMask = RBM_ARG_2;
                break;

            default:
                unreached();
        }
    }

    if (!blkNode->OperIs(GT_STORE_DYN_BLK) && (sizeRegMask != RBM_NONE))
    {
        // Reserve a temp register for the block size argument.
        buildInternalIntRegisterDefForNode(blkNode, sizeRegMask);
    }

    int useCount = 0;

    if (!dstAddr->isContained())
    {
        useCount++;
        BuildUse(dstAddr, dstAddrRegMask);
    }
    else if (dstAddr->OperIsAddrMode())
    {
        // Contained address mode: only the base register is a live use.
        useCount += BuildAddrUses(dstAddr->AsAddrMode()->Base());
    }

    if (srcAddrOrFill != nullptr)
    {
        if (!srcAddrOrFill->isContained())
        {
            useCount++;
            BuildUse(srcAddrOrFill, srcRegMask);
        }
        else if (srcAddrOrFill->OperIsAddrMode())
        {
            useCount += BuildAddrUses(srcAddrOrFill->AsAddrMode()->Base());
        }
    }

    if (blkNode->OperIs(GT_STORE_DYN_BLK))
    {
        // Dynamically-sized block: the size operand is a real use.
        useCount++;
        BuildUse(blkNode->AsStoreDynBlk()->gtDynamicSize, sizeRegMask);
    }

    buildInternalRegisterUses();
    regMaskTP killMask = getKillSetForBlockStore(blkNode);
    BuildDefsWithKills(blkNode, 0, RBM_NONE, killMask);
    return useCount;
}
1385
1386 //------------------------------------------------------------------------
1387 // BuildCast: Set the NodeInfo for a GT_CAST.
1388 //
1389 // Arguments:
1390 //    cast - The GT_CAST node
1391 //
1392 // Return Value:
1393 //    The number of sources consumed by this node.
1394 //
1395 int LinearScan::BuildCast(GenTreeCast* cast)
1396 {
1397     enum CodeGen::GenIntCastDesc::CheckKind kind = CodeGen::GenIntCastDesc(cast).CheckKind();
1398     if ((kind != CodeGen::GenIntCastDesc::CHECK_NONE) && (kind != CodeGen::GenIntCastDesc::CHECK_POSITIVE))
1399     {
1400         buildInternalIntRegisterDefForNode(cast);
1401     }
1402     buildInternalRegisterUses();
1403     int srcCount = BuildOperandUses(cast->CastOp());
1404     BuildDef(cast);
1405
1406     if (varTypeIsFloating(cast->gtOp1) && !varTypeIsFloating(cast->TypeGet()))
1407     {
1408         buildInternalIntRegisterDefForNode(cast);
1409         buildInternalRegisterUses();
1410     }
1411
1412     return srcCount;
1413 }
1414
1415 #endif // TARGET_RISCV64