1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
8 XX Register Requirements for ARM64 XX
10 XX This encapsulates all the logic for setting register requirements for XX
11 XX the ARM64 architecture. XX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
15 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
26 #include "sideeffects.h"
29 //------------------------------------------------------------------------
30 // BuildNode: Build the RefPositions for a node
33 // treeNode - the node of interest
36 // The number of sources consumed by this node.
40 // LSRA Has been initialized.
43 // RefPositions have been built for all the register defs and uses required
46 int LinearScan::BuildNode(GenTree* tree)
48 assert(!tree->isContained());
51 regMaskTP dstCandidates = RBM_NONE;
52 regMaskTP killMask = RBM_NONE;
53 bool isLocalDefUse = false;
55 // Reset the build-related members of LinearScan.
58 // Set the default dstCount. This may be modified below.
62 if (tree->IsUnusedValue())
72 switch (tree->OperGet())
75 srcCount = BuildSimple(tree);
81 // We handle tracked variables differently from non-tracked ones. If it is tracked,
82 // we will simply add a use of the tracked variable at its parent/consumer.
83 // Otherwise, for a use we need to actually add the appropriate references for loading
84 // or storing the variable.
86 // A tracked variable won't actually get used until the appropriate ancestor tree node
87 // is processed, unless this is marked "isLocalDefUse" because it is a stack-based argument
88 // to a call or an orphaned dead node.
90 LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum];
91 if (isCandidateVar(varDsc))
93 INDEBUG(dumpNodeInfo(tree, dstCandidates, 0, 1));
98 // Need an additional register to read upper 4 bytes of Vector3.
99 if (tree->TypeGet() == TYP_SIMD12)
101 // We need an internal register different from targetReg in which 'tree' produces its result
102 // because both targetReg and internal reg will be in use at the same time.
103 buildInternalFloatRegisterDefForNode(tree, allSIMDRegs());
104 setInternalRegsDelayFree = true;
105 buildInternalRegisterUses();
112 case GT_STORE_LCL_FLD:
113 case GT_STORE_LCL_VAR:
115 assert(dstCount == 0);
116 srcCount = BuildStoreLoc(tree->AsLclVarCommon());
120 // These should always be contained. We don't correctly allocate or
121 // generate code for a non-contained GT_FIELD_LIST.
122 noway_assert(!"Non-contained GT_FIELD_LIST");
132 assert(dstCount == 0);
135 case GT_START_PREEMPTGC:
136 // This kills GC refs in callee save regs
138 assert(dstCount == 0);
139 BuildDefsWithKills(tree, 0, RBM_NONE, RBM_NONE);
144 GenTreeDblCon* dblConst = tree->AsDblCon();
145 double constValue = dblConst->gtDblCon.gtDconVal;
147 if (emitter::emitIns_valid_imm_for_fmov(constValue))
149 // Directly encode constant to instructions.
153 // Reserve int to load constant from memory (IF_LARGELDC)
154 buildInternalIntRegisterDefForNode(tree);
155 buildInternalRegisterUses();
163 assert(dstCount == 1);
// Constant-producing defs are flagged so LSRA can rematerialize them instead of spilling.
164 RefPosition* def = BuildDef(tree);
165 def->getInterval()->isConstant = true;
174 assert(dstCount == 0);
179 srcCount = BuildReturn(tree);
183 assert(dstCount == 0);
184 if (tree->TypeGet() == TYP_VOID)
190 assert(tree->TypeGet() == TYP_INT);
192 BuildUse(tree->gtGetOp1(), RBM_INTRET);
197 // A GT_NOP is either a passthrough (if it is void, or if it has
198 // a child), but must be considered to produce a dummy value if it
199 // has a type but no child.
201 if (tree->TypeGet() != TYP_VOID && tree->gtGetOp1() == nullptr)
203 assert(dstCount == 1);
208 assert(dstCount == 0);
214 assert(dstCount == 0);
219 assert(dstCount == 0);
223 // This should never occur since switch nodes must not be visible at this
226 noway_assert(!"Switch must be lowered at this point");
231 assert(dstCount == 1);
235 case GT_SWITCH_TABLE:
236 buildInternalIntRegisterDefForNode(tree);
237 srcCount = BuildBinaryUses(tree->AsOp());
238 assert(dstCount == 0);
242 noway_assert(!"We should never hit any assignment operator in lowering");
248 if (varTypeIsFloating(tree->TypeGet()))
250 // overflow operations aren't supported on float/double types.
251 assert(!tree->gtOverflow());
253 // No implicit conversions at this stage as the expectation is that
254 // everything is made explicit by adding casts.
255 assert(tree->gtGetOp1()->TypeGet() == tree->gtGetOp2()->TypeGet());
267 srcCount = BuildBinaryUses(tree->AsOp());
268 assert(dstCount == 1);
273 // this just turns into a compare of its child with an int
274 // + a conditional call
275 BuildUse(tree->gtGetOp1());
277 assert(dstCount == 0);
278 killMask = compiler->compHelperCallKillSet(CORINFO_HELP_STOP_FOR_GC);
279 BuildDefsWithKills(tree, 0, RBM_NONE, killMask);
284 NYI_IF(varTypeIsFloating(tree->TypeGet()), "FP Remainder in ARM64");
285 assert(!"Shouldn't see an integer typed GT_MOD node in ARM64");
290 if (tree->gtOverflow())
292 // Need a register different from target reg to check for overflow.
293 buildInternalIntRegisterDefForNode(tree);
294 setInternalRegsDelayFree = true;
302 srcCount = BuildBinaryUses(tree->AsOp());
303 buildInternalRegisterUses();
304 assert(dstCount == 1);
311 noway_assert((tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Abs) ||
312 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Ceiling) ||
313 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Floor) ||
314 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Round) ||
315 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Sqrt));
317 // Both operand and its result must be of the same floating point type.
318 GenTree* op1 = tree->gtGetOp1();
319 assert(varTypeIsFloating(op1));
320 assert(op1->TypeGet() == tree->TypeGet());
324 assert(dstCount == 1);
331 srcCount = BuildSIMD(tree->AsSIMD());
333 #endif // FEATURE_SIMD
335 #ifdef FEATURE_HW_INTRINSICS
337 srcCount = BuildHWIntrinsic(tree->AsHWIntrinsic());
339 #endif // FEATURE_HW_INTRINSICS
342 assert(dstCount == 1);
343 srcCount = BuildCast(tree->AsCast());
348 BuildUse(tree->gtGetOp1());
350 assert(dstCount == 1);
363 srcCount = BuildCmp(tree);
368 assert(dstCount == 1);
369 buildInternalIntRegisterDefForNode(tree);
370 BuildUse(tree->gtGetOp1());
372 buildInternalRegisterUses();
377 GenTreeCmpXchg* cmpXchgNode = tree->AsCmpXchg();
378 srcCount = cmpXchgNode->gtOpComparand->isContained() ? 2 : 3;
379 assert(dstCount == 1);
381 if (!compiler->compSupports(InstructionSet_Atomics))
383 // ARMv8 exclusives require a single internal register
384 buildInternalIntRegisterDefForNode(tree);
387 // For ARMv8 exclusives the lifetime of the addr and data must be extended because
388 // they may be used multiple times during retries
390 // For ARMv8.1 atomic cas the lifetime of the addr and data must be extended to prevent
391 // them being reused as the target register which must be destroyed early
393 RefPosition* locationUse = BuildUse(tree->gtCmpXchg.gtOpLocation);
394 setDelayFree(locationUse);
395 RefPosition* valueUse = BuildUse(tree->gtCmpXchg.gtOpValue);
396 setDelayFree(valueUse);
397 if (!cmpXchgNode->gtOpComparand->isContained())
399 RefPosition* comparandUse = BuildUse(tree->gtCmpXchg.gtOpComparand);
401 // For ARMv8 exclusives the lifetime of the comparand must be extended because
402 // it may be used multiple times during retries
403 if (!compiler->compSupports(InstructionSet_Atomics))
405 setDelayFree(comparandUse);
409 // Internals may not collide with target
410 setInternalRegsDelayFree = true;
411 buildInternalRegisterUses();
420 assert(dstCount == (tree->TypeGet() == TYP_VOID) ? 0 : 1);
421 srcCount = tree->gtGetOp2()->isContained() ? 1 : 2;
423 if (!compiler->compSupports(InstructionSet_Atomics))
425 // GT_XCHG requires a single internal register; the others require two.
426 buildInternalIntRegisterDefForNode(tree);
427 if (tree->OperGet() != GT_XCHG)
429 buildInternalIntRegisterDefForNode(tree);
433 assert(!tree->gtGetOp1()->isContained());
434 RefPosition* op1Use = BuildUse(tree->gtGetOp1());
435 RefPosition* op2Use = nullptr;
436 if (!tree->gtGetOp2()->isContained())
438 op2Use = BuildUse(tree->gtGetOp2());
441 // For ARMv8 exclusives the lifetime of the addr and data must be extended because
442 // they may be used multiple times during retries
443 if (!compiler->compSupports(InstructionSet_Atomics))
445 // Internals may not collide with target
448 setDelayFree(op1Use);
449 if (op2Use != nullptr)
451 setDelayFree(op2Use);
453 setInternalRegsDelayFree = true;
455 buildInternalRegisterUses();
464 #if FEATURE_ARG_SPLIT
465 case GT_PUTARG_SPLIT:
466 srcCount = BuildPutArgSplit(tree->AsPutArgSplit());
467 dstCount = tree->AsPutArgSplit()->gtNumRegs;
469 #endif // FEATURE_ARG_SPLIT
472 srcCount = BuildPutArgStk(tree->AsPutArgStk());
476 srcCount = BuildPutArgReg(tree->AsUnOp());
480 srcCount = BuildCall(tree->AsCall());
481 if (tree->AsCall()->HasMultiRegRetVal())
483 dstCount = tree->AsCall()->GetReturnTypeDesc()->GetReturnRegCount();
489 // For a GT_ADDR, the child node should not be evaluated into a register
490 GenTree* child = tree->gtGetOp1();
491 assert(!isCandidateLocalRef(child));
492 assert(child->isContained());
493 assert(dstCount == 1);
501 // These should all be eliminated prior to Lowering.
502 assert(!"Non-store block node in Lowering");
508 case GT_STORE_DYN_BLK:
509 srcCount = BuildBlockStore(tree->AsBlk());
513 // Always a passthrough of its child's value.
514 assert(!"INIT_VAL should always be contained");
520 assert(dstCount == 1);
522 // Need a variable number of temp regs (see genLclHeap() in codegenamd64.cpp):
523 // Here '-' means don't care.
525 // Size? Init Memory? # temp regs
527 // const and <=6 ptr words - 0
528 // const and <PageSize No 0
529 // >6 ptr words Yes 0
534 GenTree* size = tree->gtGetOp1();
535 if (size->IsCnsIntOrI())
537 assert(size->isContained());
540 size_t sizeVal = size->gtIntCon.gtIconVal;
544 // Compute the amount of memory to properly STACK_ALIGN.
545 // Note: The Gentree node is not updated here as it is cheap to recompute stack aligned size.
546 // This should also help in debugging as we can examine the original size specified with
548 sizeVal = AlignUp(sizeVal, STACK_ALIGN);
549 size_t stpCount = sizeVal / (REGSIZE_BYTES * 2);
551 // For small allocations up to 4 'stp' instructions (i.e. 16 to 64 bytes of localloc)
555 // Need no internal registers
557 else if (!compiler->info.compInitMem)
559 // No need to initialize allocated stack space.
560 if (sizeVal < compiler->eeGetPageSize())
562 // Need no internal registers
566 // We need two registers: regCnt and RegTmp
567 buildInternalIntRegisterDefForNode(tree);
568 buildInternalIntRegisterDefForNode(tree);
576 if (!compiler->info.compInitMem)
578 buildInternalIntRegisterDefForNode(tree);
579 buildInternalIntRegisterDefForNode(tree);
583 if (!size->isContained())
587 buildInternalRegisterUses();
592 case GT_ARR_BOUNDS_CHECK:
595 #endif // FEATURE_SIMD
596 #ifdef FEATURE_HW_INTRINSICS
597 case GT_HW_INTRINSIC_CHK:
598 #endif // FEATURE_HW_INTRINSICS
600 GenTreeBoundsChk* node = tree->AsBoundsChk();
601 // Consumes arrLen & index - has no result
602 assert(dstCount == 0);
603 srcCount = BuildOperandUses(node->gtIndex);
604 srcCount += BuildOperandUses(node->gtArrLen);
609 // These must have been lowered to GT_ARR_INDEX
610 noway_assert(!"We should never see a GT_ARR_ELEM in lowering");
612 assert(dstCount == 0);
618 assert(dstCount == 1);
619 buildInternalIntRegisterDefForNode(tree);
620 setInternalRegsDelayFree = true;
622 // For GT_ARR_INDEX, the lifetime of the arrObj must be extended because it is actually used multiple
623 // times while the result is being computed.
624 RefPosition* arrObjUse = BuildUse(tree->AsArrIndex()->ArrObj());
625 setDelayFree(arrObjUse);
626 BuildUse(tree->AsArrIndex()->IndexExpr());
627 buildInternalRegisterUses();
633 // This consumes the offset, if any, the arrObj and the effective index,
634 // and produces the flattened offset for this dimension.
636 if (!tree->gtArrOffs.gtOffset->isContained())
638 BuildUse(tree->AsArrOffs()->gtOffset);
641 BuildUse(tree->AsArrOffs()->gtIndex);
642 BuildUse(tree->AsArrOffs()->gtArrObj);
643 assert(dstCount == 1);
644 buildInternalIntRegisterDefForNode(tree);
645 buildInternalRegisterUses();
651 GenTreeAddrMode* lea = tree->AsAddrMode();
653 GenTree* base = lea->Base();
654 GenTree* index = lea->Index();
655 int cns = lea->Offset();
657 // This LEA is instantiating an address, so we set up the srcCount here.
664 if (index != nullptr)
669 assert(dstCount == 1);
671 // On ARM64 we may need a single internal register
672 // (when both conditions are true then we still only need a single internal register)
673 if ((index != nullptr) && (cns != 0))
675 // ARM64 does not support both Index and offset so we need an internal register
676 buildInternalIntRegisterDefForNode(tree);
678 else if (!emitter::emitIns_valid_imm_for_add(cns, EA_8BYTE))
680 // This offset can't be contained in the add instruction, so we need an internal register
681 buildInternalIntRegisterDefForNode(tree);
683 buildInternalRegisterUses();
690 assert(dstCount == 0);
692 if (compiler->codeGen->gcInfo.gcIsWriteBarrierStoreIndNode(tree))
694 srcCount = BuildGCWriteBarrier(tree);
698 srcCount = BuildIndir(tree->AsIndir());
699 if (!tree->gtGetOp2()->isContained())
701 BuildUse(tree->gtGetOp2());
708 // Unlike ARM, ARM64 implements NULLCHECK as a load to REG_ZR, so no internal register
709 // is required, and it is not a localDefUse.
710 assert(dstCount == 0);
711 assert(!tree->gtGetOp1()->isContained());
712 BuildUse(tree->gtGetOp1());
717 assert(dstCount == 1);
718 srcCount = BuildIndir(tree->AsIndir());
723 assert(dstCount == 1);
724 BuildDef(tree, RBM_EXCEPTION_OBJECT);
729 // GT_CLS_VAR, by the time we reach the backend, must always
731 // It will produce a result of the type of the
732 // node, and use an internal register for the address.
734 assert(dstCount == 1);
735 assert((tree->gtFlags & (GTF_VAR_DEF | GTF_VAR_USEASG)) == 0);
736 buildInternalIntRegisterDefForNode(tree);
737 buildInternalRegisterUses();
742 assert(dstCount == 1);
743 srcCount = BuildBinaryUses(tree->AsOp());
744 buildInternalIntRegisterDefForNode(tree);
745 buildInternalRegisterUses();
749 } // end switch (tree->OperGet())
// A value-producing node with no consumer still defines a register; record it as a
// local def-use so the def is not lost.
751 if (tree->IsUnusedValue() && (dstCount != 0))
753 isLocalDefUse = true;
755 // We need to be sure that we've set srcCount and dstCount appropriately
756 assert((dstCount < 2) || tree->IsMultiRegCall());
757 assert(isLocalDefUse == (tree->IsValue() && tree->IsUnusedValue()));
758 assert(!tree->IsUnusedValue() || (dstCount != 0));
759 assert(dstCount == tree->GetRegisterDstCount());
760 INDEBUG(dumpNodeInfo(tree, dstCandidates, srcCount, dstCount));
765 //------------------------------------------------------------------------
766 // BuildSIMD: Set the NodeInfo for a GT_SIMD tree.
769 // tree - The GT_SIMD node of interest
772 // The number of sources consumed by this node.
774 int LinearScan::BuildSIMD(GenTreeSIMD* simdTree)
777     // Only SIMDIntrinsicInit can be contained
778 if (simdTree->isContained())
780 assert(simdTree->gtSIMDIntrinsicID == SIMDIntrinsicInit);
782 int dstCount = simdTree->IsValue() ? 1 : 0;
783 assert(dstCount == 1);
785 bool buildUses = true;
787 GenTree* op1 = simdTree->gtGetOp1();
788 GenTree* op2 = simdTree->gtGetOp2();
790 switch (simdTree->gtSIMDIntrinsicID)
792 case SIMDIntrinsicInit:
793 case SIMDIntrinsicCast:
794 case SIMDIntrinsicSqrt:
795 case SIMDIntrinsicAbs:
796 case SIMDIntrinsicConvertToSingle:
797 case SIMDIntrinsicConvertToInt32:
798 case SIMDIntrinsicConvertToDouble:
799 case SIMDIntrinsicConvertToInt64:
800 case SIMDIntrinsicWidenLo:
801 case SIMDIntrinsicWidenHi:
802 // No special handling required.
805 case SIMDIntrinsicGetItem:
807 op1 = simdTree->gtGetOp1();
808 op2 = simdTree->gtGetOp2();
810 // We have an object and an index, either of which may be contained.
811 bool setOp2DelayFree = false;
812 if (!op2->IsCnsIntOrI() && (!op1->isContained() || op1->OperIsLocal()))
814 // If the index is not a constant and the object is not contained or is a local
815 // we will need a general purpose register to calculate the address
816 // internal register must not clobber input index
817 // TODO-Cleanup: An internal register will never clobber a source; this code actually
818 // ensures that the index (op2) doesn't interfere with the target.
819 buildInternalIntRegisterDefForNode(simdTree);
820 setOp2DelayFree = true;
822 srcCount += BuildOperandUses(op1);
823 if (!op2->isContained())
825 RefPosition* op2Use = BuildUse(op2);
828 setDelayFree(op2Use);
833 if (!op2->IsCnsIntOrI() && (!op1->isContained()))
835 // If vector is not already in memory (contained) and the index is not a constant,
836 // we will use the SIMD temp location to store the vector.
837 compiler->getSIMDInitTempVarNum();
843 case SIMDIntrinsicAdd:
844 case SIMDIntrinsicSub:
845 case SIMDIntrinsicMul:
846 case SIMDIntrinsicDiv:
847 case SIMDIntrinsicBitwiseAnd:
848 case SIMDIntrinsicBitwiseAndNot:
849 case SIMDIntrinsicBitwiseOr:
850 case SIMDIntrinsicBitwiseXor:
851 case SIMDIntrinsicMin:
852 case SIMDIntrinsicMax:
853 case SIMDIntrinsicEqual:
854 case SIMDIntrinsicLessThan:
855 case SIMDIntrinsicGreaterThan:
856 case SIMDIntrinsicLessThanOrEqual:
857 case SIMDIntrinsicGreaterThanOrEqual:
858 // No special handling required.
861 case SIMDIntrinsicSetX:
862 case SIMDIntrinsicSetY:
863 case SIMDIntrinsicSetZ:
864 case SIMDIntrinsicSetW:
865 case SIMDIntrinsicNarrow:
867 // Op1 will write to dst before Op2 is free
869 RefPosition* op2Use = BuildUse(op2);
870 setDelayFree(op2Use);
876 case SIMDIntrinsicInitN:
878 var_types baseType = simdTree->gtSIMDBaseType;
// Source count is the number of elements that fit in the vector for this base type.
879 srcCount = (short)(simdTree->gtSIMDSize / genTypeSize(baseType));
880 if (varTypeIsFloating(simdTree->gtSIMDBaseType))
882 // Need an internal register to stitch together all the values into a single vector in a SIMD reg.
883 buildInternalFloatRegisterDefForNode(simdTree);
887 for (GenTree* list = op1; list != nullptr; list = list->gtGetOp2())
889 assert(list->OperGet() == GT_LIST);
890 GenTree* listItem = list->gtGetOp1();
891 assert(listItem->TypeGet() == baseType);
892 assert(!listItem->isContained());
896 assert(initCount == srcCount);
902 case SIMDIntrinsicInitArray:
903 // We have an array and an index, which may be contained.
906 case SIMDIntrinsicOpEquality:
907 case SIMDIntrinsicOpInEquality:
908 buildInternalFloatRegisterDefForNode(simdTree);
911 case SIMDIntrinsicDotProduct:
912 buildInternalFloatRegisterDefForNode(simdTree);
915 case SIMDIntrinsicSelect:
916 // TODO-ARM64-CQ Allow lowering to see SIMDIntrinsicSelect so we can generate BSL VC, VA, VB
917 // bsl target register must be VC. Reserve a temp in case we need to shuffle things.
918 // This will require a different approach, as GenTreeSIMD has only two operands.
919 assert(!"SIMDIntrinsicSelect not yet supported");
920 buildInternalFloatRegisterDefForNode(simdTree);
923 case SIMDIntrinsicInitArrayX:
924 case SIMDIntrinsicInitFixed:
925 case SIMDIntrinsicCopyToArray:
926 case SIMDIntrinsicCopyToArrayX:
927 case SIMDIntrinsicNone:
928 case SIMDIntrinsicGetCount:
929 case SIMDIntrinsicGetOne:
930 case SIMDIntrinsicGetZero:
931 case SIMDIntrinsicGetAllOnes:
932 case SIMDIntrinsicGetX:
933 case SIMDIntrinsicGetY:
934 case SIMDIntrinsicGetZ:
935 case SIMDIntrinsicGetW:
936 case SIMDIntrinsicInstEquals:
937 case SIMDIntrinsicHWAccel:
938 case SIMDIntrinsicWiden:
939 case SIMDIntrinsicInvalid:
940 assert(!"These intrinsics should not be seen during register allocation");
944 noway_assert(!"Unimplemented SIMD node type.");
// Common path: build uses for op1/op2 unless a case above already did so.
949 assert(!op1->OperIs(GT_LIST));
950 assert(srcCount == 0);
951 srcCount = BuildOperandUses(op1);
952 if ((op2 != nullptr) && !op2->isContained())
954 srcCount += BuildOperandUses(op2);
957 assert(internalCount <= MaxInternalCount);
958 buildInternalRegisterUses();
965 assert(dstCount == 0);
969 #endif // FEATURE_SIMD
971 #ifdef FEATURE_HW_INTRINSICS
972 #include "hwintrinsic.h"
973 //------------------------------------------------------------------------
974 // BuildHWIntrinsic: Set the NodeInfo for a GT_HWIntrinsic tree.
977 // tree - The GT_HWIntrinsic node of interest
980 // The number of sources consumed by this node.
982 int LinearScan::BuildHWIntrinsic(GenTreeHWIntrinsic* intrinsicTree)
984 NamedIntrinsic intrinsicID = intrinsicTree->gtHWIntrinsicId;
985 int numArgs = HWIntrinsicInfo::lookupNumArgs(intrinsicTree);
987 GenTree* op1 = intrinsicTree->gtGetOp1();
988 GenTree* op2 = intrinsicTree->gtGetOp2();
989 GenTree* op3 = nullptr;
// A 3-arg intrinsic carries its operands as a GT_LIST in op1; unpack the exactly-three
// entries into op1/op2/op3.
992 if ((op1 != nullptr) && op1->OperIsList())
994 // op2 must be null, and there must be at least two more arguments.
995 assert(op2 == nullptr);
996 noway_assert(op1->AsArgList()->Rest() != nullptr);
997 noway_assert(op1->AsArgList()->Rest()->Rest() != nullptr);
998 assert(op1->AsArgList()->Rest()->Rest()->Rest() == nullptr);
999 op2 = op1->AsArgList()->Rest()->Current();
1000 op3 = op1->AsArgList()->Rest()->Rest()->Current();
1001 op1 = op1->AsArgList()->Current();
1004 bool op2IsDelayFree = false;
1005 bool op3IsDelayFree = false;
1007 // Create internal temps, and handle any other special requirements.
1008 switch (HWIntrinsicInfo::lookup(intrinsicID).form)
1010 case HWIntrinsicInfo::Sha1HashOp:
1011 assert((numArgs == 3) && (op2 != nullptr) && (op3 != nullptr));
1012 if (!op2->isContained())
1014 assert(!op3->isContained());
// RMW-style op: sources must stay live until after the target is written.
1015 op2IsDelayFree = true;
1016 op3IsDelayFree = true;
1017 setInternalRegsDelayFree = true;
1019 buildInternalFloatRegisterDefForNode(intrinsicTree);
1021 case HWIntrinsicInfo::SimdTernaryRMWOp:
1022 assert((numArgs == 3) && (op2 != nullptr) && (op3 != nullptr));
1023 if (!op2->isContained())
1025 assert(!op3->isContained());
1026 op2IsDelayFree = true;
1027 op3IsDelayFree = true;
1030 case HWIntrinsicInfo::Sha1RotateOp:
1031 buildInternalFloatRegisterDefForNode(intrinsicTree);
1034 case HWIntrinsicInfo::SimdExtractOp:
1035 case HWIntrinsicInfo::SimdInsertOp:
1036 if (!op2->isContained())
1038 // We need a temp to create a switch table
1039 buildInternalIntRegisterDefForNode(intrinsicTree);
// Operands still in list form: build a plain use for each list element.
1051 assert(!op2IsDelayFree && !op3IsDelayFree);
1052 assert(op1->OperIs(GT_LIST));
1054 for (GenTreeArgList* list = op1->AsArgList(); list != nullptr; list = list->Rest())
1056 srcCount += BuildOperandUses(list->Current());
1059 assert(srcCount == numArgs);
1065 srcCount += BuildOperandUses(op1);
1068 srcCount += (op2IsDelayFree) ? BuildDelayFreeUses(op2) : BuildOperandUses(op2);
1071 srcCount += (op3IsDelayFree) ? BuildDelayFreeUses(op3) : BuildOperandUses(op3);
1076 buildInternalRegisterUses();
// Only value-producing intrinsics define a destination register.
1079 if (intrinsicTree->IsValue())
1081 BuildDef(intrinsicTree);
1088 #endif // _TARGET_ARM64_