1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
8 XX Register Requirements for ARM64 XX
10 XX This encapsulates all the logic for setting register requirements for XX
11 XX the ARM64 architecture. XX
14 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
15 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
23 #ifndef LEGACY_BACKEND // This file is ONLY used for the RyuJIT backend that uses the linear scan register allocator
28 #include "sideeffects.h"
31 //------------------------------------------------------------------------
32 // TreeNodeInfoInit: Set the register requirements for RA.
35 // Takes care of annotating the register requirements
36 // for every TreeNodeInfo struct that maps to each tree node.
39 // LSRA has been initialized and there is a TreeNodeInfo node
40 // already allocated and initialized for every tree in the IR.
43 // Every TreeNodeInfo instance has the right annotations on register
44 // requirements needed by LSRA to build the Interval Table (source,
45 // destination and internal [temp] register counts).
47 void LinearScan::TreeNodeInfoInit(GenTree* tree, TreeNodeInfo* info)
// Classify the operator once up front; the generic GTK_* handling below keys off it.
49 unsigned kind = tree->OperKind();
50 RegisterType registerType = TypeGet(tree);
// A contained node is folded into its user's encoding and must not have
// registered any source uses of its own.
52 if (tree->isContained())
55 assert(info->srcCount == 0);
59 // Set the default dstCount. This may be modified below.
// A value with no user still defines a register; flag it as a local
// def-use so LSRA can release that register immediately after the def.
63 if (tree->IsUnusedValue())
65 info->isLocalDefUse = true;
// Operator-specific register requirements.
73 switch (tree->OperGet())
79 if (kind & (GTK_CONST | GTK_LEAF))
83 else if (kind & (GTK_SMPOP))
85 info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
93 case GT_STORE_LCL_FLD:
94 case GT_STORE_LCL_VAR:
96 assert(info->dstCount == 0);
97 TreeNodeInfoInitStoreLoc(tree->AsLclVarCommon(), info);
107 assert(info->dstCount == 0);
112 assert(info->dstCount == 1);
// Floating-point constant: fmov can encode only a limited set of
// immediates directly; any other constant is loaded from memory and
// needs an internal int register to form the address.
114 GenTreeDblCon* dblConst = tree->AsDblCon();
115 double constValue = dblConst->gtDblCon.gtDconVal;
117 if (emitter::emitIns_valid_imm_for_fmov(constValue))
119 // Directly encode constant to instructions.
123 // Reserve int to load constant from memory (IF_LARGELDC)
124 info->internalIntCount = 1;
134 assert(info->dstCount == 0);
139 TreeNodeInfoInitReturn(tree, info);
143 if (tree->TypeGet() == TYP_VOID)
146 assert(info->dstCount == 0);
150 assert(tree->TypeGet() == TYP_INT);
153 assert(info->dstCount == 0);
// The operand must end up in the fixed integer return register.
155 info->setSrcCandidates(this, RBM_INTRET);
156 LocationInfoListNode* locationInfo = getLocationInfo(tree->gtOp.gtOp1);
157 locationInfo->info.setSrcCandidates(this, RBM_INTRET);
158 useList.Append(locationInfo);
163 // A GT_NOP is either a passthrough (if it is void, or if it has
164 // a child), but must be considered to produce a dummy value if it
165 // has a type but no child
167 if (tree->TypeGet() != TYP_VOID && tree->gtOp.gtOp1 == nullptr)
169 assert(info->dstCount == 1);
173 assert(info->dstCount == 0);
179 assert(info->dstCount == 0);
184 assert(info->dstCount == 0);
188 // This should never occur since switch nodes must not be visible at this
191 noway_assert(!"Switch must be lowered at this point");
196 assert(info->dstCount == 1);
199 case GT_SWITCH_TABLE:
// NOTE(review): the internal register presumably holds the jump-table
// address — confirm against the codegen for GT_SWITCH_TABLE.
200 info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
201 info->internalIntCount = 1;
202 assert(info->dstCount == 0);
206 noway_assert(!"We should never hit any assignment operator in lowering");
212 if (varTypeIsFloating(tree->TypeGet()))
214 // overflow operations aren't supported on float/double types.
215 assert(!tree->gtOverflow());
217 // No implicit conversions at this stage as the expectation is that
218 // everything is made explicit by adding casts.
219 assert(tree->gtOp.gtOp1->TypeGet() == tree->gtOp.gtOp2->TypeGet());
227 info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
228 assert(info->dstCount == 1);
232 // this just turns into a compare of its child with an int
233 // + a conditional call
234 appendLocationInfoToList(tree->gtGetOp1());
236 assert(info->dstCount == 0);
241 NYI_IF(varTypeIsFloating(tree->TypeGet()), "FP Remainder in ARM64");
242 assert(!"Shouldn't see an integer typed GT_MOD node in ARM64");
246 if (tree->gtOverflow())
248 // Need a register different from target reg to check for overflow.
249 info->internalIntCount = 1;
250 info->isInternalRegDelayFree = true;
258 info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
259 assert(info->dstCount == 1);
265 noway_assert((tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Abs) ||
266 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Ceiling) ||
267 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Floor) ||
268 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Round) ||
269 (tree->gtIntrinsic.gtIntrinsicId == CORINFO_INTRINSIC_Sqrt));
271 // Both operand and its result must be of the same floating point type.
272 op1 = tree->gtOp.gtOp1;
273 assert(varTypeIsFloating(op1));
274 assert(op1->TypeGet() == tree->TypeGet());
276 appendLocationInfoToList(op1);
278 assert(info->dstCount == 1);
284 TreeNodeInfoInitSIMD(tree->AsSIMD(), info);
286 #endif // FEATURE_SIMD
288 #ifdef FEATURE_HW_INTRINSICS
290 TreeNodeInfoInitHWIntrinsic(tree->AsHWIntrinsic(), info);
292 #endif // FEATURE_HW_INTRINSICS
296 // TODO-ARM64-CQ: Int-To-Int conversions - castOp cannot be a memory op and must have an assigned
298 // see CodeGen::genIntToIntCast()
300 appendLocationInfoToList(tree->gtGetOp1());
302 assert(info->dstCount == 1);
304 // Non-overflow casts to/from float/double are done using SSE2 instructions
305 // and that allow the source operand to be either a reg or memop. Given the
306 // fact that casts from small int to float/double are done as two-level casts,
307 // the source operand is always guaranteed to be of size 4 or 8 bytes.
// NOTE(review): the comment above appears to be copied from the xarch
// version of this code; ARM64 does not use SSE2 — confirm and reword.
308 var_types castToType = tree->CastToType();
309 GenTree* castOp = tree->gtCast.CastOp();
310 var_types castOpType = castOp->TypeGet();
311 if (tree->gtFlags & GTF_UNSIGNED)
313 castOpType = genUnsignedType(castOpType);
316 // Some overflow checks need a temp reg
318 Lowering::CastInfo castInfo;
319 // Get information about the cast.
320 Lowering::getCastDescription(tree, &castInfo);
322 if (castInfo.requiresOverflowCheck)
324 var_types srcType = castOp->TypeGet();
325 emitAttr cmpSize = EA_ATTR(genTypeSize(srcType));
327 // If we cannot store the comparisons in an immediate for either
328 // comparing against the max or min value, then we will need to
329 // reserve a temporary register.
331 bool canStoreMaxValue = emitter::emitIns_valid_imm_for_cmp(castInfo.typeMax, cmpSize);
332 bool canStoreMinValue = emitter::emitIns_valid_imm_for_cmp(castInfo.typeMin, cmpSize);
334 if (!canStoreMaxValue || !canStoreMinValue)
336 info->internalIntCount = 1;
344 appendLocationInfoToList(tree->gtGetOp1());
346 assert(info->dstCount == 1);
353 TreeNodeInfoInitShiftRotate(tree, info);
365 TreeNodeInfoInitCmp(tree, info);
369 appendLocationInfoToList(tree->gtOp.gtOp1);
371 assert(info->dstCount == 1);
372 info->internalIntCount = 1;
377 GenTreeCmpXchg* cmpXchgNode = tree->AsCmpXchg();
// The comparand may be contained (encodable in the compare), in which
// case it does not occupy a source register.
378 info->srcCount = cmpXchgNode->gtOpComparand->isContained() ? 2 : 3;
379 assert(info->dstCount == 1);
381 info->internalIntCount = 1;
383 // For ARMv8 exclusives the lifetime of the addr and data must be extended because
384 // it may be used multiple times during retries
385 LocationInfoListNode* locationInfo = getLocationInfo(tree->gtCmpXchg.gtOpLocation);
386 locationInfo->info.isDelayFree = true;
387 useList.Append(locationInfo);
388 LocationInfoListNode* valueInfo = getLocationInfo(tree->gtCmpXchg.gtOpValue);
389 valueInfo->info.isDelayFree = true;
390 useList.Append(valueInfo);
391 if (!cmpXchgNode->gtOpComparand->isContained())
393 LocationInfoListNode* comparandInfo = getLocationInfo(tree->gtCmpXchg.gtOpComparand);
394 comparandInfo->info.isDelayFree = true;
395 useList.Append(comparandInfo);
397 info->hasDelayFreeSrc = true;
399 // Internals may not collide with target
400 info->isInternalRegDelayFree = true;
408 assert(info->dstCount == (tree->TypeGet() == TYP_VOID) ? 0 : 1);
409 info->srcCount = tree->gtOp.gtOp2->isContained() ? 1 : 2;
410 info->internalIntCount = (tree->OperGet() == GT_XCHG) ? 1 : 2;
412 // For ARMv8 exclusives the lifetime of the addr and data must be extended because
413 // it may be used multiple times during retries
414 assert(!tree->gtOp.gtOp1->isContained());
415 LocationInfoListNode* op1Info = getLocationInfo(tree->gtOp.gtOp1);
416 op1Info->info.isDelayFree = true;
417 useList.Append(op1Info);
418 if (!tree->gtOp.gtOp2->isContained())
420 LocationInfoListNode* op2Info = getLocationInfo(tree->gtOp.gtOp2);
421 op2Info->info.isDelayFree = true;
422 useList.Append(op2Info);
424 info->hasDelayFreeSrc = true;
426 // Internals may not collide with target
427 info->isInternalRegDelayFree = true;
432 TreeNodeInfoInitPutArgStk(tree->AsPutArgStk(), info);
436 TreeNodeInfoInitPutArgReg(tree->AsUnOp(), info);
440 TreeNodeInfoInitCall(tree->AsCall(), info);
445 // For a GT_ADDR, the child node should not be evaluated into a register
446 GenTree* child = tree->gtOp.gtOp1;
447 assert(!isCandidateLocalRef(child));
448 assert(child->isContained());
449 assert(info->dstCount == 1);
456 // These should all be eliminated prior to Lowering.
457 assert(!"Non-store block node in Lowering");
463 case GT_STORE_DYN_BLK:
464 TreeNodeInfoInitBlockStore(tree->AsBlk(), info);
468 // Always a passthrough of its child's value.
469 assert(!"INIT_VAL should always be contained");
474 assert(info->dstCount == 1);
476 // Need a variable number of temp regs (see genLclHeap() in codegenamd64.cpp):
477 // Here '-' means don't care.
479 // Size? Init Memory? # temp regs
481 // const and <=6 ptr words - 0
482 // const and <PageSize No 0
483 // >6 ptr words Yes hasPspSym ? 1 : 0
484 // Non-const Yes hasPspSym ? 1 : 0
487 // PSPSym - If the method has PSPSym increment internalIntCount by 1.
490 #if FEATURE_EH_FUNCLETS
491 hasPspSym = (compiler->lvaPSPSym != BAD_VAR_NUM);
496 GenTree* size = tree->gtOp.gtOp1;
497 if (size->IsCnsIntOrI())
499 assert(size->isContained());
502 size_t sizeVal = size->gtIntCon.gtIconVal;
506 info->internalIntCount = 0;
510 // Compute the amount of memory to properly STACK_ALIGN.
511 // Note: The Gentree node is not updated here as it is cheap to recompute stack aligned size.
512 // This should also help in debugging as we can examine the original size specified with
514 sizeVal = AlignUp(sizeVal, STACK_ALIGN);
515 size_t cntStackAlignedWidthItems = (sizeVal >> STACK_ALIGN_SHIFT);
517 // For small allocations up to 4 'stp' instructions (i.e. 64 bytes of localloc)
519 if (cntStackAlignedWidthItems <= 4)
521 info->internalIntCount = 0;
523 else if (!compiler->info.compInitMem)
525 // No need to initialize allocated stack space.
526 if (sizeVal < compiler->eeGetPageSize())
528 info->internalIntCount = 0;
532 // We need two registers: regCnt and RegTmp
533 info->internalIntCount = 2;
538 // greater than 4 and need to zero initialize allocated stack space.
539 // If the method has PSPSym, we need an internal register to hold regCnt
540 // since targetReg allocated to GT_LCLHEAP node could be the same as one of
541 // the internal registers.
542 info->internalIntCount = hasPspSym ? 1 : 0;
548 appendLocationInfoToList(size);
550 if (!compiler->info.compInitMem)
552 info->internalIntCount = 2;
556 // If the method has PSPSym, we need an internal register to hold regCnt
557 // since targetReg allocated to GT_LCLHEAP node could be the same as one of
558 // the internal registers.
559 info->internalIntCount = hasPspSym ? 1 : 0;
563 // If the method has PSPSym, we would need an additional register to relocate it on stack.
566 // Exclude const size 0
567 if (!size->IsCnsIntOrI() || (size->gtIntCon.gtIconVal > 0))
568 info->internalIntCount++;
573 case GT_ARR_BOUNDS_CHECK:
576 #endif // FEATURE_SIMD
578 GenTreeBoundsChk* node = tree->AsBoundsChk();
579 // Consumes arrLen & index - has no result
580 assert(info->dstCount == 0);
582 GenTree* intCns = nullptr;
583 GenTree* other = nullptr;
584 info->srcCount = GetOperandInfo(tree->AsBoundsChk()->gtIndex);
585 info->srcCount += GetOperandInfo(tree->AsBoundsChk()->gtArrLen);
590 // These must have been lowered to GT_ARR_INDEX
591 noway_assert(!"We should never see a GT_ARR_ELEM in lowering");
593 assert(info->dstCount == 0);
599 assert(info->dstCount == 1);
600 info->internalIntCount = 1;
601 info->isInternalRegDelayFree = true;
603 // For GT_ARR_INDEX, the lifetime of the arrObj must be extended because it is actually used multiple
604 // times while the result is being computed.
605 LocationInfoListNode* arrObjInfo = getLocationInfo(tree->AsArrIndex()->ArrObj());
606 arrObjInfo->info.isDelayFree = true;
607 useList.Append(arrObjInfo);
608 useList.Append(getLocationInfo(tree->AsArrIndex()->IndexExpr()));
609 info->hasDelayFreeSrc = true;
614 // This consumes the offset, if any, the arrObj and the effective index,
615 // and produces the flattened offset for this dimension.
617 if (!tree->gtArrOffs.gtOffset->isContained())
619 appendLocationInfoToList(tree->AsArrOffs()->gtOffset)
622 appendLocationInfoToList(tree->AsArrOffs()->gtIndex);
623 appendLocationInfoToList(tree->AsArrOffs()->gtArrObj);
624 assert(info->dstCount == 1);
625 info->internalIntCount = 1;
630 GenTreeAddrMode* lea = tree->AsAddrMode();
632 GenTree* base = lea->Base();
633 GenTree* index = lea->Index();
634 int cns = lea->Offset();
636 // This LEA is instantiating an address, so we set up the srcCount here.
641 appendLocationInfoToList(base);
643 if (index != nullptr)
646 appendLocationInfoToList(index);
648 assert(info->dstCount == 1);
650 // On ARM64 we may need a single internal register
651 // (when both conditions are true then we still only need a single internal register)
652 if ((index != nullptr) && (cns != 0))
654 // ARM64 does not support both Index and offset so we need an internal register
655 info->internalIntCount = 1;
657 else if (!emitter::emitIns_valid_imm_for_add(cns, EA_8BYTE))
659 // This offset can't be contained in the add instruction, so we need an internal register
660 info->internalIntCount = 1;
667 assert(info->dstCount == 0);
// Stores that require a GC write barrier get special register
// requirements from the write-barrier helper.
669 if (compiler->codeGen->gcInfo.gcIsWriteBarrierAsgNode(tree))
672 TreeNodeInfoInitGCWriteBarrier(tree, info);
676 TreeNodeInfoInitIndir(tree->AsIndir(), info);
677 if (!tree->gtGetOp2()->isContained())
679 appendLocationInfoToList(tree->gtGetOp2());
686 // Unlike ARM, ARM64 implements NULLCHECK as a load to REG_ZR, so no internal register
687 // is required, and it is not a localDefUse.
688 assert(info->dstCount == 0);
689 assert(!tree->gtGetOp1()->isContained());
690 appendLocationInfoToList(tree->gtOp.gtOp1);
695 assert(info->dstCount == 1);
696 TreeNodeInfoInitIndir(tree->AsIndir(), info);
701 assert(info->dstCount == 1);
// The exception object arrives in a fixed, ABI-defined register.
702 info->setDstCandidates(this, RBM_EXCEPTION_OBJECT);
707 // GT_CLS_VAR, by the time we reach the backend, must always
709 // It will produce a result of the type of the
710 // node, and use an internal register for the address.
712 assert(info->dstCount == 1);
713 assert((tree->gtFlags & (GTF_VAR_DEF | GTF_VAR_USEASG)) == 0);
714 info->internalIntCount = 1;
718 assert(info->dstCount == 1);
719 info->srcCount = appendBinaryLocationInfoToList(tree->AsOp());
720 info->internalIntCount = 1;
722 } // end switch (tree->OperGet())
// A def whose value is never used is marked localDefUse so its register
// is released immediately after the def.
724 if (tree->IsUnusedValue() && (info->dstCount != 0))
726 info->isLocalDefUse = true;
728 // We need to be sure that we've set info->srcCount and info->dstCount appropriately
729 assert((info->dstCount < 2) || tree->IsMultiRegCall());
730 assert(info->isLocalDefUse == (tree->IsValue() && tree->IsUnusedValue()));
731 assert(!tree->IsUnusedValue() || (info->dstCount != 0));
732 assert(info->dstCount == tree->GetRegisterDstCount());
735 //------------------------------------------------------------------------
736 // TreeNodeInfoInitReturn: Set the NodeInfo for a GT_RETURN.
739 // tree - The node of interest
744 void LinearScan::TreeNodeInfoInitReturn(GenTree* tree, TreeNodeInfo* info)
746 GenTree* op1 = tree->gtGetOp1();
747 regMaskTP useCandidates = RBM_NONE;
// A void return, or one whose operand is contained, consumes no source register.
749 info->srcCount = ((tree->TypeGet() == TYP_VOID) || op1->isContained()) ? 0 : 1;
750 assert(info->dstCount == 0);
752 if ((tree->TypeGet() != TYP_VOID) && !op1->isContained())
754 if (varTypeIsStruct(tree))
756 // op1 has to be either an lclvar or a multi-reg returning call
757 if (op1->OperGet() != GT_LCL_VAR)
759 noway_assert(op1->IsMultiRegCall());
// A multi-reg call returns in the ABI return registers: one source
// use per return register, each constrained to those registers.
761 ReturnTypeDesc* retTypeDesc = op1->AsCall()->GetReturnTypeDesc();
762 info->srcCount = retTypeDesc->GetReturnRegCount();
763 useCandidates = retTypeDesc->GetABIReturnRegs();
768 // Non-struct type return - determine useCandidates
769 switch (tree->TypeGet())
772 useCandidates = RBM_NONE;
775 useCandidates = RBM_FLOATRET;
778 useCandidates = RBM_DOUBLERET;
781 useCandidates = RBM_LNGRET;
784 useCandidates = RBM_INTRET;
// Record the use, constraining it to the ABI return register(s) if any.
789 LocationInfoListNode* locationInfo = getLocationInfo(op1);
790 if (useCandidates != RBM_NONE)
792 locationInfo->info.setSrcCandidates(this, useCandidates);
794 useList.Append(locationInfo);
799 //------------------------------------------------------------------------
800 // TreeNodeInfoInitSIMD: Set the NodeInfo for a GT_SIMD tree.
803 // tree - The GT_SIMD node of interest
808 void LinearScan::TreeNodeInfoInitSIMD(GenTreeSIMD* simdTree, TreeNodeInfo* info)
810 // Only SIMDIntrinsicInit can be contained
811 if (simdTree->isContained())
813 assert(simdTree->gtSIMDIntrinsicID == SIMDIntrinsicInit);
815 assert(info->dstCount == 1);
// Accumulate source register uses from the (up to two) operands.
// A GT_LIST op1 is not counted here; it is walked per-intrinsic below.
817 GenTree* op1 = simdTree->gtOp.gtOp1;
818 GenTree* op2 = simdTree->gtOp.gtOp2;
819 if (!op1->OperIs(GT_LIST))
821 info->srcCount += GetOperandInfo(op1);
823 if ((op2 != nullptr) && !op2->isContained())
825 info->srcCount += GetOperandInfo(op2);
828 switch (simdTree->gtSIMDIntrinsicID)
830 case SIMDIntrinsicInit:
831 assert(info->srcCount == (simdTree->gtGetOp1()->isContained() ? 0 : 1));
// Unary SIMD operations: a single source and no internal registers.
834 case SIMDIntrinsicCast:
835 case SIMDIntrinsicSqrt:
836 case SIMDIntrinsicAbs:
837 case SIMDIntrinsicConvertToSingle:
838 case SIMDIntrinsicConvertToInt32:
839 case SIMDIntrinsicConvertToDouble:
840 case SIMDIntrinsicConvertToInt64:
841 case SIMDIntrinsicWidenLo:
842 case SIMDIntrinsicWidenHi:
843 assert(info->srcCount == 1);
846 case SIMDIntrinsicGetItem:
848 op1 = simdTree->gtGetOp1();
849 op2 = simdTree->gtGetOp2();
851 // We have an object and an index, either of which may be contained.
852 if (!op2->IsCnsIntOrI() && (!op1->isContained() || op1->OperIsLocal()))
854 // If the index is not a constant and not contained or is a local
855 // we will need a general purpose register to calculate the address
856 info->internalIntCount = 1;
858 // internal register must not clobber input index
859 LocationInfoListNode* op2Info =
860 (op1->isContained()) ? useList.Begin() : useList.GetSecond(INDEBUG(op2));
861 op2Info->info.isDelayFree = true;
862 info->hasDelayFreeSrc = true;
865 if (!op2->IsCnsIntOrI() && (!op1->isContained()))
867 // If vector is not already in memory (contained) and the index is not a constant,
868 // we will use the SIMD temp location to store the vector.
869 compiler->getSIMDInitTempVarNum();
// Binary SIMD operations: two sources and no internal registers.
874 case SIMDIntrinsicAdd:
875 case SIMDIntrinsicSub:
876 case SIMDIntrinsicMul:
877 case SIMDIntrinsicDiv:
878 case SIMDIntrinsicBitwiseAnd:
879 case SIMDIntrinsicBitwiseAndNot:
880 case SIMDIntrinsicBitwiseOr:
881 case SIMDIntrinsicBitwiseXor:
882 case SIMDIntrinsicMin:
883 case SIMDIntrinsicMax:
884 case SIMDIntrinsicEqual:
885 case SIMDIntrinsicLessThan:
886 case SIMDIntrinsicGreaterThan:
887 case SIMDIntrinsicLessThanOrEqual:
888 case SIMDIntrinsicGreaterThanOrEqual:
889 assert(info->srcCount == 2);
892 case SIMDIntrinsicSetX:
893 case SIMDIntrinsicSetY:
894 case SIMDIntrinsicSetZ:
895 case SIMDIntrinsicSetW:
896 case SIMDIntrinsicNarrow:
897 assert(info->srcCount == 2);
899 // Op1 will write to dst before Op2 is free
900 useList.GetSecond(INDEBUG(simdTree->gtGetOp2()))->info.isDelayFree = true;
901 info->hasDelayFreeSrc = true;
904 case SIMDIntrinsicInitN:
// One source per element: total vector size divided by element size.
906 var_types baseType = simdTree->gtSIMDBaseType;
907 info->srcCount = (short)(simdTree->gtSIMDSize / genTypeSize(baseType));
// Walk the GT_LIST of initializer values, registering a use for each.
909 for (GenTree* list = op1; list != nullptr; list = list->gtGetOp2())
911 assert(list->OperGet() == GT_LIST);
912 GenTree* listItem = list->gtGetOp1();
913 assert(listItem->TypeGet() == baseType);
914 assert(!listItem->isContained());
915 appendLocationInfoToList(listItem);
918 assert(initCount == info->srcCount);
920 if (varTypeIsFloating(simdTree->gtSIMDBaseType))
922 // Need an internal register to stitch together all the values into a single vector in a SIMD reg.
923 info->setInternalCandidates(this, RBM_ALLFLOAT);
924 info->internalFloatCount = 1;
929 case SIMDIntrinsicInitArray:
930 // We have an array and an index, which may be contained.
931 assert(info->srcCount == (simdTree->gtGetOp2()->isContained() ? 1 : 2));
// These comparisons reserve a float internal register.
934 case SIMDIntrinsicOpEquality:
935 case SIMDIntrinsicOpInEquality:
936 assert(info->srcCount == (simdTree->gtGetOp2()->isContained() ? 1 : 2));
937 info->setInternalCandidates(this, RBM_ALLFLOAT);
938 info->internalFloatCount = 1;
941 case SIMDIntrinsicDotProduct:
942 assert(info->srcCount == 2);
943 info->setInternalCandidates(this, RBM_ALLFLOAT);
944 info->internalFloatCount = 1;
947 case SIMDIntrinsicSelect:
948 // TODO-ARM64-CQ Allow lowering to see SIMDIntrinsicSelect so we can generate BSL VC, VA, VB
949 // bsl target register must be VC. Reserve a temp in case we need to shuffle things.
950 // This will require a different approach, as GenTreeSIMD has only two operands.
951 assert(!"SIMDIntrinsicSelect not yet supported");
952 assert(info->srcCount == 3);
953 info->setInternalCandidates(this, RBM_ALLFLOAT);
954 info->internalFloatCount = 1;
// Per the assert below, none of these forms should survive to register
// allocation; reaching here indicates a bug in an earlier phase.
957 case SIMDIntrinsicInitArrayX:
958 case SIMDIntrinsicInitFixed:
959 case SIMDIntrinsicCopyToArray:
960 case SIMDIntrinsicCopyToArrayX:
961 case SIMDIntrinsicNone:
962 case SIMDIntrinsicGetCount:
963 case SIMDIntrinsicGetOne:
964 case SIMDIntrinsicGetZero:
965 case SIMDIntrinsicGetAllOnes:
966 case SIMDIntrinsicGetX:
967 case SIMDIntrinsicGetY:
968 case SIMDIntrinsicGetZ:
969 case SIMDIntrinsicGetW:
970 case SIMDIntrinsicInstEquals:
971 case SIMDIntrinsicHWAccel:
972 case SIMDIntrinsicWiden:
973 case SIMDIntrinsicInvalid:
974 assert(!"These intrinsics should not be seen during register allocation");
978 noway_assert(!"Unimplemented SIMD node type.");
984 #ifdef FEATURE_HW_INTRINSICS
985 //------------------------------------------------------------------------
986 // TreeNodeInfoInitHWIntrinsic: Set the NodeInfo for a GT_HWIntrinsic tree.
989 // tree - The GT_HWIntrinsic node of interest
994 void LinearScan::TreeNodeInfoInitHWIntrinsic(GenTreeHWIntrinsic* intrinsicTree, TreeNodeInfo* info)
996 NamedIntrinsic intrinsicID = intrinsicTree->gtHWIntrinsicId;
// Register a source use for op1 and, when present, op2; srcCount
// accumulates the operand uses.
997 info->srcCount += GetOperandInfo(intrinsicTree->gtOp.gtOp1);
998 if (intrinsicTree->gtGetOp2IfPresent() != nullptr)
1000 info->srcCount += GetOperandInfo(intrinsicTree->gtOp.gtOp2);
1005 #endif // _TARGET_ARM64_
1007 #endif // !LEGACY_BACKEND