1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
5 /*XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
6 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
8 XX ARM Code Generator XX
10 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
11 XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
18 #ifndef LEGACY_BACKEND // This file is ONLY used for the RyuJIT backend that uses the linear scan register allocator
26 //------------------------------------------------------------------------
27 // genCallFinally: Generate a call to the finally block.
// NOTE(review): this listing appears to have lines elided (the embedded
// source line numbers are non-contiguous); comments below describe only the
// code that is visible here. The function's return statement and closing
// brace are among the elided lines.
//
// Arguments:
//    block - the BBJ_CALLFINALLY block whose bbJumpDest is the finally.
//
// Return Value:
//    A BasicBlock* (per the signature); the computation of the returned
//    block is not visible in this listing.
29 BasicBlock* CodeGen::genCallFinally(BasicBlock* block)
31 BasicBlock* bbFinallyRet = nullptr;
33 // We don't have retless calls, since we use the BBJ_ALWAYS to point at a NOP pad where
34 // we would have otherwise created retless calls.
35 assert(block->isBBCallAlwaysPair());
// The BBJ_CALLFINALLY must be followed by a BBJ_ALWAYS whose target is the
// continuation point after the finally; that target must be marked as a
// finally return target.
37 assert(block->bbNext != NULL);
38 assert(block->bbNext->bbJumpKind == BBJ_ALWAYS);
39 assert(block->bbNext->bbJumpDest != NULL);
40 assert(block->bbNext->bbJumpDest->bbFlags & BBF_FINALLY_TARGET);
42 bbFinallyRet = block->bbNext->bbJumpDest;
43 bbFinallyRet->bbFlags |= BBF_JMP_TARGET;
45 // Load the address where the finally funclet should return into LR.
46 // The funclet prolog/epilog will do "push {lr}" / "pop {pc}" to do the return.
// movw/movt pair materializes the 32-bit label address into LR with a
// displacement relocation.
47 getEmitter()->emitIns_R_L(INS_movw, EA_4BYTE_DSP_RELOC, bbFinallyRet, REG_LR);
48 getEmitter()->emitIns_R_L(INS_movt, EA_4BYTE_DSP_RELOC, bbFinallyRet, REG_LR);
50 // Jump to the finally BB
51 inst_JMP(EJ_jmp, block->bbJumpDest);
53 // The BBJ_ALWAYS is used because the BBJ_CALLFINALLY can't point to the
54 // jump target using bbJumpDest - that is already used to point
55 // to the finally block. So just skip past the BBJ_ALWAYS unless the
// On this path the pair must not be a retless call (asserted above as well).
57 assert(!(block->bbFlags & BBF_RETLESS_CALL));
58 assert(block->isBBCallAlwaysPair());
62 //------------------------------------------------------------------------
// genEHCatchRet: Generate code for a catch return (GT_RETFILT/catch exit
// funclet mechanism). Materializes the address of block->bbJumpDest (the
// continuation block) into the integer return register with a movw/movt
// pair so the EH machinery can resume there.
// NOTE(review): surrounding lines (opening/closing braces, any additional
// statements) appear to have been elided from this listing.
64 void CodeGen::genEHCatchRet(BasicBlock* block)
66 getEmitter()->emitIns_R_L(INS_movw, EA_4BYTE_DSP_RELOC, block->bbJumpDest, REG_INTRET);
67 getEmitter()->emitIns_R_L(INS_movt, EA_4BYTE_DSP_RELOC, block->bbJumpDest, REG_INTRET);
70 //------------------------------------------------------------------------
71 // instGen_Set_Reg_To_Imm: Move an immediate value into an integer register.
//
// Arguments:
//    size  - emit attribute; may carry reloc flags (stripped when not
//            generating relocatable code).
//    reg   - destination integer register (FP registers are asserted out).
//    imm   - the immediate value to materialize.
//    flags - whether the instruction should set condition flags.
//
// NOTE(review): the braces/else structure between the cases below has been
// elided from this listing; the grouping described in comments is inferred
// and should be confirmed against the full source.
73 void CodeGen::instGen_Set_Reg_To_Imm(emitAttr size, regNumber reg, ssize_t imm, insFlags flags)
75 // reg cannot be a FP register
76 assert(!genIsValidFloatReg(reg));
78 if (!compiler->opts.compReloc)
80 size = EA_SIZE(size); // Strip any Reloc flags from size if we aren't doing relocs
// Relocatable immediates always use a full movw/movt pair so the linker can
// patch both halves of the 32-bit value.
83 if (EA_IS_RELOC(size))
85 getEmitter()->emitIns_R_I(INS_movw, size, reg, imm);
86 getEmitter()->emitIns_R_I(INS_movt, size, reg, imm);
// Zero gets its own dedicated helper (presumably on an imm == 0 path whose
// condition was elided from this listing).
90 instGen_Set_Reg_To_Zero(size, reg, flags);
// If the immediate fits an ARM mov encoding, one instruction suffices.
94 if (arm_Valid_Imm_For_Mov(imm))
96 getEmitter()->emitIns_R_I(INS_mov, size, reg, imm, flags);
98 else // We have to use a movw/movt pair of instructions
// Split the 32-bit value into low and high halfwords.
100 ssize_t imm_lo16 = (imm & 0xffff);
101 ssize_t imm_hi16 = (imm >> 16) & 0xffff;
103 assert(arm_Valid_Imm_For_Mov(imm_lo16));
// hi16 must be nonzero, otherwise the single-mov path above would have fired.
104 assert(imm_hi16 != 0);
106 getEmitter()->emitIns_R_I(INS_movw, size, reg, imm_lo16);
108 // If we've got a low register, the high word is all bits set,
109 // and the high bit of the low word is set, we can sign extend
110 // halfword and save two bytes of encoding. This can happen for
111 // small magnitude negative numbers 'n' for -32768 <= n <= -1.
113 if (getEmitter()->isLowRegister(reg) && (imm_hi16 == 0xffff) && ((imm_lo16 & 0x8000) == 0x8000))
115 getEmitter()->emitIns_R_R(INS_sxth, EA_2BYTE, reg, reg);
// Otherwise emit the movt to set the high halfword.
119 getEmitter()->emitIns_R_I(INS_movt, size, reg, imm_hi16);
// movw/movt don't set flags; if the caller asked for flags, emit an extra
// flag-setting mov of the register onto itself.
122 if (flags == INS_FLAGS_SET)
123 getEmitter()->emitIns_R_R(INS_mov, size, reg, reg, INS_FLAGS_SET);
// Record that 'reg' now holds the constant 'imm' for the register tracker.
127 regTracker.rsTrackRegIntCns(reg, imm);
130 //------------------------------------------------------------------------
131 // genSetRegToConst: Generate code to set a register 'targetReg' of type 'targetType'
132 // to the constant specified by the constant (GT_CNS_INT or GT_CNS_DBL) in 'tree'.
// NOTE(review): the switch's case labels and braces have been elided from
// this listing; the two visible regions correspond to the integer-constant
// and double-constant handling respectively.
135 // This does not call genProduceReg() on the target register.
137 void CodeGen::genSetRegToConst(regNumber targetReg, var_types targetType, GenTreePtr tree)
139 switch (tree->gtOper)
// --- integer constant path (GT_CNS_INT / handle) ---
143 // relocatable values tend to come down as a CNS_INT of native int type
144 // so the line between these two opcodes is kind of blurry
145 GenTreeIntConCommon* con = tree->AsIntConCommon();
146 ssize_t cnsVal = con->IconValue();
// Handles need a reloc when generating relocatable code.
148 bool needReloc = compiler->opts.compReloc && tree->IsIconHandle();
151 instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, targetReg, cnsVal);
152 regTracker.rsTrackRegTrash(targetReg);
// Non-reloc constants go through the plain icon helper (condition/braces
// elided in this listing).
156 genSetRegToIcon(targetReg, cnsVal, targetType);
// --- double constant path (GT_CNS_DBL) ---
163 GenTreeDblCon* dblConst = tree->AsDblCon();
164 double constValue = dblConst->gtDblCon.gtDconVal;
165 // TODO-ARM-CQ: Do we have a faster/smaller way to generate 0.0 in thumb2 ISA ?
166 if (targetType == TYP_FLOAT)
168 // Get a temp integer register
169 regNumber tmpReg = tree->GetSingleTempReg();
// Materialize the float bits in an integer register, then move int->float.
171 float f = forceCastToFloat(constValue);
172 genSetRegToIcon(tmpReg, *((int*)(&f)));
173 getEmitter()->emitIns_R_R(INS_vmov_i2f, EA_4BYTE, targetReg, tmpReg);
177 assert(targetType == TYP_DOUBLE);
// Reinterpret the double as two 32-bit words (ARM32 has no 64-bit GPR).
179 unsigned* cv = (unsigned*)&constValue;
181 // Get two temp integer registers
182 regNumber tmpReg1 = tree->ExtractTempReg();
183 regNumber tmpReg2 = tree->GetSingleTempReg();
185 genSetRegToIcon(tmpReg1, cv[0]);
186 genSetRegToIcon(tmpReg2, cv[1]);
// vmov from the int register pair into the double FP register.
188 getEmitter()->emitIns_R_R_R(INS_vmov_i2d, EA_8BYTE, targetReg, tmpReg1, tmpReg2);
198 //------------------------------------------------------------------------
199 // genCodeForBinary: Generate code for many binary arithmetic operators
200 // This method is expected to have called genConsumeOperands() before calling it.
//
// Arguments:
203 // treeNode - The binary operation for which we are generating code.
//
// Notes:
209 // Mul and div are not handled here.
210 // See the assert below for the operators that are handled.
// NOTE(review): braces and some lines (e.g. the trailing argument on the
// emitIns_R_R_R call) have been elided from this listing.
212 void CodeGen::genCodeForBinary(GenTree* treeNode)
214 const genTreeOps oper = treeNode->OperGet();
215 regNumber targetReg = treeNode->gtRegNum;
216 var_types targetType = treeNode->TypeGet();
217 emitter* emit = getEmitter();
// Only these simple two-operand integer ops are handled here.
219 assert(oper == GT_ADD || oper == GT_SUB || oper == GT_ADD_LO || oper == GT_ADD_HI || oper == GT_SUB_LO ||
220 oper == GT_SUB_HI || oper == GT_OR || oper == GT_XOR || oper == GT_AND);
222 if ((oper == GT_ADD || oper == GT_SUB || oper == GT_ADD_HI || oper == GT_SUB_HI) && treeNode->gtOverflow())
224 // This is also checked in the importer.
225 NYI("Overflow not yet implemented");
228 GenTreePtr op1 = treeNode->gtGetOp1();
229 GenTreePtr op2 = treeNode->gtGetOp2();
231 instruction ins = genGetInsForOper(oper, targetType);
233 // The arithmetic node must be sitting in a register (since it's not contained)
234 noway_assert(targetReg != REG_NA);
// GT_ADD_LO/GT_SUB_LO (the low half of a decomposed long op) take the
// direct three-register emit path; other opers go through emitInsTernary.
236 if ((oper == GT_ADD_LO || oper == GT_SUB_LO))
238 // During decomposition, all operands become reg
239 assert(!op1->isContained() && !op2->isContained());
240 emit->emitIns_R_R_R(ins, emitTypeSize(treeNode), treeNode->gtRegNum, op1->gtRegNum, op2->gtRegNum,
245 regNumber r = emit->emitInsTernary(ins, emitTypeSize(treeNode), treeNode, op1, op2);
246 assert(r == targetReg);
249 genProduceReg(treeNode);
252 //------------------------------------------------------------------------
253 // genReturn: Generates code for return statement.
254 // In case of struct return, delegates to the genStructReturn method.
//
// Arguments:
257 // treeNode - The GT_RETURN or GT_RETFILT tree node.
// NOTE(review): braces and early-return statements have been elided from
// this listing; the grouping described below is inferred.
262 void CodeGen::genReturn(GenTreePtr treeNode)
264 assert(treeNode->OperGet() == GT_RETURN || treeNode->OperGet() == GT_RETFILT);
265 GenTreePtr op1 = treeNode->gtGetOp1();
266 var_types targetType = treeNode->TypeGet();
// A void return carries no operand.
269 if (targetType == TYP_VOID)
271 assert(op1 == nullptr);
// TYP_LONG returns on ARM32 come as a GT_LONG pair; the halves must land in
// the REG_LNGRET_LO/REG_LNGRET_HI register pair.
275 if (treeNode->TypeGet() == TYP_LONG)
277 assert(op1 != nullptr);
278 noway_assert(op1->OperGet() == GT_LONG);
279 GenTree* loRetVal = op1->gtGetOp1();
280 GenTree* hiRetVal = op1->gtGetOp2();
281 noway_assert((loRetVal->gtRegNum != REG_NA) && (hiRetVal->gtRegNum != REG_NA));
283 genConsumeReg(loRetVal);
284 genConsumeReg(hiRetVal);
// Move each half into its ABI return register only if it isn't already there.
285 if (loRetVal->gtRegNum != REG_LNGRET_LO)
287 inst_RV_RV(ins_Copy(targetType), REG_LNGRET_LO, loRetVal->gtRegNum, TYP_INT);
289 if (hiRetVal->gtRegNum != REG_LNGRET_HI)
291 inst_RV_RV(ins_Copy(targetType), REG_LNGRET_HI, hiRetVal->gtRegNum, TYP_INT);
// Struct returns are not implemented for ARM in this code yet.
296 if (varTypeIsStruct(treeNode))
298 NYI_ARM("struct return");
300 else if (targetType != TYP_VOID)
302 assert(op1 != nullptr);
303 noway_assert(op1->gtRegNum != REG_NA);
305 // !! NOTE !! genConsumeReg will clear op1 as GC ref after it has
306 // consumed a reg for the operand. This is because the variable
307 // is dead after return. But we are issuing more instructions
308 // like "profiler leave callback" after this consumption. So
309 // if you are issuing more instructions after this point,
310 // remember to keep the variable live up until the new method
311 // exit point where it is actually dead.
// Pick the ABI return register by type and move the value there if needed.
314 regNumber retReg = varTypeIsFloating(treeNode) ? REG_FLOATRET : REG_INTRET;
315 if (op1->gtRegNum != retReg)
317 inst_RV_RV(ins_Move_Extend(targetType, true), retReg, op1->gtRegNum, targetType);
323 //------------------------------------------------------------------------
324 // genCodeForTreeNode Generate code for a single node in the tree.
//
// Preconditions:
327 // All operands have been evaluated.
//
// NOTE(review): this is the large per-node dispatch switch. Most of the
// 'case GT_...:' labels, braces, and 'break;' statements have been elided
// from this listing (the embedded line numbers are non-contiguous), so the
// section comments below identify each visible handler region but the exact
// set of opcodes each region covers should be confirmed against the full
// source.
329 void CodeGen::genCodeForTreeNode(GenTreePtr treeNode)
331 regNumber targetReg = treeNode->gtRegNum;
332 var_types targetType = treeNode->TypeGet();
333 emitter* emit = getEmitter();
// Reset per-node consumption tracking; optionally dump the node when verbose.
336 lastConsumedNode = nullptr;
337 if (compiler->verbose)
339 unsigned seqNum = treeNode->gtSeqNum; // Useful for setting a conditional break in Visual Studio
340 compiler->gtDispLIRNode(treeNode, "Generating: ");
344 // contained nodes are part of their parents for codegen purposes
345 // ex : immediates, most LEAs
346 if (treeNode->isContained())
351 switch (treeNode->gtOper)
// --- GT_LCLHEAP (localloc) ---
354 genLclHeap(treeNode);
// --- constant nodes (GT_CNS_INT / GT_CNS_DBL, presumably) ---
359 genSetRegToConst(targetReg, targetType, treeNode);
360 genProduceReg(treeNode);
// --- unary arithmetic (e.g. GT_NOT/GT_NEG; exact labels elided) ---
364 assert(!varTypeIsFloating(targetType));
370 instruction ins = genGetInsForOper(treeNode->OperGet(), targetType);
372 // The arithmetic node must be sitting in a register (since it's not contained)
373 assert(!treeNode->isContained());
374 // The dst can only be a register.
375 assert(targetReg != REG_NA);
377 GenTreePtr operand = treeNode->gtGetOp1();
378 assert(!operand->isContained());
379 // The src must be a register.
380 regNumber operandReg = genConsumeReg(operand);
// Two emit forms are visible (R_R and R_R_I with 0); the condition choosing
// between them has been elided from this listing.
384 getEmitter()->emitIns_R_R(ins, emitTypeSize(treeNode), targetReg, operandReg);
388 getEmitter()->emitIns_R_R_I(ins, emitTypeSize(treeNode), targetReg, operandReg, 0);
391 genProduceReg(treeNode);
// --- simple binary integer ops: delegate to genCodeForBinary ---
397 assert(varTypeIsIntegralOrI(treeNode));
406 genConsumeOperands(treeNode->AsOp());
407 genCodeForBinary(treeNode);
// --- multiply (or similar ternary-emitted op; label elided) ---
412 genConsumeOperands(treeNode->AsOp());
414 const genTreeOps oper = treeNode->OperGet();
415 if (treeNode->gtOverflow())
417 // This is also checked in the importer.
418 NYI("Overflow not yet implemented");
421 GenTreePtr op1 = treeNode->gtGetOp1();
422 GenTreePtr op2 = treeNode->gtGetOp2();
423 instruction ins = genGetInsForOper(treeNode->OperGet(), targetType);
425 // The arithmetic node must be sitting in a register (since it's not contained)
426 noway_assert(targetReg != REG_NA);
428 regNumber r = emit->emitInsTernary(ins, emitTypeSize(treeNode), treeNode, op1, op2);
429 assert(r == targetReg);
431 genProduceReg(treeNode);
// --- shifts ---
438 genCodeForShift(treeNode);
// --- long shifts (decomposed 64-bit shift helpers) ---
443 genCodeForShiftLong(treeNode);
// --- GT_CAST: dispatch on source/destination floating-ness ---
447 // Cast is never contained (?)
448 noway_assert(targetReg != REG_NA);
450 if (varTypeIsFloating(targetType) && varTypeIsFloating(treeNode->gtOp.gtOp1))
452 // Casts float/double <--> double/float
453 genFloatToFloatCast(treeNode);
455 else if (varTypeIsFloating(treeNode->gtOp.gtOp1))
457 // Casts float/double --> int32/int64
458 genFloatToIntCast(treeNode);
460 else if (varTypeIsFloating(targetType))
462 // Casts int32/uint32/int64/uint64 --> float/double
463 genIntToFloatCast(treeNode);
467 // Casts int <--> int
468 genIntToIntCast(treeNode);
470 // The per-case functions call genProduceReg()
// --- GT_LCL_VAR: load a local variable (use, not def) ---
475 GenTreeLclVarCommon* lcl = treeNode->AsLclVarCommon();
476 // lcl_vars are not defs
477 assert((treeNode->gtFlags & GTF_VAR_DEF) == 0);
479 bool isRegCandidate = compiler->lvaTable[lcl->gtLclNum].lvIsRegCandidate();
481 if (isRegCandidate && !(treeNode->gtFlags & GTF_VAR_DEATH))
483 assert((treeNode->InReg()) || (treeNode->gtFlags & GTF_SPILLED));
486 // If this is a register candidate that has been spilled, genConsumeReg() will
487 // reload it at the point of use. Otherwise, if it's not in a register, we load it here.
489 if (!treeNode->InReg() && !(treeNode->gtFlags & GTF_SPILLED))
491 assert(!isRegCandidate);
492 emit->emitIns_R_S(ins_Load(treeNode->TypeGet()), emitTypeSize(treeNode), treeNode->gtRegNum,
494 genProduceReg(treeNode);
// --- GT_LCL_FLD_ADDR / GT_LCL_VAR_ADDR: address of a local ---
499 case GT_LCL_FLD_ADDR:
500 case GT_LCL_VAR_ADDR:
502 // Address of a local var. This by itself should never be allocated a register.
503 // If it is worth storing the address in a register then it should be cse'ed into
504 // a temp and that would be allocated a register.
505 noway_assert(targetType == TYP_BYREF);
506 noway_assert(!treeNode->InReg());
508 inst_RV_TT(INS_lea, targetReg, treeNode, 0, EA_BYREF);
510 genProduceReg(treeNode);
// --- GT_LCL_FLD: load a field of a local at a given offset ---
515 NYI_IF(targetType == TYP_STRUCT, "GT_LCL_FLD: struct load local field not supported");
516 NYI_IF(treeNode->gtRegNum == REG_NA, "GT_LCL_FLD: load local field not into a register is not supported");
518 emitAttr size = emitTypeSize(targetType);
519 unsigned offs = treeNode->gtLclFld.gtLclOffs;
520 unsigned varNum = treeNode->gtLclVarCommon.gtLclNum;
521 assert(varNum < compiler->lvaCount);
523 emit->emitIns_R_S(ins_Move_Extend(targetType, treeNode->InReg()), size, targetReg, varNum, offs);
525 genProduceReg(treeNode);
// --- GT_STORE_LCL_FLD: store to a field of a stack-based local ---
528 case GT_STORE_LCL_FLD:
530 noway_assert(targetType != TYP_STRUCT);
533 unsigned offset = treeNode->gtLclFld.gtLclOffs;
535 // We must have a stack store with GT_STORE_LCL_FLD
536 noway_assert(!treeNode->InReg());
537 noway_assert(targetReg == REG_NA);
539 GenTreeLclVarCommon* varNode = treeNode->AsLclVarCommon();
540 unsigned varNum = varNode->gtLclNum;
541 assert(varNum < compiler->lvaCount);
542 LclVarDsc* varDsc = &(compiler->lvaTable[varNum]);
544 // Ensure that lclVar nodes are typed correctly.
545 assert(!varDsc->lvNormalizeOnStore() || targetType == genActualType(varDsc->TypeGet()));
547 GenTreePtr data = treeNode->gtOp.gtOp1->gtEffectiveVal();
548 instruction ins = ins_Store(targetType);
549 emitAttr attr = emitTypeSize(targetType);
// Contained integer source is only expected to be the constant 0, and even
// that path is not yet implemented for ARM.
550 if (data->isContainedIntOrIImmed())
552 assert(data->IsIntegralConst(0));
553 NYI_ARM("st.lclFld contained operand");
557 assert(!data->isContained());
559 emit->emitIns_S_R(ins, attr, data->gtRegNum, varNum, offset);
// Update liveness and mark the local as living on the stack now.
562 genUpdateLife(varNode);
563 varDsc->lvRegNum = REG_STK;
// --- GT_STORE_LCL_VAR: store to a local variable ---
567 case GT_STORE_LCL_VAR:
569 GenTreeLclVarCommon* varNode = treeNode->AsLclVarCommon();
571 unsigned varNum = varNode->gtLclNum;
572 assert(varNum < compiler->lvaCount);
573 LclVarDsc* varDsc = &(compiler->lvaTable[varNum]);
576 // Ensure that lclVar nodes are typed correctly.
577 assert(!varDsc->lvNormalizeOnStore() || targetType == genActualType(varDsc->TypeGet()));
579 GenTreePtr data = treeNode->gtOp.gtOp1->gtEffectiveVal();
581 // var = call, where call returns a multi-reg return value
582 // case is handled separately.
583 if (data->gtSkipReloadOrCopy()->IsMultiRegCall())
585 genMultiRegCallStoreToLocal(treeNode);
// TYP_LONG stores are handled by the long-decomposition helper.
590 if (treeNode->TypeGet() == TYP_LONG)
592 genStoreLongLclVar(treeNode);
596 genConsumeRegs(data);
598 regNumber dataReg = REG_NA;
599 if (data->isContainedIntOrIImmed())
601 assert(data->IsIntegralConst(0));
602 NYI_ARM("st.lclVar contained operand");
606 assert(!data->isContained());
607 dataReg = data->gtRegNum;
609 assert(dataReg != REG_NA);
611 if (targetReg == REG_NA) // store into stack based LclVar
613 inst_set_SV_var(varNode);
615 instruction ins = ins_Store(targetType);
616 emitAttr attr = emitTypeSize(targetType);
// NOTE(review): 'offset' is used here but its declaration is not visible in
// this listing (elided line) — confirm against the full source.
618 emit->emitIns_S_R(ins, attr, dataReg, varNum, offset);
620 genUpdateLife(varNode);
622 varDsc->lvRegNum = REG_STK;
624 else // store into register (i.e move into register)
626 if (dataReg != targetReg)
628 // Assign into targetReg when dataReg (from op1) is not the same register
629 inst_RV_RV(ins_Copy(targetType), targetReg, dataReg, targetType);
631 genProduceReg(treeNode);
// --- GT_RETFILT / GT_RETURN ---
638 // A void GT_RETFILT is the end of a finally. For non-void filter returns we need to load the result in
639 // the return register, if it's not already there. The processing is the same as GT_RETURN.
640 if (targetType != TYP_VOID)
642 // For filters, the IL spec says the result is type int32. Further, the only specified legal values
643 // are 0 or 1, with the use of other values "undefined".
644 assert(targetType == TYP_INT);
// --- GT_LEA: an address mode that couldn't be folded into its parent ---
655 // if we are here, it is the case where there is an LEA that cannot
656 // be folded into a parent instruction
657 GenTreeAddrMode* lea = treeNode->AsAddrMode();
658 genLeaInstruction(lea);
660 // genLeaInstruction calls genProduceReg()
// --- GT_IND: indirect load ---
664 genConsumeAddress(treeNode->AsIndir()->Addr());
665 emit->emitInsLoadStoreOp(ins_Load(targetType), emitTypeSize(treeNode), targetReg, treeNode->AsIndir());
666 genProduceReg(treeNode);
// --- integer divide (and related; labels elided) ---
672 // We shouldn't be seeing GT_MOD on float/double args as it should get morphed into a
673 // helper call by front-end. Similarly we shouldn't be seeing GT_UDIV and GT_UMOD
674 // on float/double args.
675 noway_assert(!varTypeIsFloating(treeNode));
680 genConsumeOperands(treeNode->AsOp());
682 noway_assert(targetReg != REG_NA);
684 GenTreePtr dst = treeNode;
685 GenTreePtr src1 = treeNode->gtGetOp1();
686 GenTreePtr src2 = treeNode->gtGetOp2();
687 instruction ins = genGetInsForOper(treeNode->OperGet(), targetType);
688 emitAttr attr = emitTypeSize(treeNode);
689 regNumber result = REG_NA;
691 // dst can only be a reg
692 assert(!dst->isContained());
694 // src can be only reg
695 assert(!src1->isContained() || !src2->isContained());
697 if (varTypeIsFloating(targetType))
699 // Floating point divide never raises an exception
701 emit->emitIns_R_R_R(ins, attr, dst->gtRegNum, src1->gtRegNum, src2->gtRegNum);
703 else // an signed integer divide operation
705 // TODO-ARM-Bug: handle zero division exception.
707 emit->emitIns_R_R_R(ins, attr, dst->gtRegNum, src1->gtRegNum, src2->gtRegNum);
710 genProduceReg(treeNode);
// --- GT_INTRINSIC ---
716 genIntrinsic(treeNode);
// --- comparisons (GT_EQ/GT_NE/GT_LT/...; labels elided) ---
727 // TODO-ARM-CQ: Check if we can use the currently set flags.
728 // TODO-ARM-CQ: Check for the case where we can simply transfer the carry bit to a register
729 // (signed < or >= where targetReg != REG_NA)
731 GenTreeOp* tree = treeNode->AsOp();
732 GenTreePtr op1 = tree->gtOp1->gtEffectiveVal();
733 GenTreePtr op2 = tree->gtOp2->gtEffectiveVal();
735 genConsumeIfReg(op1);
736 genConsumeIfReg(op2);
738 instruction ins = INS_cmp;
// NOTE(review): 'cmpAttr' is assigned below but its declaration is not
// visible in this listing (elided line).
740 if (varTypeIsFloating(op1))
742 assert(op1->TypeGet() == op2->TypeGet());
744 cmpAttr = emitTypeSize(op1->TypeGet());
745 emit->emitInsBinary(ins, cmpAttr, op1, op2);
746 // vmrs with register 0xf has special meaning of transferring flags
747 emit->emitIns_R(INS_vmrs, EA_4BYTE, REG_R15)
749 else if (varTypeIsLong(op1))
752 // The result of an unlowered long compare on a 32-bit target must either be
753 // a) materialized into a register, or
756 // A long compare that has a result that is used but not materialized into a register should
757 // have been handled by Lowering::LowerCompare.
// NOTE(review): 'use' below is presumably a LIR::Use declared on an elided line.
760 assert((treeNode->gtRegNum != REG_NA) || !LIR::AsRange(compiler->compCurBB).TryGetUse(treeNode, &use));
762 genCompareLong(treeNode);
767 var_types op1Type = op1->TypeGet();
768 var_types op2Type = op2->TypeGet();
769 assert(!varTypeIsFloating(op2Type));
771 if (op1Type == op2Type)
773 cmpAttr = emitTypeSize(op1Type);
777 var_types cmpType = TYP_INT;
778 bool op1Is64Bit = (varTypeIsLong(op1Type) || op1Type == TYP_REF);
779 bool op2Is64Bit = (varTypeIsLong(op2Type) || op2Type == TYP_REF);
780 NYI_IF(op1Is64Bit || op2Is64Bit, "Long compare");
781 assert(!op1->isUsedFromMemory() || op1Type == op2Type);
782 assert(!op2->isUsedFromMemory() || op1Type == op2Type);
783 cmpAttr = emitTypeSize(cmpType);
785 emit->emitInsBinary(ins, cmpAttr, op1, op2);
788 // Are we evaluating this into a register?
789 if (targetReg != REG_NA)
791 genSetRegToCond(targetReg, tree);
// --- GT_JTRUE: conditional branch on a compare ---
799 GenTree* cmp = treeNode->gtOp.gtOp1->gtEffectiveVal();
800 assert(cmp->OperIsCompare());
801 assert(compiler->compCurBB->bbJumpKind == BBJ_COND);
803 // Get the "kind" and type of the comparison. Note that whether it is an unsigned cmp
804 // is governed by a flag NOT by the inherent type of the node
805 // TODO-ARM-CQ: Check if we can use the currently set flags.
806 CompareKind compareKind = ((cmp->gtFlags & GTF_UNSIGNED) != 0) ? CK_UNSIGNED : CK_SIGNED;
808 emitJumpKind jmpKind = genJumpKindForOper(cmp->gtOper, compareKind);
809 BasicBlock* jmpTarget = compiler->compCurBB->bbJumpDest;
811 inst_JMP(jmpKind, jmpTarget);
// --- GT_JCC: branch on already-set condition flags ---
817 GenTreeJumpCC* jcc = treeNode->AsJumpCC();
819 assert(compiler->compCurBB->bbJumpKind == BBJ_COND);
821 CompareKind compareKind = ((jcc->gtFlags & GTF_UNSIGNED) != 0) ? CK_UNSIGNED : CK_SIGNED;
822 emitJumpKind jumpKind = genJumpKindForOper(jcc->gtCondition, compareKind);
824 inst_JMP(jumpKind, compiler->compCurBB->bbJumpDest);
// --- GC poll: conditional call to the stop-for-GC helper ---
830 // this is nothing but a conditional call to CORINFO_HELP_STOP_FOR_GC
831 // based on the contents of 'data'
833 GenTree* data = treeNode->gtOp.gtOp1->gtEffectiveVal();
834 genConsumeIfReg(data);
835 GenTreeIntCon cns = intForm(TYP_INT, 0);
836 emit->emitInsBinary(INS_cmp, emitTypeSize(TYP_INT), data, &cns);
838 BasicBlock* skipLabel = genCreateTempLabel();
840 emitJumpKind jmpEqual = genJumpKindForOper(GT_EQ, CK_SIGNED);
841 inst_JMP(jmpEqual, skipLabel);
842 // emit the call to the EE-helper that stops for GC (or other reasons)
844 genEmitHelperCall(CORINFO_HELP_STOP_FOR_GC, 0, EA_UNKNOWN);
845 genDefineTempLabel(skipLabel);
// --- GT_STOREIND: indirect store, with optional GC write barrier ---
851 GenTreeStoreInd* storeInd = treeNode->AsStoreInd();
852 GenTree* data = storeInd->Data();
853 GenTree* addr = storeInd->Addr();
854 var_types targetType = storeInd->TypeGet();
856 assert(!varTypeIsFloating(targetType) || (targetType == data->TypeGet()));
858 GCInfo::WriteBarrierForm writeBarrierForm = gcInfo.gcIsWriteBarrierCandidate(treeNode, data);
859 if (writeBarrierForm != GCInfo::WBF_NoBarrier)
861 // data and addr must be in registers.
862 // Consume both registers so that any copies of interfering
863 // registers are taken care of.
864 genConsumeOperands(storeInd->AsOp())
866 #if NOGC_WRITE_BARRIERS
867 NYI_ARM("NOGC_WRITE_BARRIERS");
869 // At this point, we should not have any interference.
870 // That is, 'data' must not be in REG_ARG_0,
871 // as that is where 'addr' must go.
872 noway_assert(data->gtRegNum != REG_ARG_0);
874 // addr goes in REG_ARG_0
875 if (addr->gtRegNum != REG_ARG_0)
877 inst_RV_RV(INS_mov, REG_ARG_0, addr->gtRegNum, addr->TypeGet());
880 // data goes in REG_ARG_1
881 if (data->gtRegNum != REG_ARG_1)
883 inst_RV_RV(INS_mov, REG_ARG_1, data->gtRegNum, data->TypeGet());
885 #endif // NOGC_WRITE_BARRIERS
887 genGCWriteBarrier(storeInd, writeBarrierForm);
889 else // A normal store, not a WriteBarrier store
891 bool reverseOps = ((storeInd->gtFlags & GTF_REVERSE_OPS) != 0);
892 bool dataIsUnary = false;
894 // We must consume the operands in the proper execution order,
895 // so that liveness is updated appropriately.
// NOTE(review): the branch structure around the two genConsumeAddress calls
// (presumably reverseOps-dependent) has been elided from this listing.
898 genConsumeAddress(addr);
901 if (!data->isContained())
903 genConsumeRegs(data);
908 genConsumeAddress(addr);
911 emit->emitInsLoadStoreOp(ins_Store(targetType), emitTypeSize(storeInd), data->gtRegNum,
912 treeNode->AsIndir());
// --- GT_COPY: nothing to do here ---
918 // This is handled at the time we call genConsumeReg() on the GT_COPY
// --- GT_PUTARG_STK ---
928 genPutArgStk(treeNode->AsPutArgStk());
// --- GT_PUTARG_REG: move an argument into its assigned register ---
933 NYI_IF(targetType == TYP_STRUCT, "GT_PUTARG_REG: struct support not implemented");
935 // commas show up here commonly, as part of a nullchk operation
936 GenTree* op1 = treeNode->gtOp.gtOp1->gtEffectiveVal();
937 // If child node is not already in the register we need, move it
939 if (treeNode->gtRegNum != op1->gtRegNum)
941 inst_RV_RV(ins_Move_Extend(targetType, true), treeNode->gtRegNum, op1->gtRegNum, targetType);
944 genProduceReg(treeNode);
// --- GT_CALL ---
948 genCallInstruction(treeNode->AsCall());
// --- interlocked ops (delegates to the NYI stub below) ---
954 genLockedInstructions(treeNode->AsOp());
957 case GT_MEMORYBARRIER:
958 instGen_MemoryBarrier();
// --- (label elided) produce the register for this node ---
965 genProduceReg(treeNode);
// --- GT_RELOAD: marker only ---
969 // do nothing - reload is just a marker.
970 // The parent node will call genConsumeReg on this which will trigger the unspill of this node's child
971 // into the register specified in this node.
// --- GT_NO_OP ---
978 if (treeNode->gtFlags & GTF_NO_OP_NO)
980 noway_assert(!"GTF_NO_OP_NO should not be set");
988 case GT_ARR_BOUNDS_CHECK:
989 genRangeCheck(treeNode);
// --- GT_PHYSREG: copy out of a physical register if needed ---
993 if (treeNode->gtRegNum != treeNode->AsPhysReg()->gtSrcReg)
995 inst_RV_RV(INS_mov, treeNode->gtRegNum, treeNode->AsPhysReg()->gtSrcReg, targetType);
997 genTransferRegGCState(treeNode->gtRegNum, treeNode->AsPhysReg()->gtSrcReg);
// --- GT_NULLCHECK (presumably): load through the address to force a fault ---
1006 assert(!treeNode->gtOp.gtOp1->isContained());
1007 regNumber addrReg = genConsumeReg(treeNode->gtOp.gtOp1);
1008 emit->emitIns_R_R_I(INS_ldr, EA_4BYTE, targetReg, addrReg, 0);
// --- GT_CATCH_ARG: the incoming exception object ---
1014 noway_assert(handlerGetsXcptnObj(compiler->compCurBB->bbCatchTyp));
1016 /* Catch arguments get passed in a register. genCodeForBBlist()
1017 would have marked it as holding a GC object, but not used. */
1019 noway_assert(gcInfo.gcRegGCrefSetCur & RBM_EXCEPTION_OBJECT);
1020 genConsumeReg(treeNode);
1023 case GT_PINVOKE_PROLOG:
1024 noway_assert(((gcInfo.gcRegGCrefSetCur | gcInfo.gcRegByrefSetCur) & ~fullIntArgRegMask()) == 0);
1026 // the runtime side requires the codegen here to be consistent
1027 emit->emitDisableRandomNops();
// --- GT_LABEL (presumably): pending call label for PInvoke/EH ---
1031 genPendingCallLabel = genCreateTempLabel();
1032 treeNode->gtLabel.gtLabBB = genPendingCallLabel;
1033 emit->emitIns_J_R(INS_adr, EA_PTRSIZE, genPendingCallLabel, treeNode->gtRegNum);
1036 case GT_CLS_VAR_ADDR:
1037 emit->emitIns_R_C(INS_lea, EA_PTRSIZE, targetReg, treeNode->gtClsVar.gtClsVarHnd, 0);
1038 genProduceReg(treeNode);
1041 case GT_STORE_DYN_BLK:
1043 genCodeForStoreBlk(treeNode->AsBlk());
// --- GT_JMPTABLE ---
1047 genJumpTable(treeNode);
1050 case GT_SWITCH_TABLE:
1051 genTableBasedSwitch(treeNode);
// --- GT_ARR_INDEX / GT_ARR_OFFSET ---
1055 genCodeForArrIndex(treeNode->AsArrIndex());
1059 genCodeForArrOffset(treeNode->AsArrOffs());
// --- debug-info markers: nothing to emit ---
1063 // Do nothing; these nodes are simply markers for debug info.
// --- default: unimplemented node kind ---
// NOTE(review): 'message' is presumably a char buffer declared on an elided
// line inside a debug-only region.
1070 _snprintf_s(message, _countof(message), _TRUNCATE, "NYI: Unimplemented node type %s",
1071 GenTree::NodeName(treeNode->OperGet()));
1074 NYI("unimplemented node");
1081 //------------------------------------------------------------------------
1082 // genLockedInstructions: Generate code for the locked operations.
//
// Arguments:
//    treeNode - the interlocked operation node.
//
1085 // Handles GT_LOCKADD, GT_XCHG, GT_XADD nodes.
//
// Notes:
//    Not yet implemented for this (ARM) target — unconditionally NYI.
1087 void CodeGen::genLockedInstructions(GenTreeOp* treeNode)
1089 NYI("genLockedInstructions");
1092 //--------------------------------------------------------------------------------------
1093 // genLclHeap: Generate code for localloc
1096 // There are 2 ways depending from build version to generate code for localloc:
1097 // 1) For debug build where memory should be initialized we generate loop
1098 // which invoke push {tmpReg} N times.
1099 // 2) Fore /o build However, we tickle the pages to ensure that SP is always
1100 // valid and is in sync with the "stack guard page". Amount of iteration
1104 // There can be some optimization:
1105 // 1) It's not needed to generate loop for zero size allocation
1106 // 2) For small allocation (less than 4 store) we unroll loop
1107 // 3) For allocation less than PAGE_SIZE and when it's not needed to initialize
1108 // memory to zero, we can just increment SP.
1110 // Notes: Size N should be aligned to STACK_ALIGN before any allocation
1112 void CodeGen::genLclHeap(GenTreePtr tree)
1114 assert(tree->OperGet() == GT_LCLHEAP);
1116 GenTreePtr size = tree->gtOp.gtOp1;
1117 noway_assert((genActualType(size->gtType) == TYP_INT) || (genActualType(size->gtType) == TYP_I_IMPL));
1119 // Result of localloc will be returned in regCnt.
1120 // Also it used as temporary register in code generation
1121 // for storing allocation size
1122 regNumber regCnt = tree->gtRegNum;
1123 regNumber pspSymReg = REG_NA;
1124 var_types type = genActualType(size->gtType);
1125 emitAttr easz = emitTypeSize(type);
1126 BasicBlock* endLabel = nullptr;
1127 BasicBlock* loop = nullptr;
1128 unsigned stackAdjustment = 0;
1132 if (compiler->opts.compStackCheckOnRet)
1134 noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
1135 compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
1136 compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
1137 getEmitter()->emitIns_S_R(INS_cmp, EA_PTRSIZE, REG_SPBASE, compiler->lvaReturnEspCheck, 0);
1139 BasicBlock* esp_check = genCreateTempLabel();
1140 emitJumpKind jmpEqual = genJumpKindForOper(GT_EQ, CK_SIGNED);
1141 inst_JMP(jmpEqual, esp_check);
1142 getEmitter()->emitIns(INS_BREAKPOINT);
1143 genDefineTempLabel(esp_check);
1147 noway_assert(isFramePointerUsed()); // localloc requires Frame Pointer to be established since SP changes
1148 noway_assert(genStackLevel == 0); // Can't have anything on the stack
1150 // Whether method has PSPSym.
1152 #if FEATURE_EH_FUNCLETS
1153 hasPspSym = (compiler->lvaPSPSym != BAD_VAR_NUM);
1158 // Check to 0 size allocations
1159 // size_t amount = 0;
1160 if (size->IsCnsIntOrI())
1162 // If size is a constant, then it must be contained.
1163 assert(size->isContained());
1165 // If amount is zero then return null in regCnt
1166 size_t amount = size->gtIntCon.gtIconVal;
1169 instGen_Set_Reg_To_Zero(EA_PTRSIZE, regCnt);
1175 // If 0 bail out by returning null in regCnt
1176 genConsumeRegAndCopy(size, regCnt);
1177 endLabel = genCreateTempLabel();
1178 getEmitter()->emitIns_R_R(INS_TEST, easz, regCnt, regCnt);
1179 emitJumpKind jmpEqual = genJumpKindForOper(GT_EQ, CK_SIGNED);
1180 inst_JMP(jmpEqual, endLabel);
1183 stackAdjustment = 0;
1184 #if FEATURE_EH_FUNCLETS
1185 // If we have PSPsym, then need to re-locate it after localloc.
1188 stackAdjustment += STACK_ALIGN;
1190 // Save a copy of PSPSym
1191 pspSymReg = tree->ExtractTempReg();
1192 getEmitter()->emitIns_R_S(ins_Load(TYP_I_IMPL), EA_PTRSIZE, pspSymReg, compiler->lvaPSPSym, 0);
1196 #if FEATURE_FIXED_OUT_ARGS
1197 // If we have an outgoing arg area then we must adjust the SP by popping off the
1198 // outgoing arg area. We will restore it right before we return from this method.
1199 if (compiler->lvaOutgoingArgSpaceSize > 0)
1201 assert((compiler->lvaOutgoingArgSpaceSize % STACK_ALIGN) == 0); // This must be true for the stack to remain
1203 inst_RV_IV(INS_add, REG_SPBASE, compiler->lvaOutgoingArgSpaceSize, EA_PTRSIZE);
1204 stackAdjustment += compiler->lvaOutgoingArgSpaceSize;
1208 // Put aligned allocation size to regCnt
1209 if (size->IsCnsIntOrI())
1211 // 'amount' is the total number of bytes to localloc to properly STACK_ALIGN
1212 size_t amount = size->gtIntCon.gtIconVal;
1213 amount = AlignUp(amount, STACK_ALIGN);
1215 // For small allocations we will generate up to four stp instructions
1216 size_t cntStackAlignedWidthItems = (amount >> STACK_ALIGN_SHIFT);
1217 if (cntStackAlignedWidthItems <= 4)
1219 instGen_Set_Reg_To_Zero(EA_PTRSIZE, regCnt);
1221 while (cntStackAlignedWidthItems != 0)
1223 inst_IV(INS_push, (unsigned)genRegMask(regCnt));
1224 cntStackAlignedWidthItems -= 1;
1229 else if (!compiler->info.compInitMem && (amount < compiler->eeGetPageSize())) // must be < not <=
1231 // Since the size is a page or less, simply adjust the SP value
1232 // The SP might already be in the guard page, must touch it BEFORE
1233 // the alloc, not after.
1234 getEmitter()->emitIns_R_R_I(INS_ldr, EA_4BYTE, regCnt, REG_SP, 0);
1235 inst_RV_IV(INS_sub, REG_SP, amount, EA_PTRSIZE);
1239 // regCnt will be the total number of bytes to locAlloc
1240 genSetRegToIcon(regCnt, amount, ((int)amount == amount) ? TYP_INT : TYP_LONG);
1244 // Round up the number of bytes to allocate to a STACK_ALIGN boundary.
1245 inst_RV_IV(INS_add, regCnt, (STACK_ALIGN - 1), emitActualTypeSize(type));
1246 inst_RV_IV(INS_AND, regCnt, ~(STACK_ALIGN - 1), emitActualTypeSize(type));
1250 if (compiler->info.compInitMem)
1252 // At this point 'regCnt' is set to the total number of bytes to locAlloc.
1253 // Since we have to zero out the allocated memory AND ensure that RSP is always valid
1254 // by tickling the pages, we will just push 0's on the stack.
1256 regNumber regTmp = tree->ExtractTempReg();
1257 instGen_Set_Reg_To_Zero(EA_PTRSIZE, regTmp);
1260 BasicBlock* loop = genCreateTempLabel();
1261 genDefineTempLabel(loop);
1263 noway_assert(STACK_ALIGN == 8);
1264 inst_IV(INS_push, (unsigned)genRegMask(regTmp));
1265 inst_IV(INS_push, (unsigned)genRegMask(regTmp));
1267 // If not done, loop
1268 // Note that regCnt is the number of bytes to stack allocate.
1269 assert(genIsValidIntReg(regCnt));
1270 getEmitter()->emitIns_R_I(INS_sub, EA_PTRSIZE, regCnt, STACK_ALIGN, INS_FLAGS_SET);
1271 emitJumpKind jmpNotEqual = genJumpKindForOper(GT_NE, CK_SIGNED);
1272 inst_JMP(jmpNotEqual, loop);
1276 // At this point 'regCnt' is set to the total number of bytes to locAlloc.
1278 // We don't need to zero out the allocated memory. However, we do have
1279 // to tickle the pages to ensure that SP is always valid and is
1280 // in sync with the "stack guard page". Note that in the worst
1281 // case SP is on the last byte of the guard page. Thus you must
1282 // touch SP+0 first not SP+0x1000.
1284 // Another subtlety is that you don't want SP to be exactly on the
1285 // boundary of the guard page because PUSH is predecrement, thus
1286 // call setup would not touch the guard page but just beyond it
1288 // Note that we go through a few hoops so that SP never points to
1289 // illegal pages at any time during the ticking process
1291 // subs regCnt, SP, regCnt // regCnt now holds ultimate SP
1292 // jb Loop // result is smaller than orignial SP (no wrap around)
1293 // mov regCnt, #0 // Overflow, pick lowest possible value
1296 // ldr regTmp, [SP + 0] // tickle the page - read from the page
1297 // sub regTmp, SP, PAGE_SIZE // decrement SP by PAGE_SIZE
1298 // cmp regTmp, regCnt
1308 regNumber regTmp = tree->ExtractTempReg();
1310 BasicBlock* loop = genCreateTempLabel();
1311 BasicBlock* done = genCreateTempLabel();
1313 // subs regCnt, SP, regCnt // regCnt now holds ultimate SP
1314 getEmitter()->emitIns_R_R_R(INS_sub, EA_PTRSIZE, regCnt, REG_SPBASE, regCnt, INS_FLAGS_SET);
1316 inst_JMP(EJ_vc, loop); // branch if the V flag is not set
1318 // Ups... Overflow, set regCnt to lowest possible value
1319 instGen_Set_Reg_To_Zero(EA_PTRSIZE, regCnt);
1321 genDefineTempLabel(loop);
1323 // tickle the page - Read from the updated SP - this triggers a page fault when on the guard page
1324 getEmitter()->emitIns_R_R_I(INS_ldr, EA_4BYTE, regTmp, REG_SPBASE, 0);
1326 // decrement SP by PAGE_SIZE
1327 getEmitter()->emitIns_R_R_I(INS_sub, EA_PTRSIZE, regTmp, REG_SPBASE, compiler->eeGetPageSize());
1329 getEmitter()->emitIns_R_R(INS_cmp, EA_PTRSIZE, regTmp, regCnt);
1330 emitJumpKind jmpLTU = genJumpKindForOper(GT_LT, CK_UNSIGNED);
1331 inst_JMP(jmpLTU, done);
1333 // Update SP to be at the next page of stack that we will tickle
1334 getEmitter()->emitIns_R_R(INS_mov, EA_PTRSIZE, REG_SPBASE, regCnt);
1336 // Jump to loop and tickle new stack address
1337 inst_JMP(EJ_jmp, loop);
1339 // Done with stack tickle loop
1340 genDefineTempLabel(done);
1342 // Now just move the final value to SP
1343 getEmitter()->emitIns_R_R(INS_mov, EA_PTRSIZE, REG_SPBASE, regCnt);
1347 // Re-adjust SP to allocate PSPSym and out-going arg area
1348 if (stackAdjustment != 0)
1350 assert((stackAdjustment % STACK_ALIGN) == 0); // This must be true for the stack to remain aligned
1351 assert(stackAdjustment > 0);
1352 getEmitter()->emitIns_R_R_I(INS_sub, EA_PTRSIZE, REG_SPBASE, REG_SPBASE, (int)stackAdjustment);
1354 #if FEATURE_EH_FUNCLETS
1355 // Write PSPSym to its new location.
1358 assert(genIsValidIntReg(pspSymReg));
1359 getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, pspSymReg, compiler->lvaPSPSym, 0);
1362 // Return the stackalloc'ed address in result register.
1363 // regCnt = RSP + stackAdjustment.
1364 getEmitter()->emitIns_R_R_I(INS_add, EA_PTRSIZE, regCnt, REG_SPBASE, (int)stackAdjustment);
1366 else // stackAdjustment == 0
1368 // Move the final value of SP to regCnt
1369 inst_RV_RV(INS_mov, regCnt, REG_SPBASE);
1373 if (endLabel != nullptr)
1374 genDefineTempLabel(endLabel);
1376 // Write the lvaLocAllocSPvar stack frame slot
1377 if (compiler->lvaLocAllocSPvar != BAD_VAR_NUM)
1379 getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, regCnt, compiler->lvaLocAllocSPvar, 0);
1383 if (compiler->opts.compNeedStackProbes)
1385 genGenerateStackProbe();
1391 if (compiler->opts.compStackCheckOnRet)
1393 noway_assert(compiler->lvaReturnEspCheck != 0xCCCCCCCC &&
1394 compiler->lvaTable[compiler->lvaReturnEspCheck].lvDoNotEnregister &&
1395 compiler->lvaTable[compiler->lvaReturnEspCheck].lvOnFrame);
1396 getEmitter()->emitIns_S_R(ins_Store(TYP_I_IMPL), EA_PTRSIZE, regCnt, compiler->lvaReturnEspCheck, 0);
1400 genProduceReg(tree);
1403 //------------------------------------------------------------------------
1404 // genTableBasedSwitch: generate code for a switch statement based on a table of ip-relative offsets
1406 void CodeGen::genTableBasedSwitch(GenTree* treeNode)
1408 genConsumeOperands(treeNode->AsOp());
1409 regNumber idxReg = treeNode->gtOp.gtOp1->gtRegNum;
1410 regNumber baseReg = treeNode->gtOp.gtOp2->gtRegNum;
1412 getEmitter()->emitIns_R_ARX(INS_ldr, EA_4BYTE, REG_PC, baseReg, idxReg, TARGET_POINTER_SIZE, 0);
//------------------------------------------------------------------------
// genJumpTable: emits the table and an instruction to get the address of the first element
//
// Emits the jump table into the data section (one entry per switch target),
// then materializes the table's base address into the node's target register
// with a movw/movt pair (relocatable 32-bit constant).
void CodeGen::genJumpTable(GenTree* treeNode)
    // Jump tables are only generated for BBJ_SWITCH blocks.
    noway_assert(compiler->compCurBB->bbJumpKind == BBJ_SWITCH);
    assert(treeNode->OperGet() == GT_JMPTABLE);
    unsigned jumpCount = compiler->compCurBB->bbJumpSwt->bbsCount;
    BasicBlock** jumpTable = compiler->compCurBB->bbJumpSwt->bbsDstTab;
    unsigned jmpTabBase;
    // Begin a basic-block table in the data section; 'false' selects
    // absolute (non-relative) entries here.
    jmpTabBase = getEmitter()->emitBBTableDataGenBeg(jumpCount, false);
    JITDUMP("\n J_M%03u_DS%02u LABEL DWORD\n", Compiler::s_compMethodsCount, jmpTabBase);
    // Emit one data entry per switch destination block.
    for (unsigned i = 0; i < jumpCount; i++)
        BasicBlock* target = *jumpTable++;
        // Every target must already be marked as a jump target so it gets a label.
        noway_assert(target->bbFlags & BBF_JMP_TARGET);
        JITDUMP(" DD L_M%03u_BB%02u\n", Compiler::s_compMethodsCount, target->bbNum);
        getEmitter()->emitDataGenData(i, target);
    getEmitter()->emitDataGenEnd();
    // Load the table base address into the destination register:
    // movw (low 16 bits) followed by movt (high 16 bits), both relocated.
    getEmitter()->emitIns_R_D(INS_movw, EA_HANDLE_CNS_RELOC, jmpTabBase, treeNode->gtRegNum);
    getEmitter()->emitIns_R_D(INS_movt, EA_HANDLE_CNS_RELOC, jmpTabBase, treeNode->gtRegNum);
    genProduceReg(treeNode);
//------------------------------------------------------------------------
// genGetInsForOper: Return instruction encoding of the operation tree.
//
// Maps a GenTree operator + type to the ARM instruction that implements it.
instruction CodeGen::genGetInsForOper(genTreeOps oper, var_types type)
    // Floating-point operators have their own mapping table.
    if (varTypeIsFloating(type))
        return CodeGen::ins_MathOp(oper, type);
    // NOTE(review): the switch/case labels selecting among the assignments
    // below are not visible in this view; presumably they correspond to the
    // shift operators (GT_LSH/GT_RSH/GT_RSZ and the long-shift GT_LSH_HI /
    // GT_RSH_LO forms) — confirm against the full file.
    ins = INS_SHIFT_LEFT_LOGICAL;
    ins = INS_SHIFT_RIGHT_ARITHM;
    ins = INS_SHIFT_RIGHT_LOGICAL;
    ins = INS_SHIFT_LEFT_LOGICAL;
    ins = INS_SHIFT_RIGHT_LOGICAL;
1525 // Generates CpBlk code by performing a loop unroll
1527 // The size argument of the CpBlk node is a constant and <= 64 bytes.
1528 // This may seem small but covers >95% of the cases in several framework assemblies.
1529 void CodeGen::genCodeForCpBlkUnroll(GenTreeBlk* cpBlkNode)
1531 NYI_ARM("genCodeForCpBlkUnroll");
1534 // Generate code for InitBlk by performing a loop unroll
1536 // a) Both the size and fill byte value are integer constants.
1537 // b) The size of the struct to initialize is smaller than INITBLK_UNROLL_LIMIT bytes.
1538 void CodeGen::genCodeForInitBlkUnroll(GenTreeBlk* initBlkNode)
1540 NYI_ARM("genCodeForInitBlkUnroll");
// genCodeForStoreBlk: Generate code for a block store (CpBlk/InitBlk),
// dispatching on the materialization strategy chosen during lowering
// (helper call vs. inline unroll).
void CodeGen::genCodeForStoreBlk(GenTreeBlk* blkOp)
    // GC-unsafe block ops (e.g. overlapping GC refs) must not be interrupted
    // by a GC, so bracket them with emitDisableGC()/emitEnableGC().
    if (blkOp->gtBlkOpGcUnsafe)
        getEmitter()->emitDisableGC();
    bool isCopyBlk = blkOp->OperIsCopyBlkOp();
    switch (blkOp->gtBlkOpKind)
        case GenTreeBlk::BlkOpKindHelper:
            // Helper-call versions: memcpy-style for copy, memset-style for init.
            // NOTE(review): the if/else on isCopyBlk choosing between these two
            // calls is elided in this view — confirm against the full file.
            genCodeForCpBlk(blkOp);
            genCodeForInitBlk(blkOp);
        case GenTreeBlk::BlkOpKindUnroll:
            // Unrolled inline versions (currently NYI on ARM32).
            genCodeForCpBlkUnroll(blkOp);
            genCodeForInitBlkUnroll(blkOp);
    // Re-enable GC if we disabled it above.
    if (blkOp->gtBlkOpGcUnsafe)
        getEmitter()->emitEnableGC();
//------------------------------------------------------------------------
// genCodeForShiftLong: Generates the code sequence for a GenTree node that
// represents a three operand bit shift or rotate operation (<<Hi, >>Lo).
//
// Arguments:
// tree - the bit shift node (that specifies the type of bit shift to perform).
//
// Assumptions:
// a) All GenTrees are register allocated.
// b) The shift-by-amount in tree->gtOp.gtOp2 is a contained constant
//
void CodeGen::genCodeForShiftLong(GenTreePtr tree)
    // Only the non-RMW case here.
    genTreeOps oper = tree->OperGet();
    assert(oper == GT_LSH_HI || oper == GT_RSH_LO);
    // op1 is a GT_LONG pairing the lo/hi halves, both in registers.
    GenTree* operand = tree->gtOp.gtOp1;
    assert(operand->OperGet() == GT_LONG);
    assert(operand->gtOp.gtOp1->isUsedFromReg());
    assert(operand->gtOp.gtOp2->isUsedFromReg());
    GenTree* operandLo = operand->gtGetOp1();
    GenTree* operandHi = operand->gtGetOp2();
    regNumber regLo = operandLo->gtRegNum;
    regNumber regHi = operandHi->gtRegNum;
    genConsumeOperands(tree->AsOp());
    var_types targetType = tree->TypeGet();
    instruction ins = genGetInsForOper(oper, targetType);
    // The shift count must be a contained integer constant.
    GenTreePtr shiftBy = tree->gtGetOp2();
    assert(shiftBy->isContainedIntOrIImmed());
    unsigned int count = shiftBy->AsIntConCommon()->IconValue();
    // LSH_HI produces the high half, RSH_LO the low half.
    regNumber regResult = (oper == GT_LSH_HI) ? regHi : regLo;
    // Move the source half into the destination register if needed.
    if (regResult != tree->gtRegNum)
        inst_RV_RV(INS_mov, tree->gtRegNum, regResult, targetType);
    if (oper == GT_LSH_HI)
        // hi = (hi << count) | (lo >> (32 - count))  -- bits shifted out of lo feed hi.
        inst_RV_SH(ins, EA_4BYTE, tree->gtRegNum, count);
        getEmitter()->emitIns_R_R_R_I(INS_OR, EA_4BYTE, tree->gtRegNum, tree->gtRegNum, regLo, 32 - count,
                                      INS_FLAGS_DONT_CARE, INS_OPTS_LSR);
    // else: RSH_LO — lo = (lo >>> count) | (hi << (32 - count)).
    assert(oper == GT_RSH_LO);
    inst_RV_SH(INS_SHIFT_RIGHT_LOGICAL, EA_4BYTE, tree->gtRegNum, count);
    getEmitter()->emitIns_R_R_R_I(INS_OR, EA_4BYTE, tree->gtRegNum, tree->gtRegNum, regHi, 32 - count,
                                  INS_FLAGS_DONT_CARE, INS_OPTS_LSL);
    genProduceReg(tree);
//------------------------------------------------------------------------
// genLeaInstruction: Produce code for a GT_LEA subnode.
//
// Computes base [+ index*scale] + offset into the LEA's target register.
void CodeGen::genLeaInstruction(GenTreeAddrMode* lea)
    emitAttr size = emitTypeSize(lea);
    genConsumeOperands(lea);
    if (lea->Base() && lea->Index())
        // base + index*scale + offset form.
        regNumber baseReg = lea->Base()->gtRegNum;
        regNumber indexReg = lea->Index()->gtRegNum;
        getEmitter()->emitIns_R_ARX(INS_lea, size, lea->gtRegNum, baseReg, indexReg, lea->gtScale, lea->gtOffset);
    else if (lea->Base())
        // base + offset form (no index).
        regNumber baseReg = lea->Base()->gtRegNum;
        getEmitter()->emitIns_R_AR(INS_lea, size, lea->gtRegNum, baseReg, lea->gtOffset);
    else if (lea->Index())
        // Index-only address modes are not expected to reach codegen on ARM32.
        assert(!"Should we see a baseless address computation during CodeGen for ARM32?");
//------------------------------------------------------------------------
// genCompareLong: Generate code for comparing two longs when the result of the compare
// is manifested in a register.
//
// Arguments:
// treeNode - the compare tree
//
// Notes:
// For long compares, we need to compare the high parts of operands first, then the low parts.
// If the high compare is false, we do not need to compare the low parts. For less than and
// greater than, if the high compare is true, we can assume the entire compare is true.
//
void CodeGen::genCompareLong(GenTreePtr treeNode)
    assert(treeNode->OperIsCompare());
    GenTreeOp* tree = treeNode->AsOp();
    GenTreePtr op1 = tree->gtOp1;
    GenTreePtr op2 = tree->gtOp2;
    // Both operands are decomposed GT_LONG pairs.
    assert(varTypeIsLong(op1->TypeGet()));
    assert(varTypeIsLong(op2->TypeGet()));
    regNumber targetReg = treeNode->gtRegNum;
    genConsumeOperands(tree);
    // Extract the lo/hi halves of each operand.
    GenTreePtr loOp1 = op1->gtGetOp1();
    GenTreePtr hiOp1 = op1->gtGetOp2();
    GenTreePtr loOp2 = op2->gtGetOp1();
    GenTreePtr hiOp2 = op2->gtGetOp2();
    // Create compare for the high parts
    instruction ins = INS_cmp;
    var_types cmpType = TYP_INT;
    emitAttr cmpAttr = emitTypeSize(cmpType);
    // Emit the compare instruction
    getEmitter()->emitInsBinary(ins, cmpAttr, hiOp1, hiOp2);
    // If the result is not being materialized in a register, we're done.
    if (targetReg == REG_NA)
    // Materialize the boolean result: branch on the hi compare, then on the
    // lo compare, landing on labelTrue/labelFalse which store 1/0.
    BasicBlock* labelTrue = genCreateTempLabel();
    BasicBlock* labelFalse = genCreateTempLabel();
    BasicBlock* labelNext = genCreateTempLabel();
    genJccLongHi(tree->gtOper, labelTrue, labelFalse, tree->IsUnsigned());
    getEmitter()->emitInsBinary(ins, cmpAttr, loOp1, loOp2);
    genJccLongLo(tree->gtOper, labelTrue, labelFalse);
    // False path: result = 0, then skip over the true path.
    genDefineTempLabel(labelFalse);
    getEmitter()->emitIns_R_I(INS_mov, emitActualTypeSize(tree->gtType), tree->gtRegNum, 0);
    getEmitter()->emitIns_J(INS_b, labelNext);
    // True path: result = 1.
    genDefineTempLabel(labelTrue);
    getEmitter()->emitIns_R_I(INS_mov, emitActualTypeSize(tree->gtType), tree->gtRegNum, 1);
    genDefineTempLabel(labelNext);
    genProduceReg(tree);
// genJccLongHi: Emit the conditional jumps for the HIGH-word compare of a
// long comparison. Condition codes must already be set by the hi compare.
// Jumps to jumpTrue when the hi compare alone decides the result true,
// to jumpFalse when it decides false; falls through when the low words
// must still be compared.
void CodeGen::genJccLongHi(genTreeOps cmp, BasicBlock* jumpTrue, BasicBlock* jumpFalse, bool isUnsigned)
    // jumpFalse is used as a branch target below, so it needs a label.
    jumpFalse->bbFlags |= BBF_JMP_TARGET | BBF_HAS_LABEL;
    // NOTE(review): the switch/case labels on 'cmp' selecting among the jump
    // pairs below are elided in this view; the groupings appear to be:
    // EQ: hi parts differ => false.
    inst_JMP(EJ_ne, jumpFalse);
    // NE: hi parts differ => true.
    inst_JMP(EJ_ne, jumpTrue);
    // LT/LE, unsigned: hi above => false, hi below => true, equal falls through.
    inst_JMP(EJ_hi, jumpFalse);
    inst_JMP(EJ_lo, jumpTrue);
    // LT/LE, signed.
    inst_JMP(EJ_gt, jumpFalse);
    inst_JMP(EJ_lt, jumpTrue);
    // GE/GT, unsigned: hi below => false, hi above => true.
    inst_JMP(EJ_lo, jumpFalse);
    inst_JMP(EJ_hi, jumpTrue);
    // GE/GT, signed.
    inst_JMP(EJ_lt, jumpFalse);
    inst_JMP(EJ_gt, jumpTrue);
    // default: unexpected operator.
    noway_assert(!"expected a comparison operator");
// genJccLongLo: Emit the conditional jump for the LOW-word compare of a
// long comparison. Condition codes must already be set by the lo compare.
// The low halves are always compared as UNSIGNED quantities, so only
// unsigned jump kinds appear here.
void CodeGen::genJccLongLo(genTreeOps cmp, BasicBlock* jumpTrue, BasicBlock* jumpFalse)
    // NOTE(review): the switch/case labels on 'cmp' are elided in this view;
    // each inst_JMP below belongs to one comparison operator, presumably in
    // the order EQ, NE, LT, LE, GE, GT — confirm against the full file.
    inst_JMP(EJ_eq, jumpTrue);
    inst_JMP(EJ_ne, jumpTrue);
    inst_JMP(EJ_lo, jumpTrue);
    inst_JMP(EJ_ls, jumpTrue);
    inst_JMP(EJ_hs, jumpTrue);
    inst_JMP(EJ_hi, jumpTrue);
    // default: unexpected operator.
    noway_assert(!"expected comparison");
//------------------------------------------------------------------------
// genSetRegToCond: Generate code to materialize a condition into a register.
//
// Arguments:
// dstReg - The target register to set to 1 or 0
// tree - The GenTree Relop node that was used to set the Condition codes
//
// Return Value: none
//
// Preconditions:
// The condition codes must already have been appropriately set.
//
void CodeGen::genSetRegToCond(regNumber dstReg, GenTreePtr tree)
    // Emit code like that:
    //   b<cond> labelTrue
    //   mov dstReg, 0
    //   b labelNext
    // labelTrue:
    //   mov dstReg, 1
    // labelNext:
    CompareKind compareKind = ((tree->gtFlags & GTF_UNSIGNED) != 0) ? CK_UNSIGNED : CK_SIGNED;
    emitJumpKind jmpKind = genJumpKindForOper(tree->gtOper, compareKind);
    // Branch to the "true" label when the condition holds.
    BasicBlock* labelTrue = genCreateTempLabel();
    getEmitter()->emitIns_J(emitter::emitJumpKindToIns(jmpKind), labelTrue);
    // Fall-through (condition false): dstReg = 0, then skip the true path.
    getEmitter()->emitIns_R_I(INS_mov, emitActualTypeSize(tree->gtType), dstReg, 0);
    BasicBlock* labelNext = genCreateTempLabel();
    getEmitter()->emitIns_J(INS_b, labelNext);
    // True path: dstReg = 1.
    genDefineTempLabel(labelTrue);
    getEmitter()->emitIns_R_I(INS_mov, emitActualTypeSize(tree->gtType), dstReg, 1);
    genDefineTempLabel(labelNext);
//------------------------------------------------------------------------
// genLongToIntCast: Generate code for long to int casts.
//
// Arguments:
// cast - The GT_CAST node
//
// Assumptions:
// The cast node and its sources (via GT_LONG) must have been assigned registers.
// The destination cannot be a floating point type or a small integer type.
//
void CodeGen::genLongToIntCast(GenTree* cast)
    assert(cast->OperGet() == GT_CAST);
    GenTree* src = cast->gtGetOp1();
    noway_assert(src->OperGet() == GT_LONG);
    genConsumeRegs(src);
    var_types srcType = ((cast->gtFlags & GTF_UNSIGNED) != 0) ? TYP_ULONG : TYP_LONG;
    var_types dstType = cast->CastToType();
    regNumber loSrcReg = src->gtGetOp1()->gtRegNum;
    regNumber hiSrcReg = src->gtGetOp2()->gtRegNum;
    regNumber dstReg = cast->gtRegNum;
    assert((dstType == TYP_INT) || (dstType == TYP_UINT));
    assert(genIsValidIntReg(loSrcReg));
    assert(genIsValidIntReg(hiSrcReg));
    assert(genIsValidIntReg(dstReg));
    if (cast->gtOverflow())
        // Generate an overflow check for [u]long to [u]int casts:
        //
        // long -> int - check if the upper 33 bits are all 0 or all 1
        //
        // ulong -> int - check if the upper 33 bits are all 0
        //
        // long -> uint - check if the upper 32 bits are all 0
        // ulong -> uint - check if the upper 32 bits are all 0
        //
        if ((srcType == TYP_LONG) && (dstType == TYP_INT))
            BasicBlock* allOne = genCreateTempLabel();
            BasicBlock* success = genCreateTempLabel();
            // If the low word's sign bit is set, the upper 33 bits must all be 1.
            inst_RV_RV(INS_tst, loSrcReg, loSrcReg, TYP_INT, EA_4BYTE);
            emitJumpKind JmpNegative = genJumpKindForOper(GT_LT, CK_LOGICAL);
            inst_JMP(JmpNegative, allOne);
            // Non-negative low word: the high word must be exactly zero.
            inst_RV_RV(INS_tst, hiSrcReg, hiSrcReg, TYP_INT, EA_4BYTE);
            emitJumpKind jmpNotEqualL = genJumpKindForOper(GT_NE, CK_LOGICAL);
            genJumpToThrowHlpBlk(jmpNotEqualL, SCK_OVERFLOW);
            inst_JMP(EJ_jmp, success);
            // Negative low word: the high word must be all ones (-1).
            genDefineTempLabel(allOne);
            inst_RV_IV(INS_cmp, hiSrcReg, -1, EA_4BYTE);
            emitJumpKind jmpNotEqualS = genJumpKindForOper(GT_NE, CK_SIGNED);
            genJumpToThrowHlpBlk(jmpNotEqualS, SCK_OVERFLOW);
            genDefineTempLabel(success);
        // ulong -> int additionally requires the low word's sign bit clear.
        if ((srcType == TYP_ULONG) && (dstType == TYP_INT))
            inst_RV_RV(INS_tst, loSrcReg, loSrcReg, TYP_INT, EA_4BYTE);
            emitJumpKind JmpNegative = genJumpKindForOper(GT_LT, CK_LOGICAL);
            genJumpToThrowHlpBlk(JmpNegative, SCK_OVERFLOW);
        // Remaining cases: upper 32 bits must be zero.
        inst_RV_RV(INS_tst, hiSrcReg, hiSrcReg, TYP_INT, EA_4BYTE);
        emitJumpKind jmpNotEqual = genJumpKindForOper(GT_NE, CK_LOGICAL);
        genJumpToThrowHlpBlk(jmpNotEqual, SCK_OVERFLOW);
    // The cast result is simply the low 32 bits of the source.
    if (dstReg != loSrcReg)
        inst_RV_RV(INS_mov, dstReg, loSrcReg, TYP_INT, EA_4BYTE);
    genProduceReg(cast);
//------------------------------------------------------------------------
// genIntToFloatCast: Generate code to cast an int/long to float/double
//
// Arguments:
// treeNode - The GT_CAST node
//
// Assumptions:
// Cast is a non-overflow conversion.
// The treeNode must have an assigned register.
// SrcType= int32/uint32/int64/uint64 and DstType=float/double.
//
void CodeGen::genIntToFloatCast(GenTreePtr treeNode)
    // int --> float/double conversions are always non-overflow ones
    assert(treeNode->OperGet() == GT_CAST);
    assert(!treeNode->gtOverflow());
    regNumber targetReg = treeNode->gtRegNum;
    assert(genIsValidFloatReg(targetReg));
    GenTreePtr op1 = treeNode->gtOp.gtOp1;
    assert(!op1->isContained()); // Cannot be contained
    assert(genIsValidIntReg(op1->gtRegNum)); // Must be a valid int reg.
    var_types dstType = treeNode->CastToType();
    var_types srcType = op1->TypeGet();
    assert(!varTypeIsFloating(srcType) && varTypeIsFloating(dstType));
    // force the srcType to unsigned if GT_UNSIGNED flag is set
    if (treeNode->gtFlags & GTF_UNSIGNED)
        srcType = genUnsignedType(srcType);
    // We should never see a srcType whose size is neither EA_4BYTE or EA_8BYTE
    // For conversions from small types (byte/sbyte/int16/uint16) to float/double,
    // we expect the front-end or lowering phase to have generated two levels of cast.
    emitAttr srcSize = EA_ATTR(genTypeSize(srcType));
    noway_assert((srcSize == EA_4BYTE) || (srcSize == EA_8BYTE));
    // Select the VCVT variant for the src signedness / dst precision.
    instruction insVcvt = INS_invalid;
    if (dstType == TYP_DOUBLE)
        if (srcSize == EA_4BYTE)
            insVcvt = (varTypeIsUnsigned(srcType)) ? INS_vcvt_u2d : INS_vcvt_i2d;
        // else: 64-bit integer sources are not yet supported.
        assert(srcSize == EA_8BYTE);
        NYI_ARM("Casting int64/uint64 to double in genIntToFloatCast");
    // else: float destination.
    assert(dstType == TYP_FLOAT);
    if (srcSize == EA_4BYTE)
        insVcvt = (varTypeIsUnsigned(srcType)) ? INS_vcvt_u2f : INS_vcvt_i2f;
    // else: 64-bit integer sources are not yet supported.
    assert(srcSize == EA_8BYTE);
    NYI_ARM("Casting int64/uint64 to float in genIntToFloatCast");
    genConsumeOperands(treeNode->AsOp());
    assert(insVcvt != INS_invalid);
    // Move the integer bits into the FP register, then convert in place.
    getEmitter()->emitIns_R_R(INS_vmov_i2f, srcSize, treeNode->gtRegNum, op1->gtRegNum);
    getEmitter()->emitIns_R_R(insVcvt, srcSize, treeNode->gtRegNum, treeNode->gtRegNum);
    genProduceReg(treeNode);
2034 //------------------------------------------------------------------------
2035 // genFloatToIntCast: Generate code to cast float/double to int/long
2038 // treeNode - The GT_CAST node
2044 // Cast is a non-overflow conversion.
2045 // The treeNode must have an assigned register.
2046 // SrcType=float/double and DstType= int32/uint32/int64/uint64
2048 void CodeGen::genFloatToIntCast(GenTreePtr treeNode)
2050 // we don't expect to see overflow detecting float/double --> int type conversions here
2051 // as they should have been converted into helper calls by front-end.
2052 assert(treeNode->OperGet() == GT_CAST);
2053 assert(!treeNode->gtOverflow());
2055 regNumber targetReg = treeNode->gtRegNum;
2056 assert(genIsValidIntReg(targetReg)); // Must be a valid int reg.
2058 GenTreePtr op1 = treeNode->gtOp.gtOp1;
2059 assert(!op1->isContained()); // Cannot be contained
2060 assert(genIsValidFloatReg(op1->gtRegNum)); // Must be a valid float reg.
2062 var_types dstType = treeNode->CastToType();
2063 var_types srcType = op1->TypeGet();
2064 assert(varTypeIsFloating(srcType) && !varTypeIsFloating(dstType));
2066 // We should never see a dstType whose size is neither EA_4BYTE or EA_8BYTE
2067 // For conversions to small types (byte/sbyte/int16/uint16) from float/double,
2068 // we expect the front-end or lowering phase to have generated two levels of cast.
2070 emitAttr dstSize = EA_ATTR(genTypeSize(dstType));
2071 noway_assert((dstSize == EA_4BYTE) || (dstSize == EA_8BYTE));
2073 instruction insVcvt = INS_invalid;
2075 if (srcType == TYP_DOUBLE)
2077 if (dstSize == EA_4BYTE)
2079 insVcvt = (varTypeIsUnsigned(dstType)) ? INS_vcvt_d2u : INS_vcvt_d2i;
2083 assert(dstSize == EA_8BYTE);
2084 NYI_ARM("Casting double to int64/uint64 in genIntToFloatCast");
2089 assert(srcType == TYP_FLOAT);
2090 if (dstSize == EA_4BYTE)
2092 insVcvt = (varTypeIsUnsigned(dstType)) ? INS_vcvt_f2u : INS_vcvt_f2i;
2096 assert(dstSize == EA_8BYTE);
2097 NYI_ARM("Casting float to int64/uint64 in genIntToFloatCast");
2101 genConsumeOperands(treeNode->AsOp());
2103 assert(insVcvt != INS_invalid);
2104 getEmitter()->emitIns_R_R(insVcvt, dstSize, op1->gtRegNum, op1->gtRegNum);
2105 getEmitter()->emitIns_R_R(INS_vmov_f2i, dstSize, treeNode->gtRegNum, op1->gtRegNum);
2107 genProduceReg(treeNode);
//------------------------------------------------------------------------
// genEmitHelperCall: Emit a call to a helper function.
//
// Calls the VM helper either directly (BL with an in-range immediate) or
// indirectly through callTargetReg when the address is unknown/out of range.
void CodeGen::genEmitHelperCall(unsigned helper, int argSize, emitAttr retSize, regNumber callTargetReg /*= REG_NA */)
    // Can we call the helper function directly
    void *addr = NULL, **pAddr = NULL;
#if defined(DEBUG) && defined(PROFILING_SUPPORTED)
    // Don't ask VM if it hasn't requested ELT hooks
    if (!compiler->compProfilerHookNeeded && compiler->opts.compJitELTHookEnabled &&
        (helper == CORINFO_HELP_PROF_FCN_ENTER || helper == CORINFO_HELP_PROF_FCN_LEAVE ||
         helper == CORINFO_HELP_PROF_FCN_TAILCALL))
        // Use the JIT-configured profiler hook instead of the VM-provided one.
        addr = compiler->compProfilerMethHnd;
    // Ask the VM for the helper's entry point (or an indirection cell in pAddr).
    addr = compiler->compGetHelperFtn((CorInfoHelpFunc)helper, (void**)&pAddr);
    // Indirect call path: no address, or it doesn't fit a BL immediate.
    if (!addr || !arm_Valid_Imm_For_BL((ssize_t)addr))
        if (callTargetReg == REG_NA)
            // If a callTargetReg has not been explicitly provided, we will use REG_DEFAULT_HELPER_CALL_TARGET, but
            // this is only a valid assumption if the helper call is known to kill REG_DEFAULT_HELPER_CALL_TARGET.
            callTargetReg = REG_DEFAULT_HELPER_CALL_TARGET;
        // Load the address into a register and call through a register
        // (direct address as an immediate, or a load through the indirection cell).
        instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, callTargetReg, (ssize_t)addr);
        getEmitter()->emitIns_R_AI(INS_ldr, EA_PTR_DSP_RELOC, callTargetReg, (ssize_t)pAddr);
        regTracker.rsTrackRegTrash(callTargetReg);
        // Emit the indirect call through callTargetReg.
        getEmitter()->emitIns_Call(emitter::EC_INDIR_R, compiler->eeFindHelper(helper),
                                   INDEBUG_LDISASM_COMMA(nullptr) NULL, // addr
                                   argSize, retSize, gcInfo.gcVarPtrSetCur, gcInfo.gcRegGCrefSetCur,
                                   gcInfo.gcRegByrefSetCur,
                                   BAD_IL_OFFSET, // ilOffset
                                   callTargetReg, // ireg
                                   REG_NA, 0, 0,  // xreg, xmul, disp
                                   emitter::emitNoGChelper(helper),
                                   (CorInfoHelpFunc)helper == CORINFO_HELP_PROF_FCN_LEAVE);
    // else: direct BL to the helper's known, in-range address.
    getEmitter()->emitIns_Call(emitter::EC_FUNC_TOKEN, compiler->eeFindHelper(helper),
                               INDEBUG_LDISASM_COMMA(nullptr) addr, argSize, retSize, gcInfo.gcVarPtrSetCur,
                               gcInfo.gcRegGCrefSetCur, gcInfo.gcRegByrefSetCur, BAD_IL_OFFSET, REG_NA, REG_NA, 0,
                               0, /* ilOffset, ireg, xreg, xmul, disp */
                               emitter::emitNoGChelper(helper),
                               (CorInfoHelpFunc)helper == CORINFO_HELP_PROF_FCN_LEAVE);
    // Helper calls trash the callee-trash set and interruptibility state.
    regTracker.rsTrashRegSet(RBM_CALLEE_TRASH);
    regTracker.rsTrashRegsForGCInterruptability();
//------------------------------------------------------------------------
// genStoreLongLclVar: Generate code to store a non-enregistered long lclVar
//
// Arguments:
// treeNode - A TYP_LONG lclVar node.
//
// Assumptions:
// 'treeNode' must be a TYP_LONG lclVar node for a lclVar that has NOT been promoted.
// Its operand must be a GT_LONG node.
//
void CodeGen::genStoreLongLclVar(GenTree* treeNode)
    emitter* emit = getEmitter();
    GenTreeLclVarCommon* lclNode = treeNode->AsLclVarCommon();
    unsigned lclNum = lclNode->gtLclNum;
    LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]);
    assert(varDsc->TypeGet() == TYP_LONG);
    assert(!varDsc->lvPromoted);
    GenTreePtr op1 = treeNode->gtOp.gtOp1;
    noway_assert(op1->OperGet() == GT_LONG || op1->OperGet() == GT_MUL_LONG);
    genConsumeRegs(op1);
    if (op1->OperGet() == GT_LONG)
        // Definitions of register candidates will have been lowered to 2 int lclVars.
        assert(!treeNode->InReg());
        GenTreePtr loVal = op1->gtGetOp1();
        GenTreePtr hiVal = op1->gtGetOp2();
        // NYI: Contained immediates.
        NYI_IF((loVal->gtRegNum == REG_NA) || (hiVal->gtRegNum == REG_NA),
               "Store of long lclVar with contained immediate");
        // Store the two 32-bit halves to consecutive stack slots.
        emit->emitIns_S_R(ins_Store(TYP_INT), EA_4BYTE, loVal->gtRegNum, lclNum, 0);
        emit->emitIns_S_R(ins_Store(TYP_INT), EA_4BYTE, hiVal->gtRegNum, lclNum, genTypeSize(TYP_INT));
    else if (op1->OperGet() == GT_MUL_LONG)
        assert((op1->gtFlags & GTF_MUL_64RSLT) != 0);
        // GT_MUL_LONG leaves its 64-bit result in the fixed long-return
        // register pair; spill both halves from there.
        getEmitter()->emitIns_S_R(ins_Store(TYP_INT), emitTypeSize(TYP_INT), REG_LNGRET_LO, lclNum, 0);
        getEmitter()->emitIns_S_R(ins_Store(TYP_INT), emitTypeSize(TYP_INT), REG_LNGRET_HI, lclNum,
                                  genTypeSize(TYP_INT));
2231 #endif // _TARGET_ARM_
2233 #endif // !LEGACY_BACKEND