int BuildStoreLoc(GenTreeLclVarCommon* tree);
int BuildIndir(GenTreeIndir* indirTree);
int BuildGCWriteBarrier(GenTree* tree);
- int BuildCast(GenTree* tree);
+ int BuildCast(GenTreeCast* cast);
#if defined(_TARGET_XARCH_)
// returns true if the tree can use the read-modify-write memory instruction form
break;
case GT_CAST:
- {
assert(dstCount == 1);
-
- // Non-overflow casts to/from float/double are done using SSE2 instructions
- // and that allow the source operand to be either a reg or memop. Given the
- // fact that casts from small int to float/double are done as two-level casts,
- // the source operand is always guaranteed to be of size 4 or 8 bytes.
- var_types castToType = tree->CastToType();
- GenTree* castOp = tree->gtCast.CastOp();
- var_types castOpType = castOp->TypeGet();
- if (tree->gtFlags & GTF_UNSIGNED)
- {
- castOpType = genUnsignedType(castOpType);
- }
-
- if (varTypeIsLong(castOpType))
- {
- assert((castOp->OperGet() == GT_LONG) && castOp->isContained());
- }
-
- // FloatToIntCast needs a temporary register
- if (varTypeIsFloating(castOpType) && varTypeIsIntOrI(tree))
- {
- buildInternalFloatRegisterDefForNode(tree, RBM_ALLFLOAT);
- setInternalRegsDelayFree = true;
- }
-
- Lowering::CastInfo castInfo;
-
- // Get information about the cast.
- Lowering::getCastDescription(tree, &castInfo);
-
- if (castInfo.requiresOverflowCheck)
- {
- var_types srcType = castOp->TypeGet();
- emitAttr cmpSize = EA_ATTR(genTypeSize(srcType));
-
- // If we cannot store data in an immediate for instructions,
- // then we will need to reserve a temporary register.
-
- if (!castInfo.signCheckOnly) // In case of only sign check, temp regs are not needeed.
- {
- if (castInfo.unsignedSource || castInfo.unsignedDest)
- {
- // check typeMask
- bool canStoreTypeMask = emitter::emitIns_valid_imm_for_alu(castInfo.typeMask);
- if (!canStoreTypeMask)
- {
- buildInternalIntRegisterDefForNode(tree);
- }
- }
- else
- {
- // For comparing against the max or min value
- bool canStoreMaxValue =
- emitter::emitIns_valid_imm_for_cmp(castInfo.typeMax, INS_FLAGS_DONT_CARE);
- bool canStoreMinValue =
- emitter::emitIns_valid_imm_for_cmp(castInfo.typeMin, INS_FLAGS_DONT_CARE);
-
- if (!canStoreMaxValue || !canStoreMinValue)
- {
- buildInternalIntRegisterDefForNode(tree);
- }
- }
- }
- }
- srcCount = BuildOperandUses(castOp);
- buildInternalRegisterUses();
- BuildDef(tree);
- }
- break;
+ srcCount = BuildCast(tree->AsCast());
+ break;
case GT_JTRUE:
srcCount = 0;
#endif // FEATURE_HW_INTRINSICS
case GT_CAST:
- {
- // TODO-ARM64-CQ: Int-To-Int conversions - castOp cannot be a memory op and must have an assigned
- // register.
- // see CodeGen::genIntToIntCast()
-
- // Non-overflow casts to/from float/double are done using SSE2 instructions
- // and that allow the source operand to be either a reg or memop. Given the
- // fact that casts from small int to float/double are done as two-level casts,
- // the source operand is always guaranteed to be of size 4 or 8 bytes.
- var_types castToType = tree->CastToType();
- GenTree* castOp = tree->gtCast.CastOp();
- var_types castOpType = castOp->TypeGet();
- if (tree->gtFlags & GTF_UNSIGNED)
- {
- castOpType = genUnsignedType(castOpType);
- }
-
- // Some overflow checks need a temp reg
-
- Lowering::CastInfo castInfo;
- // Get information about the cast.
- Lowering::getCastDescription(tree, &castInfo);
-
- if (castInfo.requiresOverflowCheck)
- {
- var_types srcType = castOp->TypeGet();
- emitAttr cmpSize = EA_ATTR(genTypeSize(srcType));
-
- // If we cannot store the comparisons in an immediate for either
- // comparing against the max or min value, then we will need to
- // reserve a temporary register.
-
- bool canStoreMaxValue = emitter::emitIns_valid_imm_for_cmp(castInfo.typeMax, cmpSize);
- bool canStoreMinValue = emitter::emitIns_valid_imm_for_cmp(castInfo.typeMin, cmpSize);
-
- if (!canStoreMaxValue || !canStoreMinValue)
- {
- buildInternalIntRegisterDefForNode(tree);
- }
- }
- BuildUse(tree->gtGetOp1());
- srcCount = 1;
- buildInternalRegisterUses();
assert(dstCount == 1);
- BuildDef(tree);
- }
- break;
+ srcCount = BuildCast(tree->AsCast());
+ break;
case GT_NEG:
case GT_NOT:
return srcCount;
}
+//------------------------------------------------------------------------
+// BuildCast: Set the NodeInfo for a GT_CAST.
+//
+// Arguments:
+// cast - The GT_CAST node
+//
+// Return Value:
+// The number of sources consumed by this node.
+//
+int LinearScan::BuildCast(GenTreeCast* cast)
+{
+ GenTree* src = cast->gtGetOp1();
+
+ const var_types srcType = genActualType(src->TypeGet());
+ const var_types castType = cast->gtCastType;
+
+#ifdef _TARGET_ARM_
+ assert(!varTypeIsLong(srcType) || (src->OperIs(GT_LONG) && src->isContained()));
+
+	// Floating point to integer casts require a temporary register.
+ if (varTypeIsFloating(srcType) && !varTypeIsFloating(castType))
+ {
+ buildInternalFloatRegisterDefForNode(cast, RBM_ALLFLOAT);
+ setInternalRegsDelayFree = true;
+ }
+#else
+ // Overflow checking cast from TYP_LONG to TYP_INT requires a temporary register to
+ // store the min and max immediate values that cannot be encoded in the CMP instruction.
+ if (cast->gtOverflow() && varTypeIsLong(srcType) && !cast->IsUnsigned() && (castType == TYP_INT))
+ {
+ buildInternalIntRegisterDefForNode(cast);
+ }
+#endif
+
+ int srcCount = BuildOperandUses(src);
+ buildInternalRegisterUses();
+ BuildDef(cast);
+ return srcCount;
+}
+
#endif // _TARGET_ARMARCH_
#endif // FEATURE_HW_INTRINSICS
case GT_CAST:
- srcCount = BuildCast(tree);
+ assert(dstCount == 1);
+ srcCount = BuildCast(tree->AsCast());
break;
case GT_BITCAST:
// BuildCast: Set the NodeInfo for a GT_CAST.
//
// Arguments:
-// tree - The node of interest
+// cast - The GT_CAST node
//
// Return Value:
// The number of sources consumed by this node.
//
-int LinearScan::BuildCast(GenTree* tree)
+int LinearScan::BuildCast(GenTreeCast* cast)
{
- // TODO-XArch-CQ: Int-To-Int conversions - castOp cannot be a memory op and must have an assigned register.
- // see CodeGen::genIntToIntCast()
-
- // Non-overflow casts to/from float/double are done using SSE2 instructions
- // and that allow the source operand to be either a reg or memop. Given the
- // fact that casts from small int to float/double are done as two-level casts,
- // the source operand is always guaranteed to be of size 4 or 8 bytes.
- var_types castToType = tree->CastToType();
- GenTree* castOp = tree->gtCast.CastOp();
- var_types castOpType = castOp->TypeGet();
- regMaskTP candidates = RBM_NONE;
+ GenTree* src = cast->gtGetOp1();
- if (tree->gtFlags & GTF_UNSIGNED)
- {
- castOpType = genUnsignedType(castOpType);
- }
+ const var_types srcType = genActualType(src->TypeGet());
+ const var_types castType = cast->gtCastType;
+ regMaskTP candidates = RBM_NONE;
#ifdef _TARGET_X86_
- if (varTypeIsByte(castToType))
+ if (varTypeIsByte(castType))
{
candidates = allByteRegs();
}
-#endif // _TARGET_X86_
- // some overflow checks need a temp reg:
- // - GT_CAST from INT64/UINT64 to UINT32
- RefPosition* internalDef = nullptr;
- if (tree->gtOverflow() && (castToType == TYP_UINT))
+ assert(!varTypeIsLong(srcType) || (src->OperIs(GT_LONG) && src->isContained()));
+#else
+ // Overflow checking cast from TYP_(U)LONG to TYP_UINT requires a temporary
+	// register to extract the upper 32 bits of the 64-bit source register.
+ if (cast->gtOverflow() && varTypeIsLong(srcType) && (castType == TYP_UINT))
{
- if (genTypeSize(castOpType) == 8)
- {
- // Here we don't need internal register to be different from targetReg,
- // rather require it to be different from operand's reg.
- buildInternalIntRegisterDefForNode(tree);
- }
+ // Here we don't need internal register to be different from targetReg,
+ // rather require it to be different from operand's reg.
+ buildInternalIntRegisterDefForNode(cast);
}
- int srcCount = BuildOperandUses(castOp, candidates);
+#endif
+
+ int srcCount = BuildOperandUses(src, candidates);
buildInternalRegisterUses();
- BuildDef(tree, candidates);
+ BuildDef(cast, candidates);
return srcCount;
}