if (corType == CORINFO_TYPE_DOUBLE && args->Current()->TypeGet() == TYP_FLOAT)
{
- args->Current() = gtNewCastNode(TYP_DOUBLE, args->Current(), TYP_DOUBLE);
+ args->Current() = gtNewCastNode(TYP_DOUBLE, args->Current(), false, TYP_DOUBLE);
}
else if (corType == CORINFO_TYPE_FLOAT && args->Current()->TypeGet() == TYP_DOUBLE)
{
- args->Current() = gtNewCastNode(TYP_FLOAT, args->Current(), TYP_FLOAT);
+ args->Current() = gtNewCastNode(TYP_FLOAT, args->Current(), false, TYP_FLOAT);
}
// insert any widening or narrowing casts for backwards compatibility
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
- assert(pLookup->constLookup.accessType != IAT_PPVALUE);
+ assert(pLookup->constLookup.accessType != IAT_PPVALUE && pLookup->constLookup.accessType != IAT_RELPVALUE);
if (pLookup->constLookup.accessType == IAT_VALUE)
{
{
CORINFO_GENERIC_HANDLE handle = nullptr;
void* pIndirection = nullptr;
- assert(pLookup->accessType != IAT_PPVALUE);
+ assert(pLookup->accessType != IAT_PPVALUE && pLookup->accessType != IAT_RELPVALUE);
if (pLookup->accessType == IAT_VALUE)
{
}
else
{
- op1->gtFptrVal.gtEntryPoint.addr = nullptr;
+ op1->gtFptrVal.gtEntryPoint.addr = nullptr;
+ op1->gtFptrVal.gtEntryPoint.accessType = IAT_VALUE;
}
#endif
break;
else if (varTypeIsI(wantedType) && (currType == TYP_INT))
{
// Note that this allows TYP_INT to be cast to a TYP_I_IMPL when wantedType is a TYP_BYREF or TYP_REF
- tree = gtNewCastNode(TYP_I_IMPL, tree, TYP_I_IMPL);
+ tree = gtNewCastNode(TYP_I_IMPL, tree, false, TYP_I_IMPL);
}
else if ((wantedType == TYP_INT) && varTypeIsI(currType))
{
// Note that this allows TYP_BYREF or TYP_REF to be cast to a TYP_INT
- tree = gtNewCastNode(TYP_INT, tree, TYP_INT);
+ tree = gtNewCastNode(TYP_INT, tree, false, TYP_INT);
}
#endif // _TARGET_64BIT_
}
#ifndef LEGACY_BACKEND
if (varTypeIsFloating(tree) && varTypeIsFloating(dstTyp) && (dstTyp != tree->gtType))
{
- tree = gtNewCastNode(dstTyp, tree, dstTyp);
+ tree = gtNewCastNode(dstTyp, tree, false, dstTyp);
}
#endif // !LEGACY_BACKEND
noway_assert(varTypeIsFloating(op1));
#else // FEATURE_X87_DOUBLES
+ assert(varTypeIsFloating(op1));
if (op1->TypeGet() != callType)
{
- op1 = gtNewCastNode(callType, op1, callType);
+ op1 = gtNewCastNode(callType, op1, false, callType);
}
#endif // FEATURE_X87_DOUBLES
noway_assert(varTypeIsFloating(op1));
#else // FEATURE_X87_DOUBLES
+ assert(varTypeIsFloating(op1));
+ assert(varTypeIsFloating(op2));
if (op2->TypeGet() != callType)
{
- op2 = gtNewCastNode(callType, op2, callType);
+ op2 = gtNewCastNode(callType, op2, false, callType);
}
if (op1->TypeGet() != callType)
{
- op1 = gtNewCastNode(callType, op1, callType);
+ op1 = gtNewCastNode(callType, op1, false, callType);
}
#endif // FEATURE_X87_DOUBLES
{
assert((varTypeIsFloating(srcTyp) && varTypeIsFloating(dstTyp)) ||
(varTypeIsIntegral(srcTyp) && varTypeIsIntegral(dstTyp)));
- exprToBox = gtNewCastNode(dstTyp, exprToBox, dstTyp);
+ exprToBox = gtNewCastNode(dstTyp, exprToBox, false, dstTyp);
}
op1 = gtNewAssignNode(gtNewOperNode(GT_IND, lclTyp, op1), exprToBox);
}
}
else
{
- call->gtIntrinsic.gtEntryPoint.addr = nullptr;
+ call->gtIntrinsic.gtEntryPoint.addr = nullptr;
+ call->gtIntrinsic.gtEntryPoint.accessType = IAT_VALUE;
}
}
#endif
call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, nullptr, ilOffset);
call->gtCall.gtStubCallStubAddr = callInfo->stubLookup.constLookup.addr;
call->gtFlags |= GTF_CALL_VIRT_STUB;
- assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE);
+ assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE &&
+ callInfo->stubLookup.constLookup.accessType != IAT_RELPVALUE);
if (callInfo->stubLookup.constLookup.accessType == IAT_PVALUE)
{
call->gtCall.gtCallMoreFlags |= GTF_CALL_M_VIRTSTUB_REL_INDIRECT;
if (checkForSmallType && varTypeIsIntegral(callRetTyp) && genTypeSize(callRetTyp) < genTypeSize(TYP_INT))
{
- call = gtNewCastNode(genActualType(callRetTyp), call, callRetTyp);
+ call = gtNewCastNode(genActualType(callRetTyp), call, false, callRetTyp);
}
}
if (genActualType(op1->TypeGet()) != TYP_I_IMPL)
{
// insert an explicit upcast
- op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+ op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
}
#endif // _TARGET_64BIT_
if ((genActualType(op2->TypeGet()) != TYP_I_IMPL))
{
// insert an explicit upcast
- op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+ op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
}
#endif // _TARGET_64BIT_
if (genActualType(op1->TypeGet()) != TYP_I_IMPL)
{
// insert an explicit upcast
- op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+ op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
}
}
else if (genActualType(op2->TypeGet()) != TYP_I_IMPL)
{
// insert an explicit upcast
- op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+ op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
}
#endif // _TARGET_64BIT_
if (genActualType(op1->TypeGet()) != TYP_I_IMPL)
{
// insert an explicit upcast
- op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+ op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
}
else if (genActualType(op2->TypeGet()) != TYP_I_IMPL)
{
// insert an explicit upcast
- op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+ op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
}
type = TYP_I_IMPL;
if (varTypeIsI(op1->TypeGet()) && (genActualType(lclTyp) == TYP_INT))
{
assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
- op1 = gtNewCastNode(TYP_INT, op1, TYP_INT);
+ op1 = gtNewCastNode(TYP_INT, op1, false, TYP_INT);
}
#endif // _TARGET_64BIT_
if ((op1->TypeGet() != op2->TypeGet()) && varTypeIsFloating(op1->gtType) &&
varTypeIsFloating(op2->gtType))
{
- op1 = gtNewCastNode(op2->TypeGet(), op1, op2->TypeGet());
+ op1 = gtNewCastNode(op2->TypeGet(), op1, false, op2->TypeGet());
}
#endif // !FEATURE_X87_DOUBLES
if (op1->TypeGet() != type)
{
// We insert a cast of op1 to 'type'
- op1 = gtNewCastNode(type, op1, type);
+ op1 = gtNewCastNode(type, op1, false, type);
}
if (op2->TypeGet() != type)
{
// We insert a cast of op2 to 'type'
- op2 = gtNewCastNode(type, op2, type);
+ op2 = gtNewCastNode(type, op2, false, type);
}
}
#endif // !FEATURE_X87_DOUBLES
#ifdef _TARGET_64BIT_
if (varTypeIsI(op1->TypeGet()) && (genActualType(op2->TypeGet()) == TYP_INT))
{
- op2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+ op2 = gtNewCastNode(TYP_I_IMPL, op2, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
}
else if (varTypeIsI(op2->TypeGet()) && (genActualType(op1->TypeGet()) == TYP_INT))
{
- op1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+ op1 = gtNewCastNode(TYP_I_IMPL, op1, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
}
#endif // _TARGET_64BIT_
#ifdef _TARGET_64BIT_
if ((op1->TypeGet() == TYP_I_IMPL) && (genActualType(op2->TypeGet()) == TYP_INT))
{
- op2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+ op2 = gtNewCastNode(TYP_I_IMPL, op2, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
}
else if ((op2->TypeGet() == TYP_I_IMPL) && (genActualType(op1->TypeGet()) == TYP_INT))
{
- op1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+ op1 = gtNewCastNode(TYP_I_IMPL, op1, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
}
#endif // _TARGET_64BIT_
if (op1->TypeGet() == TYP_DOUBLE)
{
// We insert a cast of op2 to TYP_DOUBLE
- op2 = gtNewCastNode(TYP_DOUBLE, op2, TYP_DOUBLE);
+ op2 = gtNewCastNode(TYP_DOUBLE, op2, false, TYP_DOUBLE);
}
else if (op2->TypeGet() == TYP_DOUBLE)
{
// We insert a cast of op1 to TYP_DOUBLE
- op1 = gtNewCastNode(TYP_DOUBLE, op1, TYP_DOUBLE);
+ op1 = gtNewCastNode(TYP_DOUBLE, op1, false, TYP_DOUBLE);
}
}
}
#if SMALL_TREE_NODES
if (callNode)
{
- op1 = gtNewCastNodeL(type, op1, lclTyp);
+ op1 = gtNewCastNodeL(type, op1, uns, lclTyp);
}
else
#endif // SMALL_TREE_NODES
{
- op1 = gtNewCastNode(type, op1, lclTyp);
+ op1 = gtNewCastNode(type, op1, uns, lclTyp);
}
if (ovfl)
{
op1->gtFlags |= (GTF_OVERFLOW | GTF_EXCEPT);
}
- if (uns)
- {
- op1->gtFlags |= GTF_UNSIGNED;
- }
impPushOnStack(op1, tiRetVal);
break;
if (varTypeIsI(op2->gtType) && (genActualType(lclTyp) == TYP_INT))
{
assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
- op2 = gtNewCastNode(TYP_INT, op2, TYP_INT);
+ op2 = gtNewCastNode(TYP_INT, op2, false, TYP_INT);
}
// Allow an upcast of op2 from a 32-bit Int into TYP_I_IMPL for x86 JIT compatiblity
//
if (varTypeIsI(lclTyp) && (genActualType(op2->gtType) == TYP_INT))
{
assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
- op2 = gtNewCastNode(TYP_I_IMPL, op2, TYP_I_IMPL);
+ op2 = gtNewCastNode(TYP_I_IMPL, op2, false, TYP_I_IMPL);
}
}
#endif // _TARGET_64BIT_
if (genActualType(op1->gtType) == TYP_INT)
{
assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
- op1 = gtNewCastNode(TYP_I_IMPL, op1, TYP_I_IMPL);
+ op1 = gtNewCastNode(TYP_I_IMPL, op1, false, TYP_I_IMPL);
}
#endif
if ((op1->TypeGet() != op2->TypeGet()) && op2->OperIsConst() && varTypeIsIntOrI(op2->TypeGet()) &&
varTypeIsLong(op1->TypeGet()))
{
- op2 = gtNewCastNode(op1->TypeGet(), op2, op1->TypeGet());
+ op2 = gtNewCastNode(op1->TypeGet(), op2, false, op1->TypeGet());
}
#endif
//
if (varTypeIsI(op2->gtType) && (genActualType(lclTyp) == TYP_INT))
{
- op2 = gtNewCastNode(TYP_INT, op2, TYP_INT);
+ op2 = gtNewCastNode(TYP_INT, op2, false, TYP_INT);
}
// Allow an upcast of op2 from a 32-bit Int into TYP_I_IMPL for x86 JIT compatiblity
//
if (varTypeIsI(lclTyp) && (genActualType(op2->gtType) == TYP_INT))
{
- op2 = gtNewCastNode(TYP_I_IMPL, op2, TYP_I_IMPL);
+ op2 = gtNewCastNode(TYP_I_IMPL, op2, false, TYP_I_IMPL);
}
}
#endif
if ((op1->TypeGet() != op2->TypeGet()) && varTypeIsFloating(op1->gtType) &&
varTypeIsFloating(op2->gtType))
{
- op2 = gtNewCastNode(op1->TypeGet(), op2, op1->TypeGet());
+ op2 = gtNewCastNode(op1->TypeGet(), op2, false, op1->TypeGet());
}
#endif // !FEATURE_X87_DOUBLES
fgCastNeeded(op2, fncRealRetType))
{
// Small-typed return values are normalized by the callee
- op2 = gtNewCastNode(TYP_INT, op2, fncRealRetType);
+ op2 = gtNewCastNode(TYP_INT, op2, false, fncRealRetType);
}
}
{
// Spill clique has decided this should be "native int", but this block only pushes an "int".
// Insert a sign-extension to "native int" so we match the clique.
- verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, TYP_I_IMPL);
+ verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, false, TYP_I_IMPL);
}
// Consider the case where one branch left a 'byref' on the stack and the other leaves
{
// Spill clique has decided this should be "byref", but this block only pushes an "int".
// Insert a sign-extension to "native int" so we match the clique size.
- verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, TYP_I_IMPL);
+ verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, false, TYP_I_IMPL);
}
}
#endif // _TARGET_64BIT_
{
// Spill clique has decided this should be "double", but this block only pushes a "float".
// Insert a cast to "double" so we match the clique.
- verCurrentState.esStack[level].val = gtNewCastNode(TYP_DOUBLE, tree, TYP_DOUBLE);
+ verCurrentState.esStack[level].val = gtNewCastNode(TYP_DOUBLE, tree, false, TYP_DOUBLE);
}
#endif // FEATURE_X87_DOUBLES
continue;
}
- inlArgNode = inlArgInfo[i].argNode = gtNewCastNode(TYP_INT, inlArgNode, sigType);
+ inlArgNode = inlArgInfo[i].argNode = gtNewCastNode(TYP_INT, inlArgNode, false, sigType);
inlArgInfo[i].argIsLclVar = false;
else if (genTypeSize(genActualType(inlArgNode->gtType)) < genTypeSize(sigType))
{
// This should only happen for int -> native int widening
- inlArgNode = inlArgInfo[i].argNode = gtNewCastNode(genActualType(sigType), inlArgNode, sigType);
+ inlArgNode = inlArgInfo[i].argNode =
+ gtNewCastNode(genActualType(sigType), inlArgNode, false, sigType);
inlArgInfo[i].argIsLclVar = false;
//------------------------------------------------------------------------
// IsIntrinsicImplementedByUserCall:
//    Returns true when the given math intrinsic has no target-specific
//    instruction sequence and must be expanded as a call to System.Math.
//
// Arguments:
//    intrinsicId -- the intrinsic being queried
//
// Return Value:
//    true if the intrinsic is implemented via a user call; false if the
//    target can emit it directly (see IsTargetIntrinsic).
//
bool Compiler::IsIntrinsicImplementedByUserCall(CorInfoIntrinsics intrinsicId)
{
    // Currently, if a math intrinsic is not implemented by target-specific
    // instructions, it will be implemented by a System.Math call. In the
    // future, if we turn to implementing some of them with helper calls,
    // this predicate needs to be revisited.
    return !IsTargetIntrinsic(intrinsicId);
}