Remove relocations for vtable chunks (#17147)
diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp
index c23f17c..80b1e87 100644
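Most of the hunks below are mechanical: gtNewCastNode and gtNewCastNodeL now take an explicit "source is unsigned" argument, so call sites pass uns/fUnsigned directly instead of encoding it in the cast's target type (TYP_U_IMPL) or OR-ing GTF_UNSIGNED into the node afterwards. The rest widen asserts to also reject the new IAT_RELPVALUE access type and explicitly initialize gtEntryPoint.accessType when the address is null. As a rough mental model for IAT_RELPVALUE, here is a sketch under the assumption that, as the name suggests, the slot holds a self-relative pointer to the value; the helper below is illustrative, not code from this commit:

    #include <cstdint>

    // Resolve a slot holding a self-relative, pointer-sized offset:
    // effective address = *slot + slot. Because the offset is measured from the
    // slot's own address, it stays valid wherever the image is mapped, so the
    // slot needs no base relocation. (The exact encoding width is a runtime
    // format detail; pointer-sized is assumed here.)
    static void* ResolveRelativeSlot(const void* slot)
    {
        intptr_t delta = *static_cast<const intptr_t*>(slot);
        return reinterpret_cast<void*>(reinterpret_cast<intptr_t>(slot) + delta);
    }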
@@ -901,11 +901,11 @@ GenTreeArgList* Compiler::impPopList(unsigned count, CORINFO_SIG_INFO* sig, GenT
 
             if (corType == CORINFO_TYPE_DOUBLE && args->Current()->TypeGet() == TYP_FLOAT)
             {
-                args->Current() = gtNewCastNode(TYP_DOUBLE, args->Current(), TYP_DOUBLE);
+                args->Current() = gtNewCastNode(TYP_DOUBLE, args->Current(), false, TYP_DOUBLE);
             }
             else if (corType == CORINFO_TYPE_FLOAT && args->Current()->TypeGet() == TYP_DOUBLE)
             {
-                args->Current() = gtNewCastNode(TYP_FLOAT, args->Current(), TYP_FLOAT);
+                args->Current() = gtNewCastNode(TYP_FLOAT, args->Current(), false, TYP_FLOAT);
             }
 
             // insert any widening or narrowing casts for backwards compatibility
@@ -1794,7 +1794,7 @@ GenTree* Compiler::impLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
 
         CORINFO_GENERIC_HANDLE handle       = nullptr;
         void*                  pIndirection = nullptr;
-        assert(pLookup->constLookup.accessType != IAT_PPVALUE);
+        assert(pLookup->constLookup.accessType != IAT_PPVALUE && pLookup->constLookup.accessType != IAT_RELPVALUE);
 
         if (pLookup->constLookup.accessType == IAT_VALUE)
         {
@@ -1829,7 +1829,7 @@ GenTree* Compiler::impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup,
 {
     CORINFO_GENERIC_HANDLE handle       = nullptr;
     void*                  pIndirection = nullptr;
-    assert(pLookup->accessType != IAT_PPVALUE);
+    assert(pLookup->accessType != IAT_PPVALUE && pLookup->accessType != IAT_RELPVALUE);
 
     if (pLookup->accessType == IAT_VALUE)
     {
@@ -1879,7 +1879,8 @@ GenTree* Compiler::impMethodPointer(CORINFO_RESOLVED_TOKEN* pResolvedToken, CORI
             }
             else
             {
-                op1->gtFptrVal.gtEntryPoint.addr = nullptr;
+                op1->gtFptrVal.gtEntryPoint.addr       = nullptr;
+                op1->gtFptrVal.gtEntryPoint.accessType = IAT_VALUE;
             }
 #endif
             break;
@@ -2894,12 +2895,12 @@ GenTree* Compiler::impImplicitIorI4Cast(GenTree* tree, var_types dstTyp)
         else if (varTypeIsI(wantedType) && (currType == TYP_INT))
         {
             // Note that this allows TYP_INT to be cast to a TYP_I_IMPL when wantedType is a TYP_BYREF or TYP_REF
-            tree = gtNewCastNode(TYP_I_IMPL, tree, TYP_I_IMPL);
+            tree = gtNewCastNode(TYP_I_IMPL, tree, false, TYP_I_IMPL);
         }
         else if ((wantedType == TYP_INT) && varTypeIsI(currType))
         {
             // Note that this allows TYP_BYREF or TYP_REF to be cast to a TYP_INT
-            tree = gtNewCastNode(TYP_INT, tree, TYP_INT);
+            tree = gtNewCastNode(TYP_INT, tree, false, TYP_INT);
         }
 #endif // _TARGET_64BIT_
     }
@@ -2918,7 +2919,7 @@ GenTree* Compiler::impImplicitR4orR8Cast(GenTree* tree, var_types dstTyp)
 #ifndef LEGACY_BACKEND
     if (varTypeIsFloating(tree) && varTypeIsFloating(dstTyp) && (dstTyp != tree->gtType))
     {
-        tree = gtNewCastNode(dstTyp, tree, dstTyp);
+        tree = gtNewCastNode(dstTyp, tree, false, dstTyp);
     }
 #endif // !LEGACY_BACKEND
 
@@ -4010,10 +4011,11 @@ GenTree* Compiler::impMathIntrinsic(CORINFO_METHOD_HANDLE method,
                 noway_assert(varTypeIsFloating(op1));
 
 #else // FEATURE_X87_DOUBLES
+                assert(varTypeIsFloating(op1));
 
                 if (op1->TypeGet() != callType)
                 {
-                    op1 = gtNewCastNode(callType, op1, callType);
+                    op1 = gtNewCastNode(callType, op1, false, callType);
                 }
 
 #endif // FEATURE_X87_DOUBLES
@@ -4034,14 +4036,16 @@ GenTree* Compiler::impMathIntrinsic(CORINFO_METHOD_HANDLE method,
                 noway_assert(varTypeIsFloating(op1));
 
 #else // FEATURE_X87_DOUBLES
+                assert(varTypeIsFloating(op1));
+                assert(varTypeIsFloating(op2));
 
                 if (op2->TypeGet() != callType)
                 {
-                    op2 = gtNewCastNode(callType, op2, callType);
+                    op2 = gtNewCastNode(callType, op2, false, callType);
                 }
                 if (op1->TypeGet() != callType)
                 {
-                    op1 = gtNewCastNode(callType, op1, callType);
+                    op1 = gtNewCastNode(callType, op1, false, callType);
                 }
 
 #endif // FEATURE_X87_DOUBLES
@@ -5715,7 +5719,7 @@ void Compiler::impImportAndPushBox(CORINFO_RESOLVED_TOKEN* pResolvedToken)
             {
                 assert((varTypeIsFloating(srcTyp) && varTypeIsFloating(dstTyp)) ||
                        (varTypeIsIntegral(srcTyp) && varTypeIsIntegral(dstTyp)));
-                exprToBox = gtNewCastNode(dstTyp, exprToBox, dstTyp);
+                exprToBox = gtNewCastNode(dstTyp, exprToBox, false, dstTyp);
             }
             op1 = gtNewAssignNode(gtNewOperNode(GT_IND, lclTyp, op1), exprToBox);
         }
@@ -7173,7 +7177,8 @@ var_types Compiler::impImportCall(OPCODE                  opcode,
                     }
                     else
                     {
-                        call->gtIntrinsic.gtEntryPoint.addr = nullptr;
+                        call->gtIntrinsic.gtEntryPoint.addr       = nullptr;
+                        call->gtIntrinsic.gtEntryPoint.accessType = IAT_VALUE;
                     }
                 }
 #endif
@@ -7294,7 +7299,8 @@ var_types Compiler::impImportCall(OPCODE                  opcode,
                     call = gtNewCallNode(CT_USER_FUNC, callInfo->hMethod, callRetTyp, nullptr, ilOffset);
                     call->gtCall.gtStubCallStubAddr = callInfo->stubLookup.constLookup.addr;
                     call->gtFlags |= GTF_CALL_VIRT_STUB;
-                    assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE);
+                    assert(callInfo->stubLookup.constLookup.accessType != IAT_PPVALUE &&
+                           callInfo->stubLookup.constLookup.accessType != IAT_RELPVALUE);
                     if (callInfo->stubLookup.constLookup.accessType == IAT_PVALUE)
                     {
                         call->gtCall.gtCallMoreFlags |= GTF_CALL_M_VIRTSTUB_REL_INDIRECT;
@@ -8344,7 +8350,7 @@ DONE_CALL:
 
             if (checkForSmallType && varTypeIsIntegral(callRetTyp) && genTypeSize(callRetTyp) < genTypeSize(TYP_INT))
             {
-                call = gtNewCastNode(genActualType(callRetTyp), call, callRetTyp);
+                call = gtNewCastNode(genActualType(callRetTyp), call, false, callRetTyp);
             }
         }
 
@@ -9684,7 +9690,7 @@ var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTr
             if (genActualType(op1->TypeGet()) != TYP_I_IMPL)
             {
                 // insert an explicit upcast
-                op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+                op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
             }
 #endif // _TARGET_64BIT_
 
@@ -9699,7 +9705,7 @@ var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTr
             if ((genActualType(op2->TypeGet()) != TYP_I_IMPL))
             {
                 // insert an explicit upcast
-                op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+                op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
             }
 #endif // _TARGET_64BIT_
 
@@ -9723,13 +9729,13 @@ var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTr
             if (genActualType(op1->TypeGet()) != TYP_I_IMPL)
             {
                 // insert an explicit upcast
-                op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+                op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
             }
         }
         else if (genActualType(op2->TypeGet()) != TYP_I_IMPL)
         {
             // insert an explicit upcast
-            op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+            op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
         }
 #endif // _TARGET_64BIT_
 
@@ -9747,12 +9753,12 @@ var_types Compiler::impGetByRefResultType(genTreeOps oper, bool fUnsigned, GenTr
         if (genActualType(op1->TypeGet()) != TYP_I_IMPL)
         {
             // insert an explicit upcast
-            op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+            op1 = *pOp1 = gtNewCastNode(TYP_I_IMPL, op1, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
         }
         else if (genActualType(op2->TypeGet()) != TYP_I_IMPL)
         {
             // insert an explicit upcast
-            op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(fUnsigned ? TYP_U_IMPL : TYP_I_IMPL));
+            op2 = *pOp2 = gtNewCastNode(TYP_I_IMPL, op2, fUnsigned, fUnsigned ? TYP_U_IMPL : TYP_I_IMPL);
         }
 
         type = TYP_I_IMPL;
@@ -10708,7 +10714,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                 if (varTypeIsI(op1->TypeGet()) && (genActualType(lclTyp) == TYP_INT))
                 {
                     assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
-                    op1 = gtNewCastNode(TYP_INT, op1, TYP_INT);
+                    op1 = gtNewCastNode(TYP_INT, op1, false, TYP_INT);
                 }
 #endif // _TARGET_64BIT_
 
@@ -10799,7 +10805,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                 if ((op1->TypeGet() != op2->TypeGet()) && varTypeIsFloating(op1->gtType) &&
                     varTypeIsFloating(op2->gtType))
                 {
-                    op1 = gtNewCastNode(op2->TypeGet(), op1, op2->TypeGet());
+                    op1 = gtNewCastNode(op2->TypeGet(), op1, false, op2->TypeGet());
                 }
 #endif // !FEATURE_X87_DOUBLES
 
@@ -11736,12 +11742,12 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                     if (op1->TypeGet() != type)
                     {
                         // We insert a cast of op1 to 'type'
-                        op1 = gtNewCastNode(type, op1, type);
+                        op1 = gtNewCastNode(type, op1, false, type);
                     }
                     if (op2->TypeGet() != type)
                     {
                         // We insert a cast of op2 to 'type'
-                        op2 = gtNewCastNode(type, op2, type);
+                        op2 = gtNewCastNode(type, op2, false, type);
                     }
                 }
 #endif // !FEATURE_X87_DOUBLES
@@ -12034,11 +12040,11 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 #ifdef _TARGET_64BIT_
                 if (varTypeIsI(op1->TypeGet()) && (genActualType(op2->TypeGet()) == TYP_INT))
                 {
-                    op2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+                    op2 = gtNewCastNode(TYP_I_IMPL, op2, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
                 }
                 else if (varTypeIsI(op2->TypeGet()) && (genActualType(op1->TypeGet()) == TYP_INT))
                 {
-                    op1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+                    op1 = gtNewCastNode(TYP_I_IMPL, op1, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
                 }
 #endif // _TARGET_64BIT_
 
@@ -12134,11 +12140,11 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 #ifdef _TARGET_64BIT_
                 if ((op1->TypeGet() == TYP_I_IMPL) && (genActualType(op2->TypeGet()) == TYP_INT))
                 {
-                    op2 = gtNewCastNode(TYP_I_IMPL, op2, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+                    op2 = gtNewCastNode(TYP_I_IMPL, op2, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
                 }
                 else if ((op2->TypeGet() == TYP_I_IMPL) && (genActualType(op1->TypeGet()) == TYP_INT))
                 {
-                    op1 = gtNewCastNode(TYP_I_IMPL, op1, (var_types)(uns ? TYP_U_IMPL : TYP_I_IMPL));
+                    op1 = gtNewCastNode(TYP_I_IMPL, op1, uns, uns ? TYP_U_IMPL : TYP_I_IMPL);
                 }
 #endif // _TARGET_64BIT_
 
@@ -12187,12 +12193,12 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                         if (op1->TypeGet() == TYP_DOUBLE)
                         {
                             // We insert a cast of op2 to TYP_DOUBLE
-                            op2 = gtNewCastNode(TYP_DOUBLE, op2, TYP_DOUBLE);
+                            op2 = gtNewCastNode(TYP_DOUBLE, op2, false, TYP_DOUBLE);
                         }
                         else if (op2->TypeGet() == TYP_DOUBLE)
                         {
                             // We insert a cast of op1 to TYP_DOUBLE
-                            op1 = gtNewCastNode(TYP_DOUBLE, op1, TYP_DOUBLE);
+                            op1 = gtNewCastNode(TYP_DOUBLE, op1, false, TYP_DOUBLE);
                         }
                     }
                 }
@@ -12483,22 +12489,18 @@ void Compiler::impImportBlockCode(BasicBlock* block)
 #if SMALL_TREE_NODES
                 if (callNode)
                 {
-                    op1 = gtNewCastNodeL(type, op1, lclTyp);
+                    op1 = gtNewCastNodeL(type, op1, uns, lclTyp);
                 }
                 else
 #endif // SMALL_TREE_NODES
                 {
-                    op1 = gtNewCastNode(type, op1, lclTyp);
+                    op1 = gtNewCastNode(type, op1, uns, lclTyp);
                 }
 
                 if (ovfl)
                 {
                     op1->gtFlags |= (GTF_OVERFLOW | GTF_EXCEPT);
                 }
-                if (uns)
-                {
-                    op1->gtFlags |= GTF_UNSIGNED;
-                }
                 impPushOnStack(op1, tiRetVal);
                 break;
 
@@ -12681,14 +12683,14 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                     if (varTypeIsI(op2->gtType) && (genActualType(lclTyp) == TYP_INT))
                     {
                         assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
-                        op2 = gtNewCastNode(TYP_INT, op2, TYP_INT);
+                        op2 = gtNewCastNode(TYP_INT, op2, false, TYP_INT);
                     }
                     // Allow an upcast of op2 from a 32-bit Int into TYP_I_IMPL for x86 JIT compatiblity
                     //
                     if (varTypeIsI(lclTyp) && (genActualType(op2->gtType) == TYP_INT))
                     {
                         assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
-                        op2 = gtNewCastNode(TYP_I_IMPL, op2, TYP_I_IMPL);
+                        op2 = gtNewCastNode(TYP_I_IMPL, op2, false, TYP_I_IMPL);
                     }
                 }
 #endif // _TARGET_64BIT_
@@ -12812,7 +12814,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                 if (genActualType(op1->gtType) == TYP_INT)
                 {
                     assert(!tiVerificationNeeded); // We should have thrown the VerificationException before.
-                    op1 = gtNewCastNode(TYP_I_IMPL, op1, TYP_I_IMPL);
+                    op1 = gtNewCastNode(TYP_I_IMPL, op1, false, TYP_I_IMPL);
                 }
 #endif
 
@@ -14147,7 +14149,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                     if ((op1->TypeGet() != op2->TypeGet()) && op2->OperIsConst() && varTypeIsIntOrI(op2->TypeGet()) &&
                         varTypeIsLong(op1->TypeGet()))
                     {
-                        op2 = gtNewCastNode(op1->TypeGet(), op2, op1->TypeGet());
+                        op2 = gtNewCastNode(op1->TypeGet(), op2, false, op1->TypeGet());
                     }
 #endif
 
@@ -14163,13 +14165,13 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                         //
                         if (varTypeIsI(op2->gtType) && (genActualType(lclTyp) == TYP_INT))
                         {
-                            op2 = gtNewCastNode(TYP_INT, op2, TYP_INT);
+                            op2 = gtNewCastNode(TYP_INT, op2, false, TYP_INT);
                         }
                         // Allow an upcast of op2 from a 32-bit Int into TYP_I_IMPL for x86 JIT compatiblity
                         //
                         if (varTypeIsI(lclTyp) && (genActualType(op2->gtType) == TYP_INT))
                         {
-                            op2 = gtNewCastNode(TYP_I_IMPL, op2, TYP_I_IMPL);
+                            op2 = gtNewCastNode(TYP_I_IMPL, op2, false, TYP_I_IMPL);
                         }
                     }
 #endif
@@ -14181,7 +14183,7 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                     if ((op1->TypeGet() != op2->TypeGet()) && varTypeIsFloating(op1->gtType) &&
                         varTypeIsFloating(op2->gtType))
                     {
-                        op2 = gtNewCastNode(op1->TypeGet(), op2, op1->TypeGet());
+                        op2 = gtNewCastNode(op1->TypeGet(), op2, false, op1->TypeGet());
                     }
 #endif // !FEATURE_X87_DOUBLES
 
@@ -15855,7 +15857,7 @@ bool Compiler::impReturnInstruction(BasicBlock* block, int prefixFlags, OPCODE&
                         fgCastNeeded(op2, fncRealRetType))
                     {
                         // Small-typed return values are normalized by the callee
-                        op2 = gtNewCastNode(TYP_INT, op2, fncRealRetType);
+                        op2 = gtNewCastNode(TYP_INT, op2, false, fncRealRetType);
                     }
                 }
 
@@ -16644,7 +16646,7 @@ SPILLSTACK:
             {
                 // Spill clique has decided this should be "native int", but this block only pushes an "int".
                 // Insert a sign-extension to "native int" so we match the clique.
-                verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, TYP_I_IMPL);
+                verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, false, TYP_I_IMPL);
             }
 
             // Consider the case where one branch left a 'byref' on the stack and the other leaves
@@ -16668,7 +16670,7 @@ SPILLSTACK:
                 {
                     // Spill clique has decided this should be "byref", but this block only pushes an "int".
                     // Insert a sign-extension to "native int" so we match the clique size.
-                    verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, TYP_I_IMPL);
+                    verCurrentState.esStack[level].val = gtNewCastNode(TYP_I_IMPL, tree, false, TYP_I_IMPL);
                 }
             }
 #endif // _TARGET_64BIT_
@@ -16697,7 +16699,7 @@ SPILLSTACK:
             {
                 // Spill clique has decided this should be "double", but this block only pushes a "float".
                 // Insert a cast to "double" so we match the clique.
-                verCurrentState.esStack[level].val = gtNewCastNode(TYP_DOUBLE, tree, TYP_DOUBLE);
+                verCurrentState.esStack[level].val = gtNewCastNode(TYP_DOUBLE, tree, false, TYP_DOUBLE);
             }
 
 #endif // FEATURE_X87_DOUBLES
@@ -18418,7 +18420,7 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo)
                         continue;
                     }
 
-                    inlArgNode = inlArgInfo[i].argNode = gtNewCastNode(TYP_INT, inlArgNode, sigType);
+                    inlArgNode = inlArgInfo[i].argNode = gtNewCastNode(TYP_INT, inlArgNode, false, sigType);
 
                     inlArgInfo[i].argIsLclVar = false;
 
@@ -18435,7 +18437,8 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo)
                 else if (genTypeSize(genActualType(inlArgNode->gtType)) < genTypeSize(sigType))
                 {
                     // This should only happen for int -> native int widening
-                    inlArgNode = inlArgInfo[i].argNode = gtNewCastNode(genActualType(sigType), inlArgNode, sigType);
+                    inlArgNode = inlArgInfo[i].argNode =
+                        gtNewCastNode(genActualType(sigType), inlArgNode, false, sigType);
 
                     inlArgInfo[i].argIsLclVar = false;
 
@@ -19219,9 +19222,9 @@ bool Compiler::IsTargetIntrinsic(CorInfoIntrinsics intrinsicId)
 
 bool Compiler::IsIntrinsicImplementedByUserCall(CorInfoIntrinsics intrinsicId)
 {
-    // Currently, if an math intrisic is not implemented by target-specific
-    // intructions, it will be implemented by a System.Math call. In the
-    // future, if we turn to implementing some of them with helper callers,
+    // Currently, if a math intrinsic is not implemented by target-specific
+    // instructions, it will be implemented by a System.Math call. In the
+    // future, if we turn to implementing some of them with helper calls,
     // this predicate needs to be revisited.
     return !IsTargetIntrinsic(intrinsicId);
 }
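As background for the commit title (a toy demonstration only, not taken from the JIT sources above): an image that stores absolute pointers in its vtable chunks needs a base relocation for each slot in case the image is not loaded at its preferred base, whereas a self-relative offset survives being mapped anywhere without fix-ups. The snippet below, with made-up names, shows that property:

    #include <cassert>
    #include <cstdint>
    #include <cstring>

    struct Blob // stands in for a chunk of image data that may be "rebased"
    {
        int      target;
        intptr_t relDelta; // self-relative "pointer" to target
        int*     absPtr;   // absolute pointer to target
    };

    static int* ReadRel(Blob* b)
    {
        return reinterpret_cast<int*>(reinterpret_cast<uint8_t*>(&b->relDelta) + b->relDelta);
    }

    int main()
    {
        Blob a{};
        a.target   = 42;
        a.relDelta = reinterpret_cast<intptr_t>(&a.target) - reinterpret_cast<intptr_t>(&a.relDelta);
        a.absPtr   = &a.target;

        Blob copy;                        // "load the image at a different base"
        std::memcpy(&copy, &a, sizeof a);

        assert(ReadRel(&copy) == &copy.target); // relative slot: correct with no fix-up
        assert(copy.absPtr == &a.target);       // absolute slot: still the old address, would need a relocation
        return 0;
    }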