Disable TC for UnmanagedCallersOnly methods. (#46550)
author    Jeremy Koritzinsky <jekoritz@microsoft.com>
          Wed, 6 Jan 2021 18:51:48 +0000 (10:51 -0800)
committer GitHub <noreply@github.com>
          Wed, 6 Jan 2021 18:51:48 +0000 (10:51 -0800)
src/coreclr/vm/jitinterface.cpp
src/coreclr/vm/method.cpp
src/coreclr/vm/method.hpp
src/coreclr/vm/prestub.cpp
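
This commit turns off tiered compilation (TC) for methods marked [UnmanagedCallersOnly]: instead of special-casing the reverse-P/Invoke JIT flag inside UnsafeJitFunction, the runtime now decides once, in MethodDesc::PrepareILBasedCode, whether a method should participate in call counting, and threads that decision as a shouldCountCalls flag through GetPrecompiledCode, JitCompileCode, JitCompileCodeLockedEventWrapper, and JitCompileCodeLocked. The following is a self-contained, hypothetical sketch of that decision; the types are stand-ins invented for illustration, and only the shape of the condition comes from the prestub.cpp hunk below (which compiles the check out on TARGET_X86).

// Hypothetical, self-contained condensation of the call-counting decision this
// commit adds to MethodDesc::PrepareILBasedCode. CallerGCMode mirrors the enum
// values used in the diff; MethodTraits is an invented stand-in for MethodDesc.
#include <cstdio>

enum class CallerGCMode { Unknown, Coop, Preemptive };

struct MethodTraits
{
    bool eligibleForTiering;    // MethodDesc::IsEligibleForTieredCompilation()
    bool unmanagedCallersOnly;  // MethodDesc::HasUnmanagedCallersOnlyAttribute()
};

static bool ShouldCountCalls(const MethodTraits& method, CallerGCMode callerMode)
{
    // Only methods eligible for tiered compilation ever count calls.
    bool shouldCountCalls = method.eligibleForTiering;

    // A method entered from a preemptive-mode caller, or whose caller mode is
    // unknown but which carries [UnmanagedCallersOnly], skips tier 0 entirely,
    // so call counting is switched off up front.
    if (shouldCountCalls
        && (callerMode == CallerGCMode::Preemptive
            || (callerMode == CallerGCMode::Unknown && method.unmanagedCallersOnly)))
    {
        shouldCountCalls = false;
    }
    return shouldCountCalls;
}

int main()
{
    MethodTraits ucoMethod{ /*eligibleForTiering*/ true, /*unmanagedCallersOnly*/ true };
    std::printf("count calls? %d\n", ShouldCountCalls(ucoMethod, CallerGCMode::Unknown)); // prints 0
}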

diff --git a/src/coreclr/vm/jitinterface.cpp b/src/coreclr/vm/jitinterface.cpp
index 10a1d5c..c6529e0 100644
@@ -13024,22 +13024,6 @@ PCODE UnsafeJitFunction(PrepareCodeConfig* config,
 
     flags = GetCompileFlags(ftn, flags, &methodInfo);
 
-#ifdef FEATURE_TIERED_COMPILATION
-    // Clearing all tier flags and mark as optimized if the reverse P/Invoke
-    // flag is used and the function is eligible.
-    if (flags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_REVERSE_PINVOKE)
-        && ftn->IsEligibleForTieredCompilation())
-    {
-        _ASSERTE(config->GetCallerGCMode() != CallerGCMode::Coop);
-
-        // Clear all possible states.
-        flags.Clear(CORJIT_FLAGS::CORJIT_FLAG_TIER0);
-        flags.Clear(CORJIT_FLAGS::CORJIT_FLAG_TIER1);
-
-        config->SetJitSwitchedToOptimized();
-    }
-#endif // FEATURE_TIERED_COMPILATION
-
 #ifdef _DEBUG
     if (!flags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_SKIP_VERIFICATION))
     {
diff --git a/src/coreclr/vm/method.cpp b/src/coreclr/vm/method.cpp
index 1daa371..20191c2 100644
@@ -139,10 +139,10 @@ SIZE_T MethodDesc::SizeOf()
 {
     LIMITED_METHOD_DAC_CONTRACT;
 
-    SIZE_T size = s_ClassificationSizeTable[m_wFlags & 
-        (mdcClassification 
-        | mdcHasNonVtableSlot 
-        | mdcMethodImpl 
+    SIZE_T size = s_ClassificationSizeTable[m_wFlags &
+        (mdcClassification
+        | mdcHasNonVtableSlot
+        | mdcMethodImpl
 #ifdef FEATURE_COMINTEROP
         | mdcHasComPlusCallInfo
 #endif
@@ -151,7 +151,7 @@ SIZE_T MethodDesc::SizeOf()
 #ifdef FEATURE_PREJIT
     if (HasNativeCodeSlot())
     {
-        size += (*dac_cast<PTR_TADDR>(GetAddrOfNativeCodeSlot()) & FIXUP_LIST_MASK) ? 
+        size += (*dac_cast<PTR_TADDR>(GetAddrOfNativeCodeSlot()) & FIXUP_LIST_MASK) ?
             sizeof(FixupListSlot) : 0;
     }
 #endif
diff --git a/src/coreclr/vm/method.hpp b/src/coreclr/vm/method.hpp
index ee1e685..f34b9f1 100644
@@ -2023,16 +2023,23 @@ public:
 
 private:
     PCODE PrepareILBasedCode(PrepareCodeConfig* pConfig);
+#ifdef FEATURE_TIERED_COMPILATION
+    PCODE GetPrecompiledCode(PrepareCodeConfig* pConfig, bool shouldCountCalls);
+    PCODE JitCompileCode(PrepareCodeConfig* pConfig, bool shouldCountCalls);
+    PCODE JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry, bool shouldCountCalls);
+    PCODE JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pLockEntry, bool shouldCountCalls, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags);
+#else
     PCODE GetPrecompiledCode(PrepareCodeConfig* pConfig);
+    PCODE JitCompileCode(PrepareCodeConfig* pConfig);
+    PCODE JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry);
+    PCODE JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pLockEntry, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags);
+#endif
     PCODE GetPrecompiledNgenCode(PrepareCodeConfig* pConfig);
     PCODE GetPrecompiledR2RCode(PrepareCodeConfig* pConfig);
     PCODE GetMulticoreJitCode(PrepareCodeConfig* pConfig, bool* pWasTier0Jit);
     COR_ILMETHOD_DECODER* GetAndVerifyILHeader(PrepareCodeConfig* pConfig, COR_ILMETHOD_DECODER* pIlDecoderMemory);
     COR_ILMETHOD_DECODER* GetAndVerifyMetadataILHeader(PrepareCodeConfig* pConfig, COR_ILMETHOD_DECODER* pIlDecoderMemory);
     COR_ILMETHOD_DECODER* GetAndVerifyNoMetadataILHeader();
-    PCODE JitCompileCode(PrepareCodeConfig* pConfig);
-    PCODE JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry);
-    PCODE JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pLockEntry, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags);
 #endif // DACCESS_COMPILE
 
 #ifdef HAVE_GCCOVER
@@ -3611,7 +3618,7 @@ public:
 #ifdef FEATURE_PREJIT
         if (HasNativeCodeSlot())
         {
-            size += (*dac_cast<PTR_TADDR>(GetAddrOfNativeCodeSlot()) & FIXUP_LIST_MASK) ? 
+            size += (*dac_cast<PTR_TADDR>(GetAddrOfNativeCodeSlot()) & FIXUP_LIST_MASK) ?
                 sizeof(FixupListSlot) : 0;
         }
 #endif
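
The header change above declares the JIT-compile helpers with an extra shouldCountCalls parameter when FEATURE_TIERED_COMPILATION is defined, and keeps the original signatures otherwise, so non-tiered builds are untouched. A minimal, hypothetical illustration of that conditional-signature pattern (the names below are invented for the example, not CoreCLR declarations):

// Toy example of a member function whose signature depends on a feature macro,
// with the call site guarded the same way -- the pattern used in method.hpp.
#include <cstdio>

#define FEATURE_TIERED_COMPILATION 1

struct Compiler
{
#ifdef FEATURE_TIERED_COMPILATION
    void Compile(int methodId, bool shouldCountCalls)
    {
        std::printf("compiling %d (count calls: %d)\n", methodId, shouldCountCalls);
    }
#else
    void Compile(int methodId)
    {
        std::printf("compiling %d\n", methodId);
    }
#endif
};

int main()
{
    Compiler compiler;
#ifdef FEATURE_TIERED_COMPILATION
    compiler.Compile(42, /*shouldCountCalls*/ false);
#else
    compiler.Compile(42);
#endif
}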
diff --git a/src/coreclr/vm/prestub.cpp b/src/coreclr/vm/prestub.cpp
index 7bc994b..52875ca 100644
@@ -361,6 +361,19 @@ PCODE MethodDesc::PrepareILBasedCode(PrepareCodeConfig* pConfig)
     STANDARD_VM_CONTRACT;
     PCODE pCode = NULL;
 
+#if defined(FEATURE_TIERED_COMPILATION)
+    bool shouldCountCalls = pConfig->GetMethodDesc()->IsEligibleForTieredCompilation();
+#if !defined(TARGET_X86)
+    if (shouldCountCalls
+        && (pConfig->GetCallerGCMode() == CallerGCMode::Preemptive
+            || (pConfig->GetCallerGCMode() == CallerGCMode::Unknown
+                && HasUnmanagedCallersOnlyAttribute())))
+    {
+        shouldCountCalls = false;
+    }
+#endif
+#endif
+
     if (pConfig->MayUsePrecompiledCode())
     {
 #ifdef FEATURE_READYTORUN
@@ -393,7 +406,13 @@ PCODE MethodDesc::PrepareILBasedCode(PrepareCodeConfig* pConfig)
 #endif // FEATURE_READYTORUN
 
         if (pCode == NULL)
+        {
+#ifdef FEATURE_TIERED_COMPILATION
+            pCode = GetPrecompiledCode(pConfig, shouldCountCalls);
+#else
             pCode = GetPrecompiledCode(pConfig);
+#endif
+        }
 
 #ifdef FEATURE_PERFMAP
         if (pCode != NULL)
@@ -405,7 +424,12 @@ PCODE MethodDesc::PrepareILBasedCode(PrepareCodeConfig* pConfig)
     {
         LOG((LF_CLASSLOADER, LL_INFO1000000,
             "    In PrepareILBasedCode, calling JitCompileCode\n"));
-        pCode = JitCompileCode(pConfig);
+
+#ifdef FEATURE_TIERED_COMPILATION
+            pCode = JitCompileCode(pConfig, shouldCountCalls);
+#else
+            pCode = JitCompileCode(pConfig);
+#endif
     }
     else
     {
@@ -418,7 +442,11 @@ PCODE MethodDesc::PrepareILBasedCode(PrepareCodeConfig* pConfig)
     return pCode;
 }
 
+#ifdef FEATURE_TIERED_COMPILATION
+PCODE MethodDesc::GetPrecompiledCode(PrepareCodeConfig* pConfig, bool shouldCountCalls)
+#else
 PCODE MethodDesc::GetPrecompiledCode(PrepareCodeConfig* pConfig)
+#endif
 {
     STANDARD_VM_CONTRACT;
     PCODE pCode = NULL;
@@ -441,37 +469,13 @@ PCODE MethodDesc::GetPrecompiledCode(PrepareCodeConfig* pConfig)
         {
             LOG_USING_R2R_CODE(this);
 
-#ifdef FEATURE_TIERED_COMPILATION
-            bool shouldTier = pConfig->GetMethodDesc()->IsEligibleForTieredCompilation();
-#if !defined(TARGET_X86)
-            CallerGCMode callerGcMode = pConfig->GetCallerGCMode();
-            // If the method is eligible for tiering but is being
-            // called from a Preemptive GC Mode thread or the method
-            // has the UnmanagedCallersOnlyAttribute then the Tiered Compilation
-            // should be disabled.
-            if (shouldTier
-                && (callerGcMode == CallerGCMode::Preemptive
-                    || (callerGcMode == CallerGCMode::Unknown
-                        && HasUnmanagedCallersOnlyAttribute())))
-            {
-                NativeCodeVersion codeVersion = pConfig->GetCodeVersion();
-                if (codeVersion.IsDefaultVersion())
-                {
-                    pConfig->GetMethodDesc()->GetLoaderAllocator()->GetCallCountingManager()->DisableCallCounting(codeVersion);
-                }
-                codeVersion.SetOptimizationTier(NativeCodeVersion::OptimizationTierOptimized);
-                shouldTier = false;
-            }
-#endif  // !TARGET_X86
-#endif // FEATURE_TIERED_COMPILATION
-
             if (pConfig->SetNativeCode(pCode, &pCode))
             {
 #ifdef FEATURE_CODE_VERSIONING
                 pConfig->SetGeneratedOrLoadedNewCode();
 #endif
 #ifdef FEATURE_TIERED_COMPILATION
-                if (shouldTier)
+                if (shouldCountCalls)
                 {
                     _ASSERTE(pConfig->GetCodeVersion().GetOptimizationTier() == NativeCodeVersion::OptimizationTier0);
                     pConfig->SetShouldCountCalls();
@@ -710,8 +714,11 @@ COR_ILMETHOD_DECODER* MethodDesc::GetAndVerifyILHeader(PrepareCodeConfig* pConfi
 //
 // This function creates a DeadlockAware list of methods being jitted
 // which prevents us from trying to JIT the same method more that once.
-
+#ifdef FEATURE_TIERED_COMPILATION
+PCODE MethodDesc::JitCompileCode(PrepareCodeConfig* pConfig, bool shouldCountCalls)
+#else
 PCODE MethodDesc::JitCompileCode(PrepareCodeConfig* pConfig)
+#endif
 {
     STANDARD_VM_CONTRACT;
 
@@ -804,7 +811,7 @@ PCODE MethodDesc::JitCompileCode(PrepareCodeConfig* pConfig)
                 {
                 #ifdef FEATURE_TIERED_COMPILATION
                     // Finalize the optimization tier before SetNativeCode() is called
-                    bool shouldCountCalls = wasTier0Jit && pConfig->FinalizeOptimizationTierForTier0Jit();
+                    shouldCountCalls = wasTier0Jit && pConfig->FinalizeOptimizationTierForTier0Jit() && shouldCountCalls;
                 #endif
 
                     if (pConfig->SetNativeCode(pCode, &pCode))
@@ -824,12 +831,20 @@ PCODE MethodDesc::JitCompileCode(PrepareCodeConfig* pConfig)
                 }
             }
 
+#ifdef FEATURE_TIERED_COMPILATION
+            return JitCompileCodeLockedEventWrapper(pConfig, pEntryLock, shouldCountCalls);
+#else
             return JitCompileCodeLockedEventWrapper(pConfig, pEntryLock);
+#endif
         }
     }
 }
 
+#ifdef FEATURE_TIERED_COMPILATION
+PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry, bool shouldCountCalls)
+#else
 PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry)
+#endif
 {
     STANDARD_VM_CONTRACT;
 
@@ -884,7 +899,11 @@ PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, J
         TRACE_LEVEL_VERBOSE,
         CLR_JIT_KEYWORD))
     {
+#ifdef FEATURE_TIERED_COMPILATION
+        pCode = JitCompileCodeLocked(pConfig, pEntry, shouldCountCalls, &sizeOfCode, &flags);
+#else
         pCode = JitCompileCodeLocked(pConfig, pEntry, &sizeOfCode, &flags);
+#endif
     }
     else
     {
@@ -904,7 +923,11 @@ PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, J
             &methodSignature);
 #endif
 
+#ifdef FEATURE_TIERED_COMPILATION
+        pCode = JitCompileCodeLocked(pConfig, pEntry, shouldCountCalls, &sizeOfCode, &flags);
+#else
         pCode = JitCompileCodeLocked(pConfig, pEntry, &sizeOfCode, &flags);
+#endif
 
         // Interpretted methods skip this notification
 #ifdef FEATURE_INTERPRETER
@@ -994,7 +1017,11 @@ PCODE MethodDesc::JitCompileCodeLockedEventWrapper(PrepareCodeConfig* pConfig, J
     return pCode;
 }
 
+#ifdef FEATURE_TIERED_COMPILATION
+PCODE MethodDesc::JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry, bool shouldCountCalls, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags)
+#else
 PCODE MethodDesc::JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEntry* pEntry, ULONG* pSizeOfCode, CORJIT_FLAGS* pFlags)
+#endif
 {
     STANDARD_VM_CONTRACT;
 
@@ -1007,6 +1034,23 @@ PCODE MethodDesc::JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEn
     COR_ILMETHOD_DECODER ilDecoderTemp;
     COR_ILMETHOD_DECODER *pilHeader = GetAndVerifyILHeader(pConfig, &ilDecoderTemp);
     *pFlags = pConfig->GetJitCompilationFlags();
+
+#ifdef FEATURE_TIERED_COMPILATION
+    bool isTier0 = pFlags->IsSet(CORJIT_FLAGS::CORJIT_FLAG_TIER0);
+    // If we've already opted out of call counting, for example in the UnmanagedCallersOnly case,
+    // switch to optimized code.
+    if (!shouldCountCalls)
+    {
+        pFlags->Clear(CORJIT_FLAGS::CORJIT_FLAG_TIER0);
+        pFlags->Clear(CORJIT_FLAGS::CORJIT_FLAG_TIER1);
+
+        if (pConfig->GetMethodDesc()->IsEligibleForTieredCompilation())
+        {
+            pConfig->SetJitSwitchedToOptimized();
+        }
+    }
+#endif
+
     PCODE pOtherCode = NULL;
 
     EX_TRY
@@ -1074,7 +1118,7 @@ PCODE MethodDesc::JitCompileCodeLocked(PrepareCodeConfig* pConfig, JitListLockEn
 
 #ifdef FEATURE_TIERED_COMPILATION
     // Finalize the optimization tier before SetNativeCode() is called
-    bool shouldCountCalls = pFlags->IsSet(CORJIT_FLAGS::CORJIT_FLAG_TIER0) && pConfig->FinalizeOptimizationTierForTier0Jit();
+    shouldCountCalls = isTier0 && pConfig->FinalizeOptimizationTierForTier0Jit() && shouldCountCalls;
 #endif
 
     // Aside from rejit, performing a SetNativeCodeInterlocked at this point
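
Downstream, JitCompileCodeLocked receives the precomputed decision: when call counting was opted out (the UnmanagedCallersOnly case), it clears both tier flags so the method is jitted as optimized code on its first and only compilation, and records the switch via SetJitSwitchedToOptimized for methods that would otherwise have tiered. This replaces the block removed from UnsafeJitFunction above. A hypothetical, self-contained sketch of that flag handling; CorJitFlags and FinalizeTierFlags below are stand-ins, not CoreCLR APIs:

// Sketch of the tier-flag handling added to JitCompileCodeLocked in this commit.
#include <cstdint>
#include <cstdio>

enum CorJitFlags : std::uint32_t
{
    CORJIT_FLAG_TIER0 = 1u << 0,
    CORJIT_FLAG_TIER1 = 1u << 1,
};

static std::uint32_t FinalizeTierFlags(std::uint32_t flags, bool shouldCountCalls,
                                       bool eligibleForTiering, bool* pSwitchedToOptimized)
{
    *pSwitchedToOptimized = false;
    if (!shouldCountCalls)
    {
        // No call counting means no tier transition: drop both tier flags so the
        // JIT produces optimized code immediately.
        flags &= ~static_cast<std::uint32_t>(CORJIT_FLAG_TIER0 | CORJIT_FLAG_TIER1);

        // Record the switch only for methods that would otherwise have tiered,
        // mirroring pConfig->SetJitSwitchedToOptimized() in the diff.
        if (eligibleForTiering)
        {
            *pSwitchedToOptimized = true;
        }
    }
    return flags;
}

int main()
{
    bool switchedToOptimized = false;
    std::uint32_t flags = FinalizeTierFlags(CORJIT_FLAG_TIER0, /*shouldCountCalls*/ false,
                                            /*eligibleForTiering*/ true, &switchedToOptimized);
    std::printf("flags=0x%x switched=%d\n", flags, switchedToOptimized); // flags=0x0 switched=1
}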