Don't suspend for debugger while holding the slot backpatching lock (#40060)
authorKoundinya Veluri <kouvel@users.noreply.github.com>
Tue, 4 Aug 2020 22:02:35 +0000 (18:02 -0400)
committerGitHub <noreply@github.com>
Tue, 4 Aug 2020 22:02:35 +0000 (15:02 -0700)
- Mostly reverted the previous workaround for the issue (commit fc06054a774e28a5a47bbe862adcf03251abb43c)
- Added a forbid-suspend-for-debugger region in preemptive GC mode
- Added a crst holder type that acquires a lock and enters the forbid region
- Where the slot backpatching lock would be taken where cooperative GC mode may be entered inside that lock, the new crst holder is used
- When a suspend for debugger is requested, a thread in preemptive GC mode that is in the forbid region is considered not yet suspended and is synced later similarly to threads in cooperative GC mode

Fixes https://github.com/dotnet/runtime/issues/37278

17 files changed:
src/coreclr/src/debug/ee/debugger.cpp
src/coreclr/src/vm/callcounting.cpp
src/coreclr/src/vm/codeversion.cpp
src/coreclr/src/vm/crst.cpp
src/coreclr/src/vm/crst.h
src/coreclr/src/vm/fptrstubs.cpp
src/coreclr/src/vm/method.cpp
src/coreclr/src/vm/methoddescbackpatchinfo.cpp
src/coreclr/src/vm/methoddescbackpatchinfo.h
src/coreclr/src/vm/prestub.cpp
src/coreclr/src/vm/rejit.cpp
src/coreclr/src/vm/threads.cpp
src/coreclr/src/vm/threads.h
src/coreclr/src/vm/threads.inl
src/coreclr/src/vm/threadsuspend.cpp
src/coreclr/src/vm/tieredcompilation.cpp
src/coreclr/src/vm/virtualcallstub.cpp

index 56ecf03..5ead858 100644 (file)
@@ -15242,15 +15242,6 @@ HRESULT Debugger::FuncEvalSetup(DebuggerIPCE_FuncEvalInfo *pEvalInfo,
         return CORDBG_E_FUNC_EVAL_BAD_START_POINT;
     }
 
-    if (MethodDescBackpatchInfoTracker::IsLockOwnedByAnyThread())
-    {
-        // A thread may have suspended for the debugger while holding the slot backpatching lock while trying to enter
-        // cooperative GC mode. If the FuncEval calls a method that is eligible for slot backpatching (virtual or interface
-        // methods that are eligible for tiering), the FuncEval may deadlock on trying to acquire the same lock. Fail the
-        // FuncEval to avoid the issue.
-        return CORDBG_E_FUNC_EVAL_BAD_START_POINT;
-    }
-
     // Create a DebuggerEval to hold info about this eval while its in progress. Constructor copies the thread's
     // CONTEXT.
     DebuggerEval *pDE = new (interopsafe, nothrow) DebuggerEval(filterContext, pEvalInfo, fInException);
index 45edd54..8c3678b 100644 (file)
@@ -824,8 +824,7 @@ void CallCountingManager::CompleteCallCounting()
     {
         CodeVersionManager *codeVersionManager = appDomain->GetCodeVersionManager();
 
-        MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
-        MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
+        MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
 
         // Backpatching entry point slots requires cooperative GC mode, see
         // MethodDescBackpatchInfoTracker::Backpatch_Locked(). The code version manager's table lock is an unsafe lock that
@@ -956,8 +955,7 @@ void CallCountingManager::StopAndDeleteAllCallCountingStubs()
     TieredCompilationManager *tieredCompilationManager = GetAppDomain()->GetTieredCompilationManager();
     bool scheduleTieringBackgroundWork = false;
     {
-        MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
-        MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
+        MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
 
         ThreadSuspend::SuspendEE(ThreadSuspend::SUSPEND_OTHER);
         struct AutoRestartEE
index 5ae3940..78f2ede 100644 (file)
@@ -1759,7 +1759,8 @@ PCODE CodeVersionManager::PublishVersionableCodeIfNecessary(
         do
         {
             bool mayHaveEntryPointSlotsToBackpatch = doPublish && pMethodDesc->MayHaveEntryPointSlotsToBackpatch();
-            MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder(mayHaveEntryPointSlotsToBackpatch);
+            MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder(
+                mayHaveEntryPointSlotsToBackpatch);
 
             // Try a faster check to see if we can avoid checking the currently active code version
             // - For the default code version, if a profiler is attached it may be notified of JIT events and may request rejit
index 22f7a1f..cbfb665 100644 (file)
@@ -772,6 +772,91 @@ BOOL CrstBase::IsSafeToTake()
 
 #endif // _DEBUG
 
+CrstBase::CrstAndForbidSuspendForDebuggerHolder::CrstAndForbidSuspendForDebuggerHolder(CrstBase *pCrst)
+    : m_pCrst(pCrst), m_pThreadForExitingForbidRegion(nullptr)
+{
+    CONTRACTL
+    {
+        NOTHROW;
+        GC_TRIGGERS;
+        MODE_PREEMPTIVE;
+    }
+    CONTRACTL_END;
+
+    if (pCrst == nullptr)
+    {
+        return;
+    }
+
+    // Reentrant locks are currently not supported
+    _ASSERTE((pCrst->m_dwFlags & CRST_REENTRANCY) == 0);
+
+    Thread *pThread = GetThread();
+    _ASSERTE(pThread != nullptr);
+    if (pThread->IsInForbidSuspendForDebuggerRegion())
+    {
+        AcquireLock(pCrst);
+        return;
+    }
+
+    while (true)
+    {
+        // Enter the forbid region and make that state change visible to other threads (memory barrier) before checking for the
+        // TS_DebugSuspendPending state. The other interacting thread in SysStartSuspendForDebug(), sets TS_DebugSuspendPending
+        // and makes that state change visible to other threads (memory barrier) before checking for whether this thread is in
+        // the forbid region. This ensures that in race conditions where both threads update the respective state and make the
+        // state change visible to other threads, at least one of those threads will see the state change made by the other
+        // thread. If this thread sees the state change (TS_DebugSuspendPending), it will avoid entering the forbid region by
+        // exiting the lock and pulsing the GC mode to try suspending for the debugger. If SysStartSuspendForDebug() sees the
+        // state change (that this thread is in the forbid region), then it will flag this thread appropriately to sync for
+        // suspend, and the debugger will later identify this thread as synced after this thread leaves the forbid region.
+        //
+        // The forbid region could be entered after acquiring the lock, but an additional memory barrier would be necessary. It
+        // is entered before the lock just to make use of the implicit memory barrier from acquiring the lock. It is anyway a
+        // prerequisite for entering a lock along with entering a forbid-suspend-for-debugger region, that the lock is not held
+        // for too long such that the thread can suspend for the debugger in reasonable time.
+        pThread->EnterForbidSuspendForDebuggerRegion();
+        AcquireLock(pCrst); // implicit full memory barrier on all supported platforms
+
+        // This can be an opportunistic check (instead of a volatile load), because if the GC mode change below does not suspend
+        // for the debugger (which is also possible with a volatile load), it will just loop around and try again if necessary
+        if (!pThread->HasThreadStateOpportunistic(Thread::TS_DebugSuspendPending))
+        {
+            m_pThreadForExitingForbidRegion = pThread;
+            return;
+        }
+
+        // Cannot enter the forbid region when a suspend for the debugger is pending because there are likely to be subsequent
+        // changes to the GC mode inside the lock, and this thread needs to suspend for the debugger in reasonable time. Exit
+        // the lock and pulse the GC mode to suspend for the debugger.
+        ReleaseLock(pCrst);
+        pThread->ExitForbidSuspendForDebuggerRegion();
+        GCX_COOP();
+    }
+}
+
+CrstBase::CrstAndForbidSuspendForDebuggerHolder::~CrstAndForbidSuspendForDebuggerHolder()
+{
+    CONTRACTL
+    {
+        NOTHROW;
+        GC_NOTRIGGER;
+        MODE_PREEMPTIVE;
+    }
+    CONTRACTL_END;
+
+    if (m_pCrst == nullptr)
+    {
+        return;
+    }
+
+    ReleaseLock(m_pCrst);
+    if (m_pThreadForExitingForbidRegion != nullptr)
+    {
+        m_pThreadForExitingForbidRegion->ExitForbidSuspendForDebuggerRegion();
+    }
+}
+
 #endif // !DACCESS_COMPILE
 
 #ifdef TEST_DATA_CONSISTENCY
index 8bdcaae..ed1e393 100644 (file)
@@ -362,13 +362,14 @@ private:
     {
         m_dwFlags = 0;
     }
+
     // ------------------------------- Holders ------------------------------
- public:
-     //
-     // CrstHolder is optimized for the common use that takes the lock in constructor
-     // and releases it in destructor. Users that require all Holder features
-     // can use CrstHolderWithState.
-     //
+public:
+    //
+    // CrstHolder is optimized for the common use that takes the lock in constructor
+    // and releases it in destructor. Users that require all Holder features
+    // can use CrstHolderWithState.
+    //
     class CrstHolder
     {
         CrstBase * m_pCrst;
@@ -397,11 +398,22 @@ private:
     // Generally, it's better to use a regular CrstHolder, and then use the Release() / Acquire() methods on it.
     // This just exists to convert legacy OS Critical Section patterns over to holders.
     typedef DacHolder<CrstBase *, CrstBase::ReleaseLock, CrstBase::AcquireLock, 0, CompareDefault> UnsafeCrstInverseHolder;
+
+    class CrstAndForbidSuspendForDebuggerHolder
+    {
+    private:
+        CrstBase *m_pCrst;
+        Thread *m_pThreadForExitingForbidRegion;
+
+    public:
+        CrstAndForbidSuspendForDebuggerHolder(CrstBase *pCrst);
+        ~CrstAndForbidSuspendForDebuggerHolder();
+    };
 };
 
 typedef CrstBase::CrstHolder CrstHolder;
 typedef CrstBase::CrstHolderWithState CrstHolderWithState;
-
+typedef CrstBase::CrstAndForbidSuspendForDebuggerHolder CrstAndForbidSuspendForDebuggerHolder;
 
 // The CRST.
 class Crst : public CrstBase
index f76b795..cd3dd33 100644 (file)
@@ -151,10 +151,12 @@ PCODE FuncPtrStubs::GetFuncPtrStub(MethodDesc * pMD, PrecodeType type)
 
     if (setTargetAfterAddingToHashTable)
     {
+        GCX_PREEMP();
+
         _ASSERTE(pMD->IsVersionableWithVtableSlotBackpatch());
 
         PCODE temporaryEntryPoint = pMD->GetTemporaryEntryPoint();
-        MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
+        MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCPreemp slotBackpatchLockHolder;
 
         // Set the funcptr stub's entry point to the current entry point inside the lock and after the funcptr stub is exposed,
         // to synchronize with backpatching in MethodDesc::BackpatchEntryPointSlots()
index e378fad..3fa6028 100644 (file)
@@ -4903,8 +4903,10 @@ void MethodDesc::RecordAndBackpatchEntryPointSlot(
 {
     WRAPPER_NO_CONTRACT;
 
+    GCX_PREEMP();
+
     LoaderAllocator *mdLoaderAllocator = GetLoaderAllocator();
-    MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
+    MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
 
     RecordAndBackpatchEntryPointSlot_Locked(
         mdLoaderAllocator,
index f6ea27e..efe1d04 100644 (file)
@@ -65,7 +65,6 @@ void EntryPointSlots::Backpatch_Locked(TADDR slot, SlotType slotType, PCODE entr
 // MethodDescBackpatchInfoTracker
 
 CrstStatic MethodDescBackpatchInfoTracker::s_lock;
-bool MethodDescBackpatchInfoTracker::s_isLocked = false;
 
 #ifndef DACCESS_COMPILE
 
@@ -123,30 +122,4 @@ bool MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread()
 }
 #endif // _DEBUG
 
-#ifndef DACCESS_COMPILE
-void MethodDescBackpatchInfoTracker::PollForDebuggerSuspension()
-{
-    CONTRACTL
-    {
-        NOTHROW;
-        GC_TRIGGERS;
-        MODE_PREEMPTIVE;
-    }
-    CONTRACTL_END;
-
-    _ASSERTE(!IsLockOwnedByCurrentThread());
-
-    // If suspension is pending for the debugger, pulse the GC mode to suspend the thread here. Following this call, typically
-    // the lock is acquired and the GC mode is changed, and suspending there would cause FuncEvals to fail (see
-    // Debugger::FuncEvalSetup() at the reference to IsLockOwnedByAnyThread()). Since this thread is in preemptive mode, the
-    // debugger may think it's already suspended and it would be unfortunate to suspend the thread with the lock held.
-    Thread *thread = GetThread();
-    _ASSERTE(thread != nullptr);
-    if (thread->HasThreadState(Thread::TS_DebugSuspendPending))
-    {
-        GCX_COOP();
-    }
-}
-#endif
-
 ////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
index a4d7a8c..2fc5e6c 100644 (file)
@@ -65,7 +65,6 @@ class MethodDescBackpatchInfoTracker
 {
 private:
     static CrstStatic s_lock;
-    static bool s_isLocked;
 
     class BackpatchInfoTrackerHashTraits : public NoRemoveDefaultCrossLoaderAllocatorHashTraits<MethodDesc *, UINT_PTR>
     {
@@ -97,63 +96,55 @@ public:
     static bool IsLockOwnedByCurrentThread();
 #endif
 
-#ifndef DACCESS_COMPILE
 public:
-    static bool IsLockOwnedByAnyThread()
+    // To be used when the thread will remain in preemptive GC mode while holding the lock
+    class ConditionalLockHolderForGCPreemp : private CrstHolderWithState
     {
-        LIMITED_METHOD_CONTRACT;
-        return VolatileLoadWithoutBarrier(&s_isLocked);
-    }
-
-    static void PollForDebuggerSuspension();
-#endif
-
-public:
-    class ConditionalLockHolder : private CrstHolderWithState
-    {
-    private:
-        bool m_isLocked;
-
     public:
-        ConditionalLockHolder(bool acquireLock = true)
+        ConditionalLockHolderForGCPreemp(bool acquireLock = true)
             : CrstHolderWithState(
 #ifndef DACCESS_COMPILE
                 acquireLock ? &s_lock : nullptr
 #else
                 nullptr
 #endif
-                ),
-            m_isLocked(false)
+                )
         {
-            WRAPPER_NO_CONTRACT;
-
-        #ifndef DACCESS_COMPILE
-            if (acquireLock)
+            CONTRACTL
             {
-                _ASSERTE(IsLockOwnedByCurrentThread());
-                _ASSERTE(!s_isLocked);
-                m_isLocked = true;
-                s_isLocked = true;
+                NOTHROW;
+                GC_NOTRIGGER;
+                MODE_PREEMPTIVE;
             }
-        #endif
+            CONTRACTL_END;
         }
 
-        ~ConditionalLockHolder()
-        {
-            WRAPPER_NO_CONTRACT;
+        DISABLE_COPY(ConditionalLockHolderForGCPreemp);
+    };
 
-        #ifndef DACCESS_COMPILE
-            if (m_isLocked)
+#ifndef DACCESS_COMPILE
+public:
+    // To be used when the thread may enter cooperative GC mode while holding the lock. The thread enters a
+    // forbid-suspend-for-debugger region along with acquiring the lock, such that it would not suspend for the debugger while
+    // holding the lock, as that may otherwise cause a FuncEval to deadlock when trying to acquire the lock.
+    class ConditionalLockHolderForGCCoop : private CrstAndForbidSuspendForDebuggerHolder
+    {
+    public:
+        ConditionalLockHolderForGCCoop(bool acquireLock = true)
+            : CrstAndForbidSuspendForDebuggerHolder(acquireLock ? &s_lock : nullptr)
+        {
+            CONTRACTL
             {
-                _ASSERTE(IsLockOwnedByCurrentThread());
-                _ASSERTE(s_isLocked);
-                s_isLocked = false;
+                NOTHROW;
+                GC_NOTRIGGER;
+                MODE_PREEMPTIVE;
             }
-        #endif
+            CONTRACTL_END;
         }
 
-        DISABLE_COPY(ConditionalLockHolder);
+        DISABLE_COPY(ConditionalLockHolderForGCCoop);
     };
+#endif
 
 public:
     MethodDescBackpatchInfoTracker()
index e6ff77a..d44c3cc 100644 (file)
@@ -95,7 +95,8 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, BO
 
     // Only take the lock if the method is versionable with vtable slot backpatch, for recording slots and synchronizing with
     // backpatching slots
-    MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder(isVersionableWithVtableSlotBackpatch);
+    MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder(
+        isVersionableWithVtableSlotBackpatch);
 
     // Get the method entry point inside the lock above to synchronize with backpatching in
     // MethodDesc::BackpatchEntryPointSlots()
index d9077a3..ff180ce 100644 (file)
@@ -643,7 +643,7 @@ HRESULT ReJitManager::UpdateActiveILVersions(
     SHash<CodeActivationBatchTraits>::Iterator endIter = mgrToCodeActivationBatch.End();
 
     {
-        MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
+        MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
 
         for (SHash<CodeActivationBatchTraits>::Iterator iter = beginIter; iter != endIter; iter++)
         {
index ab09248..9dae20e 100644 (file)
@@ -1606,6 +1606,7 @@ Thread::Thread()
     m_DeserializationTracker = NULL;
 
     m_currentPrepareCodeConfig = nullptr;
+    m_isInForbidSuspendForDebuggerRegion = false;
 
 #ifdef _DEBUG
     memset(dangerousObjRefs, 0, sizeof(dangerousObjRefs));
index a56bd5e..03aea2e 100644 (file)
@@ -4750,6 +4750,21 @@ public:
 
 private:
     PrepareCodeConfig *m_currentPrepareCodeConfig;
+
+#ifndef DACCESS_COMPILE
+public:
+    bool IsInForbidSuspendForDebuggerRegion() const
+    {
+        LIMITED_METHOD_CONTRACT;
+        return m_isInForbidSuspendForDebuggerRegion;
+    }
+
+    void EnterForbidSuspendForDebuggerRegion();
+    void ExitForbidSuspendForDebuggerRegion();
+#endif
+
+private:
+    bool m_isInForbidSuspendForDebuggerRegion;
 };
 
 // End of class Thread
index 68fa53b..5dc6ef8 100644 (file)
@@ -195,6 +195,24 @@ inline Thread::CurrentPrepareCodeConfigHolder::~CurrentPrepareCodeConfigHolder()
     config->SetNextInSameThread(nullptr);
 }
 
+inline void Thread::EnterForbidSuspendForDebuggerRegion()
+{
+    WRAPPER_NO_CONTRACT;
+    _ASSERTE(this == GetThread());
+
+    _ASSERTE(!m_isInForbidSuspendForDebuggerRegion);
+    m_isInForbidSuspendForDebuggerRegion = true;
+}
+
+inline void Thread::ExitForbidSuspendForDebuggerRegion()
+{
+    WRAPPER_NO_CONTRACT;
+    _ASSERTE(this == GetThread());
+
+    _ASSERTE(m_isInForbidSuspendForDebuggerRegion);
+    m_isInForbidSuspendForDebuggerRegion = false;
+}
+
 #ifdef HOST_WINDOWS
 inline size_t Thread::GetOffsetOfThreadStatic(void* pThreadStatic)
 {
index 7d632cb..a9db60e 100644 (file)
@@ -2262,6 +2262,8 @@ void Thread::RareDisablePreemptiveGC()
     // waiting for GC
     _ASSERTE ((m_StateNC & Thread::TSNC_OwnsSpinLock) == 0);
 
+    _ASSERTE(!MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread() || IsInForbidSuspendForDebuggerRegion());
+
     if (!GCHeapUtilities::IsGCHeapInitialized())
     {
         goto Exit;
@@ -2561,6 +2563,8 @@ void Thread::RareEnablePreemptiveGC()
     // holding a spin lock in coop mode and transit to preemp mode will cause deadlock on GC
     _ASSERTE ((m_StateNC & Thread::TSNC_OwnsSpinLock) == 0);
 
+    _ASSERTE(!MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread() || IsInForbidSuspendForDebuggerRegion());
+
     FastInterlockOr (&m_fPreemptiveGCDisabled, 0);
 
 #if defined(STRESS_HEAP) && defined(_DEBUG)
@@ -2582,7 +2586,7 @@ void Thread::RareEnablePreemptiveGC()
         // for GC, the fact that we are leaving the EE means that it no longer needs to
         // suspend us.  But if we are doing a non-GC suspend, we need to block now.
         // Give the debugger precedence over user suspensions:
-        while (m_State & TS_DebugSuspendPending)
+        while ((m_State & TS_DebugSuspendPending) && !IsInForbidSuspendForDebuggerRegion())
         {
 
 #ifdef DEBUGGING_SUPPORTED
@@ -4609,7 +4613,11 @@ bool Thread::SysStartSuspendForDebug(AppDomain *pAppDomain)
             // and the moment we enable TrapReturningThreads in MarkForSuspension.  However,
             // nothing bad happens if the thread has transitioned to preemptive before marking
             // the thread for suspension; the thread will later be identified as Synced in
-            // SysSweepThreadsForDebug
+            // SysSweepThreadsForDebug.
+            //
+            // If the thread transitions to preemptive mode and into a forbid-suspend-for-debugger
+            // region, SysSweepThreadsForDebug would similarly identify the thread as synced
+            // after it leaves the forbid region.
 #else  // DISABLE_THREADSUSPEND
             // Resume the thread and let it run to a safe point
             thread->ResumeThread();
@@ -4625,23 +4633,32 @@ bool Thread::SysStartSuspendForDebug(AppDomain *pAppDomain)
             // they attempt to re-enter they will trip.
             thread->MarkForSuspension(TS_DebugSuspendPending);
 
+            if (
 #ifdef DISABLE_THREADSUSPEND
-            // There'a a race above between the moment we first check m_fPreemptiveGCDisabled
-            // and the moment we enable TrapReturningThreads in MarkForSuspension.  To account
-            // for that we check whether the thread moved into cooperative mode, and if it had
-            // we mark it as a DebugWillSync thread, that will be handled later in
-            // SysSweepThreadsForDebug
-            if (thread->m_fPreemptiveGCDisabled)
+                // There's a race above between the moment we first check m_fPreemptiveGCDisabled
+                // and the moment we enable TrapReturningThreads in MarkForSuspension.  To account
+                // for that we check whether the thread moved into cooperative mode, and if it had
+                // we mark it as a DebugWillSync thread, that will be handled later in
+                // SysSweepThreadsForDebug.
+                thread->m_fPreemptiveGCDisabled ||
+#endif // DISABLE_THREADSUSPEND
+                // The thread may have been suspended in a forbid-suspend-for-debugger region, or
+                // before the state change to set TS_DebugSuspendPending is made visible to other
+                // threads, the thread may have transitioned into a forbid region. In either case,
+                // flag the thread as TS_DebugWillSync and let SysSweepThreadsForDebug later
+                // identify the thread as synced after the thread leaves the forbid region.
+                thread->IsInForbidSuspendForDebuggerRegion())
             {
                 // Remember that this thread will be running to a safe point
                 FastInterlockIncrement(&m_DebugWillSyncCount);
                 thread->SetThreadState(TS_DebugWillSync);
             }
-#else  // DISABLE_THREADSUSPEND
+
+#ifndef DISABLE_THREADSUSPEND
             if (str == STR_Success) {
                 thread->ResumeThread();
             }
-#endif // DISABLE_THREADSUSPEND
+#endif // !DISABLE_THREADSUSPEND
 
             LOG((LF_CORDB, LL_INFO1000,
                  "[0x%x] SUSPEND: gc enabled.\n", thread->GetThreadId()));
@@ -4720,9 +4737,10 @@ bool Thread::SysSweepThreadsForDebug(bool forceSync)
         // this thread will happen after any earlier writes on a different
         // thread.
         FastInterlockOr(&thread->m_fPreemptiveGCDisabled, 0);
-        if (!thread->m_fPreemptiveGCDisabled)
+        if (!thread->m_fPreemptiveGCDisabled && !thread->IsInForbidSuspendForDebuggerRegion())
         {
-            // If the thread toggled to preemptive mode, then it's synced.
+            // If the thread toggled to preemptive mode and is not in a
+            // forbid-suspend-for-debugger region, then it's synced.
             goto Label_MarkThreadAsSynced;
         }
         else
@@ -4756,7 +4774,7 @@ RetrySuspension:
         else if (str == STR_SwitchedOut)
         {
             // The thread was switched b/c of fiber-mode stuff.
-            if (!thread->m_fPreemptiveGCDisabled)
+            if (!thread->m_fPreemptiveGCDisabled && !thread->IsInForbidSuspendForDebuggerRegion())
             {
                 goto Label_MarkThreadAsSynced;
             }
@@ -4771,15 +4789,27 @@ RetrySuspension:
         }
         else if (!thread->m_fPreemptiveGCDisabled)
         {
-            // If the thread toggled to preemptive mode, then it's synced.
+            // If the thread toggled to preemptive mode and is not in a
+            // forbid-suspend-for-debugger region, then it's synced.
 
             // We can safely resume the thread here b/c it's in PreemptiveMode and the
             // EE will trap anybody trying to re-enter cooperative. So letting it run free
             // won't hurt the runtime.
+            //
+            // If the thread is in a forbid-suspend-for-debugger region, the thread needs to
+            // be resumed to give it a chance to leave the forbid region. The EE will also
+            // trap the thread if it tries to re-enter a forbid region.
             _ASSERTE(str == STR_Success);
             thread->ResumeThread();
 
-            goto Label_MarkThreadAsSynced;
+            if (!thread->IsInForbidSuspendForDebuggerRegion())
+            {
+                goto Label_MarkThreadAsSynced;
+            }
+            else
+            {
+                continue;
+            }
         }
 #if defined(FEATURE_HIJACK) && !defined(TARGET_UNIX)
         // If the thread is in jitted code, HandledJitCase will try to hijack it; and the hijack
index 01c663c..05fc673 100644 (file)
@@ -448,8 +448,7 @@ void TieredCompilationManager::DeactivateTieringDelay()
         COUNT_T methodCount = methodsPendingCounting->GetCount();
         CodeVersionManager *codeVersionManager = GetAppDomain()->GetCodeVersionManager();
 
-        MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
-        MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
+        MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder;
 
         // Backpatching entry point slots requires cooperative GC mode, see
         // MethodDescBackpatchInfoTracker::Backpatch_Locked(). The code version manager's table lock is an unsafe lock that
@@ -825,11 +824,8 @@ void TieredCompilationManager::ActivateCodeVersion(NativeCodeVersion nativeCodeV
     HRESULT hr = S_OK;
     {
         bool mayHaveEntryPointSlotsToBackpatch = pMethod->MayHaveEntryPointSlotsToBackpatch();
-        if (mayHaveEntryPointSlotsToBackpatch)
-        {
-            MethodDescBackpatchInfoTracker::PollForDebuggerSuspension();
-        }
-        MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder(mayHaveEntryPointSlotsToBackpatch);
+        MethodDescBackpatchInfoTracker::ConditionalLockHolderForGCCoop slotBackpatchLockHolder(
+            mayHaveEntryPointSlotsToBackpatch);
 
         // Backpatching entry point slots requires cooperative GC mode, see
         // MethodDescBackpatchInfoTracker::Backpatch_Locked(). The code version manager's table lock is an unsafe lock that
index 828b560..1f05aef 100644 (file)
@@ -2765,7 +2765,7 @@ DispatchHolder *VirtualCallStubManager::GenerateDispatchStub(PCODE            ad
         TADDR slot = holder->stub()->implTargetSlot(&slotType);
         pMD->RecordAndBackpatchEntryPointSlot(m_loaderAllocator, slot, slotType);
 
-        // RecordAndBackpatchEntryPointSlot() takes a lock that would exit and reenter cooperative GC mode
+        // RecordAndBackpatchEntryPointSlot() may exit and reenter cooperative GC mode
         *pMayHaveReenteredCooperativeGCMode = true;
     }
 #endif
@@ -2827,7 +2827,7 @@ DispatchHolder *VirtualCallStubManager::GenerateDispatchStubLong(PCODE
         TADDR slot = holder->stub()->implTargetSlot(&slotType);
         pMD->RecordAndBackpatchEntryPointSlot(m_loaderAllocator, slot, slotType);
 
-        // RecordAndBackpatchEntryPointSlot() takes a lock that would exit and reenter cooperative GC mode
+        // RecordAndBackpatchEntryPointSlot() may exit and reenter cooperative GC mode
         *pMayHaveReenteredCooperativeGCMode = true;
     }
 #endif
@@ -3020,7 +3020,7 @@ ResolveCacheElem *VirtualCallStubManager::GenerateResolveCacheElem(void *addrOfC
             (TADDR)&e->target,
             EntryPointSlots::SlotType_Normal);
 
-        // RecordAndBackpatchEntryPointSlot() takes a lock that would exit and reenter cooperative GC mode
+        // RecordAndBackpatchEntryPointSlot() may exit and reenter cooperative GC mode
         *pMayHaveReenteredCooperativeGCMode = true;
     }
 #endif