Enable thread statics for collectible classes (#19944)
author Jan Vorlicek <janvorli@microsoft.com>
Thu, 4 Oct 2018 08:26:06 +0000 (10:26 +0200)
committer GitHub <noreply@github.com>
Thu, 4 Oct 2018 08:26:06 +0000 (10:26 +0200)
* Enable thread statics for collectible classes

This change removes the checks that prevented the use of thread statics
in collectible classes and implements the runtime support they need.
For collectible classes, the handles that hold the arrays of thread
statics are allocated from the LoaderAllocator instead of using a
global strong handle as in the case of non-collectible classes.
The change closely mirrors what is already done for regular statics;
a sketch of the resulting handle encoding follows below.
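
As a reference for readers of the diff, the sketch below (not part of
the patch) illustrates how the two LOADERHANDLE encodings produced by
LoaderAllocator::AllocateHandle are decoded; the function name
DecodeHandleSketch is purely illustrative.

    // Sketch only: illustrates the handle encoding used in this patch.
    // Low bit set   -> raw OBJECTREF* into the large object-ref table
    //                  (non-collectible case), stored as pointer + 1.
    // Low bit clear -> (index + 1) << 1 into the managed handle table
    //                  kept alive by the collectible LoaderAllocator.
    OBJECTREF DecodeHandleSketch(LOADERHANDLE handle, PTRARRAYREF handleTable)
    {
        if ((((UINT_PTR)handle) & 1) != 0)
        {
            OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1); // direct slot
            return *ptr;
        }
        else
        {
            UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;         // table index
            return handleTable->GetAt(index);
        }
    }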

This change also adds the ability to reuse freed handles in the
LoaderAllocator handle table. Freed handle indices are pushed onto a
stack, and when a new handle allocation is requested, indices from this
stack are used first; a usage sketch follows this paragraph.
Because of the code path from which FreeTLM (which in turn frees the
handles) is called, I had to modify the critical section flags and
refactor the handle allocation so that the managed array representing
the handle table is allocated outside of the critical section.
While touching the code, I also moved the handling of handles that are
not stored in the LoaderAllocator handle tables out of the critical
section, since there is no point in keeping it there.
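
A minimal usage sketch of the new SegmentedHandleIndexStack (the
surrounding helper functions are illustrative, not part of the patch):
AllocateHandle pops a freed index when one is available, and FreeHandle
pushes the freed index back, tolerating a failed Push under OOM.

    // Sketch only: how the freed-index stack is intended to be used.
    SegmentedHandleIndexStack freeHandleIndexes;

    DWORD PickSlotSketch(DWORD slotsUsed)
    {
        if (!freeHandleIndexes.IsEmpty())
            return freeHandleIndexes.Pop();  // reuse a previously freed slot first
        return slotsUsed;                    // otherwise take the next unused slot
    }

    void ReleaseSlotSketch(DWORD index)
    {
        // Push may return false under OOM; the slot is then simply not reused.
        freeHandleIndexes.Push(index);
    }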

13 files changed:
src/vm/ceeload.cpp
src/vm/generics.cpp
src/vm/jithelpers.cpp
src/vm/loaderallocator.cpp
src/vm/loaderallocator.hpp
src/vm/loaderallocator.inl
src/vm/methodtable.cpp
src/vm/methodtablebuilder.cpp
src/vm/threads.cpp
src/vm/threads.h
src/vm/threadstatics.cpp
src/vm/threadstatics.h
src/vm/typedesc.cpp

diff --git a/src/vm/ceeload.cpp b/src/vm/ceeload.cpp
index 5160b79..01ecaca 100644
@@ -41,7 +41,7 @@
 #include "sigbuilder.h"
 #include "metadataexports.h"
 #include "inlinetracking.h"
-
+#include "threads.h"
 
 #ifdef FEATURE_PREJIT
 #include "exceptionhandling.h"
@@ -2904,6 +2904,18 @@ void Module::FreeModuleIndex()
             _ASSERTE(!Module::IsEncodedModuleIndex((SIZE_T)m_ModuleID));
             _ASSERTE(m_ModuleIndex == m_ModuleID->GetModuleIndex());
 
+#ifndef CROSSGEN_COMPILE
+            if (IsCollectible())
+            {
+                ThreadStoreLockHolder tsLock;
+                Thread *pThread = NULL;
+                while ((pThread = ThreadStore::GetThreadList(pThread)) != NULL)
+                {
+                    pThread->DeleteThreadStaticData(m_ModuleIndex);
+                }
+            }
+#endif // CROSSGEN_COMPILE
+
             // Get the ModuleIndex from the DLM and free it
             Module::FreeModuleIndex(m_ModuleIndex);
         }
diff --git a/src/vm/generics.cpp b/src/vm/generics.cpp
index b680549..a92177d 100644
@@ -227,7 +227,6 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
     BOOL fHasRemotingVtsInfo = FALSE;
     BOOL fHasContextStatics = FALSE;
     BOOL fHasGenericsStaticsInfo = pOldMT->HasGenericsStaticsInfo();
-    BOOL fHasThreadStatics = (pOldMT->GetNumThreadStaticFields() > 0);
 
 #ifdef FEATURE_COMINTEROP
     BOOL fHasDynamicInterfaceMap = pOldMT->HasDynamicInterfaceMap();
@@ -240,11 +239,11 @@ ClassLoader::CreateTypeHandleForNonCanonicalGenericInstantiation(
     // Collectible types have some special restrictions
     if (pAllocator->IsCollectible())
     {
-        if (fHasThreadStatics || fHasContextStatics)
+        if (fHasContextStatics)
         {
             ClassLoader::ThrowTypeLoadException(pTypeKey, IDS_CLASSLOAD_COLLECTIBLESPECIALSTATICS);
         }
-        else if (pOldMT->HasFixedAddressVTStatics())
+        if (pOldMT->HasFixedAddressVTStatics())
         {
             ClassLoader::ThrowTypeLoadException(pTypeKey, IDS_CLASSLOAD_COLLECTIBLEFIXEDVTATTR);
         }
diff --git a/src/vm/jithelpers.cpp b/src/vm/jithelpers.cpp
index 9bb6d98..f92e52b 100644
@@ -1935,11 +1935,14 @@ HCIMPL2(void*, JIT_GetSharedNonGCThreadStaticBaseDynamicClass, SIZE_T moduleDoma
 {
     FCALL_CONTRACT;
 
-    // Get the ModuleIndex
-    ModuleIndex index = 
+    // Obtain the DomainLocalModule
+    DomainLocalModule *pDomainLocalModule =
         (Module::IsEncodedModuleIndex(moduleDomainID)) ?
-            Module::IDToIndex(moduleDomainID) :
-            ((DomainLocalModule *)moduleDomainID)->GetModuleIndex();
+        GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
+        (DomainLocalModule *)moduleDomainID;
+
+    // Get the ModuleIndex
+    ModuleIndex index = pDomainLocalModule->GetModuleIndex();
 
     // Get the relevant ThreadLocalModule
     ThreadLocalModule * pThreadLocalModule = ThreadStatics::GetTLMIfExists(index);
@@ -1950,18 +1953,18 @@ HCIMPL2(void*, JIT_GetSharedNonGCThreadStaticBaseDynamicClass, SIZE_T moduleDoma
     { 
         ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
         if (pLocalInfo != NULL)
-            return (void*)pLocalInfo->m_pDynamicEntry->GetNonGCStaticsBasePointer();
+        {
+            PTR_BYTE retval;
+            GET_DYNAMICENTRY_NONGCTHREADSTATICS_BASEPOINTER(pDomainLocalModule->GetDomainFile()->GetModule()->GetLoaderAllocator(),
+                                                            pLocalInfo,
+                                                            &retval);
+            return retval;
+        }
     }
 
     // If the TLM was not allocated or if the class was not marked as initialized
     // then we have to go through the slow path
 
-    // Obtain the DomainLocalModule
-    DomainLocalModule *pDomainLocalModule =
-        (Module::IsEncodedModuleIndex(moduleDomainID)) ?
-            GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
-            (DomainLocalModule *) moduleDomainID;
-   
     // Obtain the Module
     Module * pModule = pDomainLocalModule->GetDomainFile()->GetModule();
 
@@ -1986,11 +1989,14 @@ HCIMPL2(void*, JIT_GetSharedGCThreadStaticBaseDynamicClass, SIZE_T moduleDomainI
 {
     FCALL_CONTRACT;
 
-    // Get the ModuleIndex
-    ModuleIndex index = 
+    // Obtain the DomainLocalModule
+    DomainLocalModule *pDomainLocalModule =
         (Module::IsEncodedModuleIndex(moduleDomainID)) ?
-            Module::IDToIndex(moduleDomainID) :
-            ((DomainLocalModule *)moduleDomainID)->GetModuleIndex();
+        GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
+        (DomainLocalModule *)moduleDomainID;
+
+    // Get the ModuleIndex
+    ModuleIndex index = pDomainLocalModule->GetModuleIndex();
 
     // Get the relevant ThreadLocalModule
     ThreadLocalModule * pThreadLocalModule = ThreadStatics::GetTLMIfExists(index);
@@ -2001,18 +2007,19 @@ HCIMPL2(void*, JIT_GetSharedGCThreadStaticBaseDynamicClass, SIZE_T moduleDomainI
     { 
         ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
         if (pLocalInfo != NULL)
-            return (void*)pLocalInfo->m_pDynamicEntry->GetGCStaticsBasePointer();
+        {
+            PTR_BYTE retval;
+            GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pDomainLocalModule->GetDomainFile()->GetModule()->GetLoaderAllocator(),
+                                                         pLocalInfo,
+                                                         &retval);
+
+            return retval;
+        }
     }
 
     // If the TLM was not allocated or if the class was not marked as initialized
     // then we have to go through the slow path
 
-    // Obtain the DomainLocalModule
-    DomainLocalModule *pDomainLocalModule =
-        (Module::IsEncodedModuleIndex(moduleDomainID)) ?
-            GetAppDomain()->GetDomainLocalBlock()->GetModuleSlot(Module::IDToIndex(moduleDomainID)) :
-            (DomainLocalModule *) moduleDomainID;
-   
     // Obtain the Module
     Module * pModule = pDomainLocalModule->GetDomainFile()->GetModule();
 
@@ -2060,7 +2067,14 @@ HCIMPL1(void*, JIT_GetGenericsNonGCThreadStaticBase, MethodTable *pMT)
     { 
         ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
         if (pLocalInfo != NULL)
-            return (void*)pLocalInfo->m_pDynamicEntry->GetNonGCStaticsBasePointer();
+        {
+            PTR_BYTE retval;
+            GET_DYNAMICENTRY_NONGCSTATICS_BASEPOINTER(pMT->GetLoaderAllocator(), 
+                                                      pLocalInfo, 
+                                                      &retval);
+
+            return retval;        
+        }
     }
     
     // If the TLM was not allocated or if the class was not marked as initialized
@@ -2105,7 +2119,14 @@ HCIMPL1(void*, JIT_GetGenericsGCThreadStaticBase, MethodTable *pMT)
     { 
         ThreadLocalModule::PTR_DynamicClassInfo pLocalInfo = pThreadLocalModule->GetDynamicClassInfoIfInitialized(dwDynamicClassDomainID);
         if (pLocalInfo != NULL)
-            return (void*)pLocalInfo->m_pDynamicEntry->GetGCStaticsBasePointer();
+        {
+            PTR_BYTE retval;
+            GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pMT->GetLoaderAllocator(), 
+                                                         pLocalInfo, 
+                                                         &retval);
+
+            return retval;        
+        }
     }
     
     // If the TLM was not allocated or if the class was not marked as initialized
diff --git a/src/vm/loaderallocator.cpp b/src/vm/loaderallocator.cpp
index cb5f817..46358f7 100644
@@ -701,6 +701,8 @@ BOOL QCALLTYPE LoaderAllocator::Destroy(QCall::LoaderAllocatorHandle pLoaderAllo
     return ret;
 } // LoaderAllocator::Destroy
 
+#define MAX_LOADERALLOCATOR_HANDLE 0x40000000
+
 // Returns NULL if the managed LoaderAllocator object was already collected.
 LOADERHANDLE LoaderAllocator::AllocateHandle(OBJECTREF value)
 {
@@ -714,32 +716,6 @@ LOADERHANDLE LoaderAllocator::AllocateHandle(OBJECTREF value)
 
     LOADERHANDLE retVal;
 
-    GCPROTECT_BEGIN(value);
-    CrstHolder ch(&m_crstLoaderAllocator);
-
-    retVal = AllocateHandle_Unlocked(value);
-    GCPROTECT_END();
-
-    return retVal;
-}
-
-#define MAX_LOADERALLOCATOR_HANDLE 0x40000000
-
-// Returns NULL if the managed LoaderAllocator object was already collected.
-LOADERHANDLE LoaderAllocator::AllocateHandle_Unlocked(OBJECTREF valueUNSAFE)
-{
-    CONTRACTL
-    {
-        THROWS;
-        GC_TRIGGERS;
-        MODE_COOPERATIVE;
-    }
-    CONTRACTL_END;
-    
-    _ASSERTE(m_crstLoaderAllocator.OwnedByCurrentThread());
-    
-    UINT_PTR retVal;
-
     struct _gc
     {
         OBJECTREF value;
@@ -752,57 +728,106 @@ LOADERHANDLE LoaderAllocator::AllocateHandle_Unlocked(OBJECTREF valueUNSAFE)
 
     GCPROTECT_BEGIN(gc);
 
-    gc.value = valueUNSAFE;
+    gc.value = value;
 
+    // The handle table is read locklessly, be careful
+    if (IsCollectible())
     {
-        // The handle table is read locklessly, be careful
-        if (IsCollectible())
+        gc.loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
+        if (gc.loaderAllocator == NULL)
+        {   // The managed LoaderAllocator is already collected, we cannot allocate any exposed managed objects for it
+            retVal = NULL;
+        }
+        else
         {
-            gc.loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
-            if (gc.loaderAllocator == NULL)
-            {   // The managed LoaderAllocator is already collected, we cannot allocate any exposed managed objects for it
-                retVal = NULL;
-            }
-            else
-            {
-                DWORD slotsUsed = gc.loaderAllocator->GetSlotsUsed();
+            DWORD slotsUsed;
+            DWORD numComponents;
 
-                if (slotsUsed > MAX_LOADERALLOCATOR_HANDLE)
+            do
+            {
                 {
-                    COMPlusThrowOM();
+                    CrstHolder ch(&m_crstLoaderAllocator);
+
+                    gc.handleTable = gc.loaderAllocator->GetHandleTable();
+
+                    if (!m_freeHandleIndexesStack.IsEmpty())
+                    {
+                        // Reuse a handle slot that was previously freed
+                        DWORD freeHandleIndex = m_freeHandleIndexesStack.Pop();
+                        gc.handleTable->SetAt(freeHandleIndex, gc.value);
+                        retVal = (UINT_PTR)((freeHandleIndex + 1) << 1);
+                        break;
+                    }
+
+                    slotsUsed = gc.loaderAllocator->GetSlotsUsed();
+
+                    if (slotsUsed > MAX_LOADERALLOCATOR_HANDLE)
+                    {
+                        COMPlusThrowOM();
+                    }
+
+                    numComponents = gc.handleTable->GetNumComponents();
+
+                    if (slotsUsed < numComponents)
+                    {
+                        // The handle table is large enough, allocate next slot from it
+                        gc.handleTable->SetAt(slotsUsed, gc.value);
+                        gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
+                        retVal = (UINT_PTR)((slotsUsed + 1) << 1);
+                        break;
+                    }
                 }
-                gc.handleTable = gc.loaderAllocator->GetHandleTable();
 
-                /* If we need to enlarge the table, do it now. */
-                if (slotsUsed >= gc.handleTable->GetNumComponents())
+                // We need to enlarge the handle table
+                gc.handleTableOld = gc.handleTable;
+
+                DWORD newSize = numComponents * 2;
+                gc.handleTable = (PTRARRAYREF)AllocateObjectArray(newSize, g_pObjectClass);
+
                 {
-                    gc.handleTableOld = gc.handleTable;
+                    CrstHolder ch(&m_crstLoaderAllocator);
 
-                    DWORD newSize = gc.handleTable->GetNumComponents() * 2;
-                    gc.handleTable = (PTRARRAYREF)AllocateObjectArray(newSize, g_pObjectClass);
+                    if (gc.loaderAllocator->GetHandleTable() == gc.handleTableOld)
+                    {
+                        /* Copy out of old array */
+                        memmoveGCRefs(gc.handleTable->GetDataPtr(), gc.handleTableOld->GetDataPtr(), slotsUsed * sizeof(Object *));
+                        gc.loaderAllocator->SetHandleTable(gc.handleTable);
+                    }
+                    else
+                    {
+                        // Another thread has beaten us on enlarging the handle array, use the handle table it has allocated
+                        gc.handleTable = gc.loaderAllocator->GetHandleTable();
+                    }
+
+                    numComponents = gc.handleTable->GetNumComponents();
 
-                    /* Copy out of old array */
-                    memmoveGCRefs(gc.handleTable->GetDataPtr(), gc.handleTableOld->GetDataPtr(), slotsUsed * sizeof(Object *));
-                    gc.loaderAllocator->SetHandleTable(gc.handleTable);
+                    if (slotsUsed < numComponents)
+                    {
+                        // The handle table is large enough, allocate next slot from it
+                        gc.handleTable->SetAt(slotsUsed, gc.value);
+                        gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
+                        retVal = (UINT_PTR)((slotsUsed + 1) << 1);
+                        break;
+                    }
                 }
 
-                gc.handleTable->SetAt(slotsUsed, gc.value);
-                gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
-                retVal = (UINT_PTR)((slotsUsed + 1) << 1);
-            }
-        }
-        else
-        {
-            OBJECTREF* pRef = GetDomain()->AllocateObjRefPtrsInLargeTable(1);
-            SetObjectReference(pRef, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
-            retVal = (((UINT_PTR)pRef) + 1);
+                // Loop in the unlikely case that another thread has beaten us on the handle array enlarging, but
+                // all the slots were used up before the current thread was scheduled.
+            } 
+            while (true); 
         }
     }
+    else
+    {
+        OBJECTREF* pRef = GetDomain()->AllocateObjRefPtrsInLargeTable(1);
+        SetObjectReference(pRef, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
+        retVal = (((UINT_PTR)pRef) + 1);
+    }
 
     GCPROTECT_END();
 
-    return (LOADERHANDLE)retVal;
-} // LoaderAllocator::AllocateHandle_Unlocked
+    return retVal;
+}
 
 OBJECTREF LoaderAllocator::GetHandleValue(LOADERHANDLE handle)
 {
@@ -820,18 +845,32 @@ OBJECTREF LoaderAllocator::GetHandleValue(LOADERHANDLE handle)
     return objRet;
 }
 
-void LoaderAllocator::ClearHandle(LOADERHANDLE handle)
+void LoaderAllocator::FreeHandle(LOADERHANDLE handle)
 {
     CONTRACTL
     {
-        THROWS;
-        GC_TRIGGERS;
-        MODE_COOPERATIVE;
+        NOTHROW;
+        GC_NOTRIGGER;
+        MODE_ANY;
         PRECONDITION(handle != NULL);
     }
     CONTRACTL_END;
 
     SetHandleValue(handle, NULL);
+
+    if ((((UINT_PTR)handle) & 1) == 0)
+    {
+        // The slot value doesn't have the low bit set, so it is an index to the handle table.
+        // In this case, push the index of the handle to the stack of freed indexes for
+        // reuse.
+        CrstHolder ch(&m_crstLoaderAllocator);
+
+        UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
+        // The Push can fail due to OOM. Ignore this failure, it is better than crashing. The
+        // only effect is that the slot will not be reused in the future if the runtime survives
+        // the low memory situation.
+        m_freeHandleIndexesStack.Push((DWORD)index);
+    }
 }
 
 OBJECTREF LoaderAllocator::CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF valueUNSAFE, OBJECTREF compareUNSAFE)
@@ -860,34 +899,32 @@ OBJECTREF LoaderAllocator::CompareExchangeValueInHandle(LOADERHANDLE handle, OBJ
     gc.value = valueUNSAFE;
     gc.compare = compareUNSAFE;
 
-    /* The handle table is read locklessly, be careful */
+    if ((((UINT_PTR)handle) & 1) != 0)
     {
-        CrstHolder ch(&m_crstLoaderAllocator);
-
-        if ((((UINT_PTR)handle) & 1) != 0)
+        OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
+        gc.previous = *ptr;
+        if ((*ptr) == gc.compare)
         {
-            OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
-            gc.previous = *ptr;
-            if ((*ptr) == gc.compare)
-            {
-                SetObjectReference(ptr, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
-            }
+            SetObjectReference(ptr, gc.value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
         }
-        else
-        {
-            _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
+    }
+    else
+    {
+        /* The handle table is read locklessly, be careful */
+        CrstHolder ch(&m_crstLoaderAllocator);
 
-            UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
-            LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
-            PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
+        _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
 
-            gc.previous = handleTable->GetAt(index);
-            if (gc.previous == gc.compare)
-            {
-                handleTable->SetAt(index, gc.value);
-            }
+        UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
+        LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
+        PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
+
+        gc.previous = handleTable->GetAt(index);
+        if (gc.previous == gc.compare)
+        {
+            handleTable->SetAt(index, gc.value);
         }
-    } // End critical section
+    }
 
     retVal = gc.previous;
     GCPROTECT_END();
@@ -899,35 +936,35 @@ void LoaderAllocator::SetHandleValue(LOADERHANDLE handle, OBJECTREF value)
 {
     CONTRACTL
     {
-        THROWS;
-        GC_TRIGGERS;
-        MODE_COOPERATIVE;
+        NOTHROW;
+        GC_NOTRIGGER;
+        MODE_ANY;
         PRECONDITION(handle != NULL);
     }
     CONTRACTL_END;
 
+    GCX_COOP();
+
     GCPROTECT_BEGIN(value);
 
-    // The handle table is read locklessly, be careful
+    // If the slot value does have the low bit set, then it is a simple pointer to the value
+    // Otherwise, we will need a more complicated operation to clear the value.
+    if ((((UINT_PTR)handle) & 1) != 0)
+    {
+        OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
+        SetObjectReference(ptr, value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
+    }
+    else
     {
+        // The handle table is read locklessly, be careful
         CrstHolder ch(&m_crstLoaderAllocator);
 
-        // If the slot value does have the low bit set, then it is a simple pointer to the value
-        // Otherwise, we will need a more complicated operation to clear the value.
-        if ((((UINT_PTR)handle) & 1) != 0)
-        {
-            OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
-            SetObjectReference(ptr, value, IsDomainNeutral() ? NULL : GetDomain()->AsAppDomain());
-        }
-        else
-        {
-            _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
+        _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
 
-            UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
-            LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
-            PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
-            handleTable->SetAt(index, value);
-        }
+        UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
+        LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
+        PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
+        handleTable->SetAt(index, value);
     }
 
     GCPROTECT_END();
@@ -1001,7 +1038,7 @@ void LoaderAllocator::Init(BaseDomain *pDomain, BYTE *pExecutableHeapMemory)
 
     m_pDomain = pDomain;
 
-    m_crstLoaderAllocator.Init(CrstLoaderAllocator);
+    m_crstLoaderAllocator.Init(CrstLoaderAllocator, (CrstFlags)CRST_UNSAFE_COOPGC);
 
     //
     // Initialize the heaps
diff --git a/src/vm/loaderallocator.hpp b/src/vm/loaderallocator.hpp
index abfd4d0..f047076 100644
@@ -93,6 +93,38 @@ public:
     COUNT_T Hash();
 };
 
+// Segmented stack to store freed handle indices
+class SegmentedHandleIndexStack
+{
+    // Segment of the stack
+    struct Segment
+    {
+        static const int Size = 64;
+
+        Segment* m_prev;
+        DWORD    m_data[Size];
+    };
+
+    // Segment containing the TOS
+    Segment * m_TOSSegment = NULL;
+    // One free segment to prevent rapid delete / new if pop / push happens rapidly
+    // at the boundary of two segments.
+    Segment * m_freeSegment = NULL;
+    // Index of the top of stack in the TOS segment
+    int       m_TOSIndex = Segment::Size;
+
+public:
+
+    // Push the value to the stack. If the push cannot be done due to OOM, return false;
+    inline bool Push(DWORD value);
+
+    // Pop the value from the stack
+    inline DWORD Pop();
+
+    // Check if the stack is empty.
+    inline bool IsEmpty();
+};
+
 class StringLiteralMap;
 class VirtualCallStubManager;
 template <typename ELEMENT>
@@ -210,8 +242,9 @@ private:
 
     SList<FailedTypeInitCleanupListItem> m_failedTypeInitCleanupList;
 
+    SegmentedHandleIndexStack m_freeHandleIndexesStack;
+
 #ifndef DACCESS_COMPILE
-    LOADERHANDLE AllocateHandle_Unlocked(OBJECTREF value);
 
 public:
     // CleanupFailedTypeInit is called from AppDomain
@@ -393,7 +426,7 @@ public:
 
     void SetHandleValue(LOADERHANDLE handle, OBJECTREF value);
     OBJECTREF CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF value, OBJECTREF compare);
-    void ClearHandle(LOADERHANDLE handle);
+    void FreeHandle(LOADERHANDLE handle);
 
     // The default implementation is a no-op. Only collectible loader allocators implement this method.
     virtual void RegisterHandleForCleanup(OBJECTHANDLE /* objHandle */) { }
diff --git a/src/vm/loaderallocator.inl b/src/vm/loaderallocator.inl
index 46c253f..327dd3e 100644
@@ -179,5 +179,66 @@ FORCEINLINE OBJECTREF LoaderAllocator::GetHandleValueFastCannotFailType2(LOADERH
 
     return handleTable->GetAt(index);
 }
+
+inline bool SegmentedHandleIndexStack::Push(DWORD value)
+{
+    LIMITED_METHOD_CONTRACT;
+
+    if (m_TOSIndex == Segment::Size)
+    {
+        Segment* segment;
+
+        if (m_freeSegment == NULL)
+        {
+            segment = new (nothrow) Segment();
+            if (segment == NULL)
+            {
+                return false;
+            }
+        }
+        else
+        {
+            segment = m_freeSegment;
+            m_freeSegment = NULL;
+        }
+
+        segment->m_prev = m_TOSSegment;
+        m_TOSSegment = segment;
+
+        m_TOSIndex = 0;
+    }
+
+    m_TOSSegment->m_data[m_TOSIndex++] = value;
+    return true;
+}
+
+inline DWORD SegmentedHandleIndexStack::Pop()
+{
+    LIMITED_METHOD_CONTRACT;
+
+    _ASSERTE(!IsEmpty());
+
+    if (m_TOSIndex == 0)
+    {
+        Segment* prevSegment = m_TOSSegment->m_prev;
+        _ASSERTE(prevSegment != NULL);
+
+        delete m_freeSegment;
+        m_freeSegment = m_TOSSegment;
+
+        m_TOSSegment = prevSegment;
+        m_TOSIndex = Segment::Size;
+    }
+
+    return m_TOSSegment->m_data[--m_TOSIndex];
+}
+
+inline bool SegmentedHandleIndexStack::IsEmpty()
+{
+    LIMITED_METHOD_CONTRACT;
+
+    return (m_TOSSegment == NULL) || ((m_TOSIndex == 0) && (m_TOSSegment->m_prev == NULL));
+}
+
 #endif //  _LOADER_ALLOCATOR_I
 
diff --git a/src/vm/methodtable.cpp b/src/vm/methodtable.cpp
index 53a0be7..c3441d7 100644
@@ -3489,7 +3489,7 @@ void MethodTable::DoRunClassInitThrowing()
                 if (hNewInitException != NULL && 
                     InterlockedCompareExchangeT((&pEntry->m_hInitException), hNewInitException, hOrigInitException) != hOrigInitException)
                 {
-                    pEntry->m_pLoaderAllocator->ClearHandle(hNewInitException);
+                    pEntry->m_pLoaderAllocator->FreeHandle(hNewInitException);
                 }
             }
         }
@@ -4042,7 +4042,7 @@ OBJECTREF MethodTable::GetManagedClassObject()
 
         if (FastInterlockCompareExchangePointer(&(EnsureWritablePages(GetWriteableDataForWrite())->m_hExposedClassObject), exposedClassObjectHandle, static_cast<LOADERHANDLE>(NULL)))
         {
-            pLoaderAllocator->ClearHandle(exposedClassObjectHandle);
+            pLoaderAllocator->FreeHandle(exposedClassObjectHandle);
         }
 
         GCPROTECT_END();
diff --git a/src/vm/methodtablebuilder.cpp b/src/vm/methodtablebuilder.cpp
index 56397ca..15ce7d2 100644
@@ -3822,7 +3822,7 @@ VOID    MethodTableBuilder::InitializeFieldDescs(FieldDesc *pFieldDescList,
                 IfFailThrow(COR_E_TYPELOAD);
             }
 
-            if ((fIsThreadStatic || fIsContextStatic || bmtFP->fHasFixedAddressValueTypes) && GetAssembly()->IsCollectible())
+            if ((fIsContextStatic || bmtFP->fHasFixedAddressValueTypes) && GetAssembly()->IsCollectible())
             {
                 if (bmtFP->fHasFixedAddressValueTypes)
                 {
@@ -6529,7 +6529,7 @@ VOID MethodTableBuilder::PlaceInterfaceDeclarationOnClass(
         }
     }
 #endif
-    
+
 #ifdef _DEBUG
     if (bmtInterface->dbg_fShouldInjectInterfaceDuplicates)
     {   // We injected interface duplicates
diff --git a/src/vm/threads.cpp b/src/vm/threads.cpp
index 9a0ebdc..8557f9c 100644
@@ -9136,6 +9136,28 @@ void Thread::DeleteThreadStaticData()
 
 //+----------------------------------------------------------------------------
 //
+//  Method:     Thread::DeleteThreadStaticData   public
+//
+//  Synopsis:   Delete the static data for the given module. This is called
+//              when the AssemblyLoadContext unloads.
+//
+//
+//+----------------------------------------------------------------------------
+
+void Thread::DeleteThreadStaticData(ModuleIndex index)
+{
+    for (SIZE_T i = 0; i < m_TLBTableSize; ++i)
+    {
+        ThreadLocalBlock * pTLB = m_pTLBTable[i];
+        if (pTLB != NULL)
+        {
+            pTLB->FreeTLM(index.m_dwIndex, FALSE /* isThreadShuttingDown */);
+        }
+    }
+}
+
+//+----------------------------------------------------------------------------
+//
 //  Method:     Thread::DeleteThreadStaticData   protected
 //
 //  Synopsis:   Delete the static data for the given appdomain. This is called
diff --git a/src/vm/threads.h b/src/vm/threads.h
index dc46863..3f281c0 100644
@@ -4526,6 +4526,10 @@ public:
     }
     */
 
+    // Called during AssemblyLoadContext teardown to clean up all structures
+    // associated with thread statics for the specific Module
+    void DeleteThreadStaticData(ModuleIndex index);
+
 protected:
     
     // Called during AD teardown to clean up any references this 
diff --git a/src/vm/threadstatics.cpp b/src/vm/threadstatics.cpp
index 501cbbc..7e9a9da 100644
@@ -17,7 +17,7 @@
 
 #ifndef DACCESS_COMPILE
 
-void ThreadLocalBlock::FreeTLM(SIZE_T i)
+void ThreadLocalBlock::FreeTLM(SIZE_T i, BOOL isThreadShuttingdown)
 {
     CONTRACTL
     {
@@ -27,10 +27,20 @@ void ThreadLocalBlock::FreeTLM(SIZE_T i)
         MODE_ANY;
     }
     CONTRACTL_END;
-    _ASSERTE(m_pTLMTable != NULL);
 
-    PTR_ThreadLocalModule pThreadLocalModule = m_pTLMTable[i].pTLM;
-    m_pTLMTable[i].pTLM = NULL;
+    PTR_ThreadLocalModule pThreadLocalModule;
+
+    {
+        SpinLock::Holder lock(&m_TLMTableLock);
+
+        _ASSERTE(m_pTLMTable != NULL);
+        if (i >= m_TLMTableSize)
+        {
+            return;
+        }
+        pThreadLocalModule = m_pTLMTable[i].pTLM;
+        m_pTLMTable[i].pTLM = NULL;
+    }
 
     if (pThreadLocalModule != NULL)
     {
@@ -40,6 +50,20 @@ void ThreadLocalBlock::FreeTLM(SIZE_T i)
             {
                 if (pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry != NULL)
                 {
+                    if (isThreadShuttingdown && (pThreadLocalModule->m_pDynamicClassTable[k].m_dwFlags & ClassInitFlags::COLLECTIBLE_FLAG))
+                    {
+                        ThreadLocalModule::CollectibleDynamicEntry *entry = (ThreadLocalModule::CollectibleDynamicEntry*)pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry;
+                        PTR_LoaderAllocator pLoaderAllocator = entry->m_pLoaderAllocator;
+
+                        if (entry->m_hGCStatics != NULL)
+                        {
+                            pLoaderAllocator->FreeHandle(entry->m_hGCStatics);
+                        }
+                        if (entry->m_hNonGCStatics != NULL)
+                        {
+                            pLoaderAllocator->FreeHandle(entry->m_hNonGCStatics);
+                        }
+                    }
                     delete pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry;
                     pThreadLocalModule->m_pDynamicClassTable[k].m_pDynamicEntry = NULL;
                 }
@@ -70,7 +94,7 @@ void ThreadLocalBlock::FreeTable()
         {
             if (m_pTLMTable[i].pTLM != NULL)
             {
-                FreeTLM(i);
+                FreeTLM(i, TRUE /* isThreadShuttingDown */);
             }
         }
 
@@ -119,19 +143,23 @@ void ThreadLocalBlock::EnsureModuleIndex(ModuleIndex index)
     // Zero out the new TLM table
     memset(pNewModuleSlots, 0 , sizeof(TLMTableEntry) * aModuleIndices);
 
-    if (m_pTLMTable != NULL)
-    {
-        memcpy(pNewModuleSlots, m_pTLMTable, sizeof(TLMTableEntry) * m_TLMTableSize);
-    }
-    else
+    PTR_TLMTableEntry pOldModuleSlots = m_pTLMTable;
+
     {
-        _ASSERTE(m_TLMTableSize == 0);
-    }
+        SpinLock::Holder lock(&m_TLMTableLock);
 
-    PTR_TLMTableEntry pOldModuleSlots = m_pTLMTable;
-    
-    m_pTLMTable = pNewModuleSlots;
-    m_TLMTableSize = aModuleIndices;
+        if (m_pTLMTable != NULL)
+        {
+            memcpy(pNewModuleSlots, m_pTLMTable, sizeof(TLMTableEntry) * m_TLMTableSize);
+        }
+        else
+        {
+            _ASSERTE(m_TLMTableSize == 0);
+        }
+
+        m_pTLMTable = pNewModuleSlots;
+        m_TLMTableSize = aModuleIndices;
+    }
 
     if (pOldModuleSlots != NULL)
         delete pOldModuleSlots;
@@ -500,34 +528,72 @@ void    ThreadLocalModule::AllocateDynamicClass(MethodTable *pMT)
     // We need this check because maybe a class had a cctor but no statics
     if (dwStaticBytes > 0 || dwNumHandleStatics > 0)
     {
-        // Collectible types do not support static fields yet
-        if (pMT->Collectible())
-            COMPlusThrow(kNotSupportedException, W("NotSupported_CollectibleNotYet"));
-
         if (pDynamicStatics == NULL)
         {            
+            SIZE_T dynamicEntrySize;
+            if (pMT->Collectible())
+            {
+                dynamicEntrySize = sizeof(CollectibleDynamicEntry);
+            }
+            else
+            {
+                dynamicEntrySize = DynamicEntry::GetOffsetOfDataBlob() + dwStaticBytes;
+            }
+
             // If this allocation fails, we will throw
-            pDynamicStatics = (DynamicEntry*)new BYTE[sizeof(DynamicEntry) + dwStaticBytes];
+            pDynamicStatics = (DynamicEntry*)new BYTE[dynamicEntrySize];
 
 #ifdef FEATURE_64BIT_ALIGNMENT
             // The memory block has be aligned at MAX_PRIMITIVE_FIELD_SIZE to guarantee alignment of statics
-            static_assert_no_msg(sizeof(DynamicEntry) % MAX_PRIMITIVE_FIELD_SIZE == 0);
+            static_assert_no_msg(sizeof(NormalDynamicEntry) % MAX_PRIMITIVE_FIELD_SIZE == 0);
             _ASSERTE(IS_ALIGNED(pDynamicStatics, MAX_PRIMITIVE_FIELD_SIZE));
 #endif
 
             // Zero out the new DynamicEntry
-            memset((BYTE*)pDynamicStatics, 0, sizeof(DynamicEntry) + dwStaticBytes);
+            memset((BYTE*)pDynamicStatics, 0, dynamicEntrySize);
+
+            if (pMT->Collectible())
+            {
+                ((CollectibleDynamicEntry*)pDynamicStatics)->m_pLoaderAllocator = pMT->GetLoaderAllocator();
+            }
 
             // Save the DynamicEntry in the DynamicClassTable
             m_pDynamicClassTable[dwID].m_pDynamicEntry = pDynamicStatics;
         }
 
+        if (pMT->Collectible() && (dwStaticBytes != 0))
+        {
+            OBJECTREF nongcStaticsArray = NULL;
+            GCPROTECT_BEGIN(nongcStaticsArray);
+#ifdef FEATURE_64BIT_ALIGNMENT
+            // Allocate memory with extra alignment only if it is really necessary
+            if (dwStaticBytes >= MAX_PRIMITIVE_FIELD_SIZE)
+                nongcStaticsArray = AllocatePrimitiveArray(ELEMENT_TYPE_I8, (dwStaticBytes + (sizeof(CLR_I8) - 1)) / (sizeof(CLR_I8)));
+            else
+#endif
+                nongcStaticsArray = AllocatePrimitiveArray(ELEMENT_TYPE_U1, dwStaticBytes);
+
+            ((CollectibleDynamicEntry *)pDynamicStatics)->m_hNonGCStatics = pMT->GetLoaderAllocator()->AllocateHandle(nongcStaticsArray);
+            GCPROTECT_END();
+        }
+
         if (dwNumHandleStatics > 0)
         {
-            PTR_ThreadLocalBlock pThreadLocalBlock = GetThread()->m_pThreadLocalBlock;
-            _ASSERTE(pThreadLocalBlock != NULL);
-            pThreadLocalBlock->AllocateStaticFieldObjRefPtrs(dwNumHandleStatics,
-                                                             &pDynamicStatics->m_pGCStatics);
+            if (!pMT->Collectible())
+            {
+                PTR_ThreadLocalBlock pThreadLocalBlock = GetThread()->m_pThreadLocalBlock;
+                _ASSERTE(pThreadLocalBlock != NULL);
+                pThreadLocalBlock->AllocateStaticFieldObjRefPtrs(dwNumHandleStatics,
+                        &((NormalDynamicEntry *)pDynamicStatics)->m_pGCStatics);
+            }
+            else
+            {
+                OBJECTREF gcStaticsArray = NULL;
+                GCPROTECT_BEGIN(gcStaticsArray);
+                gcStaticsArray = AllocateObjectArray(dwNumHandleStatics, g_pObjectClass);
+                ((CollectibleDynamicEntry *)pDynamicStatics)->m_hGCStatics = pMT->GetLoaderAllocator()->AllocateHandle(gcStaticsArray);
+                GCPROTECT_END();
+            }
         }
     }
 }
@@ -552,6 +618,11 @@ void ThreadLocalModule::PopulateClass(MethodTable *pMT)
     if (pMT->IsDynamicStatics())
         AllocateDynamicClass(pMT);
 
+    if (pMT->Collectible())
+    {
+        SetClassFlags(pMT, ClassInitFlags::COLLECTIBLE_FLAG);
+    }
+
     // We need to allocate boxes any value-type statics that are not
     // primitives or enums, because these statics may contain references
     // to objects on the GC heap
@@ -668,6 +739,7 @@ PTR_ThreadLocalModule ThreadStatics::AllocateTLM(Module * pModule)
     }
     CONTRACTL_END;
 
+
     SIZE_T size = pModule->GetThreadLocalModuleSize();
 
     _ASSERTE(size >= ThreadLocalModule::OffsetOfDataBlob());
@@ -681,7 +753,7 @@ PTR_ThreadLocalModule ThreadStatics::AllocateTLM(Module * pModule)
     
     // Zero out the part of memory where the TLM resides
     memset(pThreadLocalModule, 0, size);
-    
+
     return pThreadLocalModule;
 }
 
diff --git a/src/vm/threadstatics.h b/src/vm/threadstatics.h
index 3e61049..b6cd7db 100644
@@ -29,6 +29,7 @@
 #include "field.h"
 #include "methodtable.h"
 #include "threads.h"
+#include "spinlock.h"
 
 // Defines ObjectHandeList type
 #include "specialstatics.h"
@@ -42,8 +43,63 @@ struct ThreadLocalModule
     friend class CheckAsmOffsets; 
     friend struct ThreadLocalBlock;
 
+    // After these macros complete, they may have returned an interior pointer into a gc object. This pointer will have been cast to a byte pointer
+    // It is critically important that no GC is allowed to occur before this pointer is used.
+#define GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, dynamicClassInfoParam, pGCStatics) \
+    {\
+        ThreadLocalModule::PTR_DynamicClassInfo dynamicClassInfo = dac_cast<ThreadLocalModule::PTR_DynamicClassInfo>(dynamicClassInfoParam);\
+        ThreadLocalModule::PTR_DynamicEntry pDynamicEntry = dac_cast<ThreadLocalModule::PTR_DynamicEntry>((ThreadLocalModule::DynamicEntry*)dynamicClassInfo->m_pDynamicEntry); \
+        if ((dynamicClassInfo->m_dwFlags) & ClassInitFlags::COLLECTIBLE_FLAG) \
+        {\
+            PTRARRAYREF objArray;\
+            objArray = (PTRARRAYREF)pLoaderAllocator->GetHandleValueFastCannotFailType2( \
+                                        (dac_cast<ThreadLocalModule::PTR_CollectibleDynamicEntry>(pDynamicEntry))->m_hGCStatics);\
+            *(pGCStatics) = dac_cast<PTR_BYTE>(PTR_READ(PTR_TO_TADDR(OBJECTREFToObject( objArray )) + offsetof(PtrArray, m_Array), objArray->GetNumComponents() * sizeof(void*))) ;\
+        }\
+        else\
+        {\
+            *(pGCStatics) = (dac_cast<ThreadLocalModule::PTR_NormalDynamicEntry>(pDynamicEntry))->GetGCStaticsBasePointer();\
+        }\
+    }\
+
+#define GET_DYNAMICENTRY_NONGCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, dynamicClassInfoParam, pNonGCStatics) \
+    {\
+        ThreadLocalModule::PTR_DynamicClassInfo dynamicClassInfo = dac_cast<ThreadLocalModule::PTR_DynamicClassInfo>(dynamicClassInfoParam);\
+        ThreadLocalModule::PTR_DynamicEntry pDynamicEntry = dac_cast<ThreadLocalModule::PTR_DynamicEntry>((ThreadLocalModule::DynamicEntry*)(dynamicClassInfo)->m_pDynamicEntry); \
+        if (((dynamicClassInfo)->m_dwFlags) & ClassInitFlags::COLLECTIBLE_FLAG) \
+        {\
+            if ((dac_cast<ThreadLocalModule::PTR_CollectibleDynamicEntry>(pDynamicEntry))->m_hNonGCStatics != 0) \
+            { \
+                U1ARRAYREF objArray;\
+                objArray = (U1ARRAYREF)pLoaderAllocator->GetHandleValueFastCannotFailType2( \
+                                            (dac_cast<ThreadLocalModule::PTR_CollectibleDynamicEntry>(pDynamicEntry))->m_hNonGCStatics);\
+                *(pNonGCStatics) = dac_cast<PTR_BYTE>(PTR_READ( \
+                        PTR_TO_TADDR(OBJECTREFToObject( objArray )) + sizeof(ArrayBase) - ThreadLocalModule::DynamicEntry::GetOffsetOfDataBlob(), \
+                            objArray->GetNumComponents() * (DWORD)objArray->GetComponentSize() + ThreadLocalModule::DynamicEntry::GetOffsetOfDataBlob())); \
+            } else (*pNonGCStatics) = NULL; \
+        }\
+        else\
+        {\
+            *(pNonGCStatics) = dac_cast<ThreadLocalModule::PTR_NormalDynamicEntry>(pDynamicEntry)->GetNonGCStaticsBasePointer();\
+        }\
+    }\
+
     struct DynamicEntry
     {
+        static DWORD GetOffsetOfDataBlob();
+    };
+    typedef DPTR(DynamicEntry) PTR_DynamicEntry;
+
+    struct CollectibleDynamicEntry : public DynamicEntry
+    {
+        LOADERHANDLE        m_hGCStatics;
+        LOADERHANDLE        m_hNonGCStatics;
+        PTR_LoaderAllocator m_pLoaderAllocator;
+    };
+    typedef DPTR(CollectibleDynamicEntry) PTR_CollectibleDynamicEntry;
+
+    struct NormalDynamicEntry : public DynamicEntry
+    {
         OBJECTHANDLE    m_pGCStatics;
 #ifdef FEATURE_64BIT_ALIGNMENT
         // Padding to make m_pDataBlob aligned at MAX_PRIMITIVE_FIELD_SIZE.
@@ -80,13 +136,8 @@ struct ThreadLocalModule
             SUPPORTS_DAC;
             return dac_cast<PTR_BYTE>(this);
         }
-        static DWORD GetOffsetOfDataBlob()
-        {
-            LIMITED_METHOD_CONTRACT;
-            return offsetof(DynamicEntry, m_pDataBlob);
-        }
     };
-    typedef DPTR(DynamicEntry) PTR_DynamicEntry;
+    typedef DPTR(NormalDynamicEntry) PTR_NormalDynamicEntry;
 
     struct DynamicClassInfo
     {
@@ -168,7 +219,7 @@ struct ThreadLocalModule
 
         if (pMT->IsDynamicStatics())
         {
-            return GetDynamicEntryGCStaticsBasePointer(pMT->GetModuleDynamicEntryID());
+            return GetDynamicEntryGCStaticsBasePointer(pMT->GetModuleDynamicEntryID(), pMT->GetLoaderAllocator());
         }
         else
         {
@@ -189,7 +240,7 @@ struct ThreadLocalModule
 
         if (pMT->IsDynamicStatics())
         {
-            return GetDynamicEntryNonGCStaticsBasePointer(pMT->GetModuleDynamicEntryID());
+            return GetDynamicEntryNonGCStaticsBasePointer(pMT->GetModuleDynamicEntryID(), pMT->GetLoaderAllocator());
         }
         else
         {
@@ -207,9 +258,19 @@ struct ThreadLocalModule
         return pEntry;
     }
 
+    inline DynamicClassInfo* GetDynamicClassInfo(DWORD n)
+    {
+        LIMITED_METHOD_CONTRACT
+        SUPPORTS_DAC;
+        _ASSERTE(m_pDynamicClassTable && m_aDynamicEntries > n);
+        dac_cast<PTR_DynamicEntry>(m_pDynamicClassTable[n].m_pDynamicEntry);
+
+        return &m_pDynamicClassTable[n];
+    }
+
     // These helpers can now return null, as the debugger may do queries on a type
     // before the calls to PopulateClass happen
-    inline PTR_BYTE GetDynamicEntryGCStaticsBasePointer(DWORD n)
+    inline PTR_BYTE GetDynamicEntryGCStaticsBasePointer(DWORD n, PTR_LoaderAllocator pLoaderAllocator)
     {
         CONTRACTL
         {
@@ -225,16 +286,20 @@ struct ThreadLocalModule
             return NULL;
         }
         
-        DynamicEntry* pEntry = GetDynamicEntry(n);
-        if (!pEntry)
+        DynamicClassInfo* pClassInfo = GetDynamicClassInfo(n);
+        if (!pClassInfo->m_pDynamicEntry)
         {
             return NULL;
         }
 
-        return pEntry->GetGCStaticsBasePointer();
+        PTR_BYTE retval = NULL;
+
+        GET_DYNAMICENTRY_GCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, pClassInfo, &retval);
+
+        return retval;
     }
 
-    inline PTR_BYTE GetDynamicEntryNonGCStaticsBasePointer(DWORD n)
+    inline PTR_BYTE GetDynamicEntryNonGCStaticsBasePointer(DWORD n, PTR_LoaderAllocator pLoaderAllocator)
     {
         CONTRACTL
         {
@@ -250,13 +315,17 @@ struct ThreadLocalModule
             return NULL;
         }
 
-        DynamicEntry* pEntry = GetDynamicEntry(n);
-        if (!pEntry)
+        DynamicClassInfo* pClassInfo = GetDynamicClassInfo(n);
+        if (!pClassInfo->m_pDynamicEntry)
         {
             return NULL;
         }
 
-        return pEntry->GetNonGCStaticsBasePointer();
+        PTR_BYTE retval = NULL;
+
+        GET_DYNAMICENTRY_NONGCTHREADSTATICS_BASEPOINTER(pLoaderAllocator, pClassInfo, &retval);
+
+        return retval;
     }
 
     FORCEINLINE PTR_DynamicClassInfo GetDynamicClassInfoIfInitialized(DWORD n)
@@ -320,16 +389,6 @@ struct ThreadLocalModule
         SetClassFlags(pMT, ClassInitFlags::INITIALIZED_FLAG);
     }
 
-    void SetClassAllocatedAndInitialized(MethodTable* pMT)
-    {
-        WRAPPER_NO_CONTRACT;
-    
-        _ASSERTE(!IsClassInitialized(pMT));
-        _ASSERTE(!IsClassInitError(pMT));
-    
-        SetClassFlags(pMT, ClassInitFlags::ALLOCATECLASS_FLAG | ClassInitFlags::INITIALIZED_FLAG);
-    }
-
     void SetClassAllocated(MethodTable* pMT)
     {
         WRAPPER_NO_CONTRACT;
@@ -465,6 +524,7 @@ struct ThreadLocalBlock
 private:
     PTR_TLMTableEntry   m_pTLMTable;     // Table of ThreadLocalModules
     SIZE_T              m_TLMTableSize;  // Current size of table
+    SpinLock            m_TLMTableLock;  // Spinlock used to synchronize growing the table and freeing TLM by other threads
 
     // Each ThreadLocalBlock has its own ThreadStaticHandleTable. The ThreadStaticHandleTable works
     // by allocating Object arrays on the GC heap and keeping them alive with pinning handles.
@@ -498,9 +558,12 @@ public:
 
 #ifndef DACCESS_COMPILE
     ThreadLocalBlock()
-      : m_pTLMTable(NULL), m_TLMTableSize(0), m_pThreadStaticHandleTable(NULL) {}
+      : m_pTLMTable(NULL), m_TLMTableSize(0), m_pThreadStaticHandleTable(NULL) 
+    {
+        m_TLMTableLock.Init(LOCK_TYPE_DEFAULT);
+    }
 
-    void    FreeTLM(SIZE_T i);
+    void    FreeTLM(SIZE_T i, BOOL isThreadShuttingDown);
 
     void    FreeTable();
 
@@ -676,5 +739,12 @@ class ThreadStatics
 
 };
 
+/* static */
+inline DWORD ThreadLocalModule::DynamicEntry::GetOffsetOfDataBlob()
+{
+    LIMITED_METHOD_CONTRACT;
+    _ASSERTE(DWORD(offsetof(NormalDynamicEntry, m_pDataBlob)) == offsetof(NormalDynamicEntry, m_pDataBlob));
+    return (DWORD)offsetof(NormalDynamicEntry, m_pDataBlob);
+}
 
 #endif
diff --git a/src/vm/typedesc.cpp b/src/vm/typedesc.cpp
index b244f45..1b3d5f5 100644
@@ -835,7 +835,7 @@ OBJECTREF ParamTypeDesc::GetManagedClassObject()
         EnsureWritablePages(this);
         if (FastInterlockCompareExchangePointer(&m_hExposedClassObject, hExposedClassObject, static_cast<LOADERHANDLE>(NULL)))
         {
-            pLoaderAllocator->ClearHandle(hExposedClassObject);
+            pLoaderAllocator->FreeHandle(hExposedClassObject);
         }
 
         if (OwnsTemplateMethodTable())
@@ -2271,7 +2271,7 @@ OBJECTREF TypeVarTypeDesc::GetManagedClassObject()
 
         if (FastInterlockCompareExchangePointer(EnsureWritablePages(&m_hExposedClassObject), hExposedClassObject, static_cast<LOADERHANDLE>(NULL)))
         {
-            pLoaderAllocator->ClearHandle(hExposedClassObject);
+            pLoaderAllocator->FreeHandle(hExposedClassObject);
         }
 
         GCPROTECT_END();