1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
7 #include "stringliteralmap.h"
8 #include "virtualcallstub.h"
9 #include "threadsuspend.h"
11 #ifndef DACCESS_COMPILE
12 #include "comdelegate.h"
14 #include "comcallablewrapper.h"
16 //*****************************************************************************
17 // Used by LoaderAllocator::Init for easier readability.
18 #ifdef ENABLE_PERF_COUNTERS
19 #define LOADERHEAP_PROFILE_COUNTER (&(GetPerfCounters().m_Loading.cbLoaderHeapSize))
21 #define LOADERHEAP_PROFILE_COUNTER (NULL)
24 #ifndef CROSSGEN_COMPILE
25 #define STUBMANAGER_RANGELIST(stubManager) (stubManager::g_pManager->GetRangeList())
27 #define STUBMANAGER_RANGELIST(stubManager) (NULL)
// Process-wide count of LoaderAllocators ever created; bumped atomically in the
// constructor (see InterlockedIncrement64 below). Starts at 1 so the first
// allocator receives a non-zero id in m_nLoaderAllocator.
30 UINT64 LoaderAllocator::cLoaderAllocatorsCreated = 1;
// Constructor: zero-initializes every member eagerly so that a failure partway
// through LoaderAllocator::Init cannot leave uninitialized/garbage pointers
// behind for the destructor or cleanup paths to trip over.
32 LoaderAllocator::LoaderAllocator()
34 LIMITED_METHOD_CONTRACT;
36 // initialize all members up front to NULL so that short-circuit failure won't cause invalid values
37 m_InitialReservedMemForLoaderHeaps = NULL;
38 m_pLowFrequencyHeap = NULL;
39 m_pHighFrequencyHeap = NULL;
41 m_pPrecodeHeap = NULL;
42 m_pExecutableHeap = NULL;
43 #ifdef FEATURE_READYTORUN
44 m_pDynamicHelpersHeap = NULL;
46 m_pFuncPtrStubs = NULL;
47 m_hLoaderAllocatorObjectHandle = NULL;
48 m_pStringLiteralMap = NULL;
// (UINT32)-1 is the "not yet activated" sentinel; it becomes 1 in
// ActivateManagedTracking once the managed scout holds the first reference.
50 m_cReferences = (UINT32)-1;
52 m_pFirstDomainAssemblyFromSameALCToDelete = NULL;
54 #ifdef FAT_DISPATCH_TOKENS
55 // DispatchTokenFat pointer table for token overflow scenarios. Lazily allocated.
56 m_pFatTokenSetLock = NULL;
57 m_pFatTokenSet = NULL;
60 #ifndef CROSSGEN_COMPILE
61 m_pVirtualCallStubManager = NULL;
64 m_fGCPressure = false;
65 m_fTerminated = false;
68 m_pLoaderAllocatorDestroyNext = NULL;
70 m_pCodeHeapInitialAlloc = NULL;
71 m_pVSDHeapInitialAlloc = NULL;
72 m_pLastUsedCodeHeap = NULL;
73 m_pLastUsedDynamicCodeHeap = NULL;
74 m_pJumpStubCache = NULL;
75 m_IsCollectible = false;
77 m_pMarshalingData = NULL;
79 #ifdef FEATURE_COMINTEROP
80 m_pComCallWrapperCache = NULL;
83 m_pUMEntryThunkCache = NULL;
// Assign a unique id by atomically bumping the global creation counter.
85 m_nLoaderAllocator = InterlockedIncrement64((LONGLONG *)&LoaderAllocator::cLoaderAllocatorsCreated);
// Destructor: by this point all owned subsystems must already have been torn
// down by the explicit cleanup paths (Terminate/Destroy); only sanity asserts here.
88 LoaderAllocator::~LoaderAllocator()
95 #if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
98 // Assert that VSD is not still active when the destructor is called.
99 _ASSERTE(m_pVirtualCallStubManager == NULL);
101 // Code manager is responsible for cleaning up.
102 _ASSERTE(m_pJumpStubCache == NULL);
106 #ifndef DACCESS_COMPILE
107 //---------------------------------------------------------------------------------------
// Unconditionally adds a reference. The caller must already hold a valid
// reference: the count must be positive and must not be the (UINT32)-1
// "not yet activated" sentinel. Use AddReferenceIfAlive when that is not known.
109 void LoaderAllocator::AddReference()
119 _ASSERTE((m_cReferences > (UINT32)0) && (m_cReferences != (UINT32)-1));
120 FastInterlockIncrement((LONG *)&m_cReferences);
122 #endif //!DACCESS_COMPILE
124 //---------------------------------------------------------------------------------------
126 // Adds reference if the native object is alive - code:LoaderAllocator#AssemblyPhases.
127 // Returns TRUE if the reference was added.
129 BOOL LoaderAllocator::AddReferenceIfAlive()
139 #ifndef DACCESS_COMPILE
142 // Local snaphost of ref-count
143 UINT32 cReferencesLocalSnapshot = m_cReferences;
144 _ASSERTE(cReferencesLocalSnapshot != (UINT32)-1);
146 if (cReferencesLocalSnapshot == 0)
147 { // Ref-count was 0, do not AddRef
151 UINT32 cOriginalReferences = FastInterlockCompareExchange(
152 (LONG *)&m_cReferences,
153 cReferencesLocalSnapshot + 1,
154 cReferencesLocalSnapshot);
156 if (cOriginalReferences == cReferencesLocalSnapshot)
157 { // The exchange happened
160 // Let's spin till we are the only thread to modify this value
162 #else //DACCESS_COMPILE
165 #endif //DACCESS_COMPILE
166 } // LoaderAllocator::AddReferenceIfAlive
168 //---------------------------------------------------------------------------------------
// Drops one reference. Returns TRUE when this release took the count to zero,
// i.e. the caller is responsible for triggering collection (see Destroy).
170 BOOL LoaderAllocator::Release()
180 // Only actually destroy the domain assembly when all references to it are gone.
181 // This should preserve behavior in the debugger such that an UnloadModule event
182 // will occur before the underlying data structure ceases functioning.
183 #ifndef DACCESS_COMPILE
185 _ASSERTE((m_cReferences > (UINT32)0) && (m_cReferences != (UINT32)-1));
186 LONG cNewReferences = FastInterlockDecrement((LONG *)&m_cReferences);
187 return (cNewReferences == 0);
188 #else //DACCESS_COMPILE
// DAC builds only inspect state; report whether the count is already zero.
190 return (m_cReferences == (UINT32)0);
191 #endif //DACCESS_COMPILE
192 } // LoaderAllocator::Release
194 #ifndef DACCESS_COMPILE
195 #ifndef CROSSGEN_COMPILE
196 //---------------------------------------------------------------------------------------
// Records a reference from this LoaderAllocator to pOtherLA if one is not
// already tracked: keeps the target's managed object alive via a handle,
// remembers it in m_LoaderAllocatorReferences, and add-refs the target.
198 BOOL LoaderAllocator::CheckAddReference_Unlocked(LoaderAllocator *pOtherLA)
207 // This must be checked before calling this function
208 _ASSERTE(pOtherLA != this);
210 // This function requires that the loader allocator lock has been taken.
211 _ASSERTE(GetDomain()->GetLoaderAllocatorReferencesLock()->OwnedByCurrentThread());
213 if (m_LoaderAllocatorReferences.Lookup(pOtherLA) == NULL)
216 // Build a managed reference to keep the target object live
217 AllocateHandle(pOtherLA->GetExposedObject());
219 // Keep track of the references that have already been made
220 m_LoaderAllocatorReferences.Add(pOtherLA);
222 // Notify the other LoaderAllocator that a reference exists
223 pOtherLA->AddReference();
230 //---------------------------------------------------------------------------------------
// Ensures this (collectible) LoaderAllocator holds a reference to pOtherLA.
// Early-outs when no tracking is needed: non-collectible source, self-reference,
// or non-collectible target. Takes the references lock before adding.
232 BOOL LoaderAllocator::EnsureReference(LoaderAllocator *pOtherLA)
241 // Check if this lock can be taken in all places that the function is called
242 _ASSERTE(GetDomain()->GetLoaderAllocatorReferencesLock()->Debug_CanTake());
244 if (!IsCollectible())
247 if (this == pOtherLA)
250 if (!pOtherLA->IsCollectible())
253 CrstHolder ch(GetDomain()->GetLoaderAllocatorReferencesLock());
254 return CheckAddReference_Unlocked(pOtherLA);
// Ensures references from this LoaderAllocator to the allocators of the
// defining module and of every type argument in 'inst', so a generic
// instantiation keeps all of its constituent collectible allocators alive.
// Returns TRUE if any new reference was added.
257 BOOL LoaderAllocator::EnsureInstantiation(Module *pDefiningModule, Instantiation inst)
266 BOOL fNewReferenceNeeded = FALSE;
268 // Check if this lock can be taken in all places that the function is called
269 _ASSERTE(GetDomain()->GetLoaderAllocatorReferencesLock()->Debug_CanTake());
271 if (!IsCollectible())
274 CrstHolder ch(GetDomain()->GetLoaderAllocatorReferencesLock());
276 if (pDefiningModule != NULL)
278 LoaderAllocator *pDefiningLoaderAllocator = pDefiningModule->GetLoaderAllocator();
279 if (pDefiningLoaderAllocator->IsCollectible())
281 if (pDefiningLoaderAllocator != this)
283 fNewReferenceNeeded = CheckAddReference_Unlocked(pDefiningLoaderAllocator) || fNewReferenceNeeded;
288 for (DWORD i = 0; i < inst.GetNumArgs(); i++)
290 TypeHandle arg = inst[i];
291 _ASSERTE(!arg.IsEncodedFixup());
292 LoaderAllocator *pOtherLA = arg.GetLoaderModule()->GetLoaderAllocator();
// Skip self-references and non-collectible targets; neither needs tracking.
294 if (pOtherLA == this)
297 if (!pOtherLA->IsCollectible())
300 fNewReferenceNeeded = CheckAddReference_Unlocked(pOtherLA) || fNewReferenceNeeded;
303 return fNewReferenceNeeded;
305 #else // CROSSGEN_COMPILE
// Crossgen builds have no collectible assemblies, so reference tracking
// between LoaderAllocators is unnecessary; these are trivial variants.
306 BOOL LoaderAllocator::EnsureReference(LoaderAllocator *pOtherLA)
311 BOOL LoaderAllocator::EnsureInstantiation(Module *pDefiningModule, Instantiation inst)
315 #endif // !CROSSGEN_COMPILE
317 #ifndef CROSSGEN_COMPILE
// Mark-phase helpers used by the loader allocator GC
// (see GCLoaderAllocators_RemoveAssemblies).
318 bool LoaderAllocator::Marked()
320 LIMITED_METHOD_CONTRACT;
324 void LoaderAllocator::ClearMark()
326 LIMITED_METHOD_CONTRACT;
// Mark propagates transitively through this allocator's tracked references,
// so everything reachable from a live allocator is treated as live.
330 void LoaderAllocator::Mark()
338 LoaderAllocatorSet::Iterator iter = m_LoaderAllocatorReferences.Begin();
339 while (iter != m_LoaderAllocatorReferences.End())
341 LoaderAllocator *pAllocator = *iter;
348 //---------------------------------------------------------------------------------------
350 // Collect unreferenced assemblies, remove them from the assembly list and return their loader allocator
// Runs a mark-and-sweep over the domain's collectible LoaderAllocators:
//   pass 1 (debug-only) dumps state, pass 2 marks live allocators (transitively,
//   via Mark), pass 3 sweeps unmarked dead ones into a singly-linked destroy
//   list threaded through m_pLoaderAllocatorDestroyNext, then the assemblies of
//   each dead allocator are removed from the AppDomain's lists/caches.
354 LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain * pAppDomain)
363 // List of LoaderAllocators being deleted
364 LoaderAllocator * pFirstDestroyedLoaderAllocator = NULL;
367 // Debug logic for debugging the loader allocator gc.
369 /* Iterate through every loader allocator, and print its current state */
370 AppDomain::AssemblyIterator iData;
371 iData = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
372 kIncludeExecution | kIncludeLoaded | kIncludeCollected));
373 CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
375 while (iData.Next_Unlocked(pDomainAssembly.This()))
377 // The assembly could be collected (ref-count = 0), do not use holder which calls add-ref
378 Assembly * pAssembly = pDomainAssembly->GetLoadedAssembly();
380 if (pAssembly != NULL)
382 LoaderAllocator * pLoaderAllocator = pAssembly->GetLoaderAllocator();
383 if (pLoaderAllocator->IsCollectible())
385 printf("LA %p ReferencesTo %d\n", pLoaderAllocator, pLoaderAllocator->m_cReferences);
386 LoaderAllocatorSet::Iterator iter = pLoaderAllocator->m_LoaderAllocatorReferences.Begin();
387 while (iter != pLoaderAllocator->m_LoaderAllocatorReferences.End())
389 LoaderAllocator * pAllocator = *iter;
390 printf("LARefTo: %p\n", pAllocator);
399 AppDomain::AssemblyIterator i;
400 // Iterate through every loader allocator, marking as we go
402 CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
404 i = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
405 kIncludeExecution | kIncludeLoaded | kIncludeCollected));
406 CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
408 while (i.Next_Unlocked(pDomainAssembly.This()))
410 // The assembly could be collected (ref-count = 0), do not use holder which calls add-ref
411 Assembly * pAssembly = pDomainAssembly->GetLoadedAssembly();
413 if (pAssembly != NULL)
415 LoaderAllocator * pLoaderAllocator = pAssembly->GetLoaderAllocator();
416 if (pLoaderAllocator->IsCollectible())
418 if (pLoaderAllocator->IsAlive())
419 pLoaderAllocator->Mark();
425 // Iterate through every loader allocator, unmarking marked loaderallocators, and
426 // build a free list of unmarked ones
428 CrstHolder chLoaderAllocatorReferencesLock(pAppDomain->GetLoaderAllocatorReferencesLock());
429 CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
431 i = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
432 kIncludeExecution | kIncludeLoaded | kIncludeCollected));
433 CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
435 while (i.Next_Unlocked(pDomainAssembly.This()))
437 // The assembly could be collected (ref-count = 0), do not use holder which calls add-ref
438 Assembly * pAssembly = pDomainAssembly->GetLoadedAssembly();
440 if (pAssembly != NULL)
442 LoaderAllocator * pLoaderAllocator = pAssembly->GetLoaderAllocator();
443 if (pLoaderAllocator->IsCollectible())
445 if (pLoaderAllocator->Marked())
447 pLoaderAllocator->ClearMark();
449 else if (!pLoaderAllocator->IsAlive())
451 // Check that we don't already have this LoaderAllocator in the list to destroy
452 // (in case multiple assemblies are loaded in the same LoaderAllocator)
453 bool addAllocator = true;
454 LoaderAllocator * pCheckAllocatorToDestroy = pFirstDestroyedLoaderAllocator;
455 while (pCheckAllocatorToDestroy != NULL)
457 if (pCheckAllocatorToDestroy == pLoaderAllocator)
459 addAllocator = false;
463 pCheckAllocatorToDestroy = pCheckAllocatorToDestroy->m_pLoaderAllocatorDestroyNext;
466 // Otherwise, we have a LoaderAllocator that we add to the list
// Push onto the intrusive destroy list (LIFO).
469 pLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
470 // We will store a reference to this assembly, and use it later in this function
471 pFirstDestroyedLoaderAllocator = pLoaderAllocator;
472 _ASSERTE(pLoaderAllocator->m_pFirstDomainAssemblyFromSameALCToDelete != NULL);
480 // Iterate through free list, removing from Assembly list
481 LoaderAllocator * pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
483 while (pDomainLoaderAllocatorDestroyIterator != NULL)
485 _ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
487 GetAppDomain()->RemoveTypesFromTypeIDMap(pDomainLoaderAllocatorDestroyIterator);
489 DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
491 // Release all assemblies from the same ALC
492 while (!domainAssemblyIt.end())
494 DomainAssembly* domainAssemblyToRemove = domainAssemblyIt;
495 pAppDomain->RemoveAssembly(domainAssemblyToRemove);
497 if (!domainAssemblyToRemove->GetAssembly()->IsDynamic())
499 pAppDomain->RemoveFileFromCache(domainAssemblyToRemove->GetFile());
501 spec.InitializeSpec(domainAssemblyToRemove->GetFile());
502 VERIFY(pAppDomain->RemoveAssemblyFromCache(domainAssemblyToRemove));
503 pAppDomain->RemoveNativeImageDependency(&spec);
509 pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
512 return pFirstDestroyedLoaderAllocator;
513 } // LoaderAllocator::GCLoaderAllocators_RemoveAssemblies
515 //---------------------------------------------------------------------------------------
517 // Collect unreferenced assemblies, delete all their remaining resources.
// Drives the full unload: sweep dead allocators out of the AppDomain
// (GCLoaderAllocators_RemoveAssemblies), fire ETW/debugger unload
// notifications, then delete the DomainAssemblies and unload each
// allocator's code/VSD state under EE suspension.
520 void LoaderAllocator::GCLoaderAllocators(LoaderAllocator* pOriginalLoaderAllocator)
530 // List of LoaderAllocators being deleted
531 LoaderAllocator * pFirstDestroyedLoaderAllocator = NULL;
533 AppDomain* pAppDomain = (AppDomain*)pOriginalLoaderAllocator->GetDomain();
535 // Collect all LoaderAllocators that don't have any more DomainAssemblies alive
536 // Note: that it may not collect our pOriginalLoaderAllocator in case this
537 // LoaderAllocator hasn't loaded any DomainAssembly. We handle this case in the next loop.
538 // Note: The removed LoaderAllocators are not reachable outside of this function anymore, because we
539 // removed them from the assembly list
540 pFirstDestroyedLoaderAllocator = GCLoaderAllocators_RemoveAssemblies(pAppDomain);
542 bool isOriginalLoaderAllocatorFound = false;
544 // Iterate through free list, firing ETW events and notifying the debugger
545 LoaderAllocator * pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
546 while (pDomainLoaderAllocatorDestroyIterator != NULL)
548 _ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
550 ETW::LoaderLog::CollectibleLoaderAllocatorUnload((AssemblyLoaderAllocator *)pDomainLoaderAllocatorDestroyIterator);
552 // Set the unloaded flag before notifying the debugger
553 pDomainLoaderAllocatorDestroyIterator->SetIsUnloaded();
555 DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
556 while (!domainAssemblyIt.end())
558 // Notify the debugger
559 domainAssemblyIt->NotifyDebuggerUnload();
563 if (pDomainLoaderAllocatorDestroyIterator == pOriginalLoaderAllocator)
565 isOriginalLoaderAllocatorFound = true;
567 pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
570 // If the original LoaderAllocator was not processed, it is most likely a LoaderAllocator without any loaded DomainAssembly
571 // But we still want to collect it so we add it to the list of LoaderAllocator to destroy
572 if (!isOriginalLoaderAllocatorFound && !pOriginalLoaderAllocator->IsAlive())
574 pOriginalLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
575 pFirstDestroyedLoaderAllocator = pOriginalLoaderAllocator;
578 // Iterate through free list, deleting DomainAssemblies
579 pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
580 while (pDomainLoaderAllocatorDestroyIterator != NULL)
582 _ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
584 DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
585 while (!domainAssemblyIt.end())
587 delete (DomainAssembly*)domainAssemblyIt;
590 // We really don't have to set it to NULL as the assembly is not reachable anymore, but just in case ...
591 // (Also debugging NULL AVs if someone uses it accidentally is so much easier)
592 pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete = NULL;
594 pDomainLoaderAllocatorDestroyIterator->ReleaseManagedAssemblyLoadContext();
596 // The following code was previously happening on delete ~DomainAssembly->Terminate
597 // We are moving this part here in order to make sure that we can unload a LoaderAllocator
598 // that didn't have a DomainAssembly
599 // (we have now a LoaderAllocator with 0-n DomainAssembly)
601 // This cleanup code starts resembling parts of AppDomain::Terminate too much.
602 // It would be useful to reduce duplication and also establish clear responsibilities
603 // for LoaderAllocator::Destroy, Assembly::Terminate, LoaderAllocator::Terminate
604 // and LoaderAllocator::~LoaderAllocator. We need to establish how these
605 // cleanup paths interact with app-domain unload and process tear-down, too.
607 if (!IsAtProcessExit())
609 // Suspend the EE to do some clean up that can only occur
610 // while no threads are running.
611 GCX_COOP(); // SuspendEE may require current thread to be in Coop mode
612 // SuspendEE cares about the reason flag only when invoked for a GC
613 // Other values are typically ignored. If using SUSPEND_FOR_APPDOMAIN_SHUTDOWN
614 // is inappropriate, we can introduce a new flag or hijack an unused one.
615 ThreadSuspend::SuspendEE(ThreadSuspend::SUSPEND_FOR_APPDOMAIN_SHUTDOWN);
618 ExecutionManager::Unload(pDomainLoaderAllocatorDestroyIterator);
619 pDomainLoaderAllocatorDestroyIterator->UninitVirtualCallStubManager();
621 // TODO: Do we really want to perform this on each LoaderAllocator?
622 MethodTable::ClearMethodDataCache();
623 ClearJitGenericHandleCache(pAppDomain);
625 if (!IsAtProcessExit())
628 ThreadSuspend::RestartEE(FALSE, TRUE);
631 // Because RegisterLoaderAllocatorForDeletion is modifying m_pLoaderAllocatorDestroyNext, we are saving it here
632 LoaderAllocator* pLoaderAllocatorDestroyNext = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
634 // Register this LoaderAllocator for cleanup
635 pAppDomain->RegisterLoaderAllocatorForDeletion(pDomainLoaderAllocatorDestroyIterator);
638 pDomainLoaderAllocatorDestroyIterator = pLoaderAllocatorDestroyNext;
641 // Deleting the DomainAssemblies will have created a list of LoaderAllocator's on the AppDomain
642 // Call this shutdown function to clean those up.
643 pAppDomain->ShutdownFreeLoaderAllocators();
644 } // LoaderAllocator::GCLoaderAllocators
646 //---------------------------------------------------------------------------------------
// QCall entry point from the managed LoaderAllocator scout finalizer: releases
// this allocator's outgoing references and its own reference, and kicks off
// GCLoaderAllocators when that drops the count to zero.
649 BOOL QCALLTYPE LoaderAllocator::Destroy(QCall::LoaderAllocatorHandle pLoaderAllocator)
657 if (ObjectHandleIsNull(pLoaderAllocator->GetLoaderAllocatorObjectHandle()))
659 STRESS_LOG1(LF_CLASSLOADER, LL_INFO100, "Begin LoaderAllocator::Destroy for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(pLoaderAllocator)));
660 LoaderAllocatorID *pID = pLoaderAllocator->Id();
662 // This will probably change for shared code unloading
663 _ASSERTE(pID->GetType() == LAT_Assembly);
665 #ifdef FEATURE_COMINTEROP
666 if (pLoaderAllocator->m_pComCallWrapperCache)
668 pLoaderAllocator->m_pComCallWrapperCache->Release();
670 // if the above released the wrapper cache, then it will call back and reset our
671 // m_pComCallWrapperCache to null.
672 if (!pLoaderAllocator->m_pComCallWrapperCache)
674 LOG((LF_CLASSLOADER, LL_INFO10, "LoaderAllocator::Destroy ComCallWrapperCache released\n"));
679 pLoaderAllocator->m_pComCallWrapperCache = NULL;
680 LOG((LF_CLASSLOADER, LL_INFO10, "LoaderAllocator::Destroy ComCallWrapperCache not released\n"));
684 #endif // FEATURE_COMINTEROP
686 DomainAssembly* pDomainAssembly = (DomainAssembly*)(pID->GetDomainAssemblyIterator());
687 if (pDomainAssembly != NULL)
689 Assembly *pAssembly = pDomainAssembly->GetCurrentAssembly();
690 pLoaderAllocator->m_pFirstDomainAssemblyFromSameALCToDelete = pAssembly->GetDomainAssembly();
693 // Iterate through all references to other loader allocators and decrement their reference
695 LoaderAllocatorSet::Iterator iter = pLoaderAllocator->m_LoaderAllocatorReferences.Begin();
696 while (iter != pLoaderAllocator->m_LoaderAllocatorReferences.End())
698 LoaderAllocator *pAllocator = *iter;
699 pAllocator->Release();
703 // Release this loader allocator
704 BOOL fIsLastReferenceReleased = pLoaderAllocator->Release();
706 // If the reference count on this assembly got to 0, then a LoaderAllocator may
707 // be able to be collected, thus, perform a garbage collection.
708 // The reference count is setup such that in the case of non-trivial graphs, the reference count
709 // may hit zero early.
710 if (fIsLastReferenceReleased)
712 LoaderAllocator::GCLoaderAllocators(pLoaderAllocator);
714 STRESS_LOG1(LF_CLASSLOADER, LL_INFO100, "End LoaderAllocator::Destroy for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(pLoaderAllocator)));
722 } // LoaderAllocator::Destroy
724 #define MAX_LOADERALLOCATOR_HANDLE 0x40000000
726 // Returns NULL if the managed LoaderAllocator object was already collected.
// Allocates a LOADERHANDLE for 'value'. For collectible allocators the value
// lives in the managed handle table array (handle = (index+1)<<1, low bit
// clear); for the fallback path at the bottom the handle is a tagged OBJECTREF*
// (low bit set). FreeHandle/SetHandleValue/CompareExchangeValueInHandle rely on
// this low-bit encoding to distinguish the two cases.
727 LOADERHANDLE LoaderAllocator::AllocateHandle(OBJECTREF value)
742 LOADERALLOCATORREF loaderAllocator;
743 PTRARRAYREF handleTable;
744 PTRARRAYREF handleTableOld;
747 ZeroMemory(&gc, sizeof(gc));
753 // The handle table is read locklessly, be careful
756 gc.loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
757 if (gc.loaderAllocator == NULL)
758 { // The managed LoaderAllocator is already collected, we cannot allocate any exposed managed objects for it
769 CrstHolder ch(&m_crstLoaderAllocator);
771 gc.handleTable = gc.loaderAllocator->GetHandleTable();
773 if (!m_freeHandleIndexesStack.IsEmpty())
775 // Reuse a handle slot that was previously freed
776 DWORD freeHandleIndex = m_freeHandleIndexesStack.Pop();
777 gc.handleTable->SetAt(freeHandleIndex, gc.value);
778 retVal = (UINT_PTR)((freeHandleIndex + 1) << 1);
782 slotsUsed = gc.loaderAllocator->GetSlotsUsed();
784 if (slotsUsed > MAX_LOADERALLOCATOR_HANDLE)
789 numComponents = gc.handleTable->GetNumComponents();
791 if (slotsUsed < numComponents)
793 // The handle table is large enough, allocate next slot from it
794 gc.handleTable->SetAt(slotsUsed, gc.value);
795 gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
796 retVal = (UINT_PTR)((slotsUsed + 1) << 1);
801 // We need to enlarge the handle table
802 gc.handleTableOld = gc.handleTable;
// Grow geometrically; allocation happens outside the lock, then we
// re-take the lock and check nobody grew the table in the meantime.
804 DWORD newSize = numComponents * 2;
805 gc.handleTable = (PTRARRAYREF)AllocateObjectArray(newSize, g_pObjectClass);
808 CrstHolder ch(&m_crstLoaderAllocator);
810 if (gc.loaderAllocator->GetHandleTable() == gc.handleTableOld)
812 /* Copy out of old array */
813 memmoveGCRefs(gc.handleTable->GetDataPtr(), gc.handleTableOld->GetDataPtr(), slotsUsed * sizeof(Object *));
814 gc.loaderAllocator->SetHandleTable(gc.handleTable);
818 // Another thread has beaten us on enlarging the handle array, use the handle table it has allocated
819 gc.handleTable = gc.loaderAllocator->GetHandleTable();
822 slotsUsed = gc.loaderAllocator->GetSlotsUsed();
823 numComponents = gc.handleTable->GetNumComponents();
825 if (slotsUsed < numComponents)
827 // The handle table is large enough, allocate next slot from it
828 gc.handleTable->SetAt(slotsUsed, gc.value);
829 gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
830 retVal = (UINT_PTR)((slotsUsed + 1) << 1);
835 // Loop in the unlikely case that another thread has beaten us on the handle array enlarging, but
836 // all the slots were used up before the current thread was scheduled.
// Fallback path: pin the value via a raw object-ref slot; the +1 tags the
// pointer so the low bit marks it as a direct-pointer handle.
843 OBJECTREF* pRef = GetDomain()->AllocateObjRefPtrsInLargeTable(1);
844 SetObjectReference(pRef, gc.value);
845 retVal = (((UINT_PTR)pRef) + 1);
// Reads the object currently stored in 'handle' (fast path via macro).
853 OBJECTREF LoaderAllocator::GetHandleValue(LOADERHANDLE handle)
863 OBJECTREF objRet = NULL;
864 GET_LOADERHANDLE_VALUE_FAST(this, handle, &objRet);
// Clears 'handle' and, for table-based handles (low bit clear), returns the
// slot index to the free stack so AllocateHandle can reuse it.
868 void LoaderAllocator::FreeHandle(LOADERHANDLE handle)
875 PRECONDITION(handle != NULL);
879 SetHandleValue(handle, NULL);
881 if ((((UINT_PTR)handle) & 1) == 0)
883 // The slot value doesn't have the low bit set, so it is an index to the handle table.
884 // In this case, push the index of the handle to the stack of freed indexes for
886 CrstHolder ch(&m_crstLoaderAllocator);
888 UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
889 // The Push can fail due to OOM. Ignore this failure, it is better than crashing. The
890 // only effect is that the slot will not be reused in the future if the runtime survives
891 // the low memory situation.
892 m_freeHandleIndexesStack.Push((DWORD)index);
// Compare-exchange on the object stored in 'handle': stores 'valueUNSAFE' only
// if the current value equals 'compareUNSAFE'; returns the previous value.
// Handles both encodings: tagged direct pointer (low bit set) and table index.
896 OBJECTREF LoaderAllocator::CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF valueUNSAFE, OBJECTREF compareUNSAFE)
903 PRECONDITION(handle != NULL);
916 ZeroMemory(&gc, sizeof(gc));
919 gc.value = valueUNSAFE;
920 gc.compare = compareUNSAFE;
922 if ((((UINT_PTR)handle) & 1) != 0)
// Direct-pointer handle: untag (-1) to recover the OBJECTREF slot.
924 OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
926 if ((*ptr) == gc.compare)
928 SetObjectReference(ptr, gc.value);
933 /* The handle table is read locklessly, be careful */
934 CrstHolder ch(&m_crstLoaderAllocator);
936 _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
938 UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
939 LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
940 PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
942 gc.previous = handleTable->GetAt(index);
943 if (gc.previous == gc.compare)
945 handleTable->SetAt(index, gc.value);
949 retVal = gc.previous;
// Unconditionally stores 'value' into 'handle', handling both the tagged
// direct-pointer encoding and the handle-table-index encoding.
955 void LoaderAllocator::SetHandleValue(LOADERHANDLE handle, OBJECTREF value)
962 PRECONDITION(handle != NULL);
968 GCPROTECT_BEGIN(value);
970 // If the slot value does have the low bit set, then it is a simple pointer to the value
971 // Otherwise, we will need a more complicated operation to clear the value.
972 if ((((UINT_PTR)handle) & 1) != 0)
974 OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
975 SetObjectReference(ptr, value);
979 // The handle table is read locklessly, be careful
980 CrstHolder ch(&m_crstLoaderAllocator);
982 _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
984 UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
985 LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
986 PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
987 handleTable->SetAt(index, value);
// Creates the managed System.Reflection.LoaderAllocator counterpart, runs its
// constructor, and stores a long weak handle to it (registered for cleanup).
// Also adds GC memory pressure so collections are encouraged while this
// collectible allocator is alive.
995 void LoaderAllocator::SetupManagedTracking(LOADERALLOCATORREF * pKeepLoaderAllocatorAlive)
997 STANDARD_VM_CONTRACT;
999 GCInterface::AddMemoryPressure(30000);
1000 m_fGCPressure = true;
1005 // Initialize managed loader allocator reference holder
1008 MethodTable *pMT = MscorlibBinder::GetClass(CLASS__LOADERALLOCATOR);
1010 *pKeepLoaderAllocatorAlive = (LOADERALLOCATORREF)AllocateObject(pMT);
1012 MethodDescCallSite initLoaderAllocator(METHOD__LOADERALLOCATOR__CTOR, (OBJECTREF *)pKeepLoaderAllocatorAlive);
1015 ObjToArgSlot(*pKeepLoaderAllocatorAlive)
1018 initLoaderAllocator.Call(args);
// Long weak handle: lets us observe collection of the managed object without
// keeping it alive ourselves.
1020 m_hLoaderAllocatorObjectHandle = GetDomain()->CreateLongWeakHandle(*pKeepLoaderAllocatorAlive);
1022 RegisterHandleForCleanup(m_hLoaderAllocatorObjectHandle);
// Flips the ref-count from the -1 "not yet activated" sentinel to 1 (the
// managed scout's reference) and links the managed object back to this native
// allocator.
1025 void LoaderAllocator::ActivateManagedTracking()
1038 // There is now one external reference to this LoaderAllocator (the managed scout)
1039 _ASSERTE(m_cReferences == (UINT32)-1);
1040 m_cReferences = (UINT32)1;
1042 LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
1043 loaderAllocator->SetNativeLoaderAllocator(this);
1045 #endif // !CROSSGEN_COMPILE
1048 // We don't actually allocate a low frequency heap for collectible types.
1049 // This is carefully tuned to sum up to 16 pages to reduce waste.
1050 #define COLLECTIBLE_LOW_FREQUENCY_HEAP_SIZE (0 * GetOsPageSize())
1051 #define COLLECTIBLE_HIGH_FREQUENCY_HEAP_SIZE (3 * GetOsPageSize())
1052 #define COLLECTIBLE_STUB_HEAP_SIZE GetOsPageSize()
1053 #define COLLECTIBLE_CODEHEAP_SIZE (7 * GetOsPageSize())
1054 #define COLLECTIBLE_VIRTUALSTUBDISPATCH_HEAP_SPACE (5 * GetOsPageSize())
// One-time initialization: creates locks, reserves one contiguous block of
// address space and carves it up into the loader heaps (low/high frequency,
// stub, executable, and — for collectible allocators — code and VSD heaps),
// then initializes the precode heap, IL stub cache and interop data.
// 'pExecutableHeapMemory', when non-NULL, supplies the LoaderHeap object
// storage for the shared executable heap (global allocator only).
1056 void LoaderAllocator::Init(BaseDomain *pDomain, BYTE *pExecutableHeapMemory)
1058 STANDARD_VM_CONTRACT;
1060 m_pDomain = pDomain;
1062 m_crstLoaderAllocator.Init(CrstLoaderAllocator, (CrstFlags)CRST_UNSAFE_COOPGC);
1063 m_InteropDataCrst.Init(CrstInteropData, CRST_REENTRANCY);
1064 #ifdef FEATURE_COMINTEROP
1065 m_ComCallWrapperCrst.Init(CrstCOMCallWrapper);
1068 #ifndef CROSSGEN_COMPILE
1069 m_methodDescBackpatchInfoTracker.Initialize(this);
1073 // Initialize the heaps
1076 DWORD dwLowFrequencyHeapReserveSize;
1077 DWORD dwHighFrequencyHeapReserveSize;
1078 DWORD dwStubHeapReserveSize;
1079 DWORD dwExecutableHeapReserveSize;
1080 DWORD dwCodeHeapReserveSize;
1081 DWORD dwVSDHeapReserveSize;
1083 dwExecutableHeapReserveSize = 0;
// Collectible allocators use the small fixed COLLECTIBLE_* budgets (tuned to
// sum to 16 pages, see the #defines above); non-collectible ones use the
// regular reserve sizes and no code/VSD reservation.
1085 if (IsCollectible())
1087 dwLowFrequencyHeapReserveSize = COLLECTIBLE_LOW_FREQUENCY_HEAP_SIZE;
1088 dwHighFrequencyHeapReserveSize = COLLECTIBLE_HIGH_FREQUENCY_HEAP_SIZE;
1089 dwStubHeapReserveSize = COLLECTIBLE_STUB_HEAP_SIZE;
1090 dwCodeHeapReserveSize = COLLECTIBLE_CODEHEAP_SIZE;
1091 dwVSDHeapReserveSize = COLLECTIBLE_VIRTUALSTUBDISPATCH_HEAP_SPACE;
1095 dwLowFrequencyHeapReserveSize = LOW_FREQUENCY_HEAP_RESERVE_SIZE;
1096 dwHighFrequencyHeapReserveSize = HIGH_FREQUENCY_HEAP_RESERVE_SIZE;
1097 dwStubHeapReserveSize = STUB_HEAP_RESERVE_SIZE;
1099 // Non-collectible assemblies do not reserve space for these heaps.
1100 dwCodeHeapReserveSize = 0;
1101 dwVSDHeapReserveSize = 0;
1104 // The global heap needs a bit of space for executable memory that is not associated with a rangelist.
1105 // Take a page from the high-frequency heap for this.
1106 if (pExecutableHeapMemory != NULL)
1108 dwExecutableHeapReserveSize = GetOsPageSize();
1110 _ASSERTE(dwExecutableHeapReserveSize < dwHighFrequencyHeapReserveSize);
1111 dwHighFrequencyHeapReserveSize -= dwExecutableHeapReserveSize;
1114 DWORD dwTotalReserveMemSize = dwLowFrequencyHeapReserveSize
1115 + dwHighFrequencyHeapReserveSize
1116 + dwStubHeapReserveSize
1117 + dwCodeHeapReserveSize
1118 + dwVSDHeapReserveSize
1119 + dwExecutableHeapReserveSize;
1121 dwTotalReserveMemSize = (DWORD) ALIGN_UP(dwTotalReserveMemSize, VIRTUAL_ALLOC_RESERVE_GRANULARITY);
1123 #if !defined(_WIN64)
1124 // Make sure that we reserve as little as possible on 32-bit to save address space
1125 _ASSERTE(dwTotalReserveMemSize <= VIRTUAL_ALLOC_RESERVE_GRANULARITY);
// Reserve (not commit) the whole range up front; individual heaps commit
// pages out of it on demand.
1128 BYTE * initReservedMem = ClrVirtualAllocExecutable(dwTotalReserveMemSize, MEM_RESERVE, PAGE_NOACCESS);
1130 m_InitialReservedMemForLoaderHeaps = initReservedMem;
1132 if (initReservedMem == NULL)
1135 if (IsCollectible())
1137 m_pCodeHeapInitialAlloc = initReservedMem;
1138 initReservedMem += dwCodeHeapReserveSize;
1139 m_pVSDHeapInitialAlloc = initReservedMem;
1140 initReservedMem += dwVSDHeapReserveSize;
1144 _ASSERTE((dwCodeHeapReserveSize == 0) && (m_pCodeHeapInitialAlloc == NULL));
1145 _ASSERTE((dwVSDHeapReserveSize == 0) && (m_pVSDHeapInitialAlloc == NULL));
1148 if (dwLowFrequencyHeapReserveSize != 0)
1150 _ASSERTE(!IsCollectible());
1152 m_pLowFrequencyHeap = new (&m_LowFreqHeapInstance) LoaderHeap(LOW_FREQUENCY_HEAP_RESERVE_SIZE,
1153 LOW_FREQUENCY_HEAP_COMMIT_SIZE,
1155 dwLowFrequencyHeapReserveSize,
1156 LOADERHEAP_PROFILE_COUNTER);
1157 initReservedMem += dwLowFrequencyHeapReserveSize;
1160 if (dwExecutableHeapReserveSize != 0)
1162 _ASSERTE(!IsCollectible());
1164 m_pExecutableHeap = new (pExecutableHeapMemory) LoaderHeap(STUB_HEAP_RESERVE_SIZE,
1165 STUB_HEAP_COMMIT_SIZE,
1167 dwExecutableHeapReserveSize,
1168 LOADERHEAP_PROFILE_COUNTER,
1170 TRUE /* Make heap executable */
1172 initReservedMem += dwExecutableHeapReserveSize;
1175 m_pHighFrequencyHeap = new (&m_HighFreqHeapInstance) LoaderHeap(HIGH_FREQUENCY_HEAP_RESERVE_SIZE,
1176 HIGH_FREQUENCY_HEAP_COMMIT_SIZE,
1178 dwHighFrequencyHeapReserveSize,
1179 LOADERHEAP_PROFILE_COUNTER);
1180 initReservedMem += dwHighFrequencyHeapReserveSize;
// Collectible allocators alias the low-frequency heap to the high-frequency
// one (COLLECTIBLE_LOW_FREQUENCY_HEAP_SIZE is 0 — no separate heap).
1182 if (IsCollectible())
1183 m_pLowFrequencyHeap = m_pHighFrequencyHeap;
1185 #if defined(_DEBUG) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
1186 m_pHighFrequencyHeap->m_fPermitStubsWithUnwindInfo = TRUE;
1189 m_pStubHeap = new (&m_StubHeapInstance) LoaderHeap(STUB_HEAP_RESERVE_SIZE,
1190 STUB_HEAP_COMMIT_SIZE,
1192 dwStubHeapReserveSize,
1193 LOADERHEAP_PROFILE_COUNTER,
1194 STUBMANAGER_RANGELIST(StubLinkStubManager),
1195 TRUE /* Make heap executable */);
1197 initReservedMem += dwStubHeapReserveSize;
1199 #if defined(_DEBUG) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
1200 m_pStubHeap->m_fPermitStubsWithUnwindInfo = TRUE;
1203 #ifdef CROSSGEN_COMPILE
1204 m_pPrecodeHeap = new (&m_PrecodeHeapInstance) LoaderHeap(GetOsPageSize(), GetOsPageSize());
1206 m_pPrecodeHeap = new (&m_PrecodeHeapInstance) CodeFragmentHeap(this, STUB_CODE_BLOCK_PRECODE);
1209 // Initialize the EE marshaling data to NULL.
1210 m_pMarshalingData = NULL;
1212 // Set up the IL stub cache
1213 m_ILStubCache.Init(m_pHighFrequencyHeap);
1215 #ifdef FEATURE_COMINTEROP
1216 // Init the COM Interop data hash
1218 LockOwner lock = { &m_InteropDataCrst, IsOwnerOfCrst };
1219 m_interopDataHash.Init(0, NULL, false, &lock);
1221 #endif // FEATURE_COMINTEROP
1225 #ifndef CROSSGEN_COMPILE
1227 #ifdef FEATURE_READYTORUN
1228 PTR_CodeFragmentHeap LoaderAllocator::GetDynamicHelpersHeap()
1235 if (m_pDynamicHelpersHeap == NULL)
1237 CodeFragmentHeap * pDynamicHelpersHeap = new CodeFragmentHeap(this, STUB_CODE_BLOCK_DYNAMICHELPER);
1238 if (InterlockedCompareExchangeT(&m_pDynamicHelpersHeap, pDynamicHelpersHeap, NULL) != NULL)
1239 delete pDynamicHelpersHeap;
1241 return m_pDynamicHelpersHeap;
1245 FuncPtrStubs * LoaderAllocator::GetFuncPtrStubs()
1252 if (m_pFuncPtrStubs == NULL)
1254 FuncPtrStubs * pFuncPtrStubs = new FuncPtrStubs();
1255 if (InterlockedCompareExchangeT(&m_pFuncPtrStubs, pFuncPtrStubs, NULL) != NULL)
1256 delete pFuncPtrStubs;
1258 return m_pFuncPtrStubs;
// Hands out the one-shot memory block reserved up front for a collectible
// allocator's virtual-stub-dispatch heap, or NULL if already claimed.
1261 BYTE *LoaderAllocator::GetVSDHeapInitialBlock(DWORD *pSize)
1263 LIMITED_METHOD_CONTRACT;
// The compare-exchange atomically swaps the member to NULL, so the block
// can be claimed at most once even under races.
1266 BYTE *buffer = InterlockedCompareExchangeT(&m_pVSDHeapInitialAlloc, NULL, m_pVSDHeapInitialAlloc);
// NOTE(review): elided lines likely set *pSize = 0 first and only report
// this size when buffer != NULL -- confirm against the full source.
1269 *pSize = COLLECTIBLE_VIRTUALSTUBDISPATCH_HEAP_SPACE;
// Hands out the one-shot block reserved for a collectible allocator's code
// heap, but only if the request fits and the block lies inside the caller's
// required [loAddr, hiAddr] address range. (Failure returns -- elided here --
// presumably yield NULL so the caller falls back to a normal allocation.)
1274 BYTE *LoaderAllocator::GetCodeHeapInitialBlock(const BYTE * loAddr, const BYTE * hiAddr, DWORD minimumSize, DWORD *pSize)
1276 LIMITED_METHOD_CONTRACT;
1279 // Check to see if the size is small enough that this might work
1280 if (minimumSize > COLLECTIBLE_CODEHEAP_SIZE)
1283 // Check to see if initial alloc would be in the proper region
1284 if (loAddr != NULL || hiAddr != NULL)
1286 if (m_pCodeHeapInitialAlloc < loAddr)
1288 if ((m_pCodeHeapInitialAlloc + COLLECTIBLE_CODEHEAP_SIZE) > hiAddr)
// Atomically claim the block (swap the member to NULL) so it is given out
// at most once across threads.
1292 BYTE * buffer = InterlockedCompareExchangeT(&m_pCodeHeapInitialAlloc, NULL, m_pCodeHeapInitialAlloc);
1295 *pSize = COLLECTIBLE_CODEHEAP_SIZE;
1300 // in retail should be called from AppDomain::Terminate
// Tears down everything this loader allocator owns: marshaling data, thunk
// caches, locks, the fat-token structures, the string-literal map, and the
// loader heaps themselves. Runs once per allocator (re-entry guard elided).
1301 void LoaderAllocator::Terminate()
1312 m_fTerminated = true;
1314 LOG((LF_CLASSLOADER, LL_INFO100, "Begin LoaderAllocator::Terminate for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(this))));
1316 DeleteMarshalingData();
// Undo the GC pressure registered for this collectible allocator; the
// constant must match the corresponding AddMemoryPressure amount (call site
// not visible here -- confirm). The m_fGCPressure guard is in elided lines.
1321 GCInterface::RemoveMemoryPressure(30000);
1322 m_fGCPressure = false;
1325 delete m_pUMEntryThunkCache;
1326 m_pUMEntryThunkCache = NULL;
1328 m_crstLoaderAllocator.Destroy();
1329 #ifdef FEATURE_COMINTEROP
1330 m_ComCallWrapperCrst.Destroy();
1331 m_InteropDataCrst.Destroy();
1333 m_LoaderAllocatorReferences.RemoveAll();
1335 // In collectible types we merge the low frequency and high frequency heaps
1336 // So don't destroy them twice.
1337 if ((m_pLowFrequencyHeap != NULL) && (m_pLowFrequencyHeap != m_pHighFrequencyHeap))
// Heaps were placement-new'ed into member storage, so they are destructed
// in place rather than deleted; the backing reservation is freed below.
1339 m_pLowFrequencyHeap->~LoaderHeap();
1340 m_pLowFrequencyHeap = NULL;
1343 if (m_pHighFrequencyHeap != NULL)
1345 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1346 UnregisterUnwindInfoInLoaderHeap(m_pHighFrequencyHeap);
1349 m_pHighFrequencyHeap->~LoaderHeap();
1350 m_pHighFrequencyHeap = NULL;
1353 if (m_pStubHeap != NULL)
1355 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1356 UnregisterUnwindInfoInLoaderHeap(m_pStubHeap);
1359 m_pStubHeap->~LoaderHeap();
// NOTE(review): m_pStubHeap is not visibly nulled here (unlike the other
// heaps); the assignment may be in an elided line -- confirm.
1363 if (m_pPrecodeHeap != NULL)
1365 m_pPrecodeHeap->~CodeFragmentHeap();
1366 m_pPrecodeHeap = NULL;
1369 #ifdef FEATURE_READYTORUN
1370 if (m_pDynamicHelpersHeap != NULL)
1372 delete m_pDynamicHelpersHeap;
1373 m_pDynamicHelpersHeap = NULL;
1377 if (m_pFuncPtrStubs != NULL)
1379 delete m_pFuncPtrStubs;
1380 m_pFuncPtrStubs = NULL;
1383 // This was the block reserved by BaseDomain::Init for the loaderheaps.
1384 if (m_InitialReservedMemForLoaderHeaps)
// Only after the in-place heap destructors above is it safe to release the
// single virtual reservation that backed all of them.
1386 ClrVirtualFree (m_InitialReservedMemForLoaderHeaps, 0, MEM_RELEASE);
1387 m_InitialReservedMemForLoaderHeaps=NULL;
1390 #ifdef FAT_DISPATCH_TOKENS
1391 if (m_pFatTokenSetLock != NULL)
1393 delete m_pFatTokenSetLock;
1394 m_pFatTokenSetLock = NULL;
1397 if (m_pFatTokenSet != NULL)
1399 delete m_pFatTokenSet;
1400 m_pFatTokenSet = NULL;
1402 #endif // FAT_DISPATCH_TOKENS
1404 CleanupStringLiteralMap();
1406 LOG((LF_CLASSLOADER, LL_INFO100, "End LoaderAllocator::Terminate for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(this))));
1409 #endif // !CROSSGEN_COMPILE
1412 #else //DACCESS_COMPILE
1413 void LoaderAllocator::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
1417 if (m_pLowFrequencyHeap.IsValid())
1419 m_pLowFrequencyHeap->EnumMemoryRegions(flags);
1421 if (m_pHighFrequencyHeap.IsValid())
1423 m_pHighFrequencyHeap->EnumMemoryRegions(flags);
1425 if (m_pStubHeap.IsValid())
1427 m_pStubHeap->EnumMemoryRegions(flags);
1429 if (m_pPrecodeHeap.IsValid())
1431 m_pPrecodeHeap->EnumMemoryRegions(flags);
1433 if (m_pPrecodeHeap.IsValid())
1435 m_pPrecodeHeap->EnumMemoryRegions(flags);
1438 #endif //DACCESS_COMPILE
// Rough accounting of the memory consumed by this allocator: sums the sizes
// of the loader heaps, string-literal map, and VSD stub manager that exist.
// (The declaration of the accumulator 'retval' is in elided lines.)
1440 SIZE_T LoaderAllocator::EstimateSize()
1442 WRAPPER_NO_CONTRACT;
// NOTE(review): for collectible allocators the low- and high-frequency heap
// pointers alias the same heap (see Init/Terminate), so this pair of checks
// would count that heap twice -- confirm whether that is intended.
1444 if(m_pHighFrequencyHeap)
1445 retval+=m_pHighFrequencyHeap->GetSize();
1446 if(m_pLowFrequencyHeap)
1447 retval+=m_pLowFrequencyHeap->GetSize();
// NOTE(review): the null guard for m_pStubHeap appears to be in an elided
// line above this dereference -- confirm.
1449 retval+=m_pStubHeap->GetSize();
1450 if(m_pStringLiteralMap)
1451 retval+=m_pStringLiteralMap->GetSize();
1452 #ifndef CROSSGEN_COMPILE
1453 if(m_pVirtualCallStubManager)
1454 retval+=m_pVirtualCallStubManager->GetSize();
1460 #ifndef DACCESS_COMPILE
1462 #ifndef CROSSGEN_COMPILE
// Returns the dispatch token for <typeId, slotNumber>. Small pairs pack
// directly into a slim token; pairs too large for that encoding allocate a
// heap-backed DispatchTokenFat, cached in a per-allocator set so each pair
// maps to exactly one instance.
1464 DispatchToken LoaderAllocator::GetDispatchToken(
1465     UINT32 typeId, UINT32 slotNumber)
1471 INJECT_FAULT(COMPlusThrowOM(););
1474 #ifdef FAT_DISPATCH_TOKENS
1476 if (DispatchToken::RequiresDispatchTokenFat(typeId, slotNumber))
1479 // Lock and set are lazily created.
1481 if (m_pFatTokenSetLock == NULL)
// Create both the lock and the set before publishing; holders free them on
// any failure path below.
1483 NewHolder<SimpleRWLock> pFatTokenSetLock = new SimpleRWLock(COOPERATIVE_OR_PREEMPTIVE, LOCK_TYPE_DEFAULT);
1484 SimpleWriteLockHolder lock(pFatTokenSetLock);
1485 NewHolder<FatTokenSet> pFatTokenSet = new FatTokenSet;
// Publish the lock via CAS: only the winning thread keeps its lock/set pair.
1487 if (FastInterlockCompareExchangePointer(
1488         &m_pFatTokenSetLock, pFatTokenSetLock.GetValue(), NULL) != NULL)
1489 { // Someone beat us to it
1491 // NewHolder will delete lock.
1494 { // Make sure second allocation succeeds before suppressing holder of first.
1495 pFatTokenSetLock.SuppressRelease();
1496 m_pFatTokenSet = pFatTokenSet;
1497 pFatTokenSet.SuppressRelease();
1502 // Take read lock, see if the requisite token has already been created and if so use it.
1503 // Otherwise, take write lock and create new token and add to the set.
1507 SimpleReadLockHolder rlock(m_pFatTokenSetLock);
1508 DispatchTokenFat key(typeId, slotNumber);
1509 DispatchTokenFat *pFat = m_pFatTokenSet->Lookup(&key);
1511 { // <typeId,slotNumber> is already in the set.
1512 return DispatchToken(pFat);
1517 SimpleWriteLockHolder wlock(m_pFatTokenSetLock);
1519 // Check to see if someone beat us to the punch between
1520 // releasing the read lock and taking the write lock.
1521 pFat = m_pFatTokenSet->Lookup(&key);
1524 { // No one beat us; allocate and insert a new DispatchTokenFat instance.
// Fat tokens live on the high-frequency loader heap, so their lifetime is
// tied to this allocator (no individual delete).
1525 pFat = new ((LPVOID)GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(DispatchTokenFat))))
1526     DispatchTokenFat(typeId, slotNumber);
1528 m_pFatTokenSet->Add(pFat);
1531 return DispatchToken(pFat);
1534 #endif // FAT_DISPATCH_TOKENS
// Slim path: the pair fits in the packed token encoding.
1536 return DispatchToken::CreateDispatchToken(typeId, slotNumber);
// Best-effort, non-throwing variant of GetDispatchToken: looks up an
// existing fat token but never creates one, and returns an invalid token on
// any failure (the EX_TRY frame around the locking is in elided lines).
1539 DispatchToken LoaderAllocator::TryLookupDispatchToken(UINT32 typeId, UINT32 slotNumber)
1547 #ifdef FAT_DISPATCH_TOKENS
1549 if (DispatchToken::RequiresDispatchTokenFat(typeId, slotNumber))
// If the lock was never created, no fat token can exist yet.
1551 if (m_pFatTokenSetLock != NULL)
1553 DispatchTokenFat * pFat = NULL;
1554 // Stack probes and locking operations are throwing. Catch all
1555 // exceptions and just return an invalid token, since this is
1558 SimpleReadLockHolder rlock(m_pFatTokenSetLock);
1559 if (m_pFatTokenSet != NULL)
1561 DispatchTokenFat key(typeId, slotNumber);
1562 pFat = m_pFatTokenSet->Lookup(&key);
1569 EX_END_CATCH(SwallowAllExceptions);
// pFat == NULL yields an invalid DispatchToken, signalling "not found".
1573 return DispatchToken(pFat);
1576 // Return invalid token when not found.
1577 return DispatchToken();
1580 #endif // FAT_DISPATCH_TOKENS
// Slim tokens need no lookup; they are computed directly from the pair.
1582 return DispatchToken::CreateDispatchToken(typeId, slotNumber);
1586 void LoaderAllocator::InitVirtualCallStubManager(BaseDomain * pDomain)
1588 STANDARD_VM_CONTRACT;
1590 NewHolder<VirtualCallStubManager> pMgr(new VirtualCallStubManager());
1592 // Init the manager, including all heaps and such.
1593 pMgr->Init(pDomain, this);
1595 m_pVirtualCallStubManager = pMgr;
1597 // Successfully created the manager.
1598 pMgr.SuppressRelease();
// Shuts down and frees the virtual-stub-dispatch manager, if one was ever
// created for this allocator. Safe to call when none exists.
1601 void LoaderAllocator::UninitVirtualCallStubManager()
1603 WRAPPER_NO_CONTRACT;
1605 if (m_pVirtualCallStubManager != NULL)
// Uninit before delete so the manager can release its own resources first.
1607 m_pVirtualCallStubManager->Uninit();
1608 delete m_pVirtualCallStubManager;
1609 m_pVirtualCallStubManager = NULL;
1613 #endif // !CROSSGEN_COMPILE
// Returns the per-allocator EE marshaling data, creating it on first use.
1615 EEMarshalingData *LoaderAllocator::GetMarshalingData()
1617 CONTRACT (EEMarshalingData*)
1622 INJECT_FAULT(COMPlusThrowOM());
1623 POSTCONDITION(CheckPointer(m_pMarshalingData));
// Double-checked creation: cheap unsynchronized test first, then re-test
// under the interop lock before allocating.
1627 if (!m_pMarshalingData)
1630 CrstHolder holder(&m_InteropDataCrst);
1632 if (!m_pMarshalingData)
// Placement-allocated on the low-frequency loader heap, tying its lifetime
// to this allocator.
1634 m_pMarshalingData = new (GetLowFrequencyHeap()) EEMarshalingData(this, &m_InteropDataCrst);
1638 RETURN m_pMarshalingData;
// Frees the marshaling data during teardown. Called from Terminate.
1641 void LoaderAllocator::DeleteMarshalingData()
1651 // We are in shutdown - no need to take any lock
1652 if (m_pMarshalingData)
// NOTE(review): the object is placement-allocated on a loader heap (see
// GetMarshalingData); this delete presumably runs the destructor via a
// custom operator delete -- confirm against EEMarshalingData's definition.
1654 delete m_pMarshalingData;
1655 m_pMarshalingData = NULL;
1659 #endif // !DACCESS_COMPILE
// Whether this allocator can ever be unloaded. The return statement is in
// elided lines; presumably FALSE, since the global allocator lives for the
// process lifetime -- confirm against the full source.
1661 BOOL GlobalLoaderAllocator::CanUnload()
1663 LIMITED_METHOD_CONTRACT;
// Whether this allocator can be unloaded. The return statement is in elided
// lines; presumably TRUE for assembly (collectible) allocators -- confirm
// against the full source.
1668 BOOL AssemblyLoaderAllocator::CanUnload()
1670 LIMITED_METHOD_CONTRACT;
1675 DomainAssemblyIterator::DomainAssemblyIterator(DomainAssembly* pFirstAssembly)
1677 pCurrentAssembly = pFirstAssembly;
1678 pNextAssembly = pCurrentAssembly ? pCurrentAssembly->GetNextDomainAssemblyInSameALC() : NULL;
1681 void DomainAssemblyIterator::operator++()
1683 pCurrentAssembly = pNextAssembly;
1684 pNextAssembly = pCurrentAssembly ? pCurrentAssembly->GetNextDomainAssemblyInSameALC() : NULL;
// Marks this allocator as collectible and sets up the shuffle-thunk cache
// (runtime builds only) that collectible delegates require.
1687 void AssemblyLoaderAllocator::SetCollectible()
1695 m_IsCollectible = true;
1696 #ifndef DACCESS_COMPILE
// The cache allocates its thunks from this allocator's stub heap, so the
// thunks die with the allocator.
1697 m_pShuffleThunkCache = new ShuffleThunkCache(m_pStubHeap);
1701 #ifndef DACCESS_COMPILE
1703 #ifndef CROSSGEN_COMPILE
// Destructor: drops the final reference on the registered binder (if any)
// and frees the shuffle-thunk cache created by SetCollectible.
1705 AssemblyLoaderAllocator::~AssemblyLoaderAllocator()
1707 if (m_binderToRelease != NULL)
// This must be the last reference; VERIFY asserts the refcount hits zero.
1709 VERIFY(m_binderToRelease->Release() == 0);
1710 m_binderToRelease = NULL;
// delete NULL is harmless, so no guard is needed here.
1713 delete m_pShuffleThunkCache;
1714 m_pShuffleThunkCache = NULL;
// Records the ALC binder whose reference this allocator now owns; the
// destructor releases it. May be called at most once (asserted below).
1717 void AssemblyLoaderAllocator::RegisterBinder(CLRPrivBinderAssemblyLoadContext* binderToRelease)
1719 // When the binder is registered it will be released by the destructor
1721 _ASSERTE(m_binderToRelease == NULL);
1722 m_binderToRelease = binderToRelease;
// Returns a pinned-or-handle slot for the literal described by pStringData,
// creating the per-allocator string-literal map on first use.
1725 STRINGREF *LoaderAllocator::GetStringObjRefPtrFromUnicodeString(EEStringData *pStringData)
1732 PRECONDITION(CheckPointer(pStringData));
1733 INJECT_FAULT(COMPlusThrowOM(););
1736 if (m_pStringLiteralMap == NULL)
1738 LazyInitStringLiteralMap();
1740 _ASSERTE(m_pStringLiteralMap);
// TRUE = add if missing; !CanUnload() selects pinned storage for literals
// in allocators that can never go away.
1741 return m_pStringLiteralMap->GetStringLiteral(pStringData, TRUE, !CanUnload());
1744 //*****************************************************************************
1745 void LoaderAllocator::LazyInitStringLiteralMap()
1752 INJECT_FAULT(COMPlusThrowOM(););
1756 NewHolder<StringLiteralMap> pStringLiteralMap(new StringLiteralMap());
1758 pStringLiteralMap->Init();
1760 if (InterlockedCompareExchangeT<StringLiteralMap *>(&m_pStringLiteralMap, pStringLiteralMap, NULL) == NULL)
1762 pStringLiteralMap.SuppressRelease();
// Frees the string-literal map during teardown; called from Terminate.
1766 void LoaderAllocator::CleanupStringLiteralMap()
1776 if (m_pStringLiteralMap)
1778 delete m_pStringLiteralMap;
1779 m_pStringLiteralMap = NULL;
// Looks up (without inserting: bAdd=FALSE) whether the given string already
// has an interned entry in this allocator's literal map.
1783 STRINGREF *LoaderAllocator::IsStringInterned(STRINGREF *pString)
1790 PRECONDITION(CheckPointer(pString));
1791 INJECT_FAULT(COMPlusThrowOM(););
1794 if (m_pStringLiteralMap == NULL)
1796 LazyInitStringLiteralMap();
1798 _ASSERTE(m_pStringLiteralMap);
// FALSE = lookup only; !CanUnload() selects pinned storage (see
// GetStringObjRefPtrFromUnicodeString).
1799 return m_pStringLiteralMap->GetInternedString(pString, FALSE, !CanUnload());
// Interns the given string in this allocator's literal map, inserting it
// (bAdd=TRUE) if it is not already present.
1802 STRINGREF *LoaderAllocator::GetOrInternString(STRINGREF *pString)
1809 PRECONDITION(CheckPointer(pString));
1810 INJECT_FAULT(COMPlusThrowOM(););
1813 if (m_pStringLiteralMap == NULL)
1815 LazyInitStringLiteralMap();
1817 _ASSERTE(m_pStringLiteralMap);
1818 return m_pStringLiteralMap->GetInternedString(pString, TRUE, !CanUnload());
// Queues an object handle to be destroyed when this collectible allocator
// is torn down (see CleanupHandles).
1821 void AssemblyLoaderAllocator::RegisterHandleForCleanup(OBJECTHANDLE objHandle)
1829 PRECONDITION(CheckPointer(objHandle));
1830 INJECT_FAULT(COMPlusThrowOM(););
// List nodes live on the low-frequency loader heap, so they are reclaimed
// with the allocator rather than freed individually.
1834 void * pItem = GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(HandleCleanupListItem)));
1836 // InsertTail must be protected by a lock. Just use the loader allocator lock
1837 CrstHolder ch(&m_crstLoaderAllocator);
1838 m_handleCleanupList.InsertTail(new (pItem) HandleCleanupListItem(objHandle));
// Destroys every handle queued via RegisterHandleForCleanup. Runs during
// teardown only, hence the deliberately lock-free list draining.
1841 void AssemblyLoaderAllocator::CleanupHandles()
1852 _ASSERTE(GetDomain()->IsAppDomain());
1854 // This method doesn't take a lock around RemoveHead because it's supposed to
1855 // be called only from Terminate
1856 while (!m_handleCleanupList.IsEmpty())
1858 HandleCleanupListItem * pItem = m_handleCleanupList.RemoveHead();
// The item itself is loader-heap memory and needs no individual free.
1859 DestroyTypedHandle(pItem->m_handle);
// Queues a failed type-init list-lock entry so it can be unlinked from the
// domain's class-init lock when this collectible allocator goes away.
1863 void LoaderAllocator::RegisterFailedTypeInitForCleanup(ListLockEntry *pListLockEntry)
1871 PRECONDITION(CheckPointer(pListLockEntry));
1872 INJECT_FAULT(COMPlusThrowOM(););
// Only collectible allocators need this bookkeeping; others return early
// (the early return is in elided lines after this check).
1876 if (!IsCollectible())
// Node storage comes from the low-frequency loader heap (no individual free).
1881 void * pItem = GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(FailedTypeInitCleanupListItem)));
1883 // InsertTail must be protected by a lock. Just use the loader allocator lock
1884 CrstHolder ch(&m_crstLoaderAllocator);
1885 m_failedTypeInitCleanupList.InsertTail(new (pItem) FailedTypeInitCleanupListItem(pListLockEntry));
// Unlinks every queued failed-type-init entry from the domain's class-init
// lock list. Collectible allocators only; runs during cleanup.
1888 void LoaderAllocator::CleanupFailedTypeInit()
1899 if (!IsCollectible())
1904 _ASSERTE(GetDomain()->IsAppDomain());
1906 // This method doesn't take a lock around loader allocator state access, because
1907 // it's supposed to be called only during cleanup. However, the domain-level state
1908 // might be accessed by multiple threads.
1909 ListLock *pLock = GetDomain()->GetClassInitLock();
1911 while (!m_failedTypeInitCleanupList.IsEmpty())
1913 FailedTypeInitCleanupListItem * pItem = m_failedTypeInitCleanupList.RemoveHead();
// The domain-level list is shared, so each unlink happens under its lock.
1915 ListLockHolder pInitLock(pLock);
1916 pLock->Unlink(pItem->m_pListLockEntry);
// Tells the registered binder to drop its strong handle to the managed
// AssemblyLoadContext (the binder itself is released in the destructor).
1920 void AssemblyLoaderAllocator::ReleaseManagedAssemblyLoadContext()
1930 if (m_binderToRelease != NULL)
1932 // Release the managed ALC
1933 m_binderToRelease->ReleaseLoadContext();
1937 #ifdef FEATURE_COMINTEROP
// Returns the per-allocator CCW cache, creating it on first use with
// double-checked locking under the COM-call-wrapper lock.
1938 ComCallWrapperCache * LoaderAllocator::GetComCallWrapperCache()
1945 INJECT_FAULT(COMPlusThrowOM(););
1949 if (!m_pComCallWrapperCache)
1951 CrstHolder lh(&m_ComCallWrapperCrst);
// Re-check under the lock: another thread may have created it meanwhile.
1953 if (!m_pComCallWrapperCache)
1954 m_pComCallWrapperCache = ComCallWrapperCache::Create(this);
1956 _ASSERTE(m_pComCallWrapperCache);
1957 return m_pComCallWrapperCache;
1959 #endif // FEATURE_COMINTEROP
1961 // U->M thunks created in this LoaderAllocator and not associated with a delegate.
1961 // U->M thunks created in this LoaderAllocator and not associated with a delegate.
// Lazily creates the unmanaged-to-managed entry-thunk cache with lock-free
// CAS publication; the loser of a race deletes its redundant cache.
1962 UMEntryThunkCache *LoaderAllocator::GetUMEntryThunkCache()
1969 INJECT_FAULT(COMPlusThrowOM(););
1973 if (!m_pUMEntryThunkCache)
1975 UMEntryThunkCache *pUMEntryThunkCache = new UMEntryThunkCache(GetAppDomain());
1977 if (FastInterlockCompareExchangePointer(&m_pUMEntryThunkCache, pUMEntryThunkCache, NULL) != NULL)
1979 // some thread swooped in and set the field
1980 delete pUMEntryThunkCache;
1983 _ASSERTE(m_pUMEntryThunkCache);
1984 return m_pUMEntryThunkCache;
1987 #endif // !CROSSGEN_COMPILE
1989 #ifdef FEATURE_COMINTEROP
1991 // Look up interop data for a method table
1992 // Returns the data pointer if present, NULL otherwise
1992 // Look up interop data for a method table
1993 // Returns the data pointer if present, NULL otherwise
1993 InteropMethodTableData *LoaderAllocator::LookupComInteropData(MethodTable *pMT)
2009 // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry
2009 // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry
2010 BOOL LoaderAllocator::InsertComInteropData(MethodTable* pMT, InteropMethodTableData *pData)
2012 // We don't keep track of this kind of information for interfaces
2013 _ASSERTE(!pMT->IsInterface());
// Lookup and insert are a single atomic unit under the interop lock, so two
// threads cannot both insert for the same MethodTable.
2016 CrstHolder holder(&m_InteropDataCrst);
2018 // Check to see that it's not already in there
2019 InteropMethodTableData *pDupData = (InteropMethodTableData*)m_interopDataHash.LookupValue((UPTR)pMT, (LPVOID)NULL);
// INVALIDENTRY is this hash's "absent" sentinel; a duplicate returns FALSE
// (the return is in an elided line).
2020 if (pDupData != (InteropMethodTableData*)INVALIDENTRY)
2023 // Not in there, so insert
2024 m_interopDataHash.InsertValue((UPTR)pMT, (LPVOID)pData);
2030 #endif // FEATURE_COMINTEROP
2032 #endif // !DACCESS_COMPILE