1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
4
5
6 #include "common.h"
7 #include "stringliteralmap.h"
8 #include "virtualcallstub.h"
9 #include "threadsuspend.h"
10 #include "mlinfo.h"
11 #ifndef DACCESS_COMPILE
12 #include "comdelegate.h"
13 #endif
14 #include "comcallablewrapper.h"
15
16 //*****************************************************************************
17 // Used by LoaderAllocator::Init for easier readability.
18 #ifdef ENABLE_PERF_COUNTERS
19 #define LOADERHEAP_PROFILE_COUNTER (&(GetPerfCounters().m_Loading.cbLoaderHeapSize))
20 #else
21 #define LOADERHEAP_PROFILE_COUNTER (NULL)
22 #endif
23
24 #ifndef CROSSGEN_COMPILE
25 #define STUBMANAGER_RANGELIST(stubManager) (stubManager::g_pManager->GetRangeList())
26 #else
27 #define STUBMANAGER_RANGELIST(stubManager) (NULL)
28 #endif
29
30 UINT64 LoaderAllocator::cLoaderAllocatorsCreated = 1;
31
32 LoaderAllocator::LoaderAllocator()  
33 {
34     LIMITED_METHOD_CONTRACT;
35
36     // initialize all members up front to NULL so that short-circuit failure won't cause invalid values
37     m_InitialReservedMemForLoaderHeaps = NULL;
38     m_pLowFrequencyHeap = NULL;
39     m_pHighFrequencyHeap = NULL;
40     m_pStubHeap = NULL;
41     m_pPrecodeHeap = NULL;
42     m_pExecutableHeap = NULL;
43 #ifdef FEATURE_READYTORUN
44     m_pDynamicHelpersHeap = NULL;
45 #endif
46     m_pFuncPtrStubs = NULL;
47     m_hLoaderAllocatorObjectHandle = NULL;
48     m_pStringLiteralMap = NULL;
49     
50     m_cReferences = (UINT32)-1;
51     
52     m_pFirstDomainAssemblyFromSameALCToDelete = NULL;
53     
54 #ifdef FAT_DISPATCH_TOKENS
55     // DispatchTokenFat pointer table for token overflow scenarios. Lazily allocated.
56     m_pFatTokenSetLock = NULL;
57     m_pFatTokenSet = NULL;
58 #endif
59
60 #ifndef CROSSGEN_COMPILE
61     m_pVirtualCallStubManager = NULL;
62 #endif
63
64     m_fGCPressure = false;
65     m_fTerminated = false;
66     m_fUnloaded = false;
67     m_fMarked = false;
68     m_pLoaderAllocatorDestroyNext = NULL;
69     m_pDomain = NULL;
70     m_pCodeHeapInitialAlloc = NULL;
71     m_pVSDHeapInitialAlloc = NULL;
72     m_pLastUsedCodeHeap = NULL;
73     m_pLastUsedDynamicCodeHeap = NULL;
74     m_pJumpStubCache = NULL;
75     m_IsCollectible = false;
76
77     m_pMarshalingData = NULL;
78
79 #ifdef FEATURE_COMINTEROP
80     m_pComCallWrapperCache = NULL;
81 #endif
82
83     m_pUMEntryThunkCache = NULL;
84
85     m_nLoaderAllocator = InterlockedIncrement64((LONGLONG *)&LoaderAllocator::cLoaderAllocatorsCreated);
86 }
87
88 LoaderAllocator::~LoaderAllocator()
89 {
90     CONTRACTL
91     {
92         DESTRUCTOR_CHECK;
93     }
94     CONTRACTL_END;
95 #if !defined(DACCESS_COMPILE) && !defined(CROSSGEN_COMPILE)
96     Terminate();
97
98     // Assert that VSD is not still active when the destructor is called.
99     _ASSERTE(m_pVirtualCallStubManager == NULL);
100
101      // Code manager is responsible for cleaning up.
102     _ASSERTE(m_pJumpStubCache == NULL);
103 #endif
104 }
105
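// Reference-count states for a LoaderAllocator, as used throughout this file:
//   (UINT32)-1 : constructed, but managed tracking not yet activated
//                (see ActivateManagedTracking below)
//   1..n       : alive; the managed scout holds the initial reference and
//                EnsureReference / AddReference add more
//   0          : dead; AddReferenceIfAlive refuses to resurrect it and the
//                loader allocator GC may collect it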
106 #ifndef DACCESS_COMPILE
107 //---------------------------------------------------------------------------------------
108 // 
109 void LoaderAllocator::AddReference()
110 {
111     CONTRACTL
112     {
113         NOTHROW;
114         GC_NOTRIGGER;
115         MODE_ANY;
116     }
117     CONTRACTL_END;
118     
119     _ASSERTE((m_cReferences > (UINT32)0) && (m_cReferences != (UINT32)-1));
120     FastInterlockIncrement((LONG *)&m_cReferences);
121 }
122 #endif //!DACCESS_COMPILE
123
124 //---------------------------------------------------------------------------------------
125 // 
126 // Adds reference if the native object is alive  - code:LoaderAllocator#AssemblyPhases.
127 // Returns TRUE if the reference was added.
128 // 
129 BOOL LoaderAllocator::AddReferenceIfAlive()
130 {
131     CONTRACTL
132     {
133         NOTHROW;
134         GC_NOTRIGGER;
135         MODE_ANY;
136     }
137     CONTRACTL_END;
138     
139 #ifndef DACCESS_COMPILE
140     for (;;)
141     {
142         // Local snapshot of ref-count
143         UINT32 cReferencesLocalSnapshot = m_cReferences;
144         _ASSERTE(cReferencesLocalSnapshot != (UINT32)-1);
145         
146         if (cReferencesLocalSnapshot == 0)
147         {   // Ref-count was 0, do not AddRef
148             return FALSE;
149         }
150         
151         UINT32 cOriginalReferences = FastInterlockCompareExchange(
152             (LONG *)&m_cReferences, 
153             cReferencesLocalSnapshot + 1, 
154             cReferencesLocalSnapshot);
155         
156         if (cOriginalReferences == cReferencesLocalSnapshot)
157         {   // The exchange happened
158             return TRUE;
159         }
160         // Lost the interlocked race with another thread; retry with a fresh snapshot
161     }
162 #else //DACCESS_COMPILE
163     // DAC won't AddRef
164     return IsAlive();
165 #endif //DACCESS_COMPILE
166 } // LoaderAllocator::AddReferenceIfAlive
167
168 //---------------------------------------------------------------------------------------
169 // 
170 BOOL LoaderAllocator::Release()
171 {
172     CONTRACTL
173     {
174         NOTHROW;
175         GC_NOTRIGGER;
176         MODE_ANY;
177     }
178     CONTRACTL_END;
179     
180     // Only actually destroy the domain assembly when all references to it are gone.
181     // This should preserve behavior in the debugger such that an UnloadModule event
182     // will occur before the underlying data structures cease functioning.
183 #ifndef DACCESS_COMPILE
184     
185     _ASSERTE((m_cReferences > (UINT32)0) && (m_cReferences != (UINT32)-1));
186     LONG cNewReferences = FastInterlockDecrement((LONG *)&m_cReferences);
187     return (cNewReferences == 0);
188 #else //DACCESS_COMPILE
189     
190     return (m_cReferences == (UINT32)0);
191 #endif //DACCESS_COMPILE
192 } // LoaderAllocator::Release
193
194 #ifndef DACCESS_COMPILE
195 #ifndef CROSSGEN_COMPILE
196 //---------------------------------------------------------------------------------------
197 // 
198 BOOL LoaderAllocator::CheckAddReference_Unlocked(LoaderAllocator *pOtherLA)
199 {
200     CONTRACTL
201     {
202         THROWS;
203         MODE_ANY;
204     }
205     CONTRACTL_END;
206
207     // This must be checked before calling this function
208     _ASSERTE(pOtherLA != this);
209     
210     // This function requires that the loader allocator lock has been taken.
211     _ASSERTE(GetDomain()->GetLoaderAllocatorReferencesLock()->OwnedByCurrentThread());
212     
213     if (m_LoaderAllocatorReferences.Lookup(pOtherLA) == NULL)
214     {
215         GCX_COOP();
216         // Build a managed reference to keep the target object live
217         AllocateHandle(pOtherLA->GetExposedObject());
218
219         // Keep track of the references that have already been made
220         m_LoaderAllocatorReferences.Add(pOtherLA);
221
222         // Notify the other LoaderAllocator that a reference exists
223         pOtherLA->AddReference();
224         return TRUE;
225     }
226
227     return FALSE;
228 }
229
230 //---------------------------------------------------------------------------------------
231 // 
232 BOOL LoaderAllocator::EnsureReference(LoaderAllocator *pOtherLA)
233 {
234     CONTRACTL
235     {
236         THROWS;
237         MODE_ANY;
238     }
239     CONTRACTL_END;
240
241     // Check if this lock can be taken in all places that the function is called
242     _ASSERTE(GetDomain()->GetLoaderAllocatorReferencesLock()->Debug_CanTake());
243     
244     if (!IsCollectible())
245         return FALSE;
246
247     if (this == pOtherLA)
248         return FALSE;
249
250     if (!pOtherLA->IsCollectible())
251         return FALSE;
252
253     CrstHolder ch(GetDomain()->GetLoaderAllocatorReferencesLock());
254     return CheckAddReference_Unlocked(pOtherLA);
255 }
256
257 BOOL LoaderAllocator::EnsureInstantiation(Module *pDefiningModule, Instantiation inst)
258 {
259     CONTRACTL
260     {
261         THROWS;
262         MODE_ANY;
263     }
264     CONTRACTL_END;
265
266     BOOL fNewReferenceNeeded = FALSE;
267
268     // Check if this lock can be taken in all places that the function is called
269     _ASSERTE(GetDomain()->GetLoaderAllocatorReferencesLock()->Debug_CanTake());
270
271     if (!IsCollectible())
272         return FALSE;
273
274     CrstHolder ch(GetDomain()->GetLoaderAllocatorReferencesLock());
275
276     if (pDefiningModule != NULL)
277     {
278         LoaderAllocator *pDefiningLoaderAllocator = pDefiningModule->GetLoaderAllocator();
279         if (pDefiningLoaderAllocator->IsCollectible())
280         {
281             if (pDefiningLoaderAllocator != this)
282             {
283                 fNewReferenceNeeded = CheckAddReference_Unlocked(pDefiningLoaderAllocator) || fNewReferenceNeeded;
284             }
285         }
286     }
287
288     for (DWORD i = 0; i < inst.GetNumArgs(); i++)
289     {
290         TypeHandle arg = inst[i];
291         _ASSERTE(!arg.IsEncodedFixup());
292         LoaderAllocator *pOtherLA = arg.GetLoaderModule()->GetLoaderAllocator();
293
294         if (pOtherLA == this)
295             continue;
296
297         if (!pOtherLA->IsCollectible())
298             continue;
299
300         fNewReferenceNeeded = CheckAddReference_Unlocked(pOtherLA) || fNewReferenceNeeded;
301     }
302
303     return fNewReferenceNeeded;
304 }
305 #else // CROSSGEN_COMPILE
306 BOOL LoaderAllocator::EnsureReference(LoaderAllocator *pOtherLA)
307 {
308     return FALSE;
309 }
310
311 BOOL LoaderAllocator::EnsureInstantiation(Module *pDefiningModule, Instantiation inst)
312 {
313     return FALSE;
314 }
315 #endif // !CROSSGEN_COMPILE
316
317 #ifndef CROSSGEN_COMPILE
318 bool LoaderAllocator::Marked()
319 {
320     LIMITED_METHOD_CONTRACT;
321     return m_fMarked;
322 }
323
324 void LoaderAllocator::ClearMark() 
325 {
326     LIMITED_METHOD_CONTRACT; 
327     m_fMarked = false;
328 }
329
330 void LoaderAllocator::Mark() 
331 {
332     WRAPPER_NO_CONTRACT;
333
334     if (!m_fMarked) 
335     {
336         m_fMarked = true;
337
338         LoaderAllocatorSet::Iterator iter = m_LoaderAllocatorReferences.Begin();
339         while (iter != m_LoaderAllocatorReferences.End())
340         {
341             LoaderAllocator *pAllocator = *iter;
342             pAllocator->Mark();
343             iter++;
344         }
345     }
346 }
347
348 //---------------------------------------------------------------------------------------
349 // 
350 // Collect unreferenced assemblies, remove them from the assembly list and return their loader allocator 
351 // list.
352 // 
353 //static
354 LoaderAllocator * LoaderAllocator::GCLoaderAllocators_RemoveAssemblies(AppDomain * pAppDomain)
355 {
356     CONTRACTL
357     {
358         THROWS;
359         GC_TRIGGERS;
360         MODE_PREEMPTIVE;
361     }
362     CONTRACTL_END;
363     // List of LoaderAllocators being deleted
364     LoaderAllocator * pFirstDestroyedLoaderAllocator = NULL;
365     
366 #if 0
367     // Debug logic for debugging the loader allocator gc.
368     {
369         /* Iterate through every loader allocator, and print its current state */
370         AppDomain::AssemblyIterator iData;
371         iData = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
372             kIncludeExecution | kIncludeLoaded | kIncludeCollected));
373         CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
374         
375         while (iData.Next_Unlocked(pDomainAssembly.This()))
376         {
377             // The assembly could be collected (ref-count = 0), do not use holder which calls add-ref
378             Assembly * pAssembly = pDomainAssembly->GetLoadedAssembly();
379
380             if (pAssembly != NULL)
381             {
382                 LoaderAllocator * pLoaderAllocator = pAssembly->GetLoaderAllocator();
383                 if (pLoaderAllocator->IsCollectible())
384                 {
385                     printf("LA %p ReferencesTo %d\n", pLoaderAllocator, pLoaderAllocator->m_cReferences);
386                     LoaderAllocatorSet::Iterator iter = pLoaderAllocator->m_LoaderAllocatorReferences.Begin();
387                     while (iter != pLoaderAllocator->m_LoaderAllocatorReferences.End())
388                     {
389                         LoaderAllocator * pAllocator = *iter;
390                         printf("LARefTo: %p\n", pAllocator);
391                         iter++;
392                     }
393                 }
394             }
395         }
396     }
397 #endif //0
398     
399     AppDomain::AssemblyIterator i;
400     // Iterate through every loader allocator, marking as we go
401     {
402         CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
403
404         i = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
405             kIncludeExecution | kIncludeLoaded | kIncludeCollected));
406         CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
407         
408         while (i.Next_Unlocked(pDomainAssembly.This()))
409         {
410             // The assembly could be collected (ref-count = 0), do not use holder which calls add-ref
411             Assembly * pAssembly = pDomainAssembly->GetLoadedAssembly();
412             
413             if (pAssembly != NULL)
414             {
415                 LoaderAllocator * pLoaderAllocator = pAssembly->GetLoaderAllocator();
416                 if (pLoaderAllocator->IsCollectible())
417                 {
418                     if (pLoaderAllocator->IsAlive())
419                         pLoaderAllocator->Mark();
420                 }
421             }
422         }
423     }
424     
425     // Iterate through every loader allocator, unmarking the marked loader allocators and
426     // building a free list of the unmarked ones
427     {
428         CrstHolder chLoaderAllocatorReferencesLock(pAppDomain->GetLoaderAllocatorReferencesLock());
429         CrstHolder chAssemblyListLock(pAppDomain->GetAssemblyListLock());
430
431         i = pAppDomain->IterateAssembliesEx((AssemblyIterationFlags)(
432             kIncludeExecution | kIncludeLoaded | kIncludeCollected));
433         CollectibleAssemblyHolder<DomainAssembly *> pDomainAssembly;
434
435         while (i.Next_Unlocked(pDomainAssembly.This()))
436         {
437             // The assembly could be collected (ref-count = 0), do not use holder which calls add-ref
438             Assembly * pAssembly = pDomainAssembly->GetLoadedAssembly();
439             
440             if (pAssembly != NULL)
441             {
442                 LoaderAllocator * pLoaderAllocator = pAssembly->GetLoaderAllocator();
443                 if (pLoaderAllocator->IsCollectible())
444                 {
445                     if (pLoaderAllocator->Marked())
446                     {
447                         pLoaderAllocator->ClearMark();
448                     }
449                     else if (!pLoaderAllocator->IsAlive())
450                     {
451                         // Check that this LoaderAllocator is not already in the list to destroy
452                         // (in case multiple assemblies are loaded in the same LoaderAllocator)
453                         bool addAllocator = true;
454                         LoaderAllocator * pCheckAllocatorToDestroy = pFirstDestroyedLoaderAllocator;
455                         while (pCheckAllocatorToDestroy != NULL)
456                         {
457                             if (pCheckAllocatorToDestroy == pLoaderAllocator)
458                             {
459                                 addAllocator = false;
460                                 break;
461                             }
462
463                             pCheckAllocatorToDestroy = pCheckAllocatorToDestroy->m_pLoaderAllocatorDestroyNext;
464                         }
465
466                         // If it is not already in the list, add this LoaderAllocator to it
467                         if (addAllocator)
468                         {
469                             pLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
470                             // We will store a reference to this assembly, and use it later in this function
471                             pFirstDestroyedLoaderAllocator = pLoaderAllocator;
472                             _ASSERTE(pLoaderAllocator->m_pFirstDomainAssemblyFromSameALCToDelete != NULL);
473                         }
474                     }
475                 }
476             }
477         }
478     }
479     
480     // Iterate through free list, removing from Assembly list 
481     LoaderAllocator * pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
482
483     while (pDomainLoaderAllocatorDestroyIterator != NULL)
484     {
485         _ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
486
487         GetAppDomain()->RemoveTypesFromTypeIDMap(pDomainLoaderAllocatorDestroyIterator);
488
489         DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
490
491         // Release all assemblies from the same ALC
492         while (!domainAssemblyIt.end())
493         {
494             DomainAssembly* domainAssemblyToRemove = domainAssemblyIt;
495             pAppDomain->RemoveAssembly(domainAssemblyToRemove);
496
497             if (!domainAssemblyToRemove->GetAssembly()->IsDynamic())
498             {
499                 pAppDomain->RemoveFileFromCache(domainAssemblyToRemove->GetFile());
500                 AssemblySpec spec;
501                 spec.InitializeSpec(domainAssemblyToRemove->GetFile());
502                 VERIFY(pAppDomain->RemoveAssemblyFromCache(domainAssemblyToRemove));
503                 pAppDomain->RemoveNativeImageDependency(&spec);
504             }
505
506             domainAssemblyIt++;
507         }
508
509         pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
510     }
511     
512     return pFirstDestroyedLoaderAllocator;
513 } // LoaderAllocator::GCLoaderAllocators_RemoveAssemblies
514
515 //---------------------------------------------------------------------------------------
516 // 
517 // Collect unreferenced assemblies, delete all their remaining resources.
518 // 
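// Roughly, the collection proceeds in these phases:
//   1. GCLoaderAllocators_RemoveAssemblies marks the loader allocators that are still
//      alive (following m_LoaderAllocatorReferences), then sweeps the unmarked dead ones
//      off the AppDomain's assembly list, chaining them via m_pLoaderAllocatorDestroyNext.
//   2. For each dead loader allocator: fire the ETW unload event, set the unloaded flag
//      and notify the debugger about each of its DomainAssemblies.
//   3. Delete the DomainAssemblies, release the managed AssemblyLoadContext, suspend the
//      EE to unload code and virtual stub dispatch data, and register the loader
//      allocator for deletion.
//   4. ShutdownFreeLoaderAllocators performs the final cleanup.
// 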
519 //static
520 void LoaderAllocator::GCLoaderAllocators(LoaderAllocator* pOriginalLoaderAllocator)
521 {
522     CONTRACTL
523     {
524         THROWS;
525         GC_TRIGGERS;
526         MODE_PREEMPTIVE;
527     }
528     CONTRACTL_END;
529     
530     // List of LoaderAllocators being deleted
531     LoaderAllocator * pFirstDestroyedLoaderAllocator = NULL;
532     
533     AppDomain* pAppDomain = (AppDomain*)pOriginalLoaderAllocator->GetDomain();
534
535     // Collect all LoaderAllocators that no longer have any live DomainAssemblies
536     // Note that it may not collect our pOriginalLoaderAllocator in case this
537     // LoaderAllocator hasn't loaded any DomainAssembly. We handle this case in the next loop.
538     // Note: The removed LoaderAllocators are not reachable outside of this function anymore, because we 
539     // removed them from the assembly list
540     pFirstDestroyedLoaderAllocator = GCLoaderAllocators_RemoveAssemblies(pAppDomain);
541
542     bool isOriginalLoaderAllocatorFound = false;
543
544     // Iterate through free list, firing ETW events and notifying the debugger
545     LoaderAllocator * pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
546     while (pDomainLoaderAllocatorDestroyIterator != NULL)
547     {
548         _ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
549         // Fire ETW event
550         ETW::LoaderLog::CollectibleLoaderAllocatorUnload((AssemblyLoaderAllocator *)pDomainLoaderAllocatorDestroyIterator);
551
552         // Set the unloaded flag before notifying the debugger
553         pDomainLoaderAllocatorDestroyIterator->SetIsUnloaded();
554
555         DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
556         while (!domainAssemblyIt.end())
557         {
558             // Notify the debugger
559             domainAssemblyIt->NotifyDebuggerUnload();
560             domainAssemblyIt++;
561         }
562
563         if (pDomainLoaderAllocatorDestroyIterator == pOriginalLoaderAllocator)
564         {
565             isOriginalLoaderAllocatorFound = true;
566         }
567         pDomainLoaderAllocatorDestroyIterator = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
568     }
569
570     // If the original LoaderAllocator was not processed above, it is most likely a LoaderAllocator without any loaded DomainAssembly.
571     // We still want to collect it, so add it to the list of LoaderAllocators to destroy.
572     if (!isOriginalLoaderAllocatorFound && !pOriginalLoaderAllocator->IsAlive())
573     {
574         pOriginalLoaderAllocator->m_pLoaderAllocatorDestroyNext = pFirstDestroyedLoaderAllocator;
575         pFirstDestroyedLoaderAllocator = pOriginalLoaderAllocator;
576     }
577     
578     // Iterate through free list, deleting DomainAssemblies
579     pDomainLoaderAllocatorDestroyIterator = pFirstDestroyedLoaderAllocator;
580     while (pDomainLoaderAllocatorDestroyIterator != NULL)
581     {
582         _ASSERTE(!pDomainLoaderAllocatorDestroyIterator->IsAlive());
583
584         DomainAssemblyIterator domainAssemblyIt(pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete);
585         while (!domainAssemblyIt.end())
586         {
587             delete (DomainAssembly*)domainAssemblyIt;
588             domainAssemblyIt++;
589         }
590         // We don't strictly have to set it to NULL as the assembly is not reachable anymore, but do it just in case ...
591         // (a NULL AV is also much easier to debug if someone uses the stale pointer accidentally)
592         pDomainLoaderAllocatorDestroyIterator->m_pFirstDomainAssemblyFromSameALCToDelete = NULL;
593
594         pDomainLoaderAllocatorDestroyIterator->ReleaseManagedAssemblyLoadContext();
595
596         // The following code previously ran on delete, in ~DomainAssembly -> Terminate.
597         // It was moved here to make sure that we can unload a LoaderAllocator
598         // that never had a DomainAssembly
599         // (a LoaderAllocator can now own 0-n DomainAssemblies)
600
601         // This cleanup code starts resembling parts of AppDomain::Terminate too much.
602         // It would be useful to reduce duplication and also establish clear responsibilities
603         // for LoaderAllocator::Destroy, Assembly::Terminate, LoaderAllocator::Terminate
604         // and LoaderAllocator::~LoaderAllocator. We need to establish how these
605         // cleanup paths interact with app-domain unload and process tear-down, too.
606
607         if (!IsAtProcessExit())
608         {
609             // Suspend the EE to do some clean up that can only occur
610             // while no threads are running.
611             GCX_COOP(); // SuspendEE may require current thread to be in Coop mode
612                         // SuspendEE cares about the reason flag only when invoked for a GC
613                         // Other values are typically ignored. If using SUSPEND_FOR_APPDOMAIN_SHUTDOWN
614                         // is inappropriate, we can introduce a new flag or hijack an unused one.
615             ThreadSuspend::SuspendEE(ThreadSuspend::SUSPEND_FOR_APPDOMAIN_SHUTDOWN);
616         }
617
618         ExecutionManager::Unload(pDomainLoaderAllocatorDestroyIterator);
619         pDomainLoaderAllocatorDestroyIterator->UninitVirtualCallStubManager();
620
621         // TODO: Do we really want to perform this on each LoaderAllocator?
622         MethodTable::ClearMethodDataCache();
623         ClearJitGenericHandleCache(pAppDomain);
624
625         if (!IsAtProcessExit())
626         {
627             // Resume the EE.
628             ThreadSuspend::RestartEE(FALSE, TRUE);
629         }
630
631         // Because RegisterLoaderAllocatorForDeletion is modifying m_pLoaderAllocatorDestroyNext, we are saving it here
632         LoaderAllocator* pLoaderAllocatorDestroyNext = pDomainLoaderAllocatorDestroyIterator->m_pLoaderAllocatorDestroyNext;
633
634         // Register this LoaderAllocator for cleanup
635         pAppDomain->RegisterLoaderAllocatorForDeletion(pDomainLoaderAllocatorDestroyIterator);
636
637         // Go to next
638         pDomainLoaderAllocatorDestroyIterator = pLoaderAllocatorDestroyNext;
639     }
640     
641     // Deleting the DomainAssemblies will have created a list of LoaderAllocators on the AppDomain.
642     // Call this shutdown function to clean those up.
643     pAppDomain->ShutdownFreeLoaderAllocators();
644 } // LoaderAllocator::GCLoaderAllocators
645         
646 //---------------------------------------------------------------------------------------
647 // 
648 //static
649 BOOL QCALLTYPE LoaderAllocator::Destroy(QCall::LoaderAllocatorHandle pLoaderAllocator)
650 {
651     QCALL_CONTRACT;
652
653     BOOL ret = FALSE;
654
655     BEGIN_QCALL;
656
657     if (ObjectHandleIsNull(pLoaderAllocator->GetLoaderAllocatorObjectHandle()))
658     {
659         STRESS_LOG1(LF_CLASSLOADER, LL_INFO100, "Begin LoaderAllocator::Destroy for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(pLoaderAllocator)));
660         LoaderAllocatorID *pID = pLoaderAllocator->Id();
661
662         // This will probably change for shared code unloading
663         _ASSERTE(pID->GetType() == LAT_Assembly);
664
665 #ifdef FEATURE_COMINTEROP
666         if (pLoaderAllocator->m_pComCallWrapperCache)
667         {
668             pLoaderAllocator->m_pComCallWrapperCache->Release();
669
670             // if the above released the wrapper cache, then it will call back and reset our
671             // m_pComCallWrapperCache to null.
672             if (!pLoaderAllocator->m_pComCallWrapperCache)
673             {
674                 LOG((LF_CLASSLOADER, LL_INFO10, "LoaderAllocator::Destroy ComCallWrapperCache released\n"));
675             }
676     #ifdef _DEBUG
677             else
678             {
679                 pLoaderAllocator->m_pComCallWrapperCache = NULL;
680                 LOG((LF_CLASSLOADER, LL_INFO10, "LoaderAllocator::Destroy ComCallWrapperCache not released\n"));
681             }
682     #endif // _DEBUG
683         }
684 #endif // FEATURE_COMINTEROP
685
686         DomainAssembly* pDomainAssembly = (DomainAssembly*)(pID->GetDomainAssemblyIterator());
687         if (pDomainAssembly != NULL)
688         {
689             Assembly *pAssembly = pDomainAssembly->GetCurrentAssembly();
690             pLoaderAllocator->m_pFirstDomainAssemblyFromSameALCToDelete = pAssembly->GetDomainAssembly();
691         }
692
693         // Iterate through all references to other loader allocators and decrement their reference
694         // count
695         LoaderAllocatorSet::Iterator iter = pLoaderAllocator->m_LoaderAllocatorReferences.Begin();
696         while (iter != pLoaderAllocator->m_LoaderAllocatorReferences.End())
697         {
698             LoaderAllocator *pAllocator = *iter;
699             pAllocator->Release();
700             iter++;
701         }
702
703         // Release this loader allocator
704         BOOL fIsLastReferenceReleased = pLoaderAllocator->Release();
705
706         // If the reference count on this assembly dropped to 0, then a LoaderAllocator may
707         // be collectible, so perform a garbage collection of loader allocators.
708         // The reference count is set up such that in the case of non-trivial graphs, the reference count
709         // may hit zero early.
710         if (fIsLastReferenceReleased)
711         {
712             LoaderAllocator::GCLoaderAllocators(pLoaderAllocator);
713         }
714         STRESS_LOG1(LF_CLASSLOADER, LL_INFO100, "End LoaderAllocator::Destroy for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(pLoaderAllocator)));
715
716         ret = TRUE;
717     }
718
719     END_QCALL;
720
721     return ret;
722 } // LoaderAllocator::Destroy
723
724 #define MAX_LOADERALLOCATOR_HANDLE 0x40000000
725
726 // Returns NULL if the managed LoaderAllocator object was already collected.
727 LOADERHANDLE LoaderAllocator::AllocateHandle(OBJECTREF value)
728 {
729     CONTRACTL
730     {
731         THROWS;
732         GC_TRIGGERS;
733         MODE_COOPERATIVE;
734     }
735     CONTRACTL_END;
736
737     LOADERHANDLE retVal;
738
739     struct _gc
740     {
741         OBJECTREF value;
742         LOADERALLOCATORREF loaderAllocator;
743         PTRARRAYREF handleTable;
744         PTRARRAYREF handleTableOld;
745     } gc;
746
747     ZeroMemory(&gc, sizeof(gc));
748
749     GCPROTECT_BEGIN(gc);
750
751     gc.value = value;
752
753     // The handle table is read locklessly, be careful
754     if (IsCollectible())
755     {
756         gc.loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
757         if (gc.loaderAllocator == NULL)
758         {   // The managed LoaderAllocator is already collected, we cannot allocate any exposed managed objects for it
759             retVal = NULL;
760         }
761         else
762         {
763             DWORD slotsUsed;
764             DWORD numComponents;
765
766             do
767             {
768                 {
769                     CrstHolder ch(&m_crstLoaderAllocator);
770
771                     gc.handleTable = gc.loaderAllocator->GetHandleTable();
772
773                     if (!m_freeHandleIndexesStack.IsEmpty())
774                     {
775                         // Reuse a handle slot that was previously freed
776                         DWORD freeHandleIndex = m_freeHandleIndexesStack.Pop();
777                         gc.handleTable->SetAt(freeHandleIndex, gc.value);
778                         retVal = (UINT_PTR)((freeHandleIndex + 1) << 1);
779                         break;
780                     }
781
782                     slotsUsed = gc.loaderAllocator->GetSlotsUsed();
783
784                     if (slotsUsed > MAX_LOADERALLOCATOR_HANDLE)
785                     {
786                         COMPlusThrowOM();
787                     }
788
789                     numComponents = gc.handleTable->GetNumComponents();
790
791                     if (slotsUsed < numComponents)
792                     {
793                         // The handle table is large enough, allocate next slot from it
794                         gc.handleTable->SetAt(slotsUsed, gc.value);
795                         gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
796                         retVal = (UINT_PTR)((slotsUsed + 1) << 1);
797                         break;
798                     }
799                 }
800
801                 // We need to enlarge the handle table
802                 gc.handleTableOld = gc.handleTable;
803
804                 DWORD newSize = numComponents * 2;
805                 gc.handleTable = (PTRARRAYREF)AllocateObjectArray(newSize, g_pObjectClass);
806
807                 {
808                     CrstHolder ch(&m_crstLoaderAllocator);
809
810                     if (gc.loaderAllocator->GetHandleTable() == gc.handleTableOld)
811                     {
812                         /* Copy out of old array */
813                         memmoveGCRefs(gc.handleTable->GetDataPtr(), gc.handleTableOld->GetDataPtr(), slotsUsed * sizeof(Object *));
814                         gc.loaderAllocator->SetHandleTable(gc.handleTable);
815                     }
816                     else
817                     {
818                         // Another thread beat us to enlarging the handle array; use the handle table it allocated
819                         gc.handleTable = gc.loaderAllocator->GetHandleTable();
820                     }
821
822                     slotsUsed = gc.loaderAllocator->GetSlotsUsed();
823                     numComponents = gc.handleTable->GetNumComponents();
824
825                     if (slotsUsed < numComponents)
826                     {
827                         // The handle table is large enough, allocate next slot from it
828                         gc.handleTable->SetAt(slotsUsed, gc.value);
829                         gc.loaderAllocator->SetSlotsUsed(slotsUsed + 1);
830                         retVal = (UINT_PTR)((slotsUsed + 1) << 1);
831                         break;
832                     }
833                 }
834
835                 // Loop in the unlikely case that another thread has beaten us on the handle array enlarging, but
836                 // all the slots were used up before the current thread was scheduled.
837             } 
838             while (true); 
839         }
840     }
841     else
842     {
843         OBJECTREF* pRef = GetDomain()->AllocateObjRefPtrsInLargeTable(1);
844         SetObjectReference(pRef, gc.value);
845         retVal = (((UINT_PTR)pRef) + 1);
846     }
847
848     GCPROTECT_END();
849
850     return retVal;
851 }
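// LOADERHANDLE encoding produced above and decoded by FreeHandle, SetHandleValue and
// CompareExchangeValueInHandle below:
//   - collectible allocator:     handle = (tableIndex + 1) << 1, so the low bit is clear;
//     e.g. slot 5 encodes as (5 + 1) << 1 = 0xC and decodes as (0xC >> 1) - 1 = 5
//   - non-collectible allocator: handle = ((UINT_PTR)pRef) + 1, where pRef comes from
//     AllocateObjRefPtrsInLargeTable; the low bit is set since the slot is pointer aligned
#if 0
// Illustrative sketch only (not compiled): telling the two encodings apart, mirroring
// the checks in the accessors below. The helper names are placeholders.
static bool IsDirectPointerHandle(LOADERHANDLE h)
{
    return (((UINT_PTR)h) & 1) != 0;        // non-collectible: OBJECTREF* + 1
}
static UINT_PTR HandleToTableIndex(LOADERHANDLE h)
{
    return (((UINT_PTR)h) >> 1) - 1;        // collectible: (index + 1) << 1
}
#endif // 0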
852
853 OBJECTREF LoaderAllocator::GetHandleValue(LOADERHANDLE handle)
854 {
855     CONTRACTL
856     {
857         NOTHROW;
858         GC_NOTRIGGER;
859         MODE_COOPERATIVE;
860     }
861     CONTRACTL_END;
862
863     OBJECTREF objRet = NULL;
864     GET_LOADERHANDLE_VALUE_FAST(this, handle, &objRet);
865     return objRet;
866 }
867
868 void LoaderAllocator::FreeHandle(LOADERHANDLE handle)
869 {
870     CONTRACTL
871     {
872         NOTHROW;
873         GC_NOTRIGGER;
874         MODE_ANY;
875         PRECONDITION(handle != NULL);
876     }
877     CONTRACTL_END;
878
879     SetHandleValue(handle, NULL);
880
881     if ((((UINT_PTR)handle) & 1) == 0)
882     {
883         // The slot value doesn't have the low bit set, so it is an index to the handle table.
884         // In this case, push the index of the handle to the stack of freed indexes for
885         // reuse.
886         CrstHolder ch(&m_crstLoaderAllocator);
887
888         UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
889         // The Push can fail due to OOM. Ignore this failure, it is better than crashing. The
890         // only effect is that the slot will not be reused in the future if the runtime survives
891         // the low memory situation.
892         m_freeHandleIndexesStack.Push((DWORD)index);
893     }
894 }
895
896 OBJECTREF LoaderAllocator::CompareExchangeValueInHandle(LOADERHANDLE handle, OBJECTREF valueUNSAFE, OBJECTREF compareUNSAFE)
897 {
898     CONTRACTL
899     {
900         THROWS;
901         GC_TRIGGERS;
902         MODE_COOPERATIVE;
903         PRECONDITION(handle != NULL);
904     }
905     CONTRACTL_END;
906
907     OBJECTREF retVal;
908
909     struct _gc
910     {
911         OBJECTREF value;
912         OBJECTREF compare;
913         OBJECTREF previous;
914     } gc;
915
916     ZeroMemory(&gc, sizeof(gc));
917     GCPROTECT_BEGIN(gc);
918
919     gc.value = valueUNSAFE;
920     gc.compare = compareUNSAFE;
921
922     if ((((UINT_PTR)handle) & 1) != 0)
923     {
924         OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
925         gc.previous = *ptr;
926         if ((*ptr) == gc.compare)
927         {
928             SetObjectReference(ptr, gc.value);
929         }
930     }
931     else
932     {
933         /* The handle table is read locklessly, be careful */
934         CrstHolder ch(&m_crstLoaderAllocator);
935
936         _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
937
938         UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
939         LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
940         PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
941
942         gc.previous = handleTable->GetAt(index);
943         if (gc.previous == gc.compare)
944         {
945             handleTable->SetAt(index, gc.value);
946         }
947     }
948
949     retVal = gc.previous;
950     GCPROTECT_END();
951
952     return retVal;
953 }
954
955 void LoaderAllocator::SetHandleValue(LOADERHANDLE handle, OBJECTREF value)
956 {
957     CONTRACTL
958     {
959         NOTHROW;
960         GC_NOTRIGGER;
961         MODE_ANY;
962         PRECONDITION(handle != NULL);
963     }
964     CONTRACTL_END;
965
966     GCX_COOP();
967
968     GCPROTECT_BEGIN(value);
969
970     // If the slot value does have the low bit set, then it is a simple pointer to the value
971     // Otherwise, we will need a more complicated operation to clear the value.
972     if ((((UINT_PTR)handle) & 1) != 0)
973     {
974         OBJECTREF *ptr = (OBJECTREF *)(((UINT_PTR)handle) - 1);
975         SetObjectReference(ptr, value);
976     }
977     else
978     {
979         // The handle table is read locklessly, be careful
980         CrstHolder ch(&m_crstLoaderAllocator);
981
982         _ASSERTE(!ObjectHandleIsNull(m_hLoaderAllocatorObjectHandle));
983
984         UINT_PTR index = (((UINT_PTR)handle) >> 1) - 1;
985         LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
986         PTRARRAYREF handleTable = loaderAllocator->GetHandleTable();
987         handleTable->SetAt(index, value);
988     }
989
990     GCPROTECT_END();
991
992     return;
993 }
994
995 void LoaderAllocator::SetupManagedTracking(LOADERALLOCATORREF * pKeepLoaderAllocatorAlive)
996 {
997     STANDARD_VM_CONTRACT;
998
999     GCInterface::AddMemoryPressure(30000);
1000     m_fGCPressure = true;
1001
1002     GCX_COOP();
1003
1004     //
1005     // Initialize managed loader allocator reference holder
1006     //
1007
1008     MethodTable *pMT = MscorlibBinder::GetClass(CLASS__LOADERALLOCATOR);
1009
1010     *pKeepLoaderAllocatorAlive = (LOADERALLOCATORREF)AllocateObject(pMT);
1011
1012     MethodDescCallSite initLoaderAllocator(METHOD__LOADERALLOCATOR__CTOR, (OBJECTREF *)pKeepLoaderAllocatorAlive);
1013
1014     ARG_SLOT args[] = {
1015         ObjToArgSlot(*pKeepLoaderAllocatorAlive)
1016     };
1017
1018     initLoaderAllocator.Call(args);
1019
1020     m_hLoaderAllocatorObjectHandle = GetDomain()->CreateLongWeakHandle(*pKeepLoaderAllocatorAlive);
1021
1022     RegisterHandleForCleanup(m_hLoaderAllocatorObjectHandle);
1023 }
1024
1025 void LoaderAllocator::ActivateManagedTracking()
1026 {
1027     CONTRACTL
1028     {
1029         NOTHROW;
1030         GC_TRIGGERS;
1031         FORBID_FAULT;
1032         MODE_ANY;
1033     }
1034     CONTRACTL_END
1035
1036     GCX_COOP();
1037
1038     // There is now one external reference to this LoaderAllocator (the managed scout)
1039     _ASSERTE(m_cReferences == (UINT32)-1);
1040     m_cReferences = (UINT32)1;
1041
1042     LOADERALLOCATORREF loaderAllocator = (LOADERALLOCATORREF)ObjectFromHandle(m_hLoaderAllocatorObjectHandle);
1043     loaderAllocator->SetNativeLoaderAllocator(this);
1044 }
1045 #endif // !CROSSGEN_COMPILE
1046
1047
1048 // We don't actually allocate a low frequency heap for collectible types.
1049 // This is carefully tuned to sum up to 16 pages to reduce waste.
1050 #define COLLECTIBLE_LOW_FREQUENCY_HEAP_SIZE        (0 * GetOsPageSize())
1051 #define COLLECTIBLE_HIGH_FREQUENCY_HEAP_SIZE       (3 * GetOsPageSize())
1052 #define COLLECTIBLE_STUB_HEAP_SIZE                 GetOsPageSize()
1053 #define COLLECTIBLE_CODEHEAP_SIZE                  (7 * GetOsPageSize())
1054 #define COLLECTIBLE_VIRTUALSTUBDISPATCH_HEAP_SPACE (5 * GetOsPageSize())
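// For collectible loader allocators the reserved sizes above sum to
// 0 + 3 + 1 + 7 + 5 = 16 pages, matching the tuning note above.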
1055
1056 void LoaderAllocator::Init(BaseDomain *pDomain, BYTE *pExecutableHeapMemory)
1057 {
1058     STANDARD_VM_CONTRACT;
1059
1060     m_pDomain = pDomain;
1061
1062     m_crstLoaderAllocator.Init(CrstLoaderAllocator, (CrstFlags)CRST_UNSAFE_COOPGC);
1063     m_InteropDataCrst.Init(CrstInteropData, CRST_REENTRANCY);
1064 #ifdef FEATURE_COMINTEROP
1065     m_ComCallWrapperCrst.Init(CrstCOMCallWrapper);
1066 #endif
1067
1068 #ifndef CROSSGEN_COMPILE
1069     m_methodDescBackpatchInfoTracker.Initialize(this);
1070 #endif
1071
1072     //
1073     // Initialize the heaps
1074     //
1075
1076     DWORD dwLowFrequencyHeapReserveSize;
1077     DWORD dwHighFrequencyHeapReserveSize;
1078     DWORD dwStubHeapReserveSize;
1079     DWORD dwExecutableHeapReserveSize;
1080     DWORD dwCodeHeapReserveSize;
1081     DWORD dwVSDHeapReserveSize;
1082
1083     dwExecutableHeapReserveSize = 0;
1084
1085     if (IsCollectible())
1086     {
1087         dwLowFrequencyHeapReserveSize  = COLLECTIBLE_LOW_FREQUENCY_HEAP_SIZE;
1088         dwHighFrequencyHeapReserveSize = COLLECTIBLE_HIGH_FREQUENCY_HEAP_SIZE;
1089         dwStubHeapReserveSize          = COLLECTIBLE_STUB_HEAP_SIZE;
1090         dwCodeHeapReserveSize          = COLLECTIBLE_CODEHEAP_SIZE;
1091         dwVSDHeapReserveSize           = COLLECTIBLE_VIRTUALSTUBDISPATCH_HEAP_SPACE;
1092     }
1093     else
1094     {
1095         dwLowFrequencyHeapReserveSize  = LOW_FREQUENCY_HEAP_RESERVE_SIZE;
1096         dwHighFrequencyHeapReserveSize = HIGH_FREQUENCY_HEAP_RESERVE_SIZE;
1097         dwStubHeapReserveSize          = STUB_HEAP_RESERVE_SIZE;
1098
1099         // Non-collectible assemblies do not reserve space for these heaps.
1100         dwCodeHeapReserveSize = 0;
1101         dwVSDHeapReserveSize = 0;
1102     }
1103
1104     // The global heap needs a bit of space for executable memory that is not associated with a rangelist.
1105     // Take a page from the high-frequency heap for this.
1106     if (pExecutableHeapMemory != NULL)
1107     {
1108         dwExecutableHeapReserveSize = GetOsPageSize();
1109
1110         _ASSERTE(dwExecutableHeapReserveSize < dwHighFrequencyHeapReserveSize);
1111         dwHighFrequencyHeapReserveSize -= dwExecutableHeapReserveSize;
1112     }
1113
1114     DWORD dwTotalReserveMemSize = dwLowFrequencyHeapReserveSize
1115                                 + dwHighFrequencyHeapReserveSize
1116                                 + dwStubHeapReserveSize
1117                                 + dwCodeHeapReserveSize
1118                                 + dwVSDHeapReserveSize
1119                                 + dwExecutableHeapReserveSize;
1120
1121     dwTotalReserveMemSize = (DWORD) ALIGN_UP(dwTotalReserveMemSize, VIRTUAL_ALLOC_RESERVE_GRANULARITY);
1122
1123 #if !defined(_WIN64)
1124     // Make sure that we reserve as little as possible on 32-bit to save address space
1125     _ASSERTE(dwTotalReserveMemSize <= VIRTUAL_ALLOC_RESERVE_GRANULARITY);
1126 #endif
1127
1128     BYTE * initReservedMem = ClrVirtualAllocExecutable(dwTotalReserveMemSize, MEM_RESERVE, PAGE_NOACCESS);
1129
1130     m_InitialReservedMemForLoaderHeaps = initReservedMem;
1131
1132     if (initReservedMem == NULL)
1133         COMPlusThrowOM();
1134
1135     if (IsCollectible())
1136     {
1137         m_pCodeHeapInitialAlloc = initReservedMem;
1138         initReservedMem += dwCodeHeapReserveSize;
1139         m_pVSDHeapInitialAlloc = initReservedMem;
1140         initReservedMem += dwVSDHeapReserveSize;
1141     }
1142     else
1143     {
1144         _ASSERTE((dwCodeHeapReserveSize == 0) && (m_pCodeHeapInitialAlloc == NULL));
1145         _ASSERTE((dwVSDHeapReserveSize == 0) && (m_pVSDHeapInitialAlloc == NULL));
1146     }
1147
1148     if (dwLowFrequencyHeapReserveSize != 0)
1149     {
1150         _ASSERTE(!IsCollectible());
1151
1152         m_pLowFrequencyHeap = new (&m_LowFreqHeapInstance) LoaderHeap(LOW_FREQUENCY_HEAP_RESERVE_SIZE,
1153                                                                       LOW_FREQUENCY_HEAP_COMMIT_SIZE,
1154                                                                       initReservedMem,
1155                                                                       dwLowFrequencyHeapReserveSize,
1156                                                                       LOADERHEAP_PROFILE_COUNTER);
1157         initReservedMem += dwLowFrequencyHeapReserveSize;
1158     }
1159
1160     if (dwExecutableHeapReserveSize != 0)
1161     {
1162         _ASSERTE(!IsCollectible());
1163
1164         m_pExecutableHeap = new (pExecutableHeapMemory) LoaderHeap(STUB_HEAP_RESERVE_SIZE,
1165                                                                       STUB_HEAP_COMMIT_SIZE,
1166                                                                       initReservedMem,
1167                                                                       dwExecutableHeapReserveSize,
1168                                                                       LOADERHEAP_PROFILE_COUNTER,
1169                                                                       NULL,
1170                                                                       TRUE /* Make heap executable */
1171                                                                       );
1172         initReservedMem += dwExecutableHeapReserveSize;
1173     }
1174
1175     m_pHighFrequencyHeap = new (&m_HighFreqHeapInstance) LoaderHeap(HIGH_FREQUENCY_HEAP_RESERVE_SIZE,
1176                                                                     HIGH_FREQUENCY_HEAP_COMMIT_SIZE,
1177                                                                     initReservedMem,
1178                                                                     dwHighFrequencyHeapReserveSize,
1179                                                                     LOADERHEAP_PROFILE_COUNTER);
1180     initReservedMem += dwHighFrequencyHeapReserveSize;
1181
1182     if (IsCollectible())
1183         m_pLowFrequencyHeap = m_pHighFrequencyHeap;
1184
1185 #if defined(_DEBUG) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
1186     m_pHighFrequencyHeap->m_fPermitStubsWithUnwindInfo = TRUE;
1187 #endif
1188
1189     m_pStubHeap = new (&m_StubHeapInstance) LoaderHeap(STUB_HEAP_RESERVE_SIZE,
1190                                                        STUB_HEAP_COMMIT_SIZE,
1191                                                        initReservedMem,
1192                                                        dwStubHeapReserveSize,
1193                                                        LOADERHEAP_PROFILE_COUNTER,
1194                                                        STUBMANAGER_RANGELIST(StubLinkStubManager),
1195                                                        TRUE /* Make heap executable */);
1196
1197     initReservedMem += dwStubHeapReserveSize;
1198
1199 #if defined(_DEBUG) && defined(STUBLINKER_GENERATES_UNWIND_INFO)
1200     m_pStubHeap->m_fPermitStubsWithUnwindInfo = TRUE;
1201 #endif
1202
1203 #ifdef CROSSGEN_COMPILE
1204     m_pPrecodeHeap = new (&m_PrecodeHeapInstance) LoaderHeap(GetOsPageSize(), GetOsPageSize());
1205 #else
1206     m_pPrecodeHeap = new (&m_PrecodeHeapInstance) CodeFragmentHeap(this, STUB_CODE_BLOCK_PRECODE);
1207 #endif
1208
1209     // Initialize the EE marshaling data to NULL.
1210     m_pMarshalingData = NULL;
1211
1212     // Set up the IL stub cache
1213     m_ILStubCache.Init(m_pHighFrequencyHeap);
1214
1215 #ifdef FEATURE_COMINTEROP
1216     // Init the COM Interop data hash
1217     {
1218         LockOwner lock = { &m_InteropDataCrst, IsOwnerOfCrst };
1219         m_interopDataHash.Init(0, NULL, false, &lock);
1220     }
1221 #endif // FEATURE_COMINTEROP
1222 }
1223
1224
1225 #ifndef CROSSGEN_COMPILE
1226
1227 #ifdef FEATURE_READYTORUN
1228 PTR_CodeFragmentHeap LoaderAllocator::GetDynamicHelpersHeap()
1229 {
1230     CONTRACTL {
1231         THROWS;
1232         MODE_ANY;
1233     } CONTRACTL_END;
1234
1235     if (m_pDynamicHelpersHeap == NULL)
1236     {
1237         CodeFragmentHeap * pDynamicHelpersHeap = new CodeFragmentHeap(this, STUB_CODE_BLOCK_DYNAMICHELPER);
1238         if (InterlockedCompareExchangeT(&m_pDynamicHelpersHeap, pDynamicHelpersHeap, NULL) != NULL)
1239             delete pDynamicHelpersHeap;
1240     }
1241     return m_pDynamicHelpersHeap;
1242 }
1243 #endif
1244
1245 FuncPtrStubs * LoaderAllocator::GetFuncPtrStubs()
1246 {
1247     CONTRACTL {
1248         THROWS;
1249         MODE_ANY;
1250     } CONTRACTL_END;
1251
1252     if (m_pFuncPtrStubs == NULL)
1253     {
1254         FuncPtrStubs * pFuncPtrStubs = new FuncPtrStubs();
1255         if (InterlockedCompareExchangeT(&m_pFuncPtrStubs, pFuncPtrStubs, NULL) != NULL)
1256             delete pFuncPtrStubs;
1257     }
1258     return m_pFuncPtrStubs;
1259 }
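// GetDynamicHelpersHeap and GetFuncPtrStubs above share the same lock-free lazy
// initialization shape: allocate a candidate, try to publish it with
// InterlockedCompareExchangeT(&field, candidate, NULL), and delete the candidate
// if another thread published one first.
#if 0
// Illustrative sketch only (not compiled); 'Widget' and 'm_pWidget' are placeholders.
if (m_pWidget == NULL)
{
    Widget * pWidget = new Widget();
    if (InterlockedCompareExchangeT(&m_pWidget, pWidget, NULL) != NULL)
        delete pWidget;     // another thread won the race; keep its instance
}
return m_pWidget;
#endif // 0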
1260
1261 BYTE *LoaderAllocator::GetVSDHeapInitialBlock(DWORD *pSize)
1262 {
1263     LIMITED_METHOD_CONTRACT;
1264
1265     *pSize = 0;
1266     BYTE *buffer = InterlockedCompareExchangeT(&m_pVSDHeapInitialAlloc, NULL, m_pVSDHeapInitialAlloc);
1267     if (buffer != NULL)
1268     {
1269         *pSize = COLLECTIBLE_VIRTUALSTUBDISPATCH_HEAP_SPACE;
1270     }
1271     return buffer;
1272 }
1273
1274 BYTE *LoaderAllocator::GetCodeHeapInitialBlock(const BYTE * loAddr, const BYTE * hiAddr, DWORD minimumSize, DWORD *pSize)
1275 {
1276     LIMITED_METHOD_CONTRACT;
1277
1278     *pSize = 0;
1279     // Check to see if the size is small enough that this might work
1280     if (minimumSize > COLLECTIBLE_CODEHEAP_SIZE)
1281         return NULL;
1282
1283     // Check to see if initial alloc would be in the proper region
1284     if (loAddr != NULL || hiAddr != NULL)
1285     {
1286         if (m_pCodeHeapInitialAlloc < loAddr)
1287             return NULL;
1288         if ((m_pCodeHeapInitialAlloc + COLLECTIBLE_CODEHEAP_SIZE) > hiAddr)
1289             return NULL;
1290     }
1291
1292     BYTE * buffer = InterlockedCompareExchangeT(&m_pCodeHeapInitialAlloc, NULL, m_pCodeHeapInitialAlloc);
1293     if (buffer != NULL)
1294     {
1295         *pSize = COLLECTIBLE_CODEHEAP_SIZE;
1296     }
1297     return buffer;
1298 }
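// Note: GetVSDHeapInitialBlock and GetCodeHeapInitialBlock each hand out their
// pre-reserved initial block at most once; the interlocked exchange swaps the stored
// pointer to NULL, so subsequent callers receive NULL with *pSize left at 0.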
1299
1300 // In retail builds this should be called from AppDomain::Terminate.
1301 void LoaderAllocator::Terminate()
1302 {
1303     CONTRACTL {
1304         NOTHROW;
1305         GC_TRIGGERS;
1306         MODE_ANY;
1307     } CONTRACTL_END;
1308
1309     if (m_fTerminated)
1310         return;
1311
1312     m_fTerminated = true;
1313
1314     LOG((LF_CLASSLOADER, LL_INFO100, "Begin LoaderAllocator::Terminate for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(this))));
1315
1316     DeleteMarshalingData();
1317
1318     if (m_fGCPressure)
1319     {
1320         GCX_PREEMP();
1321         GCInterface::RemoveMemoryPressure(30000);
1322         m_fGCPressure = false;
1323     }
1324
1325     delete m_pUMEntryThunkCache;
1326     m_pUMEntryThunkCache = NULL;
1327
1328     m_crstLoaderAllocator.Destroy();
1329 #ifdef FEATURE_COMINTEROP
1330     m_ComCallWrapperCrst.Destroy();
1331     m_InteropDataCrst.Destroy();
1332 #endif
1333     m_LoaderAllocatorReferences.RemoveAll();
1334
1335     // For collectible types the low frequency and high frequency heaps are merged,
1336     // so don't destroy them twice.
1337     if ((m_pLowFrequencyHeap != NULL) && (m_pLowFrequencyHeap != m_pHighFrequencyHeap))
1338     {
1339         m_pLowFrequencyHeap->~LoaderHeap();
1340         m_pLowFrequencyHeap = NULL;
1341     }
1342
1343     if (m_pHighFrequencyHeap != NULL)
1344     {
1345 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1346         UnregisterUnwindInfoInLoaderHeap(m_pHighFrequencyHeap);
1347 #endif
1348
1349         m_pHighFrequencyHeap->~LoaderHeap();
1350         m_pHighFrequencyHeap = NULL;
1351     }
1352
1353     if (m_pStubHeap != NULL)
1354     {
1355 #ifdef STUBLINKER_GENERATES_UNWIND_INFO
1356         UnregisterUnwindInfoInLoaderHeap(m_pStubHeap);
1357 #endif
1358
1359         m_pStubHeap->~LoaderHeap();
1360         m_pStubHeap = NULL;
1361     }
1362
1363     if (m_pPrecodeHeap != NULL)
1364     {
1365         m_pPrecodeHeap->~CodeFragmentHeap();
1366         m_pPrecodeHeap = NULL;
1367     }
1368
1369 #ifdef FEATURE_READYTORUN
1370     if (m_pDynamicHelpersHeap != NULL)
1371     {
1372         delete m_pDynamicHelpersHeap;
1373         m_pDynamicHelpersHeap = NULL;
1374     }
1375 #endif
1376
1377     if (m_pFuncPtrStubs != NULL)
1378     {
1379         delete m_pFuncPtrStubs;
1380         m_pFuncPtrStubs = NULL;
1381     }
1382
1383     // This was the block reserved by BaseDomain::Init for the loaderheaps.
1384     if (m_InitialReservedMemForLoaderHeaps)
1385     {
1386         ClrVirtualFree (m_InitialReservedMemForLoaderHeaps, 0, MEM_RELEASE);
1387         m_InitialReservedMemForLoaderHeaps=NULL;
1388     }
1389
1390 #ifdef FAT_DISPATCH_TOKENS
1391     if (m_pFatTokenSetLock != NULL)
1392     {
1393         delete m_pFatTokenSetLock;
1394         m_pFatTokenSetLock = NULL;
1395     }
1396
1397     if (m_pFatTokenSet != NULL)
1398     {
1399         delete m_pFatTokenSet;
1400         m_pFatTokenSet = NULL;
1401     }
1402 #endif // FAT_DISPATCH_TOKENS
1403
1404     CleanupStringLiteralMap();
1405
1406     LOG((LF_CLASSLOADER, LL_INFO100, "End LoaderAllocator::Terminate for loader allocator %p\n", reinterpret_cast<void *>(static_cast<PTR_LoaderAllocator>(this))));
1407 }
1408
1409 #endif // !CROSSGEN_COMPILE
1410
1411
1412 #else //DACCESS_COMPILE
1413 void LoaderAllocator::EnumMemoryRegions(CLRDataEnumMemoryFlags flags)
1414 {
1415     SUPPORTS_DAC;
1416     DAC_ENUM_DTHIS();
1417     if (m_pLowFrequencyHeap.IsValid())
1418     {
1419         m_pLowFrequencyHeap->EnumMemoryRegions(flags);
1420     }
1421     if (m_pHighFrequencyHeap.IsValid())
1422     {
1423         m_pHighFrequencyHeap->EnumMemoryRegions(flags);
1424     }
1425     if (m_pStubHeap.IsValid())
1426     {
1427         m_pStubHeap->EnumMemoryRegions(flags);
1428     }
1429     if (m_pPrecodeHeap.IsValid())
1430     {
1431         m_pPrecodeHeap->EnumMemoryRegions(flags);
1432     }
1437 }
1438 #endif //DACCESS_COMPILE
1439
1440 SIZE_T LoaderAllocator::EstimateSize()
1441 {
1442     WRAPPER_NO_CONTRACT;
1443     SIZE_T retval=0;
1444     if(m_pHighFrequencyHeap) 
1445         retval+=m_pHighFrequencyHeap->GetSize();
1446     if(m_pLowFrequencyHeap) 
1447         retval+=m_pLowFrequencyHeap->GetSize();  
1448     if(m_pStubHeap) 
1449         retval+=m_pStubHeap->GetSize();   
1450     if(m_pStringLiteralMap)
1451         retval+=m_pStringLiteralMap->GetSize();
1452 #ifndef CROSSGEN_COMPILE
1453     if(m_pVirtualCallStubManager)
1454         retval+=m_pVirtualCallStubManager->GetSize();
1455 #endif
1456
1457     return retval;    
1458 }
1459
1460 #ifndef DACCESS_COMPILE
1461
1462 #ifndef CROSSGEN_COMPILE
1463
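// When FAT_DISPATCH_TOKENS is defined and <typeId, slotNumber> does not fit the compact
// token encoding, GetDispatchToken lazily creates a SimpleRWLock and a FatTokenSet, and
// returns deduplicated DispatchTokenFat instances allocated on the high frequency heap.
// Otherwise it returns a regular compact token from DispatchToken::CreateDispatchToken.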
1464 DispatchToken LoaderAllocator::GetDispatchToken(
1465     UINT32 typeId, UINT32 slotNumber)
1466 {
1467     CONTRACTL {
1468         THROWS;
1469         GC_TRIGGERS;
1470         MODE_ANY;
1471         INJECT_FAULT(COMPlusThrowOM(););
1472     } CONTRACTL_END;
1473
1474 #ifdef FAT_DISPATCH_TOKENS
1475
1476     if (DispatchToken::RequiresDispatchTokenFat(typeId, slotNumber))
1477     {
1478         //
1479         // Lock and set are lazily created.
1480         //
1481         if (m_pFatTokenSetLock == NULL)
1482         {
1483             NewHolder<SimpleRWLock> pFatTokenSetLock = new SimpleRWLock(COOPERATIVE_OR_PREEMPTIVE, LOCK_TYPE_DEFAULT);
1484             SimpleWriteLockHolder lock(pFatTokenSetLock);
1485             NewHolder<FatTokenSet> pFatTokenSet = new FatTokenSet;
1486
1487             if (FastInterlockCompareExchangePointer(
1488                     &m_pFatTokenSetLock, pFatTokenSetLock.GetValue(), NULL) != NULL)
1489             {   // Someone beat us to it
1490                 lock.Release();
1491                 // NewHolder will delete lock.
1492             }
1493             else
1494             {   // Make sure second allocation succeeds before suppressing holder of first.
1495                 pFatTokenSetLock.SuppressRelease();
1496                 m_pFatTokenSet = pFatTokenSet;
1497                 pFatTokenSet.SuppressRelease();
1498             }
1499         }
1500
1501         //
1502         // Take read lock, see if the requisite token has already been created and if so use it.
1503         // Otherwise, take write lock and create new token and add to the set.
1504         //
1505
1506         // Lookup
1507         SimpleReadLockHolder rlock(m_pFatTokenSetLock);
1508         DispatchTokenFat key(typeId, slotNumber);
1509         DispatchTokenFat *pFat = m_pFatTokenSet->Lookup(&key);
1510         if (pFat != NULL)
1511         {   // <typeId,slotNumber> is already in the set.
1512             return DispatchToken(pFat);
1513         }
1514         else
1515         {   // Create
1516             rlock.Release();
1517             SimpleWriteLockHolder wlock(m_pFatTokenSetLock);
1518
1519             // Check to see if someone beat us to the punch between
1520             // releasing the read lock and taking the write lock.
1521             pFat = m_pFatTokenSet->Lookup(&key);
1522
1523             if (pFat == NULL)
1524             {   // No one beat us; allocate and insert a new DispatchTokenFat instance.
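                // The token is placement-new'ed into the high-frequency loader heap, so it is
                // reclaimed with the rest of this LoaderAllocator's memory and needs no explicit delete.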
1525                 pFat = new ((LPVOID)GetHighFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(DispatchTokenFat))))
1526                     DispatchTokenFat(typeId, slotNumber);
1527
1528                 m_pFatTokenSet->Add(pFat);
1529             }
1530
1531             return DispatchToken(pFat);
1532         }
1533     }
1534 #endif // FAT_DISPATCH_TOKENS
1535
1536     return DispatchToken::CreateDispatchToken(typeId, slotNumber);
1537 }
1538
1539 DispatchToken LoaderAllocator::TryLookupDispatchToken(UINT32 typeId, UINT32 slotNumber)
1540 {
1541     CONTRACTL {
1542         NOTHROW;
1543         GC_NOTRIGGER;
1544         MODE_ANY;
1545     } CONTRACTL_END;
1546
1547 #ifdef FAT_DISPATCH_TOKENS
1548
1549     if (DispatchToken::RequiresDispatchTokenFat(typeId, slotNumber))
1550     {
1551         if (m_pFatTokenSetLock != NULL)
1552         {
1553             DispatchTokenFat * pFat = NULL;
1554             // Stack probes and locking operations can throw. Catch all
1555             // exceptions and just return an invalid token, since this lookup path must not throw.
1556             EX_TRY
1557             {
1558                 SimpleReadLockHolder rlock(m_pFatTokenSetLock);
1559                 if (m_pFatTokenSet != NULL)
1560                 {
1561                     DispatchTokenFat key(typeId, slotNumber);
1562                     pFat = m_pFatTokenSet->Lookup(&key);
1563                 }
1564             }
1565             EX_CATCH
1566             {
1567                 pFat = NULL;
1568             }
1569             EX_END_CATCH(SwallowAllExceptions);
1570
1571             if (pFat != NULL)
1572             {
1573                 return DispatchToken(pFat);
1574             }
1575         }
1576         // Return invalid token when not found.
1577         return DispatchToken();
1578     }
1579     else
1580 #endif // FAT_DISPATCH_TOKENS
1581     {
1582         return DispatchToken::CreateDispatchToken(typeId, slotNumber);
1583     }
1584 }
1585
1586 void LoaderAllocator::InitVirtualCallStubManager(BaseDomain * pDomain)
1587 {
1588     STANDARD_VM_CONTRACT;
1589
1590     NewHolder<VirtualCallStubManager> pMgr(new VirtualCallStubManager());
1591
1592     // Init the manager, including all heaps and such.
1593     pMgr->Init(pDomain, this);
1594
1595     m_pVirtualCallStubManager = pMgr;
1596
1597     // Successfully created the manager.
1598     pMgr.SuppressRelease();
1599 }
1600
1601 void LoaderAllocator::UninitVirtualCallStubManager()
1602 {
1603     WRAPPER_NO_CONTRACT;
1604
1605     if (m_pVirtualCallStubManager != NULL)
1606     {
1607         m_pVirtualCallStubManager->Uninit();
1608         delete m_pVirtualCallStubManager;
1609         m_pVirtualCallStubManager = NULL;
1610     }
1611 }
1612
1613 #endif // !CROSSGEN_COMPILE
1614
1615 EEMarshalingData *LoaderAllocator::GetMarshalingData()
1616 {
1617     CONTRACT (EEMarshalingData*)
1618     {
1619         THROWS;
1620         GC_TRIGGERS;
1621         MODE_ANY;
1622         INJECT_FAULT(COMPlusThrowOM());
1623         POSTCONDITION(CheckPointer(m_pMarshalingData));
1624     }
1625     CONTRACT_END;
1626
1627     if (!m_pMarshalingData)
1628     {
1629         // Take the lock
1630         CrstHolder holder(&m_InteropDataCrst);
1631
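        // Re-check under the lock in case another thread created the marshaling data first.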
1632         if (!m_pMarshalingData)
1633         {
1634             m_pMarshalingData = new (GetLowFrequencyHeap()) EEMarshalingData(this, &m_InteropDataCrst);
1635         }
1636     }
1637
1638     RETURN m_pMarshalingData;
1639 }
1640
1641 void LoaderAllocator::DeleteMarshalingData()
1642 {
1643     CONTRACTL
1644     {
1645         NOTHROW;
1646         GC_TRIGGERS;
1647         MODE_ANY;
1648     }
1649     CONTRACTL_END;
1650
1651     // We are in shutdown - no need to take any lock
1652     if (m_pMarshalingData)
1653     {
1654         delete m_pMarshalingData;
1655         m_pMarshalingData = NULL;
1656     }
1657 }
1658
1659 #endif // !DACCESS_COMPILE
1660
1661 BOOL GlobalLoaderAllocator::CanUnload()
1662 {
1663     LIMITED_METHOD_CONTRACT;
1664
1665     return FALSE;
1666 }
1667
1668 BOOL AssemblyLoaderAllocator::CanUnload()
1669 {
1670     LIMITED_METHOD_CONTRACT;
1671
1672     return TRUE;
1673 }
1674
1675 DomainAssemblyIterator::DomainAssemblyIterator(DomainAssembly* pFirstAssembly)
1676 {
1677     pCurrentAssembly = pFirstAssembly;
1678     pNextAssembly = pCurrentAssembly ? pCurrentAssembly->GetNextDomainAssemblyInSameALC() : NULL;
1679 }
1680
1681 void DomainAssemblyIterator::operator++()
1682 {
1683     pCurrentAssembly = pNextAssembly;
1684     pNextAssembly = pCurrentAssembly ? pCurrentAssembly->GetNextDomainAssemblyInSameALC() : NULL;
1685 }
1686
1687 void AssemblyLoaderAllocator::SetCollectible()
1688 {
1689     CONTRACTL
1690     {
1691         THROWS;
1692     }
1693     CONTRACTL_END;
1694
1695     m_IsCollectible = true;
1696 #ifndef DACCESS_COMPILE
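    // A collectible allocator gets its own shuffle thunk cache, backed by this allocator's stub heap,
    // so the thunks go away when the allocator is collected.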
1697     m_pShuffleThunkCache = new ShuffleThunkCache(m_pStubHeap);
1698 #endif
1699 }
1700
1701 #ifndef DACCESS_COMPILE
1702
1703 #ifndef CROSSGEN_COMPILE
1704
1705 AssemblyLoaderAllocator::~AssemblyLoaderAllocator()
1706 {
1707     if (m_binderToRelease != NULL)
1708     {
1709         VERIFY(m_binderToRelease->Release() == 0);
1710         m_binderToRelease = NULL;
1711     }
1712
1713     delete m_pShuffleThunkCache;
1714     m_pShuffleThunkCache = NULL;
1715 }
1716
1717 void AssemblyLoaderAllocator::RegisterBinder(CLRPrivBinderAssemblyLoadContext* binderToRelease)
1718 {
1719     // When the binder is registered it will be released by the destructor
1720     // of this instance
1721     _ASSERTE(m_binderToRelease == NULL);
1722     m_binderToRelease = binderToRelease;
1723 }
1724
1725 STRINGREF *LoaderAllocator::GetStringObjRefPtrFromUnicodeString(EEStringData *pStringData)
1726 {
1727     CONTRACTL
1728     {
1729         GC_TRIGGERS;
1730         THROWS;
1731         MODE_COOPERATIVE;
1732         PRECONDITION(CheckPointer(pStringData));
1733         INJECT_FAULT(COMPlusThrowOM(););
1734     }
1735     CONTRACTL_END;
1736     if (m_pStringLiteralMap == NULL)
1737     {
1738         LazyInitStringLiteralMap();
1739     }
1740     _ASSERTE(m_pStringLiteralMap);
1741     return m_pStringLiteralMap->GetStringLiteral(pStringData, TRUE, !CanUnload());
1742 }
1743
1744 //*****************************************************************************
1745 void LoaderAllocator::LazyInitStringLiteralMap()
1746 {
1747     CONTRACTL
1748     {
1749         THROWS;
1750         GC_TRIGGERS;
1751         MODE_ANY;
1752         INJECT_FAULT(COMPlusThrowOM(););
1753     }
1754     CONTRACTL_END;
1755
1756     NewHolder<StringLiteralMap> pStringLiteralMap(new StringLiteralMap());
1757
1758     pStringLiteralMap->Init();
1759
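    // Publish the map lock-free; if another thread raced ahead and installed its own map,
    // the NewHolder deletes this one and the winner's map is used.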
1760     if (InterlockedCompareExchangeT<StringLiteralMap *>(&m_pStringLiteralMap, pStringLiteralMap, NULL) == NULL)
1761     {
1762         pStringLiteralMap.SuppressRelease();
1763     }
1764 }
1765
1766 void LoaderAllocator::CleanupStringLiteralMap()
1767 {
1768     CONTRACTL
1769     {
1770         NOTHROW;
1771         GC_TRIGGERS;
1772         MODE_ANY;
1773     }
1774     CONTRACTL_END;
1775
1776     if (m_pStringLiteralMap)
1777     {
1778         delete m_pStringLiteralMap;
1779         m_pStringLiteralMap = NULL;
1780     }
1781 }
1782
1783 STRINGREF *LoaderAllocator::IsStringInterned(STRINGREF *pString)
1784 {
1785     CONTRACTL
1786     {
1787         GC_TRIGGERS;
1788         THROWS;
1789         MODE_COOPERATIVE;
1790         PRECONDITION(CheckPointer(pString));
1791         INJECT_FAULT(COMPlusThrowOM(););
1792     }
1793     CONTRACTL_END;
1794     if (m_pStringLiteralMap == NULL)
1795     {
1796         LazyInitStringLiteralMap();
1797     }
1798     _ASSERTE(m_pStringLiteralMap);
1799     return m_pStringLiteralMap->GetInternedString(pString, FALSE, !CanUnload());
1800 }
1801
1802 STRINGREF *LoaderAllocator::GetOrInternString(STRINGREF *pString)
1803 {
1804     CONTRACTL
1805     {
1806         GC_TRIGGERS;
1807         THROWS;
1808         MODE_COOPERATIVE;
1809         PRECONDITION(CheckPointer(pString));
1810         INJECT_FAULT(COMPlusThrowOM(););
1811     }
1812     CONTRACTL_END;
1813     if (m_pStringLiteralMap == NULL)
1814     {
1815         LazyInitStringLiteralMap();
1816     }
1817     _ASSERTE(m_pStringLiteralMap);
1818     return m_pStringLiteralMap->GetInternedString(pString, TRUE, !CanUnload());
1819 }
1820
1821 void AssemblyLoaderAllocator::RegisterHandleForCleanup(OBJECTHANDLE objHandle)
1822 {
1823     CONTRACTL
1824     {
1825         GC_TRIGGERS;
1826         THROWS;
1827         MODE_ANY;
1828         CAN_TAKE_LOCK;
1829         PRECONDITION(CheckPointer(objHandle));
1830         INJECT_FAULT(COMPlusThrowOM(););
1831     }
1832     CONTRACTL_END;
1833
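    // The list item is placement-new'ed into the low-frequency loader heap, so it is released
    // together with the rest of the allocator's memory.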
1834     void * pItem = GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(HandleCleanupListItem)));
1835
1836     // InsertTail must be protected by a lock. Just use the loader allocator lock
1837     CrstHolder ch(&m_crstLoaderAllocator);
1838     m_handleCleanupList.InsertTail(new (pItem) HandleCleanupListItem(objHandle));
1839 }
1840
1841 void AssemblyLoaderAllocator::CleanupHandles()
1842 {
1843     CONTRACTL
1844     {
1845         GC_TRIGGERS;
1846         NOTHROW;
1847         MODE_ANY;
1848         CAN_TAKE_LOCK;
1849     }
1850     CONTRACTL_END;
1851
1852     _ASSERTE(GetDomain()->IsAppDomain());
1853
1854     // This method doesn't take a lock around RemoveHead because it's supposed to
1855     // be called only from Terminate
1856     while (!m_handleCleanupList.IsEmpty())
1857     {
1858         HandleCleanupListItem * pItem = m_handleCleanupList.RemoveHead();
1859         DestroyTypedHandle(pItem->m_handle);
1860     }
1861 }
1862
1863 void LoaderAllocator::RegisterFailedTypeInitForCleanup(ListLockEntry *pListLockEntry)
1864 {
1865     CONTRACTL
1866     {
1867         GC_TRIGGERS;
1868         THROWS;
1869         MODE_ANY;
1870         CAN_TAKE_LOCK;
1871         PRECONDITION(CheckPointer(pListLockEntry));
1872         INJECT_FAULT(COMPlusThrowOM(););
1873     }
1874     CONTRACTL_END;
1875
1876     if (!IsCollectible())
1877     {
1878         return;
1879     }
1880
1881     void * pItem = GetLowFrequencyHeap()->AllocMem(S_SIZE_T(sizeof(FailedTypeInitCleanupListItem)));
1882
1883     // InsertTail must be protected by a lock. Just use the loader allocator lock
1884     CrstHolder ch(&m_crstLoaderAllocator);
1885     m_failedTypeInitCleanupList.InsertTail(new (pItem) FailedTypeInitCleanupListItem(pListLockEntry));
1886 }
1887
1888 void LoaderAllocator::CleanupFailedTypeInit()
1889 {
1890     CONTRACTL
1891     {
1892         GC_TRIGGERS;
1893         THROWS;
1894         MODE_ANY;
1895         CAN_TAKE_LOCK;
1896     }
1897     CONTRACTL_END;
1898
1899     if (!IsCollectible())
1900     {
1901         return;
1902     }
1903
1904     _ASSERTE(GetDomain()->IsAppDomain());
1905
1906     // This method doesn't take a lock around loader allocator state access, because
1907     // it's supposed to be called only during cleanup. However, the domain-level state
1908     // might be accessed by multiple threads.
1909     ListLock *pLock = GetDomain()->GetClassInitLock();
1910
1911     while (!m_failedTypeInitCleanupList.IsEmpty())
1912     {
1913         FailedTypeInitCleanupListItem * pItem = m_failedTypeInitCleanupList.RemoveHead();
1914
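        // Unlink the stale entry from the domain-wide class-init lock list so it does not dangle
        // once this collectible allocator's memory is freed.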
1915         ListLockHolder pInitLock(pLock);
1916         pLock->Unlink(pItem->m_pListLockEntry);
1917     }
1918 }
1919
1920 void AssemblyLoaderAllocator::ReleaseManagedAssemblyLoadContext()
1921 {
1922     CONTRACTL
1923     {
1924         THROWS;
1925         GC_TRIGGERS;
1926         MODE_ANY;
1927     }
1928     CONTRACTL_END;
1929
1930     if (m_binderToRelease != NULL)
1931     {
1932         // Release the managed ALC
1933         m_binderToRelease->ReleaseLoadContext();
1934     }
1935 }
1936
1937 #ifdef FEATURE_COMINTEROP
1938 ComCallWrapperCache * LoaderAllocator::GetComCallWrapperCache()
1939 {
1940     CONTRACTL
1941     {
1942         THROWS;
1943         GC_TRIGGERS;
1944         MODE_ANY;
1945         INJECT_FAULT(COMPlusThrowOM(););
1946     }
1947     CONTRACTL_END;
1948
1949     if (!m_pComCallWrapperCache)
1950     {
1951         CrstHolder lh(&m_ComCallWrapperCrst);
1952
1953         if (!m_pComCallWrapperCache)
1954             m_pComCallWrapperCache = ComCallWrapperCache::Create(this);
1955     }
1956     _ASSERTE(m_pComCallWrapperCache);
1957     return m_pComCallWrapperCache;
1958 }
1959 #endif // FEATURE_COMINTEROP
1960
1961 // U->M thunks created in this LoaderAllocator and not associated with a delegate.
1962 UMEntryThunkCache *LoaderAllocator::GetUMEntryThunkCache()
1963 {
1964     CONTRACTL
1965     {
1966         THROWS;
1967         GC_TRIGGERS;
1968         MODE_ANY;
1969         INJECT_FAULT(COMPlusThrowOM(););
1970     }
1971     CONTRACTL_END;
1972
1973     if (!m_pUMEntryThunkCache)
1974     {
1975         UMEntryThunkCache *pUMEntryThunkCache = new UMEntryThunkCache(GetAppDomain());
1976
1977         if (FastInterlockCompareExchangePointer(&m_pUMEntryThunkCache, pUMEntryThunkCache, NULL) != NULL)
1978         {
1979             // some thread swooped in and set the field
1980             delete pUMEntryThunkCache;
1981         }
1982     }
1983     _ASSERTE(m_pUMEntryThunkCache);
1984     return m_pUMEntryThunkCache;
1985 }
1986
1987 #endif // !CROSSGEN_COMPILE
1988
1989 #ifdef FEATURE_COMINTEROP
1990
1991 // Look up interop data for a method table
1992 // Returns the data pointer if present, NULL otherwise
1993 InteropMethodTableData *LoaderAllocator::LookupComInteropData(MethodTable *pMT)
1994 {
1995     // Take the lock
1996     CrstHolder holder(&m_InteropDataCrst);
1997
1998     // Lookup
1999     InteropMethodTableData *pData = (InteropMethodTableData*)m_interopDataHash.LookupValue((UPTR)pMT, (LPVOID)NULL);
2000
2001     // Not there...
2002     if (pData == (InteropMethodTableData*)INVALIDENTRY)
2003         return NULL;
2004
2005     // Found it
2006     return pData;
2007 }
2008
2009 // Returns TRUE if successfully inserted, FALSE if this would be a duplicate entry
2010 BOOL LoaderAllocator::InsertComInteropData(MethodTable* pMT, InteropMethodTableData *pData)
2011 {
2012     // We don't keep track of this kind of information for interfaces
2013     _ASSERTE(!pMT->IsInterface());
2014
2015     // Take the lock
2016     CrstHolder holder(&m_InteropDataCrst);
2017
2018     // Check to see that it's not already in there
2019     InteropMethodTableData *pDupData = (InteropMethodTableData*)m_interopDataHash.LookupValue((UPTR)pMT, (LPVOID)NULL);
2020     if (pDupData != (InteropMethodTableData*)INVALIDENTRY)
2021         return FALSE;
2022
2023     // Not in there, so insert
2024     m_interopDataHash.InsertValue((UPTR)pMT, (LPVOID)pData);
2025
2026     // Success
2027     return TRUE;
2028 }
2029
2030 #endif // FEATURE_COMINTEROP
2031
2032 #endif // !DACCESS_COMPILE