2 // Copyright (c) Microsoft. All rights reserved.
3 // Licensed under the MIT license. See LICENSE file in the project root for full license information.
20 #ifdef PROFILING_SUPPORTED
21 #define GC_PROFILING //Turn on profiling
22 #endif // PROFILING_SUPPORTED
27 * Promotion Function Prototypes
29 typedef void enum_func (Object*);
31 // callback functions for heap walkers
32 typedef void object_callback_func(void * pvContext, void * pvDataLoc);
34 // stub type to abstract a heap segment
35 struct gc_heap_segment_stub;
36 typedef gc_heap_segment_stub *segment_handle;
40 LPVOID pvMem; // base of the allocation, not the first object (must add ibFirstObject)
41 size_t ibFirstObject; // offset to the base of the first object in the segment
42 size_t ibAllocated; // limit of allocated memory in the segment (>= firstobject)
43 size_t ibCommit; // limit of committed memory in the segment (>= allocated)
44 size_t ibReserved; // limit of reserved memory in the segment (>= commit)
47 /*!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!*/
48 /* If you modify failure_get_memory and */
49 /* oom_reason be sure to make the corresponding */
50 /* changes in toolbox\sos\strike\strike.cpp. */
51 /*!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!*/
52 enum failure_get_memory
55 fgm_reserve_segment = 1,
56 fgm_commit_segment_beg = 2,
57 fgm_commit_eph_segment = 3,
64 failure_get_memory fgm;
66 size_t available_pagefile_mb;
69 void set_fgm (failure_get_memory f, size_t s, BOOL l)
85 oom_unproductive_full_gc = 6
95 failure_get_memory fgm;
97 size_t available_pagefile_mb;
101 /* forward declarations */
108 #define LARGE_OBJECT_SIZE ((size_t)(85000))
110 GPTR_DECL(GCHeap, g_pGCHeap);
112 #ifndef DACCESS_COMPILE
115 GPTR_DECL(BYTE,g_lowest_address);
116 GPTR_DECL(BYTE,g_highest_address);
117 GPTR_DECL(DWORD,g_card_table);
118 #ifndef DACCESS_COMPILE
122 #ifdef DACCESS_COMPILE
130 #ifdef WRITE_BARRIER_CHECK
131 //always defined, but should be 0 in Server GC
132 extern BYTE* g_GCShadow;
133 extern BYTE* g_GCShadowEnd;
134 // saves the g_lowest_address in between GCs to verify the consistency of the shadow segment
135 extern BYTE* g_shadow_lowest_address;
140 extern "C" BYTE* g_ephemeral_low;
141 extern "C" BYTE* g_ephemeral_high;
144 ::GCHeap* CreateGCHeap();
149 #if defined(FEATURE_SVR_GC)
151 ::GCHeap* CreateGCHeap();
155 #endif // defined(FEATURE_SVR_GC)
158 * Ephemeral Garbage Collected Heap Interface
164 friend class WKS::gc_heap;
165 #if defined(FEATURE_SVR_GC)
166 friend class SVR::gc_heap;
167 friend class SVR::GCHeap;
168 #endif // defined(FEATURE_SVR_GC)
169 friend struct ClassDumpInfo;
173 __int64 alloc_bytes; //Number of bytes allocated on SOH by this context
174 __int64 alloc_bytes_loh; //Number of bytes allocated on LOH by this context
175 #if defined(FEATURE_SVR_GC)
176 SVR::GCHeap* alloc_heap;
177 SVR::GCHeap* home_heap;
178 #endif // defined(FEATURE_SVR_GC)
184 LIMITED_METHOD_CONTRACT;
190 #if defined(FEATURE_SVR_GC)
193 #endif // defined(FEATURE_SVR_GC)
200 Thread* thread_under_crawl;
202 BOOL promotion; //TRUE: Promotion, FALSE: Relocation.
203 BOOL concurrent; //TRUE: concurrent scanning
204 #if CHECK_APP_DOMAIN_LEAKS || defined (FEATURE_APPDOMAIN_RESOURCE_MONITORING) || defined (DACCESS_COMPILE)
205 AppDomain *pCurrentDomain;
206 #endif //CHECK_APP_DOMAIN_LEAKS || FEATURE_APPDOMAIN_RESOURCE_MONITORING || DACCESS_COMPILE
208 #if defined(GC_PROFILING) || defined (DACCESS_COMPILE)
210 #endif //GC_PROFILING || DACCESS_COMPILE
211 #if defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
212 EtwGCRootKind dwEtwRootKind;
213 #endif // GC_PROFILING || FEATURE_EVENT_TRACE
217 LIMITED_METHOD_CONTRACT;
219 thread_under_crawl = 0;
225 #endif //GC_PROFILING
226 #ifdef FEATURE_EVENT_TRACE
227 dwEtwRootKind = kEtwGCRootKindOther;
228 #endif // FEATURE_EVENT_TRACE
232 typedef BOOL (* walk_fn)(Object*, void*);
233 typedef void (* gen_walk_fn)(void *context, int generation, BYTE *range_start, BYTE * range_end, BYTE *range_reserved);
235 #if defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
236 struct ProfilingScanContext : ScanContext
238 BOOL fProfilerPinned;
242 ProfilingScanContext(BOOL fProfilerPinnedParam) : ScanContext()
244 LIMITED_METHOD_CONTRACT;
247 fProfilerPinned = fProfilerPinnedParam;
249 #ifdef FEATURE_CONSERVATIVE_GC
250 // To not confuse CNameSpace::GcScanRoots
251 promotion = g_pConfig->GetGCConservative();
255 #endif // defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
258 #define IN_STRESS_HEAP(x) x
259 #define STRESS_HEAP_ARG(x) ,x
261 #define IN_STRESS_HEAP(x)
262 #define STRESS_HEAP_ARG(x)
263 #endif // STRESS_HEAP
266 //dynamic data interface
270 size_t promoted_size;
271 size_t collection_count;
274 // !!!!!!!!!!!!!!!!!!!!!!!
275 // make sure you change the def in bcl\system\gc.cs
276 // if you change this!
279 collection_non_blocking = 0x00000001,
280 collection_blocking = 0x00000002,
281 collection_optimized = 0x00000004,
282 collection_compacting = 0x00000008
284 , collection_gcstress = 0x80000000
285 #endif // STRESS_HEAP
288 // !!!!!!!!!!!!!!!!!!!!!!!
289 // make sure you change the def in bcl\system\gc.cs
290 // if you change this!
291 enum wait_full_gc_status
293 wait_full_gc_success = 0,
294 wait_full_gc_failed = 1,
295 wait_full_gc_cancelled = 2,
296 wait_full_gc_timeout = 3,
300 // !!!!!!!!!!!!!!!!!!!!!!!
301 // make sure you change the def in bcl\system\gc.cs
302 // if you change this!
303 enum start_no_gc_region_status
305 start_no_gc_success = 0,
306 start_no_gc_no_memory = 1,
307 start_no_gc_too_large = 2,
308 start_no_gc_in_progress = 3
311 enum end_no_gc_region_status
313 end_no_gc_success = 0,
314 end_no_gc_not_in_progress = 1,
315 end_no_gc_induced = 2,
316 end_no_gc_alloc_exceeded = 3
321 bgc_not_in_process = 0,
336 enum changed_seg_state
342 void record_changed_seg (BYTE* start, BYTE* end,
343 size_t current_gc_index,
344 bgc_state current_bgc_state,
345 changed_seg_state changed_state);
347 //constants for the flags parameter to the gc call back
349 #define GC_CALL_INTERIOR 0x1
350 #define GC_CALL_PINNED 0x2
351 #define GC_CALL_CHECK_APP_DOMAIN 0x4
353 //flags for GCHeap::Alloc(...)
354 #define GC_ALLOC_FINALIZE 0x1
355 #define GC_ALLOC_CONTAINS_REF 0x2
356 #define GC_ALLOC_ALIGN8_BIAS 0x4
359 friend struct ::_DacGlobals;
360 #ifdef DACCESS_COMPILE
361 friend class ClrDataAccess;
365 static GCHeap *GetGCHeap()
367 #ifdef CLR_STANDALONE_BINDER
370 LIMITED_METHOD_CONTRACT;
372 _ASSERTE(g_pGCHeap != NULL);
377 #ifndef CLR_STANDALONE_BINDER
378 static BOOL IsGCHeapInitialized()
380 LIMITED_METHOD_CONTRACT;
382 return (g_pGCHeap != NULL);
384 static BOOL IsGCInProgress(BOOL bConsiderGCStart = FALSE)
388 return (IsGCHeapInitialized() ? GetGCHeap()->IsGCInProgressHelper(bConsiderGCStart) : false);
391 static void WaitForGCCompletion(BOOL bConsiderGCStart = FALSE)
395 if (IsGCHeapInitialized())
396 GetGCHeap()->WaitUntilGCComplete(bConsiderGCStart);
399 // The runtime needs to know whether we're using workstation or server GC
400 // long before the GCHeap is created. So IsServerHeap cannot be a virtual
401 // method on GCHeap. Instead we make it a static method and initialize
402 // gcHeapType before any of the calls to IsServerHeap. Note that this also
403 // has the advantage of getting the answer without an indirection
404 // (virtual call), which is important for perf critical codepaths.
406 #ifndef DACCESS_COMPILE
407 static void InitializeHeapType(bool bServerHeap)
409 LIMITED_METHOD_CONTRACT;
410 #ifdef FEATURE_SVR_GC
411 gcHeapType = bServerHeap ? GC_HEAP_SVR : GC_HEAP_WKS;
412 #ifdef WRITE_BARRIER_CHECK
413 if (gcHeapType == GC_HEAP_SVR)
419 #else // FEATURE_SVR_GC
420 CONSISTENCY_CHECK(bServerHeap == false);
421 #endif // FEATURE_SVR_GC
425 static BOOL IsValidSegmentSize(size_t cbSize)
427 //Must be aligned on a Mb and at least 4Mb (cbSize >> 22 is nonzero only for cbSize >= 4Mb)
428 return (((cbSize & (1024*1024-1)) ==0) && (cbSize >> 22));
431 static BOOL IsValidGen0MaxSize(size_t cbSize)
433 return (cbSize >= 64*1024);
436 inline static bool IsServerHeap()
438 LIMITED_METHOD_CONTRACT;
439 #ifdef FEATURE_SVR_GC
440 _ASSERTE(gcHeapType != GC_HEAP_INVALID);
441 return (gcHeapType == GC_HEAP_SVR);
442 #else // FEATURE_SVR_GC
444 #endif // FEATURE_SVR_GC
447 inline static bool UseAllocationContexts()
450 #ifdef FEATURE_REDHAWK
451 // SIMPLIFY: only use allocation contexts
457 return ((IsServerHeap() ? true : (g_SystemInfo.dwNumberOfProcessors >= 2)));
461 inline static bool MarkShouldCompeteForStatics()
465 return IsServerHeap() && g_SystemInfo.dwNumberOfProcessors >= 2;
468 #ifndef DACCESS_COMPILE
469 static GCHeap * CreateGCHeap()
475 #if defined(FEATURE_SVR_GC)
476 pGCHeap = (IsServerHeap() ? SVR::CreateGCHeap() : WKS::CreateGCHeap());
478 pGCHeap = WKS::CreateGCHeap();
479 #endif // defined(FEATURE_SVR_GC)
484 #endif // DACCESS_COMPILE
486 #endif // !CLR_STANDALONE_BINDER
496 #ifdef FEATURE_SVR_GC
497 SVAL_DECL(DWORD,gcHeapType);
498 #endif // FEATURE_SVR_GC
501 // TODO Synchronization, should be moved out
502 virtual BOOL IsGCInProgressHelper (BOOL bConsiderGCStart = FALSE) = 0;
503 virtual DWORD WaitUntilGCComplete (BOOL bConsiderGCStart = FALSE) = 0;
504 virtual void SetGCInProgress(BOOL fInProgress) = 0;
505 virtual CLREventStatic * GetWaitForGCEvent() = 0;
507 virtual void SetFinalizationRun (Object* obj) = 0;
508 virtual Object* GetNextFinalizable() = 0;
509 virtual size_t GetNumberOfFinalizable() = 0;
511 virtual void SetFinalizeQueueForShutdown(BOOL fHasLock) = 0;
512 virtual BOOL FinalizeAppDomain(AppDomain *pDomain, BOOL fRunFinalizers) = 0;
513 virtual BOOL ShouldRestartFinalizerWatchDog() = 0;
515 //wait for concurrent GC to finish
516 virtual void WaitUntilConcurrentGCComplete () = 0; // Use in managed threads
517 #ifndef DACCESS_COMPILE
518 virtual HRESULT WaitUntilConcurrentGCCompleteAsync(int millisecondsTimeout) = 0; // Use in native threads. Returns an HRESULT indicating success, failure, or timeout.
520 virtual BOOL IsConcurrentGCInProgress() = 0;
522 // Enable/disable concurrent GC
523 virtual void TemporaryEnableConcurrentGC() = 0;
524 virtual void TemporaryDisableConcurrentGC() = 0;
525 virtual BOOL IsConcurrentGCEnabled() = 0;
527 virtual void FixAllocContext (alloc_context* acontext, BOOL lockp, void* arg, void *heap) = 0;
528 virtual Object* Alloc (alloc_context* acontext, size_t size, DWORD flags) = 0;
530 // This is safe to call only when EE is suspended.
531 virtual Object* GetContainingObject(void *pInteriorPtr) = 0;
533 // TODO Should be folded into constructor
534 virtual HRESULT Initialize () = 0;
536 virtual HRESULT GarbageCollect (int generation = -1, BOOL low_memory_p=FALSE, int mode = collection_blocking) = 0;
537 virtual Object* Alloc (size_t size, DWORD flags) = 0;
538 #ifdef FEATURE_64BIT_ALIGNMENT
539 virtual Object* AllocAlign8 (size_t size, DWORD flags) = 0;
540 virtual Object* AllocAlign8 (alloc_context* acontext, size_t size, DWORD flags) = 0;
542 virtual Object* AllocAlign8Common (void* hp, alloc_context* acontext, size_t size, DWORD flags) = 0;
544 #endif // FEATURE_64BIT_ALIGNMENT
545 virtual Object* AllocLHeap (size_t size, DWORD flags) = 0;
546 virtual void SetReservedVMLimit (size_t vmlimit) = 0;
547 virtual void SetCardsAfterBulkCopy( Object**, size_t ) = 0;
548 #if defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
549 virtual void WalkObject (Object* obj, walk_fn fn, void* context) = 0;
550 #endif //defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
552 virtual bool IsThreadUsingAllocationContextHeap(alloc_context* acontext, int thread_number) = 0;
553 virtual int GetNumberOfHeaps () = 0;
554 virtual int GetHomeHeapNumber () = 0;
556 virtual int CollectionCount (int generation, int get_bgc_fgc_count = 0) = 0;
558 // Finalizer queue stuff (should stay)
559 virtual bool RegisterForFinalization (int gen, Object* obj) = 0;
561 // General queries to the GC
562 virtual BOOL IsPromoted (Object *object) = 0;
563 virtual unsigned WhichGeneration (Object* object) = 0;
564 virtual BOOL IsEphemeral (Object* object) = 0;
565 virtual BOOL IsHeapPointer (void* object, BOOL small_heap_only = FALSE) = 0;
567 virtual unsigned GetCondemnedGeneration() = 0;
568 virtual int GetGcLatencyMode() = 0;
569 virtual int SetGcLatencyMode(int newLatencyMode) = 0;
571 virtual int GetLOHCompactionMode() = 0;
572 virtual void SetLOHCompactionMode(int newLOHCompactionyMode) = 0;
574 virtual BOOL RegisterForFullGCNotification(DWORD gen2Percentage,
575 DWORD lohPercentage) = 0;
576 virtual BOOL CancelFullGCNotification() = 0;
577 virtual int WaitForFullGCApproach(int millisecondsTimeout) = 0;
578 virtual int WaitForFullGCComplete(int millisecondsTimeout) = 0;
580 virtual int StartNoGCRegion(ULONGLONG totalSize, BOOL lohSizeKnown, ULONGLONG lohSize, BOOL disallowFullBlockingGC) = 0;
581 virtual int EndNoGCRegion() = 0;
583 virtual BOOL IsObjectInFixedHeap(Object *pObj) = 0;
584 virtual size_t GetTotalBytesInUse () = 0;
585 virtual size_t GetCurrentObjSize() = 0;
586 virtual size_t GetLastGCStartTime(int generation) = 0;
587 virtual size_t GetLastGCDuration(int generation) = 0;
588 virtual size_t GetNow() = 0;
589 virtual unsigned GetGcCount() = 0;
590 virtual void TraceGCSegments() = 0;
592 virtual void PublishObject(BYTE* obj) = 0;
594 // static if since restricting for all heaps is fine
595 virtual size_t GetValidSegmentSize(BOOL large_seg = FALSE) = 0;
598 static BOOL IsLargeObject(MethodTable *mt) {
601 return mt->GetBaseSize() >= LARGE_OBJECT_SIZE;
604 static unsigned GetMaxGeneration() {
605 LIMITED_METHOD_DAC_CONTRACT;
606 return max_generation;
609 virtual size_t GetPromotedBytes(int heap_index) = 0;
618 #ifdef FEATURE_BASICFREEZE
619 // frozen segment management functions
620 virtual segment_handle RegisterFrozenSegment(segment_info *pseginfo) = 0;
621 #endif //FEATURE_BASICFREEZE
624 #ifndef FEATURE_REDHAWK // Redhawk forces relocation a different way
626 //return TRUE if GC actually happens, otherwise FALSE
627 virtual BOOL StressHeap(alloc_context * acontext = 0) = 0;
629 #endif // FEATURE_REDHAWK
631 virtual void ValidateObjectMember (Object *obj) = 0;
634 #if defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
635 virtual void DescrGenerationsToProfiler (gen_walk_fn fn, void *context) = 0;
636 #endif // defined(GC_PROFILING) || defined(FEATURE_EVENT_TRACE)
641 // Return NULL if can't find next object. When EE is not suspended,
642 // the result is not accurate: if the input arg is in gen0, the function could
643 // return zeroed out memory as next object
644 virtual Object * NextObj (Object * object) = 0;
645 #ifdef FEATURE_BASICFREEZE
646 // Return TRUE if object lives in frozen segment
647 virtual BOOL IsInFrozenSegment (Object * object) = 0;
648 #endif //FEATURE_BASICFREEZE
652 extern VOLATILE(LONG) m_GCLock;
654 // Go through and touch (read) each page straddled by a memory block.
655 void TouchPages(LPVOID pStart, UINT cb);
657 // For low memory notification from host
658 extern LONG g_bLowMemoryFromHost;
660 #ifdef WRITE_BARRIER_CHECK
661 void updateGCShadow(Object** ptr, Object* val);
664 // the method table for the WeakReference class
665 extern MethodTable *pWeakReferenceMT;
666 // The canonical method table for WeakReference<T>
667 extern MethodTable *pWeakReferenceOfTCanonMT;
668 extern void FinalizeWeakReference(Object * obj);