1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 // See the LICENSE file in the project root for more information.
9 #endif // defined(_MSC_VER)
11 //------------------------------------------------------------------------
13 // This subclass of `ArenaAllocator` is a singleton that always keeps
14 // a single default-sized page allocated. We try to use the singleton
15 // allocator as often as possible (i.e. for all non-concurrent
16 // method compilations).
17 class PooledAllocator : public ArenaAllocator
22 POOLED_ALLOCATOR_NOTINITIALIZED = 0,
23 POOLED_ALLOCATOR_IN_USE = 1,
24 POOLED_ALLOCATOR_AVAILABLE = 2,
25 POOLED_ALLOCATOR_SHUTDOWN = 3,
28 static PooledAllocator s_pooledAllocator;
29 static LONG s_pooledAllocatorState;
31 PooledAllocator() : ArenaAllocator()
34 PooledAllocator(IEEMemoryManager* memoryManager);
36 PooledAllocator(const PooledAllocator& other) = delete;
37 PooledAllocator& operator=(const PooledAllocator& other) = delete;
40 PooledAllocator& operator=(PooledAllocator&& other);
42 void destroy() override;
44 static void shutdown();
46 static ArenaAllocator* getPooledAllocator(IEEMemoryManager* memoryManager);
49 size_t ArenaAllocator::s_defaultPageSize = 0;
51 //------------------------------------------------------------------------
52 // ArenaAllocator::bypassHostAllocator:
53 // Indicates whether or not the ArenaAllocator should bypass the JIT
54 // host when allocating memory for arena pages.
57 // True if the JIT should bypass the JIT host; false otherwise.
58 bool ArenaAllocator::bypassHostAllocator()
61 // When JitDirectAlloc is set, all JIT allocations requests are forwarded
62 // directly to the OS. This allows taking advantage of pageheap and other gflag
63 // knobs for ensuring that we do not have buffer overruns in the JIT.
65 return JitConfig.JitDirectAlloc() != 0;
66 #else // defined(DEBUG)
68 #endif // !defined(DEBUG)
71 //------------------------------------------------------------------------
72 // ArenaAllocator::getDefaultPageSize:
73 // Returns the default size of an arena page.
76 // The default size of an arena page.
77 size_t ArenaAllocator::getDefaultPageSize()
79 return s_defaultPageSize;
82 //------------------------------------------------------------------------
83 // ArenaAllocator::ArenaAllocator:
84 // Default-constructs an arena allocator.
85 ArenaAllocator::ArenaAllocator()
86 : m_memoryManager(nullptr)
87 , m_firstPage(nullptr)
89 , m_nextFreeByte(nullptr)
90 , m_lastFreeByte(nullptr)
94 //------------------------------------------------------------------------
95 // ArenaAllocator::ArenaAllocator:
96 // Constructs an arena allocator.
99 // memoryManager - The `IEEMemoryManager` instance that will be used to
100 // allocate memory for arena pages.
101 ArenaAllocator::ArenaAllocator(IEEMemoryManager* memoryManager)
102 : m_memoryManager(memoryManager)
103 , m_firstPage(nullptr)
104 , m_lastPage(nullptr)
105 , m_nextFreeByte(nullptr)
106 , m_lastFreeByte(nullptr)
108 assert(getDefaultPageSize() != 0);
109 assert(isInitialized());
112 //------------------------------------------------------------------------
113 // ArenaAllocator::operator=:
114 // Move-assigns a `ArenaAllocator`.
115 ArenaAllocator& ArenaAllocator::operator=(ArenaAllocator&& other)
117 assert(!isInitialized());
119 m_memoryManager = other.m_memoryManager;
120 m_firstPage = other.m_firstPage;
121 m_lastPage = other.m_lastPage;
122 m_nextFreeByte = other.m_nextFreeByte;
123 m_lastFreeByte = other.m_lastFreeByte;
125 other.m_memoryManager = nullptr;
126 other.m_firstPage = nullptr;
127 other.m_lastPage = nullptr;
128 other.m_nextFreeByte = nullptr;
129 other.m_lastFreeByte = nullptr;
134 bool ArenaAllocator::isInitialized()
136 return m_memoryManager != nullptr;
139 //------------------------------------------------------------------------
140 // ArenaAllocator::allocateNewPage:
141 // Allocates a new arena page.
144 // size - The number of bytes that were requested by the allocation
145 // that triggered this request to allocate a new arena page.
148 // A pointer to the first usable byte of the newly allocated page.
149 void* ArenaAllocator::allocateNewPage(size_t size, bool canThrow)
151 assert(isInitialized());
153 size_t pageSize = sizeof(PageDescriptor) + size;
155 // Check for integer overflow
166 // If the current page is now full, update a few statistics
167 if (m_lastPage != nullptr)
169 // Undo the "+=" done in allocateMemory()
170 m_nextFreeByte -= size;
172 // Save the actual used size of the page
173 m_lastPage->m_usedBytes = m_nextFreeByte - m_lastPage->m_contents;
176 // Round up to a default-sized page if necessary
177 if (pageSize <= s_defaultPageSize)
179 pageSize = s_defaultPageSize;
182 // Round to the nearest multiple of OS page size if necessary
183 if (!bypassHostAllocator())
185 pageSize = roundUp(pageSize, DEFAULT_PAGE_SIZE);
188 // Allocate the new page
189 PageDescriptor* newPage = (PageDescriptor*)allocateHostMemory(pageSize);
190 if (newPage == nullptr)
200 // Append the new page to the end of the list
201 newPage->m_next = nullptr;
202 newPage->m_pageBytes = pageSize;
203 newPage->m_previous = m_lastPage;
204 newPage->m_usedBytes = 0; // m_usedBytes is meaningless until a new page is allocated.
205 // Instead of letting it contain garbage (so to confuse us),
208 if (m_lastPage != nullptr)
210 m_lastPage->m_next = newPage;
214 m_firstPage = newPage;
217 m_lastPage = newPage;
219 // Adjust the next/last free byte pointers
220 m_nextFreeByte = newPage->m_contents + size;
221 m_lastFreeByte = (BYTE*)newPage + pageSize;
222 assert((m_lastFreeByte - m_nextFreeByte) >= 0);
224 return newPage->m_contents;
227 //------------------------------------------------------------------------
228 // ArenaAllocator::destroy:
229 // Performs any necessary teardown for an `ArenaAllocator`.
230 void ArenaAllocator::destroy()
232 assert(isInitialized());
234 // Free all of the allocated pages
235 for (PageDescriptor *page = m_firstPage, *next; page != nullptr; page = next)
238 freeHostMemory(page);
241 // Clear out the allocator's fields
242 m_memoryManager = nullptr;
243 m_firstPage = nullptr;
244 m_lastPage = nullptr;
245 m_nextFreeByte = nullptr;
246 m_lastFreeByte = nullptr;
// The debug version of the allocator may allocate directly from the
// OS rather than going through the hosting APIs. In order to do so,
// it must undef the macros that are usually in place to prevent
// accidental uses of the OS allocator.

#if defined(DEBUG)
#undef GetProcessHeap
#undef HeapAlloc
#undef HeapFree
#endif
259 //------------------------------------------------------------------------
260 // ArenaAllocator::allocateHostMemory:
261 // Allocates memory from the host (or the OS if `bypassHostAllocator()`
265 // size - The number of bytes to allocate.
268 // A pointer to the allocated memory.
269 void* ArenaAllocator::allocateHostMemory(size_t size)
271 assert(isInitialized());
274 if (bypassHostAllocator())
276 return ::HeapAlloc(GetProcessHeap(), 0, size);
280 return ClrAllocInProcessHeap(0, S_SIZE_T(size));
282 #else // defined(DEBUG)
283 return m_memoryManager->ClrVirtualAlloc(nullptr, size, MEM_COMMIT, PAGE_READWRITE);
284 #endif // !defined(DEBUG)
287 //------------------------------------------------------------------------
288 // ArenaAllocator::freeHostMemory:
289 // Frees memory allocated by a previous call to `allocateHostMemory`.
292 // block - A pointer to the memory to free.
293 void ArenaAllocator::freeHostMemory(void* block)
295 assert(isInitialized());
298 if (bypassHostAllocator())
300 ::HeapFree(GetProcessHeap(), 0, block);
304 ClrFreeInProcessHeap(0, block);
306 #else // defined(DEBUG)
307 m_memoryManager->ClrVirtualFree(block, 0, MEM_RELEASE);
308 #endif // !defined(DEBUG)
311 //------------------------------------------------------------------------
312 // ArenaAllocator::getTotalBytesAllocated:
313 // Gets the total number of bytes allocated for all of the arena pages
314 // for an `ArenaAllocator`.
318 size_t ArenaAllocator::getTotalBytesAllocated()
320 assert(isInitialized());
323 for (PageDescriptor* page = m_firstPage; page != nullptr; page = page->m_next)
325 bytes += page->m_pageBytes;
331 //------------------------------------------------------------------------
332 // ArenaAllocator::getTotalBytesAllocated:
333 // Gets the total number of bytes used in all of the arena pages for
334 // an `ArenaAllocator`.
340 // An arena page may have unused space at the very end. This happens
341 // when an allocation request comes in (via a call to `allocateMemory`)
342 // that will not fit in the remaining bytes for the current page.
343 // Another way to understand this method is as returning the total
344 // number of bytes allocated for arena pages minus the number of bytes
345 // that are unused across all area pages.
346 size_t ArenaAllocator::getTotalBytesUsed()
348 assert(isInitialized());
350 if (m_lastPage != nullptr)
352 m_lastPage->m_usedBytes = m_nextFreeByte - m_lastPage->m_contents;
356 for (PageDescriptor* page = m_firstPage; page != nullptr; page = page->m_next)
358 bytes += page->m_usedBytes;
364 //------------------------------------------------------------------------
365 // ArenaAllocator::startup:
366 // Performs any necessary initialization for the arena allocator
368 void ArenaAllocator::startup()
370 s_defaultPageSize = bypassHostAllocator() ? (size_t)MIN_PAGE_SIZE : (size_t)DEFAULT_PAGE_SIZE;
373 //------------------------------------------------------------------------
374 // ArenaAllocator::shutdown:
375 // Performs any necessary teardown for the arena allocator subsystem.
376 void ArenaAllocator::shutdown()
378 PooledAllocator::shutdown();
381 PooledAllocator PooledAllocator::s_pooledAllocator;
382 LONG PooledAllocator::s_pooledAllocatorState = POOLED_ALLOCATOR_NOTINITIALIZED;
384 //------------------------------------------------------------------------
385 // PooledAllocator::PooledAllocator:
386 // Constructs a `PooledAllocator`.
387 PooledAllocator::PooledAllocator(IEEMemoryManager* memoryManager) : ArenaAllocator(memoryManager)
391 //------------------------------------------------------------------------
392 // PooledAllocator::operator=:
393 // Move-assigns a `PooledAllocator`.
394 PooledAllocator& PooledAllocator::operator=(PooledAllocator&& other)
396 *((ArenaAllocator*)this) = std::move((ArenaAllocator &&)other);
400 //------------------------------------------------------------------------
401 // PooledAllocator::shutdown:
402 // Performs any necessary teardown for the pooled allocator.
405 // If the allocator has been initialized and is in use when this method is called,
406 // it is up to whatever is using the pooled allocator to call `destroy` in order
407 // to free its memory.
408 void PooledAllocator::shutdown()
410 LONG oldState = InterlockedExchange(&s_pooledAllocatorState, POOLED_ALLOCATOR_SHUTDOWN);
413 case POOLED_ALLOCATOR_NOTINITIALIZED:
414 case POOLED_ALLOCATOR_SHUTDOWN:
415 case POOLED_ALLOCATOR_IN_USE:
418 case POOLED_ALLOCATOR_AVAILABLE:
419 // The pooled allocator was initialized and not in use; we must destroy it.
420 s_pooledAllocator.destroy();
425 //------------------------------------------------------------------------
426 // PooledAllocator::getPooledAllocator:
427 // Returns the pooled allocator if it is not already in use.
430 // memoryManager: The `IEEMemoryManager` instance in use by the caller.
433 // A pointer to the pooled allocator if it is available or `nullptr`
434 // if it is already in use.
437 // Calling `destroy` on the returned allocator will return it to the
439 ArenaAllocator* PooledAllocator::getPooledAllocator(IEEMemoryManager* memoryManager)
441 LONG oldState = InterlockedExchange(&s_pooledAllocatorState, POOLED_ALLOCATOR_IN_USE);
444 case POOLED_ALLOCATOR_IN_USE:
445 case POOLED_ALLOCATOR_SHUTDOWN:
446 // Either the allocator is in use or this call raced with a call to `shutdown`.
450 case POOLED_ALLOCATOR_AVAILABLE:
451 if (s_pooledAllocator.m_memoryManager != memoryManager)
453 // The allocator is available, but it was initialized with a different
454 // memory manager. Release it and return `nullptr`.
455 InterlockedExchange(&s_pooledAllocatorState, POOLED_ALLOCATOR_AVAILABLE);
459 return &s_pooledAllocator;
461 case POOLED_ALLOCATOR_NOTINITIALIZED:
463 PooledAllocator allocator(memoryManager);
464 if (allocator.allocateNewPage(0, false) == nullptr)
466 // Failed to grab the initial memory page.
467 InterlockedExchange(&s_pooledAllocatorState, POOLED_ALLOCATOR_NOTINITIALIZED);
471 s_pooledAllocator = std::move(allocator);
474 return &s_pooledAllocator;
477 assert(!"Unknown pooled allocator state");
482 //------------------------------------------------------------------------
483 // PooledAllocator::destroy:
484 // Performs any necessary teardown for an `PooledAllocator` and returns the allocator
486 void PooledAllocator::destroy()
488 assert(isInitialized());
489 assert(this == &s_pooledAllocator);
490 assert(s_pooledAllocatorState == POOLED_ALLOCATOR_IN_USE || s_pooledAllocatorState == POOLED_ALLOCATOR_SHUTDOWN);
491 assert(m_firstPage != nullptr);
493 // Free all but the first allocated page
494 for (PageDescriptor *page = m_firstPage->m_next, *next; page != nullptr; page = next)
497 freeHostMemory(page);
500 // Reset the relevant state to point back to the first byte of the first page
501 m_firstPage->m_next = nullptr;
502 m_lastPage = m_firstPage;
503 m_nextFreeByte = m_firstPage->m_contents;
504 m_lastFreeByte = (BYTE*)m_firstPage + m_firstPage->m_pageBytes;
506 assert(getTotalBytesAllocated() == s_defaultPageSize);
508 // If we've already been shut down, free the first page. Otherwise, return the allocator to the pool.
509 if (s_pooledAllocatorState == POOLED_ALLOCATOR_SHUTDOWN)
511 ArenaAllocator::destroy();
515 InterlockedExchange(&s_pooledAllocatorState, POOLED_ALLOCATOR_AVAILABLE);
519 //------------------------------------------------------------------------
520 // ArenaAllocator::getPooledAllocator:
521 // Returns the pooled allocator if it is not already in use.
524 // memoryManager: The `IEEMemoryManager` instance in use by the caller.
527 // A pointer to the pooled allocator if it is available or `nullptr`
528 // if it is already in use.
531 // Calling `destroy` on the returned allocator will return it to the
533 ArenaAllocator* ArenaAllocator::getPooledAllocator(IEEMemoryManager* memoryManager)
535 return PooledAllocator::getPooledAllocator(memoryManager);