Reviewed by Sam Weinig.
Some heap refactoring
https://bugs.webkit.org/show_bug.cgi?id=61704
SunSpider says no change.
* JavaScriptCore.exp: Export!
* heap/Heap.cpp: COLLECT_ON_EVERY_ALLOCATION can actually do so now.
(JSC::Heap::Heap): Changed Heap sub-objects to point to the heap.
(JSC::Heap::allocate): Changed inline allocation code to only select the
size class, since this can be optimized out at compile time -- everything
else is now inlined into this out-of-line function.
No need to duplicate ASSERTs made in our caller.
* heap/Heap.h:
(JSC::Heap::heap):
(JSC::Heap::isMarked):
(JSC::Heap::testAndSetMarked):
(JSC::Heap::testAndClearMarked):
(JSC::Heap::setMarked): Call directly into MarkedBlock instead of adding
a layer of indirection through MarkedSpace.
(JSC::Heap::allocate): See above.
* heap/MarkedBlock.cpp:
(JSC::MarkedBlock::create):
(JSC::MarkedBlock::MarkedBlock):
* heap/MarkedBlock.h: Changed Heap sub-objects to point to the heap.
* heap/MarkedSpace.cpp:
(JSC::MarkedSpace::MarkedSpace):
(JSC::MarkedSpace::allocateBlock):
* heap/MarkedSpace.h:
(JSC::MarkedSpace::allocate): Updated to match changes above.
git-svn-id: http://svn.webkit.org/repository/webkit/trunk@87653 268f45cc-cd09-0410-ab3c-d52691b4dbfc
+2011-05-29 Geoffrey Garen <ggaren@apple.com>
+
+ Reviewed by Sam Weinig.
+
+ Some heap refactoring
+ https://bugs.webkit.org/show_bug.cgi?id=61704
+
+ SunSpider says no change.
+
+ * JavaScriptCore.exp: Export!
+
+ * heap/Heap.cpp: COLLECT_ON_EVERY_ALLOCATION can actually do so now.
+
+ (JSC::Heap::Heap): Changed Heap sub-objects to point to the heap.
+
+ (JSC::Heap::allocate): Changed inline allocation code to only select the
+ size class, since this can be optimized out at compile time -- everything
+ else is now inlined into this out-of-line function.
+
+ No need to duplicate ASSERTs made in our caller.
+
+ * heap/Heap.h:
+ (JSC::Heap::heap):
+ (JSC::Heap::isMarked):
+ (JSC::Heap::testAndSetMarked):
+ (JSC::Heap::testAndClearMarked):
+ (JSC::Heap::setMarked): Call directly into MarkedBlock instead of adding
+ a layer of indirection through MarkedSpace.
+
+ (JSC::Heap::allocate): See above.
+
+ * heap/MarkedBlock.cpp:
+ (JSC::MarkedBlock::create):
+ (JSC::MarkedBlock::MarkedBlock):
+ * heap/MarkedBlock.h: Changed Heap sub-objects to point to the heap.
+
+ * heap/MarkedSpace.cpp:
+ (JSC::MarkedSpace::MarkedSpace):
+ (JSC::MarkedSpace::allocateBlock):
+ * heap/MarkedSpace.h:
+ (JSC::MarkedSpace::allocate): Updated to match changes above.
+
2011-05-28 David Kilzer <ddkilzer@apple.com>
BUILD FIX when building only the interpreter
__ZN3JSC11JSByteArray13s_defaultInfoE
__ZN3JSC11JSByteArray15createStructureERNS_12JSGlobalDataENS_7JSValueEPKNS_9ClassInfoE
__ZN3JSC11JSByteArrayC1EPNS_9ExecStateEPNS_9StructureEPN3WTF9ByteArrayE
-__ZN3JSC11MarkedSpace21allocateFromSizeClassERNS0_9SizeClassE
__ZN3JSC11ParserArena5resetEv
__ZN3JSC11checkSyntaxEPNS_9ExecStateERKNS_10SourceCodeE
__ZN3JSC11createErrorEPNS_9ExecStateERKNS_7UStringE
__ZN3JSC3NaNE
__ZN3JSC41constructFunctionSkippingEvalEnabledCheckEPNS_9ExecStateEPNS_14JSGlobalObjectERKNS_7ArgListERKNS_10IdentifierERKNS_7UStringEi
__ZN3JSC4Heap16activityCallbackEv
-__ZN3JSC4Heap16allocateSlowCaseEm
__ZN3JSC4Heap16objectTypeCountsEv
__ZN3JSC4Heap17collectAllGarbageEv
__ZN3JSC4Heap17globalObjectCountEv
__ZN3JSC4Heap29reportExtraMemoryCostSlowCaseEm
__ZN3JSC4Heap7destroyEv
__ZN3JSC4Heap7protectENS_7JSValueE
+__ZN3JSC4Heap8allocateERNS_11MarkedSpace9SizeClassE
__ZN3JSC4Heap9unprotectENS_7JSValueE
__ZN3JSC4Yarr11YarrPatternC1ERKNS_7UStringEbbPPKc
__ZN3JSC4Yarr11byteCompileERNS0_11YarrPatternEPN3WTF20BumpPointerAllocatorE
?addPropertyWithoutTransition@Structure@JSC@@QAEIAAVJSGlobalData@2@ABVIdentifier@2@IPAVJSCell@2@@Z
?addSlowCase@Identifier@JSC@@CA?AV?$PassRefPtr@VStringImpl@WTF@@@WTF@@PAVExecState@2@PAVStringImpl@4@@Z
?addStaticGlobals@JSGlobalObject@JSC@@IAEXPAUGlobalPropertyInfo@12@H@Z
+ ?allocate@Heap@JSC@@QAEPAXAAUSizeClass@MarkedSpace@2@@Z
?allocate@Heap@JSC@@QAEPAXI@Z
- ?allocateFromSizeClass@MarkedSpace@JSC@@AAEPAXAAUSizeClass@12@@Z
?allocatePropertyStorage@JSObject@JSC@@QAEXII@Z
- ?allocateSlowCase@Heap@JSC@@AAEPAXI@Z
?append@StringBuilder@WTF@@QAEXPBDI@Z
?append@StringBuilder@WTF@@QAEXPB_WI@Z
?ascii@UString@JSC@@QBE?AVCString@WTF@@XZ
#include "Tracing.h"
#include <algorithm>
-#define COLLECT_ON_EVERY_SLOW_ALLOCATION 0
+#define COLLECT_ON_EVERY_ALLOCATION 0
using namespace std;
Heap::Heap(JSGlobalData* globalData)
: m_operationInProgress(NoOperation)
- , m_markedSpace(globalData)
+ , m_markedSpace(this)
, m_markListSet(0)
, m_activityCallback(DefaultGCActivityCallback::create(this))
, m_globalData(globalData)
m_extraCost += cost;
}
-void* Heap::allocateSlowCase(size_t bytes)
+void* Heap::allocate(MarkedSpace::SizeClass& sizeClass)
{
- ASSERT(globalData()->identifierTable == wtfThreadData().currentIdentifierTable());
- ASSERT(JSLock::lockCount() > 0);
- ASSERT(JSLock::currentThreadIsHoldingLock());
- ASSERT(bytes <= MarkedSpace::maxCellSize);
- ASSERT(m_operationInProgress == NoOperation);
-
-#if COLLECT_ON_EVERY_SLOW_ALLOCATION
+#if COLLECT_ON_EVERY_ALLOCATION
collectAllGarbage();
ASSERT(m_operationInProgress == NoOperation);
#endif
+ void* result = m_markedSpace.allocate(sizeClass);
+ if (result)
+ return result;
+
reset(DoNotSweep);
m_operationInProgress = Allocation;
- void* result = m_markedSpace.allocate(bytes);
+ result = m_markedSpace.allocate(sizeClass);
m_operationInProgress = NoOperation;
ASSERT(result);
inline bool isBusy();
void* allocate(size_t);
+ void* allocate(MarkedSpace::SizeClass&);
void collectAllGarbage();
void reportExtraMemoryCost(size_t cost);
HandleStack* handleStack() { return &m_handleStack; }
private:
- friend class JSGlobalData;
-
static const size_t minExtraCost = 256;
static const size_t maxExtraCost = 1024 * 1024;
return m_operationInProgress != NoOperation;
}
+ inline Heap* Heap::heap(JSCell* cell)
+ {
+ return MarkedBlock::blockFor(cell)->heap();
+ }
+
+ inline Heap* Heap::heap(JSValue v)
+ {
+ if (!v.isCell())
+ return 0;
+ return heap(v.asCell());
+ }
+
inline bool Heap::isMarked(const void* cell)
{
- return MarkedSpace::isMarked(cell);
+ return MarkedBlock::blockFor(cell)->isMarked(cell);
}
inline bool Heap::testAndSetMarked(const void* cell)
{
- return MarkedSpace::testAndSetMarked(cell);
+ return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
}
inline bool Heap::testAndClearMarked(const void* cell)
{
- return MarkedSpace::testAndClearMarked(cell);
+ return MarkedBlock::blockFor(cell)->testAndClearMarked(cell);
}
inline void Heap::setMarked(const void* cell)
{
- MarkedSpace::setMarked(cell);
+ MarkedBlock::blockFor(cell)->setMarked(cell);
}
inline void Heap::writeBarrier(const JSCell*, JSValue)
inline void* Heap::allocate(size_t bytes)
{
ASSERT(isValidAllocation(bytes));
-
- m_operationInProgress = Allocation;
- void* result = m_markedSpace.allocate(bytes);
- m_operationInProgress = NoOperation;
- if (result)
- return result;
-
- return allocateSlowCase(bytes);
- }
-
- inline Heap* Heap::heap(JSValue v)
- {
- if (!v.isCell())
- return 0;
- return heap(v.asCell());
- }
-
- inline Heap* Heap::heap(JSCell* c)
- {
- return MarkedSpace::heap(c);
+ MarkedSpace::SizeClass& sizeClass = m_markedSpace.sizeClassFor(bytes);
+ return allocate(sizeClass);
}
} // namespace JSC
namespace JSC {
-MarkedBlock* MarkedBlock::create(JSGlobalData* globalData, size_t cellSize)
+MarkedBlock* MarkedBlock::create(Heap* heap, size_t cellSize)
{
PageAllocationAligned allocation = PageAllocationAligned::allocate(blockSize, blockSize, OSAllocator::JSGCHeapPages);
if (!static_cast<bool>(allocation))
CRASH();
- return new (allocation.base()) MarkedBlock(allocation, globalData, cellSize);
+ return new (allocation.base()) MarkedBlock(allocation, heap, cellSize);
}
void MarkedBlock::destroy(MarkedBlock* block)
block->m_allocation.deallocate();
}
-MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, JSGlobalData* globalData, size_t cellSize)
+MarkedBlock::MarkedBlock(const PageAllocationAligned& allocation, Heap* heap, size_t cellSize)
: m_nextAtom(firstAtom())
, m_allocation(allocation)
- , m_heap(&globalData->heap)
+ , m_heap(heap)
{
m_atomsPerCell = (cellSize + atomSize - 1) / atomSize;
m_endAtom = atomsPerBlock - m_atomsPerCell + 1;
- Structure* dummyMarkableCellStructure = globalData->dummyMarkableCellStructure.get();
+ Structure* dummyMarkableCellStructure = heap->globalData()->dummyMarkableCellStructure.get();
for (size_t i = firstAtom(); i < m_endAtom; i += m_atomsPerCell)
- new (&atoms()[i]) JSCell(*globalData, dummyMarkableCellStructure, JSCell::CreatingEarlyCell);
+ new (&atoms()[i]) JSCell(*heap->globalData(), dummyMarkableCellStructure, JSCell::CreatingEarlyCell);
}
void MarkedBlock::sweep()
class Heap;
class JSCell;
- class JSGlobalData;
typedef uintptr_t Bits;
public:
static const size_t atomSize = sizeof(double); // Ensures natural alignment for all built-in types.
- static MarkedBlock* create(JSGlobalData*, size_t cellSize);
+ static MarkedBlock* create(Heap*, size_t cellSize);
static void destroy(MarkedBlock*);
static bool isAtomAligned(const void*);
typedef char Atom[atomSize];
- MarkedBlock(const PageAllocationAligned&, JSGlobalData*, size_t cellSize);
+ MarkedBlock(const PageAllocationAligned&, Heap*, size_t cellSize);
Atom* atoms();
size_t m_nextAtom;
#include "JSGlobalObject.h"
#include "JSCell.h"
-#include "JSGlobalData.h"
#include "JSLock.h"
#include "JSObject.h"
#include "ScopeChain.h"
class Structure;
-MarkedSpace::MarkedSpace(JSGlobalData* globalData)
+MarkedSpace::MarkedSpace(Heap* heap)
: m_waterMark(0)
, m_highWaterMark(0)
- , m_globalData(globalData)
+ , m_heap(heap)
{
for (size_t cellSize = preciseStep; cellSize < preciseCutoff; cellSize += preciseStep)
sizeClassFor(cellSize).cellSize = cellSize;
MarkedBlock* MarkedSpace::allocateBlock(SizeClass& sizeClass)
{
- MarkedBlock* block = MarkedBlock::create(globalData(), sizeClass.cellSize);
+ MarkedBlock* block = MarkedBlock::create(m_heap, sizeClass.cellSize);
sizeClass.blockList.append(block);
sizeClass.nextBlock = block;
m_blocks.add(block);
}
}
-void* MarkedSpace::allocateFromSizeClass(SizeClass& sizeClass)
-{
- for (MarkedBlock*& block = sizeClass.nextBlock ; block; block = block->next()) {
- if (void* result = block->allocate())
- return result;
-
- m_waterMark += block->capacity();
- }
-
- if (m_waterMark < m_highWaterMark)
- return allocateBlock(sizeClass)->allocate();
-
- return 0;
-}
-
void MarkedSpace::shrink()
{
// We record a temporary list of empties to avoid modifying m_blocks while iterating it.
class Heap;
class JSCell;
- class JSGlobalData;
class LiveObjectIterator;
class MarkStack;
class WeakGCHandle;
class MarkedSpace {
WTF_MAKE_NONCOPYABLE(MarkedSpace);
public:
- // Currently public for use in assertions.
static const size_t maxCellSize = 1024;
- static Heap* heap(JSCell*);
+ struct SizeClass {
+ SizeClass();
+ void reset();
- static bool isMarked(const void*);
- static bool testAndSetMarked(const void*);
- static bool testAndClearMarked(const void*);
- static void setMarked(const void*);
+ MarkedBlock* nextBlock;
+ DoublyLinkedList<MarkedBlock> blockList;
+ size_t cellSize;
+ };
- MarkedSpace(JSGlobalData*);
+ MarkedSpace(Heap*);
void destroy();
- JSGlobalData* globalData();
-
size_t highWaterMark();
void setHighWaterMark(size_t);
- void* allocate(size_t);
+ SizeClass& sizeClassFor(size_t);
+ void* allocate(SizeClass&);
void clearMarks();
void markRoots();
typedef HashSet<MarkedBlock*>::iterator BlockIterator;
- struct SizeClass {
- SizeClass();
- void reset();
-
- MarkedBlock* nextBlock;
- DoublyLinkedList<MarkedBlock> blockList;
- size_t cellSize;
- };
-
MarkedBlock* allocateBlock(SizeClass&);
void freeBlocks(DoublyLinkedList<MarkedBlock>&);
- SizeClass& sizeClassFor(size_t);
- void* allocateFromSizeClass(SizeClass&);
-
void clearMarks(MarkedBlock*);
SizeClass m_preciseSizeClasses[preciseCount];
HashSet<MarkedBlock*> m_blocks;
size_t m_waterMark;
size_t m_highWaterMark;
- JSGlobalData* m_globalData;
+ Heap* m_heap;
};
- inline Heap* MarkedSpace::heap(JSCell* cell)
- {
- return MarkedBlock::blockFor(cell)->heap();
- }
-
- inline bool MarkedSpace::isMarked(const void* cell)
- {
- return MarkedBlock::blockFor(cell)->isMarked(cell);
- }
-
- inline bool MarkedSpace::testAndSetMarked(const void* cell)
- {
- return MarkedBlock::blockFor(cell)->testAndSetMarked(cell);
- }
-
- inline bool MarkedSpace::testAndClearMarked(const void* cell)
- {
- return MarkedBlock::blockFor(cell)->testAndClearMarked(cell);
- }
-
- inline void MarkedSpace::setMarked(const void* cell)
- {
- MarkedBlock::blockFor(cell)->setMarked(cell);
- }
-
inline bool MarkedSpace::contains(const void* x)
{
if (!MarkedBlock::isAtomAligned(x))
(*it)->forEach(functor);
}
- inline JSGlobalData* MarkedSpace::globalData()
- {
- return m_globalData;
- }
-
inline size_t MarkedSpace::highWaterMark()
{
return m_highWaterMark;
return m_impreciseSizeClasses[(bytes - 1) / impreciseStep];
}
- inline void* MarkedSpace::allocate(size_t bytes)
+ inline void* MarkedSpace::allocate(SizeClass& sizeClass)
{
- SizeClass& sizeClass = sizeClassFor(bytes);
- return allocateFromSizeClass(sizeClass);
+ for (MarkedBlock*& block = sizeClass.nextBlock ; block; block = block->next()) {
+ if (void* result = block->allocate())
+ return result;
+
+ m_waterMark += block->capacity();
+ }
+
+ if (m_waterMark < m_highWaterMark)
+ return allocateBlock(sizeClass)->allocate();
+
+ return 0;
}
-
+
inline MarkedSpace::SizeClass::SizeClass()
: nextBlock(0)
, cellSize(0)