// --- Memory Allocation Callback ---
enum ObjectSpace {
kObjectSpaceNewSpace = 1 << 0,
- kObjectSpaceOldPointerSpace = 1 << 1,
- kObjectSpaceOldDataSpace = 1 << 2,
- kObjectSpaceCodeSpace = 1 << 3,
- kObjectSpaceMapSpace = 1 << 4,
- kObjectSpaceCellSpace = 1 << 5,
- kObjectSpaceLoSpace = 1 << 6,
- kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldPointerSpace |
- kObjectSpaceOldDataSpace | kObjectSpaceCodeSpace |
- kObjectSpaceMapSpace | kObjectSpaceLoSpace
+ kObjectSpaceOldSpace = 1 << 1,
+ kObjectSpaceCodeSpace = 1 << 2,
+ kObjectSpaceMapSpace = 1 << 3,
+ kObjectSpaceCellSpace = 1 << 4,
+ kObjectSpaceLoSpace = 1 << 5,
+ kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldSpace |
+ kObjectSpaceCodeSpace | kObjectSpaceMapSpace |
+ kObjectSpaceLoSpace
};
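These ObjectSpace bits feed the embedder-facing memory allocation callbacks, so an observer of the old generation now registers one kObjectSpaceOldSpace bit instead of two. A minimal sketch, assuming the v8::V8::AddMemoryAllocationCallback API and the AllocationAction enum declared below (the callback name and call site are hypothetical):

// Fires for objects that formerly lived in either old pointer or old data
// space; after the merge both report kObjectSpaceOldSpace.
static void OnOldSpaceEvent(v8::ObjectSpace space,
                            v8::AllocationAction action, int size) {
  // ... record 'size' bytes allocated or freed in the merged old space ...
}
// v8::V8::AddMemoryAllocationCallback(OnOldSpaceEvent,
//                                     v8::kObjectSpaceOldSpace,
//                                     v8::kAllocationActionAll);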
enum AllocationAction {
heap_stats.new_space_size = &new_space_size;
int new_space_capacity;
heap_stats.new_space_capacity = &new_space_capacity;
- intptr_t old_pointer_space_size;
- heap_stats.old_pointer_space_size = &old_pointer_space_size;
- intptr_t old_pointer_space_capacity;
- heap_stats.old_pointer_space_capacity = &old_pointer_space_capacity;
- intptr_t old_data_space_size;
- heap_stats.old_data_space_size = &old_data_space_size;
- intptr_t old_data_space_capacity;
- heap_stats.old_data_space_capacity = &old_data_space_capacity;
+ intptr_t old_space_size;
+ heap_stats.old_space_size = &old_space_size;
+ intptr_t old_space_capacity;
+ heap_stats.old_space_capacity = &old_space_capacity;
intptr_t code_space_size;
heap_stats.code_space_size = &code_space_size;
intptr_t code_space_capacity;
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
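AllocateTargetSpace packs the target AllocationSpace into the stub's flags word via V8's BitField template; a self-contained sketch of that pattern (the shift/width of 1/3 is an assumption for illustration):

#include <stdint.h>
// Simplified stand-in for V8's BitField<T, shift, size>.
template <class T, int kShift, int kSize>
struct BitFieldSketch {
  static const uint32_t kMask = ((1u << kSize) - 1) << kShift;
  // Replace the field inside 'previous' with 'value'.
  static uint32_t update(uint32_t previous, T value) {
    return (previous & ~kMask) | (static_cast<uint32_t>(value) << kShift);
  }
  // Extract the field again.
  static T decode(uint32_t packed) {
    return static_cast<T>((packed & kMask) >> kShift);
  }
};
// Six spaces remain after the merge, so three bits still suffice:
//   typedef BitFieldSketch<AllocationSpace, 1, 3> AllocateTargetSpace;
//   flags = AllocateTargetSpace::update(flags, OLD_SPACE);
//   DCHECK_EQ(OLD_SPACE, AllocateTargetSpace::decode(flags));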
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
Label aligned;
b(eq, &aligned);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand(ip));
b(hs, gc_required);
}
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
Label aligned;
b(eq, &aligned);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand(ip));
b(hs, gc_required);
}
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space or old pointer space. The object_size is
+ // Allocate an object in new space or old space. The object_size is
// specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. If the space is exhausted control continues at the gc_required
// label. The allocated object is returned in result. If the flag
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
}
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space or old pointer space. The object_size is
+ // Allocate an object in new space or old space. The object_size is
// specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. The allocated object is returned in result.
//
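For context, a typical fast-path call site of this macro-assembler helper in an ARM stub (`__` being the usual ACCESS_MASM shorthand); the register choices and slow-path label are hypothetical:

// Reserve a tagged HeapNumber in the current allocation space; falls
// through to 'slow' (a runtime call) when the space is exhausted.
Label slow;
__ Allocate(HeapNumber::kSize, r0 /* result */, r1, r2 /* scratches */,
            &slow, TAG_OBJECT);

With the merge, pretenured callers OR in the single PRETENURE flag rather than choosing between the removed per-space pretenure flags.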
}
-ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
+ExternalReference ExternalReference::old_space_allocation_top_address(
Isolate* isolate) {
- return ExternalReference(
- isolate->heap()->OldPointerSpaceAllocationTopAddress());
-}
-
-
-ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
- Isolate* isolate) {
- return ExternalReference(
- isolate->heap()->OldPointerSpaceAllocationLimitAddress());
+ return ExternalReference(isolate->heap()->OldSpaceAllocationTopAddress());
}
-ExternalReference ExternalReference::old_data_space_allocation_top_address(
+ExternalReference ExternalReference::old_space_allocation_limit_address(
Isolate* isolate) {
- return ExternalReference(isolate->heap()->OldDataSpaceAllocationTopAddress());
-}
-
-
-ExternalReference ExternalReference::old_data_space_allocation_limit_address(
- Isolate* isolate) {
- return ExternalReference(
- isolate->heap()->OldDataSpaceAllocationLimitAddress());
+ return ExternalReference(isolate->heap()->OldSpaceAllocationLimitAddress());
}
// Used for fast allocation in generated code.
static ExternalReference new_space_allocation_top_address(Isolate* isolate);
static ExternalReference new_space_allocation_limit_address(Isolate* isolate);
- static ExternalReference old_pointer_space_allocation_top_address(
- Isolate* isolate);
- static ExternalReference old_pointer_space_allocation_limit_address(
- Isolate* isolate);
- static ExternalReference old_data_space_allocation_top_address(
- Isolate* isolate);
- static ExternalReference old_data_space_allocation_limit_address(
- Isolate* isolate);
+ static ExternalReference old_space_allocation_top_address(Isolate* isolate);
+ static ExternalReference old_space_allocation_limit_address(Isolate* isolate);
static ExternalReference mod_two_doubles_operation(Isolate* isolate);
static ExternalReference power_double_double_function(Isolate* isolate);
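These references let generated code inline the bump-pointer fast path against the merged old space. Schematically, in plain C++ standing in for the emitted assembly (with 'size' in bytes and 'heap' an internal Heap*):

// Fast path: bump old-space top if the reservation fits below the limit.
Address* top_addr = heap->OldSpaceAllocationTopAddress();
Address* limit_addr = heap->OldSpaceAllocationLimitAddress();
Address top = *top_addr;
if (top + size <= *limit_addr) {
  *top_addr = top + size;  // object now occupies [top, top + size)
} else {
  // Slow path: call the runtime, which may have to grow the space or GC.
}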
#define HISTOGRAM_PERCENTAGE_LIST(HP) \
/* Heap fragmentation. */ \
HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal) \
- HP(external_fragmentation_old_pointer_space, \
- V8.MemoryExternalFragmentationOldPointerSpace) \
- HP(external_fragmentation_old_data_space, \
- V8.MemoryExternalFragmentationOldDataSpace) \
+ HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
HP(external_fragmentation_code_space, \
V8.MemoryExternalFragmentationCodeSpace) \
HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace) \
/* Percentages of heap committed to each space. */ \
HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace) \
- HP(heap_fraction_old_pointer_space, V8.MemoryHeapFractionOldPointerSpace) \
- HP(heap_fraction_old_data_space, V8.MemoryHeapFractionOldDataSpace) \
+ HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace) \
HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace) \
HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace) \
HP(heap_fraction_cell_space, V8.MemoryHeapFractionCellSpace) \
SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
- SC(old_pointer_space_bytes_available, \
- V8.MemoryOldPointerSpaceBytesAvailable) \
- SC(old_pointer_space_bytes_committed, \
- V8.MemoryOldPointerSpaceBytesCommitted) \
- SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
- SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
- SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
- SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
+ SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable) \
+ SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted) \
+ SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed) \
SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
{heap->new_space()->Size(), "new_space_live_bytes"},
{heap->new_space()->Available(), "new_space_available_bytes"},
{heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
- {heap->old_pointer_space()->Size(), "old_pointer_space_live_bytes"},
- {heap->old_pointer_space()->Available(),
- "old_pointer_space_available_bytes"},
- {heap->old_pointer_space()->CommittedMemory(),
- "old_pointer_space_commited_bytes"},
- {heap->old_data_space()->Size(), "old_data_space_live_bytes"},
- {heap->old_data_space()->Available(), "old_data_space_available_bytes"},
- {heap->old_data_space()->CommittedMemory(),
- "old_data_space_commited_bytes"},
+ {heap->old_space()->Size(), "old_space_live_bytes"},
+ {heap->old_space()->Available(), "old_space_available_bytes"},
+ {heap->old_space()->CommittedMemory(), "old_space_commited_bytes"},
{heap->code_space()->Size(), "code_space_live_bytes"},
{heap->code_space()->Available(), "code_space_available_bytes"},
{heap->code_space()->CommittedMemory(), "code_space_commited_bytes"},
const char* to_string,
Handle<Object> to_number,
byte kind) {
- Handle<Oddball> oddball = New<Oddball>(map, OLD_POINTER_SPACE);
+ Handle<Oddball> oddball = New<Oddball>(map, OLD_SPACE);
Oddball::Initialize(isolate(), oddball, to_string, to_number, kind);
return oddball;
}
Handle<AllocationSite> Factory::NewAllocationSite() {
Handle<Map> map = allocation_site_map();
- Handle<AllocationSite> site = New<AllocationSite>(map, OLD_POINTER_SPACE);
+ Handle<AllocationSite> site = New<AllocationSite>(map, OLD_SPACE);
site->Initialize();
// Link the site
Handle<SharedFunctionInfo> info,
Handle<Context> context,
PretenureFlag pretenure) {
- AllocationSpace space = pretenure == TENURED ? OLD_POINTER_SPACE : NEW_SPACE;
+ AllocationSpace space = pretenure == TENURED ? OLD_SPACE : NEW_SPACE;
Handle<JSFunction> result = New<JSFunction>(map, space);
InitializeFunction(result, info, context);
return result;
}
// Allocate the global object and initialize it with the backing store.
- Handle<GlobalObject> global = New<GlobalObject>(map, OLD_POINTER_SPACE);
+ Handle<GlobalObject> global = New<GlobalObject>(map, OLD_SPACE);
isolate()->heap()->InitializeJSObjectFromMap(*global, *dictionary, *map);
// Create a new map for the global object.
Handle<String> name,
MaybeHandle<Code> maybe_code) {
Handle<Map> map = shared_function_info_map();
- Handle<SharedFunctionInfo> share =
- New<SharedFunctionInfo>(map, OLD_POINTER_SPACE);
+ Handle<SharedFunctionInfo> share = New<SharedFunctionInfo>(map, OLD_SPACE);
// Set pointer fields.
share->set_name(*name);
// consecutive.
// Keep this enum in sync with the ObjectSpace enum in v8.h
enum AllocationSpace {
- NEW_SPACE, // Semispaces collected with copying collector.
- OLD_POINTER_SPACE, // May contain pointers to new space.
- OLD_DATA_SPACE, // Must not have pointers to new space.
- CODE_SPACE, // No pointers to new space, marked executable.
- MAP_SPACE, // Only and all map objects.
- CELL_SPACE, // Only and all cell objects.
- LO_SPACE, // Promoted large objects.
+ NEW_SPACE, // Semispaces collected with copying collector.
+ OLD_SPACE, // May contain pointers to new space.
+ CODE_SPACE, // No pointers to new space, marked executable.
+ MAP_SPACE, // Only and all map objects.
+ CELL_SPACE, // Only and all cell objects.
+ LO_SPACE, // Promoted large objects.
FIRST_SPACE = NEW_SPACE,
LAST_SPACE = LO_SPACE,
- FIRST_PAGED_SPACE = OLD_POINTER_SPACE,
+ FIRST_PAGED_SPACE = OLD_SPACE,
LAST_PAGED_SPACE = CELL_SPACE
};
const int kSpaceTagSize = 3;
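Sanity check on the renumbered enum: with OLD_DATA_SPACE gone there are six spaces (0..5), which still fit the 3-bit space tag. A sketch of the invariant, not an assert present in the source:

STATIC_ASSERT(LAST_SPACE < (1 << kSpaceTagSize));  // 5 < 8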
// Compute map and object size.
Map* map = one_byte_internalized_string_map();
int size = SeqOneByteString::SizeFor(str.length());
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
+ AllocationSpace space = SelectSpace(size, TENURED);
// Allocate string.
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
// Compute map and object size.
Map* map = internalized_string_map();
int size = SeqTwoByteString::SizeFor(str.length());
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
+ AllocationSpace space = SelectSpace(size, TENURED);
// Allocate string.
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
}
}
- if (OLD_POINTER_SPACE == space) {
- allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
- } else if (OLD_DATA_SPACE == space) {
- allocation = old_data_space_->AllocateRaw(size_in_bytes);
+ if (OLD_SPACE == space) {
+ allocation = old_space_->AllocateRaw(size_in_bytes);
} else if (CODE_SPACE == space) {
if (size_in_bytes <= code_space()->AreaSize()) {
allocation = code_space_->AllocateRaw(size_in_bytes);
}
-bool Heap::InOldPointerSpace(Address address) {
- return old_pointer_space_->Contains(address);
-}
+bool Heap::InOldSpace(Address address) { return old_space_->Contains(address); }
-bool Heap::InOldPointerSpace(Object* object) {
- return InOldPointerSpace(reinterpret_cast<Address>(object));
-}
-
-
-bool Heap::InOldDataSpace(Address address) {
- return old_data_space_->Contains(address);
-}
-
-
-bool Heap::InOldDataSpace(Object* object) {
- return InOldDataSpace(reinterpret_cast<Address>(object));
+bool Heap::InOldSpace(Object* object) {
+ return InOldSpace(reinterpret_cast<Address>(object));
}
}
-OldSpace* Heap::TargetSpace(HeapObject* object) {
- InstanceType type = object->map()->instance_type();
- AllocationSpace space = TargetSpaceId(type);
- return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
-}
-
-
-AllocationSpace Heap::TargetSpaceId(InstanceType type) {
- // Heap numbers and sequential strings are promoted to old data space, all
- // other object types are promoted to old pointer space. We do not use
- // object->IsHeapNumber() and object->IsSeqString() because we already
- // know that object has the heap object tag.
-
- // These objects are never allocated in new space.
- DCHECK(type != MAP_TYPE);
- DCHECK(type != CODE_TYPE);
- DCHECK(type != ODDBALL_TYPE);
- DCHECK(type != CELL_TYPE);
-
- if (type <= LAST_NAME_TYPE) {
- if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
- DCHECK(type < FIRST_NONSTRING_TYPE);
- // There are four string representations: sequential strings, external
- // strings, cons strings, and sliced strings.
- // Only the latter two contain non-map-word pointers to heap objects.
- return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
- ? OLD_POINTER_SPACE
- : OLD_DATA_SPACE;
- } else {
- return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
- }
-}
-
-
bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
// Object migration is governed by the following rules:
//
- // 1) Objects in new-space can be migrated to one of the old spaces
- //    that matches their target space or they stay in new-space.
+ // 1) Objects in new-space can be migrated to the old space or they
+ //    stay in new-space.
// 2) Objects in old-space stay in the same space when migrating.
// 3) Fillers (two or more words) can migrate due to left-trimming of
- // fixed arrays in new-space, old-data-space and old-pointer-space.
+ // fixed arrays in new-space or old space.
// 4) Fillers (one word) can never migrate, they are skipped by
// incremental marking explicitly to prevent invalid pattern.
- // 5) Short external strings can end up in old pointer space when a cons
- //    string in old pointer space is made external (String::MakeExternal).
+ // 5) Short external strings can end up in old space when a cons string
+ //    in old space is made external (String::MakeExternal).
//
// Since this function is used for debugging only, we do not place
// asserts here, but check everything explicitly.
AllocationSpace src = chunk->owner()->identity();
switch (src) {
case NEW_SPACE:
- return dst == src || dst == TargetSpaceId(type);
- case OLD_POINTER_SPACE:
- return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
- obj->IsExternalString());
- case OLD_DATA_SPACE:
- return dst == src && dst == TargetSpaceId(type);
+ return dst == src || dst == OLD_SPACE;
+ case OLD_SPACE:
+ return dst == src &&
+ (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
case CODE_SPACE:
return dst == src && type == CODE_TYPE;
case MAP_SPACE:
global_ic_age_(0),
scan_on_scavenge_pages_(0),
new_space_(this),
- old_pointer_space_(NULL),
- old_data_space_(NULL),
+ old_space_(NULL),
code_space_(NULL),
map_space_(NULL),
cell_space_(NULL),
intptr_t Heap::Capacity() {
if (!HasBeenSetUp()) return 0;
- return new_space_.Capacity() + old_pointer_space_->Capacity() +
- old_data_space_->Capacity() + code_space_->Capacity() +
- map_space_->Capacity() + cell_space_->Capacity();
+ return new_space_.Capacity() + old_space_->Capacity() +
+ code_space_->Capacity() + map_space_->Capacity() +
+ cell_space_->Capacity();
}
intptr_t Heap::CommittedOldGenerationMemory() {
if (!HasBeenSetUp()) return 0;
- return old_pointer_space_->CommittedMemory() +
- old_data_space_->CommittedMemory() + code_space_->CommittedMemory() +
+ return old_space_->CommittedMemory() + code_space_->CommittedMemory() +
map_space_->CommittedMemory() + cell_space_->CommittedMemory() +
lo_space_->Size();
}
if (!HasBeenSetUp()) return 0;
return new_space_.CommittedPhysicalMemory() +
- old_pointer_space_->CommittedPhysicalMemory() +
- old_data_space_->CommittedPhysicalMemory() +
+ old_space_->CommittedPhysicalMemory() +
code_space_->CommittedPhysicalMemory() +
map_space_->CommittedPhysicalMemory() +
cell_space_->CommittedPhysicalMemory() +
intptr_t Heap::Available() {
if (!HasBeenSetUp()) return 0;
- return new_space_.Available() + old_pointer_space_->Available() +
- old_data_space_->Available() + code_space_->Available() +
- map_space_->Available() + cell_space_->Available();
+ return new_space_.Available() + old_space_->Available() +
+ code_space_->Available() + map_space_->Available() +
+ cell_space_->Available();
}
bool Heap::HasBeenSetUp() {
- return old_pointer_space_ != NULL && old_data_space_ != NULL &&
- code_space_ != NULL && map_space_ != NULL && cell_space_ != NULL &&
- lo_space_ != NULL;
+ return old_space_ != NULL && code_space_ != NULL && map_space_ != NULL &&
+ cell_space_ != NULL && lo_space_ != NULL;
}
", committed: %6" V8_PTR_PREFIX "d KB\n",
new_space_.Size() / KB, new_space_.Available() / KB,
new_space_.CommittedMemory() / KB);
- PrintPID("Old pointers, used: %6" V8_PTR_PREFIX
+ PrintPID("Old space, used: %6" V8_PTR_PREFIX
"d KB"
", available: %6" V8_PTR_PREFIX
"d KB"
", committed: %6" V8_PTR_PREFIX "d KB\n",
- old_pointer_space_->SizeOfObjects() / KB,
- old_pointer_space_->Available() / KB,
- old_pointer_space_->CommittedMemory() / KB);
- PrintPID("Old data space, used: %6" V8_PTR_PREFIX
- "d KB"
- ", available: %6" V8_PTR_PREFIX
- "d KB"
- ", committed: %6" V8_PTR_PREFIX "d KB\n",
- old_data_space_->SizeOfObjects() / KB,
- old_data_space_->Available() / KB,
- old_data_space_->CommittedMemory() / KB);
+ old_space_->SizeOfObjects() / KB, old_space_->Available() / KB,
+ old_space_->CommittedMemory() / KB);
PrintPID("Code space, used: %6" V8_PTR_PREFIX
"d KB"
", available: %6" V8_PTR_PREFIX
isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
(new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
- isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
- static_cast<int>((old_pointer_space()->CommittedMemory() * 100.0) /
- CommittedMemory()));
- isolate_->counters()->heap_fraction_old_data_space()->AddSample(
- static_cast<int>((old_data_space()->CommittedMemory() * 100.0) /
- CommittedMemory()));
+ isolate_->counters()->heap_fraction_old_space()->AddSample(static_cast<int>(
+ (old_space()->CommittedMemory() * 100.0) / CommittedMemory()));
isolate_->counters()->heap_fraction_code_space()->AddSample(
static_cast<int>((code_space()->CommittedMemory() * 100.0) /
CommittedMemory()));
UPDATE_FRAGMENTATION_FOR_SPACE(space)
UPDATE_COUNTERS_FOR_SPACE(new_space)
- UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space)
- UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space)
+ UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space)
// not matter, so long as we do not specify NEW_SPACE, which would not
// cause a full GC.
mark_compact_collector_.SetFlags(flags);
- CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags);
+ CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
mark_compact_collector_.SetFlags(kNoGCFlags);
}
for (HeapObject* object = code_it.Next(); object != NULL;
object = code_it.Next())
object->Iterate(&v);
-
- HeapObjectIterator data_it(heap->old_data_space());
- for (HeapObject* object = data_it.Next(); object != NULL;
- object = data_it.Next())
- object->Iterate(&v);
}
#endif // VERIFY_HEAP
}
AllocationResult allocation;
- if (object_contents == DATA_OBJECT) {
- DCHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
- allocation = heap->old_data_space()->AllocateRaw(allocation_size);
- } else {
- DCHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
- allocation = heap->old_pointer_space()->AllocateRaw(allocation_size);
- }
+ allocation = heap->old_space()->AllocateRaw(allocation_size);
HeapObject* target = NULL; // Initialization to please compiler.
if (allocation.To(&target)) {
set_empty_fixed_array(FixedArray::cast(obj));
{
- AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE);
+ AllocationResult allocation = Allocate(null_map(), OLD_SPACE);
if (!allocation.To(&obj)) return false;
}
set_null_value(Oddball::cast(obj));
Oddball::cast(obj)->set_kind(Oddball::kNull);
{
- AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE);
+ AllocationResult allocation = Allocate(undefined_map(), OLD_SPACE);
if (!allocation.To(&obj)) return false;
}
set_undefined_value(Oddball::cast(obj));
int size = HeapNumber::kSize;
STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
HeapObject* result;
- AllocationResult allocation =
- AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+ AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
result->set_map_no_write_barrier(global_property_cell_map());
STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
HeapObject* result = NULL;
{
- AllocationResult allocation =
- AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+ AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
result->set_map_no_write_barrier(weak_cell_map());
PretenureFlag pretenure) {
// Statically ensure that it is safe to allocate foreigns in paged spaces.
STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
- AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+ AllocationSpace space = (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
Foreign* result;
AllocationResult allocation = Allocate(foreign_map(), space);
if (!allocation.To(&result)) return allocation;
v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
}
int size = ByteArray::SizeFor(length);
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
bool Heap::CanMoveObjectStart(HeapObject* object) {
Address address = object->address();
- bool is_in_old_pointer_space = InOldPointerSpace(address);
- bool is_in_old_data_space = InOldDataSpace(address);
if (lo_space()->Contains(object)) return false;
Page* page = Page::FromAddress(address);
// We can move the object start if:
- // (1) the object is not in old pointer or old data space,
+ // (1) the object is not in old space,
// (2) the page of the object was already swept,
// (3) the page was already concurrently swept. This case is an optimization
// for concurrent sweeping. The WasSwept predicate for concurrently swept
// pages is set after sweeping all pages.
- return (!is_in_old_pointer_space && !is_in_old_data_space) ||
- page->WasSwept() || page->SweepingCompleted();
+ return !InOldSpace(address) || page->WasSwept() || page->SweepingCompleted();
}
void* external_pointer,
PretenureFlag pretenure) {
int size = ExternalArray::kAlignedSize;
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
size += kPointerSize;
}
#endif
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* object;
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&object)) return allocation;
if (array_type == kExternalFloat64Array) {
DCHECK(map->instance_type() != MAP_TYPE);
// If allocation failures are disallowed, we may allocate in a different
// space when new space is full and the object is not a large object.
- AllocationSpace retry_space =
- (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
+ AllocationSpace retry_space = (space != NEW_SPACE) ? space : OLD_SPACE;
int size = map->instance_size();
if (allocation_site != NULL) {
size += AllocationMemento::kSize;
// Allocate the JSObject.
int size = map->instance_size();
- AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
JSObject* js_obj;
AllocationResult allocation = Allocate(map, space, allocation_site);
if (!allocation.To(&js_obj)) return allocation;
if (always_allocate()) {
{
AllocationResult allocation =
- AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
+ AllocateRaw(object_size, NEW_SPACE, OLD_SPACE);
if (!allocation.To(&clone)) return allocation;
}
Address clone_address = clone->address();
map = internalized_string_map();
size = SeqTwoByteString::SizeFor(chars);
}
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
+ AllocationSpace space = SelectSpace(size, TENURED);
// Allocate string.
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
DCHECK_GE(String::kMaxLength, length);
int size = SeqOneByteString::SizeFor(length);
DCHECK(size <= SeqOneByteString::kMaxSize);
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
DCHECK_GE(String::kMaxLength, length);
int size = SeqTwoByteString::SizeFor(length);
DCHECK(size <= SeqTwoByteString::kMaxSize);
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* result;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
int size = FixedArray::SizeFor(0);
HeapObject* result;
{
- AllocationResult allocation =
- AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
// Initialize the object.
v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
}
int size = FixedArray::SizeFor(length);
- AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
- return AllocateRaw(size, space, OLD_POINTER_SPACE);
+ return AllocateRaw(size, space, OLD_SPACE);
}
#ifndef V8_HOST_ARCH_64_BIT
size += kPointerSize;
#endif
- AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+ AllocationSpace space = SelectSpace(size, pretenure);
HeapObject* object;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&object)) return allocation;
}
#ifndef V8_HOST_ARCH_64_BIT
size += kPointerSize;
#endif
- AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+ AllocationSpace space = SelectSpace(size, TENURED);
HeapObject* object;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&object)) return allocation;
}
object = EnsureDoubleAligned(this, object, size);
#ifndef V8_HOST_ARCH_64_BIT
size += kPointerSize;
#endif
- AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+ AllocationSpace space = SelectSpace(size, TENURED);
HeapObject* object;
{
- AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
+ AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
if (!allocation.To(&object)) return allocation;
}
object = EnsureDoubleAligned(this, object, size);
int size = ConstantPoolArray::SizeFor(small);
HeapObject* result = NULL;
{
- AllocationResult allocation =
- AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
+ AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
}
result->set_map_no_write_barrier(constant_pool_array_map());
HeapObject* result = NULL;
AllocationResult allocation =
- AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+ AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE);
if (!allocation.To(&result)) return allocation;
result->set_map_no_write_barrier(symbol_map());
return exception();
}
int size = map->instance_size();
- AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+ AllocationSpace space = SelectSpace(size, TENURED);
Struct* result;
{
AllocationResult allocation = Allocate(map, space);
isolate_->memory_allocator()->ReportStatistics();
PrintF("To space : ");
new_space_.ReportStatistics();
- PrintF("Old pointer space : ");
- old_pointer_space_->ReportStatistics();
- PrintF("Old data space : ");
- old_data_space_->ReportStatistics();
+ PrintF("Old space : ");
+ old_space_->ReportStatistics();
PrintF("Code space : ");
code_space_->ReportStatistics();
PrintF("Map space : ");
bool Heap::Contains(Address addr) {
if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
return HasBeenSetUp() &&
- (new_space_.ToSpaceContains(addr) ||
- old_pointer_space_->Contains(addr) ||
- old_data_space_->Contains(addr) || code_space_->Contains(addr) ||
- map_space_->Contains(addr) || cell_space_->Contains(addr) ||
- lo_space_->SlowContains(addr));
+ (new_space_.ToSpaceContains(addr) || old_space_->Contains(addr) ||
+ code_space_->Contains(addr) || map_space_->Contains(addr) ||
+ cell_space_->Contains(addr) || lo_space_->SlowContains(addr));
}
switch (space) {
case NEW_SPACE:
return new_space_.ToSpaceContains(addr);
- case OLD_POINTER_SPACE:
- return old_pointer_space_->Contains(addr);
- case OLD_DATA_SPACE:
- return old_data_space_->Contains(addr);
+ case OLD_SPACE:
+ return old_space_->Contains(addr);
case CODE_SPACE:
return code_space_->Contains(addr);
case MAP_SPACE:
new_space_.Verify();
- old_pointer_space_->Verify(&visitor);
+ old_space_->Verify(&visitor);
map_space_->Verify(&visitor);
VerifyPointersVisitor no_dirty_regions_visitor;
- old_data_space_->Verify(&no_dirty_regions_visitor);
code_space_->Verify(&no_dirty_regions_visitor);
cell_space_->Verify(&no_dirty_regions_visitor);
*stats->end_marker = HeapStats::kEndMarker;
*stats->new_space_size = new_space_.SizeAsInt();
*stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
- *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects();
- *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
- *stats->old_data_space_size = old_data_space_->SizeOfObjects();
- *stats->old_data_space_capacity = old_data_space_->Capacity();
+ *stats->old_space_size = old_space_->SizeOfObjects();
+ *stats->old_space_capacity = old_space_->Capacity();
*stats->code_space_size = code_space_->SizeOfObjects();
*stats->code_space_capacity = code_space_->Capacity();
*stats->map_space_size = map_space_->SizeOfObjects();
intptr_t Heap::PromotedSpaceSizeOfObjects() {
- return old_pointer_space_->SizeOfObjects() +
- old_data_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
+ return old_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
map_space_->SizeOfObjects() + cell_space_->SizeOfObjects() +
lo_space_->SizeOfObjects();
}
}
new_space_top_after_last_gc_ = new_space()->top();
- // Initialize old pointer space.
- old_pointer_space_ = new OldSpace(this, max_old_generation_size_,
- OLD_POINTER_SPACE, NOT_EXECUTABLE);
- if (old_pointer_space_ == NULL) return false;
- if (!old_pointer_space_->SetUp()) return false;
-
- // Initialize old data space.
- old_data_space_ = new OldSpace(this, max_old_generation_size_, OLD_DATA_SPACE,
- NOT_EXECUTABLE);
- if (old_data_space_ == NULL) return false;
- if (!old_data_space_->SetUp()) return false;
+ // Initialize old space.
+ old_space_ =
+ new OldSpace(this, max_old_generation_size_, OLD_SPACE, NOT_EXECUTABLE);
+ if (old_space_ == NULL) return false;
+ if (!old_space_->SetUp()) return false;
if (!isolate_->code_range()->SetUp(code_range_size_)) return false;
MaximumCommittedMemory());
PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ",
new_space_.MaximumCommittedMemory());
- PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ",
- old_data_space_->MaximumCommittedMemory());
- PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
- old_pointer_space_->MaximumCommittedMemory());
- PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
- old_pointer_space_->MaximumCommittedMemory());
+ PrintF("maximum_committed_by_old_space=%" V8_PTR_PREFIX "d ",
+ old_space_->MaximumCommittedMemory());
PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ",
code_space_->MaximumCommittedMemory());
PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
new_space_.TearDown();
- if (old_pointer_space_ != NULL) {
- old_pointer_space_->TearDown();
- delete old_pointer_space_;
- old_pointer_space_ = NULL;
- }
-
- if (old_data_space_ != NULL) {
- old_data_space_->TearDown();
- delete old_data_space_;
- old_data_space_ = NULL;
+ if (old_space_ != NULL) {
+ old_space_->TearDown();
+ delete old_space_;
+ old_space_ = NULL;
}
if (code_space_ != NULL) {
switch (counter_++) {
case NEW_SPACE:
return heap_->new_space();
- case OLD_POINTER_SPACE:
- return heap_->old_pointer_space();
- case OLD_DATA_SPACE:
- return heap_->old_data_space();
+ case OLD_SPACE:
+ return heap_->old_space();
case CODE_SPACE:
return heap_->code_space();
case MAP_SPACE:
PagedSpace* PagedSpaces::next() {
switch (counter_++) {
- case OLD_POINTER_SPACE:
- return heap_->old_pointer_space();
- case OLD_DATA_SPACE:
- return heap_->old_data_space();
+ case OLD_SPACE:
+ return heap_->old_space();
case CODE_SPACE:
return heap_->code_space();
case MAP_SPACE:
OldSpace* OldSpaces::next() {
switch (counter_++) {
- case OLD_POINTER_SPACE:
- return heap_->old_pointer_space();
- case OLD_DATA_SPACE:
- return heap_->old_data_space();
+ case OLD_SPACE:
+ return heap_->old_space();
case CODE_SPACE:
return heap_->code_space();
default:
case NEW_SPACE:
iterator_ = new SemiSpaceIterator(heap_->new_space(), size_func_);
break;
- case OLD_POINTER_SPACE:
- iterator_ =
- new HeapObjectIterator(heap_->old_pointer_space(), size_func_);
- break;
- case OLD_DATA_SPACE:
- iterator_ = new HeapObjectIterator(heap_->old_data_space(), size_func_);
+ case OLD_SPACE:
+ iterator_ = new HeapObjectIterator(heap_->old_space(), size_func_);
break;
case CODE_SPACE:
iterator_ = new HeapObjectIterator(heap_->code_space(), size_func_);
Address NewSpaceTop() { return new_space_.top(); }
NewSpace* new_space() { return &new_space_; }
- OldSpace* old_pointer_space() { return old_pointer_space_; }
- OldSpace* old_data_space() { return old_data_space_; }
+ OldSpace* old_space() { return old_space_; }
OldSpace* code_space() { return code_space_; }
MapSpace* map_space() { return map_space_; }
CellSpace* cell_space() { return cell_space_; }
LargeObjectSpace* lo_space() { return lo_space_; }
PagedSpace* paged_space(int idx) {
switch (idx) {
- case OLD_POINTER_SPACE:
- return old_pointer_space();
- case OLD_DATA_SPACE:
- return old_data_space();
+ case OLD_SPACE:
+ return old_space();
case MAP_SPACE:
return map_space();
case CELL_SPACE:
return new_space_.allocation_limit_address();
}
- Address* OldPointerSpaceAllocationTopAddress() {
- return old_pointer_space_->allocation_top_address();
+ Address* OldSpaceAllocationTopAddress() {
+ return old_space_->allocation_top_address();
}
- Address* OldPointerSpaceAllocationLimitAddress() {
- return old_pointer_space_->allocation_limit_address();
- }
-
- Address* OldDataSpaceAllocationTopAddress() {
- return old_data_space_->allocation_top_address();
- }
- Address* OldDataSpaceAllocationLimitAddress() {
- return old_data_space_->allocation_limit_address();
+ Address* OldSpaceAllocationLimitAddress() {
+ return old_space_->allocation_limit_address();
}
- // TODO(hpayer): There is still a missmatch between capacity and actual
+ // TODO(hpayer): There is still a mismatch between capacity and actual
inline bool InFromSpace(Object* object);
inline bool InToSpace(Object* object);
- // Returns whether the object resides in old pointer space.
- inline bool InOldPointerSpace(Address address);
- inline bool InOldPointerSpace(Object* object);
-
- // Returns whether the object resides in old data space.
- inline bool InOldDataSpace(Address address);
- inline bool InOldDataSpace(Object* object);
+ // Returns whether the object resides in old space.
+ inline bool InOldSpace(Address address);
+ inline bool InOldSpace(Object* object);
// Checks whether an address/object in the heap (including auxiliary
// area and unused area).
bool InSpace(Address addr, AllocationSpace space);
bool InSpace(HeapObject* value, AllocationSpace space);
- // Finds out which space an object should get promoted to based on its type.
- inline OldSpace* TargetSpace(HeapObject* object);
- static inline AllocationSpace TargetSpaceId(InstanceType type);
-
// Checks whether the given object is allowed to be migrated from it's
// current space into the given destination space. Used for debugging.
inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
int scan_on_scavenge_pages_;
NewSpace new_space_;
- OldSpace* old_pointer_space_;
- OldSpace* old_data_space_;
+ OldSpace* old_space_;
OldSpace* code_space_;
MapSpace* map_space_;
CellSpace* cell_space_;
inline void UpdateOldSpaceLimits();
// Selects the proper allocation space depending on the given object
- // size, pretenuring decision, and preferred old-space.
+ // size and pretenuring decision.
static AllocationSpace SelectSpace(int object_size,
- AllocationSpace preferred_old_space,
PretenureFlag pretenure) {
- DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
- preferred_old_space == OLD_DATA_SPACE);
if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
- return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
+ return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
}
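Callers now pass only the size and the pretenure decision; the intended mapping, written as sketch assertions:

DCHECK_EQ(NEW_SPACE, SelectSpace(64, NOT_TENURED));
DCHECK_EQ(OLD_SPACE, SelectSpace(64, TENURED));
// Oversized objects ignore pretenuring and go to large-object space:
DCHECK_EQ(LO_SPACE,
          SelectSpace(Page::kMaxRegularHeapObjectSize + 1, TENURED));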
HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
int* start_marker; // 0
int* new_space_size; // 1
int* new_space_capacity; // 2
- intptr_t* old_pointer_space_size; // 3
- intptr_t* old_pointer_space_capacity; // 4
- intptr_t* old_data_space_size; // 5
- intptr_t* old_data_space_capacity; // 6
- intptr_t* code_space_size; // 7
- intptr_t* code_space_capacity; // 8
- intptr_t* map_space_size; // 9
- intptr_t* map_space_capacity; // 10
- intptr_t* cell_space_size; // 11
- intptr_t* cell_space_capacity; // 12
- intptr_t* lo_space_size; // 13
- int* global_handle_count; // 14
- int* weak_global_handle_count; // 15
- int* pending_global_handle_count; // 16
- int* near_death_global_handle_count; // 17
- int* free_global_handle_count; // 18
- intptr_t* memory_allocator_size; // 19
- intptr_t* memory_allocator_capacity; // 20
- int* objects_per_type; // 21
- int* size_per_type; // 22
- int* os_error; // 23
- int* end_marker; // 24
+ intptr_t* old_space_size; // 3
+ intptr_t* old_space_capacity; // 4
+ intptr_t* code_space_size; // 5
+ intptr_t* code_space_capacity; // 6
+ intptr_t* map_space_size; // 7
+ intptr_t* map_space_capacity; // 8
+ intptr_t* cell_space_size; // 9
+ intptr_t* cell_space_capacity; // 10
+ intptr_t* lo_space_size; // 11
+ int* global_handle_count; // 12
+ int* weak_global_handle_count; // 13
+ int* pending_global_handle_count; // 14
+ int* near_death_global_handle_count; // 15
+ int* free_global_handle_count; // 16
+ intptr_t* memory_allocator_size; // 17
+ intptr_t* memory_allocator_capacity; // 18
+ int* objects_per_type; // 19
+ int* size_per_type; // 20
+ int* os_error; // 21
+ int* end_marker; // 22
};
};
-// Space iterator for iterating over all old spaces of the heap: Old pointer
-// space, old data space and code space. Returns each space in turn, and null
-// when it is done.
+// Space iterator for iterating over all old spaces of the heap: Old space
+// and code space. Returns each space in turn, and null when it is done.
class OldSpaces BASE_EMBEDDED {
public:
- explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
+ explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
OldSpace* next();
private:
// Space iterator for iterating over all the paged spaces of the heap: Map
-// space, old pointer space, old data space, code space and cell space. Returns
-// each space in turn, and null when it is done.
+// space, old space, code space and cell space. Returns each space in turn,
+// and null when it is done.
class PagedSpaces BASE_EMBEDDED {
public:
- explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
+ explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
PagedSpace* next();
private:
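Usage of both iterators is unchanged; only the starting counter moves from OLD_POINTER_SPACE to OLD_SPACE. A minimal sketch:

PagedSpaces spaces(heap);
for (PagedSpace* space = spaces.next(); space != NULL;
     space = spaces.next()) {
  // Visits old space, code space, map space and cell space once each.
}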
}
-static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
- MarkBit mark_bit, int size) {
- DCHECK(!Marking::IsImpossible(mark_bit));
- if (mark_bit.Get()) return;
- mark_bit.Set();
- MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
- DCHECK(Marking::IsBlack(mark_bit));
-}
-
-
static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
MarkBit mark_bit, int size) {
DCHECK(!Marking::IsImpossible(mark_bit));
void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
- DeactivateIncrementalWriteBarrierForSpace(heap_->old_pointer_space());
- DeactivateIncrementalWriteBarrierForSpace(heap_->old_data_space());
+ DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->cell_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
void IncrementalMarking::ActivateIncrementalWriteBarrier() {
- ActivateIncrementalWriteBarrier(heap_->old_pointer_space());
- ActivateIncrementalWriteBarrier(heap_->old_data_space());
+ ActivateIncrementalWriteBarrier(heap_->old_space());
ActivateIncrementalWriteBarrier(heap_->cell_space());
ActivateIncrementalWriteBarrier(heap_->map_space());
ActivateIncrementalWriteBarrier(heap_->code_space());
void IncrementalMarking::MarkObject(Heap* heap, HeapObject* obj) {
MarkBit mark_bit = Marking::MarkBitFrom(obj);
- if (mark_bit.data_only()) {
- MarkBlackOrKeepGrey(obj, mark_bit, obj->Size());
- } else if (Marking::IsWhite(mark_bit)) {
+ if (Marking::IsWhite(mark_bit)) {
heap->incremental_marking()->WhiteToGreyAndPush(obj, mark_bit);
}
}
MarkBit Marking::MarkBitFrom(Address addr) {
MemoryChunk* p = MemoryChunk::FromAddress(addr);
- return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr),
- p->ContainsOnlyData());
+ return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr));
}
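With the data-only page distinction gone, a mark bit is plain bitmap index math; a sketch of what MarkBitFromIndex computes, assuming V8's 32-bit-cell Bitmap layout:

// index -> (cell pointer, bit mask) inside the page's marking bitmap.
MarkBit::CellType* cell =
    bitmap->cells() + (index >> Bitmap::kBitsPerCellLog2);
MarkBit::CellType mask = 1u << (index & Bitmap::kBitIndexMask);
// MarkBit(cell, mask) no longer carries the dropped data_only flag.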
static void VerifyMarking(Heap* heap) {
- VerifyMarking(heap->old_pointer_space());
- VerifyMarking(heap->old_data_space());
+ VerifyMarking(heap->old_space());
VerifyMarking(heap->code_space());
VerifyMarking(heap->cell_space());
VerifyMarking(heap->map_space());
static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
- if (FLAG_use_allocation_folding &&
- (space == heap->old_pointer_space() || space == heap->old_data_space())) {
+ if (FLAG_use_allocation_folding && (space == heap->old_space())) {
return;
}
PageIterator it(space);
static void VerifyEvacuation(Heap* heap) {
- VerifyEvacuation(heap, heap->old_pointer_space());
- VerifyEvacuation(heap, heap->old_data_space());
+ VerifyEvacuation(heap, heap->old_space());
VerifyEvacuation(heap, heap->code_space());
VerifyEvacuation(heap, heap->cell_space());
VerifyEvacuation(heap, heap->map_space());
void MarkCompactCollector::SetUp() {
- free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space()));
- free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space()));
+ free_list_old_space_.Reset(new FreeList(heap_->old_space()));
}
if (!compacting_) {
DCHECK(evacuation_candidates_.length() == 0);
- CollectEvacuationCandidates(heap()->old_pointer_space());
- CollectEvacuationCandidates(heap()->old_data_space());
+ CollectEvacuationCandidates(heap()->old_space());
if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION ||
FLAG_incremental_code_compaction)) {
TraceFragmentation(heap()->cell_space());
}
- heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists();
- heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists();
+ heap()->old_space()->EvictEvacuationCandidatesFromFreeLists();
heap()->code_space()->EvictEvacuationCandidatesFromFreeLists();
compacting_ = evacuation_candidates_.length() > 0;
void MarkCompactCollector::ClearInvalidStoreAndSlotsBufferEntries() {
heap_->store_buffer()->ClearInvalidStoreBufferEntries();
- ClearInvalidSlotsBufferEntries(heap_->old_pointer_space());
- ClearInvalidSlotsBufferEntries(heap_->old_data_space());
+ ClearInvalidSlotsBufferEntries(heap_->old_space());
ClearInvalidSlotsBufferEntries(heap_->code_space());
ClearInvalidSlotsBufferEntries(heap_->cell_space());
ClearInvalidSlotsBufferEntries(heap_->map_space());
static void VerifyValidStoreAndSlotsBufferEntries(Heap* heap) {
heap->store_buffer()->VerifyValidStoreBufferEntries();
- VerifyValidSlotsBufferEntries(heap, heap->old_pointer_space());
- VerifyValidSlotsBufferEntries(heap, heap->old_data_space());
+ VerifyValidSlotsBufferEntries(heap, heap->old_space());
VerifyValidSlotsBufferEntries(heap, heap->code_space());
VerifyValidSlotsBufferEntries(heap, heap->cell_space());
VerifyValidSlotsBufferEntries(heap, heap->map_space());
void MarkCompactCollector::VerifyMarkbitsAreClean() {
- VerifyMarkbitsAreClean(heap_->old_pointer_space());
- VerifyMarkbitsAreClean(heap_->old_data_space());
+ VerifyMarkbitsAreClean(heap_->old_space());
VerifyMarkbitsAreClean(heap_->code_space());
VerifyMarkbitsAreClean(heap_->cell_space());
VerifyMarkbitsAreClean(heap_->map_space());
void MarkCompactCollector::ClearMarkbits() {
ClearMarkbitsInPagedSpace(heap_->code_space());
ClearMarkbitsInPagedSpace(heap_->map_space());
- ClearMarkbitsInPagedSpace(heap_->old_pointer_space());
- ClearMarkbitsInPagedSpace(heap_->old_data_space());
+ ClearMarkbitsInPagedSpace(heap_->old_space());
ClearMarkbitsInPagedSpace(heap_->cell_space());
ClearMarkbitsInNewSpace(heap_->new_space());
void MarkCompactCollector::StartSweeperThreads() {
- DCHECK(free_list_old_pointer_space_.get()->IsEmpty());
- DCHECK(free_list_old_data_space_.get()->IsEmpty());
+ DCHECK(free_list_old_space_.get()->IsEmpty());
V8::GetCurrentPlatform()->CallOnBackgroundThread(
- new SweeperTask(heap(), heap()->old_data_space()),
- v8::Platform::kShortRunningTask);
- V8::GetCurrentPlatform()->CallOnBackgroundThread(
- new SweeperTask(heap(), heap()->old_pointer_space()),
+ new SweeperTask(heap(), heap()->old_space()),
v8::Platform::kShortRunningTask);
}
// If sweeping is not completed or not running at all, we try to complete it
// here.
if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) {
- SweepInParallel(heap()->paged_space(OLD_DATA_SPACE), 0);
- SweepInParallel(heap()->paged_space(OLD_POINTER_SPACE), 0);
+ SweepInParallel(heap()->paged_space(OLD_SPACE), 0);
}
- // Wait twice for both jobs.
+ // Wait for the single sweeper job.
if (heap()->concurrent_sweeping_enabled()) {
pending_sweeper_jobs_semaphore_.Wait();
- pending_sweeper_jobs_semaphore_.Wait();
}
ParallelSweepSpacesComplete();
sweeping_in_progress_ = false;
- RefillFreeList(heap()->paged_space(OLD_DATA_SPACE));
- RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE));
- heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes();
- heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes();
+ RefillFreeList(heap()->paged_space(OLD_SPACE));
+ heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes();
#ifdef VERIFY_HEAP
if (FLAG_verify_heap && !evacuation()) {
void MarkCompactCollector::RefillFreeList(PagedSpace* space) {
FreeList* free_list;
- if (space == heap()->old_pointer_space()) {
- free_list = free_list_old_pointer_space_.get();
- } else if (space == heap()->old_data_space()) {
- free_list = free_list_old_data_space_.get();
+ if (space == heap()->old_space()) {
+ free_list = free_list_old_space_.get();
} else {
// Any PagedSpace might invoke RefillFreeLists, so we need to make sure
- // to only refill them for old data and pointer spaces.
+ // to only refill them for the old space.
return;
}
switch (space) {
case NEW_SPACE:
return "NEW_SPACE";
- case OLD_POINTER_SPACE:
- return "OLD_POINTER_SPACE";
- case OLD_DATA_SPACE:
- return "OLD_DATA_SPACE";
+ case OLD_SPACE:
+ return "OLD_SPACE";
case CODE_SPACE:
return "CODE_SPACE";
case MAP_SPACE:
void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
- DCHECK(space->identity() == OLD_POINTER_SPACE ||
- space->identity() == OLD_DATA_SPACE ||
- space->identity() == CODE_SPACE);
+ DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE);
static const int kMaxMaxEvacuationCandidates = 1000;
int number_of_pages = space->CountTotalPages();
int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects);
grey_objects >>= trailing_zeros;
offset += trailing_zeros;
- MarkBit markbit(cell, 1 << offset, false);
+ MarkBit markbit(cell, 1 << offset);
DCHECK(Marking::IsGrey(markbit));
Marking::GreyToBlack(markbit);
Address addr = cell_base + offset * kPointerSize;
DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_);
if (marking_deque_.IsFull()) return;
- DiscoverGreyObjectsInSpace(heap(), &marking_deque_,
- heap()->old_pointer_space());
- if (marking_deque_.IsFull()) return;
-
- DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space());
+ DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_space());
if (marking_deque_.IsFull()) return;
DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space());
Address src_addr = src->address();
DCHECK(heap()->AllowedToBeMigrated(src, dest));
DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize);
- if (dest == OLD_POINTER_SPACE) {
+ if (dest == OLD_SPACE) {
Address src_slot = src_addr;
Address dst_slot = dst_addr;
DCHECK(IsAligned(size, kPointerSize));
SlotsBuffer::IGNORE_OVERFLOW);
Code::cast(dst)->Relocate(dst_addr - src_addr);
} else {
- DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE);
+ DCHECK(dest == NEW_SPACE);
heap()->MoveBlock(dst_addr, src_addr, size);
}
heap()->OnMoveEvent(dst, src, size);
space_owner_id = 1;
} else if (heap->new_space()->FromSpaceContains(slot_address)) {
space_owner_id = 2;
- } else if (heap->old_pointer_space()->ContainsSafe(slot_address)) {
+ } else if (heap->old_space()->ContainsSafe(slot_address)) {
space_owner_id = 3;
- } else if (heap->old_data_space()->ContainsSafe(slot_address)) {
- space_owner_id = 4;
} else if (heap->code_space()->ContainsSafe(slot_address)) {
- space_owner_id = 5;
+ space_owner_id = 4;
} else if (heap->map_space()->ContainsSafe(slot_address)) {
- space_owner_id = 6;
+ space_owner_id = 5;
} else if (heap->cell_space()->ContainsSafe(slot_address)) {
- space_owner_id = 7;
+ space_owner_id = 6;
} else {
// Lo space or other.
- space_owner_id = 8;
+ space_owner_id = 7;
}
data[index++] = space_owner_id;
data[index++] = 0x20aaaaaaaaUL;
int object_size) {
DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
- OldSpace* target_space = heap()->TargetSpace(object);
+ OldSpace* old_space = heap()->old_space();
- DCHECK(target_space == heap()->old_pointer_space() ||
- target_space == heap()->old_data_space());
HeapObject* target;
- AllocationResult allocation = target_space->AllocateRaw(object_size);
+ AllocationResult allocation = old_space->AllocateRaw(object_size);
if (allocation.To(&target)) {
- MigrateObject(target, object, object_size, target_space->identity());
+ MigrateObject(target, object, object_size, old_space->identity());
heap()->IncrementPromotedObjectsSize(object_size);
return true;
}
// we can safely go to the page from the slot address.
Page* p = Page::FromAddress(addr);
- // First check owner's identity because old pointer and old data spaces
- // are swept lazily and might still have non-zero mark-bits on some
- // pages.
+ // First check owner's identity because old space is swept concurrently or
+ // lazily and might still have non-zero mark-bits on some pages.
if (p->owner()->identity() != CODE_SPACE) return false;
// In code space only bits on evacuation candidates (but we don't record
p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
switch (space->identity()) {
- case OLD_DATA_SPACE:
- Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
- IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
- &updating_visitor);
- break;
- case OLD_POINTER_SPACE:
+ case OLD_SPACE:
Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
&updating_visitor);
int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) {
int max_freed = 0;
if (page->TryParallelSweeping()) {
- FreeList* free_list = space == heap()->old_pointer_space()
- ? free_list_old_pointer_space_.get()
- : free_list_old_data_space_.get();
+ FreeList* free_list = free_list_old_space_.get();
FreeList private_free_list(space);
max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
{
GCTracer::Scope sweep_scope(heap()->tracer(),
GCTracer::Scope::MC_SWEEP_OLDSPACE);
- {
- SweepSpace(heap()->old_pointer_space(), CONCURRENT_SWEEPING);
- SweepSpace(heap()->old_data_space(), CONCURRENT_SWEEPING);
- }
+ { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); }
sweeping_in_progress_ = true;
if (heap()->concurrent_sweeping_enabled()) {
StartSweeperThreads();
void MarkCompactCollector::ParallelSweepSpacesComplete() {
- ParallelSweepSpaceComplete(heap()->old_pointer_space());
- ParallelSweepSpaceComplete(heap()->old_data_space());
+ ParallelSweepSpaceComplete(heap()->old_space());
}
// to other evacuation candidates thus we have to
// rescan the page after evacuation to discover and update all
// pointers to evacuated objects.
- if (page->owner()->identity() == OLD_DATA_SPACE) {
- evacuation_candidates_.RemoveElement(page);
- } else {
- page->SetFlag(Page::RESCAN_ON_EVACUATION);
- }
+ page->SetFlag(Page::RESCAN_ON_EVACUATION);
}
List<Page*> evacuation_candidates_;
List<Code*> invalidated_code_;
- SmartPointer<FreeList> free_list_old_data_space_;
- SmartPointer<FreeList> free_list_old_pointer_space_;
+ SmartPointer<FreeList> free_list_old_space_;
friend class Heap;
};
PointerChunkIterator::PointerChunkIterator(Heap* heap)
- : state_(kOldPointerState),
- old_pointer_iterator_(heap->old_pointer_space()),
+ : state_(kOldSpaceState),
+ old_iterator_(heap->old_space()),
map_iterator_(heap->map_space()),
lo_iterator_(heap->lo_space()) {}
HeapObjectIterator::HeapObjectIterator(Page* page,
HeapObjectCallback size_func) {
Space* owner = page->owner();
- DCHECK(owner == page->heap()->old_pointer_space() ||
- owner == page->heap()->old_data_space() ||
+ DCHECK(owner == page->heap()->old_space() ||
owner == page->heap()->map_space() ||
owner == page->heap()->cell_space() ||
owner == page->heap()->code_space());
chunk->SetFlag(IS_EXECUTABLE);
}
- if (owner == heap->old_data_space()) {
- chunk->SetFlag(CONTAINS_ONLY_DATA);
- }
-
return chunk;
}
STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::NEW_SPACE) ==
ObjectSpace::kObjectSpaceNewSpace);
-STATIC_ASSERT(static_cast<ObjectSpace>(1
- << AllocationSpace::OLD_POINTER_SPACE) ==
- ObjectSpace::kObjectSpaceOldPointerSpace);
-STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_DATA_SPACE) ==
- ObjectSpace::kObjectSpaceOldDataSpace);
+STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_SPACE) ==
+ ObjectSpace::kObjectSpaceOldSpace);
STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CODE_SPACE) ==
ObjectSpace::kObjectSpaceCodeSpace);
STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CELL_SPACE) ==
page->Unlink();
}
- if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) {
- heap()->isolate()->memory_allocator()->Free(page);
- } else {
- heap()->QueueMemoryChunkForFree(page);
- }
+ heap()->QueueMemoryChunkForFree(page);
DCHECK(Capacity() > 0);
accounting_stats_.ShrinkSpace(AreaSize());
//
// During scavenges and mark-sweep collections we sometimes (after a store
// buffer overflow) iterate intergenerational pointers without decoding heap
-// object maps so if the page belongs to old pointer space or large object
-// space it is essential to guarantee that the page does not contain any
+// object maps so if the page belongs to old space or large object space
+// it is essential to guarantee that the page does not contain any
// garbage pointers to new space: every pointer aligned word which satisfies
// the Heap::InNewSpace() predicate must be a pointer to a live heap object in
-// new space. Thus objects in old pointer and large object spaces should have a
+// new space. Thus objects in old space and the large object space should have a
// special layout (e.g. no bare integer fields). This requirement does not
// apply to map space which is iterated in a special fashion. However we still
// require pointer fields of dead maps to be cleaned.
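
The layout invariant spelled out above is what makes a map-free, word-by-word scan sound. A hedged sketch of such a scan; the new-space bounds and all names are invented for illustration:

  #include <cstdint>

  // Invented new-space bounds, purely for the sketch.
  const uintptr_t kNewSpaceStart = 0x10000000;
  const uintptr_t kNewSpaceEnd = 0x20000000;

  bool InNewSpace(uintptr_t word) {
    return word >= kNewSpaceStart && word < kNewSpaceEnd;
  }

  // Scan an old-space page word by word without decoding object maps. This is
  // only sound if every word that looks like a new-space pointer really is
  // one, i.e. exactly the "no bare integer fields" rule described above.
  void VisitIntergenerationalPointers(const uintptr_t* start,
                                      const uintptr_t* end,
                                      void (*visit)(uintptr_t)) {
    for (const uintptr_t* slot = start; slot < end; ++slot) {
      if (InNewSpace(*slot)) visit(*slot);  // a bare integer here is a bug
    }
  }
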
public:
typedef uint32_t CellType;
- inline MarkBit(CellType* cell, CellType mask, bool data_only)
- : cell_(cell), mask_(mask), data_only_(data_only) {}
+ inline MarkBit(CellType* cell, CellType mask) : cell_(cell), mask_(mask) {}
inline CellType* cell() { return cell_; }
inline CellType mask() { return mask_; }
inline bool Get() { return (*cell_ & mask_) != 0; }
inline void Clear() { *cell_ &= ~mask_; }
- inline bool data_only() { return data_only_; }
inline MarkBit Next() {
CellType new_mask = mask_ << 1;
if (new_mask == 0) {
- return MarkBit(cell_ + 1, 1, data_only_);
+ return MarkBit(cell_ + 1, 1);
} else {
- return MarkBit(cell_, new_mask, data_only_);
+ return MarkBit(cell_, new_mask);
}
}
private:
CellType* cell_;
CellType mask_;
- // This boolean indicates that the object is in a data-only space with no
- // pointers. This enables some optimizations when marking.
- // It is expected that this field is inlined and turned into control flow
- // at the place where the MarkBit object is created.
- bool data_only_;
};
return reinterpret_cast<Bitmap*>(addr);
}
- inline MarkBit MarkBitFromIndex(uint32_t index, bool data_only = false) {
+ inline MarkBit MarkBitFromIndex(uint32_t index) {
MarkBit::CellType mask = 1 << (index & kBitIndexMask);
MarkBit::CellType* cell = this->cells() + (index >> kBitsPerCellLog2);
- return MarkBit(cell, mask, data_only);
+ return MarkBit(cell, mask);
}
static inline void Clear(MemoryChunk* chunk);
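
For reference, the index-to-mark-bit mapping used by MarkBitFromIndex is a plain divide/modulo by the 32-bit cell width. A self-contained sketch with invented wrapper names:

  #include <cstdint>

  typedef uint32_t CellType;
  const int kBitsPerCellLog2 = 5;  // 32 bits per cell
  const uint32_t kBitIndexMask = (1u << kBitsPerCellLog2) - 1;

  struct MarkBitRef {
    CellType* cell;
    CellType mask;
    bool Get() const { return (*cell & mask) != 0; }
    void Set() const { *cell |= mask; }
    void Clear() const { *cell &= ~mask; }
  };

  // Bit i of the bitmap lives in cells[i / 32] under mask 1 << (i % 32).
  MarkBitRef MarkBitFromIndex(CellType* cells, uint32_t index) {
    CellType mask = 1u << (index & kBitIndexMask);
    return MarkBitRef{cells + (index >> kBitsPerCellLog2), mask};
  }
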
IN_FROM_SPACE, // Mutually exclusive with IN_TO_SPACE.
IN_TO_SPACE,    // All pages in new space have one of these two set.
NEW_SPACE_BELOW_AGE_MARK,
- CONTAINS_ONLY_DATA,
EVACUATION_CANDIDATE,
RESCAN_ON_EVACUATION,
NEVER_EVACUATE, // May contain immortal immutables.
return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
}
- bool ContainsOnlyData() { return IsFlagSet(CONTAINS_ONLY_DATA); }
-
bool InNewSpace() {
return (flags_ & ((1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE))) != 0;
}
// -----------------------------------------------------------------------------
-// Old object space (excluding map objects)
+// Old object space (covers both regular old-generation objects and code space)
class OldSpace : public PagedSpace {
public:
// Return NULL when the iterator is done.
MemoryChunk* next() {
switch (state_) {
- case kOldPointerState: {
- if (old_pointer_iterator_.has_next()) {
- return old_pointer_iterator_.next();
+ case kOldSpaceState: {
+ if (old_iterator_.has_next()) {
+ return old_iterator_.next();
}
state_ = kMapState;
// Fall through.
private:
- enum State { kOldPointerState, kMapState, kLargeObjectState, kFinishedState };
+ enum State { kOldSpaceState, kMapState, kLargeObjectState, kFinishedState };
State state_;
- PageIterator old_pointer_iterator_;
+ PageIterator old_iterator_;
PageIterator map_iterator_;
LargeObjectIterator lo_iterator_;
};
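
The iterator above is a small fall-through state machine: drain one space's page iterator, advance the state, and fall into the next case. A reduced, self-contained model of the same pattern (two spaces plus a terminal state; all names invented):

  #include <cstddef>
  #include <vector>

  struct Chunk { int id; };

  // Toy stand-in for a per-space page iterator.
  class ChunkIterator {
   public:
    explicit ChunkIterator(std::vector<Chunk>* chunks) : chunks_(chunks) {}
    bool has_next() const { return pos_ < chunks_->size(); }
    Chunk* next() { return &(*chunks_)[pos_++]; }
   private:
    std::vector<Chunk>* chunks_;
    size_t pos_ = 0;
  };

  class TwoSpaceIterator {
   public:
    TwoSpaceIterator(std::vector<Chunk>* old_space,
                     std::vector<Chunk>* map_space)
        : old_iterator_(old_space), map_iterator_(map_space) {}

    Chunk* next() {
      switch (state_) {
        case kOldSpaceState:
          if (old_iterator_.has_next()) return old_iterator_.next();
          state_ = kMapState;
          // Fall through.
        case kMapState:
          if (map_iterator_.has_next()) return map_iterator_.next();
          state_ = kFinishedState;
          // Fall through.
        case kFinishedState:
          return nullptr;  // done
      }
      return nullptr;
    }

   private:
    enum State { kOldSpaceState, kMapState, kFinishedState };
    State state_ = kOldSpaceState;
    ChunkIterator old_iterator_;
    ChunkIterator map_iterator_;
  };
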
void StoreBuffer::Mark(Address addr) {
DCHECK(!heap_->cell_space()->Contains(addr));
DCHECK(!heap_->code_space()->Contains(addr));
- DCHECK(!heap_->old_data_space()->Contains(addr));
Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
*top++ = addr;
heap_->public_set_store_buffer_top(top);
if (store_buffer_rebuilding_enabled_) {
SLOW_DCHECK(!heap_->cell_space()->Contains(addr) &&
!heap_->code_space()->Contains(addr) &&
- !heap_->old_data_space()->Contains(addr) &&
!heap_->new_space()->Contains(addr));
Address* top = old_top_;
*top++ = addr;
heap_->mark_compact_collector()->EnsureSweepingCompleted();
}
}
- CHECK(page->owner() == heap_->old_pointer_space());
+ CHECK(page->owner() == heap_->old_space());
HeapObjectIterator iterator(page, NULL);
for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
heap_object = iterator.Next()) {
for (Address* current = start_; current < top; current++) {
DCHECK(!heap_->cell_space()->Contains(*current));
DCHECK(!heap_->code_space()->Contains(*current));
- DCHECK(!heap_->old_data_space()->Contains(*current));
uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current);
// Shift out the last bits including any tags.
int_addr >>= kPointerSizeLog2;
return false;
}
- dominator_allocate = GetFoldableDominator(dominator_allocate);
- if (dominator_allocate == NULL) {
+
+ if (!IsFoldable(dominator_allocate)) {
+ if (FLAG_trace_allocation_folding) {
+ PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
+ Mnemonic(), dominator->id(), dominator->Mnemonic());
+ }
return false;
}
DCHECK(
(IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
- (IsOldDataSpaceAllocation() &&
- dominator_allocate->IsOldDataSpaceAllocation()) ||
- (IsOldPointerSpaceAllocation() &&
- dominator_allocate->IsOldPointerSpaceAllocation()));
+ (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));
// First update the size of the dominator allocate instruction.
dominator_size = dominator_allocate->size();
}
-HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
- if (!IsFoldable(dominator)) {
- // We cannot hoist old space allocations over new space allocations.
- if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
- if (FLAG_trace_allocation_folding) {
- PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n", id(),
- Mnemonic(), dominator->id(), dominator->Mnemonic());
- }
- return NULL;
- }
-
- HAllocate* dominator_dominator = dominator->dominating_allocate_;
-
- // We can hoist old data space allocations over an old pointer space
- // allocation and vice versa. For that we have to check the dominator
- // of the dominator allocate instruction.
- if (dominator_dominator == NULL) {
- dominating_allocate_ = dominator;
- if (FLAG_trace_allocation_folding) {
- PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
- Mnemonic(), dominator->id(), dominator->Mnemonic());
- }
- return NULL;
- }
-
- // We can just fold old space allocations that are in the same basic block,
- // since it is not guaranteed that we fill up the whole allocated old
- // space memory.
- // TODO(hpayer): Remove this limitation and add filler maps for each each
- // allocation as soon as we have store elimination.
- if (block()->block_id() != dominator_dominator->block()->block_id()) {
- if (FLAG_trace_allocation_folding) {
- PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
- id(), Mnemonic(), dominator_dominator->id(),
- dominator_dominator->Mnemonic());
- }
- return NULL;
- }
-
- DCHECK((IsOldDataSpaceAllocation() &&
- dominator_dominator->IsOldDataSpaceAllocation()) ||
- (IsOldPointerSpaceAllocation() &&
- dominator_dominator->IsOldPointerSpaceAllocation()));
-
- int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
- HStoreNamedField* dominator_free_space_size =
- dominator->filler_free_space_size_;
- if (dominator_free_space_size != NULL) {
- // We already hoisted one old space allocation, i.e., we already installed
- // a filler map. Hence, we just have to update the free space size.
- dominator->UpdateFreeSpaceFiller(current_size);
- } else {
- // This is the first old space allocation that gets hoisted. We have to
- // install a filler map since the follwing allocation may cause a GC.
- dominator->CreateFreeSpaceFiller(current_size);
- }
-
- // We can hoist the old space allocation over the actual dominator.
- return dominator_dominator;
- }
- return dominator;
-}
-
-
void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
DCHECK(filler_free_space_size_ != NULL);
Zone* zone = block()->zone();
std::ostream& HAllocate::PrintDataTo(std::ostream& os) const { // NOLINT
os << NameOf(size()) << " (";
if (IsNewSpaceAllocation()) os << "N";
- if (IsOldPointerSpaceAllocation()) os << "P";
- if (IsOldDataSpaceAllocation()) os << "D";
+ if (IsOldSpaceAllocation()) os << "P";
if (MustAllocateDoubleAligned()) os << "A";
if (MustPrefillWithFiller()) os << "F";
return os << ")";
return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
}
- bool IsOldDataSpaceAllocation() const {
- return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
- }
-
- bool IsOldPointerSpaceAllocation() const {
- return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
+ bool IsOldSpaceAllocation() const {
+ return (flags_ & ALLOCATE_IN_OLD_SPACE) != 0;
}
bool MustAllocateDoubleAligned() const {
private:
enum Flags {
ALLOCATE_IN_NEW_SPACE = 1 << 0,
- ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
- ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
+ ALLOCATE_IN_OLD_SPACE = 1 << 2,
ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
PREFILL_WITH_FILLER = 1 << 4,
CLEAR_NEXT_MAP_WORD = 1 << 5
static Flags ComputeFlags(PretenureFlag pretenure_flag,
InstanceType instance_type) {
- Flags flags = pretenure_flag == TENURED
- ? (Heap::TargetSpaceId(instance_type) == OLD_POINTER_SPACE
- ? ALLOCATE_IN_OLD_POINTER_SPACE
- : ALLOCATE_IN_OLD_DATA_SPACE)
- : ALLOCATE_IN_NEW_SPACE;
+ Flags flags = pretenure_flag == TENURED ? ALLOCATE_IN_OLD_SPACE
+ : ALLOCATE_IN_NEW_SPACE;
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
flags = static_cast<Flags>(flags | ALLOCATE_DOUBLE_ALIGNED);
}
bool IsFoldable(HAllocate* allocate) {
return (IsNewSpaceAllocation() && allocate->IsNewSpaceAllocation()) ||
- (IsOldDataSpaceAllocation() &&
- allocate->IsOldDataSpaceAllocation()) ||
- (IsOldPointerSpaceAllocation() &&
- allocate->IsOldPointerSpaceAllocation());
+ (IsOldSpaceAllocation() && allocate->IsOldSpaceAllocation());
}
void ClearNextMapWord(int offset);
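
With only two spaces left, foldability collapses to "both allocations target the same space". A toy model (names invented) of what folding then amounts to:

  enum AllocSpace { NEW_SPACE_ALLOC, OLD_SPACE_ALLOC };

  struct Alloc {
    AllocSpace space;
    int size;  // in bytes; assumed constant-folded
  };

  bool IsFoldable(const Alloc& a, const Alloc& b) {
    return a.space == b.space;
  }

  // Folding grows the dominating allocation so the dominated one can reuse
  // the tail of that region instead of bumping the allocation top again.
  bool TryFold(Alloc* dominator, const Alloc& dominated) {
    if (!IsFoldable(*dominator, dominated)) return false;
    dominator->size += dominated.size;
    return true;
  }
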
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
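
In prose: if the bump-pointer result is not double-aligned, the code plants a one-word filler and advances by one word; with PRETENURE set, the old-space limit is not double-aligned the way the new-space limit is, so the extra word must first be re-checked against the limit. A C++ sketch under those assumptions (names invented; kPointerSize of 4 is an ia32 assumption):

  #include <cstdint>

  const int kPointerSize = 4;                // ia32 assumption
  const uintptr_t kDoubleAlignmentMask = 7;  // kDoubleAlignment - 1

  // Returns the (possibly bumped) allocation pointer, or nullptr when the
  // filler word would cross the old-space limit and a GC is required.
  uint8_t* AlignForDouble(uint8_t* result, uint8_t* limit, bool pretenure) {
    if ((reinterpret_cast<uintptr_t>(result) & kDoubleAlignmentMask) != 0) {
      if (pretenure && result >= limit) return nullptr;  // gc_required path
      // The real code stores the one-pointer filler map at |result| here.
      result += kPointerSize;
    }
    return result;
  }
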
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space or old pointer space. If the given space
+ // Allocate an object in new space or old space. If the given space
// is exhausted control continues at the gc_required label. The allocated
// object is returned in result and end of the new object is returned in
// result_end. The register scratch can be passed as no_reg in which case
SIZE_IN_WORDS = 1 << 2,
// Align the allocation to a multiple of kDoubleSize
DOUBLE_ALIGNMENT = 1 << 3,
- // Directly allocate in old pointer space
- PRETENURE_OLD_POINTER_SPACE = 1 << 4,
- // Directly allocate in old data space
- PRETENURE_OLD_DATA_SPACE = 1 << 5
+ // Directly allocate in old space
+ PRETENURE = 1 << 4,
};
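
A self-contained illustration of how the code generators above combine these flags; the values mirror this excerpt, while TAG_OBJECT and RESULT_CONTAINS_TOP are assumptions since they are not shown here:

  #include <cstdio>

  enum AllocationFlags {
    NO_ALLOCATION_FLAGS = 0,
    TAG_OBJECT = 1 << 0,           // assumed; not shown in this excerpt
    RESULT_CONTAINS_TOP = 1 << 1,  // assumed; not shown in this excerpt
    SIZE_IN_WORDS = 1 << 2,
    DOUBLE_ALIGNMENT = 1 << 3,
    PRETENURE = 1 << 4
  };

  int main() {
    // Plain enums define no operator|, hence the static_cast dance seen in
    // the code generators above.
    AllocationFlags flags = NO_ALLOCATION_FLAGS;
    flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
    flags = static_cast<AllocationFlags>(flags | PRETENURE);
    std::printf("pretenured: %d\n", (flags & PRETENURE) != 0);
    return 0;
  }
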
public:
static ExternalReference GetAllocationTopReference(
Isolate* isolate, AllocationFlags flags) {
- if ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) {
- return ExternalReference::old_pointer_space_allocation_top_address(
- isolate);
- } else if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
- return ExternalReference::old_data_space_allocation_top_address(isolate);
+ if ((flags & PRETENURE) != 0) {
+ return ExternalReference::old_space_allocation_top_address(isolate);
}
return ExternalReference::new_space_allocation_top_address(isolate);
}
static ExternalReference GetAllocationLimitReference(
Isolate* isolate, AllocationFlags flags) {
- if ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) {
- return ExternalReference::old_pointer_space_allocation_limit_address(
- isolate);
- } else if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
- return ExternalReference::old_data_space_allocation_limit_address(
- isolate);
+ if ((flags & PRETENURE) != 0) {
+ return ExternalReference::old_space_allocation_limit_address(isolate);
}
return ExternalReference::new_space_allocation_limit_address(isolate);
}
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
And(scratch2, result, Operand(kDoubleAlignmentMask));
Label aligned;
Branch(&aligned, eq, scratch2, Operand(zero_reg));
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
Branch(gc_required, Ugreater_equal, result, Operand(t9));
}
li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
And(scratch2, result, Operand(kDoubleAlignmentMask));
Label aligned;
Branch(&aligned, eq, scratch2, Operand(zero_reg));
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
Branch(gc_required, Ugreater_equal, result, Operand(t9));
}
li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
// ---------------------------------------------------------------------------
// Allocation support.
- // Allocate an object in new space or old pointer space. The object_size is
+ // Allocate an object in new space or old space. The object_size is
// specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. If the space is exhausted control continues at the gc_required
// label. The allocated object is returned in result. If the flag
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
// ---------------------------------------------------------------------------
// Allocation support.
- // Allocate an object in new space or old pointer space. The object_size is
+ // Allocate an object in new space or old space. The object_size is
// specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. If the space is exhausted control continues at the gc_required
// label. The allocated object is returned in result. If the flag
// Boundaries for testing for a fixed typed array.
FIRST_FIXED_TYPED_ARRAY_TYPE = FIXED_INT8_ARRAY_TYPE,
LAST_FIXED_TYPED_ARRAY_TYPE = FIXED_UINT8_CLAMPED_ARRAY_TYPE,
- // Boundary for promotion to old data space/old pointer space.
+ // Boundary for promotion to old space.
LAST_DATA_TYPE = FILLER_TYPE,
// Boundary for objects represented as JSReceiver (i.e. JSObject or JSProxy).
// Note that there is no range for JSObject or JSProxy, since their subtypes
// Foreign describes objects pointing from JavaScript to C structures.
-// Since they cannot contain references to JS HeapObjects they can be
-// placed in old_data_space.
class Foreign: public HeapObject {
public:
// [address]: field containing the address.
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
#if V8_TARGET_ARCH_PPC64
STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
#else
andi(scratch2, result, Operand(kDoubleAlignmentMask));
Label aligned;
beq(&aligned, cr0);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmpl(result, ip);
bge(gc_required);
}
if ((flags & DOUBLE_ALIGNMENT) != 0) {
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
#if V8_TARGET_ARCH_PPC64
STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
#else
andi(scratch2, result, Operand(kDoubleAlignmentMask));
Label aligned;
beq(&aligned, cr0);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmpl(result, ip);
bge(gc_required);
}
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space or old pointer space. The object_size is
+ // Allocate an object in new space or old space. The object_size is
// specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
// is passed. If the space is exhausted control continues at the gc_required
// label. The allocated object is returned in result. If the flag
Add(ExternalReference::get_make_code_young_function(isolate).address(),
"Code::MakeCodeYoung");
Add(ExternalReference::cpu_features().address(), "cpu_features");
- Add(ExternalReference::old_pointer_space_allocation_top_address(isolate)
- .address(),
- "Heap::OldPointerSpaceAllocationTopAddress");
- Add(ExternalReference::old_pointer_space_allocation_limit_address(isolate)
- .address(),
- "Heap::OldPointerSpaceAllocationLimitAddress");
- Add(ExternalReference::old_data_space_allocation_top_address(isolate)
- .address(),
- "Heap::OldDataSpaceAllocationTopAddress");
- Add(ExternalReference::old_data_space_allocation_limit_address(isolate)
- .address(),
- "Heap::OldDataSpaceAllocationLimitAddress");
+ Add(ExternalReference::old_space_allocation_top_address(isolate).address(),
+ "Heap::OldSpaceAllocationTopAddress");
+ Add(ExternalReference::old_space_allocation_limit_address(isolate).address(),
+ "Heap::OldSpaceAllocationLimitAddress");
Add(ExternalReference::allocation_sites_list_address(isolate).address(),
"Heap::allocation_sites_list_address()");
Add(ExternalReference::address_of_uint32_bias().address(), "uint32_bias");
// but that may change.
bool write_barrier_needed =
(current_object_address != NULL && source_space != NEW_SPACE &&
- source_space != CELL_SPACE && source_space != CODE_SPACE &&
- source_space != OLD_DATA_SPACE);
+ source_space != CELL_SPACE && source_space != CODE_SPACE);
while (current < limit) {
byte data = source_.Get();
switch (data) {
// This generates a case and a body for the new space (which has to do extra
// write barrier handling) and handles the other spaces with fall-through cases
// and one body.
-#define ALL_SPACES(where, how, within) \
- CASE_STATEMENT(where, how, within, NEW_SPACE) \
- CASE_BODY(where, how, within, NEW_SPACE) \
- CASE_STATEMENT(where, how, within, OLD_DATA_SPACE) \
- CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
- CASE_STATEMENT(where, how, within, CODE_SPACE) \
- CASE_STATEMENT(where, how, within, MAP_SPACE) \
- CASE_STATEMENT(where, how, within, CELL_SPACE) \
- CASE_STATEMENT(where, how, within, LO_SPACE) \
+#define ALL_SPACES(where, how, within) \
+ CASE_STATEMENT(where, how, within, NEW_SPACE) \
+ CASE_BODY(where, how, within, NEW_SPACE) \
+ CASE_STATEMENT(where, how, within, OLD_SPACE) \
+ CASE_STATEMENT(where, how, within, CODE_SPACE) \
+ CASE_STATEMENT(where, how, within, MAP_SPACE) \
+ CASE_STATEMENT(where, how, within, CELL_SPACE) \
+ CASE_STATEMENT(where, how, within, LO_SPACE) \
CASE_BODY(where, how, within, kAnyOldSpace)
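
Conceptually, one ALL_SPACES expansion leans on C case fall-through: a dedicated body for new space (write-barrier bookkeeping) and one shared body for every old-generation space. A compressed, runnable illustration with placeholder bodies:

  #include <cstdio>

  enum Space { NEW_SPACE, OLD_SPACE, CODE_SPACE, MAP_SPACE, CELL_SPACE, LO_SPACE };

  void ReadObjectFor(Space space) {
    switch (space) {
      case NEW_SPACE:
        std::puts("read object + record new-space slots (write barrier)");
        break;
      case OLD_SPACE:  // fall through: one body for all old-generation spaces
      case CODE_SPACE:
      case MAP_SPACE:
      case CELL_SPACE:
      case LO_SPACE:
        std::puts("read object, no write-barrier bookkeeping");
        break;
    }
  }

  int main() {
    ReadObjectFor(NEW_SPACE);
    ReadObjectFor(OLD_SPACE);
    return 0;
  }
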
#define FOUR_CASES(byte_code) \
AllocationSpace space = (allocation_size > Page::kMaxRegularHeapObjectSize)
? LO_SPACE
- : OLD_DATA_SPACE;
+ : OLD_SPACE;
SerializePrologue(space, allocation_size, map);
// Output the rest of the imaginary string.
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
}
int flags = 0;
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
// Align the next allocation. Storing the filler map without checking top
// is safe in new-space because the limit of the heap is aligned there.
DCHECK(kPointerSize * 2 == kDoubleSize);
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
// Make sure scratch is not clobbered by this function as it might be
// used in UpdateAllocationTopHelper later.
Label aligned;
testl(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
ExternalReference allocation_limit =
AllocationUtils::GetAllocationLimitReference(isolate(), flags);
cmpp(result, ExternalOperand(allocation_limit));
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space or old pointer space. If the given space
+ // Allocate an object in new space or old space. If the given space
// is exhausted control continues at the gc_required label. The allocated
// object is returned in result and end of the new object is returned in
// result_end. The register scratch can be passed as no_reg in which case
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+ flags = static_cast<AllocationFlags>(flags | PRETENURE);
}
if (instr->size()->IsConstantOperand()) {
int flags = AllocateDoubleAlignFlag::encode(
instr->hydrogen()->MustAllocateDoubleAligned());
- if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
- DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
- DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ if (instr->hydrogen()->IsOldSpaceAllocation()) {
DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
- flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+ flags = AllocateTargetSpace::update(flags, OLD_SPACE);
} else {
flags = AllocateTargetSpace::update(flags, NEW_SPACE);
}
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
// Align the next allocation. Storing the filler map without checking top is
// safe in new-space because the limit of the heap is aligned there.
if ((flags & DOUBLE_ALIGNMENT) != 0) {
- DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
Label aligned;
test(result, Immediate(kDoubleAlignmentMask));
j(zero, &aligned, Label::kNear);
- if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+ if ((flags & PRETENURE) != 0) {
cmp(result, Operand::StaticVariable(allocation_limit));
j(above_equal, gc_required);
}
// ---------------------------------------------------------------------------
// Allocation support
- // Allocate an object in new space or old pointer space. If the given space
+ // Allocate an object in new space or old space. If the given space
// is exhausted control continues at the gc_required label. The allocated
// object is returned in result and end of the new object is returned in
// result_end. The register scratch can be passed as no_reg in which case
heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked();
- // Old data space.
+ // Old space.
- SimulateFullSpace(heap->old_data_space());
+ SimulateFullSpace(heap->old_space());
heap->AllocateByteArray(100, TENURED).ToObjectChecked();
- // Old pointer space.
+ // Old space.
- SimulateFullSpace(heap->old_pointer_space());
+ SimulateFullSpace(heap->old_space());
heap->AllocateFixedArray(10000, TENURED).ToObjectChecked();
// Large object space.
static const int kLargeObjectSpaceFillerLength = 3 * (Page::kPageSize / 10);
static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
kLargeObjectSpaceFillerLength);
- DCHECK(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize());
+ DCHECK(kLargeObjectSpaceFillerSize > heap->old_space()->AreaSize());
while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
heap->AllocateFixedArray(
kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked();
"slice('abcdefghijklmnopqrstuvwxyz');"));
// Trigger GCs so that the newly allocated string moves to old gen.
- SimulateFullSpace(CcTest::heap()->old_pointer_space());
+ SimulateFullSpace(CcTest::heap()->old_space());
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
in_new_space = CcTest::heap()->InNewSpace(*istring);
- CHECK(in_new_space || CcTest::heap()->old_data_space()->Contains(*istring));
+ CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
CHECK_EQ(0, dispose_count);
}
- CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE
- : i::OLD_DATA_SPACE);
+ CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
CHECK_EQ(1, dispose_count);
}
i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
in_new_space = CcTest::heap()->InNewSpace(*istring);
- CHECK(in_new_space || CcTest::heap()->old_data_space()->Contains(*istring));
+ CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
CHECK_EQ(0, dispose_count);
}
- CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE
- : i::OLD_DATA_SPACE);
+ CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
CHECK_EQ(1, dispose_count);
}
CompileRun("'Romeo Montague ' + 'Juliet Capulet'")->ToString(isolate);
CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
CcTest::heap()->CollectAllAvailableGarbage();
- CHECK(CcTest::heap()->old_pointer_space()->Contains(
- *v8::Utils::OpenHandle(*cons)));
+ CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));
TestResource* resource = new TestResource(
AsciiToTwoByteString("Romeo Montague Juliet Capulet"));
CompileRun("'Romeo Montague ' + 'Juliet Capulet'")->ToString(isolate);
CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
CcTest::heap()->CollectAllAvailableGarbage();
- CHECK(CcTest::heap()->old_pointer_space()->Contains(
- *v8::Utils::OpenHandle(*cons)));
+ CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));
TestOneByteResource* resource =
new TestOneByteResource(i::StrDup("Romeo Montague Juliet Capulet"));
// Start a second old-space page so that the heap pointer added to the
// constant pool array ends up on an evacuation candidate page.
- Page* first_page = heap->old_data_space()->anchor()->next_page();
+ Page* first_page = heap->old_space()->anchor()->next_page();
{
HandleScope scope(isolate);
int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
Handle<HeapObject> temp =
factory->NewFixedDoubleArray(dummy_array_size / kDoubleSize, TENURED);
- CHECK(heap->InOldDataSpace(temp->address()));
+ CHECK(heap->InOldSpace(temp->address()));
Handle<HeapObject> heap_ptr =
factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
- CHECK(heap->InOldDataSpace(heap_ptr->address()));
+ CHECK(heap->InOldSpace(heap_ptr->address()));
CHECK(!first_page->Contains(heap_ptr->address()));
array->set(0, *heap_ptr);
array->set(1, *heap_ptr);
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
SimulateFullSpace(CcTest::heap()->new_space());
- SimulateFullSpace(CcTest::heap()->old_pointer_space());
+ SimulateFullSpace(CcTest::heap()->old_space());
// Calling Contains() should not cause GC ever.
int gc_count = isolate->heap()->gc_count();
// Simulate a full heap so that generating an identity hash code
// in subsequent calls will request GC.
SimulateFullSpace(CcTest::heap()->new_space());
- SimulateFullSpace(CcTest::heap()->old_pointer_space());
+ SimulateFullSpace(CcTest::heap()->old_space());
// Calling Lookup() should not cause GC ever.
CHECK(table->Lookup(key)->IsTheHole());
}
// Make sure the objects are promoted.
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
heap->CollectGarbage(NEW_SPACE);
CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
CHECK(!WeakPointerCleared);
// Mark-compact treats weak reference properly.
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
CHECK(WeakPointerCleared);
}
Handle<Object> objs[objs_count];
int next_objs_index = 0;
- // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
+ // Allocate a JS array to OLD_SPACE and NEW_SPACE
objs[next_objs_index++] = factory->NewJSArray(10);
objs[next_objs_index++] = factory->NewJSArray(10,
FAST_HOLEY_ELEMENTS,
// Step 4: clone jsobject, but force always allocate first to create a clone
- // in old pointer space.
+ // in old space.
- Address old_pointer_space_top = heap->old_pointer_space()->top();
+ Address old_space_top = heap->old_space()->top();
AlwaysAllocateScope aa_scope(isolate);
Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
JSObject* clone = JSObject::cast(clone_obj);
- if (clone->address() != old_pointer_space_top) {
+ if (clone->address() != old_space_top) {
// Alas, got allocated from free list, we cannot do checks.
return;
}
- CHECK(heap->old_pointer_space()->Contains(clone->address()));
+ CHECK(heap->old_space()->Contains(clone->address()));
}
}
CcTest::heap()->incremental_marking()->set_should_hurry(true);
- CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
+ CcTest::heap()->CollectGarbage(OLD_SPACE);
}
// Make sure next prototype is placed on an old-space evacuation candidate.
Handle<JSObject> prototype;
- PagedSpace* space = CcTest::heap()->old_pointer_space();
+ PagedSpace* space = CcTest::heap()->old_space();
{
AlwaysAllocateScope always_allocate(isolate);
SimulateFullSpace(space);
TEST(IdleNotificationFinishMarking) {
i::FLAG_allow_natives_syntax = true;
CcTest::InitializeVM();
- SimulateFullSpace(CcTest::heap()->old_pointer_space());
+ SimulateFullSpace(CcTest::heap()->old_space());
IncrementalMarking* marking = CcTest::heap()->incremental_marking();
marking->Abort();
marking->Start();
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
- CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
- CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
- CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
- CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
+ CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
+ CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(o->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*o));
FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
- CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
+ CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
if (!o->IsUnboxedDoubleField(idx2)) {
- CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
+ CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
} else {
CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
}
JSObject* inner_object =
reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
- CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
+ CHECK(CcTest::heap()->InOldSpace(inner_object));
if (!inner_object->IsUnboxedDoubleField(idx1)) {
- CHECK(
- CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
+ CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
} else {
CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
}
- CHECK(
- CcTest::heap()->InOldPointerSpace(inner_object->RawFastPropertyAt(idx2)));
+ CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
- CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(o->properties()));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(o->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
- CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
- CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
- CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
- CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
+ CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
+ CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
- CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
- CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
- CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
- CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
+ CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
+ CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
- CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
- CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
- CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
- CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
+ CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
+ CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
+ CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*o));
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
- CHECK(CcTest::heap()->InOldPointerSpace(*o));
+ CHECK(CcTest::heap()->InOldSpace(*o));
}
root = GetByName("root");
AddPropertyTo(0, root, "prop9");
- CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
+ CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
// Count number of live transitions after marking. Note that one transition
// is left, because 'o' still holds an instance of one transition target.
static const int number_of_test_pages = 20;
// Prepare many pages with low live-bytes count.
- PagedSpace* old_pointer_space = heap->old_pointer_space();
- CHECK_EQ(1, old_pointer_space->CountTotalPages());
+ PagedSpace* old_space = heap->old_space();
+ CHECK_EQ(1, old_space->CountTotalPages());
for (int i = 0; i < number_of_test_pages; i++) {
AlwaysAllocateScope always_allocate(isolate);
- SimulateFullSpace(old_pointer_space);
+ SimulateFullSpace(old_space);
factory->NewFixedArray(1, TENURED);
}
- CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
// Triggering one GC will cause a lot of garbage to be discovered but
// evenly spread across all allocated pages.
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
"triggered for preparation");
- CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
// Triggering subsequent GCs should cause at least half of the pages
// to be released to the OS after at most two cycles.
heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
- CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
- CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
+ CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
// Triggering a last-resort GC should cause all pages to be released to the
// OS so that other processes can seize the memory. If we get a failure here
// boots, but if the 20 small arrays don't fit on the first page then that's
// an indication that it is too small.
heap->CollectAllAvailableGarbage("triggered really hard");
- CHECK_EQ(1, old_pointer_space->CountTotalPages());
+ CHECK_EQ(1, old_space->CountTotalPages());
}
Handle<JSObject> o =
v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
- CHECK(heap->InOldPointerSpace(o->elements()));
- CHECK(heap->InOldPointerSpace(*o));
+ CHECK(heap->InOldSpace(o->elements()));
+ CHECK(heap->InOldSpace(*o));
Page* page = Page::FromAddress(o->elements()->address());
CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
- // Allocate fixed array in old pointer space so, that object allocated
- // afterwards would end at the end of the page.
+ // Allocate a fixed array in old space so that the object allocated
+ // afterwards ends at the end of the page.
{
- SimulateFullSpace(heap->old_pointer_space());
+ SimulateFullSpace(heap->old_space());
int padding_size = desired_offset - Page::kObjectStartOffset;
int padding_array_length =
(padding_size - FixedArray::kHeaderSize) / kPointerSize;
" weak_map.set(future_keys[i], i);"
"}");
heap->incremental_marking()->set_should_hurry(true);
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
}
Handle<String> name = factory->InternalizeUtf8String("testArray");
JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
}
Handle<WeakCell> weak_cell = AddRetainedMap(isolate, heap);
CHECK(!weak_cell->cleared());
for (int i = 0; i < n; i++) {
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
}
CHECK(!weak_cell->cleared());
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
CHECK(weak_cell->cleared());
}
Heap* heap = isolate->heap();
AddRetainedMap(isolate, heap);
Handle<Map> map = Map::Create(isolate, 1);
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
// Force GC in old space on next addition of retained map.
Map::WeakCellForMap(map);
SimulateFullSpace(CcTest::heap()->new_space());
for (int i = 0; i < 10; i++) {
heap->AddRetainedMap(map);
}
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
}
heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
// Array now sits in the old space
- CHECK(heap->InSpace(*array, OLD_POINTER_SPACE));
+ CHECK(heap->InSpace(*array, OLD_SPACE));
}
CHECK(heap->InSpace(*array, NEW_SPACE));
// Simulate a full old space to make promotion fail.
- SimulateFullSpace(heap->old_pointer_space());
+ SimulateFullSpace(heap->old_space());
// Call mark compact GC, and it should pass.
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
}
Handle<GlobalObject> global(isolate->context()->global_object());
// call mark-compact when heap is empty
- heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 1");
+ heap->CollectGarbage(OLD_SPACE, "trigger 1");
// keep allocating garbage in new space until it fails
const int arraysize = 100;
factory->NewJSObject(function);
}
- heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 4");
+ heap->CollectGarbage(OLD_SPACE, "trigger 4");
{ HandleScope scope(isolate);
Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
}
- heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 5");
+ heap->CollectGarbage(OLD_SPACE, "trigger 5");
{ HandleScope scope(isolate);
Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
g2c1.location());
}
// Do a full GC
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
// All object should be alive.
CHECK_EQ(0, NumberOfWeakCalls);
g2c1.location());
}
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
// All objects should be gone. 5 global handles in total.
CHECK_EQ(5, NumberOfWeakCalls);
reinterpret_cast<void*>(&g2c1_and_id),
&WeakPointerCallback);
- heap->CollectGarbage(OLD_POINTER_SPACE);
+ heap->CollectGarbage(OLD_SPACE);
CHECK_EQ(7, NumberOfWeakCalls);
}
Heap* heap = isolate->heap();
CHECK(heap->InSpace(
*v8::Utils::OpenHandle(*CompileRun("a")->ToString(CcTest::isolate())),
- OLD_DATA_SPACE));
+ OLD_SPACE));
CHECK(heap->InSpace(
*v8::Utils::OpenHandle(*CompileRun("b")->ToString(CcTest::isolate())),
- OLD_DATA_SPACE));
+ OLD_SPACE));
CHECK(heap->InSpace(
*v8::Utils::OpenHandle(*CompileRun("c")->ToString(CcTest::isolate())),
- OLD_DATA_SPACE));
+ OLD_SPACE));
delete cache;
source_a.Dispose();
heap->MaxExecutableSize()));
int total_pages = 0;
- OldSpace faked_space(heap, heap->MaxReserved(), OLD_POINTER_SPACE,
- NOT_EXECUTABLE);
+ OldSpace faked_space(heap, heap->MaxReserved(), OLD_SPACE, NOT_EXECUTABLE);
Page* first_page = memory_allocator->AllocatePage(
faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
heap->MaxExecutableSize()));
TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
- OldSpace* s = new OldSpace(heap, heap->MaxOldGenerationSize(),
- OLD_POINTER_SPACE, NOT_EXECUTABLE);
+ OldSpace* s = new OldSpace(heap, heap->MaxOldGenerationSize(), OLD_SPACE,
+ NOT_EXECUTABLE);
CHECK(s != NULL);
CHECK(s->SetUp());
// a pointer to a from semi-space.
CcTest::heap()->CollectGarbage(i::NEW_SPACE, "boom");
- CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
+ CHECK(isolate->heap()->old_space()->Contains(*obj));
CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
}
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
- PagedSpace* old_pointer_space = heap->old_pointer_space();
+ PagedSpace* old_space = heap->old_space();
// The plan: create |obj_value| in old space and ensure that it is allocated
// on an evacuation candidate page, create |obj| with double and tagged fields
{
AlwaysAllocateScope always_allocate(isolate);
// Make sure |obj_value| is placed on an old-space evacuation candidate.
- SimulateFullSpace(old_pointer_space);
+ SimulateFullSpace(old_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
}
heap->CollectGarbage(i::NEW_SPACE); // in survivor space now
heap->CollectGarbage(i::NEW_SPACE); // in old gen now
- CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
- CHECK(isolate->heap()->old_pointer_space()->Contains(*obj_value));
+ CHECK(isolate->heap()->old_space()->Contains(*obj));
+ CHECK(isolate->heap()->old_space()->Contains(*obj_value));
CHECK(MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
- heap->CollectGarbage(i::OLD_POINTER_SPACE, "boom");
+ heap->CollectGarbage(i::OLD_SPACE, "boom");
// |obj_value| must be evacuated.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in survivor space now
CcTest::heap()->CollectGarbage(i::NEW_SPACE); // in old gen now
- CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
+ CHECK(isolate->heap()->old_space()->Contains(*obj));
// Create temp object in the new space.
Handle<JSArray> temp = factory->NewJSArray(FAST_ELEMENTS, NOT_TENURED);
AlwaysAllocateScope aa_scope(isolate);
Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
Handle<JSObject> clone(JSObject::cast(clone_obj));
- CHECK(heap->old_pointer_space()->Contains(clone->address()));
+ CHECK(heap->old_space()->Contains(clone->address()));
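// AlwaysAllocateScope forces the copy to succeed without triggering a
// GC, so the tenured clone can be checked directly against the single
// old_space() rather than the former old_pointer_space().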
CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
- PagedSpace* old_pointer_space = heap->old_pointer_space();
+ PagedSpace* old_space = heap->old_space();
// The plan: create |obj| by |map| in old space, create |obj_value| in
// new space and ensure that write barrier is triggered when |obj_value| is
{
AlwaysAllocateScope always_allocate(isolate);
obj = factory->NewJSObjectFromMap(map, TENURED, false);
- CHECK(old_pointer_space->Contains(*obj));
+ CHECK(old_space->Contains(*obj));
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS);
}
Isolate* isolate = CcTest::i_isolate();
Factory* factory = isolate->factory();
Heap* heap = CcTest::heap();
- PagedSpace* old_pointer_space = heap->old_pointer_space();
+ PagedSpace* old_space = heap->old_space();
// The plan: create |obj| by |map| in old space, create |obj_value| in
// old space and ensure it ends up on an evacuation candidate page. Start
{
AlwaysAllocateScope always_allocate(isolate);
obj = factory->NewJSObjectFromMap(map, TENURED, false);
- CHECK(old_pointer_space->Contains(*obj));
+ CHECK(old_space->Contains(*obj));
// Make sure |obj_value| is placed on an old-space evacuation candidate.
- SimulateFullSpace(old_pointer_space);
+ SimulateFullSpace(old_space);
obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
ec_page = Page::FromAddress(obj_value->address());
CHECK_NE(ec_page, Page::FromAddress(obj->address()));
obj->RawFastDoublePropertyAtPut(double_field_index, boom_value);
// Trigger GC to evacuate all candidates.
- CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE, "boom");
+ CcTest::heap()->CollectGarbage(OLD_SPACE, "boom");
// Ensure that the values are still there and correct.
CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));
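// The evacuation-candidate recipe above, condensed (helper names as in
// this diff; the full tests also force |ec_page| to be selected as a
// candidate before the GC runs):
//
//   SimulateFullSpace(heap->old_space());  // next TENURED alloc opens a page
//   obj_value = factory->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
//   Page* ec_page = Page::FromAddress(obj_value->address());
//   heap->CollectGarbage(OLD_SPACE, "boom");
//   CHECK(!MarkCompactCollector::IsOnEvacuationCandidate(*obj_value));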
Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
// Start second old-space page so that values land on an evacuation candidate.
- Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+ Page* first_page = heap->old_space()->anchor()->next_page();
int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
factory->function_string());
// Start second old-space page so that keys land on an evacuation candidate.
- Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+ Page* first_page = heap->old_space()->anchor()->next_page();
int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
// Start second old-space page so that values land on an evacuation candidate.
- Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+ Page* first_page = heap->old_space()->anchor()->next_page();
int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
factory->function_string());
// Start second old-space page so that keys land on an evacuation candidate.
- Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+ Page* first_page = heap->old_space()->anchor()->next_page();
int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
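// The page-padding idiom above, condensed: a TENURED filler sized just
// under a page pushes subsequent tenured allocations onto a second
// old-space page (the 92 * KB slack is an empirical constant taken from
// these tests):
//
//   Page* first_page = heap->old_space()->anchor()->next_page();
//   int filler = Page::kMaxRegularHeapObjectSize - 92 * KB;
//   factory->NewFixedArray(filler / kPointerSize, TENURED);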
COMMENT_RE = re.compile(r"^C (0x[0-9a-fA-F]+) (.*)$")
PAGEADDRESS_RE = re.compile(
- r"^P (mappage|pointerpage|datapage) (0x[0-9a-fA-F]+)$")
+ r"^P (mappage|oldpage) (0x[0-9a-fA-F]+)$")
class InspectionInfo(object):
self.reader = reader
self.heap = heap
self.known_first_map_page = 0
- self.known_first_data_page = 0
- self.known_first_pointer_page = 0
+ self.known_first_old_page = 0
def __getattr__(self, name):
"""An InspectionPadawan can be used instead of V8Heap, even though
def IsInKnownOldSpace(self, tagged_address):
page_address = tagged_address & ~self.heap.PageAlignmentMask()
- return page_address in [self.known_first_data_page,
- self.known_first_pointer_page]
+ return page_address == self.known_first_old_page
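# Only one "first old-space page" is left to remember; masking off the
# low page-offset bits of any tagged address recovers its page base,
# e.g. 0x0820d159 & ~0xfffff == 0x08200000, assuming the usual 1 MB
# pages.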
def ContainingKnownOldSpaceName(self, tagged_address):
page_address = tagged_address & ~self.heap.PageAlignmentMask()
- if page_address == self.known_first_data_page: return "OLD_DATA_SPACE"
- if page_address == self.known_first_pointer_page: return "OLD_POINTER_SPACE"
+ if page_address == self.known_first_old_page: return "OLD_SPACE"
return None
def SenseObject(self, tagged_address):
def PrintKnowledge(self):
print " known_first_map_page = %s\n"\
- " known_first_data_page = %s\n"\
- " known_first_pointer_page = %s" % (
+ " known_first_old_page = %s" % (
self.reader.FormatIntPtr(self.known_first_map_page),
- self.reader.FormatIntPtr(self.known_first_data_page),
- self.reader.FormatIntPtr(self.known_first_pointer_page))
+ self.reader.FormatIntPtr(self.known_first_old_page))
WEB_HEADER = """
<!DOCTYPE html>
self.padawan = InspectionPadawan(self.reader, self.heap)
self.comments = InspectionInfo(minidump_name, self.reader)
- self.padawan.known_first_data_page = (
- self.comments.get_page_address("datapage"))
+ self.padawan.known_first_old_page = (
+ self.comments.get_page_address("oldpage"))
self.padawan.known_first_map_page = (
self.comments.get_page_address("mappage"))
- self.padawan.known_first_pointer_page = (
- self.comments.get_page_address("pointerpage"))
def set_comment(self, straddress, comment):
try:
def set_page_address(self, kind, straddress):
try:
address = int(straddress, 0)
- if kind == "datapage":
- self.padawan.known_first_data_page = address
+ if kind == "oldpage":
+ self.padawan.known_first_old_page = address
elif kind == "mappage":
self.padawan.known_first_map_page = address
- elif kind == "pointerpage":
- self.padawan.known_first_pointer_page = address
self.comments.save_page_address(kind, address)
except ValueError:
print "Invalid address"
page_address = address & ~self.heap.PageAlignmentMask()
f.write("Page info: \n")
- self.output_page_info(f, "data", self.padawan.known_first_data_page, \
+ self.output_page_info(f, "old", self.padawan.known_first_old_page, \
page_address)
self.output_page_info(f, "map", self.padawan.known_first_map_page, \
page_address)
- self.output_page_info(f, "pointer", \
- self.padawan.known_first_pointer_page, \
- page_address)
if not self.reader.IsValidAddress(address):
f.write("<h3>The contents at address %s not found in the dump.</h3>" % \
"""
self.padawan.PrintKnowledge()
- def do_kd(self, address):
+ def do_ko(self, address):
"""
Teach V8 heap layout information to the inspector. Set the first
- data-space page by passing any pointer into that page.
+ old-space page by passing any pointer into that page.
"""
address = int(address, 16)
page_address = address & ~self.heap.PageAlignmentMask()
- self.padawan.known_first_data_page = page_address
+ self.padawan.known_first_old_page = page_address
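# Usage at the inspector prompt mirrors the former kd/kp commands
# (address illustrative): "ko 0x08200010" stores the page base of the
# first old-space page; any pointer into that page will do.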
def do_km(self, address):
"""
page_address = address & ~self.heap.PageAlignmentMask()
self.padawan.known_first_map_page = page_address
- def do_kp(self, address):
- """
- Teach V8 heap layout information to the inspector. Set the first
- pointer-space page by passing any pointer into that page.
- """
- address = int(address, 16)
- page_address = address & ~self.heap.PageAlignmentMask()
- self.padawan.known_first_pointer_page = page_address
-
def do_list(self, smth):
"""
List all available memory regions.
const int new_space_size = READ_FIELD(1);
const int new_space_capacity = READ_FIELD(2);
- const int old_pointer_space_size = READ_FIELD(3);
- const int old_pointer_space_capacity = READ_FIELD(4);
- const int old_data_space_size = READ_FIELD(5);
- const int old_data_space_capacity = READ_FIELD(6);
- const int code_space_size = READ_FIELD(7);
- const int code_space_capacity = READ_FIELD(8);
- const int map_space_size = READ_FIELD(9);
- const int map_space_capacity = READ_FIELD(10);
- const int cell_space_size = READ_FIELD(11);
- const int cell_space_capacity = READ_FIELD(12);
- const int lo_space_size = READ_FIELD(13);
- const int global_handle_count = READ_FIELD(14);
- const int weak_global_handle_count = READ_FIELD(15);
- const int pending_global_handle_count = READ_FIELD(16);
- const int near_death_global_handle_count = READ_FIELD(17);
- const int destroyed_global_handle_count = READ_FIELD(18);
- const int memory_allocator_size = READ_FIELD(19);
- const int memory_allocator_capacity = READ_FIELD(20);
- const int os_error = READ_FIELD(23);
+ const int old_space_size = READ_FIELD(3);
+ const int old_space_capacity = READ_FIELD(4);
+ const int code_space_size = READ_FIELD(5);
+ const int code_space_capacity = READ_FIELD(6);
+ const int map_space_size = READ_FIELD(7);
+ const int map_space_capacity = READ_FIELD(8);
+ const int cell_space_size = READ_FIELD(9);
+ const int cell_space_capacity = READ_FIELD(10);
+ const int lo_space_size = READ_FIELD(11);
+ const int global_handle_count = READ_FIELD(12);
+ const int weak_global_handle_count = READ_FIELD(13);
+ const int pending_global_handle_count = READ_FIELD(14);
+ const int near_death_global_handle_count = READ_FIELD(15);
+ const int destroyed_global_handle_count = READ_FIELD(16);
+ const int memory_allocator_size = READ_FIELD(17);
+ const int memory_allocator_capacity = READ_FIELD(18);
+ const int os_error = READ_FIELD(19);
#undef READ_FIELD
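// The indices above are renumbered to match the merged-old-space
// HeapStats layout wired up earlier in this diff; a decoder that kept
// the old numbering would misattribute every statistic after the
// old-space size/capacity pair.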
int objects_per_type[v8::internal::LAST_TYPE + 1] = {0};
printf("\t%-25s\t% 10.3f MB\n", #stat ":", toM(stat));
PRINT_MB_STAT(new_space_size);
PRINT_MB_STAT(new_space_capacity);
- PRINT_MB_STAT(old_pointer_space_size);
- PRINT_MB_STAT(old_pointer_space_capacity);
- PRINT_MB_STAT(old_data_space_size);
- PRINT_MB_STAT(old_data_space_capacity);
+ PRINT_MB_STAT(old_space_size);
+ PRINT_MB_STAT(old_space_capacity);
PRINT_MB_STAT(code_space_size);
PRINT_MB_STAT(code_space_capacity);
PRINT_MB_STAT(map_space_size);
# List of known V8 maps.
KNOWN_MAPS = {
0x08081: (137, "ByteArrayMap"),
- 0x080a9: (129, "MetaMap"),
- 0x080d1: (131, "NullMap"),
- 0x080f9: (131, "UndefinedMap"),
- 0x08121: (180, "FixedArrayMap"),
- 0x08149: (4, "OneByteInternalizedStringMap"),
- 0x08171: (134, "HeapNumberMap"),
- 0x08199: (138, "FreeSpaceMap"),
- 0x081c1: (158, "OnePointerFillerMap"),
- 0x081e9: (158, "TwoPointerFillerMap"),
- 0x08211: (131, "TheHoleMap"),
- 0x08239: (131, "BooleanMap"),
- 0x08261: (131, "UninitializedMap"),
- 0x08289: (131, "ExceptionMap"),
- 0x082b1: (132, "CellMap"),
- 0x082d9: (133, "GlobalPropertyCellMap"),
- 0x08301: (182, "SharedFunctionInfoMap"),
- 0x08329: (135, "MutableHeapNumberMap"),
- 0x08351: (180, "NativeContextMap"),
- 0x08379: (130, "CodeMap"),
- 0x083a1: (180, "ScopeInfoMap"),
- 0x083c9: (180, "FixedCOWArrayMap"),
- 0x083f1: (157, "FixedDoubleArrayMap"),
- 0x08419: (181, "ConstantPoolArrayMap"),
- 0x08441: (183, "WeakCellMap"),
- 0x08469: (131, "NoInterceptorResultSentinelMap"),
- 0x08491: (180, "HashTableMap"),
- 0x084b9: (180, "OrderedHashTableMap"),
- 0x084e1: (131, "ArgumentsMarkerMap"),
- 0x08509: (131, "TerminationExceptionMap"),
- 0x08531: (128, "SymbolMap"),
- 0x08559: (64, "StringMap"),
- 0x08581: (68, "OneByteStringMap"),
- 0x085a9: (65, "ConsStringMap"),
- 0x085d1: (69, "ConsOneByteStringMap"),
- 0x085f9: (67, "SlicedStringMap"),
- 0x08621: (71, "SlicedOneByteStringMap"),
- 0x08649: (66, "ExternalStringMap"),
- 0x08671: (74, "ExternalStringWithOneByteDataMap"),
- 0x08699: (70, "ExternalOneByteStringMap"),
- 0x086c1: (70, "NativeSourceStringMap"),
- 0x086e9: (82, "ShortExternalStringMap"),
- 0x08711: (90, "ShortExternalStringWithOneByteDataMap"),
- 0x08739: (0, "InternalizedStringMap"),
- 0x08761: (2, "ExternalInternalizedStringMap"),
- 0x08789: (10, "ExternalInternalizedStringWithOneByteDataMap"),
- 0x087b1: (6, "ExternalOneByteInternalizedStringMap"),
- 0x087d9: (18, "ShortExternalInternalizedStringMap"),
- 0x08801: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
- 0x08829: (22, "ShortExternalOneByteInternalizedStringMap"),
- 0x08851: (86, "ShortExternalOneByteStringMap"),
- 0x08879: (139, "ExternalInt8ArrayMap"),
- 0x088a1: (140, "ExternalUint8ArrayMap"),
- 0x088c9: (141, "ExternalInt16ArrayMap"),
- 0x088f1: (142, "ExternalUint16ArrayMap"),
- 0x08919: (143, "ExternalInt32ArrayMap"),
- 0x08941: (144, "ExternalUint32ArrayMap"),
- 0x08969: (145, "ExternalFloat32ArrayMap"),
- 0x08991: (146, "ExternalFloat64ArrayMap"),
- 0x089b9: (147, "ExternalUint8ClampedArrayMap"),
- 0x089e1: (149, "FixedUint8ArrayMap"),
- 0x08a09: (148, "FixedInt8ArrayMap"),
- 0x08a31: (151, "FixedUint16ArrayMap"),
- 0x08a59: (150, "FixedInt16ArrayMap"),
- 0x08a81: (153, "FixedUint32ArrayMap"),
- 0x08aa9: (152, "FixedInt32ArrayMap"),
- 0x08ad1: (154, "FixedFloat32ArrayMap"),
- 0x08af9: (155, "FixedFloat64ArrayMap"),
- 0x08b21: (156, "FixedUint8ClampedArrayMap"),
- 0x08b49: (180, "SloppyArgumentsElementsMap"),
- 0x08b71: (180, "FunctionContextMap"),
- 0x08b99: (180, "CatchContextMap"),
- 0x08bc1: (180, "WithContextMap"),
- 0x08be9: (180, "BlockContextMap"),
- 0x08c11: (180, "ModuleContextMap"),
- 0x08c39: (180, "ScriptContextMap"),
- 0x08c61: (180, "ScriptContextTableMap"),
- 0x08c89: (187, "JSMessageObjectMap"),
- 0x08cb1: (136, "ForeignMap"),
- 0x08cd9: (189, "NeanderMap"),
- 0x08d01: (170, "AllocationSiteMap"),
- 0x08d29: (171, "AllocationMementoMap"),
- 0x08d51: (174, "PolymorphicCodeCacheMap"),
- 0x08d79: (172, "ScriptMap"),
- 0x08dc9: (189, "ExternalMap"),
- 0x08f09: (177, "BoxMap"),
- 0x08f31: (161, "ExecutableAccessorInfoMap"),
- 0x08f59: (162, "AccessorPairMap"),
- 0x08f81: (163, "AccessCheckInfoMap"),
- 0x08fa9: (164, "InterceptorInfoMap"),
- 0x08fd1: (165, "CallHandlerInfoMap"),
- 0x08ff9: (166, "FunctionTemplateInfoMap"),
- 0x09021: (167, "ObjectTemplateInfoMap"),
- 0x09049: (169, "TypeSwitchInfoMap"),
- 0x09071: (173, "CodeCacheMap"),
- 0x09099: (175, "TypeFeedbackInfoMap"),
- 0x090c1: (176, "AliasedArgumentsEntryMap"),
- 0x090e9: (178, "DebugInfoMap"),
- 0x09111: (179, "BreakPointInfoMap"),
+ 0x080ad: (129, "MetaMap"),
+ 0x080d9: (131, "NullMap"),
+ 0x08105: (180, "FixedArrayMap"),
+ 0x08131: (4, "OneByteInternalizedStringMap"),
+ 0x0815d: (183, "WeakCellMap"),
+ 0x08189: (131, "UndefinedMap"),
+ 0x081b5: (134, "HeapNumberMap"),
+ 0x081e1: (138, "FreeSpaceMap"),
+ 0x0820d: (158, "OnePointerFillerMap"),
+ 0x08239: (158, "TwoPointerFillerMap"),
+ 0x08265: (131, "TheHoleMap"),
+ 0x08291: (131, "BooleanMap"),
+ 0x082bd: (131, "UninitializedMap"),
+ 0x082e9: (131, "ExceptionMap"),
+ 0x08315: (132, "CellMap"),
+ 0x08341: (133, "GlobalPropertyCellMap"),
+ 0x0836d: (182, "SharedFunctionInfoMap"),
+ 0x08399: (135, "MutableHeapNumberMap"),
+ 0x083c5: (180, "NativeContextMap"),
+ 0x083f1: (130, "CodeMap"),
+ 0x0841d: (180, "ScopeInfoMap"),
+ 0x08449: (180, "FixedCOWArrayMap"),
+ 0x08475: (157, "FixedDoubleArrayMap"),
+ 0x084a1: (181, "ConstantPoolArrayMap"),
+ 0x084cd: (131, "NoInterceptorResultSentinelMap"),
+ 0x084f9: (180, "HashTableMap"),
+ 0x08525: (180, "OrderedHashTableMap"),
+ 0x08551: (131, "ArgumentsMarkerMap"),
+ 0x0857d: (131, "TerminationExceptionMap"),
+ 0x085a9: (128, "SymbolMap"),
+ 0x085d5: (64, "StringMap"),
+ 0x08601: (68, "OneByteStringMap"),
+ 0x0862d: (65, "ConsStringMap"),
+ 0x08659: (69, "ConsOneByteStringMap"),
+ 0x08685: (67, "SlicedStringMap"),
+ 0x086b1: (71, "SlicedOneByteStringMap"),
+ 0x086dd: (66, "ExternalStringMap"),
+ 0x08709: (74, "ExternalStringWithOneByteDataMap"),
+ 0x08735: (70, "ExternalOneByteStringMap"),
+ 0x08761: (70, "NativeSourceStringMap"),
+ 0x0878d: (82, "ShortExternalStringMap"),
+ 0x087b9: (90, "ShortExternalStringWithOneByteDataMap"),
+ 0x087e5: (0, "InternalizedStringMap"),
+ 0x08811: (2, "ExternalInternalizedStringMap"),
+ 0x0883d: (10, "ExternalInternalizedStringWithOneByteDataMap"),
+ 0x08869: (6, "ExternalOneByteInternalizedStringMap"),
+ 0x08895: (18, "ShortExternalInternalizedStringMap"),
+ 0x088c1: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
+ 0x088ed: (22, "ShortExternalOneByteInternalizedStringMap"),
+ 0x08919: (86, "ShortExternalOneByteStringMap"),
+ 0x08945: (139, "ExternalInt8ArrayMap"),
+ 0x08971: (140, "ExternalUint8ArrayMap"),
+ 0x0899d: (141, "ExternalInt16ArrayMap"),
+ 0x089c9: (142, "ExternalUint16ArrayMap"),
+ 0x089f5: (143, "ExternalInt32ArrayMap"),
+ 0x08a21: (144, "ExternalUint32ArrayMap"),
+ 0x08a4d: (145, "ExternalFloat32ArrayMap"),
+ 0x08a79: (146, "ExternalFloat64ArrayMap"),
+ 0x08aa5: (147, "ExternalUint8ClampedArrayMap"),
+ 0x08ad1: (149, "FixedUint8ArrayMap"),
+ 0x08afd: (148, "FixedInt8ArrayMap"),
+ 0x08b29: (151, "FixedUint16ArrayMap"),
+ 0x08b55: (150, "FixedInt16ArrayMap"),
+ 0x08b81: (153, "FixedUint32ArrayMap"),
+ 0x08bad: (152, "FixedInt32ArrayMap"),
+ 0x08bd9: (154, "FixedFloat32ArrayMap"),
+ 0x08c05: (155, "FixedFloat64ArrayMap"),
+ 0x08c31: (156, "FixedUint8ClampedArrayMap"),
+ 0x08c5d: (180, "SloppyArgumentsElementsMap"),
+ 0x08c89: (180, "FunctionContextMap"),
+ 0x08cb5: (180, "CatchContextMap"),
+ 0x08ce1: (180, "WithContextMap"),
+ 0x08d0d: (180, "BlockContextMap"),
+ 0x08d39: (180, "ModuleContextMap"),
+ 0x08d65: (180, "ScriptContextMap"),
+ 0x08d91: (180, "ScriptContextTableMap"),
+ 0x08dbd: (187, "JSMessageObjectMap"),
+ 0x08de9: (136, "ForeignMap"),
+ 0x08e15: (189, "NeanderMap"),
+ 0x08e41: (189, "ExternalMap"),
+ 0x08e6d: (170, "AllocationSiteMap"),
+ 0x08e99: (171, "AllocationMementoMap"),
+ 0x08ec5: (174, "PolymorphicCodeCacheMap"),
+ 0x08ef1: (172, "ScriptMap"),
+ 0x0907d: (177, "BoxMap"),
+ 0x090a9: (161, "ExecutableAccessorInfoMap"),
+ 0x090d5: (162, "AccessorPairMap"),
+ 0x09101: (163, "AccessCheckInfoMap"),
+ 0x0912d: (164, "InterceptorInfoMap"),
+ 0x09159: (165, "CallHandlerInfoMap"),
+ 0x09185: (166, "FunctionTemplateInfoMap"),
+ 0x091b1: (167, "ObjectTemplateInfoMap"),
+ 0x091dd: (169, "TypeSwitchInfoMap"),
+ 0x09209: (173, "CodeCacheMap"),
+ 0x09235: (175, "TypeFeedbackInfoMap"),
+ 0x09261: (176, "AliasedArgumentsEntryMap"),
+ 0x0928d: (178, "DebugInfoMap"),
+ 0x092b9: (179, "BreakPointInfoMap"),
}
# List of known V8 objects.
KNOWN_OBJECTS = {
- ("OLD_POINTER_SPACE", 0x08081): "NullValue",
- ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
- ("OLD_POINTER_SPACE", 0x080a1): "TheHoleValue",
- ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
- ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
- ("OLD_POINTER_SPACE", 0x080d1): "UninitializedValue",
- ("OLD_POINTER_SPACE", 0x080e1): "Exception",
- ("OLD_POINTER_SPACE", 0x080f1): "NoInterceptorResultSentinel",
- ("OLD_POINTER_SPACE", 0x08101): "ArgumentsMarker",
- ("OLD_POINTER_SPACE", 0x08111): "NumberStringCache",
- ("OLD_POINTER_SPACE", 0x08919): "SingleCharacterStringCache",
- ("OLD_POINTER_SPACE", 0x08d21): "StringSplitCache",
- ("OLD_POINTER_SPACE", 0x09129): "RegExpMultipleCache",
- ("OLD_POINTER_SPACE", 0x09531): "TerminationException",
- ("OLD_POINTER_SPACE", 0x09541): "MessageListeners",
- ("OLD_POINTER_SPACE", 0x0955d): "CodeStubs",
- ("OLD_POINTER_SPACE", 0x0f555): "NonMonomorphicCache",
- ("OLD_POINTER_SPACE", 0x0fb69): "PolymorphicCodeCache",
- ("OLD_POINTER_SPACE", 0x0fb71): "NativesSourceCache",
- ("OLD_POINTER_SPACE", 0x0fbe1): "EmptyScript",
- ("OLD_POINTER_SPACE", 0x0fc1d): "IntrinsicFunctionNames",
- ("OLD_POINTER_SPACE", 0x15c39): "ObservationState",
- ("OLD_POINTER_SPACE", 0x15c45): "SymbolRegistry",
- ("OLD_POINTER_SPACE", 0x16601): "EmptySlowElementDictionary",
- ("OLD_POINTER_SPACE", 0x1679d): "AllocationSitesScratchpad",
- ("OLD_POINTER_SPACE", 0x43e61): "StringTable",
- ("OLD_DATA_SPACE", 0x08081): "EmptyDescriptorArray",
- ("OLD_DATA_SPACE", 0x08089): "EmptyFixedArray",
- ("OLD_DATA_SPACE", 0x080a9): "NanValue",
- ("OLD_DATA_SPACE", 0x08159): "EmptyByteArray",
- ("OLD_DATA_SPACE", 0x08161): "EmptyConstantPoolArray",
- ("OLD_DATA_SPACE", 0x08241): "EmptyExternalInt8Array",
- ("OLD_DATA_SPACE", 0x0824d): "EmptyExternalUint8Array",
- ("OLD_DATA_SPACE", 0x08259): "EmptyExternalInt16Array",
- ("OLD_DATA_SPACE", 0x08265): "EmptyExternalUint16Array",
- ("OLD_DATA_SPACE", 0x08271): "EmptyExternalInt32Array",
- ("OLD_DATA_SPACE", 0x0827d): "EmptyExternalUint32Array",
- ("OLD_DATA_SPACE", 0x08289): "EmptyExternalFloat32Array",
- ("OLD_DATA_SPACE", 0x08295): "EmptyExternalFloat64Array",
- ("OLD_DATA_SPACE", 0x082a1): "EmptyExternalUint8ClampedArray",
- ("OLD_DATA_SPACE", 0x082ad): "EmptyFixedUint8Array",
- ("OLD_DATA_SPACE", 0x082b5): "EmptyFixedInt8Array",
- ("OLD_DATA_SPACE", 0x082bd): "EmptyFixedUint16Array",
- ("OLD_DATA_SPACE", 0x082c5): "EmptyFixedInt16Array",
- ("OLD_DATA_SPACE", 0x082cd): "EmptyFixedUint32Array",
- ("OLD_DATA_SPACE", 0x082d5): "EmptyFixedInt32Array",
- ("OLD_DATA_SPACE", 0x082dd): "EmptyFixedFloat32Array",
- ("OLD_DATA_SPACE", 0x082e5): "EmptyFixedFloat64Array",
- ("OLD_DATA_SPACE", 0x082ed): "EmptyFixedUint8ClampedArray",
- ("OLD_DATA_SPACE", 0x082f5): "InfinityValue",
- ("OLD_DATA_SPACE", 0x08301): "MinusZeroValue",
- ("CODE_SPACE", 0x15fa1): "JsEntryCode",
- ("CODE_SPACE", 0x243c1): "JsConstructEntryCode",
+ ("OLD_SPACE", 0x08081): "NullValue",
+ ("OLD_SPACE", 0x08091): "EmptyDescriptorArray",
+ ("OLD_SPACE", 0x08099): "EmptyFixedArray",
+ ("OLD_SPACE", 0x080bd): "UndefinedValue",
+ ("OLD_SPACE", 0x080e5): "NanValue",
+ ("OLD_SPACE", 0x080f1): "TheHoleValue",
+ ("OLD_SPACE", 0x08111): "TrueValue",
+ ("OLD_SPACE", 0x08131): "FalseValue",
+ ("OLD_SPACE", 0x08155): "UninitializedValue",
+ ("OLD_SPACE", 0x08181): "Exception",
+ ("OLD_SPACE", 0x081a9): "NoInterceptorResultSentinel",
+ ("OLD_SPACE", 0x081e5): "EmptyByteArray",
+ ("OLD_SPACE", 0x081ed): "EmptyConstantPoolArray",
+ ("OLD_SPACE", 0x081fd): "ArgumentsMarker",
+ ("OLD_SPACE", 0x08229): "NumberStringCache",
+ ("OLD_SPACE", 0x08a31): "SingleCharacterStringCache",
+ ("OLD_SPACE", 0x08ec9): "StringSplitCache",
+ ("OLD_SPACE", 0x092d1): "RegExpMultipleCache",
+ ("OLD_SPACE", 0x096d9): "TerminationException",
+ ("OLD_SPACE", 0x0970d): "EmptyExternalInt8Array",
+ ("OLD_SPACE", 0x09719): "EmptyExternalUint8Array",
+ ("OLD_SPACE", 0x09725): "EmptyExternalInt16Array",
+ ("OLD_SPACE", 0x09731): "EmptyExternalUint16Array",
+ ("OLD_SPACE", 0x0973d): "EmptyExternalInt32Array",
+ ("OLD_SPACE", 0x09749): "EmptyExternalUint32Array",
+ ("OLD_SPACE", 0x09755): "EmptyExternalFloat32Array",
+ ("OLD_SPACE", 0x09761): "EmptyExternalFloat64Array",
+ ("OLD_SPACE", 0x0976d): "EmptyExternalUint8ClampedArray",
+ ("OLD_SPACE", 0x09779): "EmptyFixedUint8Array",
+ ("OLD_SPACE", 0x09781): "EmptyFixedInt8Array",
+ ("OLD_SPACE", 0x09789): "EmptyFixedUint16Array",
+ ("OLD_SPACE", 0x09791): "EmptyFixedInt16Array",
+ ("OLD_SPACE", 0x09799): "EmptyFixedUint32Array",
+ ("OLD_SPACE", 0x097a1): "EmptyFixedInt32Array",
+ ("OLD_SPACE", 0x097a9): "EmptyFixedFloat32Array",
+ ("OLD_SPACE", 0x097b1): "EmptyFixedFloat64Array",
+ ("OLD_SPACE", 0x097b9): "EmptyFixedUint8ClampedArray",
+ ("OLD_SPACE", 0x097c1): "InfinityValue",
+ ("OLD_SPACE", 0x097cd): "MinusZeroValue",
+ ("OLD_SPACE", 0x097d9): "MessageListeners",
+ ("OLD_SPACE", 0x097f5): "CodeStubs",
+ ("OLD_SPACE", 0x12c49): "NonMonomorphicCache",
+ ("OLD_SPACE", 0x132bd): "PolymorphicCodeCache",
+ ("OLD_SPACE", 0x132c5): "NativesSourceCache",
+ ("OLD_SPACE", 0x1353d): "EmptyScript",
+ ("OLD_SPACE", 0x13585): "IntrinsicFunctionNames",
+ ("OLD_SPACE", 0x27ae5): "ObservationState",
+ ("OLD_SPACE", 0x27af1): "SymbolRegistry",
+ ("OLD_SPACE", 0x2863d): "EmptySlowElementDictionary",
+ ("OLD_SPACE", 0x287d9): "AllocationSitesScratchpad",
+ ("OLD_SPACE", 0x28be1): "WeakObjectToCodeTable",
+ ("OLD_SPACE", 0x4e9dd): "StringTable",
+ ("CODE_SPACE", 0x15f61): "JsEntryCode",
+ ("CODE_SPACE", 0x24781): "JsConstructEntryCode",
}