Revert "Merge old data and pointer space."
authorhpayer <hpayer@chromium.org>
Thu, 19 Mar 2015 22:03:18 +0000 (15:03 -0700)
committerCommit bot <commit-bot@chromium.org>
Thu, 19 Mar 2015 22:03:32 +0000 (22:03 +0000)
TBR=verwaest@chromium.org,ulan@chromium.org,ishell@chromium.org
NOTRY=true

Review URL: https://codereview.chromium.org/1027463002

Cr-Commit-Position: refs/heads/master@{#27323}

62 files changed:
include/v8.h
src/api.cc
src/arm/lithium-codegen-arm.cc
src/arm/macro-assembler-arm.cc
src/arm/macro-assembler-arm.h
src/arm64/lithium-codegen-arm64.cc
src/arm64/macro-assembler-arm64.h
src/assembler.cc
src/assembler.h
src/counters.h
src/extensions/statistics-extension.cc
src/factory.cc
src/flag-definitions.h
src/globals.h
src/heap/heap-inl.h
src/heap/heap.cc
src/heap/heap.h
src/heap/incremental-marking.cc
src/heap/mark-compact-inl.h
src/heap/mark-compact.cc
src/heap/mark-compact.h
src/heap/spaces-inl.h
src/heap/spaces.cc
src/heap/spaces.h
src/heap/store-buffer-inl.h
src/heap/store-buffer.cc
src/hydrogen-instructions.cc
src/hydrogen-instructions.h
src/ia32/lithium-codegen-ia32.cc
src/ia32/macro-assembler-ia32.cc
src/ia32/macro-assembler-ia32.h
src/macro-assembler.h
src/mips/lithium-codegen-mips.cc
src/mips/macro-assembler-mips.cc
src/mips/macro-assembler-mips.h
src/mips64/lithium-codegen-mips64.cc
src/mips64/macro-assembler-mips64.h
src/objects.h
src/ppc/lithium-codegen-ppc.cc
src/ppc/macro-assembler-ppc.cc
src/ppc/macro-assembler-ppc.h
src/serialize.cc
src/x64/lithium-codegen-x64.cc
src/x64/macro-assembler-x64.cc
src/x64/macro-assembler-x64.h
src/x87/lithium-codegen-x87.cc
src/x87/macro-assembler-x87.cc
src/x87/macro-assembler-x87.h
test/cctest/test-alloc.cc
test/cctest/test-api.cc
test/cctest/test-constantpool.cc
test/cctest/test-dictionary.cc
test/cctest/test-heap.cc
test/cctest/test-mark-compact.cc
test/cctest/test-serialize.cc
test/cctest/test-spaces.cc
test/cctest/test-unboxed-doubles.cc
test/cctest/test-weakmaps.cc
test/cctest/test-weaksets.cc
tools/grokdump.py
tools/oom_dump/oom_dump.cc
tools/v8heapconst.py

index 7f76c16d2e48a95ee93416713ab156dbac4f5d86..32d4392a3598da2033f7d9e3efc1bfa6e7539e43 100644 (file)
@@ -4664,15 +4664,16 @@ typedef void (*AddHistogramSampleCallback)(void* histogram, int sample);
 // --- Memory Allocation Callback ---
 enum ObjectSpace {
   kObjectSpaceNewSpace = 1 << 0,
-  kObjectSpaceOldSpace = 1 << 1,
-  kObjectSpaceCodeSpace = 1 << 2,
-  kObjectSpaceMapSpace = 1 << 3,
-  kObjectSpaceCellSpace = 1 << 4,
-  kObjectSpacePropertyCellSpace = 1 << 5,
-  kObjectSpaceLoSpace = 1 << 6,
-  kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldSpace |
-                    kObjectSpaceCodeSpace | kObjectSpaceMapSpace |
-                    kObjectSpaceLoSpace
+  kObjectSpaceOldPointerSpace = 1 << 1,
+  kObjectSpaceOldDataSpace = 1 << 2,
+  kObjectSpaceCodeSpace = 1 << 3,
+  kObjectSpaceMapSpace = 1 << 4,
+  kObjectSpaceCellSpace = 1 << 5,
+  kObjectSpacePropertyCellSpace = 1 << 6,
+  kObjectSpaceLoSpace = 1 << 7,
+  kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldPointerSpace |
+                    kObjectSpaceOldDataSpace | kObjectSpaceCodeSpace |
+                    kObjectSpaceMapSpace | kObjectSpaceLoSpace
 };
 
   enum AllocationAction {
index 5d01809da05bd345791efcad57edf941d95af3c2..2d2bd1f23b8b11d80b6c1bc190a856b43b15ac99 100644 (file)
@@ -215,10 +215,14 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
   heap_stats.new_space_size = &new_space_size;
   int new_space_capacity;
   heap_stats.new_space_capacity = &new_space_capacity;
-  intptr_t old_space_size;
-  heap_stats.old_space_size = &old_space_size;
-  intptr_t old_space_capacity;
-  heap_stats.old_space_capacity = &old_space_capacity;
+  intptr_t old_pointer_space_size;
+  heap_stats.old_pointer_space_size = &old_pointer_space_size;
+  intptr_t old_pointer_space_capacity;
+  heap_stats.old_pointer_space_capacity = &old_pointer_space_capacity;
+  intptr_t old_data_space_size;
+  heap_stats.old_data_space_size = &old_data_space_size;
+  intptr_t old_data_space_capacity;
+  heap_stats.old_data_space_capacity = &old_data_space_capacity;
   intptr_t code_space_size;
   heap_stats.code_space_size = &code_space_size;
   intptr_t code_space_capacity;
index a5a3f9b04e23100b6a6393acac08c983e431f5ba..f2d50575731e08fc7a576963cbda5a03c272fb45 100644 (file)
@@ -5378,9 +5378,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5442,9 +5446,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 0cfdeafc5805fd6b9fdf27845661ff1e87642188..9572a6f913294b284edfbc85e250c6eb32f046fc 100644 (file)
@@ -1692,11 +1692,12 @@ void MacroAssembler::Allocate(int object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand(ip));
       b(hs, gc_required);
     }
@@ -1805,11 +1806,12 @@ void MacroAssembler::Allocate(Register object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand(ip));
       b(hs, gc_required);
     }
index 16c1b803842b864bdcbb3f84371130c164adc589..73bc072dea6e3c4c6e4113b506054d3b746c9612 100644 (file)
@@ -709,7 +709,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old space. The object_size is
+  // Allocate an object in new space or old pointer space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index 98732683989d2906775a25e389566108b8291803..3f7644962907b33b4a1f341b146e9cad3398e932 100644 (file)
@@ -1580,9 +1580,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
 
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -1637,9 +1641,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   }
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 11361029bf86426b50654344e34ad128534f67d8..1b44788f18760a15b2663c9fe887e121253f9cfd 100644 (file)
@@ -1284,7 +1284,7 @@ class MacroAssembler : public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old space. The object_size is
+  // Allocate an object in new space or old pointer space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. The allocated object is returned in result.
   //
index d3f541a33f10db64ddfd389066f5a9b7503b7524..b6e52aebdf28c032d039e7046755c8ca0886da43 100644 (file)
@@ -1216,15 +1216,30 @@ ExternalReference ExternalReference::new_space_allocation_limit_address(
 }
 
 
-ExternalReference ExternalReference::old_space_allocation_top_address(
+ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
     Isolate* isolate) {
-  return ExternalReference(isolate->heap()->OldSpaceAllocationTopAddress());
+  return ExternalReference(
+      isolate->heap()->OldPointerSpaceAllocationTopAddress());
+}
+
+
+ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
+    Isolate* isolate) {
+  return ExternalReference(
+      isolate->heap()->OldPointerSpaceAllocationLimitAddress());
 }
 
 
-ExternalReference ExternalReference::old_space_allocation_limit_address(
+ExternalReference ExternalReference::old_data_space_allocation_top_address(
     Isolate* isolate) {
-  return ExternalReference(isolate->heap()->OldSpaceAllocationLimitAddress());
+  return ExternalReference(isolate->heap()->OldDataSpaceAllocationTopAddress());
+}
+
+
+ExternalReference ExternalReference::old_data_space_allocation_limit_address(
+    Isolate* isolate) {
+  return ExternalReference(
+      isolate->heap()->OldDataSpaceAllocationLimitAddress());
 }
 
 
index 691cae510ea53f5191d164d03765388922008878..ab37cd9dab871dc6dbc966cb0dee04fbdef8626a 100644 (file)
@@ -939,8 +939,14 @@ class ExternalReference BASE_EMBEDDED {
   // Used for fast allocation in generated code.
   static ExternalReference new_space_allocation_top_address(Isolate* isolate);
   static ExternalReference new_space_allocation_limit_address(Isolate* isolate);
-  static ExternalReference old_space_allocation_top_address(Isolate* isolate);
-  static ExternalReference old_space_allocation_limit_address(Isolate* isolate);
+  static ExternalReference old_pointer_space_allocation_top_address(
+      Isolate* isolate);
+  static ExternalReference old_pointer_space_allocation_limit_address(
+      Isolate* isolate);
+  static ExternalReference old_data_space_allocation_top_address(
+      Isolate* isolate);
+  static ExternalReference old_data_space_allocation_limit_address(
+      Isolate* isolate);
 
   static ExternalReference mod_two_doubles_operation(Isolate* isolate);
   static ExternalReference power_double_double_function(Isolate* isolate);
index e62c51a9a7b670b4d3b2aabccf7e4af01fd7ee02..e20a49fa3ff5cde490244bbc50d627cb78b40ba8 100644 (file)
@@ -399,7 +399,10 @@ class AggregatedHistogramTimerScope {
 #define HISTOGRAM_PERCENTAGE_LIST(HP)                                          \
   /* Heap fragmentation. */                                                    \
   HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal)        \
-  HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
+  HP(external_fragmentation_old_pointer_space,                                 \
+     V8.MemoryExternalFragmentationOldPointerSpace)                            \
+  HP(external_fragmentation_old_data_space,                                    \
+     V8.MemoryExternalFragmentationOldDataSpace)                               \
   HP(external_fragmentation_code_space,                                        \
      V8.MemoryExternalFragmentationCodeSpace)                                  \
   HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
@@ -408,7 +411,8 @@ class AggregatedHistogramTimerScope {
   HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)   \
   /* Percentages of heap committed to each space. */                           \
   HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace)                   \
-  HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace)                   \
+  HP(heap_fraction_old_pointer_space, V8.MemoryHeapFractionOldPointerSpace)    \
+  HP(heap_fraction_old_data_space, V8.MemoryHeapFractionOldDataSpace)          \
   HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace)                 \
   HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace)                   \
   HP(heap_fraction_cell_space, V8.MemoryHeapFractionCellSpace)                 \
@@ -585,9 +589,14 @@ class AggregatedHistogramTimerScope {
   SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable)               \
   SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted)               \
   SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed)                         \
-  SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable)               \
-  SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted)               \
-  SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed)                         \
+  SC(old_pointer_space_bytes_available,                                        \
+     V8.MemoryOldPointerSpaceBytesAvailable)                                   \
+  SC(old_pointer_space_bytes_committed,                                        \
+     V8.MemoryOldPointerSpaceBytesCommitted)                                   \
+  SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed)          \
+  SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable)      \
+  SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted)      \
+  SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed)                \
   SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable)             \
   SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted)             \
   SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed)                       \
index 0bd9497b851793f99257b9a2a2faffbb8a360521..58f808bbc9786f47a4f49b33e0d315ec02f6044f 100644 (file)
@@ -108,9 +108,15 @@ void StatisticsExtension::GetCounters(
       {heap->new_space()->Size(), "new_space_live_bytes"},
       {heap->new_space()->Available(), "new_space_available_bytes"},
       {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
-      {heap->old_space()->Size(), "old_space_live_bytes"},
-      {heap->old_space()->Available(), "old_space_available_bytes"},
-      {heap->old_space()->CommittedMemory(), "old_space_commited_bytes"},
+      {heap->old_pointer_space()->Size(), "old_pointer_space_live_bytes"},
+      {heap->old_pointer_space()->Available(),
+       "old_pointer_space_available_bytes"},
+      {heap->old_pointer_space()->CommittedMemory(),
+       "old_pointer_space_commited_bytes"},
+      {heap->old_data_space()->Size(), "old_data_space_live_bytes"},
+      {heap->old_data_space()->Available(), "old_data_space_available_bytes"},
+      {heap->old_data_space()->CommittedMemory(),
+       "old_data_space_commited_bytes"},
       {heap->code_space()->Size(), "code_space_live_bytes"},
       {heap->code_space()->Available(), "code_space_available_bytes"},
       {heap->code_space()->CommittedMemory(), "code_space_commited_bytes"},
index 146b80b3ee16b621aa814c9ecc6613ab6b16d43c..d999cd5034849f773774b5beff7efad3f892bee5 100644 (file)
@@ -55,7 +55,7 @@ Handle<Oddball> Factory::NewOddball(Handle<Map> map,
                                     const char* to_string,
                                     Handle<Object> to_number,
                                     byte kind) {
-  Handle<Oddball> oddball = New<Oddball>(map, OLD_SPACE);
+  Handle<Oddball> oddball = New<Oddball>(map, OLD_POINTER_SPACE);
   Oddball::Initialize(isolate(), oddball, to_string, to_number, kind);
   return oddball;
 }
@@ -926,7 +926,7 @@ Handle<WeakCell> Factory::NewWeakCell(Handle<HeapObject> value) {
 
 Handle<AllocationSite> Factory::NewAllocationSite() {
   Handle<Map> map = allocation_site_map();
-  Handle<AllocationSite> site = New<AllocationSite>(map, OLD_SPACE);
+  Handle<AllocationSite> site = New<AllocationSite>(map, OLD_POINTER_SPACE);
   site->Initialize();
 
   // Link the site
@@ -1239,7 +1239,7 @@ Handle<JSFunction> Factory::NewFunction(Handle<Map> map,
                                         Handle<SharedFunctionInfo> info,
                                         Handle<Context> context,
                                         PretenureFlag pretenure) {
-  AllocationSpace space = pretenure == TENURED ? OLD_SPACE : NEW_SPACE;
+  AllocationSpace space = pretenure == TENURED ? OLD_POINTER_SPACE : NEW_SPACE;
   Handle<JSFunction> result = New<JSFunction>(map, space);
   InitializeFunction(result, info, context);
   return result;
@@ -1574,7 +1574,7 @@ Handle<GlobalObject> Factory::NewGlobalObject(Handle<JSFunction> constructor) {
   }
 
   // Allocate the global object and initialize it with the backing store.
-  Handle<GlobalObject> global = New<GlobalObject>(map, OLD_SPACE);
+  Handle<GlobalObject> global = New<GlobalObject>(map, OLD_POINTER_SPACE);
   isolate()->heap()->InitializeJSObjectFromMap(*global, *dictionary, *map);
 
   // Create a new map for the global object.
@@ -2062,7 +2062,8 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
     Handle<String> name,
     MaybeHandle<Code> maybe_code) {
   Handle<Map> map = shared_function_info_map();
-  Handle<SharedFunctionInfo> share = New<SharedFunctionInfo>(map, OLD_SPACE);
+  Handle<SharedFunctionInfo> share =
+      New<SharedFunctionInfo>(map, OLD_POINTER_SPACE);
 
   // Set pointer fields.
   share->set_name(*name);
index c9ac7b57b70d54c5733b22da512eb653efbd5fb5..e8ae81e0fefba3f7cd172557bc3608c66e72da85 100644 (file)
@@ -596,7 +596,8 @@ DEFINE_BOOL(print_max_heap_committed, false,
             "in name=value format on exit")
 DEFINE_BOOL(trace_gc_verbose, false,
             "print more details following each garbage collection")
-DEFINE_BOOL(trace_fragmentation, false, "report fragmentation")
+DEFINE_BOOL(trace_fragmentation, false,
+            "report fragmentation for old pointer and data pages")
 DEFINE_BOOL(collect_maps, true,
             "garbage collect maps from which no objects can be reached")
 DEFINE_BOOL(weak_embedded_maps_in_optimized_code, true,
index ce047aa52086b3c535f8394d926ed29badddf487..e93fa3b07ead3f33d9453a507dd5a7f221595f3c 100644 (file)
@@ -408,16 +408,17 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
 // consecutive.
 // Keep this enum in sync with the ObjectSpace enum in v8.h
 enum AllocationSpace {
-  NEW_SPACE,   // Semispaces collected with copying collector.
-  OLD_SPACE,   // May contain pointers to new space.
-  CODE_SPACE,  // No pointers to new space, marked executable.
-  MAP_SPACE,   // Only and all map objects.
-  CELL_SPACE,  // Only and all cell objects.
-  LO_SPACE,    // Promoted large objects.
+  NEW_SPACE,          // Semispaces collected with copying collector.
+  OLD_POINTER_SPACE,  // May contain pointers to new space.
+  OLD_DATA_SPACE,     // Must not have pointers to new space.
+  CODE_SPACE,         // No pointers to new space, marked executable.
+  MAP_SPACE,          // Only and all map objects.
+  CELL_SPACE,         // Only and all cell objects.
+  LO_SPACE,           // Promoted large objects.
 
   FIRST_SPACE = NEW_SPACE,
   LAST_SPACE = LO_SPACE,
-  FIRST_PAGED_SPACE = OLD_SPACE,
+  FIRST_PAGED_SPACE = OLD_POINTER_SPACE,
   LAST_PAGED_SPACE = CELL_SPACE
 };
 const int kSpaceTagSize = 3;
index bfb5498751f29cf3bd94d785473a64e598c10345..7e869f263a1be6e6f0ead788bf2ee42f4a3bccc0 100644 (file)
@@ -81,12 +81,12 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
   // Compute map and object size.
   Map* map = one_byte_internalized_string_map();
   int size = SeqOneByteString::SizeFor(str.length());
-  AllocationSpace space = SelectSpace(size, TENURED);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
 
   // Allocate string.
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -113,12 +113,12 @@ AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
   // Compute map and object size.
   Map* map = internalized_string_map();
   int size = SeqTwoByteString::SizeFor(str.length());
-  AllocationSpace space = SelectSpace(size, TENURED);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
 
   // Allocate string.
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -183,8 +183,10 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     }
   }
 
-  if (OLD_SPACE == space) {
-    allocation = old_space_->AllocateRaw(size_in_bytes);
+  if (OLD_POINTER_SPACE == space) {
+    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
+  } else if (OLD_DATA_SPACE == space) {
+    allocation = old_data_space_->AllocateRaw(size_in_bytes);
   } else if (CODE_SPACE == space) {
     if (size_in_bytes <= code_space()->AreaSize()) {
       allocation = code_space_->AllocateRaw(size_in_bytes);
@@ -325,11 +327,23 @@ bool Heap::InToSpace(Object* object) {
 }
 
 
-bool Heap::InOldSpace(Address address) { return old_space_->Contains(address); }
+bool Heap::InOldPointerSpace(Address address) {
+  return old_pointer_space_->Contains(address);
+}
 
 
-bool Heap::InOldSpace(Object* object) {
-  return InOldSpace(reinterpret_cast<Address>(object));
+bool Heap::InOldPointerSpace(Object* object) {
+  return InOldPointerSpace(reinterpret_cast<Address>(object));
+}
+
+
+bool Heap::InOldDataSpace(Address address) {
+  return old_data_space_->Contains(address);
+}
+
+
+bool Heap::InOldDataSpace(Object* object) {
+  return InOldDataSpace(reinterpret_cast<Address>(object));
 }
 
 
@@ -361,16 +375,52 @@ void Heap::RecordWrites(Address address, int start, int len) {
 }
 
 
+OldSpace* Heap::TargetSpace(HeapObject* object) {
+  InstanceType type = object->map()->instance_type();
+  AllocationSpace space = TargetSpaceId(type);
+  return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
+}
+
+
+AllocationSpace Heap::TargetSpaceId(InstanceType type) {
+  // Heap numbers and sequential strings are promoted to old data space, all
+  // other object types are promoted to old pointer space.  We do not use
+  // object->IsHeapNumber() and object->IsSeqString() because we already
+  // know that object has the heap object tag.
+
+  // These objects are never allocated in new space.
+  DCHECK(type != MAP_TYPE);
+  DCHECK(type != CODE_TYPE);
+  DCHECK(type != ODDBALL_TYPE);
+  DCHECK(type != CELL_TYPE);
+
+  if (type <= LAST_NAME_TYPE) {
+    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
+    DCHECK(type < FIRST_NONSTRING_TYPE);
+    // There are four string representations: sequential strings, external
+    // strings, cons strings, and sliced strings.
+    // Only the latter two contain non-map-word pointers to heap objects.
+    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
+               ? OLD_POINTER_SPACE
+               : OLD_DATA_SPACE;
+  } else {
+    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
+  }
+}
+
+
 bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
   // Object migration is governed by the following rules:
   //
-  // 1) Objects in new-space can be migrated to the old space
+  // 1) Objects in new-space can be migrated to one of the old spaces
   //    that matches their target space or they stay in new-space.
   // 2) Objects in old-space stay in the same space when migrating.
   // 3) Fillers (two or more words) can migrate due to left-trimming of
-  //    fixed arrays in new-space or old space.
+  //    fixed arrays in new-space, old-data-space and old-pointer-space.
   // 4) Fillers (one word) can never migrate, they are skipped by
   //    incremental marking explicitly to prevent invalid pattern.
+  // 5) Short external strings can end up in old pointer space when a cons
+  //    string in old pointer space is made external (String::MakeExternal).
   //
   // Since this function is used for debugging only, we do not place
   // asserts here, but check everything explicitly.
@@ -380,10 +430,12 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
   AllocationSpace src = chunk->owner()->identity();
   switch (src) {
     case NEW_SPACE:
-      return dst == src || dst == OLD_SPACE;
-    case OLD_SPACE:
-      return dst == src &&
-             (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
+      return dst == src || dst == TargetSpaceId(type);
+    case OLD_POINTER_SPACE:
+      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
+                            obj->IsExternalString());
+    case OLD_DATA_SPACE:
+      return dst == src && dst == TargetSpaceId(type);
     case CODE_SPACE:
       return dst == src && type == CODE_TYPE;
     case MAP_SPACE:
index fd17774de2294df78cf5ea1e73a952b9fa3d6813..1b0ab092f04da91374cb07f7d8b46da18aa75e49 100644 (file)
@@ -84,7 +84,8 @@ Heap::Heap()
       global_ic_age_(0),
       scan_on_scavenge_pages_(0),
       new_space_(this),
-      old_space_(NULL),
+      old_pointer_space_(NULL),
+      old_data_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
       cell_space_(NULL),
@@ -171,16 +172,17 @@ Heap::Heap()
 intptr_t Heap::Capacity() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.Capacity() + old_space_->Capacity() +
-         code_space_->Capacity() + map_space_->Capacity() +
-         cell_space_->Capacity();
+  return new_space_.Capacity() + old_pointer_space_->Capacity() +
+         old_data_space_->Capacity() + code_space_->Capacity() +
+         map_space_->Capacity() + cell_space_->Capacity();
 }
 
 
 intptr_t Heap::CommittedOldGenerationMemory() {
   if (!HasBeenSetUp()) return 0;
 
-  return old_space_->CommittedMemory() + code_space_->CommittedMemory() +
+  return old_pointer_space_->CommittedMemory() +
+         old_data_space_->CommittedMemory() + code_space_->CommittedMemory() +
          map_space_->CommittedMemory() + cell_space_->CommittedMemory() +
          lo_space_->Size();
 }
@@ -197,7 +199,8 @@ size_t Heap::CommittedPhysicalMemory() {
   if (!HasBeenSetUp()) return 0;
 
   return new_space_.CommittedPhysicalMemory() +
-         old_space_->CommittedPhysicalMemory() +
+         old_pointer_space_->CommittedPhysicalMemory() +
+         old_data_space_->CommittedPhysicalMemory() +
          code_space_->CommittedPhysicalMemory() +
          map_space_->CommittedPhysicalMemory() +
          cell_space_->CommittedPhysicalMemory() +
@@ -225,15 +228,16 @@ void Heap::UpdateMaximumCommitted() {
 intptr_t Heap::Available() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.Available() + old_space_->Available() +
-         code_space_->Available() + map_space_->Available() +
-         cell_space_->Available();
+  return new_space_.Available() + old_pointer_space_->Available() +
+         old_data_space_->Available() + code_space_->Available() +
+         map_space_->Available() + cell_space_->Available();
 }
 
 
 bool Heap::HasBeenSetUp() {
-  return old_space_ != NULL && code_space_ != NULL && map_space_ != NULL &&
-         cell_space_ != NULL && lo_space_ != NULL;
+  return old_pointer_space_ != NULL && old_data_space_ != NULL &&
+         code_space_ != NULL && map_space_ != NULL && cell_space_ != NULL &&
+         lo_space_ != NULL;
 }
 
 
@@ -336,13 +340,22 @@ void Heap::PrintShortHeapStatistics() {
            ", committed: %6" V8_PTR_PREFIX "d KB\n",
            new_space_.Size() / KB, new_space_.Available() / KB,
            new_space_.CommittedMemory() / KB);
-  PrintPID("Old space,       used: %6" V8_PTR_PREFIX
+  PrintPID("Old pointers,       used: %6" V8_PTR_PREFIX
            "d KB"
            ", available: %6" V8_PTR_PREFIX
            "d KB"
            ", committed: %6" V8_PTR_PREFIX "d KB\n",
-           old_space_->SizeOfObjects() / KB, old_space_->Available() / KB,
-           old_space_->CommittedMemory() / KB);
+           old_pointer_space_->SizeOfObjects() / KB,
+           old_pointer_space_->Available() / KB,
+           old_pointer_space_->CommittedMemory() / KB);
+  PrintPID("Old data space,     used: %6" V8_PTR_PREFIX
+           "d KB"
+           ", available: %6" V8_PTR_PREFIX
+           "d KB"
+           ", committed: %6" V8_PTR_PREFIX "d KB\n",
+           old_data_space_->SizeOfObjects() / KB,
+           old_data_space_->Available() / KB,
+           old_data_space_->CommittedMemory() / KB);
   PrintPID("Code space,         used: %6" V8_PTR_PREFIX
            "d KB"
            ", available: %6" V8_PTR_PREFIX
@@ -638,8 +651,12 @@ void Heap::GarbageCollectionEpilogue() {
 
     isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
         (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-    isolate_->counters()->heap_fraction_old_space()->AddSample(static_cast<int>(
-        (old_space()->CommittedMemory() * 100.0) / CommittedMemory()));
+    isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
+        static_cast<int>((old_pointer_space()->CommittedMemory() * 100.0) /
+                         CommittedMemory()));
+    isolate_->counters()->heap_fraction_old_data_space()->AddSample(
+        static_cast<int>((old_data_space()->CommittedMemory() * 100.0) /
+                         CommittedMemory()));
     isolate_->counters()->heap_fraction_code_space()->AddSample(
         static_cast<int>((code_space()->CommittedMemory() * 100.0) /
                          CommittedMemory()));
@@ -685,7 +702,8 @@ void Heap::GarbageCollectionEpilogue() {
   UPDATE_FRAGMENTATION_FOR_SPACE(space)
 
   UPDATE_COUNTERS_FOR_SPACE(new_space)
-  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space)
@@ -762,7 +780,7 @@ void Heap::CollectAllGarbage(int flags, const char* gc_reason,
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   mark_compact_collector_.SetFlags(flags);
-  CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
+  CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags);
   mark_compact_collector_.SetFlags(kNoGCFlags);
 }
 
@@ -1345,6 +1363,11 @@ static void VerifyNonPointerSpacePointers(Heap* heap) {
   for (HeapObject* object = code_it.Next(); object != NULL;
        object = code_it.Next())
     object->Iterate(&v);
+
+  HeapObjectIterator data_it(heap->old_data_space());
+  for (HeapObject* object = data_it.Next(); object != NULL;
+       object = data_it.Next())
+    object->Iterate(&v);
 }
 #endif  // VERIFY_HEAP
 
@@ -2126,7 +2149,13 @@ class ScavengingVisitor : public StaticVisitorBase {
     }
 
     AllocationResult allocation;
-    allocation = heap->old_space()->AllocateRaw(allocation_size);
+    if (object_contents == DATA_OBJECT) {
+      DCHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
+      allocation = heap->old_data_space()->AllocateRaw(allocation_size);
+    } else {
+      DCHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
+      allocation = heap->old_pointer_space()->AllocateRaw(allocation_size);
+    }
 
     HeapObject* target = NULL;  // Initialization to please compiler.
     if (allocation.To(&target)) {
@@ -2542,14 +2571,14 @@ bool Heap::CreateInitialMaps() {
   set_empty_fixed_array(FixedArray::cast(obj));
 
   {
-    AllocationResult allocation = Allocate(null_map(), OLD_SPACE);
+    AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE);
     if (!allocation.To(&obj)) return false;
   }
   set_null_value(Oddball::cast(obj));
   Oddball::cast(obj)->set_kind(Oddball::kNull);
 
   {
-    AllocationResult allocation = Allocate(undefined_map(), OLD_SPACE);
+    AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE);
     if (!allocation.To(&obj)) return false;
   }
   set_undefined_value(Oddball::cast(obj));
@@ -2786,11 +2815,11 @@ AllocationResult Heap::AllocateHeapNumber(double value, MutableMode mode,
   int size = HeapNumber::kSize;
   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
 
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
 
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -2821,7 +2850,8 @@ AllocationResult Heap::AllocatePropertyCell() {
   STATIC_ASSERT(PropertyCell::kSize <= Page::kMaxRegularHeapObjectSize);
 
   HeapObject* result;
-  AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+  AllocationResult allocation =
+      AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   result->set_map_no_write_barrier(global_property_cell_map());
@@ -2838,7 +2868,8 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
   HeapObject* result = NULL;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(weak_cell_map());
@@ -3392,7 +3423,7 @@ AllocationResult Heap::AllocateForeign(Address address,
                                        PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate foreigns in paged spaces.
   STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
-  AllocationSpace space = (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
+  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
   Foreign* result;
   AllocationResult allocation = Allocate(foreign_map(), space);
   if (!allocation.To(&result)) return allocation;
@@ -3406,10 +3437,10 @@ AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = ByteArray::SizeFor(length);
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3440,17 +3471,20 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
 
 bool Heap::CanMoveObjectStart(HeapObject* object) {
   Address address = object->address();
+  bool is_in_old_pointer_space = InOldPointerSpace(address);
+  bool is_in_old_data_space = InOldDataSpace(address);
 
   if (lo_space()->Contains(object)) return false;
 
   Page* page = Page::FromAddress(address);
   // We can move the object start if:
-  // (1) the object is not in old space,
+  // (1) the object is not in old pointer or old data space,
   // (2) the page of the object was already swept,
   // (3) the page was already concurrently swept. This case is an optimization
   // for concurrent sweeping. The WasSwept predicate for concurrently swept
   // pages is set after sweeping all pages.
-  return !InOldSpace(address) || page->WasSwept() || page->SweepingCompleted();
+  return (!is_in_old_pointer_space && !is_in_old_data_space) ||
+         page->WasSwept() || page->SweepingCompleted();
 }
 
 
@@ -3567,10 +3601,10 @@ AllocationResult Heap::AllocateExternalArray(int length,
                                              void* external_pointer,
                                              PretenureFlag pretenure) {
   int size = ExternalArray::kAlignedSize;
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3613,10 +3647,10 @@ AllocationResult Heap::AllocateFixedTypedArray(int length,
     size += kPointerSize;
   }
 #endif
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
 
   HeapObject* object;
-  AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
   if (!allocation.To(&object)) return allocation;
 
   if (array_type == kExternalFloat64Array) {
@@ -3780,7 +3814,8 @@ AllocationResult Heap::Allocate(Map* map, AllocationSpace space,
   DCHECK(map->instance_type() != MAP_TYPE);
   // If allocation failures are disallowed, we may allocate in a different
   // space when new space is full and the object is not a large object.
-  AllocationSpace retry_space = (space != NEW_SPACE) ? space : OLD_SPACE;
+  AllocationSpace retry_space =
+      (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
   int size = map->instance_size();
   if (allocation_site != NULL) {
     size += AllocationMemento::kSize;
@@ -3857,7 +3892,7 @@ AllocationResult Heap::AllocateJSObjectFromMap(
 
   // Allocate the JSObject.
   int size = map->instance_size();
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
   JSObject* js_obj;
   AllocationResult allocation = Allocate(map, space, allocation_site);
   if (!allocation.To(&js_obj)) return allocation;
@@ -3908,7 +3943,7 @@ AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
   if (always_allocate()) {
     {
       AllocationResult allocation =
-          AllocateRaw(object_size, NEW_SPACE, OLD_SPACE);
+          AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
       if (!allocation.To(&clone)) return allocation;
     }
     Address clone_address = clone->address();
@@ -4059,12 +4094,12 @@ AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
     map = internalized_string_map();
     size = SeqTwoByteString::SizeFor(chars);
   }
-  AllocationSpace space = SelectSpace(size, TENURED);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
 
   // Allocate string.
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -4102,11 +4137,11 @@ AllocationResult Heap::AllocateRawOneByteString(int length,
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqOneByteString::SizeFor(length);
   DCHECK(size <= SeqOneByteString::kMaxSize);
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
 
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -4126,11 +4161,11 @@ AllocationResult Heap::AllocateRawTwoByteString(int length,
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqTwoByteString::SizeFor(length);
   DCHECK(size <= SeqTwoByteString::kMaxSize);
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
 
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -4147,7 +4182,8 @@ AllocationResult Heap::AllocateEmptyFixedArray() {
   int size = FixedArray::SizeFor(0);
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   // Initialize the object.
@@ -4269,9 +4305,9 @@ AllocationResult Heap::AllocateRawFixedArray(int length,
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = FixedArray::SizeFor(length);
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
 
-  return AllocateRaw(size, space, OLD_SPACE);
+  return AllocateRaw(size, space, OLD_POINTER_SPACE);
 }
 
 
@@ -4340,11 +4376,11 @@ AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
-  AllocationSpace space = SelectSpace(size, pretenure);
+  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
 
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
     if (!allocation.To(&object)) return allocation;
   }
 
@@ -4359,11 +4395,11 @@ AllocationResult Heap::AllocateConstantPoolArray(
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
-  AllocationSpace space = SelectSpace(size, TENURED);
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
 
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
     if (!allocation.To(&object)) return allocation;
   }
   object = EnsureDoubleAligned(this, object, size);
@@ -4385,11 +4421,11 @@ AllocationResult Heap::AllocateExtendedConstantPoolArray(
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
-  AllocationSpace space = SelectSpace(size, TENURED);
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
 
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
     if (!allocation.To(&object)) return allocation;
   }
   object = EnsureDoubleAligned(this, object, size);
@@ -4407,7 +4443,8 @@ AllocationResult Heap::AllocateEmptyConstantPoolArray() {
   int size = ConstantPoolArray::SizeFor(small);
   HeapObject* result = NULL;
   {
-    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
+    AllocationResult allocation =
+        AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(constant_pool_array_map());
@@ -4422,7 +4459,7 @@ AllocationResult Heap::AllocateSymbol() {
 
   HeapObject* result = NULL;
   AllocationResult allocation =
-      AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE);
+      AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   result->set_map_no_write_barrier(symbol_map());
@@ -4460,7 +4497,7 @@ AllocationResult Heap::AllocateStruct(InstanceType type) {
       return exception();
   }
   int size = map->instance_size();
-  AllocationSpace space = SelectSpace(size, TENURED);
+  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
   Struct* result;
   {
     AllocationResult allocation = Allocate(map, space);
@@ -4725,8 +4762,10 @@ void Heap::ReportHeapStatistics(const char* title) {
   isolate_->memory_allocator()->ReportStatistics();
   PrintF("To space : ");
   new_space_.ReportStatistics();
-  PrintF("Old space : ");
-  old_space_->ReportStatistics();
+  PrintF("Old pointer space : ");
+  old_pointer_space_->ReportStatistics();
+  PrintF("Old data space : ");
+  old_data_space_->ReportStatistics();
   PrintF("Code space : ");
   code_space_->ReportStatistics();
   PrintF("Map space : ");
@@ -4746,9 +4785,11 @@ bool Heap::Contains(HeapObject* value) { return Contains(value->address()); }
 bool Heap::Contains(Address addr) {
   if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
   return HasBeenSetUp() &&
-         (new_space_.ToSpaceContains(addr) || old_space_->Contains(addr) ||
-          code_space_->Contains(addr) || map_space_->Contains(addr) ||
-          cell_space_->Contains(addr) || lo_space_->SlowContains(addr));
+         (new_space_.ToSpaceContains(addr) ||
+          old_pointer_space_->Contains(addr) ||
+          old_data_space_->Contains(addr) || code_space_->Contains(addr) ||
+          map_space_->Contains(addr) || cell_space_->Contains(addr) ||
+          lo_space_->SlowContains(addr));
 }
 
 
@@ -4764,8 +4805,10 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
       return new_space_.ToSpaceContains(addr);
-    case OLD_SPACE:
-      return old_space_->Contains(addr);
+    case OLD_POINTER_SPACE:
+      return old_pointer_space_->Contains(addr);
+    case OLD_DATA_SPACE:
+      return old_data_space_->Contains(addr);
     case CODE_SPACE:
       return code_space_->Contains(addr);
     case MAP_SPACE:
@@ -4813,10 +4856,11 @@ void Heap::Verify() {
 
   new_space_.Verify();
 
-  old_space_->Verify(&visitor);
+  old_pointer_space_->Verify(&visitor);
   map_space_->Verify(&visitor);
 
   VerifyPointersVisitor no_dirty_regions_visitor;
+  old_data_space_->Verify(&no_dirty_regions_visitor);
   code_space_->Verify(&no_dirty_regions_visitor);
   cell_space_->Verify(&no_dirty_regions_visitor);
 
@@ -5133,8 +5177,10 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->end_marker = HeapStats::kEndMarker;
   *stats->new_space_size = new_space_.SizeAsInt();
   *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
-  *stats->old_space_size = old_space_->SizeOfObjects();
-  *stats->old_space_capacity = old_space_->Capacity();
+  *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects();
+  *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
+  *stats->old_data_space_size = old_data_space_->SizeOfObjects();
+  *stats->old_data_space_capacity = old_data_space_->Capacity();
   *stats->code_space_size = code_space_->SizeOfObjects();
   *stats->code_space_capacity = code_space_->Capacity();
   *stats->map_space_size = map_space_->SizeOfObjects();
@@ -5163,7 +5209,8 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
 
 
 intptr_t Heap::PromotedSpaceSizeOfObjects() {
-  return old_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
+  return old_pointer_space_->SizeOfObjects() +
+         old_data_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
          map_space_->SizeOfObjects() + cell_space_->SizeOfObjects() +
          lo_space_->SizeOfObjects();
 }
@@ -5287,11 +5334,17 @@ bool Heap::SetUp() {
   }
   new_space_top_after_last_gc_ = new_space()->top();
 
-  // Initialize old space.
-  old_space_ =
-      new OldSpace(this, max_old_generation_size_, OLD_SPACE, NOT_EXECUTABLE);
-  if (old_space_ == NULL) return false;
-  if (!old_space_->SetUp()) return false;
+  // Initialize old pointer space.
+  old_pointer_space_ = new OldSpace(this, max_old_generation_size_,
+                                    OLD_POINTER_SPACE, NOT_EXECUTABLE);
+  if (old_pointer_space_ == NULL) return false;
+  if (!old_pointer_space_->SetUp()) return false;
+
+  // Initialize old data space.
+  old_data_space_ = new OldSpace(this, max_old_generation_size_, OLD_DATA_SPACE,
+                                 NOT_EXECUTABLE);
+  if (old_data_space_ == NULL) return false;
+  if (!old_data_space_->SetUp()) return false;
 
   if (!isolate_->code_range()->SetUp(code_range_size_)) return false;
 
@@ -5414,8 +5467,12 @@ void Heap::TearDown() {
            MaximumCommittedMemory());
     PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ",
            new_space_.MaximumCommittedMemory());
-    PrintF("maximum_committed_by_old_space=%" V8_PTR_PREFIX "d ",
-           old_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ",
+           old_pointer_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
+           old_data_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ",
            code_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
@@ -5441,10 +5498,16 @@ void Heap::TearDown() {
 
   new_space_.TearDown();
 
-  if (old_space_ != NULL) {
-    old_space_->TearDown();
-    delete old_space_;
-    old_space_ = NULL;
+  if (old_pointer_space_ != NULL) {
+    old_pointer_space_->TearDown();
+    delete old_pointer_space_;
+    old_pointer_space_ = NULL;
+  }
+
+  if (old_data_space_ != NULL) {
+    old_data_space_->TearDown();
+    delete old_data_space_;
+    old_data_space_ = NULL;
   }
 
   if (code_space_ != NULL) {
@@ -5580,8 +5643,10 @@ Space* AllSpaces::next() {
   switch (counter_++) {
     case NEW_SPACE:
       return heap_->new_space();
-    case OLD_SPACE:
-      return heap_->old_space();
+    case OLD_POINTER_SPACE:
+      return heap_->old_pointer_space();
+    case OLD_DATA_SPACE:
+      return heap_->old_data_space();
     case CODE_SPACE:
       return heap_->code_space();
     case MAP_SPACE:
@@ -5598,8 +5663,10 @@ Space* AllSpaces::next() {
 
 PagedSpace* PagedSpaces::next() {
   switch (counter_++) {
-    case OLD_SPACE:
-      return heap_->old_space();
+    case OLD_POINTER_SPACE:
+      return heap_->old_pointer_space();
+    case OLD_DATA_SPACE:
+      return heap_->old_data_space();
     case CODE_SPACE:
       return heap_->code_space();
     case MAP_SPACE:
@@ -5614,8 +5681,10 @@ PagedSpace* PagedSpaces::next() {
 
 OldSpace* OldSpaces::next() {
   switch (counter_++) {
-    case OLD_SPACE:
-      return heap_->old_space();
+    case OLD_POINTER_SPACE:
+      return heap_->old_pointer_space();
+    case OLD_DATA_SPACE:
+      return heap_->old_data_space();
     case CODE_SPACE:
       return heap_->code_space();
     default:
@@ -5674,8 +5743,12 @@ ObjectIterator* SpaceIterator::CreateIterator() {
     case NEW_SPACE:
       iterator_ = new SemiSpaceIterator(heap_->new_space(), size_func_);
       break;
-    case OLD_SPACE:
-      iterator_ = new HeapObjectIterator(heap_->old_space(), size_func_);
+    case OLD_POINTER_SPACE:
+      iterator_ =
+          new HeapObjectIterator(heap_->old_pointer_space(), size_func_);
+      break;
+    case OLD_DATA_SPACE:
+      iterator_ = new HeapObjectIterator(heap_->old_data_space(), size_func_);
       break;
     case CODE_SPACE:
       iterator_ = new HeapObjectIterator(heap_->code_space(), size_func_);
index a2fa8832f3b2f69663bd1e3aef4d1a59730b7e70..5c276e9581c5fa2aac11a33db53cd7dd96a444b5 100644 (file)
@@ -641,15 +641,18 @@ class Heap {
   Address NewSpaceTop() { return new_space_.top(); }
 
   NewSpace* new_space() { return &new_space_; }
-  OldSpace* old_space() { return old_space_; }
+  OldSpace* old_pointer_space() { return old_pointer_space_; }
+  OldSpace* old_data_space() { return old_data_space_; }
   OldSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
   CellSpace* cell_space() { return cell_space_; }
   LargeObjectSpace* lo_space() { return lo_space_; }
   PagedSpace* paged_space(int idx) {
     switch (idx) {
-      case OLD_SPACE:
-        return old_space();
+      case OLD_POINTER_SPACE:
+        return old_pointer_space();
+      case OLD_DATA_SPACE:
+        return old_data_space();
       case MAP_SPACE:
         return map_space();
       case CELL_SPACE:
@@ -675,11 +678,18 @@ class Heap {
     return new_space_.allocation_limit_address();
   }
 
-  Address* OldSpaceAllocationTopAddress() {
-    return old_space_->allocation_top_address();
+  Address* OldPointerSpaceAllocationTopAddress() {
+    return old_pointer_space_->allocation_top_address();
   }
-  Address* OldSpaceAllocationLimitAddress() {
-    return old_space_->allocation_limit_address();
+  Address* OldPointerSpaceAllocationLimitAddress() {
+    return old_pointer_space_->allocation_limit_address();
+  }
+
+  Address* OldDataSpaceAllocationTopAddress() {
+    return old_data_space_->allocation_top_address();
+  }
+  Address* OldDataSpaceAllocationLimitAddress() {
+    return old_data_space_->allocation_limit_address();
   }
 
   // TODO(hpayer): There is still a missmatch between capacity and actual
@@ -909,9 +919,13 @@ class Heap {
   inline bool InFromSpace(Object* object);
   inline bool InToSpace(Object* object);
 
-  // Returns whether the object resides in old space.
-  inline bool InOldSpace(Address address);
-  inline bool InOldSpace(Object* object);
+  // Returns whether the object resides in old pointer space.
+  inline bool InOldPointerSpace(Address address);
+  inline bool InOldPointerSpace(Object* object);
+
+  // Returns whether the object resides in old data space.
+  inline bool InOldDataSpace(Address address);
+  inline bool InOldDataSpace(Object* object);
 
   // Checks whether an address/object in the heap (including auxiliary
   // area and unused area).
@@ -923,6 +937,10 @@ class Heap {
   bool InSpace(Address addr, AllocationSpace space);
   bool InSpace(HeapObject* value, AllocationSpace space);
 
+  // Finds out which space an object should get promoted to based on its type.
+  inline OldSpace* TargetSpace(HeapObject* object);
+  static inline AllocationSpace TargetSpaceId(InstanceType type);
+
   // Checks whether the given object is allowed to be migrated from it's
   // current space into the given destination space. Used for debugging.
   inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
@@ -1559,7 +1577,8 @@ class Heap {
   int scan_on_scavenge_pages_;
 
   NewSpace new_space_;
-  OldSpace* old_space_;
+  OldSpace* old_pointer_space_;
+  OldSpace* old_data_space_;
   OldSpace* code_space_;
   MapSpace* map_space_;
   CellSpace* cell_space_;
@@ -1754,11 +1773,14 @@ class Heap {
   inline void UpdateOldSpaceLimits();
 
   // Selects the proper allocation space depending on the given object
-  // size and pretenuring decision.
+  // size, pretenuring decision, and preferred old-space.
   static AllocationSpace SelectSpace(int object_size,
+                                     AllocationSpace preferred_old_space,
                                      PretenureFlag pretenure) {
+    DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
+           preferred_old_space == OLD_DATA_SPACE);
     if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
-    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
+    return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
   }
 
   HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
@@ -2161,26 +2183,28 @@ class HeapStats {
   int* start_marker;                       //  0
   int* new_space_size;                     //  1
   int* new_space_capacity;                 //  2
-  intptr_t* old_space_size;                //  3
-  intptr_t* old_space_capacity;            //  4
-  intptr_t* code_space_size;               //  5
-  intptr_t* code_space_capacity;           //  6
-  intptr_t* map_space_size;                //  7
-  intptr_t* map_space_capacity;            //  8
-  intptr_t* cell_space_size;               //  9
-  intptr_t* cell_space_capacity;           // 10
-  intptr_t* lo_space_size;                 // 11
-  int* global_handle_count;                // 12
-  int* weak_global_handle_count;           // 13
-  int* pending_global_handle_count;        // 14
-  int* near_death_global_handle_count;     // 15
-  int* free_global_handle_count;           // 16
-  intptr_t* memory_allocator_size;         // 17
-  intptr_t* memory_allocator_capacity;     // 18
-  int* objects_per_type;                   // 19
-  int* size_per_type;                      // 20
-  int* os_error;                           // 21
-  int* end_marker;                         // 22
+  intptr_t* old_pointer_space_size;        //  3
+  intptr_t* old_pointer_space_capacity;    //  4
+  intptr_t* old_data_space_size;           //  5
+  intptr_t* old_data_space_capacity;       //  6
+  intptr_t* code_space_size;               //  7
+  intptr_t* code_space_capacity;           //  8
+  intptr_t* map_space_size;                //  9
+  intptr_t* map_space_capacity;            // 10
+  intptr_t* cell_space_size;               // 11
+  intptr_t* cell_space_capacity;           // 12
+  intptr_t* lo_space_size;                 // 13
+  int* global_handle_count;                // 14
+  int* weak_global_handle_count;           // 15
+  int* pending_global_handle_count;        // 16
+  int* near_death_global_handle_count;     // 17
+  int* free_global_handle_count;           // 18
+  intptr_t* memory_allocator_size;         // 19
+  intptr_t* memory_allocator_capacity;     // 20
+  int* objects_per_type;                   // 21
+  int* size_per_type;                      // 22
+  int* os_error;                           // 23
+  int* end_marker;                         // 24
 };
 
 
@@ -2239,11 +2263,12 @@ class AllSpaces BASE_EMBEDDED {
 };
 
 
-// Space iterator for iterating over all old spaces of the heap: Old space
-// and code space.  Returns each space in turn, and null when it is done.
+// Space iterator for iterating over all old spaces of the heap: Old pointer
+// space, old data space and code space.  Returns each space in turn, and null
+// when it is done.
 class OldSpaces BASE_EMBEDDED {
  public:
-  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
+  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
   OldSpace* next();
 
  private:
@@ -2253,11 +2278,11 @@ class OldSpaces BASE_EMBEDDED {
 
 
 // Space iterator for iterating over all the paged spaces of the heap: Map
-// space, old space, code space and cell space.  Returns
+// space, old pointer space, old data space, code space and cell space.  Returns
 // each space in turn, and null when it is done.
 class PagedSpaces BASE_EMBEDDED {
  public:
-  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
+  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
   PagedSpace* next();
 
  private:
index e01c50b849a0bb99053fa4f0b57f635280ae755c..b3609e4adcf7db1baa16dde096f9ee48d5290045 100644 (file)
@@ -135,6 +135,16 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
 }
 
 
+static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
+                                       MarkBit mark_bit, int size) {
+  DCHECK(!Marking::IsImpossible(mark_bit));
+  if (mark_bit.Get()) return;
+  mark_bit.Set();
+  MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
+  DCHECK(Marking::IsBlack(mark_bit));
+}
+
+
 static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
                                         MarkBit mark_bit, int size) {
   DCHECK(!Marking::IsImpossible(mark_bit));
@@ -346,7 +356,8 @@ void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
 
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
-  DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
+  DeactivateIncrementalWriteBarrierForSpace(heap_->old_pointer_space());
+  DeactivateIncrementalWriteBarrierForSpace(heap_->old_data_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->cell_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->code_space());
@@ -379,7 +390,8 @@ void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier() {
-  ActivateIncrementalWriteBarrier(heap_->old_space());
+  ActivateIncrementalWriteBarrier(heap_->old_pointer_space());
+  ActivateIncrementalWriteBarrier(heap_->old_data_space());
   ActivateIncrementalWriteBarrier(heap_->cell_space());
   ActivateIncrementalWriteBarrier(heap_->map_space());
   ActivateIncrementalWriteBarrier(heap_->code_space());
@@ -642,7 +654,9 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
 
 void IncrementalMarking::MarkObject(Heap* heap, HeapObject* obj) {
   MarkBit mark_bit = Marking::MarkBitFrom(obj);
-  if (Marking::IsWhite(mark_bit)) {
+  if (mark_bit.data_only()) {
+    MarkBlackOrKeepGrey(obj, mark_bit, obj->Size());
+  } else if (Marking::IsWhite(mark_bit)) {
     heap->incremental_marking()->WhiteToGreyAndPush(obj, mark_bit);
   }
 }
index 757b6c347a04ae3b51356c155dd4985a79a2f27b..66b0a59cec6c0b32ba3cf2147e0af2e55bd81f98 100644 (file)
@@ -15,7 +15,8 @@ namespace internal {
 
 MarkBit Marking::MarkBitFrom(Address addr) {
   MemoryChunk* p = MemoryChunk::FromAddress(addr);
-  return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr));
+  return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr),
+                                         p->ContainsOnlyData());
 }
 
 
index 84a2a7f31898b63cd4836210c854b1461ce5c74e..de381aab356657374f1e62f083173045d949ded0 100644 (file)
@@ -135,7 +135,8 @@ static void VerifyMarking(PagedSpace* space) {
 
 
 static void VerifyMarking(Heap* heap) {
-  VerifyMarking(heap->old_space());
+  VerifyMarking(heap->old_pointer_space());
+  VerifyMarking(heap->old_data_space());
   VerifyMarking(heap->code_space());
   VerifyMarking(heap->cell_space());
   VerifyMarking(heap->map_space());
@@ -199,7 +200,8 @@ static void VerifyEvacuation(NewSpace* space) {
 
 
 static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
-  if (FLAG_use_allocation_folding && (space == heap->old_space())) {
+  if (FLAG_use_allocation_folding &&
+      (space == heap->old_pointer_space() || space == heap->old_data_space())) {
     return;
   }
   PageIterator it(space);
@@ -213,7 +215,8 @@ static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
 
 
 static void VerifyEvacuation(Heap* heap) {
-  VerifyEvacuation(heap, heap->old_space());
+  VerifyEvacuation(heap, heap->old_pointer_space());
+  VerifyEvacuation(heap, heap->old_data_space());
   VerifyEvacuation(heap, heap->code_space());
   VerifyEvacuation(heap, heap->cell_space());
   VerifyEvacuation(heap, heap->map_space());
@@ -226,7 +229,8 @@ static void VerifyEvacuation(Heap* heap) {
 
 
 void MarkCompactCollector::SetUp() {
-  free_list_old_space_.Reset(new FreeList(heap_->old_space()));
+  free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space()));
+  free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space()));
 }
 
 
@@ -257,7 +261,8 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
   if (!compacting_) {
     DCHECK(evacuation_candidates_.length() == 0);
 
-    CollectEvacuationCandidates(heap()->old_space());
+    CollectEvacuationCandidates(heap()->old_pointer_space());
+    CollectEvacuationCandidates(heap()->old_data_space());
 
     if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION ||
                                     FLAG_incremental_code_compaction)) {
@@ -271,7 +276,8 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
       TraceFragmentation(heap()->cell_space());
     }
 
-    heap()->old_space()->EvictEvacuationCandidatesFromFreeLists();
+    heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists();
+    heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists();
     heap()->code_space()->EvictEvacuationCandidatesFromFreeLists();
 
     compacting_ = evacuation_candidates_.length() > 0;
@@ -358,7 +364,8 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
 
 
 void MarkCompactCollector::VerifyMarkbitsAreClean() {
-  VerifyMarkbitsAreClean(heap_->old_space());
+  VerifyMarkbitsAreClean(heap_->old_pointer_space());
+  VerifyMarkbitsAreClean(heap_->old_data_space());
   VerifyMarkbitsAreClean(heap_->code_space());
   VerifyMarkbitsAreClean(heap_->cell_space());
   VerifyMarkbitsAreClean(heap_->map_space());
@@ -416,7 +423,8 @@ static void ClearMarkbitsInNewSpace(NewSpace* space) {
 void MarkCompactCollector::ClearMarkbits() {
   ClearMarkbitsInPagedSpace(heap_->code_space());
   ClearMarkbitsInPagedSpace(heap_->map_space());
-  ClearMarkbitsInPagedSpace(heap_->old_space());
+  ClearMarkbitsInPagedSpace(heap_->old_pointer_space());
+  ClearMarkbitsInPagedSpace(heap_->old_data_space());
   ClearMarkbitsInPagedSpace(heap_->cell_space());
   ClearMarkbitsInNewSpace(heap_->new_space());
 
@@ -452,9 +460,13 @@ class MarkCompactCollector::SweeperTask : public v8::Task {
 
 
 void MarkCompactCollector::StartSweeperThreads() {
-  DCHECK(free_list_old_space_.get()->IsEmpty());
+  DCHECK(free_list_old_pointer_space_.get()->IsEmpty());
+  DCHECK(free_list_old_data_space_.get()->IsEmpty());
   V8::GetCurrentPlatform()->CallOnBackgroundThread(
-      new SweeperTask(heap(), heap()->old_space()),
+      new SweeperTask(heap(), heap()->old_data_space()),
+      v8::Platform::kShortRunningTask);
+  V8::GetCurrentPlatform()->CallOnBackgroundThread(
+      new SweeperTask(heap(), heap()->old_pointer_space()),
       v8::Platform::kShortRunningTask);
 }
 
@@ -465,16 +477,20 @@ void MarkCompactCollector::EnsureSweepingCompleted() {
   // If sweeping is not completed or not running at all, we try to complete it
   // here.
   if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) {
-    SweepInParallel(heap()->paged_space(OLD_SPACE), 0);
+    SweepInParallel(heap()->paged_space(OLD_DATA_SPACE), 0);
+    SweepInParallel(heap()->paged_space(OLD_POINTER_SPACE), 0);
   }
   // Wait twice for both jobs.
   if (heap()->concurrent_sweeping_enabled()) {
     pending_sweeper_jobs_semaphore_.Wait();
+    pending_sweeper_jobs_semaphore_.Wait();
   }
   ParallelSweepSpacesComplete();
   sweeping_in_progress_ = false;
-  RefillFreeList(heap()->paged_space(OLD_SPACE));
-  heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes();
+  RefillFreeList(heap()->paged_space(OLD_DATA_SPACE));
+  RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE));
+  heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes();
+  heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes();
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && !evacuation()) {
@@ -497,11 +513,13 @@ bool MarkCompactCollector::IsSweepingCompleted() {
 void MarkCompactCollector::RefillFreeList(PagedSpace* space) {
   FreeList* free_list;
 
-  if (space == heap()->old_space()) {
-    free_list = free_list_old_space_.get();
+  if (space == heap()->old_pointer_space()) {
+    free_list = free_list_old_pointer_space_.get();
+  } else if (space == heap()->old_data_space()) {
+    free_list = free_list_old_data_space_.get();
   } else {
     // Any PagedSpace might invoke RefillFreeLists, so we need to make sure
-    // to only refill them for the old space.
+    // to only refill them for old data and pointer spaces.
     return;
   }
 
@@ -555,8 +573,10 @@ const char* AllocationSpaceName(AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
       return "NEW_SPACE";
-    case OLD_SPACE:
-      return "OLD_SPACE";
+    case OLD_POINTER_SPACE:
+      return "OLD_POINTER_SPACE";
+    case OLD_DATA_SPACE:
+      return "OLD_DATA_SPACE";
     case CODE_SPACE:
       return "CODE_SPACE";
     case MAP_SPACE:
@@ -625,7 +645,9 @@ static int FreeListFragmentation(PagedSpace* space, Page* p) {
 
 
 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
-  DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE);
+  DCHECK(space->identity() == OLD_POINTER_SPACE ||
+         space->identity() == OLD_DATA_SPACE ||
+         space->identity() == CODE_SPACE);
 
   static const int kMaxMaxEvacuationCandidates = 1000;
   int number_of_pages = space->CountTotalPages();
@@ -1791,7 +1813,7 @@ static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque,
       int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects);
       grey_objects >>= trailing_zeros;
       offset += trailing_zeros;
-      MarkBit markbit(cell, 1 << offset);
+      MarkBit markbit(cell, 1 << offset, false);
       DCHECK(Marking::IsGrey(markbit));
       Marking::GreyToBlack(markbit);
       Address addr = cell_base + offset * kPointerSize;
@@ -2015,7 +2037,11 @@ void MarkCompactCollector::RefillMarkingDeque() {
   DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_);
   if (marking_deque_.IsFull()) return;
 
-  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_space());
+  DiscoverGreyObjectsInSpace(heap(), &marking_deque_,
+                             heap()->old_pointer_space());
+  if (marking_deque_.IsFull()) return;
+
+  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space());
   if (marking_deque_.IsFull()) return;
 
   DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space());
@@ -2687,7 +2713,7 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
   Address src_addr = src->address();
   DCHECK(heap()->AllowedToBeMigrated(src, dest));
   DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize);
-  if (dest == OLD_SPACE) {
+  if (dest == OLD_POINTER_SPACE) {
     Address src_slot = src_addr;
     Address dst_slot = dst_addr;
     DCHECK(IsAligned(size, kPointerSize));
@@ -2759,7 +2785,7 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
                        SlotsBuffer::IGNORE_OVERFLOW);
     Code::cast(dst)->Relocate(dst_addr - src_addr);
   } else {
-    DCHECK(dest == NEW_SPACE);
+    DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE);
     heap()->MoveBlock(dst_addr, src_addr, size);
   }
   heap()->OnMoveEvent(dst, src, size);
@@ -2881,17 +2907,19 @@ void PointersUpdatingVisitor::CheckLayoutDescriptorAndDie(Heap* heap,
     space_owner_id = 1;
   } else if (heap->new_space()->FromSpaceContains(slot_address)) {
     space_owner_id = 2;
-  } else if (heap->old_space()->ContainsSafe(slot_address)) {
+  } else if (heap->old_pointer_space()->ContainsSafe(slot_address)) {
     space_owner_id = 3;
-  } else if (heap->code_space()->ContainsSafe(slot_address)) {
+  } else if (heap->old_data_space()->ContainsSafe(slot_address)) {
     space_owner_id = 4;
-  } else if (heap->map_space()->ContainsSafe(slot_address)) {
+  } else if (heap->code_space()->ContainsSafe(slot_address)) {
     space_owner_id = 5;
-  } else if (heap->cell_space()->ContainsSafe(slot_address)) {
+  } else if (heap->map_space()->ContainsSafe(slot_address)) {
     space_owner_id = 6;
+  } else if (heap->cell_space()->ContainsSafe(slot_address)) {
+    space_owner_id = 7;
   } else {
     // Lo space or other.
-    space_owner_id = 7;
+    space_owner_id = 8;
   }
   data[index++] = space_owner_id;
   data[index++] = 0x20aaaaaaaaUL;
@@ -2982,12 +3010,14 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
                                             int object_size) {
   DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
 
-  OldSpace* old_space = heap()->old_space();
+  OldSpace* target_space = heap()->TargetSpace(object);
 
+  DCHECK(target_space == heap()->old_pointer_space() ||
+         target_space == heap()->old_data_space());
   HeapObject* target;
-  AllocationResult allocation = old_space->AllocateRaw(object_size);
+  AllocationResult allocation = target_space->AllocateRaw(object_size);
   if (allocation.To(&target)) {
-    MigrateObject(target, object, object_size, old_space->identity());
+    MigrateObject(target, object, object_size, target_space->identity());
     heap()->IncrementPromotedObjectsSize(object_size);
     return true;
   }
@@ -3473,8 +3503,9 @@ static bool IsOnInvalidatedCodeObject(Address addr) {
   // we can safely go to the page from the slot address.
   Page* p = Page::FromAddress(addr);
 
-  // First check owner's identity because old space is swept concurrently or
-  // lazily and might still have non-zero mark-bits on some pages.
+  // First check owner's identity because old pointer and old data spaces
+  // are swept lazily and might still have non-zero mark-bits on some
+  // pages.
   if (p->owner()->identity() != CODE_SPACE) return false;
 
   // In code space only bits on evacuation candidates (but we don't record
@@ -3652,7 +3683,12 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
         p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
 
         switch (space->identity()) {
-          case OLD_SPACE:
+          case OLD_DATA_SPACE:
+            Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
+                  IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
+                                                       &updating_visitor);
+            break;
+          case OLD_POINTER_SPACE:
             Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
                   IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
                                                        &updating_visitor);
@@ -4163,7 +4199,9 @@ int MarkCompactCollector::SweepInParallel(PagedSpace* space,
 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) {
   int max_freed = 0;
   if (page->TryParallelSweeping()) {
-    FreeList* free_list = free_list_old_space_.get();
+    FreeList* free_list = space == heap()->old_pointer_space()
+                              ? free_list_old_pointer_space_.get()
+                              : free_list_old_data_space_.get();
     FreeList private_free_list(space);
     max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
                       IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
@@ -4288,7 +4326,10 @@ void MarkCompactCollector::SweepSpaces() {
   {
     GCTracer::Scope sweep_scope(heap()->tracer(),
                                 GCTracer::Scope::MC_SWEEP_OLDSPACE);
-    { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); }
+    {
+      SweepSpace(heap()->old_pointer_space(), CONCURRENT_SWEEPING);
+      SweepSpace(heap()->old_data_space(), CONCURRENT_SWEEPING);
+    }
     sweeping_in_progress_ = true;
     if (heap()->concurrent_sweeping_enabled()) {
       StartSweeperThreads();
@@ -4350,7 +4391,8 @@ void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) {
 
 
 void MarkCompactCollector::ParallelSweepSpacesComplete() {
-  ParallelSweepSpaceComplete(heap()->old_space());
+  ParallelSweepSpaceComplete(heap()->old_pointer_space());
+  ParallelSweepSpaceComplete(heap()->old_data_space());
 }
 
 
index 3680dcf1c6a5e8b5b498593b334a542a6373a47a..ddb993f5a2f676491e732da2a1a39cae26c48b59 100644 (file)
@@ -603,7 +603,11 @@ class MarkCompactCollector {
     // to other evacuation candidates thus we have to
     // rescan the page after evacuation to discover and update all
     // pointers to evacuated objects.
-    page->SetFlag(Page::RESCAN_ON_EVACUATION);
+    if (page->owner()->identity() == OLD_DATA_SPACE) {
+      evacuation_candidates_.RemoveElement(page);
+    } else {
+      page->SetFlag(Page::RESCAN_ON_EVACUATION);
+    }
   }
 
   void RecordRelocSlot(RelocInfo* rinfo, Object* target);
@@ -903,7 +907,8 @@ class MarkCompactCollector {
   List<Page*> evacuation_candidates_;
   List<Code*> invalidated_code_;
 
-  SmartPointer<FreeList> free_list_old_space_;
+  SmartPointer<FreeList> free_list_old_data_space_;
+  SmartPointer<FreeList> free_list_old_pointer_space_;
 
   friend class Heap;
 };
index 7d73a70d8ee677719d3902ac694e906d3d20d24c..cfa23255c41c10b09de336b5bce3198764bd1186 100644 (file)
@@ -206,8 +206,8 @@ void MemoryChunk::UpdateHighWaterMark(Address mark) {
 
 
 PointerChunkIterator::PointerChunkIterator(Heap* heap)
-    : state_(kOldSpaceState),
-      old_pointer_iterator_(heap->old_space()),
+    : state_(kOldPointerState),
+      old_pointer_iterator_(heap->old_pointer_space()),
       map_iterator_(heap->map_space()),
       lo_iterator_(heap->lo_space()) {}
 
index b5b7d12c946f41ffe8c2dc621abb3d9a3d242094..f3c2eb0abb0134f9f96da51d2d52960c088fcd69 100644 (file)
@@ -41,7 +41,8 @@ HeapObjectIterator::HeapObjectIterator(PagedSpace* space,
 HeapObjectIterator::HeapObjectIterator(Page* page,
                                        HeapObjectCallback size_func) {
   Space* owner = page->owner();
-  DCHECK(owner == page->heap()->old_space() ||
+  DCHECK(owner == page->heap()->old_pointer_space() ||
+         owner == page->heap()->old_data_space() ||
          owner == page->heap()->map_space() ||
          owner == page->heap()->cell_space() ||
          owner == page->heap()->code_space());
@@ -509,6 +510,10 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
     chunk->SetFlag(IS_EXECUTABLE);
   }
 
+  if (owner == heap->old_data_space()) {
+    chunk->SetFlag(CONTAINS_ONLY_DATA);
+  }
+
   return chunk;
 }
 
@@ -920,8 +925,11 @@ void MemoryChunk::IncrementLiveBytesFromMutator(Address address, int by) {
 
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::NEW_SPACE) ==
               ObjectSpace::kObjectSpaceNewSpace);
-STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_SPACE) ==
-              ObjectSpace::kObjectSpaceOldSpace);
+STATIC_ASSERT(static_cast<ObjectSpace>(1
+                                       << AllocationSpace::OLD_POINTER_SPACE) ==
+              ObjectSpace::kObjectSpaceOldPointerSpace);
+STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_DATA_SPACE) ==
+              ObjectSpace::kObjectSpaceOldDataSpace);
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CODE_SPACE) ==
               ObjectSpace::kObjectSpaceCodeSpace);
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CELL_SPACE) ==
@@ -1109,7 +1117,11 @@ void PagedSpace::ReleasePage(Page* page) {
     page->Unlink();
   }
 
-  heap()->QueueMemoryChunkForFree(page);
+  if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) {
+    heap()->isolate()->memory_allocator()->Free(page);
+  } else {
+    heap()->QueueMemoryChunkForFree(page);
+  }
 
   DCHECK(Capacity() > 0);
   accounting_stats_.ShrinkSpace(AreaSize());
index 54a9168e06fbc7a9fda7707e7fd5a8e01e3b7081..08330be7aac48241c91e0a6bd4aa0be8434e2d1b 100644 (file)
@@ -43,11 +43,11 @@ class Isolate;
 //
 // During scavenges and mark-sweep collections we sometimes (after a store
 // buffer overflow) iterate intergenerational pointers without decoding heap
-// object maps so if the page belongs to old space or large object space
-// it is essential to guarantee that the page does not contain any
+// object maps so if the page belongs to old pointer space or large object
+// space it is essential to guarantee that the page does not contain any
 // garbage pointers to new space: every pointer aligned word which satisfies
 // the Heap::InNewSpace() predicate must be a pointer to a live heap object in
-// new space. Thus objects in old space and large object spaces should have a
+// new space. Thus objects in old pointer and large object spaces should have a
 // special layout (e.g. no bare integer fields). This requirement does not
 // apply to map space which is iterated in a special fashion. However we still
 // require pointer fields of dead maps to be cleaned.
@@ -102,7 +102,8 @@ class MarkBit {
  public:
   typedef uint32_t CellType;
 
-  inline MarkBit(CellType* cell, CellType mask) : cell_(cell), mask_(mask) {}
+  inline MarkBit(CellType* cell, CellType mask, bool data_only)
+      : cell_(cell), mask_(mask), data_only_(data_only) {}
 
   inline CellType* cell() { return cell_; }
   inline CellType mask() { return mask_; }
@@ -117,19 +118,25 @@ class MarkBit {
   inline bool Get() { return (*cell_ & mask_) != 0; }
   inline void Clear() { *cell_ &= ~mask_; }
 
+  inline bool data_only() { return data_only_; }
 
   inline MarkBit Next() {
     CellType new_mask = mask_ << 1;
     if (new_mask == 0) {
-      return MarkBit(cell_ + 1, 1);
+      return MarkBit(cell_ + 1, 1, data_only_);
     } else {
-      return MarkBit(cell_, new_mask);
+      return MarkBit(cell_, new_mask, data_only_);
     }
   }
 
  private:
   CellType* cell_;
   CellType mask_;
+  // This boolean indicates that the object is in a data-only space with no
+  // pointers.  This enables some optimizations when marking.
+  // It is expected that this field is inlined and turned into control flow
+  // at the place where the MarkBit object is created.
+  bool data_only_;
 };
 
 
@@ -180,10 +187,10 @@ class Bitmap {
     return reinterpret_cast<Bitmap*>(addr);
   }
 
-  inline MarkBit MarkBitFromIndex(uint32_t index) {
+  inline MarkBit MarkBitFromIndex(uint32_t index, bool data_only = false) {
     MarkBit::CellType mask = 1 << (index & kBitIndexMask);
     MarkBit::CellType* cell = this->cells() + (index >> kBitsPerCellLog2);
-    return MarkBit(cell, mask);
+    return MarkBit(cell, mask, data_only);
   }
 
   static inline void Clear(MemoryChunk* chunk);
@@ -363,6 +370,7 @@ class MemoryChunk {
     IN_FROM_SPACE,  // Mutually exclusive with IN_TO_SPACE.
     IN_TO_SPACE,    // All pages in new space has one of these two set.
     NEW_SPACE_BELOW_AGE_MARK,
+    CONTAINS_ONLY_DATA,
     EVACUATION_CANDIDATE,
     RESCAN_ON_EVACUATION,
     NEVER_EVACUATE,  // May contain immortal immutables.
@@ -557,6 +565,8 @@ class MemoryChunk {
     return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
   }
 
+  bool ContainsOnlyData() { return IsFlagSet(CONTAINS_ONLY_DATA); }
+
   bool InNewSpace() {
     return (flags_ & ((1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE))) != 0;
   }
@@ -2586,7 +2596,7 @@ class NewSpace : public Space {
 
 
 // -----------------------------------------------------------------------------
-// Old object space (includes the old space of objects and code space)
+// Old object space (excluding map objects)
 
 class OldSpace : public PagedSpace {
  public:
@@ -2795,7 +2805,7 @@ class PointerChunkIterator BASE_EMBEDDED {
   // Return NULL when the iterator is done.
   MemoryChunk* next() {
     switch (state_) {
-      case kOldSpaceState: {
+      case kOldPointerState: {
         if (old_pointer_iterator_.has_next()) {
           return old_pointer_iterator_.next();
         }
@@ -2834,7 +2844,7 @@ class PointerChunkIterator BASE_EMBEDDED {
 
 
  private:
-  enum State { kOldSpaceState, kMapState, kLargeObjectState, kFinishedState };
+  enum State { kOldPointerState, kMapState, kLargeObjectState, kFinishedState };
   State state_;
   PageIterator old_pointer_iterator_;
   PageIterator map_iterator_;
index 806680e145d43ac066d6d92e59a757d4ff957c0c..ccbe339534ddd97fcaba71312570a825d1dce84d 100644 (file)
@@ -18,6 +18,7 @@ Address StoreBuffer::TopAddress() {
 void StoreBuffer::Mark(Address addr) {
   DCHECK(!heap_->cell_space()->Contains(addr));
   DCHECK(!heap_->code_space()->Contains(addr));
+  DCHECK(!heap_->old_data_space()->Contains(addr));
   Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
   *top++ = addr;
   heap_->public_set_store_buffer_top(top);
@@ -34,6 +35,7 @@ void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) {
   if (store_buffer_rebuilding_enabled_) {
     SLOW_DCHECK(!heap_->cell_space()->Contains(addr) &&
                 !heap_->code_space()->Contains(addr) &&
+                !heap_->old_data_space()->Contains(addr) &&
                 !heap_->new_space()->Contains(addr));
     Address* top = old_top_;
     *top++ = addr;
index 3ede0dfaf145c4e5484d69cd7628df6d25072b90..dec11daa90674b922d1a5510abd637323c08d654 100644 (file)
@@ -464,7 +464,7 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
                 heap_->mark_compact_collector()->EnsureSweepingCompleted();
               }
             }
-            CHECK(page->owner() == heap_->old_space());
+            CHECK(page->owner() == heap_->old_pointer_space());
             HeapObjectIterator iterator(page, NULL);
             for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
                  heap_object = iterator.Next()) {
@@ -534,6 +534,7 @@ void StoreBuffer::Compact() {
   for (Address* current = start_; current < top; current++) {
     DCHECK(!heap_->cell_space()->Contains(*current));
     DCHECK(!heap_->code_space()->Contains(*current));
+    DCHECK(!heap_->old_data_space()->Contains(*current));
     uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current);
     // Shift out the last bits including any tags.
     int_addr >>= kPointerSizeLog2;
index a3436ed94d771602947fade2df9eacdca9237e82..245c7c040369af136046ab87e44c2ac748fe1c86 100644 (file)
@@ -3729,12 +3729,8 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
     return false;
   }
 
-
-  if (!IsFoldable(dominator_allocate)) {
-    if (FLAG_trace_allocation_folding) {
-      PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
-             Mnemonic(), dominator->id(), dominator->Mnemonic());
-    }
+  dominator_allocate = GetFoldableDominator(dominator_allocate);
+  if (dominator_allocate == NULL) {
     return false;
   }
 
@@ -3766,7 +3762,10 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
 
   DCHECK(
       (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
-      (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));
+      (IsOldDataSpaceAllocation() &&
+       dominator_allocate->IsOldDataSpaceAllocation()) ||
+      (IsOldPointerSpaceAllocation() &&
+       dominator_allocate->IsOldPointerSpaceAllocation()));
 
   // First update the size of the dominator allocate instruction.
   dominator_size = dominator_allocate->size();
@@ -3857,6 +3856,70 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
 }
 
 
+HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
+  if (!IsFoldable(dominator)) {
+    // We cannot hoist old space allocations over new space allocations.
+    if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
+      if (FLAG_trace_allocation_folding) {
+        PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n", id(),
+               Mnemonic(), dominator->id(), dominator->Mnemonic());
+      }
+      return NULL;
+    }
+
+    HAllocate* dominator_dominator = dominator->dominating_allocate_;
+
+    // We can hoist old data space allocations over an old pointer space
+    // allocation and vice versa. For that we have to check the dominator
+    // of the dominator allocate instruction.
+    if (dominator_dominator == NULL) {
+      dominating_allocate_ = dominator;
+      if (FLAG_trace_allocation_folding) {
+        PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
+               Mnemonic(), dominator->id(), dominator->Mnemonic());
+      }
+      return NULL;
+    }
+
+    // We can just fold old space allocations that are in the same basic block,
+    // since it is not guaranteed that we fill up the whole allocated old
+    // space memory.
+    // TODO(hpayer): Remove this limitation and add filler maps for each
+    // allocation as soon as we have store elimination.
+    if (block()->block_id() != dominator_dominator->block()->block_id()) {
+      if (FLAG_trace_allocation_folding) {
+        PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
+               id(), Mnemonic(), dominator_dominator->id(),
+               dominator_dominator->Mnemonic());
+      }
+      return NULL;
+    }
+
+    DCHECK((IsOldDataSpaceAllocation() &&
+            dominator_dominator->IsOldDataSpaceAllocation()) ||
+           (IsOldPointerSpaceAllocation() &&
+            dominator_dominator->IsOldPointerSpaceAllocation()));
+
+    int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
+    HStoreNamedField* dominator_free_space_size =
+        dominator->filler_free_space_size_;
+    if (dominator_free_space_size != NULL) {
+      // We already hoisted one old space allocation, i.e., we already installed
+      // a filler map. Hence, we just have to update the free space size.
+      dominator->UpdateFreeSpaceFiller(current_size);
+    } else {
+      // This is the first old space allocation that gets hoisted. We have to
+      // install a filler map since the following allocation may cause a GC.
+      dominator->CreateFreeSpaceFiller(current_size);
+    }
+
+    // We can hoist the old space allocation over the actual dominator.
+    return dominator_dominator;
+  }
+  return dominator;
+}
+
+
 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
   DCHECK(filler_free_space_size_ != NULL);
   Zone* zone = block()->zone();
@@ -3924,7 +3987,8 @@ void HAllocate::ClearNextMapWord(int offset) {
 std::ostream& HAllocate::PrintDataTo(std::ostream& os) const {  // NOLINT
   os << NameOf(size()) << " (";
   if (IsNewSpaceAllocation()) os << "N";
-  if (IsOldSpaceAllocation()) os << "P";
+  if (IsOldPointerSpaceAllocation()) os << "P";
+  if (IsOldDataSpaceAllocation()) os << "D";
   if (MustAllocateDoubleAligned()) os << "A";
   if (MustPrefillWithFiller()) os << "F";
   return os << ")";
index 8219f3ed2200af69a9e1bae3e97c179300f60b4f..5cf575c9e5efb91b38daab91bcf2368d856b7069 100644 (file)
@@ -5509,8 +5509,12 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
     return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
   }
 
-  bool IsOldSpaceAllocation() const {
-    return (flags_ & ALLOCATE_IN_OLD_SPACE) != 0;
+  bool IsOldDataSpaceAllocation() const {
+    return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
+  }
+
+  bool IsOldPointerSpaceAllocation() const {
+    return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
   }
 
   bool MustAllocateDoubleAligned() const {
@@ -5543,7 +5547,8 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
  private:
   enum Flags {
     ALLOCATE_IN_NEW_SPACE = 1 << 0,
-    ALLOCATE_IN_OLD_SPACE = 1 << 2,
+    ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
+    ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
     ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
     PREFILL_WITH_FILLER = 1 << 4,
     CLEAR_NEXT_MAP_WORD = 1 << 5
@@ -5579,8 +5584,11 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
 
   static Flags ComputeFlags(PretenureFlag pretenure_flag,
                             InstanceType instance_type) {
-    Flags flags = pretenure_flag == TENURED ? ALLOCATE_IN_OLD_SPACE
-                                            : ALLOCATE_IN_NEW_SPACE;
+    Flags flags = pretenure_flag == TENURED
+                      ? (Heap::TargetSpaceId(instance_type) == OLD_POINTER_SPACE
+                             ? ALLOCATE_IN_OLD_POINTER_SPACE
+                             : ALLOCATE_IN_OLD_DATA_SPACE)
+                      : ALLOCATE_IN_NEW_SPACE;
     if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
       flags = static_cast<Flags>(flags | ALLOCATE_DOUBLE_ALIGNED);
     }
@@ -5622,7 +5630,10 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
 
   bool IsFoldable(HAllocate* allocate) {
     return (IsNewSpaceAllocation() && allocate->IsNewSpaceAllocation()) ||
-           (IsOldSpaceAllocation() && allocate->IsOldSpaceAllocation());
+           (IsOldDataSpaceAllocation() &&
+            allocate->IsOldDataSpaceAllocation()) ||
+           (IsOldPointerSpaceAllocation() &&
+            allocate->IsOldPointerSpaceAllocation());
   }
 
   void ClearNextMapWord(int offset);
index 0f11f9e717757dc9444c5e17fae426947dd74254..bc0859aef3d6afef9bc4e769596070040f55b3ca 100644 (file)
@@ -5220,9 +5220,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5285,9 +5289,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 13d9841bdc139c348a1ec0d0a7749a8051a21a75..52e674f9dc035a991b0356ab72148c2a60447f56 100644 (file)
@@ -1324,11 +1324,12 @@ void MacroAssembler::Allocate(int object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1400,11 +1401,12 @@ void MacroAssembler::Allocate(int header_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1474,11 +1476,12 @@ void MacroAssembler::Allocate(Register object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
index 6a598b1e021ccdf1b575b16ec9f6ef3ffcc9461e..8c135f2b6021c9b71e5dbf3eea44138fedb7c7a3 100644 (file)
@@ -601,7 +601,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old space. If the given space
+  // Allocate an object in new space or old pointer space. If the given space
   // is exhausted control continues at the gc_required label. The allocated
   // object is returned in result and end of the new object is returned in
   // result_end. The register scratch can be passed as no_reg in which case
index b59fd3b94d43fa54f2bfe6fe96dc9fc8356a3ad4..166ac428b57767f82f452a2870f93c8c34fac0a2 100644 (file)
@@ -27,8 +27,10 @@ enum AllocationFlags {
   SIZE_IN_WORDS = 1 << 2,
   // Align the allocation to a multiple of kDoubleSize
   DOUBLE_ALIGNMENT = 1 << 3,
-  // Directly allocate in old space
-  PRETENURE = 1 << 4,
+  // Directly allocate in old pointer space
+  PRETENURE_OLD_POINTER_SPACE = 1 << 4,
+  // Directly allocate in old data space
+  PRETENURE_OLD_DATA_SPACE = 1 << 5
 };
 
 
@@ -249,8 +251,11 @@ class AllocationUtils {
  public:
   static ExternalReference GetAllocationTopReference(
       Isolate* isolate, AllocationFlags flags) {
-    if ((flags & PRETENURE) != 0) {
-      return ExternalReference::old_space_allocation_top_address(isolate);
+    if ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) {
+      return ExternalReference::old_pointer_space_allocation_top_address(
+          isolate);
+    } else if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      return ExternalReference::old_data_space_allocation_top_address(isolate);
     }
     return ExternalReference::new_space_allocation_top_address(isolate);
   }
@@ -258,8 +263,12 @@ class AllocationUtils {
 
   static ExternalReference GetAllocationLimitReference(
       Isolate* isolate, AllocationFlags flags) {
-    if ((flags & PRETENURE) != 0) {
-      return ExternalReference::old_space_allocation_limit_address(isolate);
+    if ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) {
+      return ExternalReference::old_pointer_space_allocation_limit_address(
+          isolate);
+    } else if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+      return ExternalReference::old_data_space_allocation_limit_address(
+          isolate);
     }
     return ExternalReference::new_space_allocation_limit_address(isolate);
   }
index 52ab0bdc687a1f817afe9faac186c459536b195b..5feb681c18f72f61568330050a015324498e6124 100644 (file)
@@ -5387,9 +5387,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
@@ -5451,9 +5455,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index ef87a539d0b5fc19aabf091cbcff945bbb766f15..a6e394ca0e56616c55db248a04139411c484eeb4 100644 (file)
@@ -3346,11 +3346,12 @@ void MacroAssembler::Allocate(int object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       Branch(gc_required, Ugreater_equal, result, Operand(t9));
     }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
@@ -3432,11 +3433,12 @@ void MacroAssembler::Allocate(Register object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       Branch(gc_required, Ugreater_equal, result, Operand(t9));
     }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
index 5a0653538a8eff2f85bba8ed4a5da1622d4df0dd..44f6c4b58adad2ba60b07efa90f2b48af2757336 100644 (file)
@@ -488,7 +488,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support.
 
-  // Allocate an object in new space or old space. The object_size is
+  // Allocate an object in new space or old pointer space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index f5eb8f25d096c7df9217a500560932226cf73521..94eab87ea100b21fca8d733003b2625c6fb81fa5 100644 (file)
@@ -5424,9 +5424,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
@@ -5489,9 +5493,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index c001ac0a3d28b5247da18566e7abd96f6f57d146..3c1c2b0a50346e5071af8723cbdd54285385ce65 100644 (file)
@@ -509,7 +509,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support.
 
-  // Allocate an object in new space or old space. The object_size is
+  // Allocate an object in new space or old pointer space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index 706c06496fef1313552ecea4387cf3970db95eea..cf6d1613e6545724162d07b7fa22e4ab759f58c6 100644 (file)
@@ -763,7 +763,7 @@ enum InstanceType {
   // Boundaries for testing for a fixed typed array.
   FIRST_FIXED_TYPED_ARRAY_TYPE = FIXED_INT8_ARRAY_TYPE,
   LAST_FIXED_TYPED_ARRAY_TYPE = FIXED_UINT8_CLAMPED_ARRAY_TYPE,
-  // Boundary for promotion to old space.
+  // Boundary for promotion to old data space/old pointer space.
   LAST_DATA_TYPE = FILLER_TYPE,
   // Boundary for objects represented as JSReceiver (i.e. JSObject or JSProxy).
   // Note that there is no range for JSObject or JSProxy, since their subtypes
@@ -10266,6 +10266,8 @@ class JSDataView: public JSArrayBufferView {
 
 
 // Foreign describes objects pointing from JavaScript to C structures.
+// Since they cannot contain references to JS HeapObjects they can be
+// placed in old_data_space.
 class Foreign: public HeapObject {
  public:
   // [address]: field containing the address.
index 3653fd2af677f1169c04b95132b3c1b168d55b35..9af13ae23525989c6194b0905028e8aa135d6656 100644 (file)
@@ -5687,9 +5687,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5755,9 +5759,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 31b91654c4168ab23678fa93d2df9c57125fda29..54e40f767129e937b20a0423e65f1af4984eee39 100644 (file)
@@ -1384,7 +1384,7 @@ void MacroAssembler::Allocate(int object_size, Register result,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_SPACE) == 0);
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
 #if V8_TARGET_ARCH_PPC64
     STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 #else
@@ -1392,7 +1392,7 @@ void MacroAssembler::Allocate(int object_size, Register result,
     andi(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     beq(&aligned, cr0);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmpl(result, ip);
       bge(gc_required);
     }
@@ -1483,7 +1483,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_SPACE) == 0);
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
 #if V8_TARGET_ARCH_PPC64
     STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 #else
@@ -1491,7 +1491,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
     andi(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     beq(&aligned, cr0);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmpl(result, ip);
       bge(gc_required);
     }
index 6fa19449238e7fce3971fea3ca74b479ac6c0236..fc569706ecb2ddf486da1dbff10556fdd2389b22 100644 (file)
@@ -603,7 +603,7 @@ class MacroAssembler : public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old space. The object_size is
+  // Allocate an object in new space or old pointer space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index 1d33f4f0623e3a38f2f46247b06a6e64eb93f60b..848055e76a962d23575e8e995eb0e7c7e45bdbfe 100644 (file)
@@ -105,10 +105,18 @@ ExternalReferenceTable::ExternalReferenceTable(Isolate* isolate) {
   Add(ExternalReference::get_make_code_young_function(isolate).address(),
       "Code::MakeCodeYoung");
   Add(ExternalReference::cpu_features().address(), "cpu_features");
-  Add(ExternalReference::old_space_allocation_top_address(isolate).address(),
-      "Heap::OldSpaceAllocationTopAddress");
-  Add(ExternalReference::old_space_allocation_limit_address(isolate).address(),
-      "Heap::OldSpaceAllocationLimitAddress");
+  Add(ExternalReference::old_pointer_space_allocation_top_address(isolate)
+          .address(),
+      "Heap::OldPointerSpaceAllocationTopAddress");
+  Add(ExternalReference::old_pointer_space_allocation_limit_address(isolate)
+          .address(),
+      "Heap::OldPointerSpaceAllocationLimitAddress");
+  Add(ExternalReference::old_data_space_allocation_top_address(isolate)
+          .address(),
+      "Heap::OldDataSpaceAllocationTopAddress");
+  Add(ExternalReference::old_data_space_allocation_limit_address(isolate)
+          .address(),
+      "Heap::OldDataSpaceAllocationLimitAddress");
   Add(ExternalReference::allocation_sites_list_address(isolate).address(),
       "Heap::allocation_sites_list_address()");
   Add(ExternalReference::address_of_uint32_bias().address(), "uint32_bias");
@@ -834,7 +842,8 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
   // but that may change.
   bool write_barrier_needed =
       (current_object_address != NULL && source_space != NEW_SPACE &&
-       source_space != CELL_SPACE && source_space != CODE_SPACE);
+       source_space != CELL_SPACE && source_space != CODE_SPACE &&
+       source_space != OLD_DATA_SPACE);
   while (current < limit) {
     byte data = source_.Get();
     switch (data) {
@@ -937,14 +946,15 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
 // This generates a case and a body for the new space (which has to do extra
 // write barrier handling) and handles the other spaces with fall-through cases
 // and one body.
-#define ALL_SPACES(where, how, within)                    \
-  CASE_STATEMENT(where, how, within, NEW_SPACE)           \
-  CASE_BODY(where, how, within, NEW_SPACE)                \
-  CASE_STATEMENT(where, how, within, OLD_SPACE)           \
-  CASE_STATEMENT(where, how, within, CODE_SPACE)          \
-  CASE_STATEMENT(where, how, within, MAP_SPACE)           \
-  CASE_STATEMENT(where, how, within, CELL_SPACE)          \
-  CASE_STATEMENT(where, how, within, LO_SPACE)            \
+#define ALL_SPACES(where, how, within)                  \
+  CASE_STATEMENT(where, how, within, NEW_SPACE)         \
+  CASE_BODY(where, how, within, NEW_SPACE)              \
+  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE)    \
+  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE) \
+  CASE_STATEMENT(where, how, within, CODE_SPACE)        \
+  CASE_STATEMENT(where, how, within, MAP_SPACE)         \
+  CASE_STATEMENT(where, how, within, CELL_SPACE)        \
+  CASE_STATEMENT(where, how, within, LO_SPACE)          \
   CASE_BODY(where, how, within, kAnyOldSpace)
 
 #define FOUR_CASES(byte_code)             \
@@ -1659,7 +1669,7 @@ void Serializer::ObjectSerializer::SerializeExternalString() {
 
   AllocationSpace space = (allocation_size > Page::kMaxRegularHeapObjectSize)
                               ? LO_SPACE
-                              : OLD_SPACE;
+                              : OLD_DATA_SPACE;
   SerializePrologue(space, allocation_size, map);
 
   // Output the rest of the imaginary string.
index d451861edcbd31c2dad96de95d2cbface57be550..98a76ad58e6f2c4440684e0679b3292e910f4437 100644 (file)
@@ -5380,9 +5380,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5438,9 +5442,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   }
 
   int flags = 0;
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 8b04144366413c50b111dc37a471da75df48ac86..293b064a448ed7b96cfc87a0999260d9b598139b 100644 (file)
@@ -4119,6 +4119,7 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
     // Align the next allocation. Storing the filler map without checking top
     // is safe in new-space because the limit of the heap is aligned there.
     DCHECK(kPointerSize * 2 == kDoubleSize);
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     // Make sure scratch is not clobbered by this function as it might be
     // used in UpdateAllocationTopHelper later.
@@ -4126,7 +4127,7 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
     Label aligned;
     testl(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       ExternalReference allocation_limit =
           AllocationUtils::GetAllocationLimitReference(isolate(), flags);
       cmpp(result, ExternalOperand(allocation_limit));
index 7e1c1c36c24d4b65cacec1a667efadc3217f554a..1eb7f6effefc7c32bbc6ae96ad6f25bb5e80c68a 100644 (file)
@@ -1137,7 +1137,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old space. If the given space
+  // Allocate an object in new space or old pointer space. If the given space
   // is exhausted control continues at the gc_required label. The allocated
   // object is returned in result and end of the new object is returned in
   // result_end. The register scratch can be passed as no_reg in which case
index 04b9e2abd3027e377724c9d86e53c5008281ee46..4467d2e6ce0591a384036ff35ead45731ea88bb4 100644 (file)
@@ -5855,9 +5855,13 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5920,9 +5924,13 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
+    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
+  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 6b82f3d2f54bcfc5a1bb477c235c78e51bfaa08c..9e449f9139cd454e8102aae7c4c1f22834c7108f 100644 (file)
@@ -1298,11 +1298,12 @@ void MacroAssembler::Allocate(int object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1374,11 +1375,12 @@ void MacroAssembler::Allocate(int header_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1448,11 +1450,12 @@ void MacroAssembler::Allocate(Register object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
+    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE) != 0) {
+    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
index 741140555615b321977c9674aa5e71cdb3b4019f..061709be9c050efb6ff5578ad47b22bc4d58b56b 100644 (file)
@@ -566,7 +566,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old space. If the given space
+  // Allocate an object in new space or old pointer space. If the given space
   // is exhausted control continues at the gc_required label. The allocated
   // object is returned in result and end of the new object is returned in
   // result_end. The register scratch can be passed as no_reg in which case
index 66f69f8b574d68ab57147c2f739bc8cb64aef851..0ec99346445d3612f9369675db1ea00744c52985 100644 (file)
@@ -55,18 +55,18 @@ static AllocationResult AllocateAfterFailures() {
   heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked();
 
   // Old data space.
-  SimulateFullSpace(heap->old_space());
+  SimulateFullSpace(heap->old_data_space());
   heap->AllocateByteArray(100, TENURED).ToObjectChecked();
 
   // Old pointer space.
-  SimulateFullSpace(heap->old_space());
+  SimulateFullSpace(heap->old_pointer_space());
   heap->AllocateFixedArray(10000, TENURED).ToObjectChecked();
 
   // Large object space.
   static const int kLargeObjectSpaceFillerLength = 3 * (Page::kPageSize / 10);
   static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
       kLargeObjectSpaceFillerLength);
-  DCHECK(kLargeObjectSpaceFillerSize > heap->old_space()->AreaSize());
+  DCHECK(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize());
   while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
     heap->AllocateFixedArray(
         kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked();
index e9d1c0423ebca912992592dfd96a5fd48912e786..dcb11be19d1c84a377b2381a7d3929aff4bbe3a1 100644 (file)
@@ -608,7 +608,7 @@ TEST(MakingExternalUnalignedOneByteString) {
       "slice('abcdefghijklmnopqrstuvwxyz');"));
 
   // Trigger GCs so that the newly allocated string moves to old gen.
-  SimulateFullSpace(CcTest::heap()->old_space());
+  SimulateFullSpace(CcTest::heap()->old_pointer_space());
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
@@ -728,10 +728,11 @@ THREADED_TEST(ScavengeExternalString) {
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     in_new_space = CcTest::heap()->InNewSpace(*istring);
-    CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
+    CHECK(in_new_space || CcTest::heap()->old_data_space()->Contains(*istring));
     CHECK_EQ(0, dispose_count);
   }
-  CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
+  CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE
+                                              : i::OLD_DATA_SPACE);
   CHECK_EQ(1, dispose_count);
 }
 
@@ -750,10 +751,11 @@ THREADED_TEST(ScavengeExternalOneByteString) {
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     in_new_space = CcTest::heap()->InNewSpace(*istring);
-    CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
+    CHECK(in_new_space || CcTest::heap()->old_data_space()->Contains(*istring));
     CHECK_EQ(0, dispose_count);
   }
-  CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
+  CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE
+                                              : i::OLD_DATA_SPACE);
   CHECK_EQ(1, dispose_count);
 }
 
@@ -15916,7 +15918,8 @@ TEST(ExternalizeOldSpaceTwoByteCons) {
       CompileRun("'Romeo Montague ' + 'Juliet Capulet'")->ToString(isolate);
   CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
   CcTest::heap()->CollectAllAvailableGarbage();
-  CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));
+  CHECK(CcTest::heap()->old_pointer_space()->Contains(
+      *v8::Utils::OpenHandle(*cons)));
 
   TestResource* resource = new TestResource(
       AsciiToTwoByteString("Romeo Montague Juliet Capulet"));
@@ -15938,7 +15941,8 @@ TEST(ExternalizeOldSpaceOneByteCons) {
       CompileRun("'Romeo Montague ' + 'Juliet Capulet'")->ToString(isolate);
   CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
   CcTest::heap()->CollectAllAvailableGarbage();
-  CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));
+  CHECK(CcTest::heap()->old_pointer_space()->Contains(
+      *v8::Utils::OpenHandle(*cons)));
 
   TestOneByteResource* resource =
       new TestOneByteResource(i::StrDup("Romeo Montague Juliet Capulet"));
index 1f10a41a52530af3e744b7356b664348dec3465c..8b9b2cf73a6cb1d640871efcbdc51beeca40af83 100644 (file)
@@ -281,16 +281,16 @@ TEST(ConstantPoolCompacting) {
 
   // Start a second old-space page so that the heap pointer added to the
   // constant pool array ends up on the an evacuation candidate page.
-  Page* first_page = heap->old_space()->anchor()->next_page();
+  Page* first_page = heap->old_data_space()->anchor()->next_page();
   {
     HandleScope scope(isolate);
     int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
     Handle<HeapObject> temp =
         factory->NewFixedDoubleArray(dummy_array_size / kDoubleSize, TENURED);
-    CHECK(heap->InOldSpace(temp->address()));
+    CHECK(heap->InOldDataSpace(temp->address()));
     Handle<HeapObject> heap_ptr =
         factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
-    CHECK(heap->InOldSpace(heap_ptr->address()));
+    CHECK(heap->InOldDataSpace(heap_ptr->address()));
     CHECK(!first_page->Contains(heap_ptr->address()));
     array->set(0, *heap_ptr);
     array->set(1, *heap_ptr);
index df197ff9457eca871eab7c30db77e9b4eca1a19c..14e5d69d43714bba76b9e66a6c96026590f60b88 100644 (file)
@@ -182,7 +182,7 @@ static void TestHashSetCausesGC(Handle<HashSet> table) {
   // Simulate a full heap so that generating an identity hash code
   // in subsequent calls will request GC.
   SimulateFullSpace(CcTest::heap()->new_space());
-  SimulateFullSpace(CcTest::heap()->old_space());
+  SimulateFullSpace(CcTest::heap()->old_pointer_space());
 
   // Calling Contains() should not cause GC ever.
   int gc_count = isolate->heap()->gc_count();
@@ -228,7 +228,7 @@ static void TestHashMapCausesGC(Handle<HashMap> table) {
   // Simulate a full heap so that generating an identity hash code
   // in subsequent calls will request GC.
   SimulateFullSpace(CcTest::heap()->new_space());
-  SimulateFullSpace(CcTest::heap()->old_space());
+  SimulateFullSpace(CcTest::heap()->old_pointer_space());
 
   // Calling Lookup() should not cause GC ever.
   CHECK(table->Lookup(key)->IsTheHole());
index ca3ec7b07ab416f3385702ed480b4f08add3de0f..cd012f6bfa91b942771008077c02715ef8535ea7 100644 (file)
@@ -441,7 +441,7 @@ TEST(WeakGlobalHandlesMark) {
   }
 
   // Make sure the objects are promoted.
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
   heap->CollectGarbage(NEW_SPACE);
   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
 
@@ -494,7 +494,7 @@ TEST(DeleteWeakGlobalHandle) {
   CHECK(!WeakPointerCleared);
 
   // Mark-compact treats weak reference properly.
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
 
   CHECK(WeakPointerCleared);
 }
@@ -893,7 +893,7 @@ TEST(Iteration) {
   Handle<Object> objs[objs_count];
   int next_objs_index = 0;
 
-  // Allocate a JS array to OLD_SPACE and NEW_SPACE
+  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
   objs[next_objs_index++] = factory->NewJSArray(10);
   objs[next_objs_index++] = factory->NewJSArray(10,
                                                 FAST_HOLEY_ELEMENTS,
@@ -1002,15 +1002,15 @@ TEST(Regression39128) {
 
   // Step 4: clone jsobject, but force always allocate first to create a clone
   // in old pointer space.
-  Address old_space_top = heap->old_space()->top();
+  Address old_pointer_space_top = heap->old_pointer_space()->top();
   AlwaysAllocateScope aa_scope(isolate);
   Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
   JSObject* clone = JSObject::cast(clone_obj);
-  if (clone->address() != old_space_top) {
+  if (clone->address() != old_pointer_space_top) {
     // Alas, got allocated from free list, we cannot do checks.
     return;
   }
-  CHECK(heap->old_space()->Contains(clone->address()));
+  CHECK(heap->old_pointer_space()->Contains(clone->address()));
 }
 
 
@@ -2138,7 +2138,7 @@ TEST(InstanceOfStubWriteBarrier) {
   }
 
   CcTest::heap()->incremental_marking()->set_should_hurry(true);
-  CcTest::heap()->CollectGarbage(OLD_SPACE);
+  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
 }
 
 
@@ -2189,7 +2189,7 @@ TEST(PrototypeTransitionClearing) {
 
   // Make sure next prototype is placed on an old-space evacuation candidate.
   Handle<JSObject> prototype;
-  PagedSpace* space = CcTest::heap()->old_space();
+  PagedSpace* space = CcTest::heap()->old_pointer_space();
   {
     AlwaysAllocateScope always_allocate(isolate);
     SimulateFullSpace(space);
@@ -2309,7 +2309,7 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
 TEST(IdleNotificationFinishMarking) {
   i::FLAG_allow_natives_syntax = true;
   CcTest::InitializeVM();
-  SimulateFullSpace(CcTest::heap()->old_space());
+  SimulateFullSpace(CcTest::heap()->old_pointer_space());
   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
   marking->Abort();
   marking->Start();
@@ -2422,11 +2422,11 @@ TEST(OptimizedPretenuringAllocationFolding) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldSpace(*o));
-  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
-  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
-  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
-  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
+  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
+  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
 }
 
 
@@ -2465,8 +2465,8 @@ TEST(OptimizedPretenuringObjectArrayLiterals) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldSpace(o->elements()));
-  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
 }
 
 
@@ -2506,25 +2506,27 @@ TEST(OptimizedPretenuringMixedInObjectProperties) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
   FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
   FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
-  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
+  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
   if (!o->IsUnboxedDoubleField(idx2)) {
-    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
+    CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
   } else {
     CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
   }
 
   JSObject* inner_object =
       reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
-  CHECK(CcTest::heap()->InOldSpace(inner_object));
+  CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
   if (!inner_object->IsUnboxedDoubleField(idx1)) {
-    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
+    CHECK(
+        CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
   } else {
     CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
   }
-  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
+  CHECK(
+      CcTest::heap()->InOldPointerSpace(inner_object->RawFastPropertyAt(idx2)));
 }
 
 
@@ -2563,8 +2565,8 @@ TEST(OptimizedPretenuringDoubleArrayProperties) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldSpace(*o));
-  CHECK(CcTest::heap()->InOldSpace(o->properties()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
 }
 
 
@@ -2603,8 +2605,8 @@ TEST(OptimizedPretenuringdoubleArrayLiterals) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldSpace(o->elements()));
-  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
 }
 
 
@@ -2648,11 +2650,11 @@ TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldSpace(*o));
-  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
-  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
-  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
-  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
+  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
+  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
 }
 
 
@@ -2697,11 +2699,11 @@ TEST(OptimizedPretenuringNestedObjectLiterals) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldSpace(*o));
-  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
-  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
-  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
-  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
+  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
+  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
 }
 
 
@@ -2748,11 +2750,11 @@ TEST(OptimizedPretenuringNestedDoubleLiterals) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldSpace(*o));
-  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
-  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
-  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
-  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
+  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
+  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
+  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
 }
 
 
@@ -2805,7 +2807,7 @@ TEST(OptimizedPretenuringConstructorCalls) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
 }
 
 
@@ -2852,7 +2854,7 @@ TEST(OptimizedPretenuringCallNew) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldPointerSpace(*o));
 }
 
 
@@ -3037,7 +3039,7 @@ TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
 
   root = GetByName("root");
   AddPropertyTo(0, root, "prop9");
-  CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
+  CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
 
   // Count number of live transitions after marking.  Note that one transition
   // is left, because 'o' still holds an instance of one transition target.
@@ -3175,27 +3177,27 @@ TEST(ReleaseOverReservedPages) {
   static const int number_of_test_pages = 20;
 
   // Prepare many pages with low live-bytes count.
-  PagedSpace* old_space = heap->old_space();
-  CHECK_EQ(1, old_space->CountTotalPages());
+  PagedSpace* old_pointer_space = heap->old_pointer_space();
+  CHECK_EQ(1, old_pointer_space->CountTotalPages());
   for (int i = 0; i < number_of_test_pages; i++) {
     AlwaysAllocateScope always_allocate(isolate);
-    SimulateFullSpace(old_space);
+    SimulateFullSpace(old_pointer_space);
     factory->NewFixedArray(1, TENURED);
   }
-  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
+  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
 
   // Triggering one GC will cause a lot of garbage to be discovered but
   // even spread across all allocated pages.
   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
                           "triggered for preparation");
-  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
+  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
 
   // Triggering subsequent GCs should cause at least half of the pages
   // to be released to the OS after at most two cycles.
   heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
-  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
+  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
   heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
-  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
+  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
 
   // Triggering a last-resort GC should cause all pages to be released to the
   // OS so that other processes can seize the memory.  If we get a failure here
@@ -3205,7 +3207,7 @@ TEST(ReleaseOverReservedPages) {
   // boots, but if the 20 small arrays don't fit on the first page then that's
   // an indication that it is too small.
   heap->CollectAllAvailableGarbage("triggered really hard");
-  CHECK_EQ(1, old_space->CountTotalPages());
+  CHECK_EQ(1, old_pointer_space->CountTotalPages());
 }
 
 
@@ -4845,8 +4847,8 @@ TEST(ArrayShiftSweeping) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
-  CHECK(heap->InOldSpace(o->elements()));
-  CHECK(heap->InOldSpace(*o));
+  CHECK(heap->InOldPointerSpace(o->elements()));
+  CHECK(heap->InOldPointerSpace(*o));
   Page* page = Page::FromAddress(o->elements()->address());
   CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
         Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
@@ -4959,7 +4961,7 @@ TEST(Regress388880) {
   // Allocate fixed array in old pointer space so, that object allocated
   // afterwards would end at the end of the page.
   {
-    SimulateFullSpace(heap->old_space());
+    SimulateFullSpace(heap->old_pointer_space());
     int padding_size = desired_offset - Page::kObjectStartOffset;
     int padding_array_length =
         (padding_size - FixedArray::kHeaderSize) / kPointerSize;
@@ -5028,7 +5030,7 @@ TEST(Regress3631) {
       "  weak_map.set(future_keys[i], i);"
       "}");
   heap->incremental_marking()->set_should_hurry(true);
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
 }
 
 
@@ -5045,7 +5047,7 @@ TEST(Regress442710) {
   Handle<String> name = factory->InternalizeUtf8String("testArray");
   JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
   CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
 }
 
 
@@ -5078,10 +5080,10 @@ void CheckMapRetainingFor(int n) {
   }
   CHECK(!weak_cell->cleared());
   for (int i = 0; i < n; i++) {
-    heap->CollectGarbage(OLD_SPACE);
+    heap->CollectGarbage(OLD_POINTER_SPACE);
   }
   CHECK(!weak_cell->cleared());
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
   CHECK(weak_cell->cleared());
 }
 
index 1674afc2082b67a96e12ee5540fa5792f516e4a3..5006c103d7a63e04951a01999f8652f3b61518df 100644 (file)
@@ -97,7 +97,7 @@ TEST(Promotion) {
   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
 
   // Array now sits in the old space
-  CHECK(heap->InSpace(*array, OLD_SPACE));
+  CHECK(heap->InSpace(*array, OLD_POINTER_SPACE));
 }
 
 
@@ -119,10 +119,10 @@ TEST(NoPromotion) {
   CHECK(heap->InSpace(*array, NEW_SPACE));
 
   // Simulate a full old space to make promotion fail.
-  SimulateFullSpace(heap->old_space());
+  SimulateFullSpace(heap->old_pointer_space());
 
   // Call mark compact GC, and it should pass.
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
 }
 
 
@@ -138,7 +138,7 @@ TEST(MarkCompactCollector) {
   Handle<GlobalObject> global(isolate->context()->global_object());
 
   // call mark-compact when heap is empty
-  heap->CollectGarbage(OLD_SPACE, "trigger 1");
+  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 1");
 
   // keep allocating garbage in new space until it fails
   const int arraysize = 100;
@@ -165,7 +165,7 @@ TEST(MarkCompactCollector) {
     factory->NewJSObject(function);
   }
 
-  heap->CollectGarbage(OLD_SPACE, "trigger 4");
+  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 4");
 
   { HandleScope scope(isolate);
     Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
@@ -183,7 +183,7 @@ TEST(MarkCompactCollector) {
     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
   }
 
-  heap->CollectGarbage(OLD_SPACE, "trigger 5");
+  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 5");
 
   { HandleScope scope(isolate);
     Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
@@ -308,7 +308,7 @@ TEST(ObjectGroups) {
                                  g2c1.location());
   }
   // Do a full GC
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
 
   // All object should be alive.
   CHECK_EQ(0, NumberOfWeakCalls);
@@ -335,7 +335,7 @@ TEST(ObjectGroups) {
                                  g2c1.location());
   }
 
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
 
   // All objects should be gone. 5 global handles in total.
   CHECK_EQ(5, NumberOfWeakCalls);
@@ -348,7 +348,7 @@ TEST(ObjectGroups) {
                           reinterpret_cast<void*>(&g2c1_and_id),
                           &WeakPointerCallback);
 
-  heap->CollectGarbage(OLD_SPACE);
+  heap->CollectGarbage(OLD_POINTER_SPACE);
   CHECK_EQ(7, NumberOfWeakCalls);
 }
 
index 9fadddf4c27a199564430b786307cc4f4a3aee77..f86ef5ec8117c166637c99380e80eb158cd8544a 100644 (file)
@@ -1074,13 +1074,13 @@ TEST(SerializeToplevelThreeBigStrings) {
   Heap* heap = isolate->heap();
   CHECK(heap->InSpace(
       *v8::Utils::OpenHandle(*CompileRun("a")->ToString(CcTest::isolate())),
-      OLD_SPACE));
+      OLD_DATA_SPACE));
   CHECK(heap->InSpace(
       *v8::Utils::OpenHandle(*CompileRun("b")->ToString(CcTest::isolate())),
-      OLD_SPACE));
+      OLD_DATA_SPACE));
   CHECK(heap->InSpace(
       *v8::Utils::OpenHandle(*CompileRun("c")->ToString(CcTest::isolate())),
-      OLD_SPACE));
+      OLD_DATA_SPACE));
 
   delete cache;
   source_a.Dispose();
index b4bc7ad06945cc432fb6c62559bf94389d23b764..52a93176029217e4aa67d6b68778ff8ea010054a 100644 (file)
@@ -308,7 +308,8 @@ TEST(MemoryAllocator) {
                                 heap->MaxExecutableSize()));
 
   int total_pages = 0;
-  OldSpace faked_space(heap, heap->MaxReserved(), OLD_SPACE, NOT_EXECUTABLE);
+  OldSpace faked_space(heap, heap->MaxReserved(), OLD_POINTER_SPACE,
+                       NOT_EXECUTABLE);
   Page* first_page = memory_allocator->AllocatePage(
       faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
 
@@ -377,8 +378,8 @@ TEST(OldSpace) {
                                 heap->MaxExecutableSize()));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
-  OldSpace* s = new OldSpace(heap, heap->MaxOldGenerationSize(), OLD_SPACE,
-                             NOT_EXECUTABLE);
+  OldSpace* s = new OldSpace(heap, heap->MaxOldGenerationSize(),
+                             OLD_POINTER_SPACE, NOT_EXECUTABLE);
   CHECK(s != NULL);
 
   CHECK(s->SetUp());
index 32284eaf01ace485d548dbdf62f4d37dda8d9429..fdcac3af355067c2d1e5945ef39e83cdec439692 100644 (file)
@@ -961,7 +961,7 @@ TEST(DoScavenge) {
   // a pointer to a from semi-space.
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
-  CHECK(isolate->heap()->old_space()->Contains(*obj));
+  CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
 
   CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
 }
@@ -1195,7 +1195,7 @@ TEST(StoreBufferScanOnScavenge) {
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
-  CHECK(isolate->heap()->old_space()->Contains(*obj));
+  CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
 
   // Create temp object in the new space.
   Handle<JSArray> temp = factory->NewJSArray(FAST_ELEMENTS, NOT_TENURED);
@@ -1297,7 +1297,7 @@ TEST(WriteBarriersInCopyJSObject) {
   AlwaysAllocateScope aa_scope(isolate);
   Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
   Handle<JSObject> clone(JSObject::cast(clone_obj));
-  CHECK(heap->old_space()->Contains(clone->address()));
+  CHECK(heap->old_pointer_space()->Contains(clone->address()));
 
   CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
 
index f2044720a5769a03b41580be2db68f733c65f28b..2f947d76a5e24b79f52c7b78c422a7451cb22a4f 100644 (file)
@@ -183,7 +183,7 @@ TEST(Regress2060a) {
   Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
 
   // Start second old-space page so that values land on evacuation candidate.
-  Page* first_page = heap->old_space()->anchor()->next_page();
+  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
@@ -222,7 +222,7 @@ TEST(Regress2060b) {
       factory->function_string());
 
   // Start second old-space page so that keys land on evacuation candidate.
-  Page* first_page = heap->old_space()->anchor()->next_page();
+  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
index 77afd0070691983f18b5bbac9e8e96d893eeab99..a01d3152779639811b1fe0c7e03174b115933e4f 100644 (file)
@@ -183,7 +183,7 @@ TEST(WeakSet_Regress2060a) {
   Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
 
   // Start second old-space page so that values land on evacuation candidate.
-  Page* first_page = heap->old_space()->anchor()->next_page();
+  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
@@ -222,7 +222,7 @@ TEST(WeakSet_Regress2060b) {
       factory->function_string());
 
   // Start second old-space page so that keys land on evacuation candidate.
-  Page* first_page = heap->old_space()->anchor()->next_page();
+  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
index 44f294d03db3f6084ca44e946fb1855a7a9e4e84..8986a91b5c318302c0f8ee8601d8919df675d9d6 100755 (executable)
@@ -1621,7 +1621,7 @@ class KnownMap(HeapObject):
 
 COMMENT_RE = re.compile(r"^C (0x[0-9a-fA-F]+) (.*)$")
 PAGEADDRESS_RE = re.compile(
-    r"^P (mappage|oldpage) (0x[0-9a-fA-F]+)$")
+    r"^P (mappage|pointerpage|datapage) (0x[0-9a-fA-F]+)$")
 
 
 class InspectionInfo(object):
@@ -1698,7 +1698,8 @@ class InspectionPadawan(object):
     self.reader = reader
     self.heap = heap
     self.known_first_map_page = 0
-    self.known_first_old_page = 0
+    self.known_first_data_page = 0
+    self.known_first_pointer_page = 0
 
   def __getattr__(self, name):
     """An InspectionPadawan can be used instead of V8Heap, even though
@@ -1714,11 +1715,13 @@ class InspectionPadawan(object):
 
   def IsInKnownOldSpace(self, tagged_address):
     page_address = tagged_address & ~self.heap.PageAlignmentMask()
-    return page_address == self.known_first_old_page
+    return page_address in [self.known_first_data_page,
+                            self.known_first_pointer_page]
 
   def ContainingKnownOldSpaceName(self, tagged_address):
     page_address = tagged_address & ~self.heap.PageAlignmentMask()
-    if page_address == self.known_first_old_page: return "OLD_SPACE"
+    if page_address == self.known_first_data_page: return "OLD_DATA_SPACE"
+    if page_address == self.known_first_pointer_page: return "OLD_POINTER_SPACE"
     return None
 
   def SenseObject(self, tagged_address):
@@ -1775,9 +1778,11 @@ class InspectionPadawan(object):
 
   def PrintKnowledge(self):
     print "  known_first_map_page = %s\n"\
-          "  known_first_old_page = %s" % (
+          "  known_first_data_page = %s\n"\
+          "  known_first_pointer_page = %s" % (
           self.reader.FormatIntPtr(self.known_first_map_page),
-          self.reader.FormatIntPtr(self.known_first_old_page))
+          self.reader.FormatIntPtr(self.known_first_data_page),
+          self.reader.FormatIntPtr(self.known_first_pointer_page))
 
 WEB_HEADER = """
 <!DOCTYPE html>
@@ -2119,10 +2124,12 @@ class InspectionWebFormatter(object):
 
     self.padawan = InspectionPadawan(self.reader, self.heap)
     self.comments = InspectionInfo(minidump_name, self.reader)
-    self.padawan.known_first_old_page = (
-        self.comments.get_page_address("oldpage"))
+    self.padawan.known_first_data_page = (
+        self.comments.get_page_address("datapage"))
     self.padawan.known_first_map_page = (
         self.comments.get_page_address("mappage"))
+    self.padawan.known_first_pointer_page = (
+        self.comments.get_page_address("pointerpage"))
 
   def set_comment(self, straddress, comment):
     try:
@@ -2134,10 +2141,12 @@ class InspectionWebFormatter(object):
   def set_page_address(self, kind, straddress):
     try:
       address = int(straddress, 0)
-      if kind == "oldpage":
-        self.padawan.known_first_old_page = address
+      if kind == "datapage":
+        self.padawan.known_first_data_page = address
       elif kind == "mappage":
         self.padawan.known_first_map_page = address
+      elif kind == "pointerpage":
+        self.padawan.known_first_pointer_page = address
       self.comments.save_page_address(kind, address)
     except ValueError:
       print "Invalid address"
@@ -2608,10 +2617,13 @@ class InspectionWebFormatter(object):
       page_address = address & ~self.heap.PageAlignmentMask()
 
       f.write("Page info: \n")
-      self.output_page_info(f, "old", self.padawan.known_first_old_page, \
+      self.output_page_info(f, "data", self.padawan.known_first_data_page, \
                             page_address)
       self.output_page_info(f, "map", self.padawan.known_first_map_page, \
                             page_address)
+      self.output_page_info(f, "pointer", \
+                            self.padawan.known_first_pointer_page, \
+                            page_address)
 
       if not self.reader.IsValidAddress(address):
         f.write("<h3>The contents at address %s not found in the dump.</h3>" % \
@@ -2913,14 +2925,14 @@ class InspectionShell(cmd.Cmd):
     """
     self.padawan.PrintKnowledge()
 
-  def do_ko(self, address):
+  def do_kd(self, address):
     """
      Teach V8 heap layout information to the inspector. Set the first
-     old space page by passing any pointer into that page.
+     data-space page by passing any pointer into that page.
     """
     address = int(address, 16)
     page_address = address & ~self.heap.PageAlignmentMask()
-    self.padawan.known_first_old_page = page_address
+    self.padawan.known_first_data_page = page_address
 
   def do_km(self, address):
     """
@@ -2931,6 +2943,15 @@ class InspectionShell(cmd.Cmd):
     page_address = address & ~self.heap.PageAlignmentMask()
     self.padawan.known_first_map_page = page_address
 
+  def do_kp(self, address):
+    """
+     Teach V8 heap layout information to the inspector. Set the first
+     pointer-space page by passing any pointer into that page.
+    """
+    address = int(address, 16)
+    page_address = address & ~self.heap.PageAlignmentMask()
+    self.padawan.known_first_pointer_page = page_address
+
   def do_list(self, smth):
     """
      List all available memory regions.
index 581e1914e2e345497c9a4a2a47f319a10e7ae07b..60e0685377b692586417b82f804820011a93dcc6 100644 (file)
@@ -165,23 +165,25 @@ void DumpHeapStats(const char *minidump_file) {
 
   const int new_space_size = READ_FIELD(1);
   const int new_space_capacity = READ_FIELD(2);
-  const int old_space_size = READ_FIELD(3);
-  const int old_space_capacity = READ_FIELD(4);
-  const int code_space_size = READ_FIELD(5);
-  const int code_space_capacity = READ_FIELD(6);
-  const int map_space_size = READ_FIELD(7);
-  const int map_space_capacity = READ_FIELD(8);
-  const int cell_space_size = READ_FIELD(9);
-  const int cell_space_capacity = READ_FIELD(10);
-  const int lo_space_size = READ_FIELD(11);
-  const int global_handle_count = READ_FIELD(12);
-  const int weak_global_handle_count = READ_FIELD(13);
-  const int pending_global_handle_count = READ_FIELD(14);
-  const int near_death_global_handle_count = READ_FIELD(15);
-  const int destroyed_global_handle_count = READ_FIELD(16);
-  const int memory_allocator_size = READ_FIELD(17);
-  const int memory_allocator_capacity = READ_FIELD(18);
-  const int os_error = READ_FIELD(19);
+  const int old_pointer_space_size = READ_FIELD(3);
+  const int old_pointer_space_capacity = READ_FIELD(4);
+  const int old_data_space_size = READ_FIELD(5);
+  const int old_data_space_capacity = READ_FIELD(6);
+  const int code_space_size = READ_FIELD(7);
+  const int code_space_capacity = READ_FIELD(8);
+  const int map_space_size = READ_FIELD(9);
+  const int map_space_capacity = READ_FIELD(10);
+  const int cell_space_size = READ_FIELD(11);
+  const int cell_space_capacity = READ_FIELD(12);
+  const int lo_space_size = READ_FIELD(13);
+  const int global_handle_count = READ_FIELD(14);
+  const int weak_global_handle_count = READ_FIELD(15);
+  const int pending_global_handle_count = READ_FIELD(16);
+  const int near_death_global_handle_count = READ_FIELD(17);
+  const int destroyed_global_handle_count = READ_FIELD(18);
+  const int memory_allocator_size = READ_FIELD(19);
+  const int memory_allocator_capacity = READ_FIELD(20);
+  const int os_error = READ_FIELD(23);
 #undef READ_FIELD
 
   int objects_per_type[v8::internal::LAST_TYPE + 1] = {0};
@@ -223,8 +225,10 @@ void DumpHeapStats(const char *minidump_file) {
     printf("\t%-25s\t% 10.3f MB\n", #stat ":", toM(stat));
   PRINT_MB_STAT(new_space_size);
   PRINT_MB_STAT(new_space_capacity);
-  PRINT_MB_STAT(old_space_size);
-  PRINT_MB_STAT(old_space_capacity);
+  PRINT_MB_STAT(old_pointer_space_size);
+  PRINT_MB_STAT(old_pointer_space_capacity);
+  PRINT_MB_STAT(old_data_space_size);
+  PRINT_MB_STAT(old_data_space_capacity);
   PRINT_MB_STAT(code_space_size);
   PRINT_MB_STAT(code_space_capacity);
   PRINT_MB_STAT(map_space_size);
index 0527cc8e5801bdbe108e24ecfc8bd06c2e56e5d8..7a693937055dc2cc2d209079d2ddd391895eacd5 100644 (file)
@@ -135,160 +135,159 @@ INSTANCE_TYPES = {
 # List of known V8 maps.
 KNOWN_MAPS = {
   0x08081: (137, "ByteArrayMap"),
-  0x080ad: (129, "MetaMap"),
-  0x080d9: (131, "NullMap"),
-  0x08105: (180, "FixedArrayMap"),
-  0x08131: (4, "OneByteInternalizedStringMap"),
-  0x0815d: (183, "WeakCellMap"),
-  0x08189: (131, "UndefinedMap"),
-  0x081b5: (134, "HeapNumberMap"),
-  0x081e1: (138, "FreeSpaceMap"),
-  0x0820d: (158, "OnePointerFillerMap"),
-  0x08239: (158, "TwoPointerFillerMap"),
-  0x08265: (131, "TheHoleMap"),
-  0x08291: (131, "BooleanMap"),
-  0x082bd: (131, "UninitializedMap"),
-  0x082e9: (131, "ExceptionMap"),
-  0x08315: (132, "CellMap"),
-  0x08341: (133, "GlobalPropertyCellMap"),
-  0x0836d: (182, "SharedFunctionInfoMap"),
-  0x08399: (135, "MutableHeapNumberMap"),
-  0x083c5: (180, "NativeContextMap"),
-  0x083f1: (130, "CodeMap"),
-  0x0841d: (180, "ScopeInfoMap"),
-  0x08449: (180, "FixedCOWArrayMap"),
-  0x08475: (157, "FixedDoubleArrayMap"),
-  0x084a1: (181, "ConstantPoolArrayMap"),
-  0x084cd: (131, "NoInterceptorResultSentinelMap"),
-  0x084f9: (180, "HashTableMap"),
-  0x08525: (180, "OrderedHashTableMap"),
-  0x08551: (131, "ArgumentsMarkerMap"),
-  0x0857d: (131, "TerminationExceptionMap"),
-  0x085a9: (128, "SymbolMap"),
-  0x085d5: (64, "StringMap"),
-  0x08601: (68, "OneByteStringMap"),
-  0x0862d: (65, "ConsStringMap"),
-  0x08659: (69, "ConsOneByteStringMap"),
-  0x08685: (67, "SlicedStringMap"),
-  0x086b1: (71, "SlicedOneByteStringMap"),
-  0x086dd: (66, "ExternalStringMap"),
-  0x08709: (74, "ExternalStringWithOneByteDataMap"),
-  0x08735: (70, "ExternalOneByteStringMap"),
-  0x08761: (70, "NativeSourceStringMap"),
-  0x0878d: (82, "ShortExternalStringMap"),
-  0x087b9: (90, "ShortExternalStringWithOneByteDataMap"),
-  0x087e5: (0, "InternalizedStringMap"),
-  0x08811: (2, "ExternalInternalizedStringMap"),
-  0x0883d: (10, "ExternalInternalizedStringWithOneByteDataMap"),
-  0x08869: (6, "ExternalOneByteInternalizedStringMap"),
-  0x08895: (18, "ShortExternalInternalizedStringMap"),
-  0x088c1: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
-  0x088ed: (22, "ShortExternalOneByteInternalizedStringMap"),
-  0x08919: (86, "ShortExternalOneByteStringMap"),
-  0x08945: (139, "ExternalInt8ArrayMap"),
-  0x08971: (140, "ExternalUint8ArrayMap"),
-  0x0899d: (141, "ExternalInt16ArrayMap"),
-  0x089c9: (142, "ExternalUint16ArrayMap"),
-  0x089f5: (143, "ExternalInt32ArrayMap"),
-  0x08a21: (144, "ExternalUint32ArrayMap"),
-  0x08a4d: (145, "ExternalFloat32ArrayMap"),
-  0x08a79: (146, "ExternalFloat64ArrayMap"),
-  0x08aa5: (147, "ExternalUint8ClampedArrayMap"),
-  0x08ad1: (149, "FixedUint8ArrayMap"),
-  0x08afd: (148, "FixedInt8ArrayMap"),
-  0x08b29: (151, "FixedUint16ArrayMap"),
-  0x08b55: (150, "FixedInt16ArrayMap"),
-  0x08b81: (153, "FixedUint32ArrayMap"),
-  0x08bad: (152, "FixedInt32ArrayMap"),
-  0x08bd9: (154, "FixedFloat32ArrayMap"),
-  0x08c05: (155, "FixedFloat64ArrayMap"),
-  0x08c31: (156, "FixedUint8ClampedArrayMap"),
-  0x08c5d: (180, "SloppyArgumentsElementsMap"),
-  0x08c89: (180, "FunctionContextMap"),
-  0x08cb5: (180, "CatchContextMap"),
-  0x08ce1: (180, "WithContextMap"),
-  0x08d0d: (180, "BlockContextMap"),
-  0x08d39: (180, "ModuleContextMap"),
-  0x08d65: (180, "ScriptContextMap"),
-  0x08d91: (180, "ScriptContextTableMap"),
-  0x08dbd: (187, "JSMessageObjectMap"),
-  0x08de9: (136, "ForeignMap"),
-  0x08e15: (189, "NeanderMap"),
-  0x08e41: (189, "ExternalMap"),
-  0x08e6d: (170, "AllocationSiteMap"),
-  0x08e99: (171, "AllocationMementoMap"),
-  0x08ec5: (174, "PolymorphicCodeCacheMap"),
-  0x08ef1: (172, "ScriptMap"),
-  0x0907d: (177, "BoxMap"),
-  0x090a9: (161, "ExecutableAccessorInfoMap"),
-  0x090d5: (162, "AccessorPairMap"),
-  0x09101: (163, "AccessCheckInfoMap"),
-  0x0912d: (164, "InterceptorInfoMap"),
-  0x09159: (165, "CallHandlerInfoMap"),
-  0x09185: (166, "FunctionTemplateInfoMap"),
-  0x091b1: (167, "ObjectTemplateInfoMap"),
-  0x091dd: (169, "TypeSwitchInfoMap"),
-  0x09209: (173, "CodeCacheMap"),
-  0x09235: (175, "TypeFeedbackInfoMap"),
-  0x09261: (176, "AliasedArgumentsEntryMap"),
-  0x0928d: (178, "DebugInfoMap"),
-  0x092b9: (179, "BreakPointInfoMap"),
+  0x080a9: (129, "MetaMap"),
+  0x080d1: (131, "NullMap"),
+  0x080f9: (131, "UndefinedMap"),
+  0x08121: (180, "FixedArrayMap"),
+  0x08149: (4, "OneByteInternalizedStringMap"),
+  0x08171: (134, "HeapNumberMap"),
+  0x08199: (138, "FreeSpaceMap"),
+  0x081c1: (158, "OnePointerFillerMap"),
+  0x081e9: (158, "TwoPointerFillerMap"),
+  0x08211: (131, "TheHoleMap"),
+  0x08239: (131, "BooleanMap"),
+  0x08261: (131, "UninitializedMap"),
+  0x08289: (131, "ExceptionMap"),
+  0x082b1: (132, "CellMap"),
+  0x082d9: (133, "GlobalPropertyCellMap"),
+  0x08301: (182, "SharedFunctionInfoMap"),
+  0x08329: (135, "MutableHeapNumberMap"),
+  0x08351: (180, "NativeContextMap"),
+  0x08379: (130, "CodeMap"),
+  0x083a1: (180, "ScopeInfoMap"),
+  0x083c9: (180, "FixedCOWArrayMap"),
+  0x083f1: (157, "FixedDoubleArrayMap"),
+  0x08419: (181, "ConstantPoolArrayMap"),
+  0x08441: (183, "WeakCellMap"),
+  0x08469: (131, "NoInterceptorResultSentinelMap"),
+  0x08491: (180, "HashTableMap"),
+  0x084b9: (180, "OrderedHashTableMap"),
+  0x084e1: (131, "ArgumentsMarkerMap"),
+  0x08509: (131, "TerminationExceptionMap"),
+  0x08531: (128, "SymbolMap"),
+  0x08559: (64, "StringMap"),
+  0x08581: (68, "OneByteStringMap"),
+  0x085a9: (65, "ConsStringMap"),
+  0x085d1: (69, "ConsOneByteStringMap"),
+  0x085f9: (67, "SlicedStringMap"),
+  0x08621: (71, "SlicedOneByteStringMap"),
+  0x08649: (66, "ExternalStringMap"),
+  0x08671: (74, "ExternalStringWithOneByteDataMap"),
+  0x08699: (70, "ExternalOneByteStringMap"),
+  0x086c1: (70, "NativeSourceStringMap"),
+  0x086e9: (82, "ShortExternalStringMap"),
+  0x08711: (90, "ShortExternalStringWithOneByteDataMap"),
+  0x08739: (0, "InternalizedStringMap"),
+  0x08761: (2, "ExternalInternalizedStringMap"),
+  0x08789: (10, "ExternalInternalizedStringWithOneByteDataMap"),
+  0x087b1: (6, "ExternalOneByteInternalizedStringMap"),
+  0x087d9: (18, "ShortExternalInternalizedStringMap"),
+  0x08801: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
+  0x08829: (22, "ShortExternalOneByteInternalizedStringMap"),
+  0x08851: (86, "ShortExternalOneByteStringMap"),
+  0x08879: (139, "ExternalInt8ArrayMap"),
+  0x088a1: (140, "ExternalUint8ArrayMap"),
+  0x088c9: (141, "ExternalInt16ArrayMap"),
+  0x088f1: (142, "ExternalUint16ArrayMap"),
+  0x08919: (143, "ExternalInt32ArrayMap"),
+  0x08941: (144, "ExternalUint32ArrayMap"),
+  0x08969: (145, "ExternalFloat32ArrayMap"),
+  0x08991: (146, "ExternalFloat64ArrayMap"),
+  0x089b9: (147, "ExternalUint8ClampedArrayMap"),
+  0x089e1: (149, "FixedUint8ArrayMap"),
+  0x08a09: (148, "FixedInt8ArrayMap"),
+  0x08a31: (151, "FixedUint16ArrayMap"),
+  0x08a59: (150, "FixedInt16ArrayMap"),
+  0x08a81: (153, "FixedUint32ArrayMap"),
+  0x08aa9: (152, "FixedInt32ArrayMap"),
+  0x08ad1: (154, "FixedFloat32ArrayMap"),
+  0x08af9: (155, "FixedFloat64ArrayMap"),
+  0x08b21: (156, "FixedUint8ClampedArrayMap"),
+  0x08b49: (180, "SloppyArgumentsElementsMap"),
+  0x08b71: (180, "FunctionContextMap"),
+  0x08b99: (180, "CatchContextMap"),
+  0x08bc1: (180, "WithContextMap"),
+  0x08be9: (180, "BlockContextMap"),
+  0x08c11: (180, "ModuleContextMap"),
+  0x08c39: (180, "ScriptContextMap"),
+  0x08c61: (180, "ScriptContextTableMap"),
+  0x08c89: (187, "JSMessageObjectMap"),
+  0x08cb1: (136, "ForeignMap"),
+  0x08cd9: (189, "NeanderMap"),
+  0x08d01: (170, "AllocationSiteMap"),
+  0x08d29: (171, "AllocationMementoMap"),
+  0x08d51: (174, "PolymorphicCodeCacheMap"),
+  0x08d79: (172, "ScriptMap"),
+  0x08dc9: (189, "ExternalMap"),
+  0x08f09: (177, "BoxMap"),
+  0x08f31: (161, "ExecutableAccessorInfoMap"),
+  0x08f59: (162, "AccessorPairMap"),
+  0x08f81: (163, "AccessCheckInfoMap"),
+  0x08fa9: (164, "InterceptorInfoMap"),
+  0x08fd1: (165, "CallHandlerInfoMap"),
+  0x08ff9: (166, "FunctionTemplateInfoMap"),
+  0x09021: (167, "ObjectTemplateInfoMap"),
+  0x09049: (169, "TypeSwitchInfoMap"),
+  0x09071: (173, "CodeCacheMap"),
+  0x09099: (175, "TypeFeedbackInfoMap"),
+  0x090c1: (176, "AliasedArgumentsEntryMap"),
+  0x090e9: (178, "DebugInfoMap"),
+  0x09111: (179, "BreakPointInfoMap"),
 }
 
 # List of known V8 objects.
 KNOWN_OBJECTS = {
-  ("OLD_SPACE", 0x08081): "NullValue",
-  ("OLD_SPACE", 0x08091): "EmptyDescriptorArray",
-  ("OLD_SPACE", 0x08099): "EmptyFixedArray",
-  ("OLD_SPACE", 0x080bd): "UndefinedValue",
-  ("OLD_SPACE", 0x080e5): "NanValue",
-  ("OLD_SPACE", 0x080f1): "TheHoleValue",
-  ("OLD_SPACE", 0x08111): "TrueValue",
-  ("OLD_SPACE", 0x08131): "FalseValue",
-  ("OLD_SPACE", 0x08155): "UninitializedValue",
-  ("OLD_SPACE", 0x08181): "Exception",
-  ("OLD_SPACE", 0x081a9): "NoInterceptorResultSentinel",
-  ("OLD_SPACE", 0x081e5): "EmptyByteArray",
-  ("OLD_SPACE", 0x081ed): "EmptyConstantPoolArray",
-  ("OLD_SPACE", 0x081fd): "ArgumentsMarker",
-  ("OLD_SPACE", 0x08229): "NumberStringCache",
-  ("OLD_SPACE", 0x08a31): "SingleCharacterStringCache",
-  ("OLD_SPACE", 0x08ec9): "StringSplitCache",
-  ("OLD_SPACE", 0x092d1): "RegExpMultipleCache",
-  ("OLD_SPACE", 0x096d9): "TerminationException",
-  ("OLD_SPACE", 0x0970d): "EmptyExternalInt8Array",
-  ("OLD_SPACE", 0x09719): "EmptyExternalUint8Array",
-  ("OLD_SPACE", 0x09725): "EmptyExternalInt16Array",
-  ("OLD_SPACE", 0x09731): "EmptyExternalUint16Array",
-  ("OLD_SPACE", 0x0973d): "EmptyExternalInt32Array",
-  ("OLD_SPACE", 0x09749): "EmptyExternalUint32Array",
-  ("OLD_SPACE", 0x09755): "EmptyExternalFloat32Array",
-  ("OLD_SPACE", 0x09761): "EmptyExternalFloat64Array",
-  ("OLD_SPACE", 0x0976d): "EmptyExternalUint8ClampedArray",
-  ("OLD_SPACE", 0x09779): "EmptyFixedUint8Array",
-  ("OLD_SPACE", 0x09781): "EmptyFixedInt8Array",
-  ("OLD_SPACE", 0x09789): "EmptyFixedUint16Array",
-  ("OLD_SPACE", 0x09791): "EmptyFixedInt16Array",
-  ("OLD_SPACE", 0x09799): "EmptyFixedUint32Array",
-  ("OLD_SPACE", 0x097a1): "EmptyFixedInt32Array",
-  ("OLD_SPACE", 0x097a9): "EmptyFixedFloat32Array",
-  ("OLD_SPACE", 0x097b1): "EmptyFixedFloat64Array",
-  ("OLD_SPACE", 0x097b9): "EmptyFixedUint8ClampedArray",
-  ("OLD_SPACE", 0x097c1): "InfinityValue",
-  ("OLD_SPACE", 0x097cd): "MinusZeroValue",
-  ("OLD_SPACE", 0x097d9): "MessageListeners",
-  ("OLD_SPACE", 0x097f5): "CodeStubs",
-  ("OLD_SPACE", 0x12c49): "NonMonomorphicCache",
-  ("OLD_SPACE", 0x132bd): "PolymorphicCodeCache",
-  ("OLD_SPACE", 0x132c5): "NativesSourceCache",
-  ("OLD_SPACE", 0x1353d): "EmptyScript",
-  ("OLD_SPACE", 0x13585): "IntrinsicFunctionNames",
-  ("OLD_SPACE", 0x27ae5): "ObservationState",
-  ("OLD_SPACE", 0x27af1): "SymbolRegistry",
-  ("OLD_SPACE", 0x2863d): "EmptySlowElementDictionary",
-  ("OLD_SPACE", 0x287d9): "AllocationSitesScratchpad",
-  ("OLD_SPACE", 0x28be1): "WeakObjectToCodeTable",
-  ("OLD_SPACE", 0x4e9dd): "StringTable",
-  ("CODE_SPACE", 0x15f61): "JsEntryCode",
-  ("CODE_SPACE", 0x24781): "JsConstructEntryCode",
+  ("OLD_POINTER_SPACE", 0x08081): "NullValue",
+  ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
+  ("OLD_POINTER_SPACE", 0x080a1): "TheHoleValue",
+  ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
+  ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
+  ("OLD_POINTER_SPACE", 0x080d1): "UninitializedValue",
+  ("OLD_POINTER_SPACE", 0x080e1): "Exception",
+  ("OLD_POINTER_SPACE", 0x080f1): "NoInterceptorResultSentinel",
+  ("OLD_POINTER_SPACE", 0x08101): "ArgumentsMarker",
+  ("OLD_POINTER_SPACE", 0x08111): "NumberStringCache",
+  ("OLD_POINTER_SPACE", 0x08919): "SingleCharacterStringCache",
+  ("OLD_POINTER_SPACE", 0x08d21): "StringSplitCache",
+  ("OLD_POINTER_SPACE", 0x09129): "RegExpMultipleCache",
+  ("OLD_POINTER_SPACE", 0x09531): "TerminationException",
+  ("OLD_POINTER_SPACE", 0x09541): "MessageListeners",
+  ("OLD_POINTER_SPACE", 0x0955d): "CodeStubs",
+  ("OLD_POINTER_SPACE", 0x0f555): "NonMonomorphicCache",
+  ("OLD_POINTER_SPACE", 0x0fb69): "PolymorphicCodeCache",
+  ("OLD_POINTER_SPACE", 0x0fb71): "NativesSourceCache",
+  ("OLD_POINTER_SPACE", 0x0fbe1): "EmptyScript",
+  ("OLD_POINTER_SPACE", 0x0fc1d): "IntrinsicFunctionNames",
+  ("OLD_POINTER_SPACE", 0x15c39): "ObservationState",
+  ("OLD_POINTER_SPACE", 0x15c45): "SymbolRegistry",
+  ("OLD_POINTER_SPACE", 0x16601): "EmptySlowElementDictionary",
+  ("OLD_POINTER_SPACE", 0x1679d): "AllocationSitesScratchpad",
+  ("OLD_POINTER_SPACE", 0x43e61): "StringTable",
+  ("OLD_DATA_SPACE", 0x08081): "EmptyDescriptorArray",
+  ("OLD_DATA_SPACE", 0x08089): "EmptyFixedArray",
+  ("OLD_DATA_SPACE", 0x080a9): "NanValue",
+  ("OLD_DATA_SPACE", 0x08159): "EmptyByteArray",
+  ("OLD_DATA_SPACE", 0x08161): "EmptyConstantPoolArray",
+  ("OLD_DATA_SPACE", 0x08241): "EmptyExternalInt8Array",
+  ("OLD_DATA_SPACE", 0x0824d): "EmptyExternalUint8Array",
+  ("OLD_DATA_SPACE", 0x08259): "EmptyExternalInt16Array",
+  ("OLD_DATA_SPACE", 0x08265): "EmptyExternalUint16Array",
+  ("OLD_DATA_SPACE", 0x08271): "EmptyExternalInt32Array",
+  ("OLD_DATA_SPACE", 0x0827d): "EmptyExternalUint32Array",
+  ("OLD_DATA_SPACE", 0x08289): "EmptyExternalFloat32Array",
+  ("OLD_DATA_SPACE", 0x08295): "EmptyExternalFloat64Array",
+  ("OLD_DATA_SPACE", 0x082a1): "EmptyExternalUint8ClampedArray",
+  ("OLD_DATA_SPACE", 0x082ad): "EmptyFixedUint8Array",
+  ("OLD_DATA_SPACE", 0x082b5): "EmptyFixedInt8Array",
+  ("OLD_DATA_SPACE", 0x082bd): "EmptyFixedUint16Array",
+  ("OLD_DATA_SPACE", 0x082c5): "EmptyFixedInt16Array",
+  ("OLD_DATA_SPACE", 0x082cd): "EmptyFixedUint32Array",
+  ("OLD_DATA_SPACE", 0x082d5): "EmptyFixedInt32Array",
+  ("OLD_DATA_SPACE", 0x082dd): "EmptyFixedFloat32Array",
+  ("OLD_DATA_SPACE", 0x082e5): "EmptyFixedFloat64Array",
+  ("OLD_DATA_SPACE", 0x082ed): "EmptyFixedUint8ClampedArray",
+  ("OLD_DATA_SPACE", 0x082f5): "InfinityValue",
+  ("OLD_DATA_SPACE", 0x08301): "MinusZeroValue",
+  ("CODE_SPACE", 0x15fa1): "JsEntryCode",
+  ("CODE_SPACE", 0x243c1): "JsConstructEntryCode",
 }