Merge old data and pointer space.
author: hpayer <hpayer@chromium.org>
Wed, 18 Mar 2015 09:38:46 +0000 (02:38 -0700)
committer: Commit bot <commit-bot@chromium.org>
Wed, 18 Mar 2015 09:39:03 +0000 (09:39 +0000)
BUG=

Review URL: https://codereview.chromium.org/1012023002

Cr-Commit-Position: refs/heads/master@{#27259}

62 files changed:
include/v8.h
src/api.cc
src/arm/lithium-codegen-arm.cc
src/arm/macro-assembler-arm.cc
src/arm/macro-assembler-arm.h
src/arm64/lithium-codegen-arm64.cc
src/arm64/macro-assembler-arm64.h
src/assembler.cc
src/assembler.h
src/counters.h
src/extensions/statistics-extension.cc
src/factory.cc
src/flag-definitions.h
src/globals.h
src/heap/heap-inl.h
src/heap/heap.cc
src/heap/heap.h
src/heap/incremental-marking.cc
src/heap/mark-compact-inl.h
src/heap/mark-compact.cc
src/heap/mark-compact.h
src/heap/spaces-inl.h
src/heap/spaces.cc
src/heap/spaces.h
src/heap/store-buffer-inl.h
src/heap/store-buffer.cc
src/hydrogen-instructions.cc
src/hydrogen-instructions.h
src/ia32/lithium-codegen-ia32.cc
src/ia32/macro-assembler-ia32.cc
src/ia32/macro-assembler-ia32.h
src/macro-assembler.h
src/mips/lithium-codegen-mips.cc
src/mips/macro-assembler-mips.cc
src/mips/macro-assembler-mips.h
src/mips64/lithium-codegen-mips64.cc
src/mips64/macro-assembler-mips64.h
src/objects.h
src/ppc/lithium-codegen-ppc.cc
src/ppc/macro-assembler-ppc.cc
src/ppc/macro-assembler-ppc.h
src/serialize.cc
src/x64/lithium-codegen-x64.cc
src/x64/macro-assembler-x64.cc
src/x64/macro-assembler-x64.h
src/x87/lithium-codegen-x87.cc
src/x87/macro-assembler-x87.cc
src/x87/macro-assembler-x87.h
test/cctest/test-alloc.cc
test/cctest/test-api.cc
test/cctest/test-constantpool.cc
test/cctest/test-dictionary.cc
test/cctest/test-heap.cc
test/cctest/test-mark-compact.cc
test/cctest/test-serialize.cc
test/cctest/test-spaces.cc
test/cctest/test-unboxed-doubles.cc
test/cctest/test-weakmaps.cc
test/cctest/test-weaksets.cc
tools/grokdump.py
tools/oom_dump/oom_dump.cc
tools/v8heapconst.py

index 755a87f..3cee396 100644 (file)
@@ -4641,16 +4641,15 @@ typedef void (*AddHistogramSampleCallback)(void* histogram, int sample);
 // --- Memory Allocation Callback ---
 enum ObjectSpace {
   kObjectSpaceNewSpace = 1 << 0,
-  kObjectSpaceOldPointerSpace = 1 << 1,
-  kObjectSpaceOldDataSpace = 1 << 2,
-  kObjectSpaceCodeSpace = 1 << 3,
-  kObjectSpaceMapSpace = 1 << 4,
-  kObjectSpaceCellSpace = 1 << 5,
-  kObjectSpacePropertyCellSpace = 1 << 6,
-  kObjectSpaceLoSpace = 1 << 7,
-  kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldPointerSpace |
-                    kObjectSpaceOldDataSpace | kObjectSpaceCodeSpace |
-                    kObjectSpaceMapSpace | kObjectSpaceLoSpace
+  kObjectSpaceOldSpace = 1 << 1,
+  kObjectSpaceCodeSpace = 1 << 2,
+  kObjectSpaceMapSpace = 1 << 3,
+  kObjectSpaceCellSpace = 1 << 4,
+  kObjectSpacePropertyCellSpace = 1 << 5,
+  kObjectSpaceLoSpace = 1 << 6,
+  kObjectSpaceAll = kObjectSpaceNewSpace | kObjectSpaceOldSpace |
+                    kObjectSpaceCodeSpace | kObjectSpaceMapSpace |
+                    kObjectSpaceLoSpace
 };
 
   enum AllocationAction {
index fd17edf..8a6164f 100644 (file)
@@ -233,14 +233,10 @@ void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
   heap_stats.new_space_size = &new_space_size;
   int new_space_capacity;
   heap_stats.new_space_capacity = &new_space_capacity;
-  intptr_t old_pointer_space_size;
-  heap_stats.old_pointer_space_size = &old_pointer_space_size;
-  intptr_t old_pointer_space_capacity;
-  heap_stats.old_pointer_space_capacity = &old_pointer_space_capacity;
-  intptr_t old_data_space_size;
-  heap_stats.old_data_space_size = &old_data_space_size;
-  intptr_t old_data_space_capacity;
-  heap_stats.old_data_space_capacity = &old_data_space_capacity;
+  intptr_t old_space_size;
+  heap_stats.old_space_size = &old_space_size;
+  intptr_t old_space_capacity;
+  heap_stats.old_space_capacity = &old_space_capacity;
   intptr_t code_space_size;
   heap_stats.code_space_size = &code_space_size;
   intptr_t code_space_capacity;
index 646e1a9..b02d51b 100644 (file)
@@ -5415,13 +5415,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5483,13 +5479,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 2344c78..33f0470 100644 (file)
@@ -1695,12 +1695,11 @@ void MacroAssembler::Allocate(int object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     STATIC_ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand(ip));
       b(hs, gc_required);
     }
@@ -1809,12 +1808,11 @@ void MacroAssembler::Allocate(Register object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     and_(scratch2, result, Operand(kDoubleAlignmentMask), SetCC);
     Label aligned;
     b(eq, &aligned);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand(ip));
       b(hs, gc_required);
     }
index 73bc072..16c1b80 100644 (file)
@@ -709,7 +709,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old pointer space. The object_size is
+  // Allocate an object in new space or old space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index 2c99a60..3db63be 100644 (file)
@@ -1580,13 +1580,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
 
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -1641,13 +1637,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   }
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 1b44788..1136102 100644 (file)
@@ -1284,7 +1284,7 @@ class MacroAssembler : public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old pointer space. The object_size is
+  // Allocate an object in new space or old space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. The allocated object is returned in result.
   //
index 6e9f3f2..3494f69 100644 (file)
@@ -1209,31 +1209,15 @@ ExternalReference ExternalReference::new_space_allocation_limit_address(
 }
 
 
-ExternalReference ExternalReference::old_pointer_space_allocation_top_address(
+ExternalReference ExternalReference::old_space_allocation_top_address(
     Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->OldPointerSpaceAllocationTopAddress());
-}
-
-
-ExternalReference ExternalReference::old_pointer_space_allocation_limit_address(
-    Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->OldPointerSpaceAllocationLimitAddress());
+  return ExternalReference(isolate->heap()->OldSpaceAllocationTopAddress());
 }
 
 
-ExternalReference ExternalReference::old_data_space_allocation_top_address(
+ExternalReference ExternalReference::old_space_allocation_limit_address(
     Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->OldDataSpaceAllocationTopAddress());
-}
-
-
-ExternalReference ExternalReference::old_data_space_allocation_limit_address(
-    Isolate* isolate) {
-  return ExternalReference(
-      isolate->heap()->OldDataSpaceAllocationLimitAddress());
+  return ExternalReference(isolate->heap()->OldSpaceAllocationLimitAddress());
 }
 
 
index 983ca32..07859b3 100644 (file)
@@ -936,14 +936,8 @@ class ExternalReference BASE_EMBEDDED {
   // Used for fast allocation in generated code.
   static ExternalReference new_space_allocation_top_address(Isolate* isolate);
   static ExternalReference new_space_allocation_limit_address(Isolate* isolate);
-  static ExternalReference old_pointer_space_allocation_top_address(
-      Isolate* isolate);
-  static ExternalReference old_pointer_space_allocation_limit_address(
-      Isolate* isolate);
-  static ExternalReference old_data_space_allocation_top_address(
-      Isolate* isolate);
-  static ExternalReference old_data_space_allocation_limit_address(
-      Isolate* isolate);
+  static ExternalReference old_space_allocation_top_address(Isolate* isolate);
+  static ExternalReference old_space_allocation_limit_address(Isolate* isolate);
 
   static ExternalReference mod_two_doubles_operation(Isolate* isolate);
   static ExternalReference power_double_double_function(Isolate* isolate);
index 539b182..7c83502 100644 (file)
@@ -396,44 +396,29 @@ class AggregatedHistogramTimerScope {
   AHT(compile_lazy, V8.CompileLazyMicroSeconds)
 
 
-#define HISTOGRAM_PERCENTAGE_LIST(HP)                                 \
-  /* Heap fragmentation. */                                           \
-  HP(external_fragmentation_total,                                    \
-     V8.MemoryExternalFragmentationTotal)                             \
-  HP(external_fragmentation_old_pointer_space,                        \
-     V8.MemoryExternalFragmentationOldPointerSpace)                   \
-  HP(external_fragmentation_old_data_space,                           \
-     V8.MemoryExternalFragmentationOldDataSpace)                      \
-  HP(external_fragmentation_code_space,                               \
-     V8.MemoryExternalFragmentationCodeSpace)                         \
-  HP(external_fragmentation_map_space,                                \
-     V8.MemoryExternalFragmentationMapSpace)                          \
-  HP(external_fragmentation_cell_space,                               \
-     V8.MemoryExternalFragmentationCellSpace)                         \
-  HP(external_fragmentation_property_cell_space,                      \
-     V8.MemoryExternalFragmentationPropertyCellSpace)                 \
-  HP(external_fragmentation_lo_space,                                 \
-     V8.MemoryExternalFragmentationLoSpace)                           \
-  /* Percentages of heap committed to each space. */                  \
-  HP(heap_fraction_new_space,                                         \
-     V8.MemoryHeapFractionNewSpace)                                   \
-  HP(heap_fraction_old_pointer_space,                                 \
-     V8.MemoryHeapFractionOldPointerSpace)                            \
-  HP(heap_fraction_old_data_space,                                    \
-     V8.MemoryHeapFractionOldDataSpace)                               \
-  HP(heap_fraction_code_space,                                        \
-     V8.MemoryHeapFractionCodeSpace)                                  \
-  HP(heap_fraction_map_space,                                         \
-     V8.MemoryHeapFractionMapSpace)                                   \
-  HP(heap_fraction_cell_space,                                        \
-     V8.MemoryHeapFractionCellSpace)                                  \
-  HP(heap_fraction_property_cell_space,                               \
-     V8.MemoryHeapFractionPropertyCellSpace)                          \
-  HP(heap_fraction_lo_space,                                          \
-     V8.MemoryHeapFractionLoSpace)                                    \
-  /* Percentage of crankshafted codegen. */                           \
-  HP(codegen_fraction_crankshaft,                                     \
-     V8.CodegenFractionCrankshaft)                                    \
+#define HISTOGRAM_PERCENTAGE_LIST(HP)                                          \
+  /* Heap fragmentation. */                                                    \
+  HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal)        \
+  HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
+  HP(external_fragmentation_code_space,                                        \
+     V8.MemoryExternalFragmentationCodeSpace)                                  \
+  HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
+  HP(external_fragmentation_cell_space,                                        \
+     V8.MemoryExternalFragmentationCellSpace)                                  \
+  HP(external_fragmentation_property_cell_space,                               \
+     V8.MemoryExternalFragmentationPropertyCellSpace)                          \
+  HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)   \
+  /* Percentages of heap committed to each space. */                           \
+  HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace)                   \
+  HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace)                   \
+  HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace)                 \
+  HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace)                   \
+  HP(heap_fraction_cell_space, V8.MemoryHeapFractionCellSpace)                 \
+  HP(heap_fraction_property_cell_space,                                        \
+     V8.MemoryHeapFractionPropertyCellSpace)                                   \
+  HP(heap_fraction_lo_space, V8.MemoryHeapFractionLoSpace)                     \
+  /* Percentage of crankshafted codegen. */                                    \
+  HP(codegen_fraction_crankshaft, V8.CodegenFractionCrankshaft)
 
 
 #define HISTOGRAM_MEMORY_LIST(HM)                                     \
@@ -606,14 +591,9 @@ class AggregatedHistogramTimerScope {
   SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable)               \
   SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted)               \
   SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed)                         \
-  SC(old_pointer_space_bytes_available,                                        \
-     V8.MemoryOldPointerSpaceBytesAvailable)                                   \
-  SC(old_pointer_space_bytes_committed,                                        \
-     V8.MemoryOldPointerSpaceBytesCommitted)                                   \
-  SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed)          \
-  SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable)      \
-  SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted)      \
-  SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed)                \
+  SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable)               \
+  SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted)               \
+  SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed)                         \
   SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable)             \
   SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted)             \
   SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed)                       \
index 2c1f91d..7fdae42 100644 (file)
@@ -108,15 +108,9 @@ void StatisticsExtension::GetCounters(
       {heap->new_space()->Size(), "new_space_live_bytes"},
       {heap->new_space()->Available(), "new_space_available_bytes"},
       {heap->new_space()->CommittedMemory(), "new_space_commited_bytes"},
-      {heap->old_pointer_space()->Size(), "old_pointer_space_live_bytes"},
-      {heap->old_pointer_space()->Available(),
-       "old_pointer_space_available_bytes"},
-      {heap->old_pointer_space()->CommittedMemory(),
-       "old_pointer_space_commited_bytes"},
-      {heap->old_data_space()->Size(), "old_data_space_live_bytes"},
-      {heap->old_data_space()->Available(), "old_data_space_available_bytes"},
-      {heap->old_data_space()->CommittedMemory(),
-       "old_data_space_commited_bytes"},
+      {heap->old_space()->Size(), "old_space_live_bytes"},
+      {heap->old_space()->Available(), "old_space_available_bytes"},
+      {heap->old_space()->CommittedMemory(), "old_space_commited_bytes"},
       {heap->code_space()->Size(), "code_space_live_bytes"},
       {heap->code_space()->Available(), "code_space_available_bytes"},
       {heap->code_space()->CommittedMemory(), "code_space_commited_bytes"},
index dc066dd..146b80b 100644 (file)
@@ -55,7 +55,7 @@ Handle<Oddball> Factory::NewOddball(Handle<Map> map,
                                     const char* to_string,
                                     Handle<Object> to_number,
                                     byte kind) {
-  Handle<Oddball> oddball = New<Oddball>(map, OLD_POINTER_SPACE);
+  Handle<Oddball> oddball = New<Oddball>(map, OLD_SPACE);
   Oddball::Initialize(isolate(), oddball, to_string, to_number, kind);
   return oddball;
 }
@@ -926,7 +926,7 @@ Handle<WeakCell> Factory::NewWeakCell(Handle<HeapObject> value) {
 
 Handle<AllocationSite> Factory::NewAllocationSite() {
   Handle<Map> map = allocation_site_map();
-  Handle<AllocationSite> site = New<AllocationSite>(map, OLD_POINTER_SPACE);
+  Handle<AllocationSite> site = New<AllocationSite>(map, OLD_SPACE);
   site->Initialize();
 
   // Link the site
@@ -1239,7 +1239,7 @@ Handle<JSFunction> Factory::NewFunction(Handle<Map> map,
                                         Handle<SharedFunctionInfo> info,
                                         Handle<Context> context,
                                         PretenureFlag pretenure) {
-  AllocationSpace space = pretenure == TENURED ? OLD_POINTER_SPACE : NEW_SPACE;
+  AllocationSpace space = pretenure == TENURED ? OLD_SPACE : NEW_SPACE;
   Handle<JSFunction> result = New<JSFunction>(map, space);
   InitializeFunction(result, info, context);
   return result;
@@ -1574,7 +1574,7 @@ Handle<GlobalObject> Factory::NewGlobalObject(Handle<JSFunction> constructor) {
   }
 
   // Allocate the global object and initialize it with the backing store.
-  Handle<GlobalObject> global = New<GlobalObject>(map, OLD_POINTER_SPACE);
+  Handle<GlobalObject> global = New<GlobalObject>(map, OLD_SPACE);
   isolate()->heap()->InitializeJSObjectFromMap(*global, *dictionary, *map);
 
   // Create a new map for the global object.
@@ -2062,8 +2062,7 @@ Handle<SharedFunctionInfo> Factory::NewSharedFunctionInfo(
     Handle<String> name,
     MaybeHandle<Code> maybe_code) {
   Handle<Map> map = shared_function_info_map();
-  Handle<SharedFunctionInfo> share = New<SharedFunctionInfo>(map,
-                                                             OLD_POINTER_SPACE);
+  Handle<SharedFunctionInfo> share = New<SharedFunctionInfo>(map, OLD_SPACE);
 
   // Set pointer fields.
   share->set_name(*name);
index ffc8282..7cf1fd1 100644 (file)
@@ -595,8 +595,7 @@ DEFINE_BOOL(print_max_heap_committed, false,
             "in name=value format on exit")
 DEFINE_BOOL(trace_gc_verbose, false,
             "print more details following each garbage collection")
-DEFINE_BOOL(trace_fragmentation, false,
-            "report fragmentation for old pointer and data pages")
+DEFINE_BOOL(trace_fragmentation, false, "report fragmentation")
 DEFINE_BOOL(collect_maps, true,
             "garbage collect maps from which no objects can be reached")
 DEFINE_BOOL(weak_embedded_maps_in_optimized_code, true,
index 9547ea2..7bd9b22 100644 (file)
@@ -409,8 +409,7 @@ typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
 // Keep this enum in sync with the ObjectSpace enum in v8.h
 enum AllocationSpace {
   NEW_SPACE,            // Semispaces collected with copying collector.
-  OLD_POINTER_SPACE,    // May contain pointers to new space.
-  OLD_DATA_SPACE,       // Must not have pointers to new space.
+  OLD_SPACE,            // May contain pointers to new space.
   CODE_SPACE,           // No pointers to new space, marked executable.
   MAP_SPACE,            // Only and all map objects.
   CELL_SPACE,           // Only and all cell objects.
@@ -419,7 +418,7 @@ enum AllocationSpace {
 
   FIRST_SPACE = NEW_SPACE,
   LAST_SPACE = LO_SPACE,
-  FIRST_PAGED_SPACE = OLD_POINTER_SPACE,
+  FIRST_PAGED_SPACE = OLD_SPACE,
   LAST_PAGED_SPACE = PROPERTY_CELL_SPACE
 };
 const int kSpaceTagSize = 3;
index 8a8edea..1d10be3 100644 (file)
@@ -81,12 +81,12 @@ AllocationResult Heap::AllocateOneByteInternalizedString(
   // Compute map and object size.
   Map* map = one_byte_internalized_string_map();
   int size = SeqOneByteString::SizeFor(str.length());
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
+  AllocationSpace space = SelectSpace(size, TENURED);
 
   // Allocate string.
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -113,12 +113,12 @@ AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
   // Compute map and object size.
   Map* map = internalized_string_map();
   int size = SeqTwoByteString::SizeFor(str.length());
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
+  AllocationSpace space = SelectSpace(size, TENURED);
 
   // Allocate string.
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -183,10 +183,8 @@ AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
     }
   }
 
-  if (OLD_POINTER_SPACE == space) {
-    allocation = old_pointer_space_->AllocateRaw(size_in_bytes);
-  } else if (OLD_DATA_SPACE == space) {
-    allocation = old_data_space_->AllocateRaw(size_in_bytes);
+  if (OLD_SPACE == space) {
+    allocation = old_space_->AllocateRaw(size_in_bytes);
   } else if (CODE_SPACE == space) {
     if (size_in_bytes <= code_space()->AreaSize()) {
       allocation = code_space_->AllocateRaw(size_in_bytes);
@@ -329,23 +327,11 @@ bool Heap::InToSpace(Object* object) {
 }
 
 
-bool Heap::InOldPointerSpace(Address address) {
-  return old_pointer_space_->Contains(address);
-}
+bool Heap::InOldSpace(Address address) { return old_space_->Contains(address); }
 
 
-bool Heap::InOldPointerSpace(Object* object) {
-  return InOldPointerSpace(reinterpret_cast<Address>(object));
-}
-
-
-bool Heap::InOldDataSpace(Address address) {
-  return old_data_space_->Contains(address);
-}
-
-
-bool Heap::InOldDataSpace(Object* object) {
-  return InOldDataSpace(reinterpret_cast<Address>(object));
+bool Heap::InOldSpace(Object* object) {
+  return InOldSpace(reinterpret_cast<Address>(object));
 }
 
 
@@ -377,53 +363,16 @@ void Heap::RecordWrites(Address address, int start, int len) {
 }
 
 
-OldSpace* Heap::TargetSpace(HeapObject* object) {
-  InstanceType type = object->map()->instance_type();
-  AllocationSpace space = TargetSpaceId(type);
-  return (space == OLD_POINTER_SPACE) ? old_pointer_space_ : old_data_space_;
-}
-
-
-AllocationSpace Heap::TargetSpaceId(InstanceType type) {
-  // Heap numbers and sequential strings are promoted to old data space, all
-  // other object types are promoted to old pointer space.  We do not use
-  // object->IsHeapNumber() and object->IsSeqString() because we already
-  // know that object has the heap object tag.
-
-  // These objects are never allocated in new space.
-  DCHECK(type != MAP_TYPE);
-  DCHECK(type != CODE_TYPE);
-  DCHECK(type != ODDBALL_TYPE);
-  DCHECK(type != CELL_TYPE);
-  DCHECK(type != PROPERTY_CELL_TYPE);
-
-  if (type <= LAST_NAME_TYPE) {
-    if (type == SYMBOL_TYPE) return OLD_POINTER_SPACE;
-    DCHECK(type < FIRST_NONSTRING_TYPE);
-    // There are four string representations: sequential strings, external
-    // strings, cons strings, and sliced strings.
-    // Only the latter two contain non-map-word pointers to heap objects.
-    return ((type & kIsIndirectStringMask) == kIsIndirectStringTag)
-               ? OLD_POINTER_SPACE
-               : OLD_DATA_SPACE;
-  } else {
-    return (type <= LAST_DATA_TYPE) ? OLD_DATA_SPACE : OLD_POINTER_SPACE;
-  }
-}
-
-
 bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
   // Object migration is governed by the following rules:
   //
-  // 1) Objects in new-space can be migrated to one of the old spaces
+  // 1) Objects in new-space can be migrated to the old space
   //    that matches their target space or they stay in new-space.
   // 2) Objects in old-space stay in the same space when migrating.
   // 3) Fillers (two or more words) can migrate due to left-trimming of
-  //    fixed arrays in new-space, old-data-space and old-pointer-space.
+  //    fixed arrays in new-space or old space.
   // 4) Fillers (one word) can never migrate, they are skipped by
   //    incremental marking explicitly to prevent invalid pattern.
-  // 5) Short external strings can end up in old pointer space when a cons
-  //    string in old pointer space is made external (String::MakeExternal).
   //
   // Since this function is used for debugging only, we do not place
   // asserts here, but check everything explicitly.
@@ -433,12 +382,10 @@ bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
   AllocationSpace src = chunk->owner()->identity();
   switch (src) {
     case NEW_SPACE:
-      return dst == src || dst == TargetSpaceId(type);
-    case OLD_POINTER_SPACE:
-      return dst == src && (dst == TargetSpaceId(type) || obj->IsFiller() ||
-                            obj->IsExternalString());
-    case OLD_DATA_SPACE:
-      return dst == src && dst == TargetSpaceId(type);
+      return dst == src || dst == OLD_SPACE;
+    case OLD_SPACE:
+      return dst == src &&
+             (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
     case CODE_SPACE:
       return dst == src && type == CODE_TYPE;
     case MAP_SPACE:
index bb1bc43..9fe8629 100644 (file)
@@ -84,8 +84,7 @@ Heap::Heap()
       global_ic_age_(0),
       scan_on_scavenge_pages_(0),
       new_space_(this),
-      old_pointer_space_(NULL),
-      old_data_space_(NULL),
+      old_space_(NULL),
       code_space_(NULL),
       map_space_(NULL),
       cell_space_(NULL),
@@ -173,18 +172,16 @@ Heap::Heap()
 intptr_t Heap::Capacity() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.Capacity() + old_pointer_space_->Capacity() +
-         old_data_space_->Capacity() + code_space_->Capacity() +
-         map_space_->Capacity() + cell_space_->Capacity() +
-         property_cell_space_->Capacity();
+  return new_space_.Capacity() + old_space_->Capacity() +
+         code_space_->Capacity() + map_space_->Capacity() +
+         cell_space_->Capacity() + property_cell_space_->Capacity();
 }
 
 
 intptr_t Heap::CommittedOldGenerationMemory() {
   if (!HasBeenSetUp()) return 0;
 
-  return old_pointer_space_->CommittedMemory() +
-         old_data_space_->CommittedMemory() + code_space_->CommittedMemory() +
+  return old_space_->CommittedMemory() + code_space_->CommittedMemory() +
          map_space_->CommittedMemory() + cell_space_->CommittedMemory() +
          property_cell_space_->CommittedMemory() + lo_space_->Size();
 }
@@ -201,8 +198,7 @@ size_t Heap::CommittedPhysicalMemory() {
   if (!HasBeenSetUp()) return 0;
 
   return new_space_.CommittedPhysicalMemory() +
-         old_pointer_space_->CommittedPhysicalMemory() +
-         old_data_space_->CommittedPhysicalMemory() +
+         old_space_->CommittedPhysicalMemory() +
          code_space_->CommittedPhysicalMemory() +
          map_space_->CommittedPhysicalMemory() +
          cell_space_->CommittedPhysicalMemory() +
@@ -231,17 +227,16 @@ void Heap::UpdateMaximumCommitted() {
 intptr_t Heap::Available() {
   if (!HasBeenSetUp()) return 0;
 
-  return new_space_.Available() + old_pointer_space_->Available() +
-         old_data_space_->Available() + code_space_->Available() +
-         map_space_->Available() + cell_space_->Available() +
-         property_cell_space_->Available();
+  return new_space_.Available() + old_space_->Available() +
+         code_space_->Available() + map_space_->Available() +
+         cell_space_->Available() + property_cell_space_->Available();
 }
 
 
 bool Heap::HasBeenSetUp() {
-  return old_pointer_space_ != NULL && old_data_space_ != NULL &&
-         code_space_ != NULL && map_space_ != NULL && cell_space_ != NULL &&
-         property_cell_space_ != NULL && lo_space_ != NULL;
+  return old_space_ != NULL && code_space_ != NULL && map_space_ != NULL &&
+         cell_space_ != NULL && property_cell_space_ != NULL &&
+         lo_space_ != NULL;
 }
 
 
@@ -344,22 +339,13 @@ void Heap::PrintShortHeapStatistics() {
            ", committed: %6" V8_PTR_PREFIX "d KB\n",
            new_space_.Size() / KB, new_space_.Available() / KB,
            new_space_.CommittedMemory() / KB);
-  PrintPID("Old pointers,       used: %6" V8_PTR_PREFIX
+  PrintPID("Old space,       used: %6" V8_PTR_PREFIX
            "d KB"
            ", available: %6" V8_PTR_PREFIX
            "d KB"
            ", committed: %6" V8_PTR_PREFIX "d KB\n",
-           old_pointer_space_->SizeOfObjects() / KB,
-           old_pointer_space_->Available() / KB,
-           old_pointer_space_->CommittedMemory() / KB);
-  PrintPID("Old data space,     used: %6" V8_PTR_PREFIX
-           "d KB"
-           ", available: %6" V8_PTR_PREFIX
-           "d KB"
-           ", committed: %6" V8_PTR_PREFIX "d KB\n",
-           old_data_space_->SizeOfObjects() / KB,
-           old_data_space_->Available() / KB,
-           old_data_space_->CommittedMemory() / KB);
+           old_space_->SizeOfObjects() / KB, old_space_->Available() / KB,
+           old_space_->CommittedMemory() / KB);
   PrintPID("Code space,         used: %6" V8_PTR_PREFIX
            "d KB"
            ", available: %6" V8_PTR_PREFIX
@@ -663,12 +649,8 @@ void Heap::GarbageCollectionEpilogue() {
 
     isolate_->counters()->heap_fraction_new_space()->AddSample(static_cast<int>(
         (new_space()->CommittedMemory() * 100.0) / CommittedMemory()));
-    isolate_->counters()->heap_fraction_old_pointer_space()->AddSample(
-        static_cast<int>((old_pointer_space()->CommittedMemory() * 100.0) /
-                         CommittedMemory()));
-    isolate_->counters()->heap_fraction_old_data_space()->AddSample(
-        static_cast<int>((old_data_space()->CommittedMemory() * 100.0) /
-                         CommittedMemory()));
+    isolate_->counters()->heap_fraction_old_space()->AddSample(static_cast<int>(
+        (old_space()->CommittedMemory() * 100.0) / CommittedMemory()));
     isolate_->counters()->heap_fraction_code_space()->AddSample(
         static_cast<int>((code_space()->CommittedMemory() * 100.0) /
                          CommittedMemory()));
@@ -721,8 +703,7 @@ void Heap::GarbageCollectionEpilogue() {
   UPDATE_FRAGMENTATION_FOR_SPACE(space)
 
   UPDATE_COUNTERS_FOR_SPACE(new_space)
-  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_pointer_space)
-  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_data_space)
+  UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(old_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(code_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(map_space)
   UPDATE_COUNTERS_AND_FRAGMENTATION_FOR_SPACE(cell_space)
@@ -800,7 +781,7 @@ void Heap::CollectAllGarbage(int flags, const char* gc_reason,
   // not matter, so long as we do not specify NEW_SPACE, which would not
   // cause a full GC.
   mark_compact_collector_.SetFlags(flags);
-  CollectGarbage(OLD_POINTER_SPACE, gc_reason, gc_callback_flags);
+  CollectGarbage(OLD_SPACE, gc_reason, gc_callback_flags);
   mark_compact_collector_.SetFlags(kNoGCFlags);
 }
 
@@ -1383,11 +1364,6 @@ static void VerifyNonPointerSpacePointers(Heap* heap) {
   for (HeapObject* object = code_it.Next(); object != NULL;
        object = code_it.Next())
     object->Iterate(&v);
-
-    HeapObjectIterator data_it(heap->old_data_space());
-    for (HeapObject* object = data_it.Next(); object != NULL;
-         object = data_it.Next())
-      object->Iterate(&v);
 }
 #endif  // VERIFY_HEAP
 
@@ -2182,13 +2158,7 @@ class ScavengingVisitor : public StaticVisitorBase {
     }
 
     AllocationResult allocation;
-    if (object_contents == DATA_OBJECT) {
-      DCHECK(heap->AllowedToBeMigrated(object, OLD_DATA_SPACE));
-      allocation = heap->old_data_space()->AllocateRaw(allocation_size);
-    } else {
-      DCHECK(heap->AllowedToBeMigrated(object, OLD_POINTER_SPACE));
-      allocation = heap->old_pointer_space()->AllocateRaw(allocation_size);
-    }
+    allocation = heap->old_space()->AllocateRaw(allocation_size);
 
     HeapObject* target = NULL;  // Initialization to please compiler.
     if (allocation.To(&target)) {
@@ -2604,14 +2574,14 @@ bool Heap::CreateInitialMaps() {
   set_empty_fixed_array(FixedArray::cast(obj));
 
   {
-    AllocationResult allocation = Allocate(null_map(), OLD_POINTER_SPACE);
+    AllocationResult allocation = Allocate(null_map(), OLD_SPACE);
     if (!allocation.To(&obj)) return false;
   }
   set_null_value(Oddball::cast(obj));
   Oddball::cast(obj)->set_kind(Oddball::kNull);
 
   {
-    AllocationResult allocation = Allocate(undefined_map(), OLD_POINTER_SPACE);
+    AllocationResult allocation = Allocate(undefined_map(), OLD_SPACE);
     if (!allocation.To(&obj)) return false;
   }
   set_undefined_value(Oddball::cast(obj));
@@ -2848,11 +2818,11 @@ AllocationResult Heap::AllocateHeapNumber(double value, MutableMode mode,
   int size = HeapNumber::kSize;
   STATIC_ASSERT(HeapNumber::kSize <= Page::kMaxRegularHeapObjectSize);
 
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
 
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -2901,8 +2871,7 @@ AllocationResult Heap::AllocateWeakCell(HeapObject* value) {
   STATIC_ASSERT(WeakCell::kSize <= Page::kMaxRegularHeapObjectSize);
   HeapObject* result = NULL;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(weak_cell_map());
@@ -3456,7 +3425,7 @@ AllocationResult Heap::AllocateForeign(Address address,
                                        PretenureFlag pretenure) {
   // Statically ensure that it is safe to allocate foreigns in paged spaces.
   STATIC_ASSERT(Foreign::kSize <= Page::kMaxRegularHeapObjectSize);
-  AllocationSpace space = (pretenure == TENURED) ? OLD_DATA_SPACE : NEW_SPACE;
+  AllocationSpace space = (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
   Foreign* result;
   AllocationResult allocation = Allocate(foreign_map(), space);
   if (!allocation.To(&result)) return allocation;
@@ -3470,10 +3439,10 @@ AllocationResult Heap::AllocateByteArray(int length, PretenureFlag pretenure) {
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = ByteArray::SizeFor(length);
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3504,20 +3473,17 @@ void Heap::CreateFillerObjectAt(Address addr, int size) {
 
 bool Heap::CanMoveObjectStart(HeapObject* object) {
   Address address = object->address();
-  bool is_in_old_pointer_space = InOldPointerSpace(address);
-  bool is_in_old_data_space = InOldDataSpace(address);
 
   if (lo_space()->Contains(object)) return false;
 
   Page* page = Page::FromAddress(address);
   // We can move the object start if:
-  // (1) the object is not in old pointer or old data space,
+  // (1) the object is not in old space,
   // (2) the page of the object was already swept,
   // (3) the page was already concurrently swept. This case is an optimization
   // for concurrent sweeping. The WasSwept predicate for concurrently swept
   // pages is set after sweeping all pages.
-  return (!is_in_old_pointer_space && !is_in_old_data_space) ||
-         page->WasSwept() || page->SweepingCompleted();
+  return !InOldSpace(address) || page->WasSwept() || page->SweepingCompleted();
 }
 
 
@@ -3634,10 +3600,10 @@ AllocationResult Heap::AllocateExternalArray(int length,
                                              void* external_pointer,
                                              PretenureFlag pretenure) {
   int size = ExternalArray::kAlignedSize;
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -3680,10 +3646,10 @@ AllocationResult Heap::AllocateFixedTypedArray(int length,
     size += kPointerSize;
   }
 #endif
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
 
   HeapObject* object;
-  AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+  AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
   if (!allocation.To(&object)) return allocation;
 
   if (array_type == kExternalFloat64Array) {
@@ -3847,8 +3813,7 @@ AllocationResult Heap::Allocate(Map* map, AllocationSpace space,
   DCHECK(map->instance_type() != MAP_TYPE);
   // If allocation failures are disallowed, we may allocate in a different
   // space when new space is full and the object is not a large object.
-  AllocationSpace retry_space =
-      (space != NEW_SPACE) ? space : TargetSpaceId(map->instance_type());
+  AllocationSpace retry_space = (space != NEW_SPACE) ? space : OLD_SPACE;
   int size = map->instance_size();
   if (allocation_site != NULL) {
     size += AllocationMemento::kSize;
@@ -3925,7 +3890,7 @@ AllocationResult Heap::AllocateJSObjectFromMap(
 
   // Allocate the JSObject.
   int size = map->instance_size();
-  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
   JSObject* js_obj;
   AllocationResult allocation = Allocate(map, space, allocation_site);
   if (!allocation.To(&js_obj)) return allocation;
@@ -3976,7 +3941,7 @@ AllocationResult Heap::CopyJSObject(JSObject* source, AllocationSite* site) {
   if (always_allocate()) {
     {
       AllocationResult allocation =
-          AllocateRaw(object_size, NEW_SPACE, OLD_POINTER_SPACE);
+          AllocateRaw(object_size, NEW_SPACE, OLD_SPACE);
       if (!allocation.To(&clone)) return allocation;
     }
     Address clone_address = clone->address();
@@ -4127,12 +4092,12 @@ AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
     map = internalized_string_map();
     size = SeqTwoByteString::SizeFor(chars);
   }
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, TENURED);
+  AllocationSpace space = SelectSpace(size, TENURED);
 
   // Allocate string.
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -4170,11 +4135,11 @@ AllocationResult Heap::AllocateRawOneByteString(int length,
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqOneByteString::SizeFor(length);
   DCHECK(size <= SeqOneByteString::kMaxSize);
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
 
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -4194,11 +4159,11 @@ AllocationResult Heap::AllocateRawTwoByteString(int length,
   DCHECK_GE(String::kMaxLength, length);
   int size = SeqTwoByteString::SizeFor(length);
   DCHECK(size <= SeqTwoByteString::kMaxSize);
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
 
   HeapObject* result;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
 
@@ -4215,8 +4180,7 @@ AllocationResult Heap::AllocateEmptyFixedArray() {
   int size = FixedArray::SizeFor(0);
   HeapObject* result;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   // Initialize the object.
@@ -4338,9 +4302,9 @@ AllocationResult Heap::AllocateRawFixedArray(int length,
     v8::internal::Heap::FatalProcessOutOfMemory("invalid array length", true);
   }
   int size = FixedArray::SizeFor(length);
-  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
 
-  return AllocateRaw(size, space, OLD_POINTER_SPACE);
+  return AllocateRaw(size, space, OLD_SPACE);
 }
 
 
@@ -4409,11 +4373,11 @@ AllocationResult Heap::AllocateRawFixedDoubleArray(int length,
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
-  AllocationSpace space = SelectSpace(size, OLD_DATA_SPACE, pretenure);
+  AllocationSpace space = SelectSpace(size, pretenure);
 
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&object)) return allocation;
   }
 
@@ -4428,11 +4392,11 @@ AllocationResult Heap::AllocateConstantPoolArray(
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
-  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+  AllocationSpace space = SelectSpace(size, TENURED);
 
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&object)) return allocation;
   }
   object = EnsureDoubleAligned(this, object, size);
@@ -4454,11 +4418,11 @@ AllocationResult Heap::AllocateExtendedConstantPoolArray(
 #ifndef V8_HOST_ARCH_64_BIT
   size += kPointerSize;
 #endif
-  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+  AllocationSpace space = SelectSpace(size, TENURED);
 
   HeapObject* object;
   {
-    AllocationResult allocation = AllocateRaw(size, space, OLD_POINTER_SPACE);
+    AllocationResult allocation = AllocateRaw(size, space, OLD_SPACE);
     if (!allocation.To(&object)) return allocation;
   }
   object = EnsureDoubleAligned(this, object, size);
@@ -4476,8 +4440,7 @@ AllocationResult Heap::AllocateEmptyConstantPoolArray() {
   int size = ConstantPoolArray::SizeFor(small);
   HeapObject* result = NULL;
   {
-    AllocationResult allocation =
-        AllocateRaw(size, OLD_DATA_SPACE, OLD_DATA_SPACE);
+    AllocationResult allocation = AllocateRaw(size, OLD_SPACE, OLD_SPACE);
     if (!allocation.To(&result)) return allocation;
   }
   result->set_map_no_write_barrier(constant_pool_array_map());
@@ -4492,7 +4455,7 @@ AllocationResult Heap::AllocateSymbol() {
 
   HeapObject* result = NULL;
   AllocationResult allocation =
-      AllocateRaw(Symbol::kSize, OLD_POINTER_SPACE, OLD_POINTER_SPACE);
+      AllocateRaw(Symbol::kSize, OLD_SPACE, OLD_SPACE);
   if (!allocation.To(&result)) return allocation;
 
   result->set_map_no_write_barrier(symbol_map());
@@ -4530,7 +4493,7 @@ AllocationResult Heap::AllocateStruct(InstanceType type) {
       return exception();
   }
   int size = map->instance_size();
-  AllocationSpace space = SelectSpace(size, OLD_POINTER_SPACE, TENURED);
+  AllocationSpace space = SelectSpace(size, TENURED);
   Struct* result;
   {
     AllocationResult allocation = Allocate(map, space);
@@ -4794,10 +4757,8 @@ void Heap::ReportHeapStatistics(const char* title) {
   isolate_->memory_allocator()->ReportStatistics();
   PrintF("To space : ");
   new_space_.ReportStatistics();
-  PrintF("Old pointer space : ");
-  old_pointer_space_->ReportStatistics();
-  PrintF("Old data space : ");
-  old_data_space_->ReportStatistics();
+  PrintF("Old space : ");
+  old_space_->ReportStatistics();
   PrintF("Code space : ");
   code_space_->ReportStatistics();
   PrintF("Map space : ");
@@ -4819,11 +4780,9 @@ bool Heap::Contains(HeapObject* value) { return Contains(value->address()); }
 bool Heap::Contains(Address addr) {
   if (isolate_->memory_allocator()->IsOutsideAllocatedSpace(addr)) return false;
   return HasBeenSetUp() &&
-         (new_space_.ToSpaceContains(addr) ||
-          old_pointer_space_->Contains(addr) ||
-          old_data_space_->Contains(addr) || code_space_->Contains(addr) ||
-          map_space_->Contains(addr) || cell_space_->Contains(addr) ||
-          property_cell_space_->Contains(addr) ||
+         (new_space_.ToSpaceContains(addr) || old_space_->Contains(addr) ||
+          code_space_->Contains(addr) || map_space_->Contains(addr) ||
+          cell_space_->Contains(addr) || property_cell_space_->Contains(addr) ||
           lo_space_->SlowContains(addr));
 }
 
@@ -4840,10 +4799,8 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
       return new_space_.ToSpaceContains(addr);
-    case OLD_POINTER_SPACE:
-      return old_pointer_space_->Contains(addr);
-    case OLD_DATA_SPACE:
-      return old_data_space_->Contains(addr);
+    case OLD_SPACE:
+      return old_space_->Contains(addr);
     case CODE_SPACE:
       return code_space_->Contains(addr);
     case MAP_SPACE:
@@ -4893,11 +4850,10 @@ void Heap::Verify() {
 
   new_space_.Verify();
 
-  old_pointer_space_->Verify(&visitor);
+  old_space_->Verify(&visitor);
   map_space_->Verify(&visitor);
 
   VerifyPointersVisitor no_dirty_regions_visitor;
-  old_data_space_->Verify(&no_dirty_regions_visitor);
   code_space_->Verify(&no_dirty_regions_visitor);
   cell_space_->Verify(&no_dirty_regions_visitor);
   property_cell_space_->Verify(&no_dirty_regions_visitor);
@@ -5215,10 +5171,8 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
   *stats->end_marker = HeapStats::kEndMarker;
   *stats->new_space_size = new_space_.SizeAsInt();
   *stats->new_space_capacity = static_cast<int>(new_space_.Capacity());
-  *stats->old_pointer_space_size = old_pointer_space_->SizeOfObjects();
-  *stats->old_pointer_space_capacity = old_pointer_space_->Capacity();
-  *stats->old_data_space_size = old_data_space_->SizeOfObjects();
-  *stats->old_data_space_capacity = old_data_space_->Capacity();
+  *stats->old_space_size = old_space_->SizeOfObjects();
+  *stats->old_space_capacity = old_space_->Capacity();
   *stats->code_space_size = code_space_->SizeOfObjects();
   *stats->code_space_capacity = code_space_->Capacity();
   *stats->map_space_size = map_space_->SizeOfObjects();
@@ -5249,8 +5203,7 @@ void Heap::RecordStats(HeapStats* stats, bool take_snapshot) {
 
 
 intptr_t Heap::PromotedSpaceSizeOfObjects() {
-  return old_pointer_space_->SizeOfObjects() +
-         old_data_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
+  return old_space_->SizeOfObjects() + code_space_->SizeOfObjects() +
          map_space_->SizeOfObjects() + cell_space_->SizeOfObjects() +
          property_cell_space_->SizeOfObjects() + lo_space_->SizeOfObjects();
 }
@@ -5374,17 +5327,11 @@ bool Heap::SetUp() {
   }
   new_space_top_after_last_gc_ = new_space()->top();
 
-  // Initialize old pointer space.
-  old_pointer_space_ = new OldSpace(this, max_old_generation_size_,
-                                    OLD_POINTER_SPACE, NOT_EXECUTABLE);
-  if (old_pointer_space_ == NULL) return false;
-  if (!old_pointer_space_->SetUp()) return false;
-
-  // Initialize old data space.
-  old_data_space_ = new OldSpace(this, max_old_generation_size_, OLD_DATA_SPACE,
-                                 NOT_EXECUTABLE);
-  if (old_data_space_ == NULL) return false;
-  if (!old_data_space_->SetUp()) return false;
+  // Initialize old space.
+  old_space_ =
+      new OldSpace(this, max_old_generation_size_, OLD_SPACE, NOT_EXECUTABLE);
+  if (old_space_ == NULL) return false;
+  if (!old_space_->SetUp()) return false;
 
   if (!isolate_->code_range()->SetUp(code_range_size_)) return false;
 
@@ -5513,12 +5460,8 @@ void Heap::TearDown() {
            MaximumCommittedMemory());
     PrintF("maximum_committed_by_new_space=%" V8_PTR_PREFIX "d ",
            new_space_.MaximumCommittedMemory());
-    PrintF("maximum_committed_by_old_pointer_space=%" V8_PTR_PREFIX "d ",
-           old_data_space_->MaximumCommittedMemory());
-    PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
-           old_pointer_space_->MaximumCommittedMemory());
-    PrintF("maximum_committed_by_old_data_space=%" V8_PTR_PREFIX "d ",
-           old_pointer_space_->MaximumCommittedMemory());
+    PrintF("maximum_committed_by_old_space=%" V8_PTR_PREFIX "d ",
+           old_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_code_space=%" V8_PTR_PREFIX "d ",
            code_space_->MaximumCommittedMemory());
     PrintF("maximum_committed_by_map_space=%" V8_PTR_PREFIX "d ",
@@ -5546,16 +5489,10 @@ void Heap::TearDown() {
 
   new_space_.TearDown();
 
-  if (old_pointer_space_ != NULL) {
-    old_pointer_space_->TearDown();
-    delete old_pointer_space_;
-    old_pointer_space_ = NULL;
-  }
-
-  if (old_data_space_ != NULL) {
-    old_data_space_->TearDown();
-    delete old_data_space_;
-    old_data_space_ = NULL;
+  if (old_space_ != NULL) {
+    old_space_->TearDown();
+    delete old_space_;
+    old_space_ = NULL;
   }
 
   if (code_space_ != NULL) {
@@ -5697,10 +5634,8 @@ Space* AllSpaces::next() {
   switch (counter_++) {
     case NEW_SPACE:
       return heap_->new_space();
-    case OLD_POINTER_SPACE:
-      return heap_->old_pointer_space();
-    case OLD_DATA_SPACE:
-      return heap_->old_data_space();
+    case OLD_SPACE:
+      return heap_->old_space();
     case CODE_SPACE:
       return heap_->code_space();
     case MAP_SPACE:
@@ -5719,10 +5654,8 @@ Space* AllSpaces::next() {
 
 PagedSpace* PagedSpaces::next() {
   switch (counter_++) {
-    case OLD_POINTER_SPACE:
-      return heap_->old_pointer_space();
-    case OLD_DATA_SPACE:
-      return heap_->old_data_space();
+    case OLD_SPACE:
+      return heap_->old_space();
     case CODE_SPACE:
       return heap_->code_space();
     case MAP_SPACE:
@@ -5739,10 +5672,8 @@ PagedSpace* PagedSpaces::next() {
 
 OldSpace* OldSpaces::next() {
   switch (counter_++) {
-    case OLD_POINTER_SPACE:
-      return heap_->old_pointer_space();
-    case OLD_DATA_SPACE:
-      return heap_->old_data_space();
+    case OLD_SPACE:
+      return heap_->old_space();
     case CODE_SPACE:
       return heap_->code_space();
     default:
@@ -5801,12 +5732,8 @@ ObjectIterator* SpaceIterator::CreateIterator() {
     case NEW_SPACE:
       iterator_ = new SemiSpaceIterator(heap_->new_space(), size_func_);
       break;
-    case OLD_POINTER_SPACE:
-      iterator_ =
-          new HeapObjectIterator(heap_->old_pointer_space(), size_func_);
-      break;
-    case OLD_DATA_SPACE:
-      iterator_ = new HeapObjectIterator(heap_->old_data_space(), size_func_);
+    case OLD_SPACE:
+      iterator_ = new HeapObjectIterator(heap_->old_space(), size_func_);
       break;
     case CODE_SPACE:
       iterator_ = new HeapObjectIterator(heap_->code_space(), size_func_);
index 7a17abb..ee4dd0f 100644 (file)
@@ -641,8 +641,7 @@ class Heap {
   Address NewSpaceTop() { return new_space_.top(); }
 
   NewSpace* new_space() { return &new_space_; }
-  OldSpace* old_pointer_space() { return old_pointer_space_; }
-  OldSpace* old_data_space() { return old_data_space_; }
+  OldSpace* old_space() { return old_space_; }
   OldSpace* code_space() { return code_space_; }
   MapSpace* map_space() { return map_space_; }
   CellSpace* cell_space() { return cell_space_; }
@@ -650,10 +649,8 @@ class Heap {
   LargeObjectSpace* lo_space() { return lo_space_; }
   PagedSpace* paged_space(int idx) {
     switch (idx) {
-      case OLD_POINTER_SPACE:
-        return old_pointer_space();
-      case OLD_DATA_SPACE:
-        return old_data_space();
+      case OLD_SPACE:
+        return old_space();
       case MAP_SPACE:
         return map_space();
       case CELL_SPACE:
@@ -681,18 +678,11 @@ class Heap {
     return new_space_.allocation_limit_address();
   }
 
-  Address* OldPointerSpaceAllocationTopAddress() {
-    return old_pointer_space_->allocation_top_address();
+  Address* OldSpaceAllocationTopAddress() {
+    return old_space_->allocation_top_address();
   }
-  Address* OldPointerSpaceAllocationLimitAddress() {
-    return old_pointer_space_->allocation_limit_address();
-  }
-
-  Address* OldDataSpaceAllocationTopAddress() {
-    return old_data_space_->allocation_top_address();
-  }
-  Address* OldDataSpaceAllocationLimitAddress() {
-    return old_data_space_->allocation_limit_address();
+  Address* OldSpaceAllocationLimitAddress() {
+    return old_space_->allocation_limit_address();
   }
 
   // TODO(hpayer): There is still a missmatch between capacity and actual
@@ -922,13 +912,9 @@ class Heap {
   inline bool InFromSpace(Object* object);
   inline bool InToSpace(Object* object);
 
-  // Returns whether the object resides in old pointer space.
-  inline bool InOldPointerSpace(Address address);
-  inline bool InOldPointerSpace(Object* object);
-
-  // Returns whether the object resides in old data space.
-  inline bool InOldDataSpace(Address address);
-  inline bool InOldDataSpace(Object* object);
+  // Returns whether the object resides in old space.
+  inline bool InOldSpace(Address address);
+  inline bool InOldSpace(Object* object);
 
   // Checks whether an address/object in the heap (including auxiliary
   // area and unused area).
@@ -940,10 +926,6 @@ class Heap {
   bool InSpace(Address addr, AllocationSpace space);
   bool InSpace(HeapObject* value, AllocationSpace space);
 
-  // Finds out which space an object should get promoted to based on its type.
-  inline OldSpace* TargetSpace(HeapObject* object);
-  static inline AllocationSpace TargetSpaceId(InstanceType type);
-
   // Checks whether the given object is allowed to be migrated from it's
   // current space into the given destination space. Used for debugging.
   inline bool AllowedToBeMigrated(HeapObject* object, AllocationSpace dest);
@@ -1580,8 +1562,7 @@ class Heap {
   int scan_on_scavenge_pages_;
 
   NewSpace new_space_;
-  OldSpace* old_pointer_space_;
-  OldSpace* old_data_space_;
+  OldSpace* old_space_;
   OldSpace* code_space_;
   MapSpace* map_space_;
   CellSpace* cell_space_;
@@ -1777,14 +1758,11 @@ class Heap {
   inline void UpdateOldSpaceLimits();
 
   // Selects the proper allocation space depending on the given object
-  // size, pretenuring decision, and preferred old-space.
+  // size and pretenuring decision.
   static AllocationSpace SelectSpace(int object_size,
-                                     AllocationSpace preferred_old_space,
                                      PretenureFlag pretenure) {
-    DCHECK(preferred_old_space == OLD_POINTER_SPACE ||
-           preferred_old_space == OLD_DATA_SPACE);
     if (object_size > Page::kMaxRegularHeapObjectSize) return LO_SPACE;
-    return (pretenure == TENURED) ? preferred_old_space : NEW_SPACE;
+    return (pretenure == TENURED) ? OLD_SPACE : NEW_SPACE;
   }
 
   HeapObject* DoubleAlignForDeserialization(HeapObject* object, int size);
@@ -2187,30 +2165,28 @@ class HeapStats {
   int* start_marker;                       //  0
   int* new_space_size;                     //  1
   int* new_space_capacity;                 //  2
-  intptr_t* old_pointer_space_size;        //  3
-  intptr_t* old_pointer_space_capacity;    //  4
-  intptr_t* old_data_space_size;           //  5
-  intptr_t* old_data_space_capacity;       //  6
-  intptr_t* code_space_size;               //  7
-  intptr_t* code_space_capacity;           //  8
-  intptr_t* map_space_size;                //  9
-  intptr_t* map_space_capacity;            // 10
-  intptr_t* cell_space_size;               // 11
-  intptr_t* cell_space_capacity;           // 12
-  intptr_t* lo_space_size;                 // 13
-  int* global_handle_count;                // 14
-  int* weak_global_handle_count;           // 15
-  int* pending_global_handle_count;        // 16
-  int* near_death_global_handle_count;     // 17
-  int* free_global_handle_count;           // 18
-  intptr_t* memory_allocator_size;         // 19
-  intptr_t* memory_allocator_capacity;     // 20
-  int* objects_per_type;                   // 21
-  int* size_per_type;                      // 22
-  int* os_error;                           // 23
-  int* end_marker;                         // 24
-  intptr_t* property_cell_space_size;      // 25
-  intptr_t* property_cell_space_capacity;  // 26
+  intptr_t* old_space_size;                //  3
+  intptr_t* old_space_capacity;            //  4
+  intptr_t* code_space_size;               //  5
+  intptr_t* code_space_capacity;           //  6
+  intptr_t* map_space_size;                //  7
+  intptr_t* map_space_capacity;            //  8
+  intptr_t* cell_space_size;               //  9
+  intptr_t* cell_space_capacity;           // 10
+  intptr_t* lo_space_size;                 // 11
+  int* global_handle_count;                // 12
+  int* weak_global_handle_count;           // 13
+  int* pending_global_handle_count;        // 14
+  int* near_death_global_handle_count;     // 15
+  int* free_global_handle_count;           // 16
+  intptr_t* memory_allocator_size;         // 17
+  intptr_t* memory_allocator_capacity;     // 18
+  int* objects_per_type;                   // 19
+  int* size_per_type;                      // 20
+  int* os_error;                           // 21
+  int* end_marker;                         // 22
+  intptr_t* property_cell_space_size;      // 23
+  intptr_t* property_cell_space_capacity;  // 24
 };
 
 
@@ -2269,12 +2245,11 @@ class AllSpaces BASE_EMBEDDED {
 };
 
 
-// Space iterator for iterating over all old spaces of the heap: Old pointer
-// space, old data space and code space.  Returns each space in turn, and null
-// when it is done.
+// Space iterator for iterating over all old spaces of the heap: Old space
+// and code space.  Returns each space in turn, and null when it is done.
 class OldSpaces BASE_EMBEDDED {
  public:
-  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
+  explicit OldSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
   OldSpace* next();
 
  private:
@@ -2284,11 +2259,11 @@ class OldSpaces BASE_EMBEDDED {
 
 
 // Space iterator for iterating over all the paged spaces of the heap: Map
-// space, old pointer space, old data space, code space and cell space.  Returns
+// space, old space, code space and cell space.  Returns
 // each space in turn, and null when it is done.
 class PagedSpaces BASE_EMBEDDED {
  public:
-  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_POINTER_SPACE) {}
+  explicit PagedSpaces(Heap* heap) : heap_(heap), counter_(OLD_SPACE) {}
   PagedSpace* next();
 
  private:
index 648d4c2..56cd688 100644 (file)
@@ -135,16 +135,6 @@ static void MarkObjectGreyDoNotEnqueue(Object* obj) {
 }
 
 
-static inline void MarkBlackOrKeepGrey(HeapObject* heap_object,
-                                       MarkBit mark_bit, int size) {
-  DCHECK(!Marking::IsImpossible(mark_bit));
-  if (mark_bit.Get()) return;
-  mark_bit.Set();
-  MemoryChunk::IncrementLiveBytesFromGC(heap_object->address(), size);
-  DCHECK(Marking::IsBlack(mark_bit));
-}
-
-
 static inline void MarkBlackOrKeepBlack(HeapObject* heap_object,
                                         MarkBit mark_bit, int size) {
   DCHECK(!Marking::IsImpossible(mark_bit));
@@ -357,8 +347,7 @@ void IncrementalMarking::DeactivateIncrementalWriteBarrierForSpace(
 
 
 void IncrementalMarking::DeactivateIncrementalWriteBarrier() {
-  DeactivateIncrementalWriteBarrierForSpace(heap_->old_pointer_space());
-  DeactivateIncrementalWriteBarrierForSpace(heap_->old_data_space());
+  DeactivateIncrementalWriteBarrierForSpace(heap_->old_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->cell_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->property_cell_space());
   DeactivateIncrementalWriteBarrierForSpace(heap_->map_space());
@@ -392,8 +381,7 @@ void IncrementalMarking::ActivateIncrementalWriteBarrier(NewSpace* space) {
 
 
 void IncrementalMarking::ActivateIncrementalWriteBarrier() {
-  ActivateIncrementalWriteBarrier(heap_->old_pointer_space());
-  ActivateIncrementalWriteBarrier(heap_->old_data_space());
+  ActivateIncrementalWriteBarrier(heap_->old_space());
   ActivateIncrementalWriteBarrier(heap_->cell_space());
   ActivateIncrementalWriteBarrier(heap_->property_cell_space());
   ActivateIncrementalWriteBarrier(heap_->map_space());
@@ -657,9 +645,7 @@ void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
 
 void IncrementalMarking::MarkObject(Heap* heap, HeapObject* obj) {
   MarkBit mark_bit = Marking::MarkBitFrom(obj);
-  if (mark_bit.data_only()) {
-    MarkBlackOrKeepGrey(obj, mark_bit, obj->Size());
-  } else if (Marking::IsWhite(mark_bit)) {
+  if (Marking::IsWhite(mark_bit)) {
     heap->incremental_marking()->WhiteToGreyAndPush(obj, mark_bit);
   }
 }
index 66b0a59..757b6c3 100644 (file)
@@ -15,8 +15,7 @@ namespace internal {
 
 MarkBit Marking::MarkBitFrom(Address addr) {
   MemoryChunk* p = MemoryChunk::FromAddress(addr);
-  return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr),
-                                         p->ContainsOnlyData());
+  return p->markbits()->MarkBitFromIndex(p->AddressToMarkbitIndex(addr));
 }
 
 
index c48a954..0156935 100644 (file)
@@ -135,8 +135,7 @@ static void VerifyMarking(PagedSpace* space) {
 
 
 static void VerifyMarking(Heap* heap) {
-  VerifyMarking(heap->old_pointer_space());
-  VerifyMarking(heap->old_data_space());
+  VerifyMarking(heap->old_space());
   VerifyMarking(heap->code_space());
   VerifyMarking(heap->cell_space());
   VerifyMarking(heap->property_cell_space());
@@ -201,8 +200,7 @@ static void VerifyEvacuation(NewSpace* space) {
 
 
 static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
-  if (FLAG_use_allocation_folding &&
-      (space == heap->old_pointer_space() || space == heap->old_data_space())) {
+  if (FLAG_use_allocation_folding && (space == heap->old_space())) {
     return;
   }
   PageIterator it(space);
@@ -216,8 +214,7 @@ static void VerifyEvacuation(Heap* heap, PagedSpace* space) {
 
 
 static void VerifyEvacuation(Heap* heap) {
-  VerifyEvacuation(heap, heap->old_pointer_space());
-  VerifyEvacuation(heap, heap->old_data_space());
+  VerifyEvacuation(heap, heap->old_space());
   VerifyEvacuation(heap, heap->code_space());
   VerifyEvacuation(heap, heap->cell_space());
   VerifyEvacuation(heap, heap->property_cell_space());
@@ -231,8 +228,7 @@ static void VerifyEvacuation(Heap* heap) {
 
 
 void MarkCompactCollector::SetUp() {
-  free_list_old_data_space_.Reset(new FreeList(heap_->old_data_space()));
-  free_list_old_pointer_space_.Reset(new FreeList(heap_->old_pointer_space()));
+  free_list_old_space_.Reset(new FreeList(heap_->old_space()));
 }
 
 
@@ -263,8 +259,7 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
   if (!compacting_) {
     DCHECK(evacuation_candidates_.length() == 0);
 
-    CollectEvacuationCandidates(heap()->old_pointer_space());
-    CollectEvacuationCandidates(heap()->old_data_space());
+    CollectEvacuationCandidates(heap()->old_space());
 
     if (FLAG_compact_code_space && (mode == NON_INCREMENTAL_COMPACTION ||
                                     FLAG_incremental_code_compaction)) {
@@ -279,8 +274,7 @@ bool MarkCompactCollector::StartCompaction(CompactionMode mode) {
       TraceFragmentation(heap()->property_cell_space());
     }
 
-    heap()->old_pointer_space()->EvictEvacuationCandidatesFromFreeLists();
-    heap()->old_data_space()->EvictEvacuationCandidatesFromFreeLists();
+    heap()->old_space()->EvictEvacuationCandidatesFromFreeLists();
     heap()->code_space()->EvictEvacuationCandidatesFromFreeLists();
 
     compacting_ = evacuation_candidates_.length() > 0;
@@ -367,8 +361,7 @@ void MarkCompactCollector::VerifyMarkbitsAreClean(NewSpace* space) {
 
 
 void MarkCompactCollector::VerifyMarkbitsAreClean() {
-  VerifyMarkbitsAreClean(heap_->old_pointer_space());
-  VerifyMarkbitsAreClean(heap_->old_data_space());
+  VerifyMarkbitsAreClean(heap_->old_space());
   VerifyMarkbitsAreClean(heap_->code_space());
   VerifyMarkbitsAreClean(heap_->cell_space());
   VerifyMarkbitsAreClean(heap_->property_cell_space());
@@ -427,8 +420,7 @@ static void ClearMarkbitsInNewSpace(NewSpace* space) {
 void MarkCompactCollector::ClearMarkbits() {
   ClearMarkbitsInPagedSpace(heap_->code_space());
   ClearMarkbitsInPagedSpace(heap_->map_space());
-  ClearMarkbitsInPagedSpace(heap_->old_pointer_space());
-  ClearMarkbitsInPagedSpace(heap_->old_data_space());
+  ClearMarkbitsInPagedSpace(heap_->old_space());
   ClearMarkbitsInPagedSpace(heap_->cell_space());
   ClearMarkbitsInPagedSpace(heap_->property_cell_space());
   ClearMarkbitsInNewSpace(heap_->new_space());
@@ -465,13 +457,9 @@ class MarkCompactCollector::SweeperTask : public v8::Task {
 
 
 void MarkCompactCollector::StartSweeperThreads() {
-  DCHECK(free_list_old_pointer_space_.get()->IsEmpty());
-  DCHECK(free_list_old_data_space_.get()->IsEmpty());
+  DCHECK(free_list_old_space_.get()->IsEmpty());
   V8::GetCurrentPlatform()->CallOnBackgroundThread(
-      new SweeperTask(heap(), heap()->old_data_space()),
-      v8::Platform::kShortRunningTask);
-  V8::GetCurrentPlatform()->CallOnBackgroundThread(
-      new SweeperTask(heap(), heap()->old_pointer_space()),
+      new SweeperTask(heap(), heap()->old_space()),
       v8::Platform::kShortRunningTask);
 }
 
@@ -482,20 +470,16 @@ void MarkCompactCollector::EnsureSweepingCompleted() {
   // If sweeping is not completed or not running at all, we try to complete it
   // here.
   if (!heap()->concurrent_sweeping_enabled() || !IsSweepingCompleted()) {
-    SweepInParallel(heap()->paged_space(OLD_DATA_SPACE), 0);
-    SweepInParallel(heap()->paged_space(OLD_POINTER_SPACE), 0);
+    SweepInParallel(heap()->paged_space(OLD_SPACE), 0);
   }
   // Wait twice for both jobs.
   if (heap()->concurrent_sweeping_enabled()) {
     pending_sweeper_jobs_semaphore_.Wait();
-    pending_sweeper_jobs_semaphore_.Wait();
   }
   ParallelSweepSpacesComplete();
   sweeping_in_progress_ = false;
-  RefillFreeList(heap()->paged_space(OLD_DATA_SPACE));
-  RefillFreeList(heap()->paged_space(OLD_POINTER_SPACE));
-  heap()->paged_space(OLD_DATA_SPACE)->ResetUnsweptFreeBytes();
-  heap()->paged_space(OLD_POINTER_SPACE)->ResetUnsweptFreeBytes();
+  RefillFreeList(heap()->paged_space(OLD_SPACE));
+  heap()->paged_space(OLD_SPACE)->ResetUnsweptFreeBytes();
 
 #ifdef VERIFY_HEAP
   if (FLAG_verify_heap && !evacuation()) {
@@ -518,13 +502,11 @@ bool MarkCompactCollector::IsSweepingCompleted() {
 void MarkCompactCollector::RefillFreeList(PagedSpace* space) {
   FreeList* free_list;
 
-  if (space == heap()->old_pointer_space()) {
-    free_list = free_list_old_pointer_space_.get();
-  } else if (space == heap()->old_data_space()) {
-    free_list = free_list_old_data_space_.get();
+  if (space == heap()->old_space()) {
+    free_list = free_list_old_space_.get();
   } else {
     // Any PagedSpace might invoke RefillFreeLists, so we need to make sure
-    // to only refill them for old data and pointer spaces.
+    // to only refill them for the old space.
     return;
   }
 
@@ -578,10 +560,8 @@ const char* AllocationSpaceName(AllocationSpace space) {
   switch (space) {
     case NEW_SPACE:
       return "NEW_SPACE";
-    case OLD_POINTER_SPACE:
-      return "OLD_POINTER_SPACE";
-    case OLD_DATA_SPACE:
-      return "OLD_DATA_SPACE";
+    case OLD_SPACE:
+      return "OLD_SPACE";
     case CODE_SPACE:
       return "CODE_SPACE";
     case MAP_SPACE:
@@ -652,9 +632,7 @@ static int FreeListFragmentation(PagedSpace* space, Page* p) {
 
 
 void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
-  DCHECK(space->identity() == OLD_POINTER_SPACE ||
-         space->identity() == OLD_DATA_SPACE ||
-         space->identity() == CODE_SPACE);
+  DCHECK(space->identity() == OLD_SPACE || space->identity() == CODE_SPACE);
 
   static const int kMaxMaxEvacuationCandidates = 1000;
   int number_of_pages = space->CountTotalPages();
@@ -1820,7 +1798,7 @@ static void DiscoverGreyObjectsOnPage(MarkingDeque* marking_deque,
       int trailing_zeros = base::bits::CountTrailingZeros32(grey_objects);
       grey_objects >>= trailing_zeros;
       offset += trailing_zeros;
-      MarkBit markbit(cell, 1 << offset, false);
+      MarkBit markbit(cell, 1 << offset);
       DCHECK(Marking::IsGrey(markbit));
       Marking::GreyToBlack(markbit);
       Address addr = cell_base + offset * kPointerSize;
@@ -2044,11 +2022,7 @@ void MarkCompactCollector::RefillMarkingDeque() {
   DiscoverGreyObjectsInNewSpace(heap(), &marking_deque_);
   if (marking_deque_.IsFull()) return;
 
-  DiscoverGreyObjectsInSpace(heap(), &marking_deque_,
-                             heap()->old_pointer_space());
-  if (marking_deque_.IsFull()) return;
-
-  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_data_space());
+  DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->old_space());
   if (marking_deque_.IsFull()) return;
 
   DiscoverGreyObjectsInSpace(heap(), &marking_deque_, heap()->code_space());
@@ -2733,7 +2707,7 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
   Address src_addr = src->address();
   DCHECK(heap()->AllowedToBeMigrated(src, dest));
   DCHECK(dest != LO_SPACE && size <= Page::kMaxRegularHeapObjectSize);
-  if (dest == OLD_POINTER_SPACE) {
+  if (dest == OLD_SPACE) {
     Address src_slot = src_addr;
     Address dst_slot = dst_addr;
     DCHECK(IsAligned(size, kPointerSize));
@@ -2805,7 +2779,7 @@ void MarkCompactCollector::MigrateObject(HeapObject* dst, HeapObject* src,
                        SlotsBuffer::IGNORE_OVERFLOW);
     Code::cast(dst)->Relocate(dst_addr - src_addr);
   } else {
-    DCHECK(dest == OLD_DATA_SPACE || dest == NEW_SPACE);
+    DCHECK(dest == NEW_SPACE);
     heap()->MoveBlock(dst_addr, src_addr, size);
   }
   heap()->OnMoveEvent(dst, src, size);
@@ -2927,21 +2901,19 @@ void PointersUpdatingVisitor::CheckLayoutDescriptorAndDie(Heap* heap,
     space_owner_id = 1;
   } else if (heap->new_space()->FromSpaceContains(slot_address)) {
     space_owner_id = 2;
-  } else if (heap->old_pointer_space()->ContainsSafe(slot_address)) {
+  } else if (heap->old_space()->ContainsSafe(slot_address)) {
     space_owner_id = 3;
-  } else if (heap->old_data_space()->ContainsSafe(slot_address)) {
-    space_owner_id = 4;
   } else if (heap->code_space()->ContainsSafe(slot_address)) {
-    space_owner_id = 5;
+    space_owner_id = 4;
   } else if (heap->map_space()->ContainsSafe(slot_address)) {
-    space_owner_id = 6;
+    space_owner_id = 5;
   } else if (heap->cell_space()->ContainsSafe(slot_address)) {
-    space_owner_id = 7;
+    space_owner_id = 6;
   } else if (heap->property_cell_space()->ContainsSafe(slot_address)) {
-    space_owner_id = 8;
+    space_owner_id = 7;
   } else {
     // Lo space or other.
-    space_owner_id = 9;
+    space_owner_id = 8;
   }
   data[index++] = space_owner_id;
   data[index++] = 0x20aaaaaaaaUL;
@@ -3032,14 +3004,12 @@ bool MarkCompactCollector::TryPromoteObject(HeapObject* object,
                                             int object_size) {
   DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
 
-  OldSpace* target_space = heap()->TargetSpace(object);
+  OldSpace* old_space = heap()->old_space();
 
-  DCHECK(target_space == heap()->old_pointer_space() ||
-         target_space == heap()->old_data_space());
   HeapObject* target;
-  AllocationResult allocation = target_space->AllocateRaw(object_size);
+  AllocationResult allocation = old_space->AllocateRaw(object_size);
   if (allocation.To(&target)) {
-    MigrateObject(target, object, object_size, target_space->identity());
+    MigrateObject(target, object, object_size, old_space->identity());
     heap()->IncrementPromotedObjectsSize(object_size);
     return true;
   }
@@ -3525,9 +3495,8 @@ static bool IsOnInvalidatedCodeObject(Address addr) {
   // we can safely go to the page from the slot address.
   Page* p = Page::FromAddress(addr);
 
-  // First check owner's identity because old pointer and old data spaces
-  // are swept lazily and might still have non-zero mark-bits on some
-  // pages.
+  // First check owner's identity because old space is swept concurrently or
+  // lazily and might still have non-zero mark-bits on some pages.
   if (p->owner()->identity() != CODE_SPACE) return false;
 
   // In code space only bits on evacuation candidates (but we don't record
@@ -3705,12 +3674,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
         p->ClearFlag(MemoryChunk::RESCAN_ON_EVACUATION);
 
         switch (space->identity()) {
-          case OLD_DATA_SPACE:
-            Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
-                  IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
-                                                       &updating_visitor);
-            break;
-          case OLD_POINTER_SPACE:
+          case OLD_SPACE:
             Sweep<SWEEP_AND_VISIT_LIVE_OBJECTS, SWEEP_ON_MAIN_THREAD,
                   IGNORE_SKIP_LIST, IGNORE_FREE_SPACE>(space, NULL, p,
                                                        &updating_visitor);
@@ -4230,9 +4194,7 @@ int MarkCompactCollector::SweepInParallel(PagedSpace* space,
 int MarkCompactCollector::SweepInParallel(Page* page, PagedSpace* space) {
   int max_freed = 0;
   if (page->TryParallelSweeping()) {
-    FreeList* free_list = space == heap()->old_pointer_space()
-                              ? free_list_old_pointer_space_.get()
-                              : free_list_old_data_space_.get();
+    FreeList* free_list = free_list_old_space_.get();
     FreeList private_free_list(space);
     max_freed = Sweep<SWEEP_ONLY, SWEEP_IN_PARALLEL, IGNORE_SKIP_LIST,
                       IGNORE_FREE_SPACE>(space, &private_free_list, page, NULL);
@@ -4357,10 +4319,7 @@ void MarkCompactCollector::SweepSpaces() {
   {
     GCTracer::Scope sweep_scope(heap()->tracer(),
                                 GCTracer::Scope::MC_SWEEP_OLDSPACE);
-    {
-      SweepSpace(heap()->old_pointer_space(), CONCURRENT_SWEEPING);
-      SweepSpace(heap()->old_data_space(), CONCURRENT_SWEEPING);
-    }
+    { SweepSpace(heap()->old_space(), CONCURRENT_SWEEPING); }
     sweeping_in_progress_ = true;
     if (heap()->concurrent_sweeping_enabled()) {
       StartSweeperThreads();
@@ -4423,8 +4382,7 @@ void MarkCompactCollector::ParallelSweepSpaceComplete(PagedSpace* space) {
 
 
 void MarkCompactCollector::ParallelSweepSpacesComplete() {
-  ParallelSweepSpaceComplete(heap()->old_pointer_space());
-  ParallelSweepSpaceComplete(heap()->old_data_space());
+  ParallelSweepSpaceComplete(heap()->old_space());
 }
 
 
index ddb993f..3680dcf 100644 (file)
@@ -603,11 +603,7 @@ class MarkCompactCollector {
     // to other evacuation candidates thus we have to
     // rescan the page after evacuation to discover and update all
     // pointers to evacuated objects.
-    if (page->owner()->identity() == OLD_DATA_SPACE) {
-      evacuation_candidates_.RemoveElement(page);
-    } else {
-      page->SetFlag(Page::RESCAN_ON_EVACUATION);
-    }
+    page->SetFlag(Page::RESCAN_ON_EVACUATION);
   }
 
   void RecordRelocSlot(RelocInfo* rinfo, Object* target);
@@ -907,8 +903,7 @@ class MarkCompactCollector {
   List<Page*> evacuation_candidates_;
   List<Code*> invalidated_code_;
 
-  SmartPointer<FreeList> free_list_old_data_space_;
-  SmartPointer<FreeList> free_list_old_pointer_space_;
+  SmartPointer<FreeList> free_list_old_space_;
 
   friend class Heap;
 };
index cfa2325..7d73a70 100644 (file)
@@ -206,8 +206,8 @@ void MemoryChunk::UpdateHighWaterMark(Address mark) {
 
 
 PointerChunkIterator::PointerChunkIterator(Heap* heap)
-    : state_(kOldPointerState),
-      old_pointer_iterator_(heap->old_pointer_space()),
+    : state_(kOldSpaceState),
+      old_pointer_iterator_(heap->old_space()),
       map_iterator_(heap->map_space()),
       lo_iterator_(heap->lo_space()) {}
 
index 39e5100..3073287 100644 (file)
@@ -41,8 +41,7 @@ HeapObjectIterator::HeapObjectIterator(PagedSpace* space,
 HeapObjectIterator::HeapObjectIterator(Page* page,
                                        HeapObjectCallback size_func) {
   Space* owner = page->owner();
-  DCHECK(owner == page->heap()->old_pointer_space() ||
-         owner == page->heap()->old_data_space() ||
+  DCHECK(owner == page->heap()->old_space() ||
          owner == page->heap()->map_space() ||
          owner == page->heap()->cell_space() ||
          owner == page->heap()->property_cell_space() ||
@@ -511,10 +510,6 @@ MemoryChunk* MemoryChunk::Initialize(Heap* heap, Address base, size_t size,
     chunk->SetFlag(IS_EXECUTABLE);
   }
 
-  if (owner == heap->old_data_space()) {
-    chunk->SetFlag(CONTAINS_ONLY_DATA);
-  }
-
   return chunk;
 }
 
@@ -926,11 +921,8 @@ void MemoryChunk::IncrementLiveBytesFromMutator(Address address, int by) {
 
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::NEW_SPACE) ==
               ObjectSpace::kObjectSpaceNewSpace);
-STATIC_ASSERT(static_cast<ObjectSpace>(1
-                                       << AllocationSpace::OLD_POINTER_SPACE) ==
-              ObjectSpace::kObjectSpaceOldPointerSpace);
-STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_DATA_SPACE) ==
-              ObjectSpace::kObjectSpaceOldDataSpace);
+STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::OLD_SPACE) ==
+              ObjectSpace::kObjectSpaceOldSpace);
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CODE_SPACE) ==
               ObjectSpace::kObjectSpaceCodeSpace);
 STATIC_ASSERT(static_cast<ObjectSpace>(1 << AllocationSpace::CELL_SPACE) ==
@@ -1121,11 +1113,7 @@ void PagedSpace::ReleasePage(Page* page) {
     page->Unlink();
   }
 
-  if (page->IsFlagSet(MemoryChunk::CONTAINS_ONLY_DATA)) {
-    heap()->isolate()->memory_allocator()->Free(page);
-  } else {
-    heap()->QueueMemoryChunkForFree(page);
-  }
+  heap()->QueueMemoryChunkForFree(page);
 
   DCHECK(Capacity() > 0);
   accounting_stats_.ShrinkSpace(AreaSize());
index 2eae029..70346bb 100644 (file)
@@ -43,11 +43,11 @@ class Isolate;
 //
 // During scavenges and mark-sweep collections we sometimes (after a store
 // buffer overflow) iterate intergenerational pointers without decoding heap
-// object maps so if the page belongs to old pointer space or large object
-// space it is essential to guarantee that the page does not contain any
+// object maps so if the page belongs to old space or large object space
+// it is essential to guarantee that the page does not contain any
 // garbage pointers to new space: every pointer aligned word which satisfies
 // the Heap::InNewSpace() predicate must be a pointer to a live heap object in
-// new space. Thus objects in old pointer and large object spaces should have a
+// new space. Thus objects in old space and large object spaces should have a
 // special layout (e.g. no bare integer fields). This requirement does not
 // apply to map space which is iterated in a special fashion. However we still
 // require pointer fields of dead maps to be cleaned.
@@ -102,8 +102,7 @@ class MarkBit {
  public:
   typedef uint32_t CellType;
 
-  inline MarkBit(CellType* cell, CellType mask, bool data_only)
-      : cell_(cell), mask_(mask), data_only_(data_only) {}
+  inline MarkBit(CellType* cell, CellType mask) : cell_(cell), mask_(mask) {}
 
   inline CellType* cell() { return cell_; }
   inline CellType mask() { return mask_; }
@@ -118,25 +117,19 @@ class MarkBit {
   inline bool Get() { return (*cell_ & mask_) != 0; }
   inline void Clear() { *cell_ &= ~mask_; }
 
-  inline bool data_only() { return data_only_; }
 
   inline MarkBit Next() {
     CellType new_mask = mask_ << 1;
     if (new_mask == 0) {
-      return MarkBit(cell_ + 1, 1, data_only_);
+      return MarkBit(cell_ + 1, 1);
     } else {
-      return MarkBit(cell_, new_mask, data_only_);
+      return MarkBit(cell_, new_mask);
     }
   }
 
  private:
   CellType* cell_;
   CellType mask_;
-  // This boolean indicates that the object is in a data-only space with no
-  // pointers.  This enables some optimizations when marking.
-  // It is expected that this field is inlined and turned into control flow
-  // at the place where the MarkBit object is created.
-  bool data_only_;
 };
 
 
@@ -187,10 +180,10 @@ class Bitmap {
     return reinterpret_cast<Bitmap*>(addr);
   }
 
-  inline MarkBit MarkBitFromIndex(uint32_t index, bool data_only = false) {
+  inline MarkBit MarkBitFromIndex(uint32_t index) {
     MarkBit::CellType mask = 1 << (index & kBitIndexMask);
     MarkBit::CellType* cell = this->cells() + (index >> kBitsPerCellLog2);
-    return MarkBit(cell, mask, data_only);
+    return MarkBit(cell, mask);
   }
 
   static inline void Clear(MemoryChunk* chunk);
@@ -370,7 +363,6 @@ class MemoryChunk {
     IN_FROM_SPACE,  // Mutually exclusive with IN_TO_SPACE.
     IN_TO_SPACE,    // All pages in new space has one of these two set.
     NEW_SPACE_BELOW_AGE_MARK,
-    CONTAINS_ONLY_DATA,
     EVACUATION_CANDIDATE,
     RESCAN_ON_EVACUATION,
     NEVER_EVACUATE,  // May contain immortal immutables.
@@ -565,8 +557,6 @@ class MemoryChunk {
     return IsFlagSet(IS_EXECUTABLE) ? EXECUTABLE : NOT_EXECUTABLE;
   }
 
-  bool ContainsOnlyData() { return IsFlagSet(CONTAINS_ONLY_DATA); }
-
   bool InNewSpace() {
     return (flags_ & ((1 << IN_FROM_SPACE) | (1 << IN_TO_SPACE))) != 0;
   }
@@ -2596,7 +2586,7 @@ class NewSpace : public Space {
 
 
 // -----------------------------------------------------------------------------
-// Old object space (excluding map objects)
+// Old object space (includes the old space of objects and code space)
 
 class OldSpace : public PagedSpace {
  public:
@@ -2830,7 +2820,7 @@ class PointerChunkIterator BASE_EMBEDDED {
   // Return NULL when the iterator is done.
   MemoryChunk* next() {
     switch (state_) {
-      case kOldPointerState: {
+      case kOldSpaceState: {
         if (old_pointer_iterator_.has_next()) {
           return old_pointer_iterator_.next();
         }
@@ -2869,7 +2859,7 @@ class PointerChunkIterator BASE_EMBEDDED {
 
 
  private:
-  enum State { kOldPointerState, kMapState, kLargeObjectState, kFinishedState };
+  enum State { kOldSpaceState, kMapState, kLargeObjectState, kFinishedState };
   State state_;
   PageIterator old_pointer_iterator_;
   PageIterator map_iterator_;
index ccbe339..806680e 100644 (file)
@@ -18,7 +18,6 @@ Address StoreBuffer::TopAddress() {
 void StoreBuffer::Mark(Address addr) {
   DCHECK(!heap_->cell_space()->Contains(addr));
   DCHECK(!heap_->code_space()->Contains(addr));
-  DCHECK(!heap_->old_data_space()->Contains(addr));
   Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
   *top++ = addr;
   heap_->public_set_store_buffer_top(top);
@@ -35,7 +34,6 @@ void StoreBuffer::EnterDirectlyIntoStoreBuffer(Address addr) {
   if (store_buffer_rebuilding_enabled_) {
     SLOW_DCHECK(!heap_->cell_space()->Contains(addr) &&
                 !heap_->code_space()->Contains(addr) &&
-                !heap_->old_data_space()->Contains(addr) &&
                 !heap_->new_space()->Contains(addr));
     Address* top = old_top_;
     *top++ = addr;
index dec11da..3ede0df 100644 (file)
@@ -464,7 +464,7 @@ void StoreBuffer::IteratePointersToNewSpace(ObjectSlotCallback slot_callback) {
                 heap_->mark_compact_collector()->EnsureSweepingCompleted();
               }
             }
-            CHECK(page->owner() == heap_->old_pointer_space());
+            CHECK(page->owner() == heap_->old_space());
             HeapObjectIterator iterator(page, NULL);
             for (HeapObject* heap_object = iterator.Next(); heap_object != NULL;
                  heap_object = iterator.Next()) {
@@ -534,7 +534,6 @@ void StoreBuffer::Compact() {
   for (Address* current = start_; current < top; current++) {
     DCHECK(!heap_->cell_space()->Contains(*current));
     DCHECK(!heap_->code_space()->Contains(*current));
-    DCHECK(!heap_->old_data_space()->Contains(*current));
     uintptr_t int_addr = reinterpret_cast<uintptr_t>(*current);
     // Shift out the last bits including any tags.
     int_addr >>= kPointerSizeLog2;
index 095c95d..a69905e 100644 (file)
@@ -3747,8 +3747,12 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
     return false;
   }
 
-  dominator_allocate = GetFoldableDominator(dominator_allocate);
-  if (dominator_allocate == NULL) {
+
+  if (!IsFoldable(dominator_allocate)) {
+    if (FLAG_trace_allocation_folding) {
+      PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n", id(),
+             Mnemonic(), dominator->id(), dominator->Mnemonic());
+    }
     return false;
   }
 
@@ -3778,12 +3782,9 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
     }
   }
 
-  DCHECK((IsNewSpaceAllocation() &&
-         dominator_allocate->IsNewSpaceAllocation()) ||
-         (IsOldDataSpaceAllocation() &&
-         dominator_allocate->IsOldDataSpaceAllocation()) ||
-         (IsOldPointerSpaceAllocation() &&
-         dominator_allocate->IsOldPointerSpaceAllocation()));
+  DCHECK(
+      (IsNewSpaceAllocation() && dominator_allocate->IsNewSpaceAllocation()) ||
+      (IsOldSpaceAllocation() && dominator_allocate->IsOldSpaceAllocation()));
 
   // First update the size of the dominator allocate instruction.
   dominator_size = dominator_allocate->size();
@@ -3874,70 +3875,6 @@ bool HAllocate::HandleSideEffectDominator(GVNFlag side_effect,
 }
 
 
-HAllocate* HAllocate::GetFoldableDominator(HAllocate* dominator) {
-  if (!IsFoldable(dominator)) {
-    // We cannot hoist old space allocations over new space allocations.
-    if (IsNewSpaceAllocation() || dominator->IsNewSpaceAllocation()) {
-      if (FLAG_trace_allocation_folding) {
-        PrintF("#%d (%s) cannot fold into #%d (%s), new space hoisting\n",
-            id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
-      }
-      return NULL;
-    }
-
-    HAllocate* dominator_dominator = dominator->dominating_allocate_;
-
-    // We can hoist old data space allocations over an old pointer space
-    // allocation and vice versa. For that we have to check the dominator
-    // of the dominator allocate instruction.
-    if (dominator_dominator == NULL) {
-      dominating_allocate_ = dominator;
-      if (FLAG_trace_allocation_folding) {
-        PrintF("#%d (%s) cannot fold into #%d (%s), different spaces\n",
-            id(), Mnemonic(), dominator->id(), dominator->Mnemonic());
-      }
-      return NULL;
-    }
-
-    // We can just fold old space allocations that are in the same basic block,
-    // since it is not guaranteed that we fill up the whole allocated old
-    // space memory.
-    // TODO(hpayer): Remove this limitation and add filler maps for each each
-    // allocation as soon as we have store elimination.
-    if (block()->block_id() != dominator_dominator->block()->block_id()) {
-      if (FLAG_trace_allocation_folding) {
-        PrintF("#%d (%s) cannot fold into #%d (%s), different basic blocks\n",
-            id(), Mnemonic(), dominator_dominator->id(),
-            dominator_dominator->Mnemonic());
-      }
-      return NULL;
-    }
-
-    DCHECK((IsOldDataSpaceAllocation() &&
-           dominator_dominator->IsOldDataSpaceAllocation()) ||
-           (IsOldPointerSpaceAllocation() &&
-           dominator_dominator->IsOldPointerSpaceAllocation()));
-
-    int32_t current_size = HConstant::cast(size())->GetInteger32Constant();
-    HStoreNamedField* dominator_free_space_size =
-        dominator->filler_free_space_size_;
-    if (dominator_free_space_size != NULL) {
-      // We already hoisted one old space allocation, i.e., we already installed
-      // a filler map. Hence, we just have to update the free space size.
-      dominator->UpdateFreeSpaceFiller(current_size);
-    } else {
-      // This is the first old space allocation that gets hoisted. We have to
-      // install a filler map since the follwing allocation may cause a GC.
-      dominator->CreateFreeSpaceFiller(current_size);
-    }
-
-    // We can hoist the old space allocation over the actual dominator.
-    return dominator_dominator;
-  }
-  return dominator;
-}
-
-
 void HAllocate::UpdateFreeSpaceFiller(int32_t free_space_size) {
   DCHECK(filler_free_space_size_ != NULL);
   Zone* zone = block()->zone();
@@ -4005,8 +3942,7 @@ void HAllocate::ClearNextMapWord(int offset) {
 std::ostream& HAllocate::PrintDataTo(std::ostream& os) const {  // NOLINT
   os << NameOf(size()) << " (";
   if (IsNewSpaceAllocation()) os << "N";
-  if (IsOldPointerSpaceAllocation()) os << "P";
-  if (IsOldDataSpaceAllocation()) os << "D";
+  if (IsOldSpaceAllocation()) os << "P";
   if (MustAllocateDoubleAligned()) os << "A";
   if (MustPrefillWithFiller()) os << "F";
   return os << ")";
index 0ec6887..f3a353c 100644 (file)
@@ -5552,12 +5552,8 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
     return (flags_ & ALLOCATE_IN_NEW_SPACE) != 0;
   }
 
-  bool IsOldDataSpaceAllocation() const {
-    return (flags_ & ALLOCATE_IN_OLD_DATA_SPACE) != 0;
-  }
-
-  bool IsOldPointerSpaceAllocation() const {
-    return (flags_ & ALLOCATE_IN_OLD_POINTER_SPACE) != 0;
+  bool IsOldSpaceAllocation() const {
+    return (flags_ & ALLOCATE_IN_OLD_SPACE) != 0;
   }
 
   bool MustAllocateDoubleAligned() const {
@@ -5590,8 +5586,7 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
  private:
   enum Flags {
     ALLOCATE_IN_NEW_SPACE = 1 << 0,
-    ALLOCATE_IN_OLD_DATA_SPACE = 1 << 1,
-    ALLOCATE_IN_OLD_POINTER_SPACE = 1 << 2,
+    ALLOCATE_IN_OLD_SPACE = 1 << 2,
     ALLOCATE_DOUBLE_ALIGNED = 1 << 3,
     PREFILL_WITH_FILLER = 1 << 4,
     CLEAR_NEXT_MAP_WORD = 1 << 5
@@ -5627,10 +5622,8 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
 
   static Flags ComputeFlags(PretenureFlag pretenure_flag,
                             InstanceType instance_type) {
-    Flags flags = pretenure_flag == TENURED
-        ? (Heap::TargetSpaceId(instance_type) == OLD_POINTER_SPACE
-            ? ALLOCATE_IN_OLD_POINTER_SPACE : ALLOCATE_IN_OLD_DATA_SPACE)
-        : ALLOCATE_IN_NEW_SPACE;
+    Flags flags = pretenure_flag == TENURED ? ALLOCATE_IN_OLD_SPACE
+                                            : ALLOCATE_IN_NEW_SPACE;
     if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
       flags = static_cast<Flags>(flags | ALLOCATE_DOUBLE_ALIGNED);
     }
@@ -5672,9 +5665,7 @@ class HAllocate FINAL : public HTemplateInstruction<2> {
 
   bool IsFoldable(HAllocate* allocate) {
     return (IsNewSpaceAllocation() && allocate->IsNewSpaceAllocation()) ||
-        (IsOldDataSpaceAllocation() && allocate->IsOldDataSpaceAllocation()) ||
-        (IsOldPointerSpaceAllocation() &&
-            allocate->IsOldPointerSpaceAllocation());
+           (IsOldSpaceAllocation() && allocate->IsOldSpaceAllocation());
   }
 
   void ClearNextMapWord(int offset);
index ed3e4b2..30315d1 100644 (file)
@@ -5249,13 +5249,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5318,13 +5314,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index c25b817..2fe1840 100644 (file)
@@ -1326,12 +1326,11 @@ void MacroAssembler::Allocate(int object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1403,12 +1402,11 @@ void MacroAssembler::Allocate(int header_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1478,12 +1476,11 @@ void MacroAssembler::Allocate(Register object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
index 8c135f2..6a598b1 100644 (file)
@@ -601,7 +601,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old pointer space. If the given space
+  // Allocate an object in new space or old space. If the given space
   // is exhausted control continues at the gc_required label. The allocated
   // object is returned in result and end of the new object is returned in
   // result_end. The register scratch can be passed as no_reg in which case
index 166ac42..b59fd3b 100644 (file)
@@ -27,10 +27,8 @@ enum AllocationFlags {
   SIZE_IN_WORDS = 1 << 2,
   // Align the allocation to a multiple of kDoubleSize
   DOUBLE_ALIGNMENT = 1 << 3,
-  // Directly allocate in old pointer space
-  PRETENURE_OLD_POINTER_SPACE = 1 << 4,
-  // Directly allocate in old data space
-  PRETENURE_OLD_DATA_SPACE = 1 << 5
+  // Directly allocate in old space
+  PRETENURE = 1 << 4,
 };
 
 
@@ -251,11 +249,8 @@ class AllocationUtils {
  public:
   static ExternalReference GetAllocationTopReference(
       Isolate* isolate, AllocationFlags flags) {
-    if ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) {
-      return ExternalReference::old_pointer_space_allocation_top_address(
-          isolate);
-    } else if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
-      return ExternalReference::old_data_space_allocation_top_address(isolate);
+    if ((flags & PRETENURE) != 0) {
+      return ExternalReference::old_space_allocation_top_address(isolate);
     }
     return ExternalReference::new_space_allocation_top_address(isolate);
   }
@@ -263,12 +258,8 @@ class AllocationUtils {
 
   static ExternalReference GetAllocationLimitReference(
       Isolate* isolate, AllocationFlags flags) {
-    if ((flags & PRETENURE_OLD_POINTER_SPACE) != 0) {
-      return ExternalReference::old_pointer_space_allocation_limit_address(
-          isolate);
-    } else if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
-      return ExternalReference::old_data_space_allocation_limit_address(
-          isolate);
+    if ((flags & PRETENURE) != 0) {
+      return ExternalReference::old_space_allocation_limit_address(isolate);
     }
     return ExternalReference::new_space_allocation_limit_address(isolate);
   }
index 18d0c9b..6673a86 100644 (file)
@@ -5424,13 +5424,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
@@ -5492,13 +5488,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 983f549..46aa0a5 100644 (file)
@@ -3349,12 +3349,11 @@ void MacroAssembler::Allocate(int object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       Branch(gc_required, Ugreater_equal, result, Operand(t9));
     }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
@@ -3436,12 +3435,11 @@ void MacroAssembler::Allocate(Register object_size,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     And(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     Branch(&aligned, eq, scratch2, Operand(zero_reg));
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       Branch(gc_required, Ugreater_equal, result, Operand(t9));
     }
     li(scratch2, Operand(isolate()->factory()->one_pointer_filler_map()));
index 44f6c4b..5a06535 100644 (file)
@@ -488,7 +488,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support.
 
-  // Allocate an object in new space or old pointer space. The object_size is
+  // Allocate an object in new space or old space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index 51c5ec4..114933f 100644 (file)
@@ -5460,13 +5460,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
   if (instr->size()->IsConstantOperand()) {
     int32_t size = ToInteger32(LConstantOperand::cast(instr->size()));
@@ -5529,13 +5525,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 3c1c2b0..c001ac0 100644 (file)
@@ -509,7 +509,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support.
 
-  // Allocate an object in new space or old pointer space. The object_size is
+  // Allocate an object in new space or old space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index ccc3663..a7a7ba1 100644 (file)
@@ -763,7 +763,7 @@ enum InstanceType {
   // Boundaries for testing for a fixed typed array.
   FIRST_FIXED_TYPED_ARRAY_TYPE = FIXED_INT8_ARRAY_TYPE,
   LAST_FIXED_TYPED_ARRAY_TYPE = FIXED_UINT8_CLAMPED_ARRAY_TYPE,
-  // Boundary for promotion to old data space/old pointer space.
+  // Boundary for promotion to old space.
   LAST_DATA_TYPE = FILLER_TYPE,
   // Boundary for objects represented as JSReceiver (i.e. JSObject or JSProxy).
   // Note that there is no range for JSObject or JSProxy, since their subtypes
@@ -10266,8 +10266,6 @@ class JSDataView: public JSArrayBufferView {
 
 
 // Foreign describes objects pointing from JavaScript to C structures.
-// Since they cannot contain references to JS HeapObjects they can be
-// placed in old_data_space.
 class Foreign: public HeapObject {
  public:
   // [address]: field containing the address.
index 9af13ae..3653fd2 100644 (file)
@@ -5687,13 +5687,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5759,13 +5755,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 54e40f7..31b9165 100644 (file)
@@ -1384,7 +1384,6 @@ void MacroAssembler::Allocate(int object_size, Register result,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
 #if V8_TARGET_ARCH_PPC64
     STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 #else
@@ -1392,7 +1391,7 @@ void MacroAssembler::Allocate(int object_size, Register result,
     andi(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     beq(&aligned, cr0);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmpl(result, ip);
       bge(gc_required);
     }
@@ -1483,7 +1482,6 @@ void MacroAssembler::Allocate(Register object_size, Register result,
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
     // Align the next allocation. Storing the filler map without checking top is
     // safe in new-space because the limit of the heap is aligned there.
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
 #if V8_TARGET_ARCH_PPC64
     STATIC_ASSERT(kPointerAlignment == kDoubleAlignment);
 #else
@@ -1491,7 +1489,7 @@ void MacroAssembler::Allocate(Register object_size, Register result,
     andi(scratch2, result, Operand(kDoubleAlignmentMask));
     Label aligned;
     beq(&aligned, cr0);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmpl(result, ip);
       bge(gc_required);
     }
index fc56970..6fa1944 100644 (file)
@@ -603,7 +603,7 @@ class MacroAssembler : public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old pointer space. The object_size is
+  // Allocate an object in new space or old space. The object_size is
   // specified either in bytes or in words if the allocation flag SIZE_IN_WORDS
   // is passed. If the space is exhausted control continues at the gc_required
   // label. The allocated object is returned in result. If the flag
index 00405dd..7e22320 100644 (file)
@@ -105,18 +105,10 @@ ExternalReferenceTable::ExternalReferenceTable(Isolate* isolate) {
   Add(ExternalReference::get_make_code_young_function(isolate).address(),
       "Code::MakeCodeYoung");
   Add(ExternalReference::cpu_features().address(), "cpu_features");
-  Add(ExternalReference::old_pointer_space_allocation_top_address(isolate)
-          .address(),
-      "Heap::OldPointerSpaceAllocationTopAddress");
-  Add(ExternalReference::old_pointer_space_allocation_limit_address(isolate)
-          .address(),
-      "Heap::OldPointerSpaceAllocationLimitAddress");
-  Add(ExternalReference::old_data_space_allocation_top_address(isolate)
-          .address(),
-      "Heap::OldDataSpaceAllocationTopAddress");
-  Add(ExternalReference::old_data_space_allocation_limit_address(isolate)
-          .address(),
-      "Heap::OldDataSpaceAllocationLimitAddress");
+  Add(ExternalReference::old_space_allocation_top_address(isolate).address(),
+      "Heap::OldSpaceAllocationTopAddress");
+  Add(ExternalReference::old_space_allocation_limit_address(isolate).address(),
+      "Heap::OldSpaceAllocationLimitAddress");
   Add(ExternalReference::allocation_sites_list_address(isolate).address(),
       "Heap::allocation_sites_list_address()");
   Add(ExternalReference::address_of_uint32_bias().address(), "uint32_bias");
@@ -852,12 +844,10 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
   // Write barrier support costs around 1% in startup time.  In fact there
   // are no new space objects in current boot snapshots, so it's not needed,
   // but that may change.
-  bool write_barrier_needed = (current_object_address != NULL &&
-                               source_space != NEW_SPACE &&
-                               source_space != CELL_SPACE &&
-                               source_space != PROPERTY_CELL_SPACE &&
-                               source_space != CODE_SPACE &&
-                               source_space != OLD_DATA_SPACE);
+  bool write_barrier_needed =
+      (current_object_address != NULL && source_space != NEW_SPACE &&
+       source_space != CELL_SPACE && source_space != PROPERTY_CELL_SPACE &&
+       source_space != CODE_SPACE);
   while (current < limit) {
     byte data = source_.Get();
     switch (data) {
@@ -963,8 +953,7 @@ void Deserializer::ReadData(Object** current, Object** limit, int source_space,
 #define ALL_SPACES(where, how, within)                    \
   CASE_STATEMENT(where, how, within, NEW_SPACE)           \
   CASE_BODY(where, how, within, NEW_SPACE)                \
-  CASE_STATEMENT(where, how, within, OLD_DATA_SPACE)      \
-  CASE_STATEMENT(where, how, within, OLD_POINTER_SPACE)   \
+  CASE_STATEMENT(where, how, within, OLD_SPACE)           \
   CASE_STATEMENT(where, how, within, CODE_SPACE)          \
   CASE_STATEMENT(where, how, within, MAP_SPACE)           \
   CASE_STATEMENT(where, how, within, CELL_SPACE)          \
@@ -1720,7 +1709,7 @@ void Serializer::ObjectSerializer::SerializeExternalString() {
 
   AllocationSpace space = (allocation_size > Page::kMaxRegularHeapObjectSize)
                               ? LO_SPACE
-                              : OLD_DATA_SPACE;
+                              : OLD_SPACE;
   SerializePrologue(space, allocation_size, map);
 
   // Output the rest of the imaginary string.
index ffa0c4c..8838d61 100644 (file)
@@ -5416,13 +5416,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5478,13 +5474,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
   }
 
   int flags = 0;
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 5a17b3a..89e5cc9 100644 (file)
@@ -4122,7 +4122,6 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
     // Align the next allocation. Storing the filler map without checking top
     // is safe in new-space because the limit of the heap is aligned there.
     DCHECK(kPointerSize * 2 == kDoubleSize);
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     // Make sure scratch is not clobbered by this function as it might be
     // used in UpdateAllocationTopHelper later.
@@ -4130,7 +4129,7 @@ void MacroAssembler::MakeSureDoubleAlignedHelper(Register result,
     Label aligned;
     testl(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       ExternalReference allocation_limit =
           AllocationUtils::GetAllocationLimitReference(isolate(), flags);
       cmpp(result, ExternalOperand(allocation_limit));
index 1eb7f6e..7e1c1c3 100644 (file)
@@ -1137,7 +1137,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old pointer space. If the given space
+  // Allocate an object in new space or old space. If the given space
   // is exhausted control continues at the gc_required label. The allocated
   // object is returned in result and end of the new object is returned in
   // result_end. The register scratch can be passed as no_reg in which case
index 4467d2e..04b9e2a 100644 (file)
@@ -5855,13 +5855,9 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
   if (instr->hydrogen()->MustAllocateDoubleAligned()) {
     flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
   }
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
+    flags = static_cast<AllocationFlags>(flags | PRETENURE);
   }
 
   if (instr->size()->IsConstantOperand()) {
@@ -5924,13 +5920,9 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
 
   int flags = AllocateDoubleAlignFlag::encode(
       instr->hydrogen()->MustAllocateDoubleAligned());
-  if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
-    DCHECK(!instr->hydrogen()->IsOldDataSpaceAllocation());
-    DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_POINTER_SPACE);
-  } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+  if (instr->hydrogen()->IsOldSpaceAllocation()) {
     DCHECK(!instr->hydrogen()->IsNewSpaceAllocation());
-    flags = AllocateTargetSpace::update(flags, OLD_DATA_SPACE);
+    flags = AllocateTargetSpace::update(flags, OLD_SPACE);
   } else {
     flags = AllocateTargetSpace::update(flags, NEW_SPACE);
   }
index 9e449f9..6b82f3d 100644 (file)
@@ -1298,12 +1298,11 @@ void MacroAssembler::Allocate(int object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1375,12 +1374,11 @@ void MacroAssembler::Allocate(int header_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
@@ -1450,12 +1448,11 @@ void MacroAssembler::Allocate(Register object_size,
   // Align the next allocation. Storing the filler map without checking top is
   // safe in new-space because the limit of the heap is aligned there.
   if ((flags & DOUBLE_ALIGNMENT) != 0) {
-    DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
     DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
     Label aligned;
     test(result, Immediate(kDoubleAlignmentMask));
     j(zero, &aligned, Label::kNear);
-    if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
+    if ((flags & PRETENURE) != 0) {
       cmp(result, Operand::StaticVariable(allocation_limit));
       j(above_equal, gc_required);
     }
index 061709b..7411405 100644 (file)
@@ -566,7 +566,7 @@ class MacroAssembler: public Assembler {
   // ---------------------------------------------------------------------------
   // Allocation support
 
-  // Allocate an object in new space or old pointer space. If the given space
+  // Allocate an object in new space or old space. If the given space
   // is exhausted control continues at the gc_required label. The allocated
   // object is returned in result and end of the new object is returned in
   // result_end. The register scratch can be passed as no_reg in which case
index 0ec9934..66f69f8 100644 (file)
@@ -55,18 +55,18 @@ static AllocationResult AllocateAfterFailures() {
   heap->CopyJSObject(JSObject::cast(object)).ToObjectChecked();
 
   // Old data space.
-  SimulateFullSpace(heap->old_data_space());
+  SimulateFullSpace(heap->old_space());
   heap->AllocateByteArray(100, TENURED).ToObjectChecked();
 
   // Old pointer space.
-  SimulateFullSpace(heap->old_pointer_space());
+  SimulateFullSpace(heap->old_space());
   heap->AllocateFixedArray(10000, TENURED).ToObjectChecked();
 
   // Large object space.
   static const int kLargeObjectSpaceFillerLength = 3 * (Page::kPageSize / 10);
   static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
       kLargeObjectSpaceFillerLength);
-  DCHECK(kLargeObjectSpaceFillerSize > heap->old_pointer_space()->AreaSize());
+  DCHECK(kLargeObjectSpaceFillerSize > heap->old_space()->AreaSize());
   while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
     heap->AllocateFixedArray(
         kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked();
index d339c15..e9d1c04 100644 (file)
@@ -608,7 +608,7 @@ TEST(MakingExternalUnalignedOneByteString) {
       "slice('abcdefghijklmnopqrstuvwxyz');"));
 
   // Trigger GCs so that the newly allocated string moves to old gen.
-  SimulateFullSpace(CcTest::heap()->old_pointer_space());
+  SimulateFullSpace(CcTest::heap()->old_space());
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
@@ -728,11 +728,10 @@ THREADED_TEST(ScavengeExternalString) {
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     in_new_space = CcTest::heap()->InNewSpace(*istring);
-    CHECK(in_new_space || CcTest::heap()->old_data_space()->Contains(*istring));
+    CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
     CHECK_EQ(0, dispose_count);
   }
-  CcTest::heap()->CollectGarbage(
-      in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
+  CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
   CHECK_EQ(1, dispose_count);
 }
 
@@ -751,11 +750,10 @@ THREADED_TEST(ScavengeExternalOneByteString) {
     i::Handle<i::String> istring = v8::Utils::OpenHandle(*string);
     CcTest::heap()->CollectGarbage(i::NEW_SPACE);
     in_new_space = CcTest::heap()->InNewSpace(*istring);
-    CHECK(in_new_space || CcTest::heap()->old_data_space()->Contains(*istring));
+    CHECK(in_new_space || CcTest::heap()->old_space()->Contains(*istring));
     CHECK_EQ(0, dispose_count);
   }
-  CcTest::heap()->CollectGarbage(
-      in_new_space ? i::NEW_SPACE : i::OLD_DATA_SPACE);
+  CcTest::heap()->CollectGarbage(in_new_space ? i::NEW_SPACE : i::OLD_SPACE);
   CHECK_EQ(1, dispose_count);
 }
 
@@ -15918,8 +15916,7 @@ TEST(ExternalizeOldSpaceTwoByteCons) {
       CompileRun("'Romeo Montague ' + 'Juliet Capulet'")->ToString(isolate);
   CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
   CcTest::heap()->CollectAllAvailableGarbage();
-  CHECK(CcTest::heap()->old_pointer_space()->Contains(
-            *v8::Utils::OpenHandle(*cons)));
+  CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));
 
   TestResource* resource = new TestResource(
       AsciiToTwoByteString("Romeo Montague Juliet Capulet"));
@@ -15941,8 +15938,7 @@ TEST(ExternalizeOldSpaceOneByteCons) {
       CompileRun("'Romeo Montague ' + 'Juliet Capulet'")->ToString(isolate);
   CHECK(v8::Utils::OpenHandle(*cons)->IsConsString());
   CcTest::heap()->CollectAllAvailableGarbage();
-  CHECK(CcTest::heap()->old_pointer_space()->Contains(
-            *v8::Utils::OpenHandle(*cons)));
+  CHECK(CcTest::heap()->old_space()->Contains(*v8::Utils::OpenHandle(*cons)));
 
   TestOneByteResource* resource =
       new TestOneByteResource(i::StrDup("Romeo Montague Juliet Capulet"));
index 8b9b2cf..1f10a41 100644 (file)
@@ -281,16 +281,16 @@ TEST(ConstantPoolCompacting) {
 
   // Start a second old-space page so that the heap pointer added to the
  // constant pool array ends up on an evacuation candidate page.
-  Page* first_page = heap->old_data_space()->anchor()->next_page();
+  Page* first_page = heap->old_space()->anchor()->next_page();
   {
     HandleScope scope(isolate);
     int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
     Handle<HeapObject> temp =
         factory->NewFixedDoubleArray(dummy_array_size / kDoubleSize, TENURED);
-    CHECK(heap->InOldDataSpace(temp->address()));
+    CHECK(heap->InOldSpace(temp->address()));
     Handle<HeapObject> heap_ptr =
         factory->NewHeapNumber(5.0, IMMUTABLE, TENURED);
-    CHECK(heap->InOldDataSpace(heap_ptr->address()));
+    CHECK(heap->InOldSpace(heap_ptr->address()));
     CHECK(!first_page->Contains(heap_ptr->address()));
     array->set(0, *heap_ptr);
     array->set(1, *heap_ptr);
index 14e5d69..df197ff 100644 (file)
@@ -182,7 +182,7 @@ static void TestHashSetCausesGC(Handle<HashSet> table) {
   // Simulate a full heap so that generating an identity hash code
   // in subsequent calls will request GC.
   SimulateFullSpace(CcTest::heap()->new_space());
-  SimulateFullSpace(CcTest::heap()->old_pointer_space());
+  SimulateFullSpace(CcTest::heap()->old_space());
 
   // Calling Contains() should not cause GC ever.
   int gc_count = isolate->heap()->gc_count();
@@ -228,7 +228,7 @@ static void TestHashMapCausesGC(Handle<HashMap> table) {
   // Simulate a full heap so that generating an identity hash code
   // in subsequent calls will request GC.
   SimulateFullSpace(CcTest::heap()->new_space());
-  SimulateFullSpace(CcTest::heap()->old_pointer_space());
+  SimulateFullSpace(CcTest::heap()->old_space());
 
   // Calling Lookup() should not cause GC ever.
   CHECK(table->Lookup(key)->IsTheHole());
index 65a1214..8d8341b 100644 (file)
@@ -441,7 +441,7 @@ TEST(WeakGlobalHandlesMark) {
   }
 
   // Make sure the objects are promoted.
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
   heap->CollectGarbage(NEW_SPACE);
   CHECK(!heap->InNewSpace(*h1) && !heap->InNewSpace(*h2));
 
@@ -494,7 +494,7 @@ TEST(DeleteWeakGlobalHandle) {
   CHECK(!WeakPointerCleared);
 
   // Mark-compact treats weak reference properly.
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
 
   CHECK(WeakPointerCleared);
 }
@@ -893,7 +893,7 @@ TEST(Iteration) {
   Handle<Object> objs[objs_count];
   int next_objs_index = 0;
 
-  // Allocate a JS array to OLD_POINTER_SPACE and NEW_SPACE
+  // Allocate a JS array to OLD_SPACE and NEW_SPACE
   objs[next_objs_index++] = factory->NewJSArray(10);
   objs[next_objs_index++] = factory->NewJSArray(10,
                                                 FAST_HOLEY_ELEMENTS,
@@ -1002,15 +1002,15 @@ TEST(Regression39128) {
 
   // Step 4: clone jsobject, but force always allocate first to create a clone
  // in old space.
-  Address old_pointer_space_top = heap->old_pointer_space()->top();
+  Address old_space_top = heap->old_space()->top();
   AlwaysAllocateScope aa_scope(isolate);
   Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
   JSObject* clone = JSObject::cast(clone_obj);
-  if (clone->address() != old_pointer_space_top) {
+  if (clone->address() != old_space_top) {
     // Alas, got allocated from free list, we cannot do checks.
     return;
   }
-  CHECK(heap->old_pointer_space()->Contains(clone->address()));
+  CHECK(heap->old_space()->Contains(clone->address()));
 }
 
 
@@ -2138,7 +2138,7 @@ TEST(InstanceOfStubWriteBarrier) {
   }
 
   CcTest::heap()->incremental_marking()->set_should_hurry(true);
-  CcTest::heap()->CollectGarbage(OLD_POINTER_SPACE);
+  CcTest::heap()->CollectGarbage(OLD_SPACE);
 }
 
 
@@ -2189,7 +2189,7 @@ TEST(PrototypeTransitionClearing) {
 
   // Make sure next prototype is placed on an old-space evacuation candidate.
   Handle<JSObject> prototype;
-  PagedSpace* space = CcTest::heap()->old_pointer_space();
+  PagedSpace* space = CcTest::heap()->old_space();
   {
     AlwaysAllocateScope always_allocate(isolate);
     SimulateFullSpace(space);
@@ -2309,7 +2309,7 @@ TEST(ResetSharedFunctionInfoCountersDuringMarkSweep) {
 TEST(IdleNotificationFinishMarking) {
   i::FLAG_allow_natives_syntax = true;
   CcTest::InitializeVM();
-  SimulateFullSpace(CcTest::heap()->old_pointer_space());
+  SimulateFullSpace(CcTest::heap()->old_space());
   IncrementalMarking* marking = CcTest::heap()->incremental_marking();
   marking->Abort();
   marking->Start();
@@ -2420,11 +2420,11 @@ TEST(OptimizedPretenuringAllocationFolding) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
-  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
-  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
-  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
-  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
+  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
+  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
 }
 
 
@@ -2463,8 +2463,8 @@ TEST(OptimizedPretenuringObjectArrayLiterals) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldPointerSpace(o->elements()));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(o->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
 }
 
 
@@ -2504,27 +2504,25 @@ TEST(OptimizedPretenuringMixedInObjectProperties) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*o));
   FieldIndex idx1 = FieldIndex::ForPropertyIndex(o->map(), 0);
   FieldIndex idx2 = FieldIndex::ForPropertyIndex(o->map(), 1);
-  CHECK(CcTest::heap()->InOldPointerSpace(o->RawFastPropertyAt(idx1)));
+  CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx1)));
   if (!o->IsUnboxedDoubleField(idx2)) {
-    CHECK(CcTest::heap()->InOldDataSpace(o->RawFastPropertyAt(idx2)));
+    CHECK(CcTest::heap()->InOldSpace(o->RawFastPropertyAt(idx2)));
   } else {
     CHECK_EQ(1.1, o->RawFastDoublePropertyAt(idx2));
   }
 
   JSObject* inner_object =
       reinterpret_cast<JSObject*>(o->RawFastPropertyAt(idx1));
-  CHECK(CcTest::heap()->InOldPointerSpace(inner_object));
+  CHECK(CcTest::heap()->InOldSpace(inner_object));
   if (!inner_object->IsUnboxedDoubleField(idx1)) {
-    CHECK(
-        CcTest::heap()->InOldDataSpace(inner_object->RawFastPropertyAt(idx1)));
+    CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx1)));
   } else {
     CHECK_EQ(2.2, inner_object->RawFastDoublePropertyAt(idx1));
   }
-  CHECK(CcTest::heap()->InOldPointerSpace(
-      inner_object->RawFastPropertyAt(idx2)));
+  CHECK(CcTest::heap()->InOldSpace(inner_object->RawFastPropertyAt(idx2)));
 }
 
 
@@ -2563,8 +2561,8 @@ TEST(OptimizedPretenuringDoubleArrayProperties) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
-  CHECK(CcTest::heap()->InOldDataSpace(o->properties()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(o->properties()));
 }
 
 
@@ -2603,8 +2601,8 @@ TEST(OptimizedPretenuringdoubleArrayLiterals) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldDataSpace(o->elements()));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(o->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
 }
 
 
@@ -2648,11 +2646,11 @@ TEST(OptimizedPretenuringNestedMixedArrayLiterals) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
-  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle));
-  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle->elements()));
-  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle));
-  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*int_array_handle));
+  CHECK(CcTest::heap()->InOldSpace(int_array_handle->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*double_array_handle));
+  CHECK(CcTest::heap()->InOldSpace(double_array_handle->elements()));
 }
 
 
@@ -2697,11 +2695,11 @@ TEST(OptimizedPretenuringNestedObjectLiterals) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
-  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_1));
-  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_1->elements()));
-  CHECK(CcTest::heap()->InOldPointerSpace(*int_array_handle_2));
-  CHECK(CcTest::heap()->InOldPointerSpace(int_array_handle_2->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_1));
+  CHECK(CcTest::heap()->InOldSpace(int_array_handle_1->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*int_array_handle_2));
+  CHECK(CcTest::heap()->InOldSpace(int_array_handle_2->elements()));
 }
 
 
@@ -2748,11 +2746,11 @@ TEST(OptimizedPretenuringNestedDoubleLiterals) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
-  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_1));
-  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_1->elements()));
-  CHECK(CcTest::heap()->InOldPointerSpace(*double_array_handle_2));
-  CHECK(CcTest::heap()->InOldDataSpace(double_array_handle_2->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_1));
+  CHECK(CcTest::heap()->InOldSpace(double_array_handle_1->elements()));
+  CHECK(CcTest::heap()->InOldSpace(*double_array_handle_2));
+  CHECK(CcTest::heap()->InOldSpace(double_array_handle_2->elements()));
 }
 
 
@@ -2805,7 +2803,7 @@ TEST(OptimizedPretenuringConstructorCalls) {
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
 
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*o));
 }
 
 
@@ -2852,7 +2850,7 @@ TEST(OptimizedPretenuringCallNew) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(res));
-  CHECK(CcTest::heap()->InOldPointerSpace(*o));
+  CHECK(CcTest::heap()->InOldSpace(*o));
 }
 
 
@@ -3037,7 +3035,7 @@ TEST(TransitionArrayShrinksDuringAllocToOnePropertyFound) {
 
   root = GetByName("root");
   AddPropertyTo(0, root, "prop9");
-  CcTest::i_isolate()->heap()->CollectGarbage(OLD_POINTER_SPACE);
+  CcTest::i_isolate()->heap()->CollectGarbage(OLD_SPACE);
 
   // Count number of live transitions after marking.  Note that one transition
   // is left, because 'o' still holds an instance of one transition target.
@@ -3175,27 +3173,27 @@ TEST(ReleaseOverReservedPages) {
   static const int number_of_test_pages = 20;
 
   // Prepare many pages with low live-bytes count.
-  PagedSpace* old_pointer_space = heap->old_pointer_space();
-  CHECK_EQ(1, old_pointer_space->CountTotalPages());
+  PagedSpace* old_space = heap->old_space();
+  CHECK_EQ(1, old_space->CountTotalPages());
   for (int i = 0; i < number_of_test_pages; i++) {
     AlwaysAllocateScope always_allocate(isolate);
-    SimulateFullSpace(old_pointer_space);
+    SimulateFullSpace(old_space);
     factory->NewFixedArray(1, TENURED);
   }
-  CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+  CHECK_EQ(number_of_test_pages + 1, old_space->CountTotalPages());
 
   // Triggering one GC will cause a lot of garbage to be discovered but
   // even spread across all allocated pages.
   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask,
                           "triggered for preparation");
-  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
 
   // Triggering subsequent GCs should cause at least half of the pages
   // to be released to the OS after at most two cycles.
   heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 1");
-  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages());
   heap->CollectAllGarbage(Heap::kNoGCFlags, "triggered by test 2");
-  CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages() * 2);
+  CHECK_GE(number_of_test_pages + 1, old_space->CountTotalPages() * 2);
 
   // Triggering a last-resort GC should cause all pages to be released to the
   // OS so that other processes can seize the memory.  If we get a failure here
@@ -3205,7 +3203,7 @@ TEST(ReleaseOverReservedPages) {
   // boots, but if the 20 small arrays don't fit on the first page then that's
   // an indication that it is too small.
   heap->CollectAllAvailableGarbage("triggered really hard");
-  CHECK_EQ(1, old_pointer_space->CountTotalPages());
+  CHECK_EQ(1, old_space->CountTotalPages());
 }
 
 
@@ -4844,8 +4842,8 @@ TEST(ArrayShiftSweeping) {
 
   Handle<JSObject> o =
       v8::Utils::OpenHandle(*v8::Handle<v8::Object>::Cast(result));
-  CHECK(heap->InOldPointerSpace(o->elements()));
-  CHECK(heap->InOldPointerSpace(*o));
+  CHECK(heap->InOldSpace(o->elements()));
+  CHECK(heap->InOldSpace(*o));
   Page* page = Page::FromAddress(o->elements()->address());
   CHECK(page->parallel_sweeping() <= MemoryChunk::SWEEPING_FINALIZE ||
         Marking::IsBlack(Marking::MarkBitFrom(o->elements())));
@@ -4958,7 +4956,7 @@ TEST(Regress388880) {
  // Allocate fixed array in old space so that the object allocated
  // afterwards would end at the end of the page.
   {
-    SimulateFullSpace(heap->old_pointer_space());
+    SimulateFullSpace(heap->old_space());
     int padding_size = desired_offset - Page::kObjectStartOffset;
     int padding_array_length =
         (padding_size - FixedArray::kHeaderSize) / kPointerSize;
@@ -5027,7 +5025,7 @@ TEST(Regress3631) {
       "  weak_map.set(future_keys[i], i);"
       "}");
   heap->incremental_marking()->set_should_hurry(true);
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
 }
 
 
@@ -5044,7 +5042,7 @@ TEST(Regress442710) {
   Handle<String> name = factory->InternalizeUtf8String("testArray");
   JSReceiver::SetProperty(global, name, array, SLOPPY).Check();
   CompileRun("testArray[0] = 1; testArray[1] = 2; testArray.shift();");
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
 }
 
 
@@ -5111,10 +5109,10 @@ void CheckMapRetainingFor(int n) {
   }
   CHECK(!weak_cell->cleared());
   for (int i = 0; i < n; i++) {
-    heap->CollectGarbage(OLD_POINTER_SPACE);
+    heap->CollectGarbage(OLD_SPACE);
   }
   CHECK(!weak_cell->cleared());
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
   CHECK(weak_cell->cleared());
 }
 
index 5006c10..1674afc 100644 (file)
@@ -97,7 +97,7 @@ TEST(Promotion) {
   heap->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
 
   // Array now sits in the old space
-  CHECK(heap->InSpace(*array, OLD_POINTER_SPACE));
+  CHECK(heap->InSpace(*array, OLD_SPACE));
 }
 
 
@@ -119,10 +119,10 @@ TEST(NoPromotion) {
   CHECK(heap->InSpace(*array, NEW_SPACE));
 
   // Simulate a full old space to make promotion fail.
-  SimulateFullSpace(heap->old_pointer_space());
+  SimulateFullSpace(heap->old_space());
 
   // Call mark compact GC, and it should pass.
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
 }
 
 
@@ -138,7 +138,7 @@ TEST(MarkCompactCollector) {
   Handle<GlobalObject> global(isolate->context()->global_object());
 
   // call mark-compact when heap is empty
-  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 1");
+  heap->CollectGarbage(OLD_SPACE, "trigger 1");
 
   // keep allocating garbage in new space until it fails
   const int arraysize = 100;
@@ -165,7 +165,7 @@ TEST(MarkCompactCollector) {
     factory->NewJSObject(function);
   }
 
-  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 4");
+  heap->CollectGarbage(OLD_SPACE, "trigger 4");
 
   { HandleScope scope(isolate);
     Handle<String> func_name = factory->InternalizeUtf8String("theFunction");
@@ -183,7 +183,7 @@ TEST(MarkCompactCollector) {
     JSReceiver::SetProperty(obj, prop_name, twenty_three, SLOPPY).Check();
   }
 
-  heap->CollectGarbage(OLD_POINTER_SPACE, "trigger 5");
+  heap->CollectGarbage(OLD_SPACE, "trigger 5");
 
   { HandleScope scope(isolate);
     Handle<String> obj_name = factory->InternalizeUtf8String("theObject");
@@ -308,7 +308,7 @@ TEST(ObjectGroups) {
                                  g2c1.location());
   }
   // Do a full GC
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
 
   // All object should be alive.
   CHECK_EQ(0, NumberOfWeakCalls);
@@ -335,7 +335,7 @@ TEST(ObjectGroups) {
                                  g2c1.location());
   }
 
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
 
   // All objects should be gone. 5 global handles in total.
   CHECK_EQ(5, NumberOfWeakCalls);
@@ -348,7 +348,7 @@ TEST(ObjectGroups) {
                           reinterpret_cast<void*>(&g2c1_and_id),
                           &WeakPointerCallback);
 
-  heap->CollectGarbage(OLD_POINTER_SPACE);
+  heap->CollectGarbage(OLD_SPACE);
   CHECK_EQ(7, NumberOfWeakCalls);
 }
 
index 88711b8..b124974 100644 (file)
@@ -1074,13 +1074,13 @@ TEST(SerializeToplevelThreeBigStrings) {
   Heap* heap = isolate->heap();
   CHECK(heap->InSpace(
       *v8::Utils::OpenHandle(*CompileRun("a")->ToString(CcTest::isolate())),
-      OLD_DATA_SPACE));
+      OLD_SPACE));
   CHECK(heap->InSpace(
       *v8::Utils::OpenHandle(*CompileRun("b")->ToString(CcTest::isolate())),
-      OLD_DATA_SPACE));
+      OLD_SPACE));
   CHECK(heap->InSpace(
       *v8::Utils::OpenHandle(*CompileRun("c")->ToString(CcTest::isolate())),
-      OLD_DATA_SPACE));
+      OLD_SPACE));
 
   delete cache;
   source_a.Dispose();
index 19c1dd7..b4bc7ad 100644 (file)
@@ -308,10 +308,7 @@ TEST(MemoryAllocator) {
                                 heap->MaxExecutableSize()));
 
   int total_pages = 0;
-  OldSpace faked_space(heap,
-                       heap->MaxReserved(),
-                       OLD_POINTER_SPACE,
-                       NOT_EXECUTABLE);
+  OldSpace faked_space(heap, heap->MaxReserved(), OLD_SPACE, NOT_EXECUTABLE);
   Page* first_page = memory_allocator->AllocatePage(
       faked_space.AreaSize(), &faked_space, NOT_EXECUTABLE);
 
@@ -380,9 +377,7 @@ TEST(OldSpace) {
                                 heap->MaxExecutableSize()));
   TestMemoryAllocatorScope test_scope(isolate, memory_allocator);
 
-  OldSpace* s = new OldSpace(heap,
-                             heap->MaxOldGenerationSize(),
-                             OLD_POINTER_SPACE,
+  OldSpace* s = new OldSpace(heap, heap->MaxOldGenerationSize(), OLD_SPACE,
                              NOT_EXECUTABLE);
   CHECK(s != NULL);
 
index fdcac3a..32284ea 100644 (file)
@@ -961,7 +961,7 @@ TEST(DoScavenge) {
   // a pointer to a from semi-space.
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
-  CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
+  CHECK(isolate->heap()->old_space()->Contains(*obj));
 
   CHECK_EQ(boom_value, GetDoubleFieldValue(*obj, field_index));
 }
@@ -1195,7 +1195,7 @@ TEST(StoreBufferScanOnScavenge) {
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in survivor space now
   CcTest::heap()->CollectGarbage(i::NEW_SPACE);  // in old gen now
 
-  CHECK(isolate->heap()->old_pointer_space()->Contains(*obj));
+  CHECK(isolate->heap()->old_space()->Contains(*obj));
 
   // Create temp object in the new space.
   Handle<JSArray> temp = factory->NewJSArray(FAST_ELEMENTS, NOT_TENURED);
@@ -1297,7 +1297,7 @@ TEST(WriteBarriersInCopyJSObject) {
   AlwaysAllocateScope aa_scope(isolate);
   Object* clone_obj = heap->CopyJSObject(jsobject).ToObjectChecked();
   Handle<JSObject> clone(JSObject::cast(clone_obj));
-  CHECK(heap->old_pointer_space()->Contains(clone->address()));
+  CHECK(heap->old_space()->Contains(clone->address()));
 
   CcTest::heap()->CollectGarbage(NEW_SPACE, "boom");
 
index 2f947d7..f204472 100644 (file)
@@ -183,7 +183,7 @@ TEST(Regress2060a) {
   Handle<JSWeakMap> weakmap = AllocateJSWeakMap(isolate);
 
   // Start second old-space page so that values land on evacuation candidate.
-  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+  Page* first_page = heap->old_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
@@ -222,7 +222,7 @@ TEST(Regress2060b) {
       factory->function_string());
 
   // Start second old-space page so that keys land on evacuation candidate.
-  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+  Page* first_page = heap->old_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
index a01d315..77afd00 100644 (file)
@@ -183,7 +183,7 @@ TEST(WeakSet_Regress2060a) {
   Handle<JSWeakSet> weakset = AllocateJSWeakSet(isolate);
 
   // Start second old-space page so that values land on evacuation candidate.
-  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+  Page* first_page = heap->old_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
@@ -222,7 +222,7 @@ TEST(WeakSet_Regress2060b) {
       factory->function_string());
 
   // Start second old-space page so that keys land on evacuation candidate.
-  Page* first_page = heap->old_pointer_space()->anchor()->next_page();
+  Page* first_page = heap->old_space()->anchor()->next_page();
   int dummy_array_size = Page::kMaxRegularHeapObjectSize - 92 * KB;
   factory->NewFixedArray(dummy_array_size / kPointerSize, TENURED);
 
index 8986a91..44f294d 100755 (executable)
@@ -1621,7 +1621,7 @@ class KnownMap(HeapObject):
 
 COMMENT_RE = re.compile(r"^C (0x[0-9a-fA-F]+) (.*)$")
 PAGEADDRESS_RE = re.compile(
-    r"^P (mappage|pointerpage|datapage) (0x[0-9a-fA-F]+)$")
+    r"^P (mappage|oldpage) (0x[0-9a-fA-F]+)$")
 
 
 class InspectionInfo(object):
@@ -1698,8 +1698,7 @@ class InspectionPadawan(object):
     self.reader = reader
     self.heap = heap
     self.known_first_map_page = 0
-    self.known_first_data_page = 0
-    self.known_first_pointer_page = 0
+    self.known_first_old_page = 0
 
   def __getattr__(self, name):
     """An InspectionPadawan can be used instead of V8Heap, even though
@@ -1715,13 +1714,11 @@ class InspectionPadawan(object):
 
   def IsInKnownOldSpace(self, tagged_address):
     page_address = tagged_address & ~self.heap.PageAlignmentMask()
-    return page_address in [self.known_first_data_page,
-                            self.known_first_pointer_page]
+    return page_address == self.known_first_old_page
 
   def ContainingKnownOldSpaceName(self, tagged_address):
     page_address = tagged_address & ~self.heap.PageAlignmentMask()
-    if page_address == self.known_first_data_page: return "OLD_DATA_SPACE"
-    if page_address == self.known_first_pointer_page: return "OLD_POINTER_SPACE"
+    if page_address == self.known_first_old_page: return "OLD_SPACE"
     return None
 
   def SenseObject(self, tagged_address):
@@ -1778,11 +1775,9 @@ class InspectionPadawan(object):
 
   def PrintKnowledge(self):
     print "  known_first_map_page = %s\n"\
-          "  known_first_data_page = %s\n"\
-          "  known_first_pointer_page = %s" % (
+          "  known_first_old_page = %s" % (
           self.reader.FormatIntPtr(self.known_first_map_page),
-          self.reader.FormatIntPtr(self.known_first_data_page),
-          self.reader.FormatIntPtr(self.known_first_pointer_page))
+          self.reader.FormatIntPtr(self.known_first_old_page))
 
 WEB_HEADER = """
 <!DOCTYPE html>
@@ -2124,12 +2119,10 @@ class InspectionWebFormatter(object):
 
     self.padawan = InspectionPadawan(self.reader, self.heap)
     self.comments = InspectionInfo(minidump_name, self.reader)
-    self.padawan.known_first_data_page = (
-        self.comments.get_page_address("datapage"))
+    self.padawan.known_first_old_page = (
+        self.comments.get_page_address("oldpage"))
     self.padawan.known_first_map_page = (
         self.comments.get_page_address("mappage"))
-    self.padawan.known_first_pointer_page = (
-        self.comments.get_page_address("pointerpage"))
 
   def set_comment(self, straddress, comment):
     try:
@@ -2141,12 +2134,10 @@ class InspectionWebFormatter(object):
   def set_page_address(self, kind, straddress):
     try:
       address = int(straddress, 0)
-      if kind == "datapage":
-        self.padawan.known_first_data_page = address
+      if kind == "oldpage":
+        self.padawan.known_first_old_page = address
       elif kind == "mappage":
         self.padawan.known_first_map_page = address
-      elif kind == "pointerpage":
-        self.padawan.known_first_pointer_page = address
       self.comments.save_page_address(kind, address)
     except ValueError:
       print "Invalid address"
@@ -2617,13 +2608,10 @@ class InspectionWebFormatter(object):
       page_address = address & ~self.heap.PageAlignmentMask()
 
       f.write("Page info: \n")
-      self.output_page_info(f, "data", self.padawan.known_first_data_page, \
+      self.output_page_info(f, "old", self.padawan.known_first_old_page, \
                             page_address)
       self.output_page_info(f, "map", self.padawan.known_first_map_page, \
                             page_address)
-      self.output_page_info(f, "pointer", \
-                            self.padawan.known_first_pointer_page, \
-                            page_address)
 
       if not self.reader.IsValidAddress(address):
         f.write("<h3>The contents at address %s not found in the dump.</h3>" % \
@@ -2925,14 +2913,14 @@ class InspectionShell(cmd.Cmd):
     """
     self.padawan.PrintKnowledge()
 
-  def do_kd(self, address):
+  def do_ko(self, address):
     """
      Teach V8 heap layout information to the inspector. Set the first
-     data-space page by passing any pointer into that page.
+     old-space page by passing any pointer into that page.
     """
     address = int(address, 16)
     page_address = address & ~self.heap.PageAlignmentMask()
-    self.padawan.known_first_data_page = page_address
+    self.padawan.known_first_old_page = page_address
 
   def do_km(self, address):
     """
@@ -2943,15 +2931,6 @@ class InspectionShell(cmd.Cmd):
     page_address = address & ~self.heap.PageAlignmentMask()
     self.padawan.known_first_map_page = page_address
 
-  def do_kp(self, address):
-    """
-     Teach V8 heap layout information to the inspector. Set the first
-     pointer-space page by passing any pointer into that page.
-    """
-    address = int(address, 16)
-    page_address = address & ~self.heap.PageAlignmentMask()
-    self.padawan.known_first_pointer_page = page_address
-
   def do_list(self, smth):
     """
      List all available memory regions.
index 60e0685..581e191 100644 (file)
@@ -165,25 +165,23 @@ void DumpHeapStats(const char *minidump_file) {
 
   const int new_space_size = READ_FIELD(1);
   const int new_space_capacity = READ_FIELD(2);
-  const int old_pointer_space_size = READ_FIELD(3);
-  const int old_pointer_space_capacity = READ_FIELD(4);
-  const int old_data_space_size = READ_FIELD(5);
-  const int old_data_space_capacity = READ_FIELD(6);
-  const int code_space_size = READ_FIELD(7);
-  const int code_space_capacity = READ_FIELD(8);
-  const int map_space_size = READ_FIELD(9);
-  const int map_space_capacity = READ_FIELD(10);
-  const int cell_space_size = READ_FIELD(11);
-  const int cell_space_capacity = READ_FIELD(12);
-  const int lo_space_size = READ_FIELD(13);
-  const int global_handle_count = READ_FIELD(14);
-  const int weak_global_handle_count = READ_FIELD(15);
-  const int pending_global_handle_count = READ_FIELD(16);
-  const int near_death_global_handle_count = READ_FIELD(17);
-  const int destroyed_global_handle_count = READ_FIELD(18);
-  const int memory_allocator_size = READ_FIELD(19);
-  const int memory_allocator_capacity = READ_FIELD(20);
-  const int os_error = READ_FIELD(23);
+  const int old_space_size = READ_FIELD(3);
+  const int old_space_capacity = READ_FIELD(4);
+  const int code_space_size = READ_FIELD(5);
+  const int code_space_capacity = READ_FIELD(6);
+  const int map_space_size = READ_FIELD(7);
+  const int map_space_capacity = READ_FIELD(8);
+  const int cell_space_size = READ_FIELD(9);
+  const int cell_space_capacity = READ_FIELD(10);
+  const int lo_space_size = READ_FIELD(11);
+  const int global_handle_count = READ_FIELD(12);
+  const int weak_global_handle_count = READ_FIELD(13);
+  const int pending_global_handle_count = READ_FIELD(14);
+  const int near_death_global_handle_count = READ_FIELD(15);
+  const int destroyed_global_handle_count = READ_FIELD(16);
+  const int memory_allocator_size = READ_FIELD(17);
+  const int memory_allocator_capacity = READ_FIELD(18);
+  const int os_error = READ_FIELD(19);
 #undef READ_FIELD
 
   int objects_per_type[v8::internal::LAST_TYPE + 1] = {0};
@@ -225,10 +223,8 @@ void DumpHeapStats(const char *minidump_file) {
     printf("\t%-25s\t% 10.3f MB\n", #stat ":", toM(stat));
   PRINT_MB_STAT(new_space_size);
   PRINT_MB_STAT(new_space_capacity);
-  PRINT_MB_STAT(old_pointer_space_size);
-  PRINT_MB_STAT(old_pointer_space_capacity);
-  PRINT_MB_STAT(old_data_space_size);
-  PRINT_MB_STAT(old_data_space_capacity);
+  PRINT_MB_STAT(old_space_size);
+  PRINT_MB_STAT(old_space_capacity);
   PRINT_MB_STAT(code_space_size);
   PRINT_MB_STAT(code_space_capacity);
   PRINT_MB_STAT(map_space_size);
index 7a69393..0527cc8 100644 (file)
@@ -135,159 +135,160 @@ INSTANCE_TYPES = {
 # List of known V8 maps.
 KNOWN_MAPS = {
   0x08081: (137, "ByteArrayMap"),
-  0x080a9: (129, "MetaMap"),
-  0x080d1: (131, "NullMap"),
-  0x080f9: (131, "UndefinedMap"),
-  0x08121: (180, "FixedArrayMap"),
-  0x08149: (4, "OneByteInternalizedStringMap"),
-  0x08171: (134, "HeapNumberMap"),
-  0x08199: (138, "FreeSpaceMap"),
-  0x081c1: (158, "OnePointerFillerMap"),
-  0x081e9: (158, "TwoPointerFillerMap"),
-  0x08211: (131, "TheHoleMap"),
-  0x08239: (131, "BooleanMap"),
-  0x08261: (131, "UninitializedMap"),
-  0x08289: (131, "ExceptionMap"),
-  0x082b1: (132, "CellMap"),
-  0x082d9: (133, "GlobalPropertyCellMap"),
-  0x08301: (182, "SharedFunctionInfoMap"),
-  0x08329: (135, "MutableHeapNumberMap"),
-  0x08351: (180, "NativeContextMap"),
-  0x08379: (130, "CodeMap"),
-  0x083a1: (180, "ScopeInfoMap"),
-  0x083c9: (180, "FixedCOWArrayMap"),
-  0x083f1: (157, "FixedDoubleArrayMap"),
-  0x08419: (181, "ConstantPoolArrayMap"),
-  0x08441: (183, "WeakCellMap"),
-  0x08469: (131, "NoInterceptorResultSentinelMap"),
-  0x08491: (180, "HashTableMap"),
-  0x084b9: (180, "OrderedHashTableMap"),
-  0x084e1: (131, "ArgumentsMarkerMap"),
-  0x08509: (131, "TerminationExceptionMap"),
-  0x08531: (128, "SymbolMap"),
-  0x08559: (64, "StringMap"),
-  0x08581: (68, "OneByteStringMap"),
-  0x085a9: (65, "ConsStringMap"),
-  0x085d1: (69, "ConsOneByteStringMap"),
-  0x085f9: (67, "SlicedStringMap"),
-  0x08621: (71, "SlicedOneByteStringMap"),
-  0x08649: (66, "ExternalStringMap"),
-  0x08671: (74, "ExternalStringWithOneByteDataMap"),
-  0x08699: (70, "ExternalOneByteStringMap"),
-  0x086c1: (70, "NativeSourceStringMap"),
-  0x086e9: (82, "ShortExternalStringMap"),
-  0x08711: (90, "ShortExternalStringWithOneByteDataMap"),
-  0x08739: (0, "InternalizedStringMap"),
-  0x08761: (2, "ExternalInternalizedStringMap"),
-  0x08789: (10, "ExternalInternalizedStringWithOneByteDataMap"),
-  0x087b1: (6, "ExternalOneByteInternalizedStringMap"),
-  0x087d9: (18, "ShortExternalInternalizedStringMap"),
-  0x08801: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
-  0x08829: (22, "ShortExternalOneByteInternalizedStringMap"),
-  0x08851: (86, "ShortExternalOneByteStringMap"),
-  0x08879: (139, "ExternalInt8ArrayMap"),
-  0x088a1: (140, "ExternalUint8ArrayMap"),
-  0x088c9: (141, "ExternalInt16ArrayMap"),
-  0x088f1: (142, "ExternalUint16ArrayMap"),
-  0x08919: (143, "ExternalInt32ArrayMap"),
-  0x08941: (144, "ExternalUint32ArrayMap"),
-  0x08969: (145, "ExternalFloat32ArrayMap"),
-  0x08991: (146, "ExternalFloat64ArrayMap"),
-  0x089b9: (147, "ExternalUint8ClampedArrayMap"),
-  0x089e1: (149, "FixedUint8ArrayMap"),
-  0x08a09: (148, "FixedInt8ArrayMap"),
-  0x08a31: (151, "FixedUint16ArrayMap"),
-  0x08a59: (150, "FixedInt16ArrayMap"),
-  0x08a81: (153, "FixedUint32ArrayMap"),
-  0x08aa9: (152, "FixedInt32ArrayMap"),
-  0x08ad1: (154, "FixedFloat32ArrayMap"),
-  0x08af9: (155, "FixedFloat64ArrayMap"),
-  0x08b21: (156, "FixedUint8ClampedArrayMap"),
-  0x08b49: (180, "SloppyArgumentsElementsMap"),
-  0x08b71: (180, "FunctionContextMap"),
-  0x08b99: (180, "CatchContextMap"),
-  0x08bc1: (180, "WithContextMap"),
-  0x08be9: (180, "BlockContextMap"),
-  0x08c11: (180, "ModuleContextMap"),
-  0x08c39: (180, "ScriptContextMap"),
-  0x08c61: (180, "ScriptContextTableMap"),
-  0x08c89: (187, "JSMessageObjectMap"),
-  0x08cb1: (136, "ForeignMap"),
-  0x08cd9: (189, "NeanderMap"),
-  0x08d01: (170, "AllocationSiteMap"),
-  0x08d29: (171, "AllocationMementoMap"),
-  0x08d51: (174, "PolymorphicCodeCacheMap"),
-  0x08d79: (172, "ScriptMap"),
-  0x08dc9: (189, "ExternalMap"),
-  0x08f09: (177, "BoxMap"),
-  0x08f31: (161, "ExecutableAccessorInfoMap"),
-  0x08f59: (162, "AccessorPairMap"),
-  0x08f81: (163, "AccessCheckInfoMap"),
-  0x08fa9: (164, "InterceptorInfoMap"),
-  0x08fd1: (165, "CallHandlerInfoMap"),
-  0x08ff9: (166, "FunctionTemplateInfoMap"),
-  0x09021: (167, "ObjectTemplateInfoMap"),
-  0x09049: (169, "TypeSwitchInfoMap"),
-  0x09071: (173, "CodeCacheMap"),
-  0x09099: (175, "TypeFeedbackInfoMap"),
-  0x090c1: (176, "AliasedArgumentsEntryMap"),
-  0x090e9: (178, "DebugInfoMap"),
-  0x09111: (179, "BreakPointInfoMap"),
+  0x080ad: (129, "MetaMap"),
+  0x080d9: (131, "NullMap"),
+  0x08105: (180, "FixedArrayMap"),
+  0x08131: (4, "OneByteInternalizedStringMap"),
+  0x0815d: (183, "WeakCellMap"),
+  0x08189: (131, "UndefinedMap"),
+  0x081b5: (134, "HeapNumberMap"),
+  0x081e1: (138, "FreeSpaceMap"),
+  0x0820d: (158, "OnePointerFillerMap"),
+  0x08239: (158, "TwoPointerFillerMap"),
+  0x08265: (131, "TheHoleMap"),
+  0x08291: (131, "BooleanMap"),
+  0x082bd: (131, "UninitializedMap"),
+  0x082e9: (131, "ExceptionMap"),
+  0x08315: (132, "CellMap"),
+  0x08341: (133, "GlobalPropertyCellMap"),
+  0x0836d: (182, "SharedFunctionInfoMap"),
+  0x08399: (135, "MutableHeapNumberMap"),
+  0x083c5: (180, "NativeContextMap"),
+  0x083f1: (130, "CodeMap"),
+  0x0841d: (180, "ScopeInfoMap"),
+  0x08449: (180, "FixedCOWArrayMap"),
+  0x08475: (157, "FixedDoubleArrayMap"),
+  0x084a1: (181, "ConstantPoolArrayMap"),
+  0x084cd: (131, "NoInterceptorResultSentinelMap"),
+  0x084f9: (180, "HashTableMap"),
+  0x08525: (180, "OrderedHashTableMap"),
+  0x08551: (131, "ArgumentsMarkerMap"),
+  0x0857d: (131, "TerminationExceptionMap"),
+  0x085a9: (128, "SymbolMap"),
+  0x085d5: (64, "StringMap"),
+  0x08601: (68, "OneByteStringMap"),
+  0x0862d: (65, "ConsStringMap"),
+  0x08659: (69, "ConsOneByteStringMap"),
+  0x08685: (67, "SlicedStringMap"),
+  0x086b1: (71, "SlicedOneByteStringMap"),
+  0x086dd: (66, "ExternalStringMap"),
+  0x08709: (74, "ExternalStringWithOneByteDataMap"),
+  0x08735: (70, "ExternalOneByteStringMap"),
+  0x08761: (70, "NativeSourceStringMap"),
+  0x0878d: (82, "ShortExternalStringMap"),
+  0x087b9: (90, "ShortExternalStringWithOneByteDataMap"),
+  0x087e5: (0, "InternalizedStringMap"),
+  0x08811: (2, "ExternalInternalizedStringMap"),
+  0x0883d: (10, "ExternalInternalizedStringWithOneByteDataMap"),
+  0x08869: (6, "ExternalOneByteInternalizedStringMap"),
+  0x08895: (18, "ShortExternalInternalizedStringMap"),
+  0x088c1: (26, "ShortExternalInternalizedStringWithOneByteDataMap"),
+  0x088ed: (22, "ShortExternalOneByteInternalizedStringMap"),
+  0x08919: (86, "ShortExternalOneByteStringMap"),
+  0x08945: (139, "ExternalInt8ArrayMap"),
+  0x08971: (140, "ExternalUint8ArrayMap"),
+  0x0899d: (141, "ExternalInt16ArrayMap"),
+  0x089c9: (142, "ExternalUint16ArrayMap"),
+  0x089f5: (143, "ExternalInt32ArrayMap"),
+  0x08a21: (144, "ExternalUint32ArrayMap"),
+  0x08a4d: (145, "ExternalFloat32ArrayMap"),
+  0x08a79: (146, "ExternalFloat64ArrayMap"),
+  0x08aa5: (147, "ExternalUint8ClampedArrayMap"),
+  0x08ad1: (149, "FixedUint8ArrayMap"),
+  0x08afd: (148, "FixedInt8ArrayMap"),
+  0x08b29: (151, "FixedUint16ArrayMap"),
+  0x08b55: (150, "FixedInt16ArrayMap"),
+  0x08b81: (153, "FixedUint32ArrayMap"),
+  0x08bad: (152, "FixedInt32ArrayMap"),
+  0x08bd9: (154, "FixedFloat32ArrayMap"),
+  0x08c05: (155, "FixedFloat64ArrayMap"),
+  0x08c31: (156, "FixedUint8ClampedArrayMap"),
+  0x08c5d: (180, "SloppyArgumentsElementsMap"),
+  0x08c89: (180, "FunctionContextMap"),
+  0x08cb5: (180, "CatchContextMap"),
+  0x08ce1: (180, "WithContextMap"),
+  0x08d0d: (180, "BlockContextMap"),
+  0x08d39: (180, "ModuleContextMap"),
+  0x08d65: (180, "ScriptContextMap"),
+  0x08d91: (180, "ScriptContextTableMap"),
+  0x08dbd: (187, "JSMessageObjectMap"),
+  0x08de9: (136, "ForeignMap"),
+  0x08e15: (189, "NeanderMap"),
+  0x08e41: (189, "ExternalMap"),
+  0x08e6d: (170, "AllocationSiteMap"),
+  0x08e99: (171, "AllocationMementoMap"),
+  0x08ec5: (174, "PolymorphicCodeCacheMap"),
+  0x08ef1: (172, "ScriptMap"),
+  0x0907d: (177, "BoxMap"),
+  0x090a9: (161, "ExecutableAccessorInfoMap"),
+  0x090d5: (162, "AccessorPairMap"),
+  0x09101: (163, "AccessCheckInfoMap"),
+  0x0912d: (164, "InterceptorInfoMap"),
+  0x09159: (165, "CallHandlerInfoMap"),
+  0x09185: (166, "FunctionTemplateInfoMap"),
+  0x091b1: (167, "ObjectTemplateInfoMap"),
+  0x091dd: (169, "TypeSwitchInfoMap"),
+  0x09209: (173, "CodeCacheMap"),
+  0x09235: (175, "TypeFeedbackInfoMap"),
+  0x09261: (176, "AliasedArgumentsEntryMap"),
+  0x0928d: (178, "DebugInfoMap"),
+  0x092b9: (179, "BreakPointInfoMap"),
 }
 
 # List of known V8 objects.
 KNOWN_OBJECTS = {
-  ("OLD_POINTER_SPACE", 0x08081): "NullValue",
-  ("OLD_POINTER_SPACE", 0x08091): "UndefinedValue",
-  ("OLD_POINTER_SPACE", 0x080a1): "TheHoleValue",
-  ("OLD_POINTER_SPACE", 0x080b1): "TrueValue",
-  ("OLD_POINTER_SPACE", 0x080c1): "FalseValue",
-  ("OLD_POINTER_SPACE", 0x080d1): "UninitializedValue",
-  ("OLD_POINTER_SPACE", 0x080e1): "Exception",
-  ("OLD_POINTER_SPACE", 0x080f1): "NoInterceptorResultSentinel",
-  ("OLD_POINTER_SPACE", 0x08101): "ArgumentsMarker",
-  ("OLD_POINTER_SPACE", 0x08111): "NumberStringCache",
-  ("OLD_POINTER_SPACE", 0x08919): "SingleCharacterStringCache",
-  ("OLD_POINTER_SPACE", 0x08d21): "StringSplitCache",
-  ("OLD_POINTER_SPACE", 0x09129): "RegExpMultipleCache",
-  ("OLD_POINTER_SPACE", 0x09531): "TerminationException",
-  ("OLD_POINTER_SPACE", 0x09541): "MessageListeners",
-  ("OLD_POINTER_SPACE", 0x0955d): "CodeStubs",
-  ("OLD_POINTER_SPACE", 0x0f555): "NonMonomorphicCache",
-  ("OLD_POINTER_SPACE", 0x0fb69): "PolymorphicCodeCache",
-  ("OLD_POINTER_SPACE", 0x0fb71): "NativesSourceCache",
-  ("OLD_POINTER_SPACE", 0x0fbe1): "EmptyScript",
-  ("OLD_POINTER_SPACE", 0x0fc1d): "IntrinsicFunctionNames",
-  ("OLD_POINTER_SPACE", 0x15c39): "ObservationState",
-  ("OLD_POINTER_SPACE", 0x15c45): "SymbolRegistry",
-  ("OLD_POINTER_SPACE", 0x16601): "EmptySlowElementDictionary",
-  ("OLD_POINTER_SPACE", 0x1679d): "AllocationSitesScratchpad",
-  ("OLD_POINTER_SPACE", 0x43e61): "StringTable",
-  ("OLD_DATA_SPACE", 0x08081): "EmptyDescriptorArray",
-  ("OLD_DATA_SPACE", 0x08089): "EmptyFixedArray",
-  ("OLD_DATA_SPACE", 0x080a9): "NanValue",
-  ("OLD_DATA_SPACE", 0x08159): "EmptyByteArray",
-  ("OLD_DATA_SPACE", 0x08161): "EmptyConstantPoolArray",
-  ("OLD_DATA_SPACE", 0x08241): "EmptyExternalInt8Array",
-  ("OLD_DATA_SPACE", 0x0824d): "EmptyExternalUint8Array",
-  ("OLD_DATA_SPACE", 0x08259): "EmptyExternalInt16Array",
-  ("OLD_DATA_SPACE", 0x08265): "EmptyExternalUint16Array",
-  ("OLD_DATA_SPACE", 0x08271): "EmptyExternalInt32Array",
-  ("OLD_DATA_SPACE", 0x0827d): "EmptyExternalUint32Array",
-  ("OLD_DATA_SPACE", 0x08289): "EmptyExternalFloat32Array",
-  ("OLD_DATA_SPACE", 0x08295): "EmptyExternalFloat64Array",
-  ("OLD_DATA_SPACE", 0x082a1): "EmptyExternalUint8ClampedArray",
-  ("OLD_DATA_SPACE", 0x082ad): "EmptyFixedUint8Array",
-  ("OLD_DATA_SPACE", 0x082b5): "EmptyFixedInt8Array",
-  ("OLD_DATA_SPACE", 0x082bd): "EmptyFixedUint16Array",
-  ("OLD_DATA_SPACE", 0x082c5): "EmptyFixedInt16Array",
-  ("OLD_DATA_SPACE", 0x082cd): "EmptyFixedUint32Array",
-  ("OLD_DATA_SPACE", 0x082d5): "EmptyFixedInt32Array",
-  ("OLD_DATA_SPACE", 0x082dd): "EmptyFixedFloat32Array",
-  ("OLD_DATA_SPACE", 0x082e5): "EmptyFixedFloat64Array",
-  ("OLD_DATA_SPACE", 0x082ed): "EmptyFixedUint8ClampedArray",
-  ("OLD_DATA_SPACE", 0x082f5): "InfinityValue",
-  ("OLD_DATA_SPACE", 0x08301): "MinusZeroValue",
-  ("CODE_SPACE", 0x15fa1): "JsEntryCode",
-  ("CODE_SPACE", 0x243c1): "JsConstructEntryCode",
+  ("OLD_SPACE", 0x08081): "NullValue",
+  ("OLD_SPACE", 0x08091): "EmptyDescriptorArray",
+  ("OLD_SPACE", 0x08099): "EmptyFixedArray",
+  ("OLD_SPACE", 0x080bd): "UndefinedValue",
+  ("OLD_SPACE", 0x080e5): "NanValue",
+  ("OLD_SPACE", 0x080f1): "TheHoleValue",
+  ("OLD_SPACE", 0x08111): "TrueValue",
+  ("OLD_SPACE", 0x08131): "FalseValue",
+  ("OLD_SPACE", 0x08155): "UninitializedValue",
+  ("OLD_SPACE", 0x08181): "Exception",
+  ("OLD_SPACE", 0x081a9): "NoInterceptorResultSentinel",
+  ("OLD_SPACE", 0x081e5): "EmptyByteArray",
+  ("OLD_SPACE", 0x081ed): "EmptyConstantPoolArray",
+  ("OLD_SPACE", 0x081fd): "ArgumentsMarker",
+  ("OLD_SPACE", 0x08229): "NumberStringCache",
+  ("OLD_SPACE", 0x08a31): "SingleCharacterStringCache",
+  ("OLD_SPACE", 0x08ec9): "StringSplitCache",
+  ("OLD_SPACE", 0x092d1): "RegExpMultipleCache",
+  ("OLD_SPACE", 0x096d9): "TerminationException",
+  ("OLD_SPACE", 0x0970d): "EmptyExternalInt8Array",
+  ("OLD_SPACE", 0x09719): "EmptyExternalUint8Array",
+  ("OLD_SPACE", 0x09725): "EmptyExternalInt16Array",
+  ("OLD_SPACE", 0x09731): "EmptyExternalUint16Array",
+  ("OLD_SPACE", 0x0973d): "EmptyExternalInt32Array",
+  ("OLD_SPACE", 0x09749): "EmptyExternalUint32Array",
+  ("OLD_SPACE", 0x09755): "EmptyExternalFloat32Array",
+  ("OLD_SPACE", 0x09761): "EmptyExternalFloat64Array",
+  ("OLD_SPACE", 0x0976d): "EmptyExternalUint8ClampedArray",
+  ("OLD_SPACE", 0x09779): "EmptyFixedUint8Array",
+  ("OLD_SPACE", 0x09781): "EmptyFixedInt8Array",
+  ("OLD_SPACE", 0x09789): "EmptyFixedUint16Array",
+  ("OLD_SPACE", 0x09791): "EmptyFixedInt16Array",
+  ("OLD_SPACE", 0x09799): "EmptyFixedUint32Array",
+  ("OLD_SPACE", 0x097a1): "EmptyFixedInt32Array",
+  ("OLD_SPACE", 0x097a9): "EmptyFixedFloat32Array",
+  ("OLD_SPACE", 0x097b1): "EmptyFixedFloat64Array",
+  ("OLD_SPACE", 0x097b9): "EmptyFixedUint8ClampedArray",
+  ("OLD_SPACE", 0x097c1): "InfinityValue",
+  ("OLD_SPACE", 0x097cd): "MinusZeroValue",
+  ("OLD_SPACE", 0x097d9): "MessageListeners",
+  ("OLD_SPACE", 0x097f5): "CodeStubs",
+  ("OLD_SPACE", 0x12c49): "NonMonomorphicCache",
+  ("OLD_SPACE", 0x132bd): "PolymorphicCodeCache",
+  ("OLD_SPACE", 0x132c5): "NativesSourceCache",
+  ("OLD_SPACE", 0x1353d): "EmptyScript",
+  ("OLD_SPACE", 0x13585): "IntrinsicFunctionNames",
+  ("OLD_SPACE", 0x27ae5): "ObservationState",
+  ("OLD_SPACE", 0x27af1): "SymbolRegistry",
+  ("OLD_SPACE", 0x2863d): "EmptySlowElementDictionary",
+  ("OLD_SPACE", 0x287d9): "AllocationSitesScratchpad",
+  ("OLD_SPACE", 0x28be1): "WeakObjectToCodeTable",
+  ("OLD_SPACE", 0x4e9dd): "StringTable",
+  ("CODE_SPACE", 0x15f61): "JsEntryCode",
+  ("CODE_SPACE", 0x24781): "JsConstructEntryCode",
 }