Tweaks to the code serializer.
author    yangguo@chromium.org <yangguo@chromium.org>
Fri, 24 Oct 2014 12:40:05 +0000 (12:40 +0000)
committer yangguo@chromium.org <yangguo@chromium.org>
Fri, 24 Oct 2014 13:12:09 +0000 (13:12 +0000)
- consider the source string as a special kind of back reference (a sketch follows the file list below).
- use the repeat opcode for more root list members: all immortal immovable roots now qualify.

R=mvstanton@chromium.org

Review URL: https://codereview.chromium.org/674883002

Cr-Commit-Position: refs/heads/master@{#24871}
git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24871 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/heap/heap.cc
src/heap/heap.h
src/hydrogen-instructions.cc
src/serialize.cc
src/serialize.h
test/cctest/test-serialize.cc
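
Before the diff itself, a minimal sketch of the first change (not V8 code; SketchBackReference, SketchCodeSerializer and every other name below are made-up stand-ins): the code serializer seeds its back reference map with the script source under a reserved sentinel value, so that a later lookup hit on the source string is dispatched to a dedicated source-object encoding instead of serializing the potentially huge source as ordinary object data.

  #include <cstdint>
  #include <cstdio>
  #include <unordered_map>

  // Stand-in for BackReference: a 32-bit value with two reserved sentinels.
  struct SketchBackReference {
    static const uint32_t kInvalid = 0xFFFFFFFFu;
    static const uint32_t kSource = 0xFFFFFFFEu;
    explicit SketchBackReference(uint32_t b = kInvalid) : bits(b) {}
    bool is_valid() const { return bits != kInvalid; }
    bool is_source() const { return bits == kSource; }
    uint32_t bits;
  };

  class SketchCodeSerializer {
   public:
    explicit SketchCodeSerializer(const void* source) {
      // Mirrors back_reference_map_.AddSourceString(source) in the
      // CodeSerializer constructor: the source is "known" before anything
      // has been serialized.
      map_[source] = SketchBackReference(SketchBackReference::kSource);
    }

    void SerializeObject(const void* obj) {
      auto it = map_.find(obj);
      if (it != map_.end() && it->second.is_valid()) {
        if (it->second.is_source()) {
          std::puts("emit source-object opcode");  // no payload written
        } else {
          std::puts("emit ordinary back reference");
        }
        return;
      }
      std::puts("serialize object body, record back reference");
      map_[obj] = SketchBackReference(0);  // pretend space/chunk/offset == 0
    }

   private:
    std::unordered_map<const void*, SketchBackReference> map_;
  };

  int main() {
    int source = 0, other = 0;
    SketchCodeSerializer s(&source);
    s.SerializeObject(&other);   // serialize object body, record back reference
    s.SerializeObject(&other);   // emit ordinary back reference
    s.SerializeObject(&source);  // emit source-object opcode
  }

The corresponding real changes are the AddSourceString() call in the CodeSerializer constructor and the is_source() branch in CodeSerializer::SerializeObject in the diff below.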

diff --git a/src/heap/heap.cc b/src/heap/heap.cc
index 38e4971..94c8937 100644
@@ -4551,6 +4551,19 @@ bool Heap::InSpace(Address addr, AllocationSpace space) {
 }
 
 
+bool Heap::RootIsImmortalImmovable(int root_index) {
+  switch (root_index) {
+#define CASE(name)               \
+  case Heap::k##name##RootIndex: \
+    return true;
+    IMMORTAL_IMMOVABLE_ROOT_LIST(CASE);
+#undef CASE
+    default:
+      return false;
+  }
+}
+
+
 #ifdef VERIFY_HEAP
 void Heap::Verify() {
   CHECK(HasBeenSetUp());
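
Heap::RootIsImmortalImmovable above is the usual X-macro pattern: the root list macro is expanded once more, this time into case labels. A compilable sketch of the pattern, using a made-up three-entry list rather than the real IMMORTAL_IMMOVABLE_ROOT_LIST:

  #include <cstdio>

  // Made-up stand-in for IMMORTAL_IMMOVABLE_ROOT_LIST(V).
  #define SKETCH_IMMORTAL_LIST(V) \
    V(MetaMap)                    \
    V(NullValue)                  \
    V(TrueValue)

  enum SketchRootIndex {
  #define INDEX(name) k##name##RootIndex,
    SKETCH_IMMORTAL_LIST(INDEX)
  #undef INDEX
    kStringTableRootIndex  // a root that is not in the list above
  };

  static bool SketchRootIsImmortalImmovable(int root_index) {
    switch (root_index) {
  #define CASE(name)         \
    case k##name##RootIndex: \
      return true;
      SKETCH_IMMORTAL_LIST(CASE)
  #undef CASE
      default:
        return false;
    }
  }

  int main() {
    std::printf("%d\n", SketchRootIsImmortalImmovable(kNullValueRootIndex));    // 1
    std::printf("%d\n", SketchRootIsImmortalImmovable(kStringTableRootIndex));  // 0
  }
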
diff --git a/src/heap/heap.h b/src/heap/heap.h
index f3830dd..ee1fca9 100644
@@ -198,58 +198,6 @@ namespace internal {
   SMI_ROOT_LIST(V)    \
   V(StringTable, string_table, StringTable)
 
-// Heap roots that are known to be immortal immovable, for which we can safely
-// skip write barriers.
-#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
-  V(byte_array_map)                     \
-  V(free_space_map)                     \
-  V(one_pointer_filler_map)             \
-  V(two_pointer_filler_map)             \
-  V(undefined_value)                    \
-  V(the_hole_value)                     \
-  V(null_value)                         \
-  V(true_value)                         \
-  V(false_value)                        \
-  V(uninitialized_value)                \
-  V(cell_map)                           \
-  V(global_property_cell_map)           \
-  V(shared_function_info_map)           \
-  V(meta_map)                           \
-  V(heap_number_map)                    \
-  V(mutable_heap_number_map)            \
-  V(native_context_map)                 \
-  V(fixed_array_map)                    \
-  V(code_map)                           \
-  V(scope_info_map)                     \
-  V(fixed_cow_array_map)                \
-  V(fixed_double_array_map)             \
-  V(constant_pool_array_map)            \
-  V(weak_cell_map)                      \
-  V(no_interceptor_result_sentinel)     \
-  V(hash_table_map)                     \
-  V(ordered_hash_table_map)             \
-  V(empty_fixed_array)                  \
-  V(empty_byte_array)                   \
-  V(empty_descriptor_array)             \
-  V(empty_constant_pool_array)          \
-  V(arguments_marker)                   \
-  V(symbol_map)                         \
-  V(sloppy_arguments_elements_map)      \
-  V(function_context_map)               \
-  V(catch_context_map)                  \
-  V(with_context_map)                   \
-  V(block_context_map)                  \
-  V(module_context_map)                 \
-  V(global_context_map)                 \
-  V(undefined_map)                      \
-  V(the_hole_map)                       \
-  V(null_map)                           \
-  V(boolean_map)                        \
-  V(uninitialized_map)                  \
-  V(message_object_map)                 \
-  V(foreign_map)                        \
-  V(neander_map)
-
 #define INTERNALIZED_STRING_LIST(V)                        \
   V(Object_string, "Object")                               \
   V(proto_string, "__proto__")                             \
@@ -351,6 +299,60 @@ namespace internal {
   V(class_start_position_symbol)    \
   V(class_end_position_symbol)
 
+// Heap roots that are known to be immortal immovable, for which we can safely
+// skip write barriers. This list is not complete and has omissions.
+#define IMMORTAL_IMMOVABLE_ROOT_LIST(V) \
+  V(ByteArrayMap)                       \
+  V(FreeSpaceMap)                       \
+  V(OnePointerFillerMap)                \
+  V(TwoPointerFillerMap)                \
+  V(UndefinedValue)                     \
+  V(TheHoleValue)                       \
+  V(NullValue)                          \
+  V(TrueValue)                          \
+  V(FalseValue)                         \
+  V(UninitializedValue)                 \
+  V(CellMap)                            \
+  V(GlobalPropertyCellMap)              \
+  V(SharedFunctionInfoMap)              \
+  V(MetaMap)                            \
+  V(HeapNumberMap)                      \
+  V(MutableHeapNumberMap)               \
+  V(NativeContextMap)                   \
+  V(FixedArrayMap)                      \
+  V(CodeMap)                            \
+  V(ScopeInfoMap)                       \
+  V(FixedCOWArrayMap)                   \
+  V(FixedDoubleArrayMap)                \
+  V(ConstantPoolArrayMap)               \
+  V(WeakCellMap)                        \
+  V(NoInterceptorResultSentinel)        \
+  V(HashTableMap)                       \
+  V(OrderedHashTableMap)                \
+  V(EmptyFixedArray)                    \
+  V(EmptyByteArray)                     \
+  V(EmptyDescriptorArray)               \
+  V(EmptyConstantPoolArray)             \
+  V(ArgumentsMarker)                    \
+  V(SymbolMap)                          \
+  V(SloppyArgumentsElementsMap)         \
+  V(FunctionContextMap)                 \
+  V(CatchContextMap)                    \
+  V(WithContextMap)                     \
+  V(BlockContextMap)                    \
+  V(ModuleContextMap)                   \
+  V(GlobalContextMap)                   \
+  V(UndefinedMap)                       \
+  V(TheHoleMap)                         \
+  V(NullMap)                            \
+  V(BooleanMap)                         \
+  V(UninitializedMap)                   \
+  V(ArgumentsMarkerMap)                 \
+  V(JSMessageObjectMap)                 \
+  V(ForeignMap)                         \
+  V(NeanderMap)                         \
+  PRIVATE_SYMBOL_LIST(V)
+
 // Forward declarations.
 class HeapStats;
 class Isolate;
@@ -928,6 +930,8 @@ class Heap {
     return reinterpret_cast<Address*>(&roots_[kStoreBufferTopRootIndex]);
   }
 
+  static bool RootIsImmortalImmovable(int root_index);
+
 #ifdef VERIFY_HEAP
   // Verify the heap is in its normal state before or after a GC.
   void Verify();
@@ -1116,6 +1120,8 @@ class Heap {
     kSmiRootsStart = kStringTableRootIndex + 1
   };
 
+  Object* root(RootListIndex index) { return roots_[index]; }
+
   STATIC_ASSERT(kUndefinedValueRootIndex ==
                 Internals::kUndefinedValueRootIndex);
   STATIC_ASSERT(kNullValueRootIndex == Internals::kNullValueRootIndex);
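
Note the renaming in the moved list: entries changed from accessor-style names (byte_array_map) to root-index-style names (ByteArrayMap), so the same list can now expand to Heap::k##name##RootIndex constants. That is what the new RootIsImmortalImmovable switch uses, and what the hydrogen-instructions.cc change below relies on via the new heap->root(RootListIndex) accessor; the private symbols can then be folded in through PRIVATE_SYMBOL_LIST, presumably because their root index constants follow the same k<name>RootIndex naming. A small compilable sketch of the pattern, again with made-up names:

  #include <cassert>
  #include <string>

  // Made-up two-entry stand-in for IMMORTAL_IMMOVABLE_ROOT_LIST(V).
  #define SKETCH_LIST(V) \
    V(MetaMap)           \
    V(NullValue)

  class SketchHeap {
   public:
    enum RootListIndex {
  #define INDEX(name) k##name##RootIndex,
      SKETCH_LIST(INDEX)
  #undef INDEX
      kRootListLength
    };

    // Counterpart of the Heap::root(RootListIndex) accessor added in this CL.
    const std::string& root(RootListIndex index) const { return roots_[index]; }

   private:
    std::string roots_[kRootListLength] = {"meta_map", "null_value"};
  };

  // Mirrors the reworked HConstant::ImmortalImmovable chain: one expansion per
  // list entry, each going through the index-based accessor instead of a
  // dedicated heap->name() accessor.
  static bool IsImmortalImmovable(const SketchHeap& heap,
                                  const std::string& value) {
    return
  #define IMMORTAL_IMMOVABLE_ROOT(name) \
        value == heap.root(SketchHeap::k##name##RootIndex) ||
        SKETCH_LIST(IMMORTAL_IMMOVABLE_ROOT)
  #undef IMMORTAL_IMMOVABLE_ROOT
        false;
  }

  int main() {
    SketchHeap heap;
    assert(IsImmortalImmovable(heap, "null_value"));
    assert(!IsImmortalImmovable(heap, "string_table"));
  }
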
diff --git a/src/hydrogen-instructions.cc b/src/hydrogen-instructions.cc
index 1544bad..188119d 100644
@@ -2862,7 +2862,7 @@ bool HConstant::ImmortalImmovable() const {
   DCHECK(!object_.IsKnownGlobal(heap->nan_value()));
   return
 #define IMMORTAL_IMMOVABLE_ROOT(name) \
-      object_.IsKnownGlobal(heap->name()) ||
+  object_.IsKnownGlobal(heap->root(Heap::k##name##RootIndex)) ||
       IMMORTAL_IMMOVABLE_ROOT_LIST(IMMORTAL_IMMOVABLE_ROOT)
 #undef IMMORTAL_IMMOVABLE_ROOT
 #define INTERNALIZED_STRING(name, value) \
@@ -2873,9 +2873,6 @@ bool HConstant::ImmortalImmovable() const {
       object_.IsKnownGlobal(heap->name##_map()) ||
       STRING_TYPE_LIST(STRING_TYPE)
 #undef STRING_TYPE
-#define SYMBOL(name) object_.IsKnownGlobal(heap->name()) ||
-      PRIVATE_SYMBOL_LIST(SYMBOL)
-#undef SYMBOL
       false;
 }
 
diff --git a/src/serialize.cc b/src/serialize.cc
index ba4bf41..28838ed 100644
@@ -1677,11 +1677,10 @@ void Serializer::ObjectSerializer::VisitPointers(Object** start,
     while (current < end && !(*current)->IsSmi()) {
       HeapObject* current_contents = HeapObject::cast(*current);
       int root_index = serializer_->root_index_map()->Lookup(current_contents);
-      // Repeats are not subject to the write barrier so there are only some
-      // objects that can be used in a repeat encoding.  These are the early
-      // ones in the root array that are never in new space.
+      // Repeats are not subject to the write barrier so we can only use
+      // immortal immovable root members. They are never in new space.
       if (current != start && root_index != RootIndexMap::kInvalidRootIndex &&
-          root_index < kRootArrayNumberOfConstantEncodings &&
+          Heap::RootIsImmortalImmovable(root_index) &&
           current_contents == current[-1]) {
         DCHECK(!serializer_->isolate()->heap()->InNewSpace(current_contents));
         int repeat_count = 1;
@@ -1908,7 +1907,7 @@ BackReference Serializer::AllocateLargeObject(int size) {
   // Large objects are allocated one-by-one when deserializing. We do not
   // have to keep track of multiple chunks.
   pending_chunk_[LO_SPACE] += size;
-  return BackReference(LO_SPACE, 0, seen_large_objects_index_++);
+  return BackReference::LargeObjectReference(seen_large_objects_index_++);
 }
 
 
@@ -1925,7 +1924,8 @@ BackReference Serializer::Allocate(AllocationSpace space, int size) {
   }
   uint32_t offset = pending_chunk_[space];
   pending_chunk_[space] = new_chunk_size;
-  return BackReference(space, completed_chunks_[space].length(), offset);
+  return BackReference::Reference(space, completed_chunks_[space].length(),
+                                  offset);
 }
 
 
@@ -2007,12 +2007,17 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
 
   BackReference back_reference = back_reference_map_.Lookup(obj);
   if (back_reference.is_valid()) {
-    if (FLAG_trace_code_serializer) {
-      PrintF(" Encoding back reference to: ");
-      obj->ShortPrint();
-      PrintF("\n");
+    if (back_reference.is_source()) {
+      DCHECK_EQ(source_, obj);
+      SerializeSourceObject(how_to_code, where_to_point);
+    } else {
+      if (FLAG_trace_code_serializer) {
+        PrintF(" Encoding back reference to: ");
+        obj->ShortPrint();
+        PrintF("\n");
+      }
+      SerializeBackReference(back_reference, how_to_code, where_to_point, skip);
     }
-    SerializeBackReference(back_reference, how_to_code, where_to_point, skip);
     return;
   }
 
@@ -2056,11 +2061,6 @@ void CodeSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
     UNREACHABLE();
   }
 
-  if (obj == source_) {
-    SerializeSourceObject(how_to_code, where_to_point);
-    return;
-  }
-
   // Past this point we should not see any (context-specific) maps anymore.
   CHECK(!obj->IsMap());
   // There should be no references to the global object embedded.
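
The VisitPointers change above widens the set of roots eligible for the repeat encoding. Repeats bypass the write barrier when deserialized, so they are only safe for roots that can never be in new space; previously that was approximated by the first kRootArrayNumberOfConstantEncodings entries of the root array, now it is exactly the immortal immovable roots. A stand-alone sketch of the run-length idea (simplified: slot values double as root indices, which the real code obtains from root_index_map()):

  #include <cstdio>

  // Pretend slot values are root indices and even indices are immortal
  // immovable; the real code asks Heap::RootIsImmortalImmovable.
  static bool SketchRootIsImmortalImmovable(int root_index) {
    return root_index % 2 == 0;
  }

  // Emit a run of identical, repeat-eligible slots as one repeat record,
  // mimicking the repeat logic in VisitPointers above.
  static void EmitSlots(const int* slots, int count) {
    for (int i = 0; i < count;) {
      if (i > 0 && slots[i] == slots[i - 1] &&
          SketchRootIsImmortalImmovable(slots[i])) {
        int repeat = 1;
        while (i + repeat < count && slots[i + repeat] == slots[i]) ++repeat;
        std::printf("repeat previous root %d x %d\n", slots[i], repeat);
        i += repeat;
      } else {
        std::printf("reference root %d\n", slots[i]);
        ++i;
      }
    }
  }

  int main() {
    const int slots[] = {4, 4, 4, 3, 3, 8};
    EmitSlots(slots, 6);
    // Root 4 is repeat-eligible: "reference root 4" then "repeat ... x 2".
    // Root 3 is not (odd, i.e. "movable" here), so both of its slots are
    // emitted individually.
  }
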
diff --git a/src/serialize.h b/src/serialize.h
index 3766801..ae64462 100644
@@ -189,16 +189,26 @@ class BackReference {
  public:
   explicit BackReference(uint32_t bitfield) : bitfield_(bitfield) {}
 
-  BackReference(AllocationSpace space, uint32_t chunk_index,
-                uint32_t chunk_offset) {
-    DCHECK(IsAligned(chunk_offset, kObjectAlignment));
-    bitfield_ = SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
-                ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits);
+  BackReference() : bitfield_(kInvalidValue) {}
+
+  static BackReference SourceReference() { return BackReference(kSourceValue); }
+
+  static BackReference LargeObjectReference(uint32_t index) {
+    return BackReference(SpaceBits::encode(LO_SPACE) |
+                         ChunkOffsetBits::encode(index));
   }
 
-  BackReference() : bitfield_(kInvalidValue) {}
+  static BackReference Reference(AllocationSpace space, uint32_t chunk_index,
+                                 uint32_t chunk_offset) {
+    DCHECK(IsAligned(chunk_offset, kObjectAlignment));
+    DCHECK_NE(LO_SPACE, space);
+    return BackReference(
+        SpaceBits::encode(space) | ChunkIndexBits::encode(chunk_index) |
+        ChunkOffsetBits::encode(chunk_offset >> kObjectAlignmentBits));
+  }
 
   bool is_valid() const { return bitfield_ != kInvalidValue; }
+  bool is_source() const { return bitfield_ == kSourceValue; }
 
   AllocationSpace space() const {
     DCHECK(is_valid());
@@ -224,6 +234,7 @@ class BackReference {
 
  private:
   static const uint32_t kInvalidValue = 0xFFFFFFFF;
+  static const uint32_t kSourceValue = 0xFFFFFFFE;
   static const int kChunkOffsetSize = kPageSizeBits - kObjectAlignmentBits;
   static const int kChunkIndexSize = 32 - kChunkOffsetSize - kSpaceTagSize;
 
@@ -263,6 +274,10 @@ class BackReferenceMap : public AddressMapBase {
     SetValue(entry, b.bitfield());
   }
 
+  void AddSourceString(String* string) {
+    Add(string, BackReference::SourceReference());
+  }
+
  private:
   DisallowHeapAllocation no_allocation_;
   HashMap* map_;
@@ -700,7 +715,9 @@ class CodeSerializer : public Serializer {
       : Serializer(isolate, sink),
         source_(source),
         main_code_(main_code),
-        num_internalized_strings_(0) {}
+        num_internalized_strings_(0) {
+    back_reference_map_.AddSourceString(source);
+  }
 
   virtual void SerializeObject(HeapObject* o, HowToCode how_to_code,
                                WhereToPoint where_to_point, int skip);
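
BackReference now hides its bit layout behind named factory functions: Reference() packs space, chunk index and chunk offset, LargeObjectReference() stores only a running index in the offset field (large objects are deserialized one by one, so there is no chunk index to track), and SourceReference() is a second reserved constant (0xFFFFFFFE) next to the existing invalid value (0xFFFFFFFF). A compilable sketch of the same layout with made-up field widths (the real class derives them from the page size and object alignment, and additionally shifts the offset by the object alignment):

  #include <cassert>
  #include <cstdint>

  class SketchBackReference {
   public:
    static SketchBackReference Invalid() {
      return SketchBackReference(kInvalidValue);
    }
    static SketchBackReference Source() {
      return SketchBackReference(kSourceValue);
    }
    static SketchBackReference LargeObject(uint32_t index) {
      return SketchBackReference((kLargeObjectSpace << kSpaceShift) | index);
    }
    static SketchBackReference Reference(uint32_t space, uint32_t chunk_index,
                                         uint32_t chunk_offset) {
      assert(space != kLargeObjectSpace);
      return SketchBackReference((space << kSpaceShift) |
                                 (chunk_index << kChunkIndexShift) |
                                 chunk_offset);
    }

    bool is_valid() const { return bits_ != kInvalidValue; }
    bool is_source() const { return bits_ == kSourceValue; }
    uint32_t space() const { return bits_ >> kSpaceShift; }
    uint32_t chunk_index() const { return (bits_ >> kChunkIndexShift) & 0x1Fu; }
    uint32_t chunk_offset() const {
      return bits_ & ((1u << kChunkIndexShift) - 1);
    }

   private:
    explicit SketchBackReference(uint32_t bits) : bits_(bits) {}

    static const uint32_t kInvalidValue = 0xFFFFFFFFu;
    static const uint32_t kSourceValue = 0xFFFFFFFEu;
    static const uint32_t kLargeObjectSpace = 5;  // made up
    static const int kSpaceShift = 29;            // 3 bits for the space tag
    static const int kChunkIndexShift = 24;       // 5 bits for the chunk index

    uint32_t bits_;
  };

  int main() {
    SketchBackReference ref = SketchBackReference::Reference(1, 2, 100);
    assert(ref.is_valid() && !ref.is_source());
    assert(ref.space() == 1 && ref.chunk_index() == 2 &&
           ref.chunk_offset() == 100);
    assert(SketchBackReference::LargeObject(7).space() == 5);
    assert(SketchBackReference::Source().is_source());
    assert(!SketchBackReference::Invalid().is_valid());
  }
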
diff --git a/test/cctest/test-serialize.cc b/test/cctest/test-serialize.cc
index ac716a4..0c0e522 100644
@@ -867,6 +867,8 @@ TEST(SerializeToplevelLargeString) {
 
   CHECK_EQ(6 * 1000000, Handle<String>::cast(copy_result)->length());
   CHECK(isolate->heap()->InSpace(HeapObject::cast(*copy_result), LO_SPACE));
+  // Make sure we do not serialize too much, e.g. by including the source
+  // string.
+  CHECK_LT(cache->length(), 7000000);
 
   delete cache;
   source.Dispose();