Use virtually dispatched specialized scavengers instead of single generic ScavengeObj...
author     vegorov@chromium.org <vegorov@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Mon, 12 Jul 2010 12:47:09 +0000 (12:47 +0000)
committer  vegorov@chromium.org <vegorov@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Mon, 12 Jul 2010 12:47:09 +0000 (12:47 +0000)
Review URL: http://codereview.chromium.org/2895008

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5041 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/bootstrapper.cc
src/factory.cc
src/heap.cc
src/heap.h
src/objects-inl.h
src/objects.cc
src/objects.h
src/serialize.cc

diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index bbd69ec..e1d4489 100644
@@ -812,6 +812,9 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     initial_map->set_instance_size(
         initial_map->instance_size() + 5 * kPointerSize);
     initial_map->set_instance_descriptors(*descriptors);
+    initial_map->set_scavenger(
+        Heap::GetScavenger(initial_map->instance_type(),
+                           initial_map->instance_size()));
   }
 
   {  // -- J S O N
diff --git a/src/factory.cc b/src/factory.cc
index 39e881a..5b38741 100644
@@ -277,6 +277,8 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
   copy->set_inobject_properties(inobject_properties);
   copy->set_unused_property_fields(inobject_properties);
   copy->set_instance_size(copy->instance_size() + instance_size_delta);
+  copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
+                                         copy->instance_size()));
   return copy;
 }
 
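This recompute-after-resize pattern recurs throughout the patch (bootstrapper.cc above, objects.cc and serialize.cc below): whenever a map's instance type or instance size changes, the cached scavenger must be refreshed by hand. Below is a minimal sketch of a helper that would bundle the two steps; the helper is hypothetical and not part of this change, and the class declarations are stubs standing in for V8's objects.h/heap.h.

```cpp
class Map;
class HeapObject;

typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);

// Stub declarations standing in for the accessors this patch uses.
class Map {
 public:
  int instance_type();
  int instance_size();
  void set_instance_size(int size);
  void set_scavenger(Scavenger s);
};

class Heap {
 public:
  static Scavenger GetScavenger(int instance_type, int instance_size);
};

// Hypothetical helper (not in this patch): change a map's instance size and
// refresh its cached scavenger in one step, so the two can never go stale.
static void SetInstanceSizeAndRefreshScavenger(Map* map, int new_size) {
  map->set_instance_size(new_size);
  map->set_scavenger(Heap::GetScavenger(map->instance_type(),
                                        map->instance_size()));
}
```
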
diff --git a/src/heap.cc b/src/heap.cc
index 1b62589..5c9d77f 100644
@@ -799,34 +799,34 @@ class ScavengeVisitor: public ObjectVisitor {
 };
 
 
-// A queue of pointers and maps of to-be-promoted objects during a
-// scavenge collection.
+// A queue of objects promoted during scavenge. Each object is accompanied
+// by its size to avoid dereferencing a map pointer for scanning.
 class PromotionQueue {
  public:
   void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
+    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
   }
 
   bool is_empty() { return front_ <= rear_; }
 
-  void insert(HeapObject* object, Map* map) {
-    *(--rear_) = object;
-    *(--rear_) = map;
+  void insert(HeapObject* target, int size) {
+    *(--rear_) = reinterpret_cast<intptr_t>(target);
+    *(--rear_) = size;
     // Assert no overflow into live objects.
     ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
   }
 
-  void remove(HeapObject** object, Map** map) {
-    *object = *(--front_);
-    *map = Map::cast(*(--front_));
+  void remove(HeapObject** target, int* size) {
+    *target = reinterpret_cast<HeapObject*>(*(--front_));
+    *size = *(--front_);
     // Assert no underflow.
     ASSERT(front_ >= rear_);
   }
 
  private:
   // The front of the queue is higher in memory than the rear.
-  HeapObject** front_;
-  HeapObject** rear_;
+  intptr_t* front_;
+  intptr_t* rear_;
 };
 
 
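The queue lives in the unused top of to-space and grows downward toward the allocation pointer, which is why insert() asserts against overflowing into live objects. The following is a self-contained sketch of the same mechanics outside V8; the buffer and types are illustrative.

```cpp
#include <cassert>
#include <cstdint>

// Standalone sketch of the same downward-growing queue: (pointer, size)
// pairs are pushed below a start address; front_ stays at or above rear_,
// so front_ <= rear_ holds exactly when the queue is empty.
class PairQueue {
 public:
  void Initialize(intptr_t* start) { front_ = rear_ = start; }
  bool is_empty() { return front_ <= rear_; }

  void insert(void* target, int size) {
    *(--rear_) = reinterpret_cast<intptr_t>(target);
    *(--rear_) = size;
  }

  void remove(void** target, int* size) {
    *target = reinterpret_cast<void*>(*(--front_));
    *size = static_cast<int>(*(--front_));
    assert(front_ >= rear_);  // consumption may not pass the insertion end
  }

 private:
  intptr_t* front_;  // higher in memory: the consumption end
  intptr_t* rear_;   // lower in memory: the insertion end
};

int main() {
  intptr_t buffer[8];
  PairQueue q;
  q.Initialize(buffer + 8);  // the queue occupies the top of the buffer
  int x = 42;
  q.insert(&x, sizeof(x));
  void* target;
  int size;
  q.remove(&target, &size);
  assert(target == &x && size == static_cast<int>(sizeof(x)) && q.is_empty());
  return 0;
}
```
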
@@ -1041,31 +1041,26 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
-      object->Iterate(scavenge_visitor);
-      new_space_front += object->Size();
+      Map* map = object->map();
+      int size = object->SizeFromMap(map);
+      object->IterateBody(map->instance_type(), size, scavenge_visitor);
+      new_space_front += size;
     }
 
     // Promote and process all the to-be-promoted objects.
     while (!promotion_queue.is_empty()) {
-      HeapObject* source;
-      Map* map;
-      promotion_queue.remove(&source, &map);
-      // Copy the from-space object to its new location (given by the
-      // forwarding address) and fix its map.
-      HeapObject* target = source->map_word().ToForwardingAddress();
-      int size = source->SizeFromMap(map);
-      CopyBlock(target->address(), source->address(), size);
-      target->set_map(map);
-
-#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-      // Update NewSpace stats if necessary.
-      RecordCopiedObject(target);
-#endif
-      // Visit the newly copied object for pointers to new space.
+      HeapObject* target;
+      int size;
+      promotion_queue.remove(&target, &size);
+
+      // A promoted object might already have been partially visited
+      // during dirty-regions iteration. Thus we search specifically
+      // for pointers into the from semispace instead of looking for
+      // pointers into new space.
       ASSERT(!target->IsMap());
-      IterateAndMarkPointersToNewSpace(target->address(),
-                                       target->address() + size,
-                                       &ScavengePointer);
+      IterateAndMarkPointersToFromSpace(target->address(),
+                                        target->address() + size,
+                                        &ScavengePointer);
     }
 
     // Take another spin if there are now unswept objects in new space
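
The hunk above is a Cheney-style two-finger scan with a second work source: new_space_front chases the to-space allocation pointer, and draining the promotion queue may allocate further into to-space, so the two phases alternate until both run dry. A minimal sketch of that control structure follows; all four helpers are illustrative stand-ins, since in V8 this logic is written inline in Heap::DoScavenge.

```cpp
// Stand-in declarations for the two work sources and their exit tests.
void ScanNewSpaceUpToAllocationPointer();
void DrainPromotionQueue();
bool NewSpaceHasUnsweptObjects();
bool PromotionQueueIsEmpty();

void ScavengeLoop() {
  do {
    // Phase 1: scan to-space between the scan pointer and the allocation
    // pointer; visiting objects may copy more objects into to-space or
    // push promoted objects onto the promotion queue.
    ScanNewSpaceUpToAllocationPointer();

    // Phase 2: scan promoted objects for pointers into from space; this
    // may again allocate into to-space, reopening phase 1.
    DrainPromotionQueue();
  } while (NewSpaceHasUnsweptObjects() || !PromotionQueueIsEmpty());
}
```
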
@@ -1077,7 +1072,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-void Heap::RecordCopiedObject(HeapObject* obj) {
+static void RecordCopiedObject(HeapObject* obj) {
   bool should_record = false;
 #ifdef DEBUG
   should_record = FLAG_heap_stats;
@@ -1086,22 +1081,24 @@ void Heap::RecordCopiedObject(HeapObject* obj) {
   should_record = should_record || FLAG_log_gc;
 #endif
   if (should_record) {
-    if (new_space_.Contains(obj)) {
-      new_space_.RecordAllocation(obj);
+    if (Heap::new_space()->Contains(obj)) {
+      Heap::new_space()->RecordAllocation(obj);
     } else {
-      new_space_.RecordPromotion(obj);
+      Heap::new_space()->RecordPromotion(obj);
     }
   }
 }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 
 
-
-HeapObject* Heap::MigrateObject(HeapObject* source,
-                                HeapObject* target,
-                                int size) {
+// Helper function used by the scavenger to copy a source object to an
+// allocated target object and update the forwarding pointer in the
+// source object.  Returns the target object.
+inline static HeapObject* MigrateObject(HeapObject* source,
+                                        HeapObject* target,
+                                        int size) {
   // Copy the content of source to target.
-  CopyBlock(target->address(), source->address(), size);
+  Heap::CopyBlock(target->address(), source->address(), size);
 
   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));
@@ -1115,117 +1112,281 @@ HeapObject* Heap::MigrateObject(HeapObject* source,
 }
 
 
-static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
-  STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
-  ASSERT(object->map() == map);
-  InstanceType type = map->instance_type();
-  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
-  ASSERT(object->IsString() && !object->IsSymbol());
-  return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
+enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
+enum SizeRestriction { SMALL, UNKNOWN_SIZE };
+
+
+template<ObjectContents object_contents, SizeRestriction size_restriction>
+static inline void EvacuateObject(Map* map,
+                                  HeapObject** slot,
+                                  HeapObject* object,
+                                  int object_size) {
+  ASSERT((size_restriction != SMALL) ||
+         (object_size <= Page::kMaxHeapObjectSize));
+  ASSERT(object->Size() == object_size);
+
+  if (Heap::ShouldBePromoted(object->address(), object_size)) {
+    Object* result;
+
+    if ((size_restriction != SMALL) &&
+        (object_size > Page::kMaxHeapObjectSize)) {
+      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
+    } else {
+      if (object_contents == DATA_OBJECT) {
+        result = Heap::old_data_space()->AllocateRaw(object_size);
+      } else {
+        result = Heap::old_pointer_space()->AllocateRaw(object_size);
+      }
+    }
+
+    if (!result->IsFailure()) {
+      HeapObject* target = HeapObject::cast(result);
+      *slot = MigrateObject(object, target, object_size);
+
+      if (object_contents == POINTER_OBJECT) {
+        promotion_queue.insert(target, object_size);
+      }
+
+      Heap::tracer()->increment_promoted_objects_size(object_size);
+      return;
+    }
+  }
+  Object* result = Heap::new_space()->AllocateRaw(object_size);
+  ASSERT(!result->IsFailure());
+  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
+  return;
 }
 
 
-void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
-  MapWord first_word = object->map_word();
-  ASSERT(!first_word.IsForwardingAddress());
+template<int object_size_in_words, ObjectContents object_contents>
+static inline void EvacuateObjectOfFixedSize(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  const int object_size = object_size_in_words << kPointerSizeLog2;
+  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+}
 
-  // Optimization: Bypass flattened ConsString objects.
-  if (IsShortcutCandidate(object, first_word.ToMap())) {
-    object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
-    *p = object;
-    // After patching *p we have to repeat the checks that object is in the
-    // active semispace of the young generation and not already copied.
-    if (!InNewSpace(object)) return;
-    first_word = object->map_word();
+
+template<ObjectContents object_contents>
+static inline void EvacuateObjectOfFixedSize(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  int object_size = map->instance_size();
+  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
+}
+
+
+static inline void EvacuateFixedArray(Map* map,
+                                      HeapObject** slot,
+                                      HeapObject* object) {
+  int object_size = FixedArray::cast(object)->FixedArraySize();
+  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static inline void EvacuateByteArray(Map* map,
+                                     HeapObject** slot,
+                                     HeapObject* object) {
+  int object_size = ByteArray::cast(object)->ByteArraySize();
+  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static Scavenger GetScavengerForSize(int object_size,
+                                     ObjectContents object_contents) {
+  ASSERT(IsAligned(object_size, kPointerSize));
+  ASSERT(object_size < Page::kMaxHeapObjectSize);
+
+  switch (object_size >> kPointerSizeLog2) {
+#define CASE(n)                                           \
+    case n:                                               \
+      if (object_contents == DATA_OBJECT) {               \
+        return static_cast<Scavenger>(                    \
+          &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
+      } else {                                            \
+        return static_cast<Scavenger>(                    \
+          &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
+      }
+
+    CASE(1);
+    CASE(2);
+    CASE(3);
+    CASE(4);
+    CASE(5);
+    CASE(6);
+    CASE(7);
+    CASE(8);
+    CASE(9);
+    CASE(10);
+    CASE(11);
+    CASE(12);
+    CASE(13);
+    CASE(14);
+    CASE(15);
+    CASE(16);
+    default:
+      if (object_contents == DATA_OBJECT) {
+        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
+      } else {
+        return static_cast<Scavenger>(
+            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
+      }
+
+#undef CASE
+  }
+}
+
+
+static inline void EvacuateSeqAsciiString(Map* map,
+                                          HeapObject** slot,
+                                          HeapObject* object) {
+  int object_size = SeqAsciiString::cast(object)->
+      SeqAsciiStringSize(map->instance_type());
+  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static inline void EvacuateSeqTwoByteString(Map* map,
+                                            HeapObject** slot,
+                                            HeapObject* object) {
+  int object_size = SeqTwoByteString::cast(object)->
+      SeqTwoByteStringSize(map->instance_type());
+  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+}
+
+
+static inline bool IsShortcutCandidate(int type) {
+  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
+}
+
+
+static inline void EvacuateShortcutCandidate(Map* map,
+                                             HeapObject** slot,
+                                             HeapObject* object) {
+  ASSERT(IsShortcutCandidate(map->instance_type()));
+
+  if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
+    HeapObject* first =
+        HeapObject::cast(ConsString::cast(object)->unchecked_first());
+
+    *slot = first;
+
+    if (!Heap::InNewSpace(first)) {
+      object->set_map_word(MapWord::FromForwardingAddress(first));
+      return;
+    }
+
+    MapWord first_word = first->map_word();
     if (first_word.IsForwardingAddress()) {
-      *p = first_word.ToForwardingAddress();
+      HeapObject* target = first_word.ToForwardingAddress();
+
+      *slot = target;
+      object->set_map_word(MapWord::FromForwardingAddress(target));
       return;
     }
+
+    first->map()->Scavenge(slot, first);
+    object->set_map_word(MapWord::FromForwardingAddress(*slot));
+    return;
   }
 
-  int object_size = object->SizeFromMap(first_word.ToMap());
-  // We rely on live objects in new space to be at least two pointers,
-  // so we can store the from-space address and map pointer of promoted
-  // objects in the to space.
-  ASSERT(object_size >= 2 * kPointerSize);
+  int object_size = ConsString::kSize;
+  EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
+}
+
 
-  // If the object should be promoted, we try to copy it to old space.
-  if (ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
-    if (object_size > MaxObjectSizeInPagedSpace()) {
-      result = lo_space_->AllocateRawFixedArray(object_size);
-      if (!result->IsFailure()) {
-        HeapObject* target = HeapObject::cast(result);
-
-        if (object->IsFixedArray()) {
-          // Save the from-space object pointer and its map pointer at the
-          // top of the to space to be swept and copied later.  Write the
-          // forwarding address over the map word of the from-space
-          // object.
-          promotion_queue.insert(object, first_word.ToMap());
-          object->set_map_word(MapWord::FromForwardingAddress(target));
-
-          // Give the space allocated for the result a proper map by
-          // treating it as a free list node (not linked into the free
-          // list).
-          FreeListNode* node = FreeListNode::FromAddress(target->address());
-          node->set_size(object_size);
-
-          *p = target;
+Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
+  if (instance_type < FIRST_NONSTRING_TYPE) {
+    switch (instance_type & kStringRepresentationMask) {
+      case kSeqStringTag:
+        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
+          return &EvacuateSeqAsciiString;
         } else {
-          // In large object space only fixed arrays might possibly contain
-          // intergenerational references.
-          // All other objects can be copied immediately and not revisited.
-          *p = MigrateObject(object, target, object_size);
+          return &EvacuateSeqTwoByteString;
         }
 
-        tracer()->increment_promoted_objects_size(object_size);
-        return;
-      }
-    } else {
-      OldSpace* target_space = Heap::TargetSpace(object);
-      ASSERT(target_space == Heap::old_pointer_space_ ||
-             target_space == Heap::old_data_space_);
-      result = target_space->AllocateRaw(object_size);
-      if (!result->IsFailure()) {
-        HeapObject* target = HeapObject::cast(result);
-        if (target_space == Heap::old_pointer_space_) {
-          // Save the from-space object pointer and its map pointer at the
-          // top of the to space to be swept and copied later.  Write the
-          // forwarding address over the map word of the from-space
-          // object.
-          promotion_queue.insert(object, first_word.ToMap());
-          object->set_map_word(MapWord::FromForwardingAddress(target));
-
-          // Give the space allocated for the result a proper map by
-          // treating it as a free list node (not linked into the free
-          // list).
-          FreeListNode* node = FreeListNode::FromAddress(target->address());
-          node->set_size(object_size);
-
-          *p = target;
+      case kConsStringTag:
+        if (IsShortcutCandidate(instance_type)) {
+          return &EvacuateShortcutCandidate;
         } else {
-          // Objects promoted to the data space can be copied immediately
-          // and not revisited---we will never sweep that space for
-          // pointers and the copied objects do not contain pointers to
-          // new space objects.
-          *p = MigrateObject(object, target, object_size);
-#ifdef DEBUG
-          VerifyNonPointerSpacePointersVisitor v;
-          (*p)->Iterate(&v);
-#endif
+          ASSERT(instance_size == ConsString::kSize);
+          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
         }
-        tracer()->increment_promoted_objects_size(object_size);
-        return;
-      }
+
+      case kExternalStringTag:
+        ASSERT(instance_size == ExternalString::kSize);
+        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
     }
+    UNREACHABLE();
   }
-  // The object should remain in new space or the old space allocation failed.
-  Object* result = new_space_.AllocateRaw(object_size);
-  // Failed allocation at this point is utterly unexpected.
-  ASSERT(!result->IsFailure());
-  *p = MigrateObject(object, HeapObject::cast(result), object_size);
+
+  switch (instance_type) {
+    case BYTE_ARRAY_TYPE:
+      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
+
+    case FIXED_ARRAY_TYPE:
+      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
+
+    case JS_OBJECT_TYPE:
+    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
+    case JS_VALUE_TYPE:
+    case JS_ARRAY_TYPE:
+    case JS_REGEXP_TYPE:
+    case JS_FUNCTION_TYPE:
+    case JS_GLOBAL_PROXY_TYPE:
+    case JS_GLOBAL_OBJECT_TYPE:
+    case JS_BUILTINS_OBJECT_TYPE:
+      return GetScavengerForSize(instance_size, POINTER_OBJECT);
+
+    case ODDBALL_TYPE:
+      return NULL;
+
+    case PROXY_TYPE:
+      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
+
+    case MAP_TYPE:
+      return NULL;
+
+    case CODE_TYPE:
+      return NULL;
+
+    case JS_GLOBAL_PROPERTY_CELL_TYPE:
+      return NULL;
+
+    case HEAP_NUMBER_TYPE:
+    case FILLER_TYPE:
+    case PIXEL_ARRAY_TYPE:
+    case EXTERNAL_BYTE_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
+    case EXTERNAL_SHORT_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
+    case EXTERNAL_INT_ARRAY_TYPE:
+    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
+    case EXTERNAL_FLOAT_ARRAY_TYPE:
+      return GetScavengerForSize(instance_size, DATA_OBJECT);
+
+    case SHARED_FUNCTION_INFO_TYPE:
+      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
+                                 POINTER_OBJECT);
+
+#define MAKE_STRUCT_CASE(NAME, Name, name) \
+        case NAME##_TYPE:
+      STRUCT_LIST(MAKE_STRUCT_CASE)
+#undef MAKE_STRUCT_CASE
+          return GetScavengerForSize(instance_size, POINTER_OBJECT);
+    default:
+      UNREACHABLE();
+      return NULL;
+  }
+}
+
+
+void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
+  ASSERT(InFromSpace(object));
+  MapWord first_word = object->map_word();
+  ASSERT(!first_word.IsForwardingAddress());
+  Map* map = first_word.ToMap();
+  map->Scavenge(p, object);
 }
 
 
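This hunk is the heart of the change: EvacuateObject is a template over (contents, size restriction), GetScavengerForSize instantiates it once for each small word size, and GetScavenger maps every instance type to one of those stubs. Because size and contents are template parameters, the compiler folds both away, leaving each stub as straight-line code reached by a single indirect call. A self-contained sketch of the same selection scheme follows; only the dispatch idea mirrors V8, and all names and sizes here are illustrative.

```cpp
#include <cstdio>

enum ObjectContents { DATA_OBJECT, POINTER_OBJECT };

typedef void (*Scavenger)(void* object);

template <int size_in_words, ObjectContents contents>
static void EvacuateFixedSize(void* object) {
  const int size = size_in_words * static_cast<int>(sizeof(void*));
  // Both the size and this branch are compile-time constants, so each
  // instantiation is straight-line code for one (size, contents) pair.
  if (contents == POINTER_OBJECT) {
    std::printf("copy %d bytes, queue for pointer scanning\n", size);
  } else {
    std::printf("copy %d bytes, done\n", size);
  }
  (void)object;
}

static Scavenger GetScavengerForSizeSketch(int words,
                                           ObjectContents contents) {
#define CASE(n)                                          \
  case n:                                                \
    return contents == DATA_OBJECT                       \
               ? &EvacuateFixedSize<n, DATA_OBJECT>      \
               : &EvacuateFixedSize<n, POINTER_OBJECT>
  switch (words) {
    CASE(1);
    CASE(2);
    CASE(3);
    CASE(4);
    default:
      return 0;  // V8 falls back to a stub that reads map->instance_size()
  }
#undef CASE
}

int main() {
  // Selected once per map; afterwards the hot path is a single indirect
  // call with no per-object size or type re-derivation.
  Scavenger s = GetScavengerForSizeSketch(2, POINTER_OBJECT);
  s(0);
  return 0;
}
```
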
@@ -1243,6 +1404,8 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
+  reinterpret_cast<Map*>(result)->
+      set_scavenger(GetScavenger(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1259,6 +1422,7 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
+  map->set_scavenger(GetScavenger(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
@@ -3691,9 +3855,9 @@ bool Heap::IteratePointersInDirtyMapsRegion(
 }
 
 
-void Heap::IterateAndMarkPointersToNewSpace(Address start,
-                                            Address end,
-                                            ObjectSlotCallback callback) {
+void Heap::IterateAndMarkPointersToFromSpace(Address start,
+                                             Address end,
+                                             ObjectSlotCallback callback) {
   Address slot_address = start;
   Page* page = Page::FromAddress(start);
 
@@ -3701,7 +3865,7 @@ void Heap::IterateAndMarkPointersToNewSpace(Address start,
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InNewSpace(*slot)) {
+    if (Heap::InFromSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       callback(reinterpret_cast<HeapObject**>(slot));
       if (Heap::InNewSpace(*slot)) {
diff --git a/src/heap.h b/src/heap.h
index df3ba0e..1349e51 100644
@@ -774,11 +774,12 @@ class Heap : public AllStatic {
                                       DirtyRegionCallback visit_dirty_region,
                                       ObjectSlotCallback callback);
 
-  // Iterate pointers to new space found in memory interval from start to end.
+  // Iterate pointers into the from semispace of new space found in the
+  // memory interval from start to end.
   // Update dirty marks for page containing start address.
-  static void IterateAndMarkPointersToNewSpace(Address start,
-                                               Address end,
-                                               ObjectSlotCallback callback);
+  static void IterateAndMarkPointersToFromSpace(Address start,
+                                                Address end,
+                                                ObjectSlotCallback callback);
 
   // Iterate pointers to new space found in memory interval from start to end.
   // Return true if pointers to new space was found.
@@ -985,6 +986,8 @@ class Heap : public AllStatic {
 
   static void RecordStats(HeapStats* stats);
 
+  static Scavenger GetScavenger(int instance_type, int instance_size);
+
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1232,17 +1235,7 @@ class Heap : public AllStatic {
     set_instanceof_cache_function(the_hole_value());
   }
 
-  // Helper function used by CopyObject to copy a source object to an
-  // allocated target object and update the forwarding pointer in the source
-  // object.  Returns the target object.
-  static inline HeapObject* MigrateObject(HeapObject* source,
-                                          HeapObject* target,
-                                          int size);
-
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-  // Record the copy of an object in the NewSpace's statistics.
-  static void RecordCopiedObject(HeapObject* obj);
-
   // Record statistics before and after garbage collection.
   static void ReportStatisticsBeforeGC();
   static void ReportStatisticsAfterGC();
diff --git a/src/objects-inl.h b/src/objects-inl.h
index 0e45550..e764e39 100644
@@ -2060,6 +2060,23 @@ void ExternalFloatArray::set(int index, float value) {
   ptr[index] = value;
 }
 
+inline Scavenger Map::scavenger() {
+  Scavenger callback = reinterpret_cast<Scavenger>(
+      READ_INTPTR_FIELD(this, kIterateBodyCallbackOffset));
+
+  ASSERT(callback == Heap::GetScavenger(instance_type(),
+                                        instance_size()));
+
+  return callback;
+}
+
+inline void Map::set_scavenger(Scavenger callback) {
+  ASSERT(!reinterpret_cast<Object*>(
+      reinterpret_cast<intptr_t>(callback))->IsHeapObject());
+  WRITE_INTPTR_FIELD(this,
+                     kIterateBodyCallbackOffset,
+                     reinterpret_cast<intptr_t>(callback));
+}
 
 int Map::instance_size() {
   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
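
Note that set_scavenger stores a raw code address with WRITE_INTPTR_FIELD rather than a tagged pointer store; the ASSERT checks that the value cannot be misread as a heap pointer. The sketch below illustrates the invariant under the usual V8 tagging assumption (an odd word is treated as a heap object pointer); this is an assumption of the sketch, not code from the patch.

```cpp
#include <cassert>
#include <cstdint>

// Assumes V8-style tagging where an odd word is read as a heap object
// pointer and an even word as a small integer.
static bool LooksLikeHeapObject(intptr_t word) { return (word & 1) != 0; }

static void ExampleScavenger() {}

int main() {
  intptr_t raw = reinterpret_cast<intptr_t>(&ExampleScavenger);
  // C++ function addresses are at least two-byte aligned on the targets
  // this assumes (Thumb-mode ARM, where function pointers carry a low
  // state bit, would be a counterexample), so the stored callback cannot
  // be mistaken for a heap pointer by code inspecting the map's fields.
  assert(!LooksLikeHeapObject(raw));
  return 0;
}
```
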
diff --git a/src/objects.cc b/src/objects.cc
index e79a550..10c549c 100644
@@ -2190,6 +2190,8 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
     int new_instance_size = map()->instance_size() - instance_size_delta;
     new_map->set_inobject_properties(0);
     new_map->set_instance_size(new_instance_size);
+    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
+                                              new_map->instance_size()));
     Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                                instance_size_delta);
   }
diff --git a/src/objects.h b/src/objects.h
index 4a7dee6..8dfc75a 100644
@@ -2899,6 +2899,7 @@ class Code: public HeapObject {
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };
 
+typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);
 
 // All heap objects have a Map that describes their structure.
 //  A Map contains information about:
@@ -3100,6 +3101,13 @@ class Map: public HeapObject {
   void MapVerify();
 #endif
 
+  inline Scavenger scavenger();
+  inline void set_scavenger(Scavenger callback);
+
+  inline void Scavenge(HeapObject** slot, HeapObject* obj) {
+    scavenger()(this, slot, obj);
+  }
+
   static const int kMaxPreAllocatedPropertyFields = 255;
 
   // Layout description.
@@ -3110,7 +3118,8 @@ class Map: public HeapObject {
   static const int kInstanceDescriptorsOffset =
       kConstructorOffset + kPointerSize;
   static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
-  static const int kPadStart = kCodeCacheOffset + kPointerSize;
+  static const int kIterateBodyCallbackOffset = kCodeCacheOffset + kPointerSize;
+  static const int kPadStart = kIterateBodyCallbackOffset + kPointerSize;
   static const int kSize = MAP_POINTER_ALIGN(kPadStart);
 
   // Layout of pointer fields. Heap iteration code relies on them
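
Stored this way, the scavenger behaves like a one-slot vtable keyed by the object's map rather than by a C++ class: Map::Scavenge is one field load plus one indirect call. A minimal standalone sketch of that dispatch shape follows; the types and names are illustrative, not V8's.

```cpp
#include <cstdio>

// A one-slot "vtable" living in the type descriptor: every object points
// at a Map, and the Map carries the specialized visit callback.
struct Map;
struct Obj { Map* map; };

typedef void (*Scavenger)(Map* map, Obj** slot, Obj* object);

struct Map {
  Scavenger scavenger;
  void Scavenge(Obj** slot, Obj* object) { scavenger(this, slot, object); }
};

static void EvacuateSmallDataObject(Map*, Obj**, Obj*) {
  std::printf("specialized data-object path\n");
}

int main() {
  Map m = { &EvacuateSmallDataObject };
  Obj o = { &m };
  Obj* slot = &o;
  o.map->Scavenge(&slot, &o);  // one load from the map, one indirect call
  return 0;
}
```
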
diff --git a/src/serialize.cc b/src/serialize.cc
index a6a516a..e8aed54 100644
@@ -673,6 +673,14 @@ void Deserializer::ReadObject(int space_number,
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
+
+  if (space == Heap::map_space()) {
+    ASSERT(size == Map::kSize);
+    HeapObject* obj = HeapObject::FromAddress(address);
+    Map* map = reinterpret_cast<Map*>(obj);
+    map->set_scavenger(Heap::GetScavenger(map->instance_type(),
+                                          map->instance_size()));
+  }
 }
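
The deserializer must rebuild the field because a function address is specific to one build of the binary: a snapshot written by one build would replay stale code pointers in another, so only the stable (instance_type, instance_size) pair is trusted and the callback is recomputed on load. Below is a standalone sketch of that rebind-on-load pattern; the Record layout and ScavengerFor() are invented for illustration.

```cpp
#include <cassert>
#include <cstring>

// Only a stable type id survives the byte-level round-trip; the callback
// is recomputed after loading rather than trusted from the snapshot.
typedef void (*Scavenger)();

static void ScavengeTypeA() {}
static void ScavengeTypeB() {}

static Scavenger ScavengerFor(int type) {
  return type == 0 ? &ScavengeTypeA : &ScavengeTypeB;
}

struct Record {
  int type;
  Scavenger scavenger;  // a raw code address: never valid across builds
};

int main() {
  Record original = { 1, ScavengerFor(1) };

  // "Serialize": a raw byte copy, as a heap snapshot would capture it.
  unsigned char snapshot[sizeof(Record)];
  std::memcpy(snapshot, &original, sizeof(Record));

  // "Deserialize", then rebind: a pointer replayed from a snapshot could
  // point into a different build of the binary, so recompute it instead.
  Record loaded;
  std::memcpy(&loaded, snapshot, sizeof(Record));
  loaded.scavenger = ScavengerFor(loaded.type);
  assert(loaded.scavenger == &ScavengeTypeB);
  return 0;
}
```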