Revert revisions 5041 and 5042 introducing virtual scavenge
behavior. It breaks debug builds with snapshots on my machine.

author     ager@chromium.org <ager@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Mon, 12 Jul 2010 16:57:07 +0000 (16:57 +0000)
committer  ager@chromium.org <ager@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Mon, 12 Jul 2010 16:57:07 +0000 (16:57 +0000)

TBR=vegorov@chromium.org
Review URL: http://codereview.chromium.org/2983001

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5046 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
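
For context on what is being reverted: on the '-' side of the diffs below, each Map cached a Scavenger function pointer (chosen once by Heap::GetScavenger from instance type and size and invoked through Map::Scavenge); on the '+' side, the restored Heap::ScavengeObjectSlow re-inspects the map word and branches on the object's type and size at each call. A self-contained sketch of that difference in dispatch style, with made-up names and types (not V8 code):

#include <cstdio>

struct FakeObject { int type; };

// Reverted style: a callback selected once per "map" and dispatched through
// the stored pointer afterwards.
typedef void (*Scavenger)(FakeObject* obj);

static void ScavengeData(FakeObject*)    { std::puts("copy only, no pointer scan"); }
static void ScavengePointer(FakeObject*) { std::puts("copy, then scan for pointers"); }

static Scavenger GetScavengerSketch(int type) {
  return type == 0 ? &ScavengeData : &ScavengePointer;
}

// Restored style: one slow path that branches on the object's type each time
// it is reached.
static void ScavengeObjectSlowSketch(FakeObject* obj) {
  if (obj->type == 0) {
    ScavengeData(obj);
  } else {
    ScavengePointer(obj);
  }
}

int main() {
  FakeObject o = { 1 };
  Scavenger s = GetScavengerSketch(o.type);  // reverted style: dispatch cached up front
  s(&o);
  ScavengeObjectSlowSketch(&o);              // restored style: branch at each slow-path call
  return 0;
}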

src/bootstrapper.cc
src/factory.cc
src/heap.cc
src/heap.h
src/objects-inl.h
src/objects.cc
src/objects.h
src/serialize.cc

diff --git a/src/bootstrapper.cc b/src/bootstrapper.cc
index e1d4489d442526a54cec36de8eb9281075cce978..bbd69ecaba8a75956bc2cdd8d9821fbcab0edd3f 100644
--- a/src/bootstrapper.cc
+++ b/src/bootstrapper.cc
@@ -812,9 +812,6 @@ void Genesis::InitializeGlobal(Handle<GlobalObject> inner_global,
     initial_map->set_instance_size(
         initial_map->instance_size() + 5 * kPointerSize);
     initial_map->set_instance_descriptors(*descriptors);
-    initial_map->set_scavenger(
-        Heap::GetScavenger(initial_map->instance_type(),
-                           initial_map->instance_size()));
   }
 
   {  // -- J S O N
diff --git a/src/factory.cc b/src/factory.cc
index 5b387413c38b0a9b95765a383cb2cdb809fa19c3..39e881ac3d567b5fbdc3d354325cc62b4ea994c5 100644
--- a/src/factory.cc
+++ b/src/factory.cc
@@ -277,8 +277,6 @@ Handle<Map> Factory::CopyMap(Handle<Map> src,
   copy->set_inobject_properties(inobject_properties);
   copy->set_unused_property_fields(inobject_properties);
   copy->set_instance_size(copy->instance_size() + instance_size_delta);
-  copy->set_scavenger(Heap::GetScavenger(copy->instance_type(),
-                                         copy->instance_size()));
   return copy;
 }
 
diff --git a/src/heap.cc b/src/heap.cc
index a27eff1786e3a2cf09540436afb4601058a7502a..1b625897d11dd732c4556fd8e3c6c7116702518c 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -799,34 +799,34 @@ class ScavengeVisitor: public ObjectVisitor {
 };
 
 
-// A queue of objects promoted during scavenge. Each object is accompanied
-// by it's size to avoid dereferencing a map pointer for scanning.
+// A queue of pointers and maps of to-be-promoted objects during a
+// scavenge collection.
 class PromotionQueue {
  public:
   void Initialize(Address start_address) {
-    front_ = rear_ = reinterpret_cast<intptr_t*>(start_address);
+    front_ = rear_ = reinterpret_cast<HeapObject**>(start_address);
   }
 
   bool is_empty() { return front_ <= rear_; }
 
-  void insert(HeapObject* target, int size) {
-    *(--rear_) = reinterpret_cast<intptr_t>(target);
-    *(--rear_) = size;
+  void insert(HeapObject* object, Map* map) {
+    *(--rear_) = object;
+    *(--rear_) = map;
     // Assert no overflow into live objects.
     ASSERT(reinterpret_cast<Address>(rear_) >= Heap::new_space()->top());
   }
 
-  void remove(HeapObject** target, int* size) {
-    *target = reinterpret_cast<HeapObject*>(*(--front_));
-    *size = static_cast<int>(*(--front_));
+  void remove(HeapObject** object, Map** map) {
+    *object = *(--front_);
+    *map = Map::cast(*(--front_));
     // Assert no underflow.
     ASSERT(front_ >= rear_);
   }
 
  private:
   // The front of the queue is higher in memory than the rear.
-  intptr_t* front_;
-  intptr_t* rear_;
+  HeapObject** front_;
+  HeapObject** rear_;
 };
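
The PromotionQueue restored above keeps one (object, map) pair per to-be-promoted object, written downward from the top of to-space: insert() pushes two words at rear_, remove() consumes two words at front_, and the queue is empty once front_ catches up with rear_. A self-contained sketch of that two-cursor layout (illustrative only, not V8 code):

#include <cassert>
#include <stdint.h>

class TwoWordQueue {
 public:
  // The backing buffer is addressed from its high end downward.
  explicit TwoWordQueue(intptr_t* high_end) : front_(high_end), rear_(high_end) {}

  bool is_empty() const { return front_ <= rear_; }

  void insert(void* object, void* map) {
    *(--rear_) = reinterpret_cast<intptr_t>(object);
    *(--rear_) = reinterpret_cast<intptr_t>(map);
  }

  void remove(void** object, void** map) {
    *object = reinterpret_cast<void*>(*(--front_));
    *map = reinterpret_cast<void*>(*(--front_));
    assert(front_ >= rear_);  // never read past what has been inserted
  }

 private:
  intptr_t* front_;  // stays higher in memory than rear_
  intptr_t* rear_;
};

// Example: intptr_t backing[64]; TwoWordQueue q(backing + 64);

In the real heap the backing store is the unused top of to-space, which is why insert() above asserts that rear_ never dips below Heap::new_space()->top().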
 
 
@@ -1041,26 +1041,31 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
     // queue is empty.
     while (new_space_front < new_space_.top()) {
       HeapObject* object = HeapObject::FromAddress(new_space_front);
-      Map* map = object->map();
-      int size = object->SizeFromMap(map);
-      object->IterateBody(map->instance_type(), size, scavenge_visitor);
-      new_space_front += size;
+      object->Iterate(scavenge_visitor);
+      new_space_front += object->Size();
     }
 
     // Promote and process all the to-be-promoted objects.
     while (!promotion_queue.is_empty()) {
-      HeapObject* target;
-      int size;
-      promotion_queue.remove(&target, &size);
-
-      // Promoted object might be already partially visited
-      // during dirty regions iteration. Thus we search specificly
-      // for pointers to from semispace instead of looking for pointers
-      // to new space.
+      HeapObject* source;
+      Map* map;
+      promotion_queue.remove(&source, &map);
+      // Copy the from-space object to its new location (given by the
+      // forwarding address) and fix its map.
+      HeapObject* target = source->map_word().ToForwardingAddress();
+      int size = source->SizeFromMap(map);
+      CopyBlock(target->address(), source->address(), size);
+      target->set_map(map);
+
+#if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+      // Update NewSpace stats if necessary.
+      RecordCopiedObject(target);
+#endif
+      // Visit the newly copied object for pointers to new space.
       ASSERT(!target->IsMap());
-      IterateAndMarkPointersToFromSpace(target->address(),
-                                        target->address() + size,
-                                        &ScavengePointer);
+      IterateAndMarkPointersToNewSpace(target->address(),
+                                       target->address() + size,
+                                       &ScavengePointer);
     }
 
     // Take another spin if there are now unswept objects in new space
@@ -1072,7 +1077,7 @@ Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
 
 
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
-static void RecordCopiedObject(HeapObject* obj) {
+void Heap::RecordCopiedObject(HeapObject* obj) {
   bool should_record = false;
 #ifdef DEBUG
   should_record = FLAG_heap_stats;
@@ -1081,24 +1086,22 @@ static void RecordCopiedObject(HeapObject* obj) {
   should_record = should_record || FLAG_log_gc;
 #endif
   if (should_record) {
-    if (Heap::new_space()->Contains(obj)) {
-      Heap::new_space()->RecordAllocation(obj);
+    if (new_space_.Contains(obj)) {
+      new_space_.RecordAllocation(obj);
     } else {
-      Heap::new_space()->RecordPromotion(obj);
+      new_space_.RecordPromotion(obj);
     }
   }
 }
 #endif  // defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
 
 
-// Helper function used by CopyObject to copy a source object to an
-// allocated target object and update the forwarding pointer in the source
-// object.  Returns the target object.
-inline static HeapObject* MigrateObject(HeapObject* source,
-                                        HeapObject* target,
-                                        int size) {
+
+HeapObject* Heap::MigrateObject(HeapObject* source,
+                                HeapObject* target,
+                                int size) {
   // Copy the content of source to target.
-  Heap::CopyBlock(target->address(), source->address(), size);
+  CopyBlock(target->address(), source->address(), size);
 
   // Set the forwarding address.
   source->set_map_word(MapWord::FromForwardingAddress(target));
@@ -1112,281 +1115,117 @@ inline static HeapObject* MigrateObject(HeapObject* source,
 }
 
 
-enum ObjectContents  { DATA_OBJECT, POINTER_OBJECT };
-enum SizeRestriction { SMALL, UNKNOWN_SIZE };
-
-
-template<ObjectContents object_contents, SizeRestriction size_restriction>
-static inline void EvacuateObject(Map* map,
-                                  HeapObject** slot,
-                                  HeapObject* object,
-                                  int object_size) {
-  ASSERT((size_restriction != SMALL) ||
-         (object_size <= Page::kMaxHeapObjectSize));
-  ASSERT(object->Size() == object_size);
-
-  if (Heap::ShouldBePromoted(object->address(), object_size)) {
-    Object* result;
-
-    if ((size_restriction != SMALL) &&
-        (object_size > Page::kMaxHeapObjectSize)) {
-      result = Heap::lo_space()->AllocateRawFixedArray(object_size);
-    } else {
-      if (object_contents == DATA_OBJECT) {
-        result = Heap::old_data_space()->AllocateRaw(object_size);
-      } else {
-        result = Heap::old_pointer_space()->AllocateRaw(object_size);
-      }
-    }
-
-    if (!result->IsFailure()) {
-      HeapObject* target = HeapObject::cast(result);
-      *slot = MigrateObject(object, target, object_size);
-
-      if (object_contents == POINTER_OBJECT) {
-        promotion_queue.insert(target, object_size);
-      }
-
-      Heap::tracer()->increment_promoted_objects_size(object_size);
-      return;
-    }
-  }
-  Object* result = Heap::new_space()->AllocateRaw(object_size);
-  ASSERT(!result->IsFailure());
-  *slot = MigrateObject(object, HeapObject::cast(result), object_size);
-  return;
-}
-
-
-template<int object_size_in_words, ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  const int object_size = object_size_in_words << kPointerSizeLog2;
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-template<ObjectContents object_contents>
-static inline void EvacuateObjectOfFixedSize(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  int object_size = map->instance_size();
-  EvacuateObject<object_contents, SMALL>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateFixedArray(Map* map,
-                                      HeapObject** slot,
-                                      HeapObject* object) {
-  int object_size = FixedArray::cast(object)->FixedArraySize();
-  EvacuateObject<POINTER_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateByteArray(Map* map,
-                                     HeapObject** slot,
-                                     HeapObject* object) {
-  int object_size = ByteArray::cast(object)->ByteArraySize();
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static Scavenger GetScavengerForSize(int object_size,
-                                     ObjectContents object_contents) {
-  ASSERT(IsAligned(object_size, kPointerSize));
-  ASSERT(object_size < Page::kMaxHeapObjectSize);
-
-  switch (object_size >> kPointerSizeLog2) {
-#define CASE(n)                                           \
-    case n:                                               \
-      if (object_contents == DATA_OBJECT) {               \
-        return static_cast<Scavenger>(                    \
-          &EvacuateObjectOfFixedSize<n, DATA_OBJECT>);    \
-      } else {                                            \
-        return static_cast<Scavenger>(                    \
-          &EvacuateObjectOfFixedSize<n, POINTER_OBJECT>); \
-      }
-
-    CASE(1);
-    CASE(2);
-    CASE(3);
-    CASE(4);
-    CASE(5);
-    CASE(6);
-    CASE(7);
-    CASE(8);
-    CASE(9);
-    CASE(10);
-    CASE(11);
-    CASE(12);
-    CASE(13);
-    CASE(14);
-    CASE(15);
-    CASE(16);
-    default:
-      if (object_contents == DATA_OBJECT) {
-        return static_cast<Scavenger>(&EvacuateObjectOfFixedSize<DATA_OBJECT>);
-      } else {
-        return static_cast<Scavenger>(
-            &EvacuateObjectOfFixedSize<POINTER_OBJECT>);
-      }
-
-#undef CASE
-  }
-}
-
-
-static inline void EvacuateSeqAsciiString(Map* map,
-                                          HeapObject** slot,
-                                          HeapObject* object) {
-  int object_size = SeqAsciiString::cast(object)->
-      SeqAsciiStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
-}
-
-
-static inline void EvacuateSeqTwoByteString(Map* map,
-                                            HeapObject** slot,
-                                            HeapObject* object) {
-  int object_size = SeqTwoByteString::cast(object)->
-      SeqTwoByteStringSize(map->instance_type());
-  EvacuateObject<DATA_OBJECT, UNKNOWN_SIZE>(map, slot, object, object_size);
+static inline bool IsShortcutCandidate(HeapObject* object, Map* map) {
+  STATIC_ASSERT(kNotStringTag != 0 && kSymbolTag != 0);
+  ASSERT(object->map() == map);
+  InstanceType type = map->instance_type();
+  if ((type & kShortcutTypeMask) != kShortcutTypeTag) return false;
+  ASSERT(object->IsString() && !object->IsSymbol());
+  return ConsString::cast(object)->unchecked_second() == Heap::empty_string();
 }
 
 
-static inline bool IsShortcutCandidate(int type) {
-  return ((type & kShortcutTypeMask) == kShortcutTypeTag);
-}
-
-
-static inline void EvacuateShortcutCandidate(Map* map,
-                                             HeapObject** slot,
-                                             HeapObject* object) {
-  ASSERT(IsShortcutCandidate(map->instance_type()));
-
-  if (ConsString::cast(object)->unchecked_second() == Heap::empty_string()) {
-    HeapObject* first =
-        HeapObject::cast(ConsString::cast(object)->unchecked_first());
-
-    *slot = first;
-
-    if (!Heap::InNewSpace(first)) {
-      object->set_map_word(MapWord::FromForwardingAddress(first));
-      return;
-    }
+void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
+  ASSERT(InFromSpace(object));
+  MapWord first_word = object->map_word();
+  ASSERT(!first_word.IsForwardingAddress());
 
-    MapWord first_word = first->map_word();
+  // Optimization: Bypass flattened ConsString objects.
+  if (IsShortcutCandidate(object, first_word.ToMap())) {
+    object = HeapObject::cast(ConsString::cast(object)->unchecked_first());
+    *p = object;
+    // After patching *p we have to repeat the checks that object is in the
+    // active semispace of the young generation and not already copied.
+    if (!InNewSpace(object)) return;
+    first_word = object->map_word();
     if (first_word.IsForwardingAddress()) {
-      HeapObject* target = first_word.ToForwardingAddress();
-
-      *slot = target;
-      object->set_map_word(MapWord::FromForwardingAddress(target));
+      *p = first_word.ToForwardingAddress();
       return;
     }
-
-    first->map()->Scavenge(slot, first);
-    object->set_map_word(MapWord::FromForwardingAddress(*slot));
-    return;
   }
 
-  int object_size = ConsString::kSize;
-  EvacuateObject<POINTER_OBJECT, SMALL>(map, slot, object, object_size);
-}
-
+  int object_size = object->SizeFromMap(first_word.ToMap());
+  // We rely on live objects in new space to be at least two pointers,
+  // so we can store the from-space address and map pointer of promoted
+  // objects in the to space.
+  ASSERT(object_size >= 2 * kPointerSize);
 
-Scavenger Heap::GetScavenger(int instance_type, int instance_size) {
-  if (instance_type < FIRST_NONSTRING_TYPE) {
-    switch (instance_type & kStringRepresentationMask) {
-      case kSeqStringTag:
-        if ((instance_type & kStringEncodingMask) == kAsciiStringTag) {
-          return &EvacuateSeqAsciiString;
+  // If the object should be promoted, we try to copy it to old space.
+  if (ShouldBePromoted(object->address(), object_size)) {
+    Object* result;
+    if (object_size > MaxObjectSizeInPagedSpace()) {
+      result = lo_space_->AllocateRawFixedArray(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+
+        if (object->IsFixedArray()) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later.  Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
         } else {
-          return &EvacuateSeqTwoByteString;
+          // In large object space only fixed arrays might possibly contain
+          // intergenerational references.
+          // All other objects can be copied immediately and not revisited.
+          *p = MigrateObject(object, target, object_size);
         }
 
-      case kConsStringTag:
-        if (IsShortcutCandidate(instance_type)) {
-          return &EvacuateShortcutCandidate;
+        tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
+    } else {
+      OldSpace* target_space = Heap::TargetSpace(object);
+      ASSERT(target_space == Heap::old_pointer_space_ ||
+             target_space == Heap::old_data_space_);
+      result = target_space->AllocateRaw(object_size);
+      if (!result->IsFailure()) {
+        HeapObject* target = HeapObject::cast(result);
+        if (target_space == Heap::old_pointer_space_) {
+          // Save the from-space object pointer and its map pointer at the
+          // top of the to space to be swept and copied later.  Write the
+          // forwarding address over the map word of the from-space
+          // object.
+          promotion_queue.insert(object, first_word.ToMap());
+          object->set_map_word(MapWord::FromForwardingAddress(target));
+
+          // Give the space allocated for the result a proper map by
+          // treating it as a free list node (not linked into the free
+          // list).
+          FreeListNode* node = FreeListNode::FromAddress(target->address());
+          node->set_size(object_size);
+
+          *p = target;
         } else {
-          ASSERT(instance_size == ConsString::kSize);
-          return GetScavengerForSize(ConsString::kSize, POINTER_OBJECT);
+          // Objects promoted to the data space can be copied immediately
+          // and not revisited---we will never sweep that space for
+          // pointers and the copied objects do not contain pointers to
+          // new space objects.
+          *p = MigrateObject(object, target, object_size);
+#ifdef DEBUG
+          VerifyNonPointerSpacePointersVisitor v;
+          (*p)->Iterate(&v);
+#endif
         }
-
-      case kExternalStringTag:
-        ASSERT(instance_size == ExternalString::kSize);
-        return GetScavengerForSize(ExternalString::kSize, DATA_OBJECT);
+        tracer()->increment_promoted_objects_size(object_size);
+        return;
+      }
     }
-    UNREACHABLE();
   }
-
-  switch (instance_type) {
-    case BYTE_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateByteArray);
-
-    case FIXED_ARRAY_TYPE:
-      return reinterpret_cast<Scavenger>(&EvacuateFixedArray);
-
-    case JS_OBJECT_TYPE:
-    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
-    case JS_VALUE_TYPE:
-    case JS_ARRAY_TYPE:
-    case JS_REGEXP_TYPE:
-    case JS_FUNCTION_TYPE:
-    case JS_GLOBAL_PROXY_TYPE:
-    case JS_GLOBAL_OBJECT_TYPE:
-    case JS_BUILTINS_OBJECT_TYPE:
-      return GetScavengerForSize(instance_size, POINTER_OBJECT);
-
-    case ODDBALL_TYPE:
-      return NULL;
-
-    case PROXY_TYPE:
-      return GetScavengerForSize(Proxy::kSize, DATA_OBJECT);
-
-    case MAP_TYPE:
-      return NULL;
-
-    case CODE_TYPE:
-      return NULL;
-
-    case JS_GLOBAL_PROPERTY_CELL_TYPE:
-      return NULL;
-
-    case HEAP_NUMBER_TYPE:
-    case FILLER_TYPE:
-    case PIXEL_ARRAY_TYPE:
-    case EXTERNAL_BYTE_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_BYTE_ARRAY_TYPE:
-    case EXTERNAL_SHORT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_SHORT_ARRAY_TYPE:
-    case EXTERNAL_INT_ARRAY_TYPE:
-    case EXTERNAL_UNSIGNED_INT_ARRAY_TYPE:
-    case EXTERNAL_FLOAT_ARRAY_TYPE:
-      return GetScavengerForSize(instance_size, DATA_OBJECT);
-
-    case SHARED_FUNCTION_INFO_TYPE:
-      return GetScavengerForSize(SharedFunctionInfo::kAlignedSize,
-                                 POINTER_OBJECT);
-
-#define MAKE_STRUCT_CASE(NAME, Name, name) \
-        case NAME##_TYPE:
-      STRUCT_LIST(MAKE_STRUCT_CASE)
-#undef MAKE_STRUCT_CASE
-          return GetScavengerForSize(instance_size, POINTER_OBJECT);
-    default:
-      UNREACHABLE();
-      return NULL;
-  }
-}
-
-
-void Heap::ScavengeObjectSlow(HeapObject** p, HeapObject* object) {
-  ASSERT(InFromSpace(object));
-  MapWord first_word = object->map_word();
-  ASSERT(!first_word.IsForwardingAddress());
-  Map* map = first_word.ToMap();
-  map->Scavenge(p, object);
+  // The object should remain in new space or the old space allocation failed.
+  Object* result = new_space_.AllocateRaw(object_size);
+  // Failed allocation at this point is utterly unexpected.
+  ASSERT(!result->IsFailure());
+  *p = MigrateObject(object, HeapObject::cast(result), object_size);
 }
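
The promotion paths restored above all rely on the same forwarding protocol: the from-space object's map word is overwritten with the new address (MapWord::FromForwardingAddress), and a later visit recognizes and follows it via IsForwardingAddress()/ToForwardingAddress(), as the cons-string shortcut does after repatching *p. A self-contained sketch of that idea; the low-bit tag used here is an assumption made for illustration only, not V8's actual MapWord encoding:

#include <cassert>
#include <stdint.h>

// Sketch of storing a forwarding address in the slot that normally holds the
// map pointer. Assumes targets are at least 2-byte aligned, so the low bit is
// free to act as the "this is a forwarding address" marker.
struct ForwardingWord {
  uintptr_t value;

  static ForwardingWord FromForwardingAddress(void* target) {
    ForwardingWord word;
    word.value = reinterpret_cast<uintptr_t>(target) | 1u;
    return word;
  }

  bool IsForwardingAddress() const { return (value & 1u) != 0; }

  void* ToForwardingAddress() const {
    assert(IsForwardingAddress());
    return reinterpret_cast<void*>(value & ~static_cast<uintptr_t>(1));
  }
};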
 
 
@@ -1404,8 +1243,6 @@ Object* Heap::AllocatePartialMap(InstanceType instance_type,
   reinterpret_cast<Map*>(result)->set_map(raw_unchecked_meta_map());
   reinterpret_cast<Map*>(result)->set_instance_type(instance_type);
   reinterpret_cast<Map*>(result)->set_instance_size(instance_size);
-  reinterpret_cast<Map*>(result)->
-      set_scavenger(GetScavenger(instance_type, instance_size));
   reinterpret_cast<Map*>(result)->set_inobject_properties(0);
   reinterpret_cast<Map*>(result)->set_pre_allocated_property_fields(0);
   reinterpret_cast<Map*>(result)->set_unused_property_fields(0);
@@ -1422,7 +1259,6 @@ Object* Heap::AllocateMap(InstanceType instance_type, int instance_size) {
   Map* map = reinterpret_cast<Map*>(result);
   map->set_map(meta_map());
   map->set_instance_type(instance_type);
-  map->set_scavenger(GetScavenger(instance_type, instance_size));
   map->set_prototype(null_value());
   map->set_constructor(null_value());
   map->set_instance_size(instance_size);
@@ -3855,9 +3691,9 @@ bool Heap::IteratePointersInDirtyMapsRegion(
 }
 
 
-void Heap::IterateAndMarkPointersToFromSpace(Address start,
-                                             Address end,
-                                             ObjectSlotCallback callback) {
+void Heap::IterateAndMarkPointersToNewSpace(Address start,
+                                            Address end,
+                                            ObjectSlotCallback callback) {
   Address slot_address = start;
   Page* page = Page::FromAddress(start);
 
@@ -3865,7 +3701,7 @@ void Heap::IterateAndMarkPointersToFromSpace(Address start,
 
   while (slot_address < end) {
     Object** slot = reinterpret_cast<Object**>(slot_address);
-    if (Heap::InFromSpace(*slot)) {
+    if (Heap::InNewSpace(*slot)) {
       ASSERT((*slot)->IsHeapObject());
       callback(reinterpret_cast<HeapObject**>(slot));
       if (Heap::InNewSpace(*slot)) {
diff --git a/src/heap.h b/src/heap.h
index 1349e51199858e0b1193c2150b4f2b701e7a35a6..df3ba0ea2a24b9c3b1f85d532c6d31fcf80a2b85 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -774,12 +774,11 @@ class Heap : public AllStatic {
                                       DirtyRegionCallback visit_dirty_region,
                                       ObjectSlotCallback callback);
 
-  // Iterate pointers to from semispace of new space found in memory interval
-  // from start to end.
+  // Iterate pointers to new space found in memory interval from start to end.
   // Update dirty marks for page containing start address.
-  static void IterateAndMarkPointersToFromSpace(Address start,
-                                                Address end,
-                                                ObjectSlotCallback callback);
+  static void IterateAndMarkPointersToNewSpace(Address start,
+                                               Address end,
+                                               ObjectSlotCallback callback);
 
   // Iterate pointers to new space found in memory interval from start to end.
   // Return true if pointers to new space was found.
@@ -986,8 +985,6 @@ class Heap : public AllStatic {
 
   static void RecordStats(HeapStats* stats);
 
-  static Scavenger GetScavenger(int instance_type, int instance_size);
-
   // Copy block of memory from src to dst. Size of block should be aligned
   // by pointer size.
   static inline void CopyBlock(Address dst, Address src, int byte_size);
@@ -1235,7 +1232,17 @@ class Heap : public AllStatic {
     set_instanceof_cache_function(the_hole_value());
   }
 
+  // Helper function used by CopyObject to copy a source object to an
+  // allocated target object and update the forwarding pointer in the source
+  // object.  Returns the target object.
+  static inline HeapObject* MigrateObject(HeapObject* source,
+                                          HeapObject* target,
+                                          int size);
+
 #if defined(DEBUG) || defined(ENABLE_LOGGING_AND_PROFILING)
+  // Record the copy of an object in the NewSpace's statistics.
+  static void RecordCopiedObject(HeapObject* obj);
+
   // Record statistics before and after garbage collection.
   static void ReportStatisticsBeforeGC();
   static void ReportStatisticsAfterGC();
diff --git a/src/objects-inl.h b/src/objects-inl.h
index e764e39fcc5a83c2ed2a57629285b15ec3f1263c..0e455508456d7b23465fa893194b51993e316c78 100644
--- a/src/objects-inl.h
+++ b/src/objects-inl.h
@@ -2060,23 +2060,6 @@ void ExternalFloatArray::set(int index, float value) {
   ptr[index] = value;
 }
 
-inline Scavenger Map::scavenger() {
-  Scavenger callback = reinterpret_cast<Scavenger>(
-      READ_INTPTR_FIELD(this, kIterateBodyCallbackOffset));
-
-  ASSERT(callback == Heap::GetScavenger(instance_type(),
-                                        instance_size()));
-
-  return callback;
-}
-
-inline void Map::set_scavenger(Scavenger callback) {
-  ASSERT(!reinterpret_cast<Object*>(
-      reinterpret_cast<intptr_t>(callback))->IsHeapObject());
-  WRITE_INTPTR_FIELD(this,
-                     kIterateBodyCallbackOffset,
-                     reinterpret_cast<intptr_t>(callback));
-}
 
 int Map::instance_size() {
   return READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
diff --git a/src/objects.cc b/src/objects.cc
index 10c549c6b635ece1c9e081bf1ff6e1f49ff5395f..e79a5505c90db513ae3b18fbd25aa1931e9ff3a6 100644
--- a/src/objects.cc
+++ b/src/objects.cc
@@ -2190,8 +2190,6 @@ Object* JSObject::NormalizeProperties(PropertyNormalizationMode mode,
     int new_instance_size = map()->instance_size() - instance_size_delta;
     new_map->set_inobject_properties(0);
     new_map->set_instance_size(new_instance_size);
-    new_map->set_scavenger(Heap::GetScavenger(new_map->instance_type(),
-                                              new_map->instance_size()));
     Heap::CreateFillerObjectAt(this->address() + new_instance_size,
                                instance_size_delta);
   }
diff --git a/src/objects.h b/src/objects.h
index 8dfc75aa75be3c6d9a81b8d23fa299444dc48949..4a7dee6a836f9a9614d4b4032097db1274195a82 100644
--- a/src/objects.h
+++ b/src/objects.h
@@ -2899,7 +2899,6 @@ class Code: public HeapObject {
   DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
 };
 
-typedef void (*Scavenger)(Map* map, HeapObject** slot, HeapObject* object);
 
 // All heap objects have a Map that describes their structure.
 //  A Map contains information about:
@@ -3101,13 +3100,6 @@ class Map: public HeapObject {
   void MapVerify();
 #endif
 
-  inline Scavenger scavenger();
-  inline void set_scavenger(Scavenger callback);
-
-  inline void Scavenge(HeapObject** slot, HeapObject* obj) {
-    scavenger()(this, slot, obj);
-  }
-
   static const int kMaxPreAllocatedPropertyFields = 255;
 
   // Layout description.
@@ -3118,8 +3110,7 @@ class Map: public HeapObject {
   static const int kInstanceDescriptorsOffset =
       kConstructorOffset + kPointerSize;
   static const int kCodeCacheOffset = kInstanceDescriptorsOffset + kPointerSize;
-  static const int kIterateBodyCallbackOffset = kCodeCacheOffset + kPointerSize;
-  static const int kPadStart = kIterateBodyCallbackOffset + kPointerSize;
+  static const int kPadStart = kCodeCacheOffset + kPointerSize;
   static const int kSize = MAP_POINTER_ALIGN(kPadStart);
 
   // Layout of pointer fields. Heap iteration code relies on them
diff --git a/src/serialize.cc b/src/serialize.cc
index e8aed5496f14dd0061760b3c20c60e2e32ebe19c..a6a516a76d5ceb5896c47eb8bb4e76e282fd5659 100644
--- a/src/serialize.cc
+++ b/src/serialize.cc
@@ -673,14 +673,6 @@ void Deserializer::ReadObject(int space_number,
     LOG(SnapshotPositionEvent(address, source_->position()));
   }
   ReadChunk(current, limit, space_number, address);
-
-  if (space == Heap::map_space()) {
-    ASSERT(size == Map::kSize);
-    HeapObject* obj = HeapObject::FromAddress(address);
-    Map* map = reinterpret_cast<Map*>(obj);
-    map->set_scavenger(Heap::GetScavenger(map->instance_type(),
-                                          map->instance_size()));
-  }
 }