Process weak references between optimized JSFunctions on scavenges.
author     vegorov@chromium.org <vegorov@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Tue, 17 Apr 2012 10:37:41 +0000
committer  vegorov@chromium.org <vegorov@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
           Tue, 17 Apr 2012 10:37:41 +0000
R=mstarzinger@chromium.org

Review URL: https://chromiumcodereview.appspot.com/10091027

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11347 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
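
For context, this change makes the scavenger run the same weak-list processing that the
full collector uses (Heap::ProcessWeakReferences). The stand-alone sketch below is a
simplified illustration of that pattern only; Element, Retainer and ProcessWeakList are
made-up stand-ins for Context/JSFunction, WeakObjectRetainer and the real list walk. A
retainer decides per element whether it survives, possibly returning its forwarding
address, and dead elements are unlinked.

    #include <cstddef>

    // Stand-in for a heap object carrying a weak "next" link (in V8 this is
    // Context::NEXT_CONTEXT_LINK or JSFunction::kNextFunctionLinkOffset).
    struct Element {
      Element* next;       // weak link to the next element
      bool alive;          // did the collector keep this object?
      Element* forwarded;  // new location after evacuation, or NULL
    };

    // Decides what to keep; mirrors the role of WeakObjectRetainer::RetainAs.
    struct Retainer {
      Element* RetainAs(Element* e) const {
        if (!e->alive) return NULL;              // dead: drop it from the list
        return e->forwarded ? e->forwarded : e;  // moved: follow the forwarding address
      }
    };

    // Walks a weak singly-linked list, unlinking dead elements and rewriting
    // links to forwarding addresses, in the spirit of Heap::ProcessWeakReferences.
    Element* ProcessWeakList(Element* head, const Retainer& retainer) {
      Element* new_head = NULL;
      Element* tail = NULL;
      for (Element* candidate = head; candidate != NULL; ) {
        Element* next = candidate->next;
        Element* retained = retainer.RetainAs(candidate);
        if (retained != NULL) {
          if (tail == NULL) {
            new_head = retained;    // first survivor becomes the new head
          } else {
            tail->next = retained;  // splice behind the previous survivor
          }
          tail = retained;
        }
        candidate = next;           // dead elements are simply skipped
      }
      if (tail != NULL) tail->next = NULL;  // terminate the list
      return new_head;
    }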

src/heap.cc
src/incremental-marking.cc
src/mark-compact.cc
src/mark-compact.h
src/objects-visiting-inl.h
src/objects-visiting.h

diff --git a/src/heap.cc b/src/heap.cc
index fcea7c9..9081017 100644
@@ -1124,6 +1124,27 @@ void PromotionQueue::RelocateQueueHead() {
 }
 
 
+class ScavengeWeakObjectRetainer : public WeakObjectRetainer {
+ public:
+  explicit ScavengeWeakObjectRetainer(Heap* heap) : heap_(heap) { }
+
+  virtual Object* RetainAs(Object* object) {
+    if (!heap_->InFromSpace(object)) {
+      return object;
+    }
+
+    MapWord map_word = HeapObject::cast(object)->map_word();
+    if (map_word.IsForwardingAddress()) {
+      return map_word.ToForwardingAddress();
+    }
+    return NULL;
+  }
+
+ private:
+  Heap* heap_;
+};
+
+
 void Heap::Scavenge() {
 #ifdef DEBUG
   if (FLAG_verify_heap) VerifyNonPointerSpacePointers();
@@ -1222,6 +1243,9 @@ void Heap::Scavenge() {
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
 
+  ScavengeWeakObjectRetainer weak_object_retainer(this);
+  ProcessWeakReferences(&weak_object_retainer);
+
   ASSERT(new_space_front == new_space_.top());
 
   // Set age mark.
@@ -1308,7 +1332,8 @@ void Heap::UpdateReferencesInExternalStringTable(
 
 static Object* ProcessFunctionWeakReferences(Heap* heap,
                                              Object* function,
-                                             WeakObjectRetainer* retainer) {
+                                             WeakObjectRetainer* retainer,
+                                             bool record_slots) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   JSFunction* tail = NULL;
@@ -1325,6 +1350,12 @@ static Object* ProcessFunctionWeakReferences(Heap* heap,
         // Subsequent elements in the list.
         ASSERT(tail != NULL);
         tail->set_next_function_link(retain);
+        if (record_slots) {
+          Object** next_function =
+              HeapObject::RawField(tail, JSFunction::kNextFunctionLinkOffset);
+          heap->mark_compact_collector()->RecordSlot(
+              next_function, next_function, retain);
+        }
       }
       // Retained function is new tail.
       candidate_function = reinterpret_cast<JSFunction*>(retain);
@@ -1353,6 +1384,15 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
   Object* head = undefined;
   Context* tail = NULL;
   Object* candidate = global_contexts_list_;
+
+  // We don't record weak slots during marking or scavenges.
+  // Instead we do it once when we complete the mark-compact cycle.
+  // Note that the write barrier has no effect if we are already in the middle
+  // of a compacting mark-sweep cycle, and we have to record slots manually.
+  bool record_slots =
+      gc_state() == MARK_COMPACT &&
+      mark_compact_collector()->is_compacting();
+
   while (candidate != undefined) {
     // Check whether to keep the candidate in the list.
     Context* candidate_context = reinterpret_cast<Context*>(candidate);
@@ -1368,6 +1408,14 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
                             Context::NEXT_CONTEXT_LINK,
                             retain,
                             UPDATE_WRITE_BARRIER);
+
+        if (record_slots) {
+          Object** next_context =
+              HeapObject::RawField(
+                  tail, FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK));
+          mark_compact_collector()->RecordSlot(
+              next_context, next_context, retain);
+        }
       }
       // Retained context is new tail.
       candidate_context = reinterpret_cast<Context*>(retain);
@@ -1380,11 +1428,19 @@ void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
           ProcessFunctionWeakReferences(
               this,
               candidate_context->get(Context::OPTIMIZED_FUNCTIONS_LIST),
-              retainer);
+              retainer,
+              record_slots);
       candidate_context->set_unchecked(this,
                                        Context::OPTIMIZED_FUNCTIONS_LIST,
                                        function_list_head,
                                        UPDATE_WRITE_BARRIER);
+      if (record_slots) {
+        Object** optimized_functions =
+            HeapObject::RawField(
+                tail, FixedArray::SizeFor(Context::OPTIMIZED_FUNCTIONS_LIST));
+        mark_compact_collector()->RecordSlot(
+            optimized_functions, optimized_functions, function_list_head);
+      }
     }
 
     // Move to next element in the list.
@@ -1678,7 +1734,12 @@ class ScavengingVisitor : public StaticVisitorBase {
         MigrateObject(heap, object, target, object_size);
 
         if (object_contents == POINTER_OBJECT) {
-          heap->promotion_queue()->insert(target, object_size);
+          if (map->instance_type() == JS_FUNCTION_TYPE) {
+            heap->promotion_queue()->insert(
+                target, JSFunction::kNonWeakFieldsEndOffset);
+          } else {
+            heap->promotion_queue()->insert(target, object_size);
+          }
         }
 
         heap->tracer()->increment_promoted_objects_size(object_size);
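
A note on the src/heap.cc changes above: the comment in Heap::ProcessWeakReferences
explains that the write barrier has no effect while a compacting mark-sweep cycle is in
progress, so the weak-link slots rewritten there are reported to the collector
explicitly via MarkCompactCollector::RecordSlot. In addition, when the scavenger
promotes a JSFunction it enters the object into the promotion queue with
JSFunction::kNonWeakFieldsEndOffset as its size, so the later pointer iteration stops
before the weak next-function link. The toy sketch below (not V8 code; SlotRecorder and
UpdateWeakLink are invented for illustration) shows the "update the field, then record
the slot when compacting" shape of that logic.

    #include <vector>

    // Stand-in for MarkCompactCollector: remembers slots that must be updated
    // when their targets are evacuated.
    struct SlotRecorder {
      void Record(void** slot) { recorded_.push_back(slot); }
      std::vector<void**> recorded_;
    };

    // Rewrite a weak link in place; during a compacting cycle the write barrier
    // cannot be relied on, so the slot is recorded manually.
    void UpdateWeakLink(void** slot, void* new_target,
                        bool record_slots, SlotRecorder* collector) {
      *slot = new_target;
      if (record_slots) collector->Record(slot);
    }
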
diff --git a/src/incremental-marking.cc b/src/incremental-marking.cc
index 7bbd521..2413b67 100644
@@ -830,6 +830,19 @@ void IncrementalMarking::Step(intptr_t allocated_bytes,
         MarkObjectGreyDoNotEnqueue(ctx->normalized_map_cache());
 
         VisitGlobalContext(ctx, &marking_visitor);
+      } else if (map->instance_type() == JS_FUNCTION_TYPE) {
+        marking_visitor.VisitPointers(
+            HeapObject::RawField(obj, JSFunction::kPropertiesOffset),
+            HeapObject::RawField(obj, JSFunction::kCodeEntryOffset));
+
+        marking_visitor.VisitCodeEntry(
+            obj->address() + JSFunction::kCodeEntryOffset);
+
+        marking_visitor.VisitPointers(
+            HeapObject::RawField(obj,
+                                 JSFunction::kCodeEntryOffset + kPointerSize),
+            HeapObject::RawField(obj,
+                                 JSFunction::kNonWeakFieldsEndOffset));
       } else {
         obj->IterateBody(map->instance_type(), size, &marking_visitor);
       }
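
In the incremental-marking change above, a JSFunction body is now visited field by
field instead of with the generic body iterator: the tagged fields before the code
entry, the code entry itself via VisitCodeEntry, and then the tagged fields up to
JSFunction::kNonWeakFieldsEndOffset. Roughly, the layout assumed by that split is the
following (a symbolic sketch based on the offsets used above, not literal values):

    // JSFunction layout as visited above:
    //   [kPropertiesOffset, kCodeEntryOffset)                        tagged pointers
    //   [kCodeEntryOffset, kCodeEntryOffset + kPointerSize)          raw code entry (VisitCodeEntry)
    //   [kCodeEntryOffset + kPointerSize, kNonWeakFieldsEndOffset)   tagged pointers
    //   [kNonWeakFieldsEndOffset, kSize)                             weak next-function link (skipped)
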
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 346b9a1..0f6a566 100644
@@ -1525,12 +1525,6 @@ class StaticMarkingVisitor : public StaticVisitorBase {
                              JSFunction::kCodeEntryOffset + kPointerSize),
         HeapObject::RawField(object,
                              JSFunction::kNonWeakFieldsEndOffset));
-
-    // Don't visit the next function list field as it is a weak reference.
-    Object** next_function =
-        HeapObject::RawField(object, JSFunction::kNextFunctionLinkOffset);
-    heap->mark_compact_collector()->RecordSlot(
-        next_function, next_function, *next_function);
   }
 
   static inline void VisitJSRegExpFields(Map* map,
diff --git a/src/mark-compact.h b/src/mark-compact.h
index 66ffd19..f8488bb 100644
@@ -544,6 +544,8 @@ class MarkCompactCollector {
 
   void ClearMarkbits();
 
+  bool is_compacting() const { return compacting_; }
+
  private:
   MarkCompactCollector();
   ~MarkCompactCollector();
diff --git a/src/objects-visiting-inl.h b/src/objects-visiting-inl.h
index 627d1bc..8ba92f7 100644
@@ -72,9 +72,7 @@ void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
 
   table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
 
-  table_.Register(kVisitJSFunction,
-                  &JSObjectVisitor::
-                      template VisitSpecialized<JSFunction::kSize>);
+  table_.Register(kVisitJSFunction, &VisitJSFunction);
 
   table_.Register(kVisitFreeSpace, &VisitFreeSpace);
 
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 26e79ae..b476dfe 100644
@@ -289,6 +289,23 @@ class StaticNewSpaceVisitor : public StaticVisitorBase {
   }
 
  private:
+  static inline int VisitJSFunction(Map* map, HeapObject* object) {
+    Heap* heap = map->GetHeap();
+    VisitPointers(heap,
+                  HeapObject::RawField(object, JSFunction::kPropertiesOffset),
+                  HeapObject::RawField(object, JSFunction::kCodeEntryOffset));
+
+    // Don't visit code entry. We are using this visitor only during scavenges.
+
+    VisitPointers(
+        heap,
+        HeapObject::RawField(object,
+                             JSFunction::kCodeEntryOffset + kPointerSize),
+        HeapObject::RawField(object,
+                             JSFunction::kNonWeakFieldsEndOffset));
+    return JSFunction::kSize;
+  }
+
   static inline int VisitByteArray(Map* map, HeapObject* object) {
     return reinterpret_cast<ByteArray*>(object)->ByteArraySize();
   }