Simplified slot buffer logic during weak list visiting.
author  svenpanne@chromium.org <svenpanne@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Tue, 13 May 2014 06:22:49 +0000 (06:22 +0000)
committer  svenpanne@chromium.org <svenpanne@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Tue, 13 May 2014 06:22:49 +0000 (06:22 +0000)
Tiny reformatting cleanup on the way.

R=mstarzinger@chromium.org

Review URL: https://codereview.chromium.org/282493004

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@21278 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/heap.cc
src/heap.h
src/mark-compact.cc
src/objects-visiting.cc
src/objects-visiting.h
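
In short: the record_slots flag that Heap::ProcessWeakReferences used to compute and thread through VisitWeakList and every visitor hook is gone; VisitWeakList now derives it internally from a new file-local helper, MustRecordSlots(heap). A minimal self-contained sketch of that centralized decision (the GCState, MarkCompactCollector and Heap types below are simplified stand-ins for illustration, not the real V8 classes):

    #include <cstdio>

    // Stand-in types, just enough to make the decision logic compile on
    // its own; the real classes live in V8's heap sources.
    enum GCState { NOT_IN_GC, SCAVENGE, MARK_COMPACT };

    struct MarkCompactCollector {
      bool compacting;
      bool is_compacting() const { return compacting; }
    };

    struct Heap {
      GCState state;
      MarkCompactCollector collector;
      GCState gc_state() const { return state; }
      MarkCompactCollector* mark_compact_collector() { return &collector; }
    };

    // The patch moves this decision out of Heap::ProcessWeakReferences and
    // into a helper evaluated once inside the weak-list traversal itself.
    static bool MustRecordSlots(Heap* heap) {
      return heap->gc_state() == MARK_COMPACT &&
             heap->mark_compact_collector()->is_compacting();
    }

    int main() {
      Heap heap = { MARK_COMPACT, { true } };
      std::printf("record slots: %s\n", MustRecordSlots(&heap) ? "yes" : "no");
      return 0;
    }

With the decision localized, VisitWeakList's signature shrinks from (Heap*, Object*, WeakObjectRetainer*, bool) to (Heap*, Object*, WeakObjectRetainer*), as the hunks below show.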

diff --git a/src/heap.cc b/src/heap.cc
index ddec9a9..f0c9154 100644
@@ -1681,36 +1681,24 @@ void Heap::UpdateReferencesInExternalStringTable(
 
 
 void Heap::ProcessWeakReferences(WeakObjectRetainer* retainer) {
-  // We don't record weak slots during marking or scavenges.
-  // Instead we do it once when we complete the mark-compact cycle.
-  // Note that the write barrier has no effect if we are already in the middle
-  // of a compacting mark-sweep cycle and we have to record slots manually.
-  bool record_slots =
-      gc_state() == MARK_COMPACT &&
-      mark_compact_collector()->is_compacting();
-  ProcessArrayBuffers(retainer, record_slots);
-  ProcessNativeContexts(retainer, record_slots);
+  ProcessArrayBuffers(retainer);
+  ProcessNativeContexts(retainer);
   // TODO(mvstanton): AllocationSites only need to be processed during
   // MARK_COMPACT, as they live in old space. Verify and address.
-  ProcessAllocationSites(retainer, record_slots);
+  ProcessAllocationSites(retainer);
 }
 
-void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer,
-                                 bool record_slots) {
-  Object* head =
-      VisitWeakList<Context>(
-          this, native_contexts_list(), retainer, record_slots);
+
+void Heap::ProcessNativeContexts(WeakObjectRetainer* retainer) {
+  Object* head = VisitWeakList<Context>(this, native_contexts_list(), retainer);
   // Update the head of the list of contexts.
   set_native_contexts_list(head);
 }
 
 
-void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer,
-                               bool record_slots) {
+void Heap::ProcessArrayBuffers(WeakObjectRetainer* retainer) {
   Object* array_buffer_obj =
-      VisitWeakList<JSArrayBuffer>(this,
-                                   array_buffers_list(),
-                                   retainer, record_slots);
+      VisitWeakList<JSArrayBuffer>(this, array_buffers_list(), retainer);
   set_array_buffers_list(array_buffer_obj);
 }
 
@@ -1726,12 +1714,9 @@ void Heap::TearDownArrayBuffers() {
 }
 
 
-void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer,
-                                  bool record_slots) {
+void Heap::ProcessAllocationSites(WeakObjectRetainer* retainer) {
   Object* allocation_site_obj =
-      VisitWeakList<AllocationSite>(this,
-                                    allocation_sites_list(),
-                                    retainer, record_slots);
+      VisitWeakList<AllocationSite>(this, allocation_sites_list(), retainer);
   set_allocation_sites_list(allocation_site_obj);
 }
 
diff --git a/src/heap.h b/src/heap.h
index 0ea4529..8e3cd3f 100644
@@ -1974,9 +1974,9 @@ class Heap {
   // Code to be run before and after mark-compact.
   void MarkCompactPrologue();
 
-  void ProcessNativeContexts(WeakObjectRetainer* retainer, bool record_slots);
-  void ProcessArrayBuffers(WeakObjectRetainer* retainer, bool record_slots);
-  void ProcessAllocationSites(WeakObjectRetainer* retainer, bool record_slots);
+  void ProcessNativeContexts(WeakObjectRetainer* retainer);
+  void ProcessArrayBuffers(WeakObjectRetainer* retainer);
+  void ProcessAllocationSites(WeakObjectRetainer* retainer);
 
   // Deopts all code that contains allocation instructions, whether tenured or
   // not. Moreover it clears the pretenuring allocation site statistics.
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 3801396..ff6d2e3 100644
@@ -2757,7 +2757,7 @@ int MarkCompactCollector::ClearNonLiveDependentCodeInGroup(
       ASSERT(start + 1 == end);
       Object* old_head = entries->object_at(start);
       MarkCompactWeakObjectRetainer retainer;
-      Object* head = VisitWeakList<Code>(heap(), old_head, &retainer, true);
+      Object* head = VisitWeakList<Code>(heap(), old_head, &retainer);
       entries->set_object_at(new_start, head);
       Object** slot = entries->slot_at(new_start);
       RecordSlot(slot, slot, head);
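
Note that this call site previously hard-coded record_slots to true. After the change, VisitWeakList derives the value itself via MustRecordSlots(heap), introduced in objects-visiting.cc below. Since ClearNonLiveDependentCodeInGroup only runs during mark-compact, the answer can differ only when the collector is not compacting, and recorded slots matter only to a compacting collection anyway.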
diff --git a/src/objects-visiting.cc b/src/objects-visiting.cc
index 24cff34..aea8a09 100644
@@ -191,6 +191,16 @@ StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
 }
 
 
+// We don't record weak slots during marking or scavenges. Instead we do it
+// once when we complete the mark-compact cycle. Note that the write barrier
+// has no effect if we are already in the middle of a compacting mark-sweep
+// cycle and we have to record slots manually.
+static bool MustRecordSlots(Heap* heap) {
+  return heap->gc_state() == Heap::MARK_COMPACT &&
+      heap->mark_compact_collector()->is_compacting();
+}
+
+
 template <class T>
 struct WeakListVisitor;
 
@@ -198,12 +208,12 @@ struct WeakListVisitor;
 template <class T>
 Object* VisitWeakList(Heap* heap,
                       Object* list,
-                      WeakObjectRetainer* retainer,
-                      bool record_slots) {
+                      WeakObjectRetainer* retainer) {
   Object* undefined = heap->undefined_value();
   Object* head = undefined;
   T* tail = NULL;
   MarkCompactCollector* collector = heap->mark_compact_collector();
+  bool record_slots = MustRecordSlots(heap);
   while (list != undefined) {
     // Check whether to keep the candidate in the list.
     T* candidate = reinterpret_cast<T*>(list);
@@ -229,8 +239,7 @@ Object* VisitWeakList(Heap* heap,
 
 
       // tail is a live object, visit it.
-      WeakListVisitor<T>::VisitLiveObject(
-          heap, tail, retainer, record_slots);
+      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
     } else {
       WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
     }
@@ -273,12 +282,9 @@ struct WeakListVisitor<JSFunction> {
     return JSFunction::kNextFunctionLinkOffset;
   }
 
-  static void VisitLiveObject(Heap*, JSFunction*,
-                              WeakObjectRetainer*, bool) {
-  }
+  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}
 
-  static void VisitPhantomObject(Heap*, JSFunction*) {
-  }
+  static void VisitPhantomObject(Heap*, JSFunction*) {}
 };
 
 
@@ -296,12 +302,9 @@ struct WeakListVisitor<Code> {
     return Code::kNextCodeLinkOffset;
   }
 
-  static void VisitLiveObject(Heap*, Code*,
-                              WeakObjectRetainer*, bool) {
-  }
+  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}
 
-  static void VisitPhantomObject(Heap*, Code*) {
-  }
+  static void VisitPhantomObject(Heap*, Code*) {}
 };
 
 
@@ -317,33 +320,32 @@ struct WeakListVisitor<Context> {
     return context->get(Context::NEXT_CONTEXT_LINK);
   }
 
+  static int WeakNextOffset() {
+    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
+  }
+
   static void VisitLiveObject(Heap* heap,
                               Context* context,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {
+                              WeakObjectRetainer* retainer) {
     // Process the three weak lists linked off the context.
-    DoWeakList<JSFunction>(heap, context, retainer, record_slots,
+    DoWeakList<JSFunction>(heap, context, retainer,
         Context::OPTIMIZED_FUNCTIONS_LIST);
-    DoWeakList<Code>(heap, context, retainer, record_slots,
-        Context::OPTIMIZED_CODE_LIST);
-    DoWeakList<Code>(heap, context, retainer, record_slots,
-        Context::DEOPTIMIZED_CODE_LIST);
+    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
+    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
   }
 
   template<class T>
   static void DoWeakList(Heap* heap,
                          Context* context,
                          WeakObjectRetainer* retainer,
-                         bool record_slots,
                          int index) {
     // Visit the weak list, removing dead intermediate elements.
-    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer,
-        record_slots);
+    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);
 
     // Update the list head.
     context->set(index, list_head, UPDATE_WRITE_BARRIER);
 
-    if (record_slots) {
+    if (MustRecordSlots(heap)) {
       // Record the updated slot if necessary.
       Object** head_slot = HeapObject::RawField(
           context, FixedArray::SizeFor(index));
@@ -358,10 +360,6 @@ struct WeakListVisitor<Context> {
     ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
     ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
   }
-
-  static int WeakNextOffset() {
-    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
-  }
 };
 
 
@@ -375,16 +373,13 @@ struct WeakListVisitor<JSArrayBufferView> {
     return obj->weak_next();
   }
 
-  static void VisitLiveObject(Heap*,
-                              JSArrayBufferView* obj,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {}
-
-  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
-
   static int WeakNextOffset() {
     return JSArrayBufferView::kWeakNextOffset;
   }
+
+  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}
+
+  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
 };
 
 
@@ -398,17 +393,20 @@ struct WeakListVisitor<JSArrayBuffer> {
     return obj->weak_next();
   }
 
+  static int WeakNextOffset() {
+    return JSArrayBuffer::kWeakNextOffset;
+  }
+
   static void VisitLiveObject(Heap* heap,
                               JSArrayBuffer* array_buffer,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {
+                              WeakObjectRetainer* retainer) {
     Object* typed_array_obj =
         VisitWeakList<JSArrayBufferView>(
             heap,
             array_buffer->weak_first_view(),
-            retainer, record_slots);
+            retainer);
     array_buffer->set_weak_first_view(typed_array_obj);
-    if (typed_array_obj != heap->undefined_value() && record_slots) {
+    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
       Object** slot = HeapObject::RawField(
           array_buffer, JSArrayBuffer::kWeakFirstViewOffset);
       heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
@@ -418,10 +416,6 @@ struct WeakListVisitor<JSArrayBuffer> {
   static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
     Runtime::FreeArrayBuffer(heap->isolate(), phantom);
   }
-
-  static int WeakNextOffset() {
-    return JSArrayBuffer::kWeakNextOffset;
-  }
 };
 
 
@@ -435,36 +429,33 @@ struct WeakListVisitor<AllocationSite> {
     return obj->weak_next();
   }
 
-  static void VisitLiveObject(Heap* heap,
-                              AllocationSite* site,
-                              WeakObjectRetainer* retainer,
-                              bool record_slots) {}
-
-  static void VisitPhantomObject(Heap* heap, AllocationSite* phantom) {}
-
   static int WeakNextOffset() {
     return AllocationSite::kWeakNextOffset;
   }
+
+  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}
+
+  static void VisitPhantomObject(Heap*, AllocationSite*) {}
 };
 
 
 template Object* VisitWeakList<Code>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<JSFunction>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<Context>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<JSArrayBuffer>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 
 template Object* VisitWeakList<AllocationSite>(
-    Heap* heap, Object* list, WeakObjectRetainer* retainer, bool record_slots);
+    Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 } }  // namespace v8::internal
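
The "tiny reformatting cleanup" from the commit message is visible in the hunks above as well: WeakNextOffset() now sits next to WeakNext() in every specialization, and the empty hooks collapse to one-line {} bodies. After the patch, each WeakListVisitor<T> specialization in this file fills in the same interface; a compilable outline with forward-declared stand-ins (the SetWeakNext hook the traversal uses to unlink dead elements is not shown in these hunks and is omitted here too):

    struct Object;               // stand-in forward declarations; the real
    struct Heap;                 // types are defined in V8's heap sources
    struct WeakObjectRetainer;

    // Per-type hooks each WeakListVisitor<T> specialization supplies after
    // this patch; VisitLiveObject no longer takes a record_slots bool.
    template <class T>
    struct WeakListVisitorOutline {
      static Object* WeakNext(T* obj);   // follow the weak link to the next element
      static int WeakNextOffset();       // byte offset of the link field, for slot recording
      static void VisitLiveObject(Heap* heap, T* obj, WeakObjectRetainer* retainer);
      static void VisitPhantomObject(Heap* heap, T* obj);  // element died; clean up
    };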
diff --git a/src/objects-visiting.h b/src/objects-visiting.h
index 05f8257..d9ab02a 100644
@@ -469,10 +469,7 @@ class WeakObjectRetainer;
 // pointers. The template parameter T is a WeakListVisitor that defines how to
 // access the next-element pointers.
 template <class T>
-Object* VisitWeakList(Heap* heap,
-                      Object* list,
-                      WeakObjectRetainer* retainer,
-                      bool record_slots);
+Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer);
 
 } }  // namespace v8::internal
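
Design-wise, the win is a single source of truth: the slot-recording policy is now decided once, next to the traversal that uses it, instead of being computed in Heap::ProcessWeakReferences and plumbed as a bool through VisitWeakList, its five explicit template instantiations, and every VisitLiveObject hook. Call sites can no longer pass an inconsistent value, and a future weak list type needs no flag plumbing at all.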