CallGCPrologueCallbacks(kGCTypeMarkSweepCompact, kNoGCCallbackFlags);
}
}
- mark_compact_collector()->OverApproximateWeakClosure();
- incremental_marking()->set_should_hurry(false);
- incremental_marking()->set_weak_closure_was_overapproximated(true);
+ incremental_marking()->MarkObjectGroups();
{
GCCallbacksScope scope(this);
if (scope.CheckReenter()) {
// Marks the object grey and pushes it on the marking stack.
INLINE(static void MarkObject(Heap* heap, Object* obj)) {
-IncrementalMarking::MarkObject(heap, obj);
+// NOTE(review): unchecked cast — assumes every slot visited here holds a
+// HeapObject (no IsHeapObject() guard as in the other visitor); confirm
+// callers guarantee this.
+IncrementalMarking::MarkObject(heap, HeapObject::cast(obj));
}
// Marks the object black without pushing it on the marking stack.
Object* obj = *p;
if (!obj->IsHeapObject()) return;
- IncrementalMarking::MarkObject(heap_, obj);
+ IncrementalMarking::MarkObject(heap_, HeapObject::cast(obj));
}
Heap* heap_;
}
+// Over-approximates the weak closure while incremental marking is running:
+// object groups and implicit reference groups are treated as strong
+// references (children of marked parents are marked via MarkObject), then
+// the groups are discarded. Takes over the work of the removed
+// MarkCompactCollector::OverApproximateWeakClosure. Only legal once per
+// cycle and only when the flag is on (checked below).
+void IncrementalMarking::MarkObjectGroups() {
+ DCHECK(FLAG_overapproximate_weak_closure);
+ DCHECK(!weak_closure_was_overapproximated_);
+
+ GCTracer::Scope gc_scope(heap_->tracer(),
+ GCTracer::Scope::MC_INCREMENTAL_WEAKCLOSURE);
+
+ // Mark children of implicit ref groups whose parent object is already
+ // marked, using this class's MarkObject as the marking callback.
+ heap_->mark_compact_collector()->MarkImplicitRefGroups(&MarkObject);
+
+ IncrementalMarkingRootMarkingVisitor visitor(this);
+ heap_->isolate()->global_handles()->IterateObjectGroups(
+ &visitor, &MarkCompactCollector::IsUnmarkedHeapObjectWithHeap);
+
+ // The groups have been consumed above; remove them so the atomic pause
+ // does not process them a second time.
+ heap_->isolate()->global_handles()->RemoveImplicitRefGroups();
+ heap_->isolate()->global_handles()->RemoveObjectGroups();
+
+ weak_closure_was_overapproximated_ = true;
+}
+
+
void IncrementalMarking::PrepareForScavenge() {
if (!IsMarking()) return;
NewSpacePageIterator it(heap_->new_space()->FromSpaceStart(),
}
-void IncrementalMarking::MarkObject(Heap* heap, Object* obj) {
- HeapObject* heap_object = HeapObject::cast(obj);
- MarkBit mark_bit = Marking::MarkBitFrom(heap_object);
+// Signature changed from Object* to HeapObject* so this function matches
+// the MarkObjectFunction callback type and can be passed to
+// MarkCompactCollector::MarkImplicitRefGroups; the HeapObject::cast now
+// happens at the call sites.
+void IncrementalMarking::MarkObject(Heap* heap, HeapObject* obj) {
+ MarkBit mark_bit = Marking::MarkBitFrom(obj);
if (mark_bit.data_only()) {
- MarkBlackOrKeepGrey(heap_object, mark_bit, heap_object->Size());
+ // Data-only objects have no pointers to scan: blacken immediately
+ // instead of pushing onto the marking deque.
+ MarkBlackOrKeepGrey(obj, mark_bit, obj->Size());
} else if (Marking::IsWhite(mark_bit)) {
- heap->incremental_marking()->WhiteToGreyAndPush(heap_object, mark_bit);
+ heap->incremental_marking()->WhiteToGreyAndPush(obj, mark_bit);
}
}
if (FLAG_trace_incremental_marking) {
PrintF("[IncrementalMarking] requesting weak closure overapproximation.\n");
}
- set_should_hurry(true);
request_type_ = OVERAPPROXIMATION;
heap_->isolate()->stack_guard()->RequestGC();
}
void Stop();
+ void MarkObjectGroups();
+
void PrepareForScavenge();
void UpdateMarkingDequeAfterScavenge();
bool IsIdleMarkingDelayCounterLimitReached();
- INLINE(static void MarkObject(Heap* heap, Object* object));
+ INLINE(static void MarkObject(Heap* heap, HeapObject* object));
Heap* heap() const { return heap_; }
}
-void MarkCompactCollector::MarkImplicitRefGroups() {
+void MarkCompactCollector::MarkImplicitRefGroups(
+ MarkObjectFunction mark_object) {
List<ImplicitRefGroup*>* ref_groups =
isolate()->global_handles()->implicit_ref_groups();
// A parent object is marked, so mark all child heap objects.
for (size_t j = 0; j < entry->length; ++j) {
if ((*children[j])->IsHeapObject()) {
- HeapObject* child = HeapObject::cast(*children[j]);
- MarkBit mark = Marking::MarkBitFrom(child);
- MarkObject(child, mark);
+ mark_object(heap(), HeapObject::cast(*children[j]));
}
}
if (!only_process_harmony_weak_collections) {
isolate()->global_handles()->IterateObjectGroups(
visitor, &IsUnmarkedHeapObjectWithHeap);
- MarkImplicitRefGroups();
+ MarkImplicitRefGroups(&MarkCompactMarkingVisitor::MarkObject);
}
ProcessWeakCollections();
work_to_do = !marking_deque_.IsEmpty();
}
-void MarkCompactCollector::OverApproximateWeakClosure() {
- GCTracer::Scope gc_scope(heap()->tracer(),
- GCTracer::Scope::MC_INCREMENTAL_WEAKCLOSURE);
-
- RootMarkingVisitor root_visitor(heap());
- isolate()->global_handles()->IterateObjectGroups(
- &root_visitor, &IsUnmarkedHeapObjectWithHeap);
- MarkImplicitRefGroups();
-
- // Remove object groups after marking phase.
- heap()->isolate()->global_handles()->RemoveObjectGroups();
- heap()->isolate()->global_handles()->RemoveImplicitRefGroups();
-}
-
-
void MarkCompactCollector::MarkLiveObjects() {
GCTracer::Scope gc_scope(heap()->tracer(), GCTracer::Scope::MC_MARK);
double start_time = 0.0;
// to the first live object in the page (only used for old and map objects).
typedef bool (*IsAliveFunction)(HeapObject* obj, int* size, int* offset);
+// Callback function to mark an object in a given heap.
+typedef void (*MarkObjectFunction)(Heap* heap, HeapObject* object);
+
// Forward declarations.
class CodeFlusher;
class MarkCompactCollector;
static const uint32_t kMultiFreeEncoding = 1;
static inline bool IsMarked(Object* obj);
+ static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p);
inline Heap* heap() const { return heap_; }
inline Isolate* isolate() const;
// to artificially keep AllocationSites alive for a time.
void MarkAllocationSite(AllocationSite* site);
+ // Mark objects in implicit references groups if their parent object
+ // is marked.
+ void MarkImplicitRefGroups(MarkObjectFunction mark_object);
+
MarkingDeque* marking_deque() { return &marking_deque_; }
void EnsureMarkingDequeIsCommittedAndInitialize();
void UncommitMarkingDeque();
- void OverApproximateWeakClosure();
-
// The following four methods can just be called after marking, when the
// whole transitive closure is known. They must be called before sweeping
// when mark bits are still intact.
// the string table are weak.
void MarkStringTable(RootMarkingVisitor* visitor);
- // Mark objects in implicit references groups if their parent object
- // is marked.
- void MarkImplicitRefGroups();
-
// Mark objects reachable (transitively) from objects in the marking stack
// or overflowed in the heap.
void ProcessMarkingDeque();
// Callback function for telling whether the object *p is an unmarked
// heap object.
static bool IsUnmarkedHeapObject(Object** p);
- static bool IsUnmarkedHeapObjectWithHeap(Heap* heap, Object** p);
// Map transitions from a live map to a dead map must be killed.
// We replace them with a null descriptor, with the same key.