/** Clears the weak reference to this object. */
inline void ClearWeak();
+ /**
+ * Marks the reference to this object as independent. The garbage
+ * collector is then free to ignore any object groups containing this
+ * object. The weak callback for an independent handle should not
+ * assume that it will be preceded by a global GC prologue callback
+ * or followed by a global GC epilogue callback.
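+ *
+ * A minimal usage sketch (EmbedderWeakCallback and embedder_data are
+ * illustrative embedder-supplied names, not part of this API):
+ *
+ *   v8::Persistent<v8::Object> handle =
+ *       v8::Persistent<v8::Object>::New(v8::Object::New());
+ *   handle.MakeWeak(embedder_data, &EmbedderWeakCallback);
+ *   handle.MarkIndependent();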
+ */
+ inline void MarkIndependent();
+
/**
* Checks if the handle holds the only reference to an object.
*/
void* data,
WeakReferenceCallback);
static void ClearWeak(internal::Object** global_handle);
+ static void MarkIndependent(internal::Object** global_handle);
static bool IsGlobalNearDeath(internal::Object** global_handle);
static bool IsGlobalWeak(internal::Object** global_handle);
static void SetWrapperClassId(internal::Object** global_handle,
V8::ClearWeak(reinterpret_cast<internal::Object**>(**this));
}
+template <class T>
+void Persistent<T>::MarkIndependent() {
+ V8::MarkIndependent(reinterpret_cast<internal::Object**>(**this));
+}
+
template <class T>
void Persistent<T>::SetWrapperClassId(uint16_t class_id) {
V8::SetWrapperClassId(reinterpret_cast<internal::Object**>(**this), class_id);
}
+void V8::MarkIndependent(i::Object** object) {
+ i::Isolate* isolate = i::Isolate::Current();
+ LOG_API(isolate, "MarkIndependent");
+ isolate->global_handles()->MarkIndependent(object);
+}
+
+
bool V8::IsGlobalNearDeath(i::Object** obj) {
i::Isolate* isolate = i::Isolate::Current();
LOG_API(isolate, "IsGlobalNearDeath");
// Set the initial value of the handle.
object_ = object;
class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
+ independent_ = false;
state_ = NORMAL;
parameter_or_next_free_.parameter = NULL;
callback_ = NULL;
set_parameter(NULL);
}
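+ // Records that this handle is independent, allowing the scavenger to
+ // process its weak callback without a full mark-compact collection.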
+ void MarkIndependent(GlobalHandles* global_handles) {
+ LOG(global_handles->isolate(),
+ HandleEvent("GlobalHandle::MarkIndependent", handle().location()));
+ ASSERT(state_ != DESTROYED);
+ independent_ = true;
+ }
+
bool IsNearDeath() {
// Check for PENDING to ensure correct answer when processing callbacks.
return state_ == PENDING || state_ == NEAR_DEATH;
}
State state_ : 4; // Need one more bit for MSVC as it treats enums as signed.
+ bool independent_ : 1;
+
private:
// Handle specific callback.
WeakReferenceCallback callback_;
}
+void GlobalHandles::MarkIndependent(Object** location) {
+ Node::FromLocation(location)->MarkIndependent(this);
+}
+
+
bool GlobalHandles::IsNearDeath(Object** location) {
return Node::FromLocation(location)->IsNearDeath();
}
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
// Traversal of GC roots in the global handle list that are marked as
- // WEAK or PENDING.
+ // WEAK, PENDING or NEAR_DEATH.
for (Node* current = head_; current != NULL; current = current->next()) {
if (current->state_ == Node::WEAK
|| current->state_ == Node::PENDING
}
+void GlobalHandles::IterateWeakIndependentRoots(ObjectVisitor* v) {
+ // Traversal of GC roots in the global handle list that are independent
+ // and marked as WEAK, PENDING or NEAR_DEATH.
+ for (Node* current = head_; current != NULL; current = current->next()) {
+ if (!current->independent_) continue;
+ if (current->state_ == Node::WEAK
+ || current->state_ == Node::PENDING
+ || current->state_ == Node::NEAR_DEATH) {
+ v->VisitPointer(&current->object_);
+ }
+ }
+}
+
+
void GlobalHandles::IterateWeakRoots(WeakReferenceGuest f,
WeakReferenceCallback callback) {
for (Node* current = head_; current != NULL; current = current->next()) {
}
-bool GlobalHandles::PostGarbageCollectionProcessing() {
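+// Marks every weak independent handle whose target satisfies the
+// predicate as PENDING so that its weak callback is run after this
+// collection.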
+void GlobalHandles::IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f) {
+ for (Node* current = head_; current != NULL; current = current->next()) {
+ if (current->state_ == Node::WEAK && current->independent_) {
+ if (f(isolate_->heap(), &current->object_)) {
+ current->state_ = Node::PENDING;
+ LOG(isolate_,
+ HandleEvent("GlobalHandle::Pending", current->handle().location()));
+ }
+ }
+ }
+}
+
+
+bool GlobalHandles::PostGarbageCollectionProcessing(
+ GarbageCollector collector) {
// Process weak global handle callbacks. This must be done after the
// GC is completely done, because the callbacks may invoke arbitrary
// API functions.
bool next_gc_likely_to_collect_more = false;
Node** p = &head_;
while (*p != NULL) {
+ // Skip dependent handles. Their weak callbacks might expect to be
+ // invoked between a global GC prologue and epilogue callback, and
+ // those callbacks are not run for minor collections.
+ if (collector == SCAVENGER && !(*p)->independent_) {
+ p = (*p)->next_addr();
+ continue;
+ }
+
if ((*p)->PostGarbageCollectionProcessing(isolate_, this)) {
if (initial_post_gc_processing_count != post_gc_processing_count_) {
// Weak callback triggered another GC and another round of
}
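+// Root iteration used during scavenges: dependent handles are visited in
+// any live state, while independent handles count as roots only while
+// they are strong (NORMAL).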
+void GlobalHandles::IterateStrongAndDependentRoots(ObjectVisitor* v) {
+ for (Node* current = head_; current != NULL; current = current->next()) {
+ if ((current->independent_ && current->state_ == Node::NORMAL) ||
+ (!current->independent_ && current->state_ != Node::DESTROYED)) {
+ v->VisitPointer(&current->object_);
+ }
+ }
+}
+
+
void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
for (Node* current = head_; current != NULL; current = current->next()) {
if (current->class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId &&
// Clear the weakness of a global handle.
void ClearWeakness(Object** location);
+ // Mark the reference to a global handle independent.
+ void MarkIndependent(Object** location);
+
// Tells whether global handle is near death.
static bool IsNearDeath(Object** location);
// Process pending weak handles.
// Returns true if next major GC is likely to collect more garbage.
- bool PostGarbageCollectionProcessing();
+ bool PostGarbageCollectionProcessing(GarbageCollector collector);
// Iterates over all strong handles.
void IterateStrongRoots(ObjectVisitor* v);
+ // Iterates over all strong and dependent handles.
+ void IterateStrongAndDependentRoots(ObjectVisitor* v);
+
// Iterates over all handles.
void IterateAllRoots(ObjectVisitor* v);
// Iterates over all weak roots in heap.
void IterateWeakRoots(ObjectVisitor* v);
+ // Iterates over all weak independent roots in heap.
+ void IterateWeakIndependentRoots(ObjectVisitor* v);
+
// Iterates over weak roots that are bound to a given callback.
void IterateWeakRoots(WeakReferenceGuest f,
WeakReferenceCallback callback);
// them as pending.
void IdentifyWeakHandles(WeakSlotCallback f);
+ // Find all weak independent handles satisfying the callback predicate, mark
+ // them as pending.
+ void IdentifyWeakIndependentHandles(WeakSlotCallbackWithHeap f);
+
// Add an object group.
// Should be only used in GC callback function before a collection.
// All groups are destroyed after a mark-compact collection.
isolate_->counters()->objs_since_last_young()->Set(0);
- if (collector == MARK_COMPACTOR) {
- DisableAssertNoAllocation allow_allocation;
+ { DisableAssertNoAllocation allow_allocation;
GCTracer::Scope scope(tracer, GCTracer::Scope::EXTERNAL);
next_gc_likely_to_collect_more =
- isolate_->global_handles()->PostGarbageCollectionProcessing();
+ isolate_->global_handles()->PostGarbageCollectionProcessing(collector);
}
// Update relocatables.
}
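+// Returns true if the object is still in new space and has no forwarding
+// address, i.e. it has not been reached from any strong or dependent root
+// during this scavenge.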
+static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
+ return heap->InNewSpace(*p) &&
+ !HeapObject::cast(*p)->map_word().IsForwardingAddress();
+}
+
+
void Heap::Scavenge() {
#ifdef DEBUG
if (FLAG_enable_slow_asserts) VerifyNonPointerSpacePointers();
scavenge_visitor.VisitPointer(BitCast<Object**>(&global_contexts_list_));
new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
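+ // Keep objects that are reachable only from weak independent handles
+ // alive: such handles are marked PENDING and their targets are
+ // evacuated so they remain valid until the weak callbacks run after
+ // this collection.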
+ isolate_->global_handles()->IdentifyWeakIndependentHandles(
+ &IsUnscavengedHeapObject);
+ isolate_->global_handles()->IterateWeakIndependentRoots(&scavenge_visitor);
+ new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
+
UpdateNewSpaceReferencesInExternalStringTable(
&UpdateNewSpaceReferenceInExternalStringTableEntry);
void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
v->VisitPointer(reinterpret_cast<Object**>(&roots_[kSymbolTableRootIndex]));
v->Synchronize("symbol_table");
- if (mode != VISIT_ALL_IN_SCAVENGE) {
+ if (mode != VISIT_ALL_IN_SCAVENGE &&
+ mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
// Scavenge collections have special processing for this.
external_string_table_.Iterate(v);
}
// Iterate over the builtin code objects and code stubs in the
// heap. Note that it is not necessary to iterate over code objects
// on scavenge collections.
- if (mode != VISIT_ALL_IN_SCAVENGE) {
+ if (mode != VISIT_ALL_IN_SCAVENGE &&
+ mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
isolate_->builtins()->IterateBuiltins(v);
}
v->Synchronize("builtins");
// Iterate over global handles.
- if (mode == VISIT_ONLY_STRONG) {
- isolate_->global_handles()->IterateStrongRoots(v);
- } else {
- isolate_->global_handles()->IterateAllRoots(v);
+ switch (mode) {
+ case VISIT_ONLY_STRONG:
+ isolate_->global_handles()->IterateStrongRoots(v);
+ break;
+ case VISIT_ALL_IN_SCAVENGE:
+ isolate_->global_handles()->IterateStrongAndDependentRoots(v);
+ break;
+ case VISIT_ALL_IN_SWEEP_NEWSPACE:
+ case VISIT_ALL:
+ isolate_->global_handles()->IterateAllRoots(v);
+ break;
}
v->Synchronize("globalhandles");
}
// Update roots.
- heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SCAVENGE);
+ heap->IterateRoots(&updating_visitor, VISIT_ALL_IN_SWEEP_NEWSPACE);
LiveObjectList::IterateElements(&updating_visitor);
// Update pointers in old spaces.
typedef bool (*WeakSlotCallback)(Object** pointer);
+typedef bool (*WeakSlotCallbackWithHeap)(Heap* heap, Object** pointer);
+
// -----------------------------------------------------------------------------
// Miscellaneous
enum Executability { NOT_EXECUTABLE, EXECUTABLE };
-enum VisitMode { VISIT_ALL, VISIT_ALL_IN_SCAVENGE, VISIT_ONLY_STRONG };
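+// Selects which global handles the heap's root iteration treats as roots:
+// only strong handles (VISIT_ONLY_STRONG), strong plus dependent handles
+// during a scavenge (VISIT_ALL_IN_SCAVENGE), or all handles (VISIT_ALL and
+// VISIT_ALL_IN_SWEEP_NEWSPACE).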
+enum VisitMode {
+ VISIT_ALL,
+ VISIT_ALL_IN_SCAVENGE,
+ VISIT_ALL_IN_SWEEP_NEWSPACE,
+ VISIT_ONLY_STRONG
+};
// Flag indicating whether code is built into the VM (one of the natives files).
enum NativesFlag { NOT_NATIVES_CODE, NATIVES_CODE };
}
-static bool in_scavenge = false;
-static int last = -1;
-
-static void ForceScavenge(v8::Persistent<v8::Value> obj, void* data) {
- CHECK_EQ(-1, last);
- last = 0;
+static void DisposeAndSetFlag(v8::Persistent<v8::Value> obj, void* data) {
obj.Dispose();
obj.Clear();
- in_scavenge = true;
- HEAP->PerformScavenge();
- in_scavenge = false;
*(reinterpret_cast<bool*>(data)) = true;
}
-static void CheckIsNotInvokedInScavenge(v8::Persistent<v8::Value> obj,
- void* data) {
- CHECK_EQ(0, last);
- last = 1;
- *(reinterpret_cast<bool*>(data)) = in_scavenge;
- obj.Dispose();
- obj.Clear();
-}
-THREADED_TEST(NoWeakRefCallbacksInScavenge) {
- // Test verifies that scavenge cannot invoke WeakReferenceCallbacks.
- // Calling callbacks from scavenges is unsafe as objects held by those
- // handlers might have become strongly reachable, but scavenge doesn't
- // check that.
+THREADED_TEST(IndependentWeakHandle) {
v8::Persistent<Context> context = Context::New();
Context::Scope context_scope(context);
v8::Persistent<v8::Object> object_a;
- v8::Persistent<v8::Object> object_b;
{
v8::HandleScope handle_scope;
- object_b = v8::Persistent<v8::Object>::New(v8::Object::New());
object_a = v8::Persistent<v8::Object>::New(v8::Object::New());
}
bool object_a_disposed = false;
- object_a.MakeWeak(&object_a_disposed, &ForceScavenge);
- bool released_in_scavenge = false;
- object_b.MakeWeak(&released_in_scavenge, &CheckIsNotInvokedInScavenge);
+ object_a.MakeWeak(&object_a_disposed, &DisposeAndSetFlag);
+ object_a.MarkIndependent();
+ HEAP->PerformScavenge();
+ CHECK(object_a_disposed);
+}
- while (!object_a_disposed) {
- HEAP->CollectAllGarbage(false);
+
+static void InvokeScavenge() {
+ HEAP->PerformScavenge();
+}
+
+
+static void InvokeMarkSweep() {
+ HEAP->CollectAllGarbage(false);
+}
+
+
+static void ForceScavenge(v8::Persistent<v8::Value> obj, void* data) {
+ obj.Dispose();
+ obj.Clear();
+ *(reinterpret_cast<bool*>(data)) = true;
+ InvokeScavenge();
+}
+
+
+static void ForceMarkSweep(v8::Persistent<v8::Value> obj, void* data) {
+ obj.Dispose();
+ obj.Clear();
+ *(reinterpret_cast<bool*>(data)) = true;
+ InvokeMarkSweep();
+}
+
+
+THREADED_TEST(GCFromWeakCallbacks) {
+ v8::Persistent<Context> context = Context::New();
+ Context::Scope context_scope(context);
+
+ static const int kNumberOfGCTypes = 2;
+ v8::WeakReferenceCallback gc_forcing_callback[kNumberOfGCTypes] =
+ {&ForceScavenge, &ForceMarkSweep};
+
+ typedef void (*GCInvoker)();
+ GCInvoker invoke_gc[kNumberOfGCTypes] = {&InvokeScavenge, &InvokeMarkSweep};
+
+ for (int outer_gc = 0; outer_gc < kNumberOfGCTypes; outer_gc++) {
+ for (int inner_gc = 0; inner_gc < kNumberOfGCTypes; inner_gc++) {
+ v8::Persistent<v8::Object> object;
+ {
+ v8::HandleScope handle_scope;
+ object = v8::Persistent<v8::Object>::New(v8::Object::New());
+ }
+ bool disposed = false;
+ object.MakeWeak(&disposed, gc_forcing_callback[inner_gc]);
+ object.MarkIndependent();
+ invoke_gc[outer_gc]();
+ CHECK(disposed);
+ }
+ }
+}
+
+
+static void RevivingCallback(v8::Persistent<v8::Value> obj, void* data) {
+ obj.ClearWeak();
+ *(reinterpret_cast<bool*>(data)) = true;
+}
+
+
+THREADED_TEST(IndependentHandleRevival) {
+ v8::Persistent<Context> context = Context::New();
+ Context::Scope context_scope(context);
+
+ v8::Persistent<v8::Object> object;
+ {
+ v8::HandleScope handle_scope;
+ object = v8::Persistent<v8::Object>::New(v8::Object::New());
+ object->Set(v8_str("x"), v8::Integer::New(1));
+ v8::Local<String> y_str = v8_str("y");
+ object->Set(y_str, y_str);
+ }
+ bool revived = false;
+ object.MakeWeak(&revived, &RevivingCallback);
+ object.MarkIndependent();
+ HEAP->PerformScavenge();
+ CHECK(revived);
+ HEAP->CollectAllGarbage(true);
+ {
+ v8::HandleScope handle_scope;
+ v8::Local<String> y_str = v8_str("y");
+ CHECK_EQ(v8::Integer::New(1), object->Get(v8_str("x")));
+ CHECK(object->Get(y_str)->Equals(y_str));
}
- CHECK(!released_in_scavenge);
}