P* parameter,
typename WeakCallbackData<S, P>::Callback callback);
+ // Phantom persistents work like weak persistents, except that the pointer to
+ // the object being collected is not available in the finalization callback.
+ // This enables the garbage collector to collect the object and any objects
+ // it references transitively in one GC cycle.
+ template <typename P>
+ V8_INLINE void SetPhantom(P* parameter,
+ typename WeakCallbackData<T, P>::Callback callback);
+
+ template <typename S, typename P>
+ V8_INLINE void SetPhantom(P* parameter,
+ typename WeakCallbackData<S, P>::Callback callback);
+
template<typename P>
V8_INLINE P* ClearWeak();
private:
V8();
+ enum WeakHandleType { PhantomHandle, NonphantomHandle };
+
static internal::Object** GlobalizeReference(internal::Isolate* isolate,
internal::Object** handle);
static internal::Object** CopyPersistent(internal::Object** handle);
static void DisposeGlobal(internal::Object** global_handle);
typedef WeakCallbackData<Value, void>::Callback WeakCallback;
- static void MakeWeak(internal::Object** global_handle,
- void* data,
- WeakCallback weak_callback);
+ static void MakeWeak(internal::Object** global_handle, void* data,
+ WeakCallback weak_callback, WeakHandleType phantom);
static void* ClearWeak(internal::Object** global_handle);
static void Eternalize(Isolate* isolate,
Value* handle,
typename WeakCallbackData<S, P>::Callback callback) {
TYPE_CHECK(S, T);
typedef typename WeakCallbackData<Value, void>::Callback Callback;
- V8::MakeWeak(reinterpret_cast<internal::Object**>(this->val_),
- parameter,
- reinterpret_cast<Callback>(callback));
+ V8::MakeWeak(reinterpret_cast<internal::Object**>(this->val_), parameter,
+ reinterpret_cast<Callback>(callback), V8::NonphantomHandle);
}
+// Phantom variant of SetWeak: registers the callback with V8::PhantomHandle,
+// so the handle is cleared by the GC before the callback runs and the
+// callback receives no pointer to the dying object.
template <class T>
-template<typename P>
+template <typename S, typename P>
+void PersistentBase<T>::SetPhantom(
+ P* parameter, typename WeakCallbackData<S, P>::Callback callback) {
+ TYPE_CHECK(S, T);
+ typedef typename WeakCallbackData<Value, void>::Callback Callback;
+ V8::MakeWeak(reinterpret_cast<internal::Object**>(this->val_), parameter,
+ reinterpret_cast<Callback>(callback), V8::PhantomHandle);
+}
+
+
+// Convenience overload: callback typed on T itself; forwards to the
+// two-type-parameter version with S = T.
+template <class T>
+template <typename P>
+void PersistentBase<T>::SetPhantom(
+ P* parameter, typename WeakCallbackData<T, P>::Callback callback) {
+ SetPhantom<T, P>(parameter, callback);
+}
+
+
+template <class T>
+template <typename P>
P* PersistentBase<T>::ClearWeak() {
return reinterpret_cast<P*>(
V8::ClearWeak(reinterpret_cast<internal::Object**>(this->val_)));
}
-void V8::MakeWeak(i::Object** object,
- void* parameters,
- WeakCallback weak_callback) {
- i::GlobalHandles::MakeWeak(object, parameters, weak_callback);
+void V8::MakeWeak(i::Object** object, void* parameters,
+ WeakCallback weak_callback, V8::WeakHandleType weak_type) {
+ i::GlobalHandles::PhantomState phantom;
+ phantom = weak_type == V8::PhantomHandle ? i::GlobalHandles::Phantom
+ : i::GlobalHandles::Nonphantom;
+ i::GlobalHandles::MakeWeak(object, parameters, weak_callback, phantom);
}
if (outer_info->is_toplevel() && outer_info->will_serialize()) {
// Make sure that if the toplevel code (possibly to be serialized),
- // the inner unction must be allowed to be compiled lazily.
+ // the inner function must be allowed to be compiled lazily.
DCHECK(allow_lazy);
}
Heap* heap = isolate_->heap();
HandleScope scope(isolate_);
- // Perform two GCs to get rid of all unreferenced scripts. The first GC gets
- // rid of all the cached script wrappers and the second gets rid of the
- // scripts which are no longer referenced.
- heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "ScriptCache");
+ // Perform a GC to get rid of all unreferenced scripts.
heap->CollectAllGarbage(Heap::kMakeHeapIterableMask, "ScriptCache");
// Scan heap for Script objects.
Debug* debug = reinterpret_cast<Isolate*>(data.GetIsolate())->debug();
DebugInfoListNode* node =
reinterpret_cast<DebugInfoListNode*>(data.GetParameter());
- // We need to clear all breakpoints associated with the function to restore
- // original code and avoid patching the code twice later because
- // the function will live in the heap until next gc, and can be found by
- // Debug::FindSharedFunctionInfoInScript.
- BreakLocationIterator it(node->debug_info(), ALL_BREAK_LOCATIONS);
- it.ClearAllDebugBreak();
- debug->RemoveDebugInfo(node->debug_info());
+ debug->RemoveDebugInfo(node->debug_info().location());
#ifdef DEBUG
for (DebugInfoListNode* n = debug->debug_info_list_;
n != NULL;
GlobalHandles* global_handles = debug_info->GetIsolate()->global_handles();
debug_info_ = Handle<DebugInfo>::cast(global_handles->Create(debug_info));
GlobalHandles::MakeWeak(reinterpret_cast<Object**>(debug_info_.location()),
- this,
- Debug::HandleWeakDebugInfo);
+ this, Debug::HandleWeakDebugInfo,
+ GlobalHandles::Phantom);
}
// If there are no more break points left remove the debug info for this
// function.
if (debug_info->GetBreakPointCount() == 0) {
- RemoveDebugInfo(debug_info);
+ RemoveDebugInfoAndClearFromShared(debug_info);
}
return;
// Remove all debug info.
while (debug_info_list_ != NULL) {
- RemoveDebugInfo(debug_info_list_->debug_info());
+ RemoveDebugInfoAndClearFromShared(debug_info_list_->debug_info());
}
}
}
-void Debug::RemoveDebugInfo(Handle<DebugInfo> debug_info) {
+// This uses the location of a handle to look up the debug info in the debug
+// info list, but it doesn't use the actual debug info for anything. Therefore
+// if the debug info has been collected by the GC, we can be sure that this
+// method will not attempt to resurrect it.
+void Debug::RemoveDebugInfo(DebugInfo** debug_info) {
DCHECK(debug_info_list_ != NULL);
// Run through the debug info objects to find this one and remove it.
DebugInfoListNode* prev = NULL;
DebugInfoListNode* current = debug_info_list_;
while (current != NULL) {
- if (*current->debug_info() == *debug_info) {
+ if (current->debug_info().location() == debug_info) {
// Unlink from list. If prev is NULL we are looking at the first element.
if (prev == NULL) {
debug_info_list_ = current->next();
} else {
prev->set_next(current->next());
}
- current->debug_info()->shared()->set_debug_info(
- isolate_->heap()->undefined_value());
delete current;
// If there are no more debug info objects there are not more break
}
+// Unlinks the debug info from the debug info list (by handle location) and
+// additionally clears the debug_info slot on the owning SharedFunctionInfo —
+// the clearing that the location-based RemoveDebugInfo no longer performs.
+void Debug::RemoveDebugInfoAndClearFromShared(Handle<DebugInfo> debug_info) {
+ HandleScope scope(isolate_);
+ Handle<SharedFunctionInfo> shared(debug_info->shared());
+
+ RemoveDebugInfo(debug_info.location());
+
+ shared->set_debug_info(isolate_->heap()->undefined_value());
+}
+
+
void Debug::SetAfterBreakTarget(JavaScriptFrame* frame) {
after_break_target_ = NULL;
HandleScope scope(isolate_);
// If there are no break points this cannot be break at return, as
- // the debugger statement and stack guard bebug break cannot be at
+ // the debugger statement and stack guard debug break cannot be at
// return.
if (!has_break_points_) {
return false;
v8::Handle<v8::Context> MessageImpl::GetEventContext() const {
Isolate* isolate = event_data_->GetIsolate();
v8::Handle<v8::Context> context = GetDebugEventContext(isolate);
- // Isolate::context() may be NULL when "script collected" event occures.
+ // Isolate::context() may be NULL when "script collected" event occurs.
DCHECK(!context.IsEmpty());
return context;
}
void ClearStepIn();
void ActivateStepOut(StackFrame* frame);
void ClearStepNext();
- // Returns whether the compile succeeded.
- void RemoveDebugInfo(Handle<DebugInfo> debug_info);
+ void RemoveDebugInfoAndClearFromShared(Handle<DebugInfo> debug_info);
+ void RemoveDebugInfo(DebugInfo** debug_info);
Handle<Object> CheckBreakPoints(Handle<Object> break_point);
bool CheckBreakPoint(Handle<Object> break_point_object);
flags_ = IsInNewSpaceList::update(flags_, v);
}
+ bool is_zapped_during_weak_callback() {
+ return IsZappedDuringWeakCallback::decode(flags_);
+ }
+ void set_is_zapped_during_weak_callback(bool v) {
+ flags_ = IsZappedDuringWeakCallback::update(flags_, v);
+ }
+
bool IsNearDeath() const {
// Check for PENDING to ensure correct answer when processing callbacks.
return state() == PENDING || state() == NEAR_DEATH;
parameter_or_next_free_.next_free = value;
}
- void MakeWeak(void* parameter, WeakCallback weak_callback) {
+ void MakeWeak(void* parameter, WeakCallback weak_callback,
+ bool is_zapped_during_weak_callback = false) {
DCHECK(weak_callback != NULL);
DCHECK(state() != FREE);
CHECK(object_ != NULL);
set_state(WEAK);
set_parameter(parameter);
+ set_is_zapped_during_weak_callback(is_zapped_during_weak_callback);
weak_callback_ = weak_callback;
}
Release();
return false;
}
- void* par = parameter();
+ void* param = parameter();
set_state(NEAR_DEATH);
set_parameter(NULL);
DCHECK(!object_->IsExternalTwoByteString() ||
ExternalTwoByteString::cast(object_)->resource() != NULL);
// Leaving V8.
- VMState<EXTERNAL> state(isolate);
+ VMState<EXTERNAL> vmstate(isolate);
HandleScope handle_scope(isolate);
- Handle<Object> handle(*object, isolate);
- v8::WeakCallbackData<v8::Value, void> data(
- reinterpret_cast<v8::Isolate*>(isolate),
- v8::Utils::ToLocal(handle),
- par);
- weak_callback_(data);
+ if (is_zapped_during_weak_callback()) {
+ // Phantom weak pointer case.
+ DCHECK(*object == Smi::FromInt(kPhantomReferenceZap));
+ // Make data with a null handle.
+ v8::WeakCallbackData<v8::Value, void> data(
+ reinterpret_cast<v8::Isolate*>(isolate), v8::Local<v8::Object>(),
+ param);
+ weak_callback_(data);
+ if (state() != FREE) {
+ // Callback does not have to clear the global handle if it is a
+ // phantom handle.
+ Release();
+ }
+ } else {
+ Handle<Object> handle(object);
+ v8::WeakCallbackData<v8::Value, void> data(
+ reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle),
+ param);
+ weak_callback_(data);
+ }
}
// Absence of explicit cleanup or revival of weak handle
// in most of the cases would lead to memory leak.
// This stores three flags (independent, partially_dependent and
// in_new_space_list) and a State.
- class NodeState: public BitField<State, 0, 4> {};
- class IsIndependent: public BitField<bool, 4, 1> {};
- class IsPartiallyDependent: public BitField<bool, 5, 1> {};
- class IsInNewSpaceList: public BitField<bool, 6, 1> {};
+ class NodeState : public BitField<State, 0, 4> {};
+ class IsIndependent : public BitField<bool, 4, 1> {};
+ class IsPartiallyDependent : public BitField<bool, 5, 1> {};
+ class IsInNewSpaceList : public BitField<bool, 6, 1> {};
+ class IsZappedDuringWeakCallback : public BitField<bool, 7, 1> {};
uint8_t flags_;
}
-void GlobalHandles::MakeWeak(Object** location,
- void* parameter,
- WeakCallback weak_callback) {
- Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
+void GlobalHandles::MakeWeak(Object** location, void* parameter,
+ WeakCallback weak_callback, PhantomState phantom) {
+ Node::FromLocation(location)
+ ->MakeWeak(parameter, weak_callback, phantom == Phantom);
}
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
for (NodeIterator it(this); !it.done(); it.Advance()) {
- if (it.node()->IsWeakRetainer()) v->VisitPointer(it.node()->location());
+ Node* node = it.node();
+ if (node->IsWeakRetainer()) {
+ if (node->state() == Node::PENDING &&
+ node->is_zapped_during_weak_callback()) {
+ *(node->location()) = Smi::FromInt(kPhantomReferenceZap);
+ } else {
+ v->VisitPointer(node->location());
+ }
+ }
}
}
DCHECK(node->is_in_new_space_list());
if ((node->is_independent() || node->is_partially_dependent()) &&
node->IsWeakRetainer()) {
- v->VisitPointer(node->location());
+ if (node->is_zapped_during_weak_callback()) {
+ *(node->location()) = Smi::FromInt(kPhantomReferenceZap);
+ } else {
+ v->VisitPointer(node->location());
+ }
}
}
}
typedef WeakCallbackData<v8::Value, void>::Callback WeakCallback;
+ // For a phantom weak reference, the callback does not have access to the
+ // dying object. Phantom weak references are preferred because they allow
+ // memory to be reclaimed in one GC cycle rather than two. However, for
+ // historical reasons the default is non-phantom.
+ enum PhantomState { Nonphantom, Phantom };
+
// Make the global handle weak and set the callback parameter for the
// handle. When the garbage collector recognizes that only weak global
- // handles point to an object the handles are cleared and the callback
- // function is invoked (for each handle) with the handle and corresponding
- // parameter as arguments. Note: cleared means set to Smi::FromInt(0). The
- // reason is that Smi::FromInt(0) does not change during garage collection.
- static void MakeWeak(Object** location,
- void* parameter,
- WeakCallback weak_callback);
+ // handles point to an object the callback function is invoked (for each
+ // handle) with the handle and corresponding parameter as arguments. By
+ // default the handle still contains a pointer to the object that is being
+ // collected. For this reason the object is not collected until the next
+ // GC. For a phantom weak handle the handle is cleared (set to a Smi)
+ // before the callback is invoked, but the handle can still be identified
+ // in the callback by using the location() of the handle.
+ static void MakeWeak(Object** location, void* parameter,
+ WeakCallback weak_callback,
+ PhantomState phantom = Nonphantom);
void RecordStats(HeapStats* stats);
#endif
const int kCodeZapValue = 0xbadc0de;
+const uint32_t kPhantomReferenceZap = 0xca11bac;
// On Intel architecture, cache line size is 64 bytes.
// On ARM it may be less (32 bytes), but as far this constant is
};
+// Phantom weak callback: only records that it fired. Unlike the old
+// DisposeAndSetFlag it does not Reset the handle — a phantom handle is
+// released automatically after the callback returns.
-static void DisposeAndSetFlag(
+static void SetFlag(
const v8::WeakCallbackData<v8::Object, FlagAndPersistent>& data) {
- data.GetParameter()->handle.Reset();
data.GetParameter()->flag = true;
}
-THREADED_TEST(IndependentWeakHandle) {
+static void IndependentWeakHandle(bool global_gc, bool interlinked) {
v8::Isolate* iso = CcTest::isolate();
v8::HandleScope scope(iso);
v8::Handle<Context> context = Context::New(iso);
FlagAndPersistent object_a, object_b;
+ intptr_t big_heap_size;
+
{
v8::HandleScope handle_scope(iso);
- object_a.handle.Reset(iso, v8::Object::New(iso));
- object_b.handle.Reset(iso, v8::Object::New(iso));
+ Local<Object> a(v8::Object::New(iso));
+ Local<Object> b(v8::Object::New(iso));
+ object_a.handle.Reset(iso, a);
+ object_b.handle.Reset(iso, b);
+ if (interlinked) {
+ a->Set(v8_str("x"), b);
+ b->Set(v8_str("x"), a);
+ }
+ if (global_gc) {
+ CcTest::heap()->CollectAllGarbage(TestHeap::Heap::kNoGCFlags);
+ } else {
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+ }
+ // We are relying on this creating a big flat array and reserving the space
+ // up front.
+ v8::Handle<Value> big_array = CompileRun("new Array(50000)");
+ a->Set(v8_str("y"), big_array);
+ big_heap_size = CcTest::heap()->SizeOfObjects();
}
object_a.flag = false;
object_b.flag = false;
- object_a.handle.SetWeak(&object_a, &DisposeAndSetFlag);
- object_b.handle.SetWeak(&object_b, &DisposeAndSetFlag);
+ object_a.handle.SetPhantom(&object_a, &SetFlag);
+ object_b.handle.SetPhantom(&object_b, &SetFlag);
CHECK(!object_b.handle.IsIndependent());
object_a.handle.MarkIndependent();
object_b.handle.MarkIndependent();
CHECK(object_b.handle.IsIndependent());
- CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+ if (global_gc) {
+ CcTest::heap()->CollectAllGarbage(TestHeap::Heap::kNoGCFlags);
+ } else {
+ CcTest::heap()->CollectGarbage(i::NEW_SPACE);
+ }
+ // A single GC should be enough to reclaim the memory, since we are using
+ // phantom handles.
+ CHECK_LT(CcTest::heap()->SizeOfObjects(), big_heap_size - 200000);
CHECK(object_a.flag);
CHECK(object_b.flag);
}
+// Drive the parameterized test through all four combinations of
+// (global_gc, interlinked).
+THREADED_TEST(IndependentWeakHandle) {
+ IndependentWeakHandle(false, false);
+ IndependentWeakHandle(false, true);
+ IndependentWeakHandle(true, false);
+ IndependentWeakHandle(true, true);
+}
+
+
+// Triggers a new-space (scavenge) collection.
static void InvokeScavenge() {
CcTest::heap()->CollectGarbage(i::NEW_SPACE);
}