class PropertyCallbackArguments;
class FunctionCallbackArguments;
class GlobalHandles;
+
+class CallbackData {
+ public:
+ V8_INLINE v8::Isolate* GetIsolate() const { return isolate_; }
+
+ protected:
+ explicit CallbackData(v8::Isolate* isolate) : isolate_(isolate) {}
+
+ private:
+ v8::Isolate* isolate_;
+};
}
};
-template<class T, class P>
-class WeakCallbackData {
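+// Data passed to a phantom weak callback: the isolate plus a single
+// embedder-supplied parameter. Unlike WeakCallbackData there is no handle to
+// the dying object.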
+template <typename T>
+class PhantomCallbackData : public internal::CallbackData {
+ public:
+ typedef void (*Callback)(const PhantomCallbackData<T>& data);
+
+ V8_INLINE T* GetParameter() const { return parameter_; }
+
+ PhantomCallbackData<T>(Isolate* isolate, T* parameter)
+ : internal::CallbackData(isolate), parameter_(parameter) {}
+
+ private:
+ T* parameter_;
+};
+
+
+template <class T, class P>
+class WeakCallbackData : public PhantomCallbackData<P> {
public:
typedef void (*Callback)(const WeakCallbackData<T, P>& data);
- V8_INLINE Isolate* GetIsolate() const { return isolate_; }
V8_INLINE Local<T> GetValue() const { return handle_; }
- V8_INLINE P* GetParameter() const { return parameter_; }
private:
friend class internal::GlobalHandles;
- WeakCallbackData(Isolate* isolate, Local<T> handle, P* parameter)
- : isolate_(isolate), handle_(handle), parameter_(parameter) { }
- Isolate* isolate_;
+ WeakCallbackData(Isolate* isolate, P* parameter, Local<T> handle)
+ : PhantomCallbackData<P>(isolate, parameter), handle_(handle) {}
Local<T> handle_;
- P* parameter_;
+};
+
+
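+// Data passed to an internal-fields phantom callback: the isolate plus the
+// values of two internal fields of the object that died.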
+template <typename T, typename U>
+class InternalFieldsCallbackData : public internal::CallbackData {
+ public:
+ typedef void (*Callback)(const InternalFieldsCallbackData<T, U>& data);
+
+ InternalFieldsCallbackData(Isolate* isolate, T* internalField1,
+ U* internalField2)
+ : internal::CallbackData(isolate),
+ internal_field1_(internalField1),
+ internal_field2_(internalField2) {}
+
+ V8_INLINE T* GetInternalField1() const { return internal_field1_; }
+ V8_INLINE U* GetInternalField2() const { return internal_field2_; }
+
+ private:
+ T* internal_field1_;
+ U* internal_field2_;
};
template <class S>
V8_INLINE void Reset(Isolate* isolate, const PersistentBase<S>& other);
- V8_INLINE bool IsEmpty() const { return val_ == 0; }
+ V8_INLINE bool IsEmpty() const { return val_ == NULL; }
+ V8_INLINE void Empty() { val_ = 0; }
template <class S>
V8_INLINE bool operator==(const PersistentBase<S>& that) const {
internal::Object** a = reinterpret_cast<internal::Object**>(this->val_);
internal::Object** b = reinterpret_cast<internal::Object**>(that.val_);
- if (a == 0) return b == 0;
- if (b == 0) return false;
+ if (a == NULL) return b == NULL;
+ if (b == NULL) return false;
return *a == *b;
}
template <class S> V8_INLINE bool operator==(const Handle<S>& that) const {
internal::Object** a = reinterpret_cast<internal::Object**>(this->val_);
internal::Object** b = reinterpret_cast<internal::Object**>(that.val_);
- if (a == 0) return b == 0;
- if (b == 0) return false;
+ if (a == NULL) return b == NULL;
+ if (b == NULL) return false;
return *a == *b;
}
// Phantom persistents work like weak persistents, except that the pointer to
// the object being collected is not available in the finalization callback.
// This enables the garbage collector to collect the object and any objects
- // it references transitively in one GC cycle.
+ // it references transitively in one GC cycle. At the moment you can either
+ // specify a parameter for the callback or the indices of two internal
+ // fields in the dying object whose values are passed to the callback.
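+ //
+ // Illustrative sketch only (MyState, state, kField0 and kField1 are
+ // hypothetical embedder names):
+ //
+ //   void OnDead(const PhantomCallbackData<MyState>& data) {
+ //     delete data.GetParameter();  // No Local<> to the object is available.
+ //   }
+ //   persistent.SetPhantom(state, OnDead);
+ //
+ //   void OnDeadFields(const InternalFieldsCallbackData<void, void>& data) {
+ //     // GetInternalField1()/GetInternalField2() return the values stored
+ //     // in the two chosen internal fields of the dying object.
+ //   }
+ //   persistent.SetPhantom(OnDeadFields, kField0, kField1);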
template <typename P>
V8_INLINE void SetPhantom(P* parameter,
- typename WeakCallbackData<T, P>::Callback callback);
+ typename PhantomCallbackData<P>::Callback callback);
- template <typename S, typename P>
- V8_INLINE void SetPhantom(P* parameter,
- typename WeakCallbackData<S, P>::Callback callback);
+ template <typename P, typename Q>
+ V8_INLINE void SetPhantom(
+ void (*callback)(const InternalFieldsCallbackData<P, Q>&),
+ int internal_field_index1, int internal_field_index2);
template<typename P>
V8_INLINE P* ClearWeak();
/** Gets the number of internal fields for this Object. */
int InternalFieldCount();
+ static const int kNoInternalFieldIndex = -1;
+
/** Same as above, but works for Persistents */
V8_INLINE static int InternalFieldCount(
const PersistentBase<Object>& object) {
static void DisposeGlobal(internal::Object** global_handle);
typedef WeakCallbackData<Value, void>::Callback WeakCallback;
static void MakeWeak(internal::Object** global_handle, void* data,
- WeakCallback weak_callback, WeakHandleType phantom);
+ WeakCallback weak_callback);
+ static void MakePhantom(internal::Object** global_handle, void* data,
+ PhantomCallbackData<void>::Callback weak_callback);
+ static void MakePhantom(
+ internal::Object** global_handle,
+ InternalFieldsCallbackData<void, void>::Callback weak_callback,
+ int internal_field_index1,
+ int internal_field_index2 = Object::kNoInternalFieldIndex);
static void* ClearWeak(internal::Object** global_handle);
static void Eternalize(Isolate* isolate,
Value* handle,
static const int kNodeClassIdOffset = 1 * kApiPointerSize;
static const int kNodeFlagsOffset = 1 * kApiPointerSize + 3;
- static const int kNodeStateMask = 0xf;
+ static const int kNodeStateMask = 0x7;
static const int kNodeStateIsWeakValue = 2;
static const int kNodeStateIsPendingValue = 3;
static const int kNodeStateIsNearDeathValue = 4;
- static const int kNodeIsIndependentShift = 4;
- static const int kNodeIsPartiallyDependentShift = 5;
+ static const int kNodeIsIndependentShift = 3;
+ static const int kNodeIsPartiallyDependentShift = 4;
static const int kJSObjectType = 0xbd;
static const int kFirstNonstringType = 0x80;
TYPE_CHECK(S, T);
typedef typename WeakCallbackData<Value, void>::Callback Callback;
V8::MakeWeak(reinterpret_cast<internal::Object**>(this->val_), parameter,
- reinterpret_cast<Callback>(callback), V8::NonphantomHandle);
+ reinterpret_cast<Callback>(callback));
}
template <class T>
-template <typename S, typename P>
+template <typename P>
void PersistentBase<T>::SetPhantom(
- P* parameter, typename WeakCallbackData<S, P>::Callback callback) {
- TYPE_CHECK(S, T);
- typedef typename WeakCallbackData<Value, void>::Callback Callback;
- V8::MakeWeak(reinterpret_cast<internal::Object**>(this->val_), parameter,
- reinterpret_cast<Callback>(callback), V8::PhantomHandle);
+ P* parameter, typename PhantomCallbackData<P>::Callback callback) {
+ typedef typename PhantomCallbackData<void>::Callback Callback;
+ V8::MakePhantom(reinterpret_cast<internal::Object**>(this->val_), parameter,
+ reinterpret_cast<Callback>(callback));
}
template <class T>
-template <typename P>
+template <typename U, typename V>
void PersistentBase<T>::SetPhantom(
- P* parameter, typename WeakCallbackData<T, P>::Callback callback) {
- SetPhantom<T, P>(parameter, callback);
+ void (*callback)(const InternalFieldsCallbackData<U, V>&),
+ int internal_field_index1, int internal_field_index2) {
+ typedef typename InternalFieldsCallbackData<void, void>::Callback Callback;
+ V8::MakePhantom(reinterpret_cast<internal::Object**>(this->val_),
+ reinterpret_cast<Callback>(callback), internal_field_index1,
+ internal_field_index2);
}
// FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
enum State {
FREE = 0,
- NORMAL, // Normal global handle.
- WEAK, // Flagged as weak but not yet finalized.
- PENDING, // Has been recognized as only reachable by weak handles.
- NEAR_DEATH // Callback has informed the handle is near death.
+ NORMAL, // Normal global handle.
+ WEAK, // Flagged as weak but not yet finalized.
+ PENDING, // Has been recognized as only reachable by weak handles.
+ NEAR_DEATH, // Callback has informed the handle is near death.
+ NUMBER_OF_NODE_STATES
};
// Maps handle location (slot) to the containing node.
IncreaseBlockUses();
}
+ void Zap() {
+ DCHECK(IsInUse());
+ // Zap the values for eager trapping.
+ object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
+ }
+
void Release() {
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
set_state(FREE);
// Zap the values for eager trapping.
object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
flags_ = IsInNewSpaceList::update(flags_, v);
}
- bool is_zapped_during_weak_callback() {
- return IsZappedDuringWeakCallback::decode(flags_);
+ WeaknessType weakness_type() const {
+ return NodeWeaknessType::decode(flags_);
}
- void set_is_zapped_during_weak_callback(bool v) {
- flags_ = IsZappedDuringWeakCallback::update(flags_, v);
+ void set_weakness_type(WeaknessType weakness_type) {
+ flags_ = NodeWeaknessType::update(flags_, weakness_type);
}
bool IsNearDeath() const {
bool IsWeak() const { return state() == WEAK; }
+ bool IsInUse() const { return state() != FREE; }
+
bool IsRetainer() const { return state() != FREE; }
bool IsStrongRetainer() const { return state() == NORMAL; }
// Independent flag accessors.
void MarkIndependent() {
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
set_independent(true);
}
void MarkPartiallyDependent() {
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
set_partially_dependent(true);
}
// Callback parameter accessors.
void set_parameter(void* parameter) {
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
+ DCHECK(weakness_type() == NORMAL_WEAK || weakness_type() == PHANTOM_WEAK);
parameter_or_next_free_.parameter = parameter;
}
void* parameter() const {
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
return parameter_or_next_free_.parameter;
}
+ void set_internal_fields(int internal_field_index1,
+ int internal_field_index2) {
+ DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
+ // These are stored in an int16_t, so check that they fit.
+ DCHECK(internal_field_index1 < (1 << 15));
+ DCHECK(internal_field_index1 >= -(1 << 15));
+ DCHECK(internal_field_index2 < (1 << 15));
+ DCHECK(internal_field_index2 >= -(1 << 15));
+ parameter_or_next_free_.internal_field_indeces.internal_field1 =
+ static_cast<int16_t>(internal_field_index1);
+ parameter_or_next_free_.internal_field_indeces.internal_field2 =
+ static_cast<int16_t>(internal_field_index2);
+ }
+
+ int internal_field1() const {
+ DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
+ return parameter_or_next_free_.internal_field_indeces.internal_field1;
+ }
+
+ int internal_field2() const {
+ DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
+ return parameter_or_next_free_.internal_field_indeces.internal_field2;
+ }
+
// Accessors for next free node in the free list.
Node* next_free() {
DCHECK(state() == FREE);
parameter_or_next_free_.next_free = value;
}
- void MakeWeak(void* parameter, WeakCallback weak_callback,
- bool is_zapped_during_weak_callback = false) {
+ void MakeWeak(void* parameter, WeakCallback weak_callback) {
DCHECK(weak_callback != NULL);
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
CHECK(object_ != NULL);
set_state(WEAK);
+ set_weakness_type(NORMAL_WEAK);
set_parameter(parameter);
- set_is_zapped_during_weak_callback(is_zapped_during_weak_callback);
weak_callback_ = weak_callback;
}
+ void MakePhantom(void* parameter,
+ PhantomCallbackData<void>::Callback phantom_callback,
+ int16_t internal_field_index1,
+ int16_t internal_field_index2) {
+ DCHECK(phantom_callback != NULL);
+ DCHECK(IsInUse());
+ CHECK(object_ != NULL);
+ set_state(WEAK);
+ if (parameter == NULL) {
+ set_weakness_type(INTERNAL_FIELDS_WEAK);
+ set_internal_fields(internal_field_index1, internal_field_index2);
+ } else {
+ DCHECK(internal_field_index1 == v8::Object::kNoInternalFieldIndex);
+ DCHECK(internal_field_index2 == v8::Object::kNoInternalFieldIndex);
+ set_weakness_type(PHANTOM_WEAK);
+ set_parameter(parameter);
+ }
+ weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback);
+ }
+
void* ClearWeakness() {
- DCHECK(state() != FREE);
+ DCHECK(IsInUse());
void* p = parameter();
set_state(NORMAL);
set_parameter(NULL);
return p;
}
+ void CollectPhantomCallbackData(
+ Isolate* isolate, List<PendingPhantomCallback>* pending_phantom_callbacks,
+ List<PendingInternalFieldsCallback>* pending_internal_fields_callbacks) {
+ if (state() != Node::PENDING) return;
+ bool do_release = true;
+ if (weak_callback_ != NULL) {
+ if (weakness_type() == NORMAL_WEAK) return;
+
+ v8::Isolate* api_isolate = reinterpret_cast<v8::Isolate*>(isolate);
+
+ if (weakness_type() == PHANTOM_WEAK) {
+ // Phantom weak pointer case. The handle was already zapped with a
+ // harmless value when weak roots were iterated.
+ DCHECK(*location() == Smi::FromInt(0));
+ typedef PhantomCallbackData<void> Data;
+
+ Data data(api_isolate, parameter());
+ Data::Callback callback =
+ reinterpret_cast<Data::Callback>(weak_callback_);
+
+ pending_phantom_callbacks->Add(
+ PendingPhantomCallback(this, data, callback));
+
+ // Postpone the release of the handle. The embedder can't use the
+ // handle (it's zapped), but it may be using the location, and we
+ // don't want to confuse things by reusing that.
+ do_release = false;
+ } else {
+ DCHECK(weakness_type() == INTERNAL_FIELDS_WEAK);
+ typedef InternalFieldsCallbackData<void, void> Data;
+
+ // Phantom weak pointer case, passing internal fields instead of
+ // parameter. Don't use a handle here during GC, because it will
+ // create a handle pointing to a dying object, which can confuse
+ // the next GC.
+ JSObject* jsobject = reinterpret_cast<JSObject*>(object());
+ DCHECK(jsobject->IsJSObject());
+ Data data(api_isolate, jsobject->GetInternalField(internal_field1()),
+ jsobject->GetInternalField(internal_field2()));
+ Data::Callback callback =
+ reinterpret_cast<Data::Callback>(weak_callback_);
+
+ // In the future, we want to delay the callback. In that case we will
+ // zap when we queue it up, to stop the C++ side from accessing the
+ // dead V8 object, but we will call Release only after the callback
+ // (allowing the node to be reused).
+ pending_internal_fields_callbacks->Add(
+ PendingInternalFieldsCallback(data, callback));
+ }
+ }
+ // TODO(erikcorry): At the moment the callbacks are not postponed much,
+ // but if we really postpone them until after the mutator has run, we
+ // need to divide things up, so that an early callback clears the handle,
+ // while a later one destroys the objects involved, possibly triggering
+ // some work when decremented ref counts hit zero.
+ if (do_release) Release();
+ }
+
bool PostGarbageCollectionProcessing(Isolate* isolate) {
if (state() != Node::PENDING) return false;
if (weak_callback_ == NULL) {
Release();
return false;
}
- void* param = parameter();
set_state(NEAR_DEATH);
- set_parameter(NULL);
+ // Check that we are not passing a finalized external string to
+ // the callback.
+ DCHECK(!object_->IsExternalOneByteString() ||
+ ExternalOneByteString::cast(object_)->resource() != NULL);
+ DCHECK(!object_->IsExternalTwoByteString() ||
+ ExternalTwoByteString::cast(object_)->resource() != NULL);
+ // Leaving V8.
+ VMState<EXTERNAL> vmstate(isolate);
+ HandleScope handle_scope(isolate);
+ if (weakness_type() == PHANTOM_WEAK) return false;
+ DCHECK(weakness_type() == NORMAL_WEAK);
Object** object = location();
- {
- // Check that we are not passing a finalized external string to
- // the callback.
- DCHECK(!object_->IsExternalOneByteString() ||
- ExternalOneByteString::cast(object_)->resource() != NULL);
- DCHECK(!object_->IsExternalTwoByteString() ||
- ExternalTwoByteString::cast(object_)->resource() != NULL);
- // Leaving V8.
- VMState<EXTERNAL> vmstate(isolate);
- HandleScope handle_scope(isolate);
- if (is_zapped_during_weak_callback()) {
- // Phantom weak pointer case.
- DCHECK(*object == Smi::FromInt(kPhantomReferenceZap));
- // Make data with a null handle.
- v8::WeakCallbackData<v8::Value, void> data(
- reinterpret_cast<v8::Isolate*>(isolate), v8::Local<v8::Object>(),
- param);
- weak_callback_(data);
- if (state() != FREE) {
- // Callback does not have to clear the global handle if it is a
- // phantom handle.
- Release();
- }
- } else {
- Handle<Object> handle(*object, isolate);
- v8::WeakCallbackData<v8::Value, void> data(
- reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle),
- param);
- weak_callback_(data);
- }
- }
+ Handle<Object> handle(*object, isolate);
+ v8::WeakCallbackData<v8::Value, void> data(
+ reinterpret_cast<v8::Isolate*>(isolate), parameter(),
+ v8::Utils::ToLocal(handle));
+ set_parameter(NULL);
+ weak_callback_(data);
+
// Absence of explicit cleanup or revival of weak handle
// in most of the cases would lead to memory leak.
CHECK(state() != NEAR_DEATH);
- // This stores three flags (independent, partially_dependent and
- // in_new_space_list) and a State.
+ // This stores three flags (independent, partially_dependent and
+ // in_new_space_list), a State, and a WeaknessType.
- class NodeState : public BitField<State, 0, 4> {};
- class IsIndependent : public BitField<bool, 4, 1> {};
- class IsPartiallyDependent : public BitField<bool, 5, 1> {};
- class IsInNewSpaceList : public BitField<bool, 6, 1> {};
- class IsZappedDuringWeakCallback : public BitField<bool, 7, 1> {};
+ class NodeState : public BitField<State, 0, 3> {};
+ class IsIndependent : public BitField<bool, 3, 1> {};
+ class IsPartiallyDependent : public BitField<bool, 4, 1> {};
+ class IsInNewSpaceList : public BitField<bool, 5, 1> {};
+ class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};
uint8_t flags_;
// the free list link.
union {
void* parameter;
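+ // For INTERNAL_FIELDS_WEAK nodes the parameter slot instead holds two
+ // internal field indices, packed as int16_t values.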
+ struct {
+ int16_t internal_field1;
+ int16_t internal_field2;
+ } internal_field_indeces;
Node* next_free;
} parameter_or_next_free_;
void GlobalHandles::MakeWeak(Object** location, void* parameter,
- WeakCallback weak_callback, PhantomState phantom) {
+ WeakCallback weak_callback) {
+ Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
+}
+
+
+typedef PhantomCallbackData<void>::Callback GenericCallback;
+
+
+void GlobalHandles::MakePhantom(
+ Object** location,
+ v8::InternalFieldsCallbackData<void, void>::Callback phantom_callback,
+ int16_t internal_field_index1, int16_t internal_field_index2) {
Node::FromLocation(location)
- ->MakeWeak(parameter, weak_callback, phantom == Phantom);
+ ->MakePhantom(NULL, reinterpret_cast<GenericCallback>(phantom_callback),
+ internal_field_index1, internal_field_index2);
+}
+
+
+void GlobalHandles::MakePhantom(Object** location, void* parameter,
+ GenericCallback phantom_callback) {
+ Node::FromLocation(location)->MakePhantom(parameter, phantom_callback,
+ v8::Object::kNoInternalFieldIndex,
+ v8::Object::kNoInternalFieldIndex);
+}
+
+
+void GlobalHandles::CollectPhantomCallbackData() {
+ for (NodeIterator it(this); !it.done(); it.Advance()) {
+ Node* node = it.node();
+ node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_,
+ &pending_internal_fields_callbacks_);
+ }
}
for (NodeIterator it(this); !it.done(); it.Advance()) {
Node* node = it.node();
if (node->IsWeakRetainer()) {
- if (node->state() == Node::PENDING &&
- node->is_zapped_during_weak_callback()) {
- *(node->location()) = Smi::FromInt(kPhantomReferenceZap);
+ // Weakness type can be normal, phantom or internal fields.
+ // For normal weakness we mark through the handle so that
+ // the object and everything reachable from it are available
+ // to the callback.
+ // In the phantom case the callback never sees the object, so
+ // we can zap the handle now and don't need to mark through it.
+ // In the internal fields case we will need the internal
+ // fields, so we can't zap the handle, but we don't need to
+ // mark through it either, because the object dies in this GC
+ // round.
+ if (node->state() == Node::PENDING) {
+ if (node->weakness_type() == PHANTOM_WEAK) {
+ *(node->location()) = Smi::FromInt(0);
+ } else if (node->weakness_type() == NORMAL_WEAK) {
+ v->VisitPointer(node->location());
+ } else {
+ DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK);
+ }
} else {
+ // Node is not pending, which means the object survived. We still
+ // need to visit the pointer in case the object moved, e.g. because of
+ // compaction.
v->VisitPointer(node->location());
}
}
DCHECK(node->is_in_new_space_list());
if ((node->is_independent() || node->is_partially_dependent()) &&
node->IsWeakRetainer()) {
- if (node->is_zapped_during_weak_callback()) {
- *(node->location()) = Smi::FromInt(kPhantomReferenceZap);
- } else {
+ if (node->weakness_type() == PHANTOM_WEAK) {
+ *(node->location()) = Smi::FromInt(0);
+ } else if (node->weakness_type() == NORMAL_WEAK) {
v->VisitPointer(node->location());
+ } else {
+ DCHECK(node->weakness_type() == INTERNAL_FIELDS_WEAK);
+ // For this case we only need to trace if it's alive: tracing
+ // something that is already alive just serves to update the pointer
+ // to the new location of the object.
+ DCHECK(node->state() != Node::NEAR_DEATH);
+ if (node->state() != Node::PENDING) {
+ v->VisitPointer(node->location());
+ }
}
}
}
}
-int GlobalHandles::PostGarbageCollectionProcessing(
- GarbageCollector collector) {
- // Process weak global handle callbacks. This must be done after the
- // GC is completely done, because the callbacks may invoke arbitrary
- // API functions.
- DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
- const int initial_post_gc_processing_count = ++post_gc_processing_count_;
+int GlobalHandles::PostScavengeProcessing(
+ const int initial_post_gc_processing_count) {
int freed_nodes = 0;
- if (collector == SCAVENGER) {
- for (int i = 0; i < new_space_nodes_.length(); ++i) {
- Node* node = new_space_nodes_[i];
- DCHECK(node->is_in_new_space_list());
- if (!node->IsRetainer()) {
- // Free nodes do not have weak callbacks. Do not use them to compute
- // the freed_nodes.
- continue;
- }
- // Skip dependent handles. Their weak callbacks might expect to be
- // called between two global garbage collection callbacks which
- // are not called for minor collections.
- if (!node->is_independent() && !node->is_partially_dependent()) {
- continue;
- }
- node->clear_partially_dependent();
- if (node->PostGarbageCollectionProcessing(isolate_)) {
- if (initial_post_gc_processing_count != post_gc_processing_count_) {
- // Weak callback triggered another GC and another round of
- // PostGarbageCollection processing. The current node might
- // have been deleted in that round, so we need to bail out (or
- // restart the processing).
- return freed_nodes;
- }
- }
- if (!node->IsRetainer()) {
- freed_nodes++;
- }
+ for (int i = 0; i < new_space_nodes_.length(); ++i) {
+ Node* node = new_space_nodes_[i];
+ DCHECK(node->is_in_new_space_list());
+ if (!node->IsRetainer()) {
+ // Free nodes do not have weak callbacks. Do not use them to compute
+ // the freed_nodes.
+ continue;
}
- } else {
- for (NodeIterator it(this); !it.done(); it.Advance()) {
- if (!it.node()->IsRetainer()) {
- // Free nodes do not have weak callbacks. Do not use them to compute
- // the freed_nodes.
- continue;
- }
- it.node()->clear_partially_dependent();
- if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
- if (initial_post_gc_processing_count != post_gc_processing_count_) {
- // See the comment above.
- return freed_nodes;
- }
+ // Skip dependent handles. Their weak callbacks might expect to be
+ // called between two global garbage collection callbacks which
+ // are not called for minor collections.
+ if (!node->is_independent() && !node->is_partially_dependent()) {
+ continue;
+ }
+ node->clear_partially_dependent();
+ if (node->PostGarbageCollectionProcessing(isolate_)) {
+ if (initial_post_gc_processing_count != post_gc_processing_count_) {
+ // Weak callback triggered another GC and another round of
+ // PostGarbageCollection processing. The current node might
+ // have been deleted in that round, so we need to bail out (or
+ // restart the processing).
+ return freed_nodes;
}
- if (!it.node()->IsRetainer()) {
- freed_nodes++;
+ }
+ if (!node->IsRetainer()) {
+ freed_nodes++;
+ }
+ }
+ return freed_nodes;
+}
+
+
+int GlobalHandles::PostMarkSweepProcessing(
+ const int initial_post_gc_processing_count) {
+ int freed_nodes = 0;
+ for (NodeIterator it(this); !it.done(); it.Advance()) {
+ if (!it.node()->IsRetainer()) {
+ // Free nodes do not have weak callbacks. Do not use them to compute
+ // the freed_nodes.
+ continue;
+ }
+ it.node()->clear_partially_dependent();
+ if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
+ if (initial_post_gc_processing_count != post_gc_processing_count_) {
+ // See the comment above.
+ return freed_nodes;
}
}
+ if (!it.node()->IsRetainer()) {
+ freed_nodes++;
+ }
}
- // Update the list of new space nodes.
+ return freed_nodes;
+}
+
+
+void GlobalHandles::UpdateListOfNewSpaceNodes() {
int last = 0;
for (int i = 0; i < new_space_nodes_.length(); ++i) {
Node* node = new_space_nodes_[i];
}
}
new_space_nodes_.Rewind(last);
+}
+
+
+int GlobalHandles::DispatchPendingPhantomCallbacks() {
+ int freed_nodes = 0;
+ while (pending_phantom_callbacks_.length() != 0) {
+ PendingPhantomCallback callback = pending_phantom_callbacks_.RemoveLast();
+ callback.invoke();
+ freed_nodes++;
+ }
+ while (pending_internal_fields_callbacks_.length() != 0) {
+ PendingInternalFieldsCallback callback =
+ pending_internal_fields_callbacks_.RemoveLast();
+ callback.invoke();
+ freed_nodes++;
+ }
+ return freed_nodes;
+}
+
+
+int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) {
+ // Process weak global handle callbacks. This must be done after the
+ // GC is completely done, because the callbacks may invoke arbitrary
+ // API functions.
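+ // Scavenge and mark-sweep rounds are handled by separate helpers; phantom
+ // callbacks collected during the GC are dispatched afterwards.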
+ DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
+ const int initial_post_gc_processing_count = ++post_gc_processing_count_;
+ int freed_nodes = 0;
+ if (collector == SCAVENGER) {
+ freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count);
+ } else {
+ freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count);
+ }
+ if (initial_post_gc_processing_count != post_gc_processing_count_) {
+ // If the callbacks caused a nested GC, then return. See comment in
+ // PostScavengeProcessing.
+ return freed_nodes;
+ }
+ freed_nodes += DispatchPendingPhantomCallbacks();
+ if (initial_post_gc_processing_count == post_gc_processing_count_) {
+ UpdateListOfNewSpaceNodes();
+ }
return freed_nodes;
}
+void GlobalHandles::PendingPhantomCallback::invoke() {
+ if (node_->state() == Node::FREE) return;
+ DCHECK(node_->state() == Node::NEAR_DEATH);
+ callback_(data_);
+ if (node_->state() != Node::FREE) node_->Release();
+}
+
+
void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
for (NodeIterator it(this); !it.done(); it.Advance()) {
if (it.node()->IsStrongRetainer()) {