1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/global-handles.h"
10 #include "src/vm-state-inl.h"
16 ObjectGroup::~ObjectGroup() {
17 if (info != NULL) info->Dispose();
22 ImplicitRefGroup::~ImplicitRefGroup() {
// NOTE(review): this extraction is missing many interior lines (the embedded
// original line numbers jump); the code below is kept verbatim — only
// comments are added. A Node is one global-handle slot: the Object* slot
// itself plus bookkeeping (state, weakness, class id, free-list link).
27 class GlobalHandles::Node {
29 // State transition diagram:
30 // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
33 NORMAL, // Normal global handle.
34 WEAK, // Flagged as weak but not yet finalized.
35 PENDING, // Has been recognized as only reachable by weak handles.
36 NEAR_DEATH, // Callback has informed the handle is near death.
40 // Maps handle location (slot) to the containing node.
41 static Node* FromLocation(Object** location) {
42 DCHECK(OFFSET_OF(Node, object_) == 0);
43 return reinterpret_cast<Node*>(location);
// The asserts below pin the layout that the inline API (v8.h Internals)
// peeks at directly; changing field order breaks embedders.
47 DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset);
48 DCHECK(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
49 STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
50 Internals::kNodeStateMask);
51 STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
52 STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
53 STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
54 STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
55 Internals::kNodeIsIndependentShift);
56 STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
57 Internals::kNodeIsPartiallyDependentShift);
60 #ifdef ENABLE_HANDLE_ZAPPING
62 // TODO(1428): if it's a weak handle we should have invoked its callback.
63 // Zap the values for eager trapping.
64 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
65 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
67 set_independent(false);
68 set_partially_dependent(false);
69 set_in_new_space_list(false);
70 parameter_or_next_free_.next_free = NULL;
71 weak_callback_ = NULL;
// Called once when the node's block is created; links the node into the
// free list headed by *first_free.
75 void Initialize(int index, Node** first_free) {
76 index_ = static_cast<uint8_t>(index);
77 DCHECK(static_cast<int>(index_) == index);
79 set_in_new_space_list(false);
80 parameter_or_next_free_.next_free = *first_free;
// Takes the node off the free list and binds it to |object|.
84 void Acquire(Object* object) {
85 DCHECK(state() == FREE);
87 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
88 set_independent(false);
89 set_partially_dependent(false);
91 parameter_or_next_free_.parameter = NULL;
92 weak_callback_ = NULL;
98 // Zap the values for eager trapping.
99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
105 // Zap the values for eager trapping.
106 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
107 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
108 set_independent(false);
109 set_partially_dependent(false);
110 weak_callback_ = NULL;
114 // Object slot accessors.
115 Object* object() const { return object_; }
116 Object** location() { return &object_; }
117 Handle<Object> handle() { return Handle<Object>(location()); }
119 // Wrapper class ID accessors.
120 bool has_wrapper_class_id() const {
121 return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
124 uint16_t wrapper_class_id() const { return class_id_; }
126 // State and flag accessors.
128 State state() const {
129 return NodeState::decode(flags_);
131 void set_state(State state) {
132 flags_ = NodeState::update(flags_, state);
135 bool is_independent() {
136 return IsIndependent::decode(flags_);
138 void set_independent(bool v) {
139 flags_ = IsIndependent::update(flags_, v);
142 bool is_partially_dependent() {
143 return IsPartiallyDependent::decode(flags_);
145 void set_partially_dependent(bool v) {
146 flags_ = IsPartiallyDependent::update(flags_, v);
149 bool is_in_new_space_list() {
150 return IsInNewSpaceList::decode(flags_);
152 void set_in_new_space_list(bool v) {
153 flags_ = IsInNewSpaceList::update(flags_, v);
156 WeaknessType weakness_type() const {
157 return NodeWeaknessType::decode(flags_);
159 void set_weakness_type(WeaknessType weakness_type) {
160 flags_ = NodeWeaknessType::update(flags_, weakness_type);
163 bool IsNearDeath() const {
164 // Check for PENDING to ensure correct answer when processing callbacks.
165 return state() == PENDING || state() == NEAR_DEATH;
168 bool IsWeak() const { return state() == WEAK; }
170 bool IsInUse() const { return state() != FREE; }
172 bool IsRetainer() const { return state() != FREE; }
174 bool IsStrongRetainer() const { return state() == NORMAL; }
176 bool IsWeakRetainer() const {
177 return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
// NOTE(review): line below is the body of MarkPending() — surrounding
// lines were dropped by the extraction.
181 DCHECK(state() == WEAK);
185 // Independent flag accessors.
186 void MarkIndependent() {
188 set_independent(true);
191 void MarkPartiallyDependent() {
193 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
194 set_partially_dependent(true);
197 void clear_partially_dependent() { set_partially_dependent(false); }
199 // Callback accessor.
200 // TODO(svenpanne) Re-enable or nuke later.
201 // WeakReferenceCallback callback() { return callback_; }
203 // Callback parameter accessors.
204 void set_parameter(void* parameter) {
206 parameter_or_next_free_.parameter = parameter;
208 void* parameter() const {
210 return parameter_or_next_free_.parameter;
213 // Accessors for next free node in the free list.
215 DCHECK(state() == FREE);
216 return parameter_or_next_free_.next_free;
218 void set_next_free(Node* value) {
219 DCHECK(state() == FREE);
220 parameter_or_next_free_.next_free = value;
// Classic (non-phantom) weakness: the callback receives the still-live
// object and may revive it.
223 void MakeWeak(void* parameter, WeakCallback weak_callback) {
224 DCHECK(weak_callback != NULL);
226 CHECK(object_ != NULL);
228 set_weakness_type(NORMAL_WEAK);
229 set_parameter(parameter);
230 weak_callback_ = weak_callback;
// Phantom weakness: callback fires after the object is gone; the type
// selects whether the parameter or internal fields are delivered.
233 void MakeWeak(void* parameter,
234 WeakCallbackInfo<void>::Callback phantom_callback,
235 v8::WeakCallbackType type) {
236 DCHECK(phantom_callback != nullptr);
238 CHECK(object_ != nullptr);
241 case v8::WeakCallbackType::kParameter:
242 set_weakness_type(PHANTOM_WEAK);
244 case v8::WeakCallbackType::kInternalFields:
245 set_weakness_type(PHANTOM_WEAK_2_INTERNAL_FIELDS);
248 set_parameter(parameter);
249 weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback);
252 void* ClearWeakness() {
254 void* p = parameter();
// Queues a PendingPhantomCallback for a PENDING phantom node, snapshotting
// internal fields (Smis only) before zapping the slot.
260 void CollectPhantomCallbackData(
262 List<PendingPhantomCallback>* pending_phantom_callbacks) {
263 if (state() != PENDING) return;
264 if (weak_callback_ != NULL) {
265 if (weakness_type() == NORMAL_WEAK) return;
267 DCHECK(weakness_type() == PHANTOM_WEAK ||
268 weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
270 void* internal_fields[v8::kInternalFieldsInWeakCallback] = {nullptr,
272 if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) {
273 auto jsobject = JSObject::cast(object());
274 int field_count = jsobject->GetInternalFieldCount();
275 for (int i = 0; i < v8::kInternalFieldsInWeakCallback; ++i) {
276 if (field_count == i) break;
277 auto field = jsobject->GetInternalField(i);
278 if (field->IsSmi()) internal_fields[i] = field;
282 // Zap with something dangerous.
283 *location() = reinterpret_cast<Object*>(0x6057ca11);
285 typedef v8::WeakCallbackInfo<void> Data;
286 auto callback = reinterpret_cast<Data::Callback>(weak_callback_);
287 pending_phantom_callbacks->Add(
288 PendingPhantomCallback(this, callback, parameter(), internal_fields));
290 set_state(NEAR_DEATH);
// Invokes the classic weak callback for a dying node; returns whether a
// callback ran (so callers can detect nested GCs).
294 bool PostGarbageCollectionProcessing(Isolate* isolate) {
295 // Handles only weak handles (not phantom) that are dying.
296 if (state() != Node::PENDING) return false;
297 if (weak_callback_ == NULL) {
301 set_state(NEAR_DEATH);
303 // Check that we are not passing a finalized external string to
305 DCHECK(!object_->IsExternalOneByteString() ||
306 ExternalOneByteString::cast(object_)->resource() != NULL);
307 DCHECK(!object_->IsExternalTwoByteString() ||
308 ExternalTwoByteString::cast(object_)->resource() != NULL);
309 if (weakness_type() != NORMAL_WEAK) return false;
312 VMState<EXTERNAL> vmstate(isolate);
313 HandleScope handle_scope(isolate);
314 Object** object = location();
315 Handle<Object> handle(*object, isolate);
316 v8::WeakCallbackData<v8::Value, void> data(
317 reinterpret_cast<v8::Isolate*>(isolate), parameter(),
318 v8::Utils::ToLocal(handle));
320 weak_callback_(data);
322 // Absence of explicit cleanup or revival of weak handle
323 // in most of the cases would lead to memory leak.
324 CHECK(state() != NEAR_DEATH);
328 inline GlobalHandles* GetGlobalHandles();
331 inline NodeBlock* FindBlock();
332 inline void IncreaseBlockUses();
333 inline void DecreaseBlockUses();
335 // Storage for object pointer.
336 // Placed first to avoid offset computation.
339 // Next word stores class_id, index, state, and independent.
340 // Note: the most aligned fields should go first.
345 // Index in the containing handle block.
348 // This stores three flags (independent, partially_dependent and
349 // in_new_space_list) and a State.
350 class NodeState : public BitField<State, 0, 3> {};
351 class IsIndependent : public BitField<bool, 3, 1> {};
352 class IsPartiallyDependent : public BitField<bool, 4, 1> {};
353 class IsInNewSpaceList : public BitField<bool, 5, 1> {};
354 class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};
358 // Handle specific callback - might be a weak reference in disguise.
359 WeakCallback weak_callback_;
361 // Provided data for callback. In FREE state, this is used for
362 // the free list link.
366 } parameter_or_next_free_;
368 DISALLOW_COPY_AND_ASSIGN(Node);
// NOTE(review): interior lines are missing from this extraction; code kept
// verbatim, comments only added. A NodeBlock is a fixed-size array of Nodes
// plus links for two intrusive lists: all blocks, and blocks with used nodes.
372 class GlobalHandles::NodeBlock {
374 static const int kSize = 256;
376 explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
381 global_handles_(global_handles) {}
// Pushes all nodes of this block onto the global free list (reverse order
// so node 0 ends up first).
383 void PutNodesOnFreeList(Node** first_free) {
384 for (int i = kSize - 1; i >= 0; --i) {
385 nodes_[i].Initialize(i, first_free);
389 Node* node_at(int index) {
390 DCHECK(0 <= index && index < kSize);
391 return &nodes_[index];
// First node acquired: splice this block onto the front of the used-block
// list so NodeIterator visits it.
394 void IncreaseUses() {
395 DCHECK(used_nodes_ < kSize);
396 if (used_nodes_++ == 0) {
397 NodeBlock* old_first = global_handles_->first_used_block_;
398 global_handles_->first_used_block_ = this;
399 next_used_ = old_first;
401 if (old_first == NULL) return;
402 old_first->prev_used_ = this;
// Last node released: unlink this block from the used-block list.
406 void DecreaseUses() {
407 DCHECK(used_nodes_ > 0);
408 if (--used_nodes_ == 0) {
409 if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
410 if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
411 if (this == global_handles_->first_used_block_) {
412 global_handles_->first_used_block_ = next_used_;
417 GlobalHandles* global_handles() { return global_handles_; }
419 // Next block in the list of all blocks.
420 NodeBlock* next() const { return next_; }
422 // Next/previous block in the list of blocks with used nodes.
423 NodeBlock* next_used() const { return next_used_; }
424 NodeBlock* prev_used() const { return prev_used_; }
428 NodeBlock* const next_;
430 NodeBlock* next_used_;
431 NodeBlock* prev_used_;
432 GlobalHandles* global_handles_;
436 GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
437 return FindBlock()->global_handles();
441 GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
442 intptr_t ptr = reinterpret_cast<intptr_t>(this);
443 ptr = ptr - index_ * sizeof(Node);
444 NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
445 DCHECK(block->node_at(index_) == this);
450 void GlobalHandles::Node::IncreaseBlockUses() {
451 NodeBlock* node_block = FindBlock();
452 node_block->IncreaseUses();
453 GlobalHandles* global_handles = node_block->global_handles();
454 global_handles->isolate()->counters()->global_handles()->Increment();
455 global_handles->number_of_global_handles_++;
459 void GlobalHandles::Node::DecreaseBlockUses() {
460 NodeBlock* node_block = FindBlock();
461 GlobalHandles* global_handles = node_block->global_handles();
462 parameter_or_next_free_.next_free = global_handles->first_free_;
463 global_handles->first_free_ = this;
464 node_block->DecreaseUses();
465 global_handles->isolate()->counters()->global_handles()->Decrement();
466 global_handles->number_of_global_handles_--;
// NOTE(review): interior lines (node(), Advance() bodies, index_ member) are
// missing from this extraction; code kept verbatim, comments only added.
// Iterates every node in every block that has at least one used node.
470 class GlobalHandles::NodeIterator {
472 explicit NodeIterator(GlobalHandles* global_handles)
473 : block_(global_handles->first_used_block_),
476 bool done() const { return block_ == NULL; }
480 return block_->node_at(index_);
// Advance: step within the block, then move to the next used block.
485 if (++index_ < NodeBlock::kSize) return;
487 block_ = block_->next_used();
494 DISALLOW_COPY_AND_ASSIGN(NodeIterator);
498 GlobalHandles::GlobalHandles(Isolate* isolate)
500 number_of_global_handles_(0),
502 first_used_block_(NULL),
504 post_gc_processing_count_(0),
505 object_group_connections_(kObjectGroupConnectionsCapacity) {}
508 GlobalHandles::~GlobalHandles() {
509 NodeBlock* block = first_block_;
510 while (block != NULL) {
511 NodeBlock* tmp = block->next();
519 Handle<Object> GlobalHandles::Create(Object* value) {
520 if (first_free_ == NULL) {
521 first_block_ = new NodeBlock(this, first_block_);
522 first_block_->PutNodesOnFreeList(&first_free_);
524 DCHECK(first_free_ != NULL);
525 // Take the first node in the free list.
526 Node* result = first_free_;
527 first_free_ = result->next_free();
528 result->Acquire(value);
529 if (isolate_->heap()->InNewSpace(value) &&
530 !result->is_in_new_space_list()) {
531 new_space_nodes_.Add(result);
532 result->set_in_new_space_list(true);
534 return result->handle();
538 Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
539 DCHECK(location != NULL);
540 return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
544 void GlobalHandles::Destroy(Object** location) {
545 if (location != NULL) Node::FromLocation(location)->Release();
549 void GlobalHandles::MakeWeak(Object** location, void* parameter,
550 WeakCallback weak_callback) {
551 Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
555 typedef v8::WeakCallbackInfo<void>::Callback GenericCallback;
558 void GlobalHandles::MakeWeak(Object** location, void* parameter,
559 GenericCallback phantom_callback,
560 v8::WeakCallbackType type) {
561 Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
565 void GlobalHandles::CollectAllPhantomCallbackData() {
566 for (NodeIterator it(this); !it.done(); it.Advance()) {
567 Node* node = it.node();
568 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_);
573 void GlobalHandles::CollectYoungPhantomCallbackData() {
574 for (int i = 0; i < new_space_nodes_.length(); ++i) {
575 Node* node = new_space_nodes_[i];
576 DCHECK(node->is_in_new_space_list());
577 node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_);
582 void* GlobalHandles::ClearWeakness(Object** location) {
583 return Node::FromLocation(location)->ClearWeakness();
587 void GlobalHandles::MarkIndependent(Object** location) {
588 Node::FromLocation(location)->MarkIndependent();
592 void GlobalHandles::MarkPartiallyDependent(Object** location) {
593 Node::FromLocation(location)->MarkPartiallyDependent();
597 bool GlobalHandles::IsIndependent(Object** location) {
598 return Node::FromLocation(location)->is_independent();
602 bool GlobalHandles::IsNearDeath(Object** location) {
603 return Node::FromLocation(location)->IsNearDeath();
607 bool GlobalHandles::IsWeak(Object** location) {
608 return Node::FromLocation(location)->IsWeak();
612 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
613 for (NodeIterator it(this); !it.done(); it.Advance()) {
614 Node* node = it.node();
615 if (node->IsWeakRetainer()) {
616 // Weakness type can be normal or phantom, with or without internal
617 // fields). For normal weakness we mark through the handle so that the
618 // object and things reachable from it are available to the callback.
620 // In the case of phantom with no internal fields, we can zap the object
621 // handle now and we won't need it, so we don't need to mark through it.
622 // In the internal fields case we will need the internal
623 // fields, so we can't zap the handle.
624 if (node->state() == Node::PENDING) {
625 if (node->weakness_type() == PHANTOM_WEAK) {
626 *(node->location()) = Smi::FromInt(0);
627 } else if (node->weakness_type() == NORMAL_WEAK) {
628 v->VisitPointer(node->location());
630 DCHECK(node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
633 // Node is not pending, so that means the object survived. We still
634 // need to visit the pointer in case the object moved, eg. because of
636 v->VisitPointer(node->location());
643 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
644 for (NodeIterator it(this); !it.done(); it.Advance()) {
645 if (it.node()->IsWeak() && f(it.node()->location())) {
646 it.node()->MarkPending();
652 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
653 for (int i = 0; i < new_space_nodes_.length(); ++i) {
654 Node* node = new_space_nodes_[i];
655 if (node->IsStrongRetainer() ||
656 (node->IsWeakRetainer() && !node->is_independent() &&
657 !node->is_partially_dependent())) {
658 v->VisitPointer(node->location());
664 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
665 WeakSlotCallbackWithHeap f) {
666 for (int i = 0; i < new_space_nodes_.length(); ++i) {
667 Node* node = new_space_nodes_[i];
668 DCHECK(node->is_in_new_space_list());
669 if ((node->is_independent() || node->is_partially_dependent()) &&
670 node->IsWeak() && f(isolate_->heap(), node->location())) {
677 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
678 for (int i = 0; i < new_space_nodes_.length(); ++i) {
679 Node* node = new_space_nodes_[i];
680 DCHECK(node->is_in_new_space_list());
681 if ((node->is_independent() || node->is_partially_dependent()) &&
682 node->IsWeakRetainer()) {
683 if (node->weakness_type() == PHANTOM_WEAK) {
684 *(node->location()) = Smi::FromInt(0);
685 } else if (node->weakness_type() == NORMAL_WEAK) {
686 v->VisitPointer(node->location());
688 DCHECK(node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
689 // For this case we only need to trace if it's alive: The tracing of
690 // something that is already alive is just to get the pointer updated
691 // to the new location of the object).
692 DCHECK(node->state() != Node::NEAR_DEATH);
693 if (node->state() != Node::PENDING) {
694 v->VisitPointer(node->location());
702 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
703 WeakSlotCallbackWithHeap can_skip) {
704 ComputeObjectGroupsAndImplicitReferences();
706 bool any_group_was_visited = false;
707 for (int i = 0; i < object_groups_.length(); i++) {
708 ObjectGroup* entry = object_groups_.at(i);
709 DCHECK(entry != NULL);
711 Object*** objects = entry->objects;
712 bool group_should_be_visited = false;
713 for (size_t j = 0; j < entry->length; j++) {
714 Object* object = *objects[j];
715 if (object->IsHeapObject()) {
716 if (!can_skip(isolate_->heap(), &object)) {
717 group_should_be_visited = true;
723 if (!group_should_be_visited) {
724 object_groups_[last++] = entry;
728 // An object in the group requires visiting, so iterate over all
729 // objects in the group.
730 for (size_t j = 0; j < entry->length; ++j) {
731 Object* object = *objects[j];
732 if (object->IsHeapObject()) {
733 v->VisitPointer(&object);
734 any_group_was_visited = true;
738 // Once the entire group has been iterated over, set the object
739 // group to NULL so it won't be processed again.
741 object_groups_.at(i) = NULL;
743 object_groups_.Rewind(last);
744 return any_group_was_visited;
748 int GlobalHandles::PostScavengeProcessing(
749 const int initial_post_gc_processing_count) {
751 for (int i = 0; i < new_space_nodes_.length(); ++i) {
752 Node* node = new_space_nodes_[i];
753 DCHECK(node->is_in_new_space_list());
754 if (!node->IsRetainer()) {
755 // Free nodes do not have weak callbacks. Do not use them to compute
759 // Skip dependent handles. Their weak callbacks might expect to be
760 // called between two global garbage collection callbacks which
761 // are not called for minor collections.
762 if (!node->is_independent() && !node->is_partially_dependent()) {
765 node->clear_partially_dependent();
766 if (node->PostGarbageCollectionProcessing(isolate_)) {
767 if (initial_post_gc_processing_count != post_gc_processing_count_) {
768 // Weak callback triggered another GC and another round of
769 // PostGarbageCollection processing. The current node might
770 // have been deleted in that round, so we need to bail out (or
771 // restart the processing).
775 if (!node->IsRetainer()) {
783 int GlobalHandles::PostMarkSweepProcessing(
784 const int initial_post_gc_processing_count) {
786 for (NodeIterator it(this); !it.done(); it.Advance()) {
787 if (!it.node()->IsRetainer()) {
788 // Free nodes do not have weak callbacks. Do not use them to compute
792 it.node()->clear_partially_dependent();
793 if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
794 if (initial_post_gc_processing_count != post_gc_processing_count_) {
795 // See the comment above.
799 if (!it.node()->IsRetainer()) {
807 void GlobalHandles::UpdateListOfNewSpaceNodes() {
809 for (int i = 0; i < new_space_nodes_.length(); ++i) {
810 Node* node = new_space_nodes_[i];
811 DCHECK(node->is_in_new_space_list());
812 if (node->IsRetainer()) {
813 if (isolate_->heap()->InNewSpace(node->object())) {
814 new_space_nodes_[last++] = node;
815 isolate_->heap()->IncrementNodesCopiedInNewSpace();
817 node->set_in_new_space_list(false);
818 isolate_->heap()->IncrementNodesPromoted();
821 node->set_in_new_space_list(false);
822 isolate_->heap()->IncrementNodesDiedInNewSpace();
825 new_space_nodes_.Rewind(last);
829 int GlobalHandles::DispatchPendingPhantomCallbacks() {
832 // The initial pass callbacks must simply clear the nodes.
833 for (auto i = pending_phantom_callbacks_.begin();
834 i != pending_phantom_callbacks_.end(); ++i) {
836 // Skip callbacks that have already been processed once.
837 if (callback->node() == nullptr) continue;
838 callback->Invoke(isolate());
842 // The second pass empties the list.
843 while (pending_phantom_callbacks_.length() != 0) {
844 auto callback = pending_phantom_callbacks_.RemoveLast();
845 DCHECK(callback.node() == nullptr);
846 // No second pass callback required.
847 if (callback.callback() == nullptr) continue;
848 // Fire second pass callback.
849 callback.Invoke(isolate());
855 void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate) {
856 Data::Callback* callback_addr = nullptr;
857 if (node_ != nullptr) {
858 // Initialize for first pass callback.
859 DCHECK(node_->state() == Node::NEAR_DEATH);
860 callback_addr = &callback_;
862 Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
863 internal_fields_, callback_addr);
864 Data::Callback callback = callback_;
867 if (node_ != nullptr) {
868 // Transition to second pass state.
869 DCHECK(node_->state() == Node::FREE);
875 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) {
876 // Process weak global handle callbacks. This must be done after the
877 // GC is completely done, because the callbacks may invoke arbitrary
879 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
880 const int initial_post_gc_processing_count = ++post_gc_processing_count_;
882 freed_nodes += DispatchPendingPhantomCallbacks();
883 if (initial_post_gc_processing_count != post_gc_processing_count_) {
884 // If the callbacks caused a nested GC, then return. See comment in
885 // PostScavengeProcessing.
888 if (collector == SCAVENGER) {
889 freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count);
891 freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count);
893 if (initial_post_gc_processing_count != post_gc_processing_count_) {
894 // If the callbacks caused a nested GC, then return. See comment in
895 // PostScavengeProcessing.
898 if (initial_post_gc_processing_count == post_gc_processing_count_) {
899 UpdateListOfNewSpaceNodes();
905 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
906 for (NodeIterator it(this); !it.done(); it.Advance()) {
907 if (it.node()->IsStrongRetainer()) {
908 v->VisitPointer(it.node()->location());
914 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
915 for (NodeIterator it(this); !it.done(); it.Advance()) {
916 if (it.node()->IsRetainer()) {
917 v->VisitPointer(it.node()->location());
923 void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
924 for (NodeIterator it(this); !it.done(); it.Advance()) {
925 if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
926 v->VisitEmbedderReference(it.node()->location(),
927 it.node()->wrapper_class_id());
933 void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
934 for (int i = 0; i < new_space_nodes_.length(); ++i) {
935 Node* node = new_space_nodes_[i];
936 if (node->IsRetainer() && node->has_wrapper_class_id()) {
937 v->VisitEmbedderReference(node->location(),
938 node->wrapper_class_id());
944 int GlobalHandles::NumberOfWeakHandles() {
946 for (NodeIterator it(this); !it.done(); it.Advance()) {
947 if (it.node()->IsWeakRetainer()) {
955 int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
957 for (NodeIterator it(this); !it.done(); it.Advance()) {
958 if (it.node()->IsWeakRetainer() &&
959 it.node()->object()->IsJSGlobalObject()) {
967 void GlobalHandles::RecordStats(HeapStats* stats) {
968 *stats->global_handle_count = 0;
969 *stats->weak_global_handle_count = 0;
970 *stats->pending_global_handle_count = 0;
971 *stats->near_death_global_handle_count = 0;
972 *stats->free_global_handle_count = 0;
973 for (NodeIterator it(this); !it.done(); it.Advance()) {
974 *stats->global_handle_count += 1;
975 if (it.node()->state() == Node::WEAK) {
976 *stats->weak_global_handle_count += 1;
977 } else if (it.node()->state() == Node::PENDING) {
978 *stats->pending_global_handle_count += 1;
979 } else if (it.node()->state() == Node::NEAR_DEATH) {
980 *stats->near_death_global_handle_count += 1;
981 } else if (it.node()->state() == Node::FREE) {
982 *stats->free_global_handle_count += 1;
989 void GlobalHandles::PrintStats() {
996 for (NodeIterator it(this); !it.done(); it.Advance()) {
998 if (it.node()->state() == Node::WEAK) weak++;
999 if (it.node()->state() == Node::PENDING) pending++;
1000 if (it.node()->state() == Node::NEAR_DEATH) near_death++;
1001 if (it.node()->state() == Node::FREE) destroyed++;
1004 PrintF("Global Handle Statistics:\n");
1005 PrintF(" allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
1006 PrintF(" # weak = %d\n", weak);
1007 PrintF(" # pending = %d\n", pending);
1008 PrintF(" # near_death = %d\n", near_death);
1009 PrintF(" # free = %d\n", destroyed);
1010 PrintF(" # total = %d\n", total);
1014 void GlobalHandles::Print() {
1015 PrintF("Global handles:\n");
1016 for (NodeIterator it(this); !it.done(); it.Advance()) {
1017 PrintF(" handle %p to %p%s\n",
1018 reinterpret_cast<void*>(it.node()->location()),
1019 reinterpret_cast<void*>(it.node()->object()),
1020 it.node()->IsWeak() ? " (weak)" : "");
1028 void GlobalHandles::AddObjectGroup(Object*** handles,
1030 v8::RetainedObjectInfo* info) {
1032 for (size_t i = 0; i < length; ++i) {
1033 DCHECK(!Node::FromLocation(handles[i])->is_independent());
1037 if (info != NULL) info->Dispose();
1040 ObjectGroup* group = new ObjectGroup(length);
1041 for (size_t i = 0; i < length; ++i)
1042 group->objects[i] = handles[i];
1044 object_groups_.Add(group);
1048 void GlobalHandles::SetObjectGroupId(Object** handle,
1050 object_group_connections_.Add(ObjectGroupConnection(id, handle));
1054 void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
1055 RetainedObjectInfo* info) {
1056 retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
1060 void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
1061 DCHECK(!Node::FromLocation(child)->is_independent());
1062 implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
1066 void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
1067 DCHECK(!Node::FromLocation(child)->is_independent());
1068 ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
1069 group->children[0] = child;
1070 implicit_ref_groups_.Add(group);
1074 void GlobalHandles::RemoveObjectGroups() {
1075 for (int i = 0; i < object_groups_.length(); i++)
1076 delete object_groups_.at(i);
1077 object_groups_.Clear();
1078 for (int i = 0; i < retainer_infos_.length(); ++i)
1079 retainer_infos_[i].info->Dispose();
1080 retainer_infos_.Clear();
1081 object_group_connections_.Clear();
1082 object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1086 void GlobalHandles::RemoveImplicitRefGroups() {
1087 for (int i = 0; i < implicit_ref_groups_.length(); i++) {
1088 delete implicit_ref_groups_.at(i);
1090 implicit_ref_groups_.Clear();
1091 implicit_ref_connections_.Clear();
1095 void GlobalHandles::TearDown() {
1096 // TODO(1428): invoke weak callbacks.
1100 void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
1101 if (object_group_connections_.length() == 0) {
1102 for (int i = 0; i < retainer_infos_.length(); ++i)
1103 retainer_infos_[i].info->Dispose();
1104 retainer_infos_.Clear();
1105 implicit_ref_connections_.Clear();
1109 object_group_connections_.Sort();
1110 retainer_infos_.Sort();
1111 implicit_ref_connections_.Sort();
1113 int info_index = 0; // For iterating retainer_infos_.
1114 UniqueId current_group_id(0);
1115 int current_group_start = 0;
1117 int current_implicit_refs_start = 0;
1118 int current_implicit_refs_end = 0;
1119 for (int i = 0; i <= object_group_connections_.length(); ++i) {
1121 current_group_id = object_group_connections_[i].id;
1122 if (i == object_group_connections_.length() ||
1123 current_group_id != object_group_connections_[i].id) {
1124 // Group detected: objects in indices [current_group_start, i[.
1126 // Find out which implicit references are related to this group. (We want
1127 // to ignore object groups which only have 1 object, but that object is
1128 // needed as a representative object for the implicit refrerence group.)
1129 while (current_implicit_refs_start < implicit_ref_connections_.length() &&
1130 implicit_ref_connections_[current_implicit_refs_start].id <
1132 ++current_implicit_refs_start;
1133 current_implicit_refs_end = current_implicit_refs_start;
1134 while (current_implicit_refs_end < implicit_ref_connections_.length() &&
1135 implicit_ref_connections_[current_implicit_refs_end].id ==
1137 ++current_implicit_refs_end;
1139 if (current_implicit_refs_end > current_implicit_refs_start) {
1140 // Find a representative object for the implicit references.
1141 HeapObject** representative = NULL;
1142 for (int j = current_group_start; j < i; ++j) {
1143 Object** object = object_group_connections_[j].object;
1144 if ((*object)->IsHeapObject()) {
1145 representative = reinterpret_cast<HeapObject**>(object);
1149 if (representative) {
1150 ImplicitRefGroup* group = new ImplicitRefGroup(
1152 current_implicit_refs_end - current_implicit_refs_start);
1153 for (int j = current_implicit_refs_start;
1154 j < current_implicit_refs_end;
1156 group->children[j - current_implicit_refs_start] =
1157 implicit_ref_connections_[j].object;
1159 implicit_ref_groups_.Add(group);
1161 current_implicit_refs_start = current_implicit_refs_end;
1164 // Find a RetainedObjectInfo for the group.
1165 RetainedObjectInfo* info = NULL;
1166 while (info_index < retainer_infos_.length() &&
1167 retainer_infos_[info_index].id < current_group_id) {
1168 retainer_infos_[info_index].info->Dispose();
1171 if (info_index < retainer_infos_.length() &&
1172 retainer_infos_[info_index].id == current_group_id) {
1173 // This object group has an associated ObjectGroupRetainerInfo.
1174 info = retainer_infos_[info_index].info;
1178 // Ignore groups which only contain one object.
1179 if (i > current_group_start + 1) {
1180 ObjectGroup* group = new ObjectGroup(i - current_group_start);
1181 for (int j = current_group_start; j < i; ++j) {
1182 group->objects[j - current_group_start] =
1183 object_group_connections_[j].object;
1186 object_groups_.Add(group);
1191 if (i < object_group_connections_.length()) {
1192 current_group_id = object_group_connections_[i].id;
1193 current_group_start = i;
1197 object_group_connections_.Clear();
1198 object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1199 retainer_infos_.Clear();
1200 implicit_ref_connections_.Clear();
1204 EternalHandles::EternalHandles() : size_(0) {
1205 for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
1206 singleton_handles_[i] = kInvalidIndex;
1211 EternalHandles::~EternalHandles() {
1212 for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
1216 void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
1218 for (int i = 0; i < blocks_.length(); i++) {
1220 Object** block = blocks_[i];
1221 visitor->VisitPointers(block, block + Min(limit, kSize));
1227 void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
1228 for (int i = 0; i < new_space_indices_.length(); i++) {
1229 visitor->VisitPointer(GetLocation(new_space_indices_[i]));
1234 void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
1236 for (int i = 0; i < new_space_indices_.length(); i++) {
1237 int index = new_space_indices_[i];
1238 if (heap->InNewSpace(*GetLocation(index))) {
1239 new_space_indices_[last++] = index;
1242 new_space_indices_.Rewind(last);
1246 void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
1247 DCHECK_EQ(kInvalidIndex, *index);
1248 if (object == NULL) return;
1249 DCHECK_NE(isolate->heap()->the_hole_value(), object);
1250 int block = size_ >> kShift;
1251 int offset = size_ & kMask;
1254 Object** next_block = new Object*[kSize];
1255 Object* the_hole = isolate->heap()->the_hole_value();
1256 MemsetPointer(next_block, the_hole, kSize);
1257 blocks_.Add(next_block);
1259 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1260 blocks_[block][offset] = object;
1261 if (isolate->heap()->InNewSpace(object)) {
1262 new_space_indices_.Add(size_);
1268 } } // namespace v8::internal