1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/global-handles.h"
10 #include "src/vm-state-inl.h"
// ~ObjectGroup: release the optional v8::RetainedObjectInfo attached to the
// group (Dispose() is the embedder-supplied destruction hook).
16 ObjectGroup::~ObjectGroup() {
17 if (info != NULL) info->Dispose();
// ~ImplicitRefGroup: body not visible in this chunk (source is elided here).
22 ImplicitRefGroup::~ImplicitRefGroup() {
// Node: internal bookkeeping record for one global handle.  The embedder's
// Object** slot is the node's first field, so a handle location can be cast
// straight back to its Node (see FromLocation).  State, flags and the weak
// callback are stored alongside the slot.
// NOTE(review): this chunk is elided — some statements and closing braces
// between the embedded line numbers are not visible here.
27 class GlobalHandles::Node {
29 // State transition diagram:
30 // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
33 NORMAL, // Normal global handle.
34 WEAK, // Flagged as weak but not yet finalized.
35 PENDING, // Has been recognized as only reachable by weak handles.
36 NEAR_DEATH // Callback has informed the handle is near death.
39 // Maps handle location (slot) to the containing node.
40 static Node* FromLocation(Object** location) {
41 DCHECK(OFFSET_OF(Node, object_) == 0);
42 return reinterpret_cast<Node*>(location);
// These asserts pin the field layout to the offsets/encodings that the
// public API (Internals in v8.h) reads directly without calling into V8.
46 DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset);
47 DCHECK(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
48 STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
49 Internals::kNodeStateMask);
50 STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
51 STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
52 STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
53 STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
54 Internals::kNodeIsIndependentShift);
55 STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
56 Internals::kNodeIsPartiallyDependentShift);
59 #ifdef ENABLE_HANDLE_ZAPPING
61 // TODO(1428): if it's a weak handle we should have invoked its callback.
62 // Zap the values for eager trapping.
63 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
64 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
66 set_independent(false);
67 set_partially_dependent(false);
68 set_in_new_space_list(false);
69 parameter_or_next_free_.next_free = NULL;
70 weak_callback_ = NULL;
// Initialize: record this node's slot index within its NodeBlock and link
// it into the free list rooted at |first_free|.
74 void Initialize(int index, Node** first_free) {
75 index_ = static_cast<uint8_t>(index);
76 DCHECK(static_cast<int>(index_) == index);
78 set_in_new_space_list(false);
79 parameter_or_next_free_.next_free = *first_free;
// Acquire: turn a FREE node into a live handle for |object|.
83 void Acquire(Object* object) {
84 DCHECK(state() == FREE);
86 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
87 set_independent(false);
88 set_partially_dependent(false);
90 parameter_or_next_free_.parameter = NULL;
91 weak_callback_ = NULL;
// (Release path) — zap the slot so stale reads trap eagerly.
96 DCHECK(state() != FREE);
98 // Zap the values for eager trapping.
99 object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
100 class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
101 set_independent(false);
102 set_partially_dependent(false);
103 weak_callback_ = NULL;
107 // Object slot accessors.
108 Object* object() const { return object_; }
109 Object** location() { return &object_; }
110 Handle<Object> handle() { return Handle<Object>(location()); }
112 // Wrapper class ID accessors.
113 bool has_wrapper_class_id() const {
114 return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
117 uint16_t wrapper_class_id() const { return class_id_; }
119 // State and flag accessors.
121 State state() const {
122 return NodeState::decode(flags_);
124 void set_state(State state) {
125 flags_ = NodeState::update(flags_, state);
128 bool is_independent() {
129 return IsIndependent::decode(flags_);
131 void set_independent(bool v) {
132 flags_ = IsIndependent::update(flags_, v);
135 bool is_partially_dependent() {
136 return IsPartiallyDependent::decode(flags_);
138 void set_partially_dependent(bool v) {
139 flags_ = IsPartiallyDependent::update(flags_, v);
142 bool is_in_new_space_list() {
143 return IsInNewSpaceList::decode(flags_);
145 void set_in_new_space_list(bool v) {
146 flags_ = IsInNewSpaceList::update(flags_, v);
149 bool is_zapped_during_weak_callback() {
150 return IsZappedDuringWeakCallback::decode(flags_);
152 void set_is_zapped_during_weak_callback(bool v) {
153 flags_ = IsZappedDuringWeakCallback::update(flags_, v);
156 bool IsNearDeath() const {
157 // Check for PENDING to ensure correct answer when processing callbacks.
158 return state() == PENDING || state() == NEAR_DEATH;
161 bool IsWeak() const { return state() == WEAK; }
163 bool IsRetainer() const { return state() != FREE; }
165 bool IsStrongRetainer() const { return state() == NORMAL; }
167 bool IsWeakRetainer() const {
168 return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
// (MarkPending) — only legal on a WEAK node.
172 DCHECK(state() == WEAK);
176 // Independent flag accessors.
177 void MarkIndependent() {
178 DCHECK(state() != FREE);
179 set_independent(true);
182 void MarkPartiallyDependent() {
183 DCHECK(state() != FREE);
// Partial dependence only applies to objects currently in new space.
184 if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
185 set_partially_dependent(true);
188 void clear_partially_dependent() { set_partially_dependent(false); }
190 // Callback accessor.
191 // TODO(svenpanne) Re-enable or nuke later.
192 // WeakReferenceCallback callback() { return callback_; }
194 // Callback parameter accessors.
195 void set_parameter(void* parameter) {
196 DCHECK(state() != FREE);
197 parameter_or_next_free_.parameter = parameter;
199 void* parameter() const {
200 DCHECK(state() != FREE);
201 return parameter_or_next_free_.parameter;
204 // Accessors for next free node in the free list.
206 DCHECK(state() == FREE);
207 return parameter_or_next_free_.next_free;
209 void set_next_free(Node* value) {
210 DCHECK(state() == FREE);
211 parameter_or_next_free_.next_free = value;
// MakeWeak: register |weak_callback| (with |parameter|) to run once the
// object is only weakly reachable.  When |is_zapped_during_weak_callback|
// is set the handle is phantom-like: the slot is zapped before the
// callback runs (see PostGarbageCollectionProcessing below).
214 void MakeWeak(void* parameter, WeakCallback weak_callback,
215 bool is_zapped_during_weak_callback = false) {
216 DCHECK(weak_callback != NULL);
217 DCHECK(state() != FREE);
218 CHECK(object_ != NULL);
220 set_parameter(parameter);
221 set_is_zapped_during_weak_callback(is_zapped_during_weak_callback);
222 weak_callback_ = weak_callback;
// ClearWeakness: revert to a strong handle; returns the previously
// registered parameter so the embedder can reclaim it.
225 void* ClearWeakness() {
226 DCHECK(state() != FREE);
227 void* p = parameter();
// PostGarbageCollectionProcessing: for a node that became PENDING during
// GC, advance it to NEAR_DEATH and invoke the registered weak callback.
// Returns false for non-PENDING nodes; the callback may revive or dispose
// the handle.
233 bool PostGarbageCollectionProcessing(Isolate* isolate) {
234 if (state() != Node::PENDING) return false;
235 if (weak_callback_ == NULL) {
239 void* param = parameter();
240 set_state(NEAR_DEATH);
243 Object** object = location();
245 // Check that we are not passing a finalized external string to
247 DCHECK(!object_->IsExternalOneByteString() ||
248 ExternalOneByteString::cast(object_)->resource() != NULL);
249 DCHECK(!object_->IsExternalTwoByteString() ||
250 ExternalTwoByteString::cast(object_)->resource() != NULL);
// Weak callbacks run in EXTERNAL VM state inside their own handle scope.
252 VMState<EXTERNAL> vmstate(isolate);
253 HandleScope handle_scope(isolate);
254 if (is_zapped_during_weak_callback()) {
255 // Phantom weak pointer case.
256 DCHECK(*object == Smi::FromInt(kPhantomReferenceZap));
257 // Make data with a null handle.
258 v8::WeakCallbackData<v8::Value, void> data(
259 reinterpret_cast<v8::Isolate*>(isolate), v8::Local<v8::Object>(),
261 weak_callback_(data);
262 if (state() != FREE) {
263 // Callback does not have to clear the global handle if it is a
// Non-phantom case: hand the callback a real local handle to the object.
268 Handle<Object> handle(*object, isolate);
269 v8::WeakCallbackData<v8::Value, void> data(
270 reinterpret_cast<v8::Isolate*>(isolate), v8::Utils::ToLocal(handle),
272 weak_callback_(data);
275 // Absence of explicit cleanup or revival of weak handle
276 // in most of the cases would lead to memory leak.
277 CHECK(state() != NEAR_DEATH);
281 inline GlobalHandles* GetGlobalHandles();
284 inline NodeBlock* FindBlock();
285 inline void IncreaseBlockUses();
286 inline void DecreaseBlockUses();
288 // Storage for object pointer.
289 // Placed first to avoid offset computation.
292 // Next word stores class_id, index, state, and independent.
293 // Note: the most aligned fields should go first.
298 // Index in the containing handle block.
301 // This stores three flags (independent, partially_dependent and
302 // in_new_space_list) and a State.
303 class NodeState : public BitField<State, 0, 4> {};
304 class IsIndependent : public BitField<bool, 4, 1> {};
305 class IsPartiallyDependent : public BitField<bool, 5, 1> {};
306 class IsInNewSpaceList : public BitField<bool, 6, 1> {};
307 class IsZappedDuringWeakCallback : public BitField<bool, 7, 1> {};
311 // Handle specific callback - might be a weak reference in disguise.
312 WeakCallback weak_callback_;
314 // Provided data for callback. In FREE state, this is used for
315 // the free list link.
319 } parameter_or_next_free_;
321 DISALLOW_COPY_AND_ASSIGN(Node);
// NodeBlock: fixed-size (kSize) arena of Nodes.  Blocks form a singly-linked
// list of all blocks (next_) plus a doubly-linked list of blocks that
// currently contain used nodes (next_used_/prev_used_), which NodeIterator
// walks.
325 class GlobalHandles::NodeBlock {
327 static const int kSize = 256;
329 explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
334 global_handles_(global_handles) {}
// Push every node of this block onto |first_free|; iterating from the last
// index down means the free list hands nodes out in ascending order.
336 void PutNodesOnFreeList(Node** first_free) {
337 for (int i = kSize - 1; i >= 0; --i) {
338 nodes_[i].Initialize(i, first_free);
342 Node* node_at(int index) {
343 DCHECK(0 <= index && index < kSize);
344 return &nodes_[index];
// IncreaseUses: on the 0 -> 1 transition, splice this block onto the front
// of the owner's "used blocks" list.
347 void IncreaseUses() {
348 DCHECK(used_nodes_ < kSize);
349 if (used_nodes_++ == 0) {
350 NodeBlock* old_first = global_handles_->first_used_block_;
351 global_handles_->first_used_block_ = this;
352 next_used_ = old_first;
354 if (old_first == NULL) return;
355 old_first->prev_used_ = this;
// DecreaseUses: on the 1 -> 0 transition, unlink this block from the used
// list, fixing up the list head when this block was the head.
359 void DecreaseUses() {
360 DCHECK(used_nodes_ > 0);
361 if (--used_nodes_ == 0) {
362 if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
363 if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
364 if (this == global_handles_->first_used_block_) {
365 global_handles_->first_used_block_ = next_used_;
370 GlobalHandles* global_handles() { return global_handles_; }
372 // Next block in the list of all blocks.
373 NodeBlock* next() const { return next_; }
375 // Next/previous block in the list of blocks with used nodes.
376 NodeBlock* next_used() const { return next_used_; }
377 NodeBlock* prev_used() const { return prev_used_; }
381 NodeBlock* const next_;
383 NodeBlock* next_used_;
384 NodeBlock* prev_used_;
385 GlobalHandles* global_handles_;
// Reach the owning GlobalHandles registry via the containing block.
389 GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
390 return FindBlock()->global_handles();
// FindBlock: nodes are stored inline in their NodeBlock, so subtracting
// index_ * sizeof(Node) from |this| recovers the start of the nodes array.
// NOTE(review): this relies on the nodes array being at offset 0 of
// NodeBlock; the DCHECK below verifies the round trip.
394 GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
395 intptr_t ptr = reinterpret_cast<intptr_t>(this);
396 ptr = ptr - index_ * sizeof(Node);
397 NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
398 DCHECK(block->node_at(index_) == this);
// Bump block usage plus the isolate-wide handle counter and tally.
403 void GlobalHandles::Node::IncreaseBlockUses() {
404 NodeBlock* node_block = FindBlock();
405 node_block->IncreaseUses();
406 GlobalHandles* global_handles = node_block->global_handles();
407 global_handles->isolate()->counters()->global_handles()->Increment();
408 global_handles->number_of_global_handles_++;
// Return this node to the registry's free list and update statistics.
412 void GlobalHandles::Node::DecreaseBlockUses() {
413 NodeBlock* node_block = FindBlock();
414 GlobalHandles* global_handles = node_block->global_handles();
415 parameter_or_next_free_.next_free = global_handles->first_free_;
416 global_handles->first_free_ = this;
417 node_block->DecreaseUses();
418 global_handles->isolate()->counters()->global_handles()->Decrement();
419 global_handles->number_of_global_handles_--;
// NodeIterator: walks every node slot of every block on the "used blocks"
// list; done() once the chain of used blocks is exhausted.
423 class GlobalHandles::NodeIterator {
425 explicit NodeIterator(GlobalHandles* global_handles)
426 : block_(global_handles->first_used_block_),
429 bool done() const { return block_ == NULL; }
433 return block_->node_at(index_);
// (Advance) — step to the next slot, hopping to the next used block when
// the current block's kSize slots are exhausted.
438 if (++index_ < NodeBlock::kSize) return;
440 block_ = block_->next_used();
447 DISALLOW_COPY_AND_ASSIGN(NodeIterator);
// GlobalHandles: per-isolate registry of global handles; all list heads and
// counters start empty/zero.
451 GlobalHandles::GlobalHandles(Isolate* isolate)
453 number_of_global_handles_(0),
455 first_used_block_(NULL),
457 post_gc_processing_count_(0),
458 object_group_connections_(kObjectGroupConnectionsCapacity) {}
// Destructor: walk and free the whole chain of node blocks.
461 GlobalHandles::~GlobalHandles() {
462 NodeBlock* block = first_block_;
463 while (block != NULL) {
464 NodeBlock* tmp = block->next();
// Create: allocate a new global handle for |value|.  Grows a fresh NodeBlock
// when the free list is empty, and records new-space objects in
// new_space_nodes_ so scavenges can visit them without a full iteration.
472 Handle<Object> GlobalHandles::Create(Object* value) {
473 if (first_free_ == NULL) {
474 first_block_ = new NodeBlock(this, first_block_);
475 first_block_->PutNodesOnFreeList(&first_free_);
477 DCHECK(first_free_ != NULL);
478 // Take the first node in the free list.
479 Node* result = first_free_;
480 first_free_ = result->next_free();
481 result->Acquire(value);
482 if (isolate_->heap()->InNewSpace(value) &&
483 !result->is_in_new_space_list()) {
484 new_space_nodes_.Add(result);
485 result->set_in_new_space_list(true);
487 return result->handle();
// CopyGlobal: create a fresh global handle to the same object, registered
// with the same GlobalHandles instance that owns |location|.
491 Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
492 DCHECK(location != NULL);
493 return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
// Destroy: NULL-tolerant release of a handle slot.
497 void GlobalHandles::Destroy(Object** location) {
498 if (location != NULL) Node::FromLocation(location)->Release();
// MakeWeak: a Phantom handle has its slot zapped before the callback runs.
502 void GlobalHandles::MakeWeak(Object** location, void* parameter,
503 WeakCallback weak_callback, PhantomState phantom) {
504 Node::FromLocation(location)
505 ->MakeWeak(parameter, weak_callback, phantom == Phantom);
// The remaining entry points are thin forwarders onto the node that owns
// the given slot.
509 void* GlobalHandles::ClearWeakness(Object** location) {
510 return Node::FromLocation(location)->ClearWeakness();
514 void GlobalHandles::MarkIndependent(Object** location) {
515 Node::FromLocation(location)->MarkIndependent();
519 void GlobalHandles::MarkPartiallyDependent(Object** location) {
520 Node::FromLocation(location)->MarkPartiallyDependent();
524 bool GlobalHandles::IsIndependent(Object** location) {
525 return Node::FromLocation(location)->is_independent();
529 bool GlobalHandles::IsNearDeath(Object** location) {
530 return Node::FromLocation(location)->IsNearDeath();
534 bool GlobalHandles::IsWeak(Object** location) {
535 return Node::FromLocation(location)->IsWeak();
// IterateWeakRoots: visit all weak-retaining handle slots.  PENDING handles
// flagged for zap-during-callback get their slot overwritten with the
// phantom zap value first, so the later callback observes the zap.
539 void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
540 for (NodeIterator it(this); !it.done(); it.Advance()) {
541 Node* node = it.node();
542 if (node->IsWeakRetainer()) {
543 if (node->state() == Node::PENDING &&
544 node->is_zapped_during_weak_callback()) {
545 *(node->location()) = Smi::FromInt(kPhantomReferenceZap);
547 v->VisitPointer(node->location());
// IdentifyWeakHandles: mark as PENDING every WEAK handle whose slot the
// predicate |f| reports as dead.
554 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
555 for (NodeIterator it(this); !it.done(); it.Advance()) {
556 if (it.node()->IsWeak() && f(it.node()->location())) {
557 it.node()->MarkPending();
// Scavenge support: strong handles plus weak handles that are neither
// independent nor partially dependent must be treated as strong roots
// during a minor GC.
563 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
564 for (int i = 0; i < new_space_nodes_.length(); ++i) {
565 Node* node = new_space_nodes_[i];
566 if (node->IsStrongRetainer() ||
567 (node->IsWeakRetainer() && !node->is_independent() &&
568 !node->is_partially_dependent())) {
569 v->VisitPointer(node->location());
// Mark independent/partially-dependent weak new-space handles whose
// referent |f| reports as dead.  (The MarkPending call falls in an elided
// line of this chunk.)
575 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
576 WeakSlotCallbackWithHeap f) {
577 for (int i = 0; i < new_space_nodes_.length(); ++i) {
578 Node* node = new_space_nodes_[i];
579 DCHECK(node->is_in_new_space_list());
580 if ((node->is_independent() || node->is_partially_dependent()) &&
581 node->IsWeak() && f(isolate_->heap(), node->location())) {
// Visit independent/partially-dependent weak-retaining new-space handles,
// zapping phantom slots first (mirrors IterateWeakRoots above).
588 void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
589 for (int i = 0; i < new_space_nodes_.length(); ++i) {
590 Node* node = new_space_nodes_[i];
591 DCHECK(node->is_in_new_space_list());
592 if ((node->is_independent() || node->is_partially_dependent()) &&
593 node->IsWeakRetainer()) {
594 if (node->is_zapped_during_weak_callback()) {
595 *(node->location()) = Smi::FromInt(kPhantomReferenceZap);
597 v->VisitPointer(node->location());
// IterateObjectGroups: visit every object group containing at least one
// heap object that |can_skip| refuses to skip.  Visited groups are retired
// (slot set to NULL); unvisited ones are compacted back into
// object_groups_, which is then truncated with Rewind.  Returns whether any
// group was visited.
// NOTE(review): the declaration of the compaction cursor |last| falls in an
// elided line of this chunk.
604 bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
605 WeakSlotCallbackWithHeap can_skip) {
606 ComputeObjectGroupsAndImplicitReferences();
608 bool any_group_was_visited = false;
609 for (int i = 0; i < object_groups_.length(); i++) {
610 ObjectGroup* entry = object_groups_.at(i);
611 DCHECK(entry != NULL);
613 Object*** objects = entry->objects;
614 bool group_should_be_visited = false;
615 for (size_t j = 0; j < entry->length; j++) {
616 Object* object = *objects[j];
617 if (object->IsHeapObject()) {
618 if (!can_skip(isolate_->heap(), &object)) {
619 group_should_be_visited = true;
// Keep unvisited groups for a later round.
625 if (!group_should_be_visited) {
626 object_groups_[last++] = entry;
630 // An object in the group requires visiting, so iterate over all
631 // objects in the group.
632 for (size_t j = 0; j < entry->length; ++j) {
633 Object* object = *objects[j];
634 if (object->IsHeapObject()) {
635 v->VisitPointer(&object);
636 any_group_was_visited = true;
640 // Once the entire group has been iterated over, set the object
641 // group to NULL so it won't be processed again.
643 object_groups_.at(i) = NULL;
645 object_groups_.Rewind(last);
646 return any_group_was_visited;
// PostGarbageCollectionProcessing: run pending weak callbacks after a GC.
// For a scavenge, only independent/partially-dependent new-space nodes are
// processed; for a full GC, every used node is.  If a callback triggers a
// nested GC (detected via post_gc_processing_count_), processing bails out
// because the current node may already be gone.  Finally new_space_nodes_
// is compacted down to nodes still in new space.
// NOTE(review): the declarations of the returned counter and of the
// compaction cursor |last| fall in elided lines of this chunk.
650 int GlobalHandles::PostGarbageCollectionProcessing(
651 GarbageCollector collector) {
652 // Process weak global handle callbacks. This must be done after the
653 // GC is completely done, because the callbacks may invoke arbitrary
655 DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
656 const int initial_post_gc_processing_count = ++post_gc_processing_count_;
658 if (collector == SCAVENGER) {
659 for (int i = 0; i < new_space_nodes_.length(); ++i) {
660 Node* node = new_space_nodes_[i];
661 DCHECK(node->is_in_new_space_list());
662 if (!node->IsRetainer()) {
663 // Free nodes do not have weak callbacks. Do not use them to compute
667 // Skip dependent handles. Their weak callbacks might expect to be
668 // called between two global garbage collection callbacks which
669 // are not called for minor collections.
670 if (!node->is_independent() && !node->is_partially_dependent()) {
673 node->clear_partially_dependent();
674 if (node->PostGarbageCollectionProcessing(isolate_)) {
675 if (initial_post_gc_processing_count != post_gc_processing_count_) {
676 // Weak callback triggered another GC and another round of
677 // PostGarbageCollection processing. The current node might
678 // have been deleted in that round, so we need to bail out (or
679 // restart the processing).
683 if (!node->IsRetainer()) {
// Full GC path: the same processing, but over all used nodes.
688 for (NodeIterator it(this); !it.done(); it.Advance()) {
689 if (!it.node()->IsRetainer()) {
690 // Free nodes do not have weak callbacks. Do not use them to compute
694 it.node()->clear_partially_dependent();
695 if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
696 if (initial_post_gc_processing_count != post_gc_processing_count_) {
697 // See the comment above.
701 if (!it.node()->IsRetainer()) {
706 // Update the list of new space nodes.
708 for (int i = 0; i < new_space_nodes_.length(); ++i) {
709 Node* node = new_space_nodes_[i];
710 DCHECK(node->is_in_new_space_list());
711 if (node->IsRetainer()) {
712 if (isolate_->heap()->InNewSpace(node->object())) {
713 new_space_nodes_[last++] = node;
714 isolate_->heap()->IncrementNodesCopiedInNewSpace();
716 node->set_in_new_space_list(false);
717 isolate_->heap()->IncrementNodesPromoted();
720 node->set_in_new_space_list(false);
721 isolate_->heap()->IncrementNodesDiedInNewSpace();
724 new_space_nodes_.Rewind(last);
// Root-iteration helpers: visit handle slots filtered by retention state.
729 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
730 for (NodeIterator it(this); !it.done(); it.Advance()) {
731 if (it.node()->IsStrongRetainer()) {
732 v->VisitPointer(it.node()->location());
738 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
739 for (NodeIterator it(this); !it.done(); it.Advance()) {
740 if (it.node()->IsRetainer()) {
741 v->VisitPointer(it.node()->location());
// Heap-profiler support: also report the embedder-assigned wrapper class id
// for handles that have one.
747 void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
748 for (NodeIterator it(this); !it.done(); it.Advance()) {
749 if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
750 v->VisitEmbedderReference(it.node()->location(),
751 it.node()->wrapper_class_id());
757 void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
758 for (int i = 0; i < new_space_nodes_.length(); ++i) {
759 Node* node = new_space_nodes_[i];
760 if (node->IsRetainer() && node->has_wrapper_class_id()) {
761 v->VisitEmbedderReference(node->location(),
762 node->wrapper_class_id());
// Counting helpers.  NOTE(review): the counter declarations and the
// increment/return statements fall in elided lines of this chunk.
768 int GlobalHandles::NumberOfWeakHandles() {
770 for (NodeIterator it(this); !it.done(); it.Advance()) {
771 if (it.node()->IsWeakRetainer()) {
779 int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
781 for (NodeIterator it(this); !it.done(); it.Advance()) {
782 if (it.node()->IsWeakRetainer() &&
783 it.node()->object()->IsJSGlobalObject()) {
// RecordStats: fill the out-pointers in |stats| with handle counts, both a
// grand total and a per-state breakdown.
791 void GlobalHandles::RecordStats(HeapStats* stats) {
792 *stats->global_handle_count = 0;
793 *stats->weak_global_handle_count = 0;
794 *stats->pending_global_handle_count = 0;
795 *stats->near_death_global_handle_count = 0;
796 *stats->free_global_handle_count = 0;
797 for (NodeIterator it(this); !it.done(); it.Advance()) {
798 *stats->global_handle_count += 1;
799 if (it.node()->state() == Node::WEAK) {
800 *stats->weak_global_handle_count += 1;
801 } else if (it.node()->state() == Node::PENDING) {
802 *stats->pending_global_handle_count += 1;
803 } else if (it.node()->state() == Node::NEAR_DEATH) {
804 *stats->near_death_global_handle_count += 1;
805 } else if (it.node()->state() == Node::FREE) {
806 *stats->free_global_handle_count += 1;
// PrintStats: tally handles per state and print a summary to stdout.
// NOTE(review): the local counters (total/weak/pending/near_death/
// destroyed) are declared in elided lines of this chunk.
813 void GlobalHandles::PrintStats() {
820 for (NodeIterator it(this); !it.done(); it.Advance()) {
822 if (it.node()->state() == Node::WEAK) weak++;
823 if (it.node()->state() == Node::PENDING) pending++;
824 if (it.node()->state() == Node::NEAR_DEATH) near_death++;
825 if (it.node()->state() == Node::FREE) destroyed++;
828 PrintF("Global Handle Statistics:\n");
829 PrintF("  allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
830 PrintF("  # weak       = %d\n", weak);
831 PrintF("  # pending    = %d\n", pending);
832 PrintF("  # near_death = %d\n", near_death);
833 PrintF("  # free       = %d\n", destroyed);
834 PrintF("  # total      = %d\n", total);
// Print: dump each handle's slot address, referent, and weakness.
838 void GlobalHandles::Print() {
839 PrintF("Global handles:\n");
840 for (NodeIterator it(this); !it.done(); it.Advance()) {
841 PrintF("  handle %p to %p%s\n",
842 reinterpret_cast<void*>(it.node()->location()),
843 reinterpret_cast<void*>(it.node()->object()),
844 it.node()->IsWeak() ? " (weak)" : "");
// AddObjectGroup: record a group of handles that the embedder declares to
// retain one another; |info| (optional) describes the retainer for heap
// snapshots.  NOTE(review): the lines between the DCHECK loop and the
// Dispose() at original line 861 are elided — presumably an empty-group
// early return that just disposes |info|; confirm against full source.
852 void GlobalHandles::AddObjectGroup(Object*** handles,
854 v8::RetainedObjectInfo* info) {
856 for (size_t i = 0; i < length; ++i) {
857 DCHECK(!Node::FromLocation(handles[i])->is_independent());
861 if (info != NULL) info->Dispose();
864 ObjectGroup* group = new ObjectGroup(length);
865 for (size_t i = 0; i < length; ++i)
866 group->objects[i] = handles[i];
868 object_groups_.Add(group);
// Connection-based API: (id, handle) pairs are accumulated here and later
// assembled into groups by ComputeObjectGroupsAndImplicitReferences().
872 void GlobalHandles::SetObjectGroupId(Object** handle,
874 object_group_connections_.Add(ObjectGroupConnection(id, handle));
878 void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
879 RetainedObjectInfo* info) {
880 retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
884 void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
885 DCHECK(!Node::FromLocation(child)->is_independent());
886 implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
// SetReference: a single parent -> child implicit reference, expressed as a
// one-child ImplicitRefGroup.
890 void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
891 DCHECK(!Node::FromLocation(child)->is_independent());
892 ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
893 group->children[0] = child;
894 implicit_ref_groups_.Add(group);
// RemoveObjectGroups: delete all assembled groups, dispose all retainer
// infos, and reset the connection list to its default capacity.
898 void GlobalHandles::RemoveObjectGroups() {
899 for (int i = 0; i < object_groups_.length(); i++)
900 delete object_groups_.at(i);
901 object_groups_.Clear();
902 for (int i = 0; i < retainer_infos_.length(); ++i)
903 retainer_infos_[i].info->Dispose();
904 retainer_infos_.Clear();
905 object_group_connections_.Clear();
906 object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
// RemoveImplicitRefGroups: delete all implicit reference groups and pending
// connections.
910 void GlobalHandles::RemoveImplicitRefGroups() {
911 for (int i = 0; i < implicit_ref_groups_.length(); i++) {
912 delete implicit_ref_groups_.at(i);
914 implicit_ref_groups_.Clear();
915 implicit_ref_connections_.Clear();
// TearDown: weak callbacks are deliberately not invoked here yet.
919 void GlobalHandles::TearDown() {
920 // TODO(1428): invoke weak callbacks.
// ComputeObjectGroupsAndImplicitReferences: turn the flat, id-tagged
// connection lists (filled by SetObjectGroupId / SetReferenceFromGroup /
// SetRetainedObjectInfo) into ObjectGroup and ImplicitRefGroup objects.
// All three lists are sorted by id, then scanned in one pass; runs of equal
// ids form a group.  When there are no object-group connections at all, the
// retainer infos and implicit connections are simply discarded.
// NOTE(review): this chunk is elided — in particular around original lines
// 944-947 the visible assignment to current_group_id appears before the
// group-boundary test; the elided lines likely carry the surrounding
// control flow.  Confirm against the full source before editing.
924 void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
925 if (object_group_connections_.length() == 0) {
926 for (int i = 0; i < retainer_infos_.length(); ++i)
927 retainer_infos_[i].info->Dispose();
928 retainer_infos_.Clear();
929 implicit_ref_connections_.Clear();
933 object_group_connections_.Sort();
934 retainer_infos_.Sort();
935 implicit_ref_connections_.Sort();
937 int info_index = 0; // For iterating retainer_infos_.
938 UniqueId current_group_id(0);
939 int current_group_start = 0;
941 int current_implicit_refs_start = 0;
942 int current_implicit_refs_end = 0;
943 for (int i = 0; i <= object_group_connections_.length(); ++i) {
945 current_group_id = object_group_connections_[i].id;
946 if (i == object_group_connections_.length() ||
947 current_group_id != object_group_connections_[i].id) {
948 // Group detected: objects in indices [current_group_start, i[.
950 // Find out which implicit references are related to this group. (We want
951 // to ignore object groups which only have 1 object, but that object is
952 // needed as a representative object for the implicit refrerence group.)
953 while (current_implicit_refs_start < implicit_ref_connections_.length() &&
954 implicit_ref_connections_[current_implicit_refs_start].id <
956 ++current_implicit_refs_start;
957 current_implicit_refs_end = current_implicit_refs_start;
958 while (current_implicit_refs_end < implicit_ref_connections_.length() &&
959 implicit_ref_connections_[current_implicit_refs_end].id ==
961 ++current_implicit_refs_end;
963 if (current_implicit_refs_end > current_implicit_refs_start) {
964 // Find a representative object for the implicit references.
965 HeapObject** representative = NULL;
966 for (int j = current_group_start; j < i; ++j) {
967 Object** object = object_group_connections_[j].object;
968 if ((*object)->IsHeapObject()) {
969 representative = reinterpret_cast<HeapObject**>(object);
// Only build the implicit-ref group if a heap-object representative
// parent was found.
973 if (representative) {
974 ImplicitRefGroup* group = new ImplicitRefGroup(
976 current_implicit_refs_end - current_implicit_refs_start);
977 for (int j = current_implicit_refs_start;
978 j < current_implicit_refs_end;
980 group->children[j - current_implicit_refs_start] =
981 implicit_ref_connections_[j].object;
983 implicit_ref_groups_.Add(group);
985 current_implicit_refs_start = current_implicit_refs_end;
988 // Find a RetainedObjectInfo for the group.
989 RetainedObjectInfo* info = NULL;
990 while (info_index < retainer_infos_.length() &&
991 retainer_infos_[info_index].id < current_group_id) {
// Dispose infos for ids that have no matching object group.
992 retainer_infos_[info_index].info->Dispose();
995 if (info_index < retainer_infos_.length() &&
996 retainer_infos_[info_index].id == current_group_id) {
997 // This object group has an associated ObjectGroupRetainerInfo.
998 info = retainer_infos_[info_index].info;
1002 // Ignore groups which only contain one object.
1003 if (i > current_group_start + 1) {
1004 ObjectGroup* group = new ObjectGroup(i - current_group_start);
1005 for (int j = current_group_start; j < i; ++j) {
1006 group->objects[j - current_group_start] =
1007 object_group_connections_[j].object;
1010 object_groups_.Add(group);
// Start the next group at the current connection.
1015 if (i < object_group_connections_.length()) {
1016 current_group_id = object_group_connections_[i].id;
1017 current_group_start = i;
// All connections consumed: reset the scratch lists.
1021 object_group_connections_.Clear();
1022 object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1023 retainer_infos_.Clear();
1024 implicit_ref_connections_.Clear();
// EternalHandles: handles that live for the isolate's whole lifetime.
// Constructor invalidates all singleton-handle slots.
1028 EternalHandles::EternalHandles() : size_(0) {
1029 for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
1030 singleton_handles_[i] = kInvalidIndex;
// Destructor: free every allocated slot block.
1035 EternalHandles::~EternalHandles() {
1036 for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
// Visit every live slot; the last block may be partially filled, hence the
// Min(limit, kSize) bound.  NOTE(review): the declaration/update of |limit|
// falls in elided lines of this chunk.
1040 void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
1042 for (int i = 0; i < blocks_.length(); i++) {
1044 Object** block = blocks_[i];
1045 visitor->VisitPointers(block, block + Min(limit, kSize));
// Visit only the slots recorded as holding new-space objects.
1051 void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
1052 for (int i = 0; i < new_space_indices_.length(); i++) {
1053 visitor->VisitPointer(GetLocation(new_space_indices_[i]));
// After GC, compact new_space_indices_ down to slots whose object is still
// in new space.  NOTE(review): |last| is declared in an elided line.
1058 void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
1060 for (int i = 0; i < new_space_indices_.length(); i++) {
1061 int index = new_space_indices_[i];
1062 if (heap->InNewSpace(*GetLocation(index))) {
1063 new_space_indices_[last++] = index;
1066 new_space_indices_.Rewind(last);
// Create: allocate an eternal slot for |object| and store its index in
// |*index| (must be kInvalidIndex on entry; NULL objects are ignored).
// Slots are grouped in kSize-element blocks addressed by (size_ >> kShift,
// size_ & kMask); a new block is hole-initialized on demand.
1070 void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
1071 DCHECK_EQ(kInvalidIndex, *index);
1072 if (object == NULL) return;
// the_hole marks unused slots, so it must never be stored as a value.
1073 DCHECK_NE(isolate->heap()->the_hole_value(), object);
1074 int block = size_ >> kShift;
1075 int offset = size_ & kMask;
// Need a new block (the guard condition line is elided in this chunk).
1078 Object** next_block = new Object*[kSize];
1079 Object* the_hole = isolate->heap()->the_hole_value();
1080 MemsetPointer(next_block, the_hole, kSize);
1081 blocks_.Add(next_block);
1083 DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1084 blocks_[block][offset] = object;
// Track new-space referents so scavenges can visit them cheaply.
1085 if (isolate->heap()->InNewSpace(object)) {
1086 new_space_indices_.Add(size_);
1092 } }  // namespace v8::internal