deps: update v8 to 4.3.61.21
[platform/upstream/nodejs.git] / deps / v8 / src / global-handles.cc
1 // Copyright 2009 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/v8.h"
6
7 #include "src/api.h"
8 #include "src/global-handles.h"
9
10 #include "src/vm-state-inl.h"
11
12 namespace v8 {
13 namespace internal {
14
15
16 ObjectGroup::~ObjectGroup() {
17   if (info != NULL) info->Dispose();
18   delete[] objects;
19 }
20
21
22 ImplicitRefGroup::~ImplicitRefGroup() {
23   delete[] children;
24 }
25
26
// Internal representation of a single global handle.  Nodes live by value
// inside NodeBlocks and are handed to the embedder as Object** slots (the
// address of object_, which must therefore sit at offset 0).
class GlobalHandles::Node {
 public:
  // State transition diagram:
  // FREE -> NORMAL <-> WEAK -> PENDING -> NEAR_DEATH -> { NORMAL, WEAK, FREE }
  enum State {
    FREE = 0,
    NORMAL,      // Normal global handle.
    WEAK,        // Flagged as weak but not yet finalized.
    PENDING,     // Has been recognized as only reachable by weak handles.
    NEAR_DEATH,  // Callback has informed the handle is near death.
    NUMBER_OF_NODE_STATES
  };

  // Maps handle location (slot) to the containing node.
  static Node* FromLocation(Object** location) {
    DCHECK(OFFSET_OF(Node, object_) == 0);
    return reinterpret_cast<Node*>(location);
  }

  Node() {
    // The field layout is part of the API contract: the Internals helpers
    // in v8.h read class_id_ and flags_ through raw offsets, so both
    // sides must agree on offsets, shifts, masks and state values.
    DCHECK(OFFSET_OF(Node, class_id_) == Internals::kNodeClassIdOffset);
    DCHECK(OFFSET_OF(Node, flags_) == Internals::kNodeFlagsOffset);
    STATIC_ASSERT(static_cast<int>(NodeState::kMask) ==
                  Internals::kNodeStateMask);
    STATIC_ASSERT(WEAK == Internals::kNodeStateIsWeakValue);
    STATIC_ASSERT(PENDING == Internals::kNodeStateIsPendingValue);
    STATIC_ASSERT(NEAR_DEATH == Internals::kNodeStateIsNearDeathValue);
    STATIC_ASSERT(static_cast<int>(IsIndependent::kShift) ==
                  Internals::kNodeIsIndependentShift);
    STATIC_ASSERT(static_cast<int>(IsPartiallyDependent::kShift) ==
                  Internals::kNodeIsPartiallyDependentShift);
  }

#ifdef ENABLE_HANDLE_ZAPPING
  ~Node() {
    // TODO(1428): if it's a weak handle we should have invoked its callback.
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    index_ = 0;
    set_independent(false);
    set_partially_dependent(false);
    set_in_new_space_list(false);
    parameter_or_next_free_.next_free = NULL;
    weak_callback_ = NULL;
  }
#endif

  // Called once when the containing block is created: records the node's
  // slot index and links it onto the global free list.
  void Initialize(int index, Node** first_free) {
    index_ = static_cast<uint8_t>(index);
    DCHECK(static_cast<int>(index_) == index);
    set_state(FREE);
    set_in_new_space_list(false);
    parameter_or_next_free_.next_free = *first_free;
    *first_free = this;
  }

  // Transitions a FREE node to NORMAL, binding it to |object| and
  // updating the containing block's use count.
  void Acquire(Object* object) {
    DCHECK(state() == FREE);
    object_ = object;
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    set_partially_dependent(false);
    set_state(NORMAL);
    parameter_or_next_free_.parameter = NULL;
    weak_callback_ = NULL;
    IncreaseBlockUses();
  }

  void Zap() {
    DCHECK(IsInUse());
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
  }

  // Returns the node to the FREE state; DecreaseBlockUses also pushes it
  // back onto the free list.
  void Release() {
    DCHECK(IsInUse());
    set_state(FREE);
    // Zap the values for eager trapping.
    object_ = reinterpret_cast<Object*>(kGlobalHandleZapValue);
    class_id_ = v8::HeapProfiler::kPersistentHandleNoClassId;
    set_independent(false);
    set_partially_dependent(false);
    weak_callback_ = NULL;
    DecreaseBlockUses();
  }

  // Object slot accessors.
  Object* object() const { return object_; }
  Object** location() { return &object_; }
  Handle<Object> handle() { return Handle<Object>(location()); }

  // Wrapper class ID accessors.
  bool has_wrapper_class_id() const {
    return class_id_ != v8::HeapProfiler::kPersistentHandleNoClassId;
  }

  uint16_t wrapper_class_id() const { return class_id_; }

  // State and flag accessors.

  State state() const {
    return NodeState::decode(flags_);
  }
  void set_state(State state) {
    flags_ = NodeState::update(flags_, state);
  }

  bool is_independent() {
    return IsIndependent::decode(flags_);
  }
  void set_independent(bool v) {
    flags_ = IsIndependent::update(flags_, v);
  }

  bool is_partially_dependent() {
    return IsPartiallyDependent::decode(flags_);
  }
  void set_partially_dependent(bool v) {
    flags_ = IsPartiallyDependent::update(flags_, v);
  }

  bool is_in_new_space_list() {
    return IsInNewSpaceList::decode(flags_);
  }
  void set_in_new_space_list(bool v) {
    flags_ = IsInNewSpaceList::update(flags_, v);
  }

  WeaknessType weakness_type() const {
    return NodeWeaknessType::decode(flags_);
  }
  void set_weakness_type(WeaknessType weakness_type) {
    flags_ = NodeWeaknessType::update(flags_, weakness_type);
  }

  bool IsNearDeath() const {
    // Check for PENDING to ensure correct answer when processing callbacks.
    return state() == PENDING || state() == NEAR_DEATH;
  }

  bool IsWeak() const { return state() == WEAK; }

  bool IsInUse() const { return state() != FREE; }

  bool IsRetainer() const { return state() != FREE; }

  bool IsStrongRetainer() const { return state() == NORMAL; }

  bool IsWeakRetainer() const {
    return state() == WEAK || state() == PENDING || state() == NEAR_DEATH;
  }

  void MarkPending() {
    DCHECK(state() == WEAK);
    set_state(PENDING);
  }

  // Independent flag accessors.
  void MarkIndependent() {
    DCHECK(IsInUse());
    set_independent(true);
  }

  // Only meaningful for new-space objects; old-space objects never get
  // the partially-dependent flag.
  void MarkPartiallyDependent() {
    DCHECK(IsInUse());
    if (GetGlobalHandles()->isolate()->heap()->InNewSpace(object_)) {
      set_partially_dependent(true);
    }
  }
  void clear_partially_dependent() { set_partially_dependent(false); }

  // Callback accessor.
  // TODO(svenpanne) Re-enable or nuke later.
  // WeakReferenceCallback callback() { return callback_; }

  // Callback parameter accessors.
  void set_parameter(void* parameter) {
    DCHECK(IsInUse());
    parameter_or_next_free_.parameter = parameter;
  }
  void* parameter() const {
    DCHECK(IsInUse());
    return parameter_or_next_free_.parameter;
  }

  // Accessors for next free node in the free list.
  Node* next_free() {
    DCHECK(state() == FREE);
    return parameter_or_next_free_.next_free;
  }
  void set_next_free(Node* value) {
    DCHECK(state() == FREE);
    parameter_or_next_free_.next_free = value;
  }

  // Makes this handle weak with a "normal" (non-phantom) callback that
  // receives the still-live object.
  void MakeWeak(void* parameter, WeakCallback weak_callback) {
    DCHECK(weak_callback != NULL);
    DCHECK(IsInUse());
    CHECK(object_ != NULL);
    set_state(WEAK);
    set_weakness_type(NORMAL_WEAK);
    set_parameter(parameter);
    weak_callback_ = weak_callback;
  }

  // Makes this handle phantom-weak: the callback never sees the object,
  // only the parameter and (optionally) up to two internal fields.
  void MakeWeak(void* parameter,
                WeakCallbackInfo<void>::Callback phantom_callback,
                v8::WeakCallbackType type) {
    DCHECK(phantom_callback != nullptr);
    DCHECK(IsInUse());
    CHECK(object_ != nullptr);
    set_state(WEAK);
    switch (type) {
      case v8::WeakCallbackType::kParameter:
        set_weakness_type(PHANTOM_WEAK);
        break;
      case v8::WeakCallbackType::kInternalFields:
      set_weakness_type(PHANTOM_WEAK_2_INTERNAL_FIELDS);
      break;
    }
    set_parameter(parameter);
    // Stored in the same slot as normal weak callbacks; weakness_type()
    // disambiguates before the cast back in CollectPhantomCallbackData.
    weak_callback_ = reinterpret_cast<WeakCallback>(phantom_callback);
  }

  // Reverts a weak handle to a strong one and returns the parameter that
  // was registered with the weak callback.
  void* ClearWeakness() {
    DCHECK(IsInUse());
    void* p = parameter();
    set_state(NORMAL);
    set_parameter(NULL);
    return p;
  }

  // If this node is PENDING and phantom-weak, snapshots the data its
  // callback needs (parameter, Smi-valued internal fields), zaps the
  // handle slot, queues a PendingPhantomCallback and moves the node to
  // NEAR_DEATH.  Normal-weak nodes are left for
  // PostGarbageCollectionProcessing.
  void CollectPhantomCallbackData(
      Isolate* isolate,
      List<PendingPhantomCallback>* pending_phantom_callbacks) {
    if (state() != PENDING) return;
    if (weak_callback_ != NULL) {
      if (weakness_type() == NORMAL_WEAK) return;

      DCHECK(weakness_type() == PHANTOM_WEAK ||
             weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);

      void* internal_fields[v8::kInternalFieldsInWeakCallback] = {nullptr,
                                                                  nullptr};
      if (weakness_type() != PHANTOM_WEAK && object()->IsJSObject()) {
        auto jsobject = JSObject::cast(object());
        int field_count = jsobject->GetInternalFieldCount();
        for (int i = 0; i < v8::kInternalFieldsInWeakCallback; ++i) {
          if (field_count == i) break;
          auto field = jsobject->GetInternalField(i);
          // Only Smi fields are captured; heap-object fields would be
          // unsafe to hand out after the object dies.
          if (field->IsSmi()) internal_fields[i] = field;
        }
      }

      // Zap with something dangerous.
      *location() = reinterpret_cast<Object*>(0x6057ca11);

      typedef v8::WeakCallbackInfo<void> Data;
      auto callback = reinterpret_cast<Data::Callback>(weak_callback_);
      pending_phantom_callbacks->Add(
          PendingPhantomCallback(this, callback, parameter(), internal_fields));
      DCHECK(IsInUse());
      set_state(NEAR_DEATH);
    }
  }

  // Runs the normal (non-phantom) weak callback for a dying node.
  // Returns true if a callback was actually invoked.
  bool PostGarbageCollectionProcessing(Isolate* isolate) {
    // Handles only weak handles (not phantom) that are dying.
    if (state() != Node::PENDING) return false;
    if (weak_callback_ == NULL) {
      Release();
      return false;
    }
    set_state(NEAR_DEATH);

    // Check that we are not passing a finalized external string to
    // the callback.
    DCHECK(!object_->IsExternalOneByteString() ||
           ExternalOneByteString::cast(object_)->resource() != NULL);
    DCHECK(!object_->IsExternalTwoByteString() ||
           ExternalTwoByteString::cast(object_)->resource() != NULL);
    if (weakness_type() != NORMAL_WEAK) return false;

    // Leaving V8.
    VMState<EXTERNAL> vmstate(isolate);
    HandleScope handle_scope(isolate);
    Object** object = location();
    Handle<Object> handle(*object, isolate);
    v8::WeakCallbackData<v8::Value, void> data(
        reinterpret_cast<v8::Isolate*>(isolate), parameter(),
        v8::Utils::ToLocal(handle));
    set_parameter(NULL);
    weak_callback_(data);

    // Absence of explicit cleanup or revival of weak handle
    // in most of the cases would lead to memory leak.
    CHECK(state() != NEAR_DEATH);
    return true;
  }

  inline GlobalHandles* GetGlobalHandles();

 private:
  inline NodeBlock* FindBlock();
  inline void IncreaseBlockUses();
  inline void DecreaseBlockUses();

  // Storage for object pointer.
  // Placed first to avoid offset computation.
  Object* object_;

  // Next word stores class_id, index, state, and independent.
  // Note: the most aligned fields should go first.

  // Wrapper class ID.
  uint16_t class_id_;

  // Index in the containing handle block.
  uint8_t index_;

  // This stores three flags (independent, partially_dependent and
  // in_new_space_list) and a State.
  class NodeState : public BitField<State, 0, 3> {};
  class IsIndependent : public BitField<bool, 3, 1> {};
  class IsPartiallyDependent : public BitField<bool, 4, 1> {};
  class IsInNewSpaceList : public BitField<bool, 5, 1> {};
  class NodeWeaknessType : public BitField<WeaknessType, 6, 2> {};

  uint8_t flags_;

  // Handle specific callback - might be a weak reference in disguise.
  WeakCallback weak_callback_;

  // Provided data for callback.  In FREE state, this is used for
  // the free list link.
  union {
    void* parameter;
    Node* next_free;
  } parameter_or_next_free_;

  DISALLOW_COPY_AND_ASSIGN(Node);
};
370
371
// A fixed-size array of Nodes plus intrusive list links.  Every block is
// chained through next_; blocks that currently contain at least one in-use
// node are additionally chained through next_used_/prev_used_ so iteration
// (NodeIterator) can skip fully-free blocks.
class GlobalHandles::NodeBlock {
 public:
  static const int kSize = 256;

  explicit NodeBlock(GlobalHandles* global_handles, NodeBlock* next)
      : next_(next),
        used_nodes_(0),
        next_used_(NULL),
        prev_used_(NULL),
        global_handles_(global_handles) {}

  // Pushes all nodes of this block onto the global free list.  Iterating
  // backwards makes the resulting list hand out low indices first.
  void PutNodesOnFreeList(Node** first_free) {
    for (int i = kSize - 1; i >= 0; --i) {
      nodes_[i].Initialize(i, first_free);
    }
  }

  Node* node_at(int index) {
    DCHECK(0 <= index && index < kSize);
    return &nodes_[index];
  }

  // Called when a node in this block is acquired.  On the 0 -> 1
  // transition, links this block at the head of the used-block list.
  void IncreaseUses() {
    DCHECK(used_nodes_ < kSize);
    if (used_nodes_++ == 0) {
      NodeBlock* old_first = global_handles_->first_used_block_;
      global_handles_->first_used_block_ = this;
      next_used_ = old_first;
      prev_used_ = NULL;
      if (old_first == NULL) return;
      old_first->prev_used_ = this;
    }
  }

  // Called when a node in this block is released.  On the 1 -> 0
  // transition, unlinks this block from the used-block list.
  void DecreaseUses() {
    DCHECK(used_nodes_ > 0);
    if (--used_nodes_ == 0) {
      if (next_used_ != NULL) next_used_->prev_used_ = prev_used_;
      if (prev_used_ != NULL) prev_used_->next_used_ = next_used_;
      if (this == global_handles_->first_used_block_) {
        global_handles_->first_used_block_ = next_used_;
      }
    }
  }

  GlobalHandles* global_handles() { return global_handles_; }

  // Next block in the list of all blocks.
  NodeBlock* next() const { return next_; }

  // Next/previous block in the list of blocks with used nodes.
  NodeBlock* next_used() const { return next_used_; }
  NodeBlock* prev_used() const { return prev_used_; }

 private:
  // nodes_ must stay the first member: Node::FindBlock assumes a node's
  // block starts at (node address - index * sizeof(Node)).
  Node nodes_[kSize];
  NodeBlock* const next_;
  int used_nodes_;
  NodeBlock* next_used_;
  NodeBlock* prev_used_;
  GlobalHandles* global_handles_;
};
434
435
436 GlobalHandles* GlobalHandles::Node::GetGlobalHandles() {
437   return FindBlock()->global_handles();
438 }
439
440
441 GlobalHandles::NodeBlock* GlobalHandles::Node::FindBlock() {
442   intptr_t ptr = reinterpret_cast<intptr_t>(this);
443   ptr = ptr - index_ * sizeof(Node);
444   NodeBlock* block = reinterpret_cast<NodeBlock*>(ptr);
445   DCHECK(block->node_at(index_) == this);
446   return block;
447 }
448
449
450 void GlobalHandles::Node::IncreaseBlockUses() {
451   NodeBlock* node_block = FindBlock();
452   node_block->IncreaseUses();
453   GlobalHandles* global_handles = node_block->global_handles();
454   global_handles->isolate()->counters()->global_handles()->Increment();
455   global_handles->number_of_global_handles_++;
456 }
457
458
459 void GlobalHandles::Node::DecreaseBlockUses() {
460   NodeBlock* node_block = FindBlock();
461   GlobalHandles* global_handles = node_block->global_handles();
462   parameter_or_next_free_.next_free = global_handles->first_free_;
463   global_handles->first_free_ = this;
464   node_block->DecreaseUses();
465   global_handles->isolate()->counters()->global_handles()->Decrement();
466   global_handles->number_of_global_handles_--;
467 }
468
469
470 class GlobalHandles::NodeIterator {
471  public:
472   explicit NodeIterator(GlobalHandles* global_handles)
473       : block_(global_handles->first_used_block_),
474         index_(0) {}
475
476   bool done() const { return block_ == NULL; }
477
478   Node* node() const {
479     DCHECK(!done());
480     return block_->node_at(index_);
481   }
482
483   void Advance() {
484     DCHECK(!done());
485     if (++index_ < NodeBlock::kSize) return;
486     index_ = 0;
487     block_ = block_->next_used();
488   }
489
490  private:
491   NodeBlock* block_;
492   int index_;
493
494   DISALLOW_COPY_AND_ASSIGN(NodeIterator);
495 };
496
497
// Creates an empty handle store for |isolate|.  Node blocks are allocated
// lazily by Create() when the free list runs dry.
GlobalHandles::GlobalHandles(Isolate* isolate)
    : isolate_(isolate),
      number_of_global_handles_(0),
      first_block_(NULL),       // No blocks allocated yet.
      first_used_block_(NULL),  // No block holds an in-use node yet.
      first_free_(NULL),        // Free list starts empty.
      post_gc_processing_count_(0),
      object_group_connections_(kObjectGroupConnectionsCapacity) {}
506
507
508 GlobalHandles::~GlobalHandles() {
509   NodeBlock* block = first_block_;
510   while (block != NULL) {
511     NodeBlock* tmp = block->next();
512     delete block;
513     block = tmp;
514   }
515   first_block_ = NULL;
516 }
517
518
519 Handle<Object> GlobalHandles::Create(Object* value) {
520   if (first_free_ == NULL) {
521     first_block_ = new NodeBlock(this, first_block_);
522     first_block_->PutNodesOnFreeList(&first_free_);
523   }
524   DCHECK(first_free_ != NULL);
525   // Take the first node in the free list.
526   Node* result = first_free_;
527   first_free_ = result->next_free();
528   result->Acquire(value);
529   if (isolate_->heap()->InNewSpace(value) &&
530       !result->is_in_new_space_list()) {
531     new_space_nodes_.Add(result);
532     result->set_in_new_space_list(true);
533   }
534   return result->handle();
535 }
536
537
538 Handle<Object> GlobalHandles::CopyGlobal(Object** location) {
539   DCHECK(location != NULL);
540   return Node::FromLocation(location)->GetGlobalHandles()->Create(*location);
541 }
542
543
544 void GlobalHandles::Destroy(Object** location) {
545   if (location != NULL) Node::FromLocation(location)->Release();
546 }
547
548
549 void GlobalHandles::MakeWeak(Object** location, void* parameter,
550                              WeakCallback weak_callback) {
551   Node::FromLocation(location)->MakeWeak(parameter, weak_callback);
552 }
553
554
// Shared signature for phantom weak callbacks.
typedef v8::WeakCallbackInfo<void>::Callback GenericCallback;
556
557
558 void GlobalHandles::MakeWeak(Object** location, void* parameter,
559                              GenericCallback phantom_callback,
560                              v8::WeakCallbackType type) {
561   Node::FromLocation(location)->MakeWeak(parameter, phantom_callback, type);
562 }
563
564
565 void GlobalHandles::CollectAllPhantomCallbackData() {
566   for (NodeIterator it(this); !it.done(); it.Advance()) {
567     Node* node = it.node();
568     node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_);
569   }
570 }
571
572
573 void GlobalHandles::CollectYoungPhantomCallbackData() {
574   for (int i = 0; i < new_space_nodes_.length(); ++i) {
575     Node* node = new_space_nodes_[i];
576     DCHECK(node->is_in_new_space_list());
577     node->CollectPhantomCallbackData(isolate(), &pending_phantom_callbacks_);
578   }
579 }
580
581
582 void* GlobalHandles::ClearWeakness(Object** location) {
583   return Node::FromLocation(location)->ClearWeakness();
584 }
585
586
587 void GlobalHandles::MarkIndependent(Object** location) {
588   Node::FromLocation(location)->MarkIndependent();
589 }
590
591
592 void GlobalHandles::MarkPartiallyDependent(Object** location) {
593   Node::FromLocation(location)->MarkPartiallyDependent();
594 }
595
596
597 bool GlobalHandles::IsIndependent(Object** location) {
598   return Node::FromLocation(location)->is_independent();
599 }
600
601
602 bool GlobalHandles::IsNearDeath(Object** location) {
603   return Node::FromLocation(location)->IsNearDeath();
604 }
605
606
607 bool GlobalHandles::IsWeak(Object** location) {
608   return Node::FromLocation(location)->IsWeak();
609 }
610
611
// Visits weak-retaining handles for the marker.  PENDING nodes are treated
// per weakness type; surviving nodes are always visited so that moved
// objects get their handle slots updated.
void GlobalHandles::IterateWeakRoots(ObjectVisitor* v) {
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    Node* node = it.node();
    if (node->IsWeakRetainer()) {
      // Weakness type can be normal or phantom, with or without internal
      // fields.  For normal weakness we mark through the handle so that the
      // object and things reachable from it are available to the callback.
      //
      // In the case of phantom with no internal fields, we can zap the object
      // handle now and we won't need it, so we don't need to mark through it.
      // In the internal fields case we will need the internal
      // fields, so we can't zap the handle.
      if (node->state() == Node::PENDING) {
        if (node->weakness_type() == PHANTOM_WEAK) {
          *(node->location()) = Smi::FromInt(0);
        } else if (node->weakness_type() == NORMAL_WEAK) {
          v->VisitPointer(node->location());
        } else {
          DCHECK(node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
        }
      } else {
        // Node is not pending, so that means the object survived.  We still
        // need to visit the pointer in case the object moved, eg. because of
        // compaction.
        v->VisitPointer(node->location());
      }
    }
  }
}
641
642
643 void GlobalHandles::IdentifyWeakHandles(WeakSlotCallback f) {
644   for (NodeIterator it(this); !it.done(); it.Advance()) {
645     if (it.node()->IsWeak() && f(it.node()->location())) {
646       it.node()->MarkPending();
647     }
648   }
649 }
650
651
652 void GlobalHandles::IterateNewSpaceStrongAndDependentRoots(ObjectVisitor* v) {
653   for (int i = 0; i < new_space_nodes_.length(); ++i) {
654     Node* node = new_space_nodes_[i];
655     if (node->IsStrongRetainer() ||
656         (node->IsWeakRetainer() && !node->is_independent() &&
657          !node->is_partially_dependent())) {
658         v->VisitPointer(node->location());
659     }
660   }
661 }
662
663
664 void GlobalHandles::IdentifyNewSpaceWeakIndependentHandles(
665     WeakSlotCallbackWithHeap f) {
666   for (int i = 0; i < new_space_nodes_.length(); ++i) {
667     Node* node = new_space_nodes_[i];
668     DCHECK(node->is_in_new_space_list());
669     if ((node->is_independent() || node->is_partially_dependent()) &&
670         node->IsWeak() && f(isolate_->heap(), node->location())) {
671       node->MarkPending();
672     }
673   }
674 }
675
676
// New-space counterpart of IterateWeakRoots, restricted to independent or
// partially dependent handles.
void GlobalHandles::IterateNewSpaceWeakIndependentRoots(ObjectVisitor* v) {
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if ((node->is_independent() || node->is_partially_dependent()) &&
        node->IsWeakRetainer()) {
      if (node->weakness_type() == PHANTOM_WEAK) {
        // Phantom without internal fields: the object is never handed to
        // the callback, so the slot can be cleared right away.
        *(node->location()) = Smi::FromInt(0);
      } else if (node->weakness_type() == NORMAL_WEAK) {
        v->VisitPointer(node->location());
      } else {
        DCHECK(node->weakness_type() == PHANTOM_WEAK_2_INTERNAL_FIELDS);
        // For this case we only need to trace if it's alive: The tracing of
        // something that is already alive is just to get the pointer updated
        // to the new location of the object.
        DCHECK(node->state() != Node::NEAR_DEATH);
        if (node->state() != Node::PENDING) {
          v->VisitPointer(node->location());
        }
      }
    }
  }
}
700
701
// Visits every object group in which at least one member cannot be skipped
// (per |can_skip|).  Visited groups are deleted; fully skippable groups are
// kept and compacted toward the front of object_groups_.  Returns whether
// any group was visited.
bool GlobalHandles::IterateObjectGroups(ObjectVisitor* v,
                                        WeakSlotCallbackWithHeap can_skip) {
  ComputeObjectGroupsAndImplicitReferences();
  int last = 0;  // Write cursor for retained (skipped) groups.
  bool any_group_was_visited = false;
  for (int i = 0; i < object_groups_.length(); i++) {
    ObjectGroup* entry = object_groups_.at(i);
    DCHECK(entry != NULL);

    Object*** objects = entry->objects;
    bool group_should_be_visited = false;
    for (size_t j = 0; j < entry->length; j++) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        if (!can_skip(isolate_->heap(), &object)) {
          group_should_be_visited = true;
          break;
        }
      }
    }

    if (!group_should_be_visited) {
      object_groups_[last++] = entry;
      continue;
    }

    // An object in the group requires visiting, so iterate over all
    // objects in the group.
    for (size_t j = 0; j < entry->length; ++j) {
      Object* object = *objects[j];
      if (object->IsHeapObject()) {
        // NOTE(review): the visitor is handed the address of the stack
        // copy |object|, so a pointer update by the visitor is not written
        // back to the group's slot — confirm this is intended.
        v->VisitPointer(&object);
        any_group_was_visited = true;
      }
    }

    // Once the entire group has been iterated over, set the object
    // group to NULL so it won't be processed again.
    delete entry;
    object_groups_.at(i) = NULL;
  }
  // Drop the trailing slots vacated by visited (deleted) groups.
  object_groups_.Rewind(last);
  return any_group_was_visited;
}
746
747
// Runs normal weak callbacks after a scavenge (minor GC).  Only
// independent or partially dependent new-space nodes are processed.
// Returns the number of nodes freed; bails out early if a callback
// triggers a nested GC (detected via post_gc_processing_count_).
int GlobalHandles::PostScavengeProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if (!node->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    // Skip dependent handles. Their weak callbacks might expect to be
    // called between two global garbage collection callbacks which
    // are not called for minor collections.
    if (!node->is_independent() && !node->is_partially_dependent()) {
      continue;
    }
    node->clear_partially_dependent();
    if (node->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // Weak callback triggered another GC and another round of
        // PostGarbageCollection processing.  The current node might
        // have been deleted in that round, so we need to bail out (or
        // restart the processing).
        return freed_nodes;
      }
    }
    if (!node->IsRetainer()) {
      // The callback released the node (and did not revive it).
      freed_nodes++;
    }
  }
  return freed_nodes;
}
781
782
// Runs normal weak callbacks after a full (mark-sweep) GC, over all nodes.
// Returns the number of nodes freed; bails out early if a callback
// triggers a nested GC (detected via post_gc_processing_count_).
int GlobalHandles::PostMarkSweepProcessing(
    const int initial_post_gc_processing_count) {
  int freed_nodes = 0;
  for (NodeIterator it(this); !it.done(); it.Advance()) {
    if (!it.node()->IsRetainer()) {
      // Free nodes do not have weak callbacks. Do not use them to compute
      // the freed_nodes.
      continue;
    }
    it.node()->clear_partially_dependent();
    if (it.node()->PostGarbageCollectionProcessing(isolate_)) {
      if (initial_post_gc_processing_count != post_gc_processing_count_) {
        // See the comment above.
        return freed_nodes;
      }
    }
    if (!it.node()->IsRetainer()) {
      // The callback released the node (and did not revive it).
      freed_nodes++;
    }
  }
  return freed_nodes;
}
805
806
// Compacts new_space_nodes_ after a GC: keeps nodes whose objects are
// still in new space, unflags nodes that died or whose objects were
// promoted to old space, and updates the heap's node statistics.
void GlobalHandles::UpdateListOfNewSpaceNodes() {
  int last = 0;  // Write cursor for surviving new-space nodes.
  for (int i = 0; i < new_space_nodes_.length(); ++i) {
    Node* node = new_space_nodes_[i];
    DCHECK(node->is_in_new_space_list());
    if (node->IsRetainer()) {
      if (isolate_->heap()->InNewSpace(node->object())) {
        // Still young: keep tracking it.
        new_space_nodes_[last++] = node;
        isolate_->heap()->IncrementNodesCopiedInNewSpace();
      } else {
        // Promoted to old space: no minor-GC tracking needed anymore.
        node->set_in_new_space_list(false);
        isolate_->heap()->IncrementNodesPromoted();
      }
    } else {
      // The node was released; drop it from the list.
      node->set_in_new_space_list(false);
      isolate_->heap()->IncrementNodesDiedInNewSpace();
    }
  }
  new_space_nodes_.Rewind(last);
}
827
828
// Invokes the queued phantom callbacks in two passes and returns the
// number of handles freed by the first pass.
int GlobalHandles::DispatchPendingPhantomCallbacks() {
  int freed_nodes = 0;
  {
    // The initial pass callbacks must simply clear the nodes.
    for (auto i = pending_phantom_callbacks_.begin();
         i != pending_phantom_callbacks_.end(); ++i) {
      auto callback = i;  // Alias for the current entry (updated in place).
      // Skip callbacks that have already been processed once.
      if (callback->node() == nullptr) continue;
      callback->Invoke(isolate());
      freed_nodes++;
    }
  }
  // The second pass empties the list.  A first-pass callback may have
  // installed a second-pass callback through the WeakCallbackInfo it was
  // handed (see PendingPhantomCallback::Invoke).
  while (pending_phantom_callbacks_.length() != 0) {
    auto callback = pending_phantom_callbacks_.RemoveLast();
    DCHECK(callback.node() == nullptr);
    // No second pass callback required.
    if (callback.callback() == nullptr) continue;
    // Fire second pass callback.
    callback.Invoke(isolate());
  }
  return freed_nodes;
}
853
854
// Runs one phantom callback.  On the first pass (node_ != nullptr) the
// WeakCallbackInfo receives the address of callback_, which lets the
// embedder's callback install a second-pass callback; on the second pass
// no such address is exposed.
void GlobalHandles::PendingPhantomCallback::Invoke(Isolate* isolate) {
  Data::Callback* callback_addr = nullptr;
  if (node_ != nullptr) {
    // Initialize for first pass callback.
    DCHECK(node_->state() == Node::NEAR_DEATH);
    callback_addr = &callback_;
  }
  Data data(reinterpret_cast<v8::Isolate*>(isolate), parameter_,
            internal_fields_, callback_addr);
  Data::Callback callback = callback_;
  callback_ = nullptr;  // Cleared first; the callback may store a new one.
  callback(data);
  if (node_ != nullptr) {
    // Transition to second pass state.  The first-pass callback is
    // expected to have released the handle (hence FREE).
    DCHECK(node_->state() == Node::FREE);
    node_ = nullptr;
  }
}
873
874
875 int GlobalHandles::PostGarbageCollectionProcessing(GarbageCollector collector) {
876   // Process weak global handle callbacks. This must be done after the
877   // GC is completely done, because the callbacks may invoke arbitrary
878   // API functions.
879   DCHECK(isolate_->heap()->gc_state() == Heap::NOT_IN_GC);
880   const int initial_post_gc_processing_count = ++post_gc_processing_count_;
881   int freed_nodes = 0;
882   freed_nodes += DispatchPendingPhantomCallbacks();
883   if (initial_post_gc_processing_count != post_gc_processing_count_) {
884     // If the callbacks caused a nested GC, then return.  See comment in
885     // PostScavengeProcessing.
886     return freed_nodes;
887   }
888   if (collector == SCAVENGER) {
889     freed_nodes = PostScavengeProcessing(initial_post_gc_processing_count);
890   } else {
891     freed_nodes = PostMarkSweepProcessing(initial_post_gc_processing_count);
892   }
893   if (initial_post_gc_processing_count != post_gc_processing_count_) {
894     // If the callbacks caused a nested GC, then return.  See comment in
895     // PostScavengeProcessing.
896     return freed_nodes;
897   }
898   if (initial_post_gc_processing_count == post_gc_processing_count_) {
899     UpdateListOfNewSpaceNodes();
900   }
901   return freed_nodes;
902 }
903
904
905 void GlobalHandles::IterateStrongRoots(ObjectVisitor* v) {
906   for (NodeIterator it(this); !it.done(); it.Advance()) {
907     if (it.node()->IsStrongRetainer()) {
908       v->VisitPointer(it.node()->location());
909     }
910   }
911 }
912
913
914 void GlobalHandles::IterateAllRoots(ObjectVisitor* v) {
915   for (NodeIterator it(this); !it.done(); it.Advance()) {
916     if (it.node()->IsRetainer()) {
917       v->VisitPointer(it.node()->location());
918     }
919   }
920 }
921
922
923 void GlobalHandles::IterateAllRootsWithClassIds(ObjectVisitor* v) {
924   for (NodeIterator it(this); !it.done(); it.Advance()) {
925     if (it.node()->IsRetainer() && it.node()->has_wrapper_class_id()) {
926       v->VisitEmbedderReference(it.node()->location(),
927                                 it.node()->wrapper_class_id());
928     }
929   }
930 }
931
932
933 void GlobalHandles::IterateAllRootsInNewSpaceWithClassIds(ObjectVisitor* v) {
934   for (int i = 0; i < new_space_nodes_.length(); ++i) {
935     Node* node = new_space_nodes_[i];
936     if (node->IsRetainer() && node->has_wrapper_class_id()) {
937       v->VisitEmbedderReference(node->location(),
938                                 node->wrapper_class_id());
939     }
940   }
941 }
942
943
944 int GlobalHandles::NumberOfWeakHandles() {
945   int count = 0;
946   for (NodeIterator it(this); !it.done(); it.Advance()) {
947     if (it.node()->IsWeakRetainer()) {
948       count++;
949     }
950   }
951   return count;
952 }
953
954
955 int GlobalHandles::NumberOfGlobalObjectWeakHandles() {
956   int count = 0;
957   for (NodeIterator it(this); !it.done(); it.Advance()) {
958     if (it.node()->IsWeakRetainer() &&
959         it.node()->object()->IsJSGlobalObject()) {
960       count++;
961     }
962   }
963   return count;
964 }
965
966
967 void GlobalHandles::RecordStats(HeapStats* stats) {
968   *stats->global_handle_count = 0;
969   *stats->weak_global_handle_count = 0;
970   *stats->pending_global_handle_count = 0;
971   *stats->near_death_global_handle_count = 0;
972   *stats->free_global_handle_count = 0;
973   for (NodeIterator it(this); !it.done(); it.Advance()) {
974     *stats->global_handle_count += 1;
975     if (it.node()->state() == Node::WEAK) {
976       *stats->weak_global_handle_count += 1;
977     } else if (it.node()->state() == Node::PENDING) {
978       *stats->pending_global_handle_count += 1;
979     } else if (it.node()->state() == Node::NEAR_DEATH) {
980       *stats->near_death_global_handle_count += 1;
981     } else if (it.node()->state() == Node::FREE) {
982       *stats->free_global_handle_count += 1;
983     }
984   }
985 }
986
987 #ifdef DEBUG
988
989 void GlobalHandles::PrintStats() {
990   int total = 0;
991   int weak = 0;
992   int pending = 0;
993   int near_death = 0;
994   int destroyed = 0;
995
996   for (NodeIterator it(this); !it.done(); it.Advance()) {
997     total++;
998     if (it.node()->state() == Node::WEAK) weak++;
999     if (it.node()->state() == Node::PENDING) pending++;
1000     if (it.node()->state() == Node::NEAR_DEATH) near_death++;
1001     if (it.node()->state() == Node::FREE) destroyed++;
1002   }
1003
1004   PrintF("Global Handle Statistics:\n");
1005   PrintF("  allocated memory = %" V8_PTR_PREFIX "dB\n", sizeof(Node) * total);
1006   PrintF("  # weak       = %d\n", weak);
1007   PrintF("  # pending    = %d\n", pending);
1008   PrintF("  # near_death = %d\n", near_death);
1009   PrintF("  # free       = %d\n", destroyed);
1010   PrintF("  # total      = %d\n", total);
1011 }
1012
1013
1014 void GlobalHandles::Print() {
1015   PrintF("Global handles:\n");
1016   for (NodeIterator it(this); !it.done(); it.Advance()) {
1017     PrintF("  handle %p to %p%s\n",
1018            reinterpret_cast<void*>(it.node()->location()),
1019            reinterpret_cast<void*>(it.node()->object()),
1020            it.node()->IsWeak() ? " (weak)" : "");
1021   }
1022 }
1023
1024 #endif
1025
1026
1027
1028 void GlobalHandles::AddObjectGroup(Object*** handles,
1029                                    size_t length,
1030                                    v8::RetainedObjectInfo* info) {
1031 #ifdef DEBUG
1032   for (size_t i = 0; i < length; ++i) {
1033     DCHECK(!Node::FromLocation(handles[i])->is_independent());
1034   }
1035 #endif
1036   if (length == 0) {
1037     if (info != NULL) info->Dispose();
1038     return;
1039   }
1040   ObjectGroup* group = new ObjectGroup(length);
1041   for (size_t i = 0; i < length; ++i)
1042     group->objects[i] = handles[i];
1043   group->info = info;
1044   object_groups_.Add(group);
1045 }
1046
1047
1048 void GlobalHandles::SetObjectGroupId(Object** handle,
1049                                      UniqueId id) {
1050   object_group_connections_.Add(ObjectGroupConnection(id, handle));
1051 }
1052
1053
1054 void GlobalHandles::SetRetainedObjectInfo(UniqueId id,
1055                                           RetainedObjectInfo* info) {
1056   retainer_infos_.Add(ObjectGroupRetainerInfo(id, info));
1057 }
1058
1059
1060 void GlobalHandles::SetReferenceFromGroup(UniqueId id, Object** child) {
1061   DCHECK(!Node::FromLocation(child)->is_independent());
1062   implicit_ref_connections_.Add(ObjectGroupConnection(id, child));
1063 }
1064
1065
1066 void GlobalHandles::SetReference(HeapObject** parent, Object** child) {
1067   DCHECK(!Node::FromLocation(child)->is_independent());
1068   ImplicitRefGroup* group = new ImplicitRefGroup(parent, 1);
1069   group->children[0] = child;
1070   implicit_ref_groups_.Add(group);
1071 }
1072
1073
1074 void GlobalHandles::RemoveObjectGroups() {
1075   for (int i = 0; i < object_groups_.length(); i++)
1076     delete object_groups_.at(i);
1077   object_groups_.Clear();
1078   for (int i = 0; i < retainer_infos_.length(); ++i)
1079     retainer_infos_[i].info->Dispose();
1080   retainer_infos_.Clear();
1081   object_group_connections_.Clear();
1082   object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
1083 }
1084
1085
1086 void GlobalHandles::RemoveImplicitRefGroups() {
1087   for (int i = 0; i < implicit_ref_groups_.length(); i++) {
1088     delete implicit_ref_groups_.at(i);
1089   }
1090   implicit_ref_groups_.Clear();
1091   implicit_ref_connections_.Clear();
1092 }
1093
1094
void GlobalHandles::TearDown() {
  // Intentionally a no-op for now -- see the TODO below.
  // TODO(1428): invoke weak callbacks.
}
1098
1099
// Builds object_groups_ and implicit_ref_groups_ out of the flat, id-tagged
// connection lists filled in by SetObjectGroupId() / SetReferenceFromGroup(),
// matching retainer_infos_ entries to groups by id.  All three input lists
// are consumed (cleared) by this call, and unmatched retainer infos are
// disposed.
void GlobalHandles::ComputeObjectGroupsAndImplicitReferences() {
  // Fast path: no object group connections at all.  Every retainer info is
  // then unused and must be disposed; stale implicit refs are dropped too.
  if (object_group_connections_.length() == 0) {
    for (int i = 0; i < retainer_infos_.length(); ++i)
      retainer_infos_[i].info->Dispose();
    retainer_infos_.Clear();
    implicit_ref_connections_.Clear();
    return;
  }

  // Sort all three lists by id so each group becomes a contiguous run and
  // the lists can be merged in one linear sweep below.
  object_group_connections_.Sort();
  retainer_infos_.Sort();
  implicit_ref_connections_.Sort();

  int info_index = 0;  // For iterating retainer_infos_.
  UniqueId current_group_id(0);
  int current_group_start = 0;

  int current_implicit_refs_start = 0;
  int current_implicit_refs_end = 0;
  // Iterate one past the end so the final group is flushed as well.
  for (int i = 0; i <= object_group_connections_.length(); ++i) {
    if (i == 0)
      current_group_id = object_group_connections_[i].id;
    if (i == object_group_connections_.length() ||
        current_group_id != object_group_connections_[i].id) {
      // Group detected: objects in indices [current_group_start, i[.

      // Find out which implicit references are related to this group. (We want
      // to ignore object groups which only have 1 object, but that object is
      // needed as a representative object for the implicit reference group.)
      while (current_implicit_refs_start < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_start].id <
                 current_group_id)
        ++current_implicit_refs_start;
      current_implicit_refs_end = current_implicit_refs_start;
      while (current_implicit_refs_end < implicit_ref_connections_.length() &&
             implicit_ref_connections_[current_implicit_refs_end].id ==
                 current_group_id)
        ++current_implicit_refs_end;

      if (current_implicit_refs_end > current_implicit_refs_start) {
        // Find a representative object for the implicit references.
        HeapObject** representative = NULL;
        for (int j = current_group_start; j < i; ++j) {
          Object** object = object_group_connections_[j].object;
          if ((*object)->IsHeapObject()) {
            representative = reinterpret_cast<HeapObject**>(object);
            break;
          }
        }
        if (representative) {
          ImplicitRefGroup* group = new ImplicitRefGroup(
              representative,
              current_implicit_refs_end - current_implicit_refs_start);
          for (int j = current_implicit_refs_start;
               j < current_implicit_refs_end;
               ++j) {
            group->children[j - current_implicit_refs_start] =
                implicit_ref_connections_[j].object;
          }
          implicit_ref_groups_.Add(group);
        }
        current_implicit_refs_start = current_implicit_refs_end;
      }

      // Find a RetainedObjectInfo for the group.
      RetainedObjectInfo* info = NULL;
      // Dispose infos whose ids have no object group connections at all.
      while (info_index < retainer_infos_.length() &&
             retainer_infos_[info_index].id < current_group_id) {
        retainer_infos_[info_index].info->Dispose();
        ++info_index;
      }
      if (info_index < retainer_infos_.length() &&
          retainer_infos_[info_index].id == current_group_id) {
        // This object group has an associated ObjectGroupRetainerInfo.
        info = retainer_infos_[info_index].info;
        ++info_index;
      }

      // Ignore groups which only contain one object.
      if (i > current_group_start + 1) {
        ObjectGroup* group = new ObjectGroup(i - current_group_start);
        for (int j = current_group_start; j < i; ++j) {
          group->objects[j - current_group_start] =
              object_group_connections_[j].object;
        }
        group->info = info;
        object_groups_.Add(group);
      } else if (info) {
        // The group was dropped, so its info would otherwise leak.
        info->Dispose();
      }

      if (i < object_group_connections_.length()) {
        current_group_id = object_group_connections_[i].id;
        current_group_start = i;
      }
    }
  }
  // All inputs are consumed: reset the connection list to its initial
  // capacity and clear the remaining lists.
  object_group_connections_.Clear();
  object_group_connections_.Initialize(kObjectGroupConnectionsCapacity);
  retainer_infos_.Clear();
  implicit_ref_connections_.Clear();
}
1202
1203
1204 EternalHandles::EternalHandles() : size_(0) {
1205   for (unsigned i = 0; i < arraysize(singleton_handles_); i++) {
1206     singleton_handles_[i] = kInvalidIndex;
1207   }
1208 }
1209
1210
1211 EternalHandles::~EternalHandles() {
1212   for (int i = 0; i < blocks_.length(); i++) delete[] blocks_[i];
1213 }
1214
1215
1216 void EternalHandles::IterateAllRoots(ObjectVisitor* visitor) {
1217   int limit = size_;
1218   for (int i = 0; i < blocks_.length(); i++) {
1219     DCHECK(limit > 0);
1220     Object** block = blocks_[i];
1221     visitor->VisitPointers(block, block + Min(limit, kSize));
1222     limit -= kSize;
1223   }
1224 }
1225
1226
1227 void EternalHandles::IterateNewSpaceRoots(ObjectVisitor* visitor) {
1228   for (int i = 0; i < new_space_indices_.length(); i++) {
1229     visitor->VisitPointer(GetLocation(new_space_indices_[i]));
1230   }
1231 }
1232
1233
1234 void EternalHandles::PostGarbageCollectionProcessing(Heap* heap) {
1235   int last = 0;
1236   for (int i = 0; i < new_space_indices_.length(); i++) {
1237     int index = new_space_indices_[i];
1238     if (heap->InNewSpace(*GetLocation(index))) {
1239       new_space_indices_[last++] = index;
1240     }
1241   }
1242   new_space_indices_.Rewind(last);
1243 }
1244
1245
1246 void EternalHandles::Create(Isolate* isolate, Object* object, int* index) {
1247   DCHECK_EQ(kInvalidIndex, *index);
1248   if (object == NULL) return;
1249   DCHECK_NE(isolate->heap()->the_hole_value(), object);
1250   int block = size_ >> kShift;
1251   int offset = size_ & kMask;
1252   // need to resize
1253   if (offset == 0) {
1254     Object** next_block = new Object*[kSize];
1255     Object* the_hole = isolate->heap()->the_hole_value();
1256     MemsetPointer(next_block, the_hole, kSize);
1257     blocks_.Add(next_block);
1258   }
1259   DCHECK_EQ(isolate->heap()->the_hole_value(), blocks_[block][offset]);
1260   blocks_[block][offset] = object;
1261   if (isolate->heap()->InNewSpace(object)) {
1262     new_space_indices_.Add(size_);
1263   }
1264   *index = size_++;
1265 }
1266
1267
1268 } }  // namespace v8::internal