// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap-snapshot-generator-inl.h"

#include "src/allocation-tracker.h"
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug.h"
#include "src/heap-profiler.h"
#include "src/types.h"

namespace v8 {
namespace internal {


HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      name_(name) {
  DCHECK(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}


const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}

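// Debug helper: recursively prints this entry and its children up to
// max_depth. Each child edge gets a one-character prefix encoding its type
// ("#" context variable, "$" internal or hidden, "^" shortcut, "w" weak);
// element and hidden edges are labeled with their numeric index.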
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
                  ' ', prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}


const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    case kSimdValue: return "/simd/";
    default: return "???";
  }
}


// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
    : profiler_(profiler),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}


void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}


void HeapSnapshot::AddSyntheticRootEntries() {
  AddRootEntry();
  AddGcRootsEntry();
  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    AddGcSubrootEntry(tag, id);
    id += HeapObjectsMap::kObjectIdStep;
  }
  DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
}


HeapEntry* HeapSnapshot::AddRootEntry() {
  DCHECK(root_index_ == HeapEntry::kNoEntry);
  DCHECK(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0,
                              0);
  root_index_ = entry->index();
  DCHECK(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
  DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              VisitorSynchronization::kTagNames[tag], id, 0, 0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}

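// Fills the children_ array in two passes: the first pass walks the entries
// and reserves a contiguous slice of the array for each of them
// (set_children_index() returns the next free slot), the second pass resolves
// every edge's to_index_ into a direct HeapEntry pointer and files the edge
// into its from() entry's slice.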
void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  DCHECK(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}


class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
        SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}


// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
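// With kObjectIdStep == 2 (see the header) this numbering yields 1 for the
// root entry, 3 for "(GC roots)", 5, 7, ... for the GC subroots, and
// 5 + 2 * kNumberOfSyncTags for the first real heap object, so every ID
// handed out by this map stays odd.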


static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we cannot distinguish between two
  // cases: the map already had an entry with NULL as the value, or it has
  // just created a new entry on the fly with NULL as the default value.
  // The dummy element guarantees that every entries_map_ entry has a value
  // field greater than 0, which MoveObject() relies on.
  entries_.Add(EntryInfo(0, NULL, 0));
}


bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry =
        entries_map_.LookupOrInsert(to, ComputePointerHash(to));
    if (to_entry->value != NULL) {
      // We found an existing entry at the `to` address for an old object.
      // Without this step we would end up with two EntryInfos whose addr
      // fields hold the same address; later, in RemoveDeadEntries, one of
      // them would be dropped together with the corresponding entries_map_
      // entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // Size of an object can change during its life, so to keep information
    // about the object in entries_ consistent, we have to adjust size when the
    // object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}


void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr));
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry =
      entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}


void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}


namespace {


struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
    : obj(obj),
      expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object   : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object      : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace


int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry =
        entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}

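// Streams one v8::HeapStatsUpdate per time interval whose aggregate live
// object count or size has changed since the previous push. entries_ is
// ordered by id and the interval ids recorded in time_intervals_ grow
// monotonically, so a single forward scan over entries_ is enough to
// partition it into intervals.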
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
                                                      int64_t* timestamp_us) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  if (timestamp_us) {
    *timestamp_us = (time_intervals_.last().timestamp -
                     time_intervals_[0].timestamp).InMicroseconds();
  }
  return last_assigned_id();
}

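// Compacts entries_ in place: entries whose objects were seen during the last
// heap walk (accessed == true) are kept, and their entries_map_ slots are
// updated to point at the new index; all other entries are dropped from the
// map. Slot 0 remains reserved for the dummy entry added in the constructor.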
void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr));
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}

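// IDs for embedder-provided (native) objects are derived from the info's hash
// and label and then shifted left by one bit, so they are always even and can
// never collide with the odd IDs assigned to V8 heap objects above.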
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}


HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.LookupOrInsert(thing, Hash(thing));
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object));
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
  cache_entry->value = const_cast<char*>(tag);
}


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}

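// Picks a HeapEntry type and a human-readable name from the object's kind;
// anything not recognized below falls through to a kHidden entry named by
// GetSystemEntryName().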
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  } else if (object->IsFloat32x4()) {
    return AddEntry(object, HeapEntry::kSimdValue, "simd");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}

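// Glue between the explorers and the snapshot being built: allocates entries
// on demand through a HeapEntriesAllocator, remembers the HeapThing ->
// entry-index mapping in a HeapEntriesMap, and forwards reference-setting
// calls to the corresponding HeapEntry objects.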
class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};


const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}


class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
     Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
     generator_->SetInternalReference(parent_obj_, parent_, "code", code);
     generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
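  // Fields whose targets have already been reported through an explicit
  // reference are temporarily "marked" by ORing both low tag bits into the
  // stored pointer (kTag == 3, a bit pattern a genuine tagged pointer never
  // has, since kHeapObjectTag only sets the lowest bit). VisitPointers()
  // skips such fields and restores the original value via
  // CheckVisitedAndUnmark().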
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    DCHECK(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    DCHECK(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }

  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }

  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};


bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    }
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}


bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;

  if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else {
    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
  }
  return true;
}


void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
                 == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                 == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_proxy", global_obj->global_proxy(),
                         GlobalObject::kGlobalProxyOffset);
    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                 3 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}


void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
                                                   JSCollection* collection) {
  SetInternalReference(collection, entry, "table", collection->table(),
                       JSCollection::kTableOffset);
}


void V8HeapExplorer::ExtractJSWeakCollectionReferences(
    int entry, JSWeakCollection* collection) {
  MarkAsWeakContainer(collection->table());
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSWeakCollection::kTableOffset);
}


void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

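// Context slots before FIRST_WEAK_SLOT (plus the map cache) are reported as
// strong internal references; the trailing weak slots of a native context are
// reported as weak references instead.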
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
        FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
        FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}


void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  Object* raw_transitions_or_prototype_info = map->raw_transitions();
  if (TransitionArray::IsFullTransitionArray(
          raw_transitions_or_prototype_info)) {
    TransitionArray* transitions =
        TransitionArray::cast(raw_transitions_or_prototype_info);
    int transitions_entry = GetEntry(transitions)->index();

    if (map->CanTransition()) {
      if (transitions->HasPrototypeTransitions()) {
        FixedArray* prototype_transitions =
            transitions->GetPrototypeTransitions();
        MarkAsWeakContainer(prototype_transitions);
        TagObject(prototype_transitions, "(prototype transitions)");
        SetInternalReference(transitions, transitions_entry,
                             "prototype_transitions", prototype_transitions);
      }
      // TODO(alph): transitions keys are strong links.
      MarkAsWeakContainer(transitions);
    }

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry, "transitions", transitions,
                         Map::kTransitionsOrPrototypeInfoOffset);
  } else if (TransitionArray::IsSimpleTransition(
                 raw_transitions_or_prototype_info)) {
    TagObject(raw_transitions_or_prototype_info, "(transition)");
    SetInternalReference(map, entry, "transition",
                         raw_transitions_or_prototype_info,
                         Map::kTransitionsOrPrototypeInfoOffset);
  } else if (map->is_prototype_map()) {
    TagObject(raw_transitions_or_prototype_info, "prototype_info");
    SetInternalReference(map, entry, "prototype_info",
                         raw_transitions_or_prototype_info,
                         Map::kTransitionsOrPrototypeInfoOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);

  MarkAsWeakContainer(map->code_cache());
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  Object* constructor_or_backpointer = map->constructor_or_backpointer();
  if (constructor_or_backpointer->IsMap()) {
    TagObject(constructor_or_backpointer, "(back pointer)");
    SetInternalReference(map, entry, "back_pointer", constructor_or_backpointer,
                         Map::kConstructorOrBackPointerOffset);
  } else {
    SetInternalReference(map, entry, "constructor", constructor_or_backpointer,
                         Map::kConstructorOrBackPointerOffset);
  }
  TagObject(map->dependent_code(), "(dependent code)");
  MarkAsWeakContainer(map->dependent_code());
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}


void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}


void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  if (accessor_info->IsExecutableAccessorInfo()) {
    ExecutableAccessorInfo* executable_accessor_info =
        ExecutableAccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         ExecutableAccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         ExecutableAccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         ExecutableAccessorInfo::kDataOffset);
  }
}


void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}


void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}


void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}


void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
                        "(%s code)", CodeStub::MajorName(
                                         CodeStub::GetMajorKey(code), true)));
  }
}


void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
1495   TagObject(code->deoptimization_data(), "(code deopt data)");
1496   SetInternalReference(code, entry,
1497                        "deoptimization_data", code->deoptimization_data(),
1498                        Code::kDeoptimizationDataOffset);
1499   if (code->kind() == Code::FUNCTION) {
1500     SetInternalReference(code, entry,
1501                          "type_feedback_info", code->type_feedback_info(),
1502                          Code::kTypeFeedbackInfoOffset);
1503   }
1504   SetInternalReference(code, entry,
1505                        "gc_metadata", code->gc_metadata(),
1506                        Code::kGCMetadataOffset);
1507   if (code->kind() == Code::OPTIMIZED_FUNCTION) {
1508     SetWeakReference(code, entry,
1509                      "next_code_link", code->next_code_link(),
1510                      Code::kNextCodeLinkOffset);
1511   }
1512 }
1513
1514
1515 void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1516   SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1517 }
1518
1519
1520 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1521   SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1522 }
1523
1524
1525 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1526                                                    PropertyCell* cell) {
1527   SetInternalReference(cell, entry, "value", cell->value(),
1528                        PropertyCell::kValueOffset);
1529   MarkAsWeakContainer(cell->dependent_code());
1530   SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1531                        PropertyCell::kDependentCodeOffset);
1532 }
1533
1534
1535 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1536                                                      AllocationSite* site) {
1537   SetInternalReference(site, entry, "transition_info", site->transition_info(),
1538                        AllocationSite::kTransitionInfoOffset);
1539   SetInternalReference(site, entry, "nested_site", site->nested_site(),
1540                        AllocationSite::kNestedSiteOffset);
1541   MarkAsWeakContainer(site->dependent_code());
1542   SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1543                        AllocationSite::kDependentCodeOffset);
1544   // Do not visit weak_next as it is not visited by the StaticVisitor,
1545   // and the weak_next field is of no particular interest here.
1546   STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
1547                AllocationSite::BodyDescriptor::kEndOffset);
1548 }
1549
1550
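// Allocator for a synthetic native entry that represents the off-heap
// backing store of a JSArrayBuffer.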
1551 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1552  public:
1553   JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1554       : size_(size)
1555       , explorer_(explorer) {
1556   }
1557   virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1558     return explorer_->AddEntry(
1559         static_cast<Address>(ptr),
1560         HeapEntry::kNative, "system / JSArrayBufferData", size_);
1561   }
1562  private:
1563   size_t size_;
1564   V8HeapExplorer* explorer_;
1565 };
1566
1567
1568 void V8HeapExplorer::ExtractJSArrayBufferReferences(
1569     int entry, JSArrayBuffer* buffer) {
1570   // Set up a reference to the native memory backing_store object.
1571   if (!buffer->backing_store())
1572     return;
1573   size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
1574   JSArrayBufferDataEntryAllocator allocator(data_size, this);
1575   HeapEntry* data_entry =
1576       filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1577   filler_->SetNamedReference(HeapGraphEdge::kInternal,
1578                              entry, "backing_store", data_entry);
1579 }
1580
1581
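// Elements of arrays previously marked via MarkAsWeakContainer are recorded
// as weak references; all other fixed array elements become internal
// references.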
1582 void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
1583   bool is_weak = weak_containers_.Contains(array);
1584   for (int i = 0, l = array->length(); i < l; ++i) {
1585     if (is_weak) {
1586       SetWeakReference(array, entry,
1587                        i, array->get(i), array->OffsetOfElementAt(i));
1588     } else {
1589       SetInternalReference(array, entry,
1590                            i, array->get(i), array->OffsetOfElementAt(i));
1591     }
1592   }
1593 }
1594
1595
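// For bound functions, records shortcut references to the bound receiver,
// the target function and each bound argument.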
1596 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1597   if (!js_obj->IsJSFunction()) return;
1598
1599   JSFunction* func = JSFunction::cast(js_obj);
1600   if (func->shared()->bound()) {
1601     FixedArray* bindings = func->function_bindings();
1602     SetNativeBindReference(js_obj, entry, "bound_this",
1603                            bindings->get(JSFunction::kBoundThisIndex));
1604     SetNativeBindReference(js_obj, entry, "bound_function",
1605                            bindings->get(JSFunction::kBoundFunctionIndex));
1606     for (int i = JSFunction::kBoundArgumentsStartIndex;
1607          i < bindings->length(); i++) {
1608       const char* reference_name = names_->GetFormatted(
1609           "bound_argument_%d",
1610           i - JSFunction::kBoundArgumentsStartIndex);
1611       SetNativeBindReference(js_obj, entry, reference_name,
1612                              bindings->get(i));
1613     }
1614   }
1615 }
1616
1617
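// Extracts named properties: fast-mode objects are walked via their
// descriptor array, global objects via the global dictionary (whose values
// are property cells), and other dictionary-mode objects via the name
// dictionary. Hidden properties are tagged and linked as internal
// references.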
1618 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1619   if (js_obj->HasFastProperties()) {
1620     DescriptorArray* descs = js_obj->map()->instance_descriptors();
1621     int real_size = js_obj->map()->NumberOfOwnDescriptors();
1622     for (int i = 0; i < real_size; i++) {
1623       PropertyDetails details = descs->GetDetails(i);
1624       switch (details.location()) {
1625         case kField: {
1626           Representation r = details.representation();
1627           if (r.IsSmi() || r.IsDouble()) break;
1628
1629           Name* k = descs->GetKey(i);
1630           FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
1631           Object* value = js_obj->RawFastPropertyAt(field_index);
1632           int field_offset =
1633               field_index.is_inobject() ? field_index.offset() : -1;
1634
1635           if (k != heap_->hidden_string()) {
1636             SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
1637                                                value, NULL, field_offset);
1638           } else {
1639             TagObject(value, "(hidden properties)");
1640             SetInternalReference(js_obj, entry, "hidden_properties", value,
1641                                  field_offset);
1642           }
1643           break;
1644         }
1645         case kDescriptor:
1646           SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1647                                              descs->GetKey(i),
1648                                              descs->GetValue(i));
1649           break;
1650       }
1651     }
1652   } else if (js_obj->IsGlobalObject()) {
1653     // We assume that global objects can only have slow properties.
1654     GlobalDictionary* dictionary = js_obj->global_dictionary();
1655     int length = dictionary->Capacity();
1656     for (int i = 0; i < length; ++i) {
1657       Object* k = dictionary->KeyAt(i);
1658       if (dictionary->IsKey(k)) {
1659         DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
1660         PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
1661         Object* value = cell->value();
1662         if (k == heap_->hidden_string()) {
1663           TagObject(value, "(hidden properties)");
1664           SetInternalReference(js_obj, entry, "hidden_properties", value);
1665           continue;
1666         }
1667         PropertyDetails details = cell->property_details();
1668         SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1669                                            Name::cast(k), value);
1670       }
1671     }
1672   } else {
1673     NameDictionary* dictionary = js_obj->property_dictionary();
1674     int length = dictionary->Capacity();
1675     for (int i = 0; i < length; ++i) {
1676       Object* k = dictionary->KeyAt(i);
1677       if (dictionary->IsKey(k)) {
1678         Object* value = dictionary->ValueAt(i);
1679         if (k == heap_->hidden_string()) {
1680           TagObject(value, "(hidden properties)");
1681           SetInternalReference(js_obj, entry, "hidden_properties", value);
1682           continue;
1683         }
1684         PropertyDetails details = dictionary->DetailsAt(i);
1685         SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
1686                                            Name::cast(k), value);
1687       }
1688     }
1689   }
1690 }
1691
1692
1693 void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
1694                                                  Name* key,
1695                                                  Object* callback_obj,
1696                                                  int field_offset) {
1697   if (!callback_obj->IsAccessorPair()) return;
1698   AccessorPair* accessors = AccessorPair::cast(callback_obj);
1699   SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
1700   Object* getter = accessors->getter();
1701   if (!getter->IsOddball()) {
1702     SetPropertyReference(js_obj, entry, key, getter, "get %s");
1703   }
1704   Object* setter = accessors->setter();
1705   if (!setter->IsOddball()) {
1706     SetPropertyReference(js_obj, entry, key, setter, "set %s");
1707   }
1708 }
1709
1710
1711 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1712   if (js_obj->HasFastObjectElements()) {
1713     FixedArray* elements = FixedArray::cast(js_obj->elements());
1714     int length = js_obj->IsJSArray() ?
1715         Smi::cast(JSArray::cast(js_obj)->length())->value() :
1716         elements->length();
1717     for (int i = 0; i < length; ++i) {
1718       if (!elements->get(i)->IsTheHole()) {
1719         SetElementReference(js_obj, entry, i, elements->get(i));
1720       }
1721     }
1722   } else if (js_obj->HasDictionaryElements()) {
1723     SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1724     int length = dictionary->Capacity();
1725     for (int i = 0; i < length; ++i) {
1726       Object* k = dictionary->KeyAt(i);
1727       if (dictionary->IsKey(k)) {
1728         DCHECK(k->IsNumber());
1729         uint32_t index = static_cast<uint32_t>(k->Number());
1730         SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1731       }
1732     }
1733   }
1734 }
1735
1736
1737 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1738   int length = js_obj->GetInternalFieldCount();
1739   for (int i = 0; i < length; ++i) {
1740     Object* o = js_obj->GetInternalField(i);
1741     SetInternalReference(
1742         js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1743   }
1744 }
1745
1746
1747 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1748   Heap* heap = object->GetHeap();
1749   if (object->IsJSFunction()) return heap->closure_string();
1750   String* constructor_name = object->constructor_name();
1751   if (constructor_name == heap->Object_string()) {
1752     // TODO(verwaest): Try to get object.constructor.name in this case.
1753     // This requires handlification of the V8HeapExplorer.
1754   }
1755   return object->constructor_name();
1756 }
1757
1758
1759 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1760   if (!obj->IsHeapObject()) return NULL;
1761   return filler_->FindOrAddEntry(obj, this);
1762 }
1763
1764
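// Visits the heap roots twice: first only the strong roots, then all roots.
// Diffing the two lists in FillReferences lets the explorer mark root
// references that are not strong as weak and tag builtin code objects with
// their builtin names.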
1765 class RootsReferencesExtractor : public ObjectVisitor {
1766  private:
1767   struct IndexTag {
1768     IndexTag(int index, VisitorSynchronization::SyncTag tag)
1769         : index(index), tag(tag) { }
1770     int index;
1771     VisitorSynchronization::SyncTag tag;
1772   };
1773
1774  public:
1775   explicit RootsReferencesExtractor(Heap* heap)
1776       : collecting_all_references_(false),
1777         previous_reference_count_(0),
1778         heap_(heap) {
1779   }
1780
1781   void VisitPointers(Object** start, Object** end) {
1782     if (collecting_all_references_) {
1783       for (Object** p = start; p < end; p++) all_references_.Add(*p);
1784     } else {
1785       for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1786     }
1787   }
1788
1789   void SetCollectingAllReferences() { collecting_all_references_ = true; }
1790
1791   void FillReferences(V8HeapExplorer* explorer) {
1792     DCHECK(strong_references_.length() <= all_references_.length());
1793     Builtins* builtins = heap_->isolate()->builtins();
1794     int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1795     while (all_index < all_references_.length()) {
1796       bool is_strong = strong_index < strong_references_.length()
1797           && strong_references_[strong_index] == all_references_[all_index];
1798       explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1799                                       !is_strong,
1800                                       all_references_[all_index]);
1801       if (reference_tags_[tags_index].tag ==
1802           VisitorSynchronization::kBuiltins) {
1803         DCHECK(all_references_[all_index]->IsCode());
1804         explorer->TagBuiltinCodeObject(
1805             Code::cast(all_references_[all_index]),
1806             builtins->name(builtin_index++));
1807       }
1808       ++all_index;
1809       if (is_strong) ++strong_index;
1810       if (reference_tags_[tags_index].index == all_index) ++tags_index;
1811     }
1812   }
1813
1814   void Synchronize(VisitorSynchronization::SyncTag tag) {
1815     if (collecting_all_references_ &&
1816         previous_reference_count_ != all_references_.length()) {
1817       previous_reference_count_ = all_references_.length();
1818       reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1819     }
1820   }
1821
1822  private:
1823   bool collecting_all_references_;
1824   List<Object*> strong_references_;
1825   List<Object*> all_references_;
1826   int previous_reference_count_;
1827   List<IndexTag> reference_tags_;
1828   Heap* heap_;
1829 };
1830
1831
1832 bool V8HeapExplorer::IterateAndExtractReferences(
1833     SnapshotFiller* filler) {
1834   filler_ = filler;
1835
1836   // Create references to the synthetic roots.
1837   SetRootGcRootsReference();
1838   for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
1839     SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
1840   }
1841
1842   // Make sure builtin code objects get their builtin tags
1843   // first. Otherwise a particular JSFunction's custom name could end up
1844   // being applied to a generic builtin's code object.
1845   RootsReferencesExtractor extractor(heap_);
1846   heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1847   extractor.SetCollectingAllReferences();
1848   heap_->IterateRoots(&extractor, VISIT_ALL);
1849   extractor.FillReferences(this);
1850
1851   // We have to do two passes as sometimes FixedArrays are used
1852   // to weakly hold their items, and it's impossible to distinguish
1853   // between these cases without processing the array owner first.
1854   bool interrupted =
1855       IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
1856       IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();
1857
1858   if (interrupted) {
1859     filler_ = NULL;
1860     return false;
1861   }
1862
1863   filler_ = NULL;
1864   return progress_->ProgressReport(true);
1865 }
1866
1867
1868 template<V8HeapExplorer::ExtractReferencesMethod extractor>
1869 bool V8HeapExplorer::IterateAndExtractSinglePass() {
1870   // Now iterate the whole heap.
1871   bool interrupted = false;
1872   HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1873   // Heap iteration with filtering must run to completion even when interrupted.
1874   for (HeapObject* obj = iterator.next();
1875        obj != NULL;
1876        obj = iterator.next(), progress_->ProgressStep()) {
1877     if (interrupted) continue;
1878
1879     HeapEntry* heap_entry = GetEntry(obj);
1880     int entry = heap_entry->index();
1881     if ((this->*extractor)(entry, obj)) {
1882       SetInternalReference(obj, entry,
1883                            "map", obj->map(), HeapObject::kMapOffset);
1884       // Extract unvisited fields as hidden references and restore tags
1885       // of visited fields.
1886       IndexedReferencesExtractor refs_extractor(this, obj, entry);
1887       obj->Iterate(&refs_extractor);
1888     }
1889
1890     if (!progress_->ProgressReport(false)) interrupted = true;
1891   }
1892   return interrupted;
1893 }
1894
1895
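// Filters out objects that would only add noise to the snapshot: oddballs
// and a set of canonical singletons (empty arrays, filler maps, etc.).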
1896 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1897   return object->IsHeapObject() && !object->IsOddball() &&
1898          object != heap_->empty_byte_array() &&
1899          object != heap_->empty_bytecode_array() &&
1900          object != heap_->empty_fixed_array() &&
1901          object != heap_->empty_descriptor_array() &&
1902          object != heap_->fixed_array_map() && object != heap_->cell_map() &&
1903          object != heap_->global_property_cell_map() &&
1904          object != heap_->shared_function_info_map() &&
1905          object != heap_->free_space_map() &&
1906          object != heap_->one_pointer_filler_map() &&
1907          object != heap_->two_pointer_filler_map();
1908 }
1909
1910
1911 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1912                                          int parent_entry,
1913                                          String* reference_name,
1914                                          Object* child_obj,
1915                                          int field_offset) {
1916   DCHECK(parent_entry == GetEntry(parent_obj)->index());
1917   HeapEntry* child_entry = GetEntry(child_obj);
1918   if (child_entry != NULL) {
1919     filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1920                                parent_entry,
1921                                names_->GetName(reference_name),
1922                                child_entry);
1923     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1924   }
1925 }
1926
1927
1928 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1929                                             int parent_entry,
1930                                             const char* reference_name,
1931                                             Object* child_obj) {
1932   DCHECK(parent_entry == GetEntry(parent_obj)->index());
1933   HeapEntry* child_entry = GetEntry(child_obj);
1934   if (child_entry != NULL) {
1935     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1936                                parent_entry,
1937                                reference_name,
1938                                child_entry);
1939   }
1940 }
1941
1942
1943 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1944                                          int parent_entry,
1945                                          int index,
1946                                          Object* child_obj) {
1947   DCHECK(parent_entry == GetEntry(parent_obj)->index());
1948   HeapEntry* child_entry = GetEntry(child_obj);
1949   if (child_entry != NULL) {
1950     filler_->SetIndexedReference(HeapGraphEdge::kElement,
1951                                  parent_entry,
1952                                  index,
1953                                  child_entry);
1954   }
1955 }
1956
1957
1958 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1959                                           int parent_entry,
1960                                           const char* reference_name,
1961                                           Object* child_obj,
1962                                           int field_offset) {
1963   DCHECK(parent_entry == GetEntry(parent_obj)->index());
1964   HeapEntry* child_entry = GetEntry(child_obj);
1965   if (child_entry == NULL) return;
1966   if (IsEssentialObject(child_obj)) {
1967     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1968                                parent_entry,
1969                                reference_name,
1970                                child_entry);
1971   }
1972   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1973 }
1974
1975
1976 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1977                                           int parent_entry,
1978                                           int index,
1979                                           Object* child_obj,
1980                                           int field_offset) {
1981   DCHECK(parent_entry == GetEntry(parent_obj)->index());
1982   HeapEntry* child_entry = GetEntry(child_obj);
1983   if (child_entry == NULL) return;
1984   if (IsEssentialObject(child_obj)) {
1985     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1986                                parent_entry,
1987                                names_->GetName(index),
1988                                child_entry);
1989   }
1990   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1991 }
1992
1993
1994 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1995                                         int parent_entry,
1996                                         int index,
1997                                         Object* child_obj) {
1998   DCHECK(parent_entry == GetEntry(parent_obj)->index());
1999   HeapEntry* child_entry = GetEntry(child_obj);
2000   if (child_entry != NULL && IsEssentialObject(child_obj)) {
2001     filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2002                                  parent_entry,
2003                                  index,
2004                                  child_entry);
2005   }
2006 }
2007
2008
2009 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2010                                       int parent_entry,
2011                                       const char* reference_name,
2012                                       Object* child_obj,
2013                                       int field_offset) {
2014   DCHECK(parent_entry == GetEntry(parent_obj)->index());
2015   HeapEntry* child_entry = GetEntry(child_obj);
2016   if (child_entry == NULL) return;
2017   if (IsEssentialObject(child_obj)) {
2018     filler_->SetNamedReference(HeapGraphEdge::kWeak,
2019                                parent_entry,
2020                                reference_name,
2021                                child_entry);
2022   }
2023   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2024 }
2025
2026
2027 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2028                                       int parent_entry,
2029                                       int index,
2030                                       Object* child_obj,
2031                                       int field_offset) {
2032   DCHECK(parent_entry == GetEntry(parent_obj)->index());
2033   HeapEntry* child_entry = GetEntry(child_obj);
2034   if (child_entry == NULL) return;
2035   if (IsEssentialObject(child_obj)) {
2036     filler_->SetNamedReference(HeapGraphEdge::kWeak,
2037                                parent_entry,
2038                                names_->GetFormatted("%d", index),
2039                                child_entry);
2040   }
2041   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2042 }
2043
2044
2045 void V8HeapExplorer::SetDataOrAccessorPropertyReference(
2046     PropertyKind kind, JSObject* parent_obj, int parent_entry,
2047     Name* reference_name, Object* child_obj, const char* name_format_string,
2048     int field_offset) {
2049   if (kind == kAccessor) {
2050     ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
2051                                 child_obj, field_offset);
2052   } else {
2053     SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
2054                          name_format_string, field_offset);
2055   }
2056 }
2057
2058
2059 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2060                                           int parent_entry,
2061                                           Name* reference_name,
2062                                           Object* child_obj,
2063                                           const char* name_format_string,
2064                                           int field_offset) {
2065   DCHECK(parent_entry == GetEntry(parent_obj)->index());
2066   HeapEntry* child_entry = GetEntry(child_obj);
2067   if (child_entry != NULL) {
2068     HeapGraphEdge::Type type =
2069         reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2070             ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2071     const char* name = name_format_string != NULL && reference_name->IsString()
2072         ? names_->GetFormatted(
2073               name_format_string,
2074               String::cast(reference_name)->ToCString(
2075                   DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2076         names_->GetName(reference_name);
2077
2078     filler_->SetNamedReference(type,
2079                                parent_entry,
2080                                name,
2081                                child_entry);
2082     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2083   }
2084 }
2085
2086
2087 void V8HeapExplorer::SetRootGcRootsReference() {
2088   filler_->SetIndexedAutoIndexReference(
2089       HeapGraphEdge::kElement,
2090       snapshot_->root()->index(),
2091       snapshot_->gc_roots());
2092 }
2093
2094
2095 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2096   HeapEntry* child_entry = GetEntry(child_obj);
2097   DCHECK(child_entry != NULL);
2098   filler_->SetNamedAutoIndexReference(
2099       HeapGraphEdge::kShortcut,
2100       snapshot_->root()->index(),
2101       child_entry);
2102 }
2103
2104
2105 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2106   filler_->SetIndexedAutoIndexReference(
2107       HeapGraphEdge::kElement,
2108       snapshot_->gc_roots()->index(),
2109       snapshot_->gc_subroot(tag));
2110 }
2111
2112
2113 void V8HeapExplorer::SetGcSubrootReference(
2114     VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2115   HeapEntry* child_entry = GetEntry(child_obj);
2116   if (child_entry != NULL) {
2117     const char* name = GetStrongGcSubrootName(child_obj);
2118     if (name != NULL) {
2119       filler_->SetNamedReference(
2120           HeapGraphEdge::kInternal,
2121           snapshot_->gc_subroot(tag)->index(),
2122           name,
2123           child_entry);
2124     } else {
2125       if (is_weak) {
2126         filler_->SetNamedAutoIndexReference(
2127             HeapGraphEdge::kWeak,
2128             snapshot_->gc_subroot(tag)->index(),
2129             child_entry);
2130       } else {
2131         filler_->SetIndexedAutoIndexReference(
2132             HeapGraphEdge::kElement,
2133             snapshot_->gc_subroot(tag)->index(),
2134             child_entry);
2135       }
2136     }
2137
2138     // Add a shortcut from the snapshot root to the JS global object.
2139     if (child_obj->IsNativeContext()) {
2140       Context* context = Context::cast(child_obj);
2141       GlobalObject* global = context->global_object();
2142       if (global->IsJSGlobalObject()) {
2143         bool is_debug_object =
2144             heap_->isolate()->debug()->IsDebugGlobal(global);
2145         if (!is_debug_object && !user_roots_.Contains(global)) {
2146           user_roots_.Insert(global);
2147           SetUserGlobalReference(global);
2148         }
2149       }
2150     }
2151   }
2152 }
2153
2154
2155 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2156   if (strong_gc_subroot_names_.is_empty()) {
2157 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2158 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2159     STRONG_ROOT_LIST(ROOT_NAME)
2160 #undef ROOT_NAME
2161 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2162     STRUCT_LIST(STRUCT_MAP_NAME)
2163 #undef STRUCT_MAP_NAME
2164 #define STRING_NAME(name, str) NAME_ENTRY(name)
2165     INTERNALIZED_STRING_LIST(STRING_NAME)
2166 #undef STRING_NAME
2167 #define SYMBOL_NAME(name) NAME_ENTRY(name)
2168     PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
2169 #undef SYMBOL_NAME
2170 #define SYMBOL_NAME(name, varname, description) NAME_ENTRY(name)
2171     PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
2172 #undef SYMBOL_NAME
2173 #undef NAME_ENTRY
2174     CHECK(!strong_gc_subroot_names_.is_empty());
2175   }
2176   return strong_gc_subroot_names_.GetTag(object);
2177 }
2178
2179
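// Assigns |tag| as the entry name for essential objects whose entry does
// not already have a name.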
2180 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2181   if (IsEssentialObject(obj)) {
2182     HeapEntry* entry = GetEntry(obj);
2183     if (entry->name()[0] == '\0') {
2184       entry->set_name(tag);
2185     }
2186   }
2187 }
2188
2189
2190 void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
2191   if (IsEssentialObject(object) && object->IsFixedArray()) {
2192     weak_containers_.Insert(object);
2193   }
2194 }
2195
2196
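// Collects handles to every JSGlobalObject reachable through the native
// contexts referenced from global handles.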
2197 class GlobalObjectsEnumerator : public ObjectVisitor {
2198  public:
2199   virtual void VisitPointers(Object** start, Object** end) {
2200     for (Object** p = start; p < end; p++) {
2201       if ((*p)->IsNativeContext()) {
2202         Context* context = Context::cast(*p);
2203         JSObject* proxy = context->global_proxy();
2204         if (proxy->IsJSGlobalProxy()) {
2205           Object* global = proxy->map()->prototype();
2206           if (global->IsJSGlobalObject()) {
2207             objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2208           }
2209         }
2210       }
2211     }
2212   }
2213   int count() { return objects_.length(); }
2214   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2215
2216  private:
2217   List<Handle<JSGlobalObject> > objects_;
2218 };
2219
2220
2221 // Modifies heap. Must not be run during heap traversal.
2222 void V8HeapExplorer::TagGlobalObjects() {
2223   Isolate* isolate = heap_->isolate();
2224   HandleScope scope(isolate);
2225   GlobalObjectsEnumerator enumerator;
2226   isolate->global_handles()->IterateAllRoots(&enumerator);
2227   const char** urls = NewArray<const char*>(enumerator.count());
2228   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2229     if (global_object_name_resolver_) {
2230       HandleScope scope(isolate);
2231       Handle<JSGlobalObject> global_obj = enumerator.at(i);
2232       urls[i] = global_object_name_resolver_->GetName(
2233           Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2234     } else {
2235       urls[i] = NULL;
2236     }
2237   }
2238
2239   DisallowHeapAllocation no_allocation;
2240   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2241     objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2242   }
2243
2244   DeleteArray(urls);
2245 }
2246
2247
2248 class GlobalHandlesExtractor : public ObjectVisitor {
2249  public:
2250   explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2251       : explorer_(explorer) {}
2252   virtual ~GlobalHandlesExtractor() {}
2253   virtual void VisitPointers(Object** start, Object** end) {
2254     UNREACHABLE();
2255   }
2256   virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2257     explorer_->VisitSubtreeWrapper(p, class_id);
2258   }
2259  private:
2260   NativeObjectsExplorer* explorer_;
2261 };
2262
2263
2264 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2265  public:
2266   BasicHeapEntriesAllocator(
2267       HeapSnapshot* snapshot,
2268       HeapEntry::Type entries_type)
2269     : snapshot_(snapshot),
2270       names_(snapshot_->profiler()->names()),
2271       heap_object_map_(snapshot_->profiler()->heap_object_map()),
2272       entries_type_(entries_type) {
2273   }
2274   virtual HeapEntry* AllocateEntry(HeapThing ptr);
2275  private:
2276   HeapSnapshot* snapshot_;
2277   StringsStorage* names_;
2278   HeapObjectsMap* heap_object_map_;
2279   HeapEntry::Type entries_type_;
2280 };
2281
2282
2283 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2284   v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2285   intptr_t elements = info->GetElementCount();
2286   intptr_t size = info->GetSizeInBytes();
2287   const char* name = elements != -1
2288       ? names_->GetFormatted(
2289             "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2290       : names_->GetCopy(info->GetLabel());
2291   return snapshot_->AddEntry(
2292       entries_type_,
2293       name,
2294       heap_object_map_->GenerateId(info),
2295       size != -1 ? static_cast<int>(size) : 0,
2296       0);
2297 }
2298
2299
2300 NativeObjectsExplorer::NativeObjectsExplorer(
2301     HeapSnapshot* snapshot,
2302     SnapshottingProgressReportingInterface* progress)
2303     : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2304       snapshot_(snapshot),
2305       names_(snapshot_->profiler()->names()),
2306       embedder_queried_(false),
2307       objects_by_info_(RetainedInfosMatch),
2308       native_groups_(StringsMatch),
2309       filler_(NULL) {
2310   synthetic_entries_allocator_ =
2311       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2312   native_entries_allocator_ =
2313       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2314 }
2315
2316
2317 NativeObjectsExplorer::~NativeObjectsExplorer() {
2318   for (HashMap::Entry* p = objects_by_info_.Start();
2319        p != NULL;
2320        p = objects_by_info_.Next(p)) {
2321     v8::RetainedObjectInfo* info =
2322         reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2323     info->Dispose();
2324     List<HeapObject*>* objects =
2325         reinterpret_cast<List<HeapObject*>* >(p->value);
2326     delete objects;
2327   }
2328   for (HashMap::Entry* p = native_groups_.Start();
2329        p != NULL;
2330        p = native_groups_.Next(p)) {
2331     v8::RetainedObjectInfo* info =
2332         reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2333     info->Dispose();
2334   }
2335   delete synthetic_entries_allocator_;
2336   delete native_entries_allocator_;
2337 }
2338
2339
2340 int NativeObjectsExplorer::EstimateObjectsCount() {
2341   FillRetainedObjects();
2342   return objects_by_info_.occupancy();
2343 }
2344
2345
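// Queries the embedder for retained native objects: object groups are
// gathered via the GC prologue callbacks, and wrappers with a class id are
// collected through GlobalHandlesExtractor. Runs at most once per explorer.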
2346 void NativeObjectsExplorer::FillRetainedObjects() {
2347   if (embedder_queried_) return;
2348   Isolate* isolate = isolate_;
2349   const GCType major_gc_type = kGCTypeMarkSweepCompact;
2350   // Record objects that are joined into ObjectGroups.
2351   isolate->heap()->CallGCPrologueCallbacks(
2352       major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2353   List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2354   for (int i = 0; i < groups->length(); ++i) {
2355     ObjectGroup* group = groups->at(i);
2356     if (group->info == NULL) continue;
2357     List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2358     for (size_t j = 0; j < group->length; ++j) {
2359       HeapObject* obj = HeapObject::cast(*group->objects[j]);
2360       list->Add(obj);
2361       in_groups_.Insert(obj);
2362     }
2363     group->info = NULL;  // Acquire info object ownership.
2364   }
2365   isolate->global_handles()->RemoveObjectGroups();
2366   isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2367   // Record objects that are not in ObjectGroups, but have class ID.
2368   GlobalHandlesExtractor extractor(this);
2369   isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2370   embedder_queried_ = true;
2371 }
2372
2373
2374 void NativeObjectsExplorer::FillImplicitReferences() {
2375   Isolate* isolate = isolate_;
2376   List<ImplicitRefGroup*>* groups =
2377       isolate->global_handles()->implicit_ref_groups();
2378   for (int i = 0; i < groups->length(); ++i) {
2379     ImplicitRefGroup* group = groups->at(i);
2380     HeapObject* parent = *group->parent;
2381     int parent_entry =
2382         filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2383     DCHECK(parent_entry != HeapEntry::kNoEntry);
2384     Object*** children = group->children;
2385     for (size_t j = 0; j < group->length; ++j) {
2386       Object* child = *children[j];
2387       HeapEntry* child_entry =
2388           filler_->FindOrAddEntry(child, native_entries_allocator_);
2389       filler_->SetNamedReference(
2390           HeapGraphEdge::kInternal,
2391           parent_entry,
2392           "native",
2393           child_entry);
2394     }
2395   }
2396   isolate->global_handles()->RemoveImplicitRefGroups();
2397 }
2398
2399 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2400     v8::RetainedObjectInfo* info) {
2401   HashMap::Entry* entry = objects_by_info_.LookupOrInsert(info, InfoHash(info));
2402   if (entry->value != NULL) {
2403     info->Dispose();
2404   } else {
2405     entry->value = new List<HeapObject*>(4);
2406   }
2407   return reinterpret_cast<List<HeapObject*>* >(entry->value);
2408 }
2409
2410
2411 bool NativeObjectsExplorer::IterateAndExtractReferences(
2412     SnapshotFiller* filler) {
2413   filler_ = filler;
2414   FillRetainedObjects();
2415   FillImplicitReferences();
2416   if (EstimateObjectsCount() > 0) {
2417     for (HashMap::Entry* p = objects_by_info_.Start();
2418          p != NULL;
2419          p = objects_by_info_.Next(p)) {
2420       v8::RetainedObjectInfo* info =
2421           reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2422       SetNativeRootReference(info);
2423       List<HeapObject*>* objects =
2424           reinterpret_cast<List<HeapObject*>* >(p->value);
2425       for (int i = 0; i < objects->length(); ++i) {
2426         SetWrapperNativeReferences(objects->at(i), info);
2427       }
2428     }
2429     SetRootNativeRootsReference();
2430   }
2431   filler_ = NULL;
2432   return true;
2433 }
2434
2435
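// Synthetic RetainedObjectInfo used to group native objects under a common
// label; the object deletes itself when disposed.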
2436 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2437  public:
2438   explicit NativeGroupRetainedObjectInfo(const char* label)
2439       : disposed_(false),
2440         hash_(reinterpret_cast<intptr_t>(label)),
2441         label_(label) {
2442   }
2443
2444   virtual ~NativeGroupRetainedObjectInfo() {}
2445   virtual void Dispose() {
2446     CHECK(!disposed_);
2447     disposed_ = true;
2448     delete this;
2449   }
2450   virtual bool IsEquivalent(RetainedObjectInfo* other) {
2451     return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2452   }
2453   virtual intptr_t GetHash() { return hash_; }
2454   virtual const char* GetLabel() { return label_; }
2455
2456  private:
2457   bool disposed_;
2458   intptr_t hash_;
2459   const char* label_;
2460 };
2461
2462
2463 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2464     const char* label) {
2465   const char* label_copy = names_->GetCopy(label);
2466   uint32_t hash = StringHasher::HashSequentialString(
2467       label_copy,
2468       static_cast<int>(strlen(label_copy)),
2469       isolate_->heap()->HashSeed());
2470   HashMap::Entry* entry =
2471       native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
2472   if (entry->value == NULL) {
2473     entry->value = new NativeGroupRetainedObjectInfo(label);
2474   }
2475   return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2476 }
2477
2478
2479 void NativeObjectsExplorer::SetNativeRootReference(
2480     v8::RetainedObjectInfo* info) {
2481   HeapEntry* child_entry =
2482       filler_->FindOrAddEntry(info, native_entries_allocator_);
2483   DCHECK(child_entry != NULL);
2484   NativeGroupRetainedObjectInfo* group_info =
2485       FindOrAddGroupInfo(info->GetGroupLabel());
2486   HeapEntry* group_entry =
2487       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2488   // |FindOrAddEntry| can move and resize the entries backing store. Reload
2489   // the potentially stale pointer.
2490   child_entry = filler_->FindEntry(info);
2491   filler_->SetNamedAutoIndexReference(
2492       HeapGraphEdge::kInternal,
2493       group_entry->index(),
2494       child_entry);
2495 }
2496
2497
2498 void NativeObjectsExplorer::SetWrapperNativeReferences(
2499     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2500   HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2501   DCHECK(wrapper_entry != NULL);
2502   HeapEntry* info_entry =
2503       filler_->FindOrAddEntry(info, native_entries_allocator_);
2504   DCHECK(info_entry != NULL);
2505   filler_->SetNamedReference(HeapGraphEdge::kInternal,
2506                              wrapper_entry->index(),
2507                              "native",
2508                              info_entry);
2509   filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2510                                         info_entry->index(),
2511                                         wrapper_entry);
2512 }
2513
2514
2515 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2516   for (HashMap::Entry* entry = native_groups_.Start();
2517        entry;
2518        entry = native_groups_.Next(entry)) {
2519     NativeGroupRetainedObjectInfo* group_info =
2520         static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2521     HeapEntry* group_entry =
2522         filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2523     DCHECK(group_entry != NULL);
2524     filler_->SetIndexedAutoIndexReference(
2525         HeapGraphEdge::kElement,
2526         snapshot_->root()->index(),
2527         group_entry);
2528   }
2529 }
2530
2531
2532 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2533   if (in_groups_.Contains(*p)) return;
2534   Isolate* isolate = isolate_;
2535   v8::RetainedObjectInfo* info =
2536       isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2537   if (info == NULL) return;
2538   GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2539 }
2540
2541
2542 HeapSnapshotGenerator::HeapSnapshotGenerator(
2543     HeapSnapshot* snapshot,
2544     v8::ActivityControl* control,
2545     v8::HeapProfiler::ObjectNameResolver* resolver,
2546     Heap* heap)
2547     : snapshot_(snapshot),
2548       control_(control),
2549       v8_heap_explorer_(snapshot_, this, resolver),
2550       dom_explorer_(snapshot_, this),
2551       heap_(heap) {
2552 }
2553
2554
2555 bool HeapSnapshotGenerator::GenerateSnapshot() {
2556   v8_heap_explorer_.TagGlobalObjects();
2557
2558   // TODO(1562) Profiler assumes that any object that is in the heap after
2559   // full GC is reachable from the root when computing dominators.
2560   // This is not true for weakly reachable objects.
2561   // As a temporary solution we call GC twice.
2562   heap_->CollectAllGarbage(
2563       Heap::kMakeHeapIterableMask,
2564       "HeapSnapshotGenerator::GenerateSnapshot");
2565   heap_->CollectAllGarbage(
2566       Heap::kMakeHeapIterableMask,
2567       "HeapSnapshotGenerator::GenerateSnapshot");
2568
2569 #ifdef VERIFY_HEAP
2570   Heap* debug_heap = heap_;
2571   if (FLAG_verify_heap) {
2572     debug_heap->Verify();
2573   }
2574 #endif
2575
2576   SetProgressTotal(2);  // 2 passes.
2577
2578 #ifdef VERIFY_HEAP
2579   if (FLAG_verify_heap) {
2580     debug_heap->Verify();
2581   }
2582 #endif
2583
2584   snapshot_->AddSyntheticRootEntries();
2585
2586   if (!FillReferences()) return false;
2587
2588   snapshot_->FillChildren();
2589   snapshot_->RememberLastJSObjectId();
2590
2591   progress_counter_ = progress_total_;
2592   if (!ProgressReport(true)) return false;
2593   return true;
2594 }
2595
2596
2597 void HeapSnapshotGenerator::ProgressStep() {
2598   ++progress_counter_;
2599 }
2600
2601
2602 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2603   const int kProgressReportGranularity = 10000;
2604   if (control_ != NULL
2605       && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2606       return
2607           control_->ReportProgressValue(progress_counter_, progress_total_) ==
2608           v8::ActivityControl::kContinue;
2609   }
2610   return true;
2611 }
2612
2613
2614 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2615   if (control_ == NULL) return;
2616   HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2617   progress_total_ = iterations_count * (
2618       v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2619       dom_explorer_.EstimateObjectsCount());
2620   progress_counter_ = 0;
2621 }
2622
2623
2624 bool HeapSnapshotGenerator::FillReferences() {
2625   SnapshotFiller filler(snapshot_, &entries_);
2626   return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2627       && dom_explorer_.IterateAndExtractReferences(&filler);
2628 }
2629
2630
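// Upper bounds on the number of decimal digits needed to print 4- and
// 8-byte integers (the signed bound includes the minus sign).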
2631 template<int bytes> struct MaxDecimalDigitsIn;
2632 template<> struct MaxDecimalDigitsIn<4> {
2633   static const int kSigned = 11;
2634   static const int kUnsigned = 10;
2635 };
2636 template<> struct MaxDecimalDigitsIn<8> {
2637   static const int kSigned = 20;
2638   static const int kUnsigned = 20;
2639 };
2640
2641
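// Buffers serializer output into fixed-size chunks and hands each full
// chunk to the embedder-provided v8::OutputStream. Writing is aborted as
// soon as the stream requests it.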
2642 class OutputStreamWriter {
2643  public:
2644   explicit OutputStreamWriter(v8::OutputStream* stream)
2645       : stream_(stream),
2646         chunk_size_(stream->GetChunkSize()),
2647         chunk_(chunk_size_),
2648         chunk_pos_(0),
2649         aborted_(false) {
2650     DCHECK(chunk_size_ > 0);
2651   }
2652   bool aborted() { return aborted_; }
2653   void AddCharacter(char c) {
2654     DCHECK(c != '\0');
2655     DCHECK(chunk_pos_ < chunk_size_);
2656     chunk_[chunk_pos_++] = c;
2657     MaybeWriteChunk();
2658   }
2659   void AddString(const char* s) {
2660     AddSubstring(s, StrLength(s));
2661   }
2662   void AddSubstring(const char* s, int n) {
2663     if (n <= 0) return;
2664     DCHECK(static_cast<size_t>(n) <= strlen(s));
2665     const char* s_end = s + n;
2666     while (s < s_end) {
2667       int s_chunk_size =
2668           Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2669       DCHECK(s_chunk_size > 0);
2670       MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2671       s += s_chunk_size;
2672       chunk_pos_ += s_chunk_size;
2673       MaybeWriteChunk();
2674     }
2675   }
2676   void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2677   void Finalize() {
2678     if (aborted_) return;
2679     DCHECK(chunk_pos_ < chunk_size_);
2680     if (chunk_pos_ != 0) {
2681       WriteChunk();
2682     }
2683     stream_->EndOfStream();
2684   }
2685
2686  private:
2687   template<typename T>
2688   void AddNumberImpl(T n, const char* format) {
2689     // Buffer for the longest value plus trailing \0
2690     static const int kMaxNumberSize =
2691         MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2692     if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2693       int result = SNPrintF(
2694           chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2695       DCHECK(result != -1);
2696       chunk_pos_ += result;
2697       MaybeWriteChunk();
2698     } else {
2699       EmbeddedVector<char, kMaxNumberSize> buffer;
2700       int result = SNPrintF(buffer, format, n);
2701       USE(result);
2702       DCHECK(result != -1);
2703       AddString(buffer.start());
2704     }
2705   }
2706   void MaybeWriteChunk() {
2707     DCHECK(chunk_pos_ <= chunk_size_);
2708     if (chunk_pos_ == chunk_size_) {
2709       WriteChunk();
2710     }
2711   }
2712   void WriteChunk() {
2713     if (aborted_) return;
2714     if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2715         v8::OutputStream::kAbort) aborted_ = true;
2716     chunk_pos_ = 0;
2717   }
2718
2719   v8::OutputStream* stream_;
2720   int chunk_size_;
2721   ScopedVector<char> chunk_;
2722   int chunk_pos_;
2723   bool aborted_;
2724 };
2725
2726
2727 // type, name|index, to_node.
2728 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2729 // type, name, id, self_size, edge_count, trace_node_id.
2730 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2731
2732 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2733   if (AllocationTracker* allocation_tracker =
2734       snapshot_->profiler()->allocation_tracker()) {
2735     allocation_tracker->PrepareForSerialization();
2736   }
2737   DCHECK(writer_ == NULL);
2738   writer_ = new OutputStreamWriter(stream);
2739   SerializeImpl();
2740   delete writer_;
2741   writer_ = NULL;
2742 }
2743
2744
2745 void HeapSnapshotJSONSerializer::SerializeImpl() {
2746   DCHECK(0 == snapshot_->root()->index());
2747   writer_->AddCharacter('{');
2748   writer_->AddString("\"snapshot\":{");
2749   SerializeSnapshot();
2750   if (writer_->aborted()) return;
2751   writer_->AddString("},\n");
2752   writer_->AddString("\"nodes\":[");
2753   SerializeNodes();
2754   if (writer_->aborted()) return;
2755   writer_->AddString("],\n");
2756   writer_->AddString("\"edges\":[");
2757   SerializeEdges();
2758   if (writer_->aborted()) return;
2759   writer_->AddString("],\n");
2760
2761   writer_->AddString("\"trace_function_infos\":[");
2762   SerializeTraceNodeInfos();
2763   if (writer_->aborted()) return;
2764   writer_->AddString("],\n");
2765   writer_->AddString("\"trace_tree\":[");
2766   SerializeTraceTree();
2767   if (writer_->aborted()) return;
2768   writer_->AddString("],\n");
2769
2770   writer_->AddString("\"samples\":[");
2771   SerializeSamples();
2772   if (writer_->aborted()) return;
2773   writer_->AddString("],\n");
2774
2775   writer_->AddString("\"strings\":[");
2776   SerializeStrings();
2777   if (writer_->aborted()) return;
2778   writer_->AddCharacter(']');
2779   writer_->AddCharacter('}');
2780   writer_->Finalize();
2781 }
2782
2783
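// Returns the id of |s| in the string table, assigning the next sequential
// id when the string is seen for the first time.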
2784 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2785   HashMap::Entry* cache_entry =
2786       strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
2787   if (cache_entry->value == NULL) {
2788     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2789   }
2790   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2791 }
2792
2793
2794 namespace {
2795
2796 template<size_t size> struct ToUnsigned;
2797
2798 template<> struct ToUnsigned<4> {
2799   typedef uint32_t Type;
2800 };
2801
2802 template<> struct ToUnsigned<8> {
2803   typedef uint64_t Type;
2804 };
2805
2806 }  // namespace
2807
2808
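// Writes the decimal representation of |value| into |buffer| starting at
// |buffer_pos| and returns the position just past the last digit. No
// terminating '\0' is written.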
2809 template<typename T>
2810 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2811   STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned
2812   int number_of_digits = 0;
2813   T t = value;
2814   do {
2815     ++number_of_digits;
2816   } while (t /= 10);
2817
2818   buffer_pos += number_of_digits;
2819   int result = buffer_pos;
2820   do {
2821     int last_digit = static_cast<int>(value % 10);
2822     buffer[--buffer_pos] = '0' + last_digit;
2823     value /= 10;
2824   } while (value);
2825   return result;
2826 }
2827
2828
2829 template<typename T>
2830 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2831   typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2832   STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
2833   return utoa_impl(unsigned_value, buffer, buffer_pos);
2834 }
2835
2836
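// An edge is serialized as "type,name_or_index,to_node", prefixed with a
// comma for every edge except the first one.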
2837 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2838                                                bool first_edge) {
2839   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2840   static const int kBufferSize =
2841       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
2842   EmbeddedVector<char, kBufferSize> buffer;
2843   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2844       || edge->type() == HeapGraphEdge::kHidden
2845       ? edge->index() : GetStringId(edge->name());
2846   int buffer_pos = 0;
2847   if (!first_edge) {
2848     buffer[buffer_pos++] = ',';
2849   }
2850   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2851   buffer[buffer_pos++] = ',';
2852   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2853   buffer[buffer_pos++] = ',';
2854   buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2855   buffer[buffer_pos++] = '\n';
2856   buffer[buffer_pos++] = '\0';
2857   writer_->AddString(buffer.start());
2858 }
2859
2860
2861 void HeapSnapshotJSONSerializer::SerializeEdges() {
2862   List<HeapGraphEdge*>& edges = snapshot_->children();
2863   for (int i = 0; i < edges.length(); ++i) {
2864     DCHECK(i == 0 ||
2865            edges[i - 1]->from()->index() <= edges[i]->from()->index());
2866     SerializeEdge(edges[i], i == 0);
2867     if (writer_->aborted()) return;
2868   }
2869 }
2870
2871
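// Serializes one node as "type,name,id,self_size,edge_count,trace_node_id"
// followed by '\n', matching "node_fields" in the meta object; every record
// except the first is prefixed with ','.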
2872 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2873   // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0
2874   static const int kBufferSize =
2875       5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2876       + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned  // NOLINT
2877       + 6 + 1 + 1;
2878   EmbeddedVector<char, kBufferSize> buffer;
2879   int buffer_pos = 0;
2880   if (entry_index(entry) != 0) {
2881     buffer[buffer_pos++] = ',';
2882   }
2883   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2884   buffer[buffer_pos++] = ',';
2885   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2886   buffer[buffer_pos++] = ',';
2887   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2888   buffer[buffer_pos++] = ',';
2889   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2890   buffer[buffer_pos++] = ',';
2891   buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2892   buffer[buffer_pos++] = ',';
2893   buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2894   buffer[buffer_pos++] = '\n';
2895   buffer[buffer_pos++] = '\0';
2896   writer_->AddString(buffer.start());
2897 }
2898
2899
2900 void HeapSnapshotJSONSerializer::SerializeNodes() {
2901   List<HeapEntry>& entries = snapshot_->entries();
2902   for (int i = 0; i < entries.length(); ++i) {
2903     SerializeNode(&entries[i]);
2904     if (writer_->aborted()) return;
2905   }
2906 }
2907
2908
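// Writes the "meta" description of the snapshot format together with the
// node, edge and trace function counts. The remaining top-level arrays are
// filled in by the dedicated Serialize* methods.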
2909 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2910   writer_->AddString("\"meta\":");
2911   // The object describing the serialization layout of nodes, edges, trace
2911   // records and samples.
2912   // We use a set of macros to improve readability.
2913 #define JSON_A(s) "[" s "]"
2914 #define JSON_O(s) "{" s "}"
2915 #define JSON_S(s) "\"" s "\""
2916   writer_->AddString(JSON_O(
2917     JSON_S("node_fields") ":" JSON_A(
2918         JSON_S("type") ","
2919         JSON_S("name") ","
2920         JSON_S("id") ","
2921         JSON_S("self_size") ","
2922         JSON_S("edge_count") ","
2923         JSON_S("trace_node_id")) ","
2924     JSON_S("node_types") ":" JSON_A(
2925         JSON_A(
2926             JSON_S("hidden") ","
2927             JSON_S("array") ","
2928             JSON_S("string") ","
2929             JSON_S("object") ","
2930             JSON_S("code") ","
2931             JSON_S("closure") ","
2932             JSON_S("regexp") ","
2933             JSON_S("number") ","
2934             JSON_S("native") ","
2935             JSON_S("synthetic") ","
2936             JSON_S("concatenated string") ","
2937             JSON_S("sliced string")) ","
2938         JSON_S("string") ","
2939         JSON_S("number") ","
2940         JSON_S("number") ","
2941         JSON_S("number") ","
2942         JSON_S("number") ","
2943         JSON_S("number")) ","
2944     JSON_S("edge_fields") ":" JSON_A(
2945         JSON_S("type") ","
2946         JSON_S("name_or_index") ","
2947         JSON_S("to_node")) ","
2948     JSON_S("edge_types") ":" JSON_A(
2949         JSON_A(
2950             JSON_S("context") ","
2951             JSON_S("element") ","
2952             JSON_S("property") ","
2953             JSON_S("internal") ","
2954             JSON_S("hidden") ","
2955             JSON_S("shortcut") ","
2956             JSON_S("weak")) ","
2957         JSON_S("string_or_number") ","
2958         JSON_S("node")) ","
2959     JSON_S("trace_function_info_fields") ":" JSON_A(
2960         JSON_S("function_id") ","
2961         JSON_S("name") ","
2962         JSON_S("script_name") ","
2963         JSON_S("script_id") ","
2964         JSON_S("line") ","
2965         JSON_S("column")) ","
2966     JSON_S("trace_node_fields") ":" JSON_A(
2967         JSON_S("id") ","
2968         JSON_S("function_info_index") ","
2969         JSON_S("count") ","
2970         JSON_S("size") ","
2971         JSON_S("children")) ","
2972     JSON_S("sample_fields") ":" JSON_A(
2973         JSON_S("timestamp_us") ","
2974         JSON_S("last_assigned_id"))));
2975 #undef JSON_S
2976 #undef JSON_O
2977 #undef JSON_A
2978   writer_->AddString(",\"node_count\":");
2979   writer_->AddNumber(snapshot_->entries().length());
2980   writer_->AddString(",\"edge_count\":");
2981   writer_->AddNumber(snapshot_->edges().length());
2982   writer_->AddString(",\"trace_function_count\":");
2983   uint32_t count = 0;
2984   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2985   if (tracker) {
2986     count = tracker->function_info_list().length();
2987   }
2988   writer_->AddNumber(count);
2989 }
2990
2991
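// Writes the low 16 bits of |u| as a JSON "\uXXXX" escape using uppercase
// hex digits.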
2992 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2993   static const char hex_chars[] = "0123456789ABCDEF";
2994   w->AddString("\\u");
2995   w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2996   w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2997   w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2998   w->AddCharacter(hex_chars[u & 0xf]);
2999 }
3000
3001
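// Serializes the allocation trace tree as a nested array rooted at the
// allocation tracker's root node; writes nothing if allocation tracking was
// not enabled.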
3002 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
3003   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3004   if (!tracker) return;
3005   AllocationTraceTree* traces = tracker->trace_tree();
3006   SerializeTraceNode(traces->root());
3007 }
3008
3009
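// Recursively serializes |node| as
// "id,function_info_index,count,size,[child,child,...]", matching
// "trace_node_fields" in the meta object; children are comma-separated.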
3010 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
3011   // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
3012   const int kBufferSize =
3013       4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
3014       + 4 + 1 + 1;
3015   EmbeddedVector<char, kBufferSize> buffer;
3016   int buffer_pos = 0;
3017   buffer_pos = utoa(node->id(), buffer, buffer_pos);
3018   buffer[buffer_pos++] = ',';
3019   buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
3020   buffer[buffer_pos++] = ',';
3021   buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
3022   buffer[buffer_pos++] = ',';
3023   buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
3024   buffer[buffer_pos++] = ',';
3025   buffer[buffer_pos++] = '[';
3026   buffer[buffer_pos++] = '\0';
3027   writer_->AddString(buffer.start());
3028
3029   Vector<AllocationTraceNode*> children = node->children();
3030   for (int i = 0; i < children.length(); i++) {
3031     if (i > 0) {
3032       writer_->AddCharacter(',');
3033     }
3034     SerializeTraceNode(children[i]);
3035   }
3036   writer_->AddCharacter(']');
3037 }
3038
3039
3040 // A 0-based position is converted to 1-based during serialization;
3040 // an unknown position (-1) is written as 0.
3041 static int SerializePosition(int position, const Vector<char>& buffer,
3042                              int buffer_pos) {
3043   if (position == -1) {
3044     buffer[buffer_pos++] = '0';
3045   } else {
3046     DCHECK(position >= 0);
3047     buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
3048   }
3049   return buffer_pos;
3050 }
3051
3052
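// Writes one "function_id,name,script_name,script_id,line,column" record
// per function recorded by the allocation tracker; records are
// comma-separated. Line and column are emitted 1-based, with 0 meaning the
// position is unknown (see SerializePosition()).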
3053 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
3054   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
3055   if (!tracker) return;
3056   // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
3057   const int kBufferSize =
3058       6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
3059       + 6 + 1 + 1;
3060   EmbeddedVector<char, kBufferSize> buffer;
3061   const List<AllocationTracker::FunctionInfo*>& list =
3062       tracker->function_info_list();
3063   for (int i = 0; i < list.length(); i++) {
3064     AllocationTracker::FunctionInfo* info = list[i];
3065     int buffer_pos = 0;
3066     if (i > 0) {
3067       buffer[buffer_pos++] = ',';
3068     }
3069     buffer_pos = utoa(info->function_id, buffer, buffer_pos);
3070     buffer[buffer_pos++] = ',';
3071     buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
3072     buffer[buffer_pos++] = ',';
3073     buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3074     buffer[buffer_pos++] = ',';
3075     // The cast is safe because script id is a non-negative Smi.
3076     buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3077         buffer_pos);
3078     buffer[buffer_pos++] = ',';
3079     buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3080     buffer[buffer_pos++] = ',';
3081     buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3082     buffer[buffer_pos++] = '\n';
3083     buffer[buffer_pos++] = '\0';
3084     writer_->AddString(buffer.start());
3085   }
3086 }
3087
3088
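// Writes one "timestamp_us,last_assigned_id" record per heap sample;
// timestamps are microseconds relative to the first sample, and records are
// comma-separated.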
3089 void HeapSnapshotJSONSerializer::SerializeSamples() {
3090   const List<HeapObjectsMap::TimeInterval>& samples =
3091       snapshot_->profiler()->heap_object_map()->samples();
3092   if (samples.is_empty()) return;
3093   base::TimeTicks start_time = samples[0].timestamp;
3094   // The buffer needs space for the 64-bit timestamp delta, the sample id, 2 commas, \n and \0
3095   const int kBufferSize = MaxDecimalDigitsIn<sizeof(
3096                               base::TimeDelta().InMicroseconds())>::kUnsigned +
3097                           MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
3098                           2 + 1 + 1;
3099   EmbeddedVector<char, kBufferSize> buffer;
3100   for (int i = 0; i < samples.length(); i++) {
3101     HeapObjectsMap::TimeInterval& sample = samples[i];
3102     int buffer_pos = 0;
3103     if (i > 0) {
3104       buffer[buffer_pos++] = ',';
3105     }
3106     base::TimeDelta time_delta = sample.timestamp - start_time;
3107     buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
3108     buffer[buffer_pos++] = ',';
3109     buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
3110     buffer[buffer_pos++] = '\n';
3111     buffer[buffer_pos++] = '\0';
3112     writer_->AddString(buffer.start());
3113   }
3114 }
3115
3116
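// Writes |s| on a new line as a quoted JSON string. Control characters get
// dedicated or \u escapes, '"' and '\\' are backslash-escaped, printable
// ASCII is copied verbatim, and multi-byte UTF-8 sequences are re-encoded as
// \u escapes ('?' is written for invalid sequences).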
3117 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3118   writer_->AddCharacter('\n');
3119   writer_->AddCharacter('\"');
3120   for ( ; *s != '\0'; ++s) {
3121     switch (*s) {
3122       case '\b':
3123         writer_->AddString("\\b");
3124         continue;
3125       case '\f':
3126         writer_->AddString("\\f");
3127         continue;
3128       case '\n':
3129         writer_->AddString("\\n");
3130         continue;
3131       case '\r':
3132         writer_->AddString("\\r");
3133         continue;
3134       case '\t':
3135         writer_->AddString("\\t");
3136         continue;
3137       case '\"':
3138       case '\\':
3139         writer_->AddCharacter('\\');
3140         writer_->AddCharacter(*s);
3141         continue;
3142       default:
3143         if (*s > 31 && *s < 128) {
3144           writer_->AddCharacter(*s);
3145         } else if (*s <= 31) {
3146           // Control character with no dedicated escape sequence.
3147           WriteUChar(writer_, *s);
3148         } else {
3149           // Re-encode a multi-byte UTF-8 sequence as a \u UTF-16 escape.
3150           size_t length = 1, cursor = 0;
3151           for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3152           unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3153           if (c != unibrow::Utf8::kBadChar) {
3154             WriteUChar(writer_, c);
3155             DCHECK(cursor != 0);
3156             s += cursor - 1;
3157           } else {
3158             writer_->AddCharacter('?');
3159           }
3160         }
3161     }
3162   }
3163   writer_->AddCharacter('\"');
3164 }
3165
3166
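// Writes the "strings" array in string id order; slot 0 holds a "<dummy>"
// placeholder so that ids returned by GetStringId() can be used directly as
// indices into the array.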
3167 void HeapSnapshotJSONSerializer::SerializeStrings() {
3168   ScopedVector<const unsigned char*> sorted_strings(
3169       strings_.occupancy() + 1);
3170   for (HashMap::Entry* entry = strings_.Start();
3171        entry != NULL;
3172        entry = strings_.Next(entry)) {
3173     int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3174     sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3175   }
3176   writer_->AddString("\"<dummy>\"");
3177   for (int i = 1; i < sorted_strings.length(); ++i) {
3178     writer_->AddCharacter(',');
3179     SerializeString(sorted_strings[i]);
3180     if (writer_->aborted()) return;
3181   }
3182 }
3183
3184
3185 }  // namespace internal
3186 }  // namespace v8