1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "heap-snapshot-generator-inl.h"
31
32 #include "allocation-tracker.h"
33 #include "code-stubs.h"
34 #include "heap-profiler.h"
35 #include "debug.h"
36 #include "types.h"
37 #include "v8conversions.h"
38
39 namespace v8 {
40 namespace internal {
41
42
43 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
44     : type_(type),
45       from_index_(from),
46       to_index_(to),
47       name_(name) {
48   ASSERT(type == kContextVariable
49       || type == kProperty
50       || type == kInternal
51       || type == kShortcut
52       || type == kWeak);
53 }
54
55
56 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
57     : type_(type),
58       from_index_(from),
59       to_index_(to),
60       index_(index) {
61   ASSERT(type == kElement || type == kHidden);
62 }
63
64
65 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
66   to_entry_ = &snapshot->entries()[to_index_];
67 }
68
69
70 const int HeapEntry::kNoEntry = -1;
71
72 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
73                      Type type,
74                      const char* name,
75                      SnapshotObjectId id,
76                      size_t self_size,
77                      unsigned trace_node_id)
78     : type_(type),
79       children_count_(0),
80       children_index_(-1),
81       self_size_(self_size),
82       snapshot_(snapshot),
83       name_(name),
84       id_(id),
85       trace_node_id_(trace_node_id) { }
86
87
88 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
89                                   const char* name,
90                                   HeapEntry* entry) {
91   HeapGraphEdge edge(type, name, this->index(), entry->index());
92   snapshot_->edges().Add(edge);
93   ++children_count_;
94 }
95
96
97 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
98                                     int index,
99                                     HeapEntry* entry) {
100   HeapGraphEdge edge(type, index, this->index(), entry->index());
101   snapshot_->edges().Add(edge);
102   ++children_count_;
103 }
104
105
106 void HeapEntry::Print(
107     const char* prefix, const char* edge_name, int max_depth, int indent) {
108   STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
109   OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ",
110             self_size(), id(), indent, ' ', prefix, edge_name);
111   if (type() != kString) {
112     OS::Print("%s %.40s\n", TypeAsString(), name_);
113   } else {
114     OS::Print("\"");
115     const char* c = name_;
116     while (*c && (c - name_) <= 40) {
117       if (*c != '\n')
118         OS::Print("%c", *c);
119       else
120         OS::Print("\\n");
121       ++c;
122     }
123     OS::Print("\"\n");
124   }
125   if (--max_depth == 0) return;
126   Vector<HeapGraphEdge*> ch = children();
127   for (int i = 0; i < ch.length(); ++i) {
128     HeapGraphEdge& edge = *ch[i];
129     const char* edge_prefix = "";
130     EmbeddedVector<char, 64> index;
131     const char* edge_name = index.start();
132     switch (edge.type()) {
133       case HeapGraphEdge::kContextVariable:
134         edge_prefix = "#";
135         edge_name = edge.name();
136         break;
137       case HeapGraphEdge::kElement:
138         OS::SNPrintF(index, "%d", edge.index());
139         break;
140       case HeapGraphEdge::kInternal:
141         edge_prefix = "$";
142         edge_name = edge.name();
143         break;
144       case HeapGraphEdge::kProperty:
145         edge_name = edge.name();
146         break;
147       case HeapGraphEdge::kHidden:
148         edge_prefix = "$";
149         OS::SNPrintF(index, "%d", edge.index());
150         break;
151       case HeapGraphEdge::kShortcut:
152         edge_prefix = "^";
153         edge_name = edge.name();
154         break;
155       case HeapGraphEdge::kWeak:
156         edge_prefix = "w";
157         edge_name = edge.name();
158         break;
159       default:
160         OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
161     }
162     edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
163   }
164 }
165
166
167 const char* HeapEntry::TypeAsString() {
168   switch (type()) {
169     case kHidden: return "/hidden/";
170     case kObject: return "/object/";
171     case kClosure: return "/closure/";
172     case kString: return "/string/";
173     case kCode: return "/code/";
174     case kArray: return "/array/";
175     case kRegExp: return "/regexp/";
176     case kHeapNumber: return "/number/";
177     case kNative: return "/native/";
178     case kSynthetic: return "/synthetic/";
179     case kConsString: return "/concatenated string/";
180     case kSlicedString: return "/sliced string/";
181     default: return "???";
182   }
183 }
184
185
186 // It is very important to keep objects that form a heap snapshot
187 // as small as possible.
188 namespace {  // Avoid littering the global namespace.
189
190 template <size_t ptr_size> struct SnapshotSizeConstants;
191
192 template <> struct SnapshotSizeConstants<4> {
193   static const int kExpectedHeapGraphEdgeSize = 12;
194   static const int kExpectedHeapEntrySize = 28;
195 };
196
197 template <> struct SnapshotSizeConstants<8> {
198   static const int kExpectedHeapGraphEdgeSize = 24;
199   static const int kExpectedHeapEntrySize = 40;
200 };
201
202 }  // namespace
203
204
205 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
206                            const char* title,
207                            unsigned uid)
208     : profiler_(profiler),
209       title_(title),
210       uid_(uid),
211       root_index_(HeapEntry::kNoEntry),
212       gc_roots_index_(HeapEntry::kNoEntry),
213       natives_root_index_(HeapEntry::kNoEntry),
214       max_snapshot_js_object_id_(0) {
215   STATIC_CHECK(
216       sizeof(HeapGraphEdge) ==
217       SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
218   STATIC_CHECK(
219       sizeof(HeapEntry) ==
220       SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
221   USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
222   USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
223   USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
224   USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
225   for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
226     gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
227   }
228 }
229
230
231 void HeapSnapshot::Delete() {
232   profiler_->RemoveSnapshot(this);
233   delete this;
234 }
235
236
237 void HeapSnapshot::RememberLastJSObjectId() {
238   max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
239 }
240
241
242 HeapEntry* HeapSnapshot::AddRootEntry() {
243   ASSERT(root_index_ == HeapEntry::kNoEntry);
244   ASSERT(entries_.is_empty());  // Root entry must be the first one.
245   HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
246                               "",
247                               HeapObjectsMap::kInternalRootObjectId,
248                               0,
249                               0);
250   root_index_ = entry->index();
251   ASSERT(root_index_ == 0);
252   return entry;
253 }
254
255
256 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
257   ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
258   HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
259                               "(GC roots)",
260                               HeapObjectsMap::kGcRootsObjectId,
261                               0,
262                               0);
263   gc_roots_index_ = entry->index();
264   return entry;
265 }
266
267
268 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
269   ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
270   ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
271   HeapEntry* entry = AddEntry(
272       HeapEntry::kSynthetic,
273       VisitorSynchronization::kTagNames[tag],
274       HeapObjectsMap::GetNthGcSubrootId(tag),
275       0,
276       0);
277   gc_subroot_indexes_[tag] = entry->index();
278   return entry;
279 }
280
281
282 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
283                                   const char* name,
284                                   SnapshotObjectId id,
285                                   size_t size,
286                                   unsigned trace_node_id) {
287   HeapEntry entry(this, type, name, id, size, trace_node_id);
288   entries_.Add(entry);
289   return &entries_.last();
290 }
291
292
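// Distributes the flat edges_ list among entries: each entry first claims its
// slice of the children() vector, then every edge resolves its to-index into
// a HeapEntry pointer and registers itself with its parent entry.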
293 void HeapSnapshot::FillChildren() {
294   ASSERT(children().is_empty());
295   children().Allocate(edges().length());
296   int children_index = 0;
297   for (int i = 0; i < entries().length(); ++i) {
298     HeapEntry* entry = &entries()[i];
299     children_index = entry->set_children_index(children_index);
300   }
301   ASSERT(edges().length() == children_index);
302   for (int i = 0; i < edges().length(); ++i) {
303     HeapGraphEdge* edge = &edges()[i];
304     edge->ReplaceToIndexWithEntry(this);
305     edge->from()->add_child(edge);
306   }
307 }
308
309
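// Comparison functor for SortedListBSearch: orders entries by snapshot object
// id, so GetEntryById can binary-search the sorted entries list.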
310 class FindEntryById {
311  public:
312   explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
313   int operator()(HeapEntry* const* entry) {
314     if ((*entry)->id() == id_) return 0;
315     return (*entry)->id() < id_ ? -1 : 1;
316   }
317  private:
318   SnapshotObjectId id_;
319 };
320
321
322 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
323   List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
324   // Perform a binary search by id.
325   int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
326   if (index == -1)
327     return NULL;
328   return entries_by_id->at(index);
329 }
330
331
332 template<class T>
333 static int SortByIds(const T* entry1_ptr,
334                      const T* entry2_ptr) {
335   if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
336   return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
337 }
338
339
340 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
341   if (sorted_entries_.is_empty()) {
342     sorted_entries_.Allocate(entries_.length());
343     for (int i = 0; i < entries_.length(); ++i) {
344       sorted_entries_[i] = &entries_[i];
345     }
346     sorted_entries_.Sort(SortByIds);
347   }
348   return &sorted_entries_;
349 }
350
351
352 void HeapSnapshot::Print(int max_depth) {
353   root()->Print("", "", max_depth, 0);
354 }
355
356
357 size_t HeapSnapshot::RawSnapshotSize() const {
358   return
359       sizeof(*this) +
360       GetMemoryUsedByList(entries_) +
361       GetMemoryUsedByList(edges_) +
362       GetMemoryUsedByList(children_) +
363       GetMemoryUsedByList(sorted_entries_);
364 }
365
366
367 // We split IDs: even ids are used for embedder objects (see
368 // HeapObjectsMap::GenerateId), odd ids are used for V8 heap objects.
369 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
370 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
371     HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
372 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
373     HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
374 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
375     HeapObjectsMap::kGcRootsFirstSubrootId +
376     VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
377
378
379 static bool AddressesMatch(void* key1, void* key2) {
380   return key1 == key2;
381 }
382
383
384 HeapObjectsMap::HeapObjectsMap(Heap* heap)
385     : next_id_(kFirstAvailableObjectId),
386       entries_map_(AddressesMatch),
387       heap_(heap) {
388   // This dummy element solves a problem with entries_map_.
389   // When we do a lookup in HashMap we cannot distinguish two cases:
390   // the map already had an entry with NULL as the value, or it has just
391   // created a new entry on the fly with NULL as the default value.
392   // With such a dummy element we have a guarantee that all entries_map_
393   // entries will have a value field greater than 0.
394   // This fact is used in the MoveObject method.
395   entries_.Add(EntryInfo(0, NULL, 0));
396 }
397
398
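// Called when the GC moves an object: keeps entries_map_ and entries_ in sync
// so that the object retains its snapshot id across moves. Returns true if
// the moved object was already being tracked.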
399 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
400   ASSERT(to != NULL);
401   ASSERT(from != NULL);
402   if (from == to) return false;
403   void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
404   if (from_value == NULL) {
405     // It may occur that an untracked object moves to an address that is
406     // still occupied by a tracked object's entry. In this case we remove
407     // that entry, as we know the old object has died.
408     void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
409     if (to_value != NULL) {
410       int to_entry_info_index =
411           static_cast<int>(reinterpret_cast<intptr_t>(to_value));
412       entries_.at(to_entry_info_index).addr = NULL;
413     }
414   } else {
415     HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
416                                                    true);
417     if (to_entry->value != NULL) {
418       // We found an existing entry at the 'to' address, left behind by an
419       // old object. Without this step we would have two EntryInfo records
420       // with the same addr value, and later, in RemoveDeadEntries, one of
421       // them would be removed together with the corresponding entries_map_
422       // entry, so we clear addr on the stale record here.
423       int to_entry_info_index =
424           static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
425       entries_.at(to_entry_info_index).addr = NULL;
426     }
427     int from_entry_info_index =
428         static_cast<int>(reinterpret_cast<intptr_t>(from_value));
429     entries_.at(from_entry_info_index).addr = to;
430     // The size of an object can change during its lifetime, so to keep the
431     // information about the object in entries_ consistent, we adjust the
432     // size when the object is migrated.
433     if (FLAG_heap_profiler_trace_objects) {
434       PrintF("Move object from %p to %p old size %6d new size %6d\n",
435              from,
436              to,
437              entries_.at(from_entry_info_index).size,
438              object_size);
439     }
440     entries_.at(from_entry_info_index).size = object_size;
441     to_entry->value = from_value;
442   }
443   return from_value != NULL;
444 }
445
446
447 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
448   FindOrAddEntry(addr, size, false);
449 }
450
451
452 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
453   HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
454                                               false);
455   if (entry == NULL) return 0;
456   int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
457   EntryInfo& entry_info = entries_.at(entry_index);
458   ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
459   return entry_info.id;
460 }
461
462
463 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
464                                                 unsigned int size,
465                                                 bool accessed) {
466   ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
467   HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
468                                               true);
469   if (entry->value != NULL) {
470     int entry_index =
471         static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
472     EntryInfo& entry_info = entries_.at(entry_index);
473     entry_info.accessed = accessed;
474     if (FLAG_heap_profiler_trace_objects) {
475       PrintF("Update object size : %p with old size %d and new size %d\n",
476              addr,
477              entry_info.size,
478              size);
479     }
480     entry_info.size = size;
481     return entry_info.id;
482   }
483   entry->value = reinterpret_cast<void*>(entries_.length());
484   SnapshotObjectId id = next_id_;
485   next_id_ += kObjectIdStep;
486   entries_.Add(EntryInfo(id, addr, size, accessed));
487   ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
488   return id;
489 }
490
491
492 void HeapObjectsMap::StopHeapObjectsTracking() {
493   time_intervals_.Clear();
494 }
495
496
497 void HeapObjectsMap::UpdateHeapObjectsMap() {
498   if (FLAG_heap_profiler_trace_objects) {
499     PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
500            entries_map_.occupancy());
501   }
502   heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
503                           "HeapObjectsMap::UpdateHeapObjectsMap");
504   HeapIterator iterator(heap_);
505   for (HeapObject* obj = iterator.next();
506        obj != NULL;
507        obj = iterator.next()) {
508     FindOrAddEntry(obj->address(), obj->Size());
509     if (FLAG_heap_profiler_trace_objects) {
510       PrintF("Update object      : %p %6d. Next address is %p\n",
511              obj->address(),
512              obj->Size(),
513              obj->address() + obj->Size());
514     }
515   }
516   RemoveDeadEntries();
517   if (FLAG_heap_profiler_trace_objects) {
518     PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
519            entries_map_.occupancy());
520   }
521 }
522
523
524 namespace {
525
526
527 struct HeapObjectInfo {
528   HeapObjectInfo(HeapObject* obj, int expected_size)
529     : obj(obj),
530       expected_size(expected_size) {
531   }
532
533   HeapObject* obj;
534   int expected_size;
535
536   bool IsValid() const { return expected_size == obj->Size(); }
537
538   void Print() const {
539     if (expected_size == 0) {
540       PrintF("Untracked object   : %p %6d. Next address is %p\n",
541              obj->address(),
542              obj->Size(),
543              obj->address() + obj->Size());
544     } else if (obj->Size() != expected_size) {
545       PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
546              expected_size,
547              obj->address(),
548              obj->Size(),
549              obj->address() + obj->Size());
550     } else {
551       PrintF("Good object      : %p %6d. Next address is %p\n",
552              obj->address(),
553              expected_size,
554              obj->address() + obj->Size());
555     }
556   }
557 };
558
559
560 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
561   if (a->obj < b->obj) return -1;
562   if (a->obj > b->obj) return 1;
563   return 0;
564 }
565
566
567 }  // namespace
568
569
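// Walks the whole heap and counts objects that have no entry in entries_map_;
// when FLAG_heap_profiler_trace_objects is set it also counts entries whose
// recorded size is stale and prints a report of the mismatches.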
570 int HeapObjectsMap::FindUntrackedObjects() {
571   List<HeapObjectInfo> heap_objects(1000);
572
573   HeapIterator iterator(heap_);
574   int untracked = 0;
575   for (HeapObject* obj = iterator.next();
576        obj != NULL;
577        obj = iterator.next()) {
578     HashMap::Entry* entry = entries_map_.Lookup(
579       obj->address(), ComputePointerHash(obj->address()), false);
580     if (entry == NULL) {
581       ++untracked;
582       if (FLAG_heap_profiler_trace_objects) {
583         heap_objects.Add(HeapObjectInfo(obj, 0));
584       }
585     } else {
586       int entry_index = static_cast<int>(
587           reinterpret_cast<intptr_t>(entry->value));
588       EntryInfo& entry_info = entries_.at(entry_index);
589       if (FLAG_heap_profiler_trace_objects) {
590         heap_objects.Add(HeapObjectInfo(obj,
591                          static_cast<int>(entry_info.size)));
592         if (obj->Size() != static_cast<int>(entry_info.size))
593           ++untracked;
594       } else {
595         CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
596       }
597     }
598   }
599   if (FLAG_heap_profiler_trace_objects) {
600     PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
601            entries_map_.occupancy());
602     heap_objects.Sort(comparator);
603     int last_printed_object = -1;
604     bool print_next_object = false;
605     for (int i = 0; i < heap_objects.length(); ++i) {
606       const HeapObjectInfo& object_info = heap_objects[i];
607       if (!object_info.IsValid()) {
608         ++untracked;
609         if (last_printed_object != i - 1) {
610           if (i > 0) {
611             PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
612             heap_objects[i - 1].Print();
613           }
614         }
615         object_info.Print();
616         last_printed_object = i;
617         print_next_object = true;
618       } else if (print_next_object) {
619         object_info.Print();
620         print_next_object = false;
621         last_printed_object = i;
622       }
623     }
624     if (last_printed_object < heap_objects.length() - 1) {
625       PrintF("Last %d objects were skipped\n",
626              heap_objects.length() - 1 - last_printed_object);
627     }
628     PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
629            entries_map_.occupancy());
630   }
631   return untracked;
632 }
633
634
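// Updates the heap objects map, opens a new time interval, and streams a
// v8::HeapStatsUpdate for every interval whose object count or total size
// has changed since the last push. Returns the last assigned object id.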
635 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
636   UpdateHeapObjectsMap();
637   time_intervals_.Add(TimeInterval(next_id_));
638   int preferred_chunk_size = stream->GetChunkSize();
639   List<v8::HeapStatsUpdate> stats_buffer;
640   ASSERT(!entries_.is_empty());
641   EntryInfo* entry_info = &entries_.first();
642   EntryInfo* end_entry_info = &entries_.last() + 1;
643   for (int time_interval_index = 0;
644        time_interval_index < time_intervals_.length();
645        ++time_interval_index) {
646     TimeInterval& time_interval = time_intervals_[time_interval_index];
647     SnapshotObjectId time_interval_id = time_interval.id;
648     uint32_t entries_size = 0;
649     EntryInfo* start_entry_info = entry_info;
650     while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
651       entries_size += entry_info->size;
652       ++entry_info;
653     }
654     uint32_t entries_count =
655         static_cast<uint32_t>(entry_info - start_entry_info);
656     if (time_interval.count != entries_count ||
657         time_interval.size != entries_size) {
658       stats_buffer.Add(v8::HeapStatsUpdate(
659           time_interval_index,
660           time_interval.count = entries_count,
661           time_interval.size = entries_size));
662       if (stats_buffer.length() >= preferred_chunk_size) {
663         OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
664             &stats_buffer.first(), stats_buffer.length());
665         if (result == OutputStream::kAbort) return last_assigned_id();
666         stats_buffer.Clear();
667       }
668     }
669   }
670   ASSERT(entry_info == end_entry_info);
671   if (!stats_buffer.is_empty()) {
672     OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
673         &stats_buffer.first(), stats_buffer.length());
674     if (result == OutputStream::kAbort) return last_assigned_id();
675   }
676   stream->EndOfStream();
677   return last_assigned_id();
678 }
679
680
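// Compacts entries_: records that were not marked as accessed during the last
// heap walk are dropped (and unregistered from entries_map_), while surviving
// records are shifted down and entries_map_ is updated to their new indices.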
681 void HeapObjectsMap::RemoveDeadEntries() {
682   ASSERT(entries_.length() > 0 &&
683          entries_.at(0).id == 0 &&
684          entries_.at(0).addr == NULL);
685   int first_free_entry = 1;
686   for (int i = 1; i < entries_.length(); ++i) {
687     EntryInfo& entry_info = entries_.at(i);
688     if (entry_info.accessed) {
689       if (first_free_entry != i) {
690         entries_.at(first_free_entry) = entry_info;
691       }
692       entries_.at(first_free_entry).accessed = false;
693       HashMap::Entry* entry = entries_map_.Lookup(
694           entry_info.addr, ComputePointerHash(entry_info.addr), false);
695       ASSERT(entry);
696       entry->value = reinterpret_cast<void*>(first_free_entry);
697       ++first_free_entry;
698     } else {
699       if (entry_info.addr) {
700         entries_map_.Remove(entry_info.addr,
701                             ComputePointerHash(entry_info.addr));
702       }
703     }
704   }
705   entries_.Rewind(first_free_entry);
706   ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
707          entries_map_.occupancy());
708 }
709
710
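// Builds an id for an embedder (native) object from its hash, label and
// element count. The final shift makes these ids even, so they never collide
// with the odd ids that FindOrAddEntry assigns to V8 heap objects.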
711 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
712   SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
713   const char* label = info->GetLabel();
714   id ^= StringHasher::HashSequentialString(label,
715                                            static_cast<int>(strlen(label)),
716                                            heap_->HashSeed());
717   intptr_t element_count = info->GetElementCount();
718   if (element_count != -1)
719     id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
720                              v8::internal::kZeroHashSeed);
721   return id << 1;
722 }
723
724
725 size_t HeapObjectsMap::GetUsedMemorySize() const {
726   return
727       sizeof(*this) +
728       sizeof(HashMap::Entry) * entries_map_.capacity() +
729       GetMemoryUsedByList(entries_) +
730       GetMemoryUsedByList(time_intervals_);
731 }
732
733
734 HeapEntriesMap::HeapEntriesMap()
735     : entries_(HeapThingsMatch) {
736 }
737
738
739 int HeapEntriesMap::Map(HeapThing thing) {
740   HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
741   if (cache_entry == NULL) return HeapEntry::kNoEntry;
742   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
743 }
744
745
746 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
747   HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
748   ASSERT(cache_entry->value == NULL);
749   cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
750 }
751
752
753 HeapObjectsSet::HeapObjectsSet()
754     : entries_(HeapEntriesMap::HeapThingsMatch) {
755 }
756
757
758 void HeapObjectsSet::Clear() {
759   entries_.Clear();
760 }
761
762
763 bool HeapObjectsSet::Contains(Object* obj) {
764   if (!obj->IsHeapObject()) return false;
765   HeapObject* object = HeapObject::cast(obj);
766   return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
767 }
768
769
770 void HeapObjectsSet::Insert(Object* obj) {
771   if (!obj->IsHeapObject()) return;
772   HeapObject* object = HeapObject::cast(obj);
773   entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
774 }
775
776
777 const char* HeapObjectsSet::GetTag(Object* obj) {
778   HeapObject* object = HeapObject::cast(obj);
779   HashMap::Entry* cache_entry =
780       entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
781   return cache_entry != NULL
782       ? reinterpret_cast<const char*>(cache_entry->value)
783       : NULL;
784 }
785
786
787 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
788   if (!obj->IsHeapObject()) return;
789   HeapObject* object = HeapObject::cast(obj);
790   HashMap::Entry* cache_entry =
791       entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
792   cache_entry->value = const_cast<char*>(tag);
793 }
794
795
796 HeapObject* const V8HeapExplorer::kInternalRootObject =
797     reinterpret_cast<HeapObject*>(
798         static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
799 HeapObject* const V8HeapExplorer::kGcRootsObject =
800     reinterpret_cast<HeapObject*>(
801         static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
802 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
803     reinterpret_cast<HeapObject*>(
804         static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
805 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
806     reinterpret_cast<HeapObject*>(
807         static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
808
809
810 V8HeapExplorer::V8HeapExplorer(
811     HeapSnapshot* snapshot,
812     SnapshottingProgressReportingInterface* progress,
813     v8::HeapProfiler::ObjectNameResolver* resolver)
814     : heap_(snapshot->profiler()->heap_object_map()->heap()),
815       snapshot_(snapshot),
816       names_(snapshot_->profiler()->names()),
817       heap_object_map_(snapshot_->profiler()->heap_object_map()),
818       progress_(progress),
819       filler_(NULL),
820       global_object_name_resolver_(resolver) {
821 }
822
823
824 V8HeapExplorer::~V8HeapExplorer() {
825 }
826
827
828 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
829   return AddEntry(reinterpret_cast<HeapObject*>(ptr));
830 }
831
832
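// Maps a heap object to a HeapEntry type and a human-readable name: the root
// and GC (sub)root sentinels, closures, regexps, JS objects, strings, code,
// contexts, arrays and heap numbers each get their own type; everything else
// becomes a "hidden" system entry.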
833 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
834   if (object == kInternalRootObject) {
835     snapshot_->AddRootEntry();
836     return snapshot_->root();
837   } else if (object == kGcRootsObject) {
838     HeapEntry* entry = snapshot_->AddGcRootsEntry();
839     return entry;
840   } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
841     HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
842     return entry;
843   } else if (object->IsJSFunction()) {
844     JSFunction* func = JSFunction::cast(object);
845     SharedFunctionInfo* shared = func->shared();
846     const char* name = shared->bound() ? "native_bind" :
847         names_->GetName(String::cast(shared->name()));
848     return AddEntry(object, HeapEntry::kClosure, name);
849   } else if (object->IsJSRegExp()) {
850     JSRegExp* re = JSRegExp::cast(object);
851     return AddEntry(object,
852                     HeapEntry::kRegExp,
853                     names_->GetName(re->Pattern()));
854   } else if (object->IsJSObject()) {
855     const char* name = names_->GetName(
856         GetConstructorName(JSObject::cast(object)));
857     if (object->IsJSGlobalObject()) {
858       const char* tag = objects_tags_.GetTag(object);
859       if (tag != NULL) {
860         name = names_->GetFormatted("%s / %s", name, tag);
861       }
862     }
863     return AddEntry(object, HeapEntry::kObject, name);
864   } else if (object->IsString()) {
865     String* string = String::cast(object);
866     if (string->IsConsString())
867       return AddEntry(object,
868                       HeapEntry::kConsString,
869                       "(concatenated string)");
870     if (string->IsSlicedString())
871       return AddEntry(object,
872                       HeapEntry::kSlicedString,
873                       "(sliced string)");
874     return AddEntry(object,
875                     HeapEntry::kString,
876                     names_->GetName(String::cast(object)));
877   } else if (object->IsCode()) {
878     return AddEntry(object, HeapEntry::kCode, "");
879   } else if (object->IsSharedFunctionInfo()) {
880     String* name = String::cast(SharedFunctionInfo::cast(object)->name());
881     return AddEntry(object,
882                     HeapEntry::kCode,
883                     names_->GetName(name));
884   } else if (object->IsScript()) {
885     Object* name = Script::cast(object)->name();
886     return AddEntry(object,
887                     HeapEntry::kCode,
888                     name->IsString()
889                         ? names_->GetName(String::cast(name))
890                         : "");
891   } else if (object->IsNativeContext()) {
892     return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
893   } else if (object->IsContext()) {
894     return AddEntry(object, HeapEntry::kObject, "system / Context");
895   } else if (object->IsFixedArray() ||
896              object->IsFixedDoubleArray() ||
897              object->IsByteArray() ||
898              object->IsExternalArray()) {
899     return AddEntry(object, HeapEntry::kArray, "");
900   } else if (object->IsHeapNumber()) {
901     return AddEntry(object, HeapEntry::kHeapNumber, "number");
902   }
903   return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
904 }
905
906
907 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
908                                     HeapEntry::Type type,
909                                     const char* name) {
910   return AddEntry(object->address(), type, name, object->Size());
911 }
912
913
914 HeapEntry* V8HeapExplorer::AddEntry(Address address,
915                                     HeapEntry::Type type,
916                                     const char* name,
917                                     size_t size) {
918   SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
919       address, static_cast<unsigned int>(size));
920   unsigned trace_node_id = 0;
921   if (AllocationTracker* allocation_tracker =
922       snapshot_->profiler()->allocation_tracker()) {
923     trace_node_id =
924         allocation_tracker->address_to_trace()->GetTraceNodeId(address);
925   }
926   return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
927 }
928
929
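// Mediates between heap explorers and the snapshot under construction: it
// allocates entries through HeapEntriesAllocators, remembers the
// HeapThing -> entry index mapping, and turns reference-setting calls into
// edges on the parent HeapEntry.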
930 class SnapshotFiller {
931  public:
932   explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
933       : snapshot_(snapshot),
934         names_(snapshot->profiler()->names()),
935         entries_(entries) { }
936   HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
937     HeapEntry* entry = allocator->AllocateEntry(ptr);
938     entries_->Pair(ptr, entry->index());
939     return entry;
940   }
941   HeapEntry* FindEntry(HeapThing ptr) {
942     int index = entries_->Map(ptr);
943     return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
944   }
945   HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
946     HeapEntry* entry = FindEntry(ptr);
947     return entry != NULL ? entry : AddEntry(ptr, allocator);
948   }
949   void SetIndexedReference(HeapGraphEdge::Type type,
950                            int parent,
951                            int index,
952                            HeapEntry* child_entry) {
953     HeapEntry* parent_entry = &snapshot_->entries()[parent];
954     parent_entry->SetIndexedReference(type, index, child_entry);
955   }
956   void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
957                                     int parent,
958                                     HeapEntry* child_entry) {
959     HeapEntry* parent_entry = &snapshot_->entries()[parent];
960     int index = parent_entry->children_count() + 1;
961     parent_entry->SetIndexedReference(type, index, child_entry);
962   }
963   void SetNamedReference(HeapGraphEdge::Type type,
964                          int parent,
965                          const char* reference_name,
966                          HeapEntry* child_entry) {
967     HeapEntry* parent_entry = &snapshot_->entries()[parent];
968     parent_entry->SetNamedReference(type, reference_name, child_entry);
969   }
970   void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
971                                   int parent,
972                                   HeapEntry* child_entry) {
973     HeapEntry* parent_entry = &snapshot_->entries()[parent];
974     int index = parent_entry->children_count() + 1;
975     parent_entry->SetNamedReference(
976         type,
977         names_->GetName(index),
978         child_entry);
979   }
980
981  private:
982   HeapSnapshot* snapshot_;
983   StringsStorage* names_;
984   HeapEntriesMap* entries_;
985 };
986
987
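// Root visitor that adds one GC subroot entry per synchronization tag,
// skipping tags whose root list turned out to be empty.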
988 class GcSubrootsEnumerator : public ObjectVisitor {
989  public:
990   GcSubrootsEnumerator(
991       SnapshotFiller* filler, V8HeapExplorer* explorer)
992       : filler_(filler),
993         explorer_(explorer),
994         previous_object_count_(0),
995         object_count_(0) {
996   }
997   void VisitPointers(Object** start, Object** end) {
998     object_count_ += end - start;
999   }
1000   void Synchronize(VisitorSynchronization::SyncTag tag) {
1001     // Skip empty subroots.
1002     if (previous_object_count_ != object_count_) {
1003       previous_object_count_ = object_count_;
1004       filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1005     }
1006   }
1007  private:
1008   SnapshotFiller* filler_;
1009   V8HeapExplorer* explorer_;
1010   intptr_t previous_object_count_;
1011   intptr_t object_count_;
1012 };
1013
1014
1015 void V8HeapExplorer::AddRootEntries(SnapshotFiller* filler) {
1016   filler->AddEntry(kInternalRootObject, this);
1017   filler->AddEntry(kGcRootsObject, this);
1018   GcSubrootsEnumerator enumerator(filler, this);
1019   heap_->IterateRoots(&enumerator, VISIT_ALL);
1020 }
1021
1022
1023 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
1024   switch (object->map()->instance_type()) {
1025     case MAP_TYPE:
1026       switch (Map::cast(object)->instance_type()) {
1027 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
1028         case instance_type: return "system / Map (" #Name ")";
1029       STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
1030 #undef MAKE_STRING_MAP_CASE
1031         default: return "system / Map";
1032       }
1033     case CELL_TYPE: return "system / Cell";
1034     case PROPERTY_CELL_TYPE: return "system / PropertyCell";
1035     case FOREIGN_TYPE: return "system / Foreign";
1036     case ODDBALL_TYPE: return "system / Oddball";
1037 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1038     case NAME##_TYPE: return "system / "#Name;
1039   STRUCT_LIST(MAKE_STRUCT_CASE)
1040 #undef MAKE_STRUCT_CASE
1041     default: return "system";
1042   }
1043 }
1044
1045
1046 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1047   int objects_count = 0;
1048   for (HeapObject* obj = iterator->next();
1049        obj != NULL;
1050        obj = iterator->next()) {
1051     objects_count++;
1052   }
1053   return objects_count;
1054 }
1055
1056
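// Visits the remaining fields of an object after the type-specific extractors
// have run: fields already reported were tagged via MarkVisitedField, so they
// are unmarked and skipped here, while all other pointers are recorded as
// hidden (indexed) references.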
1057 class IndexedReferencesExtractor : public ObjectVisitor {
1058  public:
1059   IndexedReferencesExtractor(V8HeapExplorer* generator,
1060                              HeapObject* parent_obj,
1061                              int parent)
1062       : generator_(generator),
1063         parent_obj_(parent_obj),
1064         parent_(parent),
1065         next_index_(0) {
1066   }
1067   void VisitCodeEntry(Address entry_address) {
1068      Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1069      generator_->SetInternalReference(parent_obj_, parent_, "code", code);
1070      generator_->TagCodeObject(code);
1071   }
1072   void VisitPointers(Object** start, Object** end) {
1073     for (Object** p = start; p < end; p++) {
1074       ++next_index_;
1075       if (CheckVisitedAndUnmark(p)) continue;
1076       generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
1077     }
1078   }
1079   static void MarkVisitedField(HeapObject* obj, int offset) {
1080     if (offset < 0) return;
1081     Address field = obj->address() + offset;
1082     ASSERT(!Memory::Object_at(field)->IsFailure());
1083     ASSERT(Memory::Object_at(field)->IsHeapObject());
1084     *field |= kFailureTag;
1085   }
1086
1087  private:
1088   bool CheckVisitedAndUnmark(Object** field) {
1089     if ((*field)->IsFailure()) {
1090       intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1091       *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1092       ASSERT((*field)->IsHeapObject());
1093       return true;
1094     }
1095     return false;
1096   }
1097   V8HeapExplorer* generator_;
1098   HeapObject* parent_obj_;
1099   int parent_;
1100   int next_index_;
1101 };
1102
1103
1104 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1105   HeapEntry* heap_entry = GetEntry(obj);
1106   if (heap_entry == NULL) return;  // No interest in this object.
1107   int entry = heap_entry->index();
1108
1109   if (obj->IsJSGlobalProxy()) {
1110     ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1111   } else if (obj->IsJSArrayBuffer()) {
1112     ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1113   } else if (obj->IsJSObject()) {
1114     ExtractJSObjectReferences(entry, JSObject::cast(obj));
1115   } else if (obj->IsString()) {
1116     ExtractStringReferences(entry, String::cast(obj));
1117   } else if (obj->IsContext()) {
1118     ExtractContextReferences(entry, Context::cast(obj));
1119   } else if (obj->IsMap()) {
1120     ExtractMapReferences(entry, Map::cast(obj));
1121   } else if (obj->IsSharedFunctionInfo()) {
1122     ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1123   } else if (obj->IsScript()) {
1124     ExtractScriptReferences(entry, Script::cast(obj));
1125   } else if (obj->IsAccessorPair()) {
1126     ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1127   } else if (obj->IsCodeCache()) {
1128     ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1129   } else if (obj->IsCode()) {
1130     ExtractCodeReferences(entry, Code::cast(obj));
1131   } else if (obj->IsBox()) {
1132     ExtractBoxReferences(entry, Box::cast(obj));
1133   } else if (obj->IsCell()) {
1134     ExtractCellReferences(entry, Cell::cast(obj));
1135   } else if (obj->IsPropertyCell()) {
1136     ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1137   } else if (obj->IsAllocationSite()) {
1138     ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1139   }
1140   SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1141
1142   // Extract unvisited fields as hidden references and restore tags
1143   // of visited fields.
1144   IndexedReferencesExtractor refs_extractor(this, obj, entry);
1145   obj->Iterate(&refs_extractor);
1146 }
1147
1148
1149 void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1150     int entry, JSGlobalProxy* proxy) {
1151   SetInternalReference(proxy, entry,
1152                        "native_context", proxy->native_context(),
1153                        JSGlobalProxy::kNativeContextOffset);
1154 }
1155
1156
1157 void V8HeapExplorer::ExtractJSObjectReferences(
1158     int entry, JSObject* js_obj) {
1159   HeapObject* obj = js_obj;
1160   ExtractClosureReferences(js_obj, entry);
1161   ExtractPropertyReferences(js_obj, entry);
1162   ExtractElementReferences(js_obj, entry);
1163   ExtractInternalReferences(js_obj, entry);
1164   SetPropertyReference(
1165       obj, entry, heap_->proto_string(), js_obj->GetPrototype());
1166   if (obj->IsJSFunction()) {
1167     JSFunction* js_fun = JSFunction::cast(js_obj);
1168     Object* proto_or_map = js_fun->prototype_or_initial_map();
1169     if (!proto_or_map->IsTheHole()) {
1170       if (!proto_or_map->IsMap()) {
1171         SetPropertyReference(
1172             obj, entry,
1173             heap_->prototype_string(), proto_or_map,
1174             NULL,
1175             JSFunction::kPrototypeOrInitialMapOffset);
1176       } else {
1177         SetPropertyReference(
1178             obj, entry,
1179             heap_->prototype_string(), js_fun->prototype());
1180         SetInternalReference(
1181             obj, entry, "initial_map", proto_or_map,
1182             JSFunction::kPrototypeOrInitialMapOffset);
1183       }
1184     }
1185     SharedFunctionInfo* shared_info = js_fun->shared();
1186     // JSFunction has either bindings or literals and never both.
1187     bool bound = shared_info->bound();
1188     TagObject(js_fun->literals_or_bindings(),
1189               bound ? "(function bindings)" : "(function literals)");
1190     SetInternalReference(js_fun, entry,
1191                          bound ? "bindings" : "literals",
1192                          js_fun->literals_or_bindings(),
1193                          JSFunction::kLiteralsOffset);
1194     TagObject(shared_info, "(shared function info)");
1195     SetInternalReference(js_fun, entry,
1196                          "shared", shared_info,
1197                          JSFunction::kSharedFunctionInfoOffset);
1198     TagObject(js_fun->context(), "(context)");
1199     SetInternalReference(js_fun, entry,
1200                          "context", js_fun->context(),
1201                          JSFunction::kContextOffset);
1202     SetWeakReference(js_fun, entry,
1203                      "next_function_link", js_fun->next_function_link(),
1204                      JSFunction::kNextFunctionLinkOffset);
1205     STATIC_CHECK(JSFunction::kNextFunctionLinkOffset
1206                  == JSFunction::kNonWeakFieldsEndOffset);
1207     STATIC_CHECK(JSFunction::kNextFunctionLinkOffset + kPointerSize
1208                  == JSFunction::kSize);
1209   } else if (obj->IsGlobalObject()) {
1210     GlobalObject* global_obj = GlobalObject::cast(obj);
1211     SetInternalReference(global_obj, entry,
1212                          "builtins", global_obj->builtins(),
1213                          GlobalObject::kBuiltinsOffset);
1214     SetInternalReference(global_obj, entry,
1215                          "native_context", global_obj->native_context(),
1216                          GlobalObject::kNativeContextOffset);
1217     SetInternalReference(global_obj, entry,
1218                          "global_context", global_obj->global_context(),
1219                          GlobalObject::kGlobalContextOffset);
1220     SetInternalReference(global_obj, entry,
1221                          "global_receiver", global_obj->global_receiver(),
1222                          GlobalObject::kGlobalReceiverOffset);
1223     STATIC_CHECK(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
1224                  4 * kPointerSize);
1225   } else if (obj->IsJSArrayBufferView()) {
1226     JSArrayBufferView* view = JSArrayBufferView::cast(obj);
1227     SetInternalReference(view, entry, "buffer", view->buffer(),
1228                          JSArrayBufferView::kBufferOffset);
1229     SetWeakReference(view, entry, "weak_next", view->weak_next(),
1230                      JSArrayBufferView::kWeakNextOffset);
1231   }
1232   TagObject(js_obj->properties(), "(object properties)");
1233   SetInternalReference(obj, entry,
1234                        "properties", js_obj->properties(),
1235                        JSObject::kPropertiesOffset);
1236   TagObject(js_obj->elements(), "(object elements)");
1237   SetInternalReference(obj, entry,
1238                        "elements", js_obj->elements(),
1239                        JSObject::kElementsOffset);
1240 }
1241
1242
1243 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1244   if (string->IsConsString()) {
1245     ConsString* cs = ConsString::cast(string);
1246     SetInternalReference(cs, entry, "first", cs->first(),
1247                          ConsString::kFirstOffset);
1248     SetInternalReference(cs, entry, "second", cs->second(),
1249                          ConsString::kSecondOffset);
1250   } else if (string->IsSlicedString()) {
1251     SlicedString* ss = SlicedString::cast(string);
1252     SetInternalReference(ss, entry, "parent", ss->parent(),
1253                          SlicedString::kParentOffset);
1254   }
1255 }
1256
1257
1258 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1259   if (context == context->declaration_context()) {
1260     ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1261     // Add context allocated locals.
1262     int context_locals = scope_info->ContextLocalCount();
1263     for (int i = 0; i < context_locals; ++i) {
1264       String* local_name = scope_info->ContextLocalName(i);
1265       int idx = Context::MIN_CONTEXT_SLOTS + i;
1266       SetContextReference(context, entry, local_name, context->get(idx),
1267                           Context::OffsetOfElementAt(idx));
1268     }
1269     if (scope_info->HasFunctionName()) {
1270       String* name = scope_info->FunctionName();
1271       VariableMode mode;
1272       int idx = scope_info->FunctionContextSlotIndex(name, &mode);
1273       if (idx >= 0) {
1274         SetContextReference(context, entry, name, context->get(idx),
1275                             Context::OffsetOfElementAt(idx));
1276       }
1277     }
1278   }
1279
1280 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1281   if (Context::index < Context::FIRST_WEAK_SLOT) { \
1282     SetInternalReference(context, entry, #name, context->get(Context::index), \
1283         FixedArray::OffsetOfElementAt(Context::index)); \
1284   } else { \
1285     SetWeakReference(context, entry, #name, context->get(Context::index), \
1286         FixedArray::OffsetOfElementAt(Context::index)); \
1287   }
1288   EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1289   EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1290   EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1291   EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1292   if (context->IsNativeContext()) {
1293     TagObject(context->jsfunction_result_caches(),
1294               "(context func. result caches)");
1295     TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1296     TagObject(context->runtime_context(), "(runtime context)");
1297     TagObject(context->embedder_data(), "(context data)");
1298     NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1299     EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1300                           optimized_functions_list);
1301     EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1302     EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1303     EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
1304 #undef EXTRACT_CONTEXT_FIELD
1305     STATIC_CHECK(Context::OPTIMIZED_FUNCTIONS_LIST == Context::FIRST_WEAK_SLOT);
1306     STATIC_CHECK(Context::NEXT_CONTEXT_LINK + 1
1307                  == Context::NATIVE_CONTEXT_SLOTS);
1308     STATIC_CHECK(Context::FIRST_WEAK_SLOT + 5 == Context::NATIVE_CONTEXT_SLOTS);
1309   }
1310 }
1311
1312
1313 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1314   if (map->HasTransitionArray()) {
1315     TransitionArray* transitions = map->transitions();
1316     int transitions_entry = GetEntry(transitions)->index();
1317     Object* back_pointer = transitions->back_pointer_storage();
1318     TagObject(back_pointer, "(back pointer)");
1319     SetInternalReference(transitions, transitions_entry,
1320                          "back_pointer", back_pointer);
1321     TagObject(transitions, "(transition array)");
1322     SetInternalReference(map, entry,
1323                          "transitions", transitions,
1324                          Map::kTransitionsOrBackPointerOffset);
1325   } else {
1326     Object* back_pointer = map->GetBackPointer();
1327     TagObject(back_pointer, "(back pointer)");
1328     SetInternalReference(map, entry,
1329                          "back_pointer", back_pointer,
1330                          Map::kTransitionsOrBackPointerOffset);
1331   }
1332   DescriptorArray* descriptors = map->instance_descriptors();
1333   TagObject(descriptors, "(map descriptors)");
1334   SetInternalReference(map, entry,
1335                        "descriptors", descriptors,
1336                        Map::kDescriptorsOffset);
1337
1338   SetInternalReference(map, entry,
1339                        "code_cache", map->code_cache(),
1340                        Map::kCodeCacheOffset);
1341   SetInternalReference(map, entry,
1342                        "prototype", map->prototype(), Map::kPrototypeOffset);
1343   SetInternalReference(map, entry,
1344                        "constructor", map->constructor(),
1345                        Map::kConstructorOffset);
1346   TagObject(map->dependent_code(), "(dependent code)");
1347   SetInternalReference(map, entry,
1348                        "dependent_code", map->dependent_code(),
1349                        Map::kDependentCodeOffset);
1350 }
1351
1352
1353 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1354     int entry, SharedFunctionInfo* shared) {
1355   HeapObject* obj = shared;
1356   String* shared_name = shared->DebugName();
1357   const char* name = NULL;
1358   if (shared_name != *heap_->isolate()->factory()->empty_string()) {
1359     name = names_->GetName(shared_name);
1360     TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1361   } else {
1362     TagObject(shared->code(), names_->GetFormatted("(%s code)",
1363         Code::Kind2String(shared->code()->kind())));
1364   }
1365
1366   SetInternalReference(obj, entry,
1367                        "name", shared->name(),
1368                        SharedFunctionInfo::kNameOffset);
1369   SetInternalReference(obj, entry,
1370                        "code", shared->code(),
1371                        SharedFunctionInfo::kCodeOffset);
1372   TagObject(shared->scope_info(), "(function scope info)");
1373   SetInternalReference(obj, entry,
1374                        "scope_info", shared->scope_info(),
1375                        SharedFunctionInfo::kScopeInfoOffset);
1376   SetInternalReference(obj, entry,
1377                        "instance_class_name", shared->instance_class_name(),
1378                        SharedFunctionInfo::kInstanceClassNameOffset);
1379   SetInternalReference(obj, entry,
1380                        "script", shared->script(),
1381                        SharedFunctionInfo::kScriptOffset);
1382   const char* construct_stub_name = name ?
1383       names_->GetFormatted("(construct stub code for %s)", name) :
1384       "(construct stub code)";
1385   TagObject(shared->construct_stub(), construct_stub_name);
1386   SetInternalReference(obj, entry,
1387                        "construct_stub", shared->construct_stub(),
1388                        SharedFunctionInfo::kConstructStubOffset);
1389   SetInternalReference(obj, entry,
1390                        "function_data", shared->function_data(),
1391                        SharedFunctionInfo::kFunctionDataOffset);
1392   SetInternalReference(obj, entry,
1393                        "debug_info", shared->debug_info(),
1394                        SharedFunctionInfo::kDebugInfoOffset);
1395   SetInternalReference(obj, entry,
1396                        "inferred_name", shared->inferred_name(),
1397                        SharedFunctionInfo::kInferredNameOffset);
1398   SetInternalReference(obj, entry,
1399                        "optimized_code_map", shared->optimized_code_map(),
1400                        SharedFunctionInfo::kOptimizedCodeMapOffset);
1401   SetWeakReference(obj, entry,
1402                    "initial_map", shared->initial_map(),
1403                    SharedFunctionInfo::kInitialMapOffset);
1404 }
1405
1406
1407 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1408   HeapObject* obj = script;
1409   SetInternalReference(obj, entry,
1410                        "source", script->source(),
1411                        Script::kSourceOffset);
1412   SetInternalReference(obj, entry,
1413                        "name", script->name(),
1414                        Script::kNameOffset);
1415   SetInternalReference(obj, entry,
1416                        "context_data", script->context_data(),
1417                        Script::kContextOffset);
1418   TagObject(script->line_ends(), "(script line ends)");
1419   SetInternalReference(obj, entry,
1420                        "line_ends", script->line_ends(),
1421                        Script::kLineEndsOffset);
1422 }
1423
1424
1425 void V8HeapExplorer::ExtractAccessorPairReferences(
1426     int entry, AccessorPair* accessors) {
1427   SetInternalReference(accessors, entry, "getter", accessors->getter(),
1428                        AccessorPair::kGetterOffset);
1429   SetInternalReference(accessors, entry, "setter", accessors->setter(),
1430                        AccessorPair::kSetterOffset);
1431 }
1432
1433
1434 void V8HeapExplorer::ExtractCodeCacheReferences(
1435     int entry, CodeCache* code_cache) {
1436   TagObject(code_cache->default_cache(), "(default code cache)");
1437   SetInternalReference(code_cache, entry,
1438                        "default_cache", code_cache->default_cache(),
1439                        CodeCache::kDefaultCacheOffset);
1440   TagObject(code_cache->normal_type_cache(), "(code type cache)");
1441   SetInternalReference(code_cache, entry,
1442                        "type_cache", code_cache->normal_type_cache(),
1443                        CodeCache::kNormalTypeCacheOffset);
1444 }
1445
1446
1447 void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1448   TagObject(code, names_->GetFormatted("(%s builtin)", name));
1449 }
1450
1451
1452 void V8HeapExplorer::TagCodeObject(Code* code) {
1453   if (code->kind() == Code::STUB) {
1454     TagObject(code, names_->GetFormatted(
1455         "(%s code)", CodeStub::MajorName(
1456             static_cast<CodeStub::Major>(code->major_key()), true)));
1457   }
1458 }
1459
1460
1461 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1462   TagCodeObject(code);
1463   TagObject(code->relocation_info(), "(code relocation info)");
1464   SetInternalReference(code, entry,
1465                        "relocation_info", code->relocation_info(),
1466                        Code::kRelocationInfoOffset);
1467   SetInternalReference(code, entry,
1468                        "handler_table", code->handler_table(),
1469                        Code::kHandlerTableOffset);
1470   TagObject(code->deoptimization_data(), "(code deopt data)");
1471   SetInternalReference(code, entry,
1472                        "deoptimization_data", code->deoptimization_data(),
1473                        Code::kDeoptimizationDataOffset);
1474   if (code->kind() == Code::FUNCTION) {
1475     SetInternalReference(code, entry,
1476                          "type_feedback_info", code->type_feedback_info(),
1477                          Code::kTypeFeedbackInfoOffset);
1478   }
1479   SetInternalReference(code, entry,
1480                        "gc_metadata", code->gc_metadata(),
1481                        Code::kGCMetadataOffset);
1482   SetInternalReference(code, entry,
1483                        "constant_pool", code->constant_pool(),
1484                        Code::kConstantPoolOffset);
1485   if (code->kind() == Code::OPTIMIZED_FUNCTION) {
1486     SetWeakReference(code, entry,
1487                      "next_code_link", code->next_code_link(),
1488                      Code::kNextCodeLinkOffset);
1489   }
1490 }
1491
1492
1493 void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1494   SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1495 }
1496
1497
1498 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1499   SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1500 }
1501
1502
1503 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1504                                                    PropertyCell* cell) {
1505   ExtractCellReferences(entry, cell);
1506   SetInternalReference(cell, entry, "type", cell->type(),
1507                        PropertyCell::kTypeOffset);
1508   SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1509                        PropertyCell::kDependentCodeOffset);
1510 }
1511
1512
1513 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1514                                                      AllocationSite* site) {
1515   SetInternalReference(site, entry, "transition_info", site->transition_info(),
1516                        AllocationSite::kTransitionInfoOffset);
1517   SetInternalReference(site, entry, "nested_site", site->nested_site(),
1518                        AllocationSite::kNestedSiteOffset);
1519   SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1520                        AllocationSite::kDependentCodeOffset);
1521   // Do not visit weak_next: it is not visited by the StaticVisitor, and the
1522   // weak_next field is of little interest here.
1523   STATIC_CHECK(AllocationSite::kWeakNextOffset >=
1524                AllocationSite::BodyDescriptor::kEndOffset);
1525 }
1526
1527
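// Allocator for the synthetic "system / JSArrayBufferData" entry that models a
// JSArrayBuffer's off-heap backing store; the entry's size is the buffer's
// byte length.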
1528 class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
1529  public:
1530   JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
1531       : size_(size)
1532       , explorer_(explorer) {
1533   }
1534   virtual HeapEntry* AllocateEntry(HeapThing ptr) {
1535     return explorer_->AddEntry(
1536         static_cast<Address>(ptr),
1537         HeapEntry::kNative, "system / JSArrayBufferData", size_);
1538   }
1539  private:
1540   size_t size_;
1541   V8HeapExplorer* explorer_;
1542 };
1543
1544
1545 void V8HeapExplorer::ExtractJSArrayBufferReferences(
1546     int entry, JSArrayBuffer* buffer) {
1547   SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
1548                    JSArrayBuffer::kWeakNextOffset);
1549   SetWeakReference(buffer, entry,
1550                    "weak_first_view", buffer->weak_first_view(),
1551                    JSArrayBuffer::kWeakFirstViewOffset);
1552   // Set up a reference to the buffer's off-heap backing store memory.
1553   if (!buffer->backing_store())
1554     return;
1555   size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
1556   JSArrayBufferDataEntryAllocator allocator(data_size, this);
1557   HeapEntry* data_entry =
1558       filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
1559   filler_->SetNamedReference(HeapGraphEdge::kInternal,
1560                              entry, "backing_store", data_entry);
1561 }
1562
1563
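// For bound functions, expose the bound receiver, the target function and each
// bound argument from the bindings array as shortcut edges.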
1564 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1565   if (!js_obj->IsJSFunction()) return;
1566
1567   JSFunction* func = JSFunction::cast(js_obj);
1568   if (func->shared()->bound()) {
1569     FixedArray* bindings = func->function_bindings();
1570     SetNativeBindReference(js_obj, entry, "bound_this",
1571                            bindings->get(JSFunction::kBoundThisIndex));
1572     SetNativeBindReference(js_obj, entry, "bound_function",
1573                            bindings->get(JSFunction::kBoundFunctionIndex));
1574     for (int i = JSFunction::kBoundArgumentsStartIndex;
1575          i < bindings->length(); i++) {
1576       const char* reference_name = names_->GetFormatted(
1577           "bound_argument_%d",
1578           i - JSFunction::kBoundArgumentsStartIndex);
1579       SetNativeBindReference(js_obj, entry, reference_name,
1580                              bindings->get(i));
1581     }
1582   }
1583 }
1584
1585
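// Emits property edges from either the descriptor array (fast properties) or
// the property dictionary (slow properties). Values keyed by the hidden string
// are tagged "(hidden properties)" and reported as internal references.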
1586 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1587   if (js_obj->HasFastProperties()) {
1588     DescriptorArray* descs = js_obj->map()->instance_descriptors();
1589     int real_size = js_obj->map()->NumberOfOwnDescriptors();
1590     for (int i = 0; i < real_size; i++) {
1591       switch (descs->GetType(i)) {
1592         case FIELD: {
1593           int index = descs->GetFieldIndex(i);
1594
1595           Name* k = descs->GetKey(i);
1596           if (index < js_obj->map()->inobject_properties()) {
1597             Object* value = js_obj->InObjectPropertyAt(index);
1598             if (k != heap_->hidden_string()) {
1599               SetPropertyReference(
1600                   js_obj, entry,
1601                   k, value,
1602                   NULL,
1603                   js_obj->GetInObjectPropertyOffset(index));
1604             } else {
1605               TagObject(value, "(hidden properties)");
1606               SetInternalReference(
1607                   js_obj, entry,
1608                   "hidden_properties", value,
1609                   js_obj->GetInObjectPropertyOffset(index));
1610             }
1611           } else {
1612             Object* value = js_obj->RawFastPropertyAt(index);
1613             if (k != heap_->hidden_string()) {
1614               SetPropertyReference(js_obj, entry, k, value);
1615             } else {
1616               TagObject(value, "(hidden properties)");
1617               SetInternalReference(js_obj, entry, "hidden_properties", value);
1618             }
1619           }
1620           break;
1621         }
1622         case CONSTANT:
1623           SetPropertyReference(
1624               js_obj, entry,
1625               descs->GetKey(i), descs->GetConstant(i));
1626           break;
1627         case CALLBACKS:
1628           ExtractAccessorPairProperty(
1629               js_obj, entry,
1630               descs->GetKey(i), descs->GetValue(i));
1631           break;
1632         case NORMAL:  // only in slow mode
1633         case HANDLER:  // only in lookup results, not in descriptors
1634         case INTERCEPTOR:  // only in lookup results, not in descriptors
1635           break;
1636         case TRANSITION:
1637         case NONEXISTENT:
1638           UNREACHABLE();
1639           break;
1640       }
1641     }
1642   } else {
1643     NameDictionary* dictionary = js_obj->property_dictionary();
1644     int length = dictionary->Capacity();
1645     for (int i = 0; i < length; ++i) {
1646       Object* k = dictionary->KeyAt(i);
1647       if (dictionary->IsKey(k)) {
1648         Object* target = dictionary->ValueAt(i);
1649         // We assume that global objects can only have slow properties.
1650         Object* value = target->IsPropertyCell()
1651             ? PropertyCell::cast(target)->value()
1652             : target;
1653         if (k == heap_->hidden_string()) {
1654           TagObject(value, "(hidden properties)");
1655           SetInternalReference(js_obj, entry, "hidden_properties", value);
1656           continue;
1657         }
1658         if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
1659         SetPropertyReference(js_obj, entry, String::cast(k), value);
1660       }
1661     }
1662   }
1663 }
1664
1665
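// Returns true if the callback object is an AccessorPair, in which case the
// non-oddball getter and setter are reported as "get %s" / "set %s" properties.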
1666 bool V8HeapExplorer::ExtractAccessorPairProperty(
1667     JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
1668   if (!callback_obj->IsAccessorPair()) return false;
1669   AccessorPair* accessors = AccessorPair::cast(callback_obj);
1670   Object* getter = accessors->getter();
1671   if (!getter->IsOddball()) {
1672     SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
1673   }
1674   Object* setter = accessors->setter();
1675   if (!setter->IsOddball()) {
1676     SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
1677   }
1678   return true;
1679 }
1680
1681
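// Emits element edges for fast object elements (skipping holes) and for
// dictionary-mode elements, using the numeric key as the edge index.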
1682 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1683   if (js_obj->HasFastObjectElements()) {
1684     FixedArray* elements = FixedArray::cast(js_obj->elements());
1685     int length = js_obj->IsJSArray() ?
1686         Smi::cast(JSArray::cast(js_obj)->length())->value() :
1687         elements->length();
1688     for (int i = 0; i < length; ++i) {
1689       if (!elements->get(i)->IsTheHole()) {
1690         SetElementReference(js_obj, entry, i, elements->get(i));
1691       }
1692     }
1693   } else if (js_obj->HasDictionaryElements()) {
1694     SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1695     int length = dictionary->Capacity();
1696     for (int i = 0; i < length; ++i) {
1697       Object* k = dictionary->KeyAt(i);
1698       if (dictionary->IsKey(k)) {
1699         ASSERT(k->IsNumber());
1700         uint32_t index = static_cast<uint32_t>(k->Number());
1701         SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1702       }
1703     }
1704   }
1705 }
1706
1707
1708 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1709   int length = js_obj->GetInternalFieldCount();
1710   for (int i = 0; i < length; ++i) {
1711     Object* o = js_obj->GetInternalField(i);
1712     SetInternalReference(
1713         js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1714   }
1715 }
1716
1717
1718 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1719   Heap* heap = object->GetHeap();
1720   if (object->IsJSFunction()) return heap->closure_string();
1721   String* constructor_name = object->constructor_name();
1722   if (constructor_name == heap->Object_string()) {
1723     // Look up an immediate "constructor" property; if it is a function,
1724     // return its name. This covers instances of binding objects, whose
1725     // prototype constructor type is "Object".
1726     Object* constructor_prop = NULL;
1727     LookupResult result(heap->isolate());
1728     object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
1729     if (!result.IsFound()) return object->constructor_name();
1730
1731     constructor_prop = result.GetLazyValue();
1732     if (constructor_prop->IsJSFunction()) {
1733       Object* maybe_name =
1734           JSFunction::cast(constructor_prop)->shared()->name();
1735       if (maybe_name->IsString()) {
1736         String* name = String::cast(maybe_name);
1737         if (name->length() > 0) return name;
1738       }
1739     }
1740   }
1741   return object->constructor_name();
1742 }
1743
1744
1745 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1746   if (!obj->IsHeapObject()) return NULL;
1747   return filler_->FindOrAddEntry(obj, this);
1748 }
1749
1750
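// Visits the heap roots twice (strong-only, then all) so that FillReferences
// can classify each root reference as strong or weak, attach it to the matching
// GC subroot, and tag builtin code objects with their builtin names.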
1751 class RootsReferencesExtractor : public ObjectVisitor {
1752  private:
1753   struct IndexTag {
1754     IndexTag(int index, VisitorSynchronization::SyncTag tag)
1755         : index(index), tag(tag) { }
1756     int index;
1757     VisitorSynchronization::SyncTag tag;
1758   };
1759
1760  public:
1761   explicit RootsReferencesExtractor(Heap* heap)
1762       : collecting_all_references_(false),
1763         previous_reference_count_(0),
1764         heap_(heap) {
1765   }
1766
1767   void VisitPointers(Object** start, Object** end) {
1768     if (collecting_all_references_) {
1769       for (Object** p = start; p < end; p++) all_references_.Add(*p);
1770     } else {
1771       for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1772     }
1773   }
1774
1775   void SetCollectingAllReferences() { collecting_all_references_ = true; }
1776
1777   void FillReferences(V8HeapExplorer* explorer) {
1778     ASSERT(strong_references_.length() <= all_references_.length());
1779     Builtins* builtins = heap_->isolate()->builtins();
1780     for (int i = 0; i < reference_tags_.length(); ++i) {
1781       explorer->SetGcRootsReference(reference_tags_[i].tag);
1782     }
1783     int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1784     while (all_index < all_references_.length()) {
1785       bool is_strong = strong_index < strong_references_.length()
1786           && strong_references_[strong_index] == all_references_[all_index];
1787       explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1788                                       !is_strong,
1789                                       all_references_[all_index]);
1790       if (reference_tags_[tags_index].tag ==
1791           VisitorSynchronization::kBuiltins) {
1792         ASSERT(all_references_[all_index]->IsCode());
1793         explorer->TagBuiltinCodeObject(
1794             Code::cast(all_references_[all_index]),
1795             builtins->name(builtin_index++));
1796       }
1797       ++all_index;
1798       if (is_strong) ++strong_index;
1799       if (reference_tags_[tags_index].index == all_index) ++tags_index;
1800     }
1801   }
1802
1803   void Synchronize(VisitorSynchronization::SyncTag tag) {
1804     if (collecting_all_references_ &&
1805         previous_reference_count_ != all_references_.length()) {
1806       previous_reference_count_ = all_references_.length();
1807       reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1808     }
1809   }
1810
1811  private:
1812   bool collecting_all_references_;
1813   List<Object*> strong_references_;
1814   List<Object*> all_references_;
1815   int previous_reference_count_;
1816   List<IndexTag> reference_tags_;
1817   Heap* heap_;
1818 };
1819
1820
1821 bool V8HeapExplorer::IterateAndExtractReferences(
1822     SnapshotFiller* filler) {
1823   filler_ = filler;
1824
1825   // Make sure builtin code objects get their builtin tags first.
1826   // Otherwise a JSFunction could attach its custom code tag to what is
1827   // actually a generic builtin.
1828   SetRootGcRootsReference();
1829   RootsReferencesExtractor extractor(heap_);
1830   heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1831   extractor.SetCollectingAllReferences();
1832   heap_->IterateRoots(&extractor, VISIT_ALL);
1833   extractor.FillReferences(this);
1834
1835   // Now iterate the whole heap.
1836   bool interrupted = false;
1837   HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1838   // Heap iteration with filtering must run to completion even if interrupted.
1839   for (HeapObject* obj = iterator.next();
1840        obj != NULL;
1841        obj = iterator.next(), progress_->ProgressStep()) {
1842     if (!interrupted) {
1843       ExtractReferences(obj);
1844       if (!progress_->ProgressReport(false)) interrupted = true;
1845     }
1846   }
1847   if (interrupted) {
1848     filler_ = NULL;
1849     return false;
1850   }
1851
1852   filler_ = NULL;
1853   return progress_->ProgressReport(true);
1854 }
1855
1856
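// Filters out oddballs and a handful of shared singleton objects and maps that
// would only add noise to the snapshot.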
1857 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1858   return object->IsHeapObject()
1859       && !object->IsOddball()
1860       && object != heap_->empty_byte_array()
1861       && object != heap_->empty_fixed_array()
1862       && object != heap_->empty_descriptor_array()
1863       && object != heap_->fixed_array_map()
1864       && object != heap_->cell_map()
1865       && object != heap_->global_property_cell_map()
1866       && object != heap_->shared_function_info_map()
1867       && object != heap_->free_space_map()
1868       && object != heap_->one_pointer_filler_map()
1869       && object != heap_->two_pointer_filler_map();
1870 }
1871
1872
1873 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1874                                          int parent_entry,
1875                                          String* reference_name,
1876                                          Object* child_obj,
1877                                          int field_offset) {
1878   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1879   HeapEntry* child_entry = GetEntry(child_obj);
1880   if (child_entry != NULL) {
1881     filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1882                                parent_entry,
1883                                names_->GetName(reference_name),
1884                                child_entry);
1885     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1886   }
1887 }
1888
1889
1890 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1891                                             int parent_entry,
1892                                             const char* reference_name,
1893                                             Object* child_obj) {
1894   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1895   HeapEntry* child_entry = GetEntry(child_obj);
1896   if (child_entry != NULL) {
1897     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1898                                parent_entry,
1899                                reference_name,
1900                                child_entry);
1901   }
1902 }
1903
1904
1905 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1906                                          int parent_entry,
1907                                          int index,
1908                                          Object* child_obj) {
1909   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1910   HeapEntry* child_entry = GetEntry(child_obj);
1911   if (child_entry != NULL) {
1912     filler_->SetIndexedReference(HeapGraphEdge::kElement,
1913                                  parent_entry,
1914                                  index,
1915                                  child_entry);
1916   }
1917 }
1918
1919
1920 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1921                                           int parent_entry,
1922                                           const char* reference_name,
1923                                           Object* child_obj,
1924                                           int field_offset) {
1925   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1926   HeapEntry* child_entry = GetEntry(child_obj);
1927   if (child_entry == NULL) return;
1928   if (IsEssentialObject(child_obj)) {
1929     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1930                                parent_entry,
1931                                reference_name,
1932                                child_entry);
1933   }
1934   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1935 }
1936
1937
1938 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1939                                           int parent_entry,
1940                                           int index,
1941                                           Object* child_obj,
1942                                           int field_offset) {
1943   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1944   HeapEntry* child_entry = GetEntry(child_obj);
1945   if (child_entry == NULL) return;
1946   if (IsEssentialObject(child_obj)) {
1947     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1948                                parent_entry,
1949                                names_->GetName(index),
1950                                child_entry);
1951   }
1952   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1953 }
1954
1955
1956 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1957                                         int parent_entry,
1958                                         int index,
1959                                         Object* child_obj) {
1960   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1961   HeapEntry* child_entry = GetEntry(child_obj);
1962   if (child_entry != NULL && IsEssentialObject(child_obj)) {
1963     filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1964                                  parent_entry,
1965                                  index,
1966                                  child_entry);
1967   }
1968 }
1969
1970
1971 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1972                                       int parent_entry,
1973                                       const char* reference_name,
1974                                       Object* child_obj,
1975                                       int field_offset) {
1976   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1977   HeapEntry* child_entry = GetEntry(child_obj);
1978   if (child_entry == NULL) return;
1979   if (IsEssentialObject(child_obj)) {
1980     filler_->SetNamedReference(HeapGraphEdge::kWeak,
1981                                parent_entry,
1982                                reference_name,
1983                                child_entry);
1984   }
1985   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1986 }
1987
1988
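// Symbol keys and non-empty string keys produce kProperty edges; empty string
// keys fall back to kInternal. An optional name_format_string such as "get %s"
// is used to derive descriptive names for accessor components.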
1989 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
1990                                           int parent_entry,
1991                                           Name* reference_name,
1992                                           Object* child_obj,
1993                                           const char* name_format_string,
1994                                           int field_offset) {
1995   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1996   HeapEntry* child_entry = GetEntry(child_obj);
1997   if (child_entry != NULL) {
1998     HeapGraphEdge::Type type =
1999         reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
2000             ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2001     const char* name = name_format_string != NULL && reference_name->IsString()
2002         ? names_->GetFormatted(
2003               name_format_string,
2004               String::cast(reference_name)->ToCString(
2005                   DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
2006         names_->GetName(reference_name);
2007
2008     filler_->SetNamedReference(type,
2009                                parent_entry,
2010                                name,
2011                                child_entry);
2012     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2013   }
2014 }
2015
2016
2017 void V8HeapExplorer::SetRootGcRootsReference() {
2018   filler_->SetIndexedAutoIndexReference(
2019       HeapGraphEdge::kElement,
2020       snapshot_->root()->index(),
2021       snapshot_->gc_roots());
2022 }
2023
2024
2025 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2026   HeapEntry* child_entry = GetEntry(child_obj);
2027   ASSERT(child_entry != NULL);
2028   filler_->SetNamedAutoIndexReference(
2029       HeapGraphEdge::kShortcut,
2030       snapshot_->root()->index(),
2031       child_entry);
2032 }
2033
2034
2035 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2036   filler_->SetIndexedAutoIndexReference(
2037       HeapGraphEdge::kElement,
2038       snapshot_->gc_roots()->index(),
2039       snapshot_->gc_subroot(tag));
2040 }
2041
2042
2043 void V8HeapExplorer::SetGcSubrootReference(
2044     VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2045   HeapEntry* child_entry = GetEntry(child_obj);
2046   if (child_entry != NULL) {
2047     const char* name = GetStrongGcSubrootName(child_obj);
2048     if (name != NULL) {
2049       filler_->SetNamedReference(
2050           HeapGraphEdge::kInternal,
2051           snapshot_->gc_subroot(tag)->index(),
2052           name,
2053           child_entry);
2054     } else {
2055       if (is_weak) {
2056         filler_->SetNamedAutoIndexReference(
2057             HeapGraphEdge::kWeak,
2058             snapshot_->gc_subroot(tag)->index(),
2059             child_entry);
2060       } else {
2061         filler_->SetIndexedAutoIndexReference(
2062             HeapGraphEdge::kElement,
2063             snapshot_->gc_subroot(tag)->index(),
2064             child_entry);
2065       }
2066     }
2067
2068     // Add a shortcut from the snapshot root to the JS global object.
2069     if (child_obj->IsNativeContext()) {
2070       Context* context = Context::cast(child_obj);
2071       GlobalObject* global = context->global_object();
2072       if (global->IsJSGlobalObject()) {
2073         bool is_debug_object = false;
2074 #ifdef ENABLE_DEBUGGER_SUPPORT
2075         is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
2076 #endif
2077         if (!is_debug_object && !user_roots_.Contains(global)) {
2078           user_roots_.Insert(global);
2079           SetUserGlobalReference(global);
2080         }
2081       }
2082     }
2083   }
2084 }
2085
2086
2087 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
2088   if (strong_gc_subroot_names_.is_empty()) {
2089 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
2090 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
2091     STRONG_ROOT_LIST(ROOT_NAME)
2092 #undef ROOT_NAME
2093 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
2094     STRUCT_LIST(STRUCT_MAP_NAME)
2095 #undef STRUCT_MAP_NAME
2096 #define STRING_NAME(name, str) NAME_ENTRY(name)
2097     INTERNALIZED_STRING_LIST(STRING_NAME)
2098 #undef STRING_NAME
2099 #undef NAME_ENTRY
2100     CHECK(!strong_gc_subroot_names_.is_empty());
2101   }
2102   return strong_gc_subroot_names_.GetTag(object);
2103 }
2104
2105
2106 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2107   if (IsEssentialObject(obj)) {
2108     HeapEntry* entry = GetEntry(obj);
2109     if (entry->name()[0] == '\0') {
2110       entry->set_name(tag);
2111     }
2112   }
2113 }
2114
2115
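// Collects the JSGlobalObject behind each native context's global proxy so
// that TagGlobalObjects can attach resolver-provided names to them.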
2116 class GlobalObjectsEnumerator : public ObjectVisitor {
2117  public:
2118   virtual void VisitPointers(Object** start, Object** end) {
2119     for (Object** p = start; p < end; p++) {
2120       if ((*p)->IsNativeContext()) {
2121         Context* context = Context::cast(*p);
2122         JSObject* proxy = context->global_proxy();
2123         if (proxy->IsJSGlobalProxy()) {
2124           Object* global = proxy->map()->prototype();
2125           if (global->IsJSGlobalObject()) {
2126             objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2127           }
2128         }
2129       }
2130     }
2131   }
2132   int count() { return objects_.length(); }
2133   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2134
2135  private:
2136   List<Handle<JSGlobalObject> > objects_;
2137 };
2138
2139
2140 // Modifies heap. Must not be run during heap traversal.
2141 void V8HeapExplorer::TagGlobalObjects() {
2142   Isolate* isolate = heap_->isolate();
2143   HandleScope scope(isolate);
2144   GlobalObjectsEnumerator enumerator;
2145   isolate->global_handles()->IterateAllRoots(&enumerator);
2146   const char** urls = NewArray<const char*>(enumerator.count());
2147   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2148     if (global_object_name_resolver_) {
2149       HandleScope scope(isolate);
2150       Handle<JSGlobalObject> global_obj = enumerator.at(i);
2151       urls[i] = global_object_name_resolver_->GetName(
2152           Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2153     } else {
2154       urls[i] = NULL;
2155     }
2156   }
2157
2158   DisallowHeapAllocation no_allocation;
2159   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2160     objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2161   }
2162
2163   DeleteArray(urls);
2164 }
2165
2166
2167 class GlobalHandlesExtractor : public ObjectVisitor {
2168  public:
2169   explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2170       : explorer_(explorer) {}
2171   virtual ~GlobalHandlesExtractor() {}
2172   virtual void VisitPointers(Object** start, Object** end) {
2173     UNREACHABLE();
2174   }
2175   virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2176     explorer_->VisitSubtreeWrapper(p, class_id);
2177   }
2178  private:
2179   NativeObjectsExplorer* explorer_;
2180 };
2181
2182
2183 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2184  public:
2185   BasicHeapEntriesAllocator(
2186       HeapSnapshot* snapshot,
2187       HeapEntry::Type entries_type)
2188     : snapshot_(snapshot),
2189       names_(snapshot_->profiler()->names()),
2190       heap_object_map_(snapshot_->profiler()->heap_object_map()),
2191       entries_type_(entries_type) {
2192   }
2193   virtual HeapEntry* AllocateEntry(HeapThing ptr);
2194  private:
2195   HeapSnapshot* snapshot_;
2196   StringsStorage* names_;
2197   HeapObjectsMap* heap_object_map_;
2198   HeapEntry::Type entries_type_;
2199 };
2200
2201
2202 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2203   v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2204   intptr_t elements = info->GetElementCount();
2205   intptr_t size = info->GetSizeInBytes();
2206   const char* name = elements != -1
2207       ? names_->GetFormatted(
2208             "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2209       : names_->GetCopy(info->GetLabel());
2210   return snapshot_->AddEntry(
2211       entries_type_,
2212       name,
2213       heap_object_map_->GenerateId(info),
2214       size != -1 ? static_cast<int>(size) : 0,
2215       0);
2216 }
2217
2218
2219 NativeObjectsExplorer::NativeObjectsExplorer(
2220     HeapSnapshot* snapshot,
2221     SnapshottingProgressReportingInterface* progress)
2222     : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2223       snapshot_(snapshot),
2224       names_(snapshot_->profiler()->names()),
2225       progress_(progress),
2226       embedder_queried_(false),
2227       objects_by_info_(RetainedInfosMatch),
2228       native_groups_(StringsMatch),
2229       filler_(NULL) {
2230   synthetic_entries_allocator_ =
2231       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2232   native_entries_allocator_ =
2233       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2234 }
2235
2236
2237 NativeObjectsExplorer::~NativeObjectsExplorer() {
2238   for (HashMap::Entry* p = objects_by_info_.Start();
2239        p != NULL;
2240        p = objects_by_info_.Next(p)) {
2241     v8::RetainedObjectInfo* info =
2242         reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2243     info->Dispose();
2244     List<HeapObject*>* objects =
2245         reinterpret_cast<List<HeapObject*>* >(p->value);
2246     delete objects;
2247   }
2248   for (HashMap::Entry* p = native_groups_.Start();
2249        p != NULL;
2250        p = native_groups_.Next(p)) {
2251     v8::RetainedObjectInfo* info =
2252         reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2253     info->Dispose();
2254   }
2255   delete synthetic_entries_allocator_;
2256   delete native_entries_allocator_;
2257 }
2258
2259
2260 int NativeObjectsExplorer::EstimateObjectsCount() {
2261   FillRetainedObjects();
2262   return objects_by_info_.occupancy();
2263 }
2264
2265
2266 void NativeObjectsExplorer::FillRetainedObjects() {
2267   if (embedder_queried_) return;
2268   Isolate* isolate = isolate_;
2269   const GCType major_gc_type = kGCTypeMarkSweepCompact;
2270   // Record objects that are joined into ObjectGroups.
2271   isolate->heap()->CallGCPrologueCallbacks(
2272       major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2273   List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2274   for (int i = 0; i < groups->length(); ++i) {
2275     ObjectGroup* group = groups->at(i);
2276     if (group->info == NULL) continue;
2277     List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2278     for (size_t j = 0; j < group->length; ++j) {
2279       HeapObject* obj = HeapObject::cast(*group->objects[j]);
2280       list->Add(obj);
2281       in_groups_.Insert(obj);
2282     }
2283     group->info = NULL;  // Acquire info object ownership.
2284   }
2285   isolate->global_handles()->RemoveObjectGroups();
2286   isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2287   // Record objects that are not in ObjectGroups, but have class ID.
2288   GlobalHandlesExtractor extractor(this);
2289   isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2290   embedder_queried_ = true;
2291 }
2292
2293
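// Converts embedder-reported implicit reference groups into "native" internal
// edges from each group's parent entry to its child entries.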
2294 void NativeObjectsExplorer::FillImplicitReferences() {
2295   Isolate* isolate = isolate_;
2296   List<ImplicitRefGroup*>* groups =
2297       isolate->global_handles()->implicit_ref_groups();
2298   for (int i = 0; i < groups->length(); ++i) {
2299     ImplicitRefGroup* group = groups->at(i);
2300     HeapObject* parent = *group->parent;
2301     int parent_entry =
2302         filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2303     ASSERT(parent_entry != HeapEntry::kNoEntry);
2304     Object*** children = group->children;
2305     for (size_t j = 0; j < group->length; ++j) {
2306       Object* child = *children[j];
2307       HeapEntry* child_entry =
2308           filler_->FindOrAddEntry(child, native_entries_allocator_);
2309       filler_->SetNamedReference(
2310           HeapGraphEdge::kInternal,
2311           parent_entry,
2312           "native",
2313           child_entry);
2314     }
2315   }
2316   isolate->global_handles()->RemoveImplicitRefGroups();
2317 }
2318
2319 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2320     v8::RetainedObjectInfo* info) {
2321   HashMap::Entry* entry =
2322       objects_by_info_.Lookup(info, InfoHash(info), true);
2323   if (entry->value != NULL) {
2324     info->Dispose();
2325   } else {
2326     entry->value = new List<HeapObject*>(4);
2327   }
2328   return reinterpret_cast<List<HeapObject*>* >(entry->value);
2329 }
2330
2331
2332 bool NativeObjectsExplorer::IterateAndExtractReferences(
2333     SnapshotFiller* filler) {
2334   filler_ = filler;
2335   FillRetainedObjects();
2336   FillImplicitReferences();
2337   if (EstimateObjectsCount() > 0) {
2338     for (HashMap::Entry* p = objects_by_info_.Start();
2339          p != NULL;
2340          p = objects_by_info_.Next(p)) {
2341       v8::RetainedObjectInfo* info =
2342           reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2343       SetNativeRootReference(info);
2344       List<HeapObject*>* objects =
2345           reinterpret_cast<List<HeapObject*>* >(p->value);
2346       for (int i = 0; i < objects->length(); ++i) {
2347         SetWrapperNativeReferences(objects->at(i), info);
2348       }
2349     }
2350     SetRootNativeRootsReference();
2351   }
2352   filler_ = NULL;
2353   return true;
2354 }
2355
2356
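// Synthetic RetainedObjectInfo used to group native entries under a single
// per-label node in the snapshot.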
2357 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2358  public:
2359   explicit NativeGroupRetainedObjectInfo(const char* label)
2360       : disposed_(false),
2361         hash_(reinterpret_cast<intptr_t>(label)),
2362         label_(label) {
2363   }
2364
2365   virtual ~NativeGroupRetainedObjectInfo() {}
2366   virtual void Dispose() {
2367     CHECK(!disposed_);
2368     disposed_ = true;
2369     delete this;
2370   }
2371   virtual bool IsEquivalent(RetainedObjectInfo* other) {
2372     return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2373   }
2374   virtual intptr_t GetHash() { return hash_; }
2375   virtual const char* GetLabel() { return label_; }
2376
2377  private:
2378   bool disposed_;
2379   intptr_t hash_;
2380   const char* label_;
2381 };
2382
2383
2384 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2385     const char* label) {
2386   const char* label_copy = names_->GetCopy(label);
2387   uint32_t hash = StringHasher::HashSequentialString(
2388       label_copy,
2389       static_cast<int>(strlen(label_copy)),
2390       isolate_->heap()->HashSeed());
2391   HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2392                                                 hash, true);
2393   if (entry->value == NULL) {
2394     entry->value = new NativeGroupRetainedObjectInfo(label);
2395   }
2396   return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2397 }
2398
2399
2400 void NativeObjectsExplorer::SetNativeRootReference(
2401     v8::RetainedObjectInfo* info) {
2402   HeapEntry* child_entry =
2403       filler_->FindOrAddEntry(info, native_entries_allocator_);
2404   ASSERT(child_entry != NULL);
2405   NativeGroupRetainedObjectInfo* group_info =
2406       FindOrAddGroupInfo(info->GetGroupLabel());
2407   HeapEntry* group_entry =
2408       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2409   filler_->SetNamedAutoIndexReference(
2410       HeapGraphEdge::kInternal,
2411       group_entry->index(),
2412       child_entry);
2413 }
2414
2415
2416 void NativeObjectsExplorer::SetWrapperNativeReferences(
2417     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2418   HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2419   ASSERT(wrapper_entry != NULL);
2420   HeapEntry* info_entry =
2421       filler_->FindOrAddEntry(info, native_entries_allocator_);
2422   ASSERT(info_entry != NULL);
2423   filler_->SetNamedReference(HeapGraphEdge::kInternal,
2424                              wrapper_entry->index(),
2425                              "native",
2426                              info_entry);
2427   filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2428                                         info_entry->index(),
2429                                         wrapper_entry);
2430 }
2431
2432
2433 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2434   for (HashMap::Entry* entry = native_groups_.Start();
2435        entry;
2436        entry = native_groups_.Next(entry)) {
2437     NativeGroupRetainedObjectInfo* group_info =
2438         static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2439     HeapEntry* group_entry =
2440         filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2441     ASSERT(group_entry != NULL);
2442     filler_->SetIndexedAutoIndexReference(
2443         HeapGraphEdge::kElement,
2444         snapshot_->root()->index(),
2445         group_entry);
2446   }
2447 }
2448
2449
2450 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2451   if (in_groups_.Contains(*p)) return;
2452   Isolate* isolate = isolate_;
2453   v8::RetainedObjectInfo* info =
2454       isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2455   if (info == NULL) return;
2456   GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2457 }
2458
2459
2460 HeapSnapshotGenerator::HeapSnapshotGenerator(
2461     HeapSnapshot* snapshot,
2462     v8::ActivityControl* control,
2463     v8::HeapProfiler::ObjectNameResolver* resolver,
2464     Heap* heap)
2465     : snapshot_(snapshot),
2466       control_(control),
2467       v8_heap_explorer_(snapshot_, this, resolver),
2468       dom_explorer_(snapshot_, this),
2469       heap_(heap) {
2470 }
2471
2472
2473 bool HeapSnapshotGenerator::GenerateSnapshot() {
2474   v8_heap_explorer_.TagGlobalObjects();
2475
2476   // TODO(1562) Profiler assumes that any object that is in the heap after
2477   // full GC is reachable from the root when computing dominators.
2478   // This is not true for weakly reachable objects.
2479   // As a temporary solution we call GC twice.
2480   heap_->CollectAllGarbage(
2481       Heap::kMakeHeapIterableMask,
2482       "HeapSnapshotGenerator::GenerateSnapshot");
2483   heap_->CollectAllGarbage(
2484       Heap::kMakeHeapIterableMask,
2485       "HeapSnapshotGenerator::GenerateSnapshot");
2486
2487 #ifdef VERIFY_HEAP
2488   Heap* debug_heap = heap_;
2489   CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2490   CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2491   CHECK(!debug_heap->code_space()->was_swept_conservatively());
2492   CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2493   CHECK(!debug_heap->property_cell_space()->
2494         was_swept_conservatively());
2495   CHECK(!debug_heap->map_space()->was_swept_conservatively());
2496 #endif
2497
2498   // The following code uses heap iterators, so we want the heap to be
2499   // stable. It should follow TagGlobalObjects as that can allocate.
2500   DisallowHeapAllocation no_alloc;
2501
2502 #ifdef VERIFY_HEAP
2503   debug_heap->Verify();
2504 #endif
2505
2506   SetProgressTotal(1);  // 1 pass.
2507
2508 #ifdef VERIFY_HEAP
2509   debug_heap->Verify();
2510 #endif
2511
2512   if (!FillReferences()) return false;
2513
2514   snapshot_->FillChildren();
2515   snapshot_->RememberLastJSObjectId();
2516
2517   progress_counter_ = progress_total_;
2518   if (!ProgressReport(true)) return false;
2519   return true;
2520 }
2521
2522
2523 void HeapSnapshotGenerator::ProgressStep() {
2524   ++progress_counter_;
2525 }
2526
2527
2528 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2529   const int kProgressReportGranularity = 10000;
2530   if (control_ != NULL
2531       && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2532       return
2533           control_->ReportProgressValue(progress_counter_, progress_total_) ==
2534           v8::ActivityControl::kContinue;
2535   }
2536   return true;
2537 }
2538
2539
2540 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2541   if (control_ == NULL) return;
2542   HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2543   progress_total_ = iterations_count * (
2544       v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2545       dom_explorer_.EstimateObjectsCount());
2546   progress_counter_ = 0;
2547 }
2548
2549
2550 bool HeapSnapshotGenerator::FillReferences() {
2551   SnapshotFiller filler(snapshot_, &entries_);
2552   v8_heap_explorer_.AddRootEntries(&filler);
2553   return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2554       && dom_explorer_.IterateAndExtractReferences(&filler);
2555 }
2556
2557
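// Maximum number of characters (including a possible minus sign) in the
// decimal representation of 4- and 8-byte integers; used to size the
// serializer's on-stack buffers.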
2558 template<int bytes> struct MaxDecimalDigitsIn;
2559 template<> struct MaxDecimalDigitsIn<4> {
2560   static const int kSigned = 11;
2561   static const int kUnsigned = 10;
2562 };
2563 template<> struct MaxDecimalDigitsIn<8> {
2564   static const int kSigned = 20;
2565   static const int kUnsigned = 20;
2566 };
2567
2568
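// Buffers serializer output in fixed-size chunks and forwards complete chunks
// to the embedder-provided v8::OutputStream, remembering whether the embedder
// requested an abort.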
2569 class OutputStreamWriter {
2570  public:
2571   explicit OutputStreamWriter(v8::OutputStream* stream)
2572       : stream_(stream),
2573         chunk_size_(stream->GetChunkSize()),
2574         chunk_(chunk_size_),
2575         chunk_pos_(0),
2576         aborted_(false) {
2577     ASSERT(chunk_size_ > 0);
2578   }
2579   bool aborted() { return aborted_; }
2580   void AddCharacter(char c) {
2581     ASSERT(c != '\0');
2582     ASSERT(chunk_pos_ < chunk_size_);
2583     chunk_[chunk_pos_++] = c;
2584     MaybeWriteChunk();
2585   }
2586   void AddString(const char* s) {
2587     AddSubstring(s, StrLength(s));
2588   }
2589   void AddSubstring(const char* s, int n) {
2590     if (n <= 0) return;
2591     ASSERT(static_cast<size_t>(n) <= strlen(s));
2592     const char* s_end = s + n;
2593     while (s < s_end) {
2594       int s_chunk_size = Min(
2595           chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2596       ASSERT(s_chunk_size > 0);
2597       OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2598       s += s_chunk_size;
2599       chunk_pos_ += s_chunk_size;
2600       MaybeWriteChunk();
2601     }
2602   }
2603   void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2604   void Finalize() {
2605     if (aborted_) return;
2606     ASSERT(chunk_pos_ < chunk_size_);
2607     if (chunk_pos_ != 0) {
2608       WriteChunk();
2609     }
2610     stream_->EndOfStream();
2611   }
2612
2613  private:
2614   template<typename T>
2615   void AddNumberImpl(T n, const char* format) {
2616     // Buffer for the longest value plus trailing \0
2617     static const int kMaxNumberSize =
2618         MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2619     if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2620       int result = OS::SNPrintF(
2621           chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2622       ASSERT(result != -1);
2623       chunk_pos_ += result;
2624       MaybeWriteChunk();
2625     } else {
2626       EmbeddedVector<char, kMaxNumberSize> buffer;
2627       int result = OS::SNPrintF(buffer, format, n);
2628       USE(result);
2629       ASSERT(result != -1);
2630       AddString(buffer.start());
2631     }
2632   }
2633   void MaybeWriteChunk() {
2634     ASSERT(chunk_pos_ <= chunk_size_);
2635     if (chunk_pos_ == chunk_size_) {
2636       WriteChunk();
2637     }
2638   }
2639   void WriteChunk() {
2640     if (aborted_) return;
2641     if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2642         v8::OutputStream::kAbort) aborted_ = true;
2643     chunk_pos_ = 0;
2644   }
2645
2646   v8::OutputStream* stream_;
2647   int chunk_size_;
2648   ScopedVector<char> chunk_;
2649   int chunk_pos_;
2650   bool aborted_;
2651 };
2652
2653
2654 // type, name|index, to_node.
2655 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2656 // type, name, id, self_size, edge_count, trace_node_id.
2657 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
2658
2659 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2660   if (AllocationTracker* allocation_tracker =
2661       snapshot_->profiler()->allocation_tracker()) {
2662     allocation_tracker->PrepareForSerialization();
2663   }
2664   ASSERT(writer_ == NULL);
2665   writer_ = new OutputStreamWriter(stream);
2666   SerializeImpl();
2667   delete writer_;
2668   writer_ = NULL;
2669 }
2670
2671
2672 void HeapSnapshotJSONSerializer::SerializeImpl() {
2673   ASSERT(0 == snapshot_->root()->index());
2674   writer_->AddCharacter('{');
2675   writer_->AddString("\"snapshot\":{");
2676   SerializeSnapshot();
2677   if (writer_->aborted()) return;
2678   writer_->AddString("},\n");
2679   writer_->AddString("\"nodes\":[");
2680   SerializeNodes();
2681   if (writer_->aborted()) return;
2682   writer_->AddString("],\n");
2683   writer_->AddString("\"edges\":[");
2684   SerializeEdges();
2685   if (writer_->aborted()) return;
2686   writer_->AddString("],\n");
2687
2688   writer_->AddString("\"trace_function_infos\":[");
2689   SerializeTraceNodeInfos();
2690   if (writer_->aborted()) return;
2691   writer_->AddString("],\n");
2692   writer_->AddString("\"trace_tree\":[");
2693   SerializeTraceTree();
2694   if (writer_->aborted()) return;
2695   writer_->AddString("],\n");
2696
2697   writer_->AddString("\"strings\":[");
2698   SerializeStrings();
2699   if (writer_->aborted()) return;
2700   writer_->AddCharacter(']');
2701   writer_->AddCharacter('}');
2702   writer_->Finalize();
2703 }
2704
2705
2706 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2707   HashMap::Entry* cache_entry = strings_.Lookup(
2708       const_cast<char*>(s), StringHash(s), true);
2709   if (cache_entry->value == NULL) {
2710     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2711   }
2712   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2713 }
2714
2715
2716 namespace {
2717
2718 template<size_t size> struct ToUnsigned;
2719
2720 template<> struct ToUnsigned<4> {
2721   typedef uint32_t Type;
2722 };
2723
2724 template<> struct ToUnsigned<8> {
2725   typedef uint64_t Type;
2726 };
2727
2728 }  // namespace
2729
2730
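// Writes the decimal representation of an unsigned value into the buffer
// starting at buffer_pos and returns the position just past the last digit.
// The caller is responsible for providing a large enough buffer.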
2731 template<typename T>
2732 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2733   STATIC_CHECK(static_cast<T>(-1) > 0);  // Check that T is unsigned
2734   int number_of_digits = 0;
2735   T t = value;
2736   do {
2737     ++number_of_digits;
2738   } while (t /= 10);
2739
2740   buffer_pos += number_of_digits;
2741   int result = buffer_pos;
2742   do {
2743     int last_digit = static_cast<int>(value % 10);
2744     buffer[--buffer_pos] = '0' + last_digit;
2745     value /= 10;
2746   } while (value);
2747   return result;
2748 }
2749
2750
2751 template<typename T>
2752 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2753   typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2754   STATIC_CHECK(sizeof(value) == sizeof(unsigned_value));
2755   return utoa_impl(unsigned_value, buffer, buffer_pos);
2756 }
2757
2758
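// An edge is written as "type,name_or_index,to_node": element and hidden edges
// store a numeric index, all other edge types store a string table id, and
// to_node refers to the target node via entry_index().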
2759 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2760                                                bool first_edge) {
2761   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2762   static const int kBufferSize =
2763       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
2764   EmbeddedVector<char, kBufferSize> buffer;
2765   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2766       || edge->type() == HeapGraphEdge::kHidden
2767       ? edge->index() : GetStringId(edge->name());
2768   int buffer_pos = 0;
2769   if (!first_edge) {
2770     buffer[buffer_pos++] = ',';
2771   }
2772   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2773   buffer[buffer_pos++] = ',';
2774   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2775   buffer[buffer_pos++] = ',';
2776   buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2777   buffer[buffer_pos++] = '\n';
2778   buffer[buffer_pos++] = '\0';
2779   writer_->AddString(buffer.start());
2780 }
2781
2782
2783 void HeapSnapshotJSONSerializer::SerializeEdges() {
2784   List<HeapGraphEdge*>& edges = snapshot_->children();
2785   for (int i = 0; i < edges.length(); ++i) {
2786     ASSERT(i == 0 ||
2787            edges[i - 1]->from()->index() <= edges[i]->from()->index());
2788     SerializeEdge(edges[i], i == 0);
2789     if (writer_->aborted()) return;
2790   }
2791 }
2792
2793
2794 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2795   // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0
2796   static const int kBufferSize =
2797       5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2798       + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned  // NOLINT
2799       + 6 + 1 + 1;
2800   EmbeddedVector<char, kBufferSize> buffer;
2801   int buffer_pos = 0;
2802   if (entry_index(entry) != 0) {
2803     buffer[buffer_pos++] = ',';
2804   }
2805   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2806   buffer[buffer_pos++] = ',';
2807   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2808   buffer[buffer_pos++] = ',';
2809   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2810   buffer[buffer_pos++] = ',';
2811   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2812   buffer[buffer_pos++] = ',';
2813   buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2814   buffer[buffer_pos++] = ',';
2815   buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2816   buffer[buffer_pos++] = '\n';
2817   buffer[buffer_pos++] = '\0';
2818   writer_->AddString(buffer.start());
2819 }
2820
2821
2822 void HeapSnapshotJSONSerializer::SerializeNodes() {
2823   List<HeapEntry>& entries = snapshot_->entries();
2824   for (int i = 0; i < entries.length(); ++i) {
2825     SerializeNode(&entries[i]);
2826     if (writer_->aborted()) return;
2827   }
2828 }
2829
2830
2831 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2832   writer_->AddString("\"title\":\"");
2833   writer_->AddString(snapshot_->title());
2834   writer_->AddString("\"");
2835   writer_->AddString(",\"uid\":");
2836   writer_->AddNumber(snapshot_->uid());
2837   writer_->AddString(",\"meta\":");
2838   // The object describing node serialization layout.
2839   // We use a set of macros to improve readability.
2840 #define JSON_A(s) "[" s "]"
2841 #define JSON_O(s) "{" s "}"
2842 #define JSON_S(s) "\"" s "\""
2843   writer_->AddString(JSON_O(
2844     JSON_S("node_fields") ":" JSON_A(
2845         JSON_S("type") ","
2846         JSON_S("name") ","
2847         JSON_S("id") ","
2848         JSON_S("self_size") ","
2849         JSON_S("edge_count") ","
2850         JSON_S("trace_node_id")) ","
2851     JSON_S("node_types") ":" JSON_A(
2852         JSON_A(
2853             JSON_S("hidden") ","
2854             JSON_S("array") ","
2855             JSON_S("string") ","
2856             JSON_S("object") ","
2857             JSON_S("code") ","
2858             JSON_S("closure") ","
2859             JSON_S("regexp") ","
2860             JSON_S("number") ","
2861             JSON_S("native") ","
2862             JSON_S("synthetic") ","
2863             JSON_S("concatenated string") ","
2864             JSON_S("sliced string")) ","
2865         JSON_S("string") ","
2866         JSON_S("number") ","
2867         JSON_S("number") ","
2868         JSON_S("number") ","
2869         JSON_S("number") ","
2870         JSON_S("number")) ","
2871     JSON_S("edge_fields") ":" JSON_A(
2872         JSON_S("type") ","
2873         JSON_S("name_or_index") ","
2874         JSON_S("to_node")) ","
2875     JSON_S("edge_types") ":" JSON_A(
2876         JSON_A(
2877             JSON_S("context") ","
2878             JSON_S("element") ","
2879             JSON_S("property") ","
2880             JSON_S("internal") ","
2881             JSON_S("hidden") ","
2882             JSON_S("shortcut") ","
2883             JSON_S("weak")) ","
2884         JSON_S("string_or_number") ","
2885         JSON_S("node")) ","
2886     JSON_S("trace_function_info_fields") ":" JSON_A(
2887         JSON_S("function_id") ","
2888         JSON_S("name") ","
2889         JSON_S("script_name") ","
2890         JSON_S("script_id") ","
2891         JSON_S("line") ","
2892         JSON_S("column")) ","
2893     JSON_S("trace_node_fields") ":" JSON_A(
2894         JSON_S("id") ","
2895         JSON_S("function_info_index") ","
2896         JSON_S("count") ","
2897         JSON_S("size") ","
2898         JSON_S("children"))));
2899 #undef JSON_S
2900 #undef JSON_O
2901 #undef JSON_A
2902   writer_->AddString(",\"node_count\":");
2903   writer_->AddNumber(snapshot_->entries().length());
2904   writer_->AddString(",\"edge_count\":");
2905   writer_->AddNumber(snapshot_->edges().length());
2906   writer_->AddString(",\"trace_function_count\":");
2907   uint32_t count = 0;
2908   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2909   if (tracker) {
2910     count = tracker->function_info_list().length();
2911   }
2912   writer_->AddNumber(count);
2913 }
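// When the macros above are expanded, the "meta" object takes roughly the
// following shape (abbreviated here for illustration):
//
//   "meta":{"node_fields":["type","name","id","self_size","edge_count",
//                          "trace_node_id"],
//           "node_types":[[...],"string","number",...],
//           "edge_fields":["type","name_or_index","to_node"],
//           "edge_types":[[...],"string_or_number","node"], ...}
//
// followed by the node, edge and trace function counts written above.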
2914
2915
2916 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2917   static const char hex_chars[] = "0123456789ABCDEF";
2918   w->AddString("\\u");
2919   w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2920   w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2921   w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2922   w->AddCharacter(hex_chars[u & 0xf]);
2923 }
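// For example, WriteUChar(w, 0x00E9) emits the six characters "\u00E9";
// the code point is always written as exactly four uppercase hex digits.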
2924
2925
2926 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2927   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2928   if (!tracker) return;
2929   AllocationTraceTree* traces = tracker->trace_tree();
2930   SerializeTraceNode(traces->root());
2931 }
2932
2933
2934 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2935   // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2936   const int kBufferSize =
2937       4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2938       + 4 + 1 + 1;
2939   EmbeddedVector<char, kBufferSize> buffer;
2940   int buffer_pos = 0;
2941   buffer_pos = utoa(node->id(), buffer, buffer_pos);
2942   buffer[buffer_pos++] = ',';
2943   buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2944   buffer[buffer_pos++] = ',';
2945   buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2946   buffer[buffer_pos++] = ',';
2947   buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2948   buffer[buffer_pos++] = ',';
2949   buffer[buffer_pos++] = '[';
2950   buffer[buffer_pos++] = '\0';
2951   writer_->AddString(buffer.start());
2952
2953   Vector<AllocationTraceNode*> children = node->children();
2954   for (int i = 0; i < children.length(); i++) {
2955     if (i > 0) {
2956       writer_->AddCharacter(',');
2957     }
2958     SerializeTraceNode(children[i]);
2959   }
2960   writer_->AddCharacter(']');
2961 }
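// Each trace node is therefore serialized as
//
//   id,function_info_index,allocation_count,allocation_size,[children...]
//
// so a root with two leaf children could appear (hypothetical values) as
// 1,0,0,0,[2,3,5,120,[],4,7,1,16,[]].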
2962
2963
2964 // The 0-based position is converted to 1-based during serialization.
2965 static int SerializePosition(int position, const Vector<char>& buffer,
2966                              int buffer_pos) {
2967   if (position == -1) {
2968     buffer[buffer_pos++] = '0';
2969   } else {
2970     ASSERT(position >= 0);
2971     buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
2972   }
2973   return buffer_pos;
2974 }
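// For example, an unknown position (-1) is written as "0", position 0 is
// written as "1" and position 41 as "42".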
2975
2976
2977 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2978   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2979   if (!tracker) return;
2980   // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2981   const int kBufferSize =
2982       6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2983       + 6 + 1 + 1;
2984   EmbeddedVector<char, kBufferSize> buffer;
2985   const List<AllocationTracker::FunctionInfo*>& list =
2986       tracker->function_info_list();
2987   bool first_entry = true;
2988   for (int i = 0; i < list.length(); i++) {
2989     AllocationTracker::FunctionInfo* info = list[i];
2990     int buffer_pos = 0;
2991     if (first_entry) {
2992       first_entry = false;
2993     } else {
2994       buffer[buffer_pos++] = ',';
2995     }
2996     buffer_pos = utoa(info->function_id, buffer, buffer_pos);
2997     buffer[buffer_pos++] = ',';
2998     buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
2999     buffer[buffer_pos++] = ',';
3000     buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3001     buffer[buffer_pos++] = ',';
3002     // The cast is safe because script id is a non-negative Smi.
3003     buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3004         buffer_pos);
3005     buffer[buffer_pos++] = ',';
3006     buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3007     buffer[buffer_pos++] = ',';
3008     buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3009     buffer[buffer_pos++] = '\n';
3010     buffer[buffer_pos++] = '\0';
3011     writer_->AddString(buffer.start());
3012   }
3013 }
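// Each function info record thus has the shape
//
//   function_id,name string id,script name string id,script id,line,column
//
// with line and column already converted to their 1-based form by
// SerializePosition above.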
3014
3015
3016 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3017   writer_->AddCharacter('\n');
3018   writer_->AddCharacter('\"');
3019   for ( ; *s != '\0'; ++s) {
3020     switch (*s) {
3021       case '\b':
3022         writer_->AddString("\\b");
3023         continue;
3024       case '\f':
3025         writer_->AddString("\\f");
3026         continue;
3027       case '\n':
3028         writer_->AddString("\\n");
3029         continue;
3030       case '\r':
3031         writer_->AddString("\\r");
3032         continue;
3033       case '\t':
3034         writer_->AddString("\\t");
3035         continue;
3036       case '\"':
3037       case '\\':
3038         writer_->AddCharacter('\\');
3039         writer_->AddCharacter(*s);
3040         continue;
3041       default:
3042         if (*s > 31 && *s < 128) {
3043           writer_->AddCharacter(*s);
3044         } else if (*s <= 31) {
3045           // Control character with no dedicated escape literal.
3046           WriteUChar(writer_, *s);
3047         } else {
3048           // Decode the UTF-8 sequence and emit it as a \u UTF-16 literal.
3049           unsigned length = 1, cursor = 0;
3050           for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3051           unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3052           if (c != unibrow::Utf8::kBadChar) {
3053             WriteUChar(writer_, c);
3054             ASSERT(cursor != 0);
3055             s += cursor - 1;
3056           } else {
3057             writer_->AddCharacter('?');
3058           }
3059         }
3060     }
3061   }
3062   writer_->AddCharacter('\"');
3063 }
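// As an illustration, printable ASCII is copied through unchanged, '"' and
// '\\' get a leading backslash, and the two-byte UTF-8 sequence 0xC3 0xA9
// ("e" with an acute accent, U+00E9) is emitted as "\u00E9".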
3064
3065
3066 void HeapSnapshotJSONSerializer::SerializeStrings() {
3067   ScopedVector<const unsigned char*> sorted_strings(
3068       strings_.occupancy() + 1);
3069   for (HashMap::Entry* entry = strings_.Start();
3070        entry != NULL;
3071        entry = strings_.Next(entry)) {
3072     int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3073     sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3074   }
3075   writer_->AddString("\"<dummy>\"");
3076   for (int i = 1; i < sorted_strings.length(); ++i) {
3077     writer_->AddCharacter(',');
3078     SerializeString(sorted_strings[i]);
3079     if (writer_->aborted()) return;
3080   }
3081 }
3082
3083
3084 } }  // namespace v8::internal
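// A minimal usage sketch of how this serializer is typically driven through
// the public v8.h profiler API of this V8 version; the stream class and the
// "dump" title below are illustrative, not part of this file:
//
//   class StdoutStream : public v8::OutputStream {
//    public:
//     virtual void EndOfStream() { }
//     virtual WriteResult WriteAsciiChunk(char* data, int size) {
//       fwrite(data, 1, size, stdout);
//       return kContinue;
//     }
//   };
//
//   const v8::HeapSnapshot* snapshot = isolate->GetHeapProfiler()->
//       TakeHeapSnapshot(v8::String::NewFromUtf8(isolate, "dump"));
//   StdoutStream stream;
//   snapshot->Serialize(&stream, v8::HeapSnapshot::kJSON);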