// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "heap-snapshot-generator-inl.h"

#include "allocation-tracker.h"
#include "code-stubs.h"
#include "heap-profiler.h"
#include "debug.h"
#include "types.h"

namespace v8 {
namespace internal {


HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut
      || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}


const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      id_(id),
      snapshot_(snapshot),
      name_(name) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


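// Debug printer: dumps this entry (size, id, type and name) and then
// recursively prints its children up to |max_depth|, prefixing each child
// with a marker that encodes the edge type.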
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6d @%6u %*c %s%s: ",
            self_size(), id(), indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}


const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    default: return "???";
  }
}


// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 24;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 32;
};

}  // namespace


HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
                           const char* title,
                           unsigned uid)
    : profiler_(profiler),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}


void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}


HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(
      HeapEntry::kSynthetic,
      VisitorSynchronization::kTagNames[tag],
      HeapObjectsMap::GetNthGcSubrootId(tag),
      0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}


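// Fills the children arrays: gives each entry its slice of the shared
// children_ array based on its accumulated children count, then resolves
// every edge's destination index into an entry pointer and registers the
// edge with its source entry.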
void HeapSnapshot::FillChildren() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  ASSERT(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}


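// Comparison functor used for the binary search over the id-sorted
// entries list.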
class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


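// Lazily builds and caches a list of entry pointers sorted by object id.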
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}


// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;


static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we see no difference between two
  // cases: it has an entry with NULL as the value, or it has just created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have the value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}


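// Updates the address -> entry mapping when the GC moves an object from
// |from| to |to|, invalidating any entry that previously lived at the
// destination address and recording the object's (possibly changed) size.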
void HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
                                                   true);
    if (to_entry->value != NULL) {
      // We found an existing entry at the 'to' address, belonging to an old
      // object. Without this operation we would have two EntryInfos with the
      // same value in the addr field, which is bad because later, in
      // RemoveDeadEntries, one of them would be removed together with the
      // corresponding entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // The size of an object can change during its lifetime, so to keep
    // information about the object in entries_ consistent, we have to adjust
    // its size when the object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
}


void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


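// Returns the snapshot id for the object at |addr|, updating its recorded
// size, or assigns a fresh id (advancing next_id_ by kObjectIdStep) if the
// address is not tracked yet.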
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
                                              true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}


void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


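// Performs a full GC to make the heap iterable, then walks all heap objects
// to refresh the address -> id map and finally drops entries for objects
// that no longer exist.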
void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object      : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}


namespace {


struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
    : obj(obj),
      expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object   : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object      : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}


}  // namespace


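// Consistency check: counts heap objects that are missing from the map or
// whose recorded size is stale; when FLAG_heap_profiler_trace_objects is
// set it also prints a report of the mismatching objects.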
int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry = entries_map_.Lookup(
      obj->address(), ComputePointerHash(obj->address()), false);
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}


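// Refreshes the object map, then streams per-time-interval object count and
// size updates to |stream| in chunks of the stream's preferred size,
// returning the last assigned object id.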
SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int preferred_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= preferred_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  return last_assigned_id();
}


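// Compacts entries_ by dropping entries that were not marked as accessed
// during the last heap traversal and rewiring the hash map values to the
// new entry indexes.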
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}


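// Computes a synthetic id for an embedder-provided RetainedObjectInfo by
// combining its hash with hashes of its label and element count; the result
// is shifted left by one bit so that these ids land in the even id space
// described at the top of this file.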
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}


HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}


HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}


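// Creates a snapshot entry for |object|, choosing the entry type and a
// human-readable name based on what kind of heap object it is (root,
// GC root, function, regexp, string, code, context, array, number, ...).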
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  int object_size = object->Size();
  SnapshotObjectId object_id =
      heap_object_map_->FindOrAddEntry(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
}


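// Root visitor that adds a snapshot entry for each non-empty GC subroot,
// using the object count observed between Synchronize() calls to detect
// empty subroots.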
class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};


void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}


const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}


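// Object visitor that reports every pointer field not already recorded
// explicitly as a hidden reference. Explicitly extracted fields are tagged
// via MarkVisitedField() and the tag is cleared again here.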
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};


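// Dispatches to the type-specific Extract*References helpers, records the
// object's map reference, and then reports all remaining unvisited fields
// as hidden references via IndexedReferencesExtractor.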
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);

  // Extract unvisited fields as hidden references and restore tags
  // of visited fields.
  IndexedReferencesExtractor refs_extractor(this, obj, entry);
  obj->Iterate(&refs_extractor);
}


void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_CHECK(JSFunction::kNextFunctionLinkOffset
                 == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_CHECK(JSFunction::kNextFunctionLinkOffset + kPointerSize
                 == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
    STATIC_CHECK(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                 4 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
    SetWeakReference(view, entry, "weak_next", view->weak_next(),
                     JSArrayBufferView::kWeakNextOffset);
  } else if (obj->IsJSArrayBuffer()) {
    JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
    SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
                     JSArrayBuffer::kWeakNextOffset);
    SetWeakReference(buffer, entry,
                     "weak_first_view", buffer->weak_first_view(),
                     JSArrayBuffer::kWeakFirstViewOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

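// Records a context slot either as an internal reference or, for slots at
// or past FIRST_WEAK_SLOT, as a weak reference.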
1206 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1207   if (Context::index < Context::FIRST_WEAK_SLOT) { \
1208     SetInternalReference(context, entry, #name, context->get(Context::index), \
1209         FixedArray::OffsetOfElementAt(Context::index)); \
1210   } else { \
1211     SetWeakReference(context, entry, #name, context->get(Context::index), \
1212         FixedArray::OffsetOfElementAt(Context::index)); \
1213   }
1214   EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1215   EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1216   EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1217   EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1218   if (context->IsNativeContext()) {
1219     TagObject(context->jsfunction_result_caches(),
1220               "(context func. result caches)");
1221     TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1222     TagObject(context->runtime_context(), "(runtime context)");
1223     TagObject(context->embedder_data(), "(context data)");
1224     NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1225     EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1226                           optimized_functions_list);
1227     EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1228     EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1229     EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
1230 #undef EXTRACT_CONTEXT_FIELD
1231     STATIC_CHECK(Context::OPTIMIZED_FUNCTIONS_LIST == Context::FIRST_WEAK_SLOT);
1232     STATIC_CHECK(Context::NEXT_CONTEXT_LINK + 1
1233                  == Context::NATIVE_CONTEXT_SLOTS);
1234     STATIC_CHECK(Context::FIRST_WEAK_SLOT + 5 == Context::NATIVE_CONTEXT_SLOTS);
1235   }
1236 }
1237
1238
1239 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1240   if (map->HasTransitionArray()) {
1241     TransitionArray* transitions = map->transitions();
1242     int transitions_entry = GetEntry(transitions)->index();
1243     Object* back_pointer = transitions->back_pointer_storage();
1244     TagObject(back_pointer, "(back pointer)");
1245     SetInternalReference(transitions, transitions_entry,
1246                          "back_pointer", back_pointer);
1247     TagObject(transitions, "(transition array)");
1248     SetInternalReference(map, entry,
1249                          "transitions", transitions,
1250                          Map::kTransitionsOrBackPointerOffset);
1251   } else {
1252     Object* back_pointer = map->GetBackPointer();
1253     TagObject(back_pointer, "(back pointer)");
1254     SetInternalReference(map, entry,
1255                          "back_pointer", back_pointer,
1256                          Map::kTransitionsOrBackPointerOffset);
1257   }
1258   DescriptorArray* descriptors = map->instance_descriptors();
1259   TagObject(descriptors, "(map descriptors)");
1260   SetInternalReference(map, entry,
1261                        "descriptors", descriptors,
1262                        Map::kDescriptorsOffset);
1263
1264   SetInternalReference(map, entry,
1265                        "code_cache", map->code_cache(),
1266                        Map::kCodeCacheOffset);
1267   SetInternalReference(map, entry,
1268                        "prototype", map->prototype(), Map::kPrototypeOffset);
1269   SetInternalReference(map, entry,
1270                        "constructor", map->constructor(),
1271                        Map::kConstructorOffset);
1272   TagObject(map->dependent_code(), "(dependent code)");
1273   SetInternalReference(map, entry,
1274                        "dependent_code", map->dependent_code(),
1275                        Map::kDependentCodeOffset);
1276 }
1277
1278
1279 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1280     int entry, SharedFunctionInfo* shared) {
1281   HeapObject* obj = shared;
1282   String* shared_name = shared->DebugName();
1283   const char* name = NULL;
1284   if (shared_name != *heap_->isolate()->factory()->empty_string()) {
1285     name = names_->GetName(shared_name);
1286     TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1287   } else {
1288     TagObject(shared->code(), names_->GetFormatted("(%s code)",
1289         Code::Kind2String(shared->code()->kind())));
1290   }
1291
1292   SetInternalReference(obj, entry,
1293                        "name", shared->name(),
1294                        SharedFunctionInfo::kNameOffset);
1295   SetInternalReference(obj, entry,
1296                        "code", shared->code(),
1297                        SharedFunctionInfo::kCodeOffset);
1298   TagObject(shared->scope_info(), "(function scope info)");
1299   SetInternalReference(obj, entry,
1300                        "scope_info", shared->scope_info(),
1301                        SharedFunctionInfo::kScopeInfoOffset);
1302   SetInternalReference(obj, entry,
1303                        "instance_class_name", shared->instance_class_name(),
1304                        SharedFunctionInfo::kInstanceClassNameOffset);
1305   SetInternalReference(obj, entry,
1306                        "script", shared->script(),
1307                        SharedFunctionInfo::kScriptOffset);
1308   const char* construct_stub_name = name ?
1309       names_->GetFormatted("(construct stub code for %s)", name) :
1310       "(construct stub code)";
1311   TagObject(shared->construct_stub(), construct_stub_name);
1312   SetInternalReference(obj, entry,
1313                        "construct_stub", shared->construct_stub(),
1314                        SharedFunctionInfo::kConstructStubOffset);
1315   SetInternalReference(obj, entry,
1316                        "function_data", shared->function_data(),
1317                        SharedFunctionInfo::kFunctionDataOffset);
1318   SetInternalReference(obj, entry,
1319                        "debug_info", shared->debug_info(),
1320                        SharedFunctionInfo::kDebugInfoOffset);
1321   SetInternalReference(obj, entry,
1322                        "inferred_name", shared->inferred_name(),
1323                        SharedFunctionInfo::kInferredNameOffset);
1324   SetInternalReference(obj, entry,
1325                        "optimized_code_map", shared->optimized_code_map(),
1326                        SharedFunctionInfo::kOptimizedCodeMapOffset);
1327   SetWeakReference(obj, entry,
1328                    "initial_map", shared->initial_map(),
1329                    SharedFunctionInfo::kInitialMapOffset);
1330 }
1331
1332
1333 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1334   HeapObject* obj = script;
1335   SetInternalReference(obj, entry,
1336                        "source", script->source(),
1337                        Script::kSourceOffset);
1338   SetInternalReference(obj, entry,
1339                        "name", script->name(),
1340                        Script::kNameOffset);
1341   SetInternalReference(obj, entry,
1342                        "data", script->data(),
1343                        Script::kDataOffset);
1344   SetInternalReference(obj, entry,
1345                        "context_data", script->context_data(),
1346                        Script::kContextOffset);
1347   TagObject(script->line_ends(), "(script line ends)");
1348   SetInternalReference(obj, entry,
1349                        "line_ends", script->line_ends(),
1350                        Script::kLineEndsOffset);
1351 }
1352
1353
1354 void V8HeapExplorer::ExtractAccessorPairReferences(
1355     int entry, AccessorPair* accessors) {
1356   SetInternalReference(accessors, entry, "getter", accessors->getter(),
1357                        AccessorPair::kGetterOffset);
1358   SetInternalReference(accessors, entry, "setter", accessors->setter(),
1359                        AccessorPair::kSetterOffset);
1360 }
1361
1362
1363 void V8HeapExplorer::ExtractCodeCacheReferences(
1364     int entry, CodeCache* code_cache) {
1365   TagObject(code_cache->default_cache(), "(default code cache)");
1366   SetInternalReference(code_cache, entry,
1367                        "default_cache", code_cache->default_cache(),
1368                        CodeCache::kDefaultCacheOffset);
1369   TagObject(code_cache->normal_type_cache(), "(code type cache)");
1370   SetInternalReference(code_cache, entry,
1371                        "type_cache", code_cache->normal_type_cache(),
1372                        CodeCache::kNormalTypeCacheOffset);
1373 }
1374
1375
1376 void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1377   TagObject(code, names_->GetFormatted("(%s builtin)", name));
1378 }
1379
1380
1381 void V8HeapExplorer::TagCodeObject(Code* code) {
1382   if (code->kind() == Code::STUB) {
1383     TagObject(code, names_->GetFormatted(
1384         "(%s code)", CodeStub::MajorName(
1385             static_cast<CodeStub::Major>(code->major_key()), true)));
1386   }
1387 }
1388
1389
1390 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1391   TagCodeObject(code);
1392   TagObject(code->relocation_info(), "(code relocation info)");
1393   SetInternalReference(code, entry,
1394                        "relocation_info", code->relocation_info(),
1395                        Code::kRelocationInfoOffset);
1396   SetInternalReference(code, entry,
1397                        "handler_table", code->handler_table(),
1398                        Code::kHandlerTableOffset);
1399   TagObject(code->deoptimization_data(), "(code deopt data)");
1400   SetInternalReference(code, entry,
1401                        "deoptimization_data", code->deoptimization_data(),
1402                        Code::kDeoptimizationDataOffset);
1403   if (code->kind() == Code::FUNCTION) {
1404     SetInternalReference(code, entry,
1405                          "type_feedback_info", code->type_feedback_info(),
1406                          Code::kTypeFeedbackInfoOffset);
1407   }
1408   SetInternalReference(code, entry,
1409                        "gc_metadata", code->gc_metadata(),
1410                        Code::kGCMetadataOffset);
1411   SetInternalReference(code, entry,
1412                        "constant_pool", code->constant_pool(),
1413                        Code::kConstantPoolOffset);
1414   if (code->kind() == Code::OPTIMIZED_FUNCTION) {
1415     SetWeakReference(code, entry,
1416                      "next_code_link", code->next_code_link(),
1417                      Code::kNextCodeLinkOffset);
1418   }
1419 }
1420
1421
1422 void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1423   SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
1424 }
1425
1426
1427 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1428   SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
1429 }
1430
1431
1432 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1433                                                    PropertyCell* cell) {
1434   ExtractCellReferences(entry, cell);
1435   SetInternalReference(cell, entry, "type", cell->type(),
1436                        PropertyCell::kTypeOffset);
1437   SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1438                        PropertyCell::kDependentCodeOffset);
1439 }
1440
1441
1442 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1443                                                      AllocationSite* site) {
1444   SetInternalReference(site, entry, "transition_info", site->transition_info(),
1445                        AllocationSite::kTransitionInfoOffset);
1446   SetInternalReference(site, entry, "nested_site", site->nested_site(),
1447                        AllocationSite::kNestedSiteOffset);
1448   SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1449                        AllocationSite::kDependentCodeOffset);
1450   // Do not visit weak_next as it is not visited by the StaticVisitor,
1451   // and the weak_next field is of no interest for the snapshot.
1452   STATIC_CHECK(AllocationSite::kWeakNextOffset >=
1453                AllocationSite::BodyDescriptor::kEndOffset);
1454 }
1455
1456
1457 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1458   if (!js_obj->IsJSFunction()) return;
1459
1460   JSFunction* func = JSFunction::cast(js_obj);
1461   if (func->shared()->bound()) {
1462     FixedArray* bindings = func->function_bindings();
1463     SetNativeBindReference(js_obj, entry, "bound_this",
1464                            bindings->get(JSFunction::kBoundThisIndex));
1465     SetNativeBindReference(js_obj, entry, "bound_function",
1466                            bindings->get(JSFunction::kBoundFunctionIndex));
1467     for (int i = JSFunction::kBoundArgumentsStartIndex;
1468          i < bindings->length(); i++) {
1469       const char* reference_name = names_->GetFormatted(
1470           "bound_argument_%d",
1471           i - JSFunction::kBoundArgumentsStartIndex);
1472       SetNativeBindReference(js_obj, entry, reference_name,
1473                              bindings->get(i));
1474     }
1475   }
1476 }
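
// Illustration (not part of the original file): for a function created as
// f.bind(obj, 1, 2), the bindings array walked above yields shortcut edges
// named "bound_this" -> obj, "bound_function" -> f, and "bound_argument_0",
// "bound_argument_1" for the two pre-bound arguments.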
1477
1478
1479 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1480   if (js_obj->HasFastProperties()) {
1481     DescriptorArray* descs = js_obj->map()->instance_descriptors();
1482     int real_size = js_obj->map()->NumberOfOwnDescriptors();
1483     for (int i = 0; i < real_size; i++) {
1484       switch (descs->GetType(i)) {
1485         case FIELD: {
1486           int index = descs->GetFieldIndex(i);
1487
1488           Name* k = descs->GetKey(i);
1489           if (index < js_obj->map()->inobject_properties()) {
1490             Object* value = js_obj->InObjectPropertyAt(index);
1491             if (k != heap_->hidden_string()) {
1492               SetPropertyReference(
1493                   js_obj, entry,
1494                   k, value,
1495                   NULL,
1496                   js_obj->GetInObjectPropertyOffset(index));
1497             } else {
1498               TagObject(value, "(hidden properties)");
1499               SetInternalReference(
1500                   js_obj, entry,
1501                   "hidden_properties", value,
1502                   js_obj->GetInObjectPropertyOffset(index));
1503             }
1504           } else {
1505             Object* value = js_obj->RawFastPropertyAt(index);
1506             if (k != heap_->hidden_string()) {
1507               SetPropertyReference(js_obj, entry, k, value);
1508             } else {
1509               TagObject(value, "(hidden properties)");
1510               SetInternalReference(js_obj, entry, "hidden_properties", value);
1511             }
1512           }
1513           break;
1514         }
1515         case CONSTANT:
1516           SetPropertyReference(
1517               js_obj, entry,
1518               descs->GetKey(i), descs->GetConstant(i));
1519           break;
1520         case CALLBACKS:
1521           ExtractAccessorPairProperty(
1522               js_obj, entry,
1523               descs->GetKey(i), descs->GetValue(i));
1524           break;
1525         case NORMAL:  // only in slow mode
1526         case HANDLER:  // only in lookup results, not in descriptors
1527         case INTERCEPTOR:  // only in lookup results, not in descriptors
1528           break;
1529         case TRANSITION:
1530         case NONEXISTENT:
1531           UNREACHABLE();
1532           break;
1533       }
1534     }
1535   } else {
1536     NameDictionary* dictionary = js_obj->property_dictionary();
1537     int length = dictionary->Capacity();
1538     for (int i = 0; i < length; ++i) {
1539       Object* k = dictionary->KeyAt(i);
1540       if (dictionary->IsKey(k)) {
1541         Object* target = dictionary->ValueAt(i);
1542         // We assume that global objects can only have slow properties.
1543         Object* value = target->IsPropertyCell()
1544             ? PropertyCell::cast(target)->value()
1545             : target;
1546         if (k == heap_->hidden_string()) {
1547           TagObject(value, "(hidden properties)");
1548           SetInternalReference(js_obj, entry, "hidden_properties", value);
1549           continue;
1550         }
1551         if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
1552         SetPropertyReference(js_obj, entry, String::cast(k), value);
1553       }
1554     }
1555   }
1556 }
1557
1558
1559 bool V8HeapExplorer::ExtractAccessorPairProperty(
1560     JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
1561   if (!callback_obj->IsAccessorPair()) return false;
1562   AccessorPair* accessors = AccessorPair::cast(callback_obj);
1563   Object* getter = accessors->getter();
1564   if (!getter->IsOddball()) {
1565     SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
1566   }
1567   Object* setter = accessors->setter();
1568   if (!setter->IsOddball()) {
1569     SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
1570   }
1571   return true;
1572 }
1573
1574
1575 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1576   if (js_obj->HasFastObjectElements()) {
1577     FixedArray* elements = FixedArray::cast(js_obj->elements());
1578     int length = js_obj->IsJSArray() ?
1579         Smi::cast(JSArray::cast(js_obj)->length())->value() :
1580         elements->length();
1581     for (int i = 0; i < length; ++i) {
1582       if (!elements->get(i)->IsTheHole()) {
1583         SetElementReference(js_obj, entry, i, elements->get(i));
1584       }
1585     }
1586   } else if (js_obj->HasDictionaryElements()) {
1587     SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1588     int length = dictionary->Capacity();
1589     for (int i = 0; i < length; ++i) {
1590       Object* k = dictionary->KeyAt(i);
1591       if (dictionary->IsKey(k)) {
1592         ASSERT(k->IsNumber());
1593         uint32_t index = static_cast<uint32_t>(k->Number());
1594         SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
1595       }
1596     }
1597   }
1598 }
1599
1600
1601 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1602   int length = js_obj->GetInternalFieldCount();
1603   for (int i = 0; i < length; ++i) {
1604     Object* o = js_obj->GetInternalField(i);
1605     SetInternalReference(
1606         js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
1607   }
1608 }
1609
1610
1611 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1612   Heap* heap = object->GetHeap();
1613   if (object->IsJSFunction()) return heap->closure_string();
1614   String* constructor_name = object->constructor_name();
1615   if (constructor_name == heap->Object_string()) {
1616     // Look up an immediate "constructor" property; if it is a function,
1617     // return its name. This is for instances of binding objects, which
1618     // have prototype constructor type "Object".
1619     Object* constructor_prop = NULL;
1620     LookupResult result(heap->isolate());
1621     object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
1622     if (!result.IsFound()) return object->constructor_name();
1623
1624     constructor_prop = result.GetLazyValue();
1625     if (constructor_prop->IsJSFunction()) {
1626       Object* maybe_name =
1627           JSFunction::cast(constructor_prop)->shared()->name();
1628       if (maybe_name->IsString()) {
1629         String* name = String::cast(maybe_name);
1630         if (name->length() > 0) return name;
1631       }
1632     }
1633   }
1634   return object->constructor_name();
1635 }
1636
1637
1638 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1639   if (!obj->IsHeapObject()) return NULL;
1640   return filler_->FindOrAddEntry(obj, this);
1641 }
1642
1643
1644 class RootsReferencesExtractor : public ObjectVisitor {
1645  private:
1646   struct IndexTag {
1647     IndexTag(int index, VisitorSynchronization::SyncTag tag)
1648         : index(index), tag(tag) { }
1649     int index;
1650     VisitorSynchronization::SyncTag tag;
1651   };
1652
1653  public:
1654   explicit RootsReferencesExtractor(Heap* heap)
1655       : collecting_all_references_(false),
1656         previous_reference_count_(0),
1657         heap_(heap) {
1658   }
1659
1660   void VisitPointers(Object** start, Object** end) {
1661     if (collecting_all_references_) {
1662       for (Object** p = start; p < end; p++) all_references_.Add(*p);
1663     } else {
1664       for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1665     }
1666   }
1667
1668   void SetCollectingAllReferences() { collecting_all_references_ = true; }
1669
1670   void FillReferences(V8HeapExplorer* explorer) {
1671     ASSERT(strong_references_.length() <= all_references_.length());
1672     Builtins* builtins = heap_->isolate()->builtins();
1673     for (int i = 0; i < reference_tags_.length(); ++i) {
1674       explorer->SetGcRootsReference(reference_tags_[i].tag);
1675     }
1676     int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1677     while (all_index < all_references_.length()) {
1678       bool is_strong = strong_index < strong_references_.length()
1679           && strong_references_[strong_index] == all_references_[all_index];
1680       explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1681                                       !is_strong,
1682                                       all_references_[all_index]);
1683       if (reference_tags_[tags_index].tag ==
1684           VisitorSynchronization::kBuiltins) {
1685         ASSERT(all_references_[all_index]->IsCode());
1686         explorer->TagBuiltinCodeObject(
1687             Code::cast(all_references_[all_index]),
1688             builtins->name(builtin_index++));
1689       }
1690       ++all_index;
1691       if (is_strong) ++strong_index;
1692       if (reference_tags_[tags_index].index == all_index) ++tags_index;
1693     }
1694   }
1695
1696   void Synchronize(VisitorSynchronization::SyncTag tag) {
1697     if (collecting_all_references_ &&
1698         previous_reference_count_ != all_references_.length()) {
1699       previous_reference_count_ = all_references_.length();
1700       reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1701     }
1702   }
1703
1704  private:
1705   bool collecting_all_references_;
1706   List<Object*> strong_references_;
1707   List<Object*> all_references_;
1708   int previous_reference_count_;
1709   List<IndexTag> reference_tags_;
1710   Heap* heap_;
1711 };
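
// How the two lists above get merged (illustration, not in the original
// file): the extractor is run once with VISIT_ONLY_STRONG and once with
// VISIT_ALL (see IterateAndExtractReferences below), so strong_references_
// is an in-order subset of all_references_. With strong = [A, C] and
// all = [A, B, C], FillReferences reports A and C as strong subroot
// references and B as a weak one.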
1712
1713
1714 bool V8HeapExplorer::IterateAndExtractReferences(
1715     SnapshotFillerInterface* filler) {
1716   filler_ = filler;
1717
1718   // Make sure builtin code objects get their builtin tags
1719   // first. Otherwise a generic builtin code object could end up
1720   // tagged with the custom name of a particular JSFunction.
1721   SetRootGcRootsReference();
1722   RootsReferencesExtractor extractor(heap_);
1723   heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1724   extractor.SetCollectingAllReferences();
1725   heap_->IterateRoots(&extractor, VISIT_ALL);
1726   extractor.FillReferences(this);
1727
1728   // Now iterate the whole heap.
1729   bool interrupted = false;
1730   HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1731   // Heap iteration with filtering must run to completion even if interrupted.
1732   for (HeapObject* obj = iterator.next();
1733        obj != NULL;
1734        obj = iterator.next(), progress_->ProgressStep()) {
1735     if (!interrupted) {
1736       ExtractReferences(obj);
1737       if (!progress_->ProgressReport(false)) interrupted = true;
1738     }
1739   }
1740   if (interrupted) {
1741     filler_ = NULL;
1742     return false;
1743   }
1744
1745   filler_ = NULL;
1746   return progress_->ProgressReport(true);
1747 }
1748
1749
1750 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1751   return object->IsHeapObject()
1752       && !object->IsOddball()
1753       && object != heap_->empty_byte_array()
1754       && object != heap_->empty_fixed_array()
1755       && object != heap_->empty_descriptor_array()
1756       && object != heap_->fixed_array_map()
1757       && object != heap_->cell_map()
1758       && object != heap_->global_property_cell_map()
1759       && object != heap_->shared_function_info_map()
1760       && object != heap_->free_space_map()
1761       && object != heap_->one_pointer_filler_map()
1762       && object != heap_->two_pointer_filler_map();
1763 }
1764
1765
1766 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1767                                          int parent_entry,
1768                                          String* reference_name,
1769                                          Object* child_obj,
1770                                          int field_offset) {
1771   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1772   HeapEntry* child_entry = GetEntry(child_obj);
1773   if (child_entry != NULL) {
1774     filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1775                                parent_entry,
1776                                names_->GetName(reference_name),
1777                                child_entry);
1778     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1779   }
1780 }
1781
1782
1783 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1784                                             int parent_entry,
1785                                             const char* reference_name,
1786                                             Object* child_obj) {
1787   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1788   HeapEntry* child_entry = GetEntry(child_obj);
1789   if (child_entry != NULL) {
1790     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
1791                                parent_entry,
1792                                reference_name,
1793                                child_entry);
1794   }
1795 }
1796
1797
1798 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1799                                          int parent_entry,
1800                                          int index,
1801                                          Object* child_obj) {
1802   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1803   HeapEntry* child_entry = GetEntry(child_obj);
1804   if (child_entry != NULL) {
1805     filler_->SetIndexedReference(HeapGraphEdge::kElement,
1806                                  parent_entry,
1807                                  index,
1808                                  child_entry);
1809   }
1810 }
1811
1812
1813 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1814                                           int parent_entry,
1815                                           const char* reference_name,
1816                                           Object* child_obj,
1817                                           int field_offset) {
1818   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1819   HeapEntry* child_entry = GetEntry(child_obj);
1820   if (child_entry == NULL) return;
1821   if (IsEssentialObject(child_obj)) {
1822     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1823                                parent_entry,
1824                                reference_name,
1825                                child_entry);
1826   }
1827   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1828 }
1829
1830
1831 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1832                                           int parent_entry,
1833                                           int index,
1834                                           Object* child_obj,
1835                                           int field_offset) {
1836   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1837   HeapEntry* child_entry = GetEntry(child_obj);
1838   if (child_entry == NULL) return;
1839   if (IsEssentialObject(child_obj)) {
1840     filler_->SetNamedReference(HeapGraphEdge::kInternal,
1841                                parent_entry,
1842                                names_->GetName(index),
1843                                child_entry);
1844   }
1845   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1846 }
1847
1848
1849 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1850                                         int parent_entry,
1851                                         int index,
1852                                         Object* child_obj) {
1853   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1854   HeapEntry* child_entry = GetEntry(child_obj);
1855   if (child_entry != NULL && IsEssentialObject(child_obj)) {
1856     filler_->SetIndexedReference(HeapGraphEdge::kHidden,
1857                                  parent_entry,
1858                                  index,
1859                                  child_entry);
1860   }
1861 }
1862
1863
1864 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1865                                       int parent_entry,
1866                                       const char* reference_name,
1867                                       Object* child_obj,
1868                                       int field_offset) {
1869   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1870   HeapEntry* child_entry = GetEntry(child_obj);
1871   if (child_entry == NULL) return;
1872   if (IsEssentialObject(child_obj)) {
1873     filler_->SetNamedReference(HeapGraphEdge::kWeak,
1874                                parent_entry,
1875                                reference_name,
1876                                child_entry);
1877   }
1878   IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1879 }
1880
1881
1882 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
1883                                           int parent_entry,
1884                                           Name* reference_name,
1885                                           Object* child_obj,
1886                                           const char* name_format_string,
1887                                           int field_offset) {
1888   ASSERT(parent_entry == GetEntry(parent_obj)->index());
1889   HeapEntry* child_entry = GetEntry(child_obj);
1890   if (child_entry != NULL) {
1891     HeapGraphEdge::Type type =
1892         reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
1893             ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
1894     const char* name = name_format_string != NULL && reference_name->IsString()
1895         ? names_->GetFormatted(
1896               name_format_string,
1897               String::cast(reference_name)->ToCString(
1898                   DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
1899         names_->GetName(reference_name);
1900
1901     filler_->SetNamedReference(type,
1902                                parent_entry,
1903                                name,
1904                                child_entry);
1905     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
1906   }
1907 }
1908
1909
1910 void V8HeapExplorer::SetRootGcRootsReference() {
1911   filler_->SetIndexedAutoIndexReference(
1912       HeapGraphEdge::kElement,
1913       snapshot_->root()->index(),
1914       snapshot_->gc_roots());
1915 }
1916
1917
1918 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
1919   HeapEntry* child_entry = GetEntry(child_obj);
1920   ASSERT(child_entry != NULL);
1921   filler_->SetNamedAutoIndexReference(
1922       HeapGraphEdge::kShortcut,
1923       snapshot_->root()->index(),
1924       child_entry);
1925 }
1926
1927
1928 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
1929   filler_->SetIndexedAutoIndexReference(
1930       HeapGraphEdge::kElement,
1931       snapshot_->gc_roots()->index(),
1932       snapshot_->gc_subroot(tag));
1933 }
1934
1935
1936 void V8HeapExplorer::SetGcSubrootReference(
1937     VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
1938   HeapEntry* child_entry = GetEntry(child_obj);
1939   if (child_entry != NULL) {
1940     const char* name = GetStrongGcSubrootName(child_obj);
1941     if (name != NULL) {
1942       filler_->SetNamedReference(
1943           HeapGraphEdge::kInternal,
1944           snapshot_->gc_subroot(tag)->index(),
1945           name,
1946           child_entry);
1947     } else {
1948       if (is_weak) {
1949         filler_->SetNamedAutoIndexReference(
1950             HeapGraphEdge::kWeak,
1951             snapshot_->gc_subroot(tag)->index(),
1952             child_entry);
1953       } else {
1954         filler_->SetIndexedAutoIndexReference(
1955             HeapGraphEdge::kElement,
1956             snapshot_->gc_subroot(tag)->index(),
1957             child_entry);
1958       }
1959     }
1960
1961     // Add a shortcut to the JS global object reference at the snapshot root.
1962     if (child_obj->IsNativeContext()) {
1963       Context* context = Context::cast(child_obj);
1964       GlobalObject* global = context->global_object();
1965       if (global->IsJSGlobalObject()) {
1966         bool is_debug_object = false;
1967 #ifdef ENABLE_DEBUGGER_SUPPORT
1968         is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
1969 #endif
1970         if (!is_debug_object && !user_roots_.Contains(global)) {
1971           user_roots_.Insert(global);
1972           SetUserGlobalReference(global);
1973         }
1974       }
1975     }
1976   }
1977 }
1978
1979
1980 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
1981   if (strong_gc_subroot_names_.is_empty()) {
1982 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
1983 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
1984     STRONG_ROOT_LIST(ROOT_NAME)
1985 #undef ROOT_NAME
1986 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
1987     STRUCT_LIST(STRUCT_MAP_NAME)
1988 #undef STRUCT_MAP_NAME
1989 #define STRING_NAME(name, str) NAME_ENTRY(name)
1990     INTERNALIZED_STRING_LIST(STRING_NAME)
1991 #undef STRING_NAME
1992 #undef NAME_ENTRY
1993     CHECK(!strong_gc_subroot_names_.is_empty());
1994   }
1995   return strong_gc_subroot_names_.GetTag(object);
1996 }
1997
1998
1999 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2000   if (IsEssentialObject(obj)) {
2001     HeapEntry* entry = GetEntry(obj);
2002     if (entry->name()[0] == '\0') {
2003       entry->set_name(tag);
2004     }
2005   }
2006 }
2007
2008
2009 class GlobalObjectsEnumerator : public ObjectVisitor {
2010  public:
2011   virtual void VisitPointers(Object** start, Object** end) {
2012     for (Object** p = start; p < end; p++) {
2013       if ((*p)->IsNativeContext()) {
2014         Context* context = Context::cast(*p);
2015         JSObject* proxy = context->global_proxy();
2016         if (proxy->IsJSGlobalProxy()) {
2017           Object* global = proxy->map()->prototype();
2018           if (global->IsJSGlobalObject()) {
2019             objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2020           }
2021         }
2022       }
2023     }
2024   }
2025   int count() { return objects_.length(); }
2026   Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2027
2028  private:
2029   List<Handle<JSGlobalObject> > objects_;
2030 };
2031
2032
2033 // Modifies heap. Must not be run during heap traversal.
2034 void V8HeapExplorer::TagGlobalObjects() {
2035   Isolate* isolate = heap_->isolate();
2036   HandleScope scope(isolate);
2037   GlobalObjectsEnumerator enumerator;
2038   isolate->global_handles()->IterateAllRoots(&enumerator);
2039   const char** urls = NewArray<const char*>(enumerator.count());
2040   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2041     if (global_object_name_resolver_) {
2042       HandleScope scope(isolate);
2043       Handle<JSGlobalObject> global_obj = enumerator.at(i);
2044       urls[i] = global_object_name_resolver_->GetName(
2045           Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2046     } else {
2047       urls[i] = NULL;
2048     }
2049   }
2050
2051   DisallowHeapAllocation no_allocation;
2052   for (int i = 0, l = enumerator.count(); i < l; ++i) {
2053     objects_tags_.SetTag(*enumerator.at(i), urls[i]);
2054   }
2055
2056   DeleteArray(urls);
2057 }
2058
2059
2060 class GlobalHandlesExtractor : public ObjectVisitor {
2061  public:
2062   explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2063       : explorer_(explorer) {}
2064   virtual ~GlobalHandlesExtractor() {}
2065   virtual void VisitPointers(Object** start, Object** end) {
2066     UNREACHABLE();
2067   }
2068   virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2069     explorer_->VisitSubtreeWrapper(p, class_id);
2070   }
2071  private:
2072   NativeObjectsExplorer* explorer_;
2073 };
2074
2075
2076 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2077  public:
2078   BasicHeapEntriesAllocator(
2079       HeapSnapshot* snapshot,
2080       HeapEntry::Type entries_type)
2081     : snapshot_(snapshot),
2082       names_(snapshot_->profiler()->names()),
2083       heap_object_map_(snapshot_->profiler()->heap_object_map()),
2084       entries_type_(entries_type) {
2085   }
2086   virtual HeapEntry* AllocateEntry(HeapThing ptr);
2087  private:
2088   HeapSnapshot* snapshot_;
2089   StringsStorage* names_;
2090   HeapObjectsMap* heap_object_map_;
2091   HeapEntry::Type entries_type_;
2092 };
2093
2094
2095 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2096   v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2097   intptr_t elements = info->GetElementCount();
2098   intptr_t size = info->GetSizeInBytes();
2099   const char* name = elements != -1
2100       ? names_->GetFormatted(
2101             "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2102       : names_->GetCopy(info->GetLabel());
2103   return snapshot_->AddEntry(
2104       entries_type_,
2105       name,
2106       heap_object_map_->GenerateId(info),
2107       size != -1 ? static_cast<int>(size) : 0);
2108 }
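
// Naming example for the allocator above (illustrative): a RetainedObjectInfo
// whose GetLabel() returns "Window" and GetElementCount() returns 3 produces
// an entry named "Window / 3 entries"; when the element count is -1 the label
// is used verbatim.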
2109
2110
2111 NativeObjectsExplorer::NativeObjectsExplorer(
2112     HeapSnapshot* snapshot,
2113     SnapshottingProgressReportingInterface* progress)
2114     : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2115       snapshot_(snapshot),
2116       names_(snapshot_->profiler()->names()),
2117       progress_(progress),
2118       embedder_queried_(false),
2119       objects_by_info_(RetainedInfosMatch),
2120       native_groups_(StringsMatch),
2121       filler_(NULL) {
2122   synthetic_entries_allocator_ =
2123       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2124   native_entries_allocator_ =
2125       new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
2126 }
2127
2128
2129 NativeObjectsExplorer::~NativeObjectsExplorer() {
2130   for (HashMap::Entry* p = objects_by_info_.Start();
2131        p != NULL;
2132        p = objects_by_info_.Next(p)) {
2133     v8::RetainedObjectInfo* info =
2134         reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2135     info->Dispose();
2136     List<HeapObject*>* objects =
2137         reinterpret_cast<List<HeapObject*>* >(p->value);
2138     delete objects;
2139   }
2140   for (HashMap::Entry* p = native_groups_.Start();
2141        p != NULL;
2142        p = native_groups_.Next(p)) {
2143     v8::RetainedObjectInfo* info =
2144         reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2145     info->Dispose();
2146   }
2147   delete synthetic_entries_allocator_;
2148   delete native_entries_allocator_;
2149 }
2150
2151
2152 int NativeObjectsExplorer::EstimateObjectsCount() {
2153   FillRetainedObjects();
2154   return objects_by_info_.occupancy();
2155 }
2156
2157
2158 void NativeObjectsExplorer::FillRetainedObjects() {
2159   if (embedder_queried_) return;
2160   Isolate* isolate = isolate_;
2161   const GCType major_gc_type = kGCTypeMarkSweepCompact;
2162   // Record objects that are joined into ObjectGroups.
2163   isolate->heap()->CallGCPrologueCallbacks(
2164       major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2165   List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2166   for (int i = 0; i < groups->length(); ++i) {
2167     ObjectGroup* group = groups->at(i);
2168     if (group->info == NULL) continue;
2169     List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2170     for (size_t j = 0; j < group->length; ++j) {
2171       HeapObject* obj = HeapObject::cast(*group->objects[j]);
2172       list->Add(obj);
2173       in_groups_.Insert(obj);
2174     }
2175     group->info = NULL;  // Acquire info object ownership.
2176   }
2177   isolate->global_handles()->RemoveObjectGroups();
2178   isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2179   // Record objects that are not in ObjectGroups, but have class ID.
2180   GlobalHandlesExtractor extractor(this);
2181   isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2182   embedder_queried_ = true;
2183 }
2184
2185
2186 void NativeObjectsExplorer::FillImplicitReferences() {
2187   Isolate* isolate = isolate_;
2188   List<ImplicitRefGroup*>* groups =
2189       isolate->global_handles()->implicit_ref_groups();
2190   for (int i = 0; i < groups->length(); ++i) {
2191     ImplicitRefGroup* group = groups->at(i);
2192     HeapObject* parent = *group->parent;
2193     int parent_entry =
2194         filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2195     ASSERT(parent_entry != HeapEntry::kNoEntry);
2196     Object*** children = group->children;
2197     for (size_t j = 0; j < group->length; ++j) {
2198       Object* child = *children[j];
2199       HeapEntry* child_entry =
2200           filler_->FindOrAddEntry(child, native_entries_allocator_);
2201       filler_->SetNamedReference(
2202           HeapGraphEdge::kInternal,
2203           parent_entry,
2204           "native",
2205           child_entry);
2206     }
2207   }
2208   isolate->global_handles()->RemoveImplicitRefGroups();
2209 }
2210
2211 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2212     v8::RetainedObjectInfo* info) {
2213   HashMap::Entry* entry =
2214       objects_by_info_.Lookup(info, InfoHash(info), true);
2215   if (entry->value != NULL) {
2216     info->Dispose();
2217   } else {
2218     entry->value = new List<HeapObject*>(4);
2219   }
2220   return reinterpret_cast<List<HeapObject*>* >(entry->value);
2221 }
2222
2223
2224 bool NativeObjectsExplorer::IterateAndExtractReferences(
2225     SnapshotFillerInterface* filler) {
2226   filler_ = filler;
2227   FillRetainedObjects();
2228   FillImplicitReferences();
2229   if (EstimateObjectsCount() > 0) {
2230     for (HashMap::Entry* p = objects_by_info_.Start();
2231          p != NULL;
2232          p = objects_by_info_.Next(p)) {
2233       v8::RetainedObjectInfo* info =
2234           reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2235       SetNativeRootReference(info);
2236       List<HeapObject*>* objects =
2237           reinterpret_cast<List<HeapObject*>* >(p->value);
2238       for (int i = 0; i < objects->length(); ++i) {
2239         SetWrapperNativeReferences(objects->at(i), info);
2240       }
2241     }
2242     SetRootNativeRootsReference();
2243   }
2244   filler_ = NULL;
2245   return true;
2246 }
2247
2248
2249 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2250  public:
2251   explicit NativeGroupRetainedObjectInfo(const char* label)
2252       : disposed_(false),
2253         hash_(reinterpret_cast<intptr_t>(label)),
2254         label_(label) {
2255   }
2256
2257   virtual ~NativeGroupRetainedObjectInfo() {}
2258   virtual void Dispose() {
2259     CHECK(!disposed_);
2260     disposed_ = true;
2261     delete this;
2262   }
2263   virtual bool IsEquivalent(RetainedObjectInfo* other) {
2264     return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2265   }
2266   virtual intptr_t GetHash() { return hash_; }
2267   virtual const char* GetLabel() { return label_; }
2268
2269  private:
2270   bool disposed_;
2271   intptr_t hash_;
2272   const char* label_;
2273 };
2274
2275
2276 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2277     const char* label) {
2278   const char* label_copy = names_->GetCopy(label);
2279   uint32_t hash = StringHasher::HashSequentialString(
2280       label_copy,
2281       static_cast<int>(strlen(label_copy)),
2282       isolate_->heap()->HashSeed());
2283   HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2284                                                 hash, true);
2285   if (entry->value == NULL) {
2286     entry->value = new NativeGroupRetainedObjectInfo(label);
2287   }
2288   return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2289 }
2290
2291
2292 void NativeObjectsExplorer::SetNativeRootReference(
2293     v8::RetainedObjectInfo* info) {
2294   HeapEntry* child_entry =
2295       filler_->FindOrAddEntry(info, native_entries_allocator_);
2296   ASSERT(child_entry != NULL);
2297   NativeGroupRetainedObjectInfo* group_info =
2298       FindOrAddGroupInfo(info->GetGroupLabel());
2299   HeapEntry* group_entry =
2300       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2301   filler_->SetNamedAutoIndexReference(
2302       HeapGraphEdge::kInternal,
2303       group_entry->index(),
2304       child_entry);
2305 }
2306
2307
2308 void NativeObjectsExplorer::SetWrapperNativeReferences(
2309     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2310   HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2311   ASSERT(wrapper_entry != NULL);
2312   HeapEntry* info_entry =
2313       filler_->FindOrAddEntry(info, native_entries_allocator_);
2314   ASSERT(info_entry != NULL);
2315   filler_->SetNamedReference(HeapGraphEdge::kInternal,
2316                              wrapper_entry->index(),
2317                              "native",
2318                              info_entry);
2319   filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2320                                         info_entry->index(),
2321                                         wrapper_entry);
2322 }
2323
2324
2325 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2326   for (HashMap::Entry* entry = native_groups_.Start();
2327        entry;
2328        entry = native_groups_.Next(entry)) {
2329     NativeGroupRetainedObjectInfo* group_info =
2330         static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2331     HeapEntry* group_entry =
2332         filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2333     ASSERT(group_entry != NULL);
2334     filler_->SetIndexedAutoIndexReference(
2335         HeapGraphEdge::kElement,
2336         snapshot_->root()->index(),
2337         group_entry);
2338   }
2339 }
2340
2341
2342 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2343   if (in_groups_.Contains(*p)) return;
2344   Isolate* isolate = isolate_;
2345   v8::RetainedObjectInfo* info =
2346       isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2347   if (info == NULL) return;
2348   GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2349 }
2350
2351
2352 class SnapshotFiller : public SnapshotFillerInterface {
2353  public:
2354   explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
2355       : snapshot_(snapshot),
2356         names_(snapshot->profiler()->names()),
2357         entries_(entries) { }
2358   HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2359     HeapEntry* entry = allocator->AllocateEntry(ptr);
2360     entries_->Pair(ptr, entry->index());
2361     return entry;
2362   }
2363   HeapEntry* FindEntry(HeapThing ptr) {
2364     int index = entries_->Map(ptr);
2365     return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
2366   }
2367   HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2368     HeapEntry* entry = FindEntry(ptr);
2369     return entry != NULL ? entry : AddEntry(ptr, allocator);
2370   }
2371   void SetIndexedReference(HeapGraphEdge::Type type,
2372                            int parent,
2373                            int index,
2374                            HeapEntry* child_entry) {
2375     HeapEntry* parent_entry = &snapshot_->entries()[parent];
2376     parent_entry->SetIndexedReference(type, index, child_entry);
2377   }
2378   void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
2379                                     int parent,
2380                                     HeapEntry* child_entry) {
2381     HeapEntry* parent_entry = &snapshot_->entries()[parent];
2382     int index = parent_entry->children_count() + 1;
2383     parent_entry->SetIndexedReference(type, index, child_entry);
2384   }
2385   void SetNamedReference(HeapGraphEdge::Type type,
2386                          int parent,
2387                          const char* reference_name,
2388                          HeapEntry* child_entry) {
2389     HeapEntry* parent_entry = &snapshot_->entries()[parent];
2390     parent_entry->SetNamedReference(type, reference_name, child_entry);
2391   }
2392   void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
2393                                   int parent,
2394                                   HeapEntry* child_entry) {
2395     HeapEntry* parent_entry = &snapshot_->entries()[parent];
2396     int index = parent_entry->children_count() + 1;
2397     parent_entry->SetNamedReference(
2398         type,
2399         names_->GetName(index),
2400         child_entry);
2401   }
2402
2403  private:
2404   HeapSnapshot* snapshot_;
2405   StringsStorage* names_;
2406   HeapEntriesMap* entries_;
2407 };
2408
2409
2410 HeapSnapshotGenerator::HeapSnapshotGenerator(
2411     HeapSnapshot* snapshot,
2412     v8::ActivityControl* control,
2413     v8::HeapProfiler::ObjectNameResolver* resolver,
2414     Heap* heap)
2415     : snapshot_(snapshot),
2416       control_(control),
2417       v8_heap_explorer_(snapshot_, this, resolver),
2418       dom_explorer_(snapshot_, this),
2419       heap_(heap) {
2420 }
2421
2422
2423 bool HeapSnapshotGenerator::GenerateSnapshot() {
2424   v8_heap_explorer_.TagGlobalObjects();
2425
2426   // TODO(1562) Profiler assumes that any object that is in the heap after
2427   // a full GC is reachable from the root when computing dominators.
2428   // This is not true for weakly reachable objects.
2429   // As a temporary solution we call GC twice.
2430   heap_->CollectAllGarbage(
2431       Heap::kMakeHeapIterableMask,
2432       "HeapSnapshotGenerator::GenerateSnapshot");
2433   heap_->CollectAllGarbage(
2434       Heap::kMakeHeapIterableMask,
2435       "HeapSnapshotGenerator::GenerateSnapshot");
2436
2437 #ifdef VERIFY_HEAP
2438   Heap* debug_heap = heap_;
2439   CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2440   CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2441   CHECK(!debug_heap->code_space()->was_swept_conservatively());
2442   CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2443   CHECK(!debug_heap->property_cell_space()->
2444         was_swept_conservatively());
2445   CHECK(!debug_heap->map_space()->was_swept_conservatively());
2446 #endif
2447
2448   // The following code uses heap iterators, so we want the heap to be
2449   // stable. It should follow TagGlobalObjects as that can allocate.
2450   DisallowHeapAllocation no_alloc;
2451
2452 #ifdef VERIFY_HEAP
2453   debug_heap->Verify();
2454 #endif
2455
2456   SetProgressTotal(1);  // 1 pass.
2457
2458 #ifdef VERIFY_HEAP
2459   debug_heap->Verify();
2460 #endif
2461
2462   if (!FillReferences()) return false;
2463
2464   snapshot_->FillChildren();
2465   snapshot_->RememberLastJSObjectId();
2466
2467   progress_counter_ = progress_total_;
2468   if (!ProgressReport(true)) return false;
2469   return true;
2470 }
2471
2472
2473 void HeapSnapshotGenerator::ProgressStep() {
2474   ++progress_counter_;
2475 }
2476
2477
2478 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2479   const int kProgressReportGranularity = 10000;
2480   if (control_ != NULL
2481       && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2482     return
2483         control_->ReportProgressValue(progress_counter_, progress_total_) ==
2484         v8::ActivityControl::kContinue;
2485   }
2486   return true;
2487 }
2488
2489
2490 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2491   if (control_ == NULL) return;
2492   HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2493   progress_total_ = iterations_count * (
2494       v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2495       dom_explorer_.EstimateObjectsCount());
2496   progress_counter_ = 0;
2497 }
2498
2499
2500 bool HeapSnapshotGenerator::FillReferences() {
2501   SnapshotFiller filler(snapshot_, &entries_);
2502   v8_heap_explorer_.AddRootEntries(&filler);
2503   return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2504       && dom_explorer_.IterateAndExtractReferences(&filler);
2505 }
2506
2507
2508 template<int bytes> struct MaxDecimalDigitsIn;
2509 template<> struct MaxDecimalDigitsIn<4> {
2510   static const int kSigned = 11;
2511   static const int kUnsigned = 10;
2512 };
2513 template<> struct MaxDecimalDigitsIn<8> {
2514   static const int kSigned = 20;
2515   static const int kUnsigned = 20;
2516 };
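
// Compile-time sanity checks for the limits above (an illustrative addition,
// not in the original file; STATIC_CHECK is the macro already used earlier in
// this file): "-2147483648" takes 11 characters, "4294967295" takes 10, and
// both 64-bit extremes take 20.
STATIC_CHECK(MaxDecimalDigitsIn<4>::kSigned == 11);
STATIC_CHECK(MaxDecimalDigitsIn<4>::kUnsigned == 10);
STATIC_CHECK(MaxDecimalDigitsIn<8>::kSigned == 20);
STATIC_CHECK(MaxDecimalDigitsIn<8>::kUnsigned == 20);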
2517
2518
2519 class OutputStreamWriter {
2520  public:
2521   explicit OutputStreamWriter(v8::OutputStream* stream)
2522       : stream_(stream),
2523         chunk_size_(stream->GetChunkSize()),
2524         chunk_(chunk_size_),
2525         chunk_pos_(0),
2526         aborted_(false) {
2527     ASSERT(chunk_size_ > 0);
2528   }
2529   bool aborted() { return aborted_; }
2530   void AddCharacter(char c) {
2531     ASSERT(c != '\0');
2532     ASSERT(chunk_pos_ < chunk_size_);
2533     chunk_[chunk_pos_++] = c;
2534     MaybeWriteChunk();
2535   }
2536   void AddString(const char* s) {
2537     AddSubstring(s, StrLength(s));
2538   }
2539   void AddSubstring(const char* s, int n) {
2540     if (n <= 0) return;
2541     ASSERT(static_cast<size_t>(n) <= strlen(s));
2542     const char* s_end = s + n;
2543     while (s < s_end) {
2544       int s_chunk_size = Min(
2545           chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2546       ASSERT(s_chunk_size > 0);
2547       OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2548       s += s_chunk_size;
2549       chunk_pos_ += s_chunk_size;
2550       MaybeWriteChunk();
2551     }
2552   }
2553   void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2554   void Finalize() {
2555     if (aborted_) return;
2556     ASSERT(chunk_pos_ < chunk_size_);
2557     if (chunk_pos_ != 0) {
2558       WriteChunk();
2559     }
2560     stream_->EndOfStream();
2561   }
2562
2563  private:
2564   template<typename T>
2565   void AddNumberImpl(T n, const char* format) {
2566     // Buffer for the longest value plus trailing \0
2567     static const int kMaxNumberSize =
2568         MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
2569     if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2570       int result = OS::SNPrintF(
2571           chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2572       ASSERT(result != -1);
2573       chunk_pos_ += result;
2574       MaybeWriteChunk();
2575     } else {
2576       EmbeddedVector<char, kMaxNumberSize> buffer;
2577       int result = OS::SNPrintF(buffer, format, n);
2578       USE(result);
2579       ASSERT(result != -1);
2580       AddString(buffer.start());
2581     }
2582   }
2583   void MaybeWriteChunk() {
2584     ASSERT(chunk_pos_ <= chunk_size_);
2585     if (chunk_pos_ == chunk_size_) {
2586       WriteChunk();
2587     }
2588   }
2589   void WriteChunk() {
2590     if (aborted_) return;
2591     if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2592         v8::OutputStream::kAbort) aborted_ = true;
2593     chunk_pos_ = 0;
2594   }
2595
2596   v8::OutputStream* stream_;
2597   int chunk_size_;
2598   ScopedVector<char> chunk_;
2599   int chunk_pos_;
2600   bool aborted_;
2601 };
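
// Illustrative sketch (not part of the original file; it assumes <string> is
// available): a minimal v8::OutputStream that accumulates the serialized
// snapshot in memory. OutputStreamWriter above slices its output into chunks
// of GetChunkSize() bytes and pushes each one through WriteAsciiChunk(); a
// stream like this is what the embedder hands to
// HeapSnapshotJSONSerializer::Serialize() below.
class StringOutputStream : public v8::OutputStream {
 public:
  virtual void EndOfStream() {}
  virtual WriteResult WriteAsciiChunk(char* data, int size) {
    data_.append(data, size);
    return kContinue;
  }
  const std::string& data() const { return data_; }

 private:
  std::string data_;
};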
2602
2603
2604 // type, name|index, to_node.
2605 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2606 // type, name, id, self_size, edge_count.
2607 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
2608
2609 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2610   if (AllocationTracker* allocation_tracker =
2611       snapshot_->profiler()->allocation_tracker()) {
2612     allocation_tracker->PrepareForSerialization();
2613   }
2614   ASSERT(writer_ == NULL);
2615   writer_ = new OutputStreamWriter(stream);
2616   SerializeImpl();
2617   delete writer_;
2618   writer_ = NULL;
2619 }
2620
2621
2622 void HeapSnapshotJSONSerializer::SerializeImpl() {
2623   ASSERT(0 == snapshot_->root()->index());
2624   writer_->AddCharacter('{');
2625   writer_->AddString("\"snapshot\":{");
2626   SerializeSnapshot();
2627   if (writer_->aborted()) return;
2628   writer_->AddString("},\n");
2629   writer_->AddString("\"nodes\":[");
2630   SerializeNodes();
2631   if (writer_->aborted()) return;
2632   writer_->AddString("],\n");
2633   writer_->AddString("\"edges\":[");
2634   SerializeEdges();
2635   if (writer_->aborted()) return;
2636   writer_->AddString("],\n");
2637
2638   writer_->AddString("\"trace_function_infos\":[");
2639   SerializeTraceNodeInfos();
2640   if (writer_->aborted()) return;
2641   writer_->AddString("],\n");
2642   writer_->AddString("\"trace_tree\":[");
2643   SerializeTraceTree();
2644   if (writer_->aborted()) return;
2645   writer_->AddString("],\n");
2646
2647   writer_->AddString("\"strings\":[");
2648   SerializeStrings();
2649   if (writer_->aborted()) return;
2650   writer_->AddCharacter(']');
2651   writer_->AddCharacter('}');
2652   writer_->Finalize();
2653 }
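
// For reference, SerializeImpl() above emits one JSON object of the following
// shape (payloads elided):
//   {"snapshot":{...},
//    "nodes":[...],
//    "edges":[...],
//    "trace_function_infos":[...],
//    "trace_tree":[...],
//    "strings":[...]}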
2654
2655
2656 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2657   HashMap::Entry* cache_entry = strings_.Lookup(
2658       const_cast<char*>(s), StringHash(s), true);
2659   if (cache_entry->value == NULL) {
2660     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2661   }
2662   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
2663 }
2664
2665
2666 static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
2667   int number_of_digits = 0;
2668   unsigned t = value;
2669   do {
2670     ++number_of_digits;
2671   } while (t /= 10);
2672
2673   buffer_pos += number_of_digits;
2674   int result = buffer_pos;
2675   do {
2676     int last_digit = value % 10;
2677     buffer[--buffer_pos] = '0' + last_digit;
2678     value /= 10;
2679   } while (value);
2680   return result;
2681 }
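
// Worked example for utoa() above: utoa(305, buffer, 2) counts three digits,
// advances the write position to 5, stores '5', '0', '3' backwards into
// buffer[4], buffer[3] and buffer[2], and returns 5, the index just past the
// last digit. No terminating '\0' is written; callers append separators and
// terminators themselves.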
2682
2683
2684 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2685                                                bool first_edge) {
2686   // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2687   static const int kBufferSize =
2688       MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
2689   EmbeddedVector<char, kBufferSize> buffer;
2690   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2691       || edge->type() == HeapGraphEdge::kHidden
2692       ? edge->index() : GetStringId(edge->name());
2693   int buffer_pos = 0;
2694   if (!first_edge) {
2695     buffer[buffer_pos++] = ',';
2696   }
2697   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2698   buffer[buffer_pos++] = ',';
2699   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2700   buffer[buffer_pos++] = ',';
2701   buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2702   buffer[buffer_pos++] = '\n';
2703   buffer[buffer_pos++] = '\0';
2704   writer_->AddString(buffer.start());
2705 }
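
// Example record (illustrative): an element edge (HeapGraphEdge::kElement)
// with index 0 whose target is the snapshot root is written as the three
// numbers "1,0,0" (type, name_or_index, to_node), preceded by a ',' for
// every edge except the first and followed by '\n'.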
2706
2707
2708 void HeapSnapshotJSONSerializer::SerializeEdges() {
2709   List<HeapGraphEdge*>& edges = snapshot_->children();
2710   for (int i = 0; i < edges.length(); ++i) {
2711     ASSERT(i == 0 ||
2712            edges[i - 1]->from()->index() <= edges[i]->from()->index());
2713     SerializeEdge(edges[i], i == 0);
2714     if (writer_->aborted()) return;
2715   }
2716 }
2717
2718
2719 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2720   // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
2721   static const int kBufferSize =
2722       5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
2723       + 5 + 1 + 1;
2724   EmbeddedVector<char, kBufferSize> buffer;
2725   int buffer_pos = 0;
2726   if (entry_index(entry) != 0) {
2727     buffer[buffer_pos++] = ',';
2728   }
2729   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2730   buffer[buffer_pos++] = ',';
2731   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2732   buffer[buffer_pos++] = ',';
2733   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2734   buffer[buffer_pos++] = ',';
2735   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2736   buffer[buffer_pos++] = ',';
2737   buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2738   buffer[buffer_pos++] = '\n';
2739   buffer[buffer_pos++] = '\0';
2740   writer_->AddString(buffer.start());
2741 }
2742
2743
2744 void HeapSnapshotJSONSerializer::SerializeNodes() {
2745   List<HeapEntry>& entries = snapshot_->entries();
2746   for (int i = 0; i < entries.length(); ++i) {
2747     SerializeNode(&entries[i]);
2748     if (writer_->aborted()) return;
2749   }
2750 }
2751
2752
2753 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2754   writer_->AddString("\"title\":\"");
2755   writer_->AddString(snapshot_->title());
2756   writer_->AddString("\"");
2757   writer_->AddString(",\"uid\":");
2758   writer_->AddNumber(snapshot_->uid());
2759   writer_->AddString(",\"meta\":");
2760   // The object describing node serialization layout.
2761   // We use a set of macros to improve readability.
2762 #define JSON_A(s) "[" s "]"
2763 #define JSON_O(s) "{" s "}"
2764 #define JSON_S(s) "\"" s "\""
2765   writer_->AddString(JSON_O(
2766     JSON_S("node_fields") ":" JSON_A(
2767         JSON_S("type") ","
2768         JSON_S("name") ","
2769         JSON_S("id") ","
2770         JSON_S("self_size") ","
2771         JSON_S("edge_count")) ","
2772     JSON_S("node_types") ":" JSON_A(
2773         JSON_A(
2774             JSON_S("hidden") ","
2775             JSON_S("array") ","
2776             JSON_S("string") ","
2777             JSON_S("object") ","
2778             JSON_S("code") ","
2779             JSON_S("closure") ","
2780             JSON_S("regexp") ","
2781             JSON_S("number") ","
2782             JSON_S("native") ","
2783             JSON_S("synthetic") ","
2784             JSON_S("concatenated string") ","
2785             JSON_S("sliced string")) ","
2786         JSON_S("string") ","
2787         JSON_S("number") ","
2788         JSON_S("number") ","
2789         JSON_S("number") ","
2790         JSON_S("number") ","
2791         JSON_S("number")) ","
2792     JSON_S("edge_fields") ":" JSON_A(
2793         JSON_S("type") ","
2794         JSON_S("name_or_index") ","
2795         JSON_S("to_node")) ","
2796     JSON_S("edge_types") ":" JSON_A(
2797         JSON_A(
2798             JSON_S("context") ","
2799             JSON_S("element") ","
2800             JSON_S("property") ","
2801             JSON_S("internal") ","
2802             JSON_S("hidden") ","
2803             JSON_S("shortcut") ","
2804             JSON_S("weak")) ","
2805         JSON_S("string_or_number") ","
2806         JSON_S("node")) ","
2807     JSON_S("trace_function_info_fields") ":" JSON_A(
2808         JSON_S("function_id") ","
2809         JSON_S("name") ","
2810         JSON_S("script_name") ","
2811         JSON_S("script_id") ","
2812         JSON_S("line") ","
2813         JSON_S("column")) ","
2814     JSON_S("trace_node_fields") ":" JSON_A(
2815         JSON_S("id") ","
2816         JSON_S("function_id") ","
2817         JSON_S("count") ","
2818         JSON_S("size") ","
2819         JSON_S("children"))));
2820 #undef JSON_S
2821 #undef JSON_O
2822 #undef JSON_A
2823   writer_->AddString(",\"node_count\":");
2824   writer_->AddNumber(snapshot_->entries().length());
2825   writer_->AddString(",\"edge_count\":");
2826   writer_->AddNumber(snapshot_->edges().length());
2827   writer_->AddString(",\"trace_function_count\":");
2828   uint32_t count = 0;
2829   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2830   if (tracker) {
2831     count = tracker->id_to_function_info()->occupancy();
2832   }
2833   writer_->AddNumber(count);
2834 }
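
// Consumers decode the flat arrays positionally using the meta object above:
// with kNodeFieldsCount == 5, node i occupies nodes[5*i .. 5*i+4], so its
// self_size, for example, sits at nodes[5*i + 3] because "self_size" is the
// fourth entry of node_fields. Edge records are decoded the same way with
// kEdgeFieldsCount == 3.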
2835
2836
2837 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2838   static const char hex_chars[] = "0123456789ABCDEF";
2839   w->AddString("\\u");
2840   w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2841   w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2842   w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2843   w->AddCharacter(hex_chars[u & 0xf]);
2844 }
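
// Example: WriteUChar(writer_, 0x1F) emits the six characters "\u001F".
// SerializeString() below relies on this for characters that have no
// dedicated JSON escape sequence.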
2845
2846
2847 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2848   AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2849   if (!tracker) return;
2850   AllocationTraceTree* traces = tracker->trace_tree();
2851   SerializeTraceNode(traces->root());
2852 }
2853
2854
void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
  const int kBufferSize =
      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 4 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  buffer_pos = utoa(node->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->function_id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer[buffer_pos++] = '[';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());

  Vector<AllocationTraceNode*> children = node->children();
  for (int i = 0; i < children.length(); i++) {
    if (i > 0) {
      writer_->AddCharacter(',');
    }
    SerializeTraceNode(children[i]);
  }
  writer_->AddCharacter(']');
}


// 0-based position is converted to 1-based during the serialization.
static int SerializePosition(int position, const Vector<char>& buffer,
                             int buffer_pos) {
  if (position == -1) {
    buffer[buffer_pos++] = '0';
  } else {
    ASSERT(position >= 0);
    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
  }
  return buffer_pos;
}


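// Serializes one record per function known to the allocation tracker,
// matching the "trace_function_info_fields" meta description:
//   function_id,name,script_name,script_id,line,column
// name and script_name are string table indices; line and column are 1-based,
// with 0 standing for an unknown position.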
void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
  const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  HashMap* id_to_function_info = tracker->id_to_function_info();
  bool first_entry = true;
  for (HashMap::Entry* p = id_to_function_info->Start();
       p != NULL;
       p = id_to_function_info->Next(p)) {
    SnapshotObjectId id =
        static_cast<SnapshotObjectId>(reinterpret_cast<intptr_t>(p->key));
    AllocationTracker::FunctionInfo* info =
        reinterpret_cast<AllocationTracker::FunctionInfo* >(p->value);
    int buffer_pos = 0;
    if (first_entry) {
      first_entry = false;
    } else {
      buffer[buffer_pos++] = ',';
    }
    buffer_pos = utoa(id, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // The cast is safe because script id is a non-negative Smi.
    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
        buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}


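// Writes |s| as a JSON string literal on its own line. Characters with a
// dedicated escape use it, other control characters become \uXXXX, printable
// ASCII passes through unchanged, and multi-byte UTF-8 sequences are decoded
// and re-emitted as \u escapes ('?' if the sequence is invalid).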
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}


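// Emits the string table in string-id order. Ids start at 1, so a "<dummy>"
// placeholder is written for the unused index 0.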
void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}


} }  // namespace v8::internal