// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap-snapshot-generator-inl.h"

#include "src/allocation-tracker.h"
#include "src/code-stubs.h"
#include "src/conversions.h"
#include "src/debug.h"
#include "src/heap-profiler.h"
#include "src/types.h"
namespace v8 {
namespace internal {


HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      name_(name) {
  DCHECK(type == kContextVariable || type == kProperty ||
         type == kInternal || type == kShortcut || type == kWeak);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : bit_field_(TypeField::encode(type) | FromIndexField::encode(from)),
      to_index_(to),
      index_(index) {
  DCHECK(type == kElement || type == kHidden);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}

const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     size_t self_size,
                     unsigned trace_node_id)
    : type_(type),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      snapshot_(snapshot),
      name_(name),
      id_(id),
      trace_node_id_(trace_node_id) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
}

void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_ASSERT(sizeof(unsigned) == sizeof(id()));
  base::OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ", self_size(), id(), indent,
                  ' ', prefix, edge_name);
  if (type() != kString) {
    base::OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    base::OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        base::OS::Print("%c", *c);
      else
        base::OS::Print("\\n");
      ++c;
    }
    base::OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        edge_name = edge.name();
        break;
      default:
        SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}

const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    case kConsString: return "/concatenated string/";
    case kSlicedString: return "/sliced string/";
    case kSymbol: return "/symbol/";
    case kSimdValue: return "/simd/";
    default: return "???";
  }
}

// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 28;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 40;
};

}  // namespace

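// Rough accounting behind the constants above (a hedged sketch; the
// exact layout is compiler-dependent): on 32-bit targets a
// HeapGraphEdge is bit_field_ (4 bytes) plus two 4-byte unions, i.e.
// 12 bytes, while on 64-bit targets the pointer-sized union members
// grow to 8 bytes each, giving 24. The STATIC_ASSERTs in the
// HeapSnapshot constructor below compare sizeof() against these
// constants so an accidentally added member is caught at compile
// time; snapshots routinely hold millions of these structs.
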
HeapSnapshot::HeapSnapshot(HeapProfiler* profiler)
    : profiler_(profiler),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_ASSERT(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_ASSERT(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
  USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}

void HeapSnapshot::Delete() {
  profiler_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
}


void HeapSnapshot::AddSyntheticRootEntries() {
  AddRootEntry();
  AddGcRootsEntry();
  SnapshotObjectId id = HeapObjectsMap::kGcRootsFirstSubrootId;
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    AddGcSubrootEntry(tag, id);
    id += HeapObjectsMap::kObjectIdStep;
  }
  DCHECK(HeapObjectsMap::kFirstAvailableObjectId == id);
}

HeapEntry* HeapSnapshot::AddRootEntry() {
  DCHECK(root_index_ == HeapEntry::kNoEntry);
  DCHECK(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0, 0);
  root_index_ = entry->index();
  DCHECK(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  DCHECK(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0, 0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag, SnapshotObjectId id) {
  DCHECK(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  DCHECK(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
                              VisitorSynchronization::kTagNames[tag], id, 0, 0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}

HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  size_t size,
                                  unsigned trace_node_id) {
  HeapEntry entry(this, type, name, id, size, trace_node_id);
  entries_.Add(entry);
  return &entries_.last();
}


void HeapSnapshot::FillChildren() {
  DCHECK(children().is_empty());
  children().Allocate(edges().length());
  int children_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
  }
  DCHECK(edges().length() == children_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
  }
}

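// A worked example of the two-phase fill above (illustrative numbers,
// not taken from a real snapshot): suppose entries E0 and E1 have two
// and one outgoing edges respectively. The first loop hands E0
// children_index 0 and E1 children_index 2, reserving slices [0, 2)
// and [2, 3) of the flat children() vector. The second loop then
// resolves every edge's to_index_ into a direct HeapEntry* and files
// the edge pointer into its from()-entry's reserved slice.
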
class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}

template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort<int (*)(HeapEntry* const*, HeapEntry* const*)>(
        SortByIds);
  }
  return &sorted_entries_;
}

void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      sizeof(*this) +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(sorted_entries_);
}

// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;

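// A hedged worked example of the resulting ID layout (assuming
// kObjectIdStep == 2 and ten sync tags; both values are illustrative):
//
//   kInternalRootObjectId   = 1
//   kGcRootsObjectId        = 1 + 2      = 3
//   kGcRootsFirstSubrootId  = 3 + 2      = 5
//   kFirstAvailableObjectId = 5 + 10 * 2 = 25
//
// Ordinary heap objects then receive odd IDs starting at
// kFirstAvailableObjectId, so they can never collide with the
// synthetic roots or with the even IDs produced for embedder objects.
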
static bool AddressesMatch(void* key1, void* key2) {
  return key1 == key2;
}


HeapObjectsMap::HeapObjectsMap(Heap* heap)
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch),
      heap_(heap) {
  // This dummy element solves a problem with entries_map_.
  // When we do a lookup in the HashMap we see no difference between two
  // cases: the map has an entry with NULL as the value, or the map has
  // created a new entry on the fly with NULL as the default value.
  // With this dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}

bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
  DCHECK(to != NULL);
  DCHECK(from != NULL);
  if (from == to) return false;
  void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
  if (from_value == NULL) {
    // It may occur that some untracked object moves to an address X and there
    // is a tracked object at that address. In this case we should remove the
    // entry as we know that the object has died.
    void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
    if (to_value != NULL) {
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
  } else {
    HashMap::Entry* to_entry =
        entries_map_.LookupOrInsert(to, ComputePointerHash(to));
    if (to_entry->value != NULL) {
      // We found an existing entry with the to address for an old object.
      // Without this operation we would have two EntryInfos with the same
      // value in the addr field. That is bad, because later in
      // RemoveDeadEntries one of those entries would be removed together
      // with the corresponding entries_map_ entry.
      int to_entry_info_index =
          static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
      entries_.at(to_entry_info_index).addr = NULL;
    }
    int from_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(from_value));
    entries_.at(from_entry_info_index).addr = to;
    // The size of an object can change during its lifetime, so to keep
    // information about the object in entries_ consistent, we have to
    // adjust the size when the object is migrated.
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Move object from %p to %p old size %6d new size %6d\n",
             from,
             to,
             entries_.at(from_entry_info_index).size,
             object_size);
    }
    entries_.at(from_entry_info_index).size = object_size;
    to_entry->value = from_value;
  }
  return from_value != NULL;
}

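// An illustrative scenario for the code above (addresses are made up):
// objects A@0x1000 and B@0x2000 are both tracked, B dies, and the GC
// moves A to 0x2000. MoveObject(0x1000, 0x2000) removes the 0x1000
// mapping, finds B's stale entry at 0x2000 and clears its addr so
// RemoveDeadEntries can reap it, then repoints A's EntryInfo at 0x2000.
// A keeps its SnapshotObjectId across the move, which is what lets
// consecutive snapshots identify "the same" object.
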
void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
  FindOrAddEntry(addr, size, false);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr));
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size,
                                                bool accessed) {
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry =
      entries_map_.LookupOrInsert(addr, ComputePointerHash(addr));
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = accessed;
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object size : %p with old size %d and new size %d\n",
             addr,
             entry_info.size,
             size);
    }
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size, accessed));
  DCHECK(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}

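// A minimal usage sketch (hedged; `addr` stands for some live object
// address, not a real value):
//
//   HeapObjectsMap map(heap);
//   SnapshotObjectId id1 = map.FindOrAddEntry(addr, 64);
//   SnapshotObjectId id2 = map.FindOrAddEntry(addr, 72);  // size updated
//   DCHECK(id1 == id2);  // the ID is stable while the address is live
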
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
  heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                           "HeapObjectsMap::UpdateHeapObjectsMap");
  HeapIterator iterator(heap_);
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
    if (FLAG_heap_profiler_trace_objects) {
      PrintF("Update object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    }
  }
  RemoveDeadEntries();
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
           entries_map_.occupancy());
  }
}

struct HeapObjectInfo {
  HeapObjectInfo(HeapObject* obj, int expected_size)
      : obj(obj),
        expected_size(expected_size) {
  }

  HeapObject* obj;
  int expected_size;

  bool IsValid() const { return expected_size == obj->Size(); }

  void Print() const {
    if (expected_size == 0) {
      PrintF("Untracked object : %p %6d. Next address is %p\n",
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else if (obj->Size() != expected_size) {
      PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
             expected_size,
             obj->address(),
             obj->Size(),
             obj->address() + obj->Size());
    } else {
      PrintF("Good object : %p %6d. Next address is %p\n",
             obj->address(),
             expected_size,
             obj->address() + obj->Size());
    }
  }
};


static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
  if (a->obj < b->obj) return -1;
  if (a->obj > b->obj) return 1;
  return 0;
}

int HeapObjectsMap::FindUntrackedObjects() {
  List<HeapObjectInfo> heap_objects(1000);

  HeapIterator iterator(heap_);
  int untracked = 0;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    HashMap::Entry* entry =
        entries_map_.Lookup(obj->address(), ComputePointerHash(obj->address()));
    if (entry == NULL) {
      ++untracked;
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj, 0));
      }
    } else {
      int entry_index = static_cast<int>(
          reinterpret_cast<intptr_t>(entry->value));
      EntryInfo& entry_info = entries_.at(entry_index);
      if (FLAG_heap_profiler_trace_objects) {
        heap_objects.Add(HeapObjectInfo(obj,
                         static_cast<int>(entry_info.size)));
        if (obj->Size() != static_cast<int>(entry_info.size))
          ++untracked;
      } else {
        CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
      }
    }
  }
  if (FLAG_heap_profiler_trace_objects) {
    PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
           entries_map_.occupancy());
    heap_objects.Sort(comparator);
    int last_printed_object = -1;
    bool print_next_object = false;
    for (int i = 0; i < heap_objects.length(); ++i) {
      const HeapObjectInfo& object_info = heap_objects[i];
      if (!object_info.IsValid()) {
        ++untracked;
        if (last_printed_object != i - 1) {
          if (i > 0) {
            PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
            heap_objects[i - 1].Print();
          }
        }
        object_info.Print();
        last_printed_object = i;
        print_next_object = true;
      } else if (print_next_object) {
        object_info.Print();
        print_next_object = false;
        last_printed_object = i;
      }
    }
    if (last_printed_object < heap_objects.length() - 1) {
      PrintF("Last %d objects were skipped\n",
             heap_objects.length() - 1 - last_printed_object);
    }
    PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
           entries_map_.occupancy());
  }
  return untracked;
}

SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream,
                                                      int64_t* timestamp_us) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int preferred_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  DCHECK(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          static_cast<uint32_t>(time_interval_index),
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= preferred_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return last_assigned_id();
        stats_buffer.Clear();
      }
    }
  }
  DCHECK(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return last_assigned_id();
  }
  stream->EndOfStream();
  if (timestamp_us) {
    *timestamp_us = (time_intervals_.last().timestamp -
                     time_intervals_[0].timestamp).InMicroseconds();
  }
  return last_assigned_id();
}

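// A minimal consumer sketch for the streaming interface above (hedged:
// the class name and the printing are illustrative, not part of V8):
//
//   class StatsPrinter : public v8::OutputStream {
//    public:
//     void EndOfStream() {}
//     WriteResult WriteAsciiChunk(char* data, int size) {
//       return kContinue;  // not used by stats streaming
//     }
//     WriteResult WriteHeapStatsChunk(v8::HeapStatsUpdate* data, int count) {
//       for (int i = 0; i < count; ++i) {
//         printf("interval %u: %u objects, %u bytes\n",
//                data[i].index, data[i].count, data[i].size);
//       }
//       return kContinue;  // kAbort would stop the push early
//     }
//   };
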
void HeapObjectsMap::RemoveDeadEntries() {
  DCHECK(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, ComputePointerHash(entry_info.addr));
      DCHECK(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr,
                            ComputePointerHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  DCHECK(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}

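// Compaction sketch (illustrative): with entries_ = [dummy, A(accessed),
// B(dead), C(accessed)], the loop above rewrites the list to
// [dummy, A, C], repoints the entries_map_ values of A and C to
// indexes 1 and 2, removes B's address mapping entirely, and clears
// every accessed flag so the next UpdateHeapObjectsMap() round starts
// from a clean slate.
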
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= StringHasher::HashSequentialString(label,
                                           static_cast<int>(strlen(label)),
                                           heap_->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}


size_t HeapObjectsMap::GetUsedMemorySize() const {
  return
      sizeof(*this) +
      sizeof(HashMap::Entry) * entries_map_.capacity() +
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(time_intervals_);
}

HeapEntriesMap::HeapEntriesMap()
    : entries_(HashMap::PointersMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing));
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.LookupOrInsert(thing, Hash(thing));
  DCHECK(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HashMap::PointersMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object)) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object));
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.LookupOrInsert(object, HeapEntriesMap::Hash(object));
  cache_entry->value = const_cast<char*>(tag);
}

V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress,
    v8::HeapProfiler::ObjectNameResolver* resolver)
    : heap_(snapshot->profiler()->heap_object_map()->heap()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      heap_object_map_(snapshot_->profiler()->heap_object_map()),
      progress_(progress),
      filler_(NULL),
      global_object_name_resolver_(resolver) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}

HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        names_->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    names_->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = names_->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = names_->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    String* string = String::cast(object);
    if (string->IsConsString())
      return AddEntry(object,
                      HeapEntry::kConsString,
                      "(concatenated string)");
    if (string->IsSlicedString())
      return AddEntry(object,
                      HeapEntry::kSlicedString,
                      "(sliced string)");
    return AddEntry(object,
                    HeapEntry::kString,
                    names_->GetName(String::cast(object)));
  } else if (object->IsSymbol()) {
    return AddEntry(object, HeapEntry::kSymbol, "symbol");
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    names_->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? names_->GetName(String::cast(name))
                        : "");
  } else if (object->IsNativeContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kObject, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  } else if (object->IsFloat32x4()) {
    return AddEntry(object, HeapEntry::kSimdValue, "simd");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}

HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  return AddEntry(object->address(), type, name, object->Size());
}


HeapEntry* V8HeapExplorer::AddEntry(Address address,
                                    HeapEntry::Type type,
                                    const char* name,
                                    size_t size) {
  SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
      address, static_cast<unsigned int>(size));
  unsigned trace_node_id = 0;
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    trace_node_id =
        allocation_tracker->address_to_trace()->GetTraceNodeId(address);
  }
  return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
}

class SnapshotFiller {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        names_(snapshot->profiler()->names()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        names_->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapEntriesMap* entries_;
};

const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE:
      switch (Map::cast(object)->instance_type()) {
#define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
        case instance_type: return "system / Map (" #Name ")";
      STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
#undef MAKE_STRING_MAP_CASE
        default: return "system / Map";
      }
    case CELL_TYPE: return "system / Cell";
    case PROPERTY_CELL_TYPE: return "system / PropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}

int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}

class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(0) {
  }
  void VisitCodeEntry(Address entry_address) {
    Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
    generator_->SetInternalReference(parent_obj_, parent_, "code", code);
    generator_->TagCodeObject(code);
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      ++next_index_;
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    DCHECK(Memory::Object_at(field)->IsHeapObject());
    intptr_t p = reinterpret_cast<intptr_t>(Memory::Object_at(field));
    DCHECK(!IsMarked(p));
    intptr_t p_tagged = p | kTag;
    Memory::Object_at(field) = reinterpret_cast<Object*>(p_tagged);
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    intptr_t p = reinterpret_cast<intptr_t>(*field);
    if (IsMarked(p)) {
      intptr_t p_untagged = (p & ~kTaggingMask) | kHeapObjectTag;
      *field = reinterpret_cast<Object*>(p_untagged);
      DCHECK((*field)->IsHeapObject());
      return true;
    }
    return false;
  }

  static const intptr_t kTaggingMask = 3;
  static const intptr_t kTag = 3;

  static bool IsMarked(intptr_t p) { return (p & kTaggingMask) == kTag; }

  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};

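// A hedged note on the tagging trick above: V8 heap pointers already
// carry kHeapObjectTag (low bits 01), so a value with both low bits
// set (kTag == 3) cannot be a valid field. MarkVisitedField computes
//
//   p_tagged = p | 3
//
// and CheckVisitedAndUnmark restores the original pointer with
//
//   p_untagged = (p_tagged & ~3) | kHeapObjectTag
//
// Any field still untagged when VisitPointers runs was skipped by the
// typed extractors and is therefore reported as a hidden reference.
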
bool V8HeapExplorer::ExtractReferencesPass1(int entry, HeapObject* obj) {
  if (obj->IsFixedArray()) return false;  // FixedArrays are processed on pass 2

  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
  } else if (obj->IsJSArrayBuffer()) {
    ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
  } else if (obj->IsJSObject()) {
    if (obj->IsJSWeakSet()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakSet::cast(obj));
    } else if (obj->IsJSWeakMap()) {
      ExtractJSWeakCollectionReferences(entry, JSWeakMap::cast(obj));
    } else if (obj->IsJSSet()) {
      ExtractJSCollectionReferences(entry, JSSet::cast(obj));
    } else if (obj->IsJSMap()) {
      ExtractJSCollectionReferences(entry, JSMap::cast(obj));
    } else {
      ExtractJSObjectReferences(entry, JSObject::cast(obj));
    }
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
  } else if (obj->IsSymbol()) {
    ExtractSymbolReferences(entry, Symbol::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsAccessorInfo()) {
    ExtractAccessorInfoReferences(entry, AccessorInfo::cast(obj));
  } else if (obj->IsAccessorPair()) {
    ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsBox()) {
    ExtractBoxReferences(entry, Box::cast(obj));
  } else if (obj->IsCell()) {
    ExtractCellReferences(entry, Cell::cast(obj));
  } else if (obj->IsPropertyCell()) {
    ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
  } else if (obj->IsAllocationSite()) {
    ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
  }
  return true;
}

bool V8HeapExplorer::ExtractReferencesPass2(int entry, HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;

  if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else {
    ExtractFixedArrayReferences(entry, FixedArray::cast(obj));
  }
  return true;
}

void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  PrototypeIterator iter(heap_->isolate(), js_obj);
  SetPropertyReference(obj, entry, heap_->proto_string(), iter.GetCurrent());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset
                  == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_ASSERT(JSFunction::kNextFunctionLinkOffset + kPointerSize
                  == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_proxy", global_obj->global_proxy(),
                         GlobalObject::kGlobalProxyOffset);
    STATIC_ASSERT(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                  3 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}

void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}


void V8HeapExplorer::ExtractSymbolReferences(int entry, Symbol* symbol) {
  SetInternalReference(symbol, entry,
                       "name", symbol->name(),
                       Symbol::kNameOffset);
}


void V8HeapExplorer::ExtractJSCollectionReferences(int entry,
                                                   JSCollection* collection) {
  SetInternalReference(collection, entry, "table", collection->table(),
                       JSCollection::kTableOffset);
}


void V8HeapExplorer::ExtractJSWeakCollectionReferences(
    int entry, JSWeakCollection* collection) {
  MarkAsWeakContainer(collection->table());
  SetInternalReference(collection, entry,
                       "table", collection->table(),
                       JSWeakCollection::kTableOffset);
}

void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT || \
      Context::index == Context::MAP_CACHE_INDEX) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    STATIC_ASSERT(Context::OPTIMIZED_FUNCTIONS_LIST ==
                  Context::FIRST_WEAK_SLOT);
    STATIC_ASSERT(Context::NEXT_CONTEXT_LINK + 1 ==
                  Context::NATIVE_CONTEXT_SLOTS);
    STATIC_ASSERT(Context::FIRST_WEAK_SLOT + 4 ==
                  Context::NATIVE_CONTEXT_SLOTS);
  }
}

void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  Object* raw_transitions_or_prototype_info = map->raw_transitions();
  if (TransitionArray::IsFullTransitionArray(
          raw_transitions_or_prototype_info)) {
    TransitionArray* transitions =
        TransitionArray::cast(raw_transitions_or_prototype_info);
    int transitions_entry = GetEntry(transitions)->index();

    if (map->CanTransition()) {
      if (transitions->HasPrototypeTransitions()) {
        FixedArray* prototype_transitions =
            transitions->GetPrototypeTransitions();
        MarkAsWeakContainer(prototype_transitions);
        TagObject(prototype_transitions, "(prototype transitions)");
        SetInternalReference(transitions, transitions_entry,
                             "prototype_transitions", prototype_transitions);
      }
      // TODO(alph): transitions keys are strong links.
      MarkAsWeakContainer(transitions);
    }

    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry, "transitions", transitions,
                         Map::kTransitionsOrPrototypeInfoOffset);
  } else if (TransitionArray::IsSimpleTransition(
                 raw_transitions_or_prototype_info)) {
    TagObject(raw_transitions_or_prototype_info, "(transition)");
    SetInternalReference(map, entry, "transition",
                         raw_transitions_or_prototype_info,
                         Map::kTransitionsOrPrototypeInfoOffset);
  } else if (map->is_prototype_map()) {
    TagObject(raw_transitions_or_prototype_info, "prototype_info");
    SetInternalReference(map, entry, "prototype_info",
                         raw_transitions_or_prototype_info,
                         Map::kTransitionsOrPrototypeInfoOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);
  MarkAsWeakContainer(map->code_cache());
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  Object* constructor_or_backpointer = map->constructor_or_backpointer();
  if (constructor_or_backpointer->IsMap()) {
    TagObject(constructor_or_backpointer, "(back pointer)");
    SetInternalReference(map, entry, "back_pointer",
                         constructor_or_backpointer,
                         Map::kConstructorOrBackPointerOffset);
  } else {
    SetInternalReference(map, entry, "constructor",
                         constructor_or_backpointer,
                         Map::kConstructorOrBackPointerOffset);
  }
  TagObject(map->dependent_code(), "(dependent code)");
  MarkAsWeakContainer(map->dependent_code());
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}

void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetInternalReference(obj, entry,
                       "feedback_vector", shared->feedback_vector(),
                       SharedFunctionInfo::kFeedbackVectorOffset);
}

void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}

void V8HeapExplorer::ExtractAccessorInfoReferences(
    int entry, AccessorInfo* accessor_info) {
  SetInternalReference(accessor_info, entry, "name", accessor_info->name(),
                       AccessorInfo::kNameOffset);
  SetInternalReference(accessor_info, entry, "expected_receiver_type",
                       accessor_info->expected_receiver_type(),
                       AccessorInfo::kExpectedReceiverTypeOffset);
  if (accessor_info->IsExecutableAccessorInfo()) {
    ExecutableAccessorInfo* executable_accessor_info =
        ExecutableAccessorInfo::cast(accessor_info);
    SetInternalReference(executable_accessor_info, entry, "getter",
                         executable_accessor_info->getter(),
                         ExecutableAccessorInfo::kGetterOffset);
    SetInternalReference(executable_accessor_info, entry, "setter",
                         executable_accessor_info->setter(),
                         ExecutableAccessorInfo::kSetterOffset);
    SetInternalReference(executable_accessor_info, entry, "data",
                         executable_accessor_info->data(),
                         ExecutableAccessorInfo::kDataOffset);
  }
}


void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}


void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}

void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}


void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
        "(%s code)", CodeStub::MajorName(
            CodeStub::GetMajorKey(code), true)));
  }
}


void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}

void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
}


void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}


void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(),
                       PropertyCell::kValueOffset);
  MarkAsWeakContainer(cell->dependent_code());
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}


void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  MarkAsWeakContainer(site->dependent_code());
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next, as it is not visited by the StaticVisitor,
  // and we are not very interested in the weak_next field here.
  STATIC_ASSERT(AllocationSite::kWeakNextOffset >=
                AllocationSite::BodyDescriptor::kEndOffset);
}

class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 public:
  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
      : size_(size)
      , explorer_(explorer) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
    return explorer_->AddEntry(
        static_cast<Address>(ptr),
        HeapEntry::kNative, "system / JSArrayBufferData", size_);
  }
 private:
  size_t size_;
  V8HeapExplorer* explorer_;
};

void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  // Set up a reference to a native memory backing_store object.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}

void V8HeapExplorer::ExtractFixedArrayReferences(int entry, FixedArray* array) {
  bool is_weak = weak_containers_.Contains(array);
  for (int i = 0, l = array->length(); i < l; ++i) {
    if (is_weak) {
      SetWeakReference(array, entry,
                       i, array->get(i), array->OffsetOfElementAt(i));
    } else {
      SetInternalReference(array, entry,
                           i, array->get(i), array->OffsetOfElementAt(i));
    }
  }
}

void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}

void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      PropertyDetails details = descs->GetDetails(i);
      switch (details.location()) {
        case kField: {
          Representation r = details.representation();
          if (r.IsSmi() || r.IsDouble()) break;

          Name* k = descs->GetKey(i);
          FieldIndex field_index = FieldIndex::ForDescriptor(js_obj->map(), i);
          Object* value = js_obj->RawFastPropertyAt(field_index);
          int field_offset =
              field_index.is_inobject() ? field_index.offset() : -1;

          if (k != heap_->hidden_string()) {
            SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry, k,
                                               value, NULL, field_offset);
          } else {
            TagObject(value, "(hidden properties)");
            SetInternalReference(js_obj, entry, "hidden_properties", value,
                                 field_offset);
          }
          break;
        }
        case kDescriptor:
          SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                             descs->GetKey(i),
                                             descs->GetValue(i));
          break;
      }
    }
  } else if (js_obj->IsGlobalObject()) {
    // We assume that global objects can only have slow properties.
    GlobalDictionary* dictionary = js_obj->global_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        DCHECK(dictionary->ValueAt(i)->IsPropertyCell());
        PropertyCell* cell = PropertyCell::cast(dictionary->ValueAt(i));
        Object* value = cell->value();
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        PropertyDetails details = cell->property_details();
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* value = dictionary->ValueAt(i);
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
          continue;
        }
        PropertyDetails details = dictionary->DetailsAt(i);
        SetDataOrAccessorPropertyReference(details.kind(), js_obj, entry,
                                           Name::cast(k), value);
      }
    }
  }
}

void V8HeapExplorer::ExtractAccessorPairProperty(JSObject* js_obj, int entry,
                                                 Name* key,
                                                 Object* callback_obj,
                                                 int field_offset) {
  if (!callback_obj->IsAccessorPair()) return;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  SetPropertyReference(js_obj, entry, key, accessors, NULL, field_offset);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, key, getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, key, setter, "set %s");
  }
}

void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        DCHECK(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}

void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}

String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // TODO(verwaest): Try to get object.constructor.name in this case.
    // This requires handlification of the V8HeapExplorer.
  }
  return object->constructor_name();
}


HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}

class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    DCHECK(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        DCHECK(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};

bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;

  // Create references to the synthetic roots.
  SetRootGcRootsReference();
  for (int tag = 0; tag < VisitorSynchronization::kNumberOfSyncTags; tag++) {
    SetGcRootsReference(static_cast<VisitorSynchronization::SyncTag>(tag));
  }

  // Make sure builtin code objects get their builtin tags
  // first. Otherwise a particular JSFunction object could set
  // its custom name to a generic builtin.
  RootsReferencesExtractor extractor(heap_);
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);

  // We have to do two passes as sometimes FixedArrays are used
  // to weakly hold their items, and it's impossible to distinguish
  // between these cases without processing the array owner first.
  bool interrupted =
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass1>() ||
      IterateAndExtractSinglePass<&V8HeapExplorer::ExtractReferencesPass2>();

  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  filler_ = NULL;
  return progress_->ProgressReport(true);
}

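
// For example (illustrative, not from the original source): pass 1 can call
// MarkAsWeakContainer() on a FixedArray that its owner holds weakly; when
// pass 2 later reaches that array, its element edges can be emitted as weak
// rather than strong. If the array were processed before its owner, there
// would be no way to tell the two cases apart.
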
template<V8HeapExplorer::ExtractReferencesMethod extractor>
bool V8HeapExplorer::IterateAndExtractSinglePass() {
  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (interrupted) continue;

    HeapEntry* heap_entry = GetEntry(obj);
    int entry = heap_entry->index();
    if ((this->*extractor)(entry, obj)) {
      SetInternalReference(obj, entry,
                           "map", obj->map(), HeapObject::kMapOffset);
      // Extract unvisited fields as hidden references and restore tags
      // of visited fields.
      IndexedReferencesExtractor refs_extractor(this, obj, entry);
      obj->Iterate(&refs_extractor);
    }

    if (!progress_->ProgressReport(false)) interrupted = true;
  }
  return interrupted;
}

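
// Edges to the canonical singletons filtered out below (empty arrays, filler
// maps, and similar) are suppressed: nearly every object references them, so
// recording those edges would bloat the snapshot without conveying any
// ownership information.
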
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject() && !object->IsOddball() &&
         object != heap_->empty_byte_array() &&
         object != heap_->empty_bytecode_array() &&
         object != heap_->empty_fixed_array() &&
         object != heap_->empty_descriptor_array() &&
         object != heap_->fixed_array_map() && object != heap_->cell_map() &&
         object != heap_->global_property_cell_map() &&
         object != heap_->shared_function_info_map() &&
         object != heap_->free_space_map() &&
         object != heap_->one_pointer_filler_map() &&
         object != heap_->two_pointer_filler_map();
}

void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               names_->GetName(reference_name),
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}


void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}


void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               names_->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}


void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      const char* reference_name,
                                      Object* child_obj,
                                      int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      int index,
                                      Object* child_obj,
                                      int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak,
                               parent_entry,
                               names_->GetFormatted("%d", index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}


void V8HeapExplorer::SetDataOrAccessorPropertyReference(
    PropertyKind kind, JSObject* parent_obj, int parent_entry,
    Name* reference_name, Object* child_obj, const char* name_format_string,
    int field_offset) {
  if (kind == kAccessor) {
    ExtractAccessorPairProperty(parent_obj, parent_entry, reference_name,
                                child_obj, field_offset);
  } else {
    SetPropertyReference(parent_obj, parent_entry, reference_name, child_obj,
                         name_format_string, field_offset);
  }
}


void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  DCHECK(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? names_->GetFormatted(
              name_format_string,
              String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
        names_->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}

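
// Example: for an accessor property "foo", ExtractAccessorPairProperty()
// above passes "get %s" / "set %s" as |name_format_string|, so the getter
// edge is labelled "get foo" and the setter edge "set foo"; plain data
// properties pass NULL and keep the property name unchanged.
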
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}


void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  DCHECK(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}


void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}


void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else if (is_weak) {
      filler_->SetNamedAutoIndexReference(
          HeapGraphEdge::kWeak,
          snapshot_->gc_subroot(tag)->index(),
          child_entry);
    } else {
      filler_->SetIndexedAutoIndexReference(
          HeapGraphEdge::kElement,
          snapshot_->gc_subroot(tag)->index(),
          child_entry);
    }

    // Add a shortcut to JS global object reference at snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object =
            heap_->isolate()->debug()->IsDebugGlobal(global);
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}

const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#define SYMBOL_NAME(name) NAME_ENTRY(name)
    PRIVATE_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#define SYMBOL_NAME(name, varname, description) NAME_ENTRY(name)
    PUBLIC_SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}

void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}


void V8HeapExplorer::MarkAsWeakContainer(Object* object) {
  if (IsEssentialObject(object) && object->IsFixedArray()) {
    weak_containers_.Insert(object);
  }
}


class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsNativeContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};

// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  Isolate* isolate = heap_->isolate();
  HandleScope scope(isolate);
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    if (global_object_name_resolver_) {
      HandleScope scope(isolate);
      Handle<JSGlobalObject> global_obj = enumerator.at(i);
      urls[i] = global_object_name_resolver_->GetName(
          Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
    } else {
      urls[i] = NULL;
    }
  }

  DisallowHeapAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}

class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }

 private:
  NativeObjectsExplorer* explorer_;
};


class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
      : snapshot_(snapshot),
        names_(snapshot_->profiler()->names()),
        heap_object_map_(snapshot_->profiler()->heap_object_map()),
        entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);

 private:
  HeapSnapshot* snapshot_;
  StringsStorage* names_;
  HeapObjectsMap* heap_object_map_;
  HeapEntry::Type entries_type_;
};

HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? names_->GetFormatted(
            "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
      : names_->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      heap_object_map_->GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0,
      0);
}

NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
      snapshot_(snapshot),
      names_(snapshot_->profiler()->names()),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}


NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}

int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}


void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = isolate_;
  const GCType major_gc_type = kGCTypeMarkSweepCompact;
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGCPrologueCallbacks(
      major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
    for (size_t j = 0; j < group->length; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}

void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = isolate_;
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    DCHECK(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children;
    for (size_t j = 0; j < group->length; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
  isolate->global_handles()->RemoveImplicitRefGroups();
}


List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry = objects_by_info_.LookupOrInsert(info, InfoHash(info));
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>(4);
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}

bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}

class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};


NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = names_->GetCopy(label);
  uint32_t hash = StringHasher::HashSequentialString(
      label_copy,
      static_cast<int>(strlen(label_copy)),
      isolate_->heap()->HashSeed());
  HashMap::Entry* entry =
      native_groups_.LookupOrInsert(const_cast<char*>(label_copy), hash);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}

void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  // |FindOrAddEntry| can move and resize the entries backing store. Reload
  // the potentially-stale pointer.
  child_entry = filler_->FindEntry(info);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}


void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  DCHECK(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  DCHECK(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}


void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry != NULL;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    DCHECK(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kElement,
        snapshot_->root()->index(),
        group_entry);
  }
}

void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = isolate_;
  v8::RetainedObjectInfo* info =
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}


HeapSnapshotGenerator::HeapSnapshotGenerator(
    HeapSnapshot* snapshot,
    v8::ActivityControl* control,
    v8::HeapProfiler::ObjectNameResolver* resolver,
    Heap* heap)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this, resolver),
      dom_explorer_(snapshot_, this),
      heap_(heap) {
}

bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  heap_->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef VERIFY_HEAP
  Heap* debug_heap = heap_;
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  SetProgressTotal(2);  // 2 passes.

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    debug_heap->Verify();
  }
#endif

  snapshot_->AddSyntheticRootEntries();

  if (!FillReferences()) return false;

  snapshot_->FillChildren();
  snapshot_->RememberLastJSObjectId();

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}

void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}


bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return
        control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}


void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}


bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}

template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};

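
// Worked example: the largest 4-byte unsigned value, 4294967295, has 10
// digits; with a sign, "-2147483648" needs 11 characters. For 8 bytes both
// 18446744073709551615 and "-9223372036854775808" fit in 20 characters.
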
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    DCHECK(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    DCHECK(c != '\0');
    DCHECK(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    DCHECK(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size =
          Min(chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      DCHECK(s_chunk_size > 0);
      MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void Finalize() {
    if (aborted_) return;
    DCHECK(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      DCHECK(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = SNPrintF(buffer, format, n);
      USE(result);
      DCHECK(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    DCHECK(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};

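
// Minimal usage sketch (illustrative; MyStream stands for any
// v8::OutputStream implementation and is not defined in this file):
//
//   MyStream stream;
//   OutputStreamWriter writer(&stream);
//   writer.AddString("\"nodes\":[");
//   writer.AddNumber(42u);
//   writer.Finalize();  // flush the last partial chunk, then EndOfStream()
//
// Data reaches the stream in chunks of stream->GetChunkSize() characters;
// if WriteAsciiChunk() ever returns kAbort, the writer sets aborted() and
// stops forwarding output.
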
// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// type, name, id, self_size, edge_count, trace_node_id.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
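
// Layout note: the serialized "nodes" and "edges" arrays are flat. Each node
// occupies kNodeFieldsCount consecutive numbers and each edge
// kEdgeFieldsCount. An edge's to_node field stores the offset of the target
// node within the flat array (entry_index() == HeapEntry::index() *
// kNodeFieldsCount), so consumers can index "nodes" directly without
// multiplying by the record size themselves.
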
void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  if (AllocationTracker* allocation_tracker =
      snapshot_->profiler()->allocation_tracker()) {
    allocation_tracker->PrepareForSerialization();
  }
  DCHECK(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);
  SerializeImpl();
  delete writer_;
  writer_ = NULL;
}


void HeapSnapshotJSONSerializer::SerializeImpl() {
  DCHECK(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"trace_function_infos\":[");
  SerializeTraceNodeInfos();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"trace_tree\":[");
  SerializeTraceTree();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"samples\":[");
  SerializeSamples();
  if (writer_->aborted()) return;
  writer_->AddString("],\n");

  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}

int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry =
      strings_.LookupOrInsert(const_cast<char*>(s), StringHash(s));
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}

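
// Ids handed out here start at 1; index 0 of the serialized "strings" array
// is filled with the "<dummy>" placeholder by SerializeStrings() below, so a
// string id can be used directly as an index into that array.
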
template<size_t size> struct ToUnsigned;

template<> struct ToUnsigned<4> {
  typedef uint32_t Type;
};

template<> struct ToUnsigned<8> {
  typedef uint64_t Type;
};


template<typename T>
static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
  STATIC_ASSERT(static_cast<T>(-1) > 0);  // Check that T is unsigned.
  int number_of_digits = 0;
  T t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = static_cast<int>(value % 10);
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}


template<typename T>
static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
  typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
  STATIC_ASSERT(sizeof(value) == sizeof(unsigned_value));
  return utoa_impl(unsigned_value, buffer, buffer_pos);
}

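
// Worked example: utoa(305u, buffer, 2) counts three digits, reserves
// buffer[2..4], fills them right to left ('5', '0', '3' from the least
// significant digit), and returns 5 -- the position just past the last
// digit. No '\0' is appended; callers add their own separators.
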
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0.
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}

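
// Example (illustrative values): a property edge ("property" is type 2 in
// the meta table below) whose name has string id 7 and whose target is the
// seventh node (flat offset 6 * kNodeFieldsCount == 36) is written as
// ",2,7,36\n"; only the very first edge omits the leading comma.
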
void HeapSnapshotJSONSerializer::SerializeEdges() {
  List<HeapGraphEdge*>& edges = snapshot_->children();
  for (int i = 0; i < edges.length(); ++i) {
    DCHECK(i == 0 ||
           edges[i - 1]->from()->index() <= edges[i]->from()->index());
    SerializeEdge(edges[i], i == 0);
    if (writer_->aborted()) return;
  }
}

void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
  // The buffer needs space for 5 unsigned ints, 1 size_t, 6 commas, \n and \0.
  static const int kBufferSize =
      5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned  // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}

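
// Example (illustrative values): an object node serializes as
// "3,21,847,56,4,0\n" -- type "object" (3), name string id 21, object id
// 847, self size 56 bytes, 4 outgoing edges, no allocation trace node. All
// rows after the first are prefixed with a comma.
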
void HeapSnapshotJSONSerializer::SerializeNodes() {
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    SerializeNode(&entries[i]);
    if (writer_->aborted()) return;
  }
}

void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("edge_count") ","
        JSON_S("trace_node_id")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic") ","
            JSON_S("concatenated string") ","
            JSON_S("sliced string")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node")) ","
    JSON_S("trace_function_info_fields") ":" JSON_A(
        JSON_S("function_id") ","
        JSON_S("name") ","
        JSON_S("script_name") ","
        JSON_S("script_id") ","
        JSON_S("line") ","
        JSON_S("column")) ","
    JSON_S("trace_node_fields") ":" JSON_A(
        JSON_S("id") ","
        JSON_S("function_info_index") ","
        JSON_S("count") ","
        JSON_S("size") ","
        JSON_S("children")) ","
    JSON_S("sample_fields") ":" JSON_A(
        JSON_S("timestamp_us") ","
        JSON_S("last_assigned_id"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
  writer_->AddString(",\"trace_function_count\":");
  uint32_t count = 0;
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (tracker) {
    count = tracker->function_info_list().length();
  }
  writer_->AddNumber(count);
}

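
// The emitted "snapshot" object looks roughly like this (illustrative,
// whitespace added):
//
//   "snapshot": {
//     "meta": {
//       "node_fields": ["type", "name", "id", "self_size", "edge_count",
//                       "trace_node_id"],
//       "edge_fields": ["type", "name_or_index", "to_node"],
//       ...
//     },
//     "node_count": 12345,
//     "edge_count": 67890,
//     "trace_function_count": 0
//   }
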
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}

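
// Example: WriteUChar(w, 0x2603) emits the six characters "\u2603"; each
// nibble of the 16-bit code unit becomes one uppercase hex digit, most
// significant first.
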
void HeapSnapshotJSONSerializer::SerializeTraceTree() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  AllocationTraceTree* traces = tracker->trace_tree();
  SerializeTraceNode(traces->root());
}


void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
  // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0.
  const int kBufferSize =
      4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 4 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  buffer_pos = utoa(node->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer[buffer_pos++] = '[';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());

  Vector<AllocationTraceNode*> children = node->children();
  for (int i = 0; i < children.length(); i++) {
    if (i > 0) {
      writer_->AddCharacter(',');
    }
    SerializeTraceNode(children[i]);
  }
  writer_->AddCharacter(']');
}

// 0-based position is converted to 1-based during the serialization.
static int SerializePosition(int position, const Vector<char>& buffer,
                             int buffer_pos) {
  if (position == -1) {
    buffer[buffer_pos++] = '0';
  } else {
    DCHECK(position >= 0);
    buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
  }
  return buffer_pos;
}

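
// Example: source position 0 (the first character of a script) is written
// out as "1", while the "no position" sentinel -1 is written as "0".
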
void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
  AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
  if (!tracker) return;
  // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0.
  const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned  // NOLINT
      + 6 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  const List<AllocationTracker::FunctionInfo*>& list =
      tracker->function_info_list();
  for (int i = 0; i < list.length(); i++) {
    AllocationTracker::FunctionInfo* info = list[i];
    int buffer_pos = 0;
    if (i > 0) {
      buffer[buffer_pos++] = ',';
    }
    buffer_pos = utoa(info->function_id, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    // The cast is safe because script id is a non-negative Smi.
    buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
        buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}

void HeapSnapshotJSONSerializer::SerializeSamples() {
  const List<HeapObjectsMap::TimeInterval>& samples =
      snapshot_->profiler()->heap_object_map()->samples();
  if (samples.is_empty()) return;
  base::TimeTicks start_time = samples[0].timestamp;
  // The buffer needs space for 2 unsigned ints, 2 commas, \n and \0.
  const int kBufferSize = MaxDecimalDigitsIn<sizeof(
                              base::TimeDelta().InMicroseconds())>::kUnsigned +
                          MaxDecimalDigitsIn<sizeof(samples[0].id)>::kUnsigned +
                          2 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  for (int i = 0; i < samples.length(); i++) {
    HeapObjectsMap::TimeInterval& sample = samples[i];
    int buffer_pos = 0;
    if (i > 0) {
      buffer[buffer_pos++] = ',';
    }
    base::TimeDelta time_delta = sample.timestamp - start_time;
    buffer_pos = utoa(time_delta.InMicroseconds(), buffer, buffer_pos);
    buffer[buffer_pos++] = ',';
    buffer_pos = utoa(sample.last_assigned_id(), buffer, buffer_pos);
    buffer[buffer_pos++] = '\n';
    buffer[buffer_pos++] = '\0';
    writer_->AddString(buffer.start());
  }
}

void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          size_t length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            DCHECK(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}

void HeapSnapshotJSONSerializer::SerializeStrings() {
  ScopedVector<const unsigned char*> sorted_strings(
      strings_.occupancy() + 1);
  for (HashMap::Entry* entry = strings_.Start();
       entry != NULL;
       entry = strings_.Next(entry)) {
    int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
    sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
  }
  writer_->AddString("\"<dummy>\"");
  for (int i = 1; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(sorted_strings[i]);
    if (writer_->aborted()) return;
  }
}

}  // namespace internal
}  // namespace v8