1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "heap-snapshot-generator-inl.h"
32 #include "allocation-tracker.h"
33 #include "code-stubs.h"
34 #include "heap-profiler.h"
// HeapGraphEdge member definitions: an edge of the heap snapshot graph.
// Two constructor overloads — one for named edges (context variable,
// property, internal, shortcut, weak) and one for indexed edges
// (element, hidden). `from`/`to` are entry indices into the snapshot,
// later resolved to pointers by ReplaceToIndexWithEntry().
// NOTE(review): original lines are elided here (numbering jumps);
// member-initializer lists and closing braces are not visible.
42 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
47 ASSERT(type == kContextVariable
55 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
60 ASSERT(type == kElement || type == kHidden);
// Converts the stored destination index into a direct HeapEntry pointer,
// valid once the snapshot's entries list is final (no more reallocations).
64 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
65 to_entry_ = &snapshot->entries()[to_index_];
// Sentinel index meaning "no entry assigned yet".
69 const int HeapEntry::kNoEntry = -1;
// HeapEntry constructor — stores snapshot back-pointer, type, name, id
// and self size. NOTE(review): most of the initializer list is elided
// from this view.
71 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
79 self_size_(self_size),
// Appends a named edge (this -> entry) to the snapshot's flat edge list.
85 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
88 HeapGraphEdge edge(type, name, this->index(), entry->index());
89 snapshot_->edges().Add(edge);
// Appends an indexed edge (this -> entry) to the snapshot's flat edge list.
94 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
97 HeapGraphEdge edge(type, index, this->index(), entry->index());
98 snapshot_->edges().Add(edge);
// Debug pretty-printer: dumps this entry (size, id, type, truncated name)
// and then recursively prints children up to max_depth, indenting two
// spaces per level. NOTE(review): interior lines (string-escaping loop
// body, `break`s between switch cases, default label) are elided here.
103 void HeapEntry::Print(
104 const char* prefix, const char* edge_name, int max_depth, int indent) {
105 STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
106 OS::Print("%6d @%6u %*c %s%s: ",
107 self_size(), id(), indent, ' ', prefix, edge_name);
108 if (type() != kString) {
// Non-string entries: print type tag and name clipped to 40 chars.
109 OS::Print("%s %.40s\n", TypeAsString(), name_);
// String entries: walk at most 40 characters of the name (loop body elided).
112 const char* c = name_;
113 while (*c && (c - name_) <= 40) {
122 if (--max_depth == 0) return;
123 Vector<HeapGraphEdge*> ch = children();
124 for (int i = 0; i < ch.length(); ++i) {
125 HeapGraphEdge& edge = *ch[i];
126 const char* edge_prefix = "";
// Scratch buffer used to render numeric edge labels (element/hidden).
127 EmbeddedVector<char, 64> index;
128 const char* edge_name = index.start();
// Pick per-edge prefix/label by edge type; named edges reuse edge.name(),
// indexed edges format their index into the scratch buffer.
129 switch (edge.type()) {
130 case HeapGraphEdge::kContextVariable:
132 edge_name = edge.name();
134 case HeapGraphEdge::kElement:
135 OS::SNPrintF(index, "%d", edge.index());
137 case HeapGraphEdge::kInternal:
139 edge_name = edge.name();
141 case HeapGraphEdge::kProperty:
142 edge_name = edge.name();
144 case HeapGraphEdge::kHidden:
146 OS::SNPrintF(index, "%d", edge.index());
148 case HeapGraphEdge::kShortcut:
150 edge_name = edge.name();
152 case HeapGraphEdge::kWeak:
154 edge_name = edge.name();
157 OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
// Recurse into the child with increased indentation.
159 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
// Maps the entry's Type enum to a human-readable tag used by Print().
// NOTE(review): the `switch (type())` opener is elided from this view.
164 const char* HeapEntry::TypeAsString() {
166 case kHidden: return "/hidden/";
167 case kObject: return "/object/";
168 case kClosure: return "/closure/";
169 case kString: return "/string/";
170 case kCode: return "/code/";
171 case kArray: return "/array/";
172 case kRegExp: return "/regexp/";
173 case kHeapNumber: return "/number/";
174 case kNative: return "/native/";
175 case kSynthetic: return "/synthetic/";
176 case kConsString: return "/concatenated string/";
177 case kSlicedString: return "/sliced string/";
178 default: return "???";
183 // It is very important to keep objects that form a heap snapshot
184 // as small as possible.
185 namespace { // Avoid littering the global namespace.
// Expected sizes of the two core snapshot structs, specialized on pointer
// width (4 = 32-bit, 8 = 64-bit). Checked via STATIC_CHECK in the
// HeapSnapshot constructor so accidental growth is caught at compile time.
187 template <size_t ptr_size> struct SnapshotSizeConstants;
189 template <> struct SnapshotSizeConstants<4> {
190 static const int kExpectedHeapGraphEdgeSize = 12;
191 static const int kExpectedHeapEntrySize = 24;
194 template <> struct SnapshotSizeConstants<8> {
195 static const int kExpectedHeapGraphEdgeSize = 24;
196 static const int kExpectedHeapEntrySize = 32;
// HeapSnapshot constructor: initializes all special-entry indexes to
// kNoEntry and statically verifies the struct-size budget declared in
// SnapshotSizeConstants. NOTE(review): parts of the initializer list and
// the STATIC_CHECK wrappers are elided from this view.
202 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
205 : profiler_(profiler),
208 root_index_(HeapEntry::kNoEntry),
209 gc_roots_index_(HeapEntry::kNoEntry),
210 natives_root_index_(HeapEntry::kNoEntry),
211 max_snapshot_js_object_id_(0) {
213 sizeof(HeapGraphEdge) ==
214 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
217 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
// USE() keeps both specializations referenced so the unused one does not
// trigger warnings.
218 USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
219 USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
220 USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
221 USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
// No GC-subroot entries exist yet; mark each slot empty.
222 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
223 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
// Unregisters this snapshot from its owning profiler.
// NOTE(review): any further teardown lines are elided from this view.
228 void HeapSnapshot::Delete() {
229 profiler_->RemoveSnapshot(this);
// Records the id high-water mark so objects allocated after the snapshot
// can be distinguished from ones captured in it.
234 void HeapSnapshot::RememberLastJSObjectId() {
235 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
// Creates the synthetic root entry; it must be entry 0 of the snapshot.
239 HeapEntry* HeapSnapshot::AddRootEntry() {
240 ASSERT(root_index_ == HeapEntry::kNoEntry);
241 ASSERT(entries_.is_empty()); // Root entry must be the first one.
242 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
244 HeapObjectsMap::kInternalRootObjectId,
246 root_index_ = entry->index();
247 ASSERT(root_index_ == 0);
// Creates the synthetic "(GC roots)" aggregate entry (at most once).
252 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
253 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
254 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
256 HeapObjectsMap::kGcRootsObjectId,
258 gc_roots_index_ = entry->index();
// Creates the synthetic entry for one GC subroot category; `tag` selects
// the VisitorSynchronization sync-tag name and the reserved object id.
263 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
264 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
265 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
266 HeapEntry* entry = AddEntry(
267 HeapEntry::kSynthetic,
268 VisitorSynchronization::kTagNames[tag],
269 HeapObjectsMap::GetNthGcSubrootId(tag),
271 gc_subroot_indexes_[tag] = entry->index();
// Appends a new entry to the snapshot's entries list and returns a pointer
// to it. The pointer is only stable until the list reallocates — callers
// generally keep the index instead.
276 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
280 HeapEntry entry(this, type, name, id, size);
282 return &entries_.last();
// Finalizes the edge layout: allocates the children_ array (one slot per
// edge), assigns each entry a contiguous children range, then resolves
// every edge's destination index to a pointer and slots the edge into its
// source entry's range. Must run after all entries/edges are added.
286 void HeapSnapshot::FillChildren() {
287 ASSERT(children().is_empty());
288 children().Allocate(edges().length());
289 int children_index = 0;
// First pass: hand out children-array offsets entry by entry.
290 for (int i = 0; i < entries().length(); ++i) {
291 HeapEntry* entry = &entries()[i];
292 children_index = entry->set_children_index(children_index);
294 ASSERT(edges().length() == children_index);
// Second pass: resolve to-indexes and attach edges to their sources.
295 for (int i = 0; i < edges().length(); ++i) {
296 HeapGraphEdge* edge = &edges()[i];
297 edge->ReplaceToIndexWithEntry(this);
298 edge->from()->add_child(edge);
// Three-way comparator functor used with SortedListBSearch to locate an
// entry by snapshot object id in the sorted entries list.
303 class FindEntryById {
305 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
306 int operator()(HeapEntry* const* entry) {
307 if ((*entry)->id() == id_) return 0;
308 return (*entry)->id() < id_ ? -1 : 1;
311 SnapshotObjectId id_;
// Looks up an entry by its id via binary search over the id-sorted list.
// NOTE(review): the not-found branch is elided from this view.
315 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
316 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
317 // Perform a binary search by id.
318 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
321 return entries_by_id->at(index);
// Generic three-way id comparator for sorting lists of entry pointers.
// NOTE(review): the `template <typename T>` header is elided from this view.
326 static int SortByIds(const T* entry1_ptr,
327 const T* entry2_ptr) {
328 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
329 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
// Lazily builds (and caches) a list of entry pointers sorted by id,
// used by GetEntryById's binary search.
333 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
334 if (sorted_entries_.is_empty()) {
335 sorted_entries_.Allocate(entries_.length());
336 for (int i = 0; i < entries_.length(); ++i) {
337 sorted_entries_[i] = &entries_[i];
339 sorted_entries_.Sort(SortByIds);
341 return &sorted_entries_;
// Debug helper: prints the whole snapshot tree from the root.
345 void HeapSnapshot::Print(int max_depth) {
346 root()->Print("", "", max_depth, 0);
// Approximates memory held by the snapshot: the four backing lists.
// NOTE(review): the leading `return` expression line is elided here.
350 size_t HeapSnapshot::RawSnapshotSize() const {
353 GetMemoryUsedByList(entries_) +
354 GetMemoryUsedByList(edges_) +
355 GetMemoryUsedByList(children_) +
356 GetMemoryUsedByList(sorted_entries_);
360 // We split IDs on evens for embedder objects (see
361 // HeapObjectsMap::GenerateId) and odds for native objects.
// Reserved low ids: root, GC-roots aggregate, then one id per GC subroot
// sync tag; real heap objects start at kFirstAvailableObjectId. Each
// reserved slot is kObjectIdStep apart, preserving the even/odd split.
362 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
363 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
364 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
365 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
366 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
367 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
368 HeapObjectsMap::kGcRootsFirstSubrootId +
369 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
// Key-equality predicate for the address-keyed hash map.
// NOTE(review): the comparison body is elided from this view.
372 static bool AddressesMatch(void* key1, void* key2) {
// HeapObjectsMap constructor: ids start at kFirstAvailableObjectId and the
// map is keyed by object address.
377 HeapObjectsMap::HeapObjectsMap(Heap* heap)
378 : next_id_(kFirstAvailableObjectId),
379 entries_map_(AddressesMatch),
381 // This dummy element solves a problem with entries_map_.
382 // When we do lookup in HashMap we see no difference between two cases:
383 // it has an entry with NULL as the value or it has created
384 // a new entry on the fly with NULL as the default value.
385 // With such dummy element we have a guaranty that all entries_map_ entries
386 // will have the value field grater than 0.
387 // This fact is using in MoveObject method.
388 entries_.Add(EntryInfo(0, NULL, 0));
// Updates the address->entry mapping when the GC moves an object from
// `from` to `to`, and refreshes the recorded size (objects can change size
// during their lifetime). Entries whose tracked object died at `to` are
// tombstoned by NULLing their addr field; RemoveDeadEntries reaps them.
392 void HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
394 ASSERT(from != NULL);
395 if (from == to) return;
396 void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
397 if (from_value == NULL) {
398 // It may occur that some untracked object moves to an address X and there
399 // is a tracked object at that address. In this case we should remove the
400 // entry as we know that the object has died.
401 void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
402 if (to_value != NULL) {
403 int to_entry_info_index =
404 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
// Tombstone: NULL addr marks the entry dead for RemoveDeadEntries.
405 entries_.at(to_entry_info_index).addr = NULL;
// Tracked object moved: (re)insert at the destination address.
408 HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
410 if (to_entry->value != NULL) {
411 // We found the existing entry with to address for an old object.
412 // Without this operation we will have two EntryInfo's with the same
413 // value in addr field. It is bad because later at RemoveDeadEntries
414 // one of this entry will be removed with the corresponding entries_map_
416 int to_entry_info_index =
417 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
418 entries_.at(to_entry_info_index).addr = NULL;
420 int from_entry_info_index =
421 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
422 entries_.at(from_entry_info_index).addr = to;
423 // Size of an object can change during its life, so to keep information
424 // about the object in entries_ consistent, we have to adjust size when the
425 // object is migrated.
426 if (FLAG_heap_profiler_trace_objects) {
427 PrintF("Move object from %p to %p old size %6d new size %6d\n",
430 entries_.at(from_entry_info_index).size,
433 entries_.at(from_entry_info_index).size = object_size;
// Re-point the hash-map slot at the surviving entry's index.
434 to_entry->value = from_value;
// Refreshes the recorded size of a tracked object (delegates to
// FindOrAddEntry with accessed=false so the entry is not marked live).
439 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
440 FindOrAddEntry(addr, size, false);
// Returns the snapshot id for the object at `addr`, or 0 if untracked.
444 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
445 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
447 if (entry == NULL) return 0;
448 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
449 EntryInfo& entry_info = entries_.at(entry_index);
// entries_ always holds one more element than the map (the dummy at 0).
450 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
451 return entry_info.id;
// Returns the id for the object at `addr`, creating a fresh entry (and
// advancing next_id_ by kObjectIdStep) if it was not tracked yet. For an
// existing entry only size/accessed are refreshed; the id is stable.
455 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
458 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
459 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
461 if (entry->value != NULL) {
463 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
464 EntryInfo& entry_info = entries_.at(entry_index);
465 entry_info.accessed = accessed;
466 if (FLAG_heap_profiler_trace_objects) {
467 PrintF("Update object size : %p with old size %d and new size %d\n",
472 entry_info.size = size;
473 return entry_info.id;
// New object: the map stores the future index of the entry we append.
475 entry->value = reinterpret_cast<void*>(entries_.length());
476 SnapshotObjectId id = next_id_;
477 next_id_ += kObjectIdStep;
478 entries_.Add(EntryInfo(id, addr, size, accessed));
479 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
// Drops accumulated time intervals when object tracking stops.
484 void HeapObjectsMap::StopHeapObjectsTracking() {
485 time_intervals_.Clear();
// Re-synchronizes the map with the live heap: forces a full GC (to make
// the heap iterable) then walks every object, adding/refreshing entries.
489 void HeapObjectsMap::UpdateHeapObjectsMap() {
490 if (FLAG_heap_profiler_trace_objects) {
491 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
492 entries_map_.occupancy());
494 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
495 "HeapObjectsMap::UpdateHeapObjectsMap");
496 HeapIterator iterator(heap_);
497 for (HeapObject* obj = iterator.next();
499 obj = iterator.next()) {
500 FindOrAddEntry(obj->address(), obj->Size());
501 if (FLAG_heap_profiler_trace_objects) {
502 PrintF("Update object : %p %6d. Next address is %p\n",
505 obj->address() + obj->Size());
509 if (FLAG_heap_profiler_trace_objects) {
510 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
511 entries_map_.occupancy());
// Tracing aid used by FindUntrackedObjects: pairs a heap object with the
// size the map expected for it. expected_size == 0 means "untracked".
// NOTE(review): several interior lines (field decls, Print framing) are
// elided from this view.
519 struct HeapObjectInfo {
520 HeapObjectInfo(HeapObject* obj, int expected_size)
522 expected_size(expected_size) {
// Valid iff the recorded size still matches the object's actual size.
528 bool IsValid() const { return expected_size == obj->Size(); }
531 if (expected_size == 0) {
532 PrintF("Untracked object : %p %6d. Next address is %p\n",
535 obj->address() + obj->Size());
536 } else if (obj->Size() != expected_size) {
537 PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
541 obj->address() + obj->Size());
543 PrintF("Good object : %p %6d. Next address is %p\n",
546 obj->address() + obj->Size());
// Orders HeapObjectInfo records by object address for the trace dump.
552 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
553 if (a->obj < b->obj) return -1;
554 if (a->obj > b->obj) return 1;
// Consistency checker: walks the heap and counts objects the map does not
// know about (or whose recorded size is stale). With
// --heap-profiler-trace-objects it also prints a condensed report of the
// problem objects and their neighbors. NOTE(review): the untracked-object
// counting lines themselves are elided from this view; only the tracing
// scaffolding is visible.
562 int HeapObjectsMap::FindUntrackedObjects() {
563 List<HeapObjectInfo> heap_objects(1000);
565 HeapIterator iterator(heap_);
567 for (HeapObject* obj = iterator.next();
569 obj = iterator.next()) {
570 HashMap::Entry* entry = entries_map_.Lookup(
571 obj->address(), ComputePointerHash(obj->address()), false);
574 if (FLAG_heap_profiler_trace_objects) {
// expected_size 0 flags the object as untracked in the report.
575 heap_objects.Add(HeapObjectInfo(obj, 0));
578 int entry_index = static_cast<int>(
579 reinterpret_cast<intptr_t>(entry->value));
580 EntryInfo& entry_info = entries_.at(entry_index);
581 if (FLAG_heap_profiler_trace_objects) {
582 heap_objects.Add(HeapObjectInfo(obj,
583 static_cast<int>(entry_info.size)));
584 if (obj->Size() != static_cast<int>(entry_info.size))
// Without tracing, a size mismatch is a hard failure.
587 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
591 if (FLAG_heap_profiler_trace_objects) {
592 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
593 entries_map_.occupancy());
594 heap_objects.Sort(comparator);
// Print each invalid object plus one neighbor on either side, collapsing
// runs of valid objects into "N objects were skipped" lines.
595 int last_printed_object = -1;
596 bool print_next_object = false;
597 for (int i = 0; i < heap_objects.length(); ++i) {
598 const HeapObjectInfo& object_info = heap_objects[i];
599 if (!object_info.IsValid()) {
601 if (last_printed_object != i - 1) {
603 PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
604 heap_objects[i - 1].Print();
608 last_printed_object = i;
609 print_next_object = true;
610 } else if (print_next_object) {
612 print_next_object = false;
613 last_printed_object = i;
616 if (last_printed_object < heap_objects.length() - 1) {
617 PrintF("Last %d objects were skipped\n",
618 heap_objects.length() - 1 - last_printed_object);
620 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
621 entries_map_.occupancy());
// Streams per-time-interval heap statistics deltas to the embedder.
// Refreshes the map, opens a new interval at the current next_id_, then
// for each interval counts entries (and sums sizes) whose ids fall below
// the interval boundary; only changed intervals are emitted, in chunks of
// the stream's preferred size. Returns the last assigned id (also on
// abort).
627 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
628 UpdateHeapObjectsMap();
629 time_intervals_.Add(TimeInterval(next_id_));
630 int prefered_chunk_size = stream->GetChunkSize();
631 List<v8::HeapStatsUpdate> stats_buffer;
632 ASSERT(!entries_.is_empty());
// entries_ is sorted by id, so one forward sweep covers all intervals.
633 EntryInfo* entry_info = &entries_.first();
634 EntryInfo* end_entry_info = &entries_.last() + 1;
635 for (int time_interval_index = 0;
636 time_interval_index < time_intervals_.length();
637 ++time_interval_index) {
638 TimeInterval& time_interval = time_intervals_[time_interval_index];
639 SnapshotObjectId time_interval_id = time_interval.id;
640 uint32_t entries_size = 0;
641 EntryInfo* start_entry_info = entry_info;
642 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
643 entries_size += entry_info->size;
646 uint32_t entries_count =
647 static_cast<uint32_t>(entry_info - start_entry_info);
// Emit an update only when the interval's stats actually changed; the
// assignments inside the call also cache the new values.
648 if (time_interval.count != entries_count ||
649 time_interval.size != entries_size) {
650 stats_buffer.Add(v8::HeapStatsUpdate(
652 time_interval.count = entries_count,
653 time_interval.size = entries_size));
654 if (stats_buffer.length() >= prefered_chunk_size) {
655 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
656 &stats_buffer.first(), stats_buffer.length());
657 if (result == OutputStream::kAbort) return last_assigned_id();
658 stats_buffer.Clear();
662 ASSERT(entry_info == end_entry_info);
// Flush any remaining updates before signaling end-of-stream.
663 if (!stats_buffer.is_empty()) {
664 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
665 &stats_buffer.first(), stats_buffer.length());
666 if (result == OutputStream::kAbort) return last_assigned_id();
668 stream->EndOfStream();
669 return last_assigned_id();
// Compacts entries_ in place, keeping only entries marked `accessed`
// (i.e. seen alive) and resetting the flag for the next cycle. Dead
// entries are removed from the address map; kept entries get their map
// slot re-pointed at their new index. Entry 0 is the permanent dummy.
673 void HeapObjectsMap::RemoveDeadEntries() {
674 ASSERT(entries_.length() > 0 &&
675 entries_.at(0).id == 0 &&
676 entries_.at(0).addr == NULL);
677 int first_free_entry = 1;
678 for (int i = 1; i < entries_.length(); ++i) {
679 EntryInfo& entry_info = entries_.at(i);
680 if (entry_info.accessed) {
// Slide the surviving entry down into the first free slot.
681 if (first_free_entry != i) {
682 entries_.at(first_free_entry) = entry_info;
684 entries_.at(first_free_entry).accessed = false;
685 HashMap::Entry* entry = entries_map_.Lookup(
686 entry_info.addr, ComputePointerHash(entry_info.addr), false);
688 entry->value = reinterpret_cast<void*>(first_free_entry);
// Not accessed: drop the address mapping if one still exists.
691 if (entry_info.addr) {
692 entries_map_.Remove(entry_info.addr,
693 ComputePointerHash(entry_info.addr));
697 entries_.Rewind(first_free_entry);
698 ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
699 entries_map_.occupancy());
// Derives a pseudo-stable id for an embedder (native) object by mixing
// its hash with a hash of its label and, when available, its element
// count. NOTE(review): the final id adjustment/return lines are elided
// from this view.
703 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
704 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
705 const char* label = info->GetLabel();
706 id ^= StringHasher::HashSequentialString(label,
707 static_cast<int>(strlen(label)),
709 intptr_t element_count = info->GetElementCount();
710 if (element_count != -1)
711 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
712 v8::internal::kZeroHashSeed);
// Approximates memory consumed by the map: hash-map capacity plus the
// two backing lists. NOTE(review): the leading `return` expression line
// is elided from this view.
717 size_t HeapObjectsMap::GetUsedMemorySize() const {
720 sizeof(HashMap::Entry) * entries_map_.capacity() +
721 GetMemoryUsedByList(entries_) +
722 GetMemoryUsedByList(time_intervals_);
// HeapEntriesMap: maps arbitrary heap "things" to snapshot entry indices.
726 HeapEntriesMap::HeapEntriesMap()
727 : entries_(HeapThingsMatch) {
// Returns the entry index previously paired with `thing`, or
// HeapEntry::kNoEntry if it has not been seen.
731 int HeapEntriesMap::Map(HeapThing thing) {
732 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
733 if (cache_entry == NULL) return HeapEntry::kNoEntry;
734 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
// Records the thing -> entry-index association (must not already exist).
738 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
739 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
740 ASSERT(cache_entry->value == NULL);
741 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
// HeapObjectsSet: a hash set of heap objects, with an optional string tag
// per object (used e.g. to label global objects).
745 HeapObjectsSet::HeapObjectsSet()
746 : entries_(HeapEntriesMap::HeapThingsMatch) {
// Empties the set. NOTE(review): the body is elided from this view.
750 void HeapObjectsSet::Clear() {
// Membership test; non-heap values are never members.
755 bool HeapObjectsSet::Contains(Object* obj) {
756 if (!obj->IsHeapObject()) return false;
757 HeapObject* object = HeapObject::cast(obj);
758 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
// Inserts a heap object (no-op for non-heap values); Lookup with
// insert=true creates the slot.
762 void HeapObjectsSet::Insert(Object* obj) {
763 if (!obj->IsHeapObject()) return;
764 HeapObject* object = HeapObject::cast(obj);
765 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
// Returns the tag stored for `obj`, or the elided fallback when absent.
769 const char* HeapObjectsSet::GetTag(Object* obj) {
770 HeapObject* object = HeapObject::cast(obj);
771 HashMap::Entry* cache_entry =
772 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
773 return cache_entry != NULL
774 ? reinterpret_cast<const char*>(cache_entry->value)
// Associates `tag` with `obj`, inserting the object if needed.
779 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
780 if (!obj->IsHeapObject()) return;
781 HeapObject* object = HeapObject::cast(obj);
782 HashMap::Entry* cache_entry =
783 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
784 cache_entry->value = const_cast<char*>(tag);
// Fake HeapObject pointers forged from the reserved low snapshot ids.
// They act as sentinels so root/GC-root/GC-subroot "objects" can flow
// through the same AddEntry path as real heap objects; the subroot range
// is [kFirstGcSubrootObject, kLastGcSubrootObject).
788 HeapObject* const V8HeapExplorer::kInternalRootObject =
789 reinterpret_cast<HeapObject*>(
790 static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
791 HeapObject* const V8HeapExplorer::kGcRootsObject =
792 reinterpret_cast<HeapObject*>(
793 static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
794 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
795 reinterpret_cast<HeapObject*>(
796 static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
797 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
798 reinterpret_cast<HeapObject*>(
799 static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
// V8HeapExplorer walks the VM heap and fills a HeapSnapshot.
// NOTE(review): parts of the initializer list and the destructor body
// are elided from this view.
802 V8HeapExplorer::V8HeapExplorer(
803 HeapSnapshot* snapshot,
804 SnapshottingProgressReportingInterface* progress,
805 v8::HeapProfiler::ObjectNameResolver* resolver)
806 : heap_(snapshot->profiler()->heap_object_map()->heap()),
808 names_(snapshot_->profiler()->names()),
809 heap_object_map_(snapshot_->profiler()->heap_object_map()),
812 global_object_name_resolver_(resolver) {
816 V8HeapExplorer::~V8HeapExplorer() {
// HeapEntriesAllocator callback: materializes the entry for a HeapThing,
// which for this explorer is always a HeapObject pointer.
820 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
821 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
// Central type-dispatch: creates the appropriately-typed snapshot entry
// for a heap object. Handles the forged sentinel pointers first (root,
// GC roots, GC subroots), then the real object kinds from most to least
// specific; anything unmatched becomes a hidden "system" entry.
// NOTE(review): several `return entry;` / type-argument lines are elided
// from this view.
825 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
826 if (object == kInternalRootObject) {
827 snapshot_->AddRootEntry();
828 return snapshot_->root();
829 } else if (object == kGcRootsObject) {
830 HeapEntry* entry = snapshot_->AddGcRootsEntry();
// Sentinel pointer comparison selects the GC-subroot slot.
832 } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
833 HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
835 } else if (object->IsJSFunction()) {
836 JSFunction* func = JSFunction::cast(object);
837 SharedFunctionInfo* shared = func->shared();
// Bound functions have no meaningful shared name; label them specially.
838 const char* name = shared->bound() ? "native_bind" :
839 names_->GetName(String::cast(shared->name()));
840 return AddEntry(object, HeapEntry::kClosure, name);
841 } else if (object->IsJSRegExp()) {
842 JSRegExp* re = JSRegExp::cast(object);
843 return AddEntry(object,
845 names_->GetName(re->Pattern()));
846 } else if (object->IsJSObject()) {
847 const char* name = names_->GetName(
848 GetConstructorName(JSObject::cast(object)));
849 if (object->IsJSGlobalObject()) {
// Globals may carry an embedder-provided tag; append it to the name.
850 const char* tag = objects_tags_.GetTag(object);
852 name = names_->GetFormatted("%s / %s", name, tag);
855 return AddEntry(object, HeapEntry::kObject, name);
856 } else if (object->IsString()) {
857 String* string = String::cast(object);
// Cons/sliced strings get dedicated types so their structure is visible.
858 if (string->IsConsString())
859 return AddEntry(object,
860 HeapEntry::kConsString,
861 "(concatenated string)");
862 if (string->IsSlicedString())
863 return AddEntry(object,
864 HeapEntry::kSlicedString,
866 return AddEntry(object,
868 names_->GetName(String::cast(object)));
869 } else if (object->IsCode()) {
870 return AddEntry(object, HeapEntry::kCode, "");
871 } else if (object->IsSharedFunctionInfo()) {
872 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
873 return AddEntry(object,
875 names_->GetName(name));
876 } else if (object->IsScript()) {
877 Object* name = Script::cast(object)->name();
878 return AddEntry(object,
881 ? names_->GetName(String::cast(name))
883 } else if (object->IsNativeContext()) {
884 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
885 } else if (object->IsContext()) {
886 return AddEntry(object, HeapEntry::kObject, "system / Context");
887 } else if (object->IsFixedArray() ||
888 object->IsFixedDoubleArray() ||
889 object->IsByteArray() ||
890 object->IsExternalArray()) {
891 return AddEntry(object, HeapEntry::kArray, "");
892 } else if (object->IsHeapNumber()) {
893 return AddEntry(object, HeapEntry::kHeapNumber, "number");
// Fallback: internal VM structures become hidden system entries.
895 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
// Lower-level overload: resolves/creates the object's stable snapshot id
// from its address and appends the entry to the snapshot.
899 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
900 HeapEntry::Type type,
902 int object_size = object->Size();
903 SnapshotObjectId object_id =
904 heap_object_map_->FindOrAddEntry(object->address(), object_size);
905 return snapshot_->AddEntry(type, name, object_id, object_size);
// Root visitor that creates one GC-subroot entry per sync tag, but only
// for tags that actually covered at least one pointer (empty subroots
// are skipped by comparing the running pointer count across tags).
909 class GcSubrootsEnumerator : public ObjectVisitor {
911 GcSubrootsEnumerator(
912 SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
915 previous_object_count_(0),
// Just counts pointers; Synchronize() decides whether the tag was empty.
918 void VisitPointers(Object** start, Object** end) {
919 object_count_ += end - start;
921 void Synchronize(VisitorSynchronization::SyncTag tag) {
922 // Skip empty subroots.
923 if (previous_object_count_ != object_count_) {
924 previous_object_count_ = object_count_;
925 filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
929 SnapshotFillerInterface* filler_;
930 V8HeapExplorer* explorer_;
931 intptr_t previous_object_count_;
932 intptr_t object_count_;
// Seeds the snapshot with the synthetic root, GC-roots, and non-empty
// GC-subroot entries (via GcSubrootsEnumerator over all VM roots).
936 void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
937 filler->AddEntry(kInternalRootObject, this);
938 filler->AddEntry(kGcRootsObject, this);
939 GcSubrootsEnumerator enumerator(filler, this);
940 heap_->IterateRoots(&enumerator, VISIT_ALL);
// Names internal VM structures for hidden "system" entries, keyed on the
// object's instance type; maps get a nested switch so string maps are
// labeled with their concrete string kind. NOTE(review): the MAP_TYPE
// case label and some `break`s are elided from this view.
944 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
945 switch (object->map()->instance_type()) {
947 switch (Map::cast(object)->instance_type()) {
948 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
949 case instance_type: return "system / Map (" #Name ")";
950 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
951 #undef MAKE_STRING_MAP_CASE
952 default: return "system / Map";
954 case CELL_TYPE: return "system / Cell";
955 case PROPERTY_CELL_TYPE: return "system / PropertyCell";
956 case FOREIGN_TYPE: return "system / Foreign";
957 case ODDBALL_TYPE: return "system / Oddball";
958 #define MAKE_STRUCT_CASE(NAME, Name, name) \
959 case NAME##_TYPE: return "system / "#Name;
960 STRUCT_LIST(MAKE_STRUCT_CASE)
961 #undef MAKE_STRUCT_CASE
962 default: return "system";
// Counts heap objects by iterating the heap once; used for progress
// reporting. NOTE(review): the loop-body increment line is elided here.
967 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
968 int objects_count = 0;
969 for (HeapObject* obj = iterator->next();
971 obj = iterator->next()) {
974 return objects_count;
// Visitor that turns every pointer field of `parent_obj` not already
// reported as a named reference into a hidden (indexed) reference.
// Fields already handled are marked in place by MarkVisitedField — the
// pointer is tagged with kFailureTag — and CheckVisitedAndUnmark restores
// the original heap-object tag while skipping the field.
978 class IndexedReferencesExtractor : public ObjectVisitor {
980 IndexedReferencesExtractor(V8HeapExplorer* generator,
981 HeapObject* parent_obj,
983 : generator_(generator),
984 parent_obj_(parent_obj),
988 void VisitCodeEntry(Address entry_address) {
989 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
990 generator_->SetInternalReference(parent_obj_, parent_, "code", code);
991 generator_->TagCodeObject(code);
993 void VisitPointers(Object** start, Object** end) {
994 for (Object** p = start; p < end; p++) {
996 if (CheckVisitedAndUnmark(p)) continue;
997 generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
// Tags the pointer at `offset` with kFailureTag to mark "already visited".
// A negative offset means "no such field" and is ignored.
1000 static void MarkVisitedField(HeapObject* obj, int offset) {
1001 if (offset < 0) return;
1002 Address field = obj->address() + offset;
1003 ASSERT(!Memory::Object_at(field)->IsFailure());
1004 ASSERT(Memory::Object_at(field)->IsHeapObject());
1005 *field |= kFailureTag;
// Returns true (and restores the heap-object tag) if the field was marked.
1009 bool CheckVisitedAndUnmark(Object** field) {
1010 if ((*field)->IsFailure()) {
1011 intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1012 *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1013 ASSERT((*field)->IsHeapObject());
1018 V8HeapExplorer* generator_;
1019 HeapObject* parent_obj_;
// Extracts all outgoing references of `obj` into the snapshot: first the
// type-specific named references via the Extract*References helpers, then
// the map reference, then any remaining pointer fields as hidden
// references via IndexedReferencesExtractor (which also restores the tags
// MarkVisitedField left behind).
1025 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1026 HeapEntry* heap_entry = GetEntry(obj);
1027 if (heap_entry == NULL) return; // No interest in this object.
1028 int entry = heap_entry->index();
// Dispatch on concrete object kind, most specific checks first.
1030 if (obj->IsJSGlobalProxy()) {
1031 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1032 } else if (obj->IsJSObject()) {
1033 ExtractJSObjectReferences(entry, JSObject::cast(obj));
1034 } else if (obj->IsString()) {
1035 ExtractStringReferences(entry, String::cast(obj));
1036 } else if (obj->IsContext()) {
1037 ExtractContextReferences(entry, Context::cast(obj));
1038 } else if (obj->IsMap()) {
1039 ExtractMapReferences(entry, Map::cast(obj));
1040 } else if (obj->IsSharedFunctionInfo()) {
1041 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1042 } else if (obj->IsScript()) {
1043 ExtractScriptReferences(entry, Script::cast(obj));
1044 } else if (obj->IsAccessorPair()) {
1045 ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1046 } else if (obj->IsCodeCache()) {
1047 ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1048 } else if (obj->IsCode()) {
1049 ExtractCodeReferences(entry, Code::cast(obj));
1050 } else if (obj->IsBox()) {
1051 ExtractBoxReferences(entry, Box::cast(obj));
1052 } else if (obj->IsCell()) {
1053 ExtractCellReferences(entry, Cell::cast(obj));
1054 } else if (obj->IsPropertyCell()) {
1055 ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1056 } else if (obj->IsAllocationSite()) {
1057 ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1059 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1061 // Extract unvisited fields as hidden references and restore tags
1062 // of visited fields.
1063 IndexedReferencesExtractor refs_extractor(this, obj, entry);
1064 obj->Iterate(&refs_extractor);
// Global proxies expose a single interesting reference: their native
// context.
1068 void V8HeapExplorer::ExtractJSGlobalProxyReferences(
1069 int entry, JSGlobalProxy* proxy) {
1070 SetInternalReference(proxy, entry,
1071 "native_context", proxy->native_context(),
1072 JSGlobalProxy::kNativeContextOffset);
// Records all outgoing references of a JSObject into the snapshot:
// closures, named properties, elements, internal fields and the prototype,
// plus type-specific fields for JSFunction, GlobalObject, JSArrayBufferView
// and JSArrayBuffer.
1076 void V8HeapExplorer::ExtractJSObjectReferences(
1077 int entry, JSObject* js_obj) {
1078 HeapObject* obj = js_obj;
// Generic JSObject payload: bound-function bindings, own properties,
// elements and embedder internal fields.
1079 ExtractClosureReferences(js_obj, entry);
1080 ExtractPropertyReferences(js_obj, entry);
1081 ExtractElementReferences(js_obj, entry);
1082 ExtractInternalReferences(js_obj, entry);
1083 SetPropertyReference(
1084 obj, entry, heap_->proto_string(), js_obj->GetPrototype());
1085 if (obj->IsJSFunction()) {
// JSFunction: the prototype_or_initial_map slot is either a concrete
// prototype object or the function's initial map (when it is a Map).
1086 JSFunction* js_fun = JSFunction::cast(js_obj);
1087 Object* proto_or_map = js_fun->prototype_or_initial_map();
1088 if (!proto_or_map->IsTheHole()) {
1089 if (!proto_or_map->IsMap()) {
1090 SetPropertyReference(
1092 heap_->prototype_string(), proto_or_map,
1094 JSFunction::kPrototypeOrInitialMapOffset);
1096 SetPropertyReference(
1098 heap_->prototype_string(), js_fun->prototype());
1099 SetInternalReference(
1100 obj, entry, "initial_map", proto_or_map,
1101 JSFunction::kPrototypeOrInitialMapOffset);
1104 SharedFunctionInfo* shared_info = js_fun->shared();
1105 // JSFunction has either bindings or literals and never both.
1106 bool bound = shared_info->bound();
1107 TagObject(js_fun->literals_or_bindings(),
1108 bound ? "(function bindings)" : "(function literals)");
1109 SetInternalReference(js_fun, entry,
1110 bound ? "bindings" : "literals",
1111 js_fun->literals_or_bindings(),
1112 JSFunction::kLiteralsOffset);
1113 TagObject(shared_info, "(shared function info)");
1114 SetInternalReference(js_fun, entry,
1115 "shared", shared_info,
1116 JSFunction::kSharedFunctionInfoOffset);
1117 TagObject(js_fun->context(), "(context)");
1118 SetInternalReference(js_fun, entry,
1119 "context", js_fun->context(),
1120 JSFunction::kContextOffset);
// next_function_link is a weak edge; the STATIC_CHECKs below pin the
// assumption that it is the last (and only weak) field of JSFunction.
1121 SetWeakReference(js_fun, entry,
1122 "next_function_link", js_fun->next_function_link(),
1123 JSFunction::kNextFunctionLinkOffset);
1124 STATIC_CHECK(JSFunction::kNextFunctionLinkOffset
1125 == JSFunction::kNonWeakFieldsEndOffset);
1126 STATIC_CHECK(JSFunction::kNextFunctionLinkOffset + kPointerSize
1127 == JSFunction::kSize);
1128 } else if (obj->IsGlobalObject()) {
// Global objects: builtins, contexts and the global receiver.
1129 GlobalObject* global_obj = GlobalObject::cast(obj);
1130 SetInternalReference(global_obj, entry,
1131 "builtins", global_obj->builtins(),
1132 GlobalObject::kBuiltinsOffset);
1133 SetInternalReference(global_obj, entry,
1134 "native_context", global_obj->native_context(),
1135 GlobalObject::kNativeContextOffset);
1136 SetInternalReference(global_obj, entry,
1137 "global_context", global_obj->global_context(),
1138 GlobalObject::kGlobalContextOffset);
1139 SetInternalReference(global_obj, entry,
1140 "global_receiver", global_obj->global_receiver(),
1141 GlobalObject::kGlobalReceiverOffset);
1142 STATIC_CHECK(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
1144 } else if (obj->IsJSArrayBufferView()) {
// Typed array / DataView: strong edge to its backing buffer, weak edge
// in the buffer's view list.
1145 JSArrayBufferView* view = JSArrayBufferView::cast(obj);
1146 SetInternalReference(view, entry, "buffer", view->buffer(),
1147 JSArrayBufferView::kBufferOffset);
1148 SetWeakReference(view, entry, "weak_next", view->weak_next(),
1149 JSArrayBufferView::kWeakNextOffset);
1150 } else if (obj->IsJSArrayBuffer()) {
1151 JSArrayBuffer* buffer = JSArrayBuffer::cast(obj);
1152 SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
1153 JSArrayBuffer::kWeakNextOffset);
1154 SetWeakReference(buffer, entry,
1155 "weak_first_view", buffer->weak_first_view(),
1156 JSArrayBuffer::kWeakFirstViewOffset);
// Common backing stores shared by all JSObjects.
1158 TagObject(js_obj->properties(), "(object properties)");
1159 SetInternalReference(obj, entry,
1160 "properties", js_obj->properties(),
1161 JSObject::kPropertiesOffset);
1162 TagObject(js_obj->elements(), "(object elements)");
1163 SetInternalReference(obj, entry,
1164 "elements", js_obj->elements(),
1165 JSObject::kElementsOffset);
// Records references held by composite strings: the two halves of a
// ConsString and the parent of a SlicedString. Flat strings hold no
// outgoing references.
1169 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1170 if (string->IsConsString()) {
1171 ConsString* cs = ConsString::cast(string);
1172 SetInternalReference(cs, entry, "first", cs->first(),
1173 ConsString::kFirstOffset);
1174 SetInternalReference(cs, entry, "second", cs->second(),
1175 ConsString::kSecondOffset);
1176 } else if (string->IsSlicedString()) {
1177 SlicedString* ss = SlicedString::cast(string);
1178 SetInternalReference(ss, entry, "parent", ss->parent(),
1179 SlicedString::kParentOffset);
// Records references held by a Context: context-allocated locals (named
// via the closure's ScopeInfo), the fixed header slots, and for native
// contexts all NATIVE_CONTEXT_FIELDS plus the weak optimized/deoptimized
// code lists.
1184 void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
1185 if (context == context->declaration_context()) {
1186 ScopeInfo* scope_info = context->closure()->shared()->scope_info();
1187 // Add context allocated locals.
1188 int context_locals = scope_info->ContextLocalCount();
1189 for (int i = 0; i < context_locals; ++i) {
1190 String* local_name = scope_info->ContextLocalName(i);
1191 int idx = Context::MIN_CONTEXT_SLOTS + i;
1192 SetContextReference(context, entry, local_name, context->get(idx),
1193 Context::OffsetOfElementAt(idx));
// Named function expressions also allocate a slot for the function name.
1195 if (scope_info->HasFunctionName()) {
1196 String* name = scope_info->FunctionName();
1198 int idx = scope_info->FunctionContextSlotIndex(name, &mode);
1200 SetContextReference(context, entry, name, context->get(idx),
1201 Context::OffsetOfElementAt(idx));
// Helper macro: slots before FIRST_WEAK_SLOT are strong internal
// references, the rest are weak. (Do not insert lines inside the macro:
// the trailing backslashes make it one logical line.)
1206 #define EXTRACT_CONTEXT_FIELD(index, type, name) \
1207 if (Context::index < Context::FIRST_WEAK_SLOT) { \
1208 SetInternalReference(context, entry, #name, context->get(Context::index), \
1209 FixedArray::OffsetOfElementAt(Context::index)); \
1211 SetWeakReference(context, entry, #name, context->get(Context::index), \
1212 FixedArray::OffsetOfElementAt(Context::index)); \
1214 EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
1215 EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
1216 EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
1217 EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
1218 if (context->IsNativeContext()) {
1219 TagObject(context->jsfunction_result_caches(),
1220 "(context func. result caches)");
1221 TagObject(context->normalized_map_cache(), "(context norm. map cache)");
1222 TagObject(context->runtime_context(), "(runtime context)");
1223 TagObject(context->embedder_data(), "(context data)");
1224 NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
1225 EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
1226 optimized_functions_list);
1227 EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
1228 EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
1229 EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
1230 #undef EXTRACT_CONTEXT_FIELD
// Pin the weak-slot layout assumptions used by the macro above.
1231 STATIC_CHECK(Context::OPTIMIZED_FUNCTIONS_LIST == Context::FIRST_WEAK_SLOT);
1232 STATIC_CHECK(Context::NEXT_CONTEXT_LINK + 1
1233 == Context::NATIVE_CONTEXT_SLOTS);
1234 STATIC_CHECK(Context::FIRST_WEAK_SLOT + 5 == Context::NATIVE_CONTEXT_SLOTS);
// Records references held by a Map: transitions / back pointer (they share
// one field), descriptors, code cache, prototype, constructor and
// dependent code.
1239 void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
1240 if (map->HasTransitionArray()) {
// When a transition array is present, the back pointer lives inside it.
1241 TransitionArray* transitions = map->transitions();
1242 int transitions_entry = GetEntry(transitions)->index();
1243 Object* back_pointer = transitions->back_pointer_storage();
1244 TagObject(back_pointer, "(back pointer)");
1245 SetInternalReference(transitions, transitions_entry,
1246 "back_pointer", back_pointer);
1247 TagObject(transitions, "(transition array)");
1248 SetInternalReference(map, entry,
1249 "transitions", transitions,
1250 Map::kTransitionsOrBackPointerOffset);
// Otherwise the shared field holds the back pointer directly.
1252 Object* back_pointer = map->GetBackPointer();
1253 TagObject(back_pointer, "(back pointer)");
1254 SetInternalReference(map, entry,
1255 "back_pointer", back_pointer,
1256 Map::kTransitionsOrBackPointerOffset);
1258 DescriptorArray* descriptors = map->instance_descriptors();
1259 TagObject(descriptors, "(map descriptors)");
1260 SetInternalReference(map, entry,
1261 "descriptors", descriptors,
1262 Map::kDescriptorsOffset);
1264 SetInternalReference(map, entry,
1265 "code_cache", map->code_cache(),
1266 Map::kCodeCacheOffset);
1267 SetInternalReference(map, entry,
1268 "prototype", map->prototype(), Map::kPrototypeOffset);
1269 SetInternalReference(map, entry,
1270 "constructor", map->constructor(),
1271 Map::kConstructorOffset);
1272 TagObject(map->dependent_code(), "(dependent code)");
1273 SetInternalReference(map, entry,
1274 "dependent_code", map->dependent_code(),
1275 Map::kDependentCodeOffset);
// Records references held by a SharedFunctionInfo and tags its code
// objects with human-readable names derived from the function's DebugName.
1279 void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
1280 int entry, SharedFunctionInfo* shared) {
1281 HeapObject* obj = shared;
// Named functions get "(code for <name>)"; anonymous ones fall back to
// the code kind, e.g. "(FUNCTION code)".
1282 String* shared_name = shared->DebugName();
1283 const char* name = NULL;
1284 if (shared_name != *heap_->isolate()->factory()->empty_string()) {
1285 name = names_->GetName(shared_name);
1286 TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
1288 TagObject(shared->code(), names_->GetFormatted("(%s code)",
1289 Code::Kind2String(shared->code()->kind())));
1292 SetInternalReference(obj, entry,
1293 "name", shared->name(),
1294 SharedFunctionInfo::kNameOffset);
1295 SetInternalReference(obj, entry,
1296 "code", shared->code(),
1297 SharedFunctionInfo::kCodeOffset);
1298 TagObject(shared->scope_info(), "(function scope info)");
1299 SetInternalReference(obj, entry,
1300 "scope_info", shared->scope_info(),
1301 SharedFunctionInfo::kScopeInfoOffset);
1302 SetInternalReference(obj, entry,
1303 "instance_class_name", shared->instance_class_name(),
1304 SharedFunctionInfo::kInstanceClassNameOffset);
1305 SetInternalReference(obj, entry,
1306 "script", shared->script(),
1307 SharedFunctionInfo::kScriptOffset);
1308 const char* construct_stub_name = name ?
1309 names_->GetFormatted("(construct stub code for %s)", name) :
1310 "(construct stub code)";
1311 TagObject(shared->construct_stub(), construct_stub_name);
1312 SetInternalReference(obj, entry,
1313 "construct_stub", shared->construct_stub(),
1314 SharedFunctionInfo::kConstructStubOffset);
1315 SetInternalReference(obj, entry,
1316 "function_data", shared->function_data(),
1317 SharedFunctionInfo::kFunctionDataOffset);
1318 SetInternalReference(obj, entry,
1319 "debug_info", shared->debug_info(),
1320 SharedFunctionInfo::kDebugInfoOffset);
1321 SetInternalReference(obj, entry,
1322 "inferred_name", shared->inferred_name(),
1323 SharedFunctionInfo::kInferredNameOffset);
1324 SetInternalReference(obj, entry,
1325 "optimized_code_map", shared->optimized_code_map(),
1326 SharedFunctionInfo::kOptimizedCodeMapOffset);
// The initial map is only weakly held by the shared info.
1327 SetWeakReference(obj, entry,
1328 "initial_map", shared->initial_map(),
1329 SharedFunctionInfo::kInitialMapOffset);
// Records references held by a Script: source, name, data, context data
// and the (tagged) line-ends cache.
1333 void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
1334 HeapObject* obj = script;
1335 SetInternalReference(obj, entry,
1336 "source", script->source(),
1337 Script::kSourceOffset);
1338 SetInternalReference(obj, entry,
1339 "name", script->name(),
1340 Script::kNameOffset);
1341 SetInternalReference(obj, entry,
1342 "data", script->data(),
1343 Script::kDataOffset);
1344 SetInternalReference(obj, entry,
1345 "context_data", script->context_data(),
1346 Script::kContextOffset);
1347 TagObject(script->line_ends(), "(script line ends)");
1348 SetInternalReference(obj, entry,
1349 "line_ends", script->line_ends(),
1350 Script::kLineEndsOffset);
// Records the getter and setter references of an AccessorPair.
1354 void V8HeapExplorer::ExtractAccessorPairReferences(
1355 int entry, AccessorPair* accessors) {
1356 SetInternalReference(accessors, entry, "getter", accessors->getter(),
1357 AccessorPair::kGetterOffset);
1358 SetInternalReference(accessors, entry, "setter", accessors->setter(),
1359 AccessorPair::kSetterOffset);
// Records (and tags) the default and normal-type caches of a CodeCache.
1363 void V8HeapExplorer::ExtractCodeCacheReferences(
1364 int entry, CodeCache* code_cache) {
1365 TagObject(code_cache->default_cache(), "(default code cache)");
1366 SetInternalReference(code_cache, entry,
1367 "default_cache", code_cache->default_cache(),
1368 CodeCache::kDefaultCacheOffset);
1369 TagObject(code_cache->normal_type_cache(), "(code type cache)");
1370 SetInternalReference(code_cache, entry,
1371 "type_cache", code_cache->normal_type_cache(),
1372 CodeCache::kNormalTypeCacheOffset);
// Tags a builtin's code object as "(<name> builtin)" in the snapshot.
1376 void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
1377 TagObject(code, names_->GetFormatted("(%s builtin)", name));
// Tags stub code objects with their CodeStub major name, e.g.
// "(CompareIC code)". Non-stub code is left untouched here.
1381 void V8HeapExplorer::TagCodeObject(Code* code) {
1382 if (code->kind() == Code::STUB) {
1383 TagObject(code, names_->GetFormatted(
1384 "(%s code)", CodeStub::MajorName(
1385 static_cast<CodeStub::Major>(code->major_key()), true)));
// Records references held by a Code object: relocation info, handler
// table, deopt data, gc metadata, constant pool, plus kind-specific
// fields (type feedback for FUNCTION, weak next_code_link for
// OPTIMIZED_FUNCTION).
1390 void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
1391 TagCodeObject(code);
1392 TagObject(code->relocation_info(), "(code relocation info)");
1393 SetInternalReference(code, entry,
1394 "relocation_info", code->relocation_info(),
1395 Code::kRelocationInfoOffset);
1396 SetInternalReference(code, entry,
1397 "handler_table", code->handler_table(),
1398 Code::kHandlerTableOffset);
1399 TagObject(code->deoptimization_data(), "(code deopt data)");
1400 SetInternalReference(code, entry,
1401 "deoptimization_data", code->deoptimization_data(),
1402 Code::kDeoptimizationDataOffset);
1403 if (code->kind() == Code::FUNCTION) {
1404 SetInternalReference(code, entry,
1405 "type_feedback_info", code->type_feedback_info(),
1406 Code::kTypeFeedbackInfoOffset);
1408 SetInternalReference(code, entry,
1409 "gc_metadata", code->gc_metadata(),
1410 Code::kGCMetadataOffset);
1411 SetInternalReference(code, entry,
1412 "constant_pool", code->constant_pool(),
1413 Code::kConstantPoolOffset);
1414 if (code->kind() == Code::OPTIMIZED_FUNCTION) {
// Optimized code sits in a weakly-linked list of code objects.
1415 SetWeakReference(code, entry,
1416 "next_code_link", code->next_code_link(),
1417 Code::kNextCodeLinkOffset);
// Records the boxed value reference of a Box.
1422 void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
1423 SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
// Records the value reference of a Cell.
1427 void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
1428 SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
// Records references of a PropertyCell: the plain Cell value plus the
// cell's type and dependent code.
1432 void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
1433 PropertyCell* cell) {
1434 ExtractCellReferences(entry, cell);
1435 SetInternalReference(cell, entry, "type", cell->type(),
1436 PropertyCell::kTypeOffset);
1437 SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
1438 PropertyCell::kDependentCodeOffset);
// Records references of an AllocationSite: transition info, nested site
// and dependent code. weak_next is deliberately skipped (see below).
1442 void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
1443 AllocationSite* site) {
1444 SetInternalReference(site, entry, "transition_info", site->transition_info(),
1445 AllocationSite::kTransitionInfoOffset);
1446 SetInternalReference(site, entry, "nested_site", site->nested_site(),
1447 AllocationSite::kNestedSiteOffset);
1448 SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
1449 AllocationSite::kDependentCodeOffset);
1450 // Do not visit weak_next as it is not visited by the StaticVisitor,
1451 // and we're not very interested in weak_next field here.
1452 STATIC_CHECK(AllocationSite::kWeakNextOffset >=
1453 AllocationSite::BodyDescriptor::kEndOffset);
// For bound JSFunctions, records shortcut edges to the bound receiver,
// the target function and each bound argument. No-op for other objects.
1457 void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
1458 if (!js_obj->IsJSFunction()) return;
1460 JSFunction* func = JSFunction::cast(js_obj);
1461 if (func->shared()->bound()) {
1462 FixedArray* bindings = func->function_bindings();
1463 SetNativeBindReference(js_obj, entry, "bound_this",
1464 bindings->get(JSFunction::kBoundThisIndex));
1465 SetNativeBindReference(js_obj, entry, "bound_function",
1466 bindings->get(JSFunction::kBoundFunctionIndex));
// Remaining binding slots are the pre-bound arguments, numbered from 0.
1467 for (int i = JSFunction::kBoundArgumentsStartIndex;
1468 i < bindings->length(); i++) {
1469 const char* reference_name = names_->GetFormatted(
1470 "bound_argument_%d",
1471 i - JSFunction::kBoundArgumentsStartIndex);
1472 SetNativeBindReference(js_obj, entry, reference_name,
// Records named-property references. Fast-mode objects are walked via
// their map's descriptor array (in-object and out-of-object fields,
// constants, accessor pairs); dictionary-mode objects via the property
// dictionary. Hidden-string properties become internal
// "hidden_properties" edges instead of property edges.
1479 void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
1480 if (js_obj->HasFastProperties()) {
1481 DescriptorArray* descs = js_obj->map()->instance_descriptors();
1482 int real_size = js_obj->map()->NumberOfOwnDescriptors();
1483 for (int i = 0; i < real_size; i++) {
1484 switch (descs->GetType(i)) {
// FIELD: value stored either in-object or in the properties array.
1486 int index = descs->GetFieldIndex(i);
1488 Name* k = descs->GetKey(i);
1489 if (index < js_obj->map()->inobject_properties()) {
1490 Object* value = js_obj->InObjectPropertyAt(index);
1491 if (k != heap_->hidden_string()) {
1492 SetPropertyReference(
1496 js_obj->GetInObjectPropertyOffset(index));
1498 TagObject(value, "(hidden properties)");
1499 SetInternalReference(
1501 "hidden_properties", value,
1502 js_obj->GetInObjectPropertyOffset(index));
1505 Object* value = js_obj->RawFastPropertyAt(index);
1506 if (k != heap_->hidden_string()) {
1507 SetPropertyReference(js_obj, entry, k, value);
1509 TagObject(value, "(hidden properties)");
1510 SetInternalReference(js_obj, entry, "hidden_properties", value);
// CONSTANT: value stored directly in the descriptor array.
1516 SetPropertyReference(
1518 descs->GetKey(i), descs->GetConstant(i));
// CALLBACKS: may be an AccessorPair -> get/set shortcut edges.
1521 ExtractAccessorPairProperty(
1523 descs->GetKey(i), descs->GetValue(i));
1525 case NORMAL: // only in slow mode
1526 case HANDLER: // only in lookup results, not in descriptors
1527 case INTERCEPTOR: // only in lookup results, not in descriptors
// Slow (dictionary) mode: iterate the whole capacity, skip empty keys.
1536 NameDictionary* dictionary = js_obj->property_dictionary();
1537 int length = dictionary->Capacity();
1538 for (int i = 0; i < length; ++i) {
1539 Object* k = dictionary->KeyAt(i);
1540 if (dictionary->IsKey(k)) {
1541 Object* target = dictionary->ValueAt(i);
1542 // We assume that global objects can only have slow properties.
1543 Object* value = target->IsPropertyCell()
1544 ? PropertyCell::cast(target)->value()
1546 if (k == heap_->hidden_string()) {
1547 TagObject(value, "(hidden properties)");
1548 SetInternalReference(js_obj, entry, "hidden_properties", value);
1551 if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
1552 SetPropertyReference(js_obj, entry, String::cast(k), value);
// If |callback_obj| is an AccessorPair, records "get %s" / "set %s"
// property edges for its non-oddball getter/setter and returns true;
// otherwise returns false without recording anything.
1559 bool V8HeapExplorer::ExtractAccessorPairProperty(
1560 JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
1561 if (!callback_obj->IsAccessorPair()) return false;
1562 AccessorPair* accessors = AccessorPair::cast(callback_obj);
1563 Object* getter = accessors->getter();
// Oddballs (e.g. the-hole) mark an absent accessor half.
1564 if (!getter->IsOddball()) {
1565 SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
1567 Object* setter = accessors->setter();
1568 if (!setter->IsOddball()) {
1569 SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
// Records indexed-element references: fast object elements (skipping
// holes, bounded by JSArray length when applicable) or dictionary
// elements keyed by their numeric index.
1575 void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
1576 if (js_obj->HasFastObjectElements()) {
1577 FixedArray* elements = FixedArray::cast(js_obj->elements());
1578 int length = js_obj->IsJSArray() ?
1579 Smi::cast(JSArray::cast(js_obj)->length())->value() :
1581 for (int i = 0; i < length; ++i) {
1582 if (!elements->get(i)->IsTheHole()) {
1583 SetElementReference(js_obj, entry, i, elements->get(i));
1586 } else if (js_obj->HasDictionaryElements()) {
1587 SeededNumberDictionary* dictionary = js_obj->element_dictionary();
1588 int length = dictionary->Capacity();
1589 for (int i = 0; i < length; ++i) {
1590 Object* k = dictionary->KeyAt(i);
1591 if (dictionary->IsKey(k)) {
1592 ASSERT(k->IsNumber());
1593 uint32_t index = static_cast<uint32_t>(k->Number());
1594 SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
// Records one internal reference per embedder internal field.
1601 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
1602 int length = js_obj->GetInternalFieldCount();
1603 for (int i = 0; i < length; ++i) {
1604 Object* o = js_obj->GetInternalField(i);
1605 SetInternalReference(
1606 js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
// Returns a display name for the object's constructor. Functions are
// named "(closure)"; for a generic "Object" constructor name it tries
// the object's own "constructor" property to recover a better name.
1611 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
1612 Heap* heap = object->GetHeap();
1613 if (object->IsJSFunction()) return heap->closure_string();
1614 String* constructor_name = object->constructor_name();
1615 if (constructor_name == heap->Object_string()) {
1616 // Look up an immediate "constructor" property, if it is a function,
1617 // return its name. This is for instances of binding objects, which
1618 // have prototype constructor type "Object".
1619 Object* constructor_prop = NULL;
1620 LookupResult result(heap->isolate());
1621 object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
1622 if (!result.IsFound()) return object->constructor_name();
1624 constructor_prop = result.GetLazyValue();
1625 if (constructor_prop->IsJSFunction()) {
1626 Object* maybe_name =
1627 JSFunction::cast(constructor_prop)->shared()->name();
1628 if (maybe_name->IsString()) {
1629 String* name = String::cast(maybe_name);
// Only use the function's name if it is non-empty.
1630 if (name->length() > 0) return name;
1634 return object->constructor_name();
// Returns the snapshot entry for a heap object, creating it on first
// use; returns NULL for non-heap values (smis).
1638 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
1639 if (!obj->IsHeapObject()) return NULL;
1640 return filler_->FindOrAddEntry(obj, this);
// Visitor that collects heap roots in two passes (strong-only, then all)
// and replays them into the snapshot as gc-subroot references, marking
// which ones are weak and tagging builtin code objects by name.
1644 class RootsReferencesExtractor : public ObjectVisitor {
// Remembers how many references had been seen when a given sync tag
// (root category) was reached, so references can be attributed to tags.
1647 IndexTag(int index, VisitorSynchronization::SyncTag tag)
1648 : index(index), tag(tag) { }
1650 VisitorSynchronization::SyncTag tag;
1654 explicit RootsReferencesExtractor(Heap* heap)
1655 : collecting_all_references_(false),
1656 previous_reference_count_(0),
// First pass fills strong_references_; after SetCollectingAllReferences()
// a second pass fills all_references_.
1660 void VisitPointers(Object** start, Object** end) {
1661 if (collecting_all_references_) {
1662 for (Object** p = start; p < end; p++) all_references_.Add(*p);
1664 for (Object** p = start; p < end; p++) strong_references_.Add(*p);
1668 void SetCollectingAllReferences() { collecting_all_references_ = true; }
// Replays collected roots into the snapshot. A reference is weak iff it
// is absent from the strong list (the two passes visit roots in the
// same order, so a linear merge suffices).
1670 void FillReferences(V8HeapExplorer* explorer) {
1671 ASSERT(strong_references_.length() <= all_references_.length());
1672 Builtins* builtins = heap_->isolate()->builtins();
1673 for (int i = 0; i < reference_tags_.length(); ++i) {
1674 explorer->SetGcRootsReference(reference_tags_[i].tag);
1676 int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
1677 while (all_index < all_references_.length()) {
1678 bool is_strong = strong_index < strong_references_.length()
1679 && strong_references_[strong_index] == all_references_[all_index];
1680 explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
1682 all_references_[all_index]);
1683 if (reference_tags_[tags_index].tag ==
1684 VisitorSynchronization::kBuiltins) {
1685 ASSERT(all_references_[all_index]->IsCode());
1686 explorer->TagBuiltinCodeObject(
1687 Code::cast(all_references_[all_index]),
1688 builtins->name(builtin_index++));
1691 if (is_strong) ++strong_index;
1692 if (reference_tags_[tags_index].index == all_index) ++tags_index;
// Called by the heap iterator between root categories; records the tag
// boundary only during the all-references pass.
1696 void Synchronize(VisitorSynchronization::SyncTag tag) {
1697 if (collecting_all_references_ &&
1698 previous_reference_count_ != all_references_.length()) {
1699 previous_reference_count_ = all_references_.length();
1700 reference_tags_.Add(IndexTag(previous_reference_count_, tag));
1705 bool collecting_all_references_;
1706 List<Object*> strong_references_;
1707 List<Object*> all_references_;
1708 int previous_reference_count_;
1709 List<IndexTag> reference_tags_;
// Top-level driver: extracts GC-root references (two passes via
// RootsReferencesExtractor) and then walks the whole reachable heap,
// extracting per-object references with progress reporting. Returns
// false if the client interrupted the operation.
1714 bool V8HeapExplorer::IterateAndExtractReferences(
1715 SnapshotFillerInterface* filler) {
1718 // Make sure builtin code objects get their builtin tags
1719 // first. Otherwise a particular JSFunction object could set
1720 // its custom name to a generic builtin.
1721 SetRootGcRootsReference();
1722 RootsReferencesExtractor extractor(heap_);
1723 heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
1724 extractor.SetCollectingAllReferences();
1725 heap_->IterateRoots(&extractor, VISIT_ALL);
1726 extractor.FillReferences(this);
1728 // Now iterate the whole heap.
1729 bool interrupted = false;
1730 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
1731 // Heap iteration with filtering must be finished in any case.
1732 for (HeapObject* obj = iterator.next();
1734 obj = iterator.next(), progress_->ProgressStep()) {
1736 ExtractReferences(obj);
1737 if (!progress_->ProgressReport(false)) interrupted = true;
1746 return progress_->ProgressReport(true);
// Returns true for heap objects worth showing in the snapshot, filtering
// out oddballs and ubiquitous singletons (empty arrays, common maps,
// filler maps) that would only add noise.
1750 bool V8HeapExplorer::IsEssentialObject(Object* object) {
1751 return object->IsHeapObject()
1752 && !object->IsOddball()
1753 && object != heap_->empty_byte_array()
1754 && object != heap_->empty_fixed_array()
1755 && object != heap_->empty_descriptor_array()
1756 && object != heap_->fixed_array_map()
1757 && object != heap_->cell_map()
1758 && object != heap_->global_property_cell_map()
1759 && object != heap_->shared_function_info_map()
1760 && object != heap_->free_space_map()
1761 && object != heap_->one_pointer_filler_map()
1762 && object != heap_->two_pointer_filler_map();
// Adds a kContextVariable edge (named after the context local) and marks
// the field as visited so the indexed-references pass skips it.
1766 void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
1768 String* reference_name,
1771 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1772 HeapEntry* child_entry = GetEntry(child_obj);
1773 if (child_entry != NULL) {
1774 filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
1776 names_->GetName(reference_name),
1778 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
// Adds a kShortcut edge used for bound-function internals (bound_this,
// bound_function, bound_argument_N).
1783 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
1785 const char* reference_name,
1786 Object* child_obj) {
1787 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1788 HeapEntry* child_entry = GetEntry(child_obj);
1789 if (child_entry != NULL) {
1790 filler_->SetNamedReference(HeapGraphEdge::kShortcut,
// Adds an indexed kElement edge for an array element.
1798 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
1801 Object* child_obj) {
1802 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1803 HeapEntry* child_entry = GetEntry(child_obj);
1804 if (child_entry != NULL) {
1805 filler_->SetIndexedReference(HeapGraphEdge::kElement,
// Adds a named kInternal edge for an essential child object. The field
// is marked visited even when the edge is filtered out, so the
// indexed-references pass never re-reports it.
1813 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1815 const char* reference_name,
1818 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1819 HeapEntry* child_entry = GetEntry(child_obj);
1820 if (child_entry == NULL) return;
1821 if (IsEssentialObject(child_obj)) {
1822 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1827 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
// Overload taking a numeric index instead of a name; the index is
// converted to a string via the names table. Same visited-field
// bookkeeping as the named overload.
1831 void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
1836 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1837 HeapEntry* child_entry = GetEntry(child_obj);
1838 if (child_entry == NULL) return;
1839 if (IsEssentialObject(child_obj)) {
1840 filler_->SetNamedReference(HeapGraphEdge::kInternal,
1842 names_->GetName(index),
1845 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
// Adds an indexed kHidden edge for essential children that were not
// covered by a more specific (named) reference.
1849 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
1852 Object* child_obj) {
1853 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1854 HeapEntry* child_entry = GetEntry(child_obj);
1855 if (child_entry != NULL && IsEssentialObject(child_obj)) {
1856 filler_->SetIndexedReference(HeapGraphEdge::kHidden,
// Adds a named kWeak edge (does not keep the child alive from the
// profiler's point of view) and marks the field visited.
1864 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
1866 const char* reference_name,
1869 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1870 HeapEntry* child_entry = GetEntry(child_obj);
1871 if (child_entry == NULL) return;
1872 if (IsEssentialObject(child_obj)) {
1873 filler_->SetNamedReference(HeapGraphEdge::kWeak,
1878 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
// Adds a named property edge. Symbols and non-empty string names become
// kProperty edges; empty string names degrade to kInternal. An optional
// printf-style format (e.g. "get %s") decorates string names.
1882 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
1884 Name* reference_name,
1886 const char* name_format_string,
1888 ASSERT(parent_entry == GetEntry(parent_obj)->index());
1889 HeapEntry* child_entry = GetEntry(child_obj);
1890 if (child_entry != NULL) {
1891 HeapGraphEdge::Type type =
1892 reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
1893 ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
1894 const char* name = name_format_string != NULL && reference_name->IsString()
1895 ? names_->GetFormatted(
1897 String::cast(reference_name)->ToCString(
1898 DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
1899 names_->GetName(reference_name);
1901 filler_->SetNamedReference(type,
1905 IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
// Links the synthetic snapshot root to the synthetic "(GC roots)" entry.
1910 void V8HeapExplorer::SetRootGcRootsReference() {
1911 filler_->SetIndexedAutoIndexReference(
1912 HeapGraphEdge::kElement,
1913 snapshot_->root()->index(),
1914 snapshot_->gc_roots());
// Adds a shortcut edge from the snapshot root to a user JS global
// object, so globals are directly reachable in the UI.
1918 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
1919 HeapEntry* child_entry = GetEntry(child_obj);
1920 ASSERT(child_entry != NULL);
1921 filler_->SetNamedAutoIndexReference(
1922 HeapGraphEdge::kShortcut,
1923 snapshot_->root()->index(),
// Links "(GC roots)" to the synthetic subroot entry for one root
// category (|tag|).
1928 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
1929 filler_->SetIndexedAutoIndexReference(
1930 HeapGraphEdge::kElement,
1931 snapshot_->gc_roots()->index(),
1932 snapshot_->gc_subroot(tag));
// Links a gc-subroot entry to an actual root object: named strong roots
// get kInternal edges, weak roots get kWeak edges, the rest get indexed
// kElement edges. Also adds a root shortcut to each (non-debug) JS
// global object found via native contexts.
1936 void V8HeapExplorer::SetGcSubrootReference(
1937 VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
1938 HeapEntry* child_entry = GetEntry(child_obj);
1939 if (child_entry != NULL) {
1940 const char* name = GetStrongGcSubrootName(child_obj);
1942 filler_->SetNamedReference(
1943 HeapGraphEdge::kInternal,
1944 snapshot_->gc_subroot(tag)->index(),
1949 filler_->SetNamedAutoIndexReference(
1950 HeapGraphEdge::kWeak,
1951 snapshot_->gc_subroot(tag)->index(),
1954 filler_->SetIndexedAutoIndexReference(
1955 HeapGraphEdge::kElement,
1956 snapshot_->gc_subroot(tag)->index(),
1961 // Add a shortcut to JS global object reference at snapshot root.
1962 if (child_obj->IsNativeContext()) {
1963 Context* context = Context::cast(child_obj);
1964 GlobalObject* global = context->global_object();
1965 if (global->IsJSGlobalObject()) {
1966 bool is_debug_object = false;
1967 #ifdef ENABLE_DEBUGGER_SUPPORT
1968 is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
// user_roots_ de-duplicates: each global gets at most one shortcut.
1970 if (!is_debug_object && !user_roots_.Contains(global)) {
1971 user_roots_.Insert(global);
1972 SetUserGlobalReference(global);
// Returns the symbolic name of a strong GC root object (or NULL if it is
// not one). The name table is built lazily on first call from the
// STRONG_ROOT_LIST / STRUCT_LIST / INTERNALIZED_STRING_LIST macros.
1980 const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
1981 if (strong_gc_subroot_names_.is_empty()) {
1982 #define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
1983 #define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
1984 STRONG_ROOT_LIST(ROOT_NAME)
1986 #define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
1987 STRUCT_LIST(STRUCT_MAP_NAME)
1988 #undef STRUCT_MAP_NAME
1989 #define STRING_NAME(name, str) NAME_ENTRY(name)
1990 INTERNALIZED_STRING_LIST(STRING_NAME)
1993 CHECK(!strong_gc_subroot_names_.is_empty());
1995 return strong_gc_subroot_names_.GetTag(object);
// Sets a descriptive name on an essential object's entry, but only if it
// does not already have one (first tag wins).
1999 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2000 if (IsEssentialObject(obj)) {
2001 HeapEntry* entry = GetEntry(obj);
2002 if (entry->name()[0] == '\0') {
2003 entry->set_name(tag);
// Visitor that collects handles to all JSGlobalObjects reachable via
// native contexts (through each context's global proxy prototype chain).
2009 class GlobalObjectsEnumerator : public ObjectVisitor {
2011 virtual void VisitPointers(Object** start, Object** end) {
2012 for (Object** p = start; p < end; p++) {
2013 if ((*p)->IsNativeContext()) {
2014 Context* context = Context::cast(*p);
2015 JSObject* proxy = context->global_proxy();
2016 if (proxy->IsJSGlobalProxy()) {
// The proxy's map prototype is the actual global object.
2017 Object* global = proxy->map()->prototype();
2018 if (global->IsJSGlobalObject()) {
2019 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2025 int count() { return objects_.length(); }
2026 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2029 List<Handle<JSGlobalObject> > objects_;
2033 // Modifies heap. Must not be run during heap traversal.
// Asks the embedder's name resolver for a URL/name for every JS global
// object and records it in objects_tags_. Name resolution may allocate,
// so tagging is split from the allocation-free recording phase below.
2034 void V8HeapExplorer::TagGlobalObjects() {
2035 Isolate* isolate = heap_->isolate();
2036 HandleScope scope(isolate);
2037 GlobalObjectsEnumerator enumerator;
2038 isolate->global_handles()->IterateAllRoots(&enumerator);
2039 const char** urls = NewArray<const char*>(enumerator.count());
2040 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2041 if (global_object_name_resolver_) {
2042 HandleScope scope(isolate);
2043 Handle<JSGlobalObject> global_obj = enumerator.at(i);
2044 urls[i] = global_object_name_resolver_->GetName(
2045 Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
// Recording the tags must not move objects, hence no-allocation scope.
2051 DisallowHeapAllocation no_allocation;
2052 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2053 objects_tags_.SetTag(*enumerator.at(i), urls[i]);
// Visitor over global handles that forwards embedder-classified handles
// (those with a class id) to the NativeObjectsExplorer.
2060 class GlobalHandlesExtractor : public ObjectVisitor {
2062 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2063 : explorer_(explorer) {}
2064 virtual ~GlobalHandlesExtractor() {}
// Plain pointers are ignored; only embedder references matter here.
2065 virtual void VisitPointers(Object** start, Object** end) {
2068 virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2069 explorer_->VisitSubtreeWrapper(p, class_id);
2072 NativeObjectsExplorer* explorer_;
// Allocates snapshot entries of a fixed type (synthetic or native) for
// embedder-provided RetainedObjectInfo things.
2076 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2078 BasicHeapEntriesAllocator(
2079 HeapSnapshot* snapshot,
2080 HeapEntry::Type entries_type)
2081 : snapshot_(snapshot),
2082 names_(snapshot_->profiler()->names()),
2083 heap_object_map_(snapshot_->profiler()->heap_object_map()),
2084 entries_type_(entries_type) {
2086 virtual HeapEntry* AllocateEntry(HeapThing ptr);
2088 HeapSnapshot* snapshot_;
2089 StringsStorage* names_;
2090 HeapObjectsMap* heap_object_map_;
2091 HeapEntry::Type entries_type_;
// Creates a snapshot entry from a v8::RetainedObjectInfo: name includes
// the element count when known ("label / N entries"); -1 from
// GetElementCount/GetSizeInBytes means "unknown".
2095 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2096 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2097 intptr_t elements = info->GetElementCount();
2098 intptr_t size = info->GetSizeInBytes();
2099 const char* name = elements != -1
2100 ? names_->GetFormatted(
2101 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2102 : names_->GetCopy(info->GetLabel());
2103 return snapshot_->AddEntry(
2106 heap_object_map_->GenerateId(info),
2107 size != -1 ? static_cast<int>(size) : 0);
// Sets up the explorer for embedder ("native") objects. Two allocators are
// created: one for native object entries and one for the synthetic group
// entries that act as their parents; both are deleted in the destructor.
2111 NativeObjectsExplorer::NativeObjectsExplorer(
2112 HeapSnapshot* snapshot,
2113 SnapshottingProgressReportingInterface* progress)
2114 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2115 snapshot_(snapshot),
2116 names_(snapshot_->profiler()->names()),
2117 progress_(progress),
2118 embedder_queried_(false),
2119 objects_by_info_(RetainedInfosMatch),
2120 native_groups_(StringsMatch),
// NOTE(review): at least one initializer line is elided from this listing
// here; confirm against the original.
2122 synthetic_entries_allocator_ =
2123 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2124 native_entries_allocator_ =
2125 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
// Releases everything the explorer owns: the RetainedObjectInfo keys and
// per-info object lists in objects_by_info_, the synthetic group infos in
// native_groups_, and the two entry allocators.
2129 NativeObjectsExplorer::~NativeObjectsExplorer() {
2130 for (HashMap::Entry* p = objects_by_info_.Start();
2132 p = objects_by_info_.Next(p)) {
2133 v8::RetainedObjectInfo* info =
2134 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
// NOTE(review): a line is elided here -- presumably info->Dispose().
2136 List<HeapObject*>* objects =
2137 reinterpret_cast<List<HeapObject*>* >(p->value);
// NOTE(review): deletion of |objects| and the loop close are elided from
// this listing.
2140 for (HashMap::Entry* p = native_groups_.Start();
2142 p = native_groups_.Next(p)) {
2143 v8::RetainedObjectInfo* info =
2144 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
// NOTE(review): a line is elided here -- presumably info->Dispose().
2147 delete synthetic_entries_allocator_;
2148 delete native_entries_allocator_;
// Returns the number of distinct embedder RetainedObjectInfo groups seen so
// far; fills the retained-objects map first so occupancy() is meaningful.
2152 int NativeObjectsExplorer::EstimateObjectsCount() {
2153 FillRetainedObjects();
2154 return objects_by_info_.occupancy();
// Queries the embedder for retained native objects, at most once per
// snapshot (guarded by embedder_queried_). GC prologue callbacks are run
// with kGCCallbackFlagConstructRetainedObjectInfos so embedders populate
// their ObjectGroups; wrappers carrying class ids are collected afterwards
// via GlobalHandlesExtractor.
2158 void NativeObjectsExplorer::FillRetainedObjects() {
2159 if (embedder_queried_) return;
2160 Isolate* isolate = isolate_;
2161 const GCType major_gc_type = kGCTypeMarkSweepCompact;
2162 // Record objects that are joined into ObjectGroups.
2163 isolate->heap()->CallGCPrologueCallbacks(
2164 major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2165 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2166 for (int i = 0; i < groups->length(); ++i) {
2167 ObjectGroup* group = groups->at(i);
// Groups without a RetainedObjectInfo cannot be represented in the snapshot.
2168 if (group->info == NULL) continue;
2169 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2170 for (size_t j = 0; j < group->length; ++j) {
2171 HeapObject* obj = HeapObject::cast(*group->objects[j]);
// NOTE(review): a line is elided here -- presumably list->Add(obj).
// in_groups_ lets VisitSubtreeWrapper skip objects already covered here.
2173 in_groups_.Insert(obj);
2175 group->info = NULL; // Acquire info object ownership.
2177 isolate->global_handles()->RemoveObjectGroups();
2178 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2179 // Record objects that are not in ObjectGroups, but have class ID.
2180 GlobalHandlesExtractor extractor(this);
2181 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2182 embedder_queried_ = true;
// Converts the embedder's ImplicitRefGroups into internal edges from each
// group's parent object to its children, then drops the groups from the
// global-handles bookkeeping.
2186 void NativeObjectsExplorer::FillImplicitReferences() {
2187 Isolate* isolate = isolate_;
2188 List<ImplicitRefGroup*>* groups =
2189 isolate->global_handles()->implicit_ref_groups();
2190 for (int i = 0; i < groups->length(); ++i) {
2191 ImplicitRefGroup* group = groups->at(i);
2192 HeapObject* parent = *group->parent;
// NOTE(review): the declaration receiving this ->index() value (presumably
// int parent_entry =) is elided from this listing.
2194 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2195 ASSERT(parent_entry != HeapEntry::kNoEntry);
2196 Object*** children = group->children;
2197 for (size_t j = 0; j < group->length; ++j) {
2198 Object* child = *children[j];
2199 HeapEntry* child_entry =
2200 filler_->FindOrAddEntry(child, native_entries_allocator_);
2201 filler_->SetNamedReference(
2202 HeapGraphEdge::kInternal,
// NOTE(review): the remaining SetNamedReference arguments (parent index,
// edge name, child entry) and the loop closes are elided from this listing.
2208 isolate->global_handles()->RemoveImplicitRefGroups();
// Returns the HeapObject list keyed by |info| in objects_by_info_, creating
// the list on first use. When an equivalent key already exists, the caller's
// duplicate |info| is redundant (only the first instance is kept as the key).
2211 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2212 v8::RetainedObjectInfo* info) {
2213 HashMap::Entry* entry =
2214 objects_by_info_.Lookup(info, InfoHash(info), true);
2215 if (entry->value != NULL) {
// NOTE(review): lines are elided here -- presumably info->Dispose() for
// the duplicate key, then the else-branch creating the list:
2218 entry->value = new List<HeapObject*>(4);
2220 return reinterpret_cast<List<HeapObject*>* >(entry->value);
// Entry point used by HeapSnapshotGenerator: materializes all native-object
// entries and edges through |filler|. Returns true on success.
2224 bool NativeObjectsExplorer::IterateAndExtractReferences(
2225 SnapshotFillerInterface* filler) {
// NOTE(review): a line is elided here -- presumably filler_ = filler;.
2227 FillRetainedObjects();
2228 FillImplicitReferences();
2229 if (EstimateObjectsCount() > 0) {
2230 for (HashMap::Entry* p = objects_by_info_.Start();
// NOTE(review): the loop condition line is elided from this listing.
2232 p = objects_by_info_.Next(p)) {
2233 v8::RetainedObjectInfo* info =
2234 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
// Hook the info entry under its synthetic group entry.
2235 SetNativeRootReference(info);
2236 List<HeapObject*>* objects =
2237 reinterpret_cast<List<HeapObject*>* >(p->value);
2238 for (int i = 0; i < objects->length(); ++i) {
// Cross-link each JS wrapper with its native counterpart.
2239 SetWrapperNativeReferences(objects->at(i), info);
// NOTE(review): loop closes are elided from this listing here.
2242 SetRootNativeRootsReference();
// NOTE(review): the function tail (filler_ reset and return statement) is
// elided from this listing.
// Synthetic RetainedObjectInfo representing a named group of native
// objects; used as the shared parent entry for all natives whose infos
// report the same group label.
2249 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2251 explicit NativeGroupRetainedObjectInfo(const char* label)
// NOTE(review): surrounding initializer-list lines are elided from this
// listing. The hash is simply the label pointer value -- labels are
// interned copies, so pointer identity works as a hash here.
2253 hash_(reinterpret_cast<intptr_t>(label)),
2257 virtual ~NativeGroupRetainedObjectInfo() {}
2258 virtual void Dispose() {
// NOTE(review): the Dispose body is elided -- presumably it guards against
// double disposal and deletes this; confirm against the original.
2263 virtual bool IsEquivalent(RetainedObjectInfo* other) {
// Equivalence = same hash AND same label text.
2264 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2266 virtual intptr_t GetHash() { return hash_; }
2267 virtual const char* GetLabel() { return label_; }
// NOTE(review): remaining members (label_, disposal flag, closing brace)
// are elided from this listing.
// Interns a group label: returns the NativeGroupRetainedObjectInfo for
// |label|, creating and caching one in native_groups_ on first use.
2276 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2277 const char* label) {
// Copy the label into the snapshot's string storage so the key outlives
// the embedder-owned string.
2278 const char* label_copy = names_->GetCopy(label);
2279 uint32_t hash = StringHasher::HashSequentialString(
// NOTE(review): the first HashSequentialString argument (the character
// data) is elided from this listing.
2281 static_cast<int>(strlen(label_copy)),
2282 isolate_->heap()->HashSeed());
2283 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
// NOTE(review): the remaining Lookup arguments are elided here.
2285 if (entry->value == NULL) {
2286 entry->value = new NativeGroupRetainedObjectInfo(label);
// NOTE(review): closing brace elided from this listing.
2288 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
// Creates (if needed) the entry for |info| and links it under its group's
// synthetic entry with an auto-indexed internal edge.
2292 void NativeObjectsExplorer::SetNativeRootReference(
2293 v8::RetainedObjectInfo* info) {
2294 HeapEntry* child_entry =
2295 filler_->FindOrAddEntry(info, native_entries_allocator_);
2296 ASSERT(child_entry != NULL);
2297 NativeGroupRetainedObjectInfo* group_info =
2298 FindOrAddGroupInfo(info->GetGroupLabel());
2299 HeapEntry* group_entry =
2300 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2301 filler_->SetNamedAutoIndexReference(
2302 HeapGraphEdge::kInternal,
2303 group_entry->index(),
// NOTE(review): the trailing argument (presumably child_entry) and the
// closing lines are elided from this listing.
// Cross-links a JS wrapper object and its native counterpart: a named
// internal edge from the wrapper to the info entry, and an auto-indexed
// element edge back from the info entry to the wrapper.
2308 void NativeObjectsExplorer::SetWrapperNativeReferences(
2309 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
// The wrapper entry must already exist (created by the V8 heap pass).
2310 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2311 ASSERT(wrapper_entry != NULL);
2312 HeapEntry* info_entry =
2313 filler_->FindOrAddEntry(info, native_entries_allocator_);
2314 ASSERT(info_entry != NULL);
2315 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2316 wrapper_entry->index(),
// NOTE(review): the edge-name and child-entry arguments are elided here.
2319 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2320 info_entry->index(),
// NOTE(review): the child-entry argument and closing lines are elided
// from this listing.
// Attaches every synthetic group entry directly to the snapshot root with
// an auto-indexed element edge, making native groups top-level retainers.
2325 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2326 for (HashMap::Entry* entry = native_groups_.Start();
// NOTE(review): the loop condition line is elided from this listing.
2328 entry = native_groups_.Next(entry)) {
2329 NativeGroupRetainedObjectInfo* group_info =
2330 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2331 HeapEntry* group_entry =
2332 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2333 ASSERT(group_entry != NULL);
2334 filler_->SetIndexedAutoIndexReference(
2335 HeapGraphEdge::kElement,
2336 snapshot_->root()->index(),
// NOTE(review): the child-entry argument and closing lines are elided
// from this listing.
// Called for each global handle carrying an embedder class id: asks the
// embedder's wrapper-class callback for a RetainedObjectInfo and records
// the wrapper under it. Objects already covered by ObjectGroups (tracked
// in in_groups_) are skipped.
2342 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2343 if (in_groups_.Contains(*p)) return;
2344 Isolate* isolate = isolate_;
2345 v8::RetainedObjectInfo* info =
2346 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
// The embedder may decline to describe this class id.
2347 if (info == NULL) return;
2348 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
// Concrete SnapshotFillerInterface: maps HeapThings to HeapEntry indices
// via a HeapEntriesMap and forwards edge creation to the entries. Entries
// are addressed by index rather than by pointer because the snapshot's
// entries list may reallocate while the graph is being built.
2352 class SnapshotFiller : public SnapshotFillerInterface {
2354 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
2355 : snapshot_(snapshot),
2356 names_(snapshot->profiler()->names()),
2357 entries_(entries) { }
2358 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2359 HeapEntry* entry = allocator->AllocateEntry(ptr);
2360 entries_->Pair(ptr, entry->index());
// NOTE(review): the return statement and close are elided here.
2363 HeapEntry* FindEntry(HeapThing ptr) {
2364 int index = entries_->Map(ptr);
2365 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
2367 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
2368 HeapEntry* entry = FindEntry(ptr);
2369 return entry != NULL ? entry : AddEntry(ptr, allocator);
2371 void SetIndexedReference(HeapGraphEdge::Type type,
// NOTE(review): parameter lines (parent, index) are elided here.
2374 HeapEntry* child_entry) {
2375 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2376 parent_entry->SetIndexedReference(type, index, child_entry);
2378 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
// NOTE(review): the parent parameter line is elided here.
2380 HeapEntry* child_entry) {
2381 HeapEntry* parent_entry = &snapshot_->entries()[parent];
// Auto-index: the next child slot, 1-based.
2382 int index = parent_entry->children_count() + 1;
2383 parent_entry->SetIndexedReference(type, index, child_entry);
2385 void SetNamedReference(HeapGraphEdge::Type type,
// NOTE(review): the parent parameter line is elided here.
2387 const char* reference_name,
2388 HeapEntry* child_entry) {
2389 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2390 parent_entry->SetNamedReference(type, reference_name, child_entry);
2392 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
// NOTE(review): the parent parameter line is elided here.
2394 HeapEntry* child_entry) {
2395 HeapEntry* parent_entry = &snapshot_->entries()[parent];
2396 int index = parent_entry->children_count() + 1;
// The auto-generated edge name is the 1-based child index as a string.
2397 parent_entry->SetNamedReference(
2399 names_->GetName(index),
// NOTE(review): remaining arguments, method closes, and access specifiers
// are elided from this listing.
// None of these members are owned by the filler.
2404 HeapSnapshot* snapshot_;
2405 StringsStorage* names_;
2406 HeapEntriesMap* entries_;
// Wires the generator to its snapshot, the embedder's progress control,
// and the two explorers (VM heap objects and embedder/native objects).
2410 HeapSnapshotGenerator::HeapSnapshotGenerator(
2411 HeapSnapshot* snapshot,
2412 v8::ActivityControl* control,
2413 v8::HeapProfiler::ObjectNameResolver* resolver,
// NOTE(review): a parameter line (presumably Heap* heap) is elided here.
2415 : snapshot_(snapshot),
// NOTE(review): initializer lines (control_, progress counters, heap_)
// are elided from this listing.
2417 v8_heap_explorer_(snapshot_, this, resolver),
2418 dom_explorer_(snapshot_, this),
// Produces the snapshot: tags global objects, forces the heap into a
// stable iterable state with two full GCs, then extracts all entries and
// edges. Returns false if the embedder aborted via ActivityControl.
2423 bool HeapSnapshotGenerator::GenerateSnapshot() {
// Must run before the heap is frozen below -- tagging can allocate.
2424 v8_heap_explorer_.TagGlobalObjects();
2426 // TODO(1562) Profiler assumes that any object that is in the heap after
2427 // full GC is reachable from the root when computing dominators.
2428 // This is not true for weakly reachable objects.
2429 // As a temporary solution we call GC twice.
2430 heap_->CollectAllGarbage(
2431 Heap::kMakeHeapIterableMask,
2432 "HeapSnapshotGenerator::GenerateSnapshot");
2433 heap_->CollectAllGarbage(
2434 Heap::kMakeHeapIterableMask,
2435 "HeapSnapshotGenerator::GenerateSnapshot");
// NOTE(review): debug-only preprocessor guards around the checks below
// appear to be elided from this listing.
2438 Heap* debug_heap = heap_;
// Snapshotting needs precisely-swept spaces so the heap iterator sees
// every live object exactly once.
2439 CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2440 CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2441 CHECK(!debug_heap->code_space()->was_swept_conservatively());
2442 CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2443 CHECK(!debug_heap->property_cell_space()->
2444 was_swept_conservatively());
2445 CHECK(!debug_heap->map_space()->was_swept_conservatively());
2448 // The following code uses heap iterators, so we want the heap to be
2449 // stable. It should follow TagGlobalObjects as that can allocate.
2450 DisallowHeapAllocation no_alloc;
// NOTE(review): conditional-compilation guards around the Verify() calls
// are elided from this listing.
2453 debug_heap->Verify();
2456 SetProgressTotal(1); // 1 pass.
2459 debug_heap->Verify();
2462 if (!FillReferences()) return false;
2464 snapshot_->FillChildren();
2465 snapshot_->RememberLastJSObjectId();
// Emit the final 100% progress report; the embedder may still abort here.
2467 progress_counter_ = progress_total_;
2468 if (!ProgressReport(true)) return false;
// NOTE(review): the trailing return statement and close are elided from
// this listing.
// Called by the explorers after each processed object to advance the
// progress counter used by ProgressReport.
2473 void HeapSnapshotGenerator::ProgressStep() {
2474 ++progress_counter_;
// Reports progress to the embedder's ActivityControl every
// kProgressReportGranularity steps, or unconditionally when |force| is
// set; a kContinue reply means snapshotting should keep going.
2478 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2479 const int kProgressReportGranularity = 10000;
2480 if (control_ != NULL
2481 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
// NOTE(review): the statement receiving this comparison (presumably a
// return) is elided from this listing.
2483 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2484 v8::ActivityControl::kContinue;
// NOTE(review): the function tail (fall-through return and close) is
// elided from this listing.
// Precomputes the progress denominator: estimated object count per pass
// times the number of passes. No-op when there is no ActivityControl to
// report to.
2490 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2491 if (control_ == NULL) return;
2492 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2493 progress_total_ = iterations_count * (
2494 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2495 dom_explorer_.EstimateObjectsCount());
2496 progress_counter_ = 0;
// Runs both explorers against a shared SnapshotFiller; short-circuits the
// native pass if the VM-heap pass reports an abort.
2500 bool HeapSnapshotGenerator::FillReferences() {
2501 SnapshotFiller filler(snapshot_, &entries_);
2502 v8_heap_explorer_.AddRootEntries(&filler);
2503 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2504 && dom_explorer_.IterateAndExtractReferences(&filler);
// Compile-time table of the maximum number of characters (including a
// possible minus sign for the signed case) needed to print an integer of
// the given byte width in decimal. Used to size on-stack buffers for
// number-to-string conversion during JSON serialization.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;    // e.g. "-2147483648"
  static const int kUnsigned = 10;  // e.g. "4294967295"
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;    // e.g. "-9223372036854775808"
  static const int kUnsigned = 20;  // e.g. "18446744073709551615"
};
// Buffers serializer output into fixed-size chunks and hands each full
// chunk to the embedder's v8::OutputStream; remembers whether the stream
// asked to abort so callers can bail out early.
2519 class OutputStreamWriter {
2521 explicit OutputStreamWriter(v8::OutputStream* stream)
// NOTE(review): surrounding initializer lines (stream_, chunk_pos_,
// aborted_) are elided from this listing.
2523 chunk_size_(stream->GetChunkSize()),
2524 chunk_(chunk_size_),
2527 ASSERT(chunk_size_ > 0);
2529 bool aborted() { return aborted_; }
2530 void AddCharacter(char c) {
// NOTE(review): a guard line is elided here.
2532 ASSERT(chunk_pos_ < chunk_size_);
2533 chunk_[chunk_pos_++] = c;
// NOTE(review): a MaybeWriteChunk() call and the method close are elided.
2536 void AddString(const char* s) {
2537 AddSubstring(s, StrLength(s));
2539 void AddSubstring(const char* s, int n) {
// NOTE(review): an early-out line is elided here.
2541 ASSERT(static_cast<size_t>(n) <= strlen(s));
2542 const char* s_end = s + n;
// Copies in chunk-sized pieces, flushing whenever the chunk fills.
2544 int s_chunk_size = Min(
2545 chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2546 ASSERT(s_chunk_size > 0);
2547 OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
// NOTE(review): the source-pointer advance line is elided here.
2549 chunk_pos_ += s_chunk_size;
// NOTE(review): the MaybeWriteChunk() call and loop close are elided here.
2553 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
// Finalize (declaration line elided): flushes any partial chunk and
// signals end-of-stream to the embedder.
2555 if (aborted_) return;
2556 ASSERT(chunk_pos_ < chunk_size_);
2557 if (chunk_pos_ != 0) {
// NOTE(review): the WriteChunk() call is elided here.
2560 stream_->EndOfStream();
2564 template<typename T>
2565 void AddNumberImpl(T n, const char* format) {
2566 // Buffer for the longest value plus trailing \0
2567 static const int kMaxNumberSize =
2568 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
// Fast path: format directly into the remaining chunk space.
2569 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2570 int result = OS::SNPrintF(
2571 chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2572 ASSERT(result != -1);
2573 chunk_pos_ += result;
// Slow path: format into a stack buffer, then append as a string so the
// value can straddle a chunk boundary.
2576 EmbeddedVector<char, kMaxNumberSize> buffer;
2577 int result = OS::SNPrintF(buffer, format, n);
2579 ASSERT(result != -1);
2580 AddString(buffer.start());
2583 void MaybeWriteChunk() {
2584 ASSERT(chunk_pos_ <= chunk_size_);
2585 if (chunk_pos_ == chunk_size_) {
// NOTE(review): WriteChunk() declaration/body lines are interleaved and
// partially elided below.
2590 if (aborted_) return;
// Propagate an embedder abort so later writes become no-ops.
2591 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2592 v8::OutputStream::kAbort) aborted_ = true;
// NOTE(review): the chunk_pos_ reset is elided here.
2596 v8::OutputStream* stream_;
2598 ScopedVector<char> chunk_;
// NOTE(review): chunk_size_, chunk_pos_, aborted_ member declarations and
// the class closing brace are elided from this listing.
// Field counts for the serialized flat arrays; these must stay in sync
// with the "edge_fields"/"node_fields" meta arrays written by
// SerializeSnapshot and with SerializeEdge/SerializeNode.
2604 // type, name|index, to_node.
2605 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2606 // type, name, id, self_size, children_index.
2607 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 5;
// Serializes the whole snapshot as JSON to |stream|. Allocation-tracking
// data is prepared first so trace serialization sees a consistent state;
// a fresh OutputStreamWriter is used for the duration of the call.
2609 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2610 if (AllocationTracker* allocation_tracker =
2611 snapshot_->profiler()->allocation_tracker()) {
2612 allocation_tracker->PrepareForSerialization();
2614 ASSERT(writer_ == NULL);
2615 writer_ = new OutputStreamWriter(stream);
// NOTE(review): the SerializeImpl() call and writer teardown (delete and
// NULL reset) are elided from this listing.
// Emits the top-level JSON object: "snapshot" meta, then nodes, edges,
// allocation-trace data, and finally the string table. Bails out as soon
// as the writer reports an abort from the embedder stream.
2622 void HeapSnapshotJSONSerializer::SerializeImpl() {
// The root entry must come first so its node index is 0.
2623 ASSERT(0 == snapshot_->root()->index());
2624 writer_->AddCharacter('{');
2625 writer_->AddString("\"snapshot\":{");
2626 SerializeSnapshot();
2627 if (writer_->aborted()) return;
2628 writer_->AddString("},\n");
2629 writer_->AddString("\"nodes\":[");
// NOTE(review): the SerializeNodes() call is elided from this listing.
2631 if (writer_->aborted()) return;
2632 writer_->AddString("],\n");
2633 writer_->AddString("\"edges\":[");
// NOTE(review): the SerializeEdges() call is elided from this listing.
2635 if (writer_->aborted()) return;
2636 writer_->AddString("],\n");
2638 writer_->AddString("\"trace_function_infos\":[");
2639 SerializeTraceNodeInfos();
2640 if (writer_->aborted()) return;
2641 writer_->AddString("],\n");
2642 writer_->AddString("\"trace_tree\":[");
2643 SerializeTraceTree();
2644 if (writer_->aborted()) return;
2645 writer_->AddString("],\n");
// Strings must be last: ids are still being handed out while nodes,
// edges and traces are serialized above.
2647 writer_->AddString("\"strings\":[");
// NOTE(review): the SerializeStrings() call is elided from this listing.
2649 if (writer_->aborted()) return;
2650 writer_->AddCharacter(']');
2651 writer_->AddCharacter('}');
2652 writer_->Finalize();
// Interns |s| into the string table and returns its id; ids are assigned
// in first-use order starting from next_string_id_ and double as indices
// into the "strings" array emitted by SerializeStrings.
2656 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2657 HashMap::Entry* cache_entry = strings_.Lookup(
2658 const_cast<char*>(s), StringHash(s), true);
2659 if (cache_entry->value == NULL) {
// The id is stored directly in the hash-map value slot.
2660 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
// NOTE(review): closing brace elided from this listing.
2662 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
// Writes the decimal representation of |value| into |buffer| starting at
// |buffer_pos| and returns the position one past the last digit written.
// Digits are emitted right-to-left after first counting how many are
// needed.
2666 static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
2667 int number_of_digits = 0;
// NOTE(review): the digit-counting loop is elided from this listing.
2673 buffer_pos += number_of_digits;
2674 int result = buffer_pos;
// NOTE(review): the digit-emitting loop is partially elided; the visible
// lines write digits from least to most significant.
2676 int last_digit = value % 10;
2677 buffer[--buffer_pos] = '0' + last_digit;
// NOTE(review): the loop close and return statement are elided from this
// listing.
// Emits one edge as "type,name_or_index,to_node", comma-prefixed for every
// edge except the first. Element and hidden edges carry a numeric index;
// all other edge types carry a string-table id.
2684 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
// NOTE(review): the second parameter line (presumably bool first_edge) is
// elided from this listing.
2686 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2687 static const int kBufferSize =
2688 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2689 EmbeddedVector<char, kBufferSize> buffer;
2690 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2691 || edge->type() == HeapGraphEdge::kHidden
2692 ? edge->index() : GetStringId(edge->name());
// NOTE(review): buffer_pos initialization and the first-edge guard are
// elided from this listing.
2695 buffer[buffer_pos++] = ',';
2697 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2698 buffer[buffer_pos++] = ',';
2699 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2700 buffer[buffer_pos++] = ',';
// to_node is the target entry's index into the flat nodes array.
2701 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2702 buffer[buffer_pos++] = '\n';
2703 buffer[buffer_pos++] = '\0';
2704 writer_->AddString(buffer.start());
// Emits all edges; the snapshot's children() list is grouped by the
// owning node's index (verified by the assertion below).
2708 void HeapSnapshotJSONSerializer::SerializeEdges() {
2709 List<HeapGraphEdge*>& edges = snapshot_->children();
2710 for (int i = 0; i < edges.length(); ++i) {
// NOTE(review): the assertion's opening line is elided from this listing;
// the visible line checks edges stay sorted by source-node index.
2712 edges[i - 1]->from()->index() <= edges[i]->from()->index());
// The second argument suppresses the leading comma for the first edge.
2713 SerializeEdge(edges[i], i == 0);
2714 if (writer_->aborted()) return;
// NOTE(review): loop close elided from this listing.
// Emits one node as "type,name,id,self_size,edge_count", comma-prefixed
// for every node except the first (index 0, the root).
2719 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2720 // The buffer needs space for 5 unsigned ints, 5 commas, \n and \0
2721 static const int kBufferSize =
2722 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
// NOTE(review): the remainder of the size expression and the buffer_pos
// initialization are elided from this listing.
2724 EmbeddedVector<char, kBufferSize> buffer;
2726 if (entry_index(entry) != 0) {
2727 buffer[buffer_pos++] = ',';
// NOTE(review): closing brace elided here.
2729 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2730 buffer[buffer_pos++] = ',';
2731 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2732 buffer[buffer_pos++] = ',';
2733 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2734 buffer[buffer_pos++] = ',';
2735 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2736 buffer[buffer_pos++] = ',';
2737 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2738 buffer[buffer_pos++] = '\n';
2739 buffer[buffer_pos++] = '\0';
2740 writer_->AddString(buffer.start());
// Emits every node in index order; stops early if the stream aborted.
2744 void HeapSnapshotJSONSerializer::SerializeNodes() {
2745 List<HeapEntry>& entries = snapshot_->entries();
2746 for (int i = 0; i < entries.length(); ++i) {
2747 SerializeNode(&entries[i]);
2748 if (writer_->aborted()) return;
// NOTE(review): loop close elided from this listing.
// Writes the contents of the top-level "snapshot" object: title, uid, the
// "meta" field-layout description consumed by DevTools, and summary
// counts. The meta arrays must stay in sync with SerializeNode,
// SerializeEdge and SerializeTraceNodeInfos.
2753 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2754 writer_->AddString("\"title\":\"");
2755 writer_->AddString(snapshot_->title());
2756 writer_->AddString("\"");
2757 writer_->AddString(",\"uid\":");
2758 writer_->AddNumber(snapshot_->uid());
2759 writer_->AddString(",\"meta\":");
2760 // The object describing node serialization layout.
2761 // We use a set of macros to improve readability.
2762 #define JSON_A(s) "[" s "]"
2763 #define JSON_O(s) "{" s "}"
2764 #define JSON_S(s) "\"" s "\""
// The whole meta object is a single compile-time string literal.
2765 writer_->AddString(JSON_O(
2766 JSON_S("node_fields") ":" JSON_A(
// NOTE(review): several JSON_S lines are elided throughout this literal;
// the visible lines are kept verbatim -- do not reflow.
2770 JSON_S("self_size") ","
2771 JSON_S("edge_count")) ","
2772 JSON_S("node_types") ":" JSON_A(
2774 JSON_S("hidden") ","
2776 JSON_S("string") ","
2777 JSON_S("object") ","
2779 JSON_S("closure") ","
2780 JSON_S("regexp") ","
2781 JSON_S("number") ","
2782 JSON_S("native") ","
2783 JSON_S("synthetic") ","
2784 JSON_S("concatenated string") ","
2785 JSON_S("sliced string")) ","
2786 JSON_S("string") ","
2787 JSON_S("number") ","
2788 JSON_S("number") ","
2789 JSON_S("number") ","
2790 JSON_S("number") ","
2791 JSON_S("number")) ","
2792 JSON_S("edge_fields") ":" JSON_A(
2794 JSON_S("name_or_index") ","
2795 JSON_S("to_node")) ","
2796 JSON_S("edge_types") ":" JSON_A(
2798 JSON_S("context") ","
2799 JSON_S("element") ","
2800 JSON_S("property") ","
2801 JSON_S("internal") ","
2802 JSON_S("hidden") ","
2803 JSON_S("shortcut") ","
2805 JSON_S("string_or_number") ","
2807 JSON_S("trace_function_info_fields") ":" JSON_A(
2808 JSON_S("function_id") ","
2810 JSON_S("script_name") ","
2811 JSON_S("script_id") ","
2813 JSON_S("column")) ","
2814 JSON_S("trace_node_fields") ":" JSON_A(
2816 JSON_S("function_id") ","
2819 JSON_S("children"))));
// NOTE(review): #undef lines for the JSON_* macros are presumably elided
// from this listing here.
2823 writer_->AddString(",\"node_count\":");
2824 writer_->AddNumber(snapshot_->entries().length());
2825 writer_->AddString(",\"edge_count\":");
2826 writer_->AddNumber(snapshot_->edges().length());
2827 writer_->AddString(",\"trace_function_count\":");
// NOTE(review): the count variable declaration is elided here.
2829 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
// NOTE(review): the if (tracker) guard line is elided here.
2831 count = tracker->id_to_function_info()->occupancy();
2833 writer_->AddNumber(count);
2837 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2838 static const char hex_chars[] = "0123456789ABCDEF";
2839 w->AddString("\\u");
2840 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2841 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2842 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2843 w->AddCharacter(hex_chars[u & 0xf]);
// Emits the allocation trace tree starting from its root node; a no-op
// when allocation tracking is not enabled.
2847 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2848 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2849 if (!tracker) return;
2850 AllocationTraceTree* traces = tracker->trace_tree();
2851 SerializeTraceNode(traces->root());
// Recursively emits one allocation-trace node as
// "id,function_id,allocation_count,allocation_size,[child,child,...]".
2855 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2856 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2857 const int kBufferSize =
2858 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
// NOTE(review): the remainder of the size expression and the buffer_pos
// initialization are elided from this listing.
2860 EmbeddedVector<char, kBufferSize> buffer;
2862 buffer_pos = utoa(node->id(), buffer, buffer_pos);
2863 buffer[buffer_pos++] = ',';
2864 buffer_pos = utoa(node->function_id(), buffer, buffer_pos);
2865 buffer[buffer_pos++] = ',';
2866 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2867 buffer[buffer_pos++] = ',';
2868 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2869 buffer[buffer_pos++] = ',';
// The children array is opened here and closed after the recursion below.
2870 buffer[buffer_pos++] = '[';
2871 buffer[buffer_pos++] = '\0';
2872 writer_->AddString(buffer.start());
2874 Vector<AllocationTraceNode*> children = node->children();
2875 for (int i = 0; i < children.length(); i++) {
// NOTE(review): the guard emitting this comma only after the first child
// (presumably if (i > 0)) is elided from this listing.
2877 writer_->AddCharacter(',');
2879 SerializeTraceNode(children[i]);
// NOTE(review): loop close elided here.
2881 writer_->AddCharacter(']');
2885 // 0-based position is converted to 1-based during the serialization.
// Writes |position| + 1 in decimal into |buffer|, or a literal '0' when
// the position is unknown (-1); returns the updated buffer position.
2886 static int SerializePosition(int position, const Vector<char>& buffer,
// NOTE(review): the final parameter line (presumably int buffer_pos) is
// elided from this listing.
2888 if (position == -1) {
2889 buffer[buffer_pos++] = '0';
// NOTE(review): the else-branch opener is elided here.
2891 ASSERT(position >= 0);
2892 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
// NOTE(review): the branch close and return statement are elided from
// this listing.
// Emits one record per tracked function as
// "function_id,name,script_name,script_id,line,column" with 1-based
// line/column (0 when unknown). No-op without allocation tracking.
2898 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2899 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2900 if (!tracker) return;
2901 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2902 const int kBufferSize =
2903 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
// NOTE(review): the remainder of the size expression is elided from this
// listing.
2905 EmbeddedVector<char, kBufferSize> buffer;
2906 HashMap* id_to_function_info = tracker->id_to_function_info();
2907 bool first_entry = true;
2908 for (HashMap::Entry* p = id_to_function_info->Start();
// NOTE(review): the loop condition line is elided here.
2910 p = id_to_function_info->Next(p)) {
2911 SnapshotObjectId id =
2912 static_cast<SnapshotObjectId>(reinterpret_cast<intptr_t>(p->key));
2913 AllocationTracker::FunctionInfo* info =
2914 reinterpret_cast<AllocationTracker::FunctionInfo* >(p->value);
// NOTE(review): the buffer_pos reset and the first_entry guard are
// partially elided around here; records after the first get a comma.
2917 first_entry = false;
2919 buffer[buffer_pos++] = ',';
2921 buffer_pos = utoa(id, buffer, buffer_pos);
2922 buffer[buffer_pos++] = ',';
2923 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
2924 buffer[buffer_pos++] = ',';
2925 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
2926 buffer[buffer_pos++] = ',';
2927 // The cast is safe because script id is a non-negative Smi.
2928 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
// NOTE(review): the continuation argument line (buffer_pos) is elided.
2930 buffer[buffer_pos++] = ',';
2931 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
2932 buffer[buffer_pos++] = ',';
2933 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
2934 buffer[buffer_pos++] = '\n';
2935 buffer[buffer_pos++] = '\0';
2936 writer_->AddString(buffer.start());
// NOTE(review): loop close elided from this listing.
// Emits |s| as a quoted JSON string: standard short escapes for control
// characters and quote/backslash, \uXXXX escapes for other non-printable
// bytes, and UTF-8 sequences re-encoded as \u UTF-16 escapes.
2941 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
2942 writer_->AddCharacter('\n');
2943 writer_->AddCharacter('\"');
2944 for ( ; *s != '\0'; ++s) {
// NOTE(review): the switch(*s) dispatch and case labels are elided
// throughout this listing; the visible lines are the case bodies for the
// standard JSON escapes.
2947 writer_->AddString("\\b");
2950 writer_->AddString("\\f");
2953 writer_->AddString("\\n");
2956 writer_->AddString("\\r");
2959 writer_->AddString("\\t");
// Quote and backslash get a leading backslash, then the character itself.
2963 writer_->AddCharacter('\\');
2964 writer_->AddCharacter(*s);
// Default case: printable ASCII is emitted verbatim.
2967 if (*s > 31 && *s < 128) {
2968 writer_->AddCharacter(*s);
2969 } else if (*s <= 31) {
2970 // Special character with no dedicated literal.
2971 WriteUChar(writer_, *s);
// Bytes >= 128 start a multi-byte UTF-8 sequence.
2973 // Convert UTF-8 into \u UTF-16 literal.
2974 unsigned length = 1, cursor = 0;
// Probe up to 4 bytes (max UTF-8 sequence length), stopping at NUL.
2975 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
2976 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
2977 if (c != unibrow::Utf8::kBadChar) {
2978 WriteUChar(writer_, c);
2979 ASSERT(cursor != 0);
// NOTE(review): the pointer advance (presumably s += cursor - 1) and the
// bad-character fallback branch are partially elided around here.
2982 writer_->AddCharacter('?');
// NOTE(review): closing braces elided from this listing.
2987 writer_->AddCharacter('\"');
// Emits the string table in id order. Slot 0 is a placeholder ("<dummy>")
// because string ids handed out by GetStringId start at 1.
2991 void HeapSnapshotJSONSerializer::SerializeStrings() {
2992 ScopedVector<const unsigned char*> sorted_strings(
2993 strings_.occupancy() + 1);
// Invert the string->id map into an id-indexed array.
2994 for (HashMap::Entry* entry = strings_.Start();
// NOTE(review): the loop condition line is elided here.
2996 entry = strings_.Next(entry)) {
2997 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
2998 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
// NOTE(review): loop close elided here.
3000 writer_->AddString("\"<dummy>\"");
3001 for (int i = 1; i < sorted_strings.length(); ++i) {
3002 writer_->AddCharacter(',');
3003 SerializeString(sorted_strings[i]);
3004 if (writer_->aborted()) return;
// NOTE(review): loop close elided from this listing.
3009 } } // namespace v8::internal