1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #include "heap-snapshot-generator-inl.h"
32 #include "allocation-tracker.h"
33 #include "code-stubs.h"
34 #include "heap-profiler.h"
37 #include "v8conversions.h"
// Constructs a string-labelled edge between two snapshot entries.
// 'from'/'to' are indices into the snapshot's entry list, resolved to
// pointers later by ReplaceToIndexWithEntry().
43 HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
// Guards that only named edge kinds use this constructor (the rest of
// the condition and the initializer list are elided in this dump).
48 ASSERT(type == kContextVariable
// Constructs a numerically-indexed edge (array element or hidden link);
// only kElement and kHidden edges carry an index instead of a name.
56 HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
61 ASSERT(type == kElement || type == kHidden);
// Converts the stored destination index into a direct HeapEntry pointer.
// NOTE(review): assumes snapshot->entries() storage no longer reallocates
// after this point — confirm against FillChildren()'s call order.
65 void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
66 to_entry_ = &snapshot->entries()[to_index_];
70 const int HeapEntry::kNoEntry = -1;
// HeapEntry constructor: records the owning snapshot, self size and the
// allocation-trace node id (intermediate parameters/initializers elided).
72 HeapEntry::HeapEntry(HeapSnapshot* snapshot,
77 unsigned trace_node_id)
81 self_size_(self_size),
85 trace_node_id_(trace_node_id) { }
// Appends a named edge from this entry to 'entry' into the snapshot's
// shared, append-only edge list (edges are stored by index, not pointer).
88 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
91 HeapGraphEdge edge(type, name, this->index(), entry->index());
92 snapshot_->edges().Add(edge);
// Appends an indexed edge (element/hidden) from this entry to 'entry';
// mirrors SetNamedReference but carries a numeric index.
97 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
100 HeapGraphEdge edge(type, index, this->index(), entry->index());
101 snapshot_->edges().Add(edge);
// Debug pretty-printer: dumps this entry (size, id, type, truncated name)
// and then recursively prints children, two spaces deeper per level,
// until max_depth is exhausted. Several lines (break statements, prefix
// assignments, string-escaping loop body) are elided in this dump.
106 void HeapEntry::Print(
107 const char* prefix, const char* edge_name, int max_depth, int indent) {
108 STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
109 OS::Print("%6" V8PRIuPTR " @%6u %*c %s%s: ",
110 self_size(), id(), indent, ' ', prefix, edge_name);
111 if (type() != kString) {
// Non-string entries: print type tag and at most 40 chars of the name.
112 OS::Print("%s %.40s\n", TypeAsString(), name_);
// String entries: walk the name character by character (loop body elided),
// capped at 40 characters.
115 const char* c = name_;
116 while (*c && (c - name_) <= 40) {
117 // (escaping/printing of *c elided in this dump)
125 if (--max_depth == 0) return;
126 Vector<HeapGraphEdge*> ch = children();
127 for (int i = 0; i < ch.length(); ++i) {
128 HeapGraphEdge& edge = *ch[i];
129 const char* edge_prefix = "";
// Scratch buffer for formatting numeric edge indices; edge_name points
// into it by default and is redirected to edge.name() for named kinds.
130 EmbeddedVector<char, 64> index;
131 const char* edge_name = index.start();
132 switch (edge.type()) {
133 case HeapGraphEdge::kContextVariable:
135 edge_name = edge.name();
137 case HeapGraphEdge::kElement:
138 OS::SNPrintF(index, "%d", edge.index());
140 case HeapGraphEdge::kInternal:
142 edge_name = edge.name();
144 case HeapGraphEdge::kProperty:
145 edge_name = edge.name();
147 case HeapGraphEdge::kHidden:
149 OS::SNPrintF(index, "%d", edge.index());
151 case HeapGraphEdge::kShortcut:
153 edge_name = edge.name();
155 case HeapGraphEdge::kWeak:
157 edge_name = edge.name();
// default: unknown edge type is reported but not fatal.
160 OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
162 edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
// Maps the entry's Type enum to a short human-readable tag used by
// Print(); unknown values fall through to "???". (The switch header on
// type() is elided in this dump.)
167 const char* HeapEntry::TypeAsString() {
169 case kHidden: return "/hidden/";
170 case kObject: return "/object/";
171 case kClosure: return "/closure/";
172 case kString: return "/string/";
173 case kCode: return "/code/";
174 case kArray: return "/array/";
175 case kRegExp: return "/regexp/";
176 case kHeapNumber: return "/number/";
177 case kNative: return "/native/";
178 case kSynthetic: return "/synthetic/";
179 case kConsString: return "/concatenated string/";
180 case kSlicedString: return "/sliced string/";
181 default: return "???";
186 // It is very important to keep objects that form a heap snapshot
187 // as small as possible.
188 namespace { // Avoid littering the global namespace.
// Compile-time expectations for snapshot record sizes, keyed on pointer
// width (4 = 32-bit, 8 = 64-bit). Checked via STATIC_ASSERT in the
// HeapSnapshot constructor so accidental growth is caught at build time.
190 template <size_t ptr_size> struct SnapshotSizeConstants;
192 template <> struct SnapshotSizeConstants<4> {
193 static const int kExpectedHeapGraphEdgeSize = 12;
194 static const int kExpectedHeapEntrySize = 28;
197 template <> struct SnapshotSizeConstants<8> {
198 static const int kExpectedHeapGraphEdgeSize = 24;
199 static const int kExpectedHeapEntrySize = 40;
// HeapSnapshot constructor: initializes all well-known entry indices to
// kNoEntry and statically verifies that HeapGraphEdge/HeapEntry have the
// expected sizes for the current pointer width (see comment above the
// SnapshotSizeConstants templates).
205 HeapSnapshot::HeapSnapshot(HeapProfiler* profiler,
208 : profiler_(profiler),
211 root_index_(HeapEntry::kNoEntry),
212 gc_roots_index_(HeapEntry::kNoEntry),
213 natives_root_index_(HeapEntry::kNoEntry),
214 max_snapshot_js_object_id_(0) {
216 sizeof(HeapGraphEdge) ==
217 SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
220 SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
// USE() silences unused-constant warnings for the pointer width that is
// not selected on this build.
221 USE(SnapshotSizeConstants<4>::kExpectedHeapGraphEdgeSize);
222 USE(SnapshotSizeConstants<4>::kExpectedHeapEntrySize);
223 USE(SnapshotSizeConstants<8>::kExpectedHeapGraphEdgeSize);
224 USE(SnapshotSizeConstants<8>::kExpectedHeapEntrySize);
225 for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
226 gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
// Detaches this snapshot from its profiler; actual destruction is
// handled by the profiler's RemoveSnapshot (body partially elided).
231 void HeapSnapshot::Delete() {
232 profiler_->RemoveSnapshot(this);
// Records the highest object id assigned so far, so later code can tell
// which objects existed when this snapshot was taken.
237 void HeapSnapshot::RememberLastJSObjectId() {
238 max_snapshot_js_object_id_ = profiler_->heap_object_map()->last_assigned_id();
// Creates the synthetic root entry. Must be called exactly once, before
// any other entry is added: serializer code relies on root being index 0.
242 HeapEntry* HeapSnapshot::AddRootEntry() {
243 ASSERT(root_index_ == HeapEntry::kNoEntry);
244 ASSERT(entries_.is_empty()); // Root entry must be the first one.
245 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
247 HeapObjectsMap::kInternalRootObjectId,
250 root_index_ = entry->index();
251 ASSERT(root_index_ == 0);
// Creates the single synthetic "(GC roots)" entry; one-shot, guarded by
// the kNoEntry assert.
256 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
257 ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
258 HeapEntry* entry = AddEntry(HeapEntry::kSynthetic,
260 HeapObjectsMap::kGcRootsObjectId,
263 gc_roots_index_ = entry->index();
// Creates the synthetic entry for one GC subroot category ('tag' indexes
// VisitorSynchronization sync tags); each tag gets a deterministic id
// from GetNthGcSubrootId.
268 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
269 ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
270 ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
271 HeapEntry* entry = AddEntry(
272 HeapEntry::kSynthetic,
273 VisitorSynchronization::kTagNames[tag],
274 HeapObjectsMap::GetNthGcSubrootId(tag),
277 gc_subroot_indexes_[tag] = entry->index();
// Appends a new HeapEntry to the snapshot's entry list and returns a
// pointer to it. NOTE(review): the returned pointer is into a growable
// list — presumably only valid until the next Add; confirm callers.
282 HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
286 unsigned trace_node_id) {
287 HeapEntry entry(this, type, name, id, size, trace_node_id);
289 return &entries_.last();
// Two-pass finalization of the edge graph:
// 1) allocate one children_ slot per edge and assign each entry its
//    contiguous sub-range via set_children_index;
// 2) resolve every edge's 'to' index to a pointer and register the edge
//    in its source entry's children range.
// After this the entries/edges lists must not be resized.
293 void HeapSnapshot::FillChildren() {
294 ASSERT(children().is_empty());
295 children().Allocate(edges().length());
296 int children_index = 0;
297 for (int i = 0; i < entries().length(); ++i) {
298 HeapEntry* entry = &entries()[i];
299 children_index = entry->set_children_index(children_index);
// Every edge must have been accounted for by some entry.
301 ASSERT(edges().length() == children_index);
302 for (int i = 0; i < edges().length(); ++i) {
303 HeapGraphEdge* edge = &edges()[i];
304 edge->ReplaceToIndexWithEntry(this);
305 edge->from()->add_child(edge);
// Three-way comparison functor for binary-searching the sorted-by-id
// entry list (used by HeapSnapshot::GetEntryById).
310 class FindEntryById {
312 explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
// Returns 0 on match, -1/+1 to steer the bsearch left/right.
313 int operator()(HeapEntry* const* entry) {
314 if ((*entry)->id() == id_) return 0;
315 return (*entry)->id() < id_ ? -1 : 1;
318 SnapshotObjectId id_;
// Looks up an entry by snapshot object id via binary search over the
// lazily-built sorted list (not-found handling elided in this dump).
322 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
323 List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
324 // Perform a binary search by id.
325 int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
328 return entries_by_id->at(index);
// Generic three-way comparator on ->id() for List<T*>::Sort; template
// header line is elided in this dump.
333 static int SortByIds(const T* entry1_ptr,
334 const T* entry2_ptr) {
335 if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
336 return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
// Lazily builds and caches a pointer list over entries_, sorted by id.
// NOTE(review): cached pointers assume entries_ is stable once this is
// first called — confirm no entries are added afterwards.
340 List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
341 if (sorted_entries_.is_empty()) {
342 sorted_entries_.Allocate(entries_.length());
343 for (int i = 0; i < entries_.length(); ++i) {
344 sorted_entries_[i] = &entries_[i];
346 sorted_entries_.Sort(SortByIds);
348 return &sorted_entries_;
// Debug dump of the whole snapshot starting from the root entry.
352 void HeapSnapshot::Print(int max_depth) {
353 root()->Print("", "", max_depth, 0);
// Approximate memory footprint of the snapshot itself: the sum of the
// backing stores of its four lists (leading terms elided in this dump).
357 size_t HeapSnapshot::RawSnapshotSize() const {
360 GetMemoryUsedByList(entries_) +
361 GetMemoryUsedByList(edges_) +
362 GetMemoryUsedByList(children_) +
363 GetMemoryUsedByList(sorted_entries_);
367 // We split IDs on evens for embedder objects (see
368 // HeapObjectsMap::GenerateId) and odds for native objects.
// Well-known ids: root, GC roots, then one id per GC subroot tag;
// ordinary objects start after the last reserved subroot id.
369 const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
370 const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
371 HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
372 const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
373 HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
374 const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
375 HeapObjectsMap::kGcRootsFirstSubrootId +
376 VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
379 static bool AddressesMatch(void* key1, void* key2) {
// HeapObjectsMap constructor: ids start after the reserved range, and a
// dummy element is pushed so every real entries_map_ value is > 0 (see
// the inline comment — MoveObject depends on this invariant).
384 HeapObjectsMap::HeapObjectsMap(Heap* heap)
385 : next_id_(kFirstAvailableObjectId),
386 entries_map_(AddressesMatch),
388 // This dummy element solves a problem with entries_map_.
389 // When we do lookup in HashMap we see no difference between two cases:
390 // it has an entry with NULL as the value or it has created
391 // a new entry on the fly with NULL as the default value.
392 // With such dummy element we have a guaranty that all entries_map_ entries
393 // will have the value field grater than 0.
394 // This fact is using in MoveObject method.
395 entries_.Add(EntryInfo(0, NULL, 0));
// Records that a tracked heap object moved from 'from' to 'to' during GC.
// Returns true iff 'from' was actually tracked. Handles two collision
// cases: an untracked mover landing on a tracked address (the old entry
// is dead), and a tracked mover overwriting another tracked address.
399 bool HeapObjectsMap::MoveObject(Address from, Address to, int object_size) {
401 ASSERT(from != NULL);
402 if (from == to) return false;
403 void* from_value = entries_map_.Remove(from, ComputePointerHash(from));
404 if (from_value == NULL) {
405 // It may occur that some untracked object moves to an address X and there
406 // is a tracked object at that address. In this case we should remove the
407 // entry as we know that the object has died.
408 void* to_value = entries_map_.Remove(to, ComputePointerHash(to));
409 if (to_value != NULL) {
410 int to_entry_info_index =
411 static_cast<int>(reinterpret_cast<intptr_t>(to_value));
// Null addr marks the EntryInfo as dead; RemoveDeadEntries reaps it.
412 entries_.at(to_entry_info_index).addr = NULL;
415 HashMap::Entry* to_entry = entries_map_.Lookup(to, ComputePointerHash(to),
417 if (to_entry->value != NULL) {
418 // We found the existing entry with to address for an old object.
419 // Without this operation we will have two EntryInfo's with the same
420 // value in addr field. It is bad because later at RemoveDeadEntries
421 // one of this entry will be removed with the corresponding entries_map_
423 int to_entry_info_index =
424 static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
425 entries_.at(to_entry_info_index).addr = NULL;
427 int from_entry_info_index =
428 static_cast<int>(reinterpret_cast<intptr_t>(from_value));
429 entries_.at(from_entry_info_index).addr = to;
430 // Size of an object can change during its life, so to keep information
431 // about the object in entries_ consistent, we have to adjust size when the
432 // object is migrated.
433 if (FLAG_heap_profiler_trace_objects) {
434 PrintF("Move object from %p to %p old size %6d new size %6d\n",
437 entries_.at(from_entry_info_index).size,
440 entries_.at(from_entry_info_index).size = object_size;
441 to_entry->value = from_value;
443 return from_value != NULL;
// Refreshes the recorded size for 'addr' without marking it accessed.
447 void HeapObjectsMap::UpdateObjectSize(Address addr, int size) {
448 FindOrAddEntry(addr, size, false);
// Returns the snapshot id for a tracked address, or 0 if untracked
// (0 is never a valid id — ids start at kInternalRootObjectId).
452 SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
453 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
455 if (entry == NULL) return 0;
456 int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
457 EntryInfo& entry_info = entries_.at(entry_index);
// '>' (not '>=') because entries_[0] is the dummy element.
458 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
459 return entry_info.id;
// Returns the id for 'addr', creating a fresh entry (and consuming the
// next even id via kObjectIdStep) if the address was not yet tracked.
// Existing entries get their size refreshed and 'accessed' flag set.
463 SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
466 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
467 HashMap::Entry* entry = entries_map_.Lookup(addr, ComputePointerHash(addr),
469 if (entry->value != NULL) {
471 static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
472 EntryInfo& entry_info = entries_.at(entry_index);
473 entry_info.accessed = accessed;
474 if (FLAG_heap_profiler_trace_objects) {
475 PrintF("Update object size : %p with old size %d and new size %d\n",
480 entry_info.size = size;
481 return entry_info.id;
// New address: map value is the index of the EntryInfo we are about to add.
483 entry->value = reinterpret_cast<void*>(entries_.length());
484 SnapshotObjectId id = next_id_;
485 next_id_ += kObjectIdStep;
486 entries_.Add(EntryInfo(id, addr, size, accessed));
487 ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
// Drops accumulated time-interval stats; ends live-object tracking.
492 void HeapObjectsMap::StopHeapObjectsTracking() {
493 time_intervals_.Clear();
// Re-syncs the address->id map with the live heap: forces a full GC (to
// make the heap iterable and compact) then walks every object, adding or
// refreshing its entry. Optionally traces progress under
// --heap-profiler-trace-objects.
497 void HeapObjectsMap::UpdateHeapObjectsMap() {
498 if (FLAG_heap_profiler_trace_objects) {
499 PrintF("Begin HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
500 entries_map_.occupancy());
502 heap_->CollectAllGarbage(Heap::kMakeHeapIterableMask,
503 "HeapObjectsMap::UpdateHeapObjectsMap");
504 HeapIterator iterator(heap_);
505 for (HeapObject* obj = iterator.next();
507 obj = iterator.next()) {
508 FindOrAddEntry(obj->address(), obj->Size());
509 if (FLAG_heap_profiler_trace_objects) {
510 PrintF("Update object : %p %6d. Next address is %p\n",
513 obj->address() + obj->Size());
517 if (FLAG_heap_profiler_trace_objects) {
518 PrintF("End HeapObjectsMap::UpdateHeapObjectsMap. map has %d entries.\n",
519 entries_map_.occupancy());
// Pairs a heap object with the size the tracker recorded for it;
// expected_size == 0 means the object was not tracked at all. Used only
// by FindUntrackedObjects' tracing path.
527 struct HeapObjectInfo {
528 HeapObjectInfo(HeapObject* obj, int expected_size)
530 expected_size(expected_size) {
// Valid == tracker's size matches the object's actual current size.
536 bool IsValid() const { return expected_size == obj->Size(); }
// Print() classifies the mismatch: untracked, wrong size, or good.
539 if (expected_size == 0) {
540 PrintF("Untracked object : %p %6d. Next address is %p\n",
543 obj->address() + obj->Size());
544 } else if (obj->Size() != expected_size) {
545 PrintF("Wrong size %6d: %p %6d. Next address is %p\n",
549 obj->address() + obj->Size());
551 PrintF("Good object : %p %6d. Next address is %p\n",
554 obj->address() + obj->Size());
// Orders HeapObjectInfo records by object address for sorted reporting.
560 static int comparator(const HeapObjectInfo* a, const HeapObjectInfo* b) {
561 if (a->obj < b->obj) return -1;
562 if (a->obj > b->obj) return 1;
// Consistency check: walks the heap comparing each object against the
// tracker's records. Without tracing it CHECKs sizes match; with
// --heap-profiler-trace-objects it collects per-object records and prints
// a sorted report, skipping runs of valid objects. Returns the number of
// untracked objects (counter increments elided in this dump).
570 int HeapObjectsMap::FindUntrackedObjects() {
571 List<HeapObjectInfo> heap_objects(1000);
573 HeapIterator iterator(heap_);
575 for (HeapObject* obj = iterator.next();
577 obj = iterator.next()) {
578 HashMap::Entry* entry = entries_map_.Lookup(
579 obj->address(), ComputePointerHash(obj->address()), false);
// No map entry => untracked (expected_size 0 in the trace record).
582 if (FLAG_heap_profiler_trace_objects) {
583 heap_objects.Add(HeapObjectInfo(obj, 0));
586 int entry_index = static_cast<int>(
587 reinterpret_cast<intptr_t>(entry->value));
588 EntryInfo& entry_info = entries_.at(entry_index);
589 if (FLAG_heap_profiler_trace_objects) {
590 heap_objects.Add(HeapObjectInfo(obj,
591 static_cast<int>(entry_info.size)));
592 if (obj->Size() != static_cast<int>(entry_info.size))
// Non-tracing mode fails hard on any size mismatch.
595 CHECK_EQ(obj->Size(), static_cast<int>(entry_info.size));
599 if (FLAG_heap_profiler_trace_objects) {
600 PrintF("\nBegin HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n",
601 entries_map_.occupancy());
602 heap_objects.Sort(comparator);
603 int last_printed_object = -1;
604 bool print_next_object = false;
605 for (int i = 0; i < heap_objects.length(); ++i) {
606 const HeapObjectInfo& object_info = heap_objects[i];
607 if (!object_info.IsValid()) {
// Print one object of context before each invalid one, summarizing
// any skipped run of valid objects in between.
609 if (last_printed_object != i - 1) {
611 PrintF("%d objects were skipped\n", i - 1 - last_printed_object);
612 heap_objects[i - 1].Print();
616 last_printed_object = i;
617 print_next_object = true;
618 } else if (print_next_object) {
620 print_next_object = false;
621 last_printed_object = i;
624 if (last_printed_object < heap_objects.length() - 1) {
625 PrintF("Last %d objects were skipped\n",
626 heap_objects.length() - 1 - last_printed_object);
628 PrintF("End HeapObjectsMap::FindUntrackedObjects. %d entries in map.\n\n",
629 entries_map_.occupancy());
// Streams incremental heap statistics to the embedder: after re-syncing
// the map, opens a new time interval at next_id_, then walks entries_
// (which are ordered by id) bucketing counts/sizes per interval. Only
// intervals whose count or size changed are emitted, in chunks of the
// stream's preferred size. Returns last_assigned_id(), also on abort.
635 SnapshotObjectId HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
636 UpdateHeapObjectsMap();
637 time_intervals_.Add(TimeInterval(next_id_));
638 int prefered_chunk_size = stream->GetChunkSize();
639 List<v8::HeapStatsUpdate> stats_buffer;
640 ASSERT(!entries_.is_empty());
641 EntryInfo* entry_info = &entries_.first();
642 EntryInfo* end_entry_info = &entries_.last() + 1;
643 for (int time_interval_index = 0;
644 time_interval_index < time_intervals_.length();
645 ++time_interval_index) {
646 TimeInterval& time_interval = time_intervals_[time_interval_index];
647 SnapshotObjectId time_interval_id = time_interval.id;
648 uint32_t entries_size = 0;
649 EntryInfo* start_entry_info = entry_info;
// Consume all entries whose id falls inside this interval
// (advance of entry_info is elided in this dump).
650 while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
651 entries_size += entry_info->size;
654 uint32_t entries_count =
655 static_cast<uint32_t>(entry_info - start_entry_info);
656 if (time_interval.count != entries_count ||
657 time_interval.size != entries_size) {
// Note: the assignments inside the call also update the cached
// per-interval count/size so unchanged intervals are skipped next time.
658 stats_buffer.Add(v8::HeapStatsUpdate(
660 time_interval.count = entries_count,
661 time_interval.size = entries_size));
662 if (stats_buffer.length() >= prefered_chunk_size) {
663 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
664 &stats_buffer.first(), stats_buffer.length());
665 if (result == OutputStream::kAbort) return last_assigned_id();
666 stats_buffer.Clear();
670 ASSERT(entry_info == end_entry_info);
// Flush any remaining partial chunk before signalling end-of-stream.
671 if (!stats_buffer.is_empty()) {
672 OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
673 &stats_buffer.first(), stats_buffer.length());
674 if (result == OutputStream::kAbort) return last_assigned_id();
676 stream->EndOfStream();
677 return last_assigned_id();
// Compacts entries_ in place: keeps only entries touched since the last
// sweep (accessed == true), rewiring their hash-map values to the new
// indices, and removes map entries for the dead ones. Entry 0 is the
// permanent dummy and is always preserved.
681 void HeapObjectsMap::RemoveDeadEntries() {
682 ASSERT(entries_.length() > 0 &&
683 entries_.at(0).id == 0 &&
684 entries_.at(0).addr == NULL);
685 int first_free_entry = 1;
686 for (int i = 1; i < entries_.length(); ++i) {
687 EntryInfo& entry_info = entries_.at(i);
688 if (entry_info.accessed) {
689 if (first_free_entry != i) {
690 entries_.at(first_free_entry) = entry_info;
// Reset the flag so the next GC cycle starts clean.
692 entries_.at(first_free_entry).accessed = false;
693 HashMap::Entry* entry = entries_map_.Lookup(
694 entry_info.addr, ComputePointerHash(entry_info.addr), false);
696 entry->value = reinterpret_cast<void*>(first_free_entry);
// Dead entry: drop its address from the map if it still has one
// (it may already be NULL from MoveObject's collision handling).
699 if (entry_info.addr) {
700 entries_map_.Remove(entry_info.addr,
701 ComputePointerHash(entry_info.addr));
705 entries_.Rewind(first_free_entry);
706 ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
707 entries_map_.occupancy());
// Derives a pseudo-stable id for an embedder-provided native object by
// XOR-mixing its hash, its label's string hash, and (when available) its
// element count. See the comment above kInternalRootObjectId: generated
// (embedder) ids use the even/odd split (final odd-forcing step elided).
711 SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
712 SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
713 const char* label = info->GetLabel();
714 id ^= StringHasher::HashSequentialString(label,
715 static_cast<int>(strlen(label)),
717 intptr_t element_count = info->GetElementCount();
718 if (element_count != -1)
719 id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
720 v8::internal::kZeroHashSeed);
// Self-reported footprint of the tracker: hash-map backing store plus
// the two lists (leading terms elided in this dump).
725 size_t HeapObjectsMap::GetUsedMemorySize() const {
728 sizeof(HashMap::Entry) * entries_map_.capacity() +
729 GetMemoryUsedByList(entries_) +
730 GetMemoryUsedByList(time_intervals_);
// HeapThing -> entry-index hash map; keys compared by HeapThingsMatch.
734 HeapEntriesMap::HeapEntriesMap()
735 : entries_(HeapThingsMatch) {
// Returns the entry index recorded for 'thing', or HeapEntry::kNoEntry
// if the thing has not been paired yet (non-inserting lookup).
739 int HeapEntriesMap::Map(HeapThing thing) {
740 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
741 if (cache_entry == NULL) return HeapEntry::kNoEntry;
742 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
// Records the entry index for 'thing'; asserts each thing is paired once.
746 void HeapEntriesMap::Pair(HeapThing thing, int entry) {
747 HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
748 ASSERT(cache_entry->value == NULL);
749 cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
// Set of heap objects implemented as a HashMap with unused values.
753 HeapObjectsSet::HeapObjectsSet()
754 : entries_(HeapEntriesMap::HeapThingsMatch) {
758 void HeapObjectsSet::Clear() {
// Membership test; non-heap values (e.g. Smis) are never members.
763 bool HeapObjectsSet::Contains(Object* obj) {
764 if (!obj->IsHeapObject()) return false;
765 HeapObject* object = HeapObject::cast(obj);
766 return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
// Adds a heap object to the set (inserting Lookup); non-heap values
// are silently ignored.
770 void HeapObjectsSet::Insert(Object* obj) {
771 if (!obj->IsHeapObject()) return;
772 HeapObject* object = HeapObject::cast(obj);
773 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
// Returns the tag stored for 'obj' by SetTag, or the fallback value on
// the elided else-branch when the object was never tagged.
777 const char* HeapObjectsSet::GetTag(Object* obj) {
778 HeapObject* object = HeapObject::cast(obj);
779 HashMap::Entry* cache_entry =
780 entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
781 return cache_entry != NULL
782 ? reinterpret_cast<const char*>(cache_entry->value)
// Associates a tag string with a heap object (overwrites any previous
// tag); non-heap values are ignored. The tag pointer is stored as-is,
// so it must outlive this set.
787 void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
788 if (!obj->IsHeapObject()) return;
789 HeapObject* object = HeapObject::cast(obj);
790 HashMap::Entry* cache_entry =
791 entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
792 cache_entry->value = const_cast<char*>(tag);
// Fake HeapObject* sentinels fabricated from the reserved snapshot ids.
// They are never dereferenced — AddEntry() compares pointers against them
// to recognize the synthetic root / GC-roots / GC-subroot pseudo-objects
// (kFirstGcSubrootObject..kLastGcSubrootObject is a half-open id range).
796 HeapObject* const V8HeapExplorer::kInternalRootObject =
797 reinterpret_cast<HeapObject*>(
798 static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
799 HeapObject* const V8HeapExplorer::kGcRootsObject =
800 reinterpret_cast<HeapObject*>(
801 static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
802 HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
803 reinterpret_cast<HeapObject*>(
804 static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
805 HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
806 reinterpret_cast<HeapObject*>(
807 static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
// Wires the explorer to the snapshot's profiler services (heap, string
// storage, object-id map) and the embedder's global-object name resolver.
810 V8HeapExplorer::V8HeapExplorer(
811 HeapSnapshot* snapshot,
812 SnapshottingProgressReportingInterface* progress,
813 v8::HeapProfiler::ObjectNameResolver* resolver)
814 : heap_(snapshot->profiler()->heap_object_map()->heap()),
816 names_(snapshot_->profiler()->names()),
817 heap_object_map_(snapshot_->profiler()->heap_object_map()),
820 global_object_name_resolver_(resolver) {
824 V8HeapExplorer::~V8HeapExplorer() {
// HeapEntriesAllocator hook: a HeapThing here is always a HeapObject*.
828 HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
829 return AddEntry(reinterpret_cast<HeapObject*>(ptr));
// Classifies a heap object and creates the matching snapshot entry.
// Order matters: the fake sentinel pointers are checked first (they must
// not be dereferenced), then specific object kinds from most to least
// specific, falling back to a hidden "system" entry. Several 'return'
// lines and type arguments are elided in this dump.
833 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
834 if (object == kInternalRootObject) {
835 snapshot_->AddRootEntry();
836 return snapshot_->root();
837 } else if (object == kGcRootsObject) {
838 HeapEntry* entry = snapshot_->AddGcRootsEntry();
// Pointer-range check against the fabricated subroot sentinels.
840 } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
841 HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
843 } else if (object->IsJSFunction()) {
844 JSFunction* func = JSFunction::cast(object);
845 SharedFunctionInfo* shared = func->shared();
// Bound functions have no own name; label them "native_bind".
846 const char* name = shared->bound() ? "native_bind" :
847 names_->GetName(String::cast(shared->name()));
848 return AddEntry(object, HeapEntry::kClosure, name);
849 } else if (object->IsJSRegExp()) {
850 JSRegExp* re = JSRegExp::cast(object);
851 return AddEntry(object,
853 names_->GetName(re->Pattern()));
854 } else if (object->IsJSObject()) {
855 const char* name = names_->GetName(
856 GetConstructorName(JSObject::cast(object)));
857 if (object->IsJSGlobalObject()) {
// Global objects may carry an embedder-assigned tag; append it.
858 const char* tag = objects_tags_.GetTag(object);
860 name = names_->GetFormatted("%s / %s", name, tag);
863 return AddEntry(object, HeapEntry::kObject, name);
864 } else if (object->IsString()) {
865 String* string = String::cast(object);
// Cons/sliced strings get dedicated types so retained size of the
// underlying parts is attributed correctly.
866 if (string->IsConsString())
867 return AddEntry(object,
868 HeapEntry::kConsString,
869 "(concatenated string)");
870 if (string->IsSlicedString())
871 return AddEntry(object,
872 HeapEntry::kSlicedString,
874 return AddEntry(object,
876 names_->GetName(String::cast(object)));
877 } else if (object->IsCode()) {
878 return AddEntry(object, HeapEntry::kCode, "");
879 } else if (object->IsSharedFunctionInfo()) {
880 String* name = String::cast(SharedFunctionInfo::cast(object)->name());
881 return AddEntry(object,
883 names_->GetName(name));
884 } else if (object->IsScript()) {
885 Object* name = Script::cast(object)->name();
886 return AddEntry(object,
// Scripts may have a non-string (undefined) name; elided branch
// supplies the fallback label.
889 ? names_->GetName(String::cast(name))
891 } else if (object->IsNativeContext()) {
892 return AddEntry(object, HeapEntry::kHidden, "system / NativeContext");
893 } else if (object->IsContext()) {
894 return AddEntry(object, HeapEntry::kObject, "system / Context");
895 } else if (object->IsFixedArray() ||
896 object->IsFixedDoubleArray() ||
897 object->IsByteArray() ||
898 object->IsExternalArray()) {
899 return AddEntry(object, HeapEntry::kArray, "");
900 } else if (object->IsHeapNumber()) {
901 return AddEntry(object, HeapEntry::kHeapNumber, "number");
// Fallback: internal VM structures become hidden "system / ..." entries.
903 return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
// Convenience overload: derives address and size from the object itself.
907 HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
908 HeapEntry::Type type,
910 return AddEntry(object->address(), type, name, object->Size());
// Core entry factory: resolves (or assigns) the stable snapshot id for
// the address, attaches the allocation-trace node id when an allocation
// tracker is active, and appends the entry to the snapshot.
914 HeapEntry* V8HeapExplorer::AddEntry(Address address,
915 HeapEntry::Type type,
918 SnapshotObjectId object_id = heap_object_map_->FindOrAddEntry(
919 address, static_cast<unsigned int>(size));
920 unsigned trace_node_id = 0;
921 if (AllocationTracker* allocation_tracker =
922 snapshot_->profiler()->allocation_tracker()) {
924 allocation_tracker->address_to_trace()->GetTraceNodeId(address);
926 return snapshot_->AddEntry(type, name, object_id, size, trace_node_id);
// Mediates between explorers and the snapshot: creates/caches entries
// (via a HeapEntriesMap from HeapThing to entry index) and forwards edge
// creation to the parent entry. Parent entries are addressed by index
// and re-resolved on every call because entries_ may grow between calls.
930 class SnapshotFiller {
932 explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
933 : snapshot_(snapshot),
934 names_(snapshot->profiler()->names()),
935 entries_(entries) { }
// Allocates a new entry for 'ptr' and records the pairing.
936 HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
937 HeapEntry* entry = allocator->AllocateEntry(ptr);
938 entries_->Pair(ptr, entry->index());
// Returns the existing entry for 'ptr', or NULL if not yet added.
941 HeapEntry* FindEntry(HeapThing ptr) {
942 int index = entries_->Map(ptr);
943 return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
945 HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
946 HeapEntry* entry = FindEntry(ptr);
947 return entry != NULL ? entry : AddEntry(ptr, allocator);
949 void SetIndexedReference(HeapGraphEdge::Type type,
952 HeapEntry* child_entry) {
953 HeapEntry* parent_entry = &snapshot_->entries()[parent];
954 parent_entry->SetIndexedReference(type, index, child_entry);
// Auto-indexed variant: index is 1-based child ordinal.
956 void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
958 HeapEntry* child_entry) {
959 HeapEntry* parent_entry = &snapshot_->entries()[parent];
960 int index = parent_entry->children_count() + 1;
961 parent_entry->SetIndexedReference(type, index, child_entry);
963 void SetNamedReference(HeapGraphEdge::Type type,
965 const char* reference_name,
966 HeapEntry* child_entry) {
967 HeapEntry* parent_entry = &snapshot_->entries()[parent];
968 parent_entry->SetNamedReference(type, reference_name, child_entry);
// Named auto-index variant: the ordinal is interned as the edge name.
970 void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
972 HeapEntry* child_entry) {
973 HeapEntry* parent_entry = &snapshot_->entries()[parent];
974 int index = parent_entry->children_count() + 1;
975 parent_entry->SetNamedReference(
977 names_->GetName(index),
982 HeapSnapshot* snapshot_;
983 StringsStorage* names_;
984 HeapEntriesMap* entries_;
// Root visitor that creates one snapshot entry per non-empty GC subroot
// category: it counts visited pointers, and when Synchronize fires for a
// tag whose count advanced since the previous tag, the corresponding
// subroot pseudo-object is registered with the filler.
988 class GcSubrootsEnumerator : public ObjectVisitor {
990 GcSubrootsEnumerator(
991 SnapshotFiller* filler, V8HeapExplorer* explorer)
994 previous_object_count_(0),
997 void VisitPointers(Object** start, Object** end) {
998 object_count_ += end - start;
1000 void Synchronize(VisitorSynchronization::SyncTag tag) {
1001 // Skip empty subroots.
1002 if (previous_object_count_ != object_count_) {
1003 previous_object_count_ = object_count_;
1004 filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1008 SnapshotFiller* filler_;
1009 V8HeapExplorer* explorer_;
1010 intptr_t previous_object_count_;
1011 intptr_t object_count_;
// Registers the synthetic root, GC-roots, and (via the enumerator) each
// non-empty GC-subroot entry before real objects are explored.
1015 void V8HeapExplorer::AddRootEntries(SnapshotFiller* filler) {
1016 filler->AddEntry(kInternalRootObject, this);
1017 filler->AddEntry(kGcRootsObject, this);
1018 GcSubrootsEnumerator enumerator(filler, this);
1019 heap_->IterateRoots(&enumerator, VISIT_ALL);
// Produces a "system / ..." label for internal VM objects, dispatching
// on instance type; Maps of string types get a per-type label via the
// STRING_TYPE_LIST macro, structs via STRUCT_LIST. (The MAP_TYPE case
// label enclosing the inner switch is elided in this dump.)
1023 const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
1024 switch (object->map()->instance_type()) {
1026 switch (Map::cast(object)->instance_type()) {
1027 #define MAKE_STRING_MAP_CASE(instance_type, size, name, Name) \
1028 case instance_type: return "system / Map (" #Name ")";
1029 STRING_TYPE_LIST(MAKE_STRING_MAP_CASE)
1030 #undef MAKE_STRING_MAP_CASE
1031 default: return "system / Map";
1033 case CELL_TYPE: return "system / Cell";
1034 case PROPERTY_CELL_TYPE: return "system / PropertyCell";
1035 case FOREIGN_TYPE: return "system / Foreign";
1036 case ODDBALL_TYPE: return "system / Oddball";
1037 #define MAKE_STRUCT_CASE(NAME, Name, name) \
1038 case NAME##_TYPE: return "system / "#Name;
1039 STRUCT_LIST(MAKE_STRUCT_CASE)
1040 #undef MAKE_STRUCT_CASE
1041 default: return "system";
// Counts heap objects by a full iterator pass; used for progress
// reporting (the increment statement is elided in this dump).
1046 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1047 int objects_count = 0;
1048 for (HeapObject* obj = iterator->next();
1050 obj = iterator->next()) {
1053 return objects_count;
// Visitor that turns every not-yet-reported pointer field of parent_obj_
// into a hidden edge. Fields the explorer already reported explicitly
// are tagged in place with kFailureTag (a bit pattern no real heap
// pointer carries) by MarkVisitedField, and CheckVisitedAndUnmark
// restores them while skipping the duplicate edge.
1057 class IndexedReferencesExtractor : public ObjectVisitor {
1059 IndexedReferencesExtractor(V8HeapExplorer* generator,
1060 HeapObject* parent_obj,
1062 : generator_(generator),
1063 parent_obj_(parent_obj),
1067 void VisitCodeEntry(Address entry_address) {
1068 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
1069 generator_->SetInternalReference(parent_obj_, parent_, "code", code);
1070 generator_->TagCodeObject(code);
1072 void VisitPointers(Object** start, Object** end) {
1073 for (Object** p = start; p < end; p++) {
1075 if (CheckVisitedAndUnmark(p)) continue;
1076 generator_->SetHiddenReference(parent_obj_, parent_, next_index_, *p);
// Tags an already-reported field; offset < 0 means "no such field".
1079 static void MarkVisitedField(HeapObject* obj, int offset) {
1080 if (offset < 0) return;
1081 Address field = obj->address() + offset;
1082 ASSERT(!Memory::Object_at(field)->IsFailure());
1083 ASSERT(Memory::Object_at(field)->IsHeapObject());
1084 *field |= kFailureTag;
// Returns true (and restores the pointer) if the field was tagged.
1088 bool CheckVisitedAndUnmark(Object** field) {
1089 if ((*field)->IsFailure()) {
1090 intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
1091 *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
1092 ASSERT((*field)->IsHeapObject());
1097 V8HeapExplorer* generator_;
1098 HeapObject* parent_obj_;
// Entry point for per-object reference extraction: dispatches to the
// type-specific extractor (most specific type first), records the map
// reference, then sweeps any remaining untagged pointer fields into
// hidden edges via IndexedReferencesExtractor.
1104 void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
1105 HeapEntry* heap_entry = GetEntry(obj);
1106 if (heap_entry == NULL) return; // No interest in this object.
1107 int entry = heap_entry->index();
// Note: JSArrayBuffer must be tested before the generic JSObject case.
1109 if (obj->IsJSGlobalProxy()) {
1110 ExtractJSGlobalProxyReferences(entry, JSGlobalProxy::cast(obj));
1111 } else if (obj->IsJSArrayBuffer()) {
1112 ExtractJSArrayBufferReferences(entry, JSArrayBuffer::cast(obj));
1113 } else if (obj->IsJSObject()) {
1114 ExtractJSObjectReferences(entry, JSObject::cast(obj));
1115 } else if (obj->IsString()) {
1116 ExtractStringReferences(entry, String::cast(obj));
1117 } else if (obj->IsContext()) {
1118 ExtractContextReferences(entry, Context::cast(obj));
1119 } else if (obj->IsMap()) {
1120 ExtractMapReferences(entry, Map::cast(obj));
1121 } else if (obj->IsSharedFunctionInfo()) {
1122 ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
1123 } else if (obj->IsScript()) {
1124 ExtractScriptReferences(entry, Script::cast(obj));
1125 } else if (obj->IsAccessorPair()) {
1126 ExtractAccessorPairReferences(entry, AccessorPair::cast(obj));
1127 } else if (obj->IsCodeCache()) {
1128 ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
1129 } else if (obj->IsCode()) {
1130 ExtractCodeReferences(entry, Code::cast(obj));
1131 } else if (obj->IsBox()) {
1132 ExtractBoxReferences(entry, Box::cast(obj));
1133 } else if (obj->IsCell()) {
1134 ExtractCellReferences(entry, Cell::cast(obj));
1135 } else if (obj->IsPropertyCell()) {
1136 ExtractPropertyCellReferences(entry, PropertyCell::cast(obj));
1137 } else if (obj->IsAllocationSite()) {
1138 ExtractAllocationSiteReferences(entry, AllocationSite::cast(obj));
1140 SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
1142 // Extract unvisited fields as hidden references and restore tags
1143 // of visited fields.
1144 IndexedReferencesExtractor refs_extractor(this, obj, entry);
1145 obj->Iterate(&refs_extractor);
// A global proxy contributes a single named edge: its native context.
void V8HeapExplorer::ExtractJSGlobalProxyReferences(
    int entry, JSGlobalProxy* proxy) {
  SetInternalReference(proxy, entry,
                       "native_context", proxy->native_context(),
                       JSGlobalProxy::kNativeContextOffset);
}
// Extracts all edges of a generic JSObject: closure bindings, named
// properties, elements, internal fields, the __proto__ link, and then
// type-specific extras for JSFunction, GlobalObject and JSArrayBufferView,
// finishing with the backing "properties" and "elements" stores.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->proto_string(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // prototype_or_initial_map holds either the not-yet-materialized
    // prototype or, once instances exist, the initial map.
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_string(), js_fun->prototype());
        SetInternalReference(
            obj, entry, "initial_map", proto_or_map,
            JSFunction::kPrototypeOrInitialMapOffset);
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->context(),
                         JSFunction::kContextOffset);
    SetWeakReference(js_fun, entry,
                     "next_function_link", js_fun->next_function_link(),
                     JSFunction::kNextFunctionLinkOffset);
    // The weak link must be the last field; anything after it would be
    // missed by the explicit extraction above.
    STATIC_CHECK(JSFunction::kNextFunctionLinkOffset
                 == JSFunction::kNonWeakFieldsEndOffset);
    STATIC_CHECK(JSFunction::kNextFunctionLinkOffset + kPointerSize
                 == JSFunction::kSize);
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "native_context", global_obj->native_context(),
                         GlobalObject::kNativeContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
    // NOTE(review): the right-hand side of this check was elided in the
    // listing; restored as the four pointer fields named above — confirm.
    STATIC_CHECK(GlobalObject::kHeaderSize - JSObject::kHeaderSize ==
                 4 * kPointerSize);
  } else if (obj->IsJSArrayBufferView()) {
    JSArrayBufferView* view = JSArrayBufferView::cast(obj);
    SetInternalReference(view, entry, "buffer", view->buffer(),
                         JSArrayBufferView::kBufferOffset);
    SetWeakReference(view, entry, "weak_next", view->weak_next(),
                     JSArrayBufferView::kWeakNextOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
// Strings only own other strings: a cons string references its two halves,
// a sliced string references its backing parent. Flat strings add no edges.
void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first(),
                         ConsString::kFirstOffset);
    SetInternalReference(cs, entry, "second", cs->second(),
                         ConsString::kSecondOffset);
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent(),
                         SlicedString::kParentOffset);
  }
}
// Extracts edges of a Context: named context-allocated locals (resolved via
// the closure's ScopeInfo) for declaration contexts, the fixed context
// header slots, and — for native contexts — the extended slot set including
// the trailing weak list slots.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
  if (context == context->declaration_context()) {
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetContextReference(context, entry, local_name, context->get(idx),
                          Context::OffsetOfElementAt(idx));
    }
    if (scope_info->HasFunctionName()) {
      // The function's own name may also live in a context slot (named
      // function expressions).
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetContextReference(context, entry, name, context->get(idx),
                            Context::OffsetOfElementAt(idx));
      }
    }
  }

// Slots before FIRST_WEAK_SLOT are strong; the rest are weak list heads.
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  if (Context::index < Context::FIRST_WEAK_SLOT) { \
    SetInternalReference(context, entry, #name, context->get(Context::index), \
                         FixedArray::OffsetOfElementAt(Context::index)); \
  } else { \
    SetWeakReference(context, entry, #name, context->get(Context::index), \
                     FixedArray::OffsetOfElementAt(Context::index)); \
  }
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_OBJECT_INDEX, GlobalObject, global);
  if (context->IsNativeContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->embedder_data(), "(context data)");
    NATIVE_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_FUNCTIONS_LIST, unused,
                          optimized_functions_list);
    EXTRACT_CONTEXT_FIELD(OPTIMIZED_CODE_LIST, unused, optimized_code_list);
    EXTRACT_CONTEXT_FIELD(DEOPTIMIZED_CODE_LIST, unused, deoptimized_code_list);
    EXTRACT_CONTEXT_FIELD(NEXT_CONTEXT_LINK, unused, next_context_link);
#undef EXTRACT_CONTEXT_FIELD
    // Keep the weak-slot assumptions above in sync with the Context layout.
    STATIC_CHECK(Context::OPTIMIZED_FUNCTIONS_LIST == Context::FIRST_WEAK_SLOT);
    STATIC_CHECK(Context::NEXT_CONTEXT_LINK + 1
                 == Context::NATIVE_CONTEXT_SLOTS);
    STATIC_CHECK(Context::FIRST_WEAK_SLOT + 5 == Context::NATIVE_CONTEXT_SLOTS);
  }
}
// Extracts edges of a Map: the transitions array (or the back pointer stored
// in its place), descriptors, code cache, prototype, constructor and
// dependent code.
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  if (map->HasTransitionArray()) {
    TransitionArray* transitions = map->transitions();
    int transitions_entry = GetEntry(transitions)->index();
    // The back pointer is stored inside the transition array in this case,
    // so the edge originates from the transitions entry, not the map.
    Object* back_pointer = transitions->back_pointer_storage();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(transitions, transitions_entry,
                         "back_pointer", back_pointer);
    TagObject(transitions, "(transition array)");
    SetInternalReference(map, entry,
                         "transitions", transitions,
                         Map::kTransitionsOrBackPointerOffset);
  } else {
    Object* back_pointer = map->GetBackPointer();
    TagObject(back_pointer, "(back pointer)");
    SetInternalReference(map, entry,
                         "back_pointer", back_pointer,
                         Map::kTransitionsOrBackPointerOffset);
  }
  DescriptorArray* descriptors = map->instance_descriptors();
  TagObject(descriptors, "(map descriptors)");
  SetInternalReference(map, entry,
                       "descriptors", descriptors,
                       Map::kDescriptorsOffset);
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  TagObject(map->dependent_code(), "(dependent code)");
  SetInternalReference(map, entry,
                       "dependent_code", map->dependent_code(),
                       Map::kDependentCodeOffset);
}
// Extracts edges of a SharedFunctionInfo and tags its code objects with
// human-readable names derived from the function's debug name (or, for
// anonymous functions, from the code kind).
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  String* shared_name = shared->DebugName();
  const char* name = NULL;
  if (shared_name != *heap_->isolate()->factory()->empty_string()) {
    name = names_->GetName(shared_name);
    TagObject(shared->code(), names_->GetFormatted("(code for %s)", name));
  } else {
    TagObject(shared->code(), names_->GetFormatted("(%s code)",
        Code::Kind2String(shared->code()->kind())));
  }

  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  const char* construct_stub_name = name ?
      names_->GetFormatted("(construct stub code for %s)", name) :
      "(construct stub code)";
  TagObject(shared->construct_stub(), construct_stub_name);
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "optimized_code_map", shared->optimized_code_map(),
                       SharedFunctionInfo::kOptimizedCodeMapOffset);
  SetWeakReference(obj, entry,
                   "initial_map", shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}
// Extracts edges of a Script: source text, name, context data and the
// lazily computed line-ends cache.
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}
// An AccessorPair owns exactly two slots: its getter and its setter.
void V8HeapExplorer::ExtractAccessorPairReferences(
    int entry, AccessorPair* accessors) {
  SetInternalReference(accessors, entry, "getter", accessors->getter(),
                       AccessorPair::kGetterOffset);
  SetInternalReference(accessors, entry, "setter", accessors->setter(),
                       AccessorPair::kSetterOffset);
}
// A CodeCache owns its default cache and its normal-type cache; both are
// tagged so they show up with descriptive names in the snapshot.
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}
// Names a builtin's code object "(<name> builtin)" in the snapshot.
void V8HeapExplorer::TagBuiltinCodeObject(Code* code, const char* name) {
  TagObject(code, names_->GetFormatted("(%s builtin)", name));
}
// Names stub code objects "(<MajorName> code)"; other code kinds are named
// elsewhere (e.g. via the owning SharedFunctionInfo).
void V8HeapExplorer::TagCodeObject(Code* code) {
  if (code->kind() == Code::STUB) {
    TagObject(code, names_->GetFormatted(
        "(%s code)", CodeStub::MajorName(
            static_cast<CodeStub::Major>(code->major_key()), true)));
  }
}
// Extracts edges of a Code object: relocation info, handler table, deopt
// data, plus kind-specific extras (type feedback info for FUNCTION code, a
// weak next_code_link for OPTIMIZED_FUNCTION code).
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagCodeObject(code);
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  if (code->kind() == Code::FUNCTION) {
    SetInternalReference(code, entry,
                         "type_feedback_info", code->type_feedback_info(),
                         Code::kTypeFeedbackInfoOffset);
  }
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
  SetInternalReference(code, entry,
                       "constant_pool", code->constant_pool(),
                       Code::kConstantPoolOffset);
  if (code->kind() == Code::OPTIMIZED_FUNCTION) {
    SetWeakReference(code, entry,
                     "next_code_link", code->next_code_link(),
                     Code::kNextCodeLinkOffset);
  }
}
// A Box wraps a single value slot.
void V8HeapExplorer::ExtractBoxReferences(int entry, Box* box) {
  SetInternalReference(box, entry, "value", box->value(), Box::kValueOffset);
}
// A Cell holds a single value slot.
void V8HeapExplorer::ExtractCellReferences(int entry, Cell* cell) {
  SetInternalReference(cell, entry, "value", cell->value(), Cell::kValueOffset);
}
// A PropertyCell is a Cell (value slot) plus a type descriptor and a
// dependent-code list.
void V8HeapExplorer::ExtractPropertyCellReferences(int entry,
                                                   PropertyCell* cell) {
  ExtractCellReferences(entry, cell);
  SetInternalReference(cell, entry, "type", cell->type(),
                       PropertyCell::kTypeOffset);
  SetInternalReference(cell, entry, "dependent_code", cell->dependent_code(),
                       PropertyCell::kDependentCodeOffset);
}
// Extracts the strong slots of an AllocationSite; the trailing weak_next
// list pointer is deliberately not reported.
void V8HeapExplorer::ExtractAllocationSiteReferences(int entry,
                                                     AllocationSite* site) {
  SetInternalReference(site, entry, "transition_info", site->transition_info(),
                       AllocationSite::kTransitionInfoOffset);
  SetInternalReference(site, entry, "nested_site", site->nested_site(),
                       AllocationSite::kNestedSiteOffset);
  SetInternalReference(site, entry, "dependent_code", site->dependent_code(),
                       AllocationSite::kDependentCodeOffset);
  // Do not visit weak_next as it is not visited by the StaticVisitor,
  // and we're not very interested in weak_next field here.
  STATIC_CHECK(AllocationSite::kWeakNextOffset >=
               AllocationSite::BodyDescriptor::kEndOffset);
}
// Allocator used to create a synthetic native entry for the off-heap
// backing store of a JSArrayBuffer; the entry is sized to the buffer's
// byte length and named "system / JSArrayBufferData".
class JSArrayBufferDataEntryAllocator : public HeapEntriesAllocator {
 public:
  JSArrayBufferDataEntryAllocator(size_t size, V8HeapExplorer* explorer)
      : size_(size)
      , explorer_(explorer) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr) {
    return explorer_->AddEntry(
        static_cast<Address>(ptr),
        HeapEntry::kNative, "system / JSArrayBufferData", size_);
  }
 private:
  size_t size_;            // Reported size of the native backing store.
  V8HeapExplorer* explorer_;
};
// Extracts edges of a JSArrayBuffer: two weak list links, and a named
// internal edge to a synthetic entry representing the off-heap backing
// store (skipped for neutered/unallocated buffers).
void V8HeapExplorer::ExtractJSArrayBufferReferences(
    int entry, JSArrayBuffer* buffer) {
  SetWeakReference(buffer, entry, "weak_next", buffer->weak_next(),
                   JSArrayBuffer::kWeakNextOffset);
  SetWeakReference(buffer, entry,
                   "weak_first_view", buffer->weak_first_view(),
                   JSArrayBuffer::kWeakFirstViewOffset);
  // Setup a reference to a native memory backing_store object.
  if (!buffer->backing_store())
    return;
  size_t data_size = NumberToSize(heap_->isolate(), buffer->byte_length());
  JSArrayBufferDataEntryAllocator allocator(data_size, this);
  HeapEntry* data_entry =
      filler_->FindOrAddEntry(buffer->backing_store(), &allocator);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             entry, "backing_store", data_entry);
}
// For bound functions, emits shortcut edges to the bound receiver, the
// target function, and each bound argument. Non-functions and unbound
// functions contribute nothing here.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = names_->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  }
}
// Extracts named-property edges. Fast-mode objects are walked via their
// descriptor array (in-object fields, out-of-object fields, constants and
// accessor pairs); slow-mode objects via the property dictionary, unwrapping
// PropertyCells for global objects. The hidden-properties backing store is
// reported as an internal edge rather than a user-visible property.
// NOTE(review): several interior lines (case labels, closing braces, call
// argument continuations) were elided in the provided listing and have been
// restored to match the visible switch/if structure — confirm against the
// upstream revision.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    int real_size = js_obj->map()->NumberOfOwnDescriptors();
    for (int i = 0; i < real_size; i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);

          Name* k = descs->GetKey(i);
          if (index < js_obj->map()->inobject_properties()) {
            // Field stored inside the object body: report with its exact
            // field offset so the hidden-edge sweep can skip the slot.
            Object* value = js_obj->InObjectPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(
                  js_obj, entry,
                  k, value,
                  NULL,
                  js_obj->GetInObjectPropertyOffset(index));
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(
                  js_obj, entry,
                  "hidden_properties", value,
                  js_obj->GetInObjectPropertyOffset(index));
            }
          } else {
            // Field stored in the out-of-object properties backing store.
            Object* value = js_obj->RawFastPropertyAt(index);
            if (k != heap_->hidden_string()) {
              SetPropertyReference(js_obj, entry, k, value);
            } else {
              TagObject(value, "(hidden properties)");
              SetInternalReference(js_obj, entry, "hidden_properties", value);
            }
          }
          break;
        }
        case CONSTANT:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstant(i));
          break;
        case CALLBACKS:
          ExtractAccessorPairProperty(
              js_obj, entry,
              descs->GetKey(i), descs->GetValue(i));
          break;
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
          break;
        default:
          UNREACHABLE();
          break;
      }
    }
  } else {
    NameDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsPropertyCell()
            ? PropertyCell::cast(target)->value()
            : target;
        if (k == heap_->hidden_string()) {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        } else {
          if (ExtractAccessorPairProperty(js_obj, entry, k, value)) continue;
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        }
      }
    }
  }
}
// If |callback_obj| is an AccessorPair, emits "get <key>"/"set <key>" edges
// for any non-oddball getter/setter and returns true; returns false when the
// value is not an accessor pair so the caller can report it as a plain
// property instead.
bool V8HeapExplorer::ExtractAccessorPairProperty(
    JSObject* js_obj, int entry, Object* key, Object* callback_obj) {
  if (!callback_obj->IsAccessorPair()) return false;
  AccessorPair* accessors = AccessorPair::cast(callback_obj);
  Object* getter = accessors->getter();
  if (!getter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), getter, "get %s");
  }
  Object* setter = accessors->setter();
  if (!setter->IsOddball()) {
    SetPropertyReference(js_obj, entry, String::cast(key), setter, "set %s");
  }
  return true;
}
// Extracts indexed-element edges: fast object elements are scanned up to the
// JSArray length (or the store capacity for non-arrays), skipping holes;
// dictionary elements are walked by key. Other element kinds contribute
// nothing here.
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastObjectElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
// Emits an index-named internal edge for each embedder internal field,
// carrying the exact field offset so the hidden-edge sweep skips the slot.
void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}
// Returns the best display name for an object's constructor. Functions are
// all named "closure"; objects whose constructor name is the generic
// "Object" get a second chance via a direct "constructor" own-property
// lookup (covers binding-object instances).
String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_string();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_string()) {
    // Look up an immediate "constructor" property, if it is a function,
    // return its name. This is for instances of binding objects, which
    // have prototype constructor type "Object".
    Object* constructor_prop = NULL;
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_string(), &result);
    if (!result.IsFound()) return object->constructor_name();

    constructor_prop = result.GetLazyValue();
    if (constructor_prop->IsJSFunction()) {
      Object* maybe_name =
          JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}
// Maps a VM object to its snapshot entry, creating one on demand.
// Smis and other non-heap values have no entry and yield NULL.
HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}
// Collects the heap's root pointers in two passes (strong-only, then all)
// and replays them into the explorer as gc-subroot edges, classifying each
// root as strong or weak by comparing the two pass results. Synchronize()
// records where each VisitorSynchronization tag section ends so roots can be
// attributed to their subroot category.
// NOTE(review): interior structural lines (struct/field declarations,
// closing braces, ++all_index) were elided in the provided listing and have
// been restored to match the visible logic — confirm against upstream.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks the position in all_references_ at which a tag section ends.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  explicit RootsReferencesExtractor(Heap* heap)
      : collecting_all_references_(false),
        previous_reference_count_(0),
        heap_(heap) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  // Switches from the strong-only pass to the all-references pass.
  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    Builtins* builtins = heap_->isolate()->builtins();
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0, builtin_index = 0;
    while (all_index < all_references_.length()) {
      // A root is strong iff it also appeared (in order) in the strong pass.
      bool is_strong = strong_index < strong_references_.length()
          && strong_references_[strong_index] == all_references_[all_index];
      explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                      is_strong,
                                      all_references_[all_index]);
      if (reference_tags_[tags_index].tag ==
          VisitorSynchronization::kBuiltins) {
        // Builtin roots arrive in builtin order, so they can be named here.
        ASSERT(all_references_[all_index]->IsCode());
        explorer->TagBuiltinCodeObject(
            Code::cast(all_references_[all_index]),
            builtins->name(builtin_index++));
      }
      ++all_index;
      if (is_strong) ++strong_index;
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Record a section boundary only when new references arrived since the
    // previous tag (and only during the all-references pass).
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
  Heap* heap_;
};
// Drives the whole extraction: first records GC roots (two root-iteration
// passes via RootsReferencesExtractor), then walks every reachable heap
// object extracting its references, reporting progress as it goes. Returns
// false if the progress callback requested an abort. Note that even when
// interrupted, the filtered heap iteration is driven to completion.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFiller* filler) {
  filler_ = filler;

  // Make sure builtin code objects get their builtin tags
  // first. Otherwise a particular JSFunction object could set
  // its custom name to a generic builtin.
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor(heap_);
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);

  // Now iterate the whole heap.
  bool interrupted = false;
  HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  filler_ = NULL;
  return progress_->ProgressReport(true);
}
// Filters out objects not worth an explicit edge: non-heap values, oddballs,
// and a set of ubiquitous singletons (empty arrays and common maps) that
// would otherwise clutter every node in the snapshot.
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->empty_byte_array()
      && object != heap_->empty_fixed_array()
      && object != heap_->empty_descriptor_array()
      && object != heap_->fixed_array_map()
      && object != heap_->cell_map()
      && object != heap_->global_property_cell_map()
      && object != heap_->shared_function_info_map()
      && object != heap_->free_space_map()
      && object != heap_->one_pointer_filler_map()
      && object != heap_->two_pointer_filler_map();
}
// Emits a context-variable edge named after the local, and marks the field
// so the hidden-edge sweep does not report the slot a second time.
void V8HeapExplorer::SetContextReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj,
                                         int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               names_->GetName(reference_name),
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}
// Emits a shortcut edge (used for bound-function internals) with a caller-
// supplied name; no field offset is involved, so nothing is marked visited.
void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}
// Emits an indexed element edge from parent to child.
void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}
// Emits a named internal edge, but only for "essential" children (see
// IsEssentialObject). The field is marked visited regardless, so common
// singleton targets neither get an edge nor a hidden fallback edge.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
// Overload taking a numeric index instead of a name; the index is rendered
// to a string for the edge name. Same essential-object / mark-visited
// behavior as the named overload.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               names_->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
// Emits an indexed hidden edge for a pointer slot that was not claimed by
// any named reference; non-essential children are silently dropped.
void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}
// Emits a named weak edge; mirrors SetInternalReference's essential-object
// filtering and unconditional mark-visited behavior.
void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      const char* reference_name,
                                      Object* child_obj,
                                      int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kWeak,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
// Emits a property edge. Empty string names degrade to internal edges;
// when |name_format_string| is given (e.g. "get %s") and the name is a
// string, the edge name is formatted through it. Marks the field visited
// when an edge was emitted.
// NOTE(review): elided parameter/argument lines restored from the visible
// call sites (name_format_string and field_offset default to NULL/-1 —
// confirm defaults against the header).
void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          Name* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  ASSERT(parent_entry == GetEntry(parent_obj)->index());
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type =
        reference_name->IsSymbol() || String::cast(reference_name)->length() > 0
            ? HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL && reference_name->IsString()
        ? names_->GetFormatted(
              name_format_string,
              String::cast(reference_name)->ToCString(
                  DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL).get()) :
        names_->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}
// Links the synthetic "(GC roots)" node under the snapshot root.
void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}
// Adds a shortcut edge from the snapshot root to a user JS global object so
// globals are directly reachable in the snapshot UI.
void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}
// Links the per-tag "(GC subroot)" node under the "(GC roots)" node.
void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}
// Attaches a root object under its gc-subroot node: named internal edge for
// known strong roots, weak/element auto-indexed edge otherwise. Also hooks
// each native context's JS global object up to the snapshot root (once, and
// skipping debugger globals).
// NOTE(review): elided else/closing lines restored to match the visible
// call structure — confirm against upstream.
void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      if (is_weak) {
        filler_->SetNamedAutoIndexReference(
            HeapGraphEdge::kWeak,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      } else {
        filler_->SetIndexedAutoIndexReference(
            HeapGraphEdge::kElement,
            snapshot_->gc_subroot(tag)->index(),
            child_entry);
      }
    }

    // Add a shortcut to JS global object reference at snapshot root.
    if (child_obj->IsNativeContext()) {
      Context* context = Context::cast(child_obj);
      GlobalObject* global = context->global_object();
      if (global->IsJSGlobalObject()) {
        bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
        is_debug_object = heap_->isolate()->debug()->IsDebugGlobal(global);
#endif
        if (!is_debug_object && !user_roots_.Contains(global)) {
          user_roots_.Insert(global);
          SetUserGlobalReference(global);
        }
      }
    }
  }
}
// Returns the symbolic name of a strong GC root object, or NULL if the
// object is not a strong root. The name table is built lazily on first use
// from the strong-root, struct-map and internalized-string lists.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define STRING_NAME(name, str) NAME_ENTRY(name)
    INTERNALIZED_STRING_LIST(STRING_NAME)
#undef STRING_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
// Assigns a descriptive name to an essential object's snapshot entry, but
// only if the entry is still unnamed — an earlier, more specific tag wins.
2106 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2107 if (IsEssentialObject(obj)) {
2108 HeapEntry* entry = GetEntry(obj);
2109 if (entry->name()[0] == '\0') {
2110 entry->set_name(tag);
// ObjectVisitor that walks global handles and collects every JSGlobalObject
// reachable through a native context's global proxy. Used by
// TagGlobalObjects to enumerate the globals that need URL tags.
2116 class GlobalObjectsEnumerator : public ObjectVisitor {
2118 virtual void VisitPointers(Object** start, Object** end) {
2119 for (Object** p = start; p < end; p++) {
2120 if ((*p)->IsNativeContext()) {
2121 Context* context = Context::cast(*p);
2122 JSObject* proxy = context->global_proxy();
// Follow proxy -> prototype to reach the real global object.
2123 if (proxy->IsJSGlobalProxy()) {
2124 Object* global = proxy->map()->prototype();
2125 if (global->IsJSGlobalObject()) {
2126 objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
2132 int count() { return objects_.length(); }
2133 Handle<JSGlobalObject>& at(int i) { return objects_[i]; }
2136 List<Handle<JSGlobalObject> > objects_;
2140 // Modifies heap. Must not be run during heap traversal.
// Resolves a display name (typically the page URL) for each JS global
// object via the embedder-supplied ObjectNameResolver, then records the
// tags in objects_tags_. Name resolution may allocate (it calls into the
// embedder), hence the warning above; the tagging pass itself runs under
// DisallowHeapAllocation because it holds raw object pointers.
2141 void V8HeapExplorer::TagGlobalObjects() {
2142 Isolate* isolate = heap_->isolate();
2143 HandleScope scope(isolate);
2144 GlobalObjectsEnumerator enumerator;
2145 isolate->global_handles()->IterateAllRoots(&enumerator);
2146 const char** urls = NewArray<const char*>(enumerator.count());
2147 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2148 if (global_object_name_resolver_) {
// Inner scope keeps per-iteration handles from accumulating.
2149 HandleScope scope(isolate);
2150 Handle<JSGlobalObject> global_obj = enumerator.at(i);
2151 urls[i] = global_object_name_resolver_->GetName(
2152 Utils::ToLocal(Handle<JSObject>::cast(global_obj)));
2158 DisallowHeapAllocation no_allocation;
2159 for (int i = 0, l = enumerator.count(); i < l; ++i) {
2160 objects_tags_.SetTag(*enumerator.at(i), urls[i]);
// ObjectVisitor adapter: forwards embedder references (global handles with
// a wrapper class id) to NativeObjectsExplorer::VisitSubtreeWrapper.
// Plain pointer visits are ignored (body not visible in this extract).
2167 class GlobalHandlesExtractor : public ObjectVisitor {
2169 explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
2170 : explorer_(explorer) {}
2171 virtual ~GlobalHandlesExtractor() {}
2172 virtual void VisitPointers(Object** start, Object** end) {
2175 virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
2176 explorer_->VisitSubtreeWrapper(p, class_id);
// Non-owning; the explorer outlives this short-lived visitor.
2179 NativeObjectsExplorer* explorer_;
// HeapEntriesAllocator that creates snapshot entries of one fixed type
// (kNative or kSynthetic) from v8::RetainedObjectInfo "heap things".
// Shared by NativeObjectsExplorer for native objects and group nodes.
2183 class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
2185 BasicHeapEntriesAllocator(
2186 HeapSnapshot* snapshot,
2187 HeapEntry::Type entries_type)
2188 : snapshot_(snapshot),
2189 names_(snapshot_->profiler()->names()),
2190 heap_object_map_(snapshot_->profiler()->heap_object_map()),
2191 entries_type_(entries_type) {
2193 virtual HeapEntry* AllocateEntry(HeapThing ptr);
2195 HeapSnapshot* snapshot_;
2196 StringsStorage* names_;
2197 HeapObjectsMap* heap_object_map_;
2198 HeapEntry::Type entries_type_;
// Creates a snapshot entry for an embedder RetainedObjectInfo. The label
// gets an "/ N entries" suffix when the embedder reports an element count;
// -1 means "unknown" for both element count and byte size (size falls back
// to 0). The id comes from heap_object_map_ so it is stable across
// snapshots for the same info.
2202 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2203 v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2204 intptr_t elements = info->GetElementCount();
2205 intptr_t size = info->GetSizeInBytes();
2206 const char* name = elements != -1
2207 ? names_->GetFormatted(
2208 "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2209 : names_->GetCopy(info->GetLabel());
2210 return snapshot_->AddEntry(
2213 heap_object_map_->GenerateId(info),
2214 size != -1 ? static_cast<int>(size) : 0,
// Sets up the embedder-object explorer: hash maps keyed by
// RetainedObjectInfo equivalence / group-label string, plus two entry
// allocators (deleted in the destructor) for native objects and the
// synthetic per-group nodes.
2219 NativeObjectsExplorer::NativeObjectsExplorer(
2220 HeapSnapshot* snapshot,
2221 SnapshottingProgressReportingInterface* progress)
2222 : isolate_(snapshot->profiler()->heap_object_map()->heap()->isolate()),
2223 snapshot_(snapshot),
2224 names_(snapshot_->profiler()->names()),
2225 progress_(progress),
2226 embedder_queried_(false),
2227 objects_by_info_(RetainedInfosMatch),
2228 native_groups_(StringsMatch),
2230 synthetic_entries_allocator_ =
2231 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
2232 native_entries_allocator_ =
2233 new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
// Releases everything the explorer owns: the RetainedObjectInfo keys and
// object lists in objects_by_info_ (ownership was acquired in
// FillRetainedObjects), the per-group infos in native_groups_, and the two
// allocators. Disposal calls are in the cut lines of this extract.
2237 NativeObjectsExplorer::~NativeObjectsExplorer() {
2238 for (HashMap::Entry* p = objects_by_info_.Start();
2240 p = objects_by_info_.Next(p)) {
2241 v8::RetainedObjectInfo* info =
2242 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2244 List<HeapObject*>* objects =
2245 reinterpret_cast<List<HeapObject*>* >(p->value);
2248 for (HashMap::Entry* p = native_groups_.Start();
2250 p = native_groups_.Next(p)) {
2251 v8::RetainedObjectInfo* info =
2252 reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
2255 delete synthetic_entries_allocator_;
2256 delete native_entries_allocator_;
// Returns the number of distinct RetainedObjectInfo groups; forces the
// (idempotent) embedder query first so the estimate is populated.
2260 int NativeObjectsExplorer::EstimateObjectsCount() {
2261 FillRetainedObjects();
2262 return objects_by_info_.occupancy();
// Queries the embedder for retained native objects, exactly once per
// explorer (guarded by embedder_queried_). Simulates a full GC's prologue/
// epilogue callbacks with kGCCallbackFlagConstructRetainedObjectInfos so
// embedders build their ObjectGroups, buckets the grouped objects by their
// RetainedObjectInfo, takes ownership of each group's info (group->info =
// NULL), and then collects class-id-tagged handles that were not in any
// group via GlobalHandlesExtractor.
2266 void NativeObjectsExplorer::FillRetainedObjects() {
2267 if (embedder_queried_) return;
2268 Isolate* isolate = isolate_;
2269 const GCType major_gc_type = kGCTypeMarkSweepCompact;
2270 // Record objects that are joined into ObjectGroups.
2271 isolate->heap()->CallGCPrologueCallbacks(
2272 major_gc_type, kGCCallbackFlagConstructRetainedObjectInfos);
2273 List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
2274 for (int i = 0; i < groups->length(); ++i) {
2275 ObjectGroup* group = groups->at(i);
// Groups without info cannot be attributed to a native object; skip them.
2276 if (group->info == NULL) continue;
2277 List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info);
2278 for (size_t j = 0; j < group->length; ++j) {
2279 HeapObject* obj = HeapObject::cast(*group->objects[j]);
2281 in_groups_.Insert(obj);
2283 group->info = NULL; // Acquire info object ownership.
2285 isolate->global_handles()->RemoveObjectGroups();
2286 isolate->heap()->CallGCEpilogueCallbacks(major_gc_type, kNoGCCallbackFlags);
2287 // Record objects that are not in ObjectGroups, but have class ID.
2288 GlobalHandlesExtractor extractor(this);
2289 isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
2290 embedder_queried_ = true;
// Translates embedder-declared ImplicitRefGroups into kInternal snapshot
// edges from each group's parent entry to every child, creating entries on
// demand via native_entries_allocator_. Consumes the groups afterwards
// (RemoveImplicitRefGroups). The edge name argument falls in lines cut
// from this extract.
2294 void NativeObjectsExplorer::FillImplicitReferences() {
2295 Isolate* isolate = isolate_;
2296 List<ImplicitRefGroup*>* groups =
2297 isolate->global_handles()->implicit_ref_groups();
2298 for (int i = 0; i < groups->length(); ++i) {
2299 ImplicitRefGroup* group = groups->at(i);
2300 HeapObject* parent = *group->parent;
2302 filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
2303 ASSERT(parent_entry != HeapEntry::kNoEntry);
2304 Object*** children = group->children;
2305 for (size_t j = 0; j < group->length; ++j) {
2306 Object* child = *children[j];
2307 HeapEntry* child_entry =
2308 filler_->FindOrAddEntry(child, native_entries_allocator_);
2309 filler_->SetNamedReference(
2310 HeapGraphEdge::kInternal,
2316 isolate->global_handles()->RemoveImplicitRefGroups();
// Returns the object list bucketed under |info| in objects_by_info_,
// creating an empty list on first use. If an equivalent info was already
// present, the duplicate |info| is disposed (in a cut line) — hence the
// "MaybeDisposeInfo" in the name.
2319 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2320 v8::RetainedObjectInfo* info) {
2321 HashMap::Entry* entry =
2322 objects_by_info_.Lookup(info, InfoHash(info), true);
2323 if (entry->value != NULL) {
2326 entry->value = new List<HeapObject*>(4);
2328 return reinterpret_cast<List<HeapObject*>* >(entry->value);
// Main entry point of the native-object pass: gathers retained objects and
// implicit references, then for every info group emits the group's root
// edge plus wrapper<->native edges for each member object, and finally
// hangs all group nodes off the snapshot root. Returns a success flag
// (the return statement is outside this extract).
2332 bool NativeObjectsExplorer::IterateAndExtractReferences(
2333 SnapshotFiller* filler) {
2335 FillRetainedObjects();
2336 FillImplicitReferences();
2337 if (EstimateObjectsCount() > 0) {
2338 for (HashMap::Entry* p = objects_by_info_.Start();
2340 p = objects_by_info_.Next(p)) {
2341 v8::RetainedObjectInfo* info =
2342 reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
2343 SetNativeRootReference(info);
2344 List<HeapObject*>* objects =
2345 reinterpret_cast<List<HeapObject*>* >(p->value);
2346 for (int i = 0; i < objects->length(); ++i) {
2347 SetWrapperNativeReferences(objects->at(i), info);
2350 SetRootNativeRootsReference();
// Synthetic RetainedObjectInfo representing a whole native group (one per
// distinct group label). The label pointer doubles as the hash — labels
// are interned through StringsStorage, so identical labels share storage.
// NOTE(review): extract hides parts of the ctor and Dispose body.
2357 class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
2359 explicit NativeGroupRetainedObjectInfo(const char* label)
2361 hash_(reinterpret_cast<intptr_t>(label)),
2365 virtual ~NativeGroupRetainedObjectInfo() {}
2366 virtual void Dispose() {
2371 virtual bool IsEquivalent(RetainedObjectInfo* other) {
2372 return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
2374 virtual intptr_t GetHash() { return hash_; }
2375 virtual const char* GetLabel() { return label_; }
// Returns the singleton NativeGroupRetainedObjectInfo for |label|,
// creating it on first use. The key is the interned copy of the label
// hashed with the heap's seed; the map owns the created info (freed in the
// explorer's destructor).
2384 NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
2385 const char* label) {
2386 const char* label_copy = names_->GetCopy(label);
2387 uint32_t hash = StringHasher::HashSequentialString(
2389 static_cast<int>(strlen(label_copy)),
2390 isolate_->heap()->HashSeed());
2391 HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
2393 if (entry->value == NULL) {
2394 entry->value = new NativeGroupRetainedObjectInfo(label);
2396 return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
// Creates (or finds) the entry for a native info and links it under its
// group's synthetic node with a named kInternal edge, so natives cluster
// by group label in the snapshot. The final edge arguments are outside
// this extract.
2400 void NativeObjectsExplorer::SetNativeRootReference(
2401 v8::RetainedObjectInfo* info) {
2402 HeapEntry* child_entry =
2403 filler_->FindOrAddEntry(info, native_entries_allocator_);
2404 ASSERT(child_entry != NULL);
2405 NativeGroupRetainedObjectInfo* group_info =
2406 FindOrAddGroupInfo(info->GetGroupLabel());
2407 HeapEntry* group_entry =
2408 filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2409 filler_->SetNamedAutoIndexReference(
2410 HeapGraphEdge::kInternal,
2411 group_entry->index(),
// Emits the bidirectional wrapper link: a named kInternal edge from the JS
// wrapper entry to its native info entry, and an indexed kElement edge
// back from the native entry to the wrapper. Trailing edge arguments are
// in lines cut from this extract.
2416 void NativeObjectsExplorer::SetWrapperNativeReferences(
2417 HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2418 HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2419 ASSERT(wrapper_entry != NULL);
2420 HeapEntry* info_entry =
2421 filler_->FindOrAddEntry(info, native_entries_allocator_);
2422 ASSERT(info_entry != NULL);
2423 filler_->SetNamedReference(HeapGraphEdge::kInternal,
2424 wrapper_entry->index(),
2427 filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2428 info_entry->index(),
// Walks every synthetic group node and attaches it directly to the
// snapshot root with an indexed element edge, making native groups
// top-level retainers in the snapshot.
2433 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2434 for (HashMap::Entry* entry = native_groups_.Start();
2436 entry = native_groups_.Next(entry)) {
2437 NativeGroupRetainedObjectInfo* group_info =
2438 static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2439 HeapEntry* group_entry =
2440 filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2441 ASSERT(group_entry != NULL);
2442 filler_->SetIndexedAutoIndexReference(
2443 HeapGraphEdge::kElement,
2444 snapshot_->root()->index(),
// Callback for class-id-tagged global handles: asks the embedder's wrapper
// class callback for a RetainedObjectInfo and buckets the object under it.
// Objects already claimed by an ObjectGroup are skipped to avoid double
// counting; a NULL info means the embedder declined to describe it.
2450 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2451 if (in_groups_.Contains(*p)) return;
2452 Isolate* isolate = isolate_;
2453 v8::RetainedObjectInfo* info =
2454 isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2455 if (info == NULL) return;
2456 GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
// Wires the generator to its snapshot, the embedder's progress/abort
// control, and the two explorers (V8 heap + DOM/native). Several
// initializer-list entries fall in lines cut from this extract.
2460 HeapSnapshotGenerator::HeapSnapshotGenerator(
2461 HeapSnapshot* snapshot,
2462 v8::ActivityControl* control,
2463 v8::HeapProfiler::ObjectNameResolver* resolver,
2465 : snapshot_(snapshot),
2467 v8_heap_explorer_(snapshot_, this, resolver),
2468 dom_explorer_(snapshot_, this),
// Drives a full snapshot: tag globals (may allocate), force two full GCs
// so only reachable objects remain, then — with allocation disabled —
// extract all references and finalize the snapshot. Returns false if the
// embedder aborted via progress reporting or reference filling failed.
// The debug-only CHECK block verifies no space was swept conservatively,
// since conservative sweeping would leave the heap non-iterable.
2473 bool HeapSnapshotGenerator::GenerateSnapshot() {
2474 v8_heap_explorer_.TagGlobalObjects();
2476 // TODO(1562) Profiler assumes that any object that is in the heap after
2477 // full GC is reachable from the root when computing dominators.
2478 // This is not true for weakly reachable objects.
2479 // As a temporary solution we call GC twice.
2480 heap_->CollectAllGarbage(
2481 Heap::kMakeHeapIterableMask,
2482 "HeapSnapshotGenerator::GenerateSnapshot");
2483 heap_->CollectAllGarbage(
2484 Heap::kMakeHeapIterableMask,
2485 "HeapSnapshotGenerator::GenerateSnapshot");
2488 Heap* debug_heap = heap_;
2489 CHECK(!debug_heap->old_data_space()->was_swept_conservatively());
2490 CHECK(!debug_heap->old_pointer_space()->was_swept_conservatively());
2491 CHECK(!debug_heap->code_space()->was_swept_conservatively());
2492 CHECK(!debug_heap->cell_space()->was_swept_conservatively());
2493 CHECK(!debug_heap->property_cell_space()->
2494 was_swept_conservatively());
2495 CHECK(!debug_heap->map_space()->was_swept_conservatively());
2498 // The following code uses heap iterators, so we want the heap to be
2499 // stable. It should follow TagGlobalObjects as that can allocate.
2500 DisallowHeapAllocation no_alloc;
2503 debug_heap->Verify();
2506 SetProgressTotal(1); // 1 pass.
2509 debug_heap->Verify();
2512 if (!FillReferences()) return false;
2514 snapshot_->FillChildren();
2515 snapshot_->RememberLastJSObjectId();
// Force a final 100% progress report; its return carries the abort flag.
2517 progress_counter_ = progress_total_;
2518 if (!ProgressReport(true)) return false;
// Bumps the progress counter by one processed unit; periodic reporting to
// the embedder happens elsewhere (ProgressReport).
2523 void HeapSnapshotGenerator::ProgressStep() {
2524 ++progress_counter_;
// Reports progress to the embedder's ActivityControl every 10000 steps (or
// when forced) and returns whether generation should continue; without a
// control object reporting is skipped entirely.
2528 bool HeapSnapshotGenerator::ProgressReport(bool force) {
2529 const int kProgressReportGranularity = 10000;
2530 if (control_ != NULL
2531 && (force || progress_counter_ % kProgressReportGranularity == 0)) {
2533 control_->ReportProgressValue(progress_counter_, progress_total_) ==
2534 v8::ActivityControl::kContinue;
// Computes the total progress units as (estimated V8 objects + estimated
// DOM objects) per pass, and resets the counter. No-op without an
// embedder control, since nothing would consume the totals.
2540 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
2541 if (control_ == NULL) return;
2542 HeapIterator iterator(heap_, HeapIterator::kFilterUnreachable);
2543 progress_total_ = iterations_count * (
2544 v8_heap_explorer_.EstimateObjectsCount(&iterator) +
2545 dom_explorer_.EstimateObjectsCount());
2546 progress_counter_ = 0;
// Runs both extraction passes through a shared SnapshotFiller; V8-heap
// extraction runs first (short-circuits DOM extraction on failure/abort).
2550 bool HeapSnapshotGenerator::FillReferences() {
2551 SnapshotFiller filler(snapshot_, &entries_);
2552 v8_heap_explorer_.AddRootEntries(&filler);
2553 return v8_heap_explorer_.IterateAndExtractReferences(&filler)
2554 && dom_explorer_.IterateAndExtractReferences(&filler);
// Compile-time decimal-width table: the maximum number of digits a signed/
// unsigned integer of the given byte width can print (e.g. 32-bit signed
// "-2147483648" is 11 chars). Used to size serializer output buffers.
2558 template<int bytes> struct MaxDecimalDigitsIn;
2559 template<> struct MaxDecimalDigitsIn<4> {
2560 static const int kSigned = 11;
2561 static const int kUnsigned = 10;
2563 template<> struct MaxDecimalDigitsIn<8> {
2564 static const int kSigned = 20;
2565 static const int kUnsigned = 20;
// Buffered writer over the embedder's v8::OutputStream: accumulates bytes
// in a chunk of the stream's preferred size and flushes full chunks via
// WriteAsciiChunk. Once the stream aborts, all further writes become
// no-ops (aborted_ latches). NOTE(review): several statements (e.g.
// Finalize's name line, MaybeWriteChunk's flush call) are cut from this
// extract.
2569 class OutputStreamWriter {
2571 explicit OutputStreamWriter(v8::OutputStream* stream)
2573 chunk_size_(stream->GetChunkSize()),
2574 chunk_(chunk_size_),
2577 ASSERT(chunk_size_ > 0);
2579 bool aborted() { return aborted_; }
2580 void AddCharacter(char c) {
2582 ASSERT(chunk_pos_ < chunk_size_);
2583 chunk_[chunk_pos_++] = c;
2586 void AddString(const char* s) {
2587 AddSubstring(s, StrLength(s));
2589 void AddSubstring(const char* s, int n) {
2591 ASSERT(static_cast<size_t>(n) <= strlen(s));
2592 const char* s_end = s + n;
// Copy in chunk-sized pieces; each full chunk is flushed before the next.
2594 int s_chunk_size = Min(
2595 chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
2596 ASSERT(s_chunk_size > 0);
2597 OS::MemCopy(chunk_.start() + chunk_pos_, s, s_chunk_size);
2599 chunk_pos_ += s_chunk_size;
2603 void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
2605 if (aborted_) return;
2606 ASSERT(chunk_pos_ < chunk_size_);
2607 if (chunk_pos_ != 0) {
2610 stream_->EndOfStream();
2614 template<typename T>
2615 void AddNumberImpl(T n, const char* format) {
2616 // Buffer for the longest value plus trailing \0
2617 static const int kMaxNumberSize =
2618 MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
// Fast path: print straight into the chunk when it surely fits;
// otherwise format into a stack buffer and append via AddString.
2619 if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
2620 int result = OS::SNPrintF(
2621 chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
2622 ASSERT(result != -1);
2623 chunk_pos_ += result;
2626 EmbeddedVector<char, kMaxNumberSize> buffer;
2627 int result = OS::SNPrintF(buffer, format, n);
2629 ASSERT(result != -1);
2630 AddString(buffer.start());
2633 void MaybeWriteChunk() {
2634 ASSERT(chunk_pos_ <= chunk_size_);
2635 if (chunk_pos_ == chunk_size_) {
2640 if (aborted_) return;
2641 if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
2642 v8::OutputStream::kAbort) aborted_ = true;
2646 v8::OutputStream* stream_;
2648 ScopedVector<char> chunk_;
// Field counts of the flat "edges"/"nodes" arrays in the snapshot JSON;
// consumers stride through the arrays using these. Must stay in sync with
// the "meta" layout emitted by SerializeSnapshot.
2654 // type, name|index, to_node.
2655 const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
2656 // type, name, id, self_size, edge_count, trace_node_id.
2657 const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 6;
// Public entry: serializes the snapshot as JSON into |stream|. Lets the
// allocation tracker flush/prepare its data first, then creates the
// buffered writer (cleanup and the SerializeImpl call are cut from this
// extract).
2659 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
2660 if (AllocationTracker* allocation_tracker =
2661 snapshot_->profiler()->allocation_tracker()) {
2662 allocation_tracker->PrepareForSerialization();
2664 ASSERT(writer_ == NULL);
2665 writer_ = new OutputStreamWriter(stream);
// Emits the top-level JSON object section by section (snapshot meta,
// nodes, edges, trace infos, trace tree, strings), bailing out after any
// section if the output stream aborted. Assumes the root node is entry 0
// so consumers can start traversal at index 0.
2672 void HeapSnapshotJSONSerializer::SerializeImpl() {
2673 ASSERT(0 == snapshot_->root()->index());
2674 writer_->AddCharacter('{');
2675 writer_->AddString("\"snapshot\":{");
2676 SerializeSnapshot();
2677 if (writer_->aborted()) return;
2678 writer_->AddString("},\n");
2679 writer_->AddString("\"nodes\":[");
2681 if (writer_->aborted()) return;
2682 writer_->AddString("],\n");
2683 writer_->AddString("\"edges\":[");
2685 if (writer_->aborted()) return;
2686 writer_->AddString("],\n");
2688 writer_->AddString("\"trace_function_infos\":[");
2689 SerializeTraceNodeInfos();
2690 if (writer_->aborted()) return;
2691 writer_->AddString("],\n");
2692 writer_->AddString("\"trace_tree\":[");
2693 SerializeTraceTree();
2694 if (writer_->aborted()) return;
2695 writer_->AddString("],\n");
2697 writer_->AddString("\"strings\":[");
2699 if (writer_->aborted()) return;
2700 writer_->AddCharacter(']');
2701 writer_->AddCharacter('}');
2702 writer_->Finalize();
// Interns |s| into the string table and returns its 1-based id (ids are
// handed out sequentially from next_string_id_; id 0 is never used — the
// strings array gets a "<dummy>" placeholder at index 0).
2706 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
2707 HashMap::Entry* cache_entry = strings_.Lookup(
2708 const_cast<char*>(s), StringHash(s), true);
2709 if (cache_entry->value == NULL) {
2710 cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
2712 return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
// Maps a byte width to the matching unsigned integer type; lets utoa()
// reinterpret any integral argument as unsigned of the same size.
2718 template<size_t size> struct ToUnsigned;
2720 template<> struct ToUnsigned<4> {
2721 typedef uint32_t Type;
2724 template<> struct ToUnsigned<8> {
2725 typedef uint64_t Type;
// Writes the decimal digits of unsigned |value| into |buffer| starting at
// |buffer_pos| and returns the position past the last digit. Digits are
// produced least-significant-first into the precomputed slot range (the
// digit-counting loop and divide step fall in cut lines of this extract).
2731 template<typename T>
2732 static int utoa_impl(T value, const Vector<char>& buffer, int buffer_pos) {
2733 STATIC_CHECK(static_cast<T>(-1) > 0); // Check that T is unsigned
2734 int number_of_digits = 0;
2740 buffer_pos += number_of_digits;
2741 int result = buffer_pos;
2743 int last_digit = static_cast<int>(value % 10);
2744 buffer[--buffer_pos] = '0' + last_digit;
// Type-safe front end for utoa_impl: converts |value| to the unsigned type
// of identical size before formatting, so negative inputs would wrap
// rather than misformat.
2751 template<typename T>
2752 static int utoa(T value, const Vector<char>& buffer, int buffer_pos) {
2753 typename ToUnsigned<sizeof(value)>::Type unsigned_value = value;
2754 STATIC_CHECK(sizeof(value) == sizeof(unsigned_value));
2755 return utoa_impl(unsigned_value, buffer, buffer_pos);
// Formats one edge as "type,name_or_index,to_node\n" into a stack buffer
// and appends it. Element/hidden edges store a numeric index in the second
// field; all other edge kinds store a string-table id. The leading comma
// (for non-first edges) is governed by a parameter in a cut line.
2759 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
2761 // The buffer needs space for 3 unsigned ints, 3 commas, \n and \0
2762 static const int kBufferSize =
2763 MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned * 3 + 3 + 2; // NOLINT
2764 EmbeddedVector<char, kBufferSize> buffer;
2765 int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
2766 || edge->type() == HeapGraphEdge::kHidden
2767 ? edge->index() : GetStringId(edge->name());
2770 buffer[buffer_pos++] = ',';
2772 buffer_pos = utoa(edge->type(), buffer, buffer_pos);
2773 buffer[buffer_pos++] = ',';
2774 buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
2775 buffer[buffer_pos++] = ',';
2776 buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
2777 buffer[buffer_pos++] = '\n';
2778 buffer[buffer_pos++] = '\0';
2779 writer_->AddString(buffer.start());
// Streams all edges in children() order, which is grouped by source node
// (the visible assert checks from()->index() is non-decreasing) — this
// ordering is what lets nodes reference their edges by a running offset.
2783 void HeapSnapshotJSONSerializer::SerializeEdges() {
2784 List<HeapGraphEdge*>& edges = snapshot_->children();
2785 for (int i = 0; i < edges.length(); ++i) {
2787 edges[i - 1]->from()->index() <= edges[i]->from()->index());
2788 SerializeEdge(edges[i], i == 0);
2789 if (writer_->aborted()) return;
// Formats one node as the 6 fields declared in kNodeFieldsCount —
// type,name,id,self_size,edge_count,trace_node_id — newline-terminated,
// with a leading comma for every node except entry 0. Field order must
// match the "node_fields" meta array in SerializeSnapshot.
2794 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry) {
2795 // The buffer needs space for 4 unsigned ints, 1 size_t, 5 commas, \n and \0
2796 static const int kBufferSize =
2797 5 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2798 + MaxDecimalDigitsIn<sizeof(size_t)>::kUnsigned // NOLINT
2800 EmbeddedVector<char, kBufferSize> buffer;
2802 if (entry_index(entry) != 0) {
2803 buffer[buffer_pos++] = ',';
2805 buffer_pos = utoa(entry->type(), buffer, buffer_pos);
2806 buffer[buffer_pos++] = ',';
2807 buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
2808 buffer[buffer_pos++] = ',';
2809 buffer_pos = utoa(entry->id(), buffer, buffer_pos);
2810 buffer[buffer_pos++] = ',';
2811 buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
2812 buffer[buffer_pos++] = ',';
2813 buffer_pos = utoa(entry->children_count(), buffer, buffer_pos);
2814 buffer[buffer_pos++] = ',';
2815 buffer_pos = utoa(entry->trace_node_id(), buffer, buffer_pos);
2816 buffer[buffer_pos++] = '\n';
2817 buffer[buffer_pos++] = '\0';
2818 writer_->AddString(buffer.start());
// Streams every node entry in snapshot order, stopping early if the
// output stream has aborted.
2822 void HeapSnapshotJSONSerializer::SerializeNodes() {
2823 List<HeapEntry>& entries = snapshot_->entries();
2824 for (int i = 0; i < entries.length(); ++i) {
2825 SerializeNode(&entries[i]);
2826 if (writer_->aborted()) return;
// Writes the "snapshot" header object: title, uid, the "meta" block that
// describes the flat node/edge/trace array layouts (via the JSON_* macros
// below), and the node/edge/trace counts. The meta field lists here are
// the authoritative schema — SerializeNode/SerializeEdge field order and
// kNodeFieldsCount/kEdgeFieldsCount must match them exactly.
// NOTE(review): several meta entries (e.g. some node/edge type names) fall
// in lines cut from this extract.
2831 void HeapSnapshotJSONSerializer::SerializeSnapshot() {
2832 writer_->AddString("\"title\":\"");
2833 writer_->AddString(snapshot_->title());
2834 writer_->AddString("\"");
2835 writer_->AddString(",\"uid\":");
2836 writer_->AddNumber(snapshot_->uid());
2837 writer_->AddString(",\"meta\":");
2838 // The object describing node serialization layout.
2839 // We use a set of macros to improve readability.
2840 #define JSON_A(s) "[" s "]"
2841 #define JSON_O(s) "{" s "}"
2842 #define JSON_S(s) "\"" s "\""
2843 writer_->AddString(JSON_O(
2844 JSON_S("node_fields") ":" JSON_A(
2848 JSON_S("self_size") ","
2849 JSON_S("edge_count") ","
2850 JSON_S("trace_node_id")) ","
2851 JSON_S("node_types") ":" JSON_A(
2853 JSON_S("hidden") ","
2855 JSON_S("string") ","
2856 JSON_S("object") ","
2858 JSON_S("closure") ","
2859 JSON_S("regexp") ","
2860 JSON_S("number") ","
2861 JSON_S("native") ","
2862 JSON_S("synthetic") ","
2863 JSON_S("concatenated string") ","
2864 JSON_S("sliced string")) ","
2865 JSON_S("string") ","
2866 JSON_S("number") ","
2867 JSON_S("number") ","
2868 JSON_S("number") ","
2869 JSON_S("number") ","
2870 JSON_S("number")) ","
2871 JSON_S("edge_fields") ":" JSON_A(
2873 JSON_S("name_or_index") ","
2874 JSON_S("to_node")) ","
2875 JSON_S("edge_types") ":" JSON_A(
2877 JSON_S("context") ","
2878 JSON_S("element") ","
2879 JSON_S("property") ","
2880 JSON_S("internal") ","
2881 JSON_S("hidden") ","
2882 JSON_S("shortcut") ","
2884 JSON_S("string_or_number") ","
2886 JSON_S("trace_function_info_fields") ":" JSON_A(
2887 JSON_S("function_id") ","
2889 JSON_S("script_name") ","
2890 JSON_S("script_id") ","
2892 JSON_S("column")) ","
2893 JSON_S("trace_node_fields") ":" JSON_A(
2895 JSON_S("function_info_index") ","
2898 JSON_S("children"))));
2902 writer_->AddString(",\"node_count\":");
2903 writer_->AddNumber(snapshot_->entries().length());
2904 writer_->AddString(",\"edge_count\":");
2905 writer_->AddNumber(snapshot_->edges().length());
2906 writer_->AddString(",\"trace_function_count\":");
2908 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2910 count = tracker->function_info_list().length();
2912 writer_->AddNumber(count);
// Emits a 16-bit code unit as a JSON "\uXXXX" escape, high nibble first.
// Only the low 16 bits of |u| are written; callers must pre-split
// supplementary-plane characters into surrogates if needed.
2916 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
2917 static const char hex_chars[] = "0123456789ABCDEF";
2918 w->AddString("\\u");
2919 w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
2920 w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
2921 w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
2922 w->AddCharacter(hex_chars[u & 0xf]);
// Serializes the allocation trace tree recursively from its root; emits
// nothing when allocation tracking was never enabled (no tracker).
2926 void HeapSnapshotJSONSerializer::SerializeTraceTree() {
2927 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2928 if (!tracker) return;
2929 AllocationTraceTree* traces = tracker->trace_tree();
2930 SerializeTraceNode(traces->root());
// Emits one trace node as "id,function_info_index,count,size,[" followed
// by its comma-separated children (recursively) and a closing ']'. Field
// order must match "trace_node_fields" in the snapshot meta.
2934 void HeapSnapshotJSONSerializer::SerializeTraceNode(AllocationTraceNode* node) {
2935 // The buffer needs space for 4 unsigned ints, 4 commas, [ and \0
2936 const int kBufferSize =
2937 4 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2939 EmbeddedVector<char, kBufferSize> buffer;
2941 buffer_pos = utoa(node->id(), buffer, buffer_pos);
2942 buffer[buffer_pos++] = ',';
2943 buffer_pos = utoa(node->function_info_index(), buffer, buffer_pos);
2944 buffer[buffer_pos++] = ',';
2945 buffer_pos = utoa(node->allocation_count(), buffer, buffer_pos);
2946 buffer[buffer_pos++] = ',';
2947 buffer_pos = utoa(node->allocation_size(), buffer, buffer_pos);
2948 buffer[buffer_pos++] = ',';
2949 buffer[buffer_pos++] = '[';
2950 buffer[buffer_pos++] = '\0';
2951 writer_->AddString(buffer.start());
2953 Vector<AllocationTraceNode*> children = node->children();
2954 for (int i = 0; i < children.length(); i++) {
2956 writer_->AddCharacter(',');
2958 SerializeTraceNode(children[i]);
2960 writer_->AddCharacter(']');
2964 // 0-based position is converted to 1-based during the serialization.
// Writes a source position into |buffer|: '0' stands for "unknown"
// (position == -1), otherwise position+1. Returns the new buffer offset
// (the return falls in a cut line of this extract).
2965 static int SerializePosition(int position, const Vector<char>& buffer,
2967 if (position == -1) {
2968 buffer[buffer_pos++] = '0';
2970 ASSERT(position >= 0);
2971 buffer_pos = utoa(static_cast<unsigned>(position + 1), buffer, buffer_pos);
// Emits one line per tracked function:
// "function_id,name,script_name,script_id,line,column\n", names going
// through the shared string table and positions through SerializePosition
// (1-based, 0 = unknown). Skipped entirely when allocation tracking is
// off. Must match "trace_function_info_fields" in the snapshot meta.
2977 void HeapSnapshotJSONSerializer::SerializeTraceNodeInfos() {
2978 AllocationTracker* tracker = snapshot_->profiler()->allocation_tracker();
2979 if (!tracker) return;
2980 // The buffer needs space for 6 unsigned ints, 6 commas, \n and \0
2981 const int kBufferSize =
2982 6 * MaxDecimalDigitsIn<sizeof(unsigned)>::kUnsigned // NOLINT
2984 EmbeddedVector<char, kBufferSize> buffer;
2985 const List<AllocationTracker::FunctionInfo*>& list =
2986 tracker->function_info_list();
2987 bool first_entry = true;
2988 for (int i = 0; i < list.length(); i++) {
2989 AllocationTracker::FunctionInfo* info = list[i];
2992 first_entry = false;
2994 buffer[buffer_pos++] = ',';
2996 buffer_pos = utoa(info->function_id, buffer, buffer_pos);
2997 buffer[buffer_pos++] = ',';
2998 buffer_pos = utoa(GetStringId(info->name), buffer, buffer_pos);
2999 buffer[buffer_pos++] = ',';
3000 buffer_pos = utoa(GetStringId(info->script_name), buffer, buffer_pos);
3001 buffer[buffer_pos++] = ',';
3002 // The cast is safe because script id is a non-negative Smi.
3003 buffer_pos = utoa(static_cast<unsigned>(info->script_id), buffer,
3005 buffer[buffer_pos++] = ',';
3006 buffer_pos = SerializePosition(info->line, buffer, buffer_pos);
3007 buffer[buffer_pos++] = ',';
3008 buffer_pos = SerializePosition(info->column, buffer, buffer_pos);
3009 buffer[buffer_pos++] = '\n';
3010 buffer[buffer_pos++] = '\0';
3011 writer_->AddString(buffer.start());
// Writes |s| as a quoted JSON string (preceded by a raw '\n' separator for
// readability of the output). Handling per byte: standard short escapes
// for control characters with literals; \uXXXX for other bytes <= 31;
// plain printable ASCII verbatim; and for high bytes, decode up to 4 bytes
// of UTF-8 and emit a \u escape, falling back to '?' on invalid sequences.
// NOTE(review): the switch cases and cursor advance are partially cut from
// this extract.
3016 void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
3017 writer_->AddCharacter('\n');
3018 writer_->AddCharacter('\"');
3019 for ( ; *s != '\0'; ++s) {
3022 writer_->AddString("\\b");
3025 writer_->AddString("\\f");
3028 writer_->AddString("\\n");
3031 writer_->AddString("\\r");
3034 writer_->AddString("\\t");
3038 writer_->AddCharacter('\\');
3039 writer_->AddCharacter(*s);
3042 if (*s > 31 && *s < 128) {
3043 writer_->AddCharacter(*s);
3044 } else if (*s <= 31) {
3045 // Special character with no dedicated literal.
3046 WriteUChar(writer_, *s);
3048 // Convert UTF-8 into \u UTF-16 literal.
3049 unsigned length = 1, cursor = 0;
3050 for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
3051 unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
3052 if (c != unibrow::Utf8::kBadChar) {
3053 WriteUChar(writer_, c);
3054 ASSERT(cursor != 0);
3057 writer_->AddCharacter('?');
3062 writer_->AddCharacter('\"');
// Dumps the interned string table as a JSON array ordered by string id.
// Ids are 1-based (see GetStringId), so slot 0 is filled with the literal
// "<dummy>" placeholder and real strings start at index 1.
3066 void HeapSnapshotJSONSerializer::SerializeStrings() {
3067 ScopedVector<const unsigned char*> sorted_strings(
3068 strings_.occupancy() + 1);
3069 for (HashMap::Entry* entry = strings_.Start();
3071 entry = strings_.Next(entry)) {
// Invert the map: value (id) becomes the array index, key the string.
3072 int index = static_cast<int>(reinterpret_cast<uintptr_t>(entry->value));
3073 sorted_strings[index] = reinterpret_cast<const unsigned char*>(entry->key);
3075 writer_->AddString("\"<dummy>\"");
3076 for (int i = 1; i < sorted_strings.length(); ++i) {
3077 writer_->AddCharacter(',');
3078 SerializeString(sorted_strings[i]);
3079 if (writer_->aborted()) return;
3084 } } // namespace v8::internal