// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "profile-generator-inl.h"

#include "global-handles.h"
#include "heap-profiler.h"
#include "scopeinfo.h"
#include "unicode.h"
#include "zone-inl.h"

namespace v8 {
namespace internal {


TokenEnumerator::TokenEnumerator()
    : token_locations_(4),
      token_removed_(4) {
}


TokenEnumerator::~TokenEnumerator() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (!token_removed_[i]) {
      isolate->global_handles()->ClearWeakness(token_locations_[i]);
      isolate->global_handles()->Destroy(token_locations_[i]);
    }
  }
}


int TokenEnumerator::GetTokenId(Object* token) {
  Isolate* isolate = Isolate::Current();
  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (*token_locations_[i] == token && !token_removed_[i]) return i;
  }
  Handle<Object> handle = isolate->global_handles()->Create(token);
  // handle.location() points to a memory cell holding a pointer
  // to a token object in the V8's heap.
  isolate->global_handles()->MakeWeak(handle.location(), this,
                                      TokenRemovedCallback);
  token_locations_.Add(handle.location());
  token_removed_.Add(false);
  return token_locations_.length() - 1;
}


void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                           void* parameter) {
  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
      Utils::OpenHandle(*handle).location());
  handle.Dispose();
}


void TokenEnumerator::TokenRemoved(Object** token_location) {
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (token_locations_[i] == token_location && !token_removed_[i]) {
      token_removed_[i] = true;
      return;
    }
  }
}


StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}


StringsStorage::~StringsStorage() {
  for (HashMap::Entry* p = names_.Start();
       p != NULL;
       p = names_.Next(p)) {
    DeleteArray(reinterpret_cast<const char*>(p->value));
  }
}


const char* StringsStorage::GetCopy(const char* src) {
  int len = static_cast<int>(strlen(src));
  Vector<char> dst = Vector<char>::New(len + 1);
  OS::StrNCpy(dst, src, len);
  dst[len] = '\0';
  uint32_t hash =
      HashSequentialString(dst.start(), len, HEAP->HashSeed());
  return AddOrDisposeString(dst.start(), hash);
}


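// Illustrative usage sketch (not part of the original file): GetCopy
// interns strings, so equal contents map to a single cached buffer and
// callers may compare the returned pointers directly:
//
//   StringsStorage names;
//   const char* a = names.GetCopy("foo");
//   const char* b = names.GetCopy("foo");
//   ASSERT(a == b);  // Second buffer was disposed by AddOrDisposeString.

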
const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}


const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
  HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
  if (cache_entry->value == NULL) {
    // New entry added.
    cache_entry->value = str;
  } else {
    DeleteArray(str);
  }
  return reinterpret_cast<const char*>(cache_entry->value);
}


const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  Vector<char> str = Vector<char>::New(1024);
  int len = OS::VSNPrintF(str, format, args);
  if (len == -1) {
    DeleteArray(str.start());
    return format;
  }
  uint32_t hash = HashSequentialString(
      str.start(), len, HEAP->HashSeed());
  return AddOrDisposeString(str.start(), hash);
}


const char* StringsStorage::GetName(String* name) {
  if (name->IsString()) {
    int length = Min(kMaxNameSize, name->length());
    SmartArrayPointer<char> data =
        name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
    uint32_t hash =
        HashSequentialString(*data, length, name->GetHeap()->HashSeed());
    return AddOrDisposeString(data.Detach(), hash);
  }
  return "";
}


const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}


const char* const CodeEntry::kEmptyNamePrefix = "";


void CodeEntry::CopyData(const CodeEntry& source) {
  tag_ = source.tag_;
  name_prefix_ = source.name_prefix_;
  name_ = source.name_;
  resource_name_ = source.resource_name_;
  line_number_ = source.line_number_;
}


uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}


bool CodeEntry::IsSameAs(CodeEntry* entry) const {
  return this == entry
      || (tag_ == entry->tag_
          && shared_id_ == entry->shared_id_
          && (shared_id_ != 0
              || (name_prefix_ == entry->name_prefix_
                  && name_ == entry->name_
                  && resource_name_ == entry->resource_name_
                  && line_number_ == entry->line_number_)));
}


ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), false);
  return map_entry != NULL ?
      reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
}


ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
  HashMap::Entry* map_entry =
      children_.Lookup(entry, CodeEntryHash(entry), true);
  if (map_entry->value == NULL) {
    // New node added.
    ProfileNode* new_node = new ProfileNode(tree_, entry);
    map_entry->value = new_node;
    children_list_.Add(new_node);
  }
  return reinterpret_cast<ProfileNode*>(map_entry->value);
}


double ProfileNode::GetSelfMillis() const {
  return tree_->TicksToMillis(self_ticks_);
}


double ProfileNode::GetTotalMillis() const {
  return tree_->TicksToMillis(total_ticks_);
}


void ProfileNode::Print(int indent) {
  OS::Print("%5u %5u %*c %s%s [%d]",
            total_ticks_, self_ticks_,
            indent, ' ',
            entry_->name_prefix(),
            entry_->name(),
            entry_->security_token_id());
  if (entry_->resource_name()[0] != '\0')
    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  OS::Print("\n");
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}


class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};


ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG,
                  "",
                  "(root)",
                  "",
                  0,
                  TokenEnumerator::kNoSecurityToken),
      root_(new ProfileNode(this, &root_entry_)) {
}


ProfileTree::~ProfileTree() {
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}


void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
  ProfileNode* node = root_;
  for (CodeEntry** entry = path.start() + path.length() - 1;
       entry != path.start() - 1;
       --entry) {
    if (*entry != NULL) {
      node = node->FindOrAddChild(*entry);
    }
  }
  node->IncrementSelfTicks();
}


void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
  ProfileNode* node = root_;
  for (CodeEntry** entry = path.start();
       entry != path.start() + path.length();
       ++entry) {
    if (*entry != NULL) {
      node = node->FindOrAddChild(*entry);
    }
  }
  node->IncrementSelfTicks();
}


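// Worked example (illustrative, with hypothetical entries): for a
// sampled stack {bar, foo, main}, top frame first, AddPathFromEnd walks
// main -> foo -> bar and bumps self ticks on the "bar" node, producing
// the top-down chain root / main / foo / bar. AddPathFromStart walks
// bar -> foo -> main and bumps the final "main" node, producing the
// bottom-up chain root / bar / foo / main. CpuProfile::AddPath (below)
// feeds every sample to both trees.

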
struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;
  ProfileNode* dst;
};


class FilteredCloneCallback {
 public:
  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
      : stack_(10),
        security_token_id_(security_token_id) {
    stack_.Add(NodesPair(NULL, dst_root));
  }

  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
    if (IsTokenAcceptable(child->entry()->security_token_id(),
                          parent->entry()->security_token_id())) {
      ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
      clone->IncreaseSelfTicks(child->self_ticks());
      stack_.Add(NodesPair(child, clone));
    } else {
      // Attribute ticks to parent node.
      stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
    }
  }

  void AfterAllChildrenTraversed(ProfileNode* parent) { }

  void AfterChildTraversed(ProfileNode*, ProfileNode* child) {
    if (stack_.last().src == child) {
      stack_.RemoveLast();
    }
  }

 private:
  bool IsTokenAcceptable(int token, int parent_token) {
    if (token == TokenEnumerator::kNoSecurityToken
        || token == security_token_id_) return true;
    if (token == TokenEnumerator::kInheritsSecurityToken) {
      ASSERT(parent_token != TokenEnumerator::kInheritsSecurityToken);
      return parent_token == TokenEnumerator::kNoSecurityToken
          || parent_token == security_token_id_;
    }
    return false;
  }

  List<NodesPair> stack_;
  int security_token_id_;
};


void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
  FilteredCloneCallback cb(root_, security_token_id);
  src->TraverseDepthFirst(&cb);
  CalculateTotalTicks();
}


void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
  ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
}


class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};


// Non-recursive implementation of a depth-first post-order tree traversal.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}


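// Illustrative sketch (not part of the original source): any type with
// the three hooks used above works as a Callback. A minimal node
// counter, assuming only the ProfileNode type declared in the header:
//
//   class CountNodesCallback {
//    public:
//     CountNodesCallback() : count_(0) { }
//     void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }
//     void AfterAllChildrenTraversed(ProfileNode*) { ++count_; }
//     void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
//     int count() const { return count_; }
//    private:
//     int count_;
//   };
//
//   // Usage: CountNodesCallback cb; tree->TraverseDepthFirst(&cb);

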
class CalculateTotalTicksCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    node->IncreaseTotalTicks(node->self_ticks());
  }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    parent->IncreaseTotalTicks(child->total_ticks());
  }
};


void ProfileTree::CalculateTotalTicks() {
  CalculateTotalTicksCallback cb;
  TraverseDepthFirst(&cb);
}


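// Worked example (illustrative): for a parent with self_ticks == 2 and
// one child with self_ticks == 3, the post-order traversal first sets
// the child's total to 3, then the parent's total to 2 + 3 == 5.

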
void ProfileTree::ShortPrint() {
  OS::Print("root: %u %u %.2fms %.2fms\n",
            root_->total_ticks(), root_->self_ticks(),
            root_->GetTotalMillis(), root_->GetSelfMillis());
}


void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
  top_down_.AddPathFromEnd(path);
  bottom_up_.AddPathFromStart(path);
}


void CpuProfile::CalculateTotalTicks() {
  top_down_.CalculateTotalTicks();
  bottom_up_.CalculateTotalTicks();
}


void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
  top_down_.SetTickRatePerMs(actual_sampling_rate);
  bottom_up_.SetTickRatePerMs(actual_sampling_rate);
}


CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
  CpuProfile* clone = new CpuProfile(title_, uid_);
  clone->top_down_.FilteredClone(&top_down_, security_token_id);
  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
  return clone;
}


void CpuProfile::ShortPrint() {
  OS::Print("top down ");
  top_down_.ShortPrint();
  OS::Print("bottom up ");
  bottom_up_.ShortPrint();
}


void CpuProfile::Print() {
  OS::Print("[Top down]:\n");
  top_down_.Print();
  OS::Print("[Bottom up]:\n");
  bottom_up_.Print();
}


CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;


void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}


void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}


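// Overlap example (illustrative addresses): an existing entry
// [0x28, 0x38) intersects a new range [0x20, 0x30) because
// start2 (0x28) < end (0x30) and start (0x20) < end2 (0x38), so it is
// queued for removal; a neighbor at [0x30, 0x40) is left untouched.

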
CodeEntry* CodeMap::FindEntry(Address addr) {
  CodeTree::Locator locator;
  if (tree_.FindGreatestLessThan(addr, &locator)) {
    // locator.key() <= addr. Need to check that addr is within entry.
    const CodeEntryInfo& entry = locator.value();
    if (addr < (locator.key() + entry.size))
      return entry.entry;
  }
  return NULL;
}


int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}


void CodeMap::MoveCode(Address from, Address to) {
  if (from == to) return;
  CodeTree::Locator locator;
  if (!tree_.Find(from, &locator)) return;
  CodeEntryInfo entry = locator.value();
  tree_.Remove(from);
  AddCode(to, entry.entry, entry.size);
}


void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
}


void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}


CpuProfilesCollection::CpuProfilesCollection()
    : profiles_uids_(UidsMatch),
      current_profiles_semaphore_(OS::CreateSemaphore(1)) {
  // Create list of unabridged profiles.
  profiles_by_token_.Add(new List<CpuProfile*>());
}


static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}


static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}


static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
    delete *list_ptr;
  }
}


CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  current_profiles_.Iterate(DeleteCpuProfile);
  detached_profiles_.Iterate(DeleteCpuProfile);
  profiles_by_token_.Iterate(DeleteProfilesList);
  code_entries_.Iterate(DeleteCodeEntry);
}


bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
  ASSERT(uid > 0);
  current_profiles_semaphore_->Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_->Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start a profile with the same title.
      current_profiles_semaphore_->Signal();
      return false;
    }
  }
  current_profiles_.Add(new CpuProfile(title, uid));
  current_profiles_semaphore_->Signal();
  return true;
}


bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
  return StartProfiling(GetName(title), uid);
}


CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
                                                 const char* title,
                                                 double actual_sampling_rate) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_->Wait();
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_->Signal();

  if (profile != NULL) {
    profile->CalculateTotalTicks();
    profile->SetActualSamplingRate(actual_sampling_rate);
    List<CpuProfile*>* unabridged_list =
        profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
    unabridged_list->Add(profile);
    HashMap::Entry* entry =
        profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                              static_cast<uint32_t>(profile->uid()),
                              true);
    ASSERT(entry->value == NULL);
    entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
    return GetProfile(security_token_id, profile->uid());
  }
  return NULL;
}


CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
                                              unsigned uid) {
  int index = GetProfileIndex(uid);
  if (index < 0) return NULL;
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list->at(index);
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  if (list->at(index) == NULL) {
    (*list)[index] =
        unabridged_list->at(index)->FilteredClone(security_token_id);
  }
  return list->at(index);
}


int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                static_cast<uint32_t>(uid),
                                                false);
  return entry != NULL ?
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
}


bool CpuProfilesCollection::IsLastProfile(const char* title) {
  // Called from VM thread, and only it can mutate the list,
  // so no locking is needed here.
  if (current_profiles_.length() != 1) return false;
  return StrLength(title) == 0
      || strcmp(current_profiles_[0]->title(), title) == 0;
}


void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  // Called from VM thread for a completed profile.
  unsigned uid = profile->uid();
  int index = GetProfileIndex(uid);
  if (index == -1) {
    detached_profiles_.RemoveElement(profile);
    return;
  }
  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                        static_cast<uint32_t>(uid));
  // Decrement all indexes above the deleted one.
  for (HashMap::Entry* p = profiles_uids_.Start();
       p != NULL;
       p = profiles_uids_.Next(p)) {
    intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
    if (p_index > index) {
      p->value = reinterpret_cast<void*>(p_index - 1);
    }
  }
  for (int i = 0; i < profiles_by_token_.length(); ++i) {
    List<CpuProfile*>* list = profiles_by_token_[i];
    if (list != NULL && index < list->length()) {
      // Move all filtered clones into detached_profiles_,
      // so we can know that they are still in use.
      CpuProfile* cloned_profile = list->Remove(index);
      if (cloned_profile != NULL && cloned_profile != profile) {
        detached_profiles_.Add(cloned_profile);
      }
    }
  }
}


int CpuProfilesCollection::TokenToIndex(int security_token_id) {
  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
  return security_token_id + 1;  // kNoSecurityToken -> 0, 0 -> 1, ...
}


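// Mapping example (illustrative): TokenToIndex(-1 /* kNoSecurityToken */)
// == 0, so slot 0 of profiles_by_token_ always holds the unabridged list
// created in the constructor; token 0 maps to slot 1, token 5 to slot 6,
// and GetProfilesList below grows the list with NULLs on demand.

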
List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  }
  List<CpuProfile*>* list = profiles_by_token_[index];
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return list;
}


List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list;
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  const int current_count = unabridged_list->length();
  for (int i = 0; i < current_count; ++i) {
    if (list->at(i) == NULL) {
      (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    }
  }
  return list;
}


CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               String* name,
                                               String* resource_name,
                                               int line_number) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   GetName(resource_name),
                                   line_number,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name_prefix,
                                               String* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   name_prefix,
                                   GetName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               int args_count) {
  CodeEntry* entry = new CodeEntry(tag,
                                   "args_count: ",
                                   GetName(args_count),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


void CpuProfilesCollection::AddPathToCurrentProfiles(
    const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relative to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_->Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(path);
  }
  current_profiles_semaphore_->Signal();
}


void SampleRateCalculator::Tick() {
  if (--wall_time_query_countdown_ == 0)
    UpdateMeasurements(OS::TimeCurrentMillis());
}


void SampleRateCalculator::UpdateMeasurements(double current_time) {
  if (measurements_count_++ != 0) {
    const double measured_ticks_per_ms =
        (kWallTimeQueryIntervalMs * ticks_per_ms_) /
        (current_time - last_wall_time_);
    // Update the average value.
    ticks_per_ms_ +=
        (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
    // Update the externally accessible result.
    result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
  }
  last_wall_time_ = current_time;
  wall_time_query_countdown_ =
      static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
}


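// Worked example (illustrative; assumes kWallTimeQueryIntervalMs is
// 100 ms, per its definition in the header): with ticks_per_ms_ == 1.0,
// a second measurement arriving 50 ms after the first yields
// measured_ticks_per_ms = (100 * 1.0) / 50 = 2.0, and the running
// average becomes 1.0 + (2.0 - 1.0) / 2 = 1.5 ticks per ms.

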
const char* const ProfileGenerator::kAnonymousFunctionName =
    "(anonymous function)";
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";


ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)) {
}


void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As the actual number of decoded code entries may vary, initialize
  // the entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    *entry++ = code_map_.FindEntry(sample.pc);

    if (sample.has_external_callback) {
      // Don't use PC when in external callback code, as it can point
      // inside the callback's code, and we will erroneously report
      // that a callback calls itself.
      *(entries.start()) = NULL;
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else if (sample.tos != NULL) {
      // Find out if the top of stack was pointing inside a JS function,
      // meaning that we have encountered a frameless invocation.
      *entry = code_map_.FindEntry(sample.tos);
      if (*entry != NULL && !(*entry)->is_js_function()) {
        *entry = NULL;
      }
      entry++;
    }

    for (const Address* stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(entries);
}


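// Layout example (illustrative): for a sample with two stack frames the
// resulting vector is { FindEntry(pc), FindEntry(tos) or NULL, frame 0,
// frame 1, optional VM-state entry }; slots left NULL are simply
// skipped by ProfileTree::AddPathFromEnd / AddPathFromStart.

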
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut);
}


HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden || type == kWeak);
}


void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}


const int HeapEntry::kNoEntry = -1;

HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : painted_(false),
      user_reachable_(false),
      dominator_(kNoEntry),
      type_(type),
      retainers_count_(0),
      retainers_index_(-1),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      retained_size_(0),
      id_(id),
      snapshot_(snapshot),
      name_(name) { }


void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
                                  const char* name,
                                  HeapEntry* entry) {
  HeapGraphEdge edge(type, name, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
  ++entry->retainers_count_;
}


void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
                                    int index,
                                    HeapEntry* entry) {
  HeapGraphEdge edge(type, index, this->index(), entry->index());
  snapshot_->edges().Add(edge);
  ++children_count_;
  ++entry->retainers_count_;
}


Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}


void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6d %7d @%6u %*c %s%s: ",
            self_size(), retained_size(), id(),
            indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}


const char* HeapEntry::TypeAsString() {
  switch (type()) {
    case kHidden: return "/hidden/";
    case kObject: return "/object/";
    case kClosure: return "/closure/";
    case kString: return "/string/";
    case kCode: return "/code/";
    case kArray: return "/array/";
    case kRegExp: return "/regexp/";
    case kHeapNumber: return "/number/";
    case kNative: return "/native/";
    case kSynthetic: return "/synthetic/";
    default: return "???";
  }
}


// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 40;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 48;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};

}  // namespace


HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}


void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}


void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = collection_->last_assigned_id();
}


static void HeapEntryClearPaint(HeapEntry* entry_ptr) {
  entry_ptr->clear_paint();
}


void HeapSnapshot::ClearPaint() {
  entries_.Iterate(HeapEntryClearPaint);
}


HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}


HeapEntry* HeapSnapshot::AddGcRootsEntry() {
  ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "(GC roots)",
                              HeapObjectsMap::kGcRootsObjectId,
                              0);
  gc_roots_index_ = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
  ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
  ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
  HeapEntry* entry = AddEntry(
      HeapEntry::kObject,
      VisitorSynchronization::kTagNames[tag],
      HeapObjectsMap::GetNthGcSubrootId(tag),
      0);
  gc_subroot_indexes_[tag] = entry->index();
  return entry;
}


HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}


void HeapSnapshot::FillChildrenAndRetainers() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  ASSERT(retainers().is_empty());
  retainers().Allocate(edges().length());
  int children_index = 0;
  int retainers_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
    retainers_index = entry->set_retainers_index(retainers_index);
  }
  ASSERT(edges().length() == children_index);
  ASSERT(edges().length() == retainers_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
    edge->to()->add_retainer(edge);
  }
}


void HeapSnapshot::SetDominatorsToSelf() {
  for (int i = 0; i < entries_.length(); ++i) {
    entries_[i].set_dominator(&entries_[i]);
  }
}


class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};


HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
  List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
  // Perform a binary search by id.
  int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
  if (index == -1)
    return NULL;
  return entries_by_id->at(index);
}


template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}


List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}


void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}


template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
  return list.capacity() * sizeof(T);
}


size_t HeapSnapshot::RawSnapshotSize() const {
  return
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(retainers_) +
      GetMemoryUsedByList(sorted_entries_);
}


// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;

HeapObjectsMap::HeapObjectsMap()
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch) {
  // This dummy element solves a problem with entries_map_.
  // When we do lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value or it has created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}


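// Consequence, spelled out (illustrative): entries_map_ stores entry
// indexes cast to void*, so a NULL value would be ambiguous between
// "not present" and "index 0". Reserving index 0 for the dummy
// guarantees every real entry stores a non-NULL value, which MoveObject
// relies on when it re-files an entry under its new address.

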
void HeapObjectsMap::SnapshotGenerationFinished() {
  RemoveDeadEntries();
}


void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  if (from_value == NULL) return;
  int from_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(from_value));
  entries_.at(from_entry_info_index).addr = to;
  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
  if (to_entry->value != NULL) {
    int to_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
    // Without this operation we will have two EntryInfo's with the same
    // value in the addr field. That is bad because later, in
    // RemoveDeadEntries, one of these entries would be removed together
    // with the corresponding entries_map_ entry.
    entries_.at(to_entry_info_index).addr = NULL;
  }
  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}


SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}


SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = true;
    entry_info.size = size;
    return entry_info.id;
  }
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}


void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}


void HeapObjectsMap::UpdateHeapObjectsMap() {
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::UpdateHeapObjectsMap");
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
  }
  RemoveDeadEntries();
}


void HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int preferred_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= preferred_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return;
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return;
  }
  stream->EndOfStream();
}


void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}


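// Compaction example (illustrative): with entries_ == { dummy, live,
// dead, live }, the loop copies index 3 down over index 2 and rewinds
// the list to length 3. Surviving entries keep their IDs while their
// indexes shift, so entries_map_ is re-pointed at the new indexes and
// dead addresses are dropped from the map.

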
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= HashSequentialString(label,
                             static_cast<int>(strlen(label)),
                             HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}


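// ID-space example (illustrative; assumes kObjectIdStep == 2, per the
// header): the reserved native IDs above are odd (internal root = 1,
// GC roots = 3, first GC subroot = 5) and FindOrAddEntry keeps handing
// out odd IDs, while the final "id << 1" here makes every
// embedder-object ID even, keeping the two ID spaces disjoint as the
// comment before the constants describes.

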
HeapSnapshotsCollection::HeapSnapshotsCollection()
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()) {
}


static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}


HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}


HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}


void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);
    entry->value = snapshot;
  }
}


HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
  HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                 static_cast<uint32_t>(uid),
                                                 false);
  return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
}


void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}


Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
    SnapshotObjectId id) {
  // First perform a full GC in order to avoid dead objects.
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::FindHeapObjectById");
  AssertNoAllocation no_allocation;
  HeapObject* object = NULL;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  // Make sure that the object with the given id is still reachable.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (ids_.FindEntry(obj->address()) == id) {
      ASSERT(object == NULL);
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }
  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
}


HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}


int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}


void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}


HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}


void HeapObjectsSet::Clear() {
  entries_.Clear();
}


bool HeapObjectsSet::Contains(Object* obj) {
  if (!obj->IsHeapObject()) return false;
  HeapObject* object = HeapObject::cast(obj);
  return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
}


void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}


const char* HeapObjectsSet::GetTag(Object* obj) {
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
  return cache_entry != NULL
      ? reinterpret_cast<const char*>(cache_entry->value)
      : NULL;
}


void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}


HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));


V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL) {
}


V8HeapExplorer::~V8HeapExplorer() {
}


HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsGlobalContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / GlobalContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}


HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  int object_size = object->Size();
  SnapshotObjectId object_id =
      collection_->GetObjectId(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
}


class GcSubrootsEnumerator : public ObjectVisitor {
 public:
  GcSubrootsEnumerator(
      SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
      : filler_(filler),
        explorer_(explorer),
        previous_object_count_(0),
        object_count_(0) {
  }
  void VisitPointers(Object** start, Object** end) {
    object_count_ += end - start;
  }
  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Skip empty subroots.
    if (previous_object_count_ != object_count_) {
      previous_object_count_ = object_count_;
      filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
    }
  }
 private:
  SnapshotFillerInterface* filler_;
  V8HeapExplorer* explorer_;
  intptr_t previous_object_count_;
  intptr_t object_count_;
};


void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}


const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE: return "system / Map";
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}


int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
  int objects_count = 0;
  for (HeapObject* obj = iterator->next();
       obj != NULL;
       obj = iterator->next()) {
    objects_count++;
  }
  return objects_count;
}


class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(1) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }

 private:
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};


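// Tagging trick, spelled out (illustrative): a live heap pointer carries
// kHeapObjectTag (binary 01) in its low bits; MarkVisitedField ORs in
// kFailureTag (binary 11), so an already-reported slot temporarily looks
// like a Failure. CheckVisitedAndUnmark detects that, restores the
// original pointer, and makes VisitPointers skip the slot, so fields
// already reported via SetInternalReference and friends are not
// double-counted as hidden references.

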
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
    extract_indexed_refs = false;
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}


void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We need to reference JS global objects from the snapshot's root.
  // We use JSGlobalProxy because this is what the embedder (e.g. browser)
  // uses for the global object.
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  is_debug_object = object->IsGlobalObject() &&
      Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
#endif
  if (!is_debug_object) {
    SetUserGlobalReference(object);
  }
}


void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), js_fun->prototype());
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
                         JSFunction::kContextOffset);
    for (int i = JSFunction::kNonWeakFieldsEndOffset;
         i < JSFunction::kSize;
         i += kPointerSize) {
      SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
    }
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}


void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
  if (string->IsConsString()) {
    ConsString* cs = ConsString::cast(string);
    SetInternalReference(cs, entry, "first", cs->first());
    SetInternalReference(cs, entry, "second", cs->second());
  } else if (string->IsSlicedString()) {
    SlicedString* ss = SlicedString::cast(string);
    SetInternalReference(ss, entry, "parent", ss->parent());
  }
}


void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
      FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_INDEX, GlobalObject, global);
  if (context->IsGlobalContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->data(), "(context data)");
    GLOBAL_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::GLOBAL_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
          FixedArray::OffsetOfElementAt(i));
    }
  }
}


void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  if (!map->instance_descriptors()->IsEmpty()) {
    TagObject(map->instance_descriptors(), "(map descriptors)");
    SetInternalReference(map, entry,
                         "descriptors", map->instance_descriptors(),
                         Map::kInstanceDescriptorsOrBitField3Offset);
  }
  if (map->unchecked_prototype_transitions()->IsFixedArray()) {
    TagObject(map->prototype_transitions(), "(prototype transitions)");
    SetInternalReference(map, entry,
                         "prototype_transitions", map->prototype_transitions(),
                         Map::kPrototypeTransitionsOrBackPointerOffset);
  } else {
    SetInternalReference(map, entry,
                         "back_pointer", map->GetBackPointer(),
                         Map::kPrototypeTransitionsOrBackPointerOffset);
  }
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
}

void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "this_property_assignments",
                       shared->this_property_assignments(),
                       SharedFunctionInfo::kThisPropertyAssignmentsOffset);
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}

void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "data", script->data(),
                       Script::kDataOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}

void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}

void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  SetInternalReference(code, entry,
                       "type_feedback_info", code->type_feedback_info(),
                       Code::kTypeFeedbackInfoOffset);
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
}

void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}

void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add function variable.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetClosureReference(js_obj, entry, name, context->get(idx));
      }
    }
  }
}

void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          int index = descs->GetFieldIndex(i);
          if (index < js_obj->map()->inobject_properties()) {
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->InObjectPropertyAt(index),
                NULL,
                js_obj->GetInObjectPropertyOffset(index));
          } else {
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->FastPropertyAt(index));
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
        case MAP_TRANSITION:  // we do not care about transitions here...
        case ELEMENTS_TRANSITION:
        case CONSTANT_TRANSITION:
        case NULL_DESCRIPTOR:  // ... and not about "holes"
          break;
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsJSGlobalPropertyCell()
            ? JSGlobalPropertyCell::cast(target)->value()
            : target;
        if (String::cast(k)->length() > 0) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        } else {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        }
      }
    }
  }
}

void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}

void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
  int length = js_obj->GetInternalFieldCount();
  for (int i = 0; i < length; ++i) {
    Object* o = js_obj->GetInternalField(i);
    SetInternalReference(
        js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
  }
}

String* V8HeapExplorer::GetConstructorName(JSObject* object) {
  Heap* heap = object->GetHeap();
  if (object->IsJSFunction()) return heap->closure_symbol();
  String* constructor_name = object->constructor_name();
  if (constructor_name == heap->Object_symbol()) {
    // Look up an immediate "constructor" property; if it is a function,
    // return its name. This is for instances of binding objects, which
    // have prototype constructor type "Object".
    Object* constructor_prop = NULL;
    LookupResult result(heap->isolate());
    object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
    if (result.IsProperty()) {
      constructor_prop = result.GetLazyValue();
    }
    // Guard against the lookup failing: constructor_prop stays NULL then.
    if (constructor_prop != NULL && constructor_prop->IsJSFunction()) {
      Object* maybe_name = JSFunction::cast(constructor_prop)->shared()->name();
      if (maybe_name->IsString()) {
        String* name = String::cast(maybe_name);
        if (name->length() > 0) return name;
      }
    }
  }
  return object->constructor_name();
}

HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
  if (!obj->IsHeapObject()) return NULL;
  return filler_->FindOrAddEntry(obj, this);
}

class RootsReferencesExtractor : public ObjectVisitor {
 private:
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};

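// A note on the diffing scheme above, for illustration: the extractor is
// run over the roots twice. The first pass (VISIT_ONLY_STRONG) records
// only strong root pointers; the second (VISIT_ALL) records every root
// pointer and, via Synchronize(), remembers where each tagged root group
// ends inside all_references_. FillReferences() then walks both lists in
// lockstep: a pointer present in both is reported as a strong subroot
// edge, one present only in all_references_ as a weak edge. This assumes
// both passes see the surviving roots in the same order. A rough trace
// (hypothetical pointers):
//   strong_references_ = [A, C]
//   all_references_    = [A, B, C]   =>  A strong, B weak, C strong.
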
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}

bool V8HeapExplorer::IsEssentialObject(Object* object) {
  // We have to use raw_unchecked_* versions because checked versions
  // would fail during iteration over object properties.
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->raw_unchecked_empty_byte_array()
      && object != heap_->raw_unchecked_empty_fixed_array()
      && object != heap_->raw_unchecked_empty_descriptor_array()
      && object != heap_->raw_unchecked_fixed_array_map()
      && object != heap_->raw_unchecked_global_property_cell_map()
      && object != heap_->raw_unchecked_shared_function_info_map()
      && object != heap_->raw_unchecked_free_space_map()
      && object != heap_->raw_unchecked_one_pointer_filler_map()
      && object != heap_->raw_unchecked_two_pointer_filler_map();
}

void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         String* reference_name,
                                         Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
                               parent_entry,
                               collection_->names()->GetName(reference_name),
                               child_entry);
  }
}

void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
                                            int parent_entry,
                                            const char* reference_name,
                                            Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
}

void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
                                         int parent_entry,
                                         int index,
                                         Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kElement,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}

void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}

void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               collection_->names()->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}

void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
                                        int parent_entry,
                                        int index,
                                        Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL && IsEssentialObject(child_obj)) {
    filler_->SetIndexedReference(HeapGraphEdge::kHidden,
                                 parent_entry,
                                 index,
                                 child_entry);
  }
}

void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
                                      int parent_entry,
                                      int index,
                                      Object* child_obj,
                                      int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetIndexedReference(HeapGraphEdge::kWeak,
                                 parent_entry,
                                 index,
                                 child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}

void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          String* reference_name,
                                          Object* child_obj,
                                          const char* name_format_string,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    HeapGraphEdge::Type type = reference_name->length() > 0 ?
        HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
    const char* name = name_format_string != NULL ?
        collection_->names()->GetFormatted(
            name_format_string,
            *reference_name->ToCString(DISALLOW_NULLS,
                                       ROBUST_STRING_TRAVERSAL)) :
        collection_->names()->GetName(reference_name);

    filler_->SetNamedReference(type,
                               parent_entry,
                               name,
                               child_entry);
    IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
  }
}

void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
                                                  int parent_entry,
                                                  String* reference_name,
                                                  Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    filler_->SetNamedReference(HeapGraphEdge::kShortcut,
                               parent_entry,
                               collection_->names()->GetName(reference_name),
                               child_entry);
  }
}

void V8HeapExplorer::SetRootGcRootsReference() {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->root()->index(),
      snapshot_->gc_roots());
}

void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  ASSERT(child_entry != NULL);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kShortcut,
      snapshot_->root()->index(),
      child_entry);
}

void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
  filler_->SetIndexedAutoIndexReference(
      HeapGraphEdge::kElement,
      snapshot_->gc_roots()->index(),
      snapshot_->gc_subroot(tag));
}

void V8HeapExplorer::SetGcSubrootReference(
    VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry != NULL) {
    const char* name = GetStrongGcSubrootName(child_obj);
    if (name != NULL) {
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          snapshot_->gc_subroot(tag)->index(),
          name,
          child_entry);
    } else {
      filler_->SetIndexedAutoIndexReference(
          is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
          snapshot_->gc_subroot(tag)->index(),
          child_entry);
    }
  }
}

const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
    SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}

void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
  if (IsEssentialObject(obj)) {
    HeapEntry* entry = GetEntry(obj);
    if (entry->name()[0] == '\0') {
      entry->set_name(tag);
    }
  }
}

class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsGlobalContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};

// Modifies heap. Must not be run during heap traversal.
void V8HeapExplorer::TagGlobalObjects() {
  HandleScope scope;
  Isolate* isolate = Isolate::Current();
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  Handle<String> document_string =
      isolate->factory()->NewStringFromAscii(CStrVector("document"));
  Handle<String> url_string =
      isolate->factory()->NewStringFromAscii(CStrVector("URL"));
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    urls[i] = NULL;
    HandleScope scope;
    Handle<JSGlobalObject> global_obj = enumerator.at(i);
    Object* obj_document;
    if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
        obj_document->IsJSObject()) {
      JSObject* document = JSObject::cast(obj_document);
      Object* obj_url;
      if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
          obj_url->IsString()) {
        urls[i] = collection_->names()->GetName(String::cast(obj_url));
      }
    }
  }

  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}

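// Note that TagGlobalObjects works in two phases on purpose: reading the
// "document.URL" property may call back into JavaScript and allocate, so
// all URL strings are copied out first; only then, under
// AssertNoAllocation, are the (now stable) global object pointers used as
// keys in objects_tags_. A sketch of the pattern, for illustration:
//
//   const char** urls = ...;          // phase 1: may allocate / move objects
//   AssertNoAllocation no_allocation;
//   ... use raw Object* pointers ...  // phase 2: heap must not move
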
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;
};

class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntry::Type entries_type_;
};

HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
  v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
  intptr_t elements = info->GetElementCount();
  intptr_t size = info->GetSizeInBytes();
  const char* name = elements != -1
      ? collection_->names()->GetFormatted(
            "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
      : collection_->names()->GetCopy(info->GetLabel());
  return snapshot_->AddEntry(
      entries_type_,
      name,
      HeapObjectsMap::GenerateId(info),
      size != -1 ? static_cast<int>(size) : 0);
}

NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}

NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}

int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}

void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  // Record objects that are joined into ObjectGroups.
  isolate->heap()->CallGlobalGCPrologueCallback();
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info_ == NULL) continue;
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
    for (size_t j = 0; j < group->length_; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects_[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info_ = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGlobalGCEpilogueCallback();
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}

void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = Isolate::Current();
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent_;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children_;
    for (size_t j = 0; j < group->length_; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
}

List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
    v8::RetainedObjectInfo* info) {
  HashMap::Entry* entry =
      objects_by_info_.Lookup(info, InfoHash(info), true);
  if (entry->value != NULL) {
    info->Dispose();
  } else {
    entry->value = new List<HeapObject*>(4);
  }
  return reinterpret_cast<List<HeapObject*>* >(entry->value);
}

bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;
  return true;
}

class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;
  const char* label_;
};

NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = collection_->names()->GetCopy(label);
  uint32_t hash = HashSequentialString(label_copy,
                                       static_cast<int>(strlen(label_copy)),
                                       HEAP->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}

void NativeObjectsExplorer::SetNativeRootReference(
    v8::RetainedObjectInfo* info) {
  HeapEntry* child_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(child_entry != NULL);
  NativeGroupRetainedObjectInfo* group_info =
      FindOrAddGroupInfo(info->GetGroupLabel());
  HeapEntry* group_entry =
      filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
  filler_->SetNamedAutoIndexReference(
      HeapGraphEdge::kInternal,
      group_entry->index(),
      child_entry);
}

void NativeObjectsExplorer::SetWrapperNativeReferences(
    HeapObject* wrapper, v8::RetainedObjectInfo* info) {
  HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
  ASSERT(wrapper_entry != NULL);
  HeapEntry* info_entry =
      filler_->FindOrAddEntry(info, native_entries_allocator_);
  ASSERT(info_entry != NULL);
  filler_->SetNamedReference(HeapGraphEdge::kInternal,
                             wrapper_entry->index(),
                             "native",
                             info_entry);
  filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
                                        info_entry->index(),
                                        wrapper_entry);
}

void NativeObjectsExplorer::SetRootNativeRootsReference() {
  for (HashMap::Entry* entry = native_groups_.Start();
       entry != NULL;
       entry = native_groups_.Next(entry)) {
    NativeGroupRetainedObjectInfo* group_info =
        static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
    HeapEntry* group_entry =
        filler_->FindOrAddEntry(group_info, native_entries_allocator_);
    ASSERT(group_entry != NULL);
    filler_->SetIndexedAutoIndexReference(
        HeapGraphEdge::kElement,
        snapshot_->root()->index(),
        group_entry);
  }
}

void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
  if (in_groups_.Contains(*p)) return;
  Isolate* isolate = Isolate::Current();
  v8::RetainedObjectInfo* info =
      isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
  if (info == NULL) return;
  GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
}

class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        collection_(snapshot->collection()),
        entries_(entries) { }
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    parent_entry->SetNamedReference(
        type,
        collection_->names()->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;
  HeapSnapshotsCollection* collection_;
  HeapEntriesMap* entries_;
};

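// For illustration, a typical exchange with SnapshotFiller during
// extraction looks roughly like this (names abbreviated):
//
//   HeapEntry* child = filler->FindOrAddEntry(obj, allocator);
//   filler->SetNamedReference(HeapGraphEdge::kProperty,
//                             parent_entry_index, "foo", child);
//
// i.e. the explorers deal only in HeapThing pointers and entry indices;
// SnapshotFiller owns the mapping between the two and forwards edge
// creation to the HeapEntry objects stored in the snapshot.
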
HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
                                             v8::ActivityControl* control)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this),
      dom_explorer_(snapshot_, this) {
}

bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef DEBUG
  Heap* debug_heap = Isolate::Current()->heap();
  ASSERT(!debug_heap->old_data_space()->was_swept_conservatively());
  ASSERT(!debug_heap->old_pointer_space()->was_swept_conservatively());
  ASSERT(!debug_heap->code_space()->was_swept_conservatively());
  ASSERT(!debug_heap->cell_space()->was_swept_conservatively());
  ASSERT(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  AssertNoAllocation no_alloc;

#ifdef DEBUG
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef DEBUG
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildrenAndRetainers();
  snapshot_->RememberLastJSObjectId();

  if (!SetEntriesDominators()) return false;
  if (!CalculateRetainedSizes()) return false;

  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}

void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}

bool HeapSnapshotGenerator::ProgressReport(bool force) {
  const int kProgressReportGranularity = 10000;
  if (control_ != NULL
      && (force || progress_counter_ % kProgressReportGranularity == 0)) {
    return
        control_->ReportProgressValue(progress_counter_, progress_total_) ==
        v8::ActivityControl::kContinue;
  }
  return true;
}

void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
  if (control_ == NULL) return;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  progress_total_ = iterations_count * (
      v8_heap_explorer_.EstimateObjectsCount(&iterator) +
      dom_explorer_.EstimateObjectsCount());
  progress_counter_ = 0;
}

bool HeapSnapshotGenerator::FillReferences() {
  SnapshotFiller filler(snapshot_, &entries_);
  v8_heap_explorer_.AddRootEntries(&filler);
  return v8_heap_explorer_.IterateAndExtractReferences(&filler)
      && dom_explorer_.IterateAndExtractReferences(&filler);
}

bool HeapSnapshotGenerator::IsUserGlobalReference(const HeapGraphEdge* edge) {
  ASSERT(edge->from() == snapshot_->root());
  return edge->type() == HeapGraphEdge::kShortcut;
}

void HeapSnapshotGenerator::MarkUserReachableObjects() {
  List<HeapEntry*> worklist;

  Vector<HeapGraphEdge*> children = snapshot_->root()->children();
  for (int i = 0; i < children.length(); ++i) {
    if (IsUserGlobalReference(children[i])) {
      worklist.Add(children[i]->to());
    }
  }

  while (!worklist.is_empty()) {
    HeapEntry* entry = worklist.RemoveLast();
    if (entry->user_reachable()) continue;
    entry->set_user_reachable();
    Vector<HeapGraphEdge*> children = entry->children();
    for (int i = 0; i < children.length(); ++i) {
      HeapEntry* child = children[i]->to();
      if (!child->user_reachable()) {
        worklist.Add(child);
      }
    }
  }
}

static bool IsRetainingEdge(HeapGraphEdge* edge) {
  if (edge->type() == HeapGraphEdge::kShortcut) return false;
  // The edge is not retaining if it goes from the system domain
  // (i.e. an object not reachable from window) to the user domain
  // (i.e. a reachable object).
  return edge->from()->user_reachable()
      || !edge->to()->user_reachable();
}

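// For example, if a system object S (not reachable from window) points to
// a user object U that is also reachable from window, the edge S -> U is
// not considered retaining: removing it would not make U collectible.
// Edges within the user domain, within the system domain, or from user to
// system objects all remain retaining.
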
void HeapSnapshotGenerator::FillPostorderIndexes(
    Vector<HeapEntry*>* entries) {
  snapshot_->ClearPaint();
  int current_entry = 0;
  List<HeapEntry*> nodes_to_visit;
  HeapEntry* root = snapshot_->root();
  nodes_to_visit.Add(root);
  snapshot_->root()->paint();
  while (!nodes_to_visit.is_empty()) {
    HeapEntry* entry = nodes_to_visit.last();
    Vector<HeapGraphEdge*> children = entry->children();
    bool has_new_edges = false;
    for (int i = 0; i < children.length(); ++i) {
      if (entry != root && !IsRetainingEdge(children[i])) continue;
      HeapEntry* child = children[i]->to();
      if (!child->painted()) {
        nodes_to_visit.Add(child);
        child->paint();
        has_new_edges = true;
      }
    }
    if (!has_new_edges) {
      entry->set_postorder_index(current_entry);
      (*entries)[current_entry++] = entry;
      nodes_to_visit.RemoveLast();
    }
  }
  ASSERT_EQ(current_entry, entries->length());
}

static int Intersect(int i1, int i2, const Vector<int>& dominators) {
  int finger1 = i1, finger2 = i2;
  while (finger1 != finger2) {
    while (finger1 < finger2) finger1 = dominators[finger1];
    while (finger2 < finger1) finger2 = dominators[finger2];
  }
  return finger1;
}

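// Intersect() is the classic "two finger" walk from the Cooper, Harvey and
// Kennedy algorithm cited just below: both indices climb the current
// dominator tree until they meet. This works because entries are numbered
// in postorder, so a dominator always has a higher index than the nodes it
// dominates. For illustration, with dominators = [4, 4, 4, 4, 4] (a flat
// tree rooted at index 4), Intersect(1, 3, dominators) climbs finger1 from
// 1 to 4, then finger2 from 3 to 4, and returns 4.
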
// The algorithm is based on the article:
// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
// Softw. Pract. Exper. 4 (2001), pp. 1-10.
bool HeapSnapshotGenerator::BuildDominatorTree(
    const Vector<HeapEntry*>& entries,
    Vector<int>* dominators) {
  if (entries.length() == 0) return true;
  HeapEntry* root = snapshot_->root();
  const int entries_length = entries.length(), root_index = entries_length - 1;
  for (int i = 0; i < root_index; ++i) (*dominators)[i] = HeapEntry::kNoEntry;
  (*dominators)[root_index] = root_index;

  // The affected array is used to mark entries whose dominators
  // have to be recalculated because of changes in their retainers.
  ScopedVector<bool> affected(entries_length);
  for (int i = 0; i < affected.length(); ++i) affected[i] = false;
  // Mark the root's direct children as affected.
  Vector<HeapGraphEdge*> children = entries[root_index]->children();
  for (int i = 0; i < children.length(); ++i) {
    affected[children[i]->to()->postorder_index()] = true;
  }

  bool changed = true;
  while (changed) {
    changed = false;
    if (!ProgressReport(false)) return false;
    for (int i = root_index - 1; i >= 0; --i) {
      if (!affected[i]) continue;
      affected[i] = false;
      // If the dominator of the entry has already been set to root,
      // then it can't propagate any further.
      if ((*dominators)[i] == root_index) continue;
      int new_idom_index = HeapEntry::kNoEntry;
      Vector<HeapGraphEdge*> rets = entries[i]->retainers();
      for (int j = 0; j < rets.length(); ++j) {
        if (rets[j]->from() != root && !IsRetainingEdge(rets[j])) continue;
        int ret_index = rets[j]->from()->postorder_index();
        if (dominators->at(ret_index) != HeapEntry::kNoEntry) {
          new_idom_index = new_idom_index == HeapEntry::kNoEntry
              ? ret_index
              : Intersect(ret_index, new_idom_index, *dominators);
          // If idom has already reached the root, it doesn't make sense
          // to check other retainers.
          if (new_idom_index == root_index) break;
        }
      }
      if (new_idom_index != HeapEntry::kNoEntry
          && dominators->at(i) != new_idom_index) {
        (*dominators)[i] = new_idom_index;
        changed = true;
        Vector<HeapGraphEdge*> children = entries[i]->children();
        for (int j = 0; j < children.length(); ++j) {
          affected[children[j]->to()->postorder_index()] = true;
        }
      }
    }
  }
  return true;
}

bool HeapSnapshotGenerator::SetEntriesDominators() {
  MarkUserReachableObjects();
  // This array is used for maintaining postorder of nodes.
  ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries().length());
  FillPostorderIndexes(&ordered_entries);
  ScopedVector<int> dominators(ordered_entries.length());
  if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
  for (int i = 0; i < ordered_entries.length(); ++i) {
    ASSERT(dominators[i] != HeapEntry::kNoEntry);
    ordered_entries[i]->set_dominator(ordered_entries[dominators[i]]);
  }
  return true;
}

bool HeapSnapshotGenerator::CalculateRetainedSizes() {
  // In the dominator tree we only know parent nodes, not children, so to
  // sum up total sizes we "bubble" each node's self size up, adding it to
  // all of its dominators.
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    HeapEntry* entry = &entries[i];
    entry->set_retained_size(entry->self_size());
  }
  for (int i = 0; i < entries.length(); ++i) {
    int entry_size = entries[i].self_size();
    HeapEntry* current = &entries[i];
    for (HeapEntry* dominator = current->dominator();
         dominator != current;
         current = dominator, dominator = current->dominator()) {
      ASSERT(current->dominator() != NULL);
      dominator->add_retained_size(entry_size);
    }
  }
  return true;
}

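// A small worked example of the "bubbling" above, for illustration: given
// a dominator chain root <- A <- B (B dominated by A, A by root) with self
// sizes root=10, A=20, B=30, every entry starts with retained == self;
// then B's 30 is added to A and root, and A's 20 to root, giving retained
// sizes B=30, A=50, root=60.
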
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kSigned = 11;
  static const int kUnsigned = 10;
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kSigned = 20;
  static const int kUnsigned = 20;
};

class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;
  ScopedVector<char> chunk_;
  int chunk_pos_;
  bool aborted_;
};

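// Typical use of OutputStreamWriter, for illustration: the serializer
// below creates one writer per Serialize() call and streams pieces of JSON
// through it; the writer packs them into fixed-size chunks and forwards
// each full chunk to the embedder-provided v8::OutputStream:
//
//   writer_->AddString("\"nodes\":[");
//   writer_->AddNumber(node_count);
//   writer_->Finalize();  // flushes the last partial chunk
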
// type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// type, name, id, self_size, retained_size, dominator, children_index.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 7;

void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
  ASSERT(writer_ == NULL);
  writer_ = new OutputStreamWriter(stream);

  HeapSnapshot* original_snapshot = NULL;
  if (snapshot_->RawSnapshotSize() >=
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
    // The snapshot is too big. Serialize a fake snapshot.
    original_snapshot = snapshot_;
    snapshot_ = CreateFakeSnapshot();
  }

  SerializeImpl();

  delete writer_;
  writer_ = NULL;

  if (original_snapshot != NULL) {
    delete snapshot_;
    snapshot_ = original_snapshot;
  }
}

HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
  HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
                                          HeapSnapshot::kFull,
                                          snapshot_->title(),
                                          snapshot_->uid());
  result->AddRootEntry();
  const char* text = snapshot_->collection()->names()->GetFormatted(
      "The snapshot is too big. "
      "Maximum snapshot size is %" V8_PTR_PREFIX "u MB. "
      "Actual snapshot size is %" V8_PTR_PREFIX "u MB.",
      SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
      (snapshot_->RawSnapshotSize() + MB - 1) / MB);
  HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
  result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
  result->FillChildrenAndRetainers();
  result->SetDominatorsToSelf();
  return result;
}

void HeapSnapshotJSONSerializer::SerializeImpl() {
  List<HeapEntry>& nodes = snapshot_->entries();
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes(nodes);
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges(nodes);
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}

int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
  HashMap::Entry* cache_entry = strings_.Lookup(
      const_cast<char*>(s), ObjectHash(s), true);
  if (cache_entry->value == NULL) {
    cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
  }
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}

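// The string ids handed out above grow in insertion order starting at 1
// (assuming next_string_id_ is initialized to 1 elsewhere in this file);
// index 0 of the serialized "strings" array is taken by the "<dummy>"
// placeholder written in SerializeStrings(). So the string with id N ends
// up at index N of the "strings" array in the output.
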
static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
  int number_of_digits = 0;
  unsigned t = value;
  do {
    ++number_of_digits;
  } while (t /= 10);

  buffer_pos += number_of_digits;
  int result = buffer_pos;
  do {
    int last_digit = value % 10;
    buffer[--buffer_pos] = '0' + last_digit;
    value /= 10;
  } while (value);
  return result;
}

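// utoa() writes the decimal digits backwards from the computed end
// position and returns the new buffer cursor, e.g. with buffer_pos == 2:
//   utoa(305, buffer, 2)  =>  buffer[2..4] = "305", returns 5.
// No terminating '\0' is written; callers append separators themselves.
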
void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
                                               bool first_edge) {
  // The buffer needs space for 3 ints, 3 commas, and \0
  static const int kBufferSize =
      MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1;  // NOLINT
  EmbeddedVector<char, kBufferSize> buffer;
  int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
      || edge->type() == HeapGraphEdge::kHidden
      || edge->type() == HeapGraphEdge::kWeak
      ? edge->index() : GetStringId(edge->name());
  int buffer_pos = 0;
  if (!first_edge) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(edge->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}

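// Each edge is thus emitted as a bare "type,name_or_index,to_node"
// triplet; a property edge could serialize as "2,137,0" -- type 2
// (property), string id 137 for the name, target node at index 0 of the
// flattened nodes array. The numbers here are made up for illustration;
// the field meanings are declared in the "meta" object written by
// SerializeSnapshot() below.
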
void HeapSnapshotJSONSerializer::SerializeEdges(const List<HeapEntry>& nodes) {
  bool first_edge = true;
  for (int i = 0; i < nodes.length(); ++i) {
    HeapEntry* entry = &nodes[i];
    Vector<HeapGraphEdge*> children = entry->children();
    for (int j = 0; j < children.length(); ++j) {
      SerializeEdge(children[j], first_edge);
      first_edge = false;
      if (writer_->aborted()) return;
    }
  }
}

void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry,
                                               int edges_index) {
  // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
  static const int kBufferSize =
      6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned  // NOLINT
      + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned  // NOLINT
      + 7 + 1 + 1;
  EmbeddedVector<char, kBufferSize> buffer;
  int buffer_pos = 0;
  if (entry_index(entry) != 0) {
    buffer[buffer_pos++] = ',';
  }
  buffer_pos = utoa(entry->type(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->id(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry->retained_size(), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(entry_index(entry->dominator()), buffer, buffer_pos);
  buffer[buffer_pos++] = ',';
  buffer_pos = utoa(edges_index, buffer, buffer_pos);
  buffer[buffer_pos++] = '\n';
  buffer[buffer_pos++] = '\0';
  writer_->AddString(buffer.start());
}

void HeapSnapshotJSONSerializer::SerializeNodes(const List<HeapEntry>& nodes) {
  int edges_index = 0;
  for (int i = 0; i < nodes.length(); ++i) {
    HeapEntry* entry = &nodes[i];
    SerializeNode(entry, edges_index);
    edges_index += entry->children().length() * kEdgeFieldsCount;
    if (writer_->aborted()) return;
  }
}

void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("retained_size") ","
        JSON_S("dominator") ","
        JSON_S("edges_index")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
}

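// For illustration, the snapshot header written above looks roughly like:
//
//   "title":"Snapshot 1","uid":1,"meta":{
//     "node_fields":["type","name","id","self_size","retained_size",
//                    "dominator","edges_index"],
//     ...
//   },"node_count":42,"edge_count":123
//
// (values invented); consumers are expected to read the "meta" arrays
// rather than hard-code field positions.
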
static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
  static const char hex_chars[] = "0123456789ABCDEF";
  w->AddString("\\u");
  w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
  w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
  w->AddCharacter(hex_chars[u & 0xf]);
}

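// WriteUChar() emits a JSON "\uXXXX" escape, e.g. u == 0x2603 produces the
// six characters "\u2603"; only the low 16 bits are representable, which
// is sufficient for the BMP code points handled below.
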
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            ASSERT(cursor != 0);
            s += cursor - 1;
          } else {
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}

void HeapSnapshotJSONSerializer::SerializeStrings() {
  List<HashMap::Entry*> sorted_strings;
  SortHashMap(&strings_, &sorted_strings);
  writer_->AddString("\"<dummy>\"");
  for (int i = 0; i < sorted_strings.length(); ++i) {
    writer_->AddCharacter(',');
    SerializeString(
        reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
    if (writer_->aborted()) return;
  }
}

template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t x_uint = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t y_uint = reinterpret_cast<uintptr_t>((*y)->value);
  if (x_uint > y_uint) {
    return 1;
  } else if (x_uint == y_uint) {
    return 0;
  } else {
    return -1;
  }
}

// Sorts by entry value, i.e. by the string ids assigned in GetStringId();
// this restores the insertion order of the string table.
void HeapSnapshotJSONSerializer::SortHashMap(
    HashMap* map, List<HashMap::Entry*>* sorted_entries) {
  for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
    sorted_entries->Add(p);
  sorted_entries->Sort(SortUsingEntryValue);
}

} }  // namespace v8::internal