[V8] Introduce a QML compilation mode
[profile/ivi/qtjsbackend.git] / src / 3rdparty / v8 / src / profile-generator.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 //     * Redistributions of source code must retain the above copyright
7 //       notice, this list of conditions and the following disclaimer.
8 //     * Redistributions in binary form must reproduce the above
9 //       copyright notice, this list of conditions and the following
10 //       disclaimer in the documentation and/or other materials provided
11 //       with the distribution.
12 //     * Neither the name of Google Inc. nor the names of its
13 //       contributors may be used to endorse or promote products derived
14 //       from this software without specific prior written permission.
15 //
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28 #include "v8.h"
29
30 #include "profile-generator-inl.h"
31
32 #include "global-handles.h"
33 #include "heap-profiler.h"
34 #include "scopeinfo.h"
35 #include "unicode.h"
36 #include "zone-inl.h"
37 #include "debug.h"
38
39 namespace v8 {
40 namespace internal {
41
42
// TokenEnumerator maps security-token objects to small integer ids.
// Tokens are tracked through weak global handles so that collected
// tokens can be marked as removed without being kept alive.
TokenEnumerator::TokenEnumerator()
    : token_locations_(4),
      token_removed_(4) {
}


TokenEnumerator::~TokenEnumerator() {
  Isolate* isolate = Isolate::Current();
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (!token_removed_[i]) {
      // Clear weakness before destroying so the removal callback does
      // not fire against a half-destructed enumerator.
      isolate->global_handles()->ClearWeakness(token_locations_[i]);
      isolate->global_handles()->Destroy(token_locations_[i]);
    }
  }
}


// Returns the id of |token|, registering it on first sight. A NULL
// token maps to kNoSecurityToken; ids are indices into token_locations_.
int TokenEnumerator::GetTokenId(Object* token) {
  Isolate* isolate = Isolate::Current();
  if (token == NULL) return TokenEnumerator::kNoSecurityToken;
  // Linear scan over live (not-removed) slots.
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (*token_locations_[i] == token && !token_removed_[i]) return i;
  }
  Handle<Object> handle = isolate->global_handles()->Create(token);
  // handle.location() points to a memory cell holding a pointer
  // to a token object in the V8's heap.
  isolate->global_handles()->MakeWeak(handle.location(), this,
                                      TokenRemovedCallback);
  token_locations_.Add(handle.location());
  token_removed_.Add(false);
  return token_locations_.length() - 1;
}


// Weak-handle callback, invoked by the GC when a token object dies;
// |parameter| is the owning TokenEnumerator.
void TokenEnumerator::TokenRemovedCallback(v8::Persistent<v8::Value> handle,
                                           void* parameter) {
  reinterpret_cast<TokenEnumerator*>(parameter)->TokenRemoved(
      Utils::OpenHandle(*handle).location());
  handle.Dispose();
}


// Marks the live slot holding |token_location| as removed. Slots are
// never reused, so previously handed-out ids stay stable.
void TokenEnumerator::TokenRemoved(Object** token_location) {
  for (int i = 0; i < token_locations_.length(); ++i) {
    if (token_locations_[i] == token_location && !token_removed_[i]) {
      token_removed_[i] = true;
      return;
    }
  }
}
93
94
// StringsStorage interns heap-allocated copies of strings; names_ maps
// each distinct string to the single copy owned by this object.
StringsStorage::StringsStorage()
    : names_(StringsMatch) {
}


StringsStorage::~StringsStorage() {
  // Free every interned copy stored as an entry value.
  for (HashMap::Entry* p = names_.Start();
       p != NULL;
       p = names_.Next(p)) {
    DeleteArray(reinterpret_cast<const char*>(p->value));
  }
}
107
108
109 const char* StringsStorage::GetCopy(const char* src) {
110   int len = static_cast<int>(strlen(src));
111   Vector<char> dst = Vector<char>::New(len + 1);
112   OS::StrNCpy(dst, src, len);
113   dst[len] = '\0';
114   uint32_t hash =
115       HashSequentialString(dst.start(), len, HEAP->HashSeed());
116   return AddOrDisposeString(dst.start(), hash);
117 }
118
119
// printf-style convenience wrapper around GetVFormatted().
const char* StringsStorage::GetFormatted(const char* format, ...) {
  va_list args;
  va_start(args, format);
  const char* result = GetVFormatted(format, args);
  va_end(args);
  return result;
}
127
128
129 const char* StringsStorage::AddOrDisposeString(char* str, uint32_t hash) {
130   HashMap::Entry* cache_entry = names_.Lookup(str, hash, true);
131   if (cache_entry->value == NULL) {
132     // New entry added.
133     cache_entry->value = str;
134   } else {
135     DeleteArray(str);
136   }
137   return reinterpret_cast<const char*>(cache_entry->value);
138 }
139
140
// Formats into a fresh 1KB buffer and interns the result.
const char* StringsStorage::GetVFormatted(const char* format, va_list args) {
  Vector<char> str = Vector<char>::New(1024);
  int len = OS::VSNPrintF(str, format, args);
  if (len == -1) {
    // Output did not fit: free the buffer and fall back to returning
    // the format string itself (it is not interned — presumably a
    // long-lived literal; verify at call sites).
    DeleteArray(str.start());
    return format;
  }
  uint32_t hash = HashSequentialString(
      str.start(), len, HEAP->HashSeed());
  return AddOrDisposeString(str.start(), hash);
}
152
153
// Interns a flat, possibly truncated, copy of a heap String.
// Returns "" for non-string inputs.
const char* StringsStorage::GetName(String* name) {
  if (name->IsString()) {
    // Cap the copied length at kMaxNameSize characters.
    int length = Min(kMaxNameSize, name->length());
    SmartArrayPointer<char> data =
        name->ToCString(DISALLOW_NULLS, ROBUST_STRING_TRAVERSAL, 0, length);
    uint32_t hash =
        HashSequentialString(*data, length, name->GetHeap()->HashSeed());
    // Detach() transfers buffer ownership to AddOrDisposeString().
    return AddOrDisposeString(data.Detach(), hash);
  }
  return "";
}
165
166
// Interns the decimal representation of |index|.
const char* StringsStorage::GetName(int index) {
  return GetFormatted("%d", index);
}
170
171
const char* const CodeEntry::kEmptyNamePrefix = "";


// Copies the identifying fields of |source| into this entry.
// shared_id_ and security_token_id_ are not copied here.
void CodeEntry::CopyData(const CodeEntry& source) {
  tag_ = source.tag_;
  name_prefix_ = source.name_prefix_;
  name_ = source.name_;
  resource_name_ = source.resource_name_;
  line_number_ = source.line_number_;
}
182
183
// Computes a hash identifying this entry for call aggregation.
// Name strings are interned (see StringsStorage), so hashing their
// pointer values is sufficient to distinguish distinct names.
uint32_t CodeEntry::GetCallUid() const {
  uint32_t hash = ComputeIntegerHash(tag_, v8::internal::kZeroHashSeed);
  if (shared_id_ != 0) {
    // Shared-function entries are identified solely by their id.
    hash ^= ComputeIntegerHash(static_cast<uint32_t>(shared_id_),
                               v8::internal::kZeroHashSeed);
  } else {
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_prefix_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(
        static_cast<uint32_t>(reinterpret_cast<uintptr_t>(resource_name_)),
        v8::internal::kZeroHashSeed);
    hash ^= ComputeIntegerHash(line_number_, v8::internal::kZeroHashSeed);
  }
  return hash;
}
203
204
205 bool CodeEntry::IsSameAs(CodeEntry* entry) const {
206   return this == entry
207       || (tag_ == entry->tag_
208           && shared_id_ == entry->shared_id_
209           && (shared_id_ != 0
210               || (name_prefix_ == entry->name_prefix_
211                   && name_ == entry->name_
212                   && resource_name_ == entry->resource_name_
213                   && line_number_ == entry->line_number_)));
214 }
215
216
217 ProfileNode* ProfileNode::FindChild(CodeEntry* entry) {
218   HashMap::Entry* map_entry =
219       children_.Lookup(entry, CodeEntryHash(entry), false);
220   return map_entry != NULL ?
221       reinterpret_cast<ProfileNode*>(map_entry->value) : NULL;
222 }
223
224
225 ProfileNode* ProfileNode::FindOrAddChild(CodeEntry* entry) {
226   HashMap::Entry* map_entry =
227       children_.Lookup(entry, CodeEntryHash(entry), true);
228   if (map_entry->value == NULL) {
229     // New node added.
230     ProfileNode* new_node = new ProfileNode(tree_, entry);
231     map_entry->value = new_node;
232     children_list_.Add(new_node);
233   }
234   return reinterpret_cast<ProfileNode*>(map_entry->value);
235 }
236
237
// Converts this node's own tick count to milliseconds using the
// tree-wide tick rate.
double ProfileNode::GetSelfMillis() const {
  return tree_->TicksToMillis(self_ticks_);
}


// Converts this node's cumulative tick count to milliseconds.
double ProfileNode::GetTotalMillis() const {
  return tree_->TicksToMillis(total_ticks_);
}
246
247
// Dumps this node (ticks, name, token, source position) and its whole
// subtree to stdout, indenting children by two columns per level.
void ProfileNode::Print(int indent) {
  OS::Print("%5u %5u %*c %s%s [%d]",
            total_ticks_, self_ticks_,
            indent, ' ',
            entry_->name_prefix(),
            entry_->name(),
            entry_->security_token_id());
  if (entry_->resource_name()[0] != '\0')
    OS::Print(" %s:%d", entry_->resource_name(), entry_->line_number());
  OS::Print("\n");
  // Children are visited in hash-map iteration order.
  for (HashMap::Entry* p = children_.Start();
       p != NULL;
       p = children_.Next(p)) {
    reinterpret_cast<ProfileNode*>(p->value)->Print(indent + 2);
  }
}
264
265
// TraverseDepthFirst callback that deletes every node post-order,
// so children are freed before their parent.
class DeleteNodesCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    delete node;
  }

  void AfterChildTraversed(ProfileNode*, ProfileNode*) { }
};
276
277
// The tree starts with a synthetic "(root)" entry that all sampled
// paths hang from. root_entry_ is owned by value; root_ on the heap.
ProfileTree::ProfileTree()
    : root_entry_(Logger::FUNCTION_TAG,
                  "",
                  "(root)",
                  "",
                  0,
                  TokenEnumerator::kNoSecurityToken),
      root_(new ProfileNode(this, &root_entry_)) {
}


ProfileTree::~ProfileTree() {
  // Post-order traversal deletes every node, including root_.
  DeleteNodesCallback cb;
  TraverseDepthFirst(&cb);
}
294
295 void ProfileTree::AddPathFromEnd(const Vector<CodeEntry*>& path) {
296   ProfileNode* node = root_;
297   for (CodeEntry** entry = path.start() + path.length() - 1;
298        entry != path.start() - 1;
299        --entry) {
300     if (*entry != NULL) {
301       node = node->FindOrAddChild(*entry);
302     }
303   }
304   node->IncrementSelfTicks();
305 }
306
307
308 void ProfileTree::AddPathFromStart(const Vector<CodeEntry*>& path) {
309   ProfileNode* node = root_;
310   for (CodeEntry** entry = path.start();
311        entry != path.start() + path.length();
312        ++entry) {
313     if (*entry != NULL) {
314       node = node->FindOrAddChild(*entry);
315     }
316   }
317   node->IncrementSelfTicks();
318 }
319
320
// A (source node, cloned node) pair; stack element for the filtered
// cloning traversal below.
struct NodesPair {
  NodesPair(ProfileNode* src, ProfileNode* dst)
      : src(src), dst(dst) { }
  ProfileNode* src;
  ProfileNode* dst;
};
327
328
// Traversal callback that clones a profile tree keeping only nodes
// whose security token is acceptable for security_token_id_; ticks of
// rejected nodes are attributed to their nearest accepted ancestor.
class FilteredCloneCallback {
 public:
  FilteredCloneCallback(ProfileNode* dst_root, int security_token_id)
      : stack_(10),
        security_token_id_(security_token_id) {
    stack_.Add(NodesPair(NULL, dst_root));
  }

  void BeforeTraversingChild(ProfileNode* parent, ProfileNode* child) {
    if (IsTokenAcceptable(child->entry()->security_token_id(),
                          parent->entry()->security_token_id())) {
      ProfileNode* clone = stack_.last().dst->FindOrAddChild(child->entry());
      clone->IncreaseSelfTicks(child->self_ticks());
      stack_.Add(NodesPair(child, clone));
    } else {
      // Attribute ticks to parent node.
      stack_.last().dst->IncreaseSelfTicks(child->self_ticks());
    }
  }

  void AfterAllChildrenTraversed(ProfileNode* parent) { }

  void AfterChildTraversed(ProfileNode*, ProfileNode* child) {
    // Pop only if this child was actually cloned (pushed above);
    // rejected children never enter the stack.
    if (stack_.last().src == child) {
      stack_.RemoveLast();
    }
  }

 private:
  // A token is acceptable when absent, equal to the filter token, or
  // inheriting from a parent whose token is absent or equal.
  bool IsTokenAcceptable(int token, int parent_token) {
    if (token == TokenEnumerator::kNoSecurityToken
        || token == security_token_id_) return true;
    if (token == TokenEnumerator::kInheritsSecurityToken) {
      ASSERT(parent_token != TokenEnumerator::kInheritsSecurityToken);
      return parent_token == TokenEnumerator::kNoSecurityToken
          || parent_token == security_token_id_;
    }
    return false;
  }

  List<NodesPair> stack_;
  int security_token_id_;
};
372
// Rebuilds this tree as a security-token-filtered copy of |src|.
void ProfileTree::FilteredClone(ProfileTree* src, int security_token_id) {
  ms_to_ticks_scale_ = src->ms_to_ticks_scale_;
  FilteredCloneCallback cb(root_, security_token_id);
  src->TraverseDepthFirst(&cb);
  // Self ticks were copied per node; recompute cumulative totals.
  CalculateTotalTicks();
}


void ProfileTree::SetTickRatePerMs(double ticks_per_ms) {
  // Guard against a non-positive rate; fall back to a 1:1 scale.
  ms_to_ticks_scale_ = ticks_per_ms > 0 ? 1.0 / ticks_per_ms : 1.0;
}
384
385
// Traversal cursor: a node plus the index of the next child to visit.
// Used as the stack element of TraverseDepthFirst below.
class Position {
 public:
  explicit Position(ProfileNode* node)
      : node(node), child_idx_(0) { }
  INLINE(ProfileNode* current_child()) {
    return node->children()->at(child_idx_);
  }
  INLINE(bool has_current_child()) {
    return child_idx_ < node->children()->length();
  }
  INLINE(void next_child()) { ++child_idx_; }

  ProfileNode* node;
 private:
  int child_idx_;
};
402
403
// Non-recursive implementation of a depth-first post-order tree traversal.
// The callback receives Before/After hooks for each parent-child pair
// plus a post-order hook once all of a node's children are done.
template <typename Callback>
void ProfileTree::TraverseDepthFirst(Callback* callback) {
  List<Position> stack(10);
  stack.Add(Position(root_));
  while (stack.length() > 0) {
    Position& current = stack.last();
    if (current.has_current_child()) {
      // Descend: notify, then push the child as the new cursor.
      callback->BeforeTraversingChild(current.node, current.current_child());
      stack.Add(Position(current.current_child()));
    } else {
      // All children visited: fire the post-order hook, then advance
      // the parent's cursor past this child.
      callback->AfterAllChildrenTraversed(current.node);
      if (stack.length() > 1) {
        Position& parent = stack[stack.length() - 2];
        callback->AfterChildTraversed(parent.node, current.node);
        parent.next_child();
      }
      // Remove child from the stack.
      stack.RemoveLast();
    }
  }
}
426
427
// Post-order callback computing each node's total ticks as its own
// self ticks plus the totals of all of its children.
class CalculateTotalTicksCallback {
 public:
  void BeforeTraversingChild(ProfileNode*, ProfileNode*) { }

  void AfterAllChildrenTraversed(ProfileNode* node) {
    node->IncreaseTotalTicks(node->self_ticks());
  }

  void AfterChildTraversed(ProfileNode* parent, ProfileNode* child) {
    parent->IncreaseTotalTicks(child->total_ticks());
  }
};


// Recomputes total (cumulative) tick counts for the whole tree.
void ProfileTree::CalculateTotalTicks() {
  CalculateTotalTicksCallback cb;
  TraverseDepthFirst(&cb);
}
446
447
// Prints a one-line summary of the root node's tick counts and times.
void ProfileTree::ShortPrint() {
  OS::Print("root: %u %u %.2fms %.2fms\n",
            root_->total_ticks(), root_->self_ticks(),
            root_->GetTotalMillis(), root_->GetSelfMillis());
}
453
454
// Records a sampled stack in both views of the profile: top-down
// (outermost frame first) and bottom-up (innermost frame first).
void CpuProfile::AddPath(const Vector<CodeEntry*>& path) {
  top_down_.AddPathFromEnd(path);
  bottom_up_.AddPathFromStart(path);
}


void CpuProfile::CalculateTotalTicks() {
  top_down_.CalculateTotalTicks();
  bottom_up_.CalculateTotalTicks();
}


void CpuProfile::SetActualSamplingRate(double actual_sampling_rate) {
  top_down_.SetTickRatePerMs(actual_sampling_rate);
  bottom_up_.SetTickRatePerMs(actual_sampling_rate);
}


// Returns a newly allocated profile containing only the entries
// visible under the given security token.
CpuProfile* CpuProfile::FilteredClone(int security_token_id) {
  ASSERT(security_token_id != TokenEnumerator::kNoSecurityToken);
  CpuProfile* clone = new CpuProfile(title_, uid_);
  clone->top_down_.FilteredClone(&top_down_, security_token_id);
  clone->bottom_up_.FilteredClone(&bottom_up_, security_token_id);
  return clone;
}


// One-line root summaries of both trees.
void CpuProfile::ShortPrint() {
  OS::Print("top down ");
  top_down_.ShortPrint();
  OS::Print("bottom up ");
  bottom_up_.ShortPrint();
}


// Full dump of both trees.
void CpuProfile::Print() {
  OS::Print("[Top down]:\n");
  top_down_.Print();
  OS::Print("[Bottom up]:\n");
  bottom_up_.Print();
}
496
497
// Sentinel entry marking tree nodes that carry a shared-function id
// instead of code info (see GetSharedId below).
CodeEntry* const CodeMap::kSharedFunctionCodeEntry = NULL;
const CodeMap::CodeTreeConfig::Key CodeMap::CodeTreeConfig::kNoKey = NULL;


// Registers a code object occupying [addr, addr + size), first
// evicting any existing entries that the new range overlaps.
void CodeMap::AddCode(Address addr, CodeEntry* entry, unsigned size) {
  DeleteAllCoveredCode(addr, addr + size);
  CodeTree::Locator locator;
  tree_.Insert(addr, &locator);
  locator.set_value(CodeEntryInfo(entry, size));
}
508
509
// Removes every map entry whose range intersects [start, end).
void CodeMap::DeleteAllCoveredCode(Address start, Address end) {
  List<Address> to_delete;
  // Walk backwards, each step jumping to the greatest entry starting
  // strictly below the current address.
  Address addr = end - 1;
  while (addr >= start) {
    CodeTree::Locator locator;
    if (!tree_.FindGreatestLessThan(addr, &locator)) break;
    Address start2 = locator.key(), end2 = start2 + locator.value().size;
    // Overlap test for [start2, end2) vs [start, end). Deletion is
    // deferred so the tree is not mutated while scanning.
    if (start2 < end && start < end2) to_delete.Add(start2);
    addr = start2 - 1;
  }
  for (int i = 0; i < to_delete.length(); ++i) tree_.Remove(to_delete[i]);
}
522
523
524 CodeEntry* CodeMap::FindEntry(Address addr) {
525   CodeTree::Locator locator;
526   if (tree_.FindGreatestLessThan(addr, &locator)) {
527     // locator.key() <= addr. Need to check that addr is within entry.
528     const CodeEntryInfo& entry = locator.value();
529     if (addr < (locator.key() + entry.size))
530       return entry.entry;
531   }
532   return NULL;
533 }
534
535
// Returns a stable numeric id for the shared function info at |addr|,
// allocating a fresh id on first request.
int CodeMap::GetSharedId(Address addr) {
  CodeTree::Locator locator;
  // For shared function entries, 'size' field is used to store their IDs.
  if (tree_.Find(addr, &locator)) {
    const CodeEntryInfo& entry = locator.value();
    ASSERT(entry.entry == kSharedFunctionCodeEntry);
    return entry.size;
  } else {
    tree_.Insert(addr, &locator);
    int id = next_shared_id_++;
    locator.set_value(CodeEntryInfo(kSharedFunctionCodeEntry, id));
    return id;
  }
}
550
551
552 void CodeMap::MoveCode(Address from, Address to) {
553   if (from == to) return;
554   CodeTree::Locator locator;
555   if (!tree_.Find(from, &locator)) return;
556   CodeEntryInfo entry = locator.value();
557   tree_.Remove(from);
558   AddCode(to, entry.entry, entry.size);
559 }
560
561
// Per-entry visitor used by CodeMap::Print() below.
void CodeMap::CodeTreePrinter::Call(
    const Address& key, const CodeMap::CodeEntryInfo& value) {
  OS::Print("%p %5d %s\n", key, value.size, value.entry->name());
}


// Dumps every registered code range to stdout.
void CodeMap::Print() {
  CodeTreePrinter printer;
  tree_.ForEach(&printer);
}
572
573
// The semaphore (initial count 1) serializes access to
// current_profiles_ between profile start/stop and sample recording.
CpuProfilesCollection::CpuProfilesCollection()
    : profiles_uids_(UidsMatch),
      current_profiles_semaphore_(OS::CreateSemaphore(1)) {
  // Create list of unabridged profiles.
  profiles_by_token_.Add(new List<CpuProfile*>());
}


// List<T>::Iterate helpers used by the destructor below.
static void DeleteCodeEntry(CodeEntry** entry_ptr) {
  delete *entry_ptr;
}

static void DeleteCpuProfile(CpuProfile** profile_ptr) {
  delete *profile_ptr;
}

static void DeleteProfilesList(List<CpuProfile*>** list_ptr) {
  if (*list_ptr != NULL) {
    (*list_ptr)->Iterate(DeleteCpuProfile);
    delete *list_ptr;
  }
}

CpuProfilesCollection::~CpuProfilesCollection() {
  delete current_profiles_semaphore_;
  current_profiles_.Iterate(DeleteCpuProfile);
  detached_profiles_.Iterate(DeleteCpuProfile);
  profiles_by_token_.Iterate(DeleteProfilesList);
  // Entries are freed after the profiles whose nodes reference them.
  code_entries_.Iterate(DeleteCodeEntry);
}
604
605
// Begins recording a profile titled |title|. Returns false when the
// simultaneous-profile limit is hit or a profile with the same title
// is already running. Guarded by current_profiles_semaphore_.
bool CpuProfilesCollection::StartProfiling(const char* title, unsigned uid) {
  ASSERT(uid > 0);
  current_profiles_semaphore_->Wait();
  if (current_profiles_.length() >= kMaxSimultaneousProfiles) {
    current_profiles_semaphore_->Signal();
    return false;
  }
  for (int i = 0; i < current_profiles_.length(); ++i) {
    if (strcmp(current_profiles_[i]->title(), title) == 0) {
      // Ignore attempts to start profile with the same title.
      current_profiles_semaphore_->Signal();
      return false;
    }
  }
  current_profiles_.Add(new CpuProfile(title, uid));
  current_profiles_semaphore_->Signal();
  return true;
}


// Overload for heap-allocated String titles; the title is interned
// via GetName() first.
bool CpuProfilesCollection::StartProfiling(String* title, unsigned uid) {
  return StartProfiling(GetName(title), uid);
}
629
630
// Finishes the most recently started profile matching |title| (any
// profile when |title| is empty), finalizes its tick totals and
// sampling rate, and files it under the unabridged token list.
// Returns the profile as seen through |security_token_id|, or NULL
// when no matching profile is running.
CpuProfile* CpuProfilesCollection::StopProfiling(int security_token_id,
                                                 const char* title,
                                                 double actual_sampling_rate) {
  const int title_len = StrLength(title);
  CpuProfile* profile = NULL;
  current_profiles_semaphore_->Wait();
  // Scan newest-first so an empty title stops the latest profile.
  for (int i = current_profiles_.length() - 1; i >= 0; --i) {
    if (title_len == 0 || strcmp(current_profiles_[i]->title(), title) == 0) {
      profile = current_profiles_.Remove(i);
      break;
    }
  }
  current_profiles_semaphore_->Signal();

  if (profile != NULL) {
    profile->CalculateTotalTicks();
    profile->SetActualSamplingRate(actual_sampling_rate);
    List<CpuProfile*>* unabridged_list =
        profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
    unabridged_list->Add(profile);
    // Record uid -> index-in-unabridged-list; the uid doubles as its
    // own hash value.
    HashMap::Entry* entry =
        profiles_uids_.Lookup(reinterpret_cast<void*>(profile->uid()),
                              static_cast<uint32_t>(profile->uid()),
                              true);
    ASSERT(entry->value == NULL);
    entry->value = reinterpret_cast<void*>(unabridged_list->length() - 1);
    return GetProfile(security_token_id, profile->uid());
  }
  return NULL;
}
661
662
// Returns the profile with the given uid as seen through the given
// security token, lazily building and caching a filtered clone.
CpuProfile* CpuProfilesCollection::GetProfile(int security_token_id,
                                              unsigned uid) {
  int index = GetProfileIndex(uid);
  if (index < 0) return NULL;
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list->at(index);
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  if (list->at(index) == NULL) {
    // First request under this token: materialize the clone.
    (*list)[index] =
        unabridged_list->at(index)->FilteredClone(security_token_id);
  }
  return list->at(index);
}


// Maps a profile uid to its index in the unabridged list, or -1 when
// the uid is unknown. The uid doubles as its own hash value.
int CpuProfilesCollection::GetProfileIndex(unsigned uid) {
  HashMap::Entry* entry = profiles_uids_.Lookup(reinterpret_cast<void*>(uid),
                                                static_cast<uint32_t>(uid),
                                                false);
  return entry != NULL ?
      static_cast<int>(reinterpret_cast<intptr_t>(entry->value)) : -1;
}
688
689
690 bool CpuProfilesCollection::IsLastProfile(const char* title) {
691   // Called from VM thread, and only it can mutate the list,
692   // so no locking is needed here.
693   if (current_profiles_.length() != 1) return false;
694   return StrLength(title) == 0
695       || strcmp(current_profiles_[0]->title(), title) == 0;
696 }
697
698
// Removes a completed profile from all bookkeeping structures.
// Filtered clones of the removed profile are parked in
// detached_profiles_ since external code may still reference them.
void CpuProfilesCollection::RemoveProfile(CpuProfile* profile) {
  // Called from VM thread for a completed profile.
  unsigned uid = profile->uid();
  int index = GetProfileIndex(uid);
  if (index < 0) {
    // Not in the uid map — presumably a previously detached clone;
    // just drop it from the detached list.
    detached_profiles_.RemoveElement(profile);
    return;
  }
  profiles_uids_.Remove(reinterpret_cast<void*>(uid),
                        static_cast<uint32_t>(uid));
  // Decrement all indexes above the deleted one.
  for (HashMap::Entry* p = profiles_uids_.Start();
       p != NULL;
       p = profiles_uids_.Next(p)) {
    intptr_t p_index = reinterpret_cast<intptr_t>(p->value);
    if (p_index > index) {
      p->value = reinterpret_cast<void*>(p_index - 1);
    }
  }
  for (int i = 0; i < profiles_by_token_.length(); ++i) {
    List<CpuProfile*>* list = profiles_by_token_[i];
    if (list != NULL && index < list->length()) {
      // Move all filtered clones into detached_profiles_,
      // so we can know that they are still in use.
      CpuProfile* cloned_profile = list->Remove(index);
      if (cloned_profile != NULL && cloned_profile != profile) {
        detached_profiles_.Add(cloned_profile);
      }
    }
  }
}
730
731
// Maps a security token id onto an index into profiles_by_token_.
int CpuProfilesCollection::TokenToIndex(int security_token_id) {
  ASSERT(TokenEnumerator::kNoSecurityToken == -1);
  return security_token_id + 1;  // kNoSecurityToken -> 0, 0 -> 1, ...
}
736
737
// Returns the per-token list of filtered profiles, growing both the
// token table and the list itself so the list's length matches the
// unabridged list; missing clones are NULL placeholders.
List<CpuProfile*>* CpuProfilesCollection::GetProfilesList(
    int security_token_id) {
  const int index = TokenToIndex(security_token_id);
  // Grow the token table to cover this index, padding with NULLs.
  const int lists_to_add = index - profiles_by_token_.length() + 1;
  if (lists_to_add > 0) profiles_by_token_.AddBlock(NULL, lists_to_add);
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  const int current_count = unabridged_list->length();
  if (profiles_by_token_[index] == NULL) {
    profiles_by_token_[index] = new List<CpuProfile*>(current_count);
  }
  List<CpuProfile*>* list = profiles_by_token_[index];
  // Pad with NULLs so indices line up with the unabridged list.
  const int profiles_to_add = current_count - list->length();
  if (profiles_to_add > 0) list->AddBlock(NULL, profiles_to_add);
  return list;
}
754
755
// Returns all finished profiles visible under |security_token_id|,
// materializing any filtered clones not yet created.
List<CpuProfile*>* CpuProfilesCollection::Profiles(int security_token_id) {
  List<CpuProfile*>* unabridged_list =
      profiles_by_token_[TokenToIndex(TokenEnumerator::kNoSecurityToken)];
  if (security_token_id == TokenEnumerator::kNoSecurityToken) {
    return unabridged_list;
  }
  List<CpuProfile*>* list = GetProfilesList(security_token_id);
  const int current_count = unabridged_list->length();
  for (int i = 0; i < current_count; ++i) {
    if (list->at(i) == NULL) {
      (*list)[i] = unabridged_list->at(i)->FilteredClone(security_token_id);
    }
  }
  return list;
}
771
772
// Creates a code entry for a named function with source information.
// All NewCodeEntry overloads register the entry in code_entries_,
// which owns it (freed in the collection's destructor).
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               String* name,
                                               String* resource_name,
                                               int line_number) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   GetName(resource_name),
                                   line_number,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


// Creates a code entry for a C-string name without source information.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   CodeEntry::kEmptyNamePrefix,
                                   GetFunctionName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kNoSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


// Creates a prefixed entry (e.g. for builtins/stubs) that inherits its
// security token from its caller.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               const char* name_prefix,
                                               String* name) {
  CodeEntry* entry = new CodeEntry(tag,
                                   name_prefix,
                                   GetName(name),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}


// Creates an entry named after an argument count ("args_count: N");
// inherits its security token from its caller.
CodeEntry* CpuProfilesCollection::NewCodeEntry(Logger::LogEventsAndTags tag,
                                               int args_count) {
  CodeEntry* entry = new CodeEntry(tag,
                                   "args_count: ",
                                   GetName(args_count),
                                   "",
                                   v8::CpuProfileNode::kNoLineNumberInfo,
                                   TokenEnumerator::kInheritsSecurityToken);
  code_entries_.Add(entry);
  return entry;
}
826
827
// Appends one sampled stack to every profile currently being recorded;
// synchronized with StartProfiling/StopProfiling via the semaphore.
void CpuProfilesCollection::AddPathToCurrentProfiles(
    const Vector<CodeEntry*>& path) {
  // As starting / stopping profiles is rare relatively to this
  // method, we don't bother minimizing the duration of lock holding,
  // e.g. copying contents of the list to a local vector.
  current_profiles_semaphore_->Wait();
  for (int i = 0; i < current_profiles_.length(); ++i) {
    current_profiles_[i]->AddPath(path);
  }
  current_profiles_semaphore_->Signal();
}
839
840
// Counts down one tick; when the countdown expires, the wall clock is
// consulted to re-estimate the effective sampling rate.
void SampleRateCalculator::Tick() {
  if (--wall_time_query_countdown_ == 0)
    UpdateMeasurements(OS::TimeCurrentMillis());
}


// Folds a new wall-clock reading into the running average of ticks
// per millisecond and schedules the next measurement.
void SampleRateCalculator::UpdateMeasurements(double current_time) {
  // The very first call only records the wall time — there is no
  // preceding interval to measure against.
  if (measurements_count_++ != 0) {
    const double measured_ticks_per_ms =
        (kWallTimeQueryIntervalMs * ticks_per_ms_) /
        (current_time - last_wall_time_);
    // Update the average value.
    ticks_per_ms_ +=
        (measured_ticks_per_ms - ticks_per_ms_) / measurements_count_;
    // Update the externally accessible result.
    result_ = static_cast<AtomicWord>(ticks_per_ms_ * kResultScale);
  }
  last_wall_time_ = current_time;
  wall_time_query_countdown_ =
      static_cast<unsigned>(kWallTimeQueryIntervalMs * ticks_per_ms_);
}
862
863
// Display names for synthetic entries that have no JS source.
const char* const ProfileGenerator::kAnonymousFunctionName =
    "(anonymous function)";
const char* const ProfileGenerator::kProgramEntryName =
    "(program)";
const char* const ProfileGenerator::kGarbageCollectorEntryName =
    "(garbage collector)";


// The "(program)" and "(garbage collector)" entries are created once
// up front; |profiles| owns them.
ProfileGenerator::ProfileGenerator(CpuProfilesCollection* profiles)
    : profiles_(profiles),
      program_entry_(
          profiles->NewCodeEntry(Logger::FUNCTION_TAG, kProgramEntryName)),
      gc_entry_(
          profiles->NewCodeEntry(Logger::BUILTIN_TAG,
                                 kGarbageCollectorEntryName)) {
}
880
881
// Symbolizes a raw tick sample into a vector of code entries via the
// code map and records it in every active profile. Frames that cannot
// be resolved remain NULL and are skipped when the path is added.
void ProfileGenerator::RecordTickSample(const TickSample& sample) {
  // Allocate space for stack frames + pc + function + vm-state.
  ScopedVector<CodeEntry*> entries(sample.frames_count + 3);
  // As actual number of decoded code entries may vary, initialize
  // entries vector with NULL values.
  CodeEntry** entry = entries.start();
  memset(entry, 0, entries.length() * sizeof(*entry));
  if (sample.pc != NULL) {
    *entry++ = code_map_.FindEntry(sample.pc);

    if (sample.has_external_callback) {
      // Don't use PC when in external callback code, as it can point
      // inside callback's code, and we will erroneously report
      // that a callback calls itself.
      *(entries.start()) = NULL;
      *entry++ = code_map_.FindEntry(sample.external_callback);
    } else if (sample.tos != NULL) {
      // Find out, if top of stack was pointing inside a JS function
      // meaning that we have encountered a frameless invocation.
      *entry = code_map_.FindEntry(sample.tos);
      if (*entry != NULL && !(*entry)->is_js_function()) {
        *entry = NULL;
      }
      entry++;
    }

    // Resolve the captured return addresses, innermost first.
    for (const Address* stack_pos = sample.stack,
           *stack_end = stack_pos + sample.frames_count;
         stack_pos != stack_end;
         ++stack_pos) {
      *entry++ = code_map_.FindEntry(*stack_pos);
    }
  }

  if (FLAG_prof_browser_mode) {
    bool no_symbolized_entries = true;
    for (CodeEntry** e = entries.start(); e != entry; ++e) {
      if (*e != NULL) {
        no_symbolized_entries = false;
        break;
      }
    }
    // If no frames were symbolized, put the VM state entry in.
    if (no_symbolized_entries) {
      *entry++ = EntryForVMState(sample.state);
    }
  }

  profiles_->AddPathToCurrentProfiles(entries);
}
932
933
// Constructs a named edge (context variable, property, internal or
// shortcut reference). Endpoints are stored as entry indexes; they are
// resolved to pointers later by ReplaceToIndexWithEntry().
HeapGraphEdge::HeapGraphEdge(Type type, const char* name, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      name_(name) {
  ASSERT(type == kContextVariable
      || type == kProperty
      || type == kInternal
      || type == kShortcut);
}
944
945
// Constructs an indexed edge (element, hidden or weak reference), where
// the reference is identified by a number rather than a name.
HeapGraphEdge::HeapGraphEdge(Type type, int index, int from, int to)
    : type_(type),
      from_index_(from),
      to_index_(to),
      index_(index) {
  ASSERT(type == kElement || type == kHidden || type == kWeak);
}
953
954
// Converts the stored destination index into a direct HeapEntry pointer
// once the snapshot's entries array has reached its final layout.
void HeapGraphEdge::ReplaceToIndexWithEntry(HeapSnapshot* snapshot) {
  to_entry_ = &snapshot->entries()[to_index_];
}
958
959
// Sentinel index meaning "no such entry".
const int HeapEntry::kNoEntry = -1;

// Creates a snapshot node. Children/retainers counts start at zero and
// are accumulated as edges are added; the corresponding index fields
// stay -1 until FillChildrenAndRetainers() lays out the edge arrays.
HeapEntry::HeapEntry(HeapSnapshot* snapshot,
                     Type type,
                     const char* name,
                     SnapshotObjectId id,
                     int self_size)
    : painted_(false),
      user_reachable_(false),
      dominator_(kNoEntry),
      type_(type),
      retainers_count_(0),
      retainers_index_(-1),
      children_count_(0),
      children_index_(-1),
      self_size_(self_size),
      retained_size_(0),
      id_(id),
      snapshot_(snapshot),
      name_(name) { }
980
981
982 void HeapEntry::SetNamedReference(HeapGraphEdge::Type type,
983                                   const char* name,
984                                   HeapEntry* entry) {
985   HeapGraphEdge edge(type, name, this->index(), entry->index());
986   snapshot_->edges().Add(edge);
987   ++children_count_;
988   ++entry->retainers_count_;
989 }
990
991
992 void HeapEntry::SetIndexedReference(HeapGraphEdge::Type type,
993                                     int index,
994                                     HeapEntry* entry) {
995   HeapGraphEdge edge(type, index, this->index(), entry->index());
996   snapshot_->edges().Add(edge);
997   ++children_count_;
998   ++entry->retainers_count_;
999 }
1000
1001
// Resolves this snapshot entry back to the live heap object with the
// same snapshot id (may return an empty handle if it died; see
// HeapSnapshotsCollection::FindHeapObjectById).
Handle<HeapObject> HeapEntry::GetHeapObject() {
  return snapshot_->collection()->FindHeapObjectById(id());
}
1005
1006
// Debug-prints this entry (sizes, id, type, truncated name) followed by
// its children, recursing up to max_depth levels with increasing indent.
void HeapEntry::Print(
    const char* prefix, const char* edge_name, int max_depth, int indent) {
  // The "%u" format below requires id() to be exactly unsigned-sized.
  STATIC_CHECK(sizeof(unsigned) == sizeof(id()));
  OS::Print("%6d %7d @%6u %*c %s%s: ",
            self_size(), retained_size(), id(),
            indent, ' ', prefix, edge_name);
  if (type() != kString) {
    OS::Print("%s %.40s\n", TypeAsString(), name_);
  } else {
    // Strings are quoted, truncated, and have newlines escaped.
    OS::Print("\"");
    const char* c = name_;
    while (*c && (c - name_) <= 40) {
      if (*c != '\n')
        OS::Print("%c", *c);
      else
        OS::Print("\\n");
      ++c;
    }
    OS::Print("\"\n");
  }
  if (--max_depth == 0) return;
  Vector<HeapGraphEdge*> ch = children();
  for (int i = 0; i < ch.length(); ++i) {
    HeapGraphEdge& edge = *ch[i];
    // Each edge type gets a one-character prefix and either its name
    // or its numeric index rendered into a small local buffer.
    const char* edge_prefix = "";
    EmbeddedVector<char, 64> index;
    const char* edge_name = index.start();
    switch (edge.type()) {
      case HeapGraphEdge::kContextVariable:
        edge_prefix = "#";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kElement:
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kInternal:
        edge_prefix = "$";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kProperty:
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kHidden:
        edge_prefix = "$";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      case HeapGraphEdge::kShortcut:
        edge_prefix = "^";
        edge_name = edge.name();
        break;
      case HeapGraphEdge::kWeak:
        edge_prefix = "w";
        OS::SNPrintF(index, "%d", edge.index());
        break;
      default:
        OS::SNPrintF(index, "!!! unknown edge type: %d ", edge.type());
    }
    edge.to()->Print(edge_prefix, edge_name, max_depth, indent + 2);
  }
}
1067
1068
1069 const char* HeapEntry::TypeAsString() {
1070   switch (type()) {
1071     case kHidden: return "/hidden/";
1072     case kObject: return "/object/";
1073     case kClosure: return "/closure/";
1074     case kString: return "/string/";
1075     case kCode: return "/code/";
1076     case kArray: return "/array/";
1077     case kRegExp: return "/regexp/";
1078     case kHeapNumber: return "/number/";
1079     case kNative: return "/native/";
1080     case kSynthetic: return "/synthetic/";
1081     default: return "???";
1082   }
1083 }
1084
1085
// It is very important to keep objects that form a heap snapshot
// as small as possible.
namespace {  // Avoid littering the global namespace.

// Expected sizes of the snapshot node/edge structs per pointer width.
// The HeapSnapshot constructor STATIC_CHECKs the real sizes against
// these, catching accidental growth of the snapshot memory footprint.
template <size_t ptr_size> struct SnapshotSizeConstants;

template <> struct SnapshotSizeConstants<4> {
  static const int kExpectedHeapGraphEdgeSize = 12;
  static const int kExpectedHeapEntrySize = 40;
  static const size_t kMaxSerializableSnapshotRawSize = 256 * MB;
};

template <> struct SnapshotSizeConstants<8> {
  static const int kExpectedHeapGraphEdgeSize = 24;
  static const int kExpectedHeapEntrySize = 48;
  static const uint64_t kMaxSerializableSnapshotRawSize =
      static_cast<uint64_t>(6000) * MB;
};

}  // namespace
1106
// Creates an empty snapshot; all well-known entry indexes start out as
// kNoEntry and are filled in as the corresponding entries are added.
HeapSnapshot::HeapSnapshot(HeapSnapshotsCollection* collection,
                           HeapSnapshot::Type type,
                           const char* title,
                           unsigned uid)
    : collection_(collection),
      type_(type),
      title_(title),
      uid_(uid),
      root_index_(HeapEntry::kNoEntry),
      gc_roots_index_(HeapEntry::kNoEntry),
      natives_root_index_(HeapEntry::kNoEntry),
      max_snapshot_js_object_id_(0) {
  // Guard against accidental growth of the per-node/per-edge footprint.
  STATIC_CHECK(
      sizeof(HeapGraphEdge) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapGraphEdgeSize);
  STATIC_CHECK(
      sizeof(HeapEntry) ==
      SnapshotSizeConstants<kPointerSize>::kExpectedHeapEntrySize);
  for (int i = 0; i < VisitorSynchronization::kNumberOfSyncTags; ++i) {
    gc_subroot_indexes_[i] = HeapEntry::kNoEntry;
  }
}
1129
1130
// Unregisters this snapshot from its collection and self-destructs.
// Callers must not touch the snapshot after calling Delete().
void HeapSnapshot::Delete() {
  collection_->RemoveSnapshot(this);
  delete this;
}
1135
1136
// Records the highest object id assigned so far, so the snapshot can
// distinguish objects that existed at snapshot time from later ones.
void HeapSnapshot::RememberLastJSObjectId() {
  max_snapshot_js_object_id_ = collection_->last_assigned_id();
}
1140
1141
// Iteration callback for ClearPaint() below.
static void HeapEntryClearPaint(HeapEntry* entry_ptr) {
  entry_ptr->clear_paint();
}
1145
1146
// Resets the "painted" mark on every entry, preparing for a new
// graph traversal.
void HeapSnapshot::ClearPaint() {
  entries_.Iterate(HeapEntryClearPaint);
}
1150
1151
// Adds the synthetic root entry. Must be called before any other entry
// is added so that the root always has index 0.
HeapEntry* HeapSnapshot::AddRootEntry() {
  ASSERT(root_index_ == HeapEntry::kNoEntry);
  ASSERT(entries_.is_empty());  // Root entry must be the first one.
  HeapEntry* entry = AddEntry(HeapEntry::kObject,
                              "",
                              HeapObjectsMap::kInternalRootObjectId,
                              0);
  root_index_ = entry->index();
  ASSERT(root_index_ == 0);
  return entry;
}
1163
1164
1165 HeapEntry* HeapSnapshot::AddGcRootsEntry() {
1166   ASSERT(gc_roots_index_ == HeapEntry::kNoEntry);
1167   HeapEntry* entry = AddEntry(HeapEntry::kObject,
1168                               "(GC roots)",
1169                               HeapObjectsMap::kGcRootsObjectId,
1170                               0);
1171   gc_roots_index_ = entry->index();
1172   return entry;
1173 }
1174
1175
1176 HeapEntry* HeapSnapshot::AddGcSubrootEntry(int tag) {
1177   ASSERT(gc_subroot_indexes_[tag] == HeapEntry::kNoEntry);
1178   ASSERT(0 <= tag && tag < VisitorSynchronization::kNumberOfSyncTags);
1179   HeapEntry* entry = AddEntry(
1180       HeapEntry::kObject,
1181       VisitorSynchronization::kTagNames[tag],
1182       HeapObjectsMap::GetNthGcSubrootId(tag),
1183       0);
1184   gc_subroot_indexes_[tag] = entry->index();
1185   return entry;
1186 }
1187
1188
// Appends a new entry to the snapshot and returns a pointer to it.
// NOTE(review): the pointer points into the growable entries_ list, so
// it may be invalidated by a subsequent AddEntry; long-term references
// appear to use entry indexes instead — confirm before caching it.
HeapEntry* HeapSnapshot::AddEntry(HeapEntry::Type type,
                                  const char* name,
                                  SnapshotObjectId id,
                                  int size) {
  HeapEntry entry(this, type, name, id, size);
  entries_.Add(entry);
  return &entries_.last();
}
1197
1198
// Builds the per-entry children and retainers views over the flat edge
// list. First pass: assign each entry a contiguous slice of the shared
// children/retainers arrays sized by the counts accumulated while edges
// were added. Second pass: resolve edge destinations to pointers and
// slot each edge into its source's children and target's retainers.
void HeapSnapshot::FillChildrenAndRetainers() {
  ASSERT(children().is_empty());
  children().Allocate(edges().length());
  ASSERT(retainers().is_empty());
  retainers().Allocate(edges().length());
  int children_index = 0;
  int retainers_index = 0;
  for (int i = 0; i < entries().length(); ++i) {
    HeapEntry* entry = &entries()[i];
    children_index = entry->set_children_index(children_index);
    retainers_index = entry->set_retainers_index(retainers_index);
  }
  // Every edge occupies exactly one children slot and one retainers slot.
  ASSERT(edges().length() == children_index);
  ASSERT(edges().length() == retainers_index);
  for (int i = 0; i < edges().length(); ++i) {
    HeapGraphEdge* edge = &edges()[i];
    edge->ReplaceToIndexWithEntry(this);
    edge->from()->add_child(edge);
    edge->to()->add_retainer(edge);
  }
}
1220
1221
1222 void HeapSnapshot::SetDominatorsToSelf() {
1223   for (int i = 0; i < entries_.length(); ++i) {
1224     entries_[i].set_dominator(&entries_[i]);
1225   }
1226 }
1227
1228
// Comparator functor for binary-searching the id-sorted entries list:
// returns 0 on match, -1/+1 to steer the search.
class FindEntryById {
 public:
  explicit FindEntryById(SnapshotObjectId id) : id_(id) { }
  int operator()(HeapEntry* const* entry) {
    if ((*entry)->id() == id_) return 0;
    return (*entry)->id() < id_ ? -1 : 1;
  }
 private:
  SnapshotObjectId id_;
};
1239
1240
1241 HeapEntry* HeapSnapshot::GetEntryById(SnapshotObjectId id) {
1242   List<HeapEntry*>* entries_by_id = GetSortedEntriesList();
1243   // Perform a binary search by id.
1244   int index = SortedListBSearch(*entries_by_id, FindEntryById(id));
1245   if (index == -1)
1246     return NULL;
1247   return entries_by_id->at(index);
1248 }
1249
1250
// Three-way comparison by id() for sorting lists of entry pointers.
template<class T>
static int SortByIds(const T* entry1_ptr,
                     const T* entry2_ptr) {
  if ((*entry1_ptr)->id() == (*entry2_ptr)->id()) return 0;
  return (*entry1_ptr)->id() < (*entry2_ptr)->id() ? -1 : 1;
}
1257
1258
// Lazily builds and caches a list of entry pointers sorted by id,
// used for binary-search lookups in GetEntryById().
List<HeapEntry*>* HeapSnapshot::GetSortedEntriesList() {
  if (sorted_entries_.is_empty()) {
    sorted_entries_.Allocate(entries_.length());
    for (int i = 0; i < entries_.length(); ++i) {
      sorted_entries_[i] = &entries_[i];
    }
    sorted_entries_.Sort(SortByIds);
  }
  return &sorted_entries_;
}
1269
1270
// Debug-prints the whole snapshot graph starting from the root entry.
void HeapSnapshot::Print(int max_depth) {
  root()->Print("", "", max_depth, 0);
}
1274
1275
// Approximates a list's memory footprint by its capacity (not length),
// since capacity is what was actually allocated.
template<typename T, class P>
static size_t GetMemoryUsedByList(const List<T, P>& list) {
  return list.capacity() * sizeof(T);
}
1280
1281
// Total memory held by this snapshot's internal lists; compared against
// kMaxSerializableSnapshotRawSize limits elsewhere.
size_t HeapSnapshot::RawSnapshotSize() const {
  return
      GetMemoryUsedByList(entries_) +
      GetMemoryUsedByList(edges_) +
      GetMemoryUsedByList(children_) +
      GetMemoryUsedByList(retainers_) +
      GetMemoryUsedByList(sorted_entries_);
}
1290
1291
// We split IDs on evens for embedder objects (see
// HeapObjectsMap::GenerateId) and odds for native objects.
// Well-known ids come first, each kObjectIdStep apart; one slot per GC
// subroot tag is reserved before the first dynamically assigned id.
const SnapshotObjectId HeapObjectsMap::kInternalRootObjectId = 1;
const SnapshotObjectId HeapObjectsMap::kGcRootsObjectId =
    HeapObjectsMap::kInternalRootObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kGcRootsFirstSubrootId =
    HeapObjectsMap::kGcRootsObjectId + HeapObjectsMap::kObjectIdStep;
const SnapshotObjectId HeapObjectsMap::kFirstAvailableObjectId =
    HeapObjectsMap::kGcRootsFirstSubrootId +
    VisitorSynchronization::kNumberOfSyncTags * HeapObjectsMap::kObjectIdStep;
1302
HeapObjectsMap::HeapObjectsMap()
    : next_id_(kFirstAvailableObjectId),
      entries_map_(AddressesMatch) {
  // This dummy element solves a problem with entries_map_.
  // When we do lookup in HashMap we see no difference between two cases:
  // it has an entry with NULL as the value or it has created
  // a new entry on the fly with NULL as the default value.
  // With such a dummy element we have a guarantee that all entries_map_
  // entries will have a value field greater than 0.
  // This fact is used in the MoveObject method.
  entries_.Add(EntryInfo(0, NULL, 0));
}
1315
1316
// Compacts the map after a snapshot: entries not touched during the
// snapshot correspond to dead objects and are dropped.
void HeapObjectsMap::SnapshotGenerationFinished() {
  RemoveDeadEntries();
}
1320
1321
// Re-keys a tracked object after the GC moves it, preserving its
// snapshot id. No-op if the source address is unknown or unchanged.
void HeapObjectsMap::MoveObject(Address from, Address to) {
  ASSERT(to != NULL);
  ASSERT(from != NULL);
  if (from == to) return;
  void* from_value = entries_map_.Remove(from, AddressHash(from));
  if (from_value == NULL) return;
  int from_entry_info_index =
      static_cast<int>(reinterpret_cast<intptr_t>(from_value));
  entries_.at(from_entry_info_index).addr = to;
  HashMap::Entry* to_entry = entries_map_.Lookup(to, AddressHash(to), true);
  if (to_entry->value != NULL) {
    int to_entry_info_index =
        static_cast<int>(reinterpret_cast<intptr_t>(to_entry->value));
    // Without this operation we would have two EntryInfos with the same
    // value in the addr field. That is bad, because later in
    // RemoveDeadEntries one of these entries would be removed together
    // with the corresponding entries_map_ entry.
    entries_.at(to_entry_info_index).addr = NULL;
  }
  to_entry->value = reinterpret_cast<void*>(from_entry_info_index);
}
1343
1344
// Returns the snapshot id recorded for addr, or 0 if the address is
// not being tracked.
SnapshotObjectId HeapObjectsMap::FindEntry(Address addr) {
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), false);
  if (entry == NULL) return 0;
  int entry_index = static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
  EntryInfo& entry_info = entries_.at(entry_index);
  // entries_ always has at least the dummy element beyond the map size.
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return entry_info.id;
}
1353
1354
// Returns the id for addr, assigning a fresh one (advancing next_id_ by
// kObjectIdStep to keep embedder ids even) if the address is new.
// Marks existing entries as accessed so RemoveDeadEntries keeps them,
// and refreshes their recorded size.
SnapshotObjectId HeapObjectsMap::FindOrAddEntry(Address addr,
                                                unsigned int size) {
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  HashMap::Entry* entry = entries_map_.Lookup(addr, AddressHash(addr), true);
  if (entry->value != NULL) {
    int entry_index =
        static_cast<int>(reinterpret_cast<intptr_t>(entry->value));
    EntryInfo& entry_info = entries_.at(entry_index);
    entry_info.accessed = true;
    entry_info.size = size;
    return entry_info.id;
  }
  // New object: its map value is the index of the EntryInfo we append.
  entry->value = reinterpret_cast<void*>(entries_.length());
  SnapshotObjectId id = next_id_;
  next_id_ += kObjectIdStep;
  entries_.Add(EntryInfo(id, addr, size));
  ASSERT(static_cast<uint32_t>(entries_.length()) > entries_map_.occupancy());
  return id;
}
1374
1375
// Ends heap-stats tracking by discarding the recorded time intervals.
void HeapObjectsMap::StopHeapObjectsTracking() {
  time_intervals_.Clear();
}
1379
// Resyncs the map with the live heap: forces a GC so the heap is
// iterable, registers/refreshes every live object, then drops entries
// for objects that no longer exist.
void HeapObjectsMap::UpdateHeapObjectsMap() {
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::UpdateHeapObjectsMap");
  HeapIterator iterator;
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    FindOrAddEntry(obj->address(), obj->Size());
  }
  RemoveDeadEntries();
}
1391
1392
// Streams incremental heap statistics to the client: for every recorded
// time interval, recomputes the count and total size of objects whose
// ids fall into it (entries_ is ordered by id since ids are assigned
// monotonically), and emits an update only for intervals that changed.
// Updates are batched into chunks of the stream's preferred size.
void HeapObjectsMap::PushHeapObjectsStats(OutputStream* stream) {
  UpdateHeapObjectsMap();
  time_intervals_.Add(TimeInterval(next_id_));
  int prefered_chunk_size = stream->GetChunkSize();
  List<v8::HeapStatsUpdate> stats_buffer;
  ASSERT(!entries_.is_empty());
  EntryInfo* entry_info = &entries_.first();
  EntryInfo* end_entry_info = &entries_.last() + 1;
  for (int time_interval_index = 0;
       time_interval_index < time_intervals_.length();
       ++time_interval_index) {
    TimeInterval& time_interval = time_intervals_[time_interval_index];
    SnapshotObjectId time_interval_id = time_interval.id;
    uint32_t entries_size = 0;
    EntryInfo* start_entry_info = entry_info;
    // Sum sizes of entries whose id precedes this interval's boundary.
    while (entry_info < end_entry_info && entry_info->id < time_interval_id) {
      entries_size += entry_info->size;
      ++entry_info;
    }
    uint32_t entries_count =
        static_cast<uint32_t>(entry_info - start_entry_info);
    if (time_interval.count != entries_count ||
        time_interval.size != entries_size) {
      // Note: the assignments below both update the cached interval
      // stats and supply the new values to the update record.
      stats_buffer.Add(v8::HeapStatsUpdate(
          time_interval_index,
          time_interval.count = entries_count,
          time_interval.size = entries_size));
      if (stats_buffer.length() >= prefered_chunk_size) {
        OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
            &stats_buffer.first(), stats_buffer.length());
        if (result == OutputStream::kAbort) return;
        stats_buffer.Clear();
      }
    }
  }
  ASSERT(entry_info == end_entry_info);
  // Flush any updates remaining in the final, partial chunk.
  if (!stats_buffer.is_empty()) {
    OutputStream::WriteResult result = stream->WriteHeapStatsChunk(
        &stats_buffer.first(), stats_buffer.length());
    if (result == OutputStream::kAbort) return;
  }
  stream->EndOfStream();
}
1436
1437
// Compacts entries_ in place, keeping only entries marked as accessed
// (i.e. seen during the last heap walk). Kept entries have their
// accessed flag reset and their entries_map_ value re-pointed at the
// new index; dead entries are also removed from the address map.
// Slot 0 (the dummy element) is always preserved.
void HeapObjectsMap::RemoveDeadEntries() {
  ASSERT(entries_.length() > 0 &&
         entries_.at(0).id == 0 &&
         entries_.at(0).addr == NULL);
  int first_free_entry = 1;
  for (int i = 1; i < entries_.length(); ++i) {
    EntryInfo& entry_info = entries_.at(i);
    if (entry_info.accessed) {
      if (first_free_entry != i) {
        entries_.at(first_free_entry) = entry_info;
      }
      entries_.at(first_free_entry).accessed = false;
      HashMap::Entry* entry = entries_map_.Lookup(
          entry_info.addr, AddressHash(entry_info.addr), false);
      ASSERT(entry);
      entry->value = reinterpret_cast<void*>(first_free_entry);
      ++first_free_entry;
    } else {
      // addr may be NULL if this entry was overwritten by MoveObject.
      if (entry_info.addr) {
        entries_map_.Remove(entry_info.addr, AddressHash(entry_info.addr));
      }
    }
  }
  entries_.Rewind(first_free_entry);
  ASSERT(static_cast<uint32_t>(entries_.length()) - 1 ==
         entries_map_.occupancy());
}
1465
1466
// Derives a stable id for an embedder-provided (native) object by
// hashing its hash, label, and element count together. The final
// left-shift makes the id even, per the even/odd id-space split
// documented above kInternalRootObjectId.
SnapshotObjectId HeapObjectsMap::GenerateId(v8::RetainedObjectInfo* info) {
  SnapshotObjectId id = static_cast<SnapshotObjectId>(info->GetHash());
  const char* label = info->GetLabel();
  id ^= HashSequentialString(label,
                             static_cast<int>(strlen(label)),
                             HEAP->HashSeed());
  intptr_t element_count = info->GetElementCount();
  if (element_count != -1)
    id ^= ComputeIntegerHash(static_cast<uint32_t>(element_count),
                             v8::internal::kZeroHashSeed);
  return id << 1;
}
1479
1480
// Object-move tracking stays off until the first snapshot is requested
// (see NewSnapshot). token_enumerator_ is owned and freed in the dtor.
HeapSnapshotsCollection::HeapSnapshotsCollection()
    : is_tracking_objects_(false),
      snapshots_uids_(HeapSnapshotsMatch),
      token_enumerator_(new TokenEnumerator()) {
}
1486
1487
// Iteration callback used by the collection destructor below.
static void DeleteHeapSnapshot(HeapSnapshot** snapshot_ptr) {
  delete *snapshot_ptr;
}
1491
1492
// Frees the owned token enumerator and every registered snapshot.
HeapSnapshotsCollection::~HeapSnapshotsCollection() {
  delete token_enumerator_;
  snapshots_.Iterate(DeleteHeapSnapshot);
}
1497
1498
// Creates a new (not yet registered) snapshot; ownership passes to the
// caller until SnapshotGenerationFinished registers it.
HeapSnapshot* HeapSnapshotsCollection::NewSnapshot(HeapSnapshot::Type type,
                                                   const char* name,
                                                   unsigned uid) {
  is_tracking_objects_ = true;  // Start watching for heap objects moves.
  return new HeapSnapshot(this, type, name, uid);
}
1505
1506
// Finalizes a snapshot run: prunes dead object-id entries, and (when
// the snapshot completed, i.e. is non-NULL) registers it in both the
// list and the uid lookup map. Each uid must be registered only once.
void HeapSnapshotsCollection::SnapshotGenerationFinished(
    HeapSnapshot* snapshot) {
  ids_.SnapshotGenerationFinished();
  if (snapshot != NULL) {
    snapshots_.Add(snapshot);
    HashMap::Entry* entry =
        snapshots_uids_.Lookup(reinterpret_cast<void*>(snapshot->uid()),
                               static_cast<uint32_t>(snapshot->uid()),
                               true);
    ASSERT(entry->value == NULL);
    entry->value = snapshot;
  }
}
1520
1521
1522 HeapSnapshot* HeapSnapshotsCollection::GetSnapshot(unsigned uid) {
1523   HashMap::Entry* entry = snapshots_uids_.Lookup(reinterpret_cast<void*>(uid),
1524                                                  static_cast<uint32_t>(uid),
1525                                                  false);
1526   return entry != NULL ? reinterpret_cast<HeapSnapshot*>(entry->value) : NULL;
1527 }
1528
1529
// Unregisters a snapshot from both the list and the uid map; does not
// delete it (HeapSnapshot::Delete handles deletion).
void HeapSnapshotsCollection::RemoveSnapshot(HeapSnapshot* snapshot) {
  snapshots_.RemoveElement(snapshot);
  unsigned uid = snapshot->uid();
  snapshots_uids_.Remove(reinterpret_cast<void*>(uid),
                         static_cast<uint32_t>(uid));
}
1536
1537
// Maps a snapshot object id back to the live heap object, or an empty
// handle if the object is gone. Requires a full heap walk.
Handle<HeapObject> HeapSnapshotsCollection::FindHeapObjectById(
    SnapshotObjectId id) {
  // First perform a full GC in order to avoid dead objects.
  HEAP->CollectAllGarbage(Heap::kMakeHeapIterableMask,
                          "HeapSnapshotsCollection::FindHeapObjectById");
  AssertNoAllocation no_allocation;
  HeapObject* object = NULL;
  HeapIterator iterator(HeapIterator::kFilterUnreachable);
  // Make sure that object with the given id is still reachable.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next()) {
    if (ids_.FindEntry(obj->address()) == id) {
      ASSERT(object == NULL);
      object = obj;
      // Can't break -- kFilterUnreachable requires full heap traversal.
    }
  }
  return object != NULL ? Handle<HeapObject>(object) : Handle<HeapObject>();
}
1558
1559
// Maps arbitrary "heap things" to snapshot entry indexes.
HeapEntriesMap::HeapEntriesMap()
    : entries_(HeapThingsMatch) {
}
1563
1564
// Returns the entry index previously paired with |thing|, or
// HeapEntry::kNoEntry if no pairing exists.
int HeapEntriesMap::Map(HeapThing thing) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), false);
  if (cache_entry == NULL) return HeapEntry::kNoEntry;
  return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
}
1570
1571
// Records the pairing of |thing| with an entry index. Each thing may be
// paired at most once.
void HeapEntriesMap::Pair(HeapThing thing, int entry) {
  HashMap::Entry* cache_entry = entries_.Lookup(thing, Hash(thing), true);
  ASSERT(cache_entry->value == NULL);
  cache_entry->value = reinterpret_cast<void*>(static_cast<intptr_t>(entry));
}
1577
1578
// A set of heap objects, implemented as a hash map keyed by object
// pointer (values are unused except for tags).
HeapObjectsSet::HeapObjectsSet()
    : entries_(HeapEntriesMap::HeapThingsMatch) {
}
1582
1583
// Removes all objects (and their tags) from the set.
void HeapObjectsSet::Clear() {
  entries_.Clear();
}
1587
1588
1589 bool HeapObjectsSet::Contains(Object* obj) {
1590   if (!obj->IsHeapObject()) return false;
1591   HeapObject* object = HeapObject::cast(obj);
1592   return entries_.Lookup(object, HeapEntriesMap::Hash(object), false) != NULL;
1593 }
1594
1595
// Adds |obj| to the set; silently ignores non-heap objects (e.g. Smis).
void HeapObjectsSet::Insert(Object* obj) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  // Lookup with insert=true creates the entry if it is absent.
  entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
}
1601
1602
1603 const char* HeapObjectsSet::GetTag(Object* obj) {
1604   HeapObject* object = HeapObject::cast(obj);
1605   HashMap::Entry* cache_entry =
1606       entries_.Lookup(object, HeapEntriesMap::Hash(object), false);
1607   return cache_entry != NULL
1608       ? reinterpret_cast<const char*>(cache_entry->value)
1609       : NULL;
1610 }
1611
1612
// Inserts |obj| (if absent) and attaches |tag| to it; ignores non-heap
// objects. The tag string is not copied — it must outlive the set.
void HeapObjectsSet::SetTag(Object* obj, const char* tag) {
  if (!obj->IsHeapObject()) return;
  HeapObject* object = HeapObject::cast(obj);
  HashMap::Entry* cache_entry =
      entries_.Lookup(object, HeapEntriesMap::Hash(object), true);
  cache_entry->value = const_cast<char*>(tag);
}
1620
1621
// Fake HeapObject pointers encoding the synthetic root/GC-root entries.
// Their "addresses" are the corresponding well-known snapshot ids, so
// they can never collide with real (aligned) heap objects. GC subroots
// occupy the half-open range [kFirstGcSubrootObject, kLastGcSubrootObject).
HeapObject* const V8HeapExplorer::kInternalRootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kInternalRootObjectId));
HeapObject* const V8HeapExplorer::kGcRootsObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsObjectId));
HeapObject* const V8HeapExplorer::kFirstGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kGcRootsFirstSubrootId));
HeapObject* const V8HeapExplorer::kLastGcSubrootObject =
    reinterpret_cast<HeapObject*>(
        static_cast<intptr_t>(HeapObjectsMap::kFirstAvailableObjectId));
1634
1635
// The explorer borrows the snapshot, its collection, and the progress
// interface; the filler is supplied later (filler_ starts NULL).
V8HeapExplorer::V8HeapExplorer(
    HeapSnapshot* snapshot,
    SnapshottingProgressReportingInterface* progress)
    : heap_(Isolate::Current()->heap()),
      snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      filler_(NULL) {
}
1645
1646
// Nothing to release: all referenced objects are borrowed.
V8HeapExplorer::~V8HeapExplorer() {
}
1649
1650
// SnapshotFiller callback: treats the opaque HeapThing as a HeapObject
// and creates a snapshot entry for it.
HeapEntry* V8HeapExplorer::AllocateEntry(HeapThing ptr) {
  return AddEntry(reinterpret_cast<HeapObject*>(ptr));
}
1654
1655
// Creates a snapshot entry for |object|, picking the entry type and
// display name from the object's kind. Branch order matters: the fake
// pseudo-objects are recognized first, then concrete types from most
// to least specific (e.g. IsGlobalContext before IsContext, IsJSRegExp
// before IsJSObject); anything unmatched becomes a hidden system entry.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object) {
  if (object == kInternalRootObject) {
    snapshot_->AddRootEntry();
    return snapshot_->root();
  } else if (object == kGcRootsObject) {
    HeapEntry* entry = snapshot_->AddGcRootsEntry();
    return entry;
  } else if (object >= kFirstGcSubrootObject && object < kLastGcSubrootObject) {
    HeapEntry* entry = snapshot_->AddGcSubrootEntry(GetGcSubrootOrder(object));
    return entry;
  } else if (object->IsJSFunction()) {
    JSFunction* func = JSFunction::cast(object);
    SharedFunctionInfo* shared = func->shared();
    const char* name = shared->bound() ? "native_bind" :
        collection_->names()->GetName(String::cast(shared->name()));
    return AddEntry(object, HeapEntry::kClosure, name);
  } else if (object->IsJSRegExp()) {
    JSRegExp* re = JSRegExp::cast(object);
    return AddEntry(object,
                    HeapEntry::kRegExp,
                    collection_->names()->GetName(re->Pattern()));
  } else if (object->IsJSObject()) {
    const char* name = collection_->names()->GetName(
        GetConstructorName(JSObject::cast(object)));
    if (object->IsJSGlobalObject()) {
      // Append the embedder-provided tag, if any, to global objects.
      const char* tag = objects_tags_.GetTag(object);
      if (tag != NULL) {
        name = collection_->names()->GetFormatted("%s / %s", name, tag);
      }
    }
    return AddEntry(object, HeapEntry::kObject, name);
  } else if (object->IsString()) {
    return AddEntry(object,
                    HeapEntry::kString,
                    collection_->names()->GetName(String::cast(object)));
  } else if (object->IsCode()) {
    return AddEntry(object, HeapEntry::kCode, "");
  } else if (object->IsSharedFunctionInfo()) {
    String* name = String::cast(SharedFunctionInfo::cast(object)->name());
    return AddEntry(object,
                    HeapEntry::kCode,
                    collection_->names()->GetName(name));
  } else if (object->IsScript()) {
    Object* name = Script::cast(object)->name();
    return AddEntry(object,
                    HeapEntry::kCode,
                    name->IsString()
                        ? collection_->names()->GetName(String::cast(name))
                        : "");
  } else if (object->IsGlobalContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / GlobalContext");
  } else if (object->IsContext()) {
    return AddEntry(object, HeapEntry::kHidden, "system / Context");
  } else if (object->IsFixedArray() ||
             object->IsFixedDoubleArray() ||
             object->IsByteArray() ||
             object->IsExternalArray()) {
    return AddEntry(object, HeapEntry::kArray, "");
  } else if (object->IsHeapNumber()) {
    return AddEntry(object, HeapEntry::kHeapNumber, "number");
  }
  return AddEntry(object, HeapEntry::kHidden, GetSystemEntryName(object));
}
1719
1720
// Registers |object| in the id map (by address and size) and appends a
// snapshot entry of the given type/name for it.
HeapEntry* V8HeapExplorer::AddEntry(HeapObject* object,
                                    HeapEntry::Type type,
                                    const char* name) {
  int object_size = object->Size();
  SnapshotObjectId object_id =
    collection_->GetObjectId(object->address(), object_size);
  return snapshot_->AddEntry(type, name, object_id, object_size);
}
1729
1730
1731 class GcSubrootsEnumerator : public ObjectVisitor {
1732  public:
1733   GcSubrootsEnumerator(
1734       SnapshotFillerInterface* filler, V8HeapExplorer* explorer)
1735       : filler_(filler),
1736         explorer_(explorer),
1737         previous_object_count_(0),
1738         object_count_(0) {
1739   }
1740   void VisitPointers(Object** start, Object** end) {
1741     object_count_ += end - start;
1742   }
1743   void Synchronize(VisitorSynchronization::SyncTag tag) {
1744     // Skip empty subroots.
1745     if (previous_object_count_ != object_count_) {
1746       previous_object_count_ = object_count_;
1747       filler_->AddEntry(V8HeapExplorer::GetNthGcSubrootObject(tag), explorer_);
1748     }
1749   }
1750  private:
1751   SnapshotFillerInterface* filler_;
1752   V8HeapExplorer* explorer_;
1753   intptr_t previous_object_count_;
1754   intptr_t object_count_;
1755 };
1756
1757
// Adds the synthetic root, GC-roots and per-category GC-subroot entries
// to the snapshot being filled.
void V8HeapExplorer::AddRootEntries(SnapshotFillerInterface* filler) {
  filler->AddEntry(kInternalRootObject, this);
  filler->AddEntry(kGcRootsObject, this);
  GcSubrootsEnumerator enumerator(filler, this);
  heap_->IterateRoots(&enumerator, VISIT_ALL);
}
1764
1765
// Picks a "system / <Type>" display name for internal VM objects based
// on the object's instance type; struct types are expanded via the
// STRUCT_LIST macro.
const char* V8HeapExplorer::GetSystemEntryName(HeapObject* object) {
  switch (object->map()->instance_type()) {
    case MAP_TYPE: return "system / Map";
    case JS_GLOBAL_PROPERTY_CELL_TYPE: return "system / JSGlobalPropertyCell";
    case FOREIGN_TYPE: return "system / Foreign";
    case ODDBALL_TYPE: return "system / Oddball";
#define MAKE_STRUCT_CASE(NAME, Name, name) \
    case NAME##_TYPE: return "system / "#Name;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return "system";
  }
}
1779
1780
1781 int V8HeapExplorer::EstimateObjectsCount(HeapIterator* iterator) {
1782   int objects_count = 0;
1783   for (HeapObject* obj = iterator->next();
1784        obj != NULL;
1785        obj = iterator->next()) {
1786     objects_count++;
1787   }
1788   return objects_count;
1789 }
1790
1791
// Generic pointer-field visitor that adds a "hidden" edge for every
// pointer field of |parent_obj| that was not already reported by a typed
// extractor. Typed extractors mark the fields they covered by tagging the
// stored pointer with the failure tag (see MarkVisitedField); this visitor
// detects such marks, removes them, and skips the field.
class IndexedReferencesExtractor : public ObjectVisitor {
 public:
  IndexedReferencesExtractor(V8HeapExplorer* generator,
                             HeapObject* parent_obj,
                             int parent)
      : generator_(generator),
        parent_obj_(parent_obj),
        parent_(parent),
        next_index_(1) {
  }
  void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      // Fields already reported by a typed extractor are unmarked and
      // skipped; the rest become hidden references numbered from 1.
      if (CheckVisitedAndUnmark(p)) continue;
      generator_->SetHiddenReference(parent_obj_, parent_, next_index_++, *p);
    }
  }
  // Tags the pointer stored at obj+offset with the failure tag so the
  // indexed pass above knows the field was already handled. A negative
  // offset means "no field to mark" and is a no-op.
  static void MarkVisitedField(HeapObject* obj, int offset) {
    if (offset < 0) return;
    Address field = obj->address() + offset;
    ASSERT(!Memory::Object_at(field)->IsFailure());
    ASSERT(Memory::Object_at(field)->IsHeapObject());
    *field |= kFailureTag;
  }

 private:
  // Returns true (and restores the heap-object tag) if *field carries
  // the failure-tag mark set by MarkVisitedField.
  bool CheckVisitedAndUnmark(Object** field) {
    if ((*field)->IsFailure()) {
      intptr_t untagged = reinterpret_cast<intptr_t>(*field) & ~kFailureTagMask;
      *field = reinterpret_cast<Object*>(untagged | kHeapObjectTag);
      ASSERT((*field)->IsHeapObject());
      return true;
    }
    return false;
  }
  V8HeapExplorer* generator_;
  HeapObject* parent_obj_;
  int parent_;
  int next_index_;
};
1831
1832
// Extracts all outgoing references of |obj| into the snapshot. First a
// type-specific extractor records named edges for well-known fields
// (marking them visited); then, unless the type opts out via
// extract_indexed_refs, a generic pass adds hidden edges for any pointer
// fields not already covered.
void V8HeapExplorer::ExtractReferences(HeapObject* obj) {
  HeapEntry* heap_entry = GetEntry(obj);
  if (heap_entry == NULL) return;  // No interest in this object.
  int entry = heap_entry->index();

  bool extract_indexed_refs = true;
  if (obj->IsJSGlobalProxy()) {
    ExtractJSGlobalProxyReferences(JSGlobalProxy::cast(obj));
  } else if (obj->IsJSObject()) {
    ExtractJSObjectReferences(entry, JSObject::cast(obj));
  } else if (obj->IsString()) {
    ExtractStringReferences(entry, String::cast(obj));
    extract_indexed_refs = false;
  } else if (obj->IsContext()) {
    ExtractContextReferences(entry, Context::cast(obj));
  } else if (obj->IsMap()) {
    ExtractMapReferences(entry, Map::cast(obj));
  } else if (obj->IsSharedFunctionInfo()) {
    ExtractSharedFunctionInfoReferences(entry, SharedFunctionInfo::cast(obj));
  } else if (obj->IsScript()) {
    ExtractScriptReferences(entry, Script::cast(obj));
  } else if (obj->IsCodeCache()) {
    ExtractCodeCacheReferences(entry, CodeCache::cast(obj));
  } else if (obj->IsCode()) {
    ExtractCodeReferences(entry, Code::cast(obj));
  } else if (obj->IsJSGlobalPropertyCell()) {
    ExtractJSGlobalPropertyCellReferences(
        entry, JSGlobalPropertyCell::cast(obj));
    extract_indexed_refs = false;
  }
  if (extract_indexed_refs) {
    // The map field is reported here and marked visited so the indexed
    // pass below does not duplicate it.
    SetInternalReference(obj, entry, "map", obj->map(), HeapObject::kMapOffset);
    IndexedReferencesExtractor refs_extractor(this, obj, entry);
    obj->Iterate(&refs_extractor);
  }
}
1869
1870
// Adds a user-root edge from the snapshot's root to the global object
// behind |proxy|, skipping the debugger's own global object.
void V8HeapExplorer::ExtractJSGlobalProxyReferences(JSGlobalProxy* proxy) {
  // We need to reference JS global objects from snapshot's root.
  // We use JSGlobalProxy because this is what embedder (e.g. browser)
  // uses for the global object.
  Object* object = proxy->map()->prototype();
  bool is_debug_object = false;
#ifdef ENABLE_DEBUGGER_SUPPORT
  // Without debugger support the debug-global check is compiled out and
  // every global object is referenced.
  is_debug_object = object->IsGlobalObject() &&
      Isolate::Current()->debug()->IsDebugGlobal(GlobalObject::cast(object));
#endif
  if (!is_debug_object) {
    SetUserGlobalReference(object);
  }
}
1885
1886
// Extracts the references of a JSObject: its closure/property/element/
// internal-field references, its prototype, and — for functions and
// global objects — their well-known internal fields. Fields reported with
// an offset are marked visited so the indexed pass skips them.
void V8HeapExplorer::ExtractJSObjectReferences(
    int entry, JSObject* js_obj) {
  HeapObject* obj = js_obj;
  ExtractClosureReferences(js_obj, entry);
  ExtractPropertyReferences(js_obj, entry);
  ExtractElementReferences(js_obj, entry);
  ExtractInternalReferences(js_obj, entry);
  SetPropertyReference(
      obj, entry, heap_->Proto_symbol(), js_obj->GetPrototype());
  if (obj->IsJSFunction()) {
    JSFunction* js_fun = JSFunction::cast(js_obj);
    // The prototype slot either holds the prototype object itself or the
    // initial map (from which the prototype can be read).
    Object* proto_or_map = js_fun->prototype_or_initial_map();
    if (!proto_or_map->IsTheHole()) {
      if (!proto_or_map->IsMap()) {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), proto_or_map,
            NULL,
            JSFunction::kPrototypeOrInitialMapOffset);
      } else {
        SetPropertyReference(
            obj, entry,
            heap_->prototype_symbol(), js_fun->prototype());
      }
    }
    SharedFunctionInfo* shared_info = js_fun->shared();
    // JSFunction has either bindings or literals and never both.
    bool bound = shared_info->bound();
    TagObject(js_fun->literals_or_bindings(),
              bound ? "(function bindings)" : "(function literals)");
    SetInternalReference(js_fun, entry,
                         bound ? "bindings" : "literals",
                         js_fun->literals_or_bindings(),
                         JSFunction::kLiteralsOffset);
    TagObject(shared_info, "(shared function info)");
    SetInternalReference(js_fun, entry,
                         "shared", shared_info,
                         JSFunction::kSharedFunctionInfoOffset);
    TagObject(js_fun->unchecked_context(), "(context)");
    SetInternalReference(js_fun, entry,
                         "context", js_fun->unchecked_context(),
                         JSFunction::kContextOffset);
    // Remaining fields past the non-weak section are reported as weak
    // references, one per pointer-sized slot.
    for (int i = JSFunction::kNonWeakFieldsEndOffset;
         i < JSFunction::kSize;
         i += kPointerSize) {
      SetWeakReference(js_fun, entry, i, *HeapObject::RawField(js_fun, i), i);
    }
  } else if (obj->IsGlobalObject()) {
    GlobalObject* global_obj = GlobalObject::cast(obj);
    SetInternalReference(global_obj, entry,
                         "builtins", global_obj->builtins(),
                         GlobalObject::kBuiltinsOffset);
    SetInternalReference(global_obj, entry,
                         "global_context", global_obj->global_context(),
                         GlobalObject::kGlobalContextOffset);
    SetInternalReference(global_obj, entry,
                         "global_receiver", global_obj->global_receiver(),
                         GlobalObject::kGlobalReceiverOffset);
  }
  TagObject(js_obj->properties(), "(object properties)");
  SetInternalReference(obj, entry,
                       "properties", js_obj->properties(),
                       JSObject::kPropertiesOffset);
  TagObject(js_obj->elements(), "(object elements)");
  SetInternalReference(obj, entry,
                       "elements", js_obj->elements(),
                       JSObject::kElementsOffset);
}
1955
1956
1957 void V8HeapExplorer::ExtractStringReferences(int entry, String* string) {
1958   if (string->IsConsString()) {
1959     ConsString* cs = ConsString::cast(string);
1960     SetInternalReference(cs, entry, "first", cs->first());
1961     SetInternalReference(cs, entry, "second", cs->second());
1962   } else if (string->IsSlicedString()) {
1963     SlicedString* ss = SlicedString::cast(string);
1964     SetInternalReference(ss, entry, "parent", ss->parent());
1965   }
1966 }
1967
1968
// Extracts references from a Context's fixed slots. The macro reports a
// named internal reference per slot; global contexts additionally get
// their GLOBAL_CONTEXT_FIELDS plus weak references for the trailing weak
// slot range. Note the macro is #undef'd inside the global-context branch,
// after its last expansion.
void V8HeapExplorer::ExtractContextReferences(int entry, Context* context) {
#define EXTRACT_CONTEXT_FIELD(index, type, name) \
  SetInternalReference(context, entry, #name, context->get(Context::index), \
      FixedArray::OffsetOfElementAt(Context::index));
  EXTRACT_CONTEXT_FIELD(CLOSURE_INDEX, JSFunction, closure);
  EXTRACT_CONTEXT_FIELD(PREVIOUS_INDEX, Context, previous);
  EXTRACT_CONTEXT_FIELD(EXTENSION_INDEX, Object, extension);
  EXTRACT_CONTEXT_FIELD(GLOBAL_INDEX, GlobalObject, global);
  if (context->IsGlobalContext()) {
    TagObject(context->jsfunction_result_caches(),
              "(context func. result caches)");
    TagObject(context->normalized_map_cache(), "(context norm. map cache)");
    TagObject(context->runtime_context(), "(runtime context)");
    TagObject(context->data(), "(context data)");
    GLOBAL_CONTEXT_FIELDS(EXTRACT_CONTEXT_FIELD);
#undef EXTRACT_CONTEXT_FIELD
    // Slots from FIRST_WEAK_SLOT onward are weak by contract.
    for (int i = Context::FIRST_WEAK_SLOT;
         i < Context::GLOBAL_CONTEXT_SLOTS;
         ++i) {
      SetWeakReference(context, entry, i, context->get(i),
          FixedArray::OffsetOfElementAt(i));
    }
  }
}
1993
1994
// Extracts the references of a Map: prototype, constructor, descriptors,
// code cache, and — depending on what the overloaded
// kPrototypeTransitionsOrBackPointerOffset slot currently holds — either
// the prototype-transitions array or the back pointer.
void V8HeapExplorer::ExtractMapReferences(int entry, Map* map) {
  SetInternalReference(map, entry,
                       "prototype", map->prototype(), Map::kPrototypeOffset);
  SetInternalReference(map, entry,
                       "constructor", map->constructor(),
                       Map::kConstructorOffset);
  if (!map->instance_descriptors()->IsEmpty()) {
    TagObject(map->instance_descriptors(), "(map descriptors)");
    SetInternalReference(map, entry,
                         "descriptors", map->instance_descriptors(),
                         Map::kInstanceDescriptorsOrBitField3Offset);
  }
  // A FixedArray in the slot means prototype transitions; otherwise the
  // slot holds the back pointer.
  if (map->unchecked_prototype_transitions()->IsFixedArray()) {
    TagObject(map->prototype_transitions(), "(prototype transitions)");
    SetInternalReference(map, entry,
                         "prototype_transitions", map->prototype_transitions(),
                         Map::kPrototypeTransitionsOrBackPointerOffset);
  } else {
    SetInternalReference(map, entry,
                         "back_pointer", map->GetBackPointer(),
                         Map::kPrototypeTransitionsOrBackPointerOffset);
  }
  SetInternalReference(map, entry,
                       "code_cache", map->code_cache(),
                       Map::kCodeCacheOffset);
}
2021
2022
// Extracts the references of a SharedFunctionInfo: name, code objects,
// scope info, script and assorted metadata fields. The initial map is
// reported as a weak reference since the VM holds it weakly.
void V8HeapExplorer::ExtractSharedFunctionInfoReferences(
    int entry, SharedFunctionInfo* shared) {
  HeapObject* obj = shared;
  SetInternalReference(obj, entry,
                       "name", shared->name(),
                       SharedFunctionInfo::kNameOffset);
  TagObject(shared->code(), "(code)");
  SetInternalReference(obj, entry,
                       "code", shared->code(),
                       SharedFunctionInfo::kCodeOffset);
  TagObject(shared->scope_info(), "(function scope info)");
  SetInternalReference(obj, entry,
                       "scope_info", shared->scope_info(),
                       SharedFunctionInfo::kScopeInfoOffset);
  SetInternalReference(obj, entry,
                       "instance_class_name", shared->instance_class_name(),
                       SharedFunctionInfo::kInstanceClassNameOffset);
  SetInternalReference(obj, entry,
                       "script", shared->script(),
                       SharedFunctionInfo::kScriptOffset);
  TagObject(shared->construct_stub(), "(code)");
  SetInternalReference(obj, entry,
                       "construct_stub", shared->construct_stub(),
                       SharedFunctionInfo::kConstructStubOffset);
  SetInternalReference(obj, entry,
                       "function_data", shared->function_data(),
                       SharedFunctionInfo::kFunctionDataOffset);
  SetInternalReference(obj, entry,
                       "debug_info", shared->debug_info(),
                       SharedFunctionInfo::kDebugInfoOffset);
  SetInternalReference(obj, entry,
                       "inferred_name", shared->inferred_name(),
                       SharedFunctionInfo::kInferredNameOffset);
  SetInternalReference(obj, entry,
                       "this_property_assignments",
                       shared->this_property_assignments(),
                       SharedFunctionInfo::kThisPropertyAssignmentsOffset);
  SetWeakReference(obj, entry,
                   1, shared->initial_map(),
                   SharedFunctionInfo::kInitialMapOffset);
}
2064
2065
// Extracts the references of a Script: its source, name, data fields and
// the cached line-ends array.
void V8HeapExplorer::ExtractScriptReferences(int entry, Script* script) {
  HeapObject* obj = script;
  SetInternalReference(obj, entry,
                       "source", script->source(),
                       Script::kSourceOffset);
  SetInternalReference(obj, entry,
                       "name", script->name(),
                       Script::kNameOffset);
  SetInternalReference(obj, entry,
                       "data", script->data(),
                       Script::kDataOffset);
  SetInternalReference(obj, entry,
                       "context_data", script->context_data(),
                       Script::kContextOffset);
  TagObject(script->line_ends(), "(script line ends)");
  SetInternalReference(obj, entry,
                       "line_ends", script->line_ends(),
                       Script::kLineEndsOffset);
}
2085
2086
// Extracts the references of a CodeCache: its default and normal-type
// code caches, tagged for readability in the snapshot.
void V8HeapExplorer::ExtractCodeCacheReferences(
    int entry, CodeCache* code_cache) {
  TagObject(code_cache->default_cache(), "(default code cache)");
  SetInternalReference(code_cache, entry,
                       "default_cache", code_cache->default_cache(),
                       CodeCache::kDefaultCacheOffset);
  TagObject(code_cache->normal_type_cache(), "(code type cache)");
  SetInternalReference(code_cache, entry,
                       "type_cache", code_cache->normal_type_cache(),
                       CodeCache::kNormalTypeCacheOffset);
}
2098
2099
// Extracts the references of a Code object: relocation info, handler
// table, deoptimization data, type feedback info and GC metadata.
void V8HeapExplorer::ExtractCodeReferences(int entry, Code* code) {
  TagObject(code->relocation_info(), "(code relocation info)");
  SetInternalReference(code, entry,
                       "relocation_info", code->relocation_info(),
                       Code::kRelocationInfoOffset);
  SetInternalReference(code, entry,
                       "handler_table", code->handler_table(),
                       Code::kHandlerTableOffset);
  TagObject(code->deoptimization_data(), "(code deopt data)");
  SetInternalReference(code, entry,
                       "deoptimization_data", code->deoptimization_data(),
                       Code::kDeoptimizationDataOffset);
  SetInternalReference(code, entry,
                       "type_feedback_info", code->type_feedback_info(),
                       Code::kTypeFeedbackInfoOffset);
  SetInternalReference(code, entry,
                       "gc_metadata", code->gc_metadata(),
                       Code::kGCMetadataOffset);
}
2119
2120
// A global property cell references only the value it holds.
void V8HeapExplorer::ExtractJSGlobalPropertyCellReferences(
    int entry, JSGlobalPropertyCell* cell) {
  SetInternalReference(cell, entry, "value", cell->value());
}
2125
2126
// For a JSFunction, extracts closure-related references. Bound functions
// expose their bound receiver, target and arguments as shortcut edges;
// ordinary closures expose the context-allocated locals (and, when named,
// the function variable) of their declaration context.
void V8HeapExplorer::ExtractClosureReferences(JSObject* js_obj, int entry) {
  if (!js_obj->IsJSFunction()) return;

  JSFunction* func = JSFunction::cast(js_obj);
  if (func->shared()->bound()) {
    FixedArray* bindings = func->function_bindings();
    SetNativeBindReference(js_obj, entry, "bound_this",
                           bindings->get(JSFunction::kBoundThisIndex));
    SetNativeBindReference(js_obj, entry, "bound_function",
                           bindings->get(JSFunction::kBoundFunctionIndex));
    // The remaining binding slots are the pre-bound call arguments,
    // numbered from 0.
    for (int i = JSFunction::kBoundArgumentsStartIndex;
         i < bindings->length(); i++) {
      const char* reference_name = collection_->names()->GetFormatted(
          "bound_argument_%d",
          i - JSFunction::kBoundArgumentsStartIndex);
      SetNativeBindReference(js_obj, entry, reference_name,
                             bindings->get(i));
    }
  } else {
    Context* context = func->context()->declaration_context();
    ScopeInfo* scope_info = context->closure()->shared()->scope_info();
    // Add context allocated locals.
    int context_locals = scope_info->ContextLocalCount();
    for (int i = 0; i < context_locals; ++i) {
      String* local_name = scope_info->ContextLocalName(i);
      // Locals live right after the fixed context header slots.
      int idx = Context::MIN_CONTEXT_SLOTS + i;
      SetClosureReference(js_obj, entry, local_name, context->get(idx));
    }

    // Add function variable.
    if (scope_info->HasFunctionName()) {
      String* name = scope_info->FunctionName();
      VariableMode mode;
      int idx = scope_info->FunctionContextSlotIndex(name, &mode);
      if (idx >= 0) {
        SetClosureReference(js_obj, entry, name, context->get(idx));
      }
    }
  }
}
2167
2168
// Extracts property references of |js_obj|. Fast-mode objects are walked
// via their descriptor array (fields, constant functions and accessor
// pairs); slow-mode objects via the property dictionary, unwrapping
// global property cells. Empty-named keys are V8's hidden-properties
// backing store and are reported as an internal reference instead.
void V8HeapExplorer::ExtractPropertyReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastProperties()) {
    DescriptorArray* descs = js_obj->map()->instance_descriptors();
    for (int i = 0; i < descs->number_of_descriptors(); i++) {
      switch (descs->GetType(i)) {
        case FIELD: {
          // In-object fields carry an offset (so the indexed pass can
          // skip them); out-of-object fields live in the properties
          // backing store and have no direct offset.
          int index = descs->GetFieldIndex(i);
          if (index < js_obj->map()->inobject_properties()) {
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->InObjectPropertyAt(index),
                NULL,
                js_obj->GetInObjectPropertyOffset(index));
          } else {
            SetPropertyReference(
                js_obj, entry,
                descs->GetKey(i), js_obj->FastPropertyAt(index));
          }
          break;
        }
        case CONSTANT_FUNCTION:
          SetPropertyReference(
              js_obj, entry,
              descs->GetKey(i), descs->GetConstantFunction(i));
          break;
        case CALLBACKS: {
          // Accessor pairs yield up to two edges, labeled "get-<name>"
          // and "set-<name>".
          Object* callback_obj = descs->GetValue(i);
          if (callback_obj->IsAccessorPair()) {
            AccessorPair* accessors = AccessorPair::cast(callback_obj);
            if (Object* getter = accessors->getter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   getter, "get-%s");
            }
            if (Object* setter = accessors->setter()) {
              SetPropertyReference(js_obj, entry, descs->GetKey(i),
                                   setter, "set-%s");
            }
          }
          break;
        }
        case NORMAL:  // only in slow mode
        case HANDLER:  // only in lookup results, not in descriptors
        case INTERCEPTOR:  // only in lookup results, not in descriptors
        case MAP_TRANSITION:  // we do not care about transitions here...
        case ELEMENTS_TRANSITION:
        case CONSTANT_TRANSITION:
        case NULL_DESCRIPTOR:  // ... and not about "holes"
          break;
      }
    }
  } else {
    StringDictionary* dictionary = js_obj->property_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        Object* target = dictionary->ValueAt(i);
        // We assume that global objects can only have slow properties.
        Object* value = target->IsJSGlobalPropertyCell()
            ? JSGlobalPropertyCell::cast(target)->value()
            : target;
        if (String::cast(k)->length() > 0) {
          SetPropertyReference(js_obj, entry, String::cast(k), value);
        } else {
          TagObject(value, "(hidden properties)");
          SetInternalReference(js_obj, entry, "hidden_properties", value);
        }
      }
    }
  }
}
2240
2241
// Extracts indexed element references: fast-elements objects are scanned
// up to the JSArray length (or backing-store length otherwise), skipping
// holes; dictionary-elements objects are scanned via their keys.
void V8HeapExplorer::ExtractElementReferences(JSObject* js_obj, int entry) {
  if (js_obj->HasFastElements()) {
    FixedArray* elements = FixedArray::cast(js_obj->elements());
    // For arrays, only elements below the length are live; the backing
    // store may be longer.
    int length = js_obj->IsJSArray() ?
        Smi::cast(JSArray::cast(js_obj)->length())->value() :
        elements->length();
    for (int i = 0; i < length; ++i) {
      if (!elements->get(i)->IsTheHole()) {
        SetElementReference(js_obj, entry, i, elements->get(i));
      }
    }
  } else if (js_obj->HasDictionaryElements()) {
    SeededNumberDictionary* dictionary = js_obj->element_dictionary();
    int length = dictionary->Capacity();
    for (int i = 0; i < length; ++i) {
      Object* k = dictionary->KeyAt(i);
      if (dictionary->IsKey(k)) {
        ASSERT(k->IsNumber());
        uint32_t index = static_cast<uint32_t>(k->Number());
        SetElementReference(js_obj, entry, index, dictionary->ValueAt(i));
      }
    }
  }
}
2266
2267
2268 void V8HeapExplorer::ExtractInternalReferences(JSObject* js_obj, int entry) {
2269   int length = js_obj->GetInternalFieldCount();
2270   for (int i = 0; i < length; ++i) {
2271     Object* o = js_obj->GetInternalField(i);
2272     SetInternalReference(
2273         js_obj, entry, i, o, js_obj->GetInternalFieldOffset(i));
2274   }
2275 }
2276
2277
2278 String* V8HeapExplorer::GetConstructorName(JSObject* object) {
2279   Heap* heap = object->GetHeap();
2280   if (object->IsJSFunction()) return heap->closure_symbol();
2281   String* constructor_name = object->constructor_name();
2282   if (constructor_name == heap->Object_symbol()) {
2283     // Look up an immediate "constructor" property, if it is a function,
2284     // return its name. This is for instances of binding objects, which
2285     // have prototype constructor type "Object".
2286     Object* constructor_prop = NULL;
2287     LookupResult result(heap->isolate());
2288     object->LocalLookupRealNamedProperty(heap->constructor_symbol(), &result);
2289     if (result.IsProperty()) {
2290       constructor_prop = result.GetLazyValue();
2291     }
2292     if (constructor_prop->IsJSFunction()) {
2293       Object* maybe_name = JSFunction::cast(constructor_prop)->shared()->name();
2294       if (maybe_name->IsString()) {
2295         String* name = String::cast(maybe_name);
2296         if (name->length() > 0) return name;
2297       }
2298     }
2299   }
2300   return object->constructor_name();
2301 }
2302
2303
2304 HeapEntry* V8HeapExplorer::GetEntry(Object* obj) {
2305   if (!obj->IsHeapObject()) return NULL;
2306   return filler_->FindOrAddEntry(obj, this);
2307 }
2308
2309
// Collects GC root references in two passes (strong-only, then all) and
// later replays them into the explorer, classifying each reference as
// strong or weak by comparing the two ordered lists. Synchronize() records
// where each root group ends within the all-references list.
class RootsReferencesExtractor : public ObjectVisitor {
 private:
  // Marks the end index (into all_references_) of one root group.
  struct IndexTag {
    IndexTag(int index, VisitorSynchronization::SyncTag tag)
        : index(index), tag(tag) { }
    int index;
    VisitorSynchronization::SyncTag tag;
  };

 public:
  RootsReferencesExtractor()
      : collecting_all_references_(false),
        previous_reference_count_(0) {
  }

  void VisitPointers(Object** start, Object** end) {
    // First pass fills strong_references_; after SetCollectingAllReferences
    // the second pass fills all_references_.
    if (collecting_all_references_) {
      for (Object** p = start; p < end; p++) all_references_.Add(*p);
    } else {
      for (Object** p = start; p < end; p++) strong_references_.Add(*p);
    }
  }

  void SetCollectingAllReferences() { collecting_all_references_ = true; }

  void FillReferences(V8HeapExplorer* explorer) {
    ASSERT(strong_references_.length() <= all_references_.length());
    // First register one GC-roots edge per recorded root group.
    for (int i = 0; i < reference_tags_.length(); ++i) {
      explorer->SetGcRootsReference(reference_tags_[i].tag);
    }
    // Merge-walk both lists: a reference present in strong_references_
    // (which is a subsequence of all_references_) is non-weak; anything
    // else is weak. tags_index advances when a group boundary is reached.
    int strong_index = 0, all_index = 0, tags_index = 0;
    while (all_index < all_references_.length()) {
      if (strong_index < strong_references_.length() &&
          strong_references_[strong_index] == all_references_[all_index]) {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        false,
                                        all_references_[all_index++]);
        ++strong_index;
      } else {
        explorer->SetGcSubrootReference(reference_tags_[tags_index].tag,
                                        true,
                                        all_references_[all_index++]);
      }
      if (reference_tags_[tags_index].index == all_index) ++tags_index;
    }
  }

  void Synchronize(VisitorSynchronization::SyncTag tag) {
    // Record a group boundary only during the "all" pass and only if the
    // group contributed at least one reference.
    if (collecting_all_references_ &&
        previous_reference_count_ != all_references_.length()) {
      previous_reference_count_ = all_references_.length();
      reference_tags_.Add(IndexTag(previous_reference_count_, tag));
    }
  }

 private:
  bool collecting_all_references_;
  List<Object*> strong_references_;
  List<Object*> all_references_;
  int previous_reference_count_;
  List<IndexTag> reference_tags_;
};
2372
2373
// Walks the whole heap extracting references for every reachable object,
// then extracts GC root references. Returns false if the embedder's
// progress callback requested an abort. Note the filtering heap iteration
// must run to completion even after an interrupt, so the loop continues
// (doing no work) instead of breaking.
bool V8HeapExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  HeapIterator iterator(HeapIterator::kFilterUnreachable);

  filler_ = filler;
  bool interrupted = false;

  // Heap iteration with filtering must be finished in any case.
  for (HeapObject* obj = iterator.next();
       obj != NULL;
       obj = iterator.next(), progress_->ProgressStep()) {
    if (!interrupted) {
      ExtractReferences(obj);
      if (!progress_->ProgressReport(false)) interrupted = true;
    }
  }
  if (interrupted) {
    filler_ = NULL;
    return false;
  }

  // Two root passes: strong-only first, then all, so the extractor can
  // classify each root reference as strong or weak.
  SetRootGcRootsReference();
  RootsReferencesExtractor extractor;
  heap_->IterateRoots(&extractor, VISIT_ONLY_STRONG);
  extractor.SetCollectingAllReferences();
  heap_->IterateRoots(&extractor, VISIT_ALL);
  extractor.FillReferences(this);
  filler_ = NULL;
  return progress_->ProgressReport(true);
}
2404
2405
// Filters out uninteresting objects so they do not clutter the snapshot:
// non-heap values, oddballs, and a handful of shared singleton maps and
// empty arrays that nearly every object references.
bool V8HeapExplorer::IsEssentialObject(Object* object) {
  // We have to use raw_unchecked_* versions because checked versions
  // would fail during iteration over object properties.
  return object->IsHeapObject()
      && !object->IsOddball()
      && object != heap_->raw_unchecked_empty_byte_array()
      && object != heap_->raw_unchecked_empty_fixed_array()
      && object != heap_->raw_unchecked_empty_descriptor_array()
      && object != heap_->raw_unchecked_fixed_array_map()
      && object != heap_->raw_unchecked_global_property_cell_map()
      && object != heap_->raw_unchecked_shared_function_info_map()
      && object != heap_->raw_unchecked_free_space_map()
      && object != heap_->raw_unchecked_one_pointer_filler_map()
      && object != heap_->raw_unchecked_two_pointer_filler_map();
}
2421
2422
2423 void V8HeapExplorer::SetClosureReference(HeapObject* parent_obj,
2424                                          int parent_entry,
2425                                          String* reference_name,
2426                                          Object* child_obj) {
2427   HeapEntry* child_entry = GetEntry(child_obj);
2428   if (child_entry != NULL) {
2429     filler_->SetNamedReference(HeapGraphEdge::kContextVariable,
2430                                parent_entry,
2431                                collection_->names()->GetName(reference_name),
2432                                child_entry);
2433   }
2434 }
2435
2436
2437 void V8HeapExplorer::SetNativeBindReference(HeapObject* parent_obj,
2438                                             int parent_entry,
2439                                             const char* reference_name,
2440                                             Object* child_obj) {
2441   HeapEntry* child_entry = GetEntry(child_obj);
2442   if (child_entry != NULL) {
2443     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2444                                parent_entry,
2445                                reference_name,
2446                                child_entry);
2447   }
2448 }
2449
2450
2451 void V8HeapExplorer::SetElementReference(HeapObject* parent_obj,
2452                                          int parent_entry,
2453                                          int index,
2454                                          Object* child_obj) {
2455   HeapEntry* child_entry = GetEntry(child_obj);
2456   if (child_entry != NULL) {
2457     filler_->SetIndexedReference(HeapGraphEdge::kElement,
2458                                  parent_entry,
2459                                  index,
2460                                  child_entry);
2461   }
2462 }
2463
2464
// Records a named internal edge for the field at |field_offset|. The edge
// is only emitted for essential children, but the field is marked visited
// regardless so the generic indexed pass never re-reports it.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          const char* reference_name,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               reference_name,
                               child_entry);
  }
  // Mark even when the edge was suppressed above.
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
2480
2481
// Overload that names the internal edge by a numeric |index| (rendered to
// a string). Same marking semantics as the named overload: the field is
// marked visited even when the edge is suppressed for a non-essential
// child.
void V8HeapExplorer::SetInternalReference(HeapObject* parent_obj,
                                          int parent_entry,
                                          int index,
                                          Object* child_obj,
                                          int field_offset) {
  HeapEntry* child_entry = GetEntry(child_obj);
  if (child_entry == NULL) return;
  if (IsEssentialObject(child_obj)) {
    filler_->SetNamedReference(HeapGraphEdge::kInternal,
                               parent_entry,
                               collection_->names()->GetName(index),
                               child_entry);
  }
  IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
}
2497
2498
2499 void V8HeapExplorer::SetHiddenReference(HeapObject* parent_obj,
2500                                         int parent_entry,
2501                                         int index,
2502                                         Object* child_obj) {
2503   HeapEntry* child_entry = GetEntry(child_obj);
2504   if (child_entry != NULL && IsEssentialObject(child_obj)) {
2505     filler_->SetIndexedReference(HeapGraphEdge::kHidden,
2506                                  parent_entry,
2507                                  index,
2508                                  child_entry);
2509   }
2510 }
2511
2512
2513 void V8HeapExplorer::SetWeakReference(HeapObject* parent_obj,
2514                                       int parent_entry,
2515                                       int index,
2516                                       Object* child_obj,
2517                                       int field_offset) {
2518   HeapEntry* child_entry = GetEntry(child_obj);
2519   if (child_entry != NULL) {
2520     filler_->SetIndexedReference(HeapGraphEdge::kWeak,
2521                                  parent_entry,
2522                                  index,
2523                                  child_entry);
2524     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2525   }
2526 }
2527
2528
2529 void V8HeapExplorer::SetPropertyReference(HeapObject* parent_obj,
2530                                           int parent_entry,
2531                                           String* reference_name,
2532                                           Object* child_obj,
2533                                           const char* name_format_string,
2534                                           int field_offset) {
2535   HeapEntry* child_entry = GetEntry(child_obj);
2536   if (child_entry != NULL) {
2537     HeapGraphEdge::Type type = reference_name->length() > 0 ?
2538         HeapGraphEdge::kProperty : HeapGraphEdge::kInternal;
2539     const char* name = name_format_string  != NULL ?
2540         collection_->names()->GetFormatted(
2541             name_format_string,
2542             *reference_name->ToCString(DISALLOW_NULLS,
2543                                        ROBUST_STRING_TRAVERSAL)) :
2544         collection_->names()->GetName(reference_name);
2545
2546     filler_->SetNamedReference(type,
2547                                parent_entry,
2548                                name,
2549                                child_entry);
2550     IndexedReferencesExtractor::MarkVisitedField(parent_obj, field_offset);
2551   }
2552 }
2553
2554
2555 void V8HeapExplorer::SetPropertyShortcutReference(HeapObject* parent_obj,
2556                                                   int parent_entry,
2557                                                   String* reference_name,
2558                                                   Object* child_obj) {
2559   HeapEntry* child_entry = GetEntry(child_obj);
2560   if (child_entry != NULL) {
2561     filler_->SetNamedReference(HeapGraphEdge::kShortcut,
2562                                parent_entry,
2563                                collection_->names()->GetName(reference_name),
2564                                child_entry);
2565   }
2566 }
2567
2568
2569 void V8HeapExplorer::SetRootGcRootsReference() {
2570   filler_->SetIndexedAutoIndexReference(
2571       HeapGraphEdge::kElement,
2572       snapshot_->root()->index(),
2573       snapshot_->gc_roots());
2574 }
2575
2576
2577 void V8HeapExplorer::SetUserGlobalReference(Object* child_obj) {
2578   HeapEntry* child_entry = GetEntry(child_obj);
2579   ASSERT(child_entry != NULL);
2580   filler_->SetNamedAutoIndexReference(
2581       HeapGraphEdge::kShortcut,
2582       snapshot_->root()->index(),
2583       child_entry);
2584 }
2585
2586
2587 void V8HeapExplorer::SetGcRootsReference(VisitorSynchronization::SyncTag tag) {
2588   filler_->SetIndexedAutoIndexReference(
2589       HeapGraphEdge::kElement,
2590       snapshot_->gc_roots()->index(),
2591       snapshot_->gc_subroot(tag));
2592 }
2593
2594
2595 void V8HeapExplorer::SetGcSubrootReference(
2596     VisitorSynchronization::SyncTag tag, bool is_weak, Object* child_obj) {
2597   HeapEntry* child_entry = GetEntry(child_obj);
2598   if (child_entry != NULL) {
2599     const char* name = GetStrongGcSubrootName(child_obj);
2600     if (name != NULL) {
2601       filler_->SetNamedReference(
2602           HeapGraphEdge::kInternal,
2603           snapshot_->gc_subroot(tag)->index(),
2604           name,
2605           child_entry);
2606     } else {
2607       filler_->SetIndexedAutoIndexReference(
2608           is_weak ? HeapGraphEdge::kWeak : HeapGraphEdge::kElement,
2609           snapshot_->gc_subroot(tag)->index(),
2610           child_entry);
2611     }
2612   }
2613 }
2614
2615
// Returns the root-list name (e.g. "string_map") for objects that are
// strong GC roots, or NULL for everything else. The tag map is built
// lazily on first call by expanding the heap's root/struct/symbol lists.
const char* V8HeapExplorer::GetStrongGcSubrootName(Object* object) {
  if (strong_gc_subroot_names_.is_empty()) {
#define NAME_ENTRY(name) strong_gc_subroot_names_.SetTag(heap_->name(), #name);
#define ROOT_NAME(type, name, camel_name) NAME_ENTRY(name)
    STRONG_ROOT_LIST(ROOT_NAME)
#undef ROOT_NAME
#define STRUCT_MAP_NAME(NAME, Name, name) NAME_ENTRY(name##_map)
    STRUCT_LIST(STRUCT_MAP_NAME)
#undef STRUCT_MAP_NAME
#define SYMBOL_NAME(name, str) NAME_ENTRY(name)
    SYMBOL_LIST(SYMBOL_NAME)
#undef SYMBOL_NAME
#undef NAME_ENTRY
    // The lists above are non-empty, so the map must be populated now.
    CHECK(!strong_gc_subroot_names_.is_empty());
  }
  return strong_gc_subroot_names_.GetTag(object);
}
2633
2634
2635 void V8HeapExplorer::TagObject(Object* obj, const char* tag) {
2636   if (IsEssentialObject(obj)) {
2637     HeapEntry* entry = GetEntry(obj);
2638     if (entry->name()[0] == '\0') {
2639       entry->set_name(tag);
2640     }
2641   }
2642 }
2643
2644
// Collects the JSGlobalObject behind every global context seen while
// visiting object pointers (used against the global handles roots).
class GlobalObjectsEnumerator : public ObjectVisitor {
 public:
  virtual void VisitPointers(Object** start, Object** end) {
    for (Object** p = start; p < end; p++) {
      if ((*p)->IsGlobalContext()) {
        Context* context = Context::cast(*p);
        JSObject* proxy = context->global_proxy();
        if (proxy->IsJSGlobalProxy()) {
          // The real global object is the proxy's map prototype.
          Object* global = proxy->map()->prototype();
          if (global->IsJSGlobalObject()) {
            objects_.Add(Handle<JSGlobalObject>(JSGlobalObject::cast(global)));
          }
        }
      }
    }
  }
  // Number of global objects collected so far.
  int count() { return objects_.length(); }
  Handle<JSGlobalObject>& at(int i) { return objects_[i]; }

 private:
  List<Handle<JSGlobalObject> > objects_;
};
2667
2668
// Modifies heap. Must not be run during heap traversal.
// Tags each global object with the value of its document.URL property
// (when present) so snapshots of multi-frame pages are distinguishable.
void V8HeapExplorer::TagGlobalObjects() {
  HandleScope scope;
  Isolate* isolate = Isolate::Current();
  GlobalObjectsEnumerator enumerator;
  isolate->global_handles()->IterateAllRoots(&enumerator);
  Handle<String> document_string =
      isolate->factory()->NewStringFromAscii(CStrVector("document"));
  Handle<String> url_string =
      isolate->factory()->NewStringFromAscii(CStrVector("URL"));
  const char** urls = NewArray<const char*>(enumerator.count());
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    urls[i] = NULL;
    HandleScope scope;  // Inner scope: release per-iteration handles.
    Handle<JSGlobalObject> global_obj = enumerator.at(i);
    Object* obj_document;
    // GetProperty may call into JS and allocate, which is why the URL
    // strings are gathered before the no-allocation section below.
    if (global_obj->GetProperty(*document_string)->ToObject(&obj_document) &&
        obj_document->IsJSObject()) {
      JSObject* document = JSObject::cast(obj_document);
      Object* obj_url;
      if (document->GetProperty(*url_string)->ToObject(&obj_url) &&
          obj_url->IsString()) {
        urls[i] = collection_->names()->GetName(String::cast(obj_url));
      }
    }
  }

  // Raw object pointers go into objects_tags_, so no allocation (and
  // thus no GC that could move them) may happen while tagging.
  AssertNoAllocation no_allocation;
  for (int i = 0, l = enumerator.count(); i < l; ++i) {
    objects_tags_.SetTag(*enumerator.at(i), urls[i]);
  }

  DeleteArray(urls);
}
2703
2704
// Forwards embedder references (global handles annotated with a class
// id) to the NativeObjectsExplorer. This visitor is only ever used with
// IterateAllRootsWithClassIds, so plain pointer visits must not occur.
class GlobalHandlesExtractor : public ObjectVisitor {
 public:
  explicit GlobalHandlesExtractor(NativeObjectsExplorer* explorer)
      : explorer_(explorer) {}
  virtual ~GlobalHandlesExtractor() {}
  virtual void VisitPointers(Object** start, Object** end) {
    UNREACHABLE();
  }
  virtual void VisitEmbedderReference(Object** p, uint16_t class_id) {
    explorer_->VisitSubtreeWrapper(p, class_id);
  }
 private:
  NativeObjectsExplorer* explorer_;  // Not owned.
};
2719
2720
// Allocates snapshot entries of one fixed type (kNative or kSynthetic)
// for embedder-provided RetainedObjectInfo "things".
class BasicHeapEntriesAllocator : public HeapEntriesAllocator {
 public:
  BasicHeapEntriesAllocator(
      HeapSnapshot* snapshot,
      HeapEntry::Type entries_type)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      entries_type_(entries_type) {
  }
  // |ptr| is expected to be a v8::RetainedObjectInfo*.
  virtual HeapEntry* AllocateEntry(HeapThing ptr);
 private:
  HeapSnapshot* snapshot_;               // Not owned.
  HeapSnapshotsCollection* collection_;  // Not owned.
  HeapEntry::Type entries_type_;
};
2736
2737
2738 HeapEntry* BasicHeapEntriesAllocator::AllocateEntry(HeapThing ptr) {
2739   v8::RetainedObjectInfo* info = reinterpret_cast<v8::RetainedObjectInfo*>(ptr);
2740   intptr_t elements = info->GetElementCount();
2741   intptr_t size = info->GetSizeInBytes();
2742   const char* name = elements != -1
2743       ? collection_->names()->GetFormatted(
2744             "%s / %" V8_PTR_PREFIX "d entries", info->GetLabel(), elements)
2745       : collection_->names()->GetCopy(info->GetLabel());
2746   return snapshot_->AddEntry(
2747       entries_type_,
2748       name,
2749       HeapObjectsMap::GenerateId(info),
2750       size != -1 ? static_cast<int>(size) : 0);
2751 }
2752
2753
// Sets up the two entry allocators this explorer uses: synthetic for
// group nodes, native for the embedder's retained objects.
NativeObjectsExplorer::NativeObjectsExplorer(
    HeapSnapshot* snapshot, SnapshottingProgressReportingInterface* progress)
    : snapshot_(snapshot),
      collection_(snapshot_->collection()),
      progress_(progress),
      embedder_queried_(false),
      objects_by_info_(RetainedInfosMatch),
      native_groups_(StringsMatch),
      filler_(NULL) {
  synthetic_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kSynthetic);
  native_entries_allocator_ =
      new BasicHeapEntriesAllocator(snapshot, HeapEntry::kNative);
}
2768
2769
// Releases everything the explorer owns: the embedder-provided
// RetainedObjectInfos (per-object keys and per-group values), the
// per-info object lists, and both entry allocators.
NativeObjectsExplorer::~NativeObjectsExplorer() {
  for (HashMap::Entry* p = objects_by_info_.Start();
       p != NULL;
       p = objects_by_info_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
    info->Dispose();  // Ownership was acquired in FillRetainedObjects.
    List<HeapObject*>* objects =
        reinterpret_cast<List<HeapObject*>* >(p->value);
    delete objects;
  }
  for (HashMap::Entry* p = native_groups_.Start();
       p != NULL;
       p = native_groups_.Next(p)) {
    v8::RetainedObjectInfo* info =
        reinterpret_cast<v8::RetainedObjectInfo*>(p->value);
    info->Dispose();  // NativeGroupRetainedObjectInfo deletes itself.
  }
  delete synthetic_entries_allocator_;
  delete native_entries_allocator_;
}
2791
2792
// Returns the number of distinct native infos. Queries the embedder
// first (idempotent) so the count is accurate.
int NativeObjectsExplorer::EstimateObjectsCount() {
  FillRetainedObjects();
  return objects_by_info_.occupancy();
}
2797
2798
// Queries the embedder (at most once) for native objects retained via
// object groups and via class-id annotated global handles.
void NativeObjectsExplorer::FillRetainedObjects() {
  if (embedder_queried_) return;
  Isolate* isolate = Isolate::Current();
  // Record objects that are joined into ObjectGroups.
  // The GC prologue/epilogue callbacks bracket the group traversal so
  // embedders populate their groups exactly as they would for a GC.
  isolate->heap()->CallGlobalGCPrologueCallback();
  List<ObjectGroup*>* groups = isolate->global_handles()->object_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ObjectGroup* group = groups->at(i);
    if (group->info_ == NULL) continue;  // Group without retained info.
    List<HeapObject*>* list = GetListMaybeDisposeInfo(group->info_);
    for (size_t j = 0; j < group->length_; ++j) {
      HeapObject* obj = HeapObject::cast(*group->objects_[j]);
      list->Add(obj);
      in_groups_.Insert(obj);
    }
    group->info_ = NULL;  // Acquire info object ownership.
  }
  isolate->global_handles()->RemoveObjectGroups();
  isolate->heap()->CallGlobalGCEpilogueCallback();
  // Record objects that are not in ObjectGroups, but have class ID.
  GlobalHandlesExtractor extractor(this);
  isolate->global_handles()->IterateAllRootsWithClassIds(&extractor);
  embedder_queried_ = true;
}
2823
// Creates "native" internal edges for embedder-declared implicit
// reference groups (a parent retaining children through native code,
// invisible to the regular heap graph).
void NativeObjectsExplorer::FillImplicitReferences() {
  Isolate* isolate = Isolate::Current();
  List<ImplicitRefGroup*>* groups =
      isolate->global_handles()->implicit_ref_groups();
  for (int i = 0; i < groups->length(); ++i) {
    ImplicitRefGroup* group = groups->at(i);
    HeapObject* parent = *group->parent_;
    int parent_entry =
        filler_->FindOrAddEntry(parent, native_entries_allocator_)->index();
    ASSERT(parent_entry != HeapEntry::kNoEntry);
    Object*** children = group->children_;
    for (size_t j = 0; j < group->length_; ++j) {
      Object* child = *children[j];
      HeapEntry* child_entry =
          filler_->FindOrAddEntry(child, native_entries_allocator_);
      filler_->SetNamedReference(
          HeapGraphEdge::kInternal,
          parent_entry,
          "native",
          child_entry);
    }
  }
}
2847
2848 List<HeapObject*>* NativeObjectsExplorer::GetListMaybeDisposeInfo(
2849     v8::RetainedObjectInfo* info) {
2850   HashMap::Entry* entry =
2851       objects_by_info_.Lookup(info, InfoHash(info), true);
2852   if (entry->value != NULL) {
2853     info->Dispose();
2854   } else {
2855     entry->value = new List<HeapObject*>(4);
2856   }
2857   return reinterpret_cast<List<HeapObject*>* >(entry->value);
2858 }
2859
2860
// Entry point of the native-objects pass: builds group nodes, native
// nodes and the edges between them and their wrappers. Always succeeds
// (this pass is not cancellable).
bool NativeObjectsExplorer::IterateAndExtractReferences(
    SnapshotFillerInterface* filler) {
  filler_ = filler;
  FillRetainedObjects();
  FillImplicitReferences();
  if (EstimateObjectsCount() > 0) {
    for (HashMap::Entry* p = objects_by_info_.Start();
         p != NULL;
         p = objects_by_info_.Next(p)) {
      v8::RetainedObjectInfo* info =
          reinterpret_cast<v8::RetainedObjectInfo*>(p->key);
      SetNativeRootReference(info);
      List<HeapObject*>* objects =
          reinterpret_cast<List<HeapObject*>* >(p->value);
      for (int i = 0; i < objects->length(); ++i) {
        SetWrapperNativeReferences(objects->at(i), info);
      }
    }
    SetRootNativeRootsReference();
  }
  filler_ = NULL;  // Do not keep a dangling pointer past this pass.
  return true;
}
2884
2885
// Synthetic RetainedObjectInfo standing for a whole native group node.
// Self-owning: Dispose() deletes the instance (and may run only once).
class NativeGroupRetainedObjectInfo : public v8::RetainedObjectInfo {
 public:
  explicit NativeGroupRetainedObjectInfo(const char* label)
      : disposed_(false),
        hash_(reinterpret_cast<intptr_t>(label)),
        label_(label) {
  }

  virtual ~NativeGroupRetainedObjectInfo() {}
  virtual void Dispose() {
    CHECK(!disposed_);
    disposed_ = true;
    delete this;
  }
  virtual bool IsEquivalent(RetainedObjectInfo* other) {
    // Falls back to a string compare, so equal labels at different
    // addresses still compare equivalent when hashes happen to match.
    return hash_ == other->GetHash() && !strcmp(label_, other->GetLabel());
  }
  virtual intptr_t GetHash() { return hash_; }
  virtual const char* GetLabel() { return label_; }

 private:
  bool disposed_;
  intptr_t hash_;      // The label's address at construction time.
  const char* label_;  // Not owned; assumed to outlive this object.
};
2911
2912
// Returns the synthetic group info for |label|, creating it on first
// use. Lookup is keyed by an interned copy of the label string.
NativeGroupRetainedObjectInfo* NativeObjectsExplorer::FindOrAddGroupInfo(
    const char* label) {
  const char* label_copy = collection_->names()->GetCopy(label);
  uint32_t hash = HashSequentialString(label_copy,
                                       static_cast<int>(strlen(label_copy)),
                                       HEAP->HashSeed());
  HashMap::Entry* entry = native_groups_.Lookup(const_cast<char*>(label_copy),
                                                hash, true);
  if (entry->value == NULL) {
    // NOTE(review): the new info stores the caller's |label| pointer
    // rather than |label_copy|; this assumes |label| outlives the
    // explorer — confirm, or switch to |label_copy|.
    entry->value = new NativeGroupRetainedObjectInfo(label);
  }
  return static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
}
2926
2927
2928 void NativeObjectsExplorer::SetNativeRootReference(
2929     v8::RetainedObjectInfo* info) {
2930   HeapEntry* child_entry =
2931       filler_->FindOrAddEntry(info, native_entries_allocator_);
2932   ASSERT(child_entry != NULL);
2933   NativeGroupRetainedObjectInfo* group_info =
2934       FindOrAddGroupInfo(info->GetGroupLabel());
2935   HeapEntry* group_entry =
2936       filler_->FindOrAddEntry(group_info, synthetic_entries_allocator_);
2937   filler_->SetNamedAutoIndexReference(
2938       HeapGraphEdge::kInternal,
2939       group_entry->index(),
2940       child_entry);
2941 }
2942
2943
2944 void NativeObjectsExplorer::SetWrapperNativeReferences(
2945     HeapObject* wrapper, v8::RetainedObjectInfo* info) {
2946   HeapEntry* wrapper_entry = filler_->FindEntry(wrapper);
2947   ASSERT(wrapper_entry != NULL);
2948   HeapEntry* info_entry =
2949       filler_->FindOrAddEntry(info, native_entries_allocator_);
2950   ASSERT(info_entry != NULL);
2951   filler_->SetNamedReference(HeapGraphEdge::kInternal,
2952                              wrapper_entry->index(),
2953                              "native",
2954                              info_entry);
2955   filler_->SetIndexedAutoIndexReference(HeapGraphEdge::kElement,
2956                                         info_entry->index(),
2957                                         wrapper_entry);
2958 }
2959
2960
2961 void NativeObjectsExplorer::SetRootNativeRootsReference() {
2962   for (HashMap::Entry* entry = native_groups_.Start();
2963        entry;
2964        entry = native_groups_.Next(entry)) {
2965     NativeGroupRetainedObjectInfo* group_info =
2966         static_cast<NativeGroupRetainedObjectInfo*>(entry->value);
2967     HeapEntry* group_entry =
2968         filler_->FindOrAddEntry(group_info, native_entries_allocator_);
2969     ASSERT(group_entry != NULL);
2970     filler_->SetIndexedAutoIndexReference(
2971         HeapGraphEdge::kElement,
2972         snapshot_->root()->index(),
2973         group_entry);
2974   }
2975 }
2976
2977
2978 void NativeObjectsExplorer::VisitSubtreeWrapper(Object** p, uint16_t class_id) {
2979   if (in_groups_.Contains(*p)) return;
2980   Isolate* isolate = Isolate::Current();
2981   v8::RetainedObjectInfo* info =
2982       isolate->heap_profiler()->ExecuteWrapperClassCallback(class_id, p);
2983   if (info == NULL) return;
2984   GetListMaybeDisposeInfo(info)->Add(HeapObject::cast(*p));
2985 }
2986
2987
// Adapter between the explorers and the snapshot: maps HeapThings to
// entry indexes via |entries_| and forwards edge creation to the
// parent HeapEntry.
class SnapshotFiller : public SnapshotFillerInterface {
 public:
  explicit SnapshotFiller(HeapSnapshot* snapshot, HeapEntriesMap* entries)
      : snapshot_(snapshot),
        collection_(snapshot->collection()),
        entries_(entries) { }
  // Allocates a fresh entry for |ptr| and records the mapping.
  HeapEntry* AddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = allocator->AllocateEntry(ptr);
    entries_->Pair(ptr, entry->index());
    return entry;
  }
  // Returns the previously added entry for |ptr|, or NULL.
  HeapEntry* FindEntry(HeapThing ptr) {
    int index = entries_->Map(ptr);
    return index != HeapEntry::kNoEntry ? &snapshot_->entries()[index] : NULL;
  }
  HeapEntry* FindOrAddEntry(HeapThing ptr, HeapEntriesAllocator* allocator) {
    HeapEntry* entry = FindEntry(ptr);
    return entry != NULL ? entry : AddEntry(ptr, allocator);
  }
  void SetIndexedReference(HeapGraphEdge::Type type,
                           int parent,
                           int index,
                           HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetIndexedAutoIndexReference(HeapGraphEdge::Type type,
                                    int parent,
                                    HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    // Auto indexes are 1-based, continuing the current child count.
    int index = parent_entry->children_count() + 1;
    parent_entry->SetIndexedReference(type, index, child_entry);
  }
  void SetNamedReference(HeapGraphEdge::Type type,
                         int parent,
                         const char* reference_name,
                         HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    parent_entry->SetNamedReference(type, reference_name, child_entry);
  }
  void SetNamedAutoIndexReference(HeapGraphEdge::Type type,
                                  int parent,
                                  HeapEntry* child_entry) {
    HeapEntry* parent_entry = &snapshot_->entries()[parent];
    int index = parent_entry->children_count() + 1;
    // The auto-generated name is the stringified 1-based index.
    parent_entry->SetNamedReference(
        type,
        collection_->names()->GetName(index),
        child_entry);
  }

 private:
  HeapSnapshot* snapshot_;               // Not owned.
  HeapSnapshotsCollection* collection_;  // Not owned.
  HeapEntriesMap* entries_;              // Not owned.
};
3044
3045
// Wires both explorers to this generator for progress reporting;
// |control| may be NULL when the embedder does not track progress.
HeapSnapshotGenerator::HeapSnapshotGenerator(HeapSnapshot* snapshot,
                                             v8::ActivityControl* control)
    : snapshot_(snapshot),
      control_(control),
      v8_heap_explorer_(snapshot_, this),
      dom_explorer_(snapshot_, this) {
}
3053
3054
// Runs the full snapshot pipeline: tagging, GC, reference extraction,
// dominator tree and retained sizes. Returns false if the embedder
// cancelled via the activity control.
bool HeapSnapshotGenerator::GenerateSnapshot() {
  v8_heap_explorer_.TagGlobalObjects();

  // TODO(1562) Profiler assumes that any object that is in the heap after
  // full GC is reachable from the root when computing dominators.
  // This is not true for weakly reachable objects.
  // As a temporary solution we call GC twice.
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");
  Isolate::Current()->heap()->CollectAllGarbage(
      Heap::kMakeHeapIterableMask,
      "HeapSnapshotGenerator::GenerateSnapshot");

#ifdef DEBUG
  Heap* debug_heap = Isolate::Current()->heap();
  // Conservative sweeping would leave unswept garbage behind iterators.
  ASSERT(!debug_heap->old_data_space()->was_swept_conservatively());
  ASSERT(!debug_heap->old_pointer_space()->was_swept_conservatively());
  ASSERT(!debug_heap->code_space()->was_swept_conservatively());
  ASSERT(!debug_heap->cell_space()->was_swept_conservatively());
  ASSERT(!debug_heap->map_space()->was_swept_conservatively());
#endif

  // The following code uses heap iterators, so we want the heap to be
  // stable. It should follow TagGlobalObjects as that can allocate.
  AssertNoAllocation no_alloc;

#ifdef DEBUG
  debug_heap->Verify();
#endif

  SetProgressTotal(1);  // 1 pass.

#ifdef DEBUG
  debug_heap->Verify();
#endif

  if (!FillReferences()) return false;

  snapshot_->FillChildrenAndRetainers();
  snapshot_->RememberLastJSObjectId();

  if (!SetEntriesDominators()) return false;
  if (!CalculateRetainedSizes()) return false;

  // Force a final 100% progress notification.
  progress_counter_ = progress_total_;
  if (!ProgressReport(true)) return false;
  return true;
}
3104
3105
// Counts one processed object towards the progress total.
void HeapSnapshotGenerator::ProgressStep() {
  ++progress_counter_;
}
3109
3110
3111 bool HeapSnapshotGenerator::ProgressReport(bool force) {
3112   const int kProgressReportGranularity = 10000;
3113   if (control_ != NULL
3114       && (force || progress_counter_ % kProgressReportGranularity == 0)) {
3115       return
3116           control_->ReportProgressValue(progress_counter_, progress_total_) ==
3117           v8::ActivityControl::kContinue;
3118   }
3119   return true;
3120 }
3121
3122
3123 void HeapSnapshotGenerator::SetProgressTotal(int iterations_count) {
3124   if (control_ == NULL) return;
3125   HeapIterator iterator(HeapIterator::kFilterUnreachable);
3126   progress_total_ = iterations_count * (
3127       v8_heap_explorer_.EstimateObjectsCount(&iterator) +
3128       dom_explorer_.EstimateObjectsCount());
3129   progress_counter_ = 0;
3130 }
3131
3132
3133 bool HeapSnapshotGenerator::FillReferences() {
3134   SnapshotFiller filler(snapshot_, &entries_);
3135   v8_heap_explorer_.AddRootEntries(&filler);
3136   return v8_heap_explorer_.IterateAndExtractReferences(&filler)
3137       && dom_explorer_.IterateAndExtractReferences(&filler);
3138 }
3139
3140
3141 bool HeapSnapshotGenerator::IsUserGlobalReference(const HeapGraphEdge* edge) {
3142   ASSERT(edge->from() == snapshot_->root());
3143   return edge->type() == HeapGraphEdge::kShortcut;
3144 }
3145
3146
// Flood-fills the user_reachable bit over everything transitively
// reachable from the user globals hanging off the snapshot root.
void HeapSnapshotGenerator::MarkUserReachableObjects() {
  List<HeapEntry*> worklist;

  // Seed with the root's user-global (shortcut) children.
  Vector<HeapGraphEdge*> children = snapshot_->root()->children();
  for (int i = 0; i < children.length(); ++i) {
    if (IsUserGlobalReference(children[i])) {
      worklist.Add(children[i]->to());
    }
  }

  while (!worklist.is_empty()) {
    HeapEntry* entry = worklist.RemoveLast();
    if (entry->user_reachable()) continue;  // Already visited.
    entry->set_user_reachable();
    Vector<HeapGraphEdge*> children = entry->children();
    for (int i = 0; i < children.length(); ++i) {
      HeapEntry* child = children[i]->to();
      if (!child->user_reachable()) {
        worklist.Add(child);
      }
    }
  }
}
3170
3171
3172 static bool IsRetainingEdge(HeapGraphEdge* edge) {
3173   if (edge->type() == HeapGraphEdge::kShortcut) return false;
3174   // The edge is not retaining if it goes from system domain
3175   // (i.e. an object not reachable from window) to the user domain
3176   // (i.e. a reachable object).
3177   return edge->from()->user_reachable()
3178       || !edge->to()->user_reachable();
3179 }
3180
3181
// Iterative DFS that assigns a postorder index to every entry reachable
// through retaining edges and fills |entries| in postorder (root last).
// Paint marks prevent revisiting nodes.
void HeapSnapshotGenerator::FillPostorderIndexes(
    Vector<HeapEntry*>* entries) {
  snapshot_->ClearPaint();
  int current_entry = 0;
  List<HeapEntry*> nodes_to_visit;
  HeapEntry* root = snapshot_->root();
  nodes_to_visit.Add(root);
  snapshot_->root()->paint();
  while (!nodes_to_visit.is_empty()) {
    HeapEntry* entry = nodes_to_visit.last();
    Vector<HeapGraphEdge*> children = entry->children();
    bool has_new_edges = false;
    for (int i = 0; i < children.length(); ++i) {
      // Non-retaining edges are skipped except directly under the root.
      if (entry != root && !IsRetainingEdge(children[i])) continue;
      HeapEntry* child = children[i]->to();
      if (!child->painted()) {
        nodes_to_visit.Add(child);
        child->paint();
        has_new_edges = true;
      }
    }
    // Emit a node only after all of its children have been emitted.
    if (!has_new_edges) {
      entry->set_postorder_index(current_entry);
      (*entries)[current_entry++] = entry;
      nodes_to_visit.RemoveLast();
    }
  }
  ASSERT_EQ(current_entry, entries->length());
}
3211
3212
3213 static int Intersect(int i1, int i2, const Vector<int>& dominators) {
3214   int finger1 = i1, finger2 = i2;
3215   while (finger1 != finger2) {
3216     while (finger1 < finger2) finger1 = dominators[finger1];
3217     while (finger2 < finger1) finger2 = dominators[finger2];
3218   }
3219   return finger1;
3220 }
3221
3222
// The algorithm is based on the article:
// K. Cooper, T. Harvey and K. Kennedy "A Simple, Fast Dominance Algorithm"
// Softw. Pract. Exper. 4 (2001), pp. 1-10.
// |entries| must be in postorder (root last); |dominators| receives the
// postorder index of each entry's immediate dominator. Returns false if
// the embedder cancelled via progress reporting.
bool HeapSnapshotGenerator::BuildDominatorTree(
    const Vector<HeapEntry*>& entries,
    Vector<int>* dominators) {
  if (entries.length() == 0) return true;
  HeapEntry* root = snapshot_->root();
  const int entries_length = entries.length(), root_index = entries_length - 1;
  for (int i = 0; i < root_index; ++i) (*dominators)[i] = HeapEntry::kNoEntry;
  (*dominators)[root_index] = root_index;  // The root dominates itself.

  // The affected array is used to mark entries which dominators
  // have to be recalculated because of changes in their retainers.
  ScopedVector<bool> affected(entries_length);
  for (int i = 0; i < affected.length(); ++i) affected[i] = false;
  // Mark the root direct children as affected.
  Vector<HeapGraphEdge*> children = entries[root_index]->children();
  for (int i = 0; i < children.length(); ++i) {
    affected[children[i]->to()->postorder_index()] = true;
  }

  bool changed = true;
  while (changed) {
    changed = false;
    if (!ProgressReport(false)) return false;
    // Reverse postorder, so dominator info flows from the root down.
    for (int i = root_index - 1; i >= 0; --i) {
      if (!affected[i]) continue;
      affected[i] = false;
      // If dominator of the entry has already been set to root,
      // then it can't propagate any further.
      if ((*dominators)[i] == root_index) continue;
      int new_idom_index = HeapEntry::kNoEntry;
      Vector<HeapGraphEdge*> rets = entries[i]->retainers();
      for (int j = 0; j < rets.length(); ++j) {
        if (rets[j]->from() != root && !IsRetainingEdge(rets[j])) continue;
        int ret_index = rets[j]->from()->postorder_index();
        if (dominators->at(ret_index) != HeapEntry::kNoEntry) {
          new_idom_index = new_idom_index == HeapEntry::kNoEntry
              ? ret_index
              : Intersect(ret_index, new_idom_index, *dominators);
          // If idom has already reached the root, it doesn't make sense
          // to check other retainers.
          if (new_idom_index == root_index) break;
        }
      }
      if (new_idom_index != HeapEntry::kNoEntry
          && dominators->at(i) != new_idom_index) {
        (*dominators)[i] = new_idom_index;
        changed = true;
        // Re-examine this entry's children on the next sweep.
        Vector<HeapGraphEdge*> children = entries[i]->children();
        for (int j = 0; j < children.length(); ++j) {
          affected[children[j]->to()->postorder_index()] = true;
        }
      }
    }
  }
  return true;
}
3282
3283
// Computes and stores the immediate dominator of every snapshot entry.
// Returns false if the embedder cancelled the operation.
bool HeapSnapshotGenerator::SetEntriesDominators() {
  MarkUserReachableObjects();
  // This array is used for maintaining postorder of nodes.
  ScopedVector<HeapEntry*> ordered_entries(snapshot_->entries().length());
  FillPostorderIndexes(&ordered_entries);
  ScopedVector<int> dominators(ordered_entries.length());
  if (!BuildDominatorTree(ordered_entries, &dominators)) return false;
  for (int i = 0; i < ordered_entries.length(); ++i) {
    // Every entry emitted in postorder must have received a dominator.
    ASSERT(dominators[i] != HeapEntry::kNoEntry);
    ordered_entries[i]->set_dominator(ordered_entries[dominators[i]]);
  }
  return true;
}
3297
3298
// Computes retained sizes by propagating each entry's self size up its
// dominator chain. Always succeeds.
bool HeapSnapshotGenerator::CalculateRetainedSizes() {
  // As for the dominators tree we only know parent nodes, not
  // children, to sum up total sizes we "bubble" node's self size
  // adding it to all of its parents.
  List<HeapEntry>& entries = snapshot_->entries();
  for (int i = 0; i < entries.length(); ++i) {
    HeapEntry* entry = &entries[i];
    entry->set_retained_size(entry->self_size());
  }
  for (int i = 0; i < entries.length(); ++i) {
    int entry_size = entries[i].self_size();
    HeapEntry* current = &entries[i];
    // The root dominates itself, which terminates the upward walk.
    for (HeapEntry* dominator = current->dominator();
         dominator != current;
         current = dominator, dominator = current->dominator()) {
      ASSERT(current->dominator() != NULL);
      dominator->add_retained_size(entry_size);
    }
  }
  return true;
}
3320
3321
// Maximum count of decimal characters (including a possible minus sign)
// needed to print an integral type of the given byte width.
template<int bytes> struct MaxDecimalDigitsIn;
template<> struct MaxDecimalDigitsIn<4> {
  static const int kUnsigned = 10;  // 4294967295
  static const int kSigned = 11;    // -2147483648
};
template<> struct MaxDecimalDigitsIn<8> {
  static const int kUnsigned = 20;  // 18446744073709551615
  static const int kSigned = 20;    // -9223372036854775808
};
3331
3332
// Buffers serializer output and forwards it to a v8::OutputStream in
// chunks of the size the stream requested via GetChunkSize(). Once the
// stream returns kAbort from WriteAsciiChunk, all further output is
// silently dropped; callers poll aborted() to stop serializing early.
class OutputStreamWriter {
 public:
  explicit OutputStreamWriter(v8::OutputStream* stream)
      : stream_(stream),
        chunk_size_(stream->GetChunkSize()),
        chunk_(chunk_size_),
        chunk_pos_(0),
        aborted_(false) {
    ASSERT(chunk_size_ > 0);
  }
  bool aborted() { return aborted_; }
  void AddCharacter(char c) {
    ASSERT(c != '\0');
    // Invariant: the chunk is flushed as soon as it fills (see
    // MaybeWriteChunk), so there is always room for one character here.
    ASSERT(chunk_pos_ < chunk_size_);
    chunk_[chunk_pos_++] = c;
    MaybeWriteChunk();
  }
  void AddString(const char* s) {
    AddSubstring(s, StrLength(s));
  }
  // Appends the first |n| bytes of |s|, splitting the copy across chunk
  // boundaries as needed.
  void AddSubstring(const char* s, int n) {
    if (n <= 0) return;
    ASSERT(static_cast<size_t>(n) <= strlen(s));
    const char* s_end = s + n;
    while (s < s_end) {
      // Copy as much as still fits into the current chunk.
      int s_chunk_size = Min(
          chunk_size_ - chunk_pos_, static_cast<int>(s_end - s));
      ASSERT(s_chunk_size > 0);
      memcpy(chunk_.start() + chunk_pos_, s, s_chunk_size);
      s += s_chunk_size;
      chunk_pos_ += s_chunk_size;
      MaybeWriteChunk();
    }
  }
  void AddNumber(unsigned n) { AddNumberImpl<unsigned>(n, "%u"); }
  // Flushes any buffered tail and signals end-of-stream, unless the
  // stream has already aborted.
  void Finalize() {
    if (aborted_) return;
    ASSERT(chunk_pos_ < chunk_size_);
    if (chunk_pos_ != 0) {
      WriteChunk();
    }
    stream_->EndOfStream();
  }

 private:
  template<typename T>
  void AddNumberImpl(T n, const char* format) {
    // Buffer for the longest value plus trailing \0
    static const int kMaxNumberSize =
        MaxDecimalDigitsIn<sizeof(T)>::kUnsigned + 1;
    if (chunk_size_ - chunk_pos_ >= kMaxNumberSize) {
      // Fast path: print straight into the current chunk.
      int result = OS::SNPrintF(
          chunk_.SubVector(chunk_pos_, chunk_size_), format, n);
      ASSERT(result != -1);
      chunk_pos_ += result;
      MaybeWriteChunk();
    } else {
      // Not enough room left in the chunk: format into a temporary
      // buffer and append via AddString, which handles the split.
      EmbeddedVector<char, kMaxNumberSize> buffer;
      int result = OS::SNPrintF(buffer, format, n);
      USE(result);
      ASSERT(result != -1);
      AddString(buffer.start());
    }
  }
  // Flushes the chunk iff it is completely full, re-establishing the
  // chunk_pos_ < chunk_size_ invariant relied upon by AddCharacter.
  void MaybeWriteChunk() {
    ASSERT(chunk_pos_ <= chunk_size_);
    if (chunk_pos_ == chunk_size_) {
      WriteChunk();
    }
  }
  void WriteChunk() {
    if (aborted_) return;
    if (stream_->WriteAsciiChunk(chunk_.start(), chunk_pos_) ==
        v8::OutputStream::kAbort) aborted_ = true;
    chunk_pos_ = 0;
  }

  v8::OutputStream* stream_;
  int chunk_size_;          // chunk capacity requested by the stream
  ScopedVector<char> chunk_;
  int chunk_pos_;           // number of bytes currently buffered
  bool aborted_;            // set once the stream requests an abort
};
3416
3417
// Number of fields serialized per edge: type, name|index, to_node.
const int HeapSnapshotJSONSerializer::kEdgeFieldsCount = 3;
// Number of fields serialized per node:
// type, name, id, self_size, retained_size, dominator, children_index.
const int HeapSnapshotJSONSerializer::kNodeFieldsCount = 7;
3422
3423 void HeapSnapshotJSONSerializer::Serialize(v8::OutputStream* stream) {
3424   ASSERT(writer_ == NULL);
3425   writer_ = new OutputStreamWriter(stream);
3426
3427   HeapSnapshot* original_snapshot = NULL;
3428   if (snapshot_->RawSnapshotSize() >=
3429       SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize) {
3430     // The snapshot is too big. Serialize a fake snapshot.
3431     original_snapshot = snapshot_;
3432     snapshot_ = CreateFakeSnapshot();
3433   }
3434
3435   SerializeImpl();
3436
3437   delete writer_;
3438   writer_ = NULL;
3439
3440   if (original_snapshot != NULL) {
3441     delete snapshot_;
3442     snapshot_ = original_snapshot;
3443   }
3444 }
3445
3446
3447 HeapSnapshot* HeapSnapshotJSONSerializer::CreateFakeSnapshot() {
3448   HeapSnapshot* result = new HeapSnapshot(snapshot_->collection(),
3449                                           HeapSnapshot::kFull,
3450                                           snapshot_->title(),
3451                                           snapshot_->uid());
3452   result->AddRootEntry();
3453   const char* text = snapshot_->collection()->names()->GetFormatted(
3454       "The snapshot is too big. "
3455       "Maximum snapshot size is %"  V8_PTR_PREFIX "u MB. "
3456       "Actual snapshot size is %"  V8_PTR_PREFIX "u MB.",
3457       SnapshotSizeConstants<kPointerSize>::kMaxSerializableSnapshotRawSize / MB,
3458       (snapshot_->RawSnapshotSize() + MB - 1) / MB);
3459   HeapEntry* message = result->AddEntry(HeapEntry::kString, text, 0, 4);
3460   result->root()->SetIndexedReference(HeapGraphEdge::kElement, 1, message);
3461   result->FillChildrenAndRetainers();
3462   result->SetDominatorsToSelf();
3463   return result;
3464 }
3465
3466
// Emits the whole snapshot as a single JSON object:
//   {"snapshot":{...},"nodes":[...],"edges":[...],"strings":[...]}
// Bails out between sections as soon as the writer reports an abort.
void HeapSnapshotJSONSerializer::SerializeImpl() {
  List<HeapEntry>& nodes = snapshot_->entries();
  // The root entry is expected to be first (index 0); SerializeNode
  // relies on this to decide where the separating commas go.
  ASSERT(0 == snapshot_->root()->index());
  writer_->AddCharacter('{');
  writer_->AddString("\"snapshot\":{");
  SerializeSnapshot();
  if (writer_->aborted()) return;
  writer_->AddString("},\n");
  writer_->AddString("\"nodes\":[");
  SerializeNodes(nodes);
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"edges\":[");
  SerializeEdges(nodes);
  if (writer_->aborted()) return;
  writer_->AddString("],\n");
  writer_->AddString("\"strings\":[");
  SerializeStrings();
  if (writer_->aborted()) return;
  writer_->AddCharacter(']');
  writer_->AddCharacter('}');
  writer_->Finalize();
}
3490
3491
3492 int HeapSnapshotJSONSerializer::GetStringId(const char* s) {
3493   HashMap::Entry* cache_entry = strings_.Lookup(
3494       const_cast<char*>(s), ObjectHash(s), true);
3495   if (cache_entry->value == NULL) {
3496     cache_entry->value = reinterpret_cast<void*>(next_string_id_++);
3497   }
3498   return static_cast<int>(reinterpret_cast<intptr_t>(cache_entry->value));
3499 }
3500
3501
3502 static int utoa(unsigned value, const Vector<char>& buffer, int buffer_pos) {
3503   int number_of_digits = 0;
3504   unsigned t = value;
3505   do {
3506     ++number_of_digits;
3507   } while (t /= 10);
3508
3509   buffer_pos += number_of_digits;
3510   int result = buffer_pos;
3511   do {
3512     int last_digit = value % 10;
3513     buffer[--buffer_pos] = '0' + last_digit;
3514     value /= 10;
3515   } while (value);
3516   return result;
3517 }
3518
3519
3520 void HeapSnapshotJSONSerializer::SerializeEdge(HeapGraphEdge* edge,
3521                                                bool first_edge) {
3522   // The buffer needs space for 3 ints, 3 commas and \0
3523   static const int kBufferSize =
3524       MaxDecimalDigitsIn<sizeof(int)>::kSigned * 3 + 3 + 1;  // NOLINT
3525   EmbeddedVector<char, kBufferSize> buffer;
3526   int edge_name_or_index = edge->type() == HeapGraphEdge::kElement
3527       || edge->type() == HeapGraphEdge::kHidden
3528       || edge->type() == HeapGraphEdge::kWeak
3529       ? edge->index() : GetStringId(edge->name());
3530   int buffer_pos = 0;
3531   if (!first_edge) {
3532     buffer[buffer_pos++] = ',';
3533   }
3534   buffer_pos = utoa(edge->type(), buffer, buffer_pos);
3535   buffer[buffer_pos++] = ',';
3536   buffer_pos = utoa(edge_name_or_index, buffer, buffer_pos);
3537   buffer[buffer_pos++] = ',';
3538   buffer_pos = utoa(entry_index(edge->to()), buffer, buffer_pos);
3539   buffer[buffer_pos++] = '\0';
3540   writer_->AddString(buffer.start());
3541 }
3542
3543
3544 void HeapSnapshotJSONSerializer::SerializeEdges(const List<HeapEntry>& nodes) {
3545   bool first_edge = true;
3546   for (int i = 0; i < nodes.length(); ++i) {
3547     HeapEntry* entry = &nodes[i];
3548     Vector<HeapGraphEdge*> children = entry->children();
3549     for (int j = 0; j < children.length(); ++j) {
3550       SerializeEdge(children[j], first_edge);
3551       first_edge = false;
3552       if (writer_->aborted()) return;
3553     }
3554   }
3555 }
3556
3557
3558 void HeapSnapshotJSONSerializer::SerializeNode(HeapEntry* entry,
3559                                                int edges_index) {
3560   // The buffer needs space for 6 ints, 1 uint32_t, 7 commas, \n and \0
3561   static const int kBufferSize =
3562       6 * MaxDecimalDigitsIn<sizeof(int)>::kSigned  // NOLINT
3563       + MaxDecimalDigitsIn<sizeof(uint32_t)>::kUnsigned  // NOLINT
3564       + 7 + 1 + 1;
3565   EmbeddedVector<char, kBufferSize> buffer;
3566   int buffer_pos = 0;
3567   if (entry_index(entry) != 0) {
3568     buffer[buffer_pos++] = ',';
3569   }
3570   buffer_pos = utoa(entry->type(), buffer, buffer_pos);
3571   buffer[buffer_pos++] = ',';
3572   buffer_pos = utoa(GetStringId(entry->name()), buffer, buffer_pos);
3573   buffer[buffer_pos++] = ',';
3574   buffer_pos = utoa(entry->id(), buffer, buffer_pos);
3575   buffer[buffer_pos++] = ',';
3576   buffer_pos = utoa(entry->self_size(), buffer, buffer_pos);
3577   buffer[buffer_pos++] = ',';
3578   buffer_pos = utoa(entry->retained_size(), buffer, buffer_pos);
3579   buffer[buffer_pos++] = ',';
3580   buffer_pos = utoa(entry_index(entry->dominator()), buffer, buffer_pos);
3581   buffer[buffer_pos++] = ',';
3582   buffer_pos = utoa(edges_index, buffer, buffer_pos);
3583   buffer[buffer_pos++] = '\n';
3584   buffer[buffer_pos++] = '\0';
3585   writer_->AddString(buffer.start());
3586 }
3587
3588
3589 void HeapSnapshotJSONSerializer::SerializeNodes(const List<HeapEntry>& nodes) {
3590   int edges_index = 0;
3591   for (int i = 0; i < nodes.length(); ++i) {
3592     HeapEntry* entry = &nodes[i];
3593     SerializeNode(entry, edges_index);
3594     edges_index += entry->children().length() * kEdgeFieldsCount;
3595     if (writer_->aborted()) return;
3596   }
3597 }
3598
3599
// Emits the body of the "snapshot" section: title, uid, a "meta" object
// describing the field layout used by SerializeNode/SerializeEdge, and
// the node/edge counts. The meta layout must stay in sync with
// kNodeFieldsCount/kEdgeFieldsCount and with the enum orders of
// HeapEntry::Type and HeapGraphEdge::Type that the serializers emit as
// raw integers.
void HeapSnapshotJSONSerializer::SerializeSnapshot() {
  writer_->AddString("\"title\":\"");
  writer_->AddString(snapshot_->title());
  writer_->AddString("\"");
  writer_->AddString(",\"uid\":");
  writer_->AddNumber(snapshot_->uid());
  writer_->AddString(",\"meta\":");
  // The object describing node serialization layout.
  // We use a set of macros to improve readability.
#define JSON_A(s) "[" s "]"
#define JSON_O(s) "{" s "}"
#define JSON_S(s) "\"" s "\""
  writer_->AddString(JSON_O(
    JSON_S("node_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name") ","
        JSON_S("id") ","
        JSON_S("self_size") ","
        JSON_S("retained_size") ","
        JSON_S("dominator") ","
        JSON_S("edges_index")) ","
    JSON_S("node_types") ":" JSON_A(
        JSON_A(
            JSON_S("hidden") ","
            JSON_S("array") ","
            JSON_S("string") ","
            JSON_S("object") ","
            JSON_S("code") ","
            JSON_S("closure") ","
            JSON_S("regexp") ","
            JSON_S("number") ","
            JSON_S("native") ","
            JSON_S("synthetic")) ","
        JSON_S("string") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number") ","
        JSON_S("number")) ","
    JSON_S("edge_fields") ":" JSON_A(
        JSON_S("type") ","
        JSON_S("name_or_index") ","
        JSON_S("to_node")) ","
    JSON_S("edge_types") ":" JSON_A(
        JSON_A(
            JSON_S("context") ","
            JSON_S("element") ","
            JSON_S("property") ","
            JSON_S("internal") ","
            JSON_S("hidden") ","
            JSON_S("shortcut") ","
            JSON_S("weak")) ","
        JSON_S("string_or_number") ","
        JSON_S("node"))));
#undef JSON_S
#undef JSON_O
#undef JSON_A
  writer_->AddString(",\"node_count\":");
  writer_->AddNumber(snapshot_->entries().length());
  writer_->AddString(",\"edge_count\":");
  writer_->AddNumber(snapshot_->edges().length());
}
3662
3663
3664 static void WriteUChar(OutputStreamWriter* w, unibrow::uchar u) {
3665   static const char hex_chars[] = "0123456789ABCDEF";
3666   w->AddString("\\u");
3667   w->AddCharacter(hex_chars[(u >> 12) & 0xf]);
3668   w->AddCharacter(hex_chars[(u >> 8) & 0xf]);
3669   w->AddCharacter(hex_chars[(u >> 4) & 0xf]);
3670   w->AddCharacter(hex_chars[u & 0xf]);
3671 }
3672
// Writes |s| as a double-quoted JSON string, escaping control and
// non-ASCII characters. A leading '\n' is emitted before the opening
// quote so each string appears on its own line in the output.
void HeapSnapshotJSONSerializer::SerializeString(const unsigned char* s) {
  writer_->AddCharacter('\n');
  writer_->AddCharacter('\"');
  for ( ; *s != '\0'; ++s) {
    switch (*s) {
      case '\b':
        writer_->AddString("\\b");
        continue;
      case '\f':
        writer_->AddString("\\f");
        continue;
      case '\n':
        writer_->AddString("\\n");
        continue;
      case '\r':
        writer_->AddString("\\r");
        continue;
      case '\t':
        writer_->AddString("\\t");
        continue;
      case '\"':
      case '\\':
        // Quote and backslash get a simple backslash escape.
        writer_->AddCharacter('\\');
        writer_->AddCharacter(*s);
        continue;
      default:
        if (*s > 31 && *s < 128) {
          // Printable ASCII is copied verbatim.
          writer_->AddCharacter(*s);
        } else if (*s <= 31) {
          // Special character with no dedicated literal.
          WriteUChar(writer_, *s);
        } else {
          // Convert UTF-8 into \u UTF-16 literal.
          // Count how many bytes (up to 4 beyond the lead byte) are
          // available before the terminator for the decoder to consume.
          unsigned length = 1, cursor = 0;
          for ( ; length <= 4 && *(s + length) != '\0'; ++length) { }
          unibrow::uchar c = unibrow::Utf8::CalculateValue(s, length, &cursor);
          if (c != unibrow::Utf8::kBadChar) {
            WriteUChar(writer_, c);
            // Skip the extra bytes the decoder consumed; the loop's ++s
            // accounts for the lead byte itself.
            ASSERT(cursor != 0);
            s += cursor - 1;
          } else {
            // Undecodable byte sequence: emit a placeholder instead.
            writer_->AddCharacter('?');
          }
        }
    }
  }
  writer_->AddCharacter('\"');
}
3721
3722
3723 void HeapSnapshotJSONSerializer::SerializeStrings() {
3724   List<HashMap::Entry*> sorted_strings;
3725   SortHashMap(&strings_, &sorted_strings);
3726   writer_->AddString("\"<dummy>\"");
3727   for (int i = 0; i < sorted_strings.length(); ++i) {
3728     writer_->AddCharacter(',');
3729     SerializeString(
3730         reinterpret_cast<const unsigned char*>(sorted_strings[i]->key));
3731     if (writer_->aborted()) return;
3732   }
3733 }
3734
3735
// Three-way comparator ordering hash-map entries by their value field
// interpreted as an unsigned integer (the interned string ids live
// there), for use with List::Sort.
template<typename T>
inline static int SortUsingEntryValue(const T* x, const T* y) {
  uintptr_t lhs = reinterpret_cast<uintptr_t>((*x)->value);
  uintptr_t rhs = reinterpret_cast<uintptr_t>((*y)->value);
  if (lhs < rhs) return -1;
  return lhs == rhs ? 0 : 1;
}
3748
3749
3750 void HeapSnapshotJSONSerializer::SortHashMap(
3751     HashMap* map, List<HashMap::Entry*>* sorted_entries) {
3752   for (HashMap::Entry* p = map->Start(); p != NULL; p = map->Next(p))
3753     sorted_entries->Add(p);
3754   sorted_entries->Sort(SortUsingEntryValue);
3755 }
3756
3757 } }  // namespace v8::internal