deps: upgrade v8 to 3.31.74.1
deps/v8/src/heap/gc-tracer.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap/gc-tracer.h"

namespace v8 {
namespace internal {

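// Sums the wasted and currently free bytes of every old space, giving a
// rough measure of fragmentation ("holes") in the old generation.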
static intptr_t CountTotalHolesSize(Heap* heap) {
  intptr_t holes_size = 0;
  OldSpaces spaces(heap);
  for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
    holes_size += space->Waste() + space->Available();
  }
  return holes_size;
}


GCTracer::AllocationEvent::AllocationEvent(double duration,
                                           intptr_t allocation_in_bytes) {
  duration_ = duration;
  allocation_in_bytes_ = allocation_in_bytes;
}


GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
  time_ = time;
}


GCTracer::SurvivalEvent::SurvivalEvent(double survival_rate) {
  survival_rate_ = survival_rate;
}


GCTracer::Event::Event(Type type, const char* gc_reason,
                       const char* collector_reason)
    : type(type),
      gc_reason(gc_reason),
      collector_reason(collector_reason),
      start_time(0.0),
      end_time(0.0),
      start_object_size(0),
      end_object_size(0),
      start_memory_size(0),
      end_memory_size(0),
      start_holes_size(0),
      end_holes_size(0),
      cumulative_incremental_marking_steps(0),
      incremental_marking_steps(0),
      cumulative_incremental_marking_bytes(0),
      incremental_marking_bytes(0),
      cumulative_incremental_marking_duration(0.0),
      incremental_marking_duration(0.0),
      cumulative_pure_incremental_marking_duration(0.0),
      pure_incremental_marking_duration(0.0),
      longest_incremental_marking_step(0.0) {
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    scopes[i] = 0;
  }
}


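// Returns a human-readable name for the event type; the short form is used
// in --trace-gc-nvp output, the long form in --trace-gc output.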
const char* GCTracer::Event::TypeName(bool short_name) const {
  switch (type) {
    case SCAVENGER:
      if (short_name) {
        return "s";
      } else {
        return "Scavenge";
      }
    case MARK_COMPACTOR:
    case INCREMENTAL_MARK_COMPACTOR:
      if (short_name) {
        return "ms";
      } else {
        return "Mark-sweep";
      }
    case START:
      if (short_name) {
        return "st";
      } else {
        return "Start";
      }
  }
  return "Unknown Event Type";
}


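// The tracer starts out with an artificial START event whose end time is
// "now", so that the very first real GC has a previous event to diff against.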
GCTracer::GCTracer(Heap* heap)
    : heap_(heap),
      cumulative_incremental_marking_steps_(0),
      cumulative_incremental_marking_bytes_(0),
      cumulative_incremental_marking_duration_(0.0),
      cumulative_pure_incremental_marking_duration_(0.0),
      longest_incremental_marking_step_(0.0),
      cumulative_marking_duration_(0.0),
      cumulative_sweeping_duration_(0.0),
      new_space_top_after_gc_(0),
      start_counter_(0) {
  current_ = Event(Event::START, NULL, NULL);
  current_.end_time = base::OS::TimeCurrentMillis();
  previous_ = previous_incremental_mark_compactor_event_ = current_;
}


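// Begins a new trace event. Nested Start/Stop pairs are counted via
// start_counter_ and ignored; only the outermost Start() snapshots heap sizes
// and the cumulative incremental marking counters for the new event.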
void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                     const char* collector_reason) {
  start_counter_++;
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = base::OS::TimeCurrentMillis();
  if (new_space_top_after_gc_ != 0) {
    AddNewSpaceAllocationTime(
        start_time - previous_.end_time,
        reinterpret_cast<intptr_t>((heap_->new_space()->top()) -
                                   new_space_top_after_gc_));
  }
  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR)
    previous_incremental_mark_compactor_event_ = current_;

  if (collector == SCAVENGER) {
    current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
  } else if (collector == MARK_COMPACTOR) {
    if (heap_->incremental_marking()->WasActivated()) {
      current_ =
          Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
    } else {
      current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
    }
  }

  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size =
      heap_->new_space()->top() - heap_->new_space()->bottom();

  current_.cumulative_incremental_marking_steps =
      cumulative_incremental_marking_steps_;
  current_.cumulative_incremental_marking_bytes =
      cumulative_incremental_marking_bytes_;
  current_.cumulative_incremental_marking_duration =
      cumulative_incremental_marking_duration_;
  current_.cumulative_pure_incremental_marking_duration =
      cumulative_pure_incremental_marking_duration_;
  current_.longest_incremental_marking_step = longest_incremental_marking_step_;

  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
  }
}


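// Ends the current trace event. Per-event incremental marking statistics are
// derived as deltas of the cumulative counters, the event is pushed into the
// ring buffer matching its collector type, and --trace-gc output is emitted
// if requested.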
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    if (FLAG_trace_gc) {
      PrintF("[Finished reentrant %s during %s.]\n",
             collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
             current_.TypeName(false));
    }
    return;
  }

  DCHECK(start_counter_ >= 0);
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  current_.end_time = base::OS::TimeCurrentMillis();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  new_space_top_after_gc_ =
      reinterpret_cast<intptr_t>(heap_->new_space()->top());

  if (current_.type == Event::SCAVENGER) {
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_.cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_.cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_.cumulative_incremental_marking_duration;
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_.cumulative_pure_incremental_marking_duration;
    scavenger_events_.push_front(current_);
  } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_duration;
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_incremental_mark_compactor_event_
            .cumulative_pure_incremental_marking_duration;
    longest_incremental_marking_step_ = 0.0;
    incremental_mark_compactor_events_.push_front(current_);
  } else {
    DCHECK(current_.incremental_marking_bytes == 0);
    DCHECK(current_.incremental_marking_duration == 0);
    DCHECK(current_.pure_incremental_marking_duration == 0);
    DCHECK(longest_incremental_marking_step_ == 0.0);
    mark_compactor_events_.push_front(current_);
  }

  // TODO(ernstm): move the code below out of GCTracer.

  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;

  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);

  heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
                                      current_.scopes[Scope::MC_MARK]);

  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
    return;

  if (FLAG_trace_gc) {
    if (FLAG_trace_gc_nvp)
      PrintNVP();
    else
      Print();

    heap_->PrintShortHeapStatistics();
  }
}


void GCTracer::AddNewSpaceAllocationTime(double duration,
                                         intptr_t allocation_in_bytes) {
  allocation_events_.push_front(AllocationEvent(duration, allocation_in_bytes));
}


void GCTracer::AddContextDisposalTime(double time) {
  context_disposal_events_.push_front(ContextDisposalEvent(time));
}


void GCTracer::AddSurvivalRate(double survival_rate) {
  survival_events_.push_front(SurvivalEvent(survival_rate));
}


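// Accounts for a single incremental marking step. Steps that marked no bytes
// add to the overall marking time but not to the "pure" incremental marking
// duration used for speed estimates.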
void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
  cumulative_incremental_marking_steps_++;
  cumulative_incremental_marking_bytes_ += bytes;
  cumulative_incremental_marking_duration_ += duration;
  longest_incremental_marking_step_ =
      Max(longest_incremental_marking_step_, duration);
  cumulative_marking_duration_ += duration;
  if (bytes > 0) {
    cumulative_pure_incremental_marking_duration_ += duration;
  }
}


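// Prints the one-line human-readable summary used by --trace-gc.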
void GCTracer::Print() const {
  PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());

  PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
         static_cast<double>(current_.start_object_size) / MB,
         static_cast<double>(current_.start_memory_size) / MB,
         static_cast<double>(current_.end_object_size) / MB,
         static_cast<double>(current_.end_memory_size) / MB);

  int external_time = static_cast<int>(current_.scopes[Scope::EXTERNAL]);
  if (external_time > 0) PrintF("%d / ", external_time);

  double duration = current_.end_time - current_.start_time;
  PrintF("%.1f ms", duration);
  if (current_.type == Event::SCAVENGER) {
    if (current_.incremental_marking_steps > 0) {
      PrintF(" (+ %.1f ms in %d steps since last GC)",
             current_.incremental_marking_duration,
             current_.incremental_marking_steps);
    }
  } else {
    if (current_.incremental_marking_steps > 0) {
      PrintF(
          " (+ %.1f ms in %d steps since start of marking, "
          "biggest step %.1f ms)",
          current_.incremental_marking_duration,
          current_.incremental_marking_steps,
          current_.longest_incremental_marking_step);
    }
  }

  if (current_.gc_reason != NULL) {
    PrintF(" [%s]", current_.gc_reason);
  }

  if (current_.collector_reason != NULL) {
    PrintF(" [%s]", current_.collector_reason);
  }

  PrintF(".\n");
}


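// Prints the name=value summary used by --trace-gc-nvp.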
void GCTracer::PrintNVP() const {
  PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());

  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = current_.start_time - previous_.end_time;

  PrintF("pause=%.1f ", duration);
  PrintF("mutator=%.1f ", spent_in_mutator);
  PrintF("gc=%s ", current_.TypeName(true));

  PrintF("external=%.1f ", current_.scopes[Scope::EXTERNAL]);
  PrintF("mark=%.1f ", current_.scopes[Scope::MC_MARK]);
  PrintF("sweep=%.2f ", current_.scopes[Scope::MC_SWEEP]);
  PrintF("sweepns=%.2f ", current_.scopes[Scope::MC_SWEEP_NEWSPACE]);
  PrintF("sweepos=%.2f ", current_.scopes[Scope::MC_SWEEP_OLDSPACE]);
  PrintF("sweepcode=%.2f ", current_.scopes[Scope::MC_SWEEP_CODE]);
  PrintF("sweepcell=%.2f ", current_.scopes[Scope::MC_SWEEP_CELL]);
  PrintF("sweepmap=%.2f ", current_.scopes[Scope::MC_SWEEP_MAP]);
  PrintF("evacuate=%.1f ", current_.scopes[Scope::MC_EVACUATE_PAGES]);
  PrintF("new_new=%.1f ",
         current_.scopes[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]);
  PrintF("root_new=%.1f ",
         current_.scopes[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]);
  PrintF("old_new=%.1f ",
         current_.scopes[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]);
  PrintF("compaction_ptrs=%.1f ",
         current_.scopes[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]);
  PrintF("intracompaction_ptrs=%.1f ",
         current_.scopes[Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]);
  PrintF("misc_compaction=%.1f ",
         current_.scopes[Scope::MC_UPDATE_MISC_POINTERS]);
  PrintF("weak_closure=%.1f ", current_.scopes[Scope::MC_WEAKCLOSURE]);
  PrintF("weakcollection_process=%.1f ",
         current_.scopes[Scope::MC_WEAKCOLLECTION_PROCESS]);
  PrintF("weakcollection_clear=%.1f ",
         current_.scopes[Scope::MC_WEAKCOLLECTION_CLEAR]);
  PrintF("weakcollection_abort=%.1f ",
         current_.scopes[Scope::MC_WEAKCOLLECTION_ABORT]);

  PrintF("total_size_before=%" V8_PTR_PREFIX "d ", current_.start_object_size);
  PrintF("total_size_after=%" V8_PTR_PREFIX "d ", current_.end_object_size);
  PrintF("holes_size_before=%" V8_PTR_PREFIX "d ", current_.start_holes_size);
  PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", current_.end_holes_size);

  intptr_t allocated_since_last_gc =
      current_.start_object_size - previous_.end_object_size;
  PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc);
  PrintF("promoted=%" V8_PTR_PREFIX "d ", heap_->promoted_objects_size_);
  PrintF("semi_space_copied=%" V8_PTR_PREFIX "d ",
         heap_->semi_space_copied_object_size_);
  PrintF("nodes_died_in_new=%d ", heap_->nodes_died_in_new_space_);
  PrintF("nodes_copied_in_new=%d ", heap_->nodes_copied_in_new_space_);
  PrintF("nodes_promoted=%d ", heap_->nodes_promoted_);
  PrintF("promotion_ratio=%.1f%% ", heap_->promotion_ratio_);
  PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
  PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);
  PrintF("average_survival_rate=%.1f%% ", AverageSurvivalRate());
  PrintF("new_space_allocation_throughput=%" V8_PTR_PREFIX "d ",
         NewSpaceAllocationThroughputInBytesPerMillisecond());
  PrintF("context_disposal_rate=%.1f ", ContextDisposalRateInMilliseconds());

  if (current_.type == Event::SCAVENGER) {
    PrintF("steps_count=%d ", current_.incremental_marking_steps);
    PrintF("steps_took=%.1f ", current_.incremental_marking_duration);
    PrintF("scavenge_throughput=%" V8_PTR_PREFIX "d ",
           ScavengeSpeedInBytesPerMillisecond());
  } else {
    PrintF("steps_count=%d ", current_.incremental_marking_steps);
    PrintF("steps_took=%.1f ", current_.incremental_marking_duration);
    PrintF("longest_step=%.1f ", current_.longest_incremental_marking_step);
    PrintF("incremental_marking_throughput=%" V8_PTR_PREFIX "d ",
           IncrementalMarkingSpeedInBytesPerMillisecond());
  }

  PrintF("\n");
}


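// Mean pause duration over all events in the given ring buffer, or 0 if the
// buffer is empty.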
double GCTracer::MeanDuration(const EventBuffer& events) const {
  if (events.empty()) return 0.0;

  double mean = 0.0;
  EventBuffer::const_iterator iter = events.begin();
  while (iter != events.end()) {
    mean += iter->end_time - iter->start_time;
    ++iter;
  }

  return mean / events.size();
}


double GCTracer::MaxDuration(const EventBuffer& events) const {
  if (events.empty()) return 0.0;

  double maximum = 0.0;
  EventBuffer::const_iterator iter = events.begin();
  while (iter != events.end()) {
    maximum = Max(iter->end_time - iter->start_time, maximum);
    ++iter;
  }

  return maximum;
}


double GCTracer::MeanIncrementalMarkingDuration() const {
  if (cumulative_incremental_marking_steps_ == 0) return 0.0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
  if (incremental_mark_compactor_events_.empty()) {
    return cumulative_incremental_marking_duration_ /
           cumulative_incremental_marking_steps_;
  }

  int steps = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    steps += iter->incremental_marking_steps;
    durations += iter->incremental_marking_duration;
    ++iter;
  }

  if (steps == 0) return 0.0;

  return durations / steps;
}


double GCTracer::MaxIncrementalMarkingDuration() const {
  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
  if (incremental_mark_compactor_events_.empty())
    return longest_incremental_marking_step_;

  double max_duration = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    max_duration = Max(iter->longest_incremental_marking_step, max_duration);
    ++iter;
  }

  return max_duration;
}


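// Marking speed is the number of bytes marked incrementally divided by the
// "pure" incremental marking time, i.e. excluding steps that marked nothing.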
intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  if (cumulative_incremental_marking_duration_ == 0.0) return 0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
  if (incremental_mark_compactor_events_.empty()) {
    return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ /
                                 cumulative_pure_incremental_marking_duration_);
  }

  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    bytes += iter->incremental_marking_bytes;
    durations += iter->pure_incremental_marking_duration;
    ++iter;
  }

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
}


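// Scavenge speed is approximated as the new space size at the start of each
// scavenge divided by that scavenge's pause time.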
intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond() const {
  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = scavenger_events_.begin();
  while (iter != scavenger_events_.end()) {
    bytes += iter->new_space_object_size;
    durations += iter->end_time - iter->start_time;
    ++iter;
  }

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
}


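// Speed of full (non-incremental) mark-compact collections: object size at
// the start of collection divided by the total pause time.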
intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = mark_compactor_events_.begin();
  while (iter != mark_compactor_events_.end()) {
    bytes += iter->start_object_size;
    durations += iter->end_time - iter->start_time;
    ++iter;
  }

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
}


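// Speed of the final atomic pause of incremental mark-compact collections,
// measured the same way as for full mark-compacts.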
intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
    const {
  intptr_t bytes = 0;
  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    bytes += iter->start_object_size;
    durations += iter->end_time - iter->start_time;
    ++iter;
  }

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
}


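// New space allocation throughput: bytes allocated between consecutive GCs
// divided by the mutator time between them.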
intptr_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const {
  intptr_t bytes = 0;
  double durations = 0.0;
  AllocationEventBuffer::const_iterator iter = allocation_events_.begin();
  while (iter != allocation_events_.end()) {
    bytes += iter->allocation_in_bytes_;
    durations += iter->duration_;
    ++iter;
  }

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
}


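// Average time per context disposal over the recorded window (from the oldest
// recorded disposal until now); returns 0 until the ring buffer holds
// kRingBufferMaxSize disposals.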
double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0;

  double begin = base::OS::TimeCurrentMillis();
  double end = 0.0;
  ContextDisposalEventBuffer::const_iterator iter =
      context_disposal_events_.begin();
  while (iter != context_disposal_events_.end()) {
    end = iter->time_;
    ++iter;
  }

  return (begin - end) / context_disposal_events_.size();
}


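// Arithmetic mean of the recorded new space survival rates, or 0 if none
// have been recorded.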
double GCTracer::AverageSurvivalRate() const {
  if (survival_events_.size() == 0) return 0.0;

  double sum_of_rates = 0.0;
  SurvivalEventBuffer::const_iterator iter = survival_events_.begin();
  while (iter != survival_events_.end()) {
    sum_of_rates += iter->survival_rate_;
    ++iter;
  }

  return sum_of_rates / static_cast<double>(survival_events_.size());
}


bool GCTracer::SurvivalEventsRecorded() const {
  return survival_events_.size() > 0;
}


void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); }
}
}  // namespace v8::internal