1 // Copyright 2014 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/heap/gc-tracer.h"
// Returns the total "hole" size of |heap|: the sum of wasted bytes and
// still-available free-list bytes across every old space.  Used by
// Start()/Stop() to snapshot fragmentation before and after a GC.
static intptr_t CountTotalHolesSize(Heap* heap) {
  intptr_t holes_size = 0;
  OldSpaces spaces(heap);
  // Iterate all old spaces; the OldSpaces iterator yields NULL when done.
  for (OldSpace* space = spaces.next(); space != NULL; space = spaces.next()) {
    holes_size += space->Waste() + space->Available();
// Records one new-space allocation sample: |duration| is the mutator time
// window in milliseconds and |allocation_in_bytes| the bytes allocated in
// new space during that window (see AddNewSpaceAllocationTime()).
GCTracer::AllocationEvent::AllocationEvent(double duration,
                                           intptr_t allocation_in_bytes) {
  allocation_in_bytes_ = allocation_in_bytes;
// Records the wall-clock timestamp (ms) at which a context was disposed;
// consumed by ContextDisposalRateInMilliseconds().
GCTracer::ContextDisposalEvent::ContextDisposalEvent(double time) {
// Stores one survival-rate sample (reported as a percentage by
// AverageSurvivalRate() / PrintNVP()).
GCTracer::SurvivalEvent::SurvivalEvent(double survival_rate) {
  survival_rate_ = survival_rate;
// Creates a GC event of the given |type|.  |gc_reason| and
// |collector_reason| may be NULL; when set they are echoed by Print().
// All incremental-marking counters start at zero — Start() copies in the
// tracer-wide cumulative values and Stop() derives the per-event deltas.
GCTracer::Event::Event(Type type, const char* gc_reason,
                       const char* collector_reason)
      collector_reason(collector_reason),
      cumulative_incremental_marking_steps(0),
      incremental_marking_steps(0),
      cumulative_incremental_marking_bytes(0),
      incremental_marking_bytes(0),
      cumulative_incremental_marking_duration(0.0),
      incremental_marking_duration(0.0),
      cumulative_pure_incremental_marking_duration(0.0),
      pure_incremental_marking_duration(0.0),
      longest_incremental_marking_step(0.0) {
  // Zero every per-phase timing bucket.
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
// Maps this event's type to a printable name.  |short_name| selects the
// abbreviated spelling used by PrintNVP() ("gc=...") over the long form
// used by Print().
const char* GCTracer::Event::TypeName(bool short_name) const {
    case INCREMENTAL_MARK_COMPACTOR:
  // Fallback for types without a dedicated name (e.g. the synthetic START
  // event installed by the GCTracer constructor).
  return "Unknown Event Type";
// Constructs a tracer for |heap| with all cumulative counters zeroed and a
// synthetic START event installed, so current_, previous_ and
// previous_incremental_mark_compactor_event_ are valid before the first GC.
GCTracer::GCTracer(Heap* heap)
      cumulative_incremental_marking_steps_(0),
      cumulative_incremental_marking_bytes_(0),
      cumulative_incremental_marking_duration_(0.0),
      cumulative_pure_incremental_marking_duration_(0.0),
      longest_incremental_marking_step_(0.0),
      cumulative_marking_duration_(0.0),
      cumulative_sweeping_duration_(0.0),
      new_space_top_after_gc_(0),
  current_ = Event(Event::START, NULL, NULL);
  // Stamp an end time on the synthetic event so the first real GC can
  // compute a sensible mutator interval in Start().
  current_.end_time = base::OS::TimeCurrentMillis();
  previous_ = previous_incremental_mark_compactor_event_ = current_;
// Begins tracing one GC pause.  Only the outermost call takes effect;
// reentrant (nested) starts bail out via the start_counter_ check.
// Snapshots heap state at pause start and seeds the new current_ event
// with the tracer-wide cumulative incremental-marking counters so that
// Stop() can compute per-event deltas.
void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                     const char* collector_reason) {
  if (start_counter_ != 1) return;

  previous_ = current_;
  double start_time = base::OS::TimeCurrentMillis();
  // Record how much new space the mutator allocated since the last GC.
  // Skipped before the first GC, when new_space_top_after_gc_ is still 0.
  if (new_space_top_after_gc_ != 0) {
    AddNewSpaceAllocationTime(
        start_time - previous_.end_time,
        // NOTE(review): the cast wraps (pointer - intptr_t), yielding the
        // same numeric difference as casting top() first — confirm this
        // parenthesization is intentional.
        reinterpret_cast<intptr_t>((heap_->new_space()->top()) -
                                   new_space_top_after_gc_));
  // Remember the last completed incremental mark-compact event; Stop()
  // computes incremental deltas for the next such event against it.
  if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR)
    previous_incremental_mark_compactor_event_ = current_;

  // Classify the new event: scavenges are SCAVENGER; a mark-compact becomes
  // INCREMENTAL_MARK_COMPACTOR when incremental marking was active.
  if (collector == SCAVENGER) {
    current_ = Event(Event::SCAVENGER, gc_reason, collector_reason);
  } else if (collector == MARK_COMPACTOR) {
    if (heap_->incremental_marking()->WasActivated()) {
        Event(Event::INCREMENTAL_MARK_COMPACTOR, gc_reason, collector_reason);
      current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);

  // Snapshot heap state at the start of the pause.
  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.new_space_object_size =
      heap_->new_space()->top() - heap_->new_space()->bottom();

  // Copy the tracer-wide cumulative counters into the event; Stop()
  // subtracts the previous event's copies to get per-event values.
  current_.cumulative_incremental_marking_steps =
      cumulative_incremental_marking_steps_;
  current_.cumulative_incremental_marking_bytes =
      cumulative_incremental_marking_bytes_;
  current_.cumulative_incremental_marking_duration =
      cumulative_incremental_marking_duration_;
  current_.cumulative_pure_incremental_marking_duration =
      cumulative_pure_incremental_marking_duration_;
  current_.longest_incremental_marking_step = longest_incremental_marking_step_;

  // Reset per-phase timers for the new pause.
  for (int i = 0; i < Scope::NUMBER_OF_SCOPES; i++) {
    current_.scopes[i] = 0;
// Ends tracing of the current GC pause: snapshots end-of-pause heap state,
// derives per-event incremental-marking deltas, archives the event in the
// ring buffer matching its type, and emits trace output when the
// corresponding flags are set.
void GCTracer::Stop(GarbageCollector collector) {
  // Reentrant (nested) Stop calls only log and return.
  if (start_counter_ != 0) {
    PrintF("[Finished reentrant %s during %s.]\n",
           collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
           current_.TypeName(false));

  DCHECK(start_counter_ >= 0);
  // The collector passed here must match the event type chosen by Start().
  DCHECK((collector == SCAVENGER && current_.type == Event::SCAVENGER) ||
         (collector == MARK_COMPACTOR &&
          (current_.type == Event::MARK_COMPACTOR ||
           current_.type == Event::INCREMENTAL_MARK_COMPACTOR)));

  // Snapshot heap state at the end of the pause.
  current_.end_time = base::OS::TimeCurrentMillis();
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
  // Remember the new-space top so the next Start() can compute the bytes
  // the mutator allocated between GCs.
  new_space_top_after_gc_ =
      reinterpret_cast<intptr_t>(heap_->new_space()->top());

  if (current_.type == Event::SCAVENGER) {
    // Scavenge: deltas are relative to the immediately preceding event.
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_.cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_.cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_.cumulative_incremental_marking_duration;
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_.cumulative_pure_incremental_marking_duration;
    scavenger_events_.push_front(current_);
  } else if (current_.type == Event::INCREMENTAL_MARK_COMPACTOR) {
    // Incremental mark-compact: deltas span the whole incremental round,
    // i.e. they are relative to the previous incremental MC event.
    current_.incremental_marking_steps =
        current_.cumulative_incremental_marking_steps -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_steps;
    current_.incremental_marking_bytes =
        current_.cumulative_incremental_marking_bytes -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_bytes;
    current_.incremental_marking_duration =
        current_.cumulative_incremental_marking_duration -
        previous_incremental_mark_compactor_event_
            .cumulative_incremental_marking_duration;
    current_.pure_incremental_marking_duration =
        current_.cumulative_pure_incremental_marking_duration -
        previous_incremental_mark_compactor_event_
            .cumulative_pure_incremental_marking_duration;
    // The longest-step maximum restarts after each completed round.
    longest_incremental_marking_step_ = 0.0;
    incremental_mark_compactor_events_.push_front(current_);
    // Non-incremental mark-compact: no incremental-marking work may have
    // accumulated since the last event.
    DCHECK(current_.incremental_marking_bytes == 0);
    DCHECK(current_.incremental_marking_duration == 0);
    DCHECK(current_.pure_incremental_marking_duration == 0);
    DCHECK(longest_incremental_marking_step_ == 0.0);
    mark_compactor_events_.push_front(current_);

  // TODO(ernstm): move the code below out of GCTracer.

  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;

  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);

  heap_->UpdateCumulativeGCStatistics(duration, spent_in_mutator,
                                      current_.scopes[Scope::MC_MARK]);

  // Scavenges can be filtered out of the trace output entirely.
  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)

  if (FLAG_trace_gc_nvp)
    heap_->PrintShortHeapStatistics();
// Archives one new-space allocation sample (mutator window |duration| ms,
// |allocation_in_bytes| allocated) in the allocation ring buffer; feeds
// NewSpaceAllocationThroughputInBytesPerMillisecond().
void GCTracer::AddNewSpaceAllocationTime(double duration,
                                         intptr_t allocation_in_bytes) {
  allocation_events_.push_front(AllocationEvent(duration, allocation_in_bytes));
// Archives one context-disposal timestamp (ms); feeds
// ContextDisposalRateInMilliseconds().
void GCTracer::AddContextDisposalTime(double time) {
  context_disposal_events_.push_front(ContextDisposalEvent(time));
// Archives one survival-rate sample; feeds AverageSurvivalRate().
void GCTracer::AddSurvivalRate(double survival_rate) {
  survival_events_.push_front(SurvivalEvent(survival_rate));
// Accounts one incremental-marking step that took |duration| ms and
// processed |bytes| bytes, updating all cumulative counters.
void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
  cumulative_incremental_marking_steps_++;
  cumulative_incremental_marking_bytes_ += bytes;
  cumulative_incremental_marking_duration_ += duration;
  // Track the single longest step; Stop() resets this after each completed
  // incremental mark-compact round.
  longest_incremental_marking_step_ =
      Max(longest_incremental_marking_step_, duration);
  cumulative_marking_duration_ += duration;
  cumulative_pure_incremental_marking_duration_ += duration;
// Emits the one-line human-readable summary of the just-finished GC
// (--trace-gc output): object/memory sizes before and after, pause time,
// an incremental-marking summary and the optional reason strings.
void GCTracer::Print() const {
  PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());

  // "<type> <objects before> (<memory before>) -> <objects after>
  //  (<memory after>) MB"
  PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
         static_cast<double>(current_.start_object_size) / MB,
         static_cast<double>(current_.start_memory_size) / MB,
         static_cast<double>(current_.end_object_size) / MB,
         static_cast<double>(current_.end_memory_size) / MB);

  // Time spent in external (embedder) callbacks, when any was recorded.
  int external_time = static_cast<int>(current_.scopes[Scope::EXTERNAL]);
  if (external_time > 0) PrintF("%d / ", external_time);

  double duration = current_.end_time - current_.start_time;
  PrintF("%.1f ms", duration);
  if (current_.type == Event::SCAVENGER) {
    // Incremental-marking steps interleaved since the last GC of any kind.
    if (current_.incremental_marking_steps > 0) {
      PrintF(" (+ %.1f ms in %d steps since last GC)",
             current_.incremental_marking_duration,
             current_.incremental_marking_steps);
    // Mark-compact path: steps since marking started, plus the largest one.
    if (current_.incremental_marking_steps > 0) {
          " (+ %.1f ms in %d steps since start of marking, "
          "biggest step %.1f ms)",
          current_.incremental_marking_duration,
          current_.incremental_marking_steps,
          current_.longest_incremental_marking_step);

  // Optional human-readable reasons supplied to Start().
  if (current_.gc_reason != NULL) {
    PrintF(" [%s]", current_.gc_reason);

  if (current_.collector_reason != NULL) {
    PrintF(" [%s]", current_.collector_reason);
321 void GCTracer::PrintNVP() const {
322 PrintPID("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
324 double duration = current_.end_time - current_.start_time;
325 double spent_in_mutator = current_.start_time - previous_.end_time;
327 PrintF("pause=%.1f ", duration);
328 PrintF("mutator=%.1f ", spent_in_mutator);
329 PrintF("gc=%s ", current_.TypeName(true));
331 PrintF("external=%.1f ", current_.scopes[Scope::EXTERNAL]);
332 PrintF("mark=%.1f ", current_.scopes[Scope::MC_MARK]);
333 PrintF("sweep=%.2f ", current_.scopes[Scope::MC_SWEEP]);
334 PrintF("sweepns=%.2f ", current_.scopes[Scope::MC_SWEEP_NEWSPACE]);
335 PrintF("sweepos=%.2f ", current_.scopes[Scope::MC_SWEEP_OLDSPACE]);
336 PrintF("sweepcode=%.2f ", current_.scopes[Scope::MC_SWEEP_CODE]);
337 PrintF("sweepcell=%.2f ", current_.scopes[Scope::MC_SWEEP_CELL]);
338 PrintF("sweepmap=%.2f ", current_.scopes[Scope::MC_SWEEP_MAP]);
339 PrintF("evacuate=%.1f ", current_.scopes[Scope::MC_EVACUATE_PAGES]);
340 PrintF("new_new=%.1f ",
341 current_.scopes[Scope::MC_UPDATE_NEW_TO_NEW_POINTERS]);
342 PrintF("root_new=%.1f ",
343 current_.scopes[Scope::MC_UPDATE_ROOT_TO_NEW_POINTERS]);
344 PrintF("old_new=%.1f ",
345 current_.scopes[Scope::MC_UPDATE_OLD_TO_NEW_POINTERS]);
346 PrintF("compaction_ptrs=%.1f ",
347 current_.scopes[Scope::MC_UPDATE_POINTERS_TO_EVACUATED]);
348 PrintF("intracompaction_ptrs=%.1f ",
349 current_.scopes[Scope::MC_UPDATE_POINTERS_BETWEEN_EVACUATED]);
350 PrintF("misc_compaction=%.1f ",
351 current_.scopes[Scope::MC_UPDATE_MISC_POINTERS]);
352 PrintF("weak_closure=%.1f ", current_.scopes[Scope::MC_WEAKCLOSURE]);
353 PrintF("weakcollection_process=%.1f ",
354 current_.scopes[Scope::MC_WEAKCOLLECTION_PROCESS]);
355 PrintF("weakcollection_clear=%.1f ",
356 current_.scopes[Scope::MC_WEAKCOLLECTION_CLEAR]);
357 PrintF("weakcollection_abort=%.1f ",
358 current_.scopes[Scope::MC_WEAKCOLLECTION_ABORT]);
360 PrintF("total_size_before=%" V8_PTR_PREFIX "d ", current_.start_object_size);
361 PrintF("total_size_after=%" V8_PTR_PREFIX "d ", current_.end_object_size);
362 PrintF("holes_size_before=%" V8_PTR_PREFIX "d ", current_.start_holes_size);
363 PrintF("holes_size_after=%" V8_PTR_PREFIX "d ", current_.end_holes_size);
365 intptr_t allocated_since_last_gc =
366 current_.start_object_size - previous_.end_object_size;
367 PrintF("allocated=%" V8_PTR_PREFIX "d ", allocated_since_last_gc);
368 PrintF("promoted=%" V8_PTR_PREFIX "d ", heap_->promoted_objects_size_);
369 PrintF("semi_space_copied=%" V8_PTR_PREFIX "d ",
370 heap_->semi_space_copied_object_size_);
371 PrintF("nodes_died_in_new=%d ", heap_->nodes_died_in_new_space_);
372 PrintF("nodes_copied_in_new=%d ", heap_->nodes_copied_in_new_space_);
373 PrintF("nodes_promoted=%d ", heap_->nodes_promoted_);
374 PrintF("promotion_ratio=%.1f%% ", heap_->promotion_ratio_);
375 PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
376 PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);
377 PrintF("average_survival_rate%.1f%% ", AverageSurvivalRate());
378 PrintF("new_space_allocation_throughput=%" V8_PTR_PREFIX "d ",
379 NewSpaceAllocationThroughputInBytesPerMillisecond());
380 PrintF("context_disposal_rate=%.1f ", ContextDisposalRateInMilliseconds());
382 if (current_.type == Event::SCAVENGER) {
383 PrintF("steps_count=%d ", current_.incremental_marking_steps);
384 PrintF("steps_took=%.1f ", current_.incremental_marking_duration);
385 PrintF("scavenge_throughput=%" V8_PTR_PREFIX "d ",
386 ScavengeSpeedInBytesPerMillisecond());
388 PrintF("steps_count=%d ", current_.incremental_marking_steps);
389 PrintF("steps_took=%.1f ", current_.incremental_marking_duration);
390 PrintF("longest_step=%.1f ", current_.longest_incremental_marking_step);
391 PrintF("incremental_marking_throughput=%" V8_PTR_PREFIX "d ",
392 IncrementalMarkingSpeedInBytesPerMillisecond());
// Returns the arithmetic mean of the pause duration (end_time - start_time)
// over all events in |events|, or 0 for an empty buffer.
double GCTracer::MeanDuration(const EventBuffer& events) const {
  if (events.empty()) return 0.0;

  EventBuffer::const_iterator iter = events.begin();
  while (iter != events.end()) {
    mean += iter->end_time - iter->start_time;

  return mean / events.size();
// Returns the longest pause duration (end_time - start_time) over all
// events in |events|, or 0 for an empty buffer.
double GCTracer::MaxDuration(const EventBuffer& events) const {
  if (events.empty()) return 0.0;

  // NOTE(review): float literal initializing a double — exact here, but
  // 0.0 would match the rest of the file.
  double maximum = 0.0f;
  EventBuffer::const_iterator iter = events.begin();
  while (iter != events.end()) {
    maximum = Max(iter->end_time - iter->start_time, maximum);
// Returns the average duration (ms) of a single incremental-marking step,
// either from the tracer's running totals (before the first incremental
// mark-compact completes) or averaged over the archived incremental
// mark-compact events.  Returns 0 when no steps have been recorded.
double GCTracer::MeanIncrementalMarkingDuration() const {
  if (cumulative_incremental_marking_steps_ == 0) return 0.0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
  if (incremental_mark_compactor_events_.empty()) {
    return cumulative_incremental_marking_duration_ /
           cumulative_incremental_marking_steps_;

  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    steps += iter->incremental_marking_steps;
    durations += iter->incremental_marking_duration;

  if (steps == 0) return 0.0;

  return durations / steps;
452 double GCTracer::MaxIncrementalMarkingDuration() const {
453 // We haven't completed an entire round of incremental marking, yet.
454 // Use data from GCTracer instead of data from event buffers.
455 if (incremental_mark_compactor_events_.empty())
456 return longest_incremental_marking_step_;
458 double max_duration = 0.0;
459 EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
460 while (iter != incremental_mark_compactor_events_.end())
461 max_duration = Max(iter->longest_incremental_marking_step, max_duration);
// Returns the incremental-marking throughput in bytes per millisecond,
// derived from the tracer's running totals before the first incremental
// mark-compact completes, otherwise from the archived events.  Returns 0
// when no incremental-marking time has been recorded.
intptr_t GCTracer::IncrementalMarkingSpeedInBytesPerMillisecond() const {
  if (cumulative_incremental_marking_duration_ == 0.0) return 0;

  // We haven't completed an entire round of incremental marking, yet.
  // Use data from GCTracer instead of data from event buffers.
  if (incremental_mark_compactor_events_.empty()) {
    // NOTE(review): the early-out above guards
    // cumulative_incremental_marking_duration_, but the divisor here is
    // cumulative_pure_incremental_marking_duration_ — confirm the two
    // cannot diverge; a zero divisor would make the intptr_t cast of the
    // resulting inf undefined behavior.
    return static_cast<intptr_t>(cumulative_incremental_marking_bytes_ /
                                 cumulative_pure_incremental_marking_duration_);

  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    bytes += iter->incremental_marking_bytes;
    durations += iter->pure_incremental_marking_duration;

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
// Returns scavenge throughput in bytes per millisecond: new-space size at
// pause start divided by pause duration, summed over the archived scavenge
// events.  Returns 0 when no time has accumulated.
intptr_t GCTracer::ScavengeSpeedInBytesPerMillisecond() const {
  double durations = 0.0;
  EventBuffer::const_iterator iter = scavenger_events_.begin();
  while (iter != scavenger_events_.end()) {
    bytes += iter->new_space_object_size;
    durations += iter->end_time - iter->start_time;

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
// Returns full (non-incremental) mark-compact throughput in bytes per
// millisecond: heap object size at pause start divided by pause duration,
// summed over the archived mark-compact events.  Returns 0 when no time
// has accumulated.
intptr_t GCTracer::MarkCompactSpeedInBytesPerMillisecond() const {
  double durations = 0.0;
  EventBuffer::const_iterator iter = mark_compactor_events_.begin();
  while (iter != mark_compactor_events_.end()) {
    bytes += iter->start_object_size;
    durations += iter->end_time - iter->start_time;

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
// Returns the throughput (bytes/ms) of the final, atomic pause of
// incremental mark-compact cycles, computed like
// MarkCompactSpeedInBytesPerMillisecond() but over the incremental
// mark-compact event buffer.  Returns 0 when no time has accumulated.
intptr_t GCTracer::FinalIncrementalMarkCompactSpeedInBytesPerMillisecond()
  double durations = 0.0;
  EventBuffer::const_iterator iter = incremental_mark_compactor_events_.begin();
  while (iter != incremental_mark_compactor_events_.end()) {
    bytes += iter->start_object_size;
    durations += iter->end_time - iter->start_time;

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
// Returns the mutator's new-space allocation throughput in bytes per
// millisecond, aggregated over the archived allocation samples recorded by
// AddNewSpaceAllocationTime().  Returns 0 when no time has accumulated.
intptr_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const {
  double durations = 0.0;
  AllocationEventBuffer::const_iterator iter = allocation_events_.begin();
  while (iter != allocation_events_.end()) {
    bytes += iter->allocation_in_bytes_;
    durations += iter->duration_;

  if (durations == 0.0) return 0;

  return static_cast<intptr_t>(bytes / durations);
// Returns the average interval (ms) between recent context disposals,
// measured from the buffered disposal timestamps to now.  Returns 0 until
// the ring buffer is full, so sparse early samples don't skew the rate.
double GCTracer::ContextDisposalRateInMilliseconds() const {
  if (context_disposal_events_.size() < kRingBufferMaxSize) return 0.0;

  double begin = base::OS::TimeCurrentMillis();

  ContextDisposalEventBuffer::const_iterator iter =
      context_disposal_events_.begin();
  while (iter != context_disposal_events_.end()) {

  return (begin - end) / context_disposal_events_.size();
// Returns the arithmetic mean of all buffered survival-rate samples, or 0
// when none have been recorded.
double GCTracer::AverageSurvivalRate() const {
  if (survival_events_.size() == 0) return 0.0;

  double sum_of_rates = 0.0;
  SurvivalEventBuffer::const_iterator iter = survival_events_.begin();
  while (iter != survival_events_.end()) {
    sum_of_rates += iter->survival_rate_;

  return sum_of_rates / static_cast<double>(survival_events_.size());
// Returns true when at least one survival-rate sample is buffered (i.e.
// since the last ResetSurvivalEvents()).
bool GCTracer::SurvivalEventsRecorded() const {
  return survival_events_.size() > 0;
592 void GCTracer::ResetSurvivalEvents() { survival_events_.reset(); }
594 } // namespace v8::internal