}
+// Records one mutator interval: the time spent (ms) and the new-space bytes
+// allocated between the end of one GC and the start of the next.
+GCTracer::AllocationEvent::AllocationEvent(double duration,
+                                           intptr_t allocation_in_bytes)
+    : duration_(duration), allocation_in_bytes_(allocation_in_bytes) {}
+
+
GCTracer::Event::Event(Type type, const char* gc_reason,
const char* collector_reason)
: type(type),
cumulative_pure_incremental_marking_duration_(0.0),
longest_incremental_marking_step_(0.0),
cumulative_marking_duration_(0.0),
- cumulative_sweeping_duration_(0.0) {
+ cumulative_sweeping_duration_(0.0),
+ new_space_top_after_gc_(0) {
current_ = Event(Event::START, NULL, NULL);
current_.end_time = base::OS::TimeCurrentMillis();
previous_ = previous_mark_compactor_event_ = current_;
void GCTracer::Start(GarbageCollector collector, const char* gc_reason,
                     const char* collector_reason) {
  previous_ = current_;
+  // Capture the GC start time once; it both stamps the current event and
+  // closes the mutator allocation window opened by the previous GC's end.
+  double start_time = base::OS::TimeCurrentMillis();
+  if (new_space_top_after_gc_ != 0) {
+    AddNewSpaceAllocationTime(
+        start_time - previous_.end_time,
+        // NOTE(review): this subtracts the intptr_t offset from the pointer
+        // returned by top() and then casts the resulting pointer. For a
+        // byte-sized Address that is numerically the same as
+        // reinterpret_cast<intptr_t>(top()) - new_space_top_after_gc_, but
+        // casting before subtracting would be clearer and avoids
+        // out-of-bounds pointer arithmetic — confirm and simplify.
+        reinterpret_cast<intptr_t>((heap_->new_space()->top()) -
+                                   new_space_top_after_gc_));
+  }
  if (current_.type == Event::MARK_COMPACTOR)
    previous_mark_compactor_event_ = current_;
  current_ = Event(Event::MARK_COMPACTOR, gc_reason, collector_reason);
  }
-  current_.start_time = base::OS::TimeCurrentMillis();
+  current_.start_time = start_time;
  current_.start_object_size = heap_->SizeOfObjects();
  current_.start_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.start_holes_size = CountTotalHolesSize(heap_);
  current_.end_object_size = heap_->SizeOfObjects();
  current_.end_memory_size = heap_->isolate()->memory_allocator()->Size();
  current_.end_holes_size = CountTotalHolesSize(heap_);
+  // Remember where new-space allocation resumes, so the next Start() can
+  // compute how many bytes the mutator allocated between GCs.
+  new_space_top_after_gc_ =
+      reinterpret_cast<intptr_t>(heap_->new_space()->top());
  if (current_.type == Event::SCAVENGER) {
    current_.incremental_marking_steps =
}
+// Log one mutator allocation interval: |duration| ms during which
+// |allocation_in_bytes| bytes were allocated in the new space.
+void GCTracer::AddNewSpaceAllocationTime(double duration,
+                                         intptr_t allocation_in_bytes) {
+  AllocationEvent event(duration, allocation_in_bytes);
+  allocation_events_.push_front(event);
+}
+
+
void GCTracer::AddIncrementalMarkingStep(double duration, intptr_t bytes) {
  cumulative_incremental_marking_steps_++;
  cumulative_incremental_marking_bytes_ += bytes;
  PrintF("nodes_promoted=%d ", heap_->nodes_promoted_);
  PrintF("promotion_rate=%.1f%% ", heap_->promotion_rate_);
  PrintF("semi_space_copy_rate=%.1f%% ", heap_->semi_space_copied_rate_);
+  // NOTE(review): "%d" expects int, but the throughput function returns
+  // intptr_t — on 64-bit targets this is a format/argument mismatch.
+  // Consider the pointer-sized format macro used elsewhere for intptr_t
+  // values (V8_PTR_PREFIX) — confirm against the file's other PrintF calls.
+  PrintF("new_space_allocation_throughput=%d ",
+         NewSpaceAllocationThroughputInBytesPerMillisecond());
  if (current_.type == Event::SCAVENGER) {
    PrintF("steps_count=%d ", current_.incremental_marking_steps);
  return static_cast<intptr_t>(bytes / durations);
}
+
+
+// Average new-space allocation throughput over all recorded mutator
+// intervals, in bytes per millisecond.  Returns 0 if no time has been
+// accumulated yet (avoids division by zero).
+intptr_t GCTracer::NewSpaceAllocationThroughputInBytesPerMillisecond() const {
+  intptr_t total_bytes = 0;
+  double total_ms = 0.0;
+  for (AllocationEventBuffer::const_iterator it = allocation_events_.begin();
+       it != allocation_events_.end(); ++it) {
+    total_bytes += it->allocation_in_bytes_;
+    total_ms += it->duration_;
+  }
+
+  if (total_ms == 0.0) return 0;
+
+  return static_cast<intptr_t>(total_bytes / total_ms);
+}
}
} // namespace v8::internal
};
+  // One sample of mutator allocation activity between two garbage
+  // collections, recorded in a ring buffer for throughput estimation.
+  class AllocationEvent {
+   public:
+    // Default constructor leaves the event uninitialized — presumably so the
+    // ring buffer can default-construct unused slots; TODO confirm.
+    AllocationEvent() {}
+
+    AllocationEvent(double duration, intptr_t allocation_in_bytes);
+
+    // Time in milliseconds spent in the mutator from the end of the last
+    // garbage collection to the beginning of the next garbage collection.
+    double duration_;
+
+    // Bytes allocated in the new space from the end of the last garbage
+    // collection to the beginning of the next garbage collection.
+    intptr_t allocation_in_bytes_;
+  };
+
class Event {
public:
enum Type { SCAVENGER = 0, MARK_COMPACTOR = 1, START = 2 };
typedef RingBuffer<Event, kRingBufferMaxSize> EventBuffer;
+ typedef RingBuffer<AllocationEvent, kRingBufferMaxSize> AllocationEventBuffer;
+
explicit GCTracer(Heap* heap);
// Start collecting data.
// Stop collecting data and print results.
void Stop();
+ // Log an allocation throughput event.
+ void AddNewSpaceAllocationTime(double duration, intptr_t allocation_in_bytes);
+
// Log an incremental marking step.
void AddIncrementalMarkingStep(double duration, intptr_t bytes);
// Returns 0 if no events have been recorded.
intptr_t MarkCompactSpeedInBytesPerMillisecond() const;
+ // Allocation throughput in the new space in bytes/millisecond.
+ // Returns 0 if no events have been recorded.
+ intptr_t NewSpaceAllocationThroughputInBytesPerMillisecond() const;
+
private:
// Print one detailed trace line in name=value format.
// TODO(ernstm): Move to Heap.
// RingBuffers for MARK_COMPACTOR events.
EventBuffer mark_compactor_events_;
+ // RingBuffer for allocation events.
+ AllocationEventBuffer allocation_events_;
+
// Cumulative number of incremental marking steps since creation of tracer.
int cumulative_incremental_marking_steps_;
// all sweeping operations performed on the main thread.
double cumulative_sweeping_duration_;
+ // Holds the new space top pointer recorded at the end of the last garbage
+ // collection.
+ intptr_t new_space_top_after_gc_;
+
DISALLOW_COPY_AND_ASSIGN(GCTracer);
};
}