// When V8 cannot allocate memory, FatalProcessOutOfMemory is called.
// The default fatal error handler is called and execution is stopped.
void i::V8::FatalProcessOutOfMemory(const char* location, bool take_snapshot) {
+ i::Isolate* isolate = i::Isolate::Current();
+ char last_few_messages[Heap::kTraceRingBufferSize + 1];
+ memset(last_few_messages, 0, Heap::kTraceRingBufferSize + 1);
+
i::HeapStats heap_stats;
int start_marker;
heap_stats.start_marker = &start_marker;
heap_stats.size_per_type = size_per_type;
int os_error;
heap_stats.os_error = &os_error;
+ heap_stats.last_few_messages = last_few_messages;
int end_marker;
heap_stats.end_marker = &end_marker;
- i::Isolate* isolate = i::Isolate::Current();
if (isolate->heap()->HasBeenSetUp()) {
// BUG(1718): Don't use the take_snapshot since we don't support
// HeapIterator here without doing a special GC.
isolate->heap()->RecordStats(&heap_stats, false);
+ char* first_newline = strchr(last_few_messages, '\n');
+ if (first_newline == NULL || first_newline[1] == '\0')
+ first_newline = last_few_messages;
+ PrintF("\n<--- Last few GCs --->\n%s\n", first_newline);
}
Utils::ApiCheck(false, location, "Allocation failed - process out of memory");
// If the fatal error handler returns, we stop execution.
// Ends tracing of one garbage collection. Decrements the nesting counter;
// for a reentrant (nested) collection only a short message is emitted, for
// the outermost collection the full trace line and statistics are printed.
// NOTE(review): this span is a diff fragment — '+' lines are additions and
// '-' lines are removals; some unchanged context may be elided.
void GCTracer::Stop(GarbageCollector collector) {
  start_counter_--;
  if (start_counter_ != 0) {
    // Reentrant GC finished: report via Output() instead of a guarded
    // PrintF so the message also lands in the heap's crash-dump ring
    // buffer (Output checks FLAG_trace_gc internally before printing).
-    if (FLAG_trace_gc) {
-      PrintF("[Finished reentrant %s during %s.]\n",
-             collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
-             current_.TypeName(false));
-    }
+    Output("[Finished reentrant %s during %s.]\n",
+           collector == SCAVENGER ? "Scavenge" : "Mark-sweep",
+           current_.TypeName(false));
    return;
  }
  // TODO(ernstm): move the code below out of GCTracer.
  // The early return on trace flags is removed: the ring buffer must be
  // fed even when --trace_gc is off, so Print()/PrintNVP() now run
  // unconditionally and Output() decides whether to write to stdout.
-  if (!FLAG_trace_gc && !FLAG_print_cumulative_gc_stat) return;
-
  double duration = current_.end_time - current_.start_time;
  double spent_in_mutator = Max(current_.start_time - previous_.end_time, 0.0);
  if (current_.type == Event::SCAVENGER && FLAG_trace_gc_ignore_scavenger)
    return;
-  if (FLAG_trace_gc) {
-    if (FLAG_trace_gc_nvp)
-      PrintNVP();
-    else
-      Print();
+  if (FLAG_trace_gc_nvp)
+    PrintNVP();
+  else
+    Print();
  // PrintShortHeapStatistics still prints directly (not via Output), so it
  // stays behind the explicit FLAG_trace_gc check.
+  if (FLAG_trace_gc) {
    heap_->PrintShortHeapStatistics();
  }
}
}
+void GCTracer::Output(const char* format, ...) const {
+ if (FLAG_trace_gc) {
+ va_list arguments;
+ va_start(arguments, format);
+ base::OS::VPrint(format, arguments);
+ va_end(arguments);
+ }
+
+ const int kBufferSize = 256;
+ char raw_buffer[kBufferSize];
+ Vector<char> buffer(raw_buffer, kBufferSize);
+ va_list arguments2;
+ va_start(arguments2, format);
+ VSNPrintF(buffer, format, arguments2);
+ va_end(arguments2);
+
+ heap_->AddToRingBuffer(buffer.start());
+}
+
+
// Emits the human-readable one-line GC trace. Every piece now goes through
// Output() so the line is both printed (when --trace_gc is on) and stored
// in the heap's ring buffer for crash dumps.
// NOTE(review): this span is a diff fragment — '+' lines are additions and
// '-' lines are removals; some continuation lines of the original calls
// appear to be elided by the diff chunking.
void GCTracer::Print() const {
  // The isolate prefix is only printed, never ring-buffered, so it stays
  // behind FLAG_trace_gc; the timestamp and the rest go through Output().
-  PrintIsolate(heap_->isolate(), "%8.0f ms: ",
-               heap_->isolate()->time_millis_since_init());
+  if (FLAG_trace_gc) {
+    PrintIsolate(heap_->isolate(), "");
+  }
+  Output("%8.0f ms: ", heap_->isolate()->time_millis_since_init());
-  PrintF("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
+  Output("%s %.1f (%.1f) -> %.1f (%.1f) MB, ", current_.TypeName(false),
         static_cast<double>(current_.start_object_size) / MB,
         static_cast<double>(current_.start_memory_size) / MB,
         static_cast<double>(current_.end_object_size) / MB,
  // NOTE(review): this external_time line still uses PrintF, so it never
  // reaches the ring buffer — confirm whether that is intentional.
  if (external_time > 0) PrintF("%d / ", external_time);
  double duration = current_.end_time - current_.start_time;
-  PrintF("%.1f ms", duration);
+  Output("%.1f ms", duration);
  if (current_.type == Event::SCAVENGER) {
    if (current_.incremental_marking_steps > 0) {
-      PrintF(" (+ %.1f ms in %d steps since last GC)",
+      Output(" (+ %.1f ms in %d steps since last GC)",
             current_.incremental_marking_duration,
             current_.incremental_marking_steps);
    }
  } else {
    if (current_.incremental_marking_steps > 0) {
-      PrintF(
+      Output(
          " (+ %.1f ms in %d steps since start of marking, "
          "biggest step %.1f ms)",
          current_.incremental_marking_duration,
  }
  if (current_.gc_reason != NULL) {
-    PrintF(" [%s]", current_.gc_reason);
+    Output(" [%s]", current_.gc_reason);
  }
  if (current_.collector_reason != NULL) {
-    PrintF(" [%s]", current_.collector_reason);
+    Output(" [%s]", current_.collector_reason);
  }
-  PrintF(".\n");
+  Output(".\n");
}
// TODO(ernstm): Move to Heap.
void Print() const;
+ // Prints a line and also adds it to the heap's ring buffer so that
+ // it can be included in later crash dumps.
+ void Output(const char* format, ...) const;
+
// Compute the mean duration of the events in the given ring buffer.
double MeanDuration(const EventBuffer& events) const;
old_generation_size_at_last_gc_(0),
gcs_since_last_deopt_(0),
allocation_sites_scratchpad_length_(0),
+ ring_buffer_full_(false),
+ ring_buffer_end_(0),
promotion_queue_(this),
configured_(false),
external_string_table_(this),
}
+void Heap::AddToRingBuffer(const char* string) {
+ size_t first_part =
+ Min(strlen(string), kTraceRingBufferSize - ring_buffer_end_);
+ memcpy(trace_ring_buffer_ + ring_buffer_end_, string, first_part);
+ ring_buffer_end_ += first_part;
+ if (first_part < strlen(string)) {
+ ring_buffer_full_ = true;
+ size_t second_part = strlen(string) - first_part;
+ memcpy(trace_ring_buffer_, string + first_part, second_part);
+ ring_buffer_end_ = second_part;
+ }
+}
+
+
+void Heap::GetFromRingBuffer(char* buffer) {
+ size_t copied = 0;
+ if (ring_buffer_full_) {
+ copied = kTraceRingBufferSize - ring_buffer_end_;
+ memcpy(buffer, trace_ring_buffer_ + ring_buffer_end_, copied);
+ }
+ memcpy(buffer + copied, trace_ring_buffer_, ring_buffer_end_);
+}
+
+
// Configures the heap with all-zero size parameters — presumably meaning
// "use ConfigureHeap's internal defaults"; confirm against ConfigureHeap.
bool Heap::ConfigureHeapDefault() { return ConfigureHeap(0, 0, 0, 0); }
stats->size_per_type[type] += obj->Size();
}
}
+ if (stats->last_few_messages != NULL)
+ GetFromRingBuffer(stats->last_few_messages);
}
static const int kMaxExecutableSizeHugeMemoryDevice =
256 * kPointerMultiplier;
+ static const int kTraceRingBufferSize = 512;
+
// Calculates the allocation limit based on a given growing factor and a
// given old generation size.
intptr_t CalculateOldGenerationAllocationLimit(double factor,
inline void UpdateAllocationsHash(uint32_t value);
inline void PrintAlloctionsHash();
+ void AddToRingBuffer(const char* string);
+ void GetFromRingBuffer(char* buffer);
+
// Object counts and used memory by InstanceType
size_t object_counts_[OBJECT_STATS_COUNT];
size_t object_counts_last_time_[OBJECT_STATS_COUNT];
static const int kAllocationSiteScratchpadSize = 256;
int allocation_sites_scratchpad_length_;
+ char trace_ring_buffer_[kTraceRingBufferSize];
+ // If it's not full then the data is from 0 to ring_buffer_end_. If it's
+ // full then the data is from ring_buffer_end_ to the end of the buffer and
+ // from 0 to ring_buffer_end_.
+ bool ring_buffer_full_;
+ size_t ring_buffer_end_;
+
static const int kMaxMarkCompactsInIdleRound = 7;
static const int kIdleScavengeThreshold = 5;
int* objects_per_type; // 17
int* size_per_type; // 18
int* os_error; // 19
- int* end_marker; // 20
+ char* last_few_messages; // 20
+ int* end_marker; // 21
};