740b2a86e4895c8595749ae8f04e9451a994b53d
[platform/upstream/v8.git] / src / counters.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_COUNTERS_H_
6 #define V8_COUNTERS_H_
7
8 #include "include/v8.h"
9 #include "src/allocation.h"
10 #include "src/base/platform/elapsed-timer.h"
11 #include "src/base/platform/time.h"
12 #include "src/globals.h"
13 #include "src/objects.h"
14
15 namespace v8 {
16 namespace internal {
17
18 // StatsCounters is an interface for plugging into external
19 // counters for monitoring.  Counters can be looked up and
20 // manipulated by name.
21
22 class StatsTable {
23  public:
24   // Register an application-defined function where
25   // counters can be looked up.
26   void SetCounterFunction(CounterLookupCallback f) {
27     lookup_function_ = f;
28   }
29
30   // Register an application-defined function to create
31   // a histogram for passing to the AddHistogramSample function
32   void SetCreateHistogramFunction(CreateHistogramCallback f) {
33     create_histogram_function_ = f;
34   }
35
36   // Register an application-defined function to add a sample
37   // to a histogram created with CreateHistogram function
38   void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
39     add_histogram_sample_function_ = f;
40   }
41
42   bool HasCounterFunction() const {
43     return lookup_function_ != NULL;
44   }
45
46   // Lookup the location of a counter by name.  If the lookup
47   // is successful, returns a non-NULL pointer for writing the
48   // value of the counter.  Each thread calling this function
49   // may receive a different location to store it's counter.
50   // The return value must not be cached and re-used across
51   // threads, although a single thread is free to cache it.
52   int* FindLocation(const char* name) {
53     if (!lookup_function_) return NULL;
54     return lookup_function_(name);
55   }
56
57   // Create a histogram by name. If the create is successful,
58   // returns a non-NULL pointer for use with AddHistogramSample
59   // function. min and max define the expected minimum and maximum
60   // sample values. buckets is the maximum number of buckets
61   // that the samples will be grouped into.
62   void* CreateHistogram(const char* name,
63                         int min,
64                         int max,
65                         size_t buckets) {
66     if (!create_histogram_function_) return NULL;
67     return create_histogram_function_(name, min, max, buckets);
68   }
69
70   // Add a sample to a histogram created with the CreateHistogram
71   // function.
72   void AddHistogramSample(void* histogram, int sample) {
73     if (!add_histogram_sample_function_) return;
74     return add_histogram_sample_function_(histogram, sample);
75   }
76
77  private:
78   StatsTable();
79
80   CounterLookupCallback lookup_function_;
81   CreateHistogramCallback create_histogram_function_;
82   AddHistogramSampleCallback add_histogram_sample_function_;
83
84   friend class Isolate;
85
86   DISALLOW_COPY_AND_ASSIGN(StatsTable);
87 };
88
89 // StatsCounters are dynamically created values which can be tracked in
90 // the StatsTable.  They are designed to be lightweight to create and
91 // easy to use.
92 //
93 // Internally, a counter represents a value in a row of a StatsTable.
94 // The row has a 32bit value for each process/thread in the table and also
95 // a name (stored in the table metadata).  Since the storage location can be
96 // thread-specific, this class cannot be shared across threads.
97 class StatsCounter {
98  public:
99   StatsCounter() { }
100   explicit StatsCounter(Isolate* isolate, const char* name)
101       : isolate_(isolate), name_(name), ptr_(NULL), lookup_done_(false) { }
102
103   // Sets the counter to a specific value.
104   void Set(int value) {
105     int* loc = GetPtr();
106     if (loc) *loc = value;
107   }
108
109   // Increments the counter.
110   void Increment() {
111     int* loc = GetPtr();
112     if (loc) (*loc)++;
113   }
114
115   void Increment(int value) {
116     int* loc = GetPtr();
117     if (loc)
118       (*loc) += value;
119   }
120
121   // Decrements the counter.
122   void Decrement() {
123     int* loc = GetPtr();
124     if (loc) (*loc)--;
125   }
126
127   void Decrement(int value) {
128     int* loc = GetPtr();
129     if (loc) (*loc) -= value;
130   }
131
132   // Is this counter enabled?
133   // Returns false if table is full.
134   bool Enabled() {
135     return GetPtr() != NULL;
136   }
137
138   // Get the internal pointer to the counter. This is used
139   // by the code generator to emit code that manipulates a
140   // given counter without calling the runtime system.
141   int* GetInternalPointer() {
142     int* loc = GetPtr();
143     DCHECK(loc != NULL);
144     return loc;
145   }
146
147   // Reset the cached internal pointer.
148   void Reset() { lookup_done_ = false; }
149
150  protected:
151   // Returns the cached address of this counter location.
152   int* GetPtr() {
153     if (lookup_done_) return ptr_;
154     lookup_done_ = true;
155     ptr_ = FindLocationInStatsTable();
156     return ptr_;
157   }
158
159  private:
160   int* FindLocationInStatsTable() const;
161
162   Isolate* isolate_;
163   const char* name_;
164   int* ptr_;
165   bool lookup_done_;
166 };
167
168 // A Histogram represents a dynamically created histogram in the StatsTable.
169 // It will be registered with the histogram system on first use.
170 class Histogram {
171  public:
172   Histogram() { }
173   Histogram(const char* name,
174             int min,
175             int max,
176             int num_buckets,
177             Isolate* isolate)
178       : name_(name),
179         min_(min),
180         max_(max),
181         num_buckets_(num_buckets),
182         histogram_(NULL),
183         lookup_done_(false),
184         isolate_(isolate) { }
185
186   // Add a single sample to this histogram.
187   void AddSample(int sample);
188
189   // Returns true if this histogram is enabled.
190   bool Enabled() {
191     return GetHistogram() != NULL;
192   }
193
194   // Reset the cached internal pointer.
195   void Reset() {
196     lookup_done_ = false;
197   }
198
199  protected:
200   // Returns the handle to the histogram.
201   void* GetHistogram() {
202     if (!lookup_done_) {
203       lookup_done_ = true;
204       histogram_ = CreateHistogram();
205     }
206     return histogram_;
207   }
208
209   const char* name() { return name_; }
210   Isolate* isolate() const { return isolate_; }
211
212  private:
213   void* CreateHistogram() const;
214
215   const char* name_;
216   int min_;
217   int max_;
218   int num_buckets_;
219   void* histogram_;
220   bool lookup_done_;
221   Isolate* isolate_;
222 };
223
224 // A HistogramTimer allows distributions of results to be created.
225 class HistogramTimer : public Histogram {
226  public:
227   enum Resolution {
228     MILLISECOND,
229     MICROSECOND
230   };
231
232   HistogramTimer() {}
233   HistogramTimer(const char* name, int min, int max, Resolution resolution,
234                  int num_buckets, Isolate* isolate)
235       : Histogram(name, min, max, num_buckets, isolate),
236         resolution_(resolution) {}
237
238   // Start the timer.
239   void Start();
240
241   // Stop the timer and record the results.
242   void Stop();
243
244   // Returns true if the timer is running.
245   bool Running() {
246     return Enabled() && timer_.IsStarted();
247   }
248
249   // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
250 #ifdef DEBUG
251   base::ElapsedTimer* timer() { return &timer_; }
252 #endif
253
254  private:
255   base::ElapsedTimer timer_;
256   Resolution resolution_;
257 };
258
// Helper class for scoping a HistogramTimer.
// TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
// Parser is currently reentrant (when it throws an error, we call back
// into JavaScript and all bets are off), but ElapsedTimer is not
// reentry-safe. Fix this properly and remove |allow_nesting|.
class HistogramTimerScope BASE_EMBEDDED {
 public:
  // Starts |timer| on construction.  In debug builds, if the timer is
  // already running and |allow_nesting| is true, the start is skipped (and
  // the matching Stop() is skipped in the destructor) so that a reentrant
  // scope does not restart the running ElapsedTimer.  In release builds
  // |allow_nesting| is ignored and the timer is started unconditionally.
  explicit HistogramTimerScope(HistogramTimer* timer,
                               bool allow_nesting = false)
#ifdef DEBUG
      : timer_(timer),
        skipped_timer_start_(false) {
    if (timer_->timer()->IsStarted() && allow_nesting) {
      skipped_timer_start_ = true;
    } else {
      timer_->Start();
    }
  }
#else
      : timer_(timer) {
    timer_->Start();
  }
#endif
  // Stops the timer, unless this (debug-only, nested) scope skipped the
  // start and therefore must not stop the outer scope's measurement.
  ~HistogramTimerScope() {
#ifdef DEBUG
    if (!skipped_timer_start_) {
      timer_->Stop();
    }
#else
    timer_->Stop();
#endif
  }

 private:
  HistogramTimer* timer_;
#ifdef DEBUG
  // True if this scope found the timer already started and did not start
  // (so must not stop) it.
  bool skipped_timer_start_;
#endif
};
298
299
300 // A histogram timer that can aggregate events within a larger scope.
301 //
302 // Intended use of this timer is to have an outer (aggregating) and an inner
303 // (to be aggregated) scope, where the inner scope measure the time of events,
304 // and all those inner scope measurements will be summed up by the outer scope.
305 // An example use might be to aggregate the time spent in lazy compilation
306 // while running a script.
307 //
308 // Helpers:
309 // - AggregatingHistogramTimerScope, the "outer" scope within which
310 //     times will be summed up.
311 // - AggregatedHistogramTimerScope, the "inner" scope which defines the
312 //     events to be timed.
313 class AggregatableHistogramTimer : public Histogram {
314  public:
315   AggregatableHistogramTimer() {}
316   AggregatableHistogramTimer(const char* name, int min, int max,
317                              int num_buckets, Isolate* isolate)
318       : Histogram(name, min, max, num_buckets, isolate) {}
319
320   // Start/stop the "outer" scope.
321   void Start() { time_ = base::TimeDelta(); }
322   void Stop() { AddSample(static_cast<int>(time_.InMicroseconds())); }
323
324   // Add a time value ("inner" scope).
325   void Add(base::TimeDelta other) { time_ += other; }
326
327  private:
328   base::TimeDelta time_;
329 };
330
331
332 // A helper class for use with AggregatableHistogramTimer.
333 class AggregatingHistogramTimerScope {
334  public:
335   explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
336       : histogram_(histogram) {
337     histogram_->Start();
338   }
339   ~AggregatingHistogramTimerScope() { histogram_->Stop(); }
340
341  private:
342   AggregatableHistogramTimer* histogram_;
343 };
344
345
346 // A helper class for use with AggregatableHistogramTimer.
347 class AggregatedHistogramTimerScope {
348  public:
349   explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
350       : histogram_(histogram) {
351     timer_.Start();
352   }
353   ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }
354
355  private:
356   base::ElapsedTimer timer_;
357   AggregatableHistogramTimer* histogram_;
358 };
359
360
// AggregatedMemoryHistogram collects (time, value) sample pairs and turns
// them into time-uniform samples for the backing histogram, such that the
// backing histogram receives one sample every T ms, where the T is controlled
// by FLAG_histogram_interval.
//
// More formally: let F be a real-valued function that maps time to sample
// values. We define F as a linear interpolation between adjacent samples. For
// each time interval [x; x + T) the backing histogram gets one sample value
// that is the average of F(t) in the interval.
template <typename Histogram>
class AggregatedMemoryHistogram {
 public:
  AggregatedMemoryHistogram()
      : is_initialized_(false),
        start_ms_(0.0),
        last_ms_(0.0),
        aggregate_value_(0.0),
        last_value_(0.0),
        backing_histogram_(NULL) {}

  explicit AggregatedMemoryHistogram(Histogram* backing_histogram)
      : AggregatedMemoryHistogram() {
    backing_histogram_ = backing_histogram;
  }

  // Invariants that hold before and after AddSample if
  // is_initialized_ is true:
  //
  // 1) All samples that arrived before start_ms_ have been processed and the
  // corresponding aggregated samples were sent to the backing histogram.
  // 2) (last_ms_, last_value_) is the last received sample.
  // 3) last_ms_ < start_ms_ + FLAG_histogram_interval.
  // 4) aggregate_value_ is the average of the function that is constructed by
  // linearly interpolating samples received between start_ms_ and last_ms_.
  void AddSample(double current_ms, double current_value);

 private:
  // Returns the average of the interpolated function over
  // [start_ms_; current_ms], folding the partial average aggregate_value_
  // together with the new tail [last_ms_; current_ms].
  double Aggregate(double current_ms, double current_value);
  bool is_initialized_;
  double start_ms_;         // Start of the current aggregation interval.
  double last_ms_;          // Time of the last received sample.
  double aggregate_value_;  // Running average over [start_ms_; last_ms_].
  double last_value_;       // Value of the last received sample.
  Histogram* backing_histogram_;
};
406
407
// Feeds one raw (time, value) sample into the aggregator, emitting one
// averaged sample to the backing histogram for every full
// FLAG_histogram_interval that has elapsed since start_ms_.
template <typename Histogram>
void AggregatedMemoryHistogram<Histogram>::AddSample(double current_ms,
                                                     double current_value) {
  if (!is_initialized_) {
    // First sample ever: start a new aggregation interval at current_ms.
    aggregate_value_ = current_value;
    start_ms_ = current_ms;
    last_value_ = current_value;
    last_ms_ = current_ms;
    is_initialized_ = true;
  } else {
    const double kEpsilon = 1e-6;
    const int kMaxSamples = 1000;
    if (current_ms < last_ms_ + kEpsilon) {
      // Two samples have the same time, remember the last one.
      last_value_ = current_value;
    } else {
      double sample_interval_ms = FLAG_histogram_interval;
      double end_ms = start_ms_ + sample_interval_ms;
      if (end_ms <= current_ms + kEpsilon) {
        // Linearly interpolate between the last_ms_ and the current_ms.
        double slope = (current_value - last_value_) / (current_ms - last_ms_);
        int i;
        // Send aggregated samples to the backing histogram from the start_ms
        // to the current_ms.
        for (i = 0; i < kMaxSamples && end_ms <= current_ms + kEpsilon; i++) {
          double end_value = last_value_ + (end_ms - last_ms_) * slope;
          double sample_value;
          if (i == 0) {
            // Take aggregate_value_ into account.
            sample_value = Aggregate(end_ms, end_value);
          } else {
            // There is no aggregate_value_ for i > 0.
            sample_value = (last_value_ + end_value) / 2;
          }
          // Round to nearest integer when emitting the sample.
          backing_histogram_->AddSample(static_cast<int>(sample_value + 0.5));
          last_value_ = end_value;
          last_ms_ = end_ms;
          end_ms += sample_interval_ms;
        }
        if (i == kMaxSamples) {
          // We hit the sample limit, ignore the remaining samples.
          aggregate_value_ = current_value;
          start_ms_ = current_ms;
        } else {
          aggregate_value_ = last_value_;
          start_ms_ = last_ms_;
        }
      }
      // Fold the remaining tail [last_ms_; current_ms] into the running
      // average of the (still incomplete) interval starting at start_ms_.
      aggregate_value_ = current_ms > start_ms_ + kEpsilon
                             ? Aggregate(current_ms, current_value)
                             : aggregate_value_;
      last_value_ = current_value;
      last_ms_ = current_ms;
    }
  }
}
464
465
466 template <typename Histogram>
467 double AggregatedMemoryHistogram<Histogram>::Aggregate(double current_ms,
468                                                        double current_value) {
469   double interval_ms = current_ms - start_ms_;
470   double value = (current_value + last_value_) / 2;
471   // The aggregate_value_ is the average for [start_ms_; last_ms_].
472   // The value is the average for [last_ms_; current_ms].
473   // Return the weighted average of the aggregate_value_ and the value.
474   return aggregate_value_ * ((last_ms_ - start_ms_) / interval_ms) +
475          value * ((current_ms - last_ms_) / interval_ms);
476 }
477
478
// HR(name, caption, min, max, num_buckets)
#define HISTOGRAM_RANGE_LIST(HR)                                              \
  /* Generic range histograms */                                              \
  HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21)        \
  HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101)   \
  HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \
  HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000,    \
     101)                                                                     \
  HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6)
487
// HT(name, caption, max, resolution)
#define HISTOGRAM_TIMER_LIST(HT)                                              \
  /* Garbage collection timers. */                                            \
  HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND)                        \
  HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND)                        \
  HT(gc_context, V8.GCContext, 10000,                                         \
     MILLISECOND) /* GC context cleanup time */                               \
  HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND)         \
  HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND)     \
  HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000,           \
     MILLISECOND)                                                             \
  /* Parsing timers. */                                                       \
  HT(parse, V8.ParseMicroSeconds, 1000000, MICROSECOND)                       \
  HT(parse_lazy, V8.ParseLazyMicroSeconds, 1000000, MICROSECOND)              \
  HT(pre_parse, V8.PreParseMicroSeconds, 1000000, MICROSECOND)                \
  /* Compilation times. */                                                    \
  HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND)                   \
  HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND)          \
  /* Serialization as part of compilation (code caching) */                   \
  HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
  HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000,         \
     MICROSECOND)                                                             \
  /* Total compilation time incl. caching/parsing */                          \
  HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND)
511
512
// AHT(name, caption)
#define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
  AHT(compile_lazy, V8.CompileLazyMicroSeconds)
515
516
// HP(name, caption)
#define HISTOGRAM_PERCENTAGE_LIST(HP)                                          \
  /* Heap fragmentation. */                                                    \
  HP(external_fragmentation_total, V8.MemoryExternalFragmentationTotal)        \
  HP(external_fragmentation_old_space, V8.MemoryExternalFragmentationOldSpace) \
  HP(external_fragmentation_code_space,                                        \
     V8.MemoryExternalFragmentationCodeSpace)                                  \
  HP(external_fragmentation_map_space, V8.MemoryExternalFragmentationMapSpace) \
  HP(external_fragmentation_lo_space, V8.MemoryExternalFragmentationLoSpace)   \
  /* Percentages of heap committed to each space. */                           \
  HP(heap_fraction_new_space, V8.MemoryHeapFractionNewSpace)                   \
  HP(heap_fraction_old_space, V8.MemoryHeapFractionOldSpace)                   \
  HP(heap_fraction_code_space, V8.MemoryHeapFractionCodeSpace)                 \
  HP(heap_fraction_map_space, V8.MemoryHeapFractionMapSpace)                   \
  HP(heap_fraction_lo_space, V8.MemoryHeapFractionLoSpace)                     \
  /* Percentage of crankshafted codegen. */                                    \
  HP(codegen_fraction_crankshaft, V8.CodegenFractionCrankshaft)
533
534
// HM(name, caption)
#define HISTOGRAM_LEGACY_MEMORY_LIST(HM)                                      \
  HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted)          \
  HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed)                    \
  HM(heap_sample_map_space_committed, V8.MemoryHeapSampleMapSpaceCommitted)   \
  HM(heap_sample_code_space_committed, V8.MemoryHeapSampleCodeSpaceCommitted) \
  HM(heap_sample_maximum_committed, V8.MemoryHeapSampleMaximumCommitted)
541
// HM(name, caption)
#define HISTOGRAM_MEMORY_LIST(HM)                   \
  HM(memory_heap_committed, V8.MemoryHeapCommitted) \
  HM(memory_heap_used, V8.MemoryHeapUsed)
545
546
// WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
// Intellisense to crash.  It was broken into two macros (each of length 40
// lines) rather than one macro (of length about 80 lines) to work around
// this problem.  Please avoid using recursive macros of this length when
// possible.
// SC(name, caption)
#define STATS_COUNTER_LIST_1(SC)                                      \
  /* Global Handle Count*/                                            \
  SC(global_handles, V8.GlobalHandles)                                \
  /* OS Memory allocated */                                           \
  SC(memory_allocated, V8.OsMemoryAllocated)                          \
  SC(normalized_maps, V8.NormalizedMaps)                              \
  SC(props_to_dictionary, V8.ObjectPropertiesToDictionary)            \
  SC(elements_to_dictionary, V8.ObjectElementsToDictionary)           \
  SC(alive_after_last_gc, V8.AliveAfterLastGC)                        \
  SC(objs_since_last_young, V8.ObjsSinceLastYoung)                    \
  SC(objs_since_last_full, V8.ObjsSinceLastFull)                      \
  SC(string_table_capacity, V8.StringTableCapacity)                   \
  SC(number_of_symbols, V8.NumberOfSymbols)                           \
  SC(script_wrappers, V8.ScriptWrappers)                              \
  SC(call_initialize_stubs, V8.CallInitializeStubs)                   \
  SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs)           \
  SC(call_normal_stubs, V8.CallNormalStubs)                           \
  SC(call_megamorphic_stubs, V8.CallMegamorphicStubs)                 \
  SC(inlined_copied_elements, V8.InlinedCopiedElements)               \
  SC(arguments_adaptors, V8.ArgumentsAdaptors)                        \
  SC(compilation_cache_hits, V8.CompilationCacheHits)                 \
  SC(compilation_cache_misses, V8.CompilationCacheMisses)             \
  /* Amount of evaled source code. */                                 \
  SC(total_eval_size, V8.TotalEvalSize)                               \
  /* Amount of loaded source code. */                                 \
  SC(total_load_size, V8.TotalLoadSize)                               \
  /* Amount of parsed source code. */                                 \
  SC(total_parse_size, V8.TotalParseSize)                             \
  /* Amount of source code skipped over using preparsing. */          \
  SC(total_preparse_skipped, V8.TotalPreparseSkipped)                 \
  /* Number of symbol lookups skipped using preparsing */             \
  SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped)   \
  /* Amount of compiled source code. */                               \
  SC(total_compile_size, V8.TotalCompileSize)                         \
  /* Amount of source code compiled with the full codegen. */         \
  SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize)   \
  /* Number of contexts created from scratch. */                      \
  SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch)    \
  /* Number of contexts created by partial snapshot. */               \
  SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot)      \
  /* Number of code objects found from pc. */                         \
  SC(pc_to_code, V8.PcToCode)                                         \
  SC(pc_to_code_cached, V8.PcToCodeCached)                            \
  /* The store-buffer implementation of the write barrier. */         \
  SC(store_buffer_compactions, V8.StoreBufferCompactions)             \
  SC(store_buffer_overflows, V8.StoreBufferOverflows)
598
599
// SC(name, caption)
#define STATS_COUNTER_LIST_2(SC)                                               \
  /* Number of code stubs. */                                                  \
  SC(code_stubs, V8.CodeStubs)                                                 \
  /* Amount of stub code. */                                                   \
  SC(total_stubs_code_size, V8.TotalStubsCodeSize)                             \
  /* Amount of (JS) compiled code. */                                          \
  SC(total_compiled_code_size, V8.TotalCompiledCodeSize)                       \
  SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest)            \
  SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
  SC(gc_compactor_caused_by_oldspace_exhaustion,                               \
     V8.GCCompactorCausedByOldspaceExhaustion)                                 \
  SC(gc_last_resort_from_js, V8.GCLastResortFromJS)                            \
  SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles)                  \
  /* How is the generic keyed-load stub used? */                               \
  SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi)                           \
  SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol)                     \
  SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache)          \
  SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow)                         \
  SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs)               \
  SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow)            \
  /* How is the generic keyed-call stub used? */                               \
  SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast)                  \
  SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict)                  \
  SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache)          \
  SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict)            \
  SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow)                         \
  SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad)                \
  SC(named_load_global_stub, V8.NamedLoadGlobalStub)                           \
  SC(named_store_global_inline, V8.NamedStoreGlobalInline)                     \
  SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss)            \
  SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs)             \
  SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow)          \
  SC(store_normal_miss, V8.StoreNormalMiss)                                    \
  SC(store_normal_hit, V8.StoreNormalHit)                                      \
  SC(cow_arrays_created_stub, V8.COWArraysCreatedStub)                         \
  SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime)                   \
  SC(cow_arrays_converted, V8.COWArraysConverted)                              \
  SC(call_miss, V8.CallMiss)                                                   \
  SC(keyed_call_miss, V8.KeyedCallMiss)                                        \
  SC(load_miss, V8.LoadMiss)                                                   \
  SC(keyed_load_miss, V8.KeyedLoadMiss)                                        \
  SC(call_const, V8.CallConst)                                                 \
  SC(call_const_fast_api, V8.CallConstFastApi)                                 \
  SC(call_const_interceptor, V8.CallConstInterceptor)                          \
  SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi)          \
  SC(call_global_inline, V8.CallGlobalInline)                                  \
  SC(call_global_inline_miss, V8.CallGlobalInlineMiss)                         \
  SC(constructed_objects, V8.ConstructedObjects)                               \
  SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime)                \
  SC(negative_lookups, V8.NegativeLookups)                                     \
  SC(negative_lookups_miss, V8.NegativeLookupsMiss)                            \
  SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes)             \
  SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses)             \
  SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates)           \
  SC(array_function_runtime, V8.ArrayFunctionRuntime)                          \
  SC(array_function_native, V8.ArrayFunctionNative)                            \
  SC(enum_cache_hits, V8.EnumCacheHits)                                        \
  SC(enum_cache_misses, V8.EnumCacheMisses)                                    \
  SC(fast_new_closure_total, V8.FastNewClosureTotal)                           \
  SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized)            \
  SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized)    \
  SC(string_add_runtime, V8.StringAddRuntime)                                  \
  SC(string_add_native, V8.StringAddNative)                                    \
  SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte)      \
  SC(sub_string_runtime, V8.SubStringRuntime)                                  \
  SC(sub_string_native, V8.SubStringNative)                                    \
  SC(string_add_make_two_char, V8.StringAddMakeTwoChar)                        \
  SC(string_compare_native, V8.StringCompareNative)                            \
  SC(string_compare_runtime, V8.StringCompareRuntime)                          \
  SC(regexp_entry_runtime, V8.RegExpEntryRuntime)                              \
  SC(regexp_entry_native, V8.RegExpEntryNative)                                \
  SC(number_to_string_native, V8.NumberToStringNative)                         \
  SC(number_to_string_runtime, V8.NumberToStringRuntime)                       \
  SC(math_acos, V8.MathAcos)                                                   \
  SC(math_asin, V8.MathAsin)                                                   \
  SC(math_atan, V8.MathAtan)                                                   \
  SC(math_atan2, V8.MathAtan2)                                                 \
  SC(math_clz32, V8.MathClz32)                                                 \
  SC(math_exp, V8.MathExp)                                                     \
  SC(math_floor, V8.MathFloor)                                                 \
  SC(math_log, V8.MathLog)                                                     \
  SC(math_pow, V8.MathPow)                                                     \
  SC(math_round, V8.MathRound)                                                 \
  SC(math_sqrt, V8.MathSqrt)                                                   \
  SC(stack_interrupts, V8.StackInterrupts)                                     \
  SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks)                          \
  SC(bounds_checks_eliminated, V8.BoundsChecksEliminated)                      \
  SC(bounds_checks_hoisted, V8.BoundsChecksHoisted)                            \
  SC(soft_deopts_requested, V8.SoftDeoptsRequested)                            \
  SC(soft_deopts_inserted, V8.SoftDeoptsInserted)                              \
  SC(soft_deopts_executed, V8.SoftDeoptsExecuted)                              \
  /* Number of write barriers in generated code. */                            \
  SC(write_barriers_dynamic, V8.WriteBarriersDynamic)                          \
  SC(write_barriers_static, V8.WriteBarriersStatic)                            \
  SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable)               \
  SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted)               \
  SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed)                         \
  SC(old_space_bytes_available, V8.MemoryOldSpaceBytesAvailable)               \
  SC(old_space_bytes_committed, V8.MemoryOldSpaceBytesCommitted)               \
  SC(old_space_bytes_used, V8.MemoryOldSpaceBytesUsed)                         \
  SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable)             \
  SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted)             \
  SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed)                       \
  SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable)               \
  SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted)               \
  SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed)                         \
  SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable)                 \
  SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted)                 \
  SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)
709
710
711 // This file contains all the v8 counters that are in use.
// Central registry of every V8 counter and histogram in use.  The class body
// is generated almost entirely by expanding the X-macro lists defined above
// (HISTOGRAM_*_LIST, STATS_COUNTER_LIST_*, INSTANCE_TYPE_LIST, ...): each
// list entry produces one public accessor here and one backing member in the
// private section below, so the two sections must stay in lockstep.
712 class Counters {
713  public:
// Range histograms: one Histogram accessor per HISTOGRAM_RANGE_LIST entry,
// returning a pointer to the backing member declared below.
714 #define HR(name, caption, min, max, num_buckets) \
715   Histogram* name() { return &name##_; }
716   HISTOGRAM_RANGE_LIST(HR)
717 #undef HR
718
// Timer histograms: one HistogramTimer accessor per HISTOGRAM_TIMER_LIST entry.
719 #define HT(name, caption, max, res) \
720   HistogramTimer* name() { return &name##_; }
721   HISTOGRAM_TIMER_LIST(HT)
722 #undef HT
723
// Aggregatable timer histograms.
724 #define AHT(name, caption) \
725   AggregatableHistogramTimer* name() { return &name##_; }
726   AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
727 #undef AHT
728
// Percentage histograms.
729 #define HP(name, caption) \
730   Histogram* name() { return &name##_; }
731   HISTOGRAM_PERCENTAGE_LIST(HP)
732 #undef HP
733
// Memory histograms; both the legacy and the current memory lists expand to
// plain Histogram accessors.
734 #define HM(name, caption) \
735   Histogram* name() { return &name##_; }
736   HISTOGRAM_LEGACY_MEMORY_LIST(HM)
737   HISTOGRAM_MEMORY_LIST(HM)
738 #undef HM
739
// Aggregated memory histograms: accessors named aggregated_<name>().  Note
// these are generated for HISTOGRAM_MEMORY_LIST only, not the legacy list.
740 #define HM(name, caption)                                     \
741   AggregatedMemoryHistogram<Histogram>* aggregated_##name() { \
742     return &aggregated_##name##_;                             \
743   }
744   HISTOGRAM_MEMORY_LIST(HM)
745 #undef HM
746
// Plain stats counters from both counter lists.
747 #define SC(name, caption) \
748   StatsCounter* name() { return &name##_; }
749   STATS_COUNTER_LIST_1(SC)
750   STATS_COUNTER_LIST_2(SC)
751 #undef SC
752
// Per-instance-type object count and size counters.
753 #define SC(name) \
754   StatsCounter* count_of_##name() { return &count_of_##name##_; } \
755   StatsCounter* size_of_##name() { return &size_of_##name##_; }
756   INSTANCE_TYPE_LIST(SC)
757 #undef SC
758
// Per-code-kind count and size counters.
759 #define SC(name) \
760   StatsCounter* count_of_CODE_TYPE_##name() \
761     { return &count_of_CODE_TYPE_##name##_; } \
762   StatsCounter* size_of_CODE_TYPE_##name() \
763     { return &size_of_CODE_TYPE_##name##_; }
764   CODE_KIND_LIST(SC)
765 #undef SC
766
// Per-FixedArray-sub-instance-type count and size counters.
767 #define SC(name) \
768   StatsCounter* count_of_FIXED_ARRAY_##name() \
769     { return &count_of_FIXED_ARRAY_##name##_; } \
770   StatsCounter* size_of_FIXED_ARRAY_##name() \
771     { return &size_of_FIXED_ARRAY_##name##_; }
772   FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
773 #undef SC
774
// Per-code-age count and size counters.
775 #define SC(name) \
776   StatsCounter* count_of_CODE_AGE_##name() \
777     { return &count_of_CODE_AGE_##name##_; } \
778   StatsCounter* size_of_CODE_AGE_##name() \
779     { return &size_of_CODE_AGE_##name##_; }
780   CODE_AGE_LIST_COMPLETE(SC)
781 #undef SC
782
// Stable id for each timer/aggregatable/percentage/memory histogram and each
// stats counter declared above (range histograms are deliberately absent).
// The terminal enumerator stats_counter_count equals the total number of ids.
783   enum Id {
784 #define RATE_ID(name, caption, max, res) k_##name,
785     HISTOGRAM_TIMER_LIST(RATE_ID)
786 #undef RATE_ID
787 #define AGGREGATABLE_ID(name, caption) k_##name,
788     AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
789 #undef AGGREGATABLE_ID
790 #define PERCENTAGE_ID(name, caption) k_##name,
791     HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
792 #undef PERCENTAGE_ID
793 #define MEMORY_ID(name, caption) k_##name,
794     HISTOGRAM_LEGACY_MEMORY_LIST(MEMORY_ID)
795     HISTOGRAM_MEMORY_LIST(MEMORY_ID)
796 #undef MEMORY_ID
797 #define COUNTER_ID(name, caption) k_##name,
798     STATS_COUNTER_LIST_1(COUNTER_ID)
799     STATS_COUNTER_LIST_2(COUNTER_ID)
800 #undef COUNTER_ID
801 #define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
802     INSTANCE_TYPE_LIST(COUNTER_ID)
803 #undef COUNTER_ID
804 #define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
805     kSizeOfCODE_TYPE_##name,
806     CODE_KIND_LIST(COUNTER_ID)
807 #undef COUNTER_ID
// NOTE(review): the FIXED_ARRAY and CODE_AGE ids below use a double
// underscore (FIXED_ARRAY__, CODE_AGE__) while CODE_TYPE above uses a single
// one — presumably intentional for existing consumers of these enumerator
// names; confirm before ever normalizing.
808 #define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
809     kSizeOfFIXED_ARRAY__##name,
810     FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
811 #undef COUNTER_ID
812 #define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
813     kSizeOfCODE_AGE__##name,
814     CODE_AGE_LIST_COMPLETE(COUNTER_ID)
815 #undef COUNTER_ID
816     stats_counter_count
817   };
818
// Reset all counters / all histograms to their initial state (implementations
// are out of line; presumably in counters.cc — TODO confirm).
819   void ResetCounters();
820   void ResetHistograms();
821
822  private:
// Backing storage, declared from the same X-macro lists as the public
// accessors above; member names are the list entry name plus a trailing '_'.
823 #define HR(name, caption, min, max, num_buckets) Histogram name##_;
824   HISTOGRAM_RANGE_LIST(HR)
825 #undef HR
826
827 #define HT(name, caption, max, res) HistogramTimer name##_;
828   HISTOGRAM_TIMER_LIST(HT)
829 #undef HT
830
831 #define AHT(name, caption) \
832   AggregatableHistogramTimer name##_;
833   AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
834 #undef AHT
835
836 #define HP(name, caption) \
837   Histogram name##_;
838   HISTOGRAM_PERCENTAGE_LIST(HP)
839 #undef HP
840
841 #define HM(name, caption) \
842   Histogram name##_;
843   HISTOGRAM_LEGACY_MEMORY_LIST(HM)
844   HISTOGRAM_MEMORY_LIST(HM)
845 #undef HM
846
847 #define HM(name, caption) \
848   AggregatedMemoryHistogram<Histogram> aggregated_##name##_;
849   HISTOGRAM_MEMORY_LIST(HM)
850 #undef HM
851
852 #define SC(name, caption) \
853   StatsCounter name##_;
854   STATS_COUNTER_LIST_1(SC)
855   STATS_COUNTER_LIST_2(SC)
856 #undef SC
857
858 #define SC(name) \
859   StatsCounter size_of_##name##_; \
860   StatsCounter count_of_##name##_;
861   INSTANCE_TYPE_LIST(SC)
862 #undef SC
863
864 #define SC(name) \
865   StatsCounter size_of_CODE_TYPE_##name##_; \
866   StatsCounter count_of_CODE_TYPE_##name##_;
867   CODE_KIND_LIST(SC)
868 #undef SC
869
870 #define SC(name) \
871   StatsCounter size_of_FIXED_ARRAY_##name##_; \
872   StatsCounter count_of_FIXED_ARRAY_##name##_;
873   FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
874 #undef SC
875
876 #define SC(name) \
877   StatsCounter size_of_CODE_AGE_##name##_; \
878   StatsCounter count_of_CODE_AGE_##name##_;
879   CODE_AGE_LIST_COMPLETE(SC)
880 #undef SC
881
// The constructor is private and Isolate is a friend: only Isolate may
// create a Counters instance.
882   friend class Isolate;
883
884   explicit Counters(Isolate* isolate);
885
// Forbids the remaining implicit constructors (default/copy), so Counters
// cannot be created or copied outside Isolate.
886   DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
887 };
888
889 } }  // namespace v8::internal
890
891 #endif  // V8_COUNTERS_H_