1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "include/v8.h"
9 #include "src/allocation.h"
10 #include "src/base/platform/elapsed-timer.h"
11 #include "src/base/platform/time.h"
12 #include "src/globals.h"
13 #include "src/objects.h"
18 // StatsCounters is an interface for plugging into external
19 // counters for monitoring. Counters can be looked up and
20 // manipulated by name.
24 // Register an application-defined function where
25 // counters can be looked up.
26 void SetCounterFunction(CounterLookupCallback f) {
30 // Register an application-defined function to create
31 // a histogram for passing to the AddHistogramSample function
32 void SetCreateHistogramFunction(CreateHistogramCallback f) {
33 create_histogram_function_ = f;
36 // Register an application-defined function to add a sample
37 // to a histogram created with CreateHistogram function
38 void SetAddHistogramSampleFunction(AddHistogramSampleCallback f) {
39 add_histogram_sample_function_ = f;
42 bool HasCounterFunction() const {
43 return lookup_function_ != NULL;
46 // Lookup the location of a counter by name. If the lookup
47 // is successful, returns a non-NULL pointer for writing the
48 // value of the counter. Each thread calling this function
49 // may receive a different location to store its counter.
50 // The return value must not be cached and re-used across
51 // threads, although a single thread is free to cache it.
52 int* FindLocation(const char* name) {
53 if (!lookup_function_) return NULL;
54 return lookup_function_(name);
57 // Create a histogram by name. If the create is successful,
58 // returns a non-NULL pointer for use with AddHistogramSample
59 // function. min and max define the expected minimum and maximum
60 // sample values. buckets is the maximum number of buckets
61 // that the samples will be grouped into.
62 void* CreateHistogram(const char* name,
66 if (!create_histogram_function_) return NULL;
67 return create_histogram_function_(name, min, max, buckets);
70 // Add a sample to a histogram created with the CreateHistogram
72 void AddHistogramSample(void* histogram, int sample) {
73 if (!add_histogram_sample_function_) return;
74 return add_histogram_sample_function_(histogram, sample);
80 CounterLookupCallback lookup_function_;
81 CreateHistogramCallback create_histogram_function_;
82 AddHistogramSampleCallback add_histogram_sample_function_;
86 DISALLOW_COPY_AND_ASSIGN(StatsTable);
89 // StatsCounters are dynamically created values which can be tracked in
90 // the StatsTable. They are designed to be lightweight to create and
93 // Internally, a counter represents a value in a row of a StatsTable.
94 // The row has a 32bit value for each process/thread in the table and also
95 // a name (stored in the table metadata). Since the storage location can be
96 // thread-specific, this class cannot be shared across threads.
100 explicit StatsCounter(Isolate* isolate, const char* name)
101 : isolate_(isolate), name_(name), ptr_(NULL), lookup_done_(false) { }
103 // Sets the counter to a specific value.
104 void Set(int value) {
106 if (loc) *loc = value;
109 // Increments the counter.
115 void Increment(int value) {
121 // Decrements the counter.
127 void Decrement(int value) {
129 if (loc) (*loc) -= value;
132 // Is this counter enabled?
133 // Returns false if table is full.
135 return GetPtr() != NULL;
138 // Get the internal pointer to the counter. This is used
139 // by the code generator to emit code that manipulates a
140 // given counter without calling the runtime system.
141 int* GetInternalPointer() {
147 // Reset the cached internal pointer.
// Clearing lookup_done_ forces the next GetPtr() call to redo the
// stats-table lookup instead of returning the cached ptr_.
148 void Reset() { lookup_done_ = false; }
151 // Returns the cached address of this counter location.
153 if (lookup_done_) return ptr_;
155 ptr_ = FindLocationInStatsTable();
160 int* FindLocationInStatsTable() const;
168 // A Histogram represents a dynamically created histogram in the StatsTable.
169 // It will be registered with the histogram system on first use.
173 Histogram(const char* name,
181 num_buckets_(num_buckets),
184 isolate_(isolate) { }
186 // Add a single sample to this histogram.
187 void AddSample(int sample);
189 // Returns true if this histogram is enabled.
191 return GetHistogram() != NULL;
194 // Reset the cached internal pointer.
196 lookup_done_ = false;
200 // Returns the handle to the histogram.
201 void* GetHistogram() {
204 histogram_ = CreateHistogram();
209 const char* name() { return name_; }
210 Isolate* isolate() const { return isolate_; }
213 void* CreateHistogram() const;
224 // A HistogramTimer allows distributions of results to be created.
225 class HistogramTimer : public Histogram {
233 HistogramTimer(const char* name, int min, int max, Resolution resolution,
234 int num_buckets, Isolate* isolate)
235 : Histogram(name, min, max, num_buckets, isolate),
236 resolution_(resolution) {}
241 // Stop the timer and record the results.
244 // Returns true if the timer is running.
246 return Enabled() && timer_.IsStarted();
249 // TODO(bmeurer): Remove this when HistogramTimerScope is fixed.
251 base::ElapsedTimer* timer() { return &timer_; }
255 base::ElapsedTimer timer_;
256 Resolution resolution_;
259 // Helper class for scoping a HistogramTimer.
260 // TODO(bmeurer): The ifdeffery is an ugly hack around the fact that the
261 // Parser is currently reentrant (when it throws an error, we call back
262 // into JavaScript and all bets are off), but ElapsedTimer is not
263 // reentry-safe. Fix this properly and remove |allow_nesting|.
264 class HistogramTimerScope BASE_EMBEDDED {
266 explicit HistogramTimerScope(HistogramTimer* timer,
267 bool allow_nesting = false)
270 skipped_timer_start_(false) {
271 if (timer_->timer()->IsStarted() && allow_nesting) {
272 skipped_timer_start_ = true;
282 ~HistogramTimerScope() {
284 if (!skipped_timer_start_) {
293 HistogramTimer* timer_;
295 bool skipped_timer_start_;
300 // A histogram timer that can aggregate events within a larger scope.
302 // Intended use of this timer is to have an outer (aggregating) and an inner
303 // (to be aggregated) scope, where the inner scope measures the time of events,
304 // and all those inner scope measurements will be summed up by the outer scope.
305 // An example use might be to aggregate the time spent in lazy compilation
306 // while running a script.
309 // - AggregatingHistogramTimerScope, the "outer" scope within which
310 // times will be summed up.
311 // - AggregatedHistogramTimerScope, the "inner" scope which defines the
312 // events to be timed.
313 class AggregatableHistogramTimer : public Histogram {
315 AggregatableHistogramTimer() {}
316 AggregatableHistogramTimer(const char* name, int min, int max,
317 int num_buckets, Isolate* isolate)
318 : Histogram(name, min, max, num_buckets, isolate) {}
320 // Start/stop the "outer" scope.
// Begin the "outer" aggregation scope: reset the accumulated time to zero
// so inner scopes can Add() their measurements into a fresh total.
321 void Start() { time_ = base::TimeDelta(); }
// End the "outer" scope: record the summed time, truncated to int
// microseconds, as a single histogram sample.
322 void Stop() { AddSample(static_cast<int>(time_.InMicroseconds())); }
324 // Add a time value ("inner" scope).
// Accumulates one inner-scope measurement into the running total.
325 void Add(base::TimeDelta other) { time_ += other; }
328 base::TimeDelta time_;
332 // A helper class for use with AggregatableHistogramTimer.
333 class AggregatingHistogramTimerScope {
335 explicit AggregatingHistogramTimerScope(AggregatableHistogramTimer* histogram)
336 : histogram_(histogram) {
// On scope exit, stop the aggregating ("outer") timer, which records the
// summed inner-scope times as one histogram sample.
339 ~AggregatingHistogramTimerScope() { histogram_->Stop(); }
342 AggregatableHistogramTimer* histogram_;
346 // A helper class for use with AggregatableHistogramTimer.
347 class AggregatedHistogramTimerScope {
349 explicit AggregatedHistogramTimerScope(AggregatableHistogramTimer* histogram)
350 : histogram_(histogram) {
// On scope exit, fold this inner scope's elapsed time into the outer
// aggregating timer's running total.
353 ~AggregatedHistogramTimerScope() { histogram_->Add(timer_.Elapsed()); }
356 base::ElapsedTimer timer_;
357 AggregatableHistogramTimer* histogram_;
361 #define HISTOGRAM_RANGE_LIST(HR) \
362 /* Generic range histograms */ \
363 HR(detached_context_age_in_gc, V8.DetachedContextAgeInGC, 0, 20, 21) \
364 HR(gc_idle_time_allotted_in_ms, V8.GCIdleTimeAllottedInMS, 0, 10000, 101) \
365 HR(gc_idle_time_limit_overshot, V8.GCIdleTimeLimit.Overshot, 0, 10000, 101) \
366 HR(gc_idle_time_limit_undershot, V8.GCIdleTimeLimit.Undershot, 0, 10000, \
368 HR(code_cache_reject_reason, V8.CodeCacheRejectReason, 1, 6, 6)
370 #define HISTOGRAM_TIMER_LIST(HT) \
371 /* Garbage collection timers. */ \
372 HT(gc_compactor, V8.GCCompactor, 10000, MILLISECOND) \
373 HT(gc_scavenger, V8.GCScavenger, 10000, MILLISECOND) \
374 HT(gc_context, V8.GCContext, 10000, \
375 MILLISECOND) /* GC context cleanup time */ \
376 HT(gc_idle_notification, V8.GCIdleNotification, 10000, MILLISECOND) \
377 HT(gc_incremental_marking, V8.GCIncrementalMarking, 10000, MILLISECOND) \
378 HT(gc_low_memory_notification, V8.GCLowMemoryNotification, 10000, \
380 /* Parsing timers. */ \
381 HT(parse, V8.ParseMicroSeconds, 1000000, MICROSECOND) \
382 HT(parse_lazy, V8.ParseLazyMicroSeconds, 1000000, MICROSECOND) \
383 HT(pre_parse, V8.PreParseMicroSeconds, 1000000, MICROSECOND) \
384 /* Compilation times. */ \
385 HT(compile, V8.CompileMicroSeconds, 1000000, MICROSECOND) \
386 HT(compile_eval, V8.CompileEvalMicroSeconds, 1000000, MICROSECOND) \
387 /* Serialization as part of compilation (code caching) */ \
388 HT(compile_serialize, V8.CompileSerializeMicroSeconds, 100000, MICROSECOND) \
389 HT(compile_deserialize, V8.CompileDeserializeMicroSeconds, 1000000, \
391 /* Total compilation time incl. caching/parsing */ \
392 HT(compile_script, V8.CompileScriptMicroSeconds, 1000000, MICROSECOND)
395 #define AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT) \
396 AHT(compile_lazy, V8.CompileLazyMicroSeconds)
399 #define HISTOGRAM_PERCENTAGE_LIST(HP) \
400 /* Heap fragmentation. */ \
401 HP(external_fragmentation_total, \
402 V8.MemoryExternalFragmentationTotal) \
403 HP(external_fragmentation_old_pointer_space, \
404 V8.MemoryExternalFragmentationOldPointerSpace) \
405 HP(external_fragmentation_old_data_space, \
406 V8.MemoryExternalFragmentationOldDataSpace) \
407 HP(external_fragmentation_code_space, \
408 V8.MemoryExternalFragmentationCodeSpace) \
409 HP(external_fragmentation_map_space, \
410 V8.MemoryExternalFragmentationMapSpace) \
411 HP(external_fragmentation_cell_space, \
412 V8.MemoryExternalFragmentationCellSpace) \
413 HP(external_fragmentation_property_cell_space, \
414 V8.MemoryExternalFragmentationPropertyCellSpace) \
415 HP(external_fragmentation_lo_space, \
416 V8.MemoryExternalFragmentationLoSpace) \
417 /* Percentages of heap committed to each space. */ \
418 HP(heap_fraction_new_space, \
419 V8.MemoryHeapFractionNewSpace) \
420 HP(heap_fraction_old_pointer_space, \
421 V8.MemoryHeapFractionOldPointerSpace) \
422 HP(heap_fraction_old_data_space, \
423 V8.MemoryHeapFractionOldDataSpace) \
424 HP(heap_fraction_code_space, \
425 V8.MemoryHeapFractionCodeSpace) \
426 HP(heap_fraction_map_space, \
427 V8.MemoryHeapFractionMapSpace) \
428 HP(heap_fraction_cell_space, \
429 V8.MemoryHeapFractionCellSpace) \
430 HP(heap_fraction_property_cell_space, \
431 V8.MemoryHeapFractionPropertyCellSpace) \
432 HP(heap_fraction_lo_space, \
433 V8.MemoryHeapFractionLoSpace) \
434 /* Percentage of crankshafted codegen. */ \
435 HP(codegen_fraction_crankshaft, \
436 V8.CodegenFractionCrankshaft) \
439 #define HISTOGRAM_MEMORY_LIST(HM) \
440 HM(heap_sample_total_committed, V8.MemoryHeapSampleTotalCommitted) \
441 HM(heap_sample_total_used, V8.MemoryHeapSampleTotalUsed) \
442 HM(heap_sample_map_space_committed, \
443 V8.MemoryHeapSampleMapSpaceCommitted) \
444 HM(heap_sample_cell_space_committed, \
445 V8.MemoryHeapSampleCellSpaceCommitted) \
446 HM(heap_sample_property_cell_space_committed, \
447 V8.MemoryHeapSamplePropertyCellSpaceCommitted) \
448 HM(heap_sample_code_space_committed, \
449 V8.MemoryHeapSampleCodeSpaceCommitted) \
450 HM(heap_sample_maximum_committed, \
451 V8.MemoryHeapSampleMaximumCommitted) \
454 // WARNING: STATS_COUNTER_LIST_* is a very large macro that is causing MSVC
455 // Intellisense to crash. It was broken into two macros (each of length 40
456 // lines) rather than one macro (of length about 80 lines) to work around
457 // this problem. Please avoid using recursive macros of this length when
459 #define STATS_COUNTER_LIST_1(SC) \
460 /* Global Handle Count*/ \
461 SC(global_handles, V8.GlobalHandles) \
462 /* OS Memory allocated */ \
463 SC(memory_allocated, V8.OsMemoryAllocated) \
464 SC(normalized_maps, V8.NormalizedMaps) \
465 SC(props_to_dictionary, V8.ObjectPropertiesToDictionary) \
466 SC(elements_to_dictionary, V8.ObjectElementsToDictionary) \
467 SC(alive_after_last_gc, V8.AliveAfterLastGC) \
468 SC(objs_since_last_young, V8.ObjsSinceLastYoung) \
469 SC(objs_since_last_full, V8.ObjsSinceLastFull) \
470 SC(string_table_capacity, V8.StringTableCapacity) \
471 SC(number_of_symbols, V8.NumberOfSymbols) \
472 SC(script_wrappers, V8.ScriptWrappers) \
473 SC(call_initialize_stubs, V8.CallInitializeStubs) \
474 SC(call_premonomorphic_stubs, V8.CallPreMonomorphicStubs) \
475 SC(call_normal_stubs, V8.CallNormalStubs) \
476 SC(call_megamorphic_stubs, V8.CallMegamorphicStubs) \
477 SC(inlined_copied_elements, V8.InlinedCopiedElements) \
478 SC(arguments_adaptors, V8.ArgumentsAdaptors) \
479 SC(compilation_cache_hits, V8.CompilationCacheHits) \
480 SC(compilation_cache_misses, V8.CompilationCacheMisses) \
481 SC(string_ctor_calls, V8.StringConstructorCalls) \
482 SC(string_ctor_conversions, V8.StringConstructorConversions) \
483 SC(string_ctor_cached_number, V8.StringConstructorCachedNumber) \
484 SC(string_ctor_string_value, V8.StringConstructorStringValue) \
485 SC(string_ctor_gc_required, V8.StringConstructorGCRequired) \
486 /* Amount of evaled source code. */ \
487 SC(total_eval_size, V8.TotalEvalSize) \
488 /* Amount of loaded source code. */ \
489 SC(total_load_size, V8.TotalLoadSize) \
490 /* Amount of parsed source code. */ \
491 SC(total_parse_size, V8.TotalParseSize) \
492 /* Amount of source code skipped over using preparsing. */ \
493 SC(total_preparse_skipped, V8.TotalPreparseSkipped) \
494 /* Number of symbol lookups skipped using preparsing */ \
495 SC(total_preparse_symbols_skipped, V8.TotalPreparseSymbolSkipped) \
496 /* Amount of compiled source code. */ \
497 SC(total_compile_size, V8.TotalCompileSize) \
498 /* Amount of source code compiled with the full codegen. */ \
499 SC(total_full_codegen_source_size, V8.TotalFullCodegenSourceSize) \
500 /* Number of contexts created from scratch. */ \
501 SC(contexts_created_from_scratch, V8.ContextsCreatedFromScratch) \
502 /* Number of contexts created by partial snapshot. */ \
503 SC(contexts_created_by_snapshot, V8.ContextsCreatedBySnapshot) \
504 /* Number of code objects found from pc. */ \
505 SC(pc_to_code, V8.PcToCode) \
506 SC(pc_to_code_cached, V8.PcToCodeCached) \
507 /* The store-buffer implementation of the write barrier. */ \
508 SC(store_buffer_compactions, V8.StoreBufferCompactions) \
509 SC(store_buffer_overflows, V8.StoreBufferOverflows)
512 #define STATS_COUNTER_LIST_2(SC) \
513 /* Number of code stubs. */ \
514 SC(code_stubs, V8.CodeStubs) \
515 /* Amount of stub code. */ \
516 SC(total_stubs_code_size, V8.TotalStubsCodeSize) \
517 /* Amount of (JS) compiled code. */ \
518 SC(total_compiled_code_size, V8.TotalCompiledCodeSize) \
519 SC(gc_compactor_caused_by_request, V8.GCCompactorCausedByRequest) \
520 SC(gc_compactor_caused_by_promoted_data, V8.GCCompactorCausedByPromotedData) \
521 SC(gc_compactor_caused_by_oldspace_exhaustion, \
522 V8.GCCompactorCausedByOldspaceExhaustion) \
523 SC(gc_last_resort_from_js, V8.GCLastResortFromJS) \
524 SC(gc_last_resort_from_handles, V8.GCLastResortFromHandles) \
525 /* How is the generic keyed-load stub used? */ \
526 SC(keyed_load_generic_smi, V8.KeyedLoadGenericSmi) \
527 SC(keyed_load_generic_symbol, V8.KeyedLoadGenericSymbol) \
528 SC(keyed_load_generic_lookup_cache, V8.KeyedLoadGenericLookupCache) \
529 SC(keyed_load_generic_slow, V8.KeyedLoadGenericSlow) \
530 SC(keyed_load_polymorphic_stubs, V8.KeyedLoadPolymorphicStubs) \
531 SC(keyed_load_external_array_slow, V8.KeyedLoadExternalArraySlow) \
532 /* How is the generic keyed-call stub used? */ \
533 SC(keyed_call_generic_smi_fast, V8.KeyedCallGenericSmiFast) \
534 SC(keyed_call_generic_smi_dict, V8.KeyedCallGenericSmiDict) \
535 SC(keyed_call_generic_lookup_cache, V8.KeyedCallGenericLookupCache) \
536 SC(keyed_call_generic_lookup_dict, V8.KeyedCallGenericLookupDict) \
537 SC(keyed_call_generic_slow, V8.KeyedCallGenericSlow) \
538 SC(keyed_call_generic_slow_load, V8.KeyedCallGenericSlowLoad) \
539 SC(named_load_global_stub, V8.NamedLoadGlobalStub) \
540 SC(named_store_global_inline, V8.NamedStoreGlobalInline) \
541 SC(named_store_global_inline_miss, V8.NamedStoreGlobalInlineMiss) \
542 SC(keyed_store_polymorphic_stubs, V8.KeyedStorePolymorphicStubs) \
543 SC(keyed_store_external_array_slow, V8.KeyedStoreExternalArraySlow) \
544 SC(store_normal_miss, V8.StoreNormalMiss) \
545 SC(store_normal_hit, V8.StoreNormalHit) \
546 SC(cow_arrays_created_stub, V8.COWArraysCreatedStub) \
547 SC(cow_arrays_created_runtime, V8.COWArraysCreatedRuntime) \
548 SC(cow_arrays_converted, V8.COWArraysConverted) \
549 SC(call_miss, V8.CallMiss) \
550 SC(keyed_call_miss, V8.KeyedCallMiss) \
551 SC(load_miss, V8.LoadMiss) \
552 SC(keyed_load_miss, V8.KeyedLoadMiss) \
553 SC(call_const, V8.CallConst) \
554 SC(call_const_fast_api, V8.CallConstFastApi) \
555 SC(call_const_interceptor, V8.CallConstInterceptor) \
556 SC(call_const_interceptor_fast_api, V8.CallConstInterceptorFastApi) \
557 SC(call_global_inline, V8.CallGlobalInline) \
558 SC(call_global_inline_miss, V8.CallGlobalInlineMiss) \
559 SC(constructed_objects, V8.ConstructedObjects) \
560 SC(constructed_objects_runtime, V8.ConstructedObjectsRuntime) \
561 SC(negative_lookups, V8.NegativeLookups) \
562 SC(negative_lookups_miss, V8.NegativeLookupsMiss) \
563 SC(megamorphic_stub_cache_probes, V8.MegamorphicStubCacheProbes) \
564 SC(megamorphic_stub_cache_misses, V8.MegamorphicStubCacheMisses) \
565 SC(megamorphic_stub_cache_updates, V8.MegamorphicStubCacheUpdates) \
566 SC(array_function_runtime, V8.ArrayFunctionRuntime) \
567 SC(array_function_native, V8.ArrayFunctionNative) \
568 SC(for_in, V8.ForIn) \
569 SC(enum_cache_hits, V8.EnumCacheHits) \
570 SC(enum_cache_misses, V8.EnumCacheMisses) \
571 SC(fast_new_closure_total, V8.FastNewClosureTotal) \
572 SC(fast_new_closure_try_optimized, V8.FastNewClosureTryOptimized) \
573 SC(fast_new_closure_install_optimized, V8.FastNewClosureInstallOptimized) \
574 SC(string_add_runtime, V8.StringAddRuntime) \
575 SC(string_add_native, V8.StringAddNative) \
576 SC(string_add_runtime_ext_to_one_byte, V8.StringAddRuntimeExtToOneByte) \
577 SC(sub_string_runtime, V8.SubStringRuntime) \
578 SC(sub_string_native, V8.SubStringNative) \
579 SC(string_add_make_two_char, V8.StringAddMakeTwoChar) \
580 SC(string_compare_native, V8.StringCompareNative) \
581 SC(string_compare_runtime, V8.StringCompareRuntime) \
582 SC(regexp_entry_runtime, V8.RegExpEntryRuntime) \
583 SC(regexp_entry_native, V8.RegExpEntryNative) \
584 SC(number_to_string_native, V8.NumberToStringNative) \
585 SC(number_to_string_runtime, V8.NumberToStringRuntime) \
586 SC(math_acos, V8.MathAcos) \
587 SC(math_asin, V8.MathAsin) \
588 SC(math_atan, V8.MathAtan) \
589 SC(math_atan2, V8.MathAtan2) \
590 SC(math_exp, V8.MathExp) \
591 SC(math_floor, V8.MathFloor) \
592 SC(math_log, V8.MathLog) \
593 SC(math_pow, V8.MathPow) \
594 SC(math_round, V8.MathRound) \
595 SC(math_sqrt, V8.MathSqrt) \
596 SC(stack_interrupts, V8.StackInterrupts) \
597 SC(runtime_profiler_ticks, V8.RuntimeProfilerTicks) \
598 SC(bounds_checks_eliminated, V8.BoundsChecksEliminated) \
599 SC(bounds_checks_hoisted, V8.BoundsChecksHoisted) \
600 SC(soft_deopts_requested, V8.SoftDeoptsRequested) \
601 SC(soft_deopts_inserted, V8.SoftDeoptsInserted) \
602 SC(soft_deopts_executed, V8.SoftDeoptsExecuted) \
603 /* Number of write barriers in generated code. */ \
604 SC(write_barriers_dynamic, V8.WriteBarriersDynamic) \
605 SC(write_barriers_static, V8.WriteBarriersStatic) \
606 SC(new_space_bytes_available, V8.MemoryNewSpaceBytesAvailable) \
607 SC(new_space_bytes_committed, V8.MemoryNewSpaceBytesCommitted) \
608 SC(new_space_bytes_used, V8.MemoryNewSpaceBytesUsed) \
609 SC(old_pointer_space_bytes_available, \
610 V8.MemoryOldPointerSpaceBytesAvailable) \
611 SC(old_pointer_space_bytes_committed, \
612 V8.MemoryOldPointerSpaceBytesCommitted) \
613 SC(old_pointer_space_bytes_used, V8.MemoryOldPointerSpaceBytesUsed) \
614 SC(old_data_space_bytes_available, V8.MemoryOldDataSpaceBytesAvailable) \
615 SC(old_data_space_bytes_committed, V8.MemoryOldDataSpaceBytesCommitted) \
616 SC(old_data_space_bytes_used, V8.MemoryOldDataSpaceBytesUsed) \
617 SC(code_space_bytes_available, V8.MemoryCodeSpaceBytesAvailable) \
618 SC(code_space_bytes_committed, V8.MemoryCodeSpaceBytesCommitted) \
619 SC(code_space_bytes_used, V8.MemoryCodeSpaceBytesUsed) \
620 SC(map_space_bytes_available, V8.MemoryMapSpaceBytesAvailable) \
621 SC(map_space_bytes_committed, V8.MemoryMapSpaceBytesCommitted) \
622 SC(map_space_bytes_used, V8.MemoryMapSpaceBytesUsed) \
623 SC(cell_space_bytes_available, V8.MemoryCellSpaceBytesAvailable) \
624 SC(cell_space_bytes_committed, V8.MemoryCellSpaceBytesCommitted) \
625 SC(cell_space_bytes_used, V8.MemoryCellSpaceBytesUsed) \
626 SC(property_cell_space_bytes_available, \
627 V8.MemoryPropertyCellSpaceBytesAvailable) \
628 SC(property_cell_space_bytes_committed, \
629 V8.MemoryPropertyCellSpaceBytesCommitted) \
630 SC(property_cell_space_bytes_used, V8.MemoryPropertyCellSpaceBytesUsed) \
631 SC(lo_space_bytes_available, V8.MemoryLoSpaceBytesAvailable) \
632 SC(lo_space_bytes_committed, V8.MemoryLoSpaceBytesCommitted) \
633 SC(lo_space_bytes_used, V8.MemoryLoSpaceBytesUsed)
636 // This file contains all the v8 counters that are in use.
639 #define HR(name, caption, min, max, num_buckets) \
640 Histogram* name() { return &name##_; }
641 HISTOGRAM_RANGE_LIST(HR)
644 #define HT(name, caption, max, res) \
645 HistogramTimer* name() { return &name##_; }
646 HISTOGRAM_TIMER_LIST(HT)
649 #define AHT(name, caption) \
650 AggregatableHistogramTimer* name() { return &name##_; }
651 AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
654 #define HP(name, caption) \
655 Histogram* name() { return &name##_; }
656 HISTOGRAM_PERCENTAGE_LIST(HP)
659 #define HM(name, caption) \
660 Histogram* name() { return &name##_; }
661 HISTOGRAM_MEMORY_LIST(HM)
664 #define SC(name, caption) \
665 StatsCounter* name() { return &name##_; }
666 STATS_COUNTER_LIST_1(SC)
667 STATS_COUNTER_LIST_2(SC)
671 StatsCounter* count_of_##name() { return &count_of_##name##_; } \
672 StatsCounter* size_of_##name() { return &size_of_##name##_; }
673 INSTANCE_TYPE_LIST(SC)
677 StatsCounter* count_of_CODE_TYPE_##name() \
678 { return &count_of_CODE_TYPE_##name##_; } \
679 StatsCounter* size_of_CODE_TYPE_##name() \
680 { return &size_of_CODE_TYPE_##name##_; }
685 StatsCounter* count_of_FIXED_ARRAY_##name() \
686 { return &count_of_FIXED_ARRAY_##name##_; } \
687 StatsCounter* size_of_FIXED_ARRAY_##name() \
688 { return &size_of_FIXED_ARRAY_##name##_; }
689 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
693 StatsCounter* count_of_CODE_AGE_##name() \
694 { return &count_of_CODE_AGE_##name##_; } \
695 StatsCounter* size_of_CODE_AGE_##name() \
696 { return &size_of_CODE_AGE_##name##_; }
697 CODE_AGE_LIST_COMPLETE(SC)
701 #define RATE_ID(name, caption, max, res) k_##name,
702 HISTOGRAM_TIMER_LIST(RATE_ID)
704 #define AGGREGATABLE_ID(name, caption) k_##name,
705 AGGREGATABLE_HISTOGRAM_TIMER_LIST(AGGREGATABLE_ID)
706 #undef AGGREGATABLE_ID
707 #define PERCENTAGE_ID(name, caption) k_##name,
708 HISTOGRAM_PERCENTAGE_LIST(PERCENTAGE_ID)
710 #define MEMORY_ID(name, caption) k_##name,
711 HISTOGRAM_MEMORY_LIST(MEMORY_ID)
713 #define COUNTER_ID(name, caption) k_##name,
714 STATS_COUNTER_LIST_1(COUNTER_ID)
715 STATS_COUNTER_LIST_2(COUNTER_ID)
717 #define COUNTER_ID(name) kCountOf##name, kSizeOf##name,
718 INSTANCE_TYPE_LIST(COUNTER_ID)
720 #define COUNTER_ID(name) kCountOfCODE_TYPE_##name, \
721 kSizeOfCODE_TYPE_##name,
722 CODE_KIND_LIST(COUNTER_ID)
724 #define COUNTER_ID(name) kCountOfFIXED_ARRAY__##name, \
725 kSizeOfFIXED_ARRAY__##name,
726 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(COUNTER_ID)
728 #define COUNTER_ID(name) kCountOfCODE_AGE__##name, \
729 kSizeOfCODE_AGE__##name,
730 CODE_AGE_LIST_COMPLETE(COUNTER_ID)
735 void ResetCounters();
736 void ResetHistograms();
739 #define HR(name, caption, min, max, num_buckets) Histogram name##_;
740 HISTOGRAM_RANGE_LIST(HR)
743 #define HT(name, caption, max, res) HistogramTimer name##_;
744 HISTOGRAM_TIMER_LIST(HT)
747 #define AHT(name, caption) \
748 AggregatableHistogramTimer name##_;
749 AGGREGATABLE_HISTOGRAM_TIMER_LIST(AHT)
752 #define HP(name, caption) \
754 HISTOGRAM_PERCENTAGE_LIST(HP)
757 #define HM(name, caption) \
759 HISTOGRAM_MEMORY_LIST(HM)
762 #define SC(name, caption) \
763 StatsCounter name##_;
764 STATS_COUNTER_LIST_1(SC)
765 STATS_COUNTER_LIST_2(SC)
769 StatsCounter size_of_##name##_; \
770 StatsCounter count_of_##name##_;
771 INSTANCE_TYPE_LIST(SC)
775 StatsCounter size_of_CODE_TYPE_##name##_; \
776 StatsCounter count_of_CODE_TYPE_##name##_;
781 StatsCounter size_of_FIXED_ARRAY_##name##_; \
782 StatsCounter count_of_FIXED_ARRAY_##name##_;
783 FIXED_ARRAY_SUB_INSTANCE_TYPE_LIST(SC)
787 StatsCounter size_of_CODE_AGE_##name##_; \
788 StatsCounter count_of_CODE_AGE_##name##_;
789 CODE_AGE_LIST_COMPLETE(SC)
792 friend class Isolate;
794 explicit Counters(Isolate* isolate);
796 DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
799 } } // namespace v8::internal
801 #endif // V8_COUNTERS_H_