From de510c3a58e067669c61c56e8494e51d6c36a224 Mon Sep 17 00:00:00 2001
From: "jkummerow@chromium.org" <jkummerow@chromium.org@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Date: Thu, 9 Feb 2012 13:30:01 +0000
Subject: [PATCH] Split experimental profiler flags

Review URL: https://chromiumcodereview.appspot.com/9374015

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10660 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
---
 src/compiler.cc         |  2 +-
 src/flag-definitions.h  | 10 ++++++++--
 src/heap.cc             |  2 +-
 src/ic.cc               |  2 +-
 src/mark-compact.cc     |  4 ++--
 src/runtime-profiler.cc | 10 +++++-----
 src/runtime-profiler.h  |  2 +-
 7 files changed, 19 insertions(+), 13 deletions(-)

diff --git a/src/compiler.cc b/src/compiler.cc
index 98786e0..aea889f 100644
--- a/src/compiler.cc
+++ b/src/compiler.cc
@@ -114,7 +114,7 @@ void CompilationInfo::DisableOptimization() {
 // profiler, so they trigger their own optimization when they're called
 // for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
 bool CompilationInfo::ShouldSelfOptimize() {
-  return FLAG_counting_profiler &&
+  return FLAG_self_optimization &&
       FLAG_crankshaft &&
       !Serializer::enabled() &&
       !function()->flags()->Contains(kDontSelfOptimize) &&
diff --git a/src/flag-definitions.h b/src/flag-definitions.h
index 58d671d..59e54dd 100644
--- a/src/flag-definitions.h
+++ b/src/flag-definitions.h
@@ -165,8 +165,14 @@ DEFINE_int(stress_runs, 0, "number of stress runs")
 DEFINE_bool(optimize_closures, true, "optimize closures")
 DEFINE_int(loop_weight, 1, "loop weight for representation inference")
 
-// Count-based optimization decisions.
-DEFINE_bool(counting_profiler, false, "use experimental counter-based profiler")
+// Experimental profiler changes.
+DEFINE_bool(experimental_profiler, false, "enable all profiler experiments")
+DEFINE_bool(watch_ic_patching, false, "profiler considers IC stability")
+DEFINE_bool(self_optimization, false,
+            "primitive functions trigger their own optimization")
+
+DEFINE_implication(experimental_profiler, watch_ic_patching)
+DEFINE_implication(experimental_profiler, self_optimization)
 
 // assembler-ia32.cc / assembler-arm.cc / assembler-x64.cc
 DEFINE_bool(debug_code, false,
diff --git a/src/heap.cc b/src/heap.cc
index 098e1a6..b082886 100644
--- a/src/heap.cc
+++ b/src/heap.cc
@@ -1201,7 +1201,7 @@ void Heap::Scavenge() {
   promotion_queue_.Destroy();
 
   LiveObjectList::UpdateReferencesForScavengeGC();
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     isolate()->runtime_profiler()->UpdateSamplesAfterScavenge();
   }
   incremental_marking()->UpdateMarkingDequeAfterScavenge();
diff --git a/src/ic.cc b/src/ic.cc
index 9122294..9846984 100644
--- a/src/ic.cc
+++ b/src/ic.cc
@@ -293,7 +293,7 @@ Failure* IC::ReferenceError(const char* type, Handle<String> name) {
 
 
 void IC::PostPatching() {
-  if (FLAG_counting_profiler) {
+  if (FLAG_watch_ic_patching) {
     Isolate::Current()->runtime_profiler()->NotifyICChanged();
     // We do not want to optimize until the ICs have settled down,
     // so when they are patched, we postpone optimization for the
diff --git a/src/mark-compact.cc b/src/mark-compact.cc
index 8692ce8..1adb747 100644
--- a/src/mark-compact.cc
+++ b/src/mark-compact.cc
@@ -2373,7 +2373,7 @@ void MarkCompactCollector::AfterMarking() {
     code_flusher_->ProcessCandidates();
   }
 
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     // Clean up dead objects from the runtime profiler.
     heap()->isolate()->runtime_profiler()->RemoveDeadSamples();
   }
@@ -3383,7 +3383,7 @@ void MarkCompactCollector::EvacuateNewSpaceAndCandidates() {
   heap_->UpdateReferencesInExternalStringTable(
       &UpdateReferenceInExternalStringTableEntry);
 
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     // Update JSFunction pointers from the runtime profiler.
     heap()->isolate()->runtime_profiler()->UpdateSamplesAfterCompact(
         &updating_visitor);
diff --git a/src/runtime-profiler.cc b/src/runtime-profiler.cc
index 3d29c17..3e719cd 100644
--- a/src/runtime-profiler.cc
+++ b/src/runtime-profiler.cc
@@ -204,7 +204,7 @@ void RuntimeProfiler::OptimizeNow() {
     JavaScriptFrame* frame = it.frame();
     JSFunction* function = JSFunction::cast(frame->function());
 
-    if (!FLAG_counting_profiler) {
+    if (!FLAG_watch_ic_patching) {
       // Adjust threshold each time we have processed
       // a certain number of ticks.
       if (sampler_ticks_until_threshold_adjustment_ > 0) {
@@ -232,7 +232,7 @@ void RuntimeProfiler::OptimizeNow() {
     // Do not record non-optimizable functions.
     if (!function->IsOptimizable()) continue;
 
-    if (FLAG_counting_profiler) {
+    if (FLAG_watch_ic_patching) {
       int ticks = function->shared()->profiler_ticks();
 
       if (ticks >= kProfilerTicksBeforeOptimization) {
@@ -270,7 +270,7 @@ void RuntimeProfiler::OptimizeNow() {
       }
     }
   }
-  if (FLAG_counting_profiler) {
+  if (FLAG_watch_ic_patching) {
     any_ic_changed_ = false;
     code_generated_ = false;
   } else {  // !FLAG_counting_profiler
@@ -291,7 +291,7 @@ void RuntimeProfiler::NotifyTick() {
 
 void RuntimeProfiler::SetUp() {
   ASSERT(has_been_globally_set_up_);
-  if (!FLAG_counting_profiler) {
+  if (!FLAG_watch_ic_patching) {
     ClearSampleBuffer();
   }
   // If the ticker hasn't already started, make sure to do so to get
@@ -301,7 +301,7 @@ void RuntimeProfiler::SetUp() {
 
 
 void RuntimeProfiler::Reset() {
-  if (FLAG_counting_profiler) {
+  if (FLAG_watch_ic_patching) {
     total_code_generated_ = 0;
   } else {  // !FLAG_counting_profiler
     sampler_threshold_ = kSamplerThresholdInit;
diff --git a/src/runtime-profiler.h b/src/runtime-profiler.h
index c01717c..f374566 100644
--- a/src/runtime-profiler.h
+++ b/src/runtime-profiler.h
@@ -64,7 +64,7 @@ class RuntimeProfiler {
   void NotifyICChanged() { any_ic_changed_ = true; }
 
   void NotifyCodeGenerated(int generated_code_size) {
-    if (FLAG_counting_profiler) {
+    if (FLAG_watch_ic_patching) {
       code_generated_ = true;
       total_code_generated_ += generated_code_size;
     }
-- 
2.7.4
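
A note on the DEFINE_implication lines added above: with them, passing
--experimental_profiler also turns on --watch_ic_patching and
--self_optimization, while each sub-flag remains individually switchable.
The sketch below illustrates the general pattern such a macro relies on
(an X-macro flag list expanded once for flag storage and once more, after
command-line parsing, for implications). It is a hypothetical standalone
example, not the actual v8 implementation; all names in it are invented.

    #include <cstdio>

    // One flag list, expanded twice below with different macro bindings.
    #define FLAG_LIST(DEF_BOOL, DEF_IMPLICATION)                \
      DEF_BOOL(experimental_profiler, false)                    \
      DEF_BOOL(watch_ic_patching, false)                        \
      DEF_BOOL(self_optimization, false)                        \
      DEF_IMPLICATION(experimental_profiler, watch_ic_patching) \
      DEF_IMPLICATION(experimental_profiler, self_optimization)

    // Expansion 1: define one global per flag; implications expand to nothing.
    #define DEFINE_BOOL(name, default_value) bool FLAG_##name = default_value;
    #define IGNORE_IMPLICATION(whenflag, thenflag)
    FLAG_LIST(DEFINE_BOOL, IGNORE_IMPLICATION)
    #undef DEFINE_BOOL
    #undef IGNORE_IMPLICATION

    // Expansion 2: after flag parsing, each implication becomes an assignment.
    static void ComputeFlagImplications() {
    #define IGNORE_BOOL(name, default_value)
    #define APPLY_IMPLICATION(whenflag, thenflag) \
      if (FLAG_##whenflag) FLAG_##thenflag = true;
      FLAG_LIST(IGNORE_BOOL, APPLY_IMPLICATION)
    #undef IGNORE_BOOL
    #undef APPLY_IMPLICATION
    }

    int main() {
      FLAG_experimental_profiler = true;  // as if --experimental_profiler was given
      ComputeFlagImplications();
      std::printf("watch_ic_patching=%d self_optimization=%d\n",
                  FLAG_watch_ic_patching, FLAG_self_optimization);  // prints 1 1
    }

This also explains why only FLAG_watch_ic_patching and FLAG_self_optimization
are consulted in the code paths the patch touches: FLAG_experimental_profiler
acts purely as an umbrella switch and never needs to be read again once the
implications have been applied.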