// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "runtime-profiler.h"

#include "assembler.h"
#include "code-stubs.h"
#include "compilation-cache.h"
#include "deoptimizer.h"
#include "execution.h"
#include "global-handles.h"
#include "isolate-inl.h"
#include "mark-compact.h"
#include "platform.h"
#include "scopeinfo.h"

namespace v8 {
namespace internal {


// Optimization sampler constants.
static const int kSamplerFrameCount = 2;

// Constants for statistical profiler.
static const int kSamplerFrameWeight[kSamplerFrameCount] = { 2, 1 };

static const int kSamplerTicksBetweenThresholdAdjustment = 32;

static const int kSamplerThresholdInit = 3;
static const int kSamplerThresholdMin = 1;
static const int kSamplerThresholdDelta = 1;

static const int kSamplerThresholdSizeFactorInit = 3;

static const int kSizeLimit = 1500;

// Constants for counter based profiler.

// Number of times a function has to be seen on the stack before it is
// optimized.
static const int kProfilerTicksBeforeOptimization = 2;
// If a function does not have enough type info (according to
// FLAG_type_info_threshold), but has seen a huge number of ticks,
// optimize it as it is.
static const int kTicksWhenNotEnoughTypeInfo = 100;
// We only have one byte to store the number of ticks.
STATIC_ASSERT(kTicksWhenNotEnoughTypeInfo < 256);

// Maximum size in bytes of generated code for a function to be optimized
// the very first time it is seen on the stack.
static const int kMaxSizeEarlyOpt = 500;


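// Shared state for the cross-isolate profiler thread protocol implemented
// further down: state_ counts the isolates currently executing JavaScript,
// while the value -1 means the profiler thread is parked on the semaphore
// (see WaitForSomeIsolateToEnterJS and HandleWakeUp).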
Atomic32 RuntimeProfiler::state_ = 0;

// TODO(isolates): Clean up the semaphore when it is no longer required.
static LazySemaphore<0>::type semaphore = LAZY_SEMAPHORE_INITIALIZER;

bool RuntimeProfiler::has_been_globally_set_up_ = false;
bool RuntimeProfiler::enabled_ = false;


RuntimeProfiler::RuntimeProfiler(Isolate* isolate)
    : isolate_(isolate),
      sampler_threshold_(kSamplerThresholdInit),
      sampler_threshold_size_factor_(kSamplerThresholdSizeFactorInit),
      sampler_ticks_until_threshold_adjustment_(
          kSamplerTicksBetweenThresholdAdjustment),
      sampler_window_position_(0),
      any_ic_changed_(false),
      code_generated_(false) {
  ClearSampleBuffer();
}


void RuntimeProfiler::GlobalSetUp() {
  ASSERT(!has_been_globally_set_up_);
  enabled_ = V8::UseCrankshaft() && FLAG_opt;
  has_been_globally_set_up_ = true;
}


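// Reads the inline cache (IC) counters recorded in the function's type
// feedback info and computes the percentage of ICs that carry type
// information. Functions without feedback info report 100%.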
static void GetICCounts(JSFunction* function,
                        int* ic_with_type_info_count,
                        int* ic_total_count,
                        int* percentage) {
  *ic_total_count = 0;
  *ic_with_type_info_count = 0;
  Object* raw_info =
      function->shared()->code()->type_feedback_info();
  if (raw_info->IsTypeFeedbackInfo()) {
    TypeFeedbackInfo* info = TypeFeedbackInfo::cast(raw_info);
    *ic_with_type_info_count = info->ic_with_type_info_count();
    *ic_total_count = info->ic_total_count();
  }
  *percentage = *ic_total_count > 0
      ? 100 * *ic_with_type_info_count / *ic_total_count
      : 100;
}


void RuntimeProfiler::Optimize(JSFunction* function, const char* reason) {
  ASSERT(function->IsOptimizable());
  if (FLAG_trace_opt) {
    PrintF("[marking ");
    function->PrintName();
    PrintF(" 0x%" V8PRIxPTR, reinterpret_cast<intptr_t>(function->address()));
    PrintF(" for recompilation, reason: %s", reason);
    if (FLAG_type_info_threshold > 0) {
      int typeinfo, total, percentage;
      GetICCounts(function, &typeinfo, &total, &percentage);
      PrintF(", ICs with typeinfo: %d/%d (%d%%)", typeinfo, total, percentage);
    }
    PrintF("]\n");
  }

  // The next call to the function will trigger optimization.
  function->MarkForLazyRecompilation();
}


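// Patches the unoptimized code of a function that is already marked for
// lazy recompilation so that its back edges trigger on-stack replacement,
// provided OSR is safe for this function.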
void RuntimeProfiler::AttemptOnStackReplacement(JSFunction* function) {
  // See AlwaysFullCompiler (in compiler.cc) comment on why we need
  // Debug::has_break_points().
  ASSERT(function->IsMarkedForLazyRecompilation());
  if (!FLAG_use_osr ||
      isolate_->DebuggerHasBreakPoints() ||
      function->IsBuiltin()) {
    return;
  }

  SharedFunctionInfo* shared = function->shared();
  // If the code is not optimizable, don't try OSR.
  if (!shared->code()->optimizable()) return;

  // We are not prepared to do OSR for a function that already has an
  // allocated arguments object. The optimized code would bypass it for
  // arguments accesses, which is unsound. Don't try OSR.
  if (shared->uses_arguments()) return;

  // We're using on-stack replacement: patch the unoptimized code so that
  // any back edge in any unoptimized frame will trigger on-stack
  // replacement for that frame.
  if (FLAG_trace_osr) {
    PrintF("[patching stack checks in ");
    function->PrintName();
    PrintF(" for on-stack replacement]\n");
  }

  // Get the stack check stub code object to match against. We aren't
  // prepared to generate it, but we don't expect to have to.
  bool found_code = false;
  Code* stack_check_code = NULL;
  if (FLAG_count_based_interrupts) {
    InterruptStub interrupt_stub;
    found_code = interrupt_stub.FindCodeInCache(&stack_check_code);
  } else {
    StackCheckStub check_stub;
    found_code = check_stub.FindCodeInCache(&stack_check_code);
  }
  if (found_code) {
    Code* replacement_code =
        isolate_->builtins()->builtin(Builtins::kOnStackReplacement);
    Code* unoptimized_code = shared->code();
    Deoptimizer::PatchStackCheckCode(unoptimized_code,
                                     stack_check_code,
                                     replacement_code);
  }
}


void RuntimeProfiler::ClearSampleBuffer() {
  memset(sampler_window_, 0, sizeof(sampler_window_));
  memset(sampler_window_weight_, 0, sizeof(sampler_window_weight_));
}


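// Returns the accumulated sample weight of the given function across the
// sampler window.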
int RuntimeProfiler::LookupSample(JSFunction* function) {
  int weight = 0;
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* sample = sampler_window_[i];
    if (sample != NULL) {
      if (function == sample) {
        weight += sampler_window_weight_[i];
      }
    }
  }
  return weight;
}


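// Records a function in the sampler window, overwriting the oldest entry.
// The window size must be a power of two so the write position can wrap
// with a bitwise AND instead of a modulo.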
void RuntimeProfiler::AddSample(JSFunction* function, int weight) {
  ASSERT(IsPowerOf2(kSamplerWindowSize));
  sampler_window_[sampler_window_position_] = function;
  sampler_window_weight_[sampler_window_position_] = weight;
  sampler_window_position_ = (sampler_window_position_ + 1) &
      (kSamplerWindowSize - 1);
}


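// Drives both profiling strategies from a single entry point. With
// FLAG_watch_ic_patching, the counter-based heuristic is used: a function
// is optimized once it has accumulated kProfilerTicksBeforeOptimization
// ticks with stable ICs, is extremely hot despite poor type info, or is
// very small while no IC changed since the last tick. Without the flag,
// the statistical sampler is used: a function is optimized once its
// window weight reaches sampler_threshold_, scaled by
// sampler_threshold_size_factor_ for sources above kSizeLimit
// (e.g. 3 * 3 = 9 with the initial settings).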
void RuntimeProfiler::OptimizeNow() {
  HandleScope scope(isolate_);

  // Run through the JavaScript frames and collect them. If we already
  // have a sample of the function, we mark it for optimizations
  // (eagerly or lazily).
  JSFunction* samples[kSamplerFrameCount];
  int sample_count = 0;
  int frame_count = 0;
  int frame_count_limit = FLAG_watch_ic_patching ? FLAG_frame_count
                                                 : kSamplerFrameCount;
  for (JavaScriptFrameIterator it(isolate_);
       frame_count++ < frame_count_limit && !it.done();
       it.Advance()) {
    JavaScriptFrame* frame = it.frame();
    JSFunction* function = JSFunction::cast(frame->function());

    if (!FLAG_watch_ic_patching) {
      // Adjust threshold each time we have processed
      // a certain number of ticks.
      if (sampler_ticks_until_threshold_adjustment_ > 0) {
        sampler_ticks_until_threshold_adjustment_--;
        if (sampler_ticks_until_threshold_adjustment_ <= 0) {
          // If the threshold is not already at the minimum
          // modify and reset the ticks until next adjustment.
          if (sampler_threshold_ > kSamplerThresholdMin) {
            sampler_threshold_ -= kSamplerThresholdDelta;
            sampler_ticks_until_threshold_adjustment_ =
                kSamplerTicksBetweenThresholdAdjustment;
          }
        }
      }
    }

    Code* shared_code = function->shared()->code();
    if (shared_code->kind() != Code::FUNCTION) continue;

    if (function->IsMarkedForLazyRecompilation()) {
      int nesting = shared_code->allow_osr_at_loop_nesting_level();
      if (nesting == 0) AttemptOnStackReplacement(function);
      int new_nesting = Min(nesting + 1, Code::kMaxLoopNestingMarker);
      shared_code->set_allow_osr_at_loop_nesting_level(new_nesting);
    }

    // Do not record non-optimizable functions.
    if (!function->IsOptimizable()) continue;
    if (function->shared()->optimization_disabled()) continue;

    // Only record top-level code on top of the execution stack and
    // avoid optimizing excessively large scripts since top-level code
    // will be executed only once.
    const int kMaxToplevelSourceSize = 10 * 1024;
    if (function->shared()->is_toplevel()
        && (frame_count > 1
            || function->shared()->SourceSize() > kMaxToplevelSourceSize)) {
      continue;
    }

    if (FLAG_watch_ic_patching) {
      int ticks = shared_code->profiler_ticks();

      if (ticks >= kProfilerTicksBeforeOptimization) {
        int typeinfo, total, percentage;
        GetICCounts(function, &typeinfo, &total, &percentage);
        if (percentage >= FLAG_type_info_threshold) {
          // If this particular function hasn't had any ICs patched for enough
          // ticks, optimize it now.
          Optimize(function, "hot and stable");
        } else if (ticks >= kTicksWhenNotEnoughTypeInfo) {
          Optimize(function, "not much type info but very hot");
        } else {
          shared_code->set_profiler_ticks(ticks + 1);
          if (FLAG_trace_opt_verbose) {
            PrintF("[not yet optimizing ");
            function->PrintName();
            PrintF(", not enough type info: %d/%d (%d%%)]\n",
                   typeinfo, total, percentage);
          }
        }
      } else if (!any_ic_changed_ &&
                 shared_code->instruction_size() < kMaxSizeEarlyOpt) {
        // If no IC was patched since the last tick and this function is very
        // small, optimistically optimize it now.
        Optimize(function, "small function");
      } else {
        shared_code->set_profiler_ticks(ticks + 1);
      }
    } else {  // !FLAG_watch_ic_patching
      samples[sample_count++] = function;

      int function_size = function->shared()->SourceSize();
      int threshold_size_factor = (function_size > kSizeLimit)
          ? sampler_threshold_size_factor_
          : 1;

      int threshold = sampler_threshold_ * threshold_size_factor;

      if (LookupSample(function) >= threshold) {
        Optimize(function, "sampler window lookup");
      }
    }
  }
  if (FLAG_watch_ic_patching) {
    any_ic_changed_ = false;
  } else {  // !FLAG_watch_ic_patching
    // Add the collected functions as samples. It's important not to do
    // this as part of collecting them because this will interfere with
    // the sample lookup in case of recursive functions.
    for (int i = 0; i < sample_count; i++) {
      AddSample(samples[i], kSamplerFrameWeight[i]);
    }
  }
}


void RuntimeProfiler::NotifyTick() {
  if (FLAG_count_based_interrupts) return;
  isolate_->stack_guard()->RequestRuntimeProfilerTick();
}


void RuntimeProfiler::SetUp() {
  ASSERT(has_been_globally_set_up_);
  if (!FLAG_watch_ic_patching) {
    ClearSampleBuffer();
  }
  // If the ticker hasn't already started, make sure to do so to get
  // the ticks for the runtime profiler.
  if (IsEnabled()) isolate_->logger()->EnsureTickerStarted();
}


void RuntimeProfiler::Reset() {
  if (!FLAG_watch_ic_patching) {
    sampler_threshold_ = kSamplerThresholdInit;
    sampler_threshold_size_factor_ = kSamplerThresholdSizeFactorInit;
    sampler_ticks_until_threshold_adjustment_ =
        kSamplerTicksBetweenThresholdAdjustment;
  }
}


void RuntimeProfiler::TearDown() {
  // Nothing to do.
}


int RuntimeProfiler::SamplerWindowSize() {
  return kSamplerWindowSize;
}


// Update the pointers in the sampler window after a GC.
void RuntimeProfiler::UpdateSamplesAfterScavenge() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL && isolate_->heap()->InNewSpace(function)) {
      MapWord map_word = HeapObject::cast(function)->map_word();
      if (map_word.IsForwardingAddress()) {
        sampler_window_[i] = map_word.ToForwardingAddress();
      } else {
        sampler_window_[i] = NULL;
      }
    }
  }
}


void RuntimeProfiler::HandleWakeUp(Isolate* isolate) {
  // The profiler thread must still be waiting.
  ASSERT(NoBarrier_Load(&state_) >= 0);
  // In IsolateEnteredJS we have already incremented the counter and
  // undone the decrement done by the profiler thread. Increment again
  // to get the right count of active isolates.
  NoBarrier_AtomicIncrement(&state_, 1);
  semaphore.Pointer()->Signal();
}


bool RuntimeProfiler::IsSomeIsolateInJS() {
  return NoBarrier_Load(&state_) > 0;
}


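// Parks the calling profiler thread until some isolate enters JavaScript:
// atomically moves state_ from 0 to -1 and blocks on the semaphore.
// Returns false without waiting if an isolate is already active.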
bool RuntimeProfiler::WaitForSomeIsolateToEnterJS() {
  Atomic32 old_state = NoBarrier_CompareAndSwap(&state_, 0, -1);
  ASSERT(old_state >= -1);
  if (old_state != 0) return false;
  semaphore.Pointer()->Wait();
  return true;
}


void RuntimeProfiler::StopRuntimeProfilerThreadBeforeShutdown(Thread* thread) {
  // Do a fake increment. If the profiler is waiting on the semaphore,
  // the returned state is 0, which can be left as an initial state in
  // case profiling is restarted later. If the profiler is not
  // waiting, the increment will prevent it from waiting, but has to
  // be undone after the profiler is stopped.
  Atomic32 new_state = NoBarrier_AtomicIncrement(&state_, 1);
  ASSERT(new_state >= 0);
  if (new_state == 0) {
    // The profiler thread is waiting. Wake it up. It must check for
    // stop conditions before attempting to wait again.
    semaphore.Pointer()->Signal();
  }
  thread->Join();
  // The profiler thread is now stopped. Undo the increment in case it
  // was not waiting.
  if (new_state != 0) {
    NoBarrier_AtomicIncrement(&state_, -1);
  }
}


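// Clears sampler window entries whose mark bit is not set, i.e. functions
// found dead by the marking phase of GC.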
void RuntimeProfiler::RemoveDeadSamples() {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    Object* function = sampler_window_[i];
    if (function != NULL &&
        !Marking::MarkBitFrom(HeapObject::cast(function)).Get()) {
      sampler_window_[i] = NULL;
    }
  }
}


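// Lets a GC visitor update (e.g. relocate) the sampler window slots after
// a compacting collection.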
void RuntimeProfiler::UpdateSamplesAfterCompact(ObjectVisitor* visitor) {
  for (int i = 0; i < kSamplerWindowSize; i++) {
    visitor->VisitPointer(&sampler_window_[i]);
  }
}


bool RuntimeProfilerRateLimiter::SuspendIfNecessary() {
  if (!RuntimeProfiler::IsSomeIsolateInJS()) {
    return RuntimeProfiler::WaitForSomeIsolateToEnterJS();
  }
  return false;
}


} }  // namespace v8::internal