// NOTE(review): this chunk reads as a unified-diff fragment of V8's
// flag-definitions.h; leading '+'/'-' characters are patch markers, not code.
// The line below is the trailing continuation of a DEFINE_bool whose first
// line lies outside this view.
"primitive functions trigger their own optimization")
// Use tick counting instead of wall-clock timing to decide when the profiler
// should interrupt execution.
DEFINE_bool(count_based_interrupts, false,
"trigger profiler ticks based on counting instead of timing")
// NEW in this patch: also decrement the profiling counter and poll for an
// interrupt when a function returns, not only on loop back edges.
+DEFINE_bool(interrupt_at_exit, false,
+ "insert an interrupt check at function exit")
// Scale back-edge counter decrements by jump distance so larger loop bodies
// consume the interrupt budget faster.
DEFINE_bool(weighted_back_edges, false,
"weight back edges by jump distance for interrupt triggering")
// NOTE(review): this DEFINE_int appears truncated in this view — its
// description string and closing parenthesis are missing; confirm against
// the full flag-definitions.h before relying on this fragment.
DEFINE_int(interrupt_budget, 10000,
// --experimental-profiler turns on the whole counting-based interrupt
// machinery via implications; the new interrupt_at_exit flag is added to
// that implication set by this patch.
DEFINE_implication(experimental_profiler, watch_ic_patching)
DEFINE_implication(experimental_profiler, self_optimization)
DEFINE_implication(experimental_profiler, count_based_interrupts)
+DEFINE_implication(experimental_profiler, interrupt_at_exit)
DEFINE_implication(experimental_profiler, weighted_back_edges)
DEFINE_bool(trace_opt_verbose, false, "extra verbose compilation tracing")
// NOTE(review): diff fragment from the interior of a full-codegen (ia32)
// emitter — presumably the back-edge / return-sequence code; the enclosing
// function definition is outside this view, so the surrounding control flow
// cannot be confirmed from here.
// Weight the profiling-counter decrement by generated-code distance,
// clamped to [1, 127] (kept small so it fits a one-byte Smi immediate —
// TODO confirm the clamp rationale against the original CL description).
int weight = 1;
if (FLAG_weighted_back_edges) {
ASSERT(back_edge_target->is_bound());
// CHANGED by this patch: compute the distance with
// SizeOfCodeGeneratedSince(label) instead of pc_offset() - label->pos();
// presumably Label::pos() is not a plain pc offset, making the old
// subtraction incorrect — verify against the Assembler/Label API.
- int distance = masm_->pc_offset() - back_edge_target->pos();
+ int distance = masm_->SizeOfCodeGeneratedSince(back_edge_target);
weight = Min(127, Max(1, distance / 100));
}
// Decrement the per-function profiling counter (held in a Cell) by weight.
__ sub(Operand::Cell(profiling_counter_), Immediate(Smi::FromInt(weight)));
__ push(eax);
__ CallRuntime(Runtime::kTraceExit, 1);
}
// NEW in this patch: optional interrupt check in the return path, enabled by
// --interrupt-at-exit. Mirrors the back-edge check above.
+ if (FLAG_interrupt_at_exit) {
+ // Pretend that the exit is a backwards jump to the entry.
+ int weight = 1;
+ if (FLAG_weighted_back_edges) {
+ // The whole function's emitted size stands in for the jump distance.
+ int distance = masm_->pc_offset();
+ weight = Min(127, Max(1, distance / 100));
+ }
+ __ sub(Operand::Cell(profiling_counter_),
+ Immediate(Smi::FromInt(weight)));
+ Label ok;
+ __ j(positive, &ok, Label::kNear);
// Counter went non-positive: preserve the return value in eax across the
// InterruptStub call, then refill the budget.
+ __ push(eax);
+ InterruptStub stub;
+ __ CallStub(&stub);
+ __ pop(eax);
+ // Reset the countdown.
+ __ mov(Operand::Cell(profiling_counter_),
+ Immediate(Smi::FromInt(FLAG_interrupt_budget)));
+ __ bind(&ok);
+ }
#ifdef DEBUG
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;