}
+// Returns an ExternalReference to the isolate's stress-deopt counter
+// (Isolate::stress_deopt_count_). Generated code reads and writes this
+// counter directly (see the --deopt-every-n-times stress path in the
+// lithium codegen hunk below) instead of the old per-SharedFunctionInfo
+// stress_deopt_counter field, which this patch removes.
+ExternalReference ExternalReference::stress_deopt_count(Isolate* isolate) {
+ return ExternalReference(isolate->stress_deopt_count_address());
+}
+
+
ExternalReference ExternalReference::transcendental_cache_array_address(
Isolate* isolate) {
return ExternalReference(
reinterpret_cast<ExternalReferenceRedirectorPointer*>(redirector));
}
+ static ExternalReference stress_deopt_count(Isolate* isolate);
+
private:
explicit ExternalReference(void* address)
: address_(address) {}
DEFINE_int(deopt_every_n_garbage_collections,
0,
"deoptimize every n garbage collections")
+DEFINE_bool(print_deopt_stress, false, "print number of possible deopt points")
DEFINE_bool(trap_on_deopt, false, "put a break point before deoptimizing")
DEFINE_bool(deoptimize_uncommon_cases, true, "deoptimize uncommon cases")
DEFINE_bool(polymorphic_inlining, true, "polymorphic inlining")
share->set_inferred_name(empty_string(), SKIP_WRITE_BARRIER);
share->set_initial_map(undefined_value(), SKIP_WRITE_BARRIER);
share->set_ast_node_count(0);
- share->set_stress_deopt_counter(FLAG_deopt_every_n_times);
share->set_counters(0);
// Set integer fields (smi or int, depending on the architecture).
}
if (FLAG_deopt_every_n_times != 0 && !info()->IsStub()) {
- Handle<SharedFunctionInfo> shared(info()->shared_info());
+ ExternalReference count = ExternalReference::stress_deopt_count(isolate());
Label no_deopt;
__ pushfd();
__ push(eax);
- __ push(ebx);
- __ mov(ebx, shared);
- __ mov(eax,
- FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset));
- __ sub(Operand(eax), Immediate(Smi::FromInt(1)));
+ __ mov(eax, Operand::StaticVariable(count));
+ __ sub(eax, Immediate(1));
__ j(not_zero, &no_deopt, Label::kNear);
if (FLAG_trap_on_deopt) __ int3();
- __ mov(eax, Immediate(Smi::FromInt(FLAG_deopt_every_n_times)));
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
- eax);
- __ pop(ebx);
+ __ mov(eax, Immediate(FLAG_deopt_every_n_times));
+ __ mov(Operand::StaticVariable(count), eax);
__ pop(eax);
__ popfd();
ASSERT(frame_is_built_);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
-
__ bind(&no_deopt);
- __ mov(FieldOperand(ebx, SharedFunctionInfo::kStressDeoptCounterOffset),
- eax);
- __ pop(ebx);
+ __ mov(Operand::StaticVariable(count), eax);
__ pop(eax);
__ popfd();
}
optimizing_compiler_thread_(this),
marking_thread_(NULL),
sweeper_thread_(NULL),
- callback_table_(NULL) {
+ callback_table_(NULL),
+ stress_deopt_count_(0) {
id_ = NoBarrier_AtomicIncrement(&isolate_counter_, 1);
TRACE_ISOLATE(constructor);
if (FLAG_hydrogen_stats) GetHStatistics()->Print();
+ if (FLAG_print_deopt_stress) {
+ PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
+ }
+
// We must stop the logger before we tear down other components.
Sampler* sampler = logger_->sampler();
if (sampler && sampler->IsActive()) sampler->Stop();
ASSERT(Isolate::Current() == this);
TRACE_ISOLATE(init);
+ stress_deopt_count_ = FLAG_deopt_every_n_times;
+
if (function_entry_hook() != NULL) {
// When function entry hooking is in effect, we have to create the code
// stubs from scratch to get entry hooks, rather than loading the previously
function_entry_hook_ = function_entry_hook;
}
+ // Raw address of stress_deopt_count_, handed out via
+ // ExternalReference::stress_deopt_count() so generated code can
+ // decrement/reset the counter in place. NOTE(review): returns a
+ // non-const void* to a member from a non-static method; lifetime is
+ // tied to the Isolate — callers must not cache it past isolate teardown.
+ void* stress_deopt_count_address() { return &stress_deopt_count_; }
+
private:
Isolate();
SweeperThread** sweeper_thread_;
CallbackTable* callback_table_;
+ // Counts deopt points if deopt_every_n_times is enabled.
+ unsigned int stress_deopt_count_;
+
friend class ExecutionAccess;
friend class HandleScopeImplementer;
friend class IsolateInitializer;
kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count, kOptCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
-SMI_ACCESSORS(SharedFunctionInfo,
- stress_deopt_counter,
- kStressDeoptCounterOffset)
+
#else
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset) \
PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, opt_count, kOptCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)
-PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
- stress_deopt_counter,
- kStressDeoptCounterOffset)
+
#endif
inline int ast_node_count();
inline void set_ast_node_count(int count);
- // A counter used to determine when to stress the deoptimizer with a
- // deopt.
- inline int stress_deopt_counter();
- inline void set_stress_deopt_counter(int counter);
-
inline int profiler_ticks();
// Inline cache age is used to infer whether the function survived a context
kFunctionTokenPositionOffset + kPointerSize;
static const int kOptCountOffset = kCompilerHintsOffset + kPointerSize;
static const int kCountersOffset = kOptCountOffset + kPointerSize;
- static const int kStressDeoptCounterOffset = kCountersOffset + kPointerSize;
// Total size.
- static const int kSize = kStressDeoptCounterOffset + kPointerSize;
+ static const int kSize = kCountersOffset + kPointerSize;
#else
// The only reason to use smi fields instead of int fields
// is to allow iteration without maps decoding during
static const int kOptCountOffset = kCompilerHintsOffset + kIntSize;
static const int kCountersOffset = kOptCountOffset + kIntSize;
- static const int kStressDeoptCounterOffset = kCountersOffset + kIntSize;
// Total size.
- static const int kSize = kStressDeoptCounterOffset + kIntSize;
+ static const int kSize = kCountersOffset + kIntSize;
#endif