}
~ArrayContextChecker() {
- checker_.ElseDeopt();
+ checker_.ElseDeopt("Array constructor called from different context");
checker_.End();
}
private:
IfBuilder builder(this);
builder.IfNot<HCompareObjectEqAndBranch, HValue*>(undefined, undefined);
builder.Then();
- builder.ElseDeopt();
+ builder.ElseDeopt("Forced deopt to runtime");
return undefined;
}
length));
}
- checker.ElseDeopt();
+ checker.ElseDeopt("Uninitialized boilerplate literals");
checker.End();
return environment()->Pop();
}
environment()->Push(object);
- checker.ElseDeopt();
+ checker.ElseDeopt("Uninitialized boilerplate in fast clone");
checker.End();
return environment()->Pop();
IfBuilder builder(this);
builder.If<HCompareObjectEqAndBranch>(cell_contents, value);
builder.Then();
- builder.ElseDeopt();
+ builder.ElseDeopt("Unexpected cell contents in constant global store");
builder.End();
} else {
// Load the payload of the global parameter cell. A hole indicates that the
HValue* hole_value = Add<HConstant>(hole);
builder.If<HCompareObjectEqAndBranch>(cell_contents, hole_value);
builder.Then();
- builder.Deopt();
+ builder.Deopt("Unexpected cell contents in global store");
builder.Else();
Add<HStoreNamedField>(cell, access, value);
builder.End();
if (FLAG_trace_elements_transitions) {
// Tracing elements transitions is the job of the runtime.
- Add<HDeoptimize>(Deoptimizer::EAGER);
+ Add<HDeoptimize>("Deopt due to --trace-elements-transitions",
+ Deoptimizer::EAGER);
} else {
info()->MarkAsSavesCallerDoubles();
}
-void HGraphBuilder::IfBuilder::Deopt() {
+void HGraphBuilder::IfBuilder::Deopt(const char* reason) {
ASSERT(did_then_);
if (did_else_) {
deopt_else_ = true;
} else {
deopt_then_ = true;
}
- builder_->Add<HDeoptimize>(Deoptimizer::EAGER);
+ builder_->Add<HDeoptimize>(reason, Deoptimizer::EAGER);
}
void HGraphBuilder::FinishExitWithHardDeoptimization(
- HBasicBlock* continuation) {
+ const char* reason, HBasicBlock* continuation) {
PadEnvironmentForContinuation(current_block(), continuation);
- Add<HDeoptimize>(Deoptimizer::EAGER);
+ Add<HDeoptimize>(reason, Deoptimizer::EAGER);
if (no_side_effects_scope_count_ > 0) {
current_block()->GotoNoSimulate(continuation);
} else {
IfBuilder key_checker(this);
key_checker.If<HCompareNumericAndBranch>(key, max_capacity, Token::LT);
key_checker.Then();
- key_checker.ElseDeopt();
+ key_checker.ElseDeopt("Key out of capacity range");
key_checker.End();
HValue* new_capacity = BuildNewElementsCapacity(key);
negative_checker.Then();
HInstruction* result = AddExternalArrayElementAccess(
external_elements, key, val, bounds_check, elements_kind, is_store);
- negative_checker.ElseDeopt();
+ negative_checker.ElseDeopt("Negative key encountered");
length_checker.End();
return result;
} else {
// emitted below is the actual monomorphic map.
BuildCheckMap(value, type->Classes().Current());
} else {
- if_nil.Deopt();
+ if_nil.Deopt("Too many undetectable types");
}
}
if (stmt->switch_type() == SwitchStatement::SMI_SWITCH) {
if (!clause->compare_type()->Is(Type::Smi())) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Non-smi switch type", Deoptimizer::SOFT);
}
HCompareNumericAndBranch* compare_ =
// know about and do not want to handle ones we've never seen. Otherwise
// use a generic IC.
if (count == types->length() && FLAG_deoptimize_uncommon_cases) {
- FinishExitWithHardDeoptimization(join);
+ FinishExitWithHardDeoptimization("All known maps handled", join);
} else {
HInstruction* instr = BuildStoreNamedGeneric(object, name, store_value);
instr->set_position(position);
HValue* value = environment()->ExpressionStackAt(0);
HValue* object = environment()->ExpressionStackAt(1);
- if (expr->IsUninitialized()) Add<HDeoptimize>(Deoptimizer::SOFT);
+ if (expr->IsUninitialized()) {
+ Add<HDeoptimize>("Insufficient type feedback for property assignment",
+ Deoptimizer::SOFT);
+ }
return BuildStoreNamed(expr, expr->id(), expr->position(),
expr->AssignmentId(), prop, object, value, value);
} else {
}
builder.Then();
builder.Else();
- Add<HDeoptimize>(Deoptimizer::EAGER);
+ Add<HDeoptimize>("Constant global variable assignment",
+ Deoptimizer::EAGER);
builder.End();
}
HInstruction* instr =
Handle<String> name,
Property* expr) {
if (expr->IsUninitialized()) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Insufficient feedback for generic named load",
+ Deoptimizer::SOFT);
}
HValue* context = environment()->context();
return new(zone()) HLoadNamedGeneric(context, object, name);
// Deopt if none of the cases matched.
NoObservableSideEffectsScope scope(this);
- FinishExitWithHardDeoptimization(join);
+ FinishExitWithHardDeoptimization("Unknown type in polymorphic element access",
+ join);
set_current_block(join);
return is_store ? NULL : Pop();
}
} else {
if (is_store) {
if (expr->IsAssignment() && expr->AsAssignment()->IsUninitialized()) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Insufficient feedback for keyed store",
+ Deoptimizer::SOFT);
}
instr = BuildStoreKeyedGeneric(obj, key, val);
} else {
if (expr->AsProperty()->IsUninitialized()) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Insufficient feedback for keyed load",
+ Deoptimizer::SOFT);
}
instr = BuildLoadKeyedGeneric(obj, key);
}
// that the environment stack matches the depth on deopt that it otherwise
// would have had after a successful call.
Drop(argument_count - (ast_context()->IsEffect() ? 0 : 1));
- FinishExitWithHardDeoptimization(join);
+ FinishExitWithHardDeoptimization("Unknown map in polymorphic call", join);
} else {
HValue* context = environment()->context();
HCallNamed* call = new(zone()) HCallNamed(context, name, argument_count);
}
if (left_type->Is(Type::None())) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Insufficient type feedback for left side",
+ Deoptimizer::SOFT);
// TODO(rossberg): we should be able to get rid of non-continuous defaults.
left_type = handle(Type::Any(), isolate());
}
if (right_type->Is(Type::None())) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Insufficient type feedback for right side",
+ Deoptimizer::SOFT);
right_type = handle(Type::Any(), isolate());
}
HInstruction* instr = NULL;
// Cases handled below depend on collected type feedback. They should
// soft deoptimize when there is no type feedback.
if (combined_type->Is(Type::None())) {
- Add<HDeoptimize>(Deoptimizer::SOFT);
+ Add<HDeoptimize>("Insufficient type feedback for combined type",
+ Deoptimizer::SOFT);
combined_type = left_type = right_type = handle(Type::Any(), isolate());
}
void PushAndAdd(HInstruction* instr);
- void FinishExitWithHardDeoptimization(HBasicBlock* continuation);
+ void FinishExitWithHardDeoptimization(const char* reason,
+ HBasicBlock* continuation);
void AddIncrementCounter(StatsCounter* counter,
HValue* context);
void Else();
void End();
- void Deopt();
- void ElseDeopt() {
+ void Deopt(const char* reason);
+ void ElseDeopt(const char* reason) {
Else();
- Deopt();
+ Deopt(reason);
}
void Return(HValue* value);
template<>
inline HInstruction* HGraphBuilder::AddUncasted<HDeoptimize>(
- Deoptimizer::BailoutType type) {
+ const char* reason, Deoptimizer::BailoutType type) {
if (type == Deoptimizer::SOFT) {
isolate()->counters()->soft_deopts_requested()->Increment();
if (FLAG_always_opt) return NULL;
}
if (current_block()->IsDeoptimizing()) return NULL;
- HDeoptimize* instr = New<HDeoptimize>(type);
+ HDeoptimize* instr = New<HDeoptimize>(reason, type);
AddInstruction(instr);
if (type == Deoptimizer::SOFT) {
isolate()->counters()->soft_deopts_inserted()->Increment();
template<>
inline HDeoptimize* HGraphBuilder::Add<HDeoptimize>(
- Deoptimizer::BailoutType type) {
- return static_cast<HDeoptimize*>(AddUncasted<HDeoptimize>(type));
+ const char* reason, Deoptimizer::BailoutType type) {
+ return static_cast<HDeoptimize*>(AddUncasted<HDeoptimize>(reason, type));
}