int length = deopt_jump_table_.length();
for (int i = 0; i < length; i++) {
- __ bind(&deopt_jump_table_[i].label);
+ Deoptimizer::JumpTableEntry* table_entry = &deopt_jump_table_[i];
+ __ bind(&table_entry->label);
- Deoptimizer::BailoutType type = deopt_jump_table_[i].bailout_type;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
DCHECK(type == deopt_jump_table_[0].bailout_type);
- Address entry = deopt_jump_table_[i].address;
+ Address entry = table_entry->address;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
DCHECK(id != Deoptimizer::kNotDeoptimizationEntry);
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
// Second-level deopt table entries are contiguous and small, so instead
// of loading the full, absolute address of each one, load an immediate
// offset which will be added to the base address later.
__ mov(entry_offset, Operand(entry - base));
- if (deopt_jump_table_[i].needs_frame) {
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
if (needs_frame.is_bound()) {
__ b(&needs_frame);
void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
+ const char* reason,
Deoptimizer::BailoutType bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
// restore caller doubles.
if (condition == al && frame_is_built_ &&
!info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
(deopt_jump_table_.last().address != entry) ||
(deopt_jump_table_.last().bailout_type != bailout_type) ||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason,
+ bailout_type, !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ b(condition, &deopt_jump_table_.last().label);
}
-void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr) {
+void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(condition, instr, bailout_type);
+ DeoptimizeIf(condition, instr, reason, bailout_type);
}
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(al, instr, type);
+ DeoptimizeIf(al, instr, instr->hydrogen()->reason(), type);
}
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
void DeoptimizeIf(Condition condition, LInstruction* instr,
- Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition condition, LInstruction* instr);
+ const char* reason, Deoptimizer::BailoutType bailout_type);
+ void DeoptimizeIf(Condition condition, LInstruction* instr,
+ const char* reason = NULL);
void AddToTranslation(LEnvironment* environment,
Translation* translation,
// the frame (that is done in GeneratePrologue).
FrameScope frame_scope(masm_, StackFrame::NONE);
- return GeneratePrologue() &&
- GenerateBody() &&
- GenerateDeferredCode() &&
- GenerateDeoptJumpTable() &&
- GenerateSafepointTable();
+ return GeneratePrologue() && GenerateBody() && GenerateDeferredCode() &&
+ GenerateJumpTable() && GenerateSafepointTable();
}
}
-bool LCodeGen::GenerateDeoptJumpTable() {
+bool LCodeGen::GenerateJumpTable() {
Label needs_frame, restore_caller_doubles, call_deopt_entry;
- if (deopt_jump_table_.length() > 0) {
+ if (jump_table_.length() > 0) {
Comment(";;; -------------------- Jump table --------------------");
- Address base = deopt_jump_table_[0]->address;
+ Address base = jump_table_[0]->address;
UseScratchRegisterScope temps(masm());
Register entry_offset = temps.AcquireX();
- int length = deopt_jump_table_.length();
+ int length = jump_table_.length();
for (int i = 0; i < length; i++) {
- __ Bind(&deopt_jump_table_[i]->label);
+ Deoptimizer::JumpTableEntry* table_entry = jump_table_[i];
+ __ Bind(&table_entry->label);
- Deoptimizer::BailoutType type = deopt_jump_table_[i]->bailout_type;
- Address entry = deopt_jump_table_[i]->address;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
+ Address entry = table_entry->address;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
// Second-level deopt table entries are contiguous and small, so instead
// of loading the full, absolute address of each one, load the base
// branch.
bool last_entry = (i + 1) == length;
- if (deopt_jump_table_[i]->needs_frame) {
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
if (!needs_frame.is_bound()) {
// This variant of deopt can only be used with stubs. Since we don't
void LCodeGen::DeoptimizeBranch(
- LInstruction* instr, BranchType branch_type, Register reg, int bit,
- Deoptimizer::BailoutType* override_bailout_type) {
+ LInstruction* instr, const char* reason, BranchType branch_type,
+ Register reg, int bit, Deoptimizer::BailoutType* override_bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
Deoptimizer::BailoutType bailout_type =
// Go through jump table if we need to build frame, or restore caller doubles.
if (branch_type == always &&
frame_is_built_ && !info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
// jump entry if this is the case.
- if (deopt_jump_table_.is_empty() ||
- (deopt_jump_table_.last()->address != entry) ||
- (deopt_jump_table_.last()->bailout_type != bailout_type) ||
- (deopt_jump_table_.last()->needs_frame != !frame_is_built_)) {
+ if (jump_table_.is_empty() || (jump_table_.last()->address != entry) ||
+ (jump_table_.last()->bailout_type != bailout_type) ||
+ (jump_table_.last()->needs_frame != !frame_is_built_)) {
Deoptimizer::JumpTableEntry* table_entry =
- new(zone()) Deoptimizer::JumpTableEntry(entry,
- bailout_type,
- !frame_is_built_);
- deopt_jump_table_.Add(table_entry, zone());
+ new (zone()) Deoptimizer::JumpTableEntry(
+ entry, instr->Mnemonic(), reason, bailout_type, !frame_is_built_);
+ jump_table_.Add(table_entry, zone());
}
- __ B(&deopt_jump_table_.last()->label,
- branch_type, reg, bit);
+ __ B(&jump_table_.last()->label, branch_type, reg, bit);
}
}
void LCodeGen::Deoptimize(LInstruction* instr,
- Deoptimizer::BailoutType* override_bailout_type) {
- DeoptimizeBranch(instr, always, NoReg, -1, override_bailout_type);
+ Deoptimizer::BailoutType* override_bailout_type,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, always, NoReg, -1, override_bailout_type);
}
-void LCodeGen::DeoptimizeIf(Condition cond, LInstruction* instr) {
- DeoptimizeBranch(instr, static_cast<BranchType>(cond));
+void LCodeGen::DeoptimizeIf(Condition cond, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, static_cast<BranchType>(cond));
}
-void LCodeGen::DeoptimizeIfZero(Register rt, LInstruction* instr) {
- DeoptimizeBranch(instr, reg_zero, rt);
+void LCodeGen::DeoptimizeIfZero(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_zero, rt);
}
-void LCodeGen::DeoptimizeIfNotZero(Register rt, LInstruction* instr) {
- DeoptimizeBranch(instr, reg_not_zero, rt);
+void LCodeGen::DeoptimizeIfNotZero(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_not_zero, rt);
}
-void LCodeGen::DeoptimizeIfNegative(Register rt, LInstruction* instr) {
+void LCodeGen::DeoptimizeIfNegative(Register rt, LInstruction* instr,
+ const char* reason) {
int sign_bit = rt.Is64Bits() ? kXSignBit : kWSignBit;
- DeoptimizeIfBitSet(rt, sign_bit, instr);
+ DeoptimizeIfBitSet(rt, sign_bit, instr, reason);
}
-void LCodeGen::DeoptimizeIfSmi(Register rt, LInstruction* instr) {
- DeoptimizeIfBitClear(rt, MaskToBit(kSmiTagMask), instr);
+void LCodeGen::DeoptimizeIfSmi(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeIfBitClear(rt, MaskToBit(kSmiTagMask), instr, reason);
}
-void LCodeGen::DeoptimizeIfNotSmi(Register rt, LInstruction* instr) {
- DeoptimizeIfBitSet(rt, MaskToBit(kSmiTagMask), instr);
+void LCodeGen::DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeIfBitSet(rt, MaskToBit(kSmiTagMask), instr, reason);
}
void LCodeGen::DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr) {
+ LInstruction* instr, const char* reason) {
__ CompareRoot(rt, index);
- DeoptimizeIf(eq, instr);
+ DeoptimizeIf(eq, instr, reason);
}
void LCodeGen::DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr) {
+ LInstruction* instr, const char* reason) {
__ CompareRoot(rt, index);
- DeoptimizeIf(ne, instr);
+ DeoptimizeIf(ne, instr, reason);
}
-void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input,
- LInstruction* instr) {
+void LCodeGen::DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
+ const char* reason) {
__ TestForMinusZero(input);
- DeoptimizeIf(vs, instr);
+ DeoptimizeIf(vs, instr, reason);
}
-void LCodeGen::DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr) {
- DeoptimizeBranch(instr, reg_bit_set, rt, bit);
+void LCodeGen::DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_bit_set, rt, bit);
}
-void LCodeGen::DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr) {
- DeoptimizeBranch(instr, reg_bit_clear, rt, bit);
+void LCodeGen::DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
+ const char* reason) {
+ DeoptimizeBranch(instr, reason, reg_bit_clear, rt, bit);
}
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- Deoptimize(instr, &type);
+ Deoptimize(instr, &type, instr->hydrogen()->reason());
}
LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
: LCodeGenBase(chunk, assembler, info),
deoptimizations_(4, info->zone()),
- deopt_jump_table_(4, info->zone()),
+ jump_table_(4, info->zone()),
deoptimization_literals_(8, info->zone()),
inlined_function_count_(0),
scope_(info->scope()),
Register temp,
LOperand* index,
String::Encoding encoding);
- void DeoptimizeBranch(LInstruction* instr, BranchType branch_type,
- Register reg = NoReg, int bit = -1,
+ void DeoptimizeBranch(LInstruction* instr, const char* reason,
+ BranchType branch_type, Register reg = NoReg,
+ int bit = -1,
Deoptimizer::BailoutType* override_bailout_type = NULL);
void Deoptimize(LInstruction* instr,
- Deoptimizer::BailoutType* override_bailout_type = NULL);
- void DeoptimizeIf(Condition cond, LInstruction* instr);
- void DeoptimizeIfZero(Register rt, LInstruction* instr);
- void DeoptimizeIfNotZero(Register rt, LInstruction* instr);
- void DeoptimizeIfNegative(Register rt, LInstruction* instr);
- void DeoptimizeIfSmi(Register rt, LInstruction* instr);
- void DeoptimizeIfNotSmi(Register rt, LInstruction* instr);
+ Deoptimizer::BailoutType* override_bailout_type = NULL,
+ const char* reason = NULL);
+ void DeoptimizeIf(Condition cond, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfZero(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfNotZero(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfNegative(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfSmi(Register rt, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfNotSmi(Register rt, LInstruction* instr,
+ const char* reason = NULL);
void DeoptimizeIfRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr);
+ LInstruction* instr, const char* reason = NULL);
void DeoptimizeIfNotRoot(Register rt, Heap::RootListIndex index,
- LInstruction* instr);
- void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr);
- void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr);
- void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr);
+ LInstruction* instr, const char* reason = NULL);
+ void DeoptimizeIfMinusZero(DoubleRegister input, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfBitSet(Register rt, int bit, LInstruction* instr,
+ const char* reason = NULL);
+ void DeoptimizeIfBitClear(Register rt, int bit, LInstruction* instr,
+ const char* reason = NULL);
MemOperand PrepareKeyedExternalArrayOperand(Register key,
Register base,
void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
bool GeneratePrologue();
bool GenerateDeferredCode();
- bool GenerateDeoptJumpTable();
+ bool GenerateJumpTable();
bool GenerateSafepointTable();
// Generates the custom OSR entrypoint and sets the osr_pc_offset.
void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
ZoneList<LEnvironment*> deoptimizations_;
- ZoneList<Deoptimizer::JumpTableEntry*> deopt_jump_table_;
+ ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
ZoneList<Handle<Object> > deoptimization_literals_;
int inlined_function_count_;
Scope* const scope_;
static const int kBailoutTypesWithCodeEntry = SOFT + 1;
struct JumpTableEntry : public ZoneObject {
- inline JumpTableEntry(Address entry,
- Deoptimizer::BailoutType type,
+ inline JumpTableEntry(Address entry, const char* the_mnemonic,
+ const char* the_reason, Deoptimizer::BailoutType type,
bool frame)
: label(),
address(entry),
+ mnemonic(the_mnemonic),
+ reason(the_reason),
bailout_type(type),
- needs_frame(frame) { }
+ needs_frame(frame) {}
Label label;
Address address;
+ const char* mnemonic;
+ const char* reason;
Deoptimizer::BailoutType bailout_type;
bool needs_frame;
};
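The two new fields carry the instruction mnemonic and the (possibly NULL) reason from the point where a deopt is queued to the point where the jump table is emitted, where they are echoed via DeoptComment (added in lithium-codegen.cc further down). A standalone sketch of that round trip with hypothetical entries, assuming the ";;; deoptimize %s: %s" format shown below:

    #include <cstdio>
    #include <vector>

    // Pared-down stand-in for Deoptimizer::JumpTableEntry: only the fields
    // that feed the deopt comment are kept.
    struct Entry {
      const char* mnemonic;
      const char* reason;  // NULL when the call site gave no reason
    };

    int main() {
      std::vector<Entry> table = {{"check-maps", "wrong map"},
                                  {"check-smi", NULL}};
      for (const Entry& e : table)  // mirrors the per-entry DeoptComment call
        std::printf(";;; deoptimize %s: %s\n", e.mnemonic,
                    e.reason == NULL ? "unknown reason" : e.reason);
      return 0;
    }
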
Comment(";;; -------------------- Jump table --------------------");
}
for (int i = 0; i < jump_table_.length(); i++) {
- __ bind(&jump_table_[i].label);
- Address entry = jump_table_[i].address;
- Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
+ Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
+ __ bind(&table_entry->label);
+ Address entry = table_entry->address;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
- if (jump_table_[i].needs_frame) {
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
__ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
if (needs_frame.is_bound()) {
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason,
Deoptimizer::BailoutType bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
DCHECK(info()->IsStub() || frame_is_built_);
if (cc == no_condition && frame_is_built_) {
+ DeoptComment(instr->Mnemonic(), reason);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
jump_table_.last().address != entry ||
jump_table_.last().needs_frame != !frame_is_built_ ||
jump_table_.last().bailout_type != bailout_type) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason,
+ bailout_type, !frame_is_built_);
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
}
-void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr) {
+void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(cc, instr, bailout_type);
+ DeoptimizeIf(cc, instr, reason, bailout_type);
}
if (info()->IsStub() && type == Deoptimizer::EAGER) {
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(no_condition, instr, type);
+ DeoptimizeIf(no_condition, instr, instr->hydrogen()->reason(), type);
}
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
- void DeoptimizeIf(Condition cc, LInstruction* instr,
+ void DeoptimizeIf(Condition cc, LInstruction* instr, const char* reason,
Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition cc, LInstruction* instr);
+ void DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason = NULL);
bool DeoptEveryNTimes() {
return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
}
+void LCodeGenBase::DeoptComment(const char* mnemonic, const char* reason) {
+ Comment(";;; deoptimize %s: %s", mnemonic,
+ reason == NULL ? "unknown reason" : reason);
+}
+
+
int LCodeGenBase::GetNextEmittedBlock() const {
for (int i = current_block_ + 1; i < graph()->blocks()->length(); ++i) {
if (!graph()->blocks()->at(i)->IsReachable()) continue;
HGraph* graph() const;
void FPRINTF_CHECKING Comment(const char* format, ...);
+ void DeoptComment(const char* mnemonic, const char* reason);
bool GenerateBody();
virtual void GenerateBodyInstructionPre(LInstruction* instr) {}
void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
Deoptimizer::BailoutType bailout_type,
- Register src1, const Operand& src2) {
+ Register src1, const Operand& src2,
+ const char* reason) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
DCHECK(environment->HasBeenRegistered());
// restore caller doubles.
if (condition == al && frame_is_built_ &&
!info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
} else {
// We often have several deopts to the same entry, reuse the last
(deopt_jump_table_.last().address != entry) ||
(deopt_jump_table_.last().bailout_type != bailout_type) ||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason,
+ bailout_type, !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ Branch(&deopt_jump_table_.last().label, condition, src1, src2);
void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
- Register src1, const Operand& src2) {
+ Register src1, const Operand& src2,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(condition, instr, bailout_type, src1, src2);
+ DeoptimizeIf(condition, instr, bailout_type, src1, src2, reason);
}
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(al, instr, type, zero_reg, Operand(zero_reg));
+ DeoptimizeIf(al, instr, type, zero_reg, Operand(zero_reg),
+ instr->hydrogen()->reason());
}
void DeoptimizeIf(Condition condition, LInstruction* instr,
Deoptimizer::BailoutType bailout_type,
Register src1 = zero_reg,
- const Operand& src2 = Operand(zero_reg));
+ const Operand& src2 = Operand(zero_reg),
+ const char* reason = NULL);
void DeoptimizeIf(Condition condition, LInstruction* instr,
Register src1 = zero_reg,
- const Operand& src2 = Operand(zero_reg));
+ const Operand& src2 = Operand(zero_reg),
+ const char* reason = NULL);
void AddToTranslation(LEnvironment* environment,
Translation* translation,
void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
Deoptimizer::BailoutType bailout_type,
- Register src1, const Operand& src2) {
+ Register src1, const Operand& src2,
+ const char* reason) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
DCHECK(environment->HasBeenRegistered());
// restore caller doubles.
if (condition == al && frame_is_built_ &&
!info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ Call(entry, RelocInfo::RUNTIME_ENTRY, condition, src1, src2);
} else {
// We often have several deopts to the same entry, reuse the last
(deopt_jump_table_.last().address != entry) ||
(deopt_jump_table_.last().bailout_type != bailout_type) ||
(deopt_jump_table_.last().needs_frame != !frame_is_built_)) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason,
+ bailout_type, !frame_is_built_);
deopt_jump_table_.Add(table_entry, zone());
}
__ Branch(&deopt_jump_table_.last().label, condition, src1, src2);
void LCodeGen::DeoptimizeIf(Condition condition, LInstruction* instr,
- Register src1, const Operand& src2) {
+ Register src1, const Operand& src2,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(condition, instr, bailout_type, src1, src2);
+ DeoptimizeIf(condition, instr, bailout_type, src1, src2, reason);
}
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(al, instr, type, zero_reg, Operand(zero_reg));
+ DeoptimizeIf(al, instr, type, zero_reg, Operand(zero_reg),
+ instr->hydrogen()->reason());
}
void DeoptimizeIf(Condition condition, LInstruction* instr,
Deoptimizer::BailoutType bailout_type,
Register src1 = zero_reg,
- const Operand& src2 = Operand(zero_reg));
+ const Operand& src2 = Operand(zero_reg),
+ const char* reason = NULL);
void DeoptimizeIf(Condition condition, LInstruction* instr,
Register src1 = zero_reg,
- const Operand& src2 = Operand(zero_reg));
+ const Operand& src2 = Operand(zero_reg),
+ const char* reason = NULL);
void AddToTranslation(LEnvironment* environment,
Translation* translation,
Comment(";;; -------------------- Jump table --------------------");
}
for (int i = 0; i < jump_table_.length(); i++) {
- __ bind(&jump_table_[i].label);
- Address entry = jump_table_[i].address;
- Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
+ Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
+ __ bind(&table_entry->label);
+ Address entry = table_entry->address;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
- if (jump_table_[i].needs_frame) {
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
__ Move(kScratchRegister, ExternalReference::ForDeoptEntry(entry));
if (needs_frame.is_bound()) {
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason,
Deoptimizer::BailoutType bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
// restore caller doubles.
if (cc == no_condition && frame_is_built_ &&
!info()->saves_caller_doubles()) {
+ DeoptComment(instr->Mnemonic(), reason);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
jump_table_.last().address != entry ||
jump_table_.last().needs_frame != !frame_is_built_ ||
jump_table_.last().bailout_type != bailout_type) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason,
+ bailout_type, !frame_is_built_);
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
}
-void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr) {
+void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(cc, instr, bailout_type);
+ DeoptimizeIf(cc, instr, reason, bailout_type);
}
if (info()->IsStub() && type == Deoptimizer::EAGER) {
type = Deoptimizer::LAZY;
}
-
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(no_condition, instr, type);
+ DeoptimizeIf(no_condition, instr, instr->hydrogen()->reason(), type);
}
int argc);
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
- void DeoptimizeIf(Condition cc, LInstruction* instr,
+ void DeoptimizeIf(Condition cc, LInstruction* instr, const char* reason,
Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition cc, LInstruction* instr);
+ void DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason = NULL);
bool DeoptEveryNTimes() {
return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
Comment(";;; -------------------- Jump table --------------------");
}
for (int i = 0; i < jump_table_.length(); i++) {
- __ bind(&jump_table_[i].label);
- Address entry = jump_table_[i].address;
- Deoptimizer::BailoutType type = jump_table_[i].bailout_type;
+ Deoptimizer::JumpTableEntry* table_entry = &jump_table_[i];
+ __ bind(&table_entry->label);
+ Address entry = table_entry->address;
+ Deoptimizer::BailoutType type = table_entry->bailout_type;
int id = Deoptimizer::GetDeoptimizationId(isolate(), entry, type);
if (id == Deoptimizer::kNotDeoptimizationEntry) {
Comment(";;; jump table entry %d.", i);
} else {
Comment(";;; jump table entry %d: deoptimization bailout %d.", i, id);
}
- if (jump_table_[i].needs_frame) {
+ DeoptComment(table_entry->mnemonic, table_entry->reason);
+ if (table_entry->needs_frame) {
DCHECK(!info()->saves_caller_doubles());
__ push(Immediate(ExternalReference::ForDeoptEntry(entry)));
if (needs_frame.is_bound()) {
void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason,
Deoptimizer::BailoutType bailout_type) {
LEnvironment* environment = instr->environment();
RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt);
DCHECK(info()->IsStub() || frame_is_built_);
if (cc == no_condition && frame_is_built_) {
+ DeoptComment(instr->Mnemonic(), reason);
__ call(entry, RelocInfo::RUNTIME_ENTRY);
} else {
// We often have several deopts to the same entry, reuse the last
jump_table_.last().address != entry ||
jump_table_.last().needs_frame != !frame_is_built_ ||
jump_table_.last().bailout_type != bailout_type) {
- Deoptimizer::JumpTableEntry table_entry(entry,
- bailout_type,
- !frame_is_built_);
+ Deoptimizer::JumpTableEntry table_entry(entry, instr->Mnemonic(), reason,
+ bailout_type, !frame_is_built_);
jump_table_.Add(table_entry, zone());
}
if (cc == no_condition) {
}
-void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr) {
+void LCodeGen::DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason) {
Deoptimizer::BailoutType bailout_type = info()->IsStub()
? Deoptimizer::LAZY
: Deoptimizer::EAGER;
- DeoptimizeIf(cc, instr, bailout_type);
+ DeoptimizeIf(cc, instr, reason, bailout_type);
}
if (info()->IsStub() && type == Deoptimizer::EAGER) {
type = Deoptimizer::LAZY;
}
- Comment(";;; deoptimize: %s", instr->hydrogen()->reason());
- DeoptimizeIf(no_condition, instr, type);
+ DeoptimizeIf(no_condition, instr, instr->hydrogen()->reason(), type);
}
void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
Safepoint::DeoptMode mode);
- void DeoptimizeIf(Condition cc, LInstruction* instr,
+ void DeoptimizeIf(Condition cc, LInstruction* instr, const char* reason,
Deoptimizer::BailoutType bailout_type);
- void DeoptimizeIf(Condition cc, LInstruction* instr);
+ void DeoptimizeIf(Condition cc, LInstruction* instr,
+ const char* reason = NULL);
bool DeoptEveryNTimes() {
return FLAG_deopt_every_n_times != 0 && !info()->IsStub();