break;
}
case kArchJmp:
- __ b(GetLabel(i.InputRpo(0)));
+ AssembleArchJump(i.InputRpo(0));
DCHECK_EQ(LeaveCC, i.OutputSBit());
break;
case kArchNop:
// Assembles branches after an instruction.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
- FlagsCondition condition) {
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
ArmOperandConverter i(this, instr);
- Label done;
-
- // Emit a branch. The true and false targets are always the last two inputs
- // to the instruction.
- BasicBlock::RpoNumber tblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock::RpoNumber fblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
- bool fallthru = IsNextInAssemblyOrder(fblock);
- Label* tlabel = GetLabel(tblock);
- Label* flabel = fallthru ? &done : GetLabel(fblock);
- switch (condition) {
+ Label* tlabel = branch->true_label;
+ Label* flabel = branch->false_label;
+ switch (branch->condition) {
case kUnorderedEqual:
__ b(vs, flabel);
// Fall through.
__ b(vc, tlabel);
break;
}
- if (!fallthru) __ b(flabel); // no fallthru to flabel.
- __ bind(&done);
+ if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
+}
+
+
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+ if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
}
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
- if (IsNextInAssemblyOrder(tbranch)) { // We can fallthru to the true block.
- cont.Negate();
- cont.SwapBlocks();
- }
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
break;
}
case kArchJmp:
- __ B(GetLabel(i.InputRpo(0)));
+ AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
// Assemble branches after this instruction.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
- FlagsCondition condition) {
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Arm64OperandConverter i(this, instr);
- Label done;
-
- // Emit a branch. The true and false targets are always the last two inputs
- // to the instruction.
- BasicBlock::RpoNumber tblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock::RpoNumber fblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
- bool fallthru = IsNextInAssemblyOrder(fblock);
- Label* tlabel = GetLabel(tblock);
- Label* flabel = fallthru ? &done : GetLabel(fblock);
- switch (condition) {
+ Label* tlabel = branch->true_label;
+ Label* flabel = branch->false_label;
+ switch (branch->condition) {
case kUnorderedEqual:
__ B(vs, flabel);
// Fall through.
__ B(vc, tlabel);
break;
}
- if (!fallthru) __ B(flabel); // no fallthru to flabel.
- __ Bind(&done);
+ if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
+}
+
+
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+ if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
}
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
- // If we can fall through to the true block, invert the branch.
- if (IsNextInAssemblyOrder(tbranch)) {
- cont.Negate();
- cont.SwapBlocks();
- }
-
// Try to combine with comparisons against 0 by simply inverting the branch.
while (CanCover(user, value)) {
if (value->opcode() == IrOpcode::kWord32Equal) {
// Assemble architecture-specific code for the instruction.
AssembleArchInstruction(instr);
- // Assemble branches or boolean materializations after this instruction.
FlagsMode mode = FlagsModeField::decode(instr->opcode());
FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
- switch (mode) {
- case kFlags_none:
+ if (mode == kFlags_branch) {
+ // Assemble a branch after this instruction.
+ InstructionOperandConverter i(this, instr);
+ BasicBlock::RpoNumber true_rpo =
+ i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
+ BasicBlock::RpoNumber false_rpo =
+ i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
+
+ if (true_rpo == false_rpo) {
+ // Redundant branch: both targets are the same block.
+ if (!IsNextInAssemblyOrder(true_rpo)) {
+ AssembleArchJump(true_rpo);
+ }
return;
- case kFlags_set:
- return AssembleArchBoolean(instr, condition);
- case kFlags_branch:
- return AssembleArchBranch(instr, condition);
+ }
+ if (IsNextInAssemblyOrder(true_rpo)) {
+ // The true block is next in assembly order; negate the condition to fall through to it.
+ std::swap(true_rpo, false_rpo);
+ condition = NegateFlagsCondition(condition);
+ }
+ BranchInfo branch;
+ branch.condition = condition;
+ branch.true_label = GetLabel(true_rpo);
+ branch.false_label = GetLabel(false_rpo);
+ branch.fallthru = IsNextInAssemblyOrder(false_rpo);
+ // Assemble architecture-specific branch.
+ AssembleArchBranch(instr, &branch);
+ } else if (mode == kFlags_set) {
+ // Assemble a boolean materialization after this instruction.
+ AssembleArchBoolean(instr, condition);
}
- UNREACHABLE();
}
}
class Linkage;
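+// Describes a branch to be assembled after an instruction: the condition to
+// test, the labels of the true and false targets, and whether the false
+// target is the fall-through block.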
+struct BranchInfo {
+ FlagsCondition condition;
+ Label* true_label;
+ Label* false_label;
+ bool fallthru;
+};
+
+
// Generates native code for a sequence of instructions.
class CodeGenerator FINAL : public GapResolver::Assembler {
public:
// ===========================================================================
void AssembleArchInstruction(Instruction* instr);
- void AssembleArchBranch(Instruction* instr, FlagsCondition condition);
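+ // Assembles a jump to the target block, omitting it when the target is the
+ // next block in assembly order.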
+ void AssembleArchJump(BasicBlock::RpoNumber target);
+ void AssembleArchBranch(Instruction* instr, BranchInfo* branch);
void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
void AssembleDeoptimizerCall(int deoptimization_id);
break;
}
case kArchJmp:
- __ jmp(GetLabel(i.InputRpo(0)));
+ AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
}
-// Assembles branches after an instruction.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
- FlagsCondition condition) {
+// Assembles a branch after an instruction.
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
IA32OperandConverter i(this, instr);
- Label done;
-
- // Emit a branch. The true and false targets are always the last two inputs
- // to the instruction.
- BasicBlock::RpoNumber tblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock::RpoNumber fblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
- bool fallthru = IsNextInAssemblyOrder(fblock);
- Label* tlabel = GetLabel(tblock);
- Label* flabel = fallthru ? &done : GetLabel(fblock);
- Label::Distance flabel_distance = fallthru ? Label::kNear : Label::kFar;
- switch (condition) {
+ Label::Distance flabel_distance =
+ branch->fallthru ? Label::kNear : Label::kFar;
+ Label* tlabel = branch->true_label;
+ Label* flabel = branch->false_label;
+ switch (branch->condition) {
case kUnorderedEqual:
__ j(parity_even, flabel, flabel_distance);
// Fall through.
__ j(no_overflow, tlabel);
break;
}
- if (!fallthru) __ jmp(flabel, flabel_distance); // no fallthru to flabel.
- __ bind(&done);
+ // Add a jump if not falling through to the next block.
+ if (!branch->fallthru) __ jmp(flabel);
+}
+
+
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+ if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
- if (IsNextInAssemblyOrder(tbranch)) { // We can fallthru to the true block.
- cont.Negate();
- cont.SwapBlocks();
- }
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
kNotOverflow
};
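+// Negation relies on the enum layout above: each condition is declared
+// adjacent to its negated form, so flipping the low bit negates it.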
+inline FlagsCondition NegateFlagsCondition(FlagsCondition condition) {
+ return static_cast<FlagsCondition>(condition ^ 1);
+}
+
std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc);
// The InstructionCode is an opaque, target-specific integer that encodes
void Negate() {
DCHECK(!IsNone());
- condition_ = static_cast<FlagsCondition>(condition_ ^ 1);
+ condition_ = NegateFlagsCondition(condition_);
}
void Commute() {
if (negate) Negate();
}
- void SwapBlocks() { std::swap(true_block_, false_block_); }
-
// Encodes this flags continuation into the given opcode.
InstructionCode Encode(InstructionCode opcode) {
opcode |= FlagsModeField::encode(mode_);
void InstructionSelector::VisitGoto(BasicBlock* target) {
- if (IsNextInAssemblyOrder(target)) {
- // fall through to the next block.
- Emit(kArchNop, NULL)->MarkAsControl();
- } else {
- // jump to the next block.
- OperandGenerator g(this);
- Emit(kArchJmp, NULL, g.Label(target))->MarkAsControl();
- }
+ // Jump to the target block; the code generator omits the jump if the target falls through.
+ OperandGenerator g(this);
+ Emit(kArchJmp, NULL, g.Label(target))->MarkAsControl();
}
break;
}
case kArchJmp:
- __ Branch(GetLabel(i.InputRpo(0)));
+ AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
- FlagsCondition condition) {
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
MipsOperandConverter i(this, instr);
- Label done;
-
- // Emit a branch. The true and false targets are always the last two inputs
- // to the instruction.
- BasicBlock::RpoNumber tblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock::RpoNumber fblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
- bool fallthru = IsNextInAssemblyOrder(fblock);
- Label* tlabel = GetLabel(tblock);
- Label* flabel = fallthru ? &done : GetLabel(fblock);
+ Label* tlabel = branch->true_label;
+ Label* flabel = branch->false_label;
Condition cc = kNoCondition;
// MIPS does not have condition code flags, so compare and branch are
// implemented differently than on the other arch's. The compare operations
- // emit mips psuedo-instructions, which are handled here by branch
+ // emit mips pseudo-instructions, which are handled here by branch
// instructions that do the actual comparison. Essential that the input
- // registers to compare psuedo-op are not modified before this branch op, as
+ // registers to compare pseudo-op are not modified before this branch op, as
// they are tested here.
// TODO(plind): Add CHECK() to ensure that test/cmp and this branch were
// not separated by other instructions.
if (instr->arch_opcode() == kMipsTst) {
- switch (condition) {
+ switch (branch->condition) {
case kNotEqual:
cc = ne;
break;
} else if (instr->arch_opcode() == kMipsAddOvf ||
instr->arch_opcode() == kMipsSubOvf) {
// kMipsAddOvf, SubOvf emit negative result to 'kCompareReg' on overflow.
- switch (condition) {
+ switch (branch->condition) {
case kOverflow:
cc = lt;
break;
cc = ge;
break;
default:
- UNSUPPORTED_COND(kMipsAddOvf, condition);
+ UNSUPPORTED_COND(kMipsAddOvf, branch->condition);
break;
}
__ Branch(tlabel, cc, kCompareReg, Operand(zero_reg));
} else if (instr->arch_opcode() == kMipsCmp) {
- switch (condition) {
+ switch (branch->condition) {
case kEqual:
cc = eq;
break;
cc = hi;
break;
default:
- UNSUPPORTED_COND(kMipsCmp, condition);
+ UNSUPPORTED_COND(kMipsCmp, branch->condition);
break;
}
__ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
- if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
- __ bind(&done);
+ if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
} else if (instr->arch_opcode() == kMipsCmpD) {
- // TODO(dusmil) optimize unordered checks to use less instructions
+ // TODO(dusmil) optimize unordered checks to use fewer instructions
// even if we have to unfold BranchF macro.
Label* nan = flabel;
- switch (condition) {
+ switch (branch->condition) {
case kUnorderedEqual:
cc = eq;
break;
nan = tlabel;
break;
default:
- UNSUPPORTED_COND(kMipsCmpD, condition);
+ UNSUPPORTED_COND(kMipsCmpD, branch->condition);
break;
}
__ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0),
i.InputDoubleRegister(1));
- if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
- __ bind(&done);
+ if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
} else {
PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n",
}
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+ if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
+}
+
+
// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
FlagsCondition condition) {
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
- // If we can fall through to the true block, invert the branch.
- if (IsNextInAssemblyOrder(tbranch)) {
- cont.Negate();
- cont.SwapBlocks();
- }
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
break;
}
case kArchJmp:
- __ Branch(GetLabel(i.InputRpo(0)));
+ AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
- FlagsCondition condition) {
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
MipsOperandConverter i(this, instr);
- Label done;
-
- // Emit a branch. The true and false targets are always the last two inputs
- // to the instruction.
- BasicBlock::RpoNumber tblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock::RpoNumber fblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
- bool fallthru = IsNextInAssemblyOrder(fblock);
- Label* tlabel = GetLabel(tblock);
- Label* flabel = fallthru ? &done : GetLabel(fblock);
+ Label* tlabel = branch->true_label;
+ Label* flabel = branch->false_label;
Condition cc = kNoCondition;
// MIPS does not have condition code flags, so compare and branch are
// not separated by other instructions.
if (instr->arch_opcode() == kMips64Tst) {
- switch (condition) {
+ switch (branch->condition) {
case kNotEqual:
cc = ne;
break;
cc = eq;
break;
default:
- UNSUPPORTED_COND(kMips64Tst, condition);
+ UNSUPPORTED_COND(kMips64Tst, branch->condition);
break;
}
__ And(at, i.InputRegister(0), i.InputOperand(1));
__ Branch(tlabel, cc, at, Operand(zero_reg));
} else if (instr->arch_opcode() == kMips64Tst32) {
- switch (condition) {
+ switch (branch->condition) {
case kNotEqual:
cc = ne;
break;
cc = eq;
break;
default:
- UNSUPPORTED_COND(kMips64Tst32, condition);
+ UNSUPPORTED_COND(kMips64Tst32, branch->condition);
break;
}
// Zero-extend registers on MIPS64 only 64-bit operand
__ Branch(tlabel, cc, at, Operand(zero_reg));
} else if (instr->arch_opcode() == kMips64Dadd ||
instr->arch_opcode() == kMips64Dsub) {
- switch (condition) {
+ switch (branch->condition) {
case kOverflow:
cc = ne;
break;
cc = eq;
break;
default:
- UNSUPPORTED_COND(kMips64Dadd, condition);
+ UNSUPPORTED_COND(kMips64Dadd, branch->condition);
break;
}
__ sra(at, i.OutputRegister(), 31);
__ Branch(tlabel, cc, at, Operand(kScratchReg));
} else if (instr->arch_opcode() == kMips64Cmp) {
- switch (condition) {
+ switch (branch->condition) {
case kEqual:
cc = eq;
break;
cc = hi;
break;
default:
- UNSUPPORTED_COND(kMips64Cmp, condition);
+ UNSUPPORTED_COND(kMips64Cmp, branch->condition);
break;
}
__ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
- if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
+ if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
- __ bind(&done);
} else if (instr->arch_opcode() == kMips64Cmp32) {
- switch (condition) {
+ switch (branch->condition) {
case kEqual:
cc = eq;
break;
cc = hi;
break;
default:
- UNSUPPORTED_COND(kMips64Cmp32, condition);
+ UNSUPPORTED_COND(kMips64Cmp32, branch->condition);
break;
}
- switch (condition) {
+ switch (branch->condition) {
case kEqual:
case kNotEqual:
case kSignedLessThan:
}
break;
default:
- UNSUPPORTED_COND(kMips64Cmp, condition);
+ UNSUPPORTED_COND(kMips64Cmp, branch->condition);
break;
}
__ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
- if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
+ if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
- __ bind(&done);
} else if (instr->arch_opcode() == kMips64CmpD) {
- // TODO(dusmil) optimize unordered checks to use less instructions
+ // TODO(dusmil) optimize unordered checks to use fewer instructions
// even if we have to unfold BranchF macro.
Label* nan = flabel;
- switch (condition) {
+ switch (branch->condition) {
case kUnorderedEqual:
cc = eq;
break;
nan = tlabel;
break;
default:
- UNSUPPORTED_COND(kMips64CmpD, condition);
+ UNSUPPORTED_COND(kMips64CmpD, branch->condition);
break;
}
__ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0),
i.InputDoubleRegister(1));
- if (!fallthru) __ Branch(flabel); // no fallthru to flabel.
+ if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel.
- __ bind(&done);
} else {
}
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+ if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
+}
+
+
// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
FlagsCondition condition) {
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
BasicBlock* fbranch) {
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
- // If we can fall through to the true block, invert the branch.
- if (IsNextInAssemblyOrder(tbranch)) {
- cont.Negate();
- cont.SwapBlocks();
- }
-
VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}
break;
}
case kArchJmp:
- __ jmp(GetLabel(i.InputRpo(0)));
+ AssembleArchJump(i.InputRpo(0));
break;
case kArchNop:
// don't emit code for nops.
// Assembles branches after this instruction.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
- FlagsCondition condition) {
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
X64OperandConverter i(this, instr);
- Label done;
-
- // Emit a branch. The true and false targets are always the last two inputs
- // to the instruction.
- BasicBlock::RpoNumber tblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
- BasicBlock::RpoNumber fblock =
- i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
- bool fallthru = IsNextInAssemblyOrder(fblock);
- Label* tlabel = GetLabel(tblock);
- Label* flabel = fallthru ? &done : GetLabel(fblock);
- Label::Distance flabel_distance = fallthru ? Label::kNear : Label::kFar;
- switch (condition) {
+ Label::Distance flabel_distance =
+ branch->fallthru ? Label::kNear : Label::kFar;
+ Label* tlabel = branch->true_label;
+ Label* flabel = branch->false_label;
+ switch (branch->condition) {
case kUnorderedEqual:
__ j(parity_even, flabel, flabel_distance);
// Fall through.
__ j(no_overflow, tlabel);
break;
}
- if (!fallthru) __ jmp(flabel, flabel_distance); // no fallthru to flabel.
- __ bind(&done);
+ if (!branch->fallthru) __ jmp(flabel, flabel_distance);
+}
+
+
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+ if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
}
FlagsContinuation cont(kNotEqual, tbranch, fbranch);
- // If we can fall through to the true block, invert the branch.
- if (IsNextInAssemblyOrder(tbranch)) {
- cont.Negate();
- cont.SwapBlocks();
- }
-
// Try to combine with comparisons against 0 by simply inverting the branch.
while (CanCover(user, value)) {
if (value->opcode() == IrOpcode::kWord32Equal) {