From 87e9d839a287d1ff2b318dad44c8c59091a0f0d1 Mon Sep 17 00:00:00 2001 From: titzer Date: Thu, 20 Nov 2014 08:23:11 -0800 Subject: [PATCH] [turbofan] Clean up and factor out branch generation logic. R=dcarney@chromium.org BUG= Review URL: https://codereview.chromium.org/745633002 Cr-Commit-Position: refs/heads/master@{#25446} --- src/compiler/arm/code-generator-arm.cc | 28 +++++------ src/compiler/arm/instruction-selector-arm.cc | 4 -- src/compiler/arm64/code-generator-arm64.cc | 28 +++++------ src/compiler/arm64/instruction-selector-arm64.cc | 6 --- src/compiler/code-generator.cc | 37 +++++++++++---- src/compiler/code-generator.h | 11 ++++- src/compiler/ia32/code-generator-ia32.cc | 34 ++++++-------- src/compiler/ia32/instruction-selector-ia32.cc | 4 -- src/compiler/instruction-codes.h | 4 ++ src/compiler/instruction-selector-impl.h | 4 +- src/compiler/instruction-selector.cc | 11 ++--- src/compiler/mips/code-generator-mips.cc | 46 ++++++++---------- src/compiler/mips/instruction-selector-mips.cc | 5 -- src/compiler/mips64/code-generator-mips64.cc | 54 ++++++++++------------ src/compiler/mips64/instruction-selector-mips64.cc | 6 --- src/compiler/x64/code-generator-x64.cc | 31 ++++++------- src/compiler/x64/instruction-selector-x64.cc | 6 --- 17 files changed, 142 insertions(+), 177 deletions(-) diff --git a/src/compiler/arm/code-generator-arm.cc b/src/compiler/arm/code-generator-arm.cc index c0a1873..8934b18 100644 --- a/src/compiler/arm/code-generator-arm.cc +++ b/src/compiler/arm/code-generator-arm.cc @@ -193,7 +193,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { break; } case kArchJmp: - __ b(GetLabel(i.InputRpo(0))); + AssembleArchJump(i.InputRpo(0)); DCHECK_EQ(LeaveCC, i.OutputSBit()); break; case kArchNop: @@ -539,21 +539,11 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { // Assembles branches after an instruction. -void CodeGenerator::AssembleArchBranch(Instruction* instr, - FlagsCondition condition) { +void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { ArmOperandConverter i(this, instr); - Label done; - - // Emit a branch. The true and false targets are always the last two inputs - // to the instruction. - BasicBlock::RpoNumber tblock = - i.InputRpo(static_cast(instr->InputCount()) - 2); - BasicBlock::RpoNumber fblock = - i.InputRpo(static_cast(instr->InputCount()) - 1); - bool fallthru = IsNextInAssemblyOrder(fblock); - Label* tlabel = GetLabel(tblock); - Label* flabel = fallthru ? &done : GetLabel(fblock); - switch (condition) { + Label* tlabel = branch->true_label; + Label* flabel = branch->false_label; + switch (branch->condition) { case kUnorderedEqual: __ b(vs, flabel); // Fall through. @@ -609,8 +599,12 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, __ b(vc, tlabel); break; } - if (!fallthru) __ b(flabel); // no fallthru to flabel. - __ bind(&done); + if (!branch->fallthru) __ b(flabel); // no fallthru to flabel. 
+} + + +void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { + if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target)); } diff --git a/src/compiler/arm/instruction-selector-arm.cc b/src/compiler/arm/instruction-selector-arm.cc index 146aede..6e1e084 100644 --- a/src/compiler/arm/instruction-selector-arm.cc +++ b/src/compiler/arm/instruction-selector-arm.cc @@ -1170,10 +1170,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user, void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, BasicBlock* fbranch) { FlagsContinuation cont(kNotEqual, tbranch, fbranch); - if (IsNextInAssemblyOrder(tbranch)) { // We can fallthru to the true block. - cont.Negate(); - cont.SwapBlocks(); - } VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); } diff --git a/src/compiler/arm64/code-generator-arm64.cc b/src/compiler/arm64/code-generator-arm64.cc index 61e69fc..fb1f06a 100644 --- a/src/compiler/arm64/code-generator-arm64.cc +++ b/src/compiler/arm64/code-generator-arm64.cc @@ -214,7 +214,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { break; } case kArchJmp: - __ B(GetLabel(i.InputRpo(0))); + AssembleArchJump(i.InputRpo(0)); break; case kArchNop: // don't emit code for nops. @@ -612,21 +612,11 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { // Assemble branches after this instruction. -void CodeGenerator::AssembleArchBranch(Instruction* instr, - FlagsCondition condition) { +void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { Arm64OperandConverter i(this, instr); - Label done; - - // Emit a branch. The true and false targets are always the last two inputs - // to the instruction. - BasicBlock::RpoNumber tblock = - i.InputRpo(static_cast(instr->InputCount()) - 2); - BasicBlock::RpoNumber fblock = - i.InputRpo(static_cast(instr->InputCount()) - 1); - bool fallthru = IsNextInAssemblyOrder(fblock); - Label* tlabel = GetLabel(tblock); - Label* flabel = fallthru ? &done : GetLabel(fblock); - switch (condition) { + Label* tlabel = branch->true_label; + Label* flabel = branch->false_label; + switch (branch->condition) { case kUnorderedEqual: __ B(vs, flabel); // Fall through. @@ -682,8 +672,12 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, __ B(vc, tlabel); break; } - if (!fallthru) __ B(flabel); // no fallthru to flabel. - __ Bind(&done); + if (!branch->fallthru) __ B(flabel); // no fallthru to flabel. +} + + +void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { + if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target)); } diff --git a/src/compiler/arm64/instruction-selector-arm64.cc b/src/compiler/arm64/instruction-selector-arm64.cc index 56eb9e5..5f53a0c 100644 --- a/src/compiler/arm64/instruction-selector-arm64.cc +++ b/src/compiler/arm64/instruction-selector-arm64.cc @@ -1120,12 +1120,6 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, FlagsContinuation cont(kNotEqual, tbranch, fbranch); - // If we can fall through to the true block, invert the branch. - if (IsNextInAssemblyOrder(tbranch)) { - cont.Negate(); - cont.SwapBlocks(); - } - // Try to combine with comparisons against 0 by simply inverting the branch. 
   while (CanCover(user, value)) {
     if (value->opcode() == IrOpcode::kWord32Equal) {
diff --git a/src/compiler/code-generator.cc b/src/compiler/code-generator.cc
index 708b18e..f3257ec 100644
--- a/src/compiler/code-generator.cc
+++ b/src/compiler/code-generator.cc
@@ -139,18 +139,39 @@ void CodeGenerator::AssembleInstruction(Instruction* instr) {
     // Assemble architecture-specific code for the instruction.
     AssembleArchInstruction(instr);
 
-    // Assemble branches or boolean materializations after this instruction.
     FlagsMode mode = FlagsModeField::decode(instr->opcode());
     FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
-    switch (mode) {
-      case kFlags_none:
+    if (mode == kFlags_branch) {
+      // Assemble a branch after this instruction.
+      InstructionOperandConverter i(this, instr);
+      BasicBlock::RpoNumber true_rpo =
+          i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
+      BasicBlock::RpoNumber false_rpo =
+          i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
+
+      if (true_rpo == false_rpo) {
+        // redundant branch.
+        if (!IsNextInAssemblyOrder(true_rpo)) {
+          AssembleArchJump(true_rpo);
+        }
         return;
-      case kFlags_set:
-        return AssembleArchBoolean(instr, condition);
-      case kFlags_branch:
-        return AssembleArchBranch(instr, condition);
+      }
+      if (IsNextInAssemblyOrder(true_rpo)) {
+        // true block is next, can fall through if condition negated.
+        std::swap(true_rpo, false_rpo);
+        condition = NegateFlagsCondition(condition);
+      }
+      BranchInfo branch;
+      branch.condition = condition;
+      branch.true_label = GetLabel(true_rpo);
+      branch.false_label = GetLabel(false_rpo);
+      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
+      // Assemble architecture-specific branch.
+      AssembleArchBranch(instr, &branch);
+    } else if (mode == kFlags_set) {
+      // Assemble a boolean materialization after this instruction.
+      AssembleArchBoolean(instr, condition);
     }
-    UNREACHABLE();
   }
 }
 
diff --git a/src/compiler/code-generator.h b/src/compiler/code-generator.h
index b03e2ad..2a3ade5 100644
--- a/src/compiler/code-generator.h
+++ b/src/compiler/code-generator.h
@@ -19,6 +19,14 @@ namespace compiler {
 
 class Linkage;
 
+struct BranchInfo {
+  FlagsCondition condition;
+  Label* true_label;
+  Label* false_label;
+  bool fallthru;
+};
+
+
 // Generates native code for a sequence of instructions.
 class CodeGenerator FINAL : public GapResolver::Assembler {
  public:
@@ -60,7 +68,8 @@ class CodeGenerator FINAL : public GapResolver::Assembler {
 
   // ===========================================================================
   void AssembleArchInstruction(Instruction* instr);
-  void AssembleArchBranch(Instruction* instr, FlagsCondition condition);
+  void AssembleArchJump(BasicBlock::RpoNumber target);
+  void AssembleArchBranch(Instruction* instr, BranchInfo* branch);
   void AssembleArchBoolean(Instruction* instr, FlagsCondition condition);
   void AssembleDeoptimizerCall(int deoptimization_id);
diff --git a/src/compiler/ia32/code-generator-ia32.cc b/src/compiler/ia32/code-generator-ia32.cc
index e226aaf..8c619f8 100644
--- a/src/compiler/ia32/code-generator-ia32.cc
+++ b/src/compiler/ia32/code-generator-ia32.cc
@@ -195,7 +195,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       break;
     }
     case kArchJmp:
-      __ jmp(GetLabel(i.InputRpo(0)));
+      AssembleArchJump(i.InputRpo(0));
       break;
     case kArchNop:
       // don't emit code for nops.
@@ -485,23 +485,14 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
 }
 
 
-// Assembles branches after an instruction.
-void CodeGenerator::AssembleArchBranch(Instruction* instr,
-                                       FlagsCondition condition) {
+// Assembles a branch after an instruction.
+void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   IA32OperandConverter i(this, instr);
-  Label done;
-
-  // Emit a branch. The true and false targets are always the last two inputs
-  // to the instruction.
-  BasicBlock::RpoNumber tblock =
-      i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
-  BasicBlock::RpoNumber fblock =
-      i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
-  bool fallthru = IsNextInAssemblyOrder(fblock);
-  Label* tlabel = GetLabel(tblock);
-  Label* flabel = fallthru ? &done : GetLabel(fblock);
-  Label::Distance flabel_distance = fallthru ? Label::kNear : Label::kFar;
-  switch (condition) {
+  Label::Distance flabel_distance =
+      branch->fallthru ? Label::kNear : Label::kFar;
+  Label* tlabel = branch->true_label;
+  Label* flabel = branch->false_label;
+  switch (branch->condition) {
     case kUnorderedEqual:
       __ j(parity_even, flabel, flabel_distance);
     // Fall through.
@@ -557,8 +548,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr,
       __ j(no_overflow, tlabel);
       break;
   }
-  if (!fallthru) __ jmp(flabel, flabel_distance);  // no fallthru to flabel.
-  __ bind(&done);
+  // Add a jump if not falling through to the next block.
+  if (!branch->fallthru) __ jmp(flabel);
+}
+
+
+void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
+  if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target));
 }
 
diff --git a/src/compiler/ia32/instruction-selector-ia32.cc b/src/compiler/ia32/instruction-selector-ia32.cc
index dabaa56..f69a075 100644
--- a/src/compiler/ia32/instruction-selector-ia32.cc
+++ b/src/compiler/ia32/instruction-selector-ia32.cc
@@ -1003,10 +1003,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user,
 void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                       BasicBlock* fbranch) {
   FlagsContinuation cont(kNotEqual, tbranch, fbranch);
-  if (IsNextInAssemblyOrder(tbranch)) {  // We can fallthru to the true block.
-    cont.Negate();
-    cont.SwapBlocks();
-  }
   VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
 }
 
diff --git a/src/compiler/instruction-codes.h b/src/compiler/instruction-codes.h
index 00fa1b4..21a0f78 100644
--- a/src/compiler/instruction-codes.h
+++ b/src/compiler/instruction-codes.h
@@ -98,6 +98,10 @@ enum FlagsCondition {
   kNotOverflow
 };
 
+inline FlagsCondition NegateFlagsCondition(FlagsCondition condition) {
+  return static_cast<FlagsCondition>(condition ^ 1);
+}
+
 std::ostream& operator<<(std::ostream& os, const FlagsCondition& fc);
 
 // The InstructionCode is an opaque, target-specific integer that encodes
diff --git a/src/compiler/instruction-selector-impl.h b/src/compiler/instruction-selector-impl.h
index 53e288d..5e4c090 100644
--- a/src/compiler/instruction-selector-impl.h
+++ b/src/compiler/instruction-selector-impl.h
@@ -257,7 +257,7 @@ class FlagsContinuation FINAL {
 
   void Negate() {
     DCHECK(!IsNone());
-    condition_ = static_cast<FlagsCondition>(condition_ ^ 1);
+    condition_ = NegateFlagsCondition(condition_);
   }
 
   void Commute() {
@@ -317,8 +317,6 @@ class FlagsContinuation FINAL {
     if (negate) Negate();
   }
 
-  void SwapBlocks() { std::swap(true_block_, false_block_); }
-
   // Encodes this flags continuation into the given opcode.
InstructionCode Encode(InstructionCode opcode) { opcode |= FlagsModeField::encode(mode_); diff --git a/src/compiler/instruction-selector.cc b/src/compiler/instruction-selector.cc index da8dafd..2bfc3e1 100644 --- a/src/compiler/instruction-selector.cc +++ b/src/compiler/instruction-selector.cc @@ -966,14 +966,9 @@ void InstructionSelector::VisitConstant(Node* node) { void InstructionSelector::VisitGoto(BasicBlock* target) { - if (IsNextInAssemblyOrder(target)) { - // fall through to the next block. - Emit(kArchNop, NULL)->MarkAsControl(); - } else { - // jump to the next block. - OperandGenerator g(this); - Emit(kArchJmp, NULL, g.Label(target))->MarkAsControl(); - } + // jump to the next block. + OperandGenerator g(this); + Emit(kArchJmp, NULL, g.Label(target))->MarkAsControl(); } diff --git a/src/compiler/mips/code-generator-mips.cc b/src/compiler/mips/code-generator-mips.cc index 3904a71..a49be39 100644 --- a/src/compiler/mips/code-generator-mips.cc +++ b/src/compiler/mips/code-generator-mips.cc @@ -154,7 +154,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { break; } case kArchJmp: - __ Branch(GetLabel(i.InputRpo(0))); + AssembleArchJump(i.InputRpo(0)); break; case kArchNop: // don't emit code for nops. @@ -394,30 +394,21 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { void CodeGenerator::AssembleArchBranch(Instruction* instr, FlagsCondition condition) { MipsOperandConverter i(this, instr); - Label done; - - // Emit a branch. The true and false targets are always the last two inputs - // to the instruction. - BasicBlock::RpoNumber tblock = - i.InputRpo(static_cast(instr->InputCount()) - 2); - BasicBlock::RpoNumber fblock = - i.InputRpo(static_cast(instr->InputCount()) - 1); - bool fallthru = IsNextInAssemblyOrder(fblock); - Label* tlabel = GetLabel(tblock); - Label* flabel = fallthru ? &done : GetLabel(fblock); + Label* tlabel = branch->true_label; + Label* flabel = branch->false_label; Condition cc = kNoCondition; // MIPS does not have condition code flags, so compare and branch are // implemented differently than on the other arch's. The compare operations - // emit mips psuedo-instructions, which are handled here by branch + // emit mips pseudo-instructions, which are handled here by branch // instructions that do the actual comparison. Essential that the input - // registers to compare psuedo-op are not modified before this branch op, as + // registers to compare pseudo-op are not modified before this branch op, as // they are tested here. // TODO(plind): Add CHECK() to ensure that test/cmp and this branch were // not separated by other instructions. if (instr->arch_opcode() == kMipsTst) { - switch (condition) { + switch (branch->condition) { case kNotEqual: cc = ne; break; @@ -434,7 +425,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, } else if (instr->arch_opcode() == kMipsAddOvf || instr->arch_opcode() == kMipsSubOvf) { // kMipsAddOvf, SubOvf emit negative result to 'kCompareReg' on overflow. 
- switch (condition) { + switch (branch->condition) { case kOverflow: cc = lt; break; @@ -442,13 +433,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = ge; break; default: - UNSUPPORTED_COND(kMipsAddOvf, condition); + UNSUPPORTED_COND(kMipsAddOvf, branch->condition); break; } __ Branch(tlabel, cc, kCompareReg, Operand(zero_reg)); } else if (instr->arch_opcode() == kMipsCmp) { - switch (condition) { + switch (branch->condition) { case kEqual: cc = eq; break; @@ -480,19 +471,18 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = hi; break; default: - UNSUPPORTED_COND(kMipsCmp, condition); + UNSUPPORTED_COND(kMipsCmp, branch->condition); break; } __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); - if (!fallthru) __ Branch(flabel); // no fallthru to flabel. - __ bind(&done); + if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. } else if (instr->arch_opcode() == kMipsCmpD) { - // TODO(dusmil) optimize unordered checks to use less instructions + // TODO(dusmil) optimize unordered checks to use fewer instructions // even if we have to unfold BranchF macro. Label* nan = flabel; - switch (condition) { + switch (branch->condition) { case kUnorderedEqual: cc = eq; break; @@ -515,14 +505,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, nan = tlabel; break; default: - UNSUPPORTED_COND(kMipsCmpD, condition); + UNSUPPORTED_COND(kMipsCmpD, branch->condition); break; } __ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); - if (!fallthru) __ Branch(flabel); // no fallthru to flabel. - __ bind(&done); + if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. } else { PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n", @@ -532,6 +521,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, } +void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { + if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target)); +} + + // Assembles boolean materializations after an instruction. void CodeGenerator::AssembleArchBoolean(Instruction* instr, FlagsCondition condition) { diff --git a/src/compiler/mips/instruction-selector-mips.cc b/src/compiler/mips/instruction-selector-mips.cc index 4862e98..4ee81a7 100644 --- a/src/compiler/mips/instruction-selector-mips.cc +++ b/src/compiler/mips/instruction-selector-mips.cc @@ -634,11 +634,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user, void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, BasicBlock* fbranch) { FlagsContinuation cont(kNotEqual, tbranch, fbranch); - // If we can fall through to the true block, invert the branch. - if (IsNextInAssemblyOrder(tbranch)) { - cont.Negate(); - cont.SwapBlocks(); - } VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); } diff --git a/src/compiler/mips64/code-generator-mips64.cc b/src/compiler/mips64/code-generator-mips64.cc index 32ec920..c623045 100644 --- a/src/compiler/mips64/code-generator-mips64.cc +++ b/src/compiler/mips64/code-generator-mips64.cc @@ -155,7 +155,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { break; } case kArchJmp: - __ Branch(GetLabel(i.InputRpo(0))); + AssembleArchJump(i.InputRpo(0)); break; case kArchNop: // don't emit code for nops. @@ -480,17 +480,8 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { void CodeGenerator::AssembleArchBranch(Instruction* instr, FlagsCondition condition) { MipsOperandConverter i(this, instr); - Label done; - - // Emit a branch. 
The true and false targets are always the last two inputs - // to the instruction. - BasicBlock::RpoNumber tblock = - i.InputRpo(static_cast(instr->InputCount()) - 2); - BasicBlock::RpoNumber fblock = - i.InputRpo(static_cast(instr->InputCount()) - 1); - bool fallthru = IsNextInAssemblyOrder(fblock); - Label* tlabel = GetLabel(tblock); - Label* flabel = fallthru ? &done : GetLabel(fblock); + Label* tlabel = branch->true_label; + Label* flabel = branch->false_label; Condition cc = kNoCondition; // MIPS does not have condition code flags, so compare and branch are @@ -503,7 +494,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, // not separated by other instructions. if (instr->arch_opcode() == kMips64Tst) { - switch (condition) { + switch (branch->condition) { case kNotEqual: cc = ne; break; @@ -511,13 +502,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = eq; break; default: - UNSUPPORTED_COND(kMips64Tst, condition); + UNSUPPORTED_COND(kMips64Tst, branch->condition); break; } __ And(at, i.InputRegister(0), i.InputOperand(1)); __ Branch(tlabel, cc, at, Operand(zero_reg)); } else if (instr->arch_opcode() == kMips64Tst32) { - switch (condition) { + switch (branch->condition) { case kNotEqual: cc = ne; break; @@ -525,7 +516,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = eq; break; default: - UNSUPPORTED_COND(kMips64Tst32, condition); + UNSUPPORTED_COND(kMips64Tst32, branch->condition); break; } // Zero-extend registers on MIPS64 only 64-bit operand @@ -538,7 +529,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, __ Branch(tlabel, cc, at, Operand(zero_reg)); } else if (instr->arch_opcode() == kMips64Dadd || instr->arch_opcode() == kMips64Dsub) { - switch (condition) { + switch (branch->condition) { case kOverflow: cc = ne; break; @@ -546,7 +537,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = eq; break; default: - UNSUPPORTED_COND(kMips64Dadd, condition); + UNSUPPORTED_COND(kMips64Dadd, branch->condition); break; } @@ -554,7 +545,7 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, __ sra(at, i.OutputRegister(), 31); __ Branch(tlabel, cc, at, Operand(kScratchReg)); } else if (instr->arch_opcode() == kMips64Cmp) { - switch (condition) { + switch (branch->condition) { case kEqual: cc = eq; break; @@ -586,16 +577,16 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = hi; break; default: - UNSUPPORTED_COND(kMips64Cmp, condition); + UNSUPPORTED_COND(kMips64Cmp, branch->condition); break; } __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); - if (!fallthru) __ Branch(flabel); // no fallthru to flabel. + if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. __ bind(&done); } else if (instr->arch_opcode() == kMips64Cmp32) { - switch (condition) { + switch (branch->condition) { case kEqual: cc = eq; break; @@ -627,11 +618,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, cc = hi; break; default: - UNSUPPORTED_COND(kMips64Cmp32, condition); + UNSUPPORTED_COND(kMips64Cmp32, branch->condition); break; } - switch (condition) { + switch (branch->condition) { case kEqual: case kNotEqual: case kSignedLessThan: @@ -657,18 +648,18 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, } break; default: - UNSUPPORTED_COND(kMips64Cmp, condition); + UNSUPPORTED_COND(kMips64Cmp, branch->condition); break; } __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1)); - if (!fallthru) __ Branch(flabel); // no fallthru to flabel. 
+ if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. __ bind(&done); } else if (instr->arch_opcode() == kMips64CmpD) { // TODO(dusmil) optimize unordered checks to use less instructions // even if we have to unfold BranchF macro. Label* nan = flabel; - switch (condition) { + switch (branch->condition) { case kUnorderedEqual: cc = eq; break; @@ -691,13 +682,13 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, nan = tlabel; break; default: - UNSUPPORTED_COND(kMips64CmpD, condition); + UNSUPPORTED_COND(kMips64CmpD, branch->condition); break; } __ BranchF(tlabel, nan, cc, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); - if (!fallthru) __ Branch(flabel); // no fallthru to flabel. + if (!branch->fallthru) __ Branch(flabel); // no fallthru to flabel. __ bind(&done); } else { @@ -708,6 +699,11 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, } +void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { + if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target)); +} + + // Assembles boolean materializations after an instruction. void CodeGenerator::AssembleArchBoolean(Instruction* instr, FlagsCondition condition) { diff --git a/src/compiler/mips64/instruction-selector-mips64.cc b/src/compiler/mips64/instruction-selector-mips64.cc index f1a585d..80504ed 100644 --- a/src/compiler/mips64/instruction-selector-mips64.cc +++ b/src/compiler/mips64/instruction-selector-mips64.cc @@ -871,12 +871,6 @@ void VisitWordCompareZero(InstructionSelector* selector, Node* user, void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, BasicBlock* fbranch) { FlagsContinuation cont(kNotEqual, tbranch, fbranch); - // If we can fall through to the true block, invert the branch. - if (IsNextInAssemblyOrder(tbranch)) { - cont.Negate(); - cont.SwapBlocks(); - } - VisitWordCompareZero(this, branch, branch->InputAt(0), &cont); } diff --git a/src/compiler/x64/code-generator-x64.cc b/src/compiler/x64/code-generator-x64.cc index 212c1de..174160c 100644 --- a/src/compiler/x64/code-generator-x64.cc +++ b/src/compiler/x64/code-generator-x64.cc @@ -238,7 +238,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { break; } case kArchJmp: - __ jmp(GetLabel(i.InputRpo(0))); + AssembleArchJump(i.InputRpo(0)); break; case kArchNop: // don't emit code for nops. @@ -631,22 +631,13 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) { // Assembles branches after this instruction. -void CodeGenerator::AssembleArchBranch(Instruction* instr, - FlagsCondition condition) { +void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) { X64OperandConverter i(this, instr); - Label done; - - // Emit a branch. The true and false targets are always the last two inputs - // to the instruction. - BasicBlock::RpoNumber tblock = - i.InputRpo(static_cast(instr->InputCount()) - 2); - BasicBlock::RpoNumber fblock = - i.InputRpo(static_cast(instr->InputCount()) - 1); - bool fallthru = IsNextInAssemblyOrder(fblock); - Label* tlabel = GetLabel(tblock); - Label* flabel = fallthru ? &done : GetLabel(fblock); - Label::Distance flabel_distance = fallthru ? Label::kNear : Label::kFar; - switch (condition) { + Label::Distance flabel_distance = + branch->fallthru ? Label::kNear : Label::kFar; + Label* tlabel = branch->true_label; + Label* flabel = branch->false_label; + switch (branch->condition) { case kUnorderedEqual: __ j(parity_even, flabel, flabel_distance); // Fall through. 
@@ -702,8 +693,12 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, __ j(no_overflow, tlabel); break; } - if (!fallthru) __ jmp(flabel, flabel_distance); // no fallthru to flabel. - __ bind(&done); + if (!branch->fallthru) __ jmp(flabel, flabel_distance); +} + + +void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) { + if (!IsNextInAssemblyOrder(target)) __ jmp(GetLabel(target)); } diff --git a/src/compiler/x64/instruction-selector-x64.cc b/src/compiler/x64/instruction-selector-x64.cc index 388a0c2..9de8e6d 100644 --- a/src/compiler/x64/instruction-selector-x64.cc +++ b/src/compiler/x64/instruction-selector-x64.cc @@ -942,12 +942,6 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch, FlagsContinuation cont(kNotEqual, tbranch, fbranch); - // If we can fall through to the true block, invert the branch. - if (IsNextInAssemblyOrder(tbranch)) { - cont.Negate(); - cont.SwapBlocks(); - } - // Try to combine with comparisons against 0 by simply inverting the branch. while (CanCover(user, value)) { if (value->opcode() == IrOpcode::kWord32Equal) { -- 2.7.4
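
The core of this change is the architecture-independent branch set-up that now lives in CodeGenerator::AssembleInstruction: a redundant branch collapses to a jump (or to nothing), the condition is negated when the true block is the fall-through block, and each backend's AssembleArchBranch only consumes a filled-in BranchInfo. The sketch below is a standalone model of that decision logic, not V8 source; Label, BranchPlanner, and the integer block numbers are simplified stand-ins for the real assembler labels and RPO numbers.

// Standalone sketch (not V8 source): models the architecture-independent
// branch set-up that the patch centralizes in AssembleInstruction.
#include <cassert>
#include <utility>

// Conditions are laid out in negation pairs, so XOR-ing the low bit flips
// a condition to its negation (kEqual <-> kNotEqual, and so on).
enum FlagsCondition {
  kEqual = 0,
  kNotEqual = 1,
  kSignedLessThan = 2,
  kSignedGreaterThanOrEqual = 3
  // ... remaining pairs elided in this sketch ...
};

inline FlagsCondition NegateFlagsCondition(FlagsCondition condition) {
  return static_cast<FlagsCondition>(condition ^ 1);
}

struct Label { int block; };  // stand-in for an assembler label

// Mirrors the BranchInfo struct added to code-generator.h.
struct BranchInfo {
  FlagsCondition condition;
  Label* true_label;
  Label* false_label;
  bool fallthru;  // false target immediately follows in assembly order
};

// Emitting the branch is architecture-specific; this sketch only models the
// shared planning step that the patch factors out of the backends.
struct BranchPlanner {
  int next_block;  // block that follows the current one in assembly order
  Label* labels;   // one label per block, indexed by block number

  bool IsNextInAssemblyOrder(int rpo) const { return rpo == next_block; }

  // Returns true and fills *branch if a conditional branch must be emitted;
  // returns false for a redundant branch (both targets identical), where an
  // unconditional jump -- or nothing at all -- suffices.
  bool Plan(FlagsCondition condition, int true_rpo, int false_rpo,
            BranchInfo* branch) const {
    if (true_rpo == false_rpo) return false;  // redundant branch
    if (IsNextInAssemblyOrder(true_rpo)) {
      // The true block is next: negate the condition and swap the targets so
      // the true block is reached by falling through.
      std::swap(true_rpo, false_rpo);
      condition = NegateFlagsCondition(condition);
    }
    branch->condition = condition;
    branch->true_label = &labels[true_rpo];
    branch->false_label = &labels[false_rpo];
    branch->fallthru = IsNextInAssemblyOrder(false_rpo);
    return true;
  }
};

int main() {
  Label labels[4] = {{0}, {1}, {2}, {3}};
  BranchPlanner planner{2, labels};  // block 2 follows the current block

  BranchInfo branch;
  bool needed = planner.Plan(kEqual, /*true_rpo=*/2, /*false_rpo=*/3, &branch);
  assert(needed);
  // Because the original true block (2) falls through, the planned branch
  // tests the negated condition and targets the original false block (3).
  assert(branch.condition == kNotEqual);
  assert(branch.true_label->block == 3);
  assert(branch.false_label->block == 2);
  assert(branch.fallthru);
  return 0;
}

Under this scheme the instruction selector no longer needs to know the assembly order at all, which is why VisitBranch drops its Negate()/SwapBlocks() calls and VisitGoto now always emits kArchJmp; the code generator elides the jump later via AssembleArchJump when the target is the next block anyway.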
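
The new NegateFlagsCondition helper works only because FlagsCondition lists every condition directly beside its negation, so flipping the lowest bit moves between the two members of a pair. A minimal compile-time check of that pairing property follows; the enumerator values here are assumed for illustration and are not the actual V8 enum.

// Sketch: illustrative enumerator values; only the pairing (negation
// partners differ in the lowest bit) is what the helper relies on.
enum FlagsCondition {
  kEqual = 0, kNotEqual = 1,
  kSignedLessThan = 2, kSignedGreaterThanOrEqual = 3,
  kOverflow = 4, kNotOverflow = 5
};

constexpr FlagsCondition NegateFlagsCondition(FlagsCondition condition) {
  return static_cast<FlagsCondition>(condition ^ 1);
}

static_assert(NegateFlagsCondition(kEqual) == kNotEqual, "pairing");
static_assert(NegateFlagsCondition(kNotEqual) == kEqual, "pairing");
static_assert(NegateFlagsCondition(kOverflow) == kNotOverflow, "pairing");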