[turbofan] Mark arm64 cbz/cbnz and tbz/tbnz instructions as branch instructions.
author     baptiste.afsa <baptiste.afsa@arm.com>
Thu, 11 Dec 2014 12:45:27 +0000 (04:45 -0800)
committer  Commit bot <commit-bot@chromium.org>
Thu, 11 Dec 2014 12:45:33 +0000 (12:45 +0000)
The instruction selector now selects pseudo instructions, CompareAndBranch or
TestAndBranch, which are associated with their continuations, so that the generic
code in the code generator treats them as branch instructions and can apply
optimizations such as avoiding a branch when the code can fall through.

R=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/798553002

Cr-Commit-Position: refs/heads/master@{#25773}
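
For context only (not part of the patch): below is a minimal, standalone C++
sketch of the pattern this change adopts. All names in it (PseudoBranch,
AssembleTestAndBranch, the printed text assembly) are simplified stand-ins,
not V8's actual classes; in the real code the condition is packed into the
opcode via cont.Encode() and the tbz/tbnz/cbz/cbnz emission happens in
AssembleArchBranch, as shown in the diff below.

// Sketch: the selector emits one pseudo "test-and-branch" instruction that
// carries its condition, and the generic branch emitter decides whether the
// trailing jump to the false block can be elided because the code falls
// through. Hypothetical simplified types, not V8's API.
#include <cstdio>

enum FlagsCondition { kEqual, kNotEqual };

struct PseudoBranch {
  FlagsCondition condition;  // in V8, packed into the opcode by cont.Encode()
  int reg;                   // register to test
  int bit;                   // single bit position extracted from the mask
  int true_block;
  int false_block;
};

// Stand-in for the branch assembly step: emit tbz/tbnz toward the true block
// and only emit the unconditional branch when the false block does not
// immediately follow in assembly order (i.e. no fallthrough is possible).
void AssembleTestAndBranch(const PseudoBranch& b, int next_block_in_order) {
  const char* op = (b.condition == kEqual) ? "tbz" : "tbnz";
  std::printf("  %s w%d, #%d, .L%d\n", op, b.reg, b.bit, b.true_block);
  if (b.false_block != next_block_in_order) {
    std::printf("  b .L%d\n", b.false_block);  // no fallthrough available
  }
}

int main() {
  // (x & 8) == 0 ? goto block 3 : goto block 2, with block 2 next in order.
  PseudoBranch b{kEqual, /*reg=*/0, /*bit=*/3, /*true_block=*/3,
                 /*false_block=*/2};
  AssembleTestAndBranch(b, /*next_block_in_order=*/2);  // branch to .L2 elided
  return 0;
}

The benefit of routing these through the generic branch path is that the
fallthrough check lives in one place instead of being duplicated per opcode,
which is what the deleted ASSEMBLE_BRANCH_TO macro used to do.
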

src/compiler/arm64/code-generator-arm64.cc
src/compiler/arm64/instruction-codes-arm64.h
src/compiler/arm64/instruction-selector-arm64.cc
test/unittests/compiler/arm64/instruction-selector-arm64-unittest.cc

index 62145cb445ac56e5758e8a75d05c2a2801904af7..636184a086b1a53a11ee3330c36f8c9c32936cf5 100644 (file)
@@ -280,13 +280,6 @@ class OutOfLineLoadInteger FINAL : public OutOfLineCode {
   } while (0)
 
 
-#define ASSEMBLE_BRANCH_TO(target)                    \
-  do {                                                \
-    bool fallthrough = IsNextInAssemblyOrder(target); \
-    if (!fallthrough) __ B(GetLabel(target));         \
-  } while (0)
-
-
 // Assembles an instruction after register allocation, producing machine code.
 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
   Arm64OperandConverter i(this, instr);
@@ -541,29 +534,12 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       __ Ubfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt8(1),
               i.InputInt8(2));
       break;
-    case kArm64Tbz:
-      __ Tbz(i.InputRegister64(0), i.InputInt6(1), GetLabel(i.InputRpo(2)));
-      ASSEMBLE_BRANCH_TO(i.InputRpo(3));
-      break;
-    case kArm64Tbz32:
-      __ Tbz(i.InputRegister32(0), i.InputInt5(1), GetLabel(i.InputRpo(2)));
-      ASSEMBLE_BRANCH_TO(i.InputRpo(3));
-      break;
-    case kArm64Tbnz:
-      __ Tbnz(i.InputRegister64(0), i.InputInt6(1), GetLabel(i.InputRpo(2)));
-      ASSEMBLE_BRANCH_TO(i.InputRpo(3));
+    case kArm64TestAndBranch32:
+    case kArm64TestAndBranch:
+      // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
       break;
-    case kArm64Tbnz32:
-      __ Tbnz(i.InputRegister32(0), i.InputInt5(1), GetLabel(i.InputRpo(2)));
-      ASSEMBLE_BRANCH_TO(i.InputRpo(3));
-      break;
-    case kArm64Cbz32:
-      __ Cbz(i.InputRegister32(0), GetLabel(i.InputRpo(1)));
-      ASSEMBLE_BRANCH_TO(i.InputRpo(2));
-      break;
-    case kArm64Cbnz32:
-      __ Cbnz(i.InputRegister32(0), GetLabel(i.InputRpo(1)));
-      ASSEMBLE_BRANCH_TO(i.InputRpo(2));
+    case kArm64CompareAndBranch32:
+      // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
       break;
     case kArm64Claim: {
       int words = MiscField::decode(instr->opcode());
@@ -766,61 +742,99 @@ void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
   Arm64OperandConverter i(this, instr);
   Label* tlabel = branch->true_label;
   Label* flabel = branch->false_label;
-  switch (branch->condition) {
-    case kUnorderedEqual:
-      __ B(vs, flabel);
-    // Fall through.
-    case kEqual:
-      __ B(eq, tlabel);
-      break;
-    case kUnorderedNotEqual:
-      __ B(vs, tlabel);
-    // Fall through.
-    case kNotEqual:
-      __ B(ne, tlabel);
-      break;
-    case kSignedLessThan:
-      __ B(lt, tlabel);
-      break;
-    case kSignedGreaterThanOrEqual:
-      __ B(ge, tlabel);
-      break;
-    case kSignedLessThanOrEqual:
-      __ B(le, tlabel);
-      break;
-    case kSignedGreaterThan:
-      __ B(gt, tlabel);
-      break;
-    case kUnorderedLessThan:
-      __ B(vs, flabel);
-    // Fall through.
-    case kUnsignedLessThan:
-      __ B(lo, tlabel);
-      break;
-    case kUnorderedGreaterThanOrEqual:
-      __ B(vs, tlabel);
-    // Fall through.
-    case kUnsignedGreaterThanOrEqual:
-      __ B(hs, tlabel);
-      break;
-    case kUnorderedLessThanOrEqual:
-      __ B(vs, flabel);
-    // Fall through.
-    case kUnsignedLessThanOrEqual:
-      __ B(ls, tlabel);
-      break;
-    case kUnorderedGreaterThan:
-      __ B(vs, tlabel);
-    // Fall through.
-    case kUnsignedGreaterThan:
-      __ B(hi, tlabel);
-      break;
-    case kOverflow:
-      __ B(vs, tlabel);
-      break;
-    case kNotOverflow:
-      __ B(vc, tlabel);
-      break;
+  FlagsCondition condition = branch->condition;
+  ArchOpcode opcode = instr->arch_opcode();
+
+  if (opcode == kArm64CompareAndBranch32) {
+    switch (condition) {
+      case kEqual:
+        __ Cbz(i.InputRegister32(0), tlabel);
+        break;
+      case kNotEqual:
+        __ Cbnz(i.InputRegister32(0), tlabel);
+        break;
+      default:
+        UNREACHABLE();
+    }
+  } else if (opcode == kArm64TestAndBranch32) {
+    switch (condition) {
+      case kEqual:
+        __ Tbz(i.InputRegister32(0), i.InputInt5(1), tlabel);
+        break;
+      case kNotEqual:
+        __ Tbnz(i.InputRegister32(0), i.InputInt5(1), tlabel);
+        break;
+      default:
+        UNREACHABLE();
+    }
+  } else if (opcode == kArm64TestAndBranch) {
+    switch (condition) {
+      case kEqual:
+        __ Tbz(i.InputRegister64(0), i.InputInt6(1), tlabel);
+        break;
+      case kNotEqual:
+        __ Tbnz(i.InputRegister64(0), i.InputInt6(1), tlabel);
+        break;
+      default:
+        UNREACHABLE();
+    }
+  } else {
+    switch (condition) {
+      case kUnorderedEqual:
+        __ B(vs, flabel);
+      // Fall through.
+      case kEqual:
+        __ B(eq, tlabel);
+        break;
+      case kUnorderedNotEqual:
+        __ B(vs, tlabel);
+      // Fall through.
+      case kNotEqual:
+        __ B(ne, tlabel);
+        break;
+      case kSignedLessThan:
+        __ B(lt, tlabel);
+        break;
+      case kSignedGreaterThanOrEqual:
+        __ B(ge, tlabel);
+        break;
+      case kSignedLessThanOrEqual:
+        __ B(le, tlabel);
+        break;
+      case kSignedGreaterThan:
+        __ B(gt, tlabel);
+        break;
+      case kUnorderedLessThan:
+        __ B(vs, flabel);
+      // Fall through.
+      case kUnsignedLessThan:
+        __ B(lo, tlabel);
+        break;
+      case kUnorderedGreaterThanOrEqual:
+        __ B(vs, tlabel);
+      // Fall through.
+      case kUnsignedGreaterThanOrEqual:
+        __ B(hs, tlabel);
+        break;
+      case kUnorderedLessThanOrEqual:
+        __ B(vs, flabel);
+      // Fall through.
+      case kUnsignedLessThanOrEqual:
+        __ B(ls, tlabel);
+        break;
+      case kUnorderedGreaterThan:
+        __ B(vs, tlabel);
+      // Fall through.
+      case kUnsignedGreaterThan:
+        __ B(hi, tlabel);
+        break;
+      case kOverflow:
+        __ B(vs, tlabel);
+        break;
+      case kNotOverflow:
+        __ B(vc, tlabel);
+        break;
+    }
   }
   if (!branch->fallthru) __ B(flabel);  // no fallthru to flabel.
 }
index 28521263049150733bfaf07b79e3e7e6d6ed87e3..863451f7c5dc5c496ae8ce9dcfd853b6baa7f658 100644 (file)
@@ -70,12 +70,9 @@ namespace compiler {
   V(Arm64Sxtw)                     \
   V(Arm64Ubfx)                     \
   V(Arm64Ubfx32)                   \
-  V(Arm64Tbz)                      \
-  V(Arm64Tbz32)                    \
-  V(Arm64Tbnz)                     \
-  V(Arm64Tbnz32)                   \
-  V(Arm64Cbz32)                    \
-  V(Arm64Cbnz32)                   \
+  V(Arm64TestAndBranch32)          \
+  V(Arm64TestAndBranch)            \
+  V(Arm64CompareAndBranch32)       \
   V(Arm64Claim)                    \
   V(Arm64Poke)                     \
   V(Arm64PokePairZero)             \
index 7379ec08556765328cb5e5e0c890bb083acbbafa..0922dace978c060e7a43e44b2b798bcb7158ca54 100644 (file)
@@ -1347,9 +1347,8 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
           // If the mask has only one bit set, we can use tbz/tbnz.
           DCHECK((cont.condition() == kEqual) ||
                  (cont.condition() == kNotEqual));
-          ArchOpcode opcode =
-              (cont.condition() == kEqual) ? kArm64Tbz32 : kArm64Tbnz32;
-          Emit(opcode, NULL, g.UseRegister(m.left().node()),
+          Emit(cont.Encode(kArm64TestAndBranch32), NULL,
+               g.UseRegister(m.left().node()),
                g.TempImmediate(
                    base::bits::CountTrailingZeros32(m.right().Value())),
                g.Label(cont.true_block()),
@@ -1366,9 +1365,8 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
           // If the mask has only one bit set, we can use tbz/tbnz.
           DCHECK((cont.condition() == kEqual) ||
                  (cont.condition() == kNotEqual));
-          ArchOpcode opcode =
-              (cont.condition() == kEqual) ? kArm64Tbz : kArm64Tbnz;
-          Emit(opcode, NULL, g.UseRegister(m.left().node()),
+          Emit(cont.Encode(kArm64TestAndBranch), NULL,
+               g.UseRegister(m.left().node()),
                g.TempImmediate(
                    base::bits::CountTrailingZeros64(m.right().Value())),
                g.Label(cont.true_block()),
@@ -1384,9 +1382,8 @@ void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
   }
 
   // Branch could not be combined with a compare, compare against 0 and branch.
-  DCHECK((cont.condition() == kEqual) || (cont.condition() == kNotEqual));
-  ArchOpcode opcode = (cont.condition() == kEqual) ? kArm64Cbz32 : kArm64Cbnz32;
-  Emit(opcode, NULL, g.UseRegister(value), g.Label(cont.true_block()),
+  Emit(cont.Encode(kArm64CompareAndBranch32), NULL, g.UseRegister(value),
+       g.Label(cont.true_block()),
        g.Label(cont.false_block()))->MarkAsControl();
 }
 
index 150d40b3d9be843d3eecbb19a8d54f579b58a199..cd3ce090f7043a4e444f37604ef926534621d3c7 100644 (file)
@@ -808,7 +808,8 @@ TEST_F(InstructionSelectorTest, Word32AndBranchWithOneBitMaskOnRight) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbnz32, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch32, s[0]->arch_opcode());
+    EXPECT_EQ(kNotEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt32(s[0]->InputAt(1)));
@@ -827,7 +828,8 @@ TEST_F(InstructionSelectorTest, Word32AndBranchWithOneBitMaskOnRight) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbz32, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch32, s[0]->arch_opcode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt32(s[0]->InputAt(1)));
@@ -847,7 +849,8 @@ TEST_F(InstructionSelectorTest, Word32AndBranchWithOneBitMaskOnLeft) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbnz32, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch32, s[0]->arch_opcode());
+    EXPECT_EQ(kNotEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt32(s[0]->InputAt(1)));
@@ -866,7 +869,8 @@ TEST_F(InstructionSelectorTest, Word32AndBranchWithOneBitMaskOnLeft) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbz32, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch32, s[0]->arch_opcode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt32(s[0]->InputAt(1)));
@@ -886,7 +890,8 @@ TEST_F(InstructionSelectorTest, Word64AndBranchWithOneBitMaskOnRight) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbnz, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch, s[0]->arch_opcode());
+    EXPECT_EQ(kNotEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt64(s[0]->InputAt(1)));
@@ -905,7 +910,8 @@ TEST_F(InstructionSelectorTest, Word64AndBranchWithOneBitMaskOnRight) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbz, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch, s[0]->arch_opcode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt64(s[0]->InputAt(1)));
@@ -925,7 +931,8 @@ TEST_F(InstructionSelectorTest, Word64AndBranchWithOneBitMaskOnLeft) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbnz, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch, s[0]->arch_opcode());
+    EXPECT_EQ(kNotEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt64(s[0]->InputAt(1)));
@@ -944,7 +951,8 @@ TEST_F(InstructionSelectorTest, Word64AndBranchWithOneBitMaskOnLeft) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Tbz, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64TestAndBranch, s[0]->arch_opcode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
     EXPECT_EQ(4U, s[0]->InputCount());
     EXPECT_EQ(InstructionOperand::IMMEDIATE, s[0]->InputAt(1)->kind());
     EXPECT_EQ(bit, s.ToInt64(s[0]->InputAt(1)));
@@ -964,7 +972,8 @@ TEST_F(InstructionSelectorTest, CompareAgainstZeroAndBranch) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Cbnz32, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64CompareAndBranch32, s[0]->arch_opcode());
+    EXPECT_EQ(kNotEqual, s[0]->flags_condition());
     EXPECT_EQ(3U, s[0]->InputCount());
     EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
   }
@@ -980,7 +989,8 @@ TEST_F(InstructionSelectorTest, CompareAgainstZeroAndBranch) {
     m.Return(m.Int32Constant(0));
     Stream s = m.Build();
     ASSERT_EQ(1U, s.size());
-    EXPECT_EQ(kArm64Cbz32, s[0]->arch_opcode());
+    EXPECT_EQ(kArm64CompareAndBranch32, s[0]->arch_opcode());
+    EXPECT_EQ(kEqual, s[0]->flags_condition());
     EXPECT_EQ(3U, s[0]->InputCount());
     EXPECT_EQ(s.ToVreg(p0), s.ToVreg(s[0]->InputAt(0)));
   }