}
/// IsSingleValuePHICycle - Check if MI is a PHI where all the source operands
-/// are copies of SingleValReg, possibly via copies through other PHIs. If
+/// are copies of SingleValReg, possibly via copies through other PHIs. If
/// SingleValReg is zero on entry, it is set to the register with the single
-/// non-copy value. PHIsInCycle is a set used to keep track of the PHIs that
-/// have been scanned.
+/// non-copy value. PHIsInCycle is a set used to keep track of the PHIs that
+/// have been scanned. PHIs may be grouped in a single cycle, in several
+/// cycles, or in chains.
bool OptimizePHIs::IsSingleValuePHICycle(MachineInstr *MI,
unsigned &SingleValReg,
InstrSet &PHIsInCycle) {
if (SrcMI && SrcMI->isCopy() &&
!SrcMI->getOperand(0).getSubReg() &&
!SrcMI->getOperand(1).getSubReg() &&
- TargetRegisterInfo::isVirtualRegister(SrcMI->getOperand(1).getReg()))
- SrcMI = MRI->getVRegDef(SrcMI->getOperand(1).getReg());
+ TargetRegisterInfo::isVirtualRegister(SrcMI->getOperand(1).getReg())) {
+ SrcReg = SrcMI->getOperand(1).getReg();
+ SrcMI = MRI->getVRegDef(SrcReg);
+ }
if (!SrcMI)
return false;
return false;
} else {
// Fail if there is more than one non-phi/non-move register.
- if (SingleValReg != 0)
+ if (SingleValReg != 0 && SingleValReg != SrcReg)
return false;
SingleValReg = SrcReg;
}
if (!MRI->constrainRegClass(SingleValReg, MRI->getRegClass(OldReg)))
continue;
+  // Clear kill flags in case SingleValReg was obtained from a copy
+  // instruction, since the old kill points are no longer valid after
+  // replacing OldReg below.
+ MRI->clearKillFlags(SingleValReg);
+
MRI->replaceRegWith(OldReg, SingleValReg);
MI->eraseFromParent();
++NumPHICycles;
; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm4
; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm5
; AVX1-NEXT: vextractf128 $1, %ymm2, %xmm2
-; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm6
-; AVX1-NEXT: vpaddd %xmm6, %xmm2, %xmm2
+; AVX1-NEXT: vpaddd %xmm5, %xmm2, %xmm2
; AVX1-NEXT: vpaddd %xmm2, %xmm5, %xmm2
; AVX1-NEXT: vpaddd %xmm2, %xmm4, %xmm2
; AVX1-NEXT: vpaddd %xmm3, %xmm0, %xmm0
; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm4
; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm5
; AVX1-NEXT: vextractf128 $1, %ymm2, %xmm2
-; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm6
-; AVX1-NEXT: vpaddd %xmm6, %xmm2, %xmm2
+; AVX1-NEXT: vpaddd %xmm5, %xmm2, %xmm2
; AVX1-NEXT: vpaddd %xmm2, %xmm5, %xmm2
; AVX1-NEXT: vpaddd %xmm2, %xmm4, %xmm2
; AVX1-NEXT: vpaddd %xmm3, %xmm0, %xmm0
--- /dev/null
+# RUN: llc -run-pass opt-phis -march=x86-64 -o - %s | FileCheck %s
+# All PHIs should be removed since they can be safely replaced
+# by the %8 register.
+# CHECK-NOT: PHI
+--- |
+ define void @test() {
+ ret void
+ }
+...
+---
+name: test
+alignment: 4
+tracksRegLiveness: true
+jumpTable:
+ kind: block-address
+ entries:
+ - id: 0
+ blocks: [ '%bb.3', '%bb.2', '%bb.1', '%bb.4' ]
+body: |
+ bb.0:
+ liveins: $edi, $ymm0, $rsi
+
+ %9:gr64 = COPY $rsi
+ %8:vr256 = COPY $ymm0
+ %7:gr32 = COPY $edi
+ %11:gr32 = SAR32ri %7, 31, implicit-def dead $eflags
+ %12:gr32 = SHR32ri %11, 30, implicit-def dead $eflags
+ %13:gr32 = ADD32rr %7, killed %12, implicit-def dead $eflags
+ %14:gr32 = AND32ri8 %13, -4, implicit-def dead $eflags
+ %15:gr32 = SUB32rr %7, %14, implicit-def dead $eflags
+ %10:gr64_nosp = SUBREG_TO_REG 0, %15, %subreg.sub_32bit
+ %16:gr32 = SUB32ri8 %15, 3, implicit-def $eflags
+ JA_1 %bb.8, implicit $eflags
+
+ bb.9:
+ JMP64m $noreg, 8, %10, %jump-table.0, $noreg :: (load 8 from jump-table)
+
+ bb.1:
+ %0:vr256 = COPY %8
+ JMP_1 %bb.5
+
+ bb.2:
+ %1:vr256 = COPY %8
+ JMP_1 %bb.6
+
+ bb.3:
+ %2:vr256 = COPY %8
+ JMP_1 %bb.7
+
+ bb.4:
+ %3:vr256 = COPY %8
+ %17:vr128 = VEXTRACTF128rr %8, 1
+ VPEXTRDmr %9, 1, $noreg, 12, $noreg, killed %17, 2
+
+ bb.5:
+ %4:vr256 = PHI %0, %bb.1, %3, %bb.4
+ %18:vr128 = VEXTRACTF128rr %4, 1
+ VPEXTRDmr %9, 1, $noreg, 8, $noreg, killed %18, 1
+
+ bb.6:
+ %5:vr256 = PHI %1, %bb.2, %4, %bb.5
+ %19:vr128 = VEXTRACTF128rr %5, 1
+ VMOVPDI2DImr %9, 1, $noreg, 4, $noreg, killed %19
+
+ bb.7:
+ %6:vr256 = PHI %2, %bb.3, %5, %bb.6
+ %20:vr128 = COPY %6.sub_xmm
+ VPEXTRDmr %9, 1, $noreg, 0, $noreg, killed %20, 3
+
+ bb.8:
+ RET 0
+...
; AVX1-NEXT: vpaddd %xmm0, %xmm0, %xmm2
; AVX1-NEXT: vextractf128 $1, %ymm1, %xmm3
; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm4
-; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm5
-; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm6
-; AVX1-NEXT: vpaddd %xmm6, %xmm5, %xmm5
+; AVX1-NEXT: vpaddd %xmm4, %xmm4, %xmm5
; AVX1-NEXT: vpaddd %xmm5, %xmm4, %xmm4
; AVX1-NEXT: vpaddd %xmm4, %xmm3, %xmm3
; AVX1-NEXT: vpaddd %xmm2, %xmm0, %xmm0