; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB1_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB1_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB1_4
; RV64ZVE32F-NEXT: .LBB1_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB1_3: # %cond.load
; RV64ZVE32F-NEXT: lb a0, 0(a0)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB1_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB1_2
; RV64ZVE32F-NEXT: .LBB1_4: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB2_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB2_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB2_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB3_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB3_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB3_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB4_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB4_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB4_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB5_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB5_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB5_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB6_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB6_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB6_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB7_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB7_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB7_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lb a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB14_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB14_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB14_4
; RV64ZVE32F-NEXT: .LBB14_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB14_3: # %cond.load
; RV64ZVE32F-NEXT: lh a0, 0(a0)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB14_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB14_2
; RV64ZVE32F-NEXT: .LBB14_4: # %cond.load1
; RV64ZVE32F-NEXT: lh a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB15_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB15_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB15_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lh a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB16_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB16_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB16_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lh a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB17_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB17_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB17_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lh a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB18_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB18_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB18_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lh a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB28_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB28_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB28_4
; RV64ZVE32F-NEXT: .LBB28_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB28_3: # %cond.load
; RV64ZVE32F-NEXT: lw a0, 0(a0)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB28_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB28_2
; RV64ZVE32F-NEXT: .LBB28_4: # %cond.load1
; RV64ZVE32F-NEXT: lw a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB29_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB29_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB29_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lw a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, tu, ma
; RV64ZVE32F-NEXT: vmv.s.x v8, a0
; RV64ZVE32F-NEXT: .LBB30_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB30_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB30_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: lw a0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, ta, ma
; RV64ZVE32F-NEXT: # %bb.1: # %cond.load
; RV64ZVE32F-NEXT: ld a2, 0(a0)
; RV64ZVE32F-NEXT: .LBB43_2: # %else
-; RV64ZVE32F-NEXT: andi a0, a4, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB43_4
+; RV64ZVE32F-NEXT: andi a4, a4, 2
+; RV64ZVE32F-NEXT: beqz a4, .LBB43_4
; RV64ZVE32F-NEXT: # %bb.3: # %cond.load1
; RV64ZVE32F-NEXT: ld a3, 0(a1)
; RV64ZVE32F-NEXT: .LBB43_4: # %else2
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB59_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB59_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB59_4
; RV64ZVE32F-NEXT: .LBB59_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB59_3: # %cond.load
; RV64ZVE32F-NEXT: flh ft0, 0(a0)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, tu, ma
; RV64ZVE32F-NEXT: vfmv.s.f v8, ft0
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB59_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB59_2
; RV64ZVE32F-NEXT: .LBB59_4: # %cond.load1
; RV64ZVE32F-NEXT: flh ft0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB69_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB69_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB69_4
; RV64ZVE32F-NEXT: .LBB69_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB69_3: # %cond.load
; RV64ZVE32F-NEXT: flw ft0, 0(a0)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, tu, ma
; RV64ZVE32F-NEXT: vfmv.s.f v8, ft0
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB69_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB69_2
; RV64ZVE32F-NEXT: .LBB69_4: # %cond.load1
; RV64ZVE32F-NEXT: flw ft0, 0(a1)
; RV64ZVE32F-NEXT: vsetivli zero, 2, e32, m1, ta, ma
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB82_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB82_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB82_4
; RV64ZVE32F-NEXT: .LBB82_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB82_3: # %cond.load
; RV64ZVE32F-NEXT: fld fa0, 0(a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB82_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB82_2
; RV64ZVE32F-NEXT: .LBB82_4: # %cond.load1
; RV64ZVE32F-NEXT: fld fa1, 0(a1)
; RV64ZVE32F-NEXT: ret
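; For context: the check lines above are the scalarized lowering of 2- and
; 4-element masked gathers on subtargets without native indexed vector
; loads. A minimal sketch of the kind of IR these tests exercise — the
; function and value names here are hypothetical, not taken from this
; patch:

declare <2 x i8> @llvm.masked.gather.v2i8.v2p0(<2 x ptr>, i32, <2 x i1>, <2 x i8>)

; Illustrative only; names are not from the patch. Each mask bit becomes
; an andi/bnez test (the a2/a3/a4 bit tests above), and the %cond.load /
; %cond.load1 blocks each load one lane into the result vector.
define <2 x i8> @mgather_v2i8(<2 x ptr> %ptrs, <2 x i1> %m, <2 x i8> %passthru) {
  %v = call <2 x i8> @llvm.masked.gather.v2i8.v2p0(<2 x ptr> %ptrs, i32 1, <2 x i1> %m, <2 x i8> %passthru)
  ret <2 x i8> %v
}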
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB1_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB1_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB1_4
; RV64ZVE32F-NEXT: .LBB1_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB1_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vse8.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB1_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB1_2
; RV64ZVE32F-NEXT: .LBB1_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: vnsrl.wi v8, v8, 0
; RV64ZVE32F-NEXT: bnez a3, .LBB2_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB2_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB2_4
; RV64ZVE32F-NEXT: .LBB2_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB2_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vse8.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB2_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB2_2
; RV64ZVE32F-NEXT: .LBB2_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: vnsrl.wi v8, v8, 0
; RV64ZVE32F-NEXT: bnez a3, .LBB3_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB3_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB3_4
; RV64ZVE32F-NEXT: .LBB3_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB3_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vse8.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB3_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB3_2
; RV64ZVE32F-NEXT: .LBB3_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB5_7
; RV64ZVE32F-NEXT: .LBB5_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB5_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB5_8
; RV64ZVE32F-NEXT: .LBB5_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB5_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse8.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB5_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB5_4
; RV64ZVE32F-NEXT: .LBB5_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB6_7
; RV64ZVE32F-NEXT: .LBB6_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB6_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB6_8
; RV64ZVE32F-NEXT: .LBB6_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB6_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse8.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB6_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB6_4
; RV64ZVE32F-NEXT: .LBB6_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e8, mf4, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB11_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB11_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB11_4
; RV64ZVE32F-NEXT: .LBB11_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB11_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vse16.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB11_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB11_2
; RV64ZVE32F-NEXT: .LBB11_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB12_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB12_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB12_4
; RV64ZVE32F-NEXT: .LBB12_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB12_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vse16.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB12_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB12_2
; RV64ZVE32F-NEXT: .LBB12_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB14_7
; RV64ZVE32F-NEXT: .LBB14_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB14_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB14_8
; RV64ZVE32F-NEXT: .LBB14_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB14_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse16.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB14_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB14_4
; RV64ZVE32F-NEXT: .LBB14_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB15_7
; RV64ZVE32F-NEXT: .LBB15_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB15_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB15_8
; RV64ZVE32F-NEXT: .LBB15_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB15_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse16.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB15_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB15_4
; RV64ZVE32F-NEXT: .LBB15_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB23_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB23_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB23_4
; RV64ZVE32F-NEXT: .LBB23_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB23_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vse32.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB23_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB23_2
; RV64ZVE32F-NEXT: .LBB23_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB25_7
; RV64ZVE32F-NEXT: .LBB25_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB25_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB25_8
; RV64ZVE32F-NEXT: .LBB25_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB25_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse32.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB25_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB25_4
; RV64ZVE32F-NEXT: .LBB25_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB26_7
; RV64ZVE32F-NEXT: .LBB26_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB26_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB26_8
; RV64ZVE32F-NEXT: .LBB26_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB26_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse32.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB26_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB26_4
; RV64ZVE32F-NEXT: .LBB26_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV32ZVE32F-NEXT: andi a4, a3, 1
; RV32ZVE32F-NEXT: bnez a4, .LBB37_3
; RV32ZVE32F-NEXT: # %bb.1: # %else
-; RV32ZVE32F-NEXT: andi a0, a3, 2
-; RV32ZVE32F-NEXT: bnez a0, .LBB37_4
+; RV32ZVE32F-NEXT: andi a3, a3, 2
+; RV32ZVE32F-NEXT: bnez a3, .LBB37_4
; RV32ZVE32F-NEXT: .LBB37_2: # %else2
; RV32ZVE32F-NEXT: ret
; RV32ZVE32F-NEXT: .LBB37_3: # %cond.store
; RV32ZVE32F-NEXT: vmv.x.s a5, v8
; RV32ZVE32F-NEXT: sw a4, 4(a5)
; RV32ZVE32F-NEXT: sw a0, 0(a5)
-; RV32ZVE32F-NEXT: andi a0, a3, 2
-; RV32ZVE32F-NEXT: beqz a0, .LBB37_2
+; RV32ZVE32F-NEXT: andi a3, a3, 2
+; RV32ZVE32F-NEXT: beqz a3, .LBB37_2
; RV32ZVE32F-NEXT: .LBB37_4: # %cond.store1
; RV32ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV32ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a5, a4, 1
; RV64ZVE32F-NEXT: bnez a5, .LBB37_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a4, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB37_4
+; RV64ZVE32F-NEXT: andi a4, a4, 2
+; RV64ZVE32F-NEXT: bnez a4, .LBB37_4
; RV64ZVE32F-NEXT: .LBB37_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB37_3: # %cond.store
; RV64ZVE32F-NEXT: sd a0, 0(a2)
-; RV64ZVE32F-NEXT: andi a0, a4, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB37_2
+; RV64ZVE32F-NEXT: andi a4, a4, 2
+; RV64ZVE32F-NEXT: beqz a4, .LBB37_2
; RV64ZVE32F-NEXT: .LBB37_4: # %cond.store1
; RV64ZVE32F-NEXT: sd a1, 0(a3)
; RV64ZVE32F-NEXT: ret
; RV32ZVE32F-NEXT: andi a0, a5, 4
; RV32ZVE32F-NEXT: bnez a0, .LBB38_7
; RV32ZVE32F-NEXT: .LBB38_3: # %else4
-; RV32ZVE32F-NEXT: andi a0, a5, 8
-; RV32ZVE32F-NEXT: bnez a0, .LBB38_8
+; RV32ZVE32F-NEXT: andi a5, a5, 8
+; RV32ZVE32F-NEXT: bnez a5, .LBB38_8
; RV32ZVE32F-NEXT: .LBB38_4: # %else6
; RV32ZVE32F-NEXT: ret
; RV32ZVE32F-NEXT: .LBB38_5: # %cond.store
; RV32ZVE32F-NEXT: vmv.x.s a0, v9
; RV32ZVE32F-NEXT: sw a4, 0(a0)
; RV32ZVE32F-NEXT: sw a3, 4(a0)
-; RV32ZVE32F-NEXT: andi a0, a5, 8
-; RV32ZVE32F-NEXT: beqz a0, .LBB38_4
+; RV32ZVE32F-NEXT: andi a5, a5, 8
+; RV32ZVE32F-NEXT: beqz a5, .LBB38_4
; RV32ZVE32F-NEXT: .LBB38_8: # %cond.store5
; RV32ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV32ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV32ZVE32F-NEXT: andi a0, a5, 4
; RV32ZVE32F-NEXT: bnez a0, .LBB39_7
; RV32ZVE32F-NEXT: .LBB39_3: # %else4
-; RV32ZVE32F-NEXT: andi a0, a5, 8
-; RV32ZVE32F-NEXT: bnez a0, .LBB39_8
+; RV32ZVE32F-NEXT: andi a5, a5, 8
+; RV32ZVE32F-NEXT: bnez a5, .LBB39_8
; RV32ZVE32F-NEXT: .LBB39_4: # %else6
; RV32ZVE32F-NEXT: ret
; RV32ZVE32F-NEXT: .LBB39_5: # %cond.store
; RV32ZVE32F-NEXT: vmv.x.s a0, v9
; RV32ZVE32F-NEXT: sw a4, 0(a0)
; RV32ZVE32F-NEXT: sw a3, 4(a0)
-; RV32ZVE32F-NEXT: andi a0, a5, 8
-; RV32ZVE32F-NEXT: beqz a0, .LBB39_4
+; RV32ZVE32F-NEXT: andi a5, a5, 8
+; RV32ZVE32F-NEXT: beqz a5, .LBB39_4
; RV32ZVE32F-NEXT: .LBB39_8: # %cond.store5
; RV32ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV32ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB53_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB53_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB53_4
; RV64ZVE32F-NEXT: .LBB53_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB53_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vse16.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB53_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB53_2
; RV64ZVE32F-NEXT: .LBB53_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB54_7
; RV64ZVE32F-NEXT: .LBB54_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB54_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB54_8
; RV64ZVE32F-NEXT: .LBB54_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB54_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse16.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB54_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB54_4
; RV64ZVE32F-NEXT: .LBB54_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB55_7
; RV64ZVE32F-NEXT: .LBB55_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB55_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB55_8
; RV64ZVE32F-NEXT: .LBB55_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB55_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse16.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB55_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB55_4
; RV64ZVE32F-NEXT: .LBB55_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e16, mf2, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB63_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB63_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB63_4
; RV64ZVE32F-NEXT: .LBB63_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB63_3: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vse32.v v8, (a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB63_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB63_2
; RV64ZVE32F-NEXT: .LBB63_4: # %cond.store1
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 1
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB64_7
; RV64ZVE32F-NEXT: .LBB64_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB64_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB64_8
; RV64ZVE32F-NEXT: .LBB64_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB64_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse32.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB64_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB64_4
; RV64ZVE32F-NEXT: .LBB64_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB65_7
; RV64ZVE32F-NEXT: .LBB65_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB65_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB65_8
; RV64ZVE32F-NEXT: .LBB65_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB65_5: # %cond.store
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v9, v8, 2
; RV64ZVE32F-NEXT: vse32.v v9, (a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB65_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB65_4
; RV64ZVE32F-NEXT: .LBB65_8: # %cond.store5
; RV64ZVE32F-NEXT: vsetivli zero, 1, e32, m1, ta, ma
; RV64ZVE32F-NEXT: vslidedown.vi v8, v8, 3
; RV64ZVE32F-NEXT: andi a3, a2, 1
; RV64ZVE32F-NEXT: bnez a3, .LBB76_3
; RV64ZVE32F-NEXT: # %bb.1: # %else
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: bnez a0, .LBB76_4
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: bnez a2, .LBB76_4
; RV64ZVE32F-NEXT: .LBB76_2: # %else2
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB76_3: # %cond.store
; RV64ZVE32F-NEXT: fsd fa0, 0(a0)
-; RV64ZVE32F-NEXT: andi a0, a2, 2
-; RV64ZVE32F-NEXT: beqz a0, .LBB76_2
+; RV64ZVE32F-NEXT: andi a2, a2, 2
+; RV64ZVE32F-NEXT: beqz a2, .LBB76_2
; RV64ZVE32F-NEXT: .LBB76_4: # %cond.store1
; RV64ZVE32F-NEXT: fsd fa1, 0(a1)
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB77_7
; RV64ZVE32F-NEXT: .LBB77_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB77_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB77_8
; RV64ZVE32F-NEXT: .LBB77_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB77_5: # %cond.store
; RV64ZVE32F-NEXT: beqz a0, .LBB77_3
; RV64ZVE32F-NEXT: .LBB77_7: # %cond.store3
; RV64ZVE32F-NEXT: fsd fa2, 0(a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB77_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB77_4
; RV64ZVE32F-NEXT: .LBB77_8: # %cond.store5
; RV64ZVE32F-NEXT: fsd fa3, 0(a1)
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: andi a0, a3, 4
; RV64ZVE32F-NEXT: bnez a0, .LBB78_7
; RV64ZVE32F-NEXT: .LBB78_3: # %else4
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: bnez a0, .LBB78_8
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: bnez a3, .LBB78_8
; RV64ZVE32F-NEXT: .LBB78_4: # %else6
; RV64ZVE32F-NEXT: ret
; RV64ZVE32F-NEXT: .LBB78_5: # %cond.store
; RV64ZVE32F-NEXT: beqz a0, .LBB78_3
; RV64ZVE32F-NEXT: .LBB78_7: # %cond.store3
; RV64ZVE32F-NEXT: fsd fa2, 0(a2)
-; RV64ZVE32F-NEXT: andi a0, a3, 8
-; RV64ZVE32F-NEXT: beqz a0, .LBB78_4
+; RV64ZVE32F-NEXT: andi a3, a3, 8
+; RV64ZVE32F-NEXT: beqz a3, .LBB78_4
; RV64ZVE32F-NEXT: .LBB78_8: # %cond.store5
; RV64ZVE32F-NEXT: fsd fa3, 0(a1)
; RV64ZVE32F-NEXT: ret
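; Likewise for the store side: the %cond.store / %cond.store1 blocks above
; come from scalarized masked scatters, with one vse/sw/fsd per set mask
; bit. A matching sketch, again with hypothetical names:

declare void @llvm.masked.scatter.v2i8.v2p0(<2 x i8>, <2 x ptr>, i32, <2 x i1>)

; Illustrative only: bit i of %m guards the store of lane i.
define void @mscatter_v2i8(<2 x i8> %val, <2 x ptr> %ptrs, <2 x i1> %m) {
  call void @llvm.masked.scatter.v2i8.v2p0(<2 x i8> %val, <2 x ptr> %ptrs, i32 1, <2 x i1> %m)
  ret void
}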