case X86::ADD16rr: case X86::ADD8rr: case X86::ADD64rm:
case X86::ADD32rm: case X86::ADD16rm: case X86::ADD8rm:
case X86::INC64r: case X86::INC32r: case X86::INC16r: case X86::INC8r:
- case X86::AND64ri32: case X86::AND64ri8: case X86::AND32ri:
- case X86::AND32ri8: case X86::AND16ri: case X86::AND16ri8:
- case X86::AND8ri: case X86::AND64rr: case X86::AND32rr:
- case X86::AND16rr: case X86::AND8rr: case X86::AND64rm:
- case X86::AND32rm: case X86::AND16rm: case X86::AND8rm:
- case X86::XOR64ri32: case X86::XOR64ri8: case X86::XOR32ri:
- case X86::XOR32ri8: case X86::XOR16ri: case X86::XOR16ri8:
- case X86::XOR8ri: case X86::XOR64rr: case X86::XOR32rr:
- case X86::XOR16rr: case X86::XOR8rr: case X86::XOR64rm:
- case X86::XOR32rm: case X86::XOR16rm: case X86::XOR8rm:
- case X86::OR64ri32: case X86::OR64ri8: case X86::OR32ri:
- case X86::OR32ri8: case X86::OR16ri: case X86::OR16ri8:
- case X86::OR8ri: case X86::OR64rr: case X86::OR32rr:
- case X86::OR16rr: case X86::OR8rr: case X86::OR64rm:
- case X86::OR32rm: case X86::OR16rm: case X86::OR8rm:
case X86::ADC64ri32: case X86::ADC64ri8: case X86::ADC32ri:
case X86::ADC32ri8: case X86::ADC16ri: case X86::ADC16ri8:
case X86::ADC8ri: case X86::ADC64rr: case X86::ADC32rr:
case X86::TZCNT32rr: case X86::TZCNT32rm:
case X86::TZCNT64rr: case X86::TZCNT64rm:
return true;
+ case X86::AND64ri32: case X86::AND64ri8: case X86::AND32ri:
+ case X86::AND32ri8: case X86::AND16ri: case X86::AND16ri8:
+ case X86::AND8ri: case X86::AND64rr: case X86::AND32rr:
+ case X86::AND16rr: case X86::AND8rr: case X86::AND64rm:
+ case X86::AND32rm: case X86::AND16rm: case X86::AND8rm:
+ case X86::XOR64ri32: case X86::XOR64ri8: case X86::XOR32ri:
+ case X86::XOR32ri8: case X86::XOR16ri: case X86::XOR16ri8:
+ case X86::XOR8ri: case X86::XOR64rr: case X86::XOR32rr:
+ case X86::XOR16rr: case X86::XOR8rr: case X86::XOR64rm:
+ case X86::XOR32rm: case X86::XOR16rm: case X86::XOR8rm:
+ case X86::OR64ri32: case X86::OR64ri8: case X86::OR32ri:
+ case X86::OR32ri8: case X86::OR16ri: case X86::OR16ri8:
+ case X86::OR8ri: case X86::OR64rr: case X86::OR32rr:
+ case X86::OR16rr: case X86::OR8rr: case X86::OR64rm:
+ case X86::OR32rm: case X86::OR16rm: case X86::OR8rm:
case X86::ANDN32rr: case X86::ANDN32rm:
case X86::ANDN64rr: case X86::ANDN64rm:
case X86::BLSI32rr: case X86::BLSI32rm:
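The move above is flag-neutral by the ISA's rules: x86 AND, OR, and XOR all set SF, ZF, and PF from their result and clear OF and CF, which is exactly what a subsequent TEST of that result computes. Regrouping them with ANDN/BLSI (presumably the arm of this switch, likely isDefConvertible in X86InstrInfo.cpp, that also records the overflow flag as cleared) lets X86's optimizeCompareInstr drop a redundant TEST even when the flag consumer is a signed condition. A minimal sketch of the redundancy removed in the tests below:

    andl  $-17, %ecx   # sets SF/ZF/PF from the result, clears OF and CF
    testl %ecx, %ecx   # ecx & ecx: produces exactly the same flags
    jle   .LBB4_2      # reads ZF and SF != OF, both already valid after the AND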
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, %ecx
; X86-NEXT: andl $-17, %ecx
-; X86-NEXT: testl %ecx, %ecx
; X86-NEXT: jle .LBB4_2
; X86-NEXT: # %bb.1:
; X86-NEXT: movl %ecx, %eax
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: andl $-17, %eax
-; X64-NEXT: testl %eax, %eax
; X64-NEXT: cmovlel %edi, %eax
; X64-NEXT: retq
%3 = and i32 %0, -17
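Each bare `%3 = ...` line in these hunks is the first instruction of the test body; the rest of the function is elided. Judging from the cmovle pattern, the surrounding function is presumably a signed compare-and-select of the following shape (the function name and the second, unused parameter are illustrative assumptions, not the original test, though the `%3` numbering does imply two parameters):

    define i32 @and32_const(i32 %0, i32 %1) {
      %3 = and i32 %0, -17
      %4 = icmp sgt i32 %3, 0
      %5 = select i1 %4, i32 %3, i32 %0
      ret i32 %5
    }

With the AND now known to define SF/ZF and clear OF, the icmp against zero needs no separate TEST, which is what the deleted check lines assert.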
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: andl %eax, %ecx
-; X86-NEXT: testl %ecx, %ecx
; X86-NEXT: jle .LBB5_2
; X86-NEXT: # %bb.1:
; X86-NEXT: movl %ecx, %eax
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: andl %edi, %eax
-; X64-NEXT: testl %eax, %eax
; X64-NEXT: cmovlel %edi, %eax
; X64-NEXT: retq
%3 = and i32 %1, %0
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: andq $-17, %rax
-; X64-NEXT: testq %rax, %rax
; X64-NEXT: cmovleq %rdi, %rax
; X64-NEXT: retq
%3 = and i64 %0, -17
; X64: # %bb.0:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: andq %rdi, %rax
-; X64-NEXT: testq %rax, %rax
; X64-NEXT: cmovleq %rdi, %rax
; X64-NEXT: retq
%3 = and i64 %1, %0
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, %ecx
; X86-NEXT: orl $-17, %ecx
-; X86-NEXT: testl %ecx, %ecx
; X86-NEXT: jle .LBB4_2
; X86-NEXT: # %bb.1:
; X86-NEXT: movl %ecx, %eax
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: orl $-17, %eax
-; X64-NEXT: testl %eax, %eax
; X64-NEXT: cmovlel %edi, %eax
; X64-NEXT: retq
%3 = or i32 %0, -17
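One arithmetic note on the constant OR variants: -17 is 0xFFFFFFEF, whose sign bit is set, so the OR result is negative for every input,

    x | 0xFFFFFFEF  has bit 31 set, hence (x | -17) < 0 for all x

and the jle/cmovle outcome is statically fixed here. The tests still serve their purpose, which is only to show that the separate test instruction is gone for OR as well.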
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: orl %eax, %ecx
-; X86-NEXT: testl %ecx, %ecx
; X86-NEXT: jle .LBB5_2
; X86-NEXT: # %bb.1:
; X86-NEXT: movl %ecx, %eax
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: orl %edi, %eax
-; X64-NEXT: testl %eax, %eax
; X64-NEXT: cmovlel %edi, %eax
; X64-NEXT: retq
%3 = or i32 %1, %0
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: orq $-17, %rax
-; X64-NEXT: testq %rax, %rax
; X64-NEXT: cmovleq %rdi, %rax
; X64-NEXT: retq
%3 = or i64 %0, -17
; X64: # %bb.0:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: orq %rdi, %rax
-; X64-NEXT: testq %rax, %rax
; X64-NEXT: cmovleq %rdi, %rax
; X64-NEXT: retq
%3 = or i64 %1, %0
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl %eax, %ecx
; X86-NEXT: xorl $-17, %ecx
-; X86-NEXT: testl %ecx, %ecx
; X86-NEXT: jle .LBB4_2
; X86-NEXT: # %bb.1:
; X86-NEXT: movl %ecx, %eax
; X64: # %bb.0:
; X64-NEXT: movl %edi, %eax
; X64-NEXT: xorl $-17, %eax
-; X64-NEXT: testl %eax, %eax
; X64-NEXT: cmovlel %edi, %eax
; X64-NEXT: retq
%3 = xor i32 %0, -17
; X86-NEXT: movl {{[0-9]+}}(%esp), %eax
; X86-NEXT: movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT: xorl %eax, %ecx
-; X86-NEXT: testl %ecx, %ecx
; X86-NEXT: jle .LBB5_2
; X86-NEXT: # %bb.1:
; X86-NEXT: movl %ecx, %eax
; X64: # %bb.0:
; X64-NEXT: movl %esi, %eax
; X64-NEXT: xorl %edi, %eax
-; X64-NEXT: testl %eax, %eax
; X64-NEXT: cmovlel %edi, %eax
; X64-NEXT: retq
%3 = xor i32 %1, %0
; X64: # %bb.0:
; X64-NEXT: movq %rdi, %rax
; X64-NEXT: xorq $-17, %rax
-; X64-NEXT: testq %rax, %rax
; X64-NEXT: cmovleq %rdi, %rax
; X64-NEXT: retq
%3 = xor i64 %0, -17
; X64: # %bb.0:
; X64-NEXT: movq %rsi, %rax
; X64-NEXT: xorq %rdi, %rax
-; X64-NEXT: testq %rax, %rax
; X64-NEXT: cmovleq %rdi, %rax
; X64-NEXT: retq
%3 = xor i64 %1, %0
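Check lines in this `; X86-NEXT:`/`; X64-NEXT:` style are the format emitted by llvm/utils/update_llc_test_checks.py, so after a codegen change like this one the expected assembly is normally regenerated rather than edited by hand, along the lines of (the path is a placeholder, not the actual test file):

    llvm/utils/update_llc_test_checks.py llvm/test/CodeGen/X86/<modified-test>.ll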