From d9500c2e230e9cc68d3d647864fa824cc3c06b3f Mon Sep 17 00:00:00 2001
From: Craig Topper
Date: Thu, 26 Nov 2020 02:02:38 -0800
Subject: [PATCH] [RISCV] Add isel patterns for sbsetw/sbclrw/sbinvw with
 sext_inreg as the root.

This handles cases where the input isn't known to be sign extended.
---
 llvm/lib/Target/RISCV/RISCVInstrInfoB.td |  6 ++++++
 llvm/test/CodeGen/RISCV/rv64Zbs.ll       | 31 ++++++-------------------------
 2 files changed, 12 insertions(+), 25 deletions(-)

diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
index 29ccffb..e80ef99 100644
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
@@ -893,10 +893,16 @@ def : Pat<(riscv_rolw GPR:$rs1, uimm5:$rs2),
 let Predicates = [HasStdExtZbs, IsRV64] in {
 def : Pat<(and (not (riscv_sllw 1, GPR:$rs2)), (assertsexti32 GPR:$rs1)),
           (SBCLRW GPR:$rs1, GPR:$rs2)>;
+def : Pat<(sext_inreg (and (not (riscv_sllw 1, GPR:$rs2)), GPR:$rs1), i32),
+          (SBCLRW GPR:$rs1, GPR:$rs2)>;
 def : Pat<(or (riscv_sllw 1, GPR:$rs2), (assertsexti32 GPR:$rs1)),
           (SBSETW GPR:$rs1, GPR:$rs2)>;
+def : Pat<(sext_inreg (or (riscv_sllw 1, GPR:$rs2), GPR:$rs1), i32),
+          (SBSETW GPR:$rs1, GPR:$rs2)>;
 def : Pat<(xor (riscv_sllw 1, GPR:$rs2), (assertsexti32 GPR:$rs1)),
           (SBINVW GPR:$rs1, GPR:$rs2)>;
+def : Pat<(sext_inreg (xor (riscv_sllw 1, GPR:$rs2), GPR:$rs1), i32),
+          (SBINVW GPR:$rs1, GPR:$rs2)>;
 def : Pat<(and (riscv_srlw GPR:$rs1, GPR:$rs2), 1),
           (SBEXTW GPR:$rs1, GPR:$rs2)>;
 } // Predicates = [HasStdExtZbs, IsRV64]
diff --git a/llvm/test/CodeGen/RISCV/rv64Zbs.ll b/llvm/test/CodeGen/RISCV/rv64Zbs.ll
index be6b5ad..85d1bd5 100644
--- a/llvm/test/CodeGen/RISCV/rv64Zbs.ll
+++ b/llvm/test/CodeGen/RISCV/rv64Zbs.ll
@@ -69,20 +69,13 @@ define signext i32 @sbclr_i32_load(i32* %p, i32 signext %b) nounwind {
 ; RV64IB-LABEL: sbclr_i32_load:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    lw a0, 0(a0)
-; RV64IB-NEXT:    addi a2, zero, 1
-; RV64IB-NEXT:    sllw a1, a2, a1
-; RV64IB-NEXT:    andn a0, a0, a1
-; RV64IB-NEXT:    sext.w a0, a0
+; RV64IB-NEXT:    sbclrw a0, a0, a1
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBS-LABEL: sbclr_i32_load:
 ; RV64IBS:       # %bb.0:
 ; RV64IBS-NEXT:    lw a0, 0(a0)
-; RV64IBS-NEXT:    addi a2, zero, 1
-; RV64IBS-NEXT:    sllw a1, a2, a1
-; RV64IBS-NEXT:    not a1, a1
-; RV64IBS-NEXT:    and a0, a1, a0
-; RV64IBS-NEXT:    sext.w a0, a0
+; RV64IBS-NEXT:    sbclrw a0, a0, a1
 ; RV64IBS-NEXT:    ret
   %a = load i32, i32* %p
   %shl = shl i32 1, %b
@@ -198,19 +191,13 @@ define signext i32 @sbset_i32_load(i32* %p, i32 signext %b) nounwind {
 ; RV64IB-LABEL: sbset_i32_load:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    lw a0, 0(a0)
-; RV64IB-NEXT:    addi a2, zero, 1
-; RV64IB-NEXT:    sllw a1, a2, a1
-; RV64IB-NEXT:    or a0, a1, a0
-; RV64IB-NEXT:    sext.w a0, a0
+; RV64IB-NEXT:    sbsetw a0, a0, a1
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBS-LABEL: sbset_i32_load:
 ; RV64IBS:       # %bb.0:
 ; RV64IBS-NEXT:    lw a0, 0(a0)
-; RV64IBS-NEXT:    addi a2, zero, 1
-; RV64IBS-NEXT:    sllw a1, a2, a1
-; RV64IBS-NEXT:    or a0, a1, a0
-; RV64IBS-NEXT:    sext.w a0, a0
+; RV64IBS-NEXT:    sbsetw a0, a0, a1
 ; RV64IBS-NEXT:    ret
   %a = load i32, i32* %p
   %shl = shl i32 1, %b
@@ -321,19 +308,13 @@ define signext i32 @sbinv_i32_load(i32* %p, i32 signext %b) nounwind {
 ; RV64IB-LABEL: sbinv_i32_load:
 ; RV64IB:       # %bb.0:
 ; RV64IB-NEXT:    lw a0, 0(a0)
-; RV64IB-NEXT:    addi a2, zero, 1
-; RV64IB-NEXT:    sllw a1, a2, a1
-; RV64IB-NEXT:    xor a0, a1, a0
-; RV64IB-NEXT:    sext.w a0, a0
+; RV64IB-NEXT:    sbinvw a0, a0, a1
 ; RV64IB-NEXT:    ret
 ;
 ; RV64IBS-LABEL: sbinv_i32_load:
 ; RV64IBS:       # %bb.0:
 ; RV64IBS-NEXT:    lw a0, 0(a0)
-; RV64IBS-NEXT:    addi a2, zero, 1
-; RV64IBS-NEXT:    sllw a1, a2, a1
-; RV64IBS-NEXT:    xor a0, a1, a0
-; RV64IBS-NEXT:    sext.w a0, a0
+; RV64IBS-NEXT:    sbinvw a0, a0, a1
 ; RV64IBS-NEXT:    ret
   %a = load i32, i32* %p
   %shl = shl i32 1, %b
--
2.7.4
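
For reference, a minimal IR sketch of the case these patterns pick up. It mirrors the sbset_i32_load test above; the function name and comments are illustrative, not taken from the patch:

define signext i32 @sbset_from_load(i32* %p, i32 signext %b) nounwind {
  %a = load i32, i32* %p   ; loaded value carries no sign-extension guarantee
  %shl = shl i32 1, %b
  %or = or i32 %shl, %a
  ret i32 %or              ; signext return requires a sign-extended result
}

On RV64 the i32 operations are promoted to i64. Because %a is not wrapped in an AssertSext node, the existing patterns rooted at assertsexti32 cannot match, and the sign extension demanded by the signext return instead appears as (sext_inreg (or (riscv_sllw 1, $rs2), $rs1), i32). That is the root the new SBSETW pattern matches, which is why the test diffs above collapse the or/sext.w (or andn/not+and/xor plus sext.w) sequences into single sbsetw/sbclrw/sbinvw instructions.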