diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td --- a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td +++ b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td @@ -675,37 +675,17 @@ def : Pat<(rotr GPR:$rs1, GPR:$rs2), (ROR GPR:$rs1, GPR:$rs2)>; } // Predicates = [HasStdExtZbbOrZbp] -let Predicates = [HasStdExtZbs, IsRV32] in -def : Pat<(and (not (shl 1, (and GPR:$rs2, 31))), GPR:$rs1), - (SBCLR GPR:$rs1, GPR:$rs2)>; -let Predicates = [HasStdExtZbs, IsRV64] in -def : Pat<(and (not (shl 1, (and GPR:$rs2, 63))), GPR:$rs1), +let Predicates = [HasStdExtZbs] in { +def : Pat<(and (not (shiftop<shl> 1, GPR:$rs2)), GPR:$rs1), (SBCLR GPR:$rs1, GPR:$rs2)>; - -let Predicates = [HasStdExtZbs] in def : Pat<(and (rotl -2, GPR:$rs2), GPR:$rs1), (SBCLR GPR:$rs1, GPR:$rs2)>; - -let Predicates = [HasStdExtZbs, IsRV32] in -def : Pat<(or (shl 1, (and GPR:$rs2, 31)), GPR:$rs1), - (SBSET GPR:$rs1, GPR:$rs2)>; -let Predicates = [HasStdExtZbs, IsRV64] in -def : Pat<(or (shl 1, (and GPR:$rs2, 63)), GPR:$rs1), +def : Pat<(or (shiftop<shl> 1, GPR:$rs2), GPR:$rs1), (SBSET GPR:$rs1, GPR:$rs2)>; - -let Predicates = [HasStdExtZbs, IsRV32] in -def : Pat<(xor (shl 1, (and GPR:$rs2, 31)), GPR:$rs1), - (SBINV GPR:$rs1, GPR:$rs2)>; -let Predicates = [HasStdExtZbs, IsRV64] in -def : Pat<(xor (shl 1, (and GPR:$rs2, 63)), GPR:$rs1), +def : Pat<(xor (shiftop<shl> 1, GPR:$rs2), GPR:$rs1), (SBINV GPR:$rs1, GPR:$rs2)>; - -let Predicates = [HasStdExtZbs, IsRV32] in -def : Pat<(and (srl GPR:$rs1, (and GPR:$rs2, 31)), 1), - (SBEXT GPR:$rs1, GPR:$rs2)>; - -let Predicates = [HasStdExtZbs, IsRV64] in -def : Pat<(and (srl GPR:$rs1, (and GPR:$rs2, 63)), 1), +def : Pat<(and (shiftop<srl> GPR:$rs1, GPR:$rs2), 1), (SBEXT GPR:$rs1, GPR:$rs2)>; +} let Predicates = [HasStdExtZbb] in { def : Pat<(SLOIPat GPR:$rs1, uimmlog2xlen:$shamt), diff --git a/llvm/test/CodeGen/RISCV/rv32Zbs.ll b/llvm/test/CodeGen/RISCV/rv32Zbs.ll --- a/llvm/test/CodeGen/RISCV/rv32Zbs.ll +++ b/llvm/test/CodeGen/RISCV/rv32Zbs.ll @@ -47,10 
+47,7 @@ ; ; RV32IBS-LABEL: sbclr_i32_no_mask: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: addi a2, zero, 1 -; RV32IBS-NEXT: sll a1, a2, a1 -; RV32IBS-NEXT: not a1, a1 -; RV32IBS-NEXT: and a0, a1, a0 +; RV32IBS-NEXT: sbclr a0, a0, a1 ; RV32IBS-NEXT: ret %shl = shl nuw i32 1, %b %neg = xor i32 %shl, -1 @@ -156,16 +153,12 @@ ; ; RV32IB-LABEL: sbset_i32_no_mask: ; RV32IB: # %bb.0: -; RV32IB-NEXT: addi a2, zero, 1 -; RV32IB-NEXT: sll a1, a2, a1 -; RV32IB-NEXT: or a0, a1, a0 +; RV32IB-NEXT: sbset a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbset_i32_no_mask: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: addi a2, zero, 1 -; RV32IBS-NEXT: sll a1, a2, a1 -; RV32IBS-NEXT: or a0, a1, a0 +; RV32IBS-NEXT: sbset a0, a0, a1 ; RV32IBS-NEXT: ret %shl = shl nuw i32 1, %b %or = or i32 %shl, %a @@ -190,18 +183,18 @@ ; RV32IB-LABEL: sbset_i64: ; RV32IB: # %bb.0: ; RV32IB-NEXT: addi a3, zero, 1 -; RV32IB-NEXT: sll a2, a3, a2 -; RV32IB-NEXT: srai a3, a2, 31 -; RV32IB-NEXT: or a0, a2, a0 +; RV32IB-NEXT: sll a3, a3, a2 +; RV32IB-NEXT: srai a3, a3, 31 +; RV32IB-NEXT: sbset a0, a0, a2 ; RV32IB-NEXT: or a1, a3, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbset_i64: ; RV32IBS: # %bb.0: ; RV32IBS-NEXT: addi a3, zero, 1 -; RV32IBS-NEXT: sll a2, a3, a2 -; RV32IBS-NEXT: srai a3, a2, 31 -; RV32IBS-NEXT: or a0, a2, a0 +; RV32IBS-NEXT: sll a3, a3, a2 +; RV32IBS-NEXT: srai a3, a3, 31 +; RV32IBS-NEXT: sbset a0, a0, a2 ; RV32IBS-NEXT: or a1, a3, a1 ; RV32IBS-NEXT: ret %1 = trunc i64 %b to i32 @@ -253,18 +246,18 @@ ; RV32IB-LABEL: sbinv_i64: ; RV32IB: # %bb.0: ; RV32IB-NEXT: addi a3, zero, 1 -; RV32IB-NEXT: sll a2, a3, a2 -; RV32IB-NEXT: srai a3, a2, 31 -; RV32IB-NEXT: xor a0, a2, a0 +; RV32IB-NEXT: sll a3, a3, a2 +; RV32IB-NEXT: srai a3, a3, 31 +; RV32IB-NEXT: sbinv a0, a0, a2 ; RV32IB-NEXT: xor a1, a3, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbinv_i64: ; RV32IBS: # %bb.0: ; RV32IBS-NEXT: addi a3, zero, 1 -; RV32IBS-NEXT: sll a2, a3, a2 -; RV32IBS-NEXT: srai a3, a2, 31 -; RV32IBS-NEXT: xor a0, a2, a0 +; 
RV32IBS-NEXT: sll a3, a3, a2 +; RV32IBS-NEXT: srai a3, a3, 31 +; RV32IBS-NEXT: sbinv a0, a0, a2 ; RV32IBS-NEXT: xor a1, a3, a1 ; RV32IBS-NEXT: ret %1 = trunc i64 %b to i32 @@ -306,14 +299,12 @@ ; ; RV32IB-LABEL: sbext_i32_no_mask: ; RV32IB: # %bb.0: -; RV32IB-NEXT: srl a0, a0, a1 -; RV32IB-NEXT: andi a0, a0, 1 +; RV32IB-NEXT: sbext a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbext_i32_no_mask: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: srl a0, a0, a1 -; RV32IBS-NEXT: andi a0, a0, 1 +; RV32IBS-NEXT: sbext a0, a0, a1 ; RV32IBS-NEXT: ret %shr = lshr i32 %a, %b %and1 = and i32 %shr, 1 diff --git a/llvm/test/CodeGen/RISCV/rv64Zbs.ll b/llvm/test/CodeGen/RISCV/rv64Zbs.ll --- a/llvm/test/CodeGen/RISCV/rv64Zbs.ll +++ b/llvm/test/CodeGen/RISCV/rv64Zbs.ll @@ -96,10 +96,7 @@ ; ; RV64IBS-LABEL: sbclr_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sll a1, a2, a1 -; RV64IBS-NEXT: not a1, a1 -; RV64IBS-NEXT: and a0, a1, a0 +; RV64IBS-NEXT: sbclr a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i64 1, %b %neg = xor i64 %shl, -1 @@ -185,16 +182,12 @@ ; ; RV64IB-LABEL: sbset_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sll a1, a2, a1 -; RV64IB-NEXT: or a0, a1, a0 +; RV64IB-NEXT: sbset a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sll a1, a2, a1 -; RV64IBS-NEXT: or a0, a1, a0 +; RV64IBS-NEXT: sbset a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i64 1, %b %or = or i64 %shl, %a @@ -279,16 +272,12 @@ ; ; RV64IB-LABEL: sbinv_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sll a1, a2, a1 -; RV64IB-NEXT: xor a0, a1, a0 +; RV64IB-NEXT: sbinv a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinv_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sll a1, a2, a1 -; RV64IBS-NEXT: xor a0, a1, a0 +; RV64IBS-NEXT: sbinv a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl nuw i64 1, %b %xor = 
xor i64 %shl, %a @@ -369,14 +358,12 @@ ; ; RV64IB-LABEL: sbext_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: srl a0, a0, a1 -; RV64IB-NEXT: andi a0, a0, 1 +; RV64IB-NEXT: sbext a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbext_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: srl a0, a0, a1 -; RV64IBS-NEXT: andi a0, a0, 1 +; RV64IBS-NEXT: sbext a0, a0, a1 ; RV64IBS-NEXT: ret %shr = lshr i64 %a, %b %and1 = and i64 %shr, 1