Index: llvm/lib/Target/RISCV/RISCVISelLowering.cpp
===================================================================
--- llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -1153,6 +1153,9 @@
 }
 
 bool RISCVTargetLowering::hasBitTest(SDValue X, SDValue Y) const {
+  // Zbs provides bext[.i], which extracts a single bit at any position.
+  if (Subtarget.hasStdExtZbs())
+    return X.getValueType().isScalarInteger();
   // We can use ANDI+SEQZ/SNEZ as a bit test. Y contains the bit position.
   auto *C = dyn_cast<ConstantSDNode>(Y);
   return C && C->getAPIntValue().ule(10);
Index: llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
===================================================================
--- llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
+++ llvm/lib/Target/RISCV/RISCVInstrInfoZb.td
@@ -266,6 +266,15 @@
   return !C || !isInt<12>(C->getSExtValue());
 }]>;
 
+// A single-bit (power-of-2) mask. BSETINVXForm converts the mask into its
+// bit index so the leaf can be used directly as the shamt operand of
+// bseti/binvi/bexti.
+def pow2_mask : ImmLeaf<XLenVT, [{
+  if (Subtarget->is64Bit())
+    return isPowerOf2_64(Imm);
+  return isPowerOf2_32(Imm);
+}], BSETINVXForm>;
+
 def sh1add_op : ComplexPattern<XLenVT, 1, "selectSHXADDOp<1>", [], [], 6>;
 def sh2add_op : ComplexPattern<XLenVT, 1, "selectSHXADDOp<2>", [], [], 6>;
 def sh3add_op : ComplexPattern<XLenVT, 1, "selectSHXADDOp<3>", [], [], 6>;
@@ -887,6 +896,9 @@
           (BSETI GPR:$rs1, BSETINVMask:$mask)>;
 def : Pat<(xor GPR:$rs1, BSETINVMask:$mask),
           (BINVI GPR:$rs1, BSETINVMask:$mask)>;
+// Inverted single-bit test: ((x & (1 << n)) == 0) -> bexti + xori.
+def : Pat<(seteq (and GPR:$rs1, pow2_mask:$mask), 0),
+          (XORI (BEXTI GPR:$rs1, pow2_mask:$mask), 1)>;
 
 def : Pat<(and (srl GPR:$rs1, uimmlog2xlen:$shamt), (XLenVT 1)),
           (BEXTI GPR:$rs1, uimmlog2xlen:$shamt)>;
Index: llvm/test/CodeGen/RISCV/bittest.ll
===================================================================
--- llvm/test/CodeGen/RISCV/bittest.ll
+++ llvm/test/CodeGen/RISCV/bittest.ll
@@ -9,11 +9,23 @@
 ; RUN: | FileCheck %s -check-prefixes=CHECK,ZBS,RV64,RV64ZBS
 
 define signext i32 @bittest_7_i32(i32 signext %a) nounwind {
-; CHECK-LABEL: bittest_7_i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    andi a0, a0, 128
-; CHECK-NEXT:    seqz a0, a0
-; CHECK-NEXT:    ret
+; RV32I-LABEL: bittest_7_i32:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a0, a0, 128
+; RV32I-NEXT:    seqz a0, a0
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: bittest_7_i32:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    andi a0, a0, 128
+; RV64I-NEXT:    seqz a0, a0
+; RV64I-NEXT:    ret
+;
+; ZBS-LABEL: bittest_7_i32:
+; ZBS:       # %bb.0:
+; ZBS-NEXT:    bexti a0, a0, 7
+; ZBS-NEXT:    xori a0, a0, 1
+; ZBS-NEXT:    ret
   %shr = lshr i32 %a, 7
   %not = xor i32 %shr, -1
   %and = and i32 %not, 1
@@ -21,11 +33,23 @@
 }
 
 define signext i32 @bittest_10_i32(i32 signext %a) nounwind {
-; CHECK-LABEL: bittest_10_i32:
-; CHECK:       # %bb.0:
-; CHECK-NEXT:    andi a0, a0, 1024
-; CHECK-NEXT:    seqz a0, a0
-; CHECK-NEXT:    ret
+; RV32I-LABEL: bittest_10_i32:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a0, a0, 1024
+; RV32I-NEXT:    seqz a0, a0
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: bittest_10_i32:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    andi a0, a0, 1024
+; RV64I-NEXT:    seqz a0, a0
+; RV64I-NEXT:    ret
+;
+; ZBS-LABEL: bittest_10_i32:
+; ZBS:       # %bb.0:
+; ZBS-NEXT:    bexti a0, a0, 10
+; ZBS-NEXT:    xori a0, a0, 1
+; ZBS-NEXT:    ret
   %shr = lshr i32 %a, 10
   %not = xor i32 %shr, -1
   %and = and i32 %not, 1
@@ -49,8 +73,8 @@
 ;
 ; ZBS-LABEL: bittest_11_i32:
 ; ZBS:       # %bb.0:
-; ZBS-NEXT:    not a0, a0
-; ZBS-NEXT:    bexti a0, a0, 11
+; ZBS-NEXT:    bexti a0, a0, 11
+; ZBS-NEXT:    xori a0, a0, 1
 ; ZBS-NEXT:    ret
   %shr = lshr i32 %a, 11
   %not = xor i32 %shr, -1
@@ -77,18 +101,31 @@
 }
 
 define i64 @bittest_7_i64(i64 %a) nounwind {
-; RV32-LABEL: bittest_7_i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    andi a0, a0, 128
-; RV32-NEXT:    seqz a0, a0
-; RV32-NEXT:    li a1, 0
-; RV32-NEXT:    ret
+; RV32I-LABEL: bittest_7_i64:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a0, a0, 128
+; RV32I-NEXT:    seqz a0, a0
+; RV32I-NEXT:    li a1, 0
+; RV32I-NEXT:    ret
 ;
-; RV64-LABEL: bittest_7_i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    andi a0, a0, 128
-; RV64-NEXT:    seqz a0, a0
-; RV64-NEXT:    ret
+; RV64I-LABEL: bittest_7_i64:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    andi a0, a0, 128
+; RV64I-NEXT:    seqz a0, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBS-LABEL: bittest_7_i64:
+; RV32ZBS:       # %bb.0:
+; RV32ZBS-NEXT:    bexti a0, a0, 7
+; RV32ZBS-NEXT:    xori a0, a0, 1
+; RV32ZBS-NEXT:    li a1, 0
+; RV32ZBS-NEXT:    ret
+;
+; RV64ZBS-LABEL: bittest_7_i64:
+; RV64ZBS:       # %bb.0:
+; RV64ZBS-NEXT:    bexti a0, a0, 7
+; RV64ZBS-NEXT:    xori a0, a0, 1
+; RV64ZBS-NEXT:    ret
   %shr = lshr i64 %a, 7
   %not = xor i64 %shr, -1
   %and = and i64 %not, 1
@@ -96,18 +133,31 @@
 }
 
 define i64 @bittest_10_i64(i64 %a) nounwind {
-; RV32-LABEL: bittest_10_i64:
-; RV32:       # %bb.0:
-; RV32-NEXT:    andi a0, a0, 1024
-; RV32-NEXT:    seqz a0, a0
-; RV32-NEXT:    li a1, 0
-; RV32-NEXT:    ret
+; RV32I-LABEL: bittest_10_i64:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a0, a0, 1024
+; RV32I-NEXT:    seqz a0, a0
+; RV32I-NEXT:    li a1, 0
+; RV32I-NEXT:    ret
 ;
-; RV64-LABEL: bittest_10_i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    andi a0, a0, 1024
-; RV64-NEXT:    seqz a0, a0
-; RV64-NEXT:    ret
+; RV64I-LABEL: bittest_10_i64:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    andi a0, a0, 1024
+; RV64I-NEXT:    seqz a0, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBS-LABEL: bittest_10_i64:
+; RV32ZBS:       # %bb.0:
+; RV32ZBS-NEXT:    bexti a0, a0, 10
+; RV32ZBS-NEXT:    xori a0, a0, 1
+; RV32ZBS-NEXT:    li a1, 0
+; RV32ZBS-NEXT:    ret
+;
+; RV64ZBS-LABEL: bittest_10_i64:
+; RV64ZBS:       # %bb.0:
+; RV64ZBS-NEXT:    bexti a0, a0, 10
+; RV64ZBS-NEXT:    xori a0, a0, 1
+; RV64ZBS-NEXT:    ret
   %shr = lshr i64 %a, 10
   %not = xor i64 %shr, -1
   %and = and i64 %not, 1
@@ -132,15 +182,15 @@
 ;
 ; RV32ZBS-LABEL: bittest_11_i64:
 ; RV32ZBS:       # %bb.0:
-; RV32ZBS-NEXT:    not a0, a0
-; RV32ZBS-NEXT:    bexti a0, a0, 11
+; RV32ZBS-NEXT:    bexti a0, a0, 11
+; RV32ZBS-NEXT:    xori a0, a0, 1
 ; RV32ZBS-NEXT:    li a1, 0
 ; RV32ZBS-NEXT:    ret
 ;
 ; RV64ZBS-LABEL: bittest_11_i64:
 ; RV64ZBS:       # %bb.0:
-; RV64ZBS-NEXT:    not a0, a0
-; RV64ZBS-NEXT:    bexti a0, a0, 11
+; RV64ZBS-NEXT:    bexti a0, a0, 11
+; RV64ZBS-NEXT:    xori a0, a0, 1
 ; RV64ZBS-NEXT:    ret
   %shr = lshr i64 %a, 11
   %not = xor i64 %shr, -1
@@ -156,11 +206,17 @@
 ; RV32-NEXT:    li a1, 0
 ; RV32-NEXT:    ret
 ;
-; RV64-LABEL: bittest_31_i64:
-; RV64:       # %bb.0:
-; RV64-NEXT:    not a0, a0
-; RV64-NEXT:    srliw a0, a0, 31
-; RV64-NEXT:    ret
+; RV64I-LABEL: bittest_31_i64:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    not a0, a0
+; RV64I-NEXT:    srliw a0, a0, 31
+; RV64I-NEXT:    ret
+;
+; RV64ZBS-LABEL: bittest_31_i64:
+; RV64ZBS:       # %bb.0:
+; RV64ZBS-NEXT:    bexti a0, a0, 31
+; RV64ZBS-NEXT:    xori a0, a0, 1
+; RV64ZBS-NEXT:    ret
   %shr = lshr i64 %a, 31
   %not = xor i64 %shr, -1
   %and = and i64 %not, 1
@@ -184,8 +240,8 @@
 ;
 ; RV64ZBS-LABEL: bittest_32_i64:
 ; RV64ZBS:       # %bb.0:
-; RV64ZBS-NEXT:    not a0, a0
-; RV64ZBS-NEXT:    bexti a0, a0, 32
+; RV64ZBS-NEXT:    bexti a0, a0, 32
+; RV64ZBS-NEXT:    xori a0, a0, 1
 ; RV64ZBS-NEXT:    ret
   %shr = lshr i64 %a, 32
   %not = xor i64 %shr, -1