diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.h b/llvm/lib/Target/RISCV/RISCVISelLowering.h
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.h
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.h
@@ -315,7 +315,7 @@
   bool isSExtCheaperThanZExt(EVT SrcVT, EVT DstVT) const override;
   bool isCheapToSpeculateCttz() const override;
   bool isCheapToSpeculateCtlz() const override;
-  bool hasAndNot(SDValue Y) const override;
+  bool hasAndNotCompare(SDValue Y) const override;
   bool shouldSinkOperands(Instruction *I,
                           SmallVectorImpl<Use *> &Ops) const override;
   bool isFPImmLegal(const APFloat &Imm, EVT VT,
diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -1166,7 +1166,7 @@
   return Subtarget.hasStdExtZbb();
 }
 
-bool RISCVTargetLowering::hasAndNot(SDValue Y) const {
+bool RISCVTargetLowering::hasAndNotCompare(SDValue Y) const {
   EVT VT = Y.getValueType();
 
   // FIXME: Support vectors once we have tests.
diff --git a/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll b/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll
--- a/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll
@@ -568,8 +568,7 @@
 ;
 ; RV32ZBB-LABEL: andn_seqz_i32:
 ; RV32ZBB:       # %bb.0:
-; RV32ZBB-NEXT:    and a0, a0, a1
-; RV32ZBB-NEXT:    xor a0, a0, a1
+; RV32ZBB-NEXT:    andn a0, a1, a0
 ; RV32ZBB-NEXT:    seqz a0, a0
 ; RV32ZBB-NEXT:    ret
 ;
@@ -625,8 +624,7 @@
 ;
 ; RV32ZBB-LABEL: andn_snez_i32:
 ; RV32ZBB:       # %bb.0:
-; RV32ZBB-NEXT:    and a0, a0, a1
-; RV32ZBB-NEXT:    xor a0, a0, a1
+; RV32ZBB-NEXT:    andn a0, a1, a0
 ; RV32ZBB-NEXT:    snez a0, a0
 ; RV32ZBB-NEXT:    ret
 ;
diff --git a/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll b/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll
@@ -639,8 +639,7 @@
 ;
 ; RV64ZBB-LABEL: andn_seqz_i32:
 ; RV64ZBB:       # %bb.0:
-; RV64ZBB-NEXT:    and a0, a0, a1
-; RV64ZBB-NEXT:    xor a0, a0, a1
+; RV64ZBB-NEXT:    andn a0, a1, a0
 ; RV64ZBB-NEXT:    seqz a0, a0
 ; RV64ZBB-NEXT:    ret
 ;
@@ -665,8 +664,7 @@
 ;
 ; RV64ZBB-LABEL: andn_seqz_i64:
 ; RV64ZBB:       # %bb.0:
-; RV64ZBB-NEXT:    and a0, a0, a1
-; RV64ZBB-NEXT:    xor a0, a0, a1
+; RV64ZBB-NEXT:    andn a0, a1, a0
 ; RV64ZBB-NEXT:    seqz a0, a0
 ; RV64ZBB-NEXT:    ret
 ;
@@ -691,8 +689,7 @@
 ;
 ; RV64ZBB-LABEL: andn_snez_i32:
 ; RV64ZBB:       # %bb.0:
-; RV64ZBB-NEXT:    and a0, a0, a1
-; RV64ZBB-NEXT:    xor a0, a0, a1
+; RV64ZBB-NEXT:    andn a0, a1, a0
 ; RV64ZBB-NEXT:    snez a0, a0
 ; RV64ZBB-NEXT:    ret
 ;
@@ -717,8 +714,7 @@
 ;
 ; RV64ZBB-LABEL: andn_snez_i64:
 ; RV64ZBB:       # %bb.0:
-; RV64ZBB-NEXT:    and a0, a0, a1
-; RV64ZBB-NEXT:    xor a0, a0, a1
+; RV64ZBB-NEXT:    andn a0, a1, a0
 ; RV64ZBB-NEXT:    snez a0, a0
 ; RV64ZBB-NEXT:    ret
 ;