Index: lib/Target/RISCV/RISCVISelLowering.cpp
===================================================================
--- lib/Target/RISCV/RISCVISelLowering.cpp
+++ lib/Target/RISCV/RISCVISelLowering.cpp
@@ -128,9 +128,9 @@
   setOperationAction(ISD::CTPOP, XLenVT, Expand);

   ISD::CondCode FPCCToExtend[] = {
-      ISD::SETOGT, ISD::SETOGE, ISD::SETONE, ISD::SETO,   ISD::SETUEQ,
-      ISD::SETUGT, ISD::SETUGE, ISD::SETULT, ISD::SETULE, ISD::SETUNE,
-      ISD::SETGT,  ISD::SETGE,  ISD::SETNE};
+      ISD::SETOGT, ISD::SETOGE, ISD::SETONE, ISD::SETUEQ, ISD::SETUGT,
+      ISD::SETUGE, ISD::SETULT, ISD::SETULE, ISD::SETUNE, ISD::SETGT,
+      ISD::SETGE,  ISD::SETNE};

   ISD::NodeType FPOpToExtend[] = {
       ISD::FSIN, ISD::FCOS, ISD::FSINCOS, ISD::FPOW, ISD::FREM};
Index: lib/Target/RISCV/RISCVInstrInfoD.td
===================================================================
--- lib/Target/RISCV/RISCVInstrInfoD.td
+++ lib/Target/RISCV/RISCVInstrInfoD.td
@@ -270,6 +270,10 @@
 // handled by a RISC-V instruction and aren't expanded in the SelectionDAG
 // Legalizer.

+def : Pat<(seto FPR64:$rs1, FPR64:$rs2),
+          (AND (FEQ_D FPR64:$rs1, FPR64:$rs1),
+               (FEQ_D FPR64:$rs2, FPR64:$rs2))>;
+
 def : Pat<(setuo FPR64:$rs1, FPR64:$rs2),
           (SLTIU (AND (FEQ_D FPR64:$rs1, FPR64:$rs1),
                       (FEQ_D FPR64:$rs2, FPR64:$rs2)),
Index: lib/Target/RISCV/RISCVInstrInfoF.td
===================================================================
--- lib/Target/RISCV/RISCVInstrInfoF.td
+++ lib/Target/RISCV/RISCVInstrInfoF.td
@@ -324,6 +324,10 @@
 // handled by a RISC-V instruction and aren't expanded in the SelectionDAG
 // Legalizer.

+def : Pat<(seto FPR32:$rs1, FPR32:$rs2),
+          (AND (FEQ_S FPR32:$rs1, FPR32:$rs1),
+               (FEQ_S FPR32:$rs2, FPR32:$rs2))>;
+
 def : Pat<(setuo FPR32:$rs1, FPR32:$rs2),
           (SLTIU (AND (FEQ_S FPR32:$rs1, FPR32:$rs1),
                       (FEQ_S FPR32:$rs2, FPR32:$rs2)),
Index: test/CodeGen/RISCV/double-br-fcmp.ll
===================================================================
--- test/CodeGen/RISCV/double-br-fcmp.ll
+++ test/CodeGen/RISCV/double-br-fcmp.ll
@@ -322,8 +322,6 @@
 ; RV32IFD-NEXT:    and a0, a1, a0
 ; RV32IFD-NEXT:    feq.d a1, ft0, ft1
 ; RV32IFD-NEXT:    not a1, a1
-; RV32IFD-NEXT:    seqz a0, a0
-; RV32IFD-NEXT:    xori a0, a0, 1
 ; RV32IFD-NEXT:    and a0, a1, a0
 ; RV32IFD-NEXT:    bnez a0, .LBB7_2
 ; RV32IFD-NEXT:  # %bb.1: # %if.else
@@ -344,8 +342,6 @@
 ; RV64IFD-NEXT:    and a0, a1, a0
 ; RV64IFD-NEXT:    feq.d a1, ft0, ft1
 ; RV64IFD-NEXT:    not a1, a1
-; RV64IFD-NEXT:    seqz a0, a0
-; RV64IFD-NEXT:    xori a0, a0, 1
 ; RV64IFD-NEXT:    and a0, a1, a0
 ; RV64IFD-NEXT:    bnez a0, .LBB7_2
 ; RV64IFD-NEXT:  # %bb.1: # %if.else
@@ -377,8 +373,6 @@
 ; RV32IFD-NEXT:    feq.d a0, ft1, ft1
 ; RV32IFD-NEXT:    feq.d a1, ft0, ft0
 ; RV32IFD-NEXT:    and a0, a1, a0
-; RV32IFD-NEXT:    seqz a0, a0
-; RV32IFD-NEXT:    xori a0, a0, 1
 ; RV32IFD-NEXT:    bnez a0, .LBB8_2
 ; RV32IFD-NEXT:  # %bb.1: # %if.else
 ; RV32IFD-NEXT:    lw ra, 12(sp)
@@ -396,8 +390,6 @@
 ; RV64IFD-NEXT:    fmv.d.x ft0, a0
 ; RV64IFD-NEXT:    feq.d a0, ft0, ft0
 ; RV64IFD-NEXT:    and a0, a0, a1
-; RV64IFD-NEXT:    seqz a0, a0
-; RV64IFD-NEXT:    xori a0, a0, 1
 ; RV64IFD-NEXT:    bnez a0, .LBB8_2
 ; RV64IFD-NEXT:  # %bb.1: # %if.else
 ; RV64IFD-NEXT:    ld ra, 8(sp)
Index: test/CodeGen/RISCV/double-fcmp.ll
===================================================================
--- test/CodeGen/RISCV/double-fcmp.ll
+++ test/CodeGen/RISCV/double-fcmp.ll
@@ -159,8 +159,6 @@
 ; RV32IFD-NEXT:    and a0, a1, a0
 ; RV32IFD-NEXT:    feq.d a1, ft0, ft1
 ; RV32IFD-NEXT:    not a1, a1
-; RV32IFD-NEXT:    seqz a0, a0
-; RV32IFD-NEXT:    xori a0, a0, 1
 ; RV32IFD-NEXT:    and a0, a1, a0
 ; RV32IFD-NEXT:    addi sp, sp, 16
 ; RV32IFD-NEXT:    ret
@@ -174,8 +172,6 @@
 ; RV64IFD-NEXT:    and a0, a1, a0
 ; RV64IFD-NEXT:    feq.d a1, ft0, ft1
 ; RV64IFD-NEXT:    not a1, a1
-; RV64IFD-NEXT:    seqz a0, a0
-; RV64IFD-NEXT:    xori a0, a0, 1
 ; RV64IFD-NEXT:    and a0, a1, a0
 ; RV64IFD-NEXT:    ret
   %1 = fcmp one double %a, %b
@@ -196,8 +192,6 @@
 ; RV32IFD-NEXT:    feq.d a0, ft1, ft1
 ; RV32IFD-NEXT:    feq.d a1, ft0, ft0
 ; RV32IFD-NEXT:    and a0, a1, a0
-; RV32IFD-NEXT:    seqz a0, a0
-; RV32IFD-NEXT:    xori a0, a0, 1
 ; RV32IFD-NEXT:    addi sp, sp, 16
 ; RV32IFD-NEXT:    ret
 ;
@@ -208,8 +202,6 @@
 ; RV64IFD-NEXT:    fmv.d.x ft0, a0
 ; RV64IFD-NEXT:    feq.d a0, ft0, ft0
 ; RV64IFD-NEXT:    and a0, a0, a1
-; RV64IFD-NEXT:    seqz a0, a0
-; RV64IFD-NEXT:    xori a0, a0, 1
 ; RV64IFD-NEXT:    ret
   %1 = fcmp ord double %a, %b
   %2 = zext i1 %1 to i32
Index: test/CodeGen/RISCV/double-select-fcmp.ll
===================================================================
--- test/CodeGen/RISCV/double-select-fcmp.ll
+++ test/CodeGen/RISCV/double-select-fcmp.ll
@@ -221,8 +221,6 @@
 ; RV32IFD-NEXT:    and a0, a1, a0
 ; RV32IFD-NEXT:    feq.d a1, ft0, ft1
 ; RV32IFD-NEXT:    not a1, a1
-; RV32IFD-NEXT:    seqz a0, a0
-; RV32IFD-NEXT:    xori a0, a0, 1
 ; RV32IFD-NEXT:    and a0, a1, a0
 ; RV32IFD-NEXT:    bnez a0, .LBB6_2
 ; RV32IFD-NEXT:  # %bb.1:
@@ -243,8 +241,6 @@
 ; RV64IFD-NEXT:    and a0, a1, a0
 ; RV64IFD-NEXT:    feq.d a1, ft0, ft1
 ; RV64IFD-NEXT:    not a1, a1
-; RV64IFD-NEXT:    seqz a0, a0
-; RV64IFD-NEXT:    xori a0, a0, 1
 ; RV64IFD-NEXT:    and a0, a1, a0
 ; RV64IFD-NEXT:    bnez a0, .LBB6_2
 ; RV64IFD-NEXT:  # %bb.1:
@@ -270,8 +266,6 @@
 ; RV32IFD-NEXT:    feq.d a0, ft1, ft1
 ; RV32IFD-NEXT:    feq.d a1, ft0, ft0
 ; RV32IFD-NEXT:    and a0, a1, a0
-; RV32IFD-NEXT:    seqz a0, a0
-; RV32IFD-NEXT:    xori a0, a0, 1
 ; RV32IFD-NEXT:    bnez a0, .LBB7_2
 ; RV32IFD-NEXT:  # %bb.1:
 ; RV32IFD-NEXT:    fmv.d ft0, ft1
@@ -289,8 +283,6 @@
 ; RV64IFD-NEXT:    feq.d a0, ft1, ft1
 ; RV64IFD-NEXT:    feq.d a1, ft0, ft0
 ; RV64IFD-NEXT:    and a0, a1, a0
-; RV64IFD-NEXT:    seqz a0, a0
-; RV64IFD-NEXT:    xori a0, a0, 1
 ; RV64IFD-NEXT:    bnez a0, .LBB7_2
 ; RV64IFD-NEXT:  # %bb.1:
 ; RV64IFD-NEXT:    fmv.d ft0, ft1
Index: test/CodeGen/RISCV/float-br-fcmp.ll
===================================================================
--- test/CodeGen/RISCV/float-br-fcmp.ll
+++ test/CodeGen/RISCV/float-br-fcmp.ll
@@ -295,8 +295,6 @@
 ; RV32IF-NEXT:    and a0, a1, a0
 ; RV32IF-NEXT:    feq.s a1, ft0, ft1
 ; RV32IF-NEXT:    not a1, a1
-; RV32IF-NEXT:    seqz a0, a0
-; RV32IF-NEXT:    xori a0, a0, 1
 ; RV32IF-NEXT:    and a0, a1, a0
 ; RV32IF-NEXT:    bnez a0, .LBB7_2
 ; RV32IF-NEXT:  # %bb.1: # %if.else
@@ -317,8 +315,6 @@
 ; RV64IF-NEXT:    and a0, a1, a0
 ; RV64IF-NEXT:    feq.s a1, ft0, ft1
 ; RV64IF-NEXT:    not a1, a1
-; RV64IF-NEXT:    seqz a0, a0
-; RV64IF-NEXT:    xori a0, a0, 1
 ; RV64IF-NEXT:    and a0, a1, a0
 ; RV64IF-NEXT:    bnez a0, .LBB7_2
 ; RV64IF-NEXT:  # %bb.1: # %if.else
@@ -346,8 +342,6 @@
 ; RV32IF-NEXT:    fmv.w.x ft0, a0
 ; RV32IF-NEXT:    feq.s a0, ft0, ft0
 ; RV32IF-NEXT:    and a0, a0, a1
-; RV32IF-NEXT:    seqz a0, a0
-; RV32IF-NEXT:    xori a0, a0, 1
 ; RV32IF-NEXT:    bnez a0, .LBB8_2
 ; RV32IF-NEXT:  # %bb.1: # %if.else
 ; RV32IF-NEXT:    lw ra, 12(sp)
@@ -365,8 +359,6 @@
 ; RV64IF-NEXT:    fmv.w.x ft0, a0
 ; RV64IF-NEXT:    feq.s a0, ft0, ft0
 ; RV64IF-NEXT:    and a0, a0, a1
-; RV64IF-NEXT:    seqz a0, a0
-; RV64IF-NEXT:    xori a0, a0, 1
 ; RV64IF-NEXT:    bnez a0, .LBB8_2
 ; RV64IF-NEXT:  # %bb.1: # %if.else
 ; RV64IF-NEXT:    ld ra, 8(sp)
Index: test/CodeGen/RISCV/float-fcmp.ll
===================================================================
--- test/CodeGen/RISCV/float-fcmp.ll
+++ test/CodeGen/RISCV/float-fcmp.ll
@@ -124,8 +124,6 @@
 ; RV32IF-NEXT:    and a0, a1, a0
 ; RV32IF-NEXT:    feq.s a1, ft0, ft1
 ; RV32IF-NEXT:    not a1, a1
-; RV32IF-NEXT:    seqz a0, a0
-; RV32IF-NEXT:    xori a0, a0, 1
 ; RV32IF-NEXT:    and a0, a1, a0
 ; RV32IF-NEXT:    ret
 ;
@@ -138,8 +136,6 @@
 ; RV64IF-NEXT:    and a0, a1, a0
 ; RV64IF-NEXT:    feq.s a1, ft0, ft1
 ; RV64IF-NEXT:    not a1, a1
-; RV64IF-NEXT:    seqz a0, a0
-; RV64IF-NEXT:    xori a0, a0, 1
 ; RV64IF-NEXT:    and a0, a1, a0
 ; RV64IF-NEXT:    ret
   %1 = fcmp one float %a, %b
@@ -155,8 +151,6 @@
 ; RV32IF-NEXT:    fmv.w.x ft0, a0
 ; RV32IF-NEXT:    feq.s a0, ft0, ft0
 ; RV32IF-NEXT:    and a0, a0, a1
-; RV32IF-NEXT:    seqz a0, a0
-; RV32IF-NEXT:    xori a0, a0, 1
 ; RV32IF-NEXT:    ret
 ;
 ; RV64IF-LABEL: fcmp_ord:
@@ -166,8 +160,6 @@
 ; RV64IF-NEXT:    fmv.w.x ft0, a0
 ; RV64IF-NEXT:    feq.s a0, ft0, ft0
 ; RV64IF-NEXT:    and a0, a0, a1
-; RV64IF-NEXT:    seqz a0, a0
-; RV64IF-NEXT:    xori a0, a0, 1
 ; RV64IF-NEXT:    ret
   %1 = fcmp ord float %a, %b
   %2 = zext i1 %1 to i32
Index: test/CodeGen/RISCV/float-select-fcmp.ll
===================================================================
--- test/CodeGen/RISCV/float-select-fcmp.ll
+++ test/CodeGen/RISCV/float-select-fcmp.ll
@@ -175,8 +175,6 @@
 ; RV32IF-NEXT:    and a0, a1, a0
 ; RV32IF-NEXT:    feq.s a1, ft0, ft1
 ; RV32IF-NEXT:    not a1, a1
-; RV32IF-NEXT:    seqz a0, a0
-; RV32IF-NEXT:    xori a0, a0, 1
 ; RV32IF-NEXT:    and a0, a1, a0
 ; RV32IF-NEXT:    bnez a0, .LBB6_2
 ; RV32IF-NEXT:  # %bb.1:
@@ -194,8 +192,6 @@
 ; RV64IF-NEXT:    and a0, a1, a0
 ; RV64IF-NEXT:    feq.s a1, ft0, ft1
 ; RV64IF-NEXT:    not a1, a1
-; RV64IF-NEXT:    seqz a0, a0
-; RV64IF-NEXT:    xori a0, a0, 1
 ; RV64IF-NEXT:    and a0, a1, a0
 ; RV64IF-NEXT:    bnez a0, .LBB6_2
 ; RV64IF-NEXT:  # %bb.1:
@@ -216,8 +212,6 @@
 ; RV32IF-NEXT:    feq.s a0, ft1, ft1
 ; RV32IF-NEXT:    feq.s a1, ft0, ft0
 ; RV32IF-NEXT:    and a0, a1, a0
-; RV32IF-NEXT:    seqz a0, a0
-; RV32IF-NEXT:    xori a0, a0, 1
 ; RV32IF-NEXT:    bnez a0, .LBB7_2
 ; RV32IF-NEXT:  # %bb.1:
 ; RV32IF-NEXT:    fmv.s ft0, ft1
@@ -232,8 +226,6 @@
 ; RV64IF-NEXT:    feq.s a0, ft1, ft1
 ; RV64IF-NEXT:    feq.s a1, ft0, ft0
 ; RV64IF-NEXT:    and a0, a1, a0
-; RV64IF-NEXT:    seqz a0, a0
-; RV64IF-NEXT:    xori a0, a0, 1
 ; RV64IF-NEXT:    bnez a0, .LBB7_2
 ; RV64IF-NEXT:  # %bb.1:
 ; RV64IF-NEXT:    fmv.s ft0, ft1
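For reference, a minimal IR reproducer that exercises the new (seto ...) pattern directly. The function name fcmp_ord_example is hypothetical (the existing tests use their own fcmp_ord functions); the fcmp ord / zext sequence is taken from the test files above. With this patch, ISD::SETO is no longer in FPCCToExtend, so the comparison should select to two feq.s instructions plus an and, without the trailing seqz/xori pair shown in the removed CHECK lines.

    ; Ordered comparison: true iff neither operand is NaN.
    ; Now matched by the (seto ...) pattern as feq.s on each operand, then and.
    define i32 @fcmp_ord_example(float %a, float %b) nounwind {
      %1 = fcmp ord float %a, %b
      %2 = zext i1 %1 to i32
      ret i32 %2
    }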