diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -7945,6 +7945,20 @@
 }
 
 static SDValue performXORCombine(SDNode *N, SelectionDAG &DAG) {
+  SDValue N0 = N->getOperand(0);
+  SDValue N1 = N->getOperand(1);
+
+  // fold (xor (sllw 1, x), -1) -> (rolw ~1, x)
+  // NOTE: Assumes ROL being legal means ROLW is legal.
+  const TargetLowering &TLI = DAG.getTargetLoweringInfo();
+  if (N0.getOpcode() == RISCVISD::SLLW &&
+      isAllOnesConstant(N1) && isOneConstant(N0.getOperand(0)) &&
+      TLI.isOperationLegal(ISD::ROTL, MVT::i64)) {
+    SDLoc DL(N);
+    return DAG.getNode(RISCVISD::ROLW, DL, MVT::i64,
+                       DAG.getConstant(~1, DL, MVT::i64), N0.getOperand(1));
+  }
+
   // fold (xor (select cond, 0, y), x) ->
   //      (select cond, x, (xor x, y))
   return combineSelectAndUseCommutative(N, DAG, /*AllOnes*/ false);
diff --git a/llvm/test/CodeGen/RISCV/rv32zbb-zbp-zbkb.ll b/llvm/test/CodeGen/RISCV/rv32zbb-zbp-zbkb.ll
--- a/llvm/test/CodeGen/RISCV/rv32zbb-zbp-zbkb.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbb-zbp-zbkb.ll
@@ -349,6 +349,59 @@
   ret i64 %1
 }
 
+define i32 @not_shl_one_i32(i32 %x) {
+; RV32I-LABEL: not_shl_one_i32:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    li a1, 1
+; RV32I-NEXT:    sll a0, a1, a0
+; RV32I-NEXT:    not a0, a0
+; RV32I-NEXT:    ret
+;
+; RV32ZBB-ZBP-ZBKB-LABEL: not_shl_one_i32:
+; RV32ZBB-ZBP-ZBKB:       # %bb.0:
+; RV32ZBB-ZBP-ZBKB-NEXT:    li a1, -2
+; RV32ZBB-ZBP-ZBKB-NEXT:    rol a0, a1, a0
+; RV32ZBB-ZBP-ZBKB-NEXT:    ret
+  %1 = shl i32 1, %x
+  %2 = xor i32 %1, -1
+  ret i32 %2
+}
+
+define i64 @not_shl_one_i64(i64 %x) {
+; RV32I-LABEL: not_shl_one_i64:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    addi a3, a0, -32
+; RV32I-NEXT:    li a2, 1
+; RV32I-NEXT:    li a1, -1
+; RV32I-NEXT:    bltz a3, .LBB15_2
+; RV32I-NEXT:  # %bb.1:
+; RV32I-NEXT:    sll a0, a2, a3
+; RV32I-NEXT:    not a1, a0
+; RV32I-NEXT:    li a0, -1
+; RV32I-NEXT:    ret
+; RV32I-NEXT:  .LBB15_2:
+; RV32I-NEXT:    sll a0, a2, a0
+; RV32I-NEXT:    not a0, a0
+; RV32I-NEXT:    ret
+;
+; RV32ZBB-ZBP-ZBKB-LABEL: not_shl_one_i64:
+; RV32ZBB-ZBP-ZBKB:       # %bb.0:
+; RV32ZBB-ZBP-ZBKB-NEXT:    addi a3, a0, -32
+; RV32ZBB-ZBP-ZBKB-NEXT:    li a2, -2
+; RV32ZBB-ZBP-ZBKB-NEXT:    li a1, -1
+; RV32ZBB-ZBP-ZBKB-NEXT:    bltz a3, .LBB15_2
+; RV32ZBB-ZBP-ZBKB-NEXT:  # %bb.1:
+; RV32ZBB-ZBP-ZBKB-NEXT:    rol a1, a2, a3
+; RV32ZBB-ZBP-ZBKB-NEXT:    li a0, -1
+; RV32ZBB-ZBP-ZBKB-NEXT:    ret
+; RV32ZBB-ZBP-ZBKB-NEXT:  .LBB15_2:
+; RV32ZBB-ZBP-ZBKB-NEXT:    rol a0, a2, a0
+; RV32ZBB-ZBP-ZBKB-NEXT:    ret
+  %1 = shl i64 1, %x
+  %2 = xor i64 %1, -1
+  ret i64 %2
+}
+
 define i8 @srli_i8(i8 %a) nounwind {
 ; RV32I-LABEL: srli_i8:
 ; RV32I:       # %bb.0:
diff --git a/llvm/test/CodeGen/RISCV/rv64zbb-zbp-zbkb.ll b/llvm/test/CodeGen/RISCV/rv64zbb-zbp-zbkb.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbb-zbp-zbkb.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbb-zbp-zbkb.ll
@@ -420,6 +420,42 @@
   ret i64 %1
 }
 
+define signext i32 @not_shl_one_i32(i32 signext %x) {
+; RV64I-LABEL: not_shl_one_i32:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    li a1, 1
+; RV64I-NEXT:    sllw a0, a1, a0
+; RV64I-NEXT:    not a0, a0
+; RV64I-NEXT:    ret
+;
+; RV64ZBB-ZBP-ZBKB-LABEL: not_shl_one_i32:
+; RV64ZBB-ZBP-ZBKB:       # %bb.0:
+; RV64ZBB-ZBP-ZBKB-NEXT:    li a1, -2
+; RV64ZBB-ZBP-ZBKB-NEXT:    rolw a0, a1, a0
+; RV64ZBB-ZBP-ZBKB-NEXT:    ret
+  %1 = shl i32 1, %x
+  %2 = xor i32 %1, -1
+  ret i32 %2
+}
+
+define i64 @not_shl_one_i64(i64 %x) {
+; RV64I-LABEL: not_shl_one_i64:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    li a1, 1
+; RV64I-NEXT:    sll a0, a1, a0
+; RV64I-NEXT:    not a0, a0
+; RV64I-NEXT:    ret
+;
+; RV64ZBB-ZBP-ZBKB-LABEL: not_shl_one_i64:
+; RV64ZBB-ZBP-ZBKB:       # %bb.0:
+; RV64ZBB-ZBP-ZBKB-NEXT:    li a1, -2
+; RV64ZBB-ZBP-ZBKB-NEXT:    rol a0, a1, a0
+; RV64ZBB-ZBP-ZBKB-NEXT:    ret
+  %1 = shl i64 1, %x
+  %2 = xor i64 %1, -1
+  ret i64 %2
+}
+
 define i8 @srli_i8(i8 %a) nounwind {
 ; RV64I-LABEL: srli_i8:
 ; RV64I:       # %bb.0:
diff --git a/llvm/test/CodeGen/RISCV/sextw-removal.ll b/llvm/test/CodeGen/RISCV/sextw-removal.ll
--- a/llvm/test/CodeGen/RISCV/sextw-removal.ll
+++ b/llvm/test/CodeGen/RISCV/sextw-removal.ll
@@ -81,17 +81,17 @@
 ; RV64ZBB-LABEL: test2:
 ; RV64ZBB:       # %bb.0:
 ; RV64ZBB-NEXT:    lw a0, 0(a0)
-; RV64ZBB-NEXT:    li a2, 1
-; RV64ZBB-NEXT:    sllw a1, a2, a1
-; RV64ZBB-NEXT:    andn a0, a0, a1
+; RV64ZBB-NEXT:    li a2, -2
+; RV64ZBB-NEXT:    rolw a1, a2, a1
+; RV64ZBB-NEXT:    and a0, a1, a0
 ; RV64ZBB-NEXT:    ret
 ;
 ; NOREMOVAL-LABEL: test2:
 ; NOREMOVAL:       # %bb.0:
 ; NOREMOVAL-NEXT:    lw a0, 0(a0)
-; NOREMOVAL-NEXT:    li a2, 1
-; NOREMOVAL-NEXT:    sllw a1, a2, a1
-; NOREMOVAL-NEXT:    andn a0, a0, a1
+; NOREMOVAL-NEXT:    li a2, -2
+; NOREMOVAL-NEXT:    rolw a1, a2, a1
+; NOREMOVAL-NEXT:    and a0, a1, a0
 ; NOREMOVAL-NEXT:    sext.w a0, a0
 ; NOREMOVAL-NEXT:    ret
   %a = load i32, i32* %p
@@ -114,17 +114,17 @@
 ; RV64ZBB-LABEL: test3:
 ; RV64ZBB:       # %bb.0:
 ; RV64ZBB-NEXT:    lw a0, 0(a0)
-; RV64ZBB-NEXT:    li a2, 1
-; RV64ZBB-NEXT:    sllw a1, a2, a1
-; RV64ZBB-NEXT:    orn a0, a0, a1
+; RV64ZBB-NEXT:    li a2, -2
+; RV64ZBB-NEXT:    rolw a1, a2, a1
+; RV64ZBB-NEXT:    or a0, a1, a0
 ; RV64ZBB-NEXT:    ret
 ;
 ; NOREMOVAL-LABEL: test3:
 ; NOREMOVAL:       # %bb.0:
 ; NOREMOVAL-NEXT:    lw a0, 0(a0)
-; NOREMOVAL-NEXT:    li a2, 1
-; NOREMOVAL-NEXT:    sllw a1, a2, a1
-; NOREMOVAL-NEXT:    orn a0, a0, a1
+; NOREMOVAL-NEXT:    li a2, -2
+; NOREMOVAL-NEXT:    rolw a1, a2, a1
+; NOREMOVAL-NEXT:    or a0, a1, a0
 ; NOREMOVAL-NEXT:    sext.w a0, a0
 ; NOREMOVAL-NEXT:    ret
   %a = load i32, i32* %p
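Aside (not part of the patch): the combine rests on the bit identity ~(1 << x) == rotl(~1, x) for shift amounts 0..31, since rotating the "all ones except bit 0" pattern left by x moves the single clear bit to position x. Below is a minimal standalone C++ sketch checking that identity; the helper rotl32 is defined here purely for illustration and is not taken from the patch.

#include <cassert>
#include <cstdint>

// Illustrative helper (not from the patch): rotate a 32-bit value left by s,
// where 0 <= s < 32.
static uint32_t rotl32(uint32_t v, unsigned s) {
  return s == 0 ? v : (v << s) | (v >> (32 - s));
}

int main() {
  for (unsigned x = 0; x < 32; ++x) {
    // Left-hand side mirrors (xor (sll 1, x), -1); right-hand side mirrors
    // (rol ~1, x), i.e. rotating -2 left by the shift amount.
    assert(~(uint32_t{1} << x) == rotl32(~uint32_t{1}, x));
  }
  return 0;
}

Since SLLW and ROLW both use only the low 5 bits of the shift/rotate amount, the 32-bit identity above covers every reachable shift in the W-form pattern the combine rewrites.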