diff --git a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
--- a/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
+++ b/llvm/lib/Target/RISCV/RISCVISelLowering.cpp
@@ -111,8 +111,11 @@
   setOperationAction(ISD::VACOPY, MVT::Other, Expand);
   setOperationAction(ISD::VAEND, MVT::Other, Expand);
 
-  for (auto VT : {MVT::i1, MVT::i8, MVT::i16})
-    setOperationAction(ISD::SIGN_EXTEND_INREG, VT, Expand);
+  setOperationAction(ISD::SIGN_EXTEND_INREG, MVT::i1, Expand);
+  if (!Subtarget.hasStdExtZbb()) {
+    setOperationAction(ISD::SIGN_EXTEND_INREG, MVT::i8, Expand);
+    setOperationAction(ISD::SIGN_EXTEND_INREG, MVT::i16, Expand);
+  }
 
   if (Subtarget.is64Bit()) {
     setOperationAction(ISD::ADD, MVT::i32, Custom);
diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
@@ -845,15 +845,10 @@
 def : Pat<(ctpop GPR:$rs1), (PCNT GPR:$rs1)>;
 } // Predicates = [HasStdExtZbb]
 
-let Predicates = [HasStdExtZbb, IsRV32] in
-def : Pat<(sra (shl GPR:$rs1, (i32 24)), (i32 24)), (SEXTB GPR:$rs1)>;
-let Predicates = [HasStdExtZbb, IsRV64] in
-def : Pat<(sra (shl GPR:$rs1, (i64 56)), (i64 56)), (SEXTB GPR:$rs1)>;
-
-let Predicates = [HasStdExtZbb, IsRV32] in
-def : Pat<(sra (shl GPR:$rs1, (i32 16)), (i32 16)), (SEXTH GPR:$rs1)>;
-let Predicates = [HasStdExtZbb, IsRV64] in
-def : Pat<(sra (shl GPR:$rs1, (i64 48)), (i64 48)), (SEXTH GPR:$rs1)>;
+let Predicates = [HasStdExtZbb] in {
+def : Pat<(sext_inreg GPR:$rs1, i8), (SEXTB GPR:$rs1)>;
+def : Pat<(sext_inreg GPR:$rs1, i16), (SEXTH GPR:$rs1)>;
+}
 
 let Predicates = [HasStdExtZbb] in {
 def : Pat<(smin GPR:$rs1, GPR:$rs2), (MIN GPR:$rs1, GPR:$rs2)>;
diff --git a/llvm/test/CodeGen/RISCV/rv32Zbb.ll b/llvm/test/CodeGen/RISCV/rv32Zbb.ll
--- a/llvm/test/CodeGen/RISCV/rv32Zbb.ll
+++ b/llvm/test/CodeGen/RISCV/rv32Zbb.ll
@@ -824,18 +824,14 @@
 ;
 ; RV32IB-LABEL: sextb_i64:
 ; RV32IB:       # %bb.0:
-; RV32IB-NEXT:    sext.b a2, a0
-; RV32IB-NEXT:    slli a0, a0, 24
+; RV32IB-NEXT:    sext.b a0, a0
 ; RV32IB-NEXT:    srai a1, a0, 31
-; RV32IB-NEXT:    mv a0, a2
 ; RV32IB-NEXT:    ret
 ;
 ; RV32IBB-LABEL: sextb_i64:
 ; RV32IBB:       # %bb.0:
-; RV32IBB-NEXT:    sext.b a2, a0
-; RV32IBB-NEXT:    slli a0, a0, 24
+; RV32IBB-NEXT:    sext.b a0, a0
 ; RV32IBB-NEXT:    srai a1, a0, 31
-; RV32IBB-NEXT:    mv a0, a2
 ; RV32IBB-NEXT:    ret
   %shl = shl i64 %a, 56
   %shr = ashr exact i64 %shl, 56
@@ -873,18 +869,14 @@
 ;
 ; RV32IB-LABEL: sexth_i64:
 ; RV32IB:       # %bb.0:
-; RV32IB-NEXT:    sext.h a2, a0
-; RV32IB-NEXT:    slli a0, a0, 16
+; RV32IB-NEXT:    sext.h a0, a0
 ; RV32IB-NEXT:    srai a1, a0, 31
-; RV32IB-NEXT:    mv a0, a2
 ; RV32IB-NEXT:    ret
 ;
 ; RV32IBB-LABEL: sexth_i64:
 ; RV32IBB:       # %bb.0:
-; RV32IBB-NEXT:    sext.h a2, a0
-; RV32IBB-NEXT:    slli a0, a0, 16
+; RV32IBB-NEXT:    sext.h a0, a0
 ; RV32IBB-NEXT:    srai a1, a0, 31
-; RV32IBB-NEXT:    mv a0, a2
 ; RV32IBB-NEXT:    ret
   %shl = shl i64 %a, 48
   %shr = ashr exact i64 %shl, 48
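
For reference, a minimal sketch of the kind of input this change affects, written in the style of the sextb_i64 test above but for i32; the function name is made up and the RUN/CHECK lines of the existing rv32Zbb.ll tests are omitted. The shl+ashr pair is combined to a sext_inreg node in the SelectionDAG; because SIGN_EXTEND_INREG on i8/i16 is no longer marked Expand when Zbb is present, that node survives legalization and the new TableGen patterns select it to a single sext.b/sext.h.

; Illustrative only -- hypothetical function name, no RUN/CHECK lines.
define i32 @sextb_i32_sketch(i32 %a) nounwind {
  ; shl 24 + ashr 24 is recognized as (sext_inreg %a, i8); with Zbb it is
  ; expected to select to one "sext.b" instead of an slli/srai pair.
  %shl = shl i32 %a, 24
  %shr = ashr exact i32 %shl, 24
  ret i32 %shr
}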