diff --git a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
--- a/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
+++ b/llvm/lib/Target/RISCV/RISCVInstrInfoB.td
@@ -85,26 +85,26 @@
 }]>;
 // Checks if this mask has a single 0 bit and cannot be used with ANDI.
-def SBCLRMask : ImmLeaf<XLenVT, [{
+def BCLRMask : ImmLeaf<XLenVT, [{
   if (Subtarget->is64Bit())
     return !isInt<12>(Imm) && isPowerOf2_64(~Imm);
   return !isInt<12>(Imm) && isPowerOf2_32(~Imm);
 }]>;
 // Checks if this mask has a single 1 bit and cannot be used with ORI/XORI.
-def SBSETINVMask : ImmLeaf<XLenVT, [{
+def BSETINVMask : ImmLeaf<XLenVT, [{
   if (Subtarget->is64Bit())
     return !isInt<12>(Imm) && isPowerOf2_64(Imm);
   return !isInt<12>(Imm) && isPowerOf2_32(Imm);
 }]>;
-def SBCLRXForm : SDNodeXForm<imm, [{
+def BCLRXForm : SDNodeXForm<imm, [{
   return CurDAG->getTargetConstant(N->getAPIntValue().countTrailingOnes(),
                                    SDLoc(N), N->getValueType(0));
 }]>;
-def SBSETINVXForm : SDNodeXForm<imm, [{
+def BSETINVXForm : SDNodeXForm<imm, [{
   return CurDAG->getTargetConstant(N->getAPIntValue().countTrailingZeros(),
                                    SDLoc(N), N->getValueType(0));
@@ -112,13 +112,13 @@
 // Similar to above, but makes sure the immediate has 33 sign bits. When used
 // with an AND/OR/XOR where the other operand has at least 33 sign bits, the
-// result will have 33 sign bits. This can match SBCLRIW/SBSETIW/SBINVIW.
-def SBCLRWMask : ImmLeaf<i64, [{
+// result will have 33 sign bits. This can match BCLRIW/BSETIW/BINVIW.
+def BCLRWMask : ImmLeaf<i64, [{
   return isInt<32>(Imm) && !isInt<12>(Imm) && isPowerOf2_32(~Imm);
 }]>;
-def SBSETINVWMask : ImmLeaf<i64, [{
+def BSETINVWMask : ImmLeaf<i64, [{
   return isInt<32>(Imm) && !isInt<12>(Imm) && isPowerOf2_32(Imm);
 }]>;
@@ -241,10 +241,10 @@
 } // Predicates = [HasStdExtZbbOrZbp]
 let Predicates = [HasStdExtZbs] in {
-def SBCLR : ALU_rr<0b0100100, 0b001, "sbclr">, Sched<[]>;
-def SBSET : ALU_rr<0b0010100, 0b001, "sbset">, Sched<[]>;
-def SBINV : ALU_rr<0b0110100, 0b001, "sbinv">, Sched<[]>;
-def SBEXT : ALU_rr<0b0100100, 0b101, "sbext">, Sched<[]>;
+def BCLR : ALU_rr<0b0100100, 0b001, "bclr">, Sched<[]>;
+def BSET : ALU_rr<0b0010100, 0b001, "bset">, Sched<[]>;
+def BINV : ALU_rr<0b0110100, 0b001, "binv">, Sched<[]>;
+def BEXT : ALU_rr<0b0100100, 0b101, "bext">, Sched<[]>;
 } // Predicates = [HasStdExtZbs]
 let Predicates = [HasStdExtZbp] in {
@@ -261,10 +261,10 @@
 def RORI : RVBShift_ri<0b01100, 0b101, OPC_OP_IMM, "rori">, Sched<[]>;
 let Predicates = [HasStdExtZbs] in {
-def SBCLRI : RVBShift_ri<0b01001, 0b001, OPC_OP_IMM, "sbclri">, Sched<[]>;
-def SBSETI : RVBShift_ri<0b00101, 0b001, OPC_OP_IMM, "sbseti">, Sched<[]>;
-def SBINVI : RVBShift_ri<0b01101, 0b001, OPC_OP_IMM, "sbinvi">, Sched<[]>;
-def SBEXTI : RVBShift_ri<0b01001, 0b101, OPC_OP_IMM, "sbexti">, Sched<[]>;
+def BCLRI : RVBShift_ri<0b01001, 0b001, OPC_OP_IMM, "bclri">, Sched<[]>;
+def BSETI : RVBShift_ri<0b00101, 0b001, OPC_OP_IMM, "bseti">, Sched<[]>;
+def BINVI : RVBShift_ri<0b01101, 0b001, OPC_OP_IMM, "binvi">, Sched<[]>;
+def BEXTI : RVBShift_ri<0b01001, 0b101, OPC_OP_IMM, "bexti">, Sched<[]>;
 } // Predicates = [HasStdExtZbs]
 let Predicates = [HasStdExtZbp] in {
@@ -350,8 +350,8 @@
 } // Predicates = [HasStdExtZbp]
 let Predicates = [HasStdExtZbe] in {
-def BDEP : ALU_rr<0b0100100, 0b110, "bdep">, Sched<[]>;
-def BEXT : ALU_rr<0b0000100, 0b110, "bext">, Sched<[]>;
+def BDECOMPRESS : ALU_rr<0b0100100, 0b110, "bdecompress">, Sched<[]>;
+def BCOMPRESS : ALU_rr<0b0000100, 0b110, "bcompress">, Sched<[]>;
 } // Predicates = [HasStdExtZbe]
 let Predicates = [HasStdExtZbbOrZbp] in {
@@ -394,10 +394,10 @@
 } // Predicates = [HasStdExtZbbOrZbp, IsRV64]
 let Predicates = [HasStdExtZbs, IsRV64] in {
-def SBCLRW : ALUW_rr<0b0100100, 0b001, "sbclrw">, Sched<[]>;
-def SBSETW : ALUW_rr<0b0010100, 0b001, "sbsetw">, Sched<[]>;
-def SBINVW : ALUW_rr<0b0110100, 0b001, "sbinvw">, Sched<[]>;
-def SBEXTW : ALUW_rr<0b0100100, 0b101, "sbextw">, Sched<[]>;
+def BCLRW : ALUW_rr<0b0100100, 0b001, "bclrw">, Sched<[]>;
+def BSETW : ALUW_rr<0b0010100, 0b001, "bsetw">, Sched<[]>;
+def BINVW : ALUW_rr<0b0110100, 0b001, "binvw">, Sched<[]>;
+def BEXTW : ALUW_rr<0b0100100, 0b101, "bextw">, Sched<[]>;
 } // Predicates = [HasStdExtZbs, IsRV64]
 let Predicates = [HasStdExtZbp, IsRV64] in {
@@ -414,11 +414,11 @@
 def RORIW : RVBShiftW_ri<0b0110000, 0b101, OPC_OP_IMM_32, "roriw">, Sched<[]>;
 let Predicates = [HasStdExtZbs, IsRV64] in {
-def SBCLRIW : RVBShiftW_ri<0b0100100, 0b001, OPC_OP_IMM_32, "sbclriw">,
+def BCLRIW : RVBShiftW_ri<0b0100100, 0b001, OPC_OP_IMM_32, "bclriw">,
              Sched<[]>;
-def SBSETIW : RVBShiftW_ri<0b0010100, 0b001, OPC_OP_IMM_32, "sbsetiw">,
+def BSETIW : RVBShiftW_ri<0b0010100, 0b001, OPC_OP_IMM_32, "bsetiw">,
              Sched<[]>;
-def SBINVIW : RVBShiftW_ri<0b0110100, 0b001, OPC_OP_IMM_32, "sbinviw">,
+def BINVIW : RVBShiftW_ri<0b0110100, 0b001, OPC_OP_IMM_32, "binviw">,
             Sched<[]>;
 } // Predicates = [HasStdExtZbs, IsRV64]
@@ -451,8 +451,8 @@
 } // Predicates = [HasStdExtZbp, IsRV64]
 let Predicates = [HasStdExtZbe, IsRV64] in {
-def BDEPW : ALUW_rr<0b0100100, 0b110, "bdepw">, Sched<[]>;
-def BEXTW : ALUW_rr<0b0000100, 0b110, "bextw">, Sched<[]>;
+def BDECOMPRESSW : ALUW_rr<0b0100100, 0b110, "bdecompressw">, Sched<[]>;
+def BCOMPRESSW : ALUW_rr<0b0000100, 0b110, "bcompressw">, Sched<[]>;
 } // Predicates = [HasStdExtZbe, IsRV64]
 let Predicates = [HasStdExtZbbOrZbp, IsRV64] in {
@@ -651,24 +651,24 @@
 let Predicates = [HasStdExtZbs] in {
 def : Pat<(and (not (shiftop<shl> 1, GPR:$rs2)), GPR:$rs1),
-          (SBCLR GPR:$rs1, GPR:$rs2)>;
-def : Pat<(and (rotl -2, GPR:$rs2), GPR:$rs1), (SBCLR GPR:$rs1, GPR:$rs2)>;
+          (BCLR GPR:$rs1, GPR:$rs2)>;
+def : Pat<(and (rotl -2, GPR:$rs2), GPR:$rs1), (BCLR GPR:$rs1, GPR:$rs2)>;
 def : Pat<(or (shiftop<shl> 1, GPR:$rs2), GPR:$rs1),
-          (SBSET GPR:$rs1, GPR:$rs2)>;
+          (BSET GPR:$rs1, GPR:$rs2)>;
 def : Pat<(xor (shiftop<shl> 1, GPR:$rs2), GPR:$rs1),
-          (SBINV GPR:$rs1, GPR:$rs2)>;
+          (BINV GPR:$rs1, GPR:$rs2)>;
 def : Pat<(and (shiftop<srl> GPR:$rs1, GPR:$rs2), 1),
-          (SBEXT GPR:$rs1, GPR:$rs2)>;
+          (BEXT GPR:$rs1, GPR:$rs2)>;
 def : Pat<(shiftop<shl> 1, GPR:$rs2),
-          (SBSET X0, GPR:$rs2)>;
-
-def : Pat<(and GPR:$rs1, SBCLRMask:$mask),
-          (SBCLRI GPR:$rs1, (SBCLRXForm imm:$mask))>;
-def : Pat<(or GPR:$rs1, SBSETINVMask:$mask),
-          (SBSETI GPR:$rs1, (SBSETINVXForm imm:$mask))>;
-def : Pat<(xor GPR:$rs1, SBSETINVMask:$mask),
-          (SBINVI GPR:$rs1, (SBSETINVXForm imm:$mask))>;
+          (BSET X0, GPR:$rs2)>;
+
+def : Pat<(and GPR:$rs1, BCLRMask:$mask),
+          (BCLRI GPR:$rs1, (BCLRXForm imm:$mask))>;
+def : Pat<(or GPR:$rs1, BSETINVMask:$mask),
+          (BSETI GPR:$rs1, (BSETINVXForm imm:$mask))>;
+def : Pat<(xor GPR:$rs1, BSETINVMask:$mask),
+          (BINVI GPR:$rs1, (BSETINVXForm imm:$mask))>;
 }
 let Predicates = [HasStdExtZbp] in {
@@ -691,7 +691,7 @@
 // pattern-matched by simple andi, ori, and xori.
let Predicates = [HasStdExtZbs] in def : Pat<(and (srl GPR:$rs1, uimmlog2xlen:$shamt), (XLenVT 1)), - (SBEXTI GPR:$rs1, uimmlog2xlen:$shamt)>; + (BEXTI GPR:$rs1, uimmlog2xlen:$shamt)>; def riscv_grevi : SDNode<"RISCVISD::GREVI", SDTIntBinOp, []>; def riscv_greviw : SDNode<"RISCVISD::GREVIW", SDTIntBinOp, []>; @@ -876,29 +876,29 @@ let Predicates = [HasStdExtZbs, IsRV64] in { def : Pat<(and (not (riscv_sllw 1, GPR:$rs2)), (assertsexti32 GPR:$rs1)), - (SBCLRW GPR:$rs1, GPR:$rs2)>; + (BCLRW GPR:$rs1, GPR:$rs2)>; def : Pat<(sext_inreg (and (not (riscv_sllw 1, GPR:$rs2)), GPR:$rs1), i32), - (SBCLRW GPR:$rs1, GPR:$rs2)>; + (BCLRW GPR:$rs1, GPR:$rs2)>; def : Pat<(or (riscv_sllw 1, GPR:$rs2), (assertsexti32 GPR:$rs1)), - (SBSETW GPR:$rs1, GPR:$rs2)>; + (BSETW GPR:$rs1, GPR:$rs2)>; def : Pat<(sext_inreg (or (riscv_sllw 1, GPR:$rs2), GPR:$rs1), i32), - (SBSETW GPR:$rs1, GPR:$rs2)>; + (BSETW GPR:$rs1, GPR:$rs2)>; def : Pat<(xor (riscv_sllw 1, GPR:$rs2), (assertsexti32 GPR:$rs1)), - (SBINVW GPR:$rs1, GPR:$rs2)>; + (BINVW GPR:$rs1, GPR:$rs2)>; def : Pat<(sext_inreg (xor (riscv_sllw 1, GPR:$rs2), GPR:$rs1), i32), - (SBINVW GPR:$rs1, GPR:$rs2)>; + (BINVW GPR:$rs1, GPR:$rs2)>; def : Pat<(and (riscv_srlw GPR:$rs1, GPR:$rs2), 1), - (SBEXTW GPR:$rs1, GPR:$rs2)>; + (BEXTW GPR:$rs1, GPR:$rs2)>; def : Pat<(riscv_sllw 1, GPR:$rs2), - (SBSETW X0, GPR:$rs2)>; - -def : Pat<(and (assertsexti32 GPR:$rs1), SBCLRWMask:$mask), - (SBCLRIW GPR:$rs1, (SBCLRXForm imm:$mask))>; -def : Pat<(or (assertsexti32 GPR:$rs1), SBSETINVWMask:$mask), - (SBSETIW GPR:$rs1, (SBSETINVXForm imm:$mask))>; -def : Pat<(xor (assertsexti32 GPR:$rs1), SBSETINVWMask:$mask), - (SBINVIW GPR:$rs1, (SBSETINVXForm imm:$mask))>; + (BSETW X0, GPR:$rs2)>; + +def : Pat<(and (assertsexti32 GPR:$rs1), BCLRWMask:$mask), + (BCLRIW GPR:$rs1, (BCLRXForm imm:$mask))>; +def : Pat<(or (assertsexti32 GPR:$rs1), BSETINVWMask:$mask), + (BSETIW GPR:$rs1, (BSETINVXForm imm:$mask))>; +def : Pat<(xor (assertsexti32 GPR:$rs1), BSETINVWMask:$mask), + (BINVIW GPR:$rs1, (BSETINVXForm imm:$mask))>; } // Predicates = [HasStdExtZbs, IsRV64] diff --git a/llvm/test/CodeGen/RISCV/rv32Zbs.ll b/llvm/test/CodeGen/RISCV/rv32Zbs.ll --- a/llvm/test/CodeGen/RISCV/rv32Zbs.ll +++ b/llvm/test/CodeGen/RISCV/rv32Zbs.ll @@ -17,12 +17,12 @@ ; ; RV32IB-LABEL: sbclr_i32: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbclr a0, a0, a1 +; RV32IB-NEXT: bclr a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbclr_i32: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbclr a0, a0, a1 +; RV32IBS-NEXT: bclr a0, a0, a1 ; RV32IBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and @@ -42,12 +42,12 @@ ; ; RV32IB-LABEL: sbclr_i32_no_mask: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbclr a0, a0, a1 +; RV32IB-NEXT: bclr a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbclr_i32_no_mask: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbclr a0, a0, a1 +; RV32IBS-NEXT: bclr a0, a0, a1 ; RV32IBS-NEXT: ret %shl = shl nuw i32 1, %b %neg = xor i32 %shl, -1 @@ -80,10 +80,10 @@ ; RV32IB: # %bb.0: ; RV32IB-NEXT: andi a3, a2, 63 ; RV32IB-NEXT: addi a3, a3, -32 -; RV32IB-NEXT: sbset a4, zero, a3 +; RV32IB-NEXT: bset a4, zero, a3 ; RV32IB-NEXT: slti a5, a3, 0 ; RV32IB-NEXT: cmov a4, a5, zero, a4 -; RV32IB-NEXT: sbset a2, zero, a2 +; RV32IB-NEXT: bset a2, zero, a2 ; RV32IB-NEXT: srai a3, a3, 31 ; RV32IB-NEXT: and a2, a3, a2 ; RV32IB-NEXT: andn a1, a1, a4 @@ -97,11 +97,11 @@ ; RV32IBS-NEXT: bltz a3, .LBB2_2 ; RV32IBS-NEXT: # %bb.1: ; RV32IBS-NEXT: mv a2, zero -; RV32IBS-NEXT: sbset a3, zero, a3 +; RV32IBS-NEXT: bset a3, zero, a3 ; RV32IBS-NEXT: j .LBB2_3 ; 
RV32IBS-NEXT: .LBB2_2: ; RV32IBS-NEXT: mv a3, zero -; RV32IBS-NEXT: sbset a2, zero, a2 +; RV32IBS-NEXT: bset a2, zero, a2 ; RV32IBS-NEXT: .LBB2_3: ; RV32IBS-NEXT: not a3, a3 ; RV32IBS-NEXT: not a2, a2 @@ -125,12 +125,12 @@ ; ; RV32IB-LABEL: sbset_i32: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbset a0, a0, a1 +; RV32IB-NEXT: bset a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbset_i32: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbset a0, a0, a1 +; RV32IBS-NEXT: bset a0, a0, a1 ; RV32IBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and @@ -148,12 +148,12 @@ ; ; RV32IB-LABEL: sbset_i32_no_mask: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbset a0, a0, a1 +; RV32IB-NEXT: bset a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbset_i32_no_mask: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbset a0, a0, a1 +; RV32IBS-NEXT: bset a0, a0, a1 ; RV32IBS-NEXT: ret %shl = shl nuw i32 1, %b %or = or i32 %shl, %a @@ -170,12 +170,12 @@ ; ; RV32IB-LABEL: sbset_i32_zero: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbset a0, zero, a0 +; RV32IB-NEXT: bset a0, zero, a0 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbset_i32_zero: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbset a0, zero, a0 +; RV32IBS-NEXT: bset a0, zero, a0 ; RV32IBS-NEXT: ret %shl = shl i32 1, %a ret i32 %shl @@ -198,17 +198,17 @@ ; ; RV32IB-LABEL: sbset_i64: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbset a3, zero, a2 +; RV32IB-NEXT: bset a3, zero, a2 ; RV32IB-NEXT: srai a3, a3, 31 -; RV32IB-NEXT: sbset a0, a0, a2 +; RV32IB-NEXT: bset a0, a0, a2 ; RV32IB-NEXT: or a1, a3, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbset_i64: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbset a3, zero, a2 +; RV32IBS-NEXT: bset a3, zero, a2 ; RV32IBS-NEXT: srai a3, a3, 31 -; RV32IBS-NEXT: sbset a0, a0, a2 +; RV32IBS-NEXT: bset a0, a0, a2 ; RV32IBS-NEXT: or a1, a3, a1 ; RV32IBS-NEXT: ret %1 = trunc i64 %b to i32 @@ -237,10 +237,10 @@ ; RV32IB-LABEL: sbset_i64_zero: ; RV32IB: # %bb.0: ; RV32IB-NEXT: addi a2, a0, -32 -; RV32IB-NEXT: sbset a1, zero, a2 +; RV32IB-NEXT: bset a1, zero, a2 ; RV32IB-NEXT: slti a3, a2, 0 ; RV32IB-NEXT: cmov a1, a3, zero, a1 -; RV32IB-NEXT: sbset a0, zero, a0 +; RV32IB-NEXT: bset a0, zero, a0 ; RV32IB-NEXT: srai a2, a2, 31 ; RV32IB-NEXT: and a0, a2, a0 ; RV32IB-NEXT: ret @@ -251,11 +251,11 @@ ; RV32IBS-NEXT: bltz a1, .LBB7_2 ; RV32IBS-NEXT: # %bb.1: ; RV32IBS-NEXT: mv a0, zero -; RV32IBS-NEXT: sbset a1, zero, a1 +; RV32IBS-NEXT: bset a1, zero, a1 ; RV32IBS-NEXT: ret ; RV32IBS-NEXT: .LBB7_2: ; RV32IBS-NEXT: mv a1, zero -; RV32IBS-NEXT: sbset a0, zero, a0 +; RV32IBS-NEXT: bset a0, zero, a0 ; RV32IBS-NEXT: ret %shl = shl i64 1, %a ret i64 %shl @@ -271,12 +271,12 @@ ; ; RV32IB-LABEL: sbinv_i32: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbinv a0, a0, a1 +; RV32IB-NEXT: binv a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbinv_i32: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbinv a0, a0, a1 +; RV32IBS-NEXT: binv a0, a0, a1 ; RV32IBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and @@ -301,17 +301,17 @@ ; ; RV32IB-LABEL: sbinv_i64: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbset a3, zero, a2 +; RV32IB-NEXT: bset a3, zero, a2 ; RV32IB-NEXT: srai a3, a3, 31 -; RV32IB-NEXT: sbinv a0, a0, a2 +; RV32IB-NEXT: binv a0, a0, a2 ; RV32IB-NEXT: xor a1, a3, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbinv_i64: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbset a3, zero, a2 +; RV32IBS-NEXT: bset a3, zero, a2 ; RV32IBS-NEXT: srai a3, a3, 31 -; RV32IBS-NEXT: sbinv a0, a0, a2 +; RV32IBS-NEXT: binv a0, a0, a2 ; RV32IBS-NEXT: xor a1, a3, a1 ; RV32IBS-NEXT: ret %1 = trunc i64 %b to i32 @@ -331,12 +331,12 @@ ; ; RV32IB-LABEL: sbext_i32: 
; RV32IB: # %bb.0: -; RV32IB-NEXT: sbext a0, a0, a1 +; RV32IB-NEXT: bext a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbext_i32: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbext a0, a0, a1 +; RV32IBS-NEXT: bext a0, a0, a1 ; RV32IBS-NEXT: ret %and = and i32 %b, 31 %shr = lshr i32 %a, %and @@ -353,12 +353,12 @@ ; ; RV32IB-LABEL: sbext_i32_no_mask: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbext a0, a0, a1 +; RV32IB-NEXT: bext a0, a0, a1 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbext_i32_no_mask: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbext a0, a0, a1 +; RV32IBS-NEXT: bext a0, a0, a1 ; RV32IBS-NEXT: ret %shr = lshr i32 %a, %b %and1 = and i32 %shr, 1 @@ -442,12 +442,12 @@ ; ; RV32IB-LABEL: sbexti_i32: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbexti a0, a0, 5 +; RV32IB-NEXT: bexti a0, a0, 5 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbexti_i32: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbexti a0, a0, 5 +; RV32IBS-NEXT: bexti a0, a0, 5 ; RV32IBS-NEXT: ret %shr = lshr i32 %a, 5 %and = and i32 %shr, 1 @@ -464,13 +464,13 @@ ; ; RV32IB-LABEL: sbexti_i64: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbexti a0, a0, 5 +; RV32IB-NEXT: bexti a0, a0, 5 ; RV32IB-NEXT: mv a1, zero ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbexti_i64: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbexti a0, a0, 5 +; RV32IBS-NEXT: bexti a0, a0, 5 ; RV32IBS-NEXT: mv a1, zero ; RV32IBS-NEXT: ret %shr = lshr i64 %a, 5 @@ -507,12 +507,12 @@ ; ; RV32IB-LABEL: sbclri_i32_11: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbclri a0, a0, 11 +; RV32IB-NEXT: bclri a0, a0, 11 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbclri_i32_11: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbclri a0, a0, 11 +; RV32IBS-NEXT: bclri a0, a0, 11 ; RV32IBS-NEXT: ret %and = and i32 %a, -2049 ret i32 %and @@ -528,12 +528,12 @@ ; ; RV32IB-LABEL: sbclri_i32_30: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbclri a0, a0, 30 +; RV32IB-NEXT: bclri a0, a0, 30 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbclri_i32_30: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbclri a0, a0, 30 +; RV32IBS-NEXT: bclri a0, a0, 30 ; RV32IBS-NEXT: ret %and = and i32 %a, -1073741825 ret i32 %and @@ -549,12 +549,12 @@ ; ; RV32IB-LABEL: sbclri_i32_31: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbclri a0, a0, 31 +; RV32IB-NEXT: bclri a0, a0, 31 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbclri_i32_31: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbclri a0, a0, 31 +; RV32IBS-NEXT: bclri a0, a0, 31 ; RV32IBS-NEXT: ret %and = and i32 %a, -2147483649 ret i32 %and @@ -589,12 +589,12 @@ ; ; RV32IB-LABEL: sbseti_i32_11: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbseti a0, a0, 11 +; RV32IB-NEXT: bseti a0, a0, 11 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbseti_i32_11: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbseti a0, a0, 11 +; RV32IBS-NEXT: bseti a0, a0, 11 ; RV32IBS-NEXT: ret %or = or i32 %a, 2048 ret i32 %or @@ -609,12 +609,12 @@ ; ; RV32IB-LABEL: sbseti_i32_30: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbseti a0, a0, 30 +; RV32IB-NEXT: bseti a0, a0, 30 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbseti_i32_30: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbseti a0, a0, 30 +; RV32IBS-NEXT: bseti a0, a0, 30 ; RV32IBS-NEXT: ret %or = or i32 %a, 1073741824 ret i32 %or @@ -629,12 +629,12 @@ ; ; RV32IB-LABEL: sbseti_i32_31: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbseti a0, a0, 31 +; RV32IB-NEXT: bseti a0, a0, 31 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbseti_i32_31: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbseti a0, a0, 31 +; RV32IBS-NEXT: bseti a0, a0, 31 ; RV32IBS-NEXT: ret %or = or i32 %a, 2147483648 ret i32 %or @@ -669,12 +669,12 @@ ; ; RV32IB-LABEL: sbinvi_i32_11: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbinvi a0, a0, 11 +; RV32IB-NEXT: binvi a0, a0, 11 ; 
RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbinvi_i32_11: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbinvi a0, a0, 11 +; RV32IBS-NEXT: binvi a0, a0, 11 ; RV32IBS-NEXT: ret %xor = xor i32 %a, 2048 ret i32 %xor @@ -689,12 +689,12 @@ ; ; RV32IB-LABEL: sbinvi_i32_30: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbinvi a0, a0, 30 +; RV32IB-NEXT: binvi a0, a0, 30 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbinvi_i32_30: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbinvi a0, a0, 30 +; RV32IBS-NEXT: binvi a0, a0, 30 ; RV32IBS-NEXT: ret %xor = xor i32 %a, 1073741824 ret i32 %xor @@ -709,12 +709,12 @@ ; ; RV32IB-LABEL: sbinvi_i32_31: ; RV32IB: # %bb.0: -; RV32IB-NEXT: sbinvi a0, a0, 31 +; RV32IB-NEXT: binvi a0, a0, 31 ; RV32IB-NEXT: ret ; ; RV32IBS-LABEL: sbinvi_i32_31: ; RV32IBS: # %bb.0: -; RV32IBS-NEXT: sbinvi a0, a0, 31 +; RV32IBS-NEXT: binvi a0, a0, 31 ; RV32IBS-NEXT: ret %xor = xor i32 %a, 2147483648 ret i32 %xor diff --git a/llvm/test/CodeGen/RISCV/rv32Zbt.ll b/llvm/test/CodeGen/RISCV/rv32Zbt.ll --- a/llvm/test/CodeGen/RISCV/rv32Zbt.ll +++ b/llvm/test/CodeGen/RISCV/rv32Zbt.ll @@ -680,7 +680,7 @@ ; RV32IB-NEXT: fsri a1, a0, a1, 31 ; RV32IB-NEXT: sll a1, a1, t3 ; RV32IB-NEXT: sub a5, a6, t4 -; RV32IB-NEXT: sbclri a0, a0, 31 +; RV32IB-NEXT: bclri a0, a0, 31 ; RV32IB-NEXT: srl a0, a0, a5 ; RV32IB-NEXT: or a0, a1, a0 ; RV32IB-NEXT: sll a1, t0, a2 diff --git a/llvm/test/CodeGen/RISCV/rv64Zbp.ll b/llvm/test/CodeGen/RISCV/rv64Zbp.ll --- a/llvm/test/CodeGen/RISCV/rv64Zbp.ll +++ b/llvm/test/CodeGen/RISCV/rv64Zbp.ll @@ -178,7 +178,7 @@ ; RV64IB-LABEL: sroiw_bug: ; RV64IB: # %bb.0: ; RV64IB-NEXT: srli a0, a0, 1 -; RV64IB-NEXT: sbseti a0, a0, 31 +; RV64IB-NEXT: bseti a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBP-LABEL: sroiw_bug: diff --git a/llvm/test/CodeGen/RISCV/rv64Zbs.ll b/llvm/test/CodeGen/RISCV/rv64Zbs.ll --- a/llvm/test/CodeGen/RISCV/rv64Zbs.ll +++ b/llvm/test/CodeGen/RISCV/rv64Zbs.ll @@ -17,12 +17,12 @@ ; ; RV64IB-LABEL: sbclr_i32: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclrw a0, a0, a1 +; RV64IB-NEXT: bclrw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclr_i32: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclrw a0, a0, a1 +; RV64IBS-NEXT: bclrw a0, a0, a1 ; RV64IBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and @@ -42,12 +42,12 @@ ; ; RV64IB-LABEL: sbclr_i32_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclrw a0, a0, a1 +; RV64IB-NEXT: bclrw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclr_i32_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclrw a0, a0, a1 +; RV64IBS-NEXT: bclrw a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i32 1, %b %neg = xor i32 %shl, -1 @@ -69,13 +69,13 @@ ; RV64IB-LABEL: sbclr_i32_load: ; RV64IB: # %bb.0: ; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: sbclrw a0, a0, a1 +; RV64IB-NEXT: bclrw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclr_i32_load: ; RV64IBS: # %bb.0: ; RV64IBS-NEXT: lw a0, 0(a0) -; RV64IBS-NEXT: sbclrw a0, a0, a1 +; RV64IBS-NEXT: bclrw a0, a0, a1 ; RV64IBS-NEXT: ret %a = load i32, i32* %p %shl = shl i32 1, %b @@ -95,12 +95,12 @@ ; ; RV64IB-LABEL: sbclr_i64: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclr a0, a0, a1 +; RV64IB-NEXT: bclr a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclr_i64: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclr a0, a0, a1 +; RV64IBS-NEXT: bclr a0, a0, a1 ; RV64IBS-NEXT: ret %and = and i64 %b, 63 %shl = shl nuw i64 1, %and @@ -120,12 +120,12 @@ ; ; RV64IB-LABEL: sbclr_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclr a0, a0, a1 +; RV64IB-NEXT: bclr a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclr_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclr 
a0, a0, a1 +; RV64IBS-NEXT: bclr a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i64 1, %b %neg = xor i64 %shl, -1 @@ -143,12 +143,12 @@ ; ; RV64IB-LABEL: sbset_i32: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbsetw a0, a0, a1 +; RV64IB-NEXT: bsetw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i32: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbsetw a0, a0, a1 +; RV64IBS-NEXT: bsetw a0, a0, a1 ; RV64IBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and @@ -166,12 +166,12 @@ ; ; RV64IB-LABEL: sbset_i32_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbsetw a0, a0, a1 +; RV64IB-NEXT: bsetw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i32_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbsetw a0, a0, a1 +; RV64IBS-NEXT: bsetw a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i32 1, %b %or = or i32 %shl, %a @@ -191,13 +191,13 @@ ; RV64IB-LABEL: sbset_i32_load: ; RV64IB: # %bb.0: ; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: sbsetw a0, a0, a1 +; RV64IB-NEXT: bsetw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i32_load: ; RV64IBS: # %bb.0: ; RV64IBS-NEXT: lw a0, 0(a0) -; RV64IBS-NEXT: sbsetw a0, a0, a1 +; RV64IBS-NEXT: bsetw a0, a0, a1 ; RV64IBS-NEXT: ret %a = load i32, i32* %p %shl = shl i32 1, %b @@ -215,12 +215,12 @@ ; ; RV64IB-LABEL: sbset_i32_zero: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbsetw a0, zero, a0 +; RV64IB-NEXT: bsetw a0, zero, a0 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i32_zero: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbsetw a0, zero, a0 +; RV64IBS-NEXT: bsetw a0, zero, a0 ; RV64IBS-NEXT: ret %shl = shl i32 1, %a ret i32 %shl @@ -236,12 +236,12 @@ ; ; RV64IB-LABEL: sbset_i64: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbset a0, a0, a1 +; RV64IB-NEXT: bset a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i64: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbset a0, a0, a1 +; RV64IBS-NEXT: bset a0, a0, a1 ; RV64IBS-NEXT: ret %conv = and i64 %b, 63 %shl = shl nuw i64 1, %conv @@ -259,12 +259,12 @@ ; ; RV64IB-LABEL: sbset_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbset a0, a0, a1 +; RV64IB-NEXT: bset a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbset a0, a0, a1 +; RV64IBS-NEXT: bset a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i64 1, %b %or = or i64 %shl, %a @@ -281,12 +281,12 @@ ; ; RV64IB-LABEL: sbset_i64_zero: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbset a0, zero, a0 +; RV64IB-NEXT: bset a0, zero, a0 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbset_i64_zero: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbset a0, zero, a0 +; RV64IBS-NEXT: bset a0, zero, a0 ; RV64IBS-NEXT: ret %shl = shl i64 1, %a ret i64 %shl @@ -302,12 +302,12 @@ ; ; RV64IB-LABEL: sbinv_i32: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvw a0, a0, a1 +; RV64IB-NEXT: binvw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinv_i32: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvw a0, a0, a1 +; RV64IBS-NEXT: binvw a0, a0, a1 ; RV64IBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and @@ -325,12 +325,12 @@ ; ; RV64IB-LABEL: sbinv_i32_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvw a0, a0, a1 +; RV64IB-NEXT: binvw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinv_i32_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvw a0, a0, a1 +; RV64IBS-NEXT: binvw a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl i32 1, %b %xor = xor i32 %shl, %a @@ -350,13 +350,13 @@ ; RV64IB-LABEL: sbinv_i32_load: ; RV64IB: # %bb.0: ; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: sbinvw a0, a0, a1 +; RV64IB-NEXT: binvw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinv_i32_load: ; RV64IBS: # %bb.0: ; RV64IBS-NEXT: lw 
a0, 0(a0) -; RV64IBS-NEXT: sbinvw a0, a0, a1 +; RV64IBS-NEXT: binvw a0, a0, a1 ; RV64IBS-NEXT: ret %a = load i32, i32* %p %shl = shl i32 1, %b @@ -374,12 +374,12 @@ ; ; RV64IB-LABEL: sbinv_i64: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinv a0, a0, a1 +; RV64IB-NEXT: binv a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinv_i64: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinv a0, a0, a1 +; RV64IBS-NEXT: binv a0, a0, a1 ; RV64IBS-NEXT: ret %conv = and i64 %b, 63 %shl = shl nuw i64 1, %conv @@ -397,12 +397,12 @@ ; ; RV64IB-LABEL: sbinv_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinv a0, a0, a1 +; RV64IB-NEXT: binv a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinv_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinv a0, a0, a1 +; RV64IBS-NEXT: binv a0, a0, a1 ; RV64IBS-NEXT: ret %shl = shl nuw i64 1, %b %xor = xor i64 %shl, %a @@ -418,12 +418,12 @@ ; ; RV64IB-LABEL: sbext_i32: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbextw a0, a0, a1 +; RV64IB-NEXT: bextw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbext_i32: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbextw a0, a0, a1 +; RV64IBS-NEXT: bextw a0, a0, a1 ; RV64IBS-NEXT: ret %and = and i32 %b, 31 %shr = lshr i32 %a, %and @@ -440,12 +440,12 @@ ; ; RV64IB-LABEL: sbext_i32_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbextw a0, a0, a1 +; RV64IB-NEXT: bextw a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbext_i32_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbextw a0, a0, a1 +; RV64IBS-NEXT: bextw a0, a0, a1 ; RV64IBS-NEXT: ret %shr = lshr i32 %a, %b %and1 = and i32 %shr, 1 @@ -461,12 +461,12 @@ ; ; RV64IB-LABEL: sbext_i64: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbext a0, a0, a1 +; RV64IB-NEXT: bext a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbext_i64: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbext a0, a0, a1 +; RV64IBS-NEXT: bext a0, a0, a1 ; RV64IBS-NEXT: ret %conv = and i64 %b, 63 %shr = lshr i64 %a, %conv @@ -483,12 +483,12 @@ ; ; RV64IB-LABEL: sbext_i64_no_mask: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbext a0, a0, a1 +; RV64IB-NEXT: bext a0, a0, a1 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbext_i64_no_mask: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbext a0, a0, a1 +; RV64IBS-NEXT: bext a0, a0, a1 ; RV64IBS-NEXT: ret %shr = lshr i64 %a, %b %and1 = and i64 %shr, 1 @@ -504,12 +504,12 @@ ; ; RV64IB-LABEL: sbexti_i32: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbexti a0, a0, 5 +; RV64IB-NEXT: bexti a0, a0, 5 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbexti_i32: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbexti a0, a0, 5 +; RV64IBS-NEXT: bexti a0, a0, 5 ; RV64IBS-NEXT: ret %shr = lshr i32 %a, 5 %and = and i32 %shr, 1 @@ -525,12 +525,12 @@ ; ; RV64IB-LABEL: sbexti_i64: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbexti a0, a0, 5 +; RV64IB-NEXT: bexti a0, a0, 5 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbexti_i64: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbexti a0, a0, 5 +; RV64IBS-NEXT: bexti a0, a0, 5 ; RV64IBS-NEXT: ret %shr = lshr i64 %a, 5 %and = and i64 %shr, 1 @@ -566,12 +566,12 @@ ; ; RV64IB-LABEL: sbclri_i32_11: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclriw a0, a0, 11 +; RV64IB-NEXT: bclriw a0, a0, 11 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i32_11: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclriw a0, a0, 11 +; RV64IBS-NEXT: bclriw a0, a0, 11 ; RV64IBS-NEXT: ret %and = and i32 %a, -2049 ret i32 %and @@ -587,12 +587,12 @@ ; ; RV64IB-LABEL: sbclri_i32_30: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclriw a0, a0, 30 +; RV64IB-NEXT: bclriw a0, a0, 30 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i32_30: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclriw a0, a0, 30 +; RV64IBS-NEXT: bclriw a0, a0, 30 ; 
RV64IBS-NEXT: ret %and = and i32 %a, -1073741825 ret i32 %and @@ -608,12 +608,12 @@ ; ; RV64IB-LABEL: sbclri_i32_31: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclriw a0, a0, 31 +; RV64IB-NEXT: bclriw a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i32_31: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclriw a0, a0, 31 +; RV64IBS-NEXT: bclriw a0, a0, 31 ; RV64IBS-NEXT: ret %and = and i32 %a, -2147483649 ret i32 %and @@ -648,12 +648,12 @@ ; ; RV64IB-LABEL: sbclri_i64_11: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclri a0, a0, 11 +; RV64IB-NEXT: bclri a0, a0, 11 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i64_11: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclri a0, a0, 11 +; RV64IBS-NEXT: bclri a0, a0, 11 ; RV64IBS-NEXT: ret %and = and i64 %a, -2049 ret i64 %and @@ -669,12 +669,12 @@ ; ; RV64IB-LABEL: sbclri_i64_30: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclri a0, a0, 30 +; RV64IB-NEXT: bclri a0, a0, 30 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i64_30: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclri a0, a0, 30 +; RV64IBS-NEXT: bclri a0, a0, 30 ; RV64IBS-NEXT: ret %and = and i64 %a, -1073741825 ret i64 %and @@ -691,12 +691,12 @@ ; ; RV64IB-LABEL: sbclri_i64_31: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclri a0, a0, 31 +; RV64IB-NEXT: bclri a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i64_31: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclri a0, a0, 31 +; RV64IBS-NEXT: bclri a0, a0, 31 ; RV64IBS-NEXT: ret %and = and i64 %a, -2147483649 ret i64 %and @@ -713,12 +713,12 @@ ; ; RV64IB-LABEL: sbclri_i64_62: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclri a0, a0, 62 +; RV64IB-NEXT: bclri a0, a0, 62 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i64_62: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclri a0, a0, 62 +; RV64IBS-NEXT: bclri a0, a0, 62 ; RV64IBS-NEXT: ret %and = and i64 %a, -4611686018427387905 ret i64 %and @@ -735,12 +735,12 @@ ; ; RV64IB-LABEL: sbclri_i64_63: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbclri a0, a0, 63 +; RV64IB-NEXT: bclri a0, a0, 63 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbclri_i64_63: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbclri a0, a0, 63 +; RV64IBS-NEXT: bclri a0, a0, 63 ; RV64IBS-NEXT: ret %and = and i64 %a, -9223372036854775809 ret i64 %and @@ -775,12 +775,12 @@ ; ; RV64IB-LABEL: sbseti_i32_11: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbsetiw a0, a0, 11 +; RV64IB-NEXT: bsetiw a0, a0, 11 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i32_11: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbsetiw a0, a0, 11 +; RV64IBS-NEXT: bsetiw a0, a0, 11 ; RV64IBS-NEXT: ret %or = or i32 %a, 2048 ret i32 %or @@ -795,12 +795,12 @@ ; ; RV64IB-LABEL: sbseti_i32_30: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbsetiw a0, a0, 30 +; RV64IB-NEXT: bsetiw a0, a0, 30 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i32_30: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbsetiw a0, a0, 30 +; RV64IBS-NEXT: bsetiw a0, a0, 30 ; RV64IBS-NEXT: ret %or = or i32 %a, 1073741824 ret i32 %or @@ -815,12 +815,12 @@ ; ; RV64IB-LABEL: sbseti_i32_31: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbsetiw a0, a0, 31 +; RV64IB-NEXT: bsetiw a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i32_31: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbsetiw a0, a0, 31 +; RV64IBS-NEXT: bsetiw a0, a0, 31 ; RV64IBS-NEXT: ret %or = or i32 %a, 2147483648 ret i32 %or @@ -855,12 +855,12 @@ ; ; RV64IB-LABEL: sbseti_i64_11: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbseti a0, a0, 11 +; RV64IB-NEXT: bseti a0, a0, 11 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i64_11: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbseti a0, a0, 11 +; RV64IBS-NEXT: bseti a0, a0, 11 ; RV64IBS-NEXT: ret %or = or i64 %a, 2048 ret i64 %or @@ -875,12 +875,12 @@ 
; ; RV64IB-LABEL: sbseti_i64_30: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbseti a0, a0, 30 +; RV64IB-NEXT: bseti a0, a0, 30 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i64_30: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbseti a0, a0, 30 +; RV64IBS-NEXT: bseti a0, a0, 30 ; RV64IBS-NEXT: ret %or = or i64 %a, 1073741824 ret i64 %or @@ -896,12 +896,12 @@ ; ; RV64IB-LABEL: sbseti_i64_31: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbseti a0, a0, 31 +; RV64IB-NEXT: bseti a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i64_31: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbseti a0, a0, 31 +; RV64IBS-NEXT: bseti a0, a0, 31 ; RV64IBS-NEXT: ret %or = or i64 %a, 2147483648 ret i64 %or @@ -917,12 +917,12 @@ ; ; RV64IB-LABEL: sbseti_i64_62: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbseti a0, a0, 62 +; RV64IB-NEXT: bseti a0, a0, 62 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i64_62: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbseti a0, a0, 62 +; RV64IBS-NEXT: bseti a0, a0, 62 ; RV64IBS-NEXT: ret %or = or i64 %a, 4611686018427387904 ret i64 %or @@ -938,12 +938,12 @@ ; ; RV64IB-LABEL: sbseti_i64_63: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbseti a0, a0, 63 +; RV64IB-NEXT: bseti a0, a0, 63 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbseti_i64_63: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbseti a0, a0, 63 +; RV64IBS-NEXT: bseti a0, a0, 63 ; RV64IBS-NEXT: ret %or = or i64 %a, 9223372036854775808 ret i64 %or @@ -978,12 +978,12 @@ ; ; RV64IB-LABEL: sbinvi_i32_11: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinviw a0, a0, 11 +; RV64IB-NEXT: binviw a0, a0, 11 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i32_11: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinviw a0, a0, 11 +; RV64IBS-NEXT: binviw a0, a0, 11 ; RV64IBS-NEXT: ret %xor = xor i32 %a, 2048 ret i32 %xor @@ -998,12 +998,12 @@ ; ; RV64IB-LABEL: sbinvi_i32_30: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinviw a0, a0, 30 +; RV64IB-NEXT: binviw a0, a0, 30 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i32_30: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinviw a0, a0, 30 +; RV64IBS-NEXT: binviw a0, a0, 30 ; RV64IBS-NEXT: ret %xor = xor i32 %a, 1073741824 ret i32 %xor @@ -1018,12 +1018,12 @@ ; ; RV64IB-LABEL: sbinvi_i32_31: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinviw a0, a0, 31 +; RV64IB-NEXT: binviw a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i32_31: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinviw a0, a0, 31 +; RV64IBS-NEXT: binviw a0, a0, 31 ; RV64IBS-NEXT: ret %xor = xor i32 %a, 2147483648 ret i32 %xor @@ -1058,12 +1058,12 @@ ; ; RV64IB-LABEL: sbinvi_i64_11: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvi a0, a0, 11 +; RV64IB-NEXT: binvi a0, a0, 11 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i64_11: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvi a0, a0, 11 +; RV64IBS-NEXT: binvi a0, a0, 11 ; RV64IBS-NEXT: ret %xor = xor i64 %a, 2048 ret i64 %xor @@ -1078,12 +1078,12 @@ ; ; RV64IB-LABEL: sbinvi_i64_30: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvi a0, a0, 30 +; RV64IB-NEXT: binvi a0, a0, 30 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i64_30: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvi a0, a0, 30 +; RV64IBS-NEXT: binvi a0, a0, 30 ; RV64IBS-NEXT: ret %xor = xor i64 %a, 1073741824 ret i64 %xor @@ -1099,12 +1099,12 @@ ; ; RV64IB-LABEL: sbinvi_i64_31: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvi a0, a0, 31 +; RV64IB-NEXT: binvi a0, a0, 31 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i64_31: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvi a0, a0, 31 +; RV64IBS-NEXT: binvi a0, a0, 31 ; RV64IBS-NEXT: ret %xor = xor i64 %a, 2147483648 ret i64 %xor @@ -1120,12 +1120,12 @@ ; ; RV64IB-LABEL: sbinvi_i64_62: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvi a0, 
a0, 62 +; RV64IB-NEXT: binvi a0, a0, 62 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i64_62: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvi a0, a0, 62 +; RV64IBS-NEXT: binvi a0, a0, 62 ; RV64IBS-NEXT: ret %xor = xor i64 %a, 4611686018427387904 ret i64 %xor @@ -1141,12 +1141,12 @@ ; ; RV64IB-LABEL: sbinvi_i64_63: ; RV64IB: # %bb.0: -; RV64IB-NEXT: sbinvi a0, a0, 63 +; RV64IB-NEXT: binvi a0, a0, 63 ; RV64IB-NEXT: ret ; ; RV64IBS-LABEL: sbinvi_i64_63: ; RV64IBS: # %bb.0: -; RV64IBS-NEXT: sbinvi a0, a0, 63 +; RV64IBS-NEXT: binvi a0, a0, 63 ; RV64IBS-NEXT: ret %xor = xor i64 %a, 9223372036854775808 ret i64 %xor diff --git a/llvm/test/MC/RISCV/rv32zbe-invalid.s b/llvm/test/MC/RISCV/rv32zbe-invalid.s --- a/llvm/test/MC/RISCV/rv32zbe-invalid.s +++ b/llvm/test/MC/RISCV/rv32zbe-invalid.s @@ -1,6 +1,6 @@ # RUN: not llvm-mc -triple riscv32 -mattr=+experimental-b,experimental-zbe < %s 2>&1 | FileCheck %s # Too few operands -bdep t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bdecompress t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -bext t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bcompress t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction diff --git a/llvm/test/MC/RISCV/rv32zbe-valid.s b/llvm/test/MC/RISCV/rv32zbe-valid.s --- a/llvm/test/MC/RISCV/rv32zbe-valid.s +++ b/llvm/test/MC/RISCV/rv32zbe-valid.s @@ -12,9 +12,9 @@ # RUN: | llvm-objdump --mattr=+experimental-zbe -d -r - \ # RUN: | FileCheck --check-prefix=CHECK-ASM-AND-OBJ %s -# CHECK-ASM-AND-OBJ: bdep t0, t1, t2 +# CHECK-ASM-AND-OBJ: bdecompress t0, t1, t2 # CHECK-ASM: encoding: [0xb3,0x62,0x73,0x48] -bdep t0, t1, t2 -# CHECK-ASM-AND-OBJ: bext t0, t1, t2 +bdecompress t0, t1, t2 +# CHECK-ASM-AND-OBJ: bcompress t0, t1, t2 # CHECK-ASM: encoding: [0xb3,0x62,0x73,0x08] -bext t0, t1, t2 +bcompress t0, t1, t2 diff --git a/llvm/test/MC/RISCV/rv32zbs-invalid.s b/llvm/test/MC/RISCV/rv32zbs-invalid.s --- a/llvm/test/MC/RISCV/rv32zbs-invalid.s +++ b/llvm/test/MC/RISCV/rv32zbs-invalid.s @@ -1,30 +1,30 @@ # RUN: not llvm-mc -triple riscv32 -mattr=+experimental-b,experimental-zbs < %s 2>&1 | FileCheck %s # Too few operands -sbclr t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bclr t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbset t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bset t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbinv t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +binv t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbext t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bext t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbclri t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bclri t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbclri t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] -sbclri t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +bclri t0, t1, 32 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] +bclri t0, t1, -1 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] # Too few operands -sbseti t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands 
for instruction +bseti t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbseti t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] -sbseti t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +bseti t0, t1, 32 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] +bseti t0, t1, -1 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] # Too few operands -sbinvi t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +binvi t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbinvi t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] -sbinvi t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +binvi t0, t1, 32 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] +binvi t0, t1, -1 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] # Too few operands -sbexti t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bexti t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbexti t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] -sbexti t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +bexti t0, t1, 32 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] +bexti t0, t1, -1 # CHECK: :[[@LINE]]:15: error: immediate must be an integer in the range [0, 31] diff --git a/llvm/test/MC/RISCV/rv32zbs-valid.s b/llvm/test/MC/RISCV/rv32zbs-valid.s --- a/llvm/test/MC/RISCV/rv32zbs-valid.s +++ b/llvm/test/MC/RISCV/rv32zbs-valid.s @@ -12,27 +12,27 @@ # RUN: | llvm-objdump --mattr=+experimental-zbs -d -r - \ # RUN: | FileCheck --check-prefix=CHECK-ASM-AND-OBJ %s -# CHECK-ASM-AND-OBJ: sbclr t0, t1, t2 +# CHECK-ASM-AND-OBJ: bclr t0, t1, t2 # CHECK-ASM: encoding: [0xb3,0x12,0x73,0x48] -sbclr t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbset t0, t1, t2 +bclr t0, t1, t2 +# CHECK-ASM-AND-OBJ: bset t0, t1, t2 # CHECK-ASM: encoding: [0xb3,0x12,0x73,0x28] -sbset t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbinv t0, t1, t2 +bset t0, t1, t2 +# CHECK-ASM-AND-OBJ: binv t0, t1, t2 # CHECK-ASM: encoding: [0xb3,0x12,0x73,0x68] -sbinv t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbext t0, t1, t2 +binv t0, t1, t2 +# CHECK-ASM-AND-OBJ: bext t0, t1, t2 # CHECK-ASM: encoding: [0xb3,0x52,0x73,0x48] -sbext t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbclri t0, t1, 1 +bext t0, t1, t2 +# CHECK-ASM-AND-OBJ: bclri t0, t1, 1 # CHECK-ASM: encoding: [0x93,0x12,0x13,0x48] -sbclri t0, t1, 1 -# CHECK-ASM-AND-OBJ: sbseti t0, t1, 1 +bclri t0, t1, 1 +# CHECK-ASM-AND-OBJ: bseti t0, t1, 1 # CHECK-ASM: encoding: [0x93,0x12,0x13,0x28] -sbseti t0, t1, 1 -# CHECK-ASM-AND-OBJ: sbinvi t0, t1, 1 +bseti t0, t1, 1 +# CHECK-ASM-AND-OBJ: binvi t0, t1, 1 # CHECK-ASM: encoding: [0x93,0x12,0x13,0x68] -sbinvi t0, t1, 1 -# CHECK-ASM-AND-OBJ: sbexti t0, t1, 1 +binvi t0, t1, 1 +# CHECK-ASM-AND-OBJ: bexti t0, t1, 1 # CHECK-ASM: encoding: [0x93,0x52,0x13,0x48] -sbexti t0, t1, 1 +bexti t0, t1, 1 diff --git a/llvm/test/MC/RISCV/rv64zbe-invalid.s b/llvm/test/MC/RISCV/rv64zbe-invalid.s --- a/llvm/test/MC/RISCV/rv64zbe-invalid.s +++ b/llvm/test/MC/RISCV/rv64zbe-invalid.s @@ -1,6 +1,6 @@ # RUN: not llvm-mc -triple riscv64 
-mattr=+experimental-b,experimental-zbe < %s 2>&1 | FileCheck %s # Too few operands -bdepw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bdecompressw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -bextw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bcompressw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction diff --git a/llvm/test/MC/RISCV/rv64zbe-valid.s b/llvm/test/MC/RISCV/rv64zbe-valid.s --- a/llvm/test/MC/RISCV/rv64zbe-valid.s +++ b/llvm/test/MC/RISCV/rv64zbe-valid.s @@ -12,9 +12,9 @@ # RUN: | llvm-objdump --mattr=+experimental-zbe -d -r - \ # RUN: | FileCheck --check-prefix=CHECK-ASM-AND-OBJ %s -# CHECK-ASM-AND-OBJ: bdepw t0, t1, t2 +# CHECK-ASM-AND-OBJ: bdecompressw t0, t1, t2 # CHECK-ASM: encoding: [0xbb,0x62,0x73,0x48] -bdepw t0, t1, t2 -# CHECK-ASM-AND-OBJ: bextw t0, t1, t2 +bdecompressw t0, t1, t2 +# CHECK-ASM-AND-OBJ: bcompressw t0, t1, t2 # CHECK-ASM: encoding: [0xbb,0x62,0x73,0x08] -bextw t0, t1, t2 +bcompressw t0, t1, t2 diff --git a/llvm/test/MC/RISCV/rv64zbs-invalid.s b/llvm/test/MC/RISCV/rv64zbs-invalid.s --- a/llvm/test/MC/RISCV/rv64zbs-invalid.s +++ b/llvm/test/MC/RISCV/rv64zbs-invalid.s @@ -1,25 +1,25 @@ # RUN: not llvm-mc -triple riscv64 -mattr=+experimental-b,experimental-zbs < %s 2>&1 | FileCheck %s # Too few operands -sbclrw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bclrw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbsetw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bsetw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbinvw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +binvw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbextw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bextw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Too few operands -sbclriw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bclriw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbclriw t0, t1, 32 # CHECK: :[[@LINE]]:17: error: immediate must be an integer in the range [0, 31] -sbclriw t0, t1, -1 # CHECK: :[[@LINE]]:17: error: immediate must be an integer in the range [0, 31] +bclriw t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +bclriw t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] # Too few operands -sbsetiw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +bsetiw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbsetiw t0, t1, 32 # CHECK: :[[@LINE]]:17: error: immediate must be an integer in the range [0, 31] -sbsetiw t0, t1, -1 # CHECK: :[[@LINE]]:17: error: immediate must be an integer in the range [0, 31] +bsetiw t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +bsetiw t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] # Too few operands -sbinviw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction +binviw t0, t1 # CHECK: :[[@LINE]]:1: error: too few operands for instruction # Immediate operand out of range -sbinviw t0, t1, 32 # CHECK: :[[@LINE]]:17: error: immediate must be an 
integer in the range [0, 31] -sbinviw t0, t1, -1 # CHECK: :[[@LINE]]:17: error: immediate must be an integer in the range [0, 31] +binviw t0, t1, 32 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] +binviw t0, t1, -1 # CHECK: :[[@LINE]]:16: error: immediate must be an integer in the range [0, 31] diff --git a/llvm/test/MC/RISCV/rv64zbs-valid.s b/llvm/test/MC/RISCV/rv64zbs-valid.s --- a/llvm/test/MC/RISCV/rv64zbs-valid.s +++ b/llvm/test/MC/RISCV/rv64zbs-valid.s @@ -12,24 +12,24 @@ # RUN: | llvm-objdump --mattr=+experimental-zbs -d -r - \ # RUN: | FileCheck --check-prefix=CHECK-ASM-AND-OBJ %s -# CHECK-ASM-AND-OBJ: sbclrw t0, t1, t2 +# CHECK-ASM-AND-OBJ: bclrw t0, t1, t2 # CHECK-ASM: encoding: [0xbb,0x12,0x73,0x48] -sbclrw t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbsetw t0, t1, t2 +bclrw t0, t1, t2 +# CHECK-ASM-AND-OBJ: bsetw t0, t1, t2 # CHECK-ASM: encoding: [0xbb,0x12,0x73,0x28] -sbsetw t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbinvw t0, t1, t2 +bsetw t0, t1, t2 +# CHECK-ASM-AND-OBJ: binvw t0, t1, t2 # CHECK-ASM: encoding: [0xbb,0x12,0x73,0x68] -sbinvw t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbextw t0, t1, t2 +binvw t0, t1, t2 +# CHECK-ASM-AND-OBJ: bextw t0, t1, t2 # CHECK-ASM: encoding: [0xbb,0x52,0x73,0x48] -sbextw t0, t1, t2 -# CHECK-ASM-AND-OBJ: sbclriw t0, t1, 0 +bextw t0, t1, t2 +# CHECK-ASM-AND-OBJ: bclriw t0, t1, 0 # CHECK-ASM: encoding: [0x9b,0x12,0x03,0x48] -sbclriw t0, t1, 0 -# CHECK-ASM-AND-OBJ: sbsetiw t0, t1, 0 +bclriw t0, t1, 0 +# CHECK-ASM-AND-OBJ: bsetiw t0, t1, 0 # CHECK-ASM: encoding: [0x9b,0x12,0x03,0x28] -sbsetiw t0, t1, 0 -# CHECK-ASM-AND-OBJ: sbinviw t0, t1, 0 +bsetiw t0, t1, 0 +# CHECK-ASM-AND-OBJ: binviw t0, t1, 0 # CHECK-ASM: encoding: [0x9b,0x12,0x03,0x68] -sbinviw t0, t1, 0 +binviw t0, t1, 0
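
For reviewers who want a quick reminder of what the renamed Zbs instructions compute, here is a minimal C sketch of their reference semantics (RV32 view), mirroring the shl/srl patterns selected above. It is not part of the patch, and the ref_* helper names are made up for illustration only.

#include <stdint.h>

/* Single-bit operations of the Zbs extension, using the new names adopted by
   this patch. The shift amount is taken modulo XLEN (31 on RV32), matching
   the masked-shift patterns in RISCVInstrInfoB.td. */
static inline uint32_t ref_bclr(uint32_t rs1, uint32_t rs2) {
  return rs1 & ~(UINT32_C(1) << (rs2 & 31));   /* bclr: clear bit rs2   */
}
static inline uint32_t ref_bset(uint32_t rs1, uint32_t rs2) {
  return rs1 | (UINT32_C(1) << (rs2 & 31));    /* bset: set bit rs2     */
}
static inline uint32_t ref_binv(uint32_t rs1, uint32_t rs2) {
  return rs1 ^ (UINT32_C(1) << (rs2 & 31));    /* binv: invert bit rs2  */
}
static inline uint32_t ref_bext(uint32_t rs1, uint32_t rs2) {
  return (rs1 >> (rs2 & 31)) & UINT32_C(1);    /* bext: extract bit rs2 */
}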