diff --git a/llvm/test/CodeGen/RISCV/rv32zba.ll b/llvm/test/CodeGen/RISCV/rv32zba.ll --- a/llvm/test/CodeGen/RISCV/rv32zba.ll +++ b/llvm/test/CodeGen/RISCV/rv32zba.ll @@ -2,9 +2,9 @@ ; RUN: llc -mtriple=riscv32 -mattr=+m -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV32I ; RUN: llc -mtriple=riscv32 -mattr=+m,+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IB +; RUN: | FileCheck %s -check-prefix=RV32B ; RUN: llc -mtriple=riscv32 -mattr=+m,+experimental-zba -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBA +; RUN: | FileCheck %s -check-prefix=RV32ZBA define signext i16 @sh1add(i64 %0, i16* %1) { ; RV32I-LABEL: sh1add: @@ -14,17 +14,17 @@ ; RV32I-NEXT: lh a0, 0(a0) ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sh1add: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a0, a0, a2 -; RV32IB-NEXT: lh a0, 0(a0) -; RV32IB-NEXT: ret +; RV32B-LABEL: sh1add: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a0, a0, a2 +; RV32B-NEXT: lh a0, 0(a0) +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: sh1add: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a0, a0, a2 -; RV32IBA-NEXT: lh a0, 0(a0) -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: sh1add: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a0, a0, a2 +; RV32ZBA-NEXT: lh a0, 0(a0) +; RV32ZBA-NEXT: ret %3 = getelementptr inbounds i16, i16* %1, i64 %0 %4 = load i16, i16* %3 ret i16 %4 @@ -38,17 +38,17 @@ ; RV32I-NEXT: lw a0, 0(a0) ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sh2add: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a0, a0, a2 -; RV32IB-NEXT: lw a0, 0(a0) -; RV32IB-NEXT: ret +; RV32B-LABEL: sh2add: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a0, a0, a2 +; RV32B-NEXT: lw a0, 0(a0) +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: sh2add: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a0, a0, a2 -; RV32IBA-NEXT: lw a0, 0(a0) -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: sh2add: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a0, a0, a2 +; RV32ZBA-NEXT: lw a0, 0(a0) +; RV32ZBA-NEXT: ret %3 = getelementptr inbounds i32, i32* %1, i64 %0 %4 = load i32, i32* %3 ret i32 %4 @@ -63,19 +63,19 @@ ; RV32I-NEXT: lw a1, 4(a1) ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sh3add: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a1, a0, a2 -; RV32IB-NEXT: lw a0, 0(a1) -; RV32IB-NEXT: lw a1, 4(a1) -; RV32IB-NEXT: ret +; RV32B-LABEL: sh3add: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a1, a0, a2 +; RV32B-NEXT: lw a0, 0(a1) +; RV32B-NEXT: lw a1, 4(a1) +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: sh3add: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a1, a0, a2 -; RV32IBA-NEXT: lw a0, 0(a1) -; RV32IBA-NEXT: lw a1, 4(a1) -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: sh3add: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a1, a0, a2 +; RV32ZBA-NEXT: lw a0, 0(a1) +; RV32ZBA-NEXT: lw a1, 4(a1) +; RV32ZBA-NEXT: ret %3 = getelementptr inbounds i64, i64* %1, i64 %0 %4 = load i64, i64* %3 ret i64 %4 @@ -89,17 +89,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul6: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a0, a0, a0 -; RV32IB-NEXT: sh1add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul6: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a0, a0, a0 +; RV32B-NEXT: sh1add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul6: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a0, a0, a0 -; RV32IBA-NEXT: sh1add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul6: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a0, a0, a0 +; RV32ZBA-NEXT: sh1add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 6 %d = add i32 %c, %b ret i32 %d @@ -113,17 +113,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; 
RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul10: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a0, a0, a0 -; RV32IB-NEXT: sh1add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul10: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a0, a0, a0 +; RV32B-NEXT: sh1add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul10: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a0, a0, a0 -; RV32IBA-NEXT: sh1add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul10: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a0, a0, a0 +; RV32ZBA-NEXT: sh1add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 10 %d = add i32 %c, %b ret i32 %d @@ -137,17 +137,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul12: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a0, a0, a0 -; RV32IB-NEXT: sh2add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul12: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a0, a0, a0 +; RV32B-NEXT: sh2add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul12: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a0, a0, a0 -; RV32IBA-NEXT: sh2add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul12: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a0, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 12 %d = add i32 %c, %b ret i32 %d @@ -161,17 +161,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul18: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: sh1add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul18: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: sh1add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul18: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: sh1add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul18: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: sh1add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 18 %d = add i32 %c, %b ret i32 %d @@ -185,17 +185,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul20: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a0, a0, a0 -; RV32IB-NEXT: sh2add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul20: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a0, a0, a0 +; RV32B-NEXT: sh2add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul20: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a0, a0, a0 -; RV32IBA-NEXT: sh2add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul20: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a0, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 20 %d = add i32 %c, %b ret i32 %d @@ -209,17 +209,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul24: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a0, a0, a0 -; RV32IB-NEXT: sh3add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul24: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a0, a0, a0 +; RV32B-NEXT: sh3add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul24: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a0, a0, a0 -; RV32IBA-NEXT: sh3add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul24: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a0, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 24 %d = add i32 %c, %b ret i32 %d @@ -233,17 +233,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul36: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: sh2add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul36: +; RV32B: # %bb.0: +; 
RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: sh2add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul36: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: sh2add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul36: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 36 %d = add i32 %c, %b ret i32 %d @@ -257,17 +257,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul40: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a0, a0, a0 -; RV32IB-NEXT: sh3add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul40: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a0, a0, a0 +; RV32B-NEXT: sh3add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul40: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a0, a0, a0 -; RV32IBA-NEXT: sh3add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul40: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a0, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 40 %d = add i32 %c, %b ret i32 %d @@ -281,17 +281,17 @@ ; RV32I-NEXT: add a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: addmul72: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: sh3add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: addmul72: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: sh3add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: addmul72: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: sh3add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: addmul72: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 72 %d = add i32 %c, %b ret i32 %d @@ -304,17 +304,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul96: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a0, a0, a0 -; RV32IB-NEXT: slli a0, a0, 5 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul96: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a0, a0, a0 +; RV32B-NEXT: slli a0, a0, 5 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul96: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a0, a0, a0 -; RV32IBA-NEXT: slli a0, a0, 5 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul96: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a0, a0, a0 +; RV32ZBA-NEXT: slli a0, a0, 5 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 96 ret i32 %c } @@ -326,17 +326,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul160: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a0, a0, a0 -; RV32IB-NEXT: slli a0, a0, 5 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul160: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a0, a0, a0 +; RV32B-NEXT: slli a0, a0, 5 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul160: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a0, a0, a0 -; RV32IBA-NEXT: slli a0, a0, 5 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul160: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a0, a0, a0 +; RV32ZBA-NEXT: slli a0, a0, 5 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 160 ret i32 %c } @@ -348,17 +348,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul288: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: slli a0, a0, 5 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul288: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: slli a0, a0, 5 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul288: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: slli a0, a0, 5 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul288: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, 
a0 +; RV32ZBA-NEXT: slli a0, a0, 5 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 288 ret i32 %c } @@ -370,17 +370,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul258: -; RV32IB: # %bb.0: -; RV32IB-NEXT: addi a1, zero, 258 -; RV32IB-NEXT: mul a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul258: +; RV32B: # %bb.0: +; RV32B-NEXT: addi a1, zero, 258 +; RV32B-NEXT: mul a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul258: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: addi a1, zero, 258 -; RV32IBA-NEXT: mul a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul258: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: addi a1, zero, 258 +; RV32ZBA-NEXT: mul a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 258 ret i32 %c } @@ -392,17 +392,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul260: -; RV32IB: # %bb.0: -; RV32IB-NEXT: addi a1, zero, 260 -; RV32IB-NEXT: mul a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul260: +; RV32B: # %bb.0: +; RV32B-NEXT: addi a1, zero, 260 +; RV32B-NEXT: mul a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul260: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: addi a1, zero, 260 -; RV32IBA-NEXT: mul a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul260: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: addi a1, zero, 260 +; RV32ZBA-NEXT: mul a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 260 ret i32 %c } @@ -414,17 +414,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul264: -; RV32IB: # %bb.0: -; RV32IB-NEXT: addi a1, zero, 264 -; RV32IB-NEXT: mul a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul264: +; RV32B: # %bb.0: +; RV32B-NEXT: addi a1, zero, 264 +; RV32B-NEXT: mul a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul264: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: addi a1, zero, 264 -; RV32IBA-NEXT: mul a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul264: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: addi a1, zero, 264 +; RV32ZBA-NEXT: mul a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 264 ret i32 %c } @@ -436,17 +436,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul11: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a1, a0, a0 -; RV32IB-NEXT: sh1add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul11: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a1, a0, a0 +; RV32B-NEXT: sh1add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul11: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a1, a0, a0 -; RV32IBA-NEXT: sh1add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul11: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a1, a0, a0 +; RV32ZBA-NEXT: sh1add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 11 ret i32 %c } @@ -458,17 +458,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul19: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a1, a0, a0 -; RV32IB-NEXT: sh1add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul19: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a1, a0, a0 +; RV32B-NEXT: sh1add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul19: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a1, a0, a0 -; RV32IBA-NEXT: sh1add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul19: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a1, a0, a0 +; RV32ZBA-NEXT: sh1add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 19 ret i32 %c } @@ -480,17 +480,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul13: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a1, a0, a0 -; RV32IB-NEXT: sh2add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul13: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a1, a0, a0 
+; RV32B-NEXT: sh2add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul13: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a1, a0, a0 -; RV32IBA-NEXT: sh2add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul13: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a1, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 13 ret i32 %c } @@ -502,17 +502,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul21: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a1, a0, a0 -; RV32IB-NEXT: sh2add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul21: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a1, a0, a0 +; RV32B-NEXT: sh2add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul21: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a1, a0, a0 -; RV32IBA-NEXT: sh2add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul21: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a1, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 21 ret i32 %c } @@ -524,17 +524,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul37: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a1, a0, a0 -; RV32IB-NEXT: sh2add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul37: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a1, a0, a0 +; RV32B-NEXT: sh2add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul37: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a1, a0, a0 -; RV32IBA-NEXT: sh2add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul37: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a1, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 37 ret i32 %c } @@ -546,17 +546,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul25: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh1add a1, a0, a0 -; RV32IB-NEXT: sh3add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul25: +; RV32B: # %bb.0: +; RV32B-NEXT: sh1add a1, a0, a0 +; RV32B-NEXT: sh3add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul25: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh1add a1, a0, a0 -; RV32IBA-NEXT: sh3add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul25: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh1add a1, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 25 ret i32 %c } @@ -568,17 +568,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul41: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh2add a1, a0, a0 -; RV32IB-NEXT: sh3add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul41: +; RV32B: # %bb.0: +; RV32B-NEXT: sh2add a1, a0, a0 +; RV32B-NEXT: sh3add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul41: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh2add a1, a0, a0 -; RV32IBA-NEXT: sh3add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul41: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh2add a1, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 41 ret i32 %c } @@ -590,17 +590,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul73: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a1, a0, a0 -; RV32IB-NEXT: sh3add a0, a1, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul73: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a1, a0, a0 +; RV32B-NEXT: sh3add a0, a1, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul73: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a1, a0, a0 -; RV32IBA-NEXT: sh3add a0, a1, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul73: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a1, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a1, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 73 ret i32 %c 
} @@ -612,17 +612,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul27: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: sh1add a0, a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul27: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: sh1add a0, a0, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul27: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: sh1add a0, a0, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul27: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: sh1add a0, a0, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 27 ret i32 %c } @@ -634,17 +634,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul45: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: sh2add a0, a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul45: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: sh2add a0, a0, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul45: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: sh2add a0, a0, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul45: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: sh2add a0, a0, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 45 ret i32 %c } @@ -656,17 +656,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul81: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: sh3add a0, a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul81: +; RV32B: # %bb.0: +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: sh3add a0, a0, a0 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul81: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: sh3add a0, a0, a0 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul81: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: sh3add a0, a0, a0 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 81 ret i32 %c } @@ -679,17 +679,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul4098: -; RV32IB: # %bb.0: -; RV32IB-NEXT: slli a1, a0, 12 -; RV32IB-NEXT: sh1add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul4098: +; RV32B: # %bb.0: +; RV32B-NEXT: slli a1, a0, 12 +; RV32B-NEXT: sh1add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul4098: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: slli a1, a0, 12 -; RV32IBA-NEXT: sh1add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul4098: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: slli a1, a0, 12 +; RV32ZBA-NEXT: sh1add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 4098 ret i32 %c } @@ -702,17 +702,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul4100: -; RV32IB: # %bb.0: -; RV32IB-NEXT: slli a1, a0, 12 -; RV32IB-NEXT: sh2add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul4100: +; RV32B: # %bb.0: +; RV32B-NEXT: slli a1, a0, 12 +; RV32B-NEXT: sh2add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: mul4100: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: slli a1, a0, 12 -; RV32IBA-NEXT: sh2add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul4100: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: slli a1, a0, 12 +; RV32ZBA-NEXT: sh2add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 4100 ret i32 %c } @@ -725,17 +725,17 @@ ; RV32I-NEXT: mul a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: mul4104: -; RV32IB: # %bb.0: -; RV32IB-NEXT: slli a1, a0, 12 -; RV32IB-NEXT: sh3add a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: mul4104: +; RV32B: # %bb.0: +; RV32B-NEXT: slli a1, a0, 12 +; RV32B-NEXT: sh3add a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBA-LABEL: 
mul4104: -; RV32IBA: # %bb.0: -; RV32IBA-NEXT: slli a1, a0, 12 -; RV32IBA-NEXT: sh3add a0, a0, a1 -; RV32IBA-NEXT: ret +; RV32ZBA-LABEL: mul4104: +; RV32ZBA: # %bb.0: +; RV32ZBA-NEXT: slli a1, a0, 12 +; RV32ZBA-NEXT: sh3add a0, a0, a1 +; RV32ZBA-NEXT: ret %c = mul i32 %a, 4104 ret i32 %c } diff --git a/llvm/test/CodeGen/RISCV/rv32zbb-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv32zbb-intrinsic.ll --- a/llvm/test/CodeGen/RISCV/rv32zbb-intrinsic.ll +++ b/llvm/test/CodeGen/RISCV/rv32zbb-intrinsic.ll @@ -1,21 +1,21 @@ ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IB +; RUN: | FileCheck %s -check-prefix=RV32B ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbb -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBB +; RUN: | FileCheck %s -check-prefix=RV32ZBB declare i32 @llvm.riscv.orc.b.i32(i32) define i32 @orcb(i32 %a) nounwind { -; RV32IB-LABEL: orcb: -; RV32IB: # %bb.0: -; RV32IB-NEXT: orc.b a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: orcb: +; RV32B: # %bb.0: +; RV32B-NEXT: orc.b a0, a0 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: orcb: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: orc.b a0, a0 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: orcb: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: orc.b a0, a0 +; RV32ZBB-NEXT: ret %tmp = call i32 @llvm.riscv.orc.b.i32(i32 %a) ret i32 %tmp } diff --git a/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll b/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll --- a/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll +++ b/llvm/test/CodeGen/RISCV/rv32zbb-zbp.ll @@ -2,11 +2,11 @@ ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV32I ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IB +; RUN: | FileCheck %s -check-prefix=RV32B ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbb -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBB +; RUN: | FileCheck %s -check-prefix=RV32ZBB ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbp -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBP +; RUN: | FileCheck %s -check-prefix=RV32ZBP define i32 @andn_i32(i32 %a, i32 %b) nounwind { ; RV32I-LABEL: andn_i32: @@ -15,20 +15,20 @@ ; RV32I-NEXT: and a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: andn_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andn a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: andn_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: andn a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: andn_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: andn a0, a0, a1 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: andn_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: andn a0, a0, a1 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: andn_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: andn a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: andn_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: andn a0, a0, a1 +; RV32ZBP-NEXT: ret %neg = xor i32 %b, -1 %and = and i32 %neg, %a ret i32 %and @@ -43,23 +43,23 @@ ; RV32I-NEXT: and a1, a3, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: andn_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andn a0, a0, a2 -; RV32IB-NEXT: andn a1, a1, a3 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: andn_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: andn a0, a0, a2 -; RV32IBB-NEXT: andn a1, a1, a3 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: andn_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: andn a0, a0, a2 -; RV32IBP-NEXT: andn a1, a1, a3 -; RV32IBP-NEXT: ret +; 
RV32B-LABEL: andn_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: andn a0, a0, a2 +; RV32B-NEXT: andn a1, a1, a3 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: andn_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: andn a0, a0, a2 +; RV32ZBB-NEXT: andn a1, a1, a3 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: andn_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: andn a0, a0, a2 +; RV32ZBP-NEXT: andn a1, a1, a3 +; RV32ZBP-NEXT: ret %neg = xor i64 %b, -1 %and = and i64 %neg, %a ret i64 %and @@ -72,20 +72,20 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: orn_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: orn a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: orn_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: orn a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: orn_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: orn a0, a0, a1 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: orn_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: orn a0, a0, a1 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: orn_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: orn a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: orn_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: orn a0, a0, a1 +; RV32ZBP-NEXT: ret %neg = xor i32 %b, -1 %or = or i32 %neg, %a ret i32 %or @@ -100,23 +100,23 @@ ; RV32I-NEXT: or a1, a3, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: orn_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: orn a0, a0, a2 -; RV32IB-NEXT: orn a1, a1, a3 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: orn_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: orn a0, a0, a2 -; RV32IBB-NEXT: orn a1, a1, a3 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: orn_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: orn a0, a0, a2 -; RV32IBP-NEXT: orn a1, a1, a3 -; RV32IBP-NEXT: ret +; RV32B-LABEL: orn_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: orn a0, a0, a2 +; RV32B-NEXT: orn a1, a1, a3 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: orn_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: orn a0, a0, a2 +; RV32ZBB-NEXT: orn a1, a1, a3 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: orn_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: orn a0, a0, a2 +; RV32ZBP-NEXT: orn a1, a1, a3 +; RV32ZBP-NEXT: ret %neg = xor i64 %b, -1 %or = or i64 %neg, %a ret i64 %or @@ -129,20 +129,20 @@ ; RV32I-NEXT: not a0, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: xnor_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xnor a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: xnor_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: xnor a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: xnor_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: xnor a0, a0, a1 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: xnor_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: xnor a0, a0, a1 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: xnor_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: xnor a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: xnor_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: xnor a0, a0, a1 +; RV32ZBP-NEXT: ret %neg = xor i32 %a, -1 %xor = xor i32 %neg, %b ret i32 %xor @@ -157,23 +157,23 @@ ; RV32I-NEXT: not a1, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: xnor_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xnor a0, a0, a2 -; RV32IB-NEXT: xnor a1, a1, a3 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: xnor_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: xnor a0, a0, a2 -; RV32IBB-NEXT: xnor a1, a1, a3 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: xnor_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: xnor a0, a0, a2 -; RV32IBP-NEXT: xnor a1, a1, a3 -; RV32IBP-NEXT: ret +; RV32B-LABEL: xnor_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: xnor a0, a0, a2 +; RV32B-NEXT: xnor a1, a1, a3 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: xnor_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: xnor a0, a0, a2 +; RV32ZBB-NEXT: xnor a1, 
a1, a3 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: xnor_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: xnor a0, a0, a2 +; RV32ZBP-NEXT: xnor a1, a1, a3 +; RV32ZBP-NEXT: ret %neg = xor i64 %a, -1 %xor = xor i64 %neg, %b ret i64 %xor @@ -190,20 +190,20 @@ ; RV32I-NEXT: or a0, a2, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: rol_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rol a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: rol_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: rol a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: rol_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: rol a0, a0, a1 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: rol_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: rol a0, a0, a1 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: rol_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rol a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: rol_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rol a0, a0, a1 +; RV32ZBP-NEXT: ret %or = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 %b) ret i32 %or } @@ -257,122 +257,122 @@ ; RV32I-NEXT: mv a0, a3 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: rol_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sll a7, a1, a2 -; RV32IB-NEXT: andi a4, a2, 63 -; RV32IB-NEXT: addi a6, zero, 31 -; RV32IB-NEXT: sub a5, a6, a4 -; RV32IB-NEXT: srli a3, a0, 1 -; RV32IB-NEXT: srl a3, a3, a5 -; RV32IB-NEXT: or a7, a7, a3 -; RV32IB-NEXT: addi t1, a4, -32 -; RV32IB-NEXT: sll a5, a0, t1 -; RV32IB-NEXT: slti a3, t1, 0 -; RV32IB-NEXT: cmov a7, a3, a7, a5 -; RV32IB-NEXT: neg a5, a2 -; RV32IB-NEXT: srl t0, a1, a5 -; RV32IB-NEXT: andi t2, a5, 63 -; RV32IB-NEXT: addi a4, t2, -32 -; RV32IB-NEXT: srai a3, a4, 31 -; RV32IB-NEXT: and a3, a3, t0 -; RV32IB-NEXT: or a7, a7, a3 -; RV32IB-NEXT: srl t0, a0, a5 -; RV32IB-NEXT: sub a5, a6, t2 -; RV32IB-NEXT: slli a3, a1, 1 -; RV32IB-NEXT: sll a3, a3, a5 -; RV32IB-NEXT: or a3, t0, a3 -; RV32IB-NEXT: srl a1, a1, a4 -; RV32IB-NEXT: slti a4, a4, 0 -; RV32IB-NEXT: cmov a1, a4, a3, a1 -; RV32IB-NEXT: sll a0, a0, a2 -; RV32IB-NEXT: srai a2, t1, 31 -; RV32IB-NEXT: and a0, a2, a0 -; RV32IB-NEXT: or a0, a0, a1 -; RV32IB-NEXT: mv a1, a7 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: rol_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: mv a7, a1 -; RV32IBB-NEXT: andi a1, a2, 63 -; RV32IBB-NEXT: addi t0, a1, -32 -; RV32IBB-NEXT: addi a6, zero, 31 -; RV32IBB-NEXT: bltz t0, .LBB7_2 -; RV32IBB-NEXT: # %bb.1: -; RV32IBB-NEXT: sll a1, a0, t0 -; RV32IBB-NEXT: j .LBB7_3 -; RV32IBB-NEXT: .LBB7_2: -; RV32IBB-NEXT: sll a3, a7, a2 -; RV32IBB-NEXT: sub a1, a6, a1 -; RV32IBB-NEXT: srli a4, a0, 1 -; RV32IBB-NEXT: srl a1, a4, a1 -; RV32IBB-NEXT: or a1, a3, a1 -; RV32IBB-NEXT: .LBB7_3: -; RV32IBB-NEXT: neg a5, a2 -; RV32IBB-NEXT: andi a3, a5, 63 -; RV32IBB-NEXT: addi a4, a3, -32 -; RV32IBB-NEXT: bltz a4, .LBB7_5 -; RV32IBB-NEXT: # %bb.4: -; RV32IBB-NEXT: srl a3, a7, a4 -; RV32IBB-NEXT: bltz t0, .LBB7_6 -; RV32IBB-NEXT: j .LBB7_7 -; RV32IBB-NEXT: .LBB7_5: -; RV32IBB-NEXT: srl a4, a7, a5 -; RV32IBB-NEXT: or a1, a1, a4 -; RV32IBB-NEXT: srl a4, a0, a5 -; RV32IBB-NEXT: sub a3, a6, a3 -; RV32IBB-NEXT: slli a5, a7, 1 -; RV32IBB-NEXT: sll a3, a5, a3 -; RV32IBB-NEXT: or a3, a4, a3 -; RV32IBB-NEXT: bgez t0, .LBB7_7 -; RV32IBB-NEXT: .LBB7_6: -; RV32IBB-NEXT: sll a0, a0, a2 -; RV32IBB-NEXT: or a3, a3, a0 -; RV32IBB-NEXT: .LBB7_7: -; RV32IBB-NEXT: mv a0, a3 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: rol_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: mv a7, a1 -; RV32IBP-NEXT: andi a1, a2, 63 -; RV32IBP-NEXT: addi t0, a1, -32 -; RV32IBP-NEXT: addi a6, zero, 31 -; RV32IBP-NEXT: bltz t0, .LBB7_2 -; RV32IBP-NEXT: # %bb.1: -; RV32IBP-NEXT: sll a1, a0, t0 -; 
RV32IBP-NEXT: j .LBB7_3 -; RV32IBP-NEXT: .LBB7_2: -; RV32IBP-NEXT: sll a3, a7, a2 -; RV32IBP-NEXT: sub a1, a6, a1 -; RV32IBP-NEXT: srli a4, a0, 1 -; RV32IBP-NEXT: srl a1, a4, a1 -; RV32IBP-NEXT: or a1, a3, a1 -; RV32IBP-NEXT: .LBB7_3: -; RV32IBP-NEXT: neg a5, a2 -; RV32IBP-NEXT: andi a3, a5, 63 -; RV32IBP-NEXT: addi a4, a3, -32 -; RV32IBP-NEXT: bltz a4, .LBB7_5 -; RV32IBP-NEXT: # %bb.4: -; RV32IBP-NEXT: srl a3, a7, a4 -; RV32IBP-NEXT: bltz t0, .LBB7_6 -; RV32IBP-NEXT: j .LBB7_7 -; RV32IBP-NEXT: .LBB7_5: -; RV32IBP-NEXT: srl a4, a7, a5 -; RV32IBP-NEXT: or a1, a1, a4 -; RV32IBP-NEXT: srl a4, a0, a5 -; RV32IBP-NEXT: sub a3, a6, a3 -; RV32IBP-NEXT: slli a5, a7, 1 -; RV32IBP-NEXT: sll a3, a5, a3 -; RV32IBP-NEXT: or a3, a4, a3 -; RV32IBP-NEXT: bgez t0, .LBB7_7 -; RV32IBP-NEXT: .LBB7_6: -; RV32IBP-NEXT: sll a0, a0, a2 -; RV32IBP-NEXT: or a3, a3, a0 -; RV32IBP-NEXT: .LBB7_7: -; RV32IBP-NEXT: mv a0, a3 -; RV32IBP-NEXT: ret +; RV32B-LABEL: rol_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: sll a7, a1, a2 +; RV32B-NEXT: andi a4, a2, 63 +; RV32B-NEXT: addi a6, zero, 31 +; RV32B-NEXT: sub a5, a6, a4 +; RV32B-NEXT: srli a3, a0, 1 +; RV32B-NEXT: srl a3, a3, a5 +; RV32B-NEXT: or a7, a7, a3 +; RV32B-NEXT: addi t1, a4, -32 +; RV32B-NEXT: sll a5, a0, t1 +; RV32B-NEXT: slti a3, t1, 0 +; RV32B-NEXT: cmov a7, a3, a7, a5 +; RV32B-NEXT: neg a5, a2 +; RV32B-NEXT: srl t0, a1, a5 +; RV32B-NEXT: andi t2, a5, 63 +; RV32B-NEXT: addi a4, t2, -32 +; RV32B-NEXT: srai a3, a4, 31 +; RV32B-NEXT: and a3, a3, t0 +; RV32B-NEXT: or a7, a7, a3 +; RV32B-NEXT: srl t0, a0, a5 +; RV32B-NEXT: sub a5, a6, t2 +; RV32B-NEXT: slli a3, a1, 1 +; RV32B-NEXT: sll a3, a3, a5 +; RV32B-NEXT: or a3, t0, a3 +; RV32B-NEXT: srl a1, a1, a4 +; RV32B-NEXT: slti a4, a4, 0 +; RV32B-NEXT: cmov a1, a4, a3, a1 +; RV32B-NEXT: sll a0, a0, a2 +; RV32B-NEXT: srai a2, t1, 31 +; RV32B-NEXT: and a0, a2, a0 +; RV32B-NEXT: or a0, a0, a1 +; RV32B-NEXT: mv a1, a7 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: rol_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: mv a7, a1 +; RV32ZBB-NEXT: andi a1, a2, 63 +; RV32ZBB-NEXT: addi t0, a1, -32 +; RV32ZBB-NEXT: addi a6, zero, 31 +; RV32ZBB-NEXT: bltz t0, .LBB7_2 +; RV32ZBB-NEXT: # %bb.1: +; RV32ZBB-NEXT: sll a1, a0, t0 +; RV32ZBB-NEXT: j .LBB7_3 +; RV32ZBB-NEXT: .LBB7_2: +; RV32ZBB-NEXT: sll a3, a7, a2 +; RV32ZBB-NEXT: sub a1, a6, a1 +; RV32ZBB-NEXT: srli a4, a0, 1 +; RV32ZBB-NEXT: srl a1, a4, a1 +; RV32ZBB-NEXT: or a1, a3, a1 +; RV32ZBB-NEXT: .LBB7_3: +; RV32ZBB-NEXT: neg a5, a2 +; RV32ZBB-NEXT: andi a3, a5, 63 +; RV32ZBB-NEXT: addi a4, a3, -32 +; RV32ZBB-NEXT: bltz a4, .LBB7_5 +; RV32ZBB-NEXT: # %bb.4: +; RV32ZBB-NEXT: srl a3, a7, a4 +; RV32ZBB-NEXT: bltz t0, .LBB7_6 +; RV32ZBB-NEXT: j .LBB7_7 +; RV32ZBB-NEXT: .LBB7_5: +; RV32ZBB-NEXT: srl a4, a7, a5 +; RV32ZBB-NEXT: or a1, a1, a4 +; RV32ZBB-NEXT: srl a4, a0, a5 +; RV32ZBB-NEXT: sub a3, a6, a3 +; RV32ZBB-NEXT: slli a5, a7, 1 +; RV32ZBB-NEXT: sll a3, a5, a3 +; RV32ZBB-NEXT: or a3, a4, a3 +; RV32ZBB-NEXT: bgez t0, .LBB7_7 +; RV32ZBB-NEXT: .LBB7_6: +; RV32ZBB-NEXT: sll a0, a0, a2 +; RV32ZBB-NEXT: or a3, a3, a0 +; RV32ZBB-NEXT: .LBB7_7: +; RV32ZBB-NEXT: mv a0, a3 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: rol_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: mv a7, a1 +; RV32ZBP-NEXT: andi a1, a2, 63 +; RV32ZBP-NEXT: addi t0, a1, -32 +; RV32ZBP-NEXT: addi a6, zero, 31 +; RV32ZBP-NEXT: bltz t0, .LBB7_2 +; RV32ZBP-NEXT: # %bb.1: +; RV32ZBP-NEXT: sll a1, a0, t0 +; RV32ZBP-NEXT: j .LBB7_3 +; RV32ZBP-NEXT: .LBB7_2: +; RV32ZBP-NEXT: sll a3, a7, a2 +; RV32ZBP-NEXT: sub a1, a6, a1 +; RV32ZBP-NEXT: srli a4, a0, 1 
+; RV32ZBP-NEXT: srl a1, a4, a1 +; RV32ZBP-NEXT: or a1, a3, a1 +; RV32ZBP-NEXT: .LBB7_3: +; RV32ZBP-NEXT: neg a5, a2 +; RV32ZBP-NEXT: andi a3, a5, 63 +; RV32ZBP-NEXT: addi a4, a3, -32 +; RV32ZBP-NEXT: bltz a4, .LBB7_5 +; RV32ZBP-NEXT: # %bb.4: +; RV32ZBP-NEXT: srl a3, a7, a4 +; RV32ZBP-NEXT: bltz t0, .LBB7_6 +; RV32ZBP-NEXT: j .LBB7_7 +; RV32ZBP-NEXT: .LBB7_5: +; RV32ZBP-NEXT: srl a4, a7, a5 +; RV32ZBP-NEXT: or a1, a1, a4 +; RV32ZBP-NEXT: srl a4, a0, a5 +; RV32ZBP-NEXT: sub a3, a6, a3 +; RV32ZBP-NEXT: slli a5, a7, 1 +; RV32ZBP-NEXT: sll a3, a5, a3 +; RV32ZBP-NEXT: or a3, a4, a3 +; RV32ZBP-NEXT: bgez t0, .LBB7_7 +; RV32ZBP-NEXT: .LBB7_6: +; RV32ZBP-NEXT: sll a0, a0, a2 +; RV32ZBP-NEXT: or a3, a3, a0 +; RV32ZBP-NEXT: .LBB7_7: +; RV32ZBP-NEXT: mv a0, a3 +; RV32ZBP-NEXT: ret %or = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 %b) ret i64 %or } @@ -388,20 +388,20 @@ ; RV32I-NEXT: or a0, a2, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: ror_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ror a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: ror_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: ror a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: ror_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: ror a0, a0, a1 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: ror_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: ror a0, a0, a1 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: ror_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: ror a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: ror_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: ror a0, a0, a1 +; RV32ZBP-NEXT: ret %or = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 %b) ret i32 %or } @@ -455,122 +455,122 @@ ; RV32I-NEXT: mv a1, a3 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: ror_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: srl a7, a0, a2 -; RV32IB-NEXT: andi a4, a2, 63 -; RV32IB-NEXT: addi a6, zero, 31 -; RV32IB-NEXT: sub a5, a6, a4 -; RV32IB-NEXT: slli a3, a1, 1 -; RV32IB-NEXT: sll a3, a3, a5 -; RV32IB-NEXT: or a7, a7, a3 -; RV32IB-NEXT: addi t1, a4, -32 -; RV32IB-NEXT: srl a5, a1, t1 -; RV32IB-NEXT: slti a3, t1, 0 -; RV32IB-NEXT: cmov a7, a3, a7, a5 -; RV32IB-NEXT: neg a5, a2 -; RV32IB-NEXT: sll t0, a0, a5 -; RV32IB-NEXT: andi t2, a5, 63 -; RV32IB-NEXT: addi a4, t2, -32 -; RV32IB-NEXT: srai a3, a4, 31 -; RV32IB-NEXT: and a3, a3, t0 -; RV32IB-NEXT: or a7, a7, a3 -; RV32IB-NEXT: sll t0, a1, a5 -; RV32IB-NEXT: sub a5, a6, t2 -; RV32IB-NEXT: srli a3, a0, 1 -; RV32IB-NEXT: srl a3, a3, a5 -; RV32IB-NEXT: or a3, t0, a3 -; RV32IB-NEXT: sll a0, a0, a4 -; RV32IB-NEXT: slti a4, a4, 0 -; RV32IB-NEXT: cmov a0, a4, a3, a0 -; RV32IB-NEXT: srl a1, a1, a2 -; RV32IB-NEXT: srai a2, t1, 31 -; RV32IB-NEXT: and a1, a2, a1 -; RV32IB-NEXT: or a1, a1, a0 -; RV32IB-NEXT: mv a0, a7 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: ror_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: mv t0, a0 -; RV32IBB-NEXT: andi a0, a2, 63 -; RV32IBB-NEXT: addi a7, a0, -32 -; RV32IBB-NEXT: addi a6, zero, 31 -; RV32IBB-NEXT: bltz a7, .LBB9_2 -; RV32IBB-NEXT: # %bb.1: -; RV32IBB-NEXT: srl a0, a1, a7 -; RV32IBB-NEXT: j .LBB9_3 -; RV32IBB-NEXT: .LBB9_2: -; RV32IBB-NEXT: srl a3, t0, a2 -; RV32IBB-NEXT: sub a0, a6, a0 -; RV32IBB-NEXT: slli a4, a1, 1 -; RV32IBB-NEXT: sll a0, a4, a0 -; RV32IBB-NEXT: or a0, a3, a0 -; RV32IBB-NEXT: .LBB9_3: -; RV32IBB-NEXT: neg a5, a2 -; RV32IBB-NEXT: andi a4, a5, 63 -; RV32IBB-NEXT: addi a3, a4, -32 -; RV32IBB-NEXT: bltz a3, .LBB9_5 -; RV32IBB-NEXT: # %bb.4: -; RV32IBB-NEXT: sll a3, t0, a3 -; RV32IBB-NEXT: bltz a7, .LBB9_6 -; RV32IBB-NEXT: j .LBB9_7 -; RV32IBB-NEXT: .LBB9_5: -; RV32IBB-NEXT: sll a3, t0, a5 -; RV32IBB-NEXT: or a0, a0, a3 -; 
RV32IBB-NEXT: sll a3, a1, a5 -; RV32IBB-NEXT: sub a4, a6, a4 -; RV32IBB-NEXT: srli a5, t0, 1 -; RV32IBB-NEXT: srl a4, a5, a4 -; RV32IBB-NEXT: or a3, a3, a4 -; RV32IBB-NEXT: bgez a7, .LBB9_7 -; RV32IBB-NEXT: .LBB9_6: -; RV32IBB-NEXT: srl a1, a1, a2 -; RV32IBB-NEXT: or a3, a3, a1 -; RV32IBB-NEXT: .LBB9_7: -; RV32IBB-NEXT: mv a1, a3 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: ror_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: mv t0, a0 -; RV32IBP-NEXT: andi a0, a2, 63 -; RV32IBP-NEXT: addi a7, a0, -32 -; RV32IBP-NEXT: addi a6, zero, 31 -; RV32IBP-NEXT: bltz a7, .LBB9_2 -; RV32IBP-NEXT: # %bb.1: -; RV32IBP-NEXT: srl a0, a1, a7 -; RV32IBP-NEXT: j .LBB9_3 -; RV32IBP-NEXT: .LBB9_2: -; RV32IBP-NEXT: srl a3, t0, a2 -; RV32IBP-NEXT: sub a0, a6, a0 -; RV32IBP-NEXT: slli a4, a1, 1 -; RV32IBP-NEXT: sll a0, a4, a0 -; RV32IBP-NEXT: or a0, a3, a0 -; RV32IBP-NEXT: .LBB9_3: -; RV32IBP-NEXT: neg a5, a2 -; RV32IBP-NEXT: andi a4, a5, 63 -; RV32IBP-NEXT: addi a3, a4, -32 -; RV32IBP-NEXT: bltz a3, .LBB9_5 -; RV32IBP-NEXT: # %bb.4: -; RV32IBP-NEXT: sll a3, t0, a3 -; RV32IBP-NEXT: bltz a7, .LBB9_6 -; RV32IBP-NEXT: j .LBB9_7 -; RV32IBP-NEXT: .LBB9_5: -; RV32IBP-NEXT: sll a3, t0, a5 -; RV32IBP-NEXT: or a0, a0, a3 -; RV32IBP-NEXT: sll a3, a1, a5 -; RV32IBP-NEXT: sub a4, a6, a4 -; RV32IBP-NEXT: srli a5, t0, 1 -; RV32IBP-NEXT: srl a4, a5, a4 -; RV32IBP-NEXT: or a3, a3, a4 -; RV32IBP-NEXT: bgez a7, .LBB9_7 -; RV32IBP-NEXT: .LBB9_6: -; RV32IBP-NEXT: srl a1, a1, a2 -; RV32IBP-NEXT: or a3, a3, a1 -; RV32IBP-NEXT: .LBB9_7: -; RV32IBP-NEXT: mv a1, a3 -; RV32IBP-NEXT: ret +; RV32B-LABEL: ror_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: srl a7, a0, a2 +; RV32B-NEXT: andi a4, a2, 63 +; RV32B-NEXT: addi a6, zero, 31 +; RV32B-NEXT: sub a5, a6, a4 +; RV32B-NEXT: slli a3, a1, 1 +; RV32B-NEXT: sll a3, a3, a5 +; RV32B-NEXT: or a7, a7, a3 +; RV32B-NEXT: addi t1, a4, -32 +; RV32B-NEXT: srl a5, a1, t1 +; RV32B-NEXT: slti a3, t1, 0 +; RV32B-NEXT: cmov a7, a3, a7, a5 +; RV32B-NEXT: neg a5, a2 +; RV32B-NEXT: sll t0, a0, a5 +; RV32B-NEXT: andi t2, a5, 63 +; RV32B-NEXT: addi a4, t2, -32 +; RV32B-NEXT: srai a3, a4, 31 +; RV32B-NEXT: and a3, a3, t0 +; RV32B-NEXT: or a7, a7, a3 +; RV32B-NEXT: sll t0, a1, a5 +; RV32B-NEXT: sub a5, a6, t2 +; RV32B-NEXT: srli a3, a0, 1 +; RV32B-NEXT: srl a3, a3, a5 +; RV32B-NEXT: or a3, t0, a3 +; RV32B-NEXT: sll a0, a0, a4 +; RV32B-NEXT: slti a4, a4, 0 +; RV32B-NEXT: cmov a0, a4, a3, a0 +; RV32B-NEXT: srl a1, a1, a2 +; RV32B-NEXT: srai a2, t1, 31 +; RV32B-NEXT: and a1, a2, a1 +; RV32B-NEXT: or a1, a1, a0 +; RV32B-NEXT: mv a0, a7 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: ror_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: mv t0, a0 +; RV32ZBB-NEXT: andi a0, a2, 63 +; RV32ZBB-NEXT: addi a7, a0, -32 +; RV32ZBB-NEXT: addi a6, zero, 31 +; RV32ZBB-NEXT: bltz a7, .LBB9_2 +; RV32ZBB-NEXT: # %bb.1: +; RV32ZBB-NEXT: srl a0, a1, a7 +; RV32ZBB-NEXT: j .LBB9_3 +; RV32ZBB-NEXT: .LBB9_2: +; RV32ZBB-NEXT: srl a3, t0, a2 +; RV32ZBB-NEXT: sub a0, a6, a0 +; RV32ZBB-NEXT: slli a4, a1, 1 +; RV32ZBB-NEXT: sll a0, a4, a0 +; RV32ZBB-NEXT: or a0, a3, a0 +; RV32ZBB-NEXT: .LBB9_3: +; RV32ZBB-NEXT: neg a5, a2 +; RV32ZBB-NEXT: andi a4, a5, 63 +; RV32ZBB-NEXT: addi a3, a4, -32 +; RV32ZBB-NEXT: bltz a3, .LBB9_5 +; RV32ZBB-NEXT: # %bb.4: +; RV32ZBB-NEXT: sll a3, t0, a3 +; RV32ZBB-NEXT: bltz a7, .LBB9_6 +; RV32ZBB-NEXT: j .LBB9_7 +; RV32ZBB-NEXT: .LBB9_5: +; RV32ZBB-NEXT: sll a3, t0, a5 +; RV32ZBB-NEXT: or a0, a0, a3 +; RV32ZBB-NEXT: sll a3, a1, a5 +; RV32ZBB-NEXT: sub a4, a6, a4 +; RV32ZBB-NEXT: srli a5, t0, 1 +; RV32ZBB-NEXT: srl a4, a5, a4 +; RV32ZBB-NEXT: or 
a3, a3, a4 +; RV32ZBB-NEXT: bgez a7, .LBB9_7 +; RV32ZBB-NEXT: .LBB9_6: +; RV32ZBB-NEXT: srl a1, a1, a2 +; RV32ZBB-NEXT: or a3, a3, a1 +; RV32ZBB-NEXT: .LBB9_7: +; RV32ZBB-NEXT: mv a1, a3 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: ror_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: mv t0, a0 +; RV32ZBP-NEXT: andi a0, a2, 63 +; RV32ZBP-NEXT: addi a7, a0, -32 +; RV32ZBP-NEXT: addi a6, zero, 31 +; RV32ZBP-NEXT: bltz a7, .LBB9_2 +; RV32ZBP-NEXT: # %bb.1: +; RV32ZBP-NEXT: srl a0, a1, a7 +; RV32ZBP-NEXT: j .LBB9_3 +; RV32ZBP-NEXT: .LBB9_2: +; RV32ZBP-NEXT: srl a3, t0, a2 +; RV32ZBP-NEXT: sub a0, a6, a0 +; RV32ZBP-NEXT: slli a4, a1, 1 +; RV32ZBP-NEXT: sll a0, a4, a0 +; RV32ZBP-NEXT: or a0, a3, a0 +; RV32ZBP-NEXT: .LBB9_3: +; RV32ZBP-NEXT: neg a5, a2 +; RV32ZBP-NEXT: andi a4, a5, 63 +; RV32ZBP-NEXT: addi a3, a4, -32 +; RV32ZBP-NEXT: bltz a3, .LBB9_5 +; RV32ZBP-NEXT: # %bb.4: +; RV32ZBP-NEXT: sll a3, t0, a3 +; RV32ZBP-NEXT: bltz a7, .LBB9_6 +; RV32ZBP-NEXT: j .LBB9_7 +; RV32ZBP-NEXT: .LBB9_5: +; RV32ZBP-NEXT: sll a3, t0, a5 +; RV32ZBP-NEXT: or a0, a0, a3 +; RV32ZBP-NEXT: sll a3, a1, a5 +; RV32ZBP-NEXT: sub a4, a6, a4 +; RV32ZBP-NEXT: srli a5, t0, 1 +; RV32ZBP-NEXT: srl a4, a5, a4 +; RV32ZBP-NEXT: or a3, a3, a4 +; RV32ZBP-NEXT: bgez a7, .LBB9_7 +; RV32ZBP-NEXT: .LBB9_6: +; RV32ZBP-NEXT: srl a1, a1, a2 +; RV32ZBP-NEXT: or a3, a3, a1 +; RV32ZBP-NEXT: .LBB9_7: +; RV32ZBP-NEXT: mv a1, a3 +; RV32ZBP-NEXT: ret %or = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 %b) ret i64 %or } @@ -583,20 +583,20 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: rori_i32_fshl: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rori a0, a0, 1 -; RV32IB-NEXT: ret +; RV32B-LABEL: rori_i32_fshl: +; RV32B: # %bb.0: +; RV32B-NEXT: rori a0, a0, 1 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: rori_i32_fshl: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: rori a0, a0, 1 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: rori_i32_fshl: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: rori a0, a0, 1 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: rori_i32_fshl: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rori a0, a0, 1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: rori_i32_fshl: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rori a0, a0, 1 +; RV32ZBP-NEXT: ret %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 31) ret i32 %1 } @@ -609,20 +609,20 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: rori_i32_fshr: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rori a0, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: rori_i32_fshr: +; RV32B: # %bb.0: +; RV32B-NEXT: rori a0, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: rori_i32_fshr: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: rori a0, a0, 31 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: rori_i32_fshr: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: rori a0, a0, 31 +; RV32ZBB-NEXT: ret ; -; RV32IBP-LABEL: rori_i32_fshr: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rori a0, a0, 31 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: rori_i32_fshr: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rori a0, a0, 31 +; RV32ZBP-NEXT: ret %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 31) ret i32 %1 } @@ -639,34 +639,34 @@ ; RV32I-NEXT: mv a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: rori_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: fsri a2, a0, a1, 1 -; RV32IB-NEXT: fsri a1, a1, a0, 1 -; RV32IB-NEXT: mv a0, a2 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: rori_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: slli a2, a1, 31 -; RV32IBB-NEXT: srli a3, a0, 1 -; RV32IBB-NEXT: or a2, a3, a2 -; RV32IBB-NEXT: srli a1, a1, 1 -; RV32IBB-NEXT: slli a0, a0, 31 -; RV32IBB-NEXT: or a1, a0, a1 -; RV32IBB-NEXT: mv a0, a2 -; 
RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: rori_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: slli a2, a1, 31 -; RV32IBP-NEXT: srli a3, a0, 1 -; RV32IBP-NEXT: or a2, a3, a2 -; RV32IBP-NEXT: srli a1, a1, 1 -; RV32IBP-NEXT: slli a0, a0, 31 -; RV32IBP-NEXT: or a1, a0, a1 -; RV32IBP-NEXT: mv a0, a2 -; RV32IBP-NEXT: ret +; RV32B-LABEL: rori_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: fsri a2, a0, a1, 1 +; RV32B-NEXT: fsri a1, a1, a0, 1 +; RV32B-NEXT: mv a0, a2 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: rori_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: slli a2, a1, 31 +; RV32ZBB-NEXT: srli a3, a0, 1 +; RV32ZBB-NEXT: or a2, a3, a2 +; RV32ZBB-NEXT: srli a1, a1, 1 +; RV32ZBB-NEXT: slli a0, a0, 31 +; RV32ZBB-NEXT: or a1, a0, a1 +; RV32ZBB-NEXT: mv a0, a2 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: rori_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: slli a2, a1, 31 +; RV32ZBP-NEXT: srli a3, a0, 1 +; RV32ZBP-NEXT: or a2, a3, a2 +; RV32ZBP-NEXT: srli a1, a1, 1 +; RV32ZBP-NEXT: slli a0, a0, 31 +; RV32ZBP-NEXT: or a1, a0, a1 +; RV32ZBP-NEXT: mv a0, a2 +; RV32ZBP-NEXT: ret %1 = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 63) ret i64 %1 } @@ -683,34 +683,34 @@ ; RV32I-NEXT: mv a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: rori_i64_fshr: -; RV32IB: # %bb.0: -; RV32IB-NEXT: fsri a2, a1, a0, 31 -; RV32IB-NEXT: fsri a1, a0, a1, 31 -; RV32IB-NEXT: mv a0, a2 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: rori_i64_fshr: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: slli a2, a0, 1 -; RV32IBB-NEXT: srli a3, a1, 31 -; RV32IBB-NEXT: or a2, a3, a2 -; RV32IBB-NEXT: srli a0, a0, 31 -; RV32IBB-NEXT: slli a1, a1, 1 -; RV32IBB-NEXT: or a1, a1, a0 -; RV32IBB-NEXT: mv a0, a2 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: rori_i64_fshr: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: slli a2, a0, 1 -; RV32IBP-NEXT: srli a3, a1, 31 -; RV32IBP-NEXT: or a2, a3, a2 -; RV32IBP-NEXT: srli a0, a0, 31 -; RV32IBP-NEXT: slli a1, a1, 1 -; RV32IBP-NEXT: or a1, a1, a0 -; RV32IBP-NEXT: mv a0, a2 -; RV32IBP-NEXT: ret +; RV32B-LABEL: rori_i64_fshr: +; RV32B: # %bb.0: +; RV32B-NEXT: fsri a2, a1, a0, 31 +; RV32B-NEXT: fsri a1, a0, a1, 31 +; RV32B-NEXT: mv a0, a2 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: rori_i64_fshr: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: slli a2, a0, 1 +; RV32ZBB-NEXT: srli a3, a1, 31 +; RV32ZBB-NEXT: or a2, a3, a2 +; RV32ZBB-NEXT: srli a0, a0, 31 +; RV32ZBB-NEXT: slli a1, a1, 1 +; RV32ZBB-NEXT: or a1, a1, a0 +; RV32ZBB-NEXT: mv a0, a2 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: rori_i64_fshr: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: slli a2, a0, 1 +; RV32ZBP-NEXT: srli a3, a1, 31 +; RV32ZBP-NEXT: or a2, a3, a2 +; RV32ZBP-NEXT: srli a0, a0, 31 +; RV32ZBP-NEXT: slli a1, a1, 1 +; RV32ZBP-NEXT: or a1, a1, a0 +; RV32ZBP-NEXT: mv a0, a2 +; RV32ZBP-NEXT: ret %1 = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 63) ret i64 %1 } @@ -722,23 +722,23 @@ ; RV32I-NEXT: srli a0, a0, 6 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: srli_i8: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andi a0, a0, 192 -; RV32IB-NEXT: srli a0, a0, 6 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: srli_i8: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: andi a0, a0, 192 -; RV32IBB-NEXT: srli a0, a0, 6 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: srli_i8: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: andi a0, a0, 192 -; RV32IBP-NEXT: srli a0, a0, 6 -; RV32IBP-NEXT: ret +; RV32B-LABEL: srli_i8: +; RV32B: # %bb.0: +; RV32B-NEXT: andi a0, a0, 192 +; RV32B-NEXT: srli a0, a0, 6 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: srli_i8: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: andi a0, a0, 192 +; RV32ZBB-NEXT: srli a0, a0, 6 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: srli_i8: +; 
RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: andi a0, a0, 192 +; RV32ZBP-NEXT: srli a0, a0, 6 +; RV32ZBP-NEXT: ret %1 = lshr i8 %a, 6 ret i8 %1 } @@ -750,23 +750,23 @@ ; RV32I-NEXT: srai a0, a0, 29 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: srai_i8: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sext.b a0, a0 -; RV32IB-NEXT: srai a0, a0, 5 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: srai_i8: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: sext.b a0, a0 -; RV32IBB-NEXT: srai a0, a0, 5 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: srai_i8: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: slli a0, a0, 24 -; RV32IBP-NEXT: srai a0, a0, 29 -; RV32IBP-NEXT: ret +; RV32B-LABEL: srai_i8: +; RV32B: # %bb.0: +; RV32B-NEXT: sext.b a0, a0 +; RV32B-NEXT: srai a0, a0, 5 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: srai_i8: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: sext.b a0, a0 +; RV32ZBB-NEXT: srai a0, a0, 5 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: srai_i8: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: slli a0, a0, 24 +; RV32ZBP-NEXT: srai a0, a0, 29 +; RV32ZBP-NEXT: ret %1 = ashr i8 %a, 5 ret i8 %1 } @@ -778,23 +778,23 @@ ; RV32I-NEXT: srli a0, a0, 22 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: srli_i16: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zext.h a0, a0 -; RV32IB-NEXT: srli a0, a0, 6 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: srli_i16: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: zext.h a0, a0 -; RV32IBB-NEXT: srli a0, a0, 6 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: srli_i16: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zext.h a0, a0 -; RV32IBP-NEXT: srli a0, a0, 6 -; RV32IBP-NEXT: ret +; RV32B-LABEL: srli_i16: +; RV32B: # %bb.0: +; RV32B-NEXT: zext.h a0, a0 +; RV32B-NEXT: srli a0, a0, 6 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: srli_i16: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: zext.h a0, a0 +; RV32ZBB-NEXT: srli a0, a0, 6 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: srli_i16: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zext.h a0, a0 +; RV32ZBP-NEXT: srli a0, a0, 6 +; RV32ZBP-NEXT: ret %1 = lshr i16 %a, 6 ret i16 %1 } @@ -806,23 +806,23 @@ ; RV32I-NEXT: srai a0, a0, 25 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: srai_i16: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sext.h a0, a0 -; RV32IB-NEXT: srai a0, a0, 9 -; RV32IB-NEXT: ret -; -; RV32IBB-LABEL: srai_i16: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: sext.h a0, a0 -; RV32IBB-NEXT: srai a0, a0, 9 -; RV32IBB-NEXT: ret -; -; RV32IBP-LABEL: srai_i16: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: slli a0, a0, 16 -; RV32IBP-NEXT: srai a0, a0, 25 -; RV32IBP-NEXT: ret +; RV32B-LABEL: srai_i16: +; RV32B: # %bb.0: +; RV32B-NEXT: sext.h a0, a0 +; RV32B-NEXT: srai a0, a0, 9 +; RV32B-NEXT: ret +; +; RV32ZBB-LABEL: srai_i16: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: sext.h a0, a0 +; RV32ZBB-NEXT: srai a0, a0, 9 +; RV32ZBB-NEXT: ret +; +; RV32ZBP-LABEL: srai_i16: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: slli a0, a0, 16 +; RV32ZBP-NEXT: srai a0, a0, 25 +; RV32ZBP-NEXT: ret %1 = ashr i16 %a, 9 ret i16 %1 } diff --git a/llvm/test/CodeGen/RISCV/rv32zbb.ll b/llvm/test/CodeGen/RISCV/rv32zbb.ll --- a/llvm/test/CodeGen/RISCV/rv32zbb.ll +++ b/llvm/test/CodeGen/RISCV/rv32zbb.ll @@ -2,9 +2,9 @@ ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV32I ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IB +; RUN: | FileCheck %s -check-prefix=RV32B ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbb -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBB +; RUN: | FileCheck %s -check-prefix=RV32ZBB declare i32 @llvm.ctlz.i32(i32, i1) @@ -54,15 +54,15 @@ ; RV32I-NEXT: addi sp, sp, 16 ; 
RV32I-NEXT: ret ; -; RV32IB-LABEL: ctlz_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: clz a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: ctlz_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: clz a0, a0 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: ctlz_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: clz a0, a0 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: ctlz_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: clz a0, a0 +; RV32ZBB-NEXT: ret %1 = call i32 @llvm.ctlz.i32(i32 %a, i1 false) ret i32 %1 } @@ -158,27 +158,27 @@ ; RV32I-NEXT: addi sp, sp, 32 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: ctlz_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: clz a2, a1 -; RV32IB-NEXT: clz a0, a0 -; RV32IB-NEXT: addi a0, a0, 32 -; RV32IB-NEXT: cmov a0, a1, a2, a0 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret +; RV32B-LABEL: ctlz_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: clz a2, a1 +; RV32B-NEXT: clz a0, a0 +; RV32B-NEXT: addi a0, a0, 32 +; RV32B-NEXT: cmov a0, a1, a2, a0 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: ctlz_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: bnez a1, .LBB1_2 -; RV32IBB-NEXT: # %bb.1: -; RV32IBB-NEXT: clz a0, a0 -; RV32IBB-NEXT: addi a0, a0, 32 -; RV32IBB-NEXT: mv a1, zero -; RV32IBB-NEXT: ret -; RV32IBB-NEXT: .LBB1_2: -; RV32IBB-NEXT: clz a0, a1 -; RV32IBB-NEXT: mv a1, zero -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: ctlz_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: bnez a1, .LBB1_2 +; RV32ZBB-NEXT: # %bb.1: +; RV32ZBB-NEXT: clz a0, a0 +; RV32ZBB-NEXT: addi a0, a0, 32 +; RV32ZBB-NEXT: mv a1, zero +; RV32ZBB-NEXT: ret +; RV32ZBB-NEXT: .LBB1_2: +; RV32ZBB-NEXT: clz a0, a1 +; RV32ZBB-NEXT: mv a1, zero +; RV32ZBB-NEXT: ret %1 = call i64 @llvm.ctlz.i64(i64 %a, i1 false) ret i64 %1 } @@ -223,15 +223,15 @@ ; RV32I-NEXT: addi sp, sp, 16 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cttz_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ctz a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: cttz_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: ctz a0, a0 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: cttz_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: ctz a0, a0 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: cttz_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: ctz a0, a0 +; RV32ZBB-NEXT: ret %1 = call i32 @llvm.cttz.i32(i32 %a, i1 false) ret i32 %1 } @@ -311,27 +311,27 @@ ; RV32I-NEXT: addi sp, sp, 32 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cttz_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ctz a2, a0 -; RV32IB-NEXT: ctz a1, a1 -; RV32IB-NEXT: addi a1, a1, 32 -; RV32IB-NEXT: cmov a0, a0, a2, a1 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret +; RV32B-LABEL: cttz_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: ctz a2, a0 +; RV32B-NEXT: ctz a1, a1 +; RV32B-NEXT: addi a1, a1, 32 +; RV32B-NEXT: cmov a0, a0, a2, a1 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: cttz_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: bnez a0, .LBB3_2 -; RV32IBB-NEXT: # %bb.1: -; RV32IBB-NEXT: ctz a0, a1 -; RV32IBB-NEXT: addi a0, a0, 32 -; RV32IBB-NEXT: mv a1, zero -; RV32IBB-NEXT: ret -; RV32IBB-NEXT: .LBB3_2: -; RV32IBB-NEXT: ctz a0, a0 -; RV32IBB-NEXT: mv a1, zero -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: cttz_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: bnez a0, .LBB3_2 +; RV32ZBB-NEXT: # %bb.1: +; RV32ZBB-NEXT: ctz a0, a1 +; RV32ZBB-NEXT: addi a0, a0, 32 +; RV32ZBB-NEXT: mv a1, zero +; RV32ZBB-NEXT: ret +; RV32ZBB-NEXT: .LBB3_2: +; RV32ZBB-NEXT: ctz a0, a0 +; RV32ZBB-NEXT: mv a1, zero +; RV32ZBB-NEXT: ret %1 = call i64 @llvm.cttz.i64(i64 %a, i1 false) ret i64 %1 } @@ -367,15 +367,15 @@ ; RV32I-NEXT: addi sp, sp, 16 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: ctpop_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: cpop a0, a0 -; RV32IB-NEXT: 
ret +; RV32B-LABEL: ctpop_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: cpop a0, a0 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: ctpop_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: cpop a0, a0 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: ctpop_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: cpop a0, a0 +; RV32ZBB-NEXT: ret %1 = call i32 @llvm.ctpop.i32(i32 %a) ret i32 %1 } @@ -440,21 +440,21 @@ ; RV32I-NEXT: addi sp, sp, 32 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: ctpop_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: cpop a1, a1 -; RV32IB-NEXT: cpop a0, a0 -; RV32IB-NEXT: add a0, a0, a1 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret +; RV32B-LABEL: ctpop_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: cpop a1, a1 +; RV32B-NEXT: cpop a0, a0 +; RV32B-NEXT: add a0, a0, a1 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: ctpop_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: cpop a1, a1 -; RV32IBB-NEXT: cpop a0, a0 -; RV32IBB-NEXT: add a0, a0, a1 -; RV32IBB-NEXT: mv a1, zero -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: ctpop_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: cpop a1, a1 +; RV32ZBB-NEXT: cpop a0, a0 +; RV32ZBB-NEXT: add a0, a0, a1 +; RV32ZBB-NEXT: mv a1, zero +; RV32ZBB-NEXT: ret %1 = call i64 @llvm.ctpop.i64(i64 %a) ret i64 %1 } @@ -466,15 +466,15 @@ ; RV32I-NEXT: srai a0, a0, 24 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sextb_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sext.b a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: sextb_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: sext.b a0, a0 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: sextb_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: sext.b a0, a0 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: sextb_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: sext.b a0, a0 +; RV32ZBB-NEXT: ret %shl = shl i32 %a, 24 %shr = ashr exact i32 %shl, 24 ret i32 %shr @@ -488,17 +488,17 @@ ; RV32I-NEXT: srai a1, a1, 31 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sextb_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sext.b a0, a0 -; RV32IB-NEXT: srai a1, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sextb_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: sext.b a0, a0 +; RV32B-NEXT: srai a1, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: sextb_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: sext.b a0, a0 -; RV32IBB-NEXT: srai a1, a0, 31 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: sextb_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: sext.b a0, a0 +; RV32ZBB-NEXT: srai a1, a0, 31 +; RV32ZBB-NEXT: ret %shl = shl i64 %a, 56 %shr = ashr exact i64 %shl, 56 ret i64 %shr @@ -511,15 +511,15 @@ ; RV32I-NEXT: srai a0, a0, 16 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sexth_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sext.h a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: sexth_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: sext.h a0, a0 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: sexth_i32: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: sext.h a0, a0 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: sexth_i32: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: sext.h a0, a0 +; RV32ZBB-NEXT: ret %shl = shl i32 %a, 16 %shr = ashr exact i32 %shl, 16 ret i32 %shr @@ -533,17 +533,17 @@ ; RV32I-NEXT: srai a1, a1, 31 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sexth_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sext.h a0, a0 -; RV32IB-NEXT: srai a1, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sexth_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: sext.h a0, a0 +; RV32B-NEXT: srai a1, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBB-LABEL: sexth_i64: -; RV32IBB: # %bb.0: -; RV32IBB-NEXT: sext.h a0, a0 -; RV32IBB-NEXT: srai a1, a0, 31 -; RV32IBB-NEXT: ret +; RV32ZBB-LABEL: sexth_i64: +; RV32ZBB: # %bb.0: +; RV32ZBB-NEXT: sext.h a0, a0 +; RV32ZBB-NEXT: srai a1, a0, 31 +; RV32ZBB-NEXT: ret %shl 
 %shr = ashr exact i64 %shl, 48
 ret i64 %shr
@@ -558,15 +558,15 @@
 ; RV32I-NEXT: .LBB10_2:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: min_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: min a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: min_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: min a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: min_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: min a0, a0, a1
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: min_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: min a0, a0, a1
+; RV32ZBB-NEXT: ret
 %cmp = icmp slt i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -594,32 +594,32 @@
 ; RV32I-NEXT: .LBB11_4:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: min_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: slt a4, a1, a3
-; RV32IB-NEXT: cmov a4, a4, a0, a2
-; RV32IB-NEXT: minu a0, a0, a2
-; RV32IB-NEXT: xor a2, a1, a3
-; RV32IB-NEXT: cmov a0, a2, a4, a0
-; RV32IB-NEXT: min a1, a1, a3
-; RV32IB-NEXT: ret
+; RV32B-LABEL: min_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: slt a4, a1, a3
+; RV32B-NEXT: cmov a4, a4, a0, a2
+; RV32B-NEXT: minu a0, a0, a2
+; RV32B-NEXT: xor a2, a1, a3
+; RV32B-NEXT: cmov a0, a2, a4, a0
+; RV32B-NEXT: min a1, a1, a3
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: min_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: mv a4, a0
-; RV32IBB-NEXT: bge a1, a3, .LBB11_3
-; RV32IBB-NEXT: # %bb.1:
-; RV32IBB-NEXT: beq a1, a3, .LBB11_4
-; RV32IBB-NEXT: .LBB11_2:
-; RV32IBB-NEXT: min a1, a1, a3
-; RV32IBB-NEXT: ret
-; RV32IBB-NEXT: .LBB11_3:
-; RV32IBB-NEXT: mv a0, a2
-; RV32IBB-NEXT: bne a1, a3, .LBB11_2
-; RV32IBB-NEXT: .LBB11_4:
-; RV32IBB-NEXT: minu a0, a4, a2
-; RV32IBB-NEXT: min a1, a1, a3
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: min_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: mv a4, a0
+; RV32ZBB-NEXT: bge a1, a3, .LBB11_3
+; RV32ZBB-NEXT: # %bb.1:
+; RV32ZBB-NEXT: beq a1, a3, .LBB11_4
+; RV32ZBB-NEXT: .LBB11_2:
+; RV32ZBB-NEXT: min a1, a1, a3
+; RV32ZBB-NEXT: ret
+; RV32ZBB-NEXT: .LBB11_3:
+; RV32ZBB-NEXT: mv a0, a2
+; RV32ZBB-NEXT: bne a1, a3, .LBB11_2
+; RV32ZBB-NEXT: .LBB11_4:
+; RV32ZBB-NEXT: minu a0, a4, a2
+; RV32ZBB-NEXT: min a1, a1, a3
+; RV32ZBB-NEXT: ret
 %cmp = icmp slt i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -634,15 +634,15 @@
 ; RV32I-NEXT: .LBB12_2:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: max_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: max a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: max_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: max a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: max_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: max a0, a0, a1
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: max_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: max a0, a0, a1
+; RV32ZBB-NEXT: ret
 %cmp = icmp sgt i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -670,32 +670,32 @@
 ; RV32I-NEXT: .LBB13_4:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: max_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: slt a4, a3, a1
-; RV32IB-NEXT: cmov a4, a4, a0, a2
-; RV32IB-NEXT: maxu a0, a0, a2
-; RV32IB-NEXT: xor a2, a1, a3
-; RV32IB-NEXT: cmov a0, a2, a4, a0
-; RV32IB-NEXT: max a1, a1, a3
-; RV32IB-NEXT: ret
+; RV32B-LABEL: max_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: slt a4, a3, a1
+; RV32B-NEXT: cmov a4, a4, a0, a2
+; RV32B-NEXT: maxu a0, a0, a2
+; RV32B-NEXT: xor a2, a1, a3
+; RV32B-NEXT: cmov a0, a2, a4, a0
+; RV32B-NEXT: max a1, a1, a3
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: max_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: mv a4, a0
-; RV32IBB-NEXT: bge a3, a1, .LBB13_3
-; RV32IBB-NEXT: # %bb.1:
-; RV32IBB-NEXT: beq a1, a3, .LBB13_4
-; RV32IBB-NEXT: .LBB13_2:
-; RV32IBB-NEXT: max a1, a1, a3
-; RV32IBB-NEXT: ret
-; RV32IBB-NEXT: .LBB13_3:
-; RV32IBB-NEXT: mv a0, a2
-; RV32IBB-NEXT: bne a1, a3, .LBB13_2
-; RV32IBB-NEXT: .LBB13_4:
-; RV32IBB-NEXT: maxu a0, a4, a2
-; RV32IBB-NEXT: max a1, a1, a3
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: max_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: mv a4, a0
+; RV32ZBB-NEXT: bge a3, a1, .LBB13_3
+; RV32ZBB-NEXT: # %bb.1:
+; RV32ZBB-NEXT: beq a1, a3, .LBB13_4
+; RV32ZBB-NEXT: .LBB13_2:
+; RV32ZBB-NEXT: max a1, a1, a3
+; RV32ZBB-NEXT: ret
+; RV32ZBB-NEXT: .LBB13_3:
+; RV32ZBB-NEXT: mv a0, a2
+; RV32ZBB-NEXT: bne a1, a3, .LBB13_2
+; RV32ZBB-NEXT: .LBB13_4:
+; RV32ZBB-NEXT: maxu a0, a4, a2
+; RV32ZBB-NEXT: max a1, a1, a3
+; RV32ZBB-NEXT: ret
 %cmp = icmp sgt i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -710,15 +710,15 @@
 ; RV32I-NEXT: .LBB14_2:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: minu_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: minu a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: minu_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: minu a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: minu_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: minu a0, a0, a1
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: minu_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: minu a0, a0, a1
+; RV32ZBB-NEXT: ret
 %cmp = icmp ult i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -746,32 +746,32 @@
 ; RV32I-NEXT: .LBB15_4:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: minu_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: sltu a4, a1, a3
-; RV32IB-NEXT: cmov a4, a4, a0, a2
-; RV32IB-NEXT: minu a0, a0, a2
-; RV32IB-NEXT: xor a2, a1, a3
-; RV32IB-NEXT: cmov a0, a2, a4, a0
-; RV32IB-NEXT: minu a1, a1, a3
-; RV32IB-NEXT: ret
+; RV32B-LABEL: minu_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: sltu a4, a1, a3
+; RV32B-NEXT: cmov a4, a4, a0, a2
+; RV32B-NEXT: minu a0, a0, a2
+; RV32B-NEXT: xor a2, a1, a3
+; RV32B-NEXT: cmov a0, a2, a4, a0
+; RV32B-NEXT: minu a1, a1, a3
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: minu_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: mv a4, a0
-; RV32IBB-NEXT: bgeu a1, a3, .LBB15_3
-; RV32IBB-NEXT: # %bb.1:
-; RV32IBB-NEXT: beq a1, a3, .LBB15_4
-; RV32IBB-NEXT: .LBB15_2:
-; RV32IBB-NEXT: minu a1, a1, a3
-; RV32IBB-NEXT: ret
-; RV32IBB-NEXT: .LBB15_3:
-; RV32IBB-NEXT: mv a0, a2
-; RV32IBB-NEXT: bne a1, a3, .LBB15_2
-; RV32IBB-NEXT: .LBB15_4:
-; RV32IBB-NEXT: minu a0, a4, a2
-; RV32IBB-NEXT: minu a1, a1, a3
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: minu_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: mv a4, a0
+; RV32ZBB-NEXT: bgeu a1, a3, .LBB15_3
+; RV32ZBB-NEXT: # %bb.1:
+; RV32ZBB-NEXT: beq a1, a3, .LBB15_4
+; RV32ZBB-NEXT: .LBB15_2:
+; RV32ZBB-NEXT: minu a1, a1, a3
+; RV32ZBB-NEXT: ret
+; RV32ZBB-NEXT: .LBB15_3:
+; RV32ZBB-NEXT: mv a0, a2
+; RV32ZBB-NEXT: bne a1, a3, .LBB15_2
+; RV32ZBB-NEXT: .LBB15_4:
+; RV32ZBB-NEXT: minu a0, a4, a2
+; RV32ZBB-NEXT: minu a1, a1, a3
+; RV32ZBB-NEXT: ret
 %cmp = icmp ult i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -786,15 +786,15 @@
 ; RV32I-NEXT: .LBB16_2:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: maxu_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: maxu a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: maxu_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: maxu a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: maxu_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: maxu a0, a0, a1
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: maxu_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: maxu a0, a0, a1
+; RV32ZBB-NEXT: ret
 %cmp = icmp ugt i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -822,32 +822,32 @@
 ; RV32I-NEXT: .LBB17_4:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: maxu_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: sltu a4, a3, a1
-; RV32IB-NEXT: cmov a4, a4, a0, a2
-; RV32IB-NEXT: maxu a0, a0, a2
-; RV32IB-NEXT: xor a2, a1, a3
-; RV32IB-NEXT: cmov a0, a2, a4, a0
-; RV32IB-NEXT: maxu a1, a1, a3
-; RV32IB-NEXT: ret
+; RV32B-LABEL: maxu_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: sltu a4, a3, a1
+; RV32B-NEXT: cmov a4, a4, a0, a2
+; RV32B-NEXT: maxu a0, a0, a2
+; RV32B-NEXT: xor a2, a1, a3
+; RV32B-NEXT: cmov a0, a2, a4, a0
+; RV32B-NEXT: maxu a1, a1, a3
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: maxu_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: mv a4, a0
-; RV32IBB-NEXT: bgeu a3, a1, .LBB17_3
-; RV32IBB-NEXT: # %bb.1:
-; RV32IBB-NEXT: beq a1, a3, .LBB17_4
-; RV32IBB-NEXT: .LBB17_2:
-; RV32IBB-NEXT: maxu a1, a1, a3
-; RV32IBB-NEXT: ret
-; RV32IBB-NEXT: .LBB17_3:
-; RV32IBB-NEXT: mv a0, a2
-; RV32IBB-NEXT: bne a1, a3, .LBB17_2
-; RV32IBB-NEXT: .LBB17_4:
-; RV32IBB-NEXT: maxu a0, a4, a2
-; RV32IBB-NEXT: maxu a1, a1, a3
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: maxu_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: mv a4, a0
+; RV32ZBB-NEXT: bgeu a3, a1, .LBB17_3
+; RV32ZBB-NEXT: # %bb.1:
+; RV32ZBB-NEXT: beq a1, a3, .LBB17_4
+; RV32ZBB-NEXT: .LBB17_2:
+; RV32ZBB-NEXT: maxu a1, a1, a3
+; RV32ZBB-NEXT: ret
+; RV32ZBB-NEXT: .LBB17_3:
+; RV32ZBB-NEXT: mv a0, a2
+; RV32ZBB-NEXT: bne a1, a3, .LBB17_2
+; RV32ZBB-NEXT: .LBB17_4:
+; RV32ZBB-NEXT: maxu a0, a4, a2
+; RV32ZBB-NEXT: maxu a1, a1, a3
+; RV32ZBB-NEXT: ret
 %cmp = icmp ugt i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -863,17 +863,17 @@
 ; RV32I-NEXT: xor a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: abs_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: neg a1, a0
-; RV32IB-NEXT: max a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: abs_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: neg a1, a0
+; RV32B-NEXT: max a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: abs_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: neg a1, a0
-; RV32IBB-NEXT: max a0, a0, a1
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: abs_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: neg a1, a0
+; RV32ZBB-NEXT: max a0, a0, a1
+; RV32ZBB-NEXT: ret
 %abs = tail call i32 @llvm.abs.i32(i32 %x, i1 true)
 ret i32 %abs
 }
@@ -892,28 +892,28 @@
 ; RV32I-NEXT: .LBB19_2:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: abs_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: neg a2, a0
-; RV32IB-NEXT: slti a3, a1, 0
-; RV32IB-NEXT: cmov a2, a3, a2, a0
-; RV32IB-NEXT: snez a0, a0
-; RV32IB-NEXT: add a0, a1, a0
-; RV32IB-NEXT: neg a0, a0
-; RV32IB-NEXT: cmov a1, a3, a0, a1
-; RV32IB-NEXT: mv a0, a2
-; RV32IB-NEXT: ret
+; RV32B-LABEL: abs_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: neg a2, a0
+; RV32B-NEXT: slti a3, a1, 0
+; RV32B-NEXT: cmov a2, a3, a2, a0
+; RV32B-NEXT: snez a0, a0
+; RV32B-NEXT: add a0, a1, a0
+; RV32B-NEXT: neg a0, a0
+; RV32B-NEXT: cmov a1, a3, a0, a1
+; RV32B-NEXT: mv a0, a2
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: abs_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: bgez a1, .LBB19_2
-; RV32IBB-NEXT: # %bb.1:
-; RV32IBB-NEXT: snez a2, a0
-; RV32IBB-NEXT: neg a0, a0
-; RV32IBB-NEXT: add a1, a1, a2
-; RV32IBB-NEXT: neg a1, a1
-; RV32IBB-NEXT: .LBB19_2:
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: abs_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: bgez a1, .LBB19_2
+; RV32ZBB-NEXT: # %bb.1:
+; RV32ZBB-NEXT: snez a2, a0
+; RV32ZBB-NEXT: neg a0, a0
+; RV32ZBB-NEXT: add a1, a1, a2
+; RV32ZBB-NEXT: neg a1, a1
+; RV32ZBB-NEXT: .LBB19_2:
+; RV32ZBB-NEXT: ret
 %abs = tail call i64 @llvm.abs.i64(i64 %x, i1 true)
 ret i64 %abs
 }
@@ -926,15 +926,15 @@
 ; RV32I-NEXT: and a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: zexth_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: zext.h a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: zexth_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: zext.h a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: zexth_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: zext.h a0, a0
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: zexth_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: zext.h a0, a0
+; RV32ZBB-NEXT: ret
 %and = and i32 %a, 65535
 ret i32 %and
 }
@@ -948,17 +948,17 @@
 ; RV32I-NEXT: mv a1, zero
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: zexth_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: zext.h a0, a0
-; RV32IB-NEXT: mv a1, zero
-; RV32IB-NEXT: ret
+; RV32B-LABEL: zexth_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: zext.h a0, a0
+; RV32B-NEXT: mv a1, zero
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: zexth_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: zext.h a0, a0
-; RV32IBB-NEXT: mv a1, zero
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: zexth_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: zext.h a0, a0
+; RV32ZBB-NEXT: mv a1, zero
+; RV32ZBB-NEXT: ret
 %and = and i64 %a, 65535
 ret i64 %and
 }
@@ -982,15 +982,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bswap_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8 a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bswap_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8 a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: bswap_i32:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: rev8 a0, a0
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: bswap_i32:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: rev8 a0, a0
+; RV32ZBB-NEXT: ret
 %1 = tail call i32 @llvm.bswap.i32(i32 %a)
 ret i32 %1
 }
@@ -1024,19 +1024,19 @@
 ; RV32I-NEXT: mv a0, a2
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bswap_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8 a2, a1
-; RV32IB-NEXT: rev8 a1, a0
-; RV32IB-NEXT: mv a0, a2
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bswap_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8 a2, a1
+; RV32B-NEXT: rev8 a1, a0
+; RV32B-NEXT: mv a0, a2
+; RV32B-NEXT: ret
 ;
-; RV32IBB-LABEL: bswap_i64:
-; RV32IBB: # %bb.0:
-; RV32IBB-NEXT: rev8 a2, a1
-; RV32IBB-NEXT: rev8 a1, a0
-; RV32IBB-NEXT: mv a0, a2
-; RV32IBB-NEXT: ret
+; RV32ZBB-LABEL: bswap_i64:
+; RV32ZBB: # %bb.0:
+; RV32ZBB-NEXT: rev8 a2, a1
+; RV32ZBB-NEXT: rev8 a1, a0
+; RV32ZBB-NEXT: mv a0, a2
+; RV32ZBB-NEXT: ret
 %1 = call i64 @llvm.bswap.i64(i64 %a)
 ret i64 %1
 }
diff --git a/llvm/test/CodeGen/RISCV/rv32zbc-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv32zbc-intrinsic.ll
--- a/llvm/test/CodeGen/RISCV/rv32zbc-intrinsic.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbc-intrinsic.ll
@@ -1,21 +1,21 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IB
+; RUN: | FileCheck %s -check-prefix=RV32B
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbc -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IBC
+; RUN: | FileCheck %s -check-prefix=RV32ZBC

 declare i32 @llvm.riscv.clmul.i32(i32 %a, i32 %b)

 define i32 @clmul32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: clmul32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: clmul a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: clmul32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: clmul a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBC-LABEL: clmul32:
-; RV32IBC: # %bb.0:
-; RV32IBC-NEXT: clmul a0, a0, a1
-; RV32IBC-NEXT: ret
+; RV32ZBC-LABEL: clmul32:
+; RV32ZBC: # %bb.0:
+; RV32ZBC-NEXT: clmul a0, a0, a1
+; RV32ZBC-NEXT: ret
 %tmp = call i32 @llvm.riscv.clmul.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
@@ -23,15 +23,15 @@
 declare i32 @llvm.riscv.clmulh.i32(i32 %a, i32 %b)

 define i32 @clmul32h(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: clmul32h:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: clmulh a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: clmul32h:
+; RV32B: # %bb.0:
+; RV32B-NEXT: clmulh a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBC-LABEL: clmul32h:
-; RV32IBC: # %bb.0:
-; RV32IBC-NEXT: clmulh a0, a0, a1
-; RV32IBC-NEXT: ret
+; RV32ZBC-LABEL: clmul32h:
+; RV32ZBC: # %bb.0:
+; RV32ZBC-NEXT: clmulh a0, a0, a1
+; RV32ZBC-NEXT: ret
 %tmp = call i32 @llvm.riscv.clmulh.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
@@ -39,15 +39,15 @@
 declare i32 @llvm.riscv.clmulr.i32(i32 %a, i32 %b)

 define i32 @clmul32r(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: clmul32r:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: clmulr a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: clmul32r:
+; RV32B: # %bb.0:
+; RV32B-NEXT: clmulr a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBC-LABEL: clmul32r:
-; RV32IBC: # %bb.0:
-; RV32IBC-NEXT: clmulr a0, a0, a1
-; RV32IBC-NEXT: ret
+; RV32ZBC-LABEL: clmul32r:
+; RV32ZBC: # %bb.0:
+; RV32ZBC-NEXT: clmulr a0, a0, a1
+; RV32ZBC-NEXT: ret
 %tmp = call i32 @llvm.riscv.clmulr.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
diff --git a/llvm/test/CodeGen/RISCV/rv32zbe-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv32zbe-intrinsic.ll
--- a/llvm/test/CodeGen/RISCV/rv32zbe-intrinsic.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbe-intrinsic.ll
@@ -1,21 +1,21 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IB
+; RUN: | FileCheck %s -check-prefix=RV32B
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbe -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IBE
+; RUN: | FileCheck %s -check-prefix=RV32ZBE

 declare i32 @llvm.riscv.bcompress.i32(i32 %a, i32 %b)

 define i32 @bcompress32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: bcompress32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: bcompress a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bcompress32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: bcompress a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBE-LABEL: bcompress32:
-; RV32IBE: # %bb.0:
-; RV32IBE-NEXT: bcompress a0, a0, a1
-; RV32IBE-NEXT: ret
+; RV32ZBE-LABEL: bcompress32:
+; RV32ZBE: # %bb.0:
+; RV32ZBE-NEXT: bcompress a0, a0, a1
+; RV32ZBE-NEXT: ret
 %tmp = call i32 @llvm.riscv.bcompress.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
@@ -23,15 +23,15 @@
 declare i32 @llvm.riscv.bdecompress.i32(i32 %a, i32 %b)

 define i32 @bdecompress32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: bdecompress32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: bdecompress a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bdecompress32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: bdecompress a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBE-LABEL: bdecompress32:
-; RV32IBE: # %bb.0:
-; RV32IBE-NEXT: bdecompress a0, a0, a1
-; RV32IBE-NEXT: ret
+; RV32ZBE-LABEL: bdecompress32:
+; RV32ZBE: # %bb.0:
+; RV32ZBE-NEXT: bdecompress a0, a0, a1
+; RV32ZBE-NEXT: ret
 %tmp = call i32 @llvm.riscv.bdecompress.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
diff --git a/llvm/test/CodeGen/RISCV/rv32zbp-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv32zbp-intrinsic.ll
--- a/llvm/test/CodeGen/RISCV/rv32zbp-intrinsic.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbp-intrinsic.ll
@@ -1,50 +1,50 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IB
+; RUN: | FileCheck %s -check-prefix=RV32B
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbp -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IBP
+; RUN: | FileCheck %s -check-prefix=RV32ZBP

 declare i32 @llvm.riscv.grev.i32(i32 %a, i32 %b)

 define i32 @grev32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: grev32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: grev a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: grev a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: grev a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: grev a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.grev.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define i32 @grev32_demandedbits(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: grev32_demandedbits:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: grev a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev32_demandedbits:
+; RV32B: # %bb.0:
+; RV32B-NEXT: grev a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev32_demandedbits:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: grev a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev32_demandedbits:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: grev a0, a0, a1
+; RV32ZBP-NEXT: ret
 %c = and i32 %b, 31
 %tmp = call i32 @llvm.riscv.grev.i32(i32 %a, i32 %c)
 ret i32 %tmp
 }

 define i32 @grevi32(i32 %a) nounwind {
-; RV32IB-LABEL: grevi32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: grevi a0, a0, 13
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grevi32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: grevi a0, a0, 13
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grevi32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: grevi a0, a0, 13
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grevi32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: grevi a0, a0, 13
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.grev.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -52,44 +52,44 @@
 declare i32 @llvm.riscv.gorc.i32(i32 %a, i32 %b)

 define i32 @gorc32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: gorc32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: gorc a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: gorc a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: gorc a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: gorc a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.gorc.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define i32 @gorc32_demandedbits(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: gorc32_demandedbits:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: gorc a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc32_demandedbits:
+; RV32B: # %bb.0:
+; RV32B-NEXT: gorc a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc32_demandedbits:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: gorc a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc32_demandedbits:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: gorc a0, a0, a1
+; RV32ZBP-NEXT: ret
 %c = and i32 %b, 31
 %tmp = call i32 @llvm.riscv.gorc.i32(i32 %a, i32 %c)
 ret i32 %tmp
 }

 define i32 @gorci32(i32 %a) nounwind {
-; RV32IB-LABEL: gorci32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: gorci a0, a0, 13
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorci32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: gorci a0, a0, 13
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorci32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: gorci a0, a0, 13
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorci32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: gorci a0, a0, 13
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.gorc.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -97,44 +97,44 @@
 declare i32 @llvm.riscv.shfl.i32(i32 %a, i32 %b)

 define i32 @shfl32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: shfl32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: shfl a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: shfl32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: shfl a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: shfl32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: shfl a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: shfl32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: shfl a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.shfl.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define i32 @shfl32_demandedbits(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: shfl32_demandedbits:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: shfl a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: shfl32_demandedbits:
+; RV32B: # %bb.0:
+; RV32B-NEXT: shfl a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: shfl32_demandedbits:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: shfl a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: shfl32_demandedbits:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: shfl a0, a0, a1
+; RV32ZBP-NEXT: ret
 %c = and i32 %b, 15
 %tmp = call i32 @llvm.riscv.shfl.i32(i32 %a, i32 %c)
 ret i32 %tmp
 }

 define i32 @shfli32(i32 %a) nounwind {
-; RV32IB-LABEL: shfli32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: shfli a0, a0, 13
-; RV32IB-NEXT: ret
+; RV32B-LABEL: shfli32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: shfli a0, a0, 13
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: shfli32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: shfli a0, a0, 13
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: shfli32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: shfli a0, a0, 13
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.shfl.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -142,44 +142,44 @@
 declare i32 @llvm.riscv.unshfl.i32(i32 %a, i32 %b)

 define i32 @unshfl32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: unshfl32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: unshfl a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: unshfl32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: unshfl a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: unshfl32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: unshfl a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: unshfl32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: unshfl a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.unshfl.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define i32 @unshfl32_demandedbits(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: unshfl32_demandedbits:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: unshfl a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: unshfl32_demandedbits:
+; RV32B: # %bb.0:
+; RV32B-NEXT: unshfl a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: unshfl32_demandedbits:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: unshfl a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: unshfl32_demandedbits:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: unshfl a0, a0, a1
+; RV32ZBP-NEXT: ret
 %c = and i32 %b, 15
 %tmp = call i32 @llvm.riscv.unshfl.i32(i32 %a, i32 %c)
 ret i32 %tmp
 }

 define i32 @unshfli32(i32 %a) nounwind {
-; RV32IB-LABEL: unshfli32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: unshfli a0, a0, 13
-; RV32IB-NEXT: ret
+; RV32B-LABEL: unshfli32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: unshfli a0, a0, 13
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: unshfli32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: unshfli a0, a0, 13
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: unshfli32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: unshfli a0, a0, 13
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.unshfl.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -187,15 +187,15 @@
 declare i32 @llvm.riscv.xperm.n.i32(i32 %a, i32 %b)

 define i32 @xpermn32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: xpermn32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: xperm.n a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: xpermn32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: xperm.n a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: xpermn32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: xperm.n a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: xpermn32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: xperm.n a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.xperm.n.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
@@ -203,15 +203,15 @@
 declare i32 @llvm.riscv.xperm.b.i32(i32 %a, i32 %b)

 define i32 @xpermb32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: xpermb32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: xperm.b a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: xpermb32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: xperm.b a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: xpermb32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: xperm.b a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: xpermb32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: xperm.b a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.xperm.b.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
@@ -219,15 +219,15 @@
 declare i32 @llvm.riscv.xperm.h.i32(i32 %a, i32 %b)

 define i32 @xpermh32(i32 %a, i32 %b) nounwind {
-; RV32IB-LABEL: xpermh32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: xperm.h a0, a0, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: xpermh32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: xperm.h a0, a0, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: xpermh32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: xperm.h a0, a0, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: xpermh32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: xperm.h a0, a0, a1
+; RV32ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.xperm.h.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }
diff --git a/llvm/test/CodeGen/RISCV/rv32zbp.ll b/llvm/test/CodeGen/RISCV/rv32zbp.ll
--- a/llvm/test/CodeGen/RISCV/rv32zbp.ll
+++ b/llvm/test/CodeGen/RISCV/rv32zbp.ll
@@ -2,9 +2,9 @@
 ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \
 ; RUN: | FileCheck %s -check-prefix=RV32I
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IB
+; RUN: | FileCheck %s -check-prefix=RV32B
 ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbp -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV32IBP
+; RUN: | FileCheck %s -check-prefix=RV32ZBP

 define i32 @gorc1_i32(i32 %a) nounwind {
 ; RV32I-LABEL: gorc1_i32:
@@ -21,15 +21,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc1_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.p a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc1_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.p a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc1_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.p a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc1_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.p a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 1
 %shl = and i32 %and, -1431655766
 %and1 = lshr i32 %a, 1
@@ -60,17 +60,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc1_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.p a0, a0
-; RV32IB-NEXT: orc.p a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc1_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.p a0, a0
+; RV32B-NEXT: orc.p a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc1_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.p a0, a0
-; RV32IBP-NEXT: orc.p a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc1_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.p a0, a0
+; RV32ZBP-NEXT: orc.p a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 1
 %shl = and i64 %and, -6148914691236517206
 %and1 = lshr i64 %a, 1
@@ -95,15 +95,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc2_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc2.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc2_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc2.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc2_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc2.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc2_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc2.n a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 2
 %shl = and i32 %and, -858993460
 %and1 = lshr i32 %a, 2
@@ -134,17 +134,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc2_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc2.n a0, a0
-; RV32IB-NEXT: orc2.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc2_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc2.n a0, a0
+; RV32B-NEXT: orc2.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc2_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc2.n a0, a0
-; RV32IBP-NEXT: orc2.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc2_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc2.n a0, a0
+; RV32ZBP-NEXT: orc2.n a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 2
 %shl = and i64 %and, -3689348814741910324
 %and1 = lshr i64 %a, 2
@@ -179,15 +179,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc3_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc3_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc3_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc3_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.n a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -240,17 +240,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc3_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.n a0, a0
-; RV32IB-NEXT: orc.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc3_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.n a0, a0
+; RV32B-NEXT: orc.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc3_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.n a0, a0
-; RV32IBP-NEXT: orc.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc3_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.n a0, a0
+; RV32ZBP-NEXT: orc.n a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -281,15 +281,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc4_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc4.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc4_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc4.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc4_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc4.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc4_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc4.b a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 4
 %shl = and i32 %and, -252645136
 %and1 = lshr i32 %a, 4
@@ -320,17 +320,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc4_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc4.b a0, a0
-; RV32IB-NEXT: orc4.b a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc4_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc4.b a0, a0
+; RV32B-NEXT: orc4.b a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc4_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc4.b a0, a0
-; RV32IBP-NEXT: orc4.b a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc4_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc4.b a0, a0
+; RV32ZBP-NEXT: orc4.b a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 4
 %shl = and i64 %and, -1085102592571150096
 %and1 = lshr i64 %a, 4
@@ -365,15 +365,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc5_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: gorci a0, a0, 5
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc5_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: gorci a0, a0, 5
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc5_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: gorci a0, a0, 5
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc5_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: gorci a0, a0, 5
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -426,17 +426,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc5_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: gorci a0, a0, 5
-; RV32IB-NEXT: gorci a1, a1, 5
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc5_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: gorci a0, a0, 5
+; RV32B-NEXT: gorci a1, a1, 5
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc5_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: gorci a0, a0, 5
-; RV32IBP-NEXT: gorci a1, a1, 5
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc5_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: gorci a0, a0, 5
+; RV32ZBP-NEXT: gorci a1, a1, 5
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -477,15 +477,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc6_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc2.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc6_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc2.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc6_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc2.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc6_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc2.b a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 2
 %shl1 = and i32 %and1, -858993460
 %and1b = lshr i32 %a, 2
@@ -538,17 +538,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc6_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc2.b a0, a0
-; RV32IB-NEXT: orc2.b a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc6_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc2.b a0, a0
+; RV32B-NEXT: orc2.b a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc6_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc2.b a0, a0
-; RV32IBP-NEXT: orc2.b a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc6_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc2.b a0, a0
+; RV32ZBP-NEXT: orc2.b a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 2
 %shl1 = and i64 %and1, -3689348814741910324
 %and1b = lshr i64 %a, 2
@@ -599,15 +599,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc7_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc7_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc7_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc7_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.b a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -682,17 +682,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc7_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.b a0, a0
-; RV32IB-NEXT: orc.b a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc7_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.b a0, a0
+; RV32B-NEXT: orc.b a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc7_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.b a0, a0
-; RV32IBP-NEXT: orc.b a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc7_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.b a0, a0
+; RV32ZBP-NEXT: orc.b a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -729,15 +729,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc8_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc8.h a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc8_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc8.h a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc8_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc8.h a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc8_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc8.h a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 8
 %shl = and i32 %and, -16711936
 %and1 = lshr i32 %a, 8
@@ -768,17 +768,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc8_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc8.h a0, a0
-; RV32IB-NEXT: orc8.h a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc8_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc8.h a0, a0
+; RV32B-NEXT: orc8.h a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc8_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc8.h a0, a0
-; RV32IBP-NEXT: orc8.h a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc8_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc8.h a0, a0
+; RV32ZBP-NEXT: orc8.h a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 8
 %shl = and i64 %and, -71777214294589696
 %and1 = lshr i64 %a, 8
@@ -797,15 +797,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc16_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc16 a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc16_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc16 a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc16_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc16 a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc16_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc16 a0, a0
+; RV32ZBP-NEXT: ret
 %shl = shl i32 %a, 16
 %shr = lshr i32 %a, 16
 %or = or i32 %shr, %a
@@ -822,15 +822,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc16_rotl_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc16 a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc16_rotl_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc16 a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc16_rotl_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc16 a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc16_rotl_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc16 a0, a0
+; RV32ZBP-NEXT: ret
 %rot = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 16)
 %or = or i32 %rot, %a
 ret i32 %or
@@ -845,15 +845,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc16_rotr_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc16 a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc16_rotr_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc16 a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc16_rotr_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc16 a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc16_rotr_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc16 a0, a0
+; RV32ZBP-NEXT: ret
 %rot = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 16)
 %or = or i32 %rot, %a
 ret i32 %or
@@ -872,17 +872,17 @@
 ; RV32I-NEXT: or a1, a1, a2
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc16_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc16 a0, a0
-; RV32IB-NEXT: orc16 a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc16_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc16 a0, a0
+; RV32B-NEXT: orc16 a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc16_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc16 a0, a0
-; RV32IBP-NEXT: orc16 a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc16_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc16 a0, a0
+; RV32ZBP-NEXT: orc16 a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 16
 %shl = and i64 %and, -281470681808896
 %and1 = lshr i64 %a, 16
@@ -914,15 +914,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc2b_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc2.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc2b_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc2.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc2b_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc2.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc2b_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc2.n a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 2
 %shl1 = and i32 %and1, -858993460
 %and1b = lshr i32 %a, 2
@@ -972,17 +972,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc2b_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc2.n a0, a0
-; RV32IB-NEXT: orc2.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc2b_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc2.n a0, a0
+; RV32B-NEXT: orc2.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc2b_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc2.n a0, a0
-; RV32IBP-NEXT: orc2.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc2b_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc2.n a0, a0
+; RV32ZBP-NEXT: orc2.n a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 2
 %shl1 = and i64 %and1, -3689348814741910324
 %and1b = lshr i64 %a, 2
@@ -1030,15 +1030,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc3b_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc3b_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc3b_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc3b_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.n a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -1110,17 +1110,17 @@
 ; RV32I-NEXT: or a1, a1, a6
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: gorc3b_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: orc.n a0, a0
-; RV32IB-NEXT: orc.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: gorc3b_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: orc.n a0, a0
+; RV32B-NEXT: orc.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: gorc3b_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: orc.n a0, a0
-; RV32IBP-NEXT: orc.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: gorc3b_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: orc.n a0, a0
+; RV32ZBP-NEXT: orc.n a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -1156,15 +1156,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev1_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.p a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev1_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.p a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev1_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.p a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev1_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.p a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 1
 %shl = and i32 %and, -1431655766
 %and1 = lshr i32 %a, 1
@@ -1192,17 +1192,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev1_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.p a0, a0
-; RV32IB-NEXT: rev.p a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev1_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.p a0, a0
+; RV32B-NEXT: rev.p a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev1_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.p a0, a0
-; RV32IBP-NEXT: rev.p a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev1_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.p a0, a0
+; RV32ZBP-NEXT: rev.p a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 1
 %shl = and i64 %and, -6148914691236517206
 %and1 = lshr i64 %a, 1
@@ -1225,15 +1225,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev2_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev2.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev2_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev2.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev2_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev2.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev2_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev2.n a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 2
 %shl = and i32 %and, -858993460
 %and1 = lshr i32 %a, 2
@@ -1261,17 +1261,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev2_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev2.n a0, a0
-; RV32IB-NEXT: rev2.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev2_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev2.n a0, a0
+; RV32B-NEXT: rev2.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev2_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev2.n a0, a0
-; RV32IBP-NEXT: rev2.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev2_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev2.n a0, a0
+; RV32ZBP-NEXT: rev2.n a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 2
 %shl = and i64 %and, -3689348814741910324
 %and1 = lshr i64 %a, 2
@@ -1303,15 +1303,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev3_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev3_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev3_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev3_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.n a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -1358,17 +1358,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev3_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.n a0, a0
-; RV32IB-NEXT: rev.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev3_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.n a0, a0
+; RV32B-NEXT: rev.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev3_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.n a0, a0
-; RV32IBP-NEXT: rev.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev3_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.n a0, a0
+; RV32ZBP-NEXT: rev.n a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -1396,15 +1396,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev4_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev4.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev4_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev4.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev4_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev4.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev4_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev4.b a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 4
 %shl = and i32 %and, -252645136
 %and1 = lshr i32 %a, 4
@@ -1432,17 +1432,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev4_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev4.b a0, a0
-; RV32IB-NEXT: rev4.b a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev4_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev4.b a0, a0
+; RV32B-NEXT: rev4.b a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev4_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev4.b a0, a0
-; RV32IBP-NEXT: rev4.b a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev4_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev4.b a0, a0
+; RV32ZBP-NEXT: rev4.b a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 4
 %shl = and i64 %and, -1085102592571150096
 %and1 = lshr i64 %a, 4
@@ -1474,15 +1474,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev5_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: grevi a0, a0, 5
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev5_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: grevi a0, a0, 5
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev5_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: grevi a0, a0, 5
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev5_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: grevi a0, a0, 5
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -1529,17 +1529,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev5_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: grevi a0, a0, 5
-; RV32IB-NEXT: grevi a1, a1, 5
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev5_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: grevi a0, a0, 5
+; RV32B-NEXT: grevi a1, a1, 5
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev5_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: grevi a0, a0, 5
-; RV32IBP-NEXT: grevi a1, a1, 5
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev5_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: grevi a0, a0, 5
+; RV32ZBP-NEXT: grevi a1, a1, 5
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -1577,15 +1577,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev6_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev2.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev6_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev2.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev6_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev2.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev6_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev2.b a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 2
 %shl1 = and i32 %and1, -858993460
 %and1b = lshr i32 %a, 2
@@ -1632,17 +1632,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev6_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev2.b a0, a0
-; RV32IB-NEXT: rev2.b a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev6_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev2.b a0, a0
+; RV32B-NEXT: rev2.b a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev6_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev2.b a0, a0
-; RV32IBP-NEXT: rev2.b a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev6_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev2.b a0, a0
+; RV32ZBP-NEXT: rev2.b a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 2
 %shl1 = and i64 %and1, -3689348814741910324
 %and1b = lshr i64 %a, 2
@@ -1688,15 +1688,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev7_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev7_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev7_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev7_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.b a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -1762,17 +1762,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev7_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.b a0, a0
-; RV32IB-NEXT: rev.b a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev7_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.b a0, a0
+; RV32B-NEXT: rev.b a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev7_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.b a0, a0
-; RV32IBP-NEXT: rev.b a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev7_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.b a0, a0
+; RV32ZBP-NEXT: rev.b a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -1805,15 +1805,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev8_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8.h a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev8_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8.h a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev8_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev8.h a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev8_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev8.h a0, a0
+; RV32ZBP-NEXT: ret
 %and = shl i32 %a, 8
 %shl = and i32 %and, -16711936
 %and1 = lshr i32 %a, 8
@@ -1841,17 +1841,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev8_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8.h a0, a0
-; RV32IB-NEXT: rev8.h a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev8_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8.h a0, a0
+; RV32B-NEXT: rev8.h a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev8_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev8.h a0, a0
-; RV32IBP-NEXT: rev8.h a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev8_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev8.h a0, a0
+; RV32ZBP-NEXT: rev8.h a1, a1
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 8
 %shl = and i64 %and, -71777214294589696
 %and1 = lshr i64 %a, 8
@@ -1868,15 +1868,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev16_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rori a0, a0, 16
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev16_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rori a0, a0, 16
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev16_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rori a0, a0, 16
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev16_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rori a0, a0, 16
+; RV32ZBP-NEXT: ret
 %shl = shl i32 %a, 16
 %shr = lshr i32 %a, 16
 %or = or i32 %shl, %shr
@@ -1907,15 +1907,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev3b_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev3b_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev3b_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev3b_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.n a0, a0
+; RV32ZBP-NEXT: ret
 %and2 = shl i32 %a, 2
 %shl2 = and i32 %and2, -858993460
 %and2b = lshr i32 %a, 2
@@ -1962,17 +1962,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev3b_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.n a0, a0
-; RV32IB-NEXT: rev.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev3b_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.n a0, a0
+; RV32B-NEXT: rev.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev3b_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.n a0, a0
-; RV32IBP-NEXT: rev.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev3b_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.n a0, a0
+; RV32ZBP-NEXT: rev.n a1, a1
+; RV32ZBP-NEXT: ret
 %and2 = shl i64 %a, 2
 %shl2 = and i64 %and2, -3689348814741910324
 %and2b = lshr i64 %a, 2
@@ -2015,15 +2015,15 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev2b_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev2.n a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev2b_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev2.n a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev2b_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev2.n a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev2b_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev2.n a0, a0
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -2086,17 +2086,17 @@
 ; RV32I-NEXT: or a1, a3, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev2b_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev2.n a0, a0
-; RV32IB-NEXT: rev2.n a1, a1
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev2b_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev2.n a0, a0
+; RV32B-NEXT: rev2.n a1, a1
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev2b_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev2.n a0, a0
-; RV32IBP-NEXT: rev2.n a1, a1
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev2b_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev2.n a0, a0
+; RV32ZBP-NEXT: rev2.n a1, a1
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -2149,13 +2149,13 @@
 ; RV32I-NEXT: or a0, a1, a0
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev0_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev0_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev0_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev0_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -2233,13 +2233,13 @@
 ; RV32I-NEXT: or a1, a5, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev0_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev0_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev0_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev0_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -2274,15 +2274,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev16_i32_fshl:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rori a0, a0, 16
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev16_i32_fshl:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rori a0, a0, 16
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev16_i32_fshl:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rori a0, a0, 16
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev16_i32_fshl:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rori a0, a0, 16
+; RV32ZBP-NEXT: ret
 %or = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 16)
 ret i32 %or
 }
@@ -2295,15 +2295,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev16_i32_fshr:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rori a0, a0, 16
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev16_i32_fshr:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rori a0, a0, 16
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev16_i32_fshr:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rori a0, a0, 16
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev16_i32_fshr:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rori a0, a0, 16
+; RV32ZBP-NEXT: ret
 %or = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 16)
 ret i32 %or
 }
@@ -2319,17 +2319,17 @@
 ; RV32I-NEXT: or a1, a2, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: grev16_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rori a0, a0, 16
-; RV32IB-NEXT: rori a1, a1, 16
-; RV32IB-NEXT: ret
+; RV32B-LABEL: grev16_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rori a0, a0, 16
+; RV32B-NEXT: rori a1, a1, 16
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: grev16_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rori a0, a0, 16
-; RV32IBP-NEXT: rori a1, a1, 16
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: grev16_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rori a0, a0, 16
+; RV32ZBP-NEXT: rori a1, a1, 16
+; RV32ZBP-NEXT: ret
 %and = shl i64 %a, 16
 %shl = and i64 %and, -281470681808896
 %and1 = lshr i64 %a, 16
@@ -2351,15 +2351,15 @@
 ; RV32I-NEXT: and a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bswap_i16:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8.h a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bswap_i16:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8.h a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: bswap_i16:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev8.h a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: bswap_i16:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev8.h a0, a0
+; RV32ZBP-NEXT: ret
 %1 = tail call i16 @llvm.bswap.i16(i16 %a)
 ret i16 %1
 }
@@ -2383,15 +2383,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bswap_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8 a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bswap_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8 a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: bswap_i32:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev8 a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: bswap_i32:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev8 a0, a0
+; RV32ZBP-NEXT: ret
 %1 = tail call i32 @llvm.bswap.i32(i32 %a)
 ret i32 %1
 }
@@ -2425,19 +2425,19 @@
 ; RV32I-NEXT: mv a0, a2
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bswap_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev8 a2, a1
-; RV32IB-NEXT: rev8 a1, a0
-; RV32IB-NEXT: mv a0, a2
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bswap_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev8 a2, a1
+; RV32B-NEXT: rev8 a1, a0
+; RV32B-NEXT: mv a0, a2
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: bswap_i64:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev8 a2, a1
-; RV32IBP-NEXT: rev8 a1, a0
-; RV32IBP-NEXT: mv a0, a2
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: bswap_i64:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev8 a2, a1
+; RV32ZBP-NEXT: rev8 a1, a0
+; RV32ZBP-NEXT: mv a0, a2
+; RV32ZBP-NEXT: ret
 %1 = call i64 @llvm.bswap.i64(i64 %a)
 ret i64 %1
 }
@@ -2463,15 +2463,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bitreverse_i8:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.b a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bitreverse_i8:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.b a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: bitreverse_i8:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.b a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: bitreverse_i8:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.b a0, a0
+; RV32ZBP-NEXT: ret
 %1 = tail call i8 @llvm.bitreverse.i8(i8 %a)
 ret i8 %1
 }
@@ -2513,15 +2513,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bitreverse_i16:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev.h a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bitreverse_i16:
+; RV32B: # %bb.0:
+; RV32B-NEXT: rev.h a0, a0
+; RV32B-NEXT: ret
 ;
-; RV32IBP-LABEL: bitreverse_i16:
-; RV32IBP: # %bb.0:
-; RV32IBP-NEXT: rev.h a0, a0
-; RV32IBP-NEXT: ret
+; RV32ZBP-LABEL: bitreverse_i16:
+; RV32ZBP: # %bb.0:
+; RV32ZBP-NEXT: rev.h a0, a0
+; RV32ZBP-NEXT: ret
 %1 = tail call i16 @llvm.bitreverse.i16(i16 %a)
 ret i16 %1
 }
@@ -2572,15 +2572,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: bitreverse_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: rev a0, a0
-; RV32IB-NEXT: ret
+; RV32B-LABEL: bitreverse_i32:
+; RV32B: # %bb.0:
RV32B-NEXT: rev a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: bitreverse_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rev a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: bitreverse_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rev a0, a0 +; RV32ZBP-NEXT: ret %1 = tail call i32 @llvm.bitreverse.i32(i32 %a) ret i32 %1 } @@ -2656,19 +2656,19 @@ ; RV32I-NEXT: mv a0, a7 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: bitreverse_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rev a2, a1 -; RV32IB-NEXT: rev a1, a0 -; RV32IB-NEXT: mv a0, a2 -; RV32IB-NEXT: ret +; RV32B-LABEL: bitreverse_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: rev a2, a1 +; RV32B-NEXT: rev a1, a0 +; RV32B-NEXT: mv a0, a2 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: bitreverse_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rev a2, a1 -; RV32IBP-NEXT: rev a1, a0 -; RV32IBP-NEXT: mv a0, a2 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: bitreverse_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rev a2, a1 +; RV32ZBP-NEXT: rev a1, a0 +; RV32ZBP-NEXT: mv a0, a2 +; RV32ZBP-NEXT: ret %1 = call i64 @llvm.bitreverse.i64(i64 %a) ret i64 %1 } @@ -2693,15 +2693,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: bswap_rotr_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rev8.h a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: bswap_rotr_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: rev8.h a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: bswap_rotr_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rev8.h a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: bswap_rotr_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rev8.h a0, a0 +; RV32ZBP-NEXT: ret %1 = call i32 @llvm.bswap.i32(i32 %a) %2 = call i32 @llvm.fshr.i32(i32 %1, i32 %1, i32 16) ret i32 %2 @@ -2727,15 +2727,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: bswap_rotl_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rev8.h a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: bswap_rotl_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: rev8.h a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: bswap_rotl_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rev8.h a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: bswap_rotl_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rev8.h a0, a0 +; RV32ZBP-NEXT: ret %1 = call i32 @llvm.bswap.i32(i32 %a) %2 = call i32 @llvm.fshl.i32(i32 %1, i32 %1, i32 16) ret i32 %2 @@ -2794,15 +2794,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: bitreverse_bswap_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rev.b a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: bitreverse_bswap_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: rev.b a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: bitreverse_bswap_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rev.b a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: bitreverse_bswap_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rev.b a0, a0 +; RV32ZBP-NEXT: ret %1 = call i32 @llvm.bitreverse.i32(i32 %a) %2 = call i32 @llvm.bswap.i32(i32 %1) ret i32 %2 @@ -2894,17 +2894,17 @@ ; RV32I-NEXT: or a1, a1, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: bitreverse_bswap_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: rev.b a0, a0 -; RV32IB-NEXT: rev.b a1, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: bitreverse_bswap_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: rev.b a0, a0 +; RV32B-NEXT: rev.b a1, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: bitreverse_bswap_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: rev.b a0, a0 -; RV32IBP-NEXT: rev.b a1, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: bitreverse_bswap_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: rev.b a0, a0 +; RV32ZBP-NEXT: rev.b a1, a1 +; RV32ZBP-NEXT: ret %1 = call i64 @llvm.bitreverse.i64(i64 %a) %2 = call i64 @llvm.bswap.i64(i64 %1) ret 
i64 %2 @@ -2928,15 +2928,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl1_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip.n a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl1_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: zip.n a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl1_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip.n a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl1_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip.n a0, a0 +; RV32ZBP-NEXT: ret %and = and i32 %a, -1717986919 %shl = shl i32 %a, 1 %and1 = and i32 %shl, 1145324612 @@ -2972,17 +2972,17 @@ ; RV32I-NEXT: or a1, a2, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl1_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip.n a0, a0 -; RV32IB-NEXT: zip.n a1, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl1_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: zip.n a0, a0 +; RV32B-NEXT: zip.n a1, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl1_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip.n a0, a0 -; RV32IBP-NEXT: zip.n a1, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl1_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip.n a0, a0 +; RV32ZBP-NEXT: zip.n a1, a1 +; RV32ZBP-NEXT: ret %and = and i64 %a, -7378697629483820647 %shl = shl i64 %a, 1 %and1 = and i64 %shl, 4919131752989213764 @@ -3011,15 +3011,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl2_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip2.b a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl2_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: zip2.b a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl2_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip2.b a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl2_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip2.b a0, a0 +; RV32ZBP-NEXT: ret %and = and i32 %a, -1010580541 %shl = shl i32 %a, 2 %and1 = and i32 %shl, 808464432 @@ -3055,17 +3055,17 @@ ; RV32I-NEXT: or a1, a1, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl2_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip2.b a0, a0 -; RV32IB-NEXT: zip2.b a1, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl2_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: zip2.b a0, a0 +; RV32B-NEXT: zip2.b a1, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl2_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip2.b a0, a0 -; RV32IBP-NEXT: zip2.b a1, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl2_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip2.b a0, a0 +; RV32ZBP-NEXT: zip2.b a1, a1 +; RV32ZBP-NEXT: ret %and = and i64 %a, -4340410370284600381 %shl = shl i64 %a, 2 %and1 = and i64 %shl, 3472328296227680304 @@ -3094,15 +3094,15 @@ ; RV32I-NEXT: or a0, a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl4_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip4.h a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl4_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: zip4.h a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl4_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip4.h a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl4_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip4.h a0, a0 +; RV32ZBP-NEXT: ret %and = and i32 %a, -267390961 %shl = shl i32 %a, 4 %and1 = and i32 %shl, 251662080 @@ -3138,17 +3138,17 @@ ; RV32I-NEXT: or a1, a1, a6 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl4_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip4.h a0, a0 -; RV32IB-NEXT: zip4.h a1, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl4_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: zip4.h a0, a0 +; RV32B-NEXT: zip4.h a1, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl4_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip4.h a0, a0 -; RV32IBP-NEXT: zip4.h a1, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl4_i64: +; RV32ZBP: # %bb.0: 
+; RV32ZBP-NEXT: zip4.h a0, a0 +; RV32ZBP-NEXT: zip4.h a1, a1 +; RV32ZBP-NEXT: ret %and = and i64 %a, -1148435428713435121 %shl = shl i64 %a, 4 %and1 = and i64 %shl, 1080880403494997760 @@ -3176,15 +3176,15 @@ ; RV32I-NEXT: or a0, a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl8_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip8 a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl8_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: zip8 a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl8_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip8 a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl8_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip8 a0, a0 +; RV32ZBP-NEXT: ret %and = and i32 %a, -16776961 %shl = shl i32 %a, 8 %and1 = and i32 %shl, 16711680 @@ -3219,17 +3219,17 @@ ; RV32I-NEXT: or a1, a5, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: shfl8_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zip8 a0, a0 -; RV32IB-NEXT: zip8 a1, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: shfl8_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: zip8 a0, a0 +; RV32B-NEXT: zip8 a1, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: shfl8_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zip8 a0, a0 -; RV32IBP-NEXT: zip8 a1, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: shfl8_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zip8 a0, a0 +; RV32ZBP-NEXT: zip8 a1, a1 +; RV32ZBP-NEXT: ret %and = and i64 %a, -72056494543077121 %shl = shl i64 %a, 8 %and1 = and i64 %shl, 71776119077928960 @@ -3250,15 +3250,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: pack_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: pack a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: pack_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: pack a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: pack_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: pack a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: pack_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: pack a0, a0, a1 +; RV32ZBP-NEXT: ret %shl = and i32 %a, 65535 %shl1 = shl i32 %b, 16 %or = or i32 %shl1, %shl @@ -3276,15 +3276,15 @@ ; RV32I-NEXT: mv a1, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: pack_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: mv a1, a2 -; RV32IB-NEXT: ret +; RV32B-LABEL: pack_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: mv a1, a2 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: pack_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: mv a1, a2 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: pack_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: mv a1, a2 +; RV32ZBP-NEXT: ret %shl = and i64 %a, 4294967295 %shl1 = shl i64 %b, 32 %or = or i64 %shl1, %shl @@ -3300,15 +3300,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: packu_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: packu a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: packu_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: packu a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: packu_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: packu a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: packu_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: packu a0, a0, a1 +; RV32ZBP-NEXT: ret %shr = lshr i32 %a, 16 %shr1 = and i32 %b, -65536 %or = or i32 %shr1, %shr @@ -3327,17 +3327,17 @@ ; RV32I-NEXT: mv a1, a3 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: packu_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: mv a0, a1 -; RV32IB-NEXT: mv a1, a3 -; RV32IB-NEXT: ret +; RV32B-LABEL: packu_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: mv a0, a1 +; RV32B-NEXT: mv a1, a3 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: packu_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: mv a0, a1 -; RV32IBP-NEXT: mv a1, a3 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: packu_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: mv a0, a1 +; RV32ZBP-NEXT: 
mv a1, a3 +; RV32ZBP-NEXT: ret %shr = lshr i64 %a, 32 %shr1 = and i64 %b, -4294967296 %or = or i64 %shr1, %shr @@ -3353,15 +3353,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: packh_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: packh a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: packh_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: packh a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: packh_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: packh a0, a0, a1 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: packh_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: packh a0, a0, a1 +; RV32ZBP-NEXT: ret %and = and i32 %a, 255 %and1 = shl i32 %b, 8 %shl = and i32 %and1, 65280 @@ -3379,17 +3379,17 @@ ; RV32I-NEXT: mv a1, zero ; RV32I-NEXT: ret ; -; RV32IB-LABEL: packh_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: packh a0, a0, a2 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret +; RV32B-LABEL: packh_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: packh a0, a0, a2 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: packh_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: packh a0, a0, a2 -; RV32IBP-NEXT: mv a1, zero -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: packh_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: packh a0, a0, a2 +; RV32ZBP-NEXT: mv a1, zero +; RV32ZBP-NEXT: ret %and = and i64 %a, 255 %and1 = shl i64 %b, 8 %shl = and i64 %and1, 65280 @@ -3405,15 +3405,15 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: zexth_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zext.h a0, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: zexth_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: zext.h a0, a0 +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: zexth_i32: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zext.h a0, a0 -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: zexth_i32: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zext.h a0, a0 +; RV32ZBP-NEXT: ret %and = and i32 %a, 65535 ret i32 %and } @@ -3427,17 +3427,17 @@ ; RV32I-NEXT: mv a1, zero ; RV32I-NEXT: ret ; -; RV32IB-LABEL: zexth_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: zext.h a0, a0 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret +; RV32B-LABEL: zexth_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: zext.h a0, a0 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret ; -; RV32IBP-LABEL: zexth_i64: -; RV32IBP: # %bb.0: -; RV32IBP-NEXT: zext.h a0, a0 -; RV32IBP-NEXT: mv a1, zero -; RV32IBP-NEXT: ret +; RV32ZBP-LABEL: zexth_i64: +; RV32ZBP: # %bb.0: +; RV32ZBP-NEXT: zext.h a0, a0 +; RV32ZBP-NEXT: mv a1, zero +; RV32ZBP-NEXT: ret %and = and i64 %a, 65535 ret i64 %and } diff --git a/llvm/test/CodeGen/RISCV/rv32zbs.ll b/llvm/test/CodeGen/RISCV/rv32zbs.ll --- a/llvm/test/CodeGen/RISCV/rv32zbs.ll +++ b/llvm/test/CodeGen/RISCV/rv32zbs.ll @@ -2,9 +2,9 @@ ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV32I ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IB +; RUN: | FileCheck %s -check-prefix=RV32B ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbs -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBS +; RUN: | FileCheck %s -check-prefix=RV32ZBS define i32 @sbclr_i32(i32 %a, i32 %b) nounwind { ; RV32I-LABEL: sbclr_i32: @@ -15,15 +15,15 @@ ; RV32I-NEXT: and a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclr_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclr a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclr_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: bclr a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclr_i32: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bclr a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: 
sbclr_i32: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclr a0, a0, a1 +; RV32ZBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and %neg = xor i32 %shl, -1 @@ -40,15 +40,15 @@ ; RV32I-NEXT: and a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclr_i32_no_mask: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclr a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclr_i32_no_mask: +; RV32B: # %bb.0: +; RV32B-NEXT: bclr a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclr_i32_no_mask: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bclr a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclr_i32_no_mask: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclr a0, a0, a1 +; RV32ZBS-NEXT: ret %shl = shl nuw i32 1, %b %neg = xor i32 %shl, -1 %and1 = and i32 %neg, %a @@ -73,30 +73,30 @@ ; RV32I-NEXT: and a0, a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclr_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bset a3, zero, a2 -; RV32IB-NEXT: andi a2, a2, 63 -; RV32IB-NEXT: addi a2, a2, -32 -; RV32IB-NEXT: srai a4, a2, 31 -; RV32IB-NEXT: and a3, a4, a3 -; RV32IB-NEXT: slti a4, a2, 0 -; RV32IB-NEXT: bclr a2, a1, a2 -; RV32IB-NEXT: cmov a1, a4, a1, a2 -; RV32IB-NEXT: andn a0, a0, a3 -; RV32IB-NEXT: ret -; -; RV32IBS-LABEL: sbclr_i64: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: andi a3, a2, 63 -; RV32IBS-NEXT: addi a3, a3, -32 -; RV32IBS-NEXT: bltz a3, .LBB2_2 -; RV32IBS-NEXT: # %bb.1: -; RV32IBS-NEXT: bclr a1, a1, a3 -; RV32IBS-NEXT: ret -; RV32IBS-NEXT: .LBB2_2: -; RV32IBS-NEXT: bclr a0, a0, a2 -; RV32IBS-NEXT: ret +; RV32B-LABEL: sbclr_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: bset a3, zero, a2 +; RV32B-NEXT: andi a2, a2, 63 +; RV32B-NEXT: addi a2, a2, -32 +; RV32B-NEXT: srai a4, a2, 31 +; RV32B-NEXT: and a3, a4, a3 +; RV32B-NEXT: slti a4, a2, 0 +; RV32B-NEXT: bclr a2, a1, a2 +; RV32B-NEXT: cmov a1, a4, a1, a2 +; RV32B-NEXT: andn a0, a0, a3 +; RV32B-NEXT: ret +; +; RV32ZBS-LABEL: sbclr_i64: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: andi a3, a2, 63 +; RV32ZBS-NEXT: addi a3, a3, -32 +; RV32ZBS-NEXT: bltz a3, .LBB2_2 +; RV32ZBS-NEXT: # %bb.1: +; RV32ZBS-NEXT: bclr a1, a1, a3 +; RV32ZBS-NEXT: ret +; RV32ZBS-NEXT: .LBB2_2: +; RV32ZBS-NEXT: bclr a0, a0, a2 +; RV32ZBS-NEXT: ret %and = and i64 %b, 63 %shl = shl nuw i64 1, %and %neg = xor i64 %shl, -1 @@ -112,15 +112,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbset_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bset a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbset_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: bset a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbset_i32: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bset a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbset_i32: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bset a0, a0, a1 +; RV32ZBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and %or = or i32 %shl, %a @@ -135,15 +135,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbset_i32_no_mask: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bset a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbset_i32_no_mask: +; RV32B: # %bb.0: +; RV32B-NEXT: bset a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbset_i32_no_mask: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bset a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbset_i32_no_mask: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bset a0, a0, a1 +; RV32ZBS-NEXT: ret %shl = shl nuw i32 1, %b %or = or i32 %shl, %a ret i32 %or @@ -157,15 +157,15 @@ ; RV32I-NEXT: sll a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbset_i32_zero: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bset a0, zero, a0 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbset_i32_zero: +; RV32B: 
# %bb.0: +; RV32B-NEXT: bset a0, zero, a0 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbset_i32_zero: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bset a0, zero, a0 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbset_i32_zero: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bset a0, zero, a0 +; RV32ZBS-NEXT: ret %shl = shl i32 1, %a ret i32 %shl } @@ -185,21 +185,21 @@ ; RV32I-NEXT: or a1, a3, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbset_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bset a3, zero, a2 -; RV32IB-NEXT: srai a3, a3, 31 -; RV32IB-NEXT: bset a0, a0, a2 -; RV32IB-NEXT: or a1, a3, a1 -; RV32IB-NEXT: ret -; -; RV32IBS-LABEL: sbset_i64: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bset a3, zero, a2 -; RV32IBS-NEXT: srai a3, a3, 31 -; RV32IBS-NEXT: bset a0, a0, a2 -; RV32IBS-NEXT: or a1, a3, a1 -; RV32IBS-NEXT: ret +; RV32B-LABEL: sbset_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: bset a3, zero, a2 +; RV32B-NEXT: srai a3, a3, 31 +; RV32B-NEXT: bset a0, a0, a2 +; RV32B-NEXT: or a1, a3, a1 +; RV32B-NEXT: ret +; +; RV32ZBS-LABEL: sbset_i64: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bset a3, zero, a2 +; RV32ZBS-NEXT: srai a3, a3, 31 +; RV32ZBS-NEXT: bset a0, a0, a2 +; RV32ZBS-NEXT: or a1, a3, a1 +; RV32ZBS-NEXT: ret %1 = trunc i64 %b to i32 %conv = and i32 %1, 63 %shl = shl nuw i32 1, %conv @@ -223,29 +223,29 @@ ; RV32I-NEXT: sll a0, a2, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbset_i64_zero: -; RV32IB: # %bb.0: -; RV32IB-NEXT: addi a2, a0, -32 -; RV32IB-NEXT: bset a1, zero, a2 -; RV32IB-NEXT: slti a3, a2, 0 -; RV32IB-NEXT: cmov a1, a3, zero, a1 -; RV32IB-NEXT: bset a0, zero, a0 -; RV32IB-NEXT: srai a2, a2, 31 -; RV32IB-NEXT: and a0, a2, a0 -; RV32IB-NEXT: ret -; -; RV32IBS-LABEL: sbset_i64_zero: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: addi a1, a0, -32 -; RV32IBS-NEXT: bltz a1, .LBB7_2 -; RV32IBS-NEXT: # %bb.1: -; RV32IBS-NEXT: mv a0, zero -; RV32IBS-NEXT: bset a1, zero, a1 -; RV32IBS-NEXT: ret -; RV32IBS-NEXT: .LBB7_2: -; RV32IBS-NEXT: mv a1, zero -; RV32IBS-NEXT: bset a0, zero, a0 -; RV32IBS-NEXT: ret +; RV32B-LABEL: sbset_i64_zero: +; RV32B: # %bb.0: +; RV32B-NEXT: addi a2, a0, -32 +; RV32B-NEXT: bset a1, zero, a2 +; RV32B-NEXT: slti a3, a2, 0 +; RV32B-NEXT: cmov a1, a3, zero, a1 +; RV32B-NEXT: bset a0, zero, a0 +; RV32B-NEXT: srai a2, a2, 31 +; RV32B-NEXT: and a0, a2, a0 +; RV32B-NEXT: ret +; +; RV32ZBS-LABEL: sbset_i64_zero: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: addi a1, a0, -32 +; RV32ZBS-NEXT: bltz a1, .LBB7_2 +; RV32ZBS-NEXT: # %bb.1: +; RV32ZBS-NEXT: mv a0, zero +; RV32ZBS-NEXT: bset a1, zero, a1 +; RV32ZBS-NEXT: ret +; RV32ZBS-NEXT: .LBB7_2: +; RV32ZBS-NEXT: mv a1, zero +; RV32ZBS-NEXT: bset a0, zero, a0 +; RV32ZBS-NEXT: ret %shl = shl i64 1, %a ret i64 %shl } @@ -258,15 +258,15 @@ ; RV32I-NEXT: xor a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbinv_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: binv a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbinv_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: binv a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbinv_i32: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: binv a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbinv_i32: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: binv a0, a0, a1 +; RV32ZBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and %xor = xor i32 %shl, %a @@ -288,21 +288,21 @@ ; RV32I-NEXT: xor a1, a3, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbinv_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bset a3, zero, a2 -; RV32IB-NEXT: srai a3, a3, 31 -; RV32IB-NEXT: binv a0, a0, a2 -; RV32IB-NEXT: xor a1, a3, a1 -; RV32IB-NEXT: ret -; -; RV32IBS-LABEL: sbinv_i64: -; RV32IBS: # %bb.0: -; 
RV32IBS-NEXT: bset a3, zero, a2 -; RV32IBS-NEXT: srai a3, a3, 31 -; RV32IBS-NEXT: binv a0, a0, a2 -; RV32IBS-NEXT: xor a1, a3, a1 -; RV32IBS-NEXT: ret +; RV32B-LABEL: sbinv_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: bset a3, zero, a2 +; RV32B-NEXT: srai a3, a3, 31 +; RV32B-NEXT: binv a0, a0, a2 +; RV32B-NEXT: xor a1, a3, a1 +; RV32B-NEXT: ret +; +; RV32ZBS-LABEL: sbinv_i64: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bset a3, zero, a2 +; RV32ZBS-NEXT: srai a3, a3, 31 +; RV32ZBS-NEXT: binv a0, a0, a2 +; RV32ZBS-NEXT: xor a1, a3, a1 +; RV32ZBS-NEXT: ret %1 = trunc i64 %b to i32 %conv = and i32 %1, 63 %shl = shl nuw i32 1, %conv @@ -318,15 +318,15 @@ ; RV32I-NEXT: andi a0, a0, 1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbext_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bext a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbext_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: bext a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbext_i32: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bext a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbext_i32: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bext a0, a0, a1 +; RV32ZBS-NEXT: ret %and = and i32 %b, 31 %shr = lshr i32 %a, %and %and1 = and i32 %shr, 1 @@ -340,15 +340,15 @@ ; RV32I-NEXT: andi a0, a0, 1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbext_i32_no_mask: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bext a0, a0, a1 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbext_i32_no_mask: +; RV32B: # %bb.0: +; RV32B-NEXT: bext a0, a0, a1 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbext_i32_no_mask: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bext a0, a0, a1 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbext_i32_no_mask: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bext a0, a0, a1 +; RV32ZBS-NEXT: ret %shr = lshr i32 %a, %b %and1 = and i32 %shr, 1 ret i32 %and1 @@ -380,42 +380,42 @@ ; RV32I-NEXT: mv a1, zero ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbext_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: srl a0, a0, a2 -; RV32IB-NEXT: andi a2, a2, 63 -; RV32IB-NEXT: addi a3, zero, 31 -; RV32IB-NEXT: sub a3, a3, a2 -; RV32IB-NEXT: slli a4, a1, 1 -; RV32IB-NEXT: sll a3, a4, a3 -; RV32IB-NEXT: or a0, a0, a3 -; RV32IB-NEXT: addi a2, a2, -32 -; RV32IB-NEXT: srl a1, a1, a2 -; RV32IB-NEXT: slti a2, a2, 0 -; RV32IB-NEXT: cmov a0, a2, a0, a1 -; RV32IB-NEXT: andi a0, a0, 1 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret -; -; RV32IBS-LABEL: sbext_i64: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: andi a3, a2, 63 -; RV32IBS-NEXT: addi a4, a3, -32 -; RV32IBS-NEXT: bltz a4, .LBB12_2 -; RV32IBS-NEXT: # %bb.1: -; RV32IBS-NEXT: srl a0, a1, a4 -; RV32IBS-NEXT: j .LBB12_3 -; RV32IBS-NEXT: .LBB12_2: -; RV32IBS-NEXT: srl a0, a0, a2 -; RV32IBS-NEXT: addi a2, zero, 31 -; RV32IBS-NEXT: sub a2, a2, a3 -; RV32IBS-NEXT: slli a1, a1, 1 -; RV32IBS-NEXT: sll a1, a1, a2 -; RV32IBS-NEXT: or a0, a0, a1 -; RV32IBS-NEXT: .LBB12_3: -; RV32IBS-NEXT: andi a0, a0, 1 -; RV32IBS-NEXT: mv a1, zero -; RV32IBS-NEXT: ret +; RV32B-LABEL: sbext_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: srl a0, a0, a2 +; RV32B-NEXT: andi a2, a2, 63 +; RV32B-NEXT: addi a3, zero, 31 +; RV32B-NEXT: sub a3, a3, a2 +; RV32B-NEXT: slli a4, a1, 1 +; RV32B-NEXT: sll a3, a4, a3 +; RV32B-NEXT: or a0, a0, a3 +; RV32B-NEXT: addi a2, a2, -32 +; RV32B-NEXT: srl a1, a1, a2 +; RV32B-NEXT: slti a2, a2, 0 +; RV32B-NEXT: cmov a0, a2, a0, a1 +; RV32B-NEXT: andi a0, a0, 1 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret +; +; RV32ZBS-LABEL: sbext_i64: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: andi a3, a2, 63 +; RV32ZBS-NEXT: addi a4, a3, -32 +; RV32ZBS-NEXT: bltz a4, .LBB12_2 +; RV32ZBS-NEXT: # %bb.1: +; RV32ZBS-NEXT: srl a0, a1, a4 +; RV32ZBS-NEXT: j 
.LBB12_3 +; RV32ZBS-NEXT: .LBB12_2: +; RV32ZBS-NEXT: srl a0, a0, a2 +; RV32ZBS-NEXT: addi a2, zero, 31 +; RV32ZBS-NEXT: sub a2, a2, a3 +; RV32ZBS-NEXT: slli a1, a1, 1 +; RV32ZBS-NEXT: sll a1, a1, a2 +; RV32ZBS-NEXT: or a0, a0, a1 +; RV32ZBS-NEXT: .LBB12_3: +; RV32ZBS-NEXT: andi a0, a0, 1 +; RV32ZBS-NEXT: mv a1, zero +; RV32ZBS-NEXT: ret %conv = and i64 %b, 63 %shr = lshr i64 %a, %conv %and1 = and i64 %shr, 1 @@ -429,15 +429,15 @@ ; RV32I-NEXT: andi a0, a0, 1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbexti_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bexti a0, a0, 5 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbexti_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: bexti a0, a0, 5 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbexti_i32: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bexti a0, a0, 5 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbexti_i32: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bexti a0, a0, 5 +; RV32ZBS-NEXT: ret %shr = lshr i32 %a, 5 %and = and i32 %shr, 1 ret i32 %and @@ -451,17 +451,17 @@ ; RV32I-NEXT: mv a1, zero ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbexti_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bexti a0, a0, 5 -; RV32IB-NEXT: mv a1, zero -; RV32IB-NEXT: ret +; RV32B-LABEL: sbexti_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: bexti a0, a0, 5 +; RV32B-NEXT: mv a1, zero +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbexti_i64: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bexti a0, a0, 5 -; RV32IBS-NEXT: mv a1, zero -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbexti_i64: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bexti a0, a0, 5 +; RV32ZBS-NEXT: mv a1, zero +; RV32ZBS-NEXT: ret %shr = lshr i64 %a, 5 %and = and i64 %shr, 1 ret i64 %and @@ -473,15 +473,15 @@ ; RV32I-NEXT: andi a0, a0, -1025 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_10: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andi a0, a0, -1025 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_10: +; RV32B: # %bb.0: +; RV32B-NEXT: andi a0, a0, -1025 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_10: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: andi a0, a0, -1025 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_10: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: andi a0, a0, -1025 +; RV32ZBS-NEXT: ret %and = and i32 %a, -1025 ret i32 %and } @@ -494,15 +494,15 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_11: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclri a0, a0, 11 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_11: +; RV32B: # %bb.0: +; RV32B-NEXT: bclri a0, a0, 11 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_11: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bclri a0, a0, 11 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_11: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclri a0, a0, 11 +; RV32ZBS-NEXT: ret %and = and i32 %a, -2049 ret i32 %and } @@ -515,15 +515,15 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_30: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclri a0, a0, 30 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_30: +; RV32B: # %bb.0: +; RV32B-NEXT: bclri a0, a0, 30 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_30: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bclri a0, a0, 30 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_30: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclri a0, a0, 30 +; RV32ZBS-NEXT: ret %and = and i32 %a, -1073741825 ret i32 %and } @@ -536,15 +536,15 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_31: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclri a0, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_31: +; RV32B: # %bb.0: +; RV32B-NEXT: bclri a0, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_31: -; RV32IBS: # 
%bb.0: -; RV32IBS-NEXT: bclri a0, a0, 31 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_31: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclri a0, a0, 31 +; RV32ZBS-NEXT: ret %and = and i32 %a, -2147483649 ret i32 %and } @@ -557,17 +557,17 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_large0: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andi a0, a0, -256 -; RV32IB-NEXT: bclri a0, a0, 24 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_large0: +; RV32B: # %bb.0: +; RV32B-NEXT: andi a0, a0, -256 +; RV32B-NEXT: bclri a0, a0, 24 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_large0: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: andi a0, a0, -256 -; RV32IBS-NEXT: bclri a0, a0, 24 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_large0: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: andi a0, a0, -256 +; RV32ZBS-NEXT: bclri a0, a0, 24 +; RV32ZBS-NEXT: ret %and = and i32 %a, -16777472 ret i32 %and } @@ -580,17 +580,17 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_large1: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclri a0, a0, 16 -; RV32IB-NEXT: bclri a0, a0, 24 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_large1: +; RV32B: # %bb.0: +; RV32B-NEXT: bclri a0, a0, 16 +; RV32B-NEXT: bclri a0, a0, 24 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_large1: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bclri a0, a0, 16 -; RV32IBS-NEXT: bclri a0, a0, 24 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_large1: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclri a0, a0, 16 +; RV32ZBS-NEXT: bclri a0, a0, 24 +; RV32ZBS-NEXT: ret %and = and i32 %a, -16842753 ret i32 %and } @@ -603,17 +603,17 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_large2: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bclri a0, a0, 2 -; RV32IB-NEXT: bclri a0, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_large2: +; RV32B: # %bb.0: +; RV32B-NEXT: bclri a0, a0, 2 +; RV32B-NEXT: bclri a0, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_large2: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bclri a0, a0, 2 -; RV32IBS-NEXT: bclri a0, a0, 31 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_large2: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bclri a0, a0, 2 +; RV32ZBS-NEXT: bclri a0, a0, 31 +; RV32ZBS-NEXT: ret %2 = and i32 %0, 2147483643 ret i32 %2 } @@ -626,17 +626,17 @@ ; RV32I-NEXT: and a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbclri_i32_large3: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andi a0, a0, -6 -; RV32IB-NEXT: bclri a0, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbclri_i32_large3: +; RV32B: # %bb.0: +; RV32B-NEXT: andi a0, a0, -6 +; RV32B-NEXT: bclri a0, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbclri_i32_large3: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: andi a0, a0, -6 -; RV32IBS-NEXT: bclri a0, a0, 31 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbclri_i32_large3: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: andi a0, a0, -6 +; RV32ZBS-NEXT: bclri a0, a0, 31 +; RV32ZBS-NEXT: ret %2 = and i32 %0, 2147483642 ret i32 %2 } @@ -647,15 +647,15 @@ ; RV32I-NEXT: ori a0, a0, 1024 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbseti_i32_10: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ori a0, a0, 1024 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbseti_i32_10: +; RV32B: # %bb.0: +; RV32B-NEXT: ori a0, a0, 1024 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbseti_i32_10: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: ori a0, a0, 1024 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbseti_i32_10: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: ori a0, a0, 1024 +; RV32ZBS-NEXT: ret %or = or i32 %a, 1024 ret i32 %or } @@ -668,15 +668,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; 
RV32I-NEXT: ret ; -; RV32IB-LABEL: sbseti_i32_11: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bseti a0, a0, 11 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbseti_i32_11: +; RV32B: # %bb.0: +; RV32B-NEXT: bseti a0, a0, 11 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbseti_i32_11: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bseti a0, a0, 11 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbseti_i32_11: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bseti a0, a0, 11 +; RV32ZBS-NEXT: ret %or = or i32 %a, 2048 ret i32 %or } @@ -688,15 +688,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbseti_i32_30: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bseti a0, a0, 30 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbseti_i32_30: +; RV32B: # %bb.0: +; RV32B-NEXT: bseti a0, a0, 30 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbseti_i32_30: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bseti a0, a0, 30 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbseti_i32_30: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bseti a0, a0, 30 +; RV32ZBS-NEXT: ret %or = or i32 %a, 1073741824 ret i32 %or } @@ -708,15 +708,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbseti_i32_31: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bseti a0, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbseti_i32_31: +; RV32B: # %bb.0: +; RV32B-NEXT: bseti a0, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbseti_i32_31: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bseti a0, a0, 31 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbseti_i32_31: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bseti a0, a0, 31 +; RV32ZBS-NEXT: ret %or = or i32 %a, 2147483648 ret i32 %or } @@ -727,15 +727,15 @@ ; RV32I-NEXT: xori a0, a0, 1024 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbinvi_i32_10: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xori a0, a0, 1024 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbinvi_i32_10: +; RV32B: # %bb.0: +; RV32B-NEXT: xori a0, a0, 1024 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbinvi_i32_10: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: xori a0, a0, 1024 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbinvi_i32_10: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: xori a0, a0, 1024 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 1024 ret i32 %xor } @@ -748,15 +748,15 @@ ; RV32I-NEXT: xor a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbinvi_i32_11: -; RV32IB: # %bb.0: -; RV32IB-NEXT: binvi a0, a0, 11 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbinvi_i32_11: +; RV32B: # %bb.0: +; RV32B-NEXT: binvi a0, a0, 11 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbinvi_i32_11: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: binvi a0, a0, 11 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbinvi_i32_11: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: binvi a0, a0, 11 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 2048 ret i32 %xor } @@ -768,15 +768,15 @@ ; RV32I-NEXT: xor a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbinvi_i32_30: -; RV32IB: # %bb.0: -; RV32IB-NEXT: binvi a0, a0, 30 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbinvi_i32_30: +; RV32B: # %bb.0: +; RV32B-NEXT: binvi a0, a0, 30 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbinvi_i32_30: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: binvi a0, a0, 30 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbinvi_i32_30: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: binvi a0, a0, 30 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 1073741824 ret i32 %xor } @@ -788,15 +788,15 @@ ; RV32I-NEXT: xor a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: sbinvi_i32_31: -; RV32IB: # %bb.0: -; RV32IB-NEXT: binvi a0, a0, 31 -; RV32IB-NEXT: ret +; RV32B-LABEL: sbinvi_i32_31: +; RV32B: # %bb.0: +; RV32B-NEXT: binvi a0, a0, 31 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: sbinvi_i32_31: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: binvi a0, a0, 31 -; 
RV32IBS-NEXT: ret +; RV32ZBS-LABEL: sbinvi_i32_31: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: binvi a0, a0, 31 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 2147483648 ret i32 %xor } @@ -809,17 +809,17 @@ ; RV32I-NEXT: xor a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: xor_i32_4098: -; RV32IB: # %bb.0: -; RV32IB-NEXT: binvi a0, a0, 1 -; RV32IB-NEXT: binvi a0, a0, 12 -; RV32IB-NEXT: ret +; RV32B-LABEL: xor_i32_4098: +; RV32B: # %bb.0: +; RV32B-NEXT: binvi a0, a0, 1 +; RV32B-NEXT: binvi a0, a0, 12 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: xor_i32_4098: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: binvi a0, a0, 1 -; RV32IBS-NEXT: binvi a0, a0, 12 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: xor_i32_4098: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: binvi a0, a0, 1 +; RV32ZBS-NEXT: binvi a0, a0, 12 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 4098 ret i32 %xor } @@ -832,17 +832,17 @@ ; RV32I-NEXT: xor a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: xor_i32_4099: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xori a0, a0, 3 -; RV32IB-NEXT: binvi a0, a0, 12 -; RV32IB-NEXT: ret +; RV32B-LABEL: xor_i32_4099: +; RV32B: # %bb.0: +; RV32B-NEXT: xori a0, a0, 3 +; RV32B-NEXT: binvi a0, a0, 12 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: xor_i32_4099: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: xori a0, a0, 3 -; RV32IBS-NEXT: binvi a0, a0, 12 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: xor_i32_4099: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: xori a0, a0, 3 +; RV32ZBS-NEXT: binvi a0, a0, 12 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 4099 ret i32 %xor } @@ -853,15 +853,15 @@ ; RV32I-NEXT: xori a0, a0, 96 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: xor_i32_96: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xori a0, a0, 96 -; RV32IB-NEXT: ret +; RV32B-LABEL: xor_i32_96: +; RV32B: # %bb.0: +; RV32B-NEXT: xori a0, a0, 96 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: xor_i32_96: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: xori a0, a0, 96 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: xor_i32_96: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: xori a0, a0, 96 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 96 ret i32 %xor } @@ -874,17 +874,17 @@ ; RV32I-NEXT: xor a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: xor_i32_66901: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xori a0, a0, 1365 -; RV32IB-NEXT: binvi a0, a0, 16 -; RV32IB-NEXT: ret +; RV32B-LABEL: xor_i32_66901: +; RV32B: # %bb.0: +; RV32B-NEXT: xori a0, a0, 1365 +; RV32B-NEXT: binvi a0, a0, 16 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: xor_i32_66901: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: xori a0, a0, 1365 -; RV32IBS-NEXT: binvi a0, a0, 16 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: xor_i32_66901: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: xori a0, a0, 1365 +; RV32ZBS-NEXT: binvi a0, a0, 16 +; RV32ZBS-NEXT: ret %xor = xor i32 %a, 66901 ret i32 %xor } @@ -897,17 +897,17 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: or_i32_4098: -; RV32IB: # %bb.0: -; RV32IB-NEXT: bseti a0, a0, 1 -; RV32IB-NEXT: bseti a0, a0, 12 -; RV32IB-NEXT: ret +; RV32B-LABEL: or_i32_4098: +; RV32B: # %bb.0: +; RV32B-NEXT: bseti a0, a0, 1 +; RV32B-NEXT: bseti a0, a0, 12 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: or_i32_4098: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: bseti a0, a0, 1 -; RV32IBS-NEXT: bseti a0, a0, 12 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: or_i32_4098: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: bseti a0, a0, 1 +; RV32ZBS-NEXT: bseti a0, a0, 12 +; RV32ZBS-NEXT: ret %or = or i32 %a, 4098 ret i32 %or } @@ -920,17 +920,17 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: or_i32_4099: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ori a0, a0, 3 -; RV32IB-NEXT: bseti a0, a0, 12 -; RV32IB-NEXT: ret +; RV32B-LABEL: 
or_i32_4099: +; RV32B: # %bb.0: +; RV32B-NEXT: ori a0, a0, 3 +; RV32B-NEXT: bseti a0, a0, 12 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: or_i32_4099: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: ori a0, a0, 3 -; RV32IBS-NEXT: bseti a0, a0, 12 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: or_i32_4099: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: ori a0, a0, 3 +; RV32ZBS-NEXT: bseti a0, a0, 12 +; RV32ZBS-NEXT: ret %or = or i32 %a, 4099 ret i32 %or } @@ -941,15 +941,15 @@ ; RV32I-NEXT: ori a0, a0, 96 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: or_i32_96: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ori a0, a0, 96 -; RV32IB-NEXT: ret +; RV32B-LABEL: or_i32_96: +; RV32B: # %bb.0: +; RV32B-NEXT: ori a0, a0, 96 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: or_i32_96: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: ori a0, a0, 96 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: or_i32_96: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: ori a0, a0, 96 +; RV32ZBS-NEXT: ret %or = or i32 %a, 96 ret i32 %or } @@ -962,17 +962,17 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: or_i32_66901: -; RV32IB: # %bb.0: -; RV32IB-NEXT: ori a0, a0, 1365 -; RV32IB-NEXT: bseti a0, a0, 16 -; RV32IB-NEXT: ret +; RV32B-LABEL: or_i32_66901: +; RV32B: # %bb.0: +; RV32B-NEXT: ori a0, a0, 1365 +; RV32B-NEXT: bseti a0, a0, 16 +; RV32B-NEXT: ret ; -; RV32IBS-LABEL: or_i32_66901: -; RV32IBS: # %bb.0: -; RV32IBS-NEXT: ori a0, a0, 1365 -; RV32IBS-NEXT: bseti a0, a0, 16 -; RV32IBS-NEXT: ret +; RV32ZBS-LABEL: or_i32_66901: +; RV32ZBS: # %bb.0: +; RV32ZBS-NEXT: ori a0, a0, 1365 +; RV32ZBS-NEXT: bseti a0, a0, 16 +; RV32ZBS-NEXT: ret %or = or i32 %a, 66901 ret i32 %or } diff --git a/llvm/test/CodeGen/RISCV/rv32zbt.ll b/llvm/test/CodeGen/RISCV/rv32zbt.ll --- a/llvm/test/CodeGen/RISCV/rv32zbt.ll +++ b/llvm/test/CodeGen/RISCV/rv32zbt.ll @@ -2,9 +2,9 @@ ; RUN: llc -mtriple=riscv32 -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV32I ; RUN: llc -mtriple=riscv32 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IB +; RUN: | FileCheck %s -check-prefix=RV32B ; RUN: llc -mtriple=riscv32 -mattr=+experimental-zbt -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV32IBT +; RUN: | FileCheck %s -check-prefix=RV32ZBT define i32 @cmix_i32(i32 %a, i32 %b, i32 %c) nounwind { ; RV32I-LABEL: cmix_i32: @@ -15,15 +15,15 @@ ; RV32I-NEXT: or a0, a1, a0 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmix_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: cmix a0, a1, a0, a2 -; RV32IB-NEXT: ret +; RV32B-LABEL: cmix_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: cmix a0, a1, a0, a2 +; RV32B-NEXT: ret ; -; RV32IBT-LABEL: cmix_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: cmix a0, a1, a0, a2 -; RV32IBT-NEXT: ret +; RV32ZBT-LABEL: cmix_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: cmix a0, a1, a0, a2 +; RV32ZBT-NEXT: ret %and = and i32 %b, %a %neg = xor i32 %b, -1 %and1 = and i32 %neg, %c @@ -44,17 +44,17 @@ ; RV32I-NEXT: or a1, a3, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmix_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: cmix a0, a2, a0, a4 -; RV32IB-NEXT: cmix a1, a3, a1, a5 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmix_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: cmix a0, a2, a0, a4 -; RV32IBT-NEXT: cmix a1, a3, a1, a5 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmix_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: cmix a0, a2, a0, a4 +; RV32B-NEXT: cmix a1, a3, a1, a5 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmix_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: cmix a0, a2, a0, a4 +; RV32ZBT-NEXT: cmix a1, a3, a1, a5 +; RV32ZBT-NEXT: ret %and = and i64 %b, %a %neg = xor i64 %b, -1 %and1 = and i64 %neg, 
%c @@ -72,15 +72,15 @@ ; RV32I-NEXT: mv a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: cmov a0, a1, a0, a2 -; RV32IB-NEXT: ret +; RV32B-LABEL: cmov_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: cmov a0, a1, a0, a2 +; RV32B-NEXT: ret ; -; RV32IBT-LABEL: cmov_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: cmov a0, a1, a0, a2 -; RV32IBT-NEXT: ret +; RV32ZBT-LABEL: cmov_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: cmov a0, a1, a0, a2 +; RV32ZBT-NEXT: ret %tobool.not = icmp eq i32 %b, 0 %cond = select i1 %tobool.not, i32 %c, i32 %a ret i32 %cond @@ -95,17 +95,17 @@ ; RV32I-NEXT: .LBB3_2: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_sle_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: slt a1, a2, a1 -; RV32IB-NEXT: cmov a0, a1, a3, a0 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_sle_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: slt a1, a2, a1 -; RV32IBT-NEXT: cmov a0, a1, a3, a0 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_sle_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: slt a1, a2, a1 +; RV32B-NEXT: cmov a0, a1, a3, a0 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_sle_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: slt a1, a2, a1 +; RV32ZBT-NEXT: cmov a0, a1, a3, a0 +; RV32ZBT-NEXT: ret %tobool = icmp sle i32 %b, %c %cond = select i1 %tobool, i32 %a, i32 %d ret i32 %cond @@ -120,17 +120,17 @@ ; RV32I-NEXT: .LBB4_2: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_sge_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: slt a1, a1, a2 -; RV32IB-NEXT: cmov a0, a1, a3, a0 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_sge_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: slt a1, a1, a2 -; RV32IBT-NEXT: cmov a0, a1, a3, a0 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_sge_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: slt a1, a1, a2 +; RV32B-NEXT: cmov a0, a1, a3, a0 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_sge_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: slt a1, a1, a2 +; RV32ZBT-NEXT: cmov a0, a1, a3, a0 +; RV32ZBT-NEXT: ret %tobool = icmp sge i32 %b, %c %cond = select i1 %tobool, i32 %a, i32 %d ret i32 %cond @@ -145,17 +145,17 @@ ; RV32I-NEXT: .LBB5_2: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_ule_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sltu a1, a2, a1 -; RV32IB-NEXT: cmov a0, a1, a3, a0 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_ule_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: sltu a1, a2, a1 -; RV32IBT-NEXT: cmov a0, a1, a3, a0 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_ule_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: sltu a1, a2, a1 +; RV32B-NEXT: cmov a0, a1, a3, a0 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_ule_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: sltu a1, a2, a1 +; RV32ZBT-NEXT: cmov a0, a1, a3, a0 +; RV32ZBT-NEXT: ret %tobool = icmp ule i32 %b, %c %cond = select i1 %tobool, i32 %a, i32 %d ret i32 %cond @@ -170,17 +170,17 @@ ; RV32I-NEXT: .LBB6_2: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_uge_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sltu a1, a1, a2 -; RV32IB-NEXT: cmov a0, a1, a3, a0 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_uge_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: sltu a1, a1, a2 -; RV32IBT-NEXT: cmov a0, a1, a3, a0 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_uge_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: sltu a1, a1, a2 +; RV32B-NEXT: cmov a0, a1, a3, a0 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_uge_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: sltu a1, a1, a2 +; RV32ZBT-NEXT: cmov a0, a1, a3, a0 +; RV32ZBT-NEXT: ret %tobool = icmp uge i32 %b, %c %cond = select i1 %tobool, i32 %a, i32 %d ret i32 %cond @@ -199,19 +199,19 @@ ; RV32I-NEXT: mv a1, a5 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_i64: -; RV32IB: # %bb.0: -; 
RV32IB-NEXT: or a2, a2, a3 -; RV32IB-NEXT: cmov a0, a2, a0, a4 -; RV32IB-NEXT: cmov a1, a2, a1, a5 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: or a2, a2, a3 -; RV32IBT-NEXT: cmov a0, a2, a0, a4 -; RV32IBT-NEXT: cmov a1, a2, a1, a5 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: or a2, a2, a3 +; RV32B-NEXT: cmov a0, a2, a0, a4 +; RV32B-NEXT: cmov a1, a2, a1, a5 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: or a2, a2, a3 +; RV32ZBT-NEXT: cmov a0, a2, a0, a4 +; RV32ZBT-NEXT: cmov a1, a2, a1, a5 +; RV32ZBT-NEXT: ret %tobool.not = icmp eq i64 %b, 0 %cond = select i1 %tobool.not, i64 %c, i64 %a ret i64 %cond @@ -236,29 +236,29 @@ ; RV32I-NEXT: .LBB8_4: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_sle_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xor t0, a3, a5 -; RV32IB-NEXT: sltu a2, a4, a2 -; RV32IB-NEXT: xori a2, a2, 1 -; RV32IB-NEXT: slt a3, a5, a3 -; RV32IB-NEXT: xori a3, a3, 1 -; RV32IB-NEXT: cmov a2, t0, a3, a2 -; RV32IB-NEXT: cmov a0, a2, a0, a6 -; RV32IB-NEXT: cmov a1, a2, a1, a7 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_sle_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: xor t0, a3, a5 -; RV32IBT-NEXT: sltu a2, a4, a2 -; RV32IBT-NEXT: xori a2, a2, 1 -; RV32IBT-NEXT: slt a3, a5, a3 -; RV32IBT-NEXT: xori a3, a3, 1 -; RV32IBT-NEXT: cmov a2, t0, a3, a2 -; RV32IBT-NEXT: cmov a0, a2, a0, a6 -; RV32IBT-NEXT: cmov a1, a2, a1, a7 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_sle_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: xor t0, a3, a5 +; RV32B-NEXT: sltu a2, a4, a2 +; RV32B-NEXT: xori a2, a2, 1 +; RV32B-NEXT: slt a3, a5, a3 +; RV32B-NEXT: xori a3, a3, 1 +; RV32B-NEXT: cmov a2, t0, a3, a2 +; RV32B-NEXT: cmov a0, a2, a0, a6 +; RV32B-NEXT: cmov a1, a2, a1, a7 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_sle_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: xor t0, a3, a5 +; RV32ZBT-NEXT: sltu a2, a4, a2 +; RV32ZBT-NEXT: xori a2, a2, 1 +; RV32ZBT-NEXT: slt a3, a5, a3 +; RV32ZBT-NEXT: xori a3, a3, 1 +; RV32ZBT-NEXT: cmov a2, t0, a3, a2 +; RV32ZBT-NEXT: cmov a0, a2, a0, a6 +; RV32ZBT-NEXT: cmov a1, a2, a1, a7 +; RV32ZBT-NEXT: ret %tobool = icmp sle i64 %b, %c %cond = select i1 %tobool, i64 %a, i64 %d ret i64 %cond @@ -283,29 +283,29 @@ ; RV32I-NEXT: .LBB9_4: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_sge_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xor t0, a3, a5 -; RV32IB-NEXT: sltu a2, a2, a4 -; RV32IB-NEXT: xori a2, a2, 1 -; RV32IB-NEXT: slt a3, a3, a5 -; RV32IB-NEXT: xori a3, a3, 1 -; RV32IB-NEXT: cmov a2, t0, a3, a2 -; RV32IB-NEXT: cmov a0, a2, a0, a6 -; RV32IB-NEXT: cmov a1, a2, a1, a7 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_sge_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: xor t0, a3, a5 -; RV32IBT-NEXT: sltu a2, a2, a4 -; RV32IBT-NEXT: xori a2, a2, 1 -; RV32IBT-NEXT: slt a3, a3, a5 -; RV32IBT-NEXT: xori a3, a3, 1 -; RV32IBT-NEXT: cmov a2, t0, a3, a2 -; RV32IBT-NEXT: cmov a0, a2, a0, a6 -; RV32IBT-NEXT: cmov a1, a2, a1, a7 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_sge_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: xor t0, a3, a5 +; RV32B-NEXT: sltu a2, a2, a4 +; RV32B-NEXT: xori a2, a2, 1 +; RV32B-NEXT: slt a3, a3, a5 +; RV32B-NEXT: xori a3, a3, 1 +; RV32B-NEXT: cmov a2, t0, a3, a2 +; RV32B-NEXT: cmov a0, a2, a0, a6 +; RV32B-NEXT: cmov a1, a2, a1, a7 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_sge_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: xor t0, a3, a5 +; RV32ZBT-NEXT: sltu a2, a2, a4 +; RV32ZBT-NEXT: xori a2, a2, 1 +; RV32ZBT-NEXT: slt a3, a3, a5 +; RV32ZBT-NEXT: xori a3, a3, 1 +; RV32ZBT-NEXT: cmov a2, 
t0, a3, a2 +; RV32ZBT-NEXT: cmov a0, a2, a0, a6 +; RV32ZBT-NEXT: cmov a1, a2, a1, a7 +; RV32ZBT-NEXT: ret %tobool = icmp sge i64 %b, %c %cond = select i1 %tobool, i64 %a, i64 %d ret i64 %cond @@ -330,29 +330,29 @@ ; RV32I-NEXT: .LBB10_4: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_ule_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xor t0, a3, a5 -; RV32IB-NEXT: sltu a2, a4, a2 -; RV32IB-NEXT: xori a2, a2, 1 -; RV32IB-NEXT: sltu a3, a5, a3 -; RV32IB-NEXT: xori a3, a3, 1 -; RV32IB-NEXT: cmov a2, t0, a3, a2 -; RV32IB-NEXT: cmov a0, a2, a0, a6 -; RV32IB-NEXT: cmov a1, a2, a1, a7 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_ule_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: xor t0, a3, a5 -; RV32IBT-NEXT: sltu a2, a4, a2 -; RV32IBT-NEXT: xori a2, a2, 1 -; RV32IBT-NEXT: sltu a3, a5, a3 -; RV32IBT-NEXT: xori a3, a3, 1 -; RV32IBT-NEXT: cmov a2, t0, a3, a2 -; RV32IBT-NEXT: cmov a0, a2, a0, a6 -; RV32IBT-NEXT: cmov a1, a2, a1, a7 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_ule_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: xor t0, a3, a5 +; RV32B-NEXT: sltu a2, a4, a2 +; RV32B-NEXT: xori a2, a2, 1 +; RV32B-NEXT: sltu a3, a5, a3 +; RV32B-NEXT: xori a3, a3, 1 +; RV32B-NEXT: cmov a2, t0, a3, a2 +; RV32B-NEXT: cmov a0, a2, a0, a6 +; RV32B-NEXT: cmov a1, a2, a1, a7 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_ule_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: xor t0, a3, a5 +; RV32ZBT-NEXT: sltu a2, a4, a2 +; RV32ZBT-NEXT: xori a2, a2, 1 +; RV32ZBT-NEXT: sltu a3, a5, a3 +; RV32ZBT-NEXT: xori a3, a3, 1 +; RV32ZBT-NEXT: cmov a2, t0, a3, a2 +; RV32ZBT-NEXT: cmov a0, a2, a0, a6 +; RV32ZBT-NEXT: cmov a1, a2, a1, a7 +; RV32ZBT-NEXT: ret %tobool = icmp ule i64 %b, %c %cond = select i1 %tobool, i64 %a, i64 %d ret i64 %cond @@ -377,29 +377,29 @@ ; RV32I-NEXT: .LBB11_4: ; RV32I-NEXT: ret ; -; RV32IB-LABEL: cmov_uge_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: xor t0, a3, a5 -; RV32IB-NEXT: sltu a2, a2, a4 -; RV32IB-NEXT: xori a2, a2, 1 -; RV32IB-NEXT: sltu a3, a3, a5 -; RV32IB-NEXT: xori a3, a3, 1 -; RV32IB-NEXT: cmov a2, t0, a3, a2 -; RV32IB-NEXT: cmov a0, a2, a0, a6 -; RV32IB-NEXT: cmov a1, a2, a1, a7 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: cmov_uge_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: xor t0, a3, a5 -; RV32IBT-NEXT: sltu a2, a2, a4 -; RV32IBT-NEXT: xori a2, a2, 1 -; RV32IBT-NEXT: sltu a3, a3, a5 -; RV32IBT-NEXT: xori a3, a3, 1 -; RV32IBT-NEXT: cmov a2, t0, a3, a2 -; RV32IBT-NEXT: cmov a0, a2, a0, a6 -; RV32IBT-NEXT: cmov a1, a2, a1, a7 -; RV32IBT-NEXT: ret +; RV32B-LABEL: cmov_uge_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: xor t0, a3, a5 +; RV32B-NEXT: sltu a2, a2, a4 +; RV32B-NEXT: xori a2, a2, 1 +; RV32B-NEXT: sltu a3, a3, a5 +; RV32B-NEXT: xori a3, a3, 1 +; RV32B-NEXT: cmov a2, t0, a3, a2 +; RV32B-NEXT: cmov a0, a2, a0, a6 +; RV32B-NEXT: cmov a1, a2, a1, a7 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: cmov_uge_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: xor t0, a3, a5 +; RV32ZBT-NEXT: sltu a2, a2, a4 +; RV32ZBT-NEXT: xori a2, a2, 1 +; RV32ZBT-NEXT: sltu a3, a3, a5 +; RV32ZBT-NEXT: xori a3, a3, 1 +; RV32ZBT-NEXT: cmov a2, t0, a3, a2 +; RV32ZBT-NEXT: cmov a0, a2, a0, a6 +; RV32ZBT-NEXT: cmov a1, a2, a1, a7 +; RV32ZBT-NEXT: ret %tobool = icmp uge i64 %b, %c %cond = select i1 %tobool, i64 %a, i64 %d ret i64 %cond @@ -417,17 +417,17 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: fshl_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: andi a2, a2, 31 -; RV32IB-NEXT: fsl a0, a0, a1, a2 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: fshl_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: andi a2, a2, 31 -; RV32IBT-NEXT: fsl a0, a0, a1, a2 -; 
RV32IBT-NEXT: ret +; RV32B-LABEL: fshl_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: andi a2, a2, 31 +; RV32B-NEXT: fsl a0, a0, a1, a2 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: fshl_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: andi a2, a2, 31 +; RV32ZBT-NEXT: fsl a0, a0, a1, a2 +; RV32ZBT-NEXT: ret %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 %c) ret i32 %1 } @@ -485,78 +485,78 @@ ; RV32I-NEXT: mv a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: fshl_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: sll a7, a1, a4 -; RV32IB-NEXT: andi a5, a4, 63 -; RV32IB-NEXT: addi a6, zero, 31 -; RV32IB-NEXT: sub t0, a6, a5 -; RV32IB-NEXT: srli a1, a0, 1 -; RV32IB-NEXT: srl a1, a1, t0 -; RV32IB-NEXT: or a7, a7, a1 -; RV32IB-NEXT: addi t1, a5, -32 -; RV32IB-NEXT: sll t0, a0, t1 -; RV32IB-NEXT: slti a1, t1, 0 -; RV32IB-NEXT: cmov t0, a1, a7, t0 -; RV32IB-NEXT: not a7, a4 -; RV32IB-NEXT: srli t4, a3, 1 -; RV32IB-NEXT: srl t2, t4, a7 -; RV32IB-NEXT: addi a1, zero, 63 -; RV32IB-NEXT: andn t3, a1, a4 -; RV32IB-NEXT: addi a5, t3, -32 -; RV32IB-NEXT: srai a1, a5, 31 -; RV32IB-NEXT: and a1, a1, t2 -; RV32IB-NEXT: or a1, t0, a1 -; RV32IB-NEXT: fsri a2, a2, a3, 1 -; RV32IB-NEXT: srl a7, a2, a7 -; RV32IB-NEXT: sub a3, a6, t3 -; RV32IB-NEXT: slli a2, t4, 1 -; RV32IB-NEXT: sll a2, a2, a3 -; RV32IB-NEXT: or a2, a7, a2 -; RV32IB-NEXT: srl a3, t4, a5 -; RV32IB-NEXT: slti a5, a5, 0 -; RV32IB-NEXT: cmov a2, a5, a2, a3 -; RV32IB-NEXT: sll a0, a0, a4 -; RV32IB-NEXT: srai a3, t1, 31 -; RV32IB-NEXT: and a0, a3, a0 -; RV32IB-NEXT: or a0, a0, a2 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: fshl_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: sll a7, a1, a4 -; RV32IBT-NEXT: andi a5, a4, 63 -; RV32IBT-NEXT: addi a6, zero, 31 -; RV32IBT-NEXT: sub t0, a6, a5 -; RV32IBT-NEXT: srli a1, a0, 1 -; RV32IBT-NEXT: srl a1, a1, t0 -; RV32IBT-NEXT: or a7, a7, a1 -; RV32IBT-NEXT: addi t1, a5, -32 -; RV32IBT-NEXT: sll t0, a0, t1 -; RV32IBT-NEXT: slti a1, t1, 0 -; RV32IBT-NEXT: cmov t0, a1, a7, t0 -; RV32IBT-NEXT: not a5, a4 -; RV32IBT-NEXT: srli a7, a3, 1 -; RV32IBT-NEXT: srl t4, a7, a5 -; RV32IBT-NEXT: andi t2, a5, 63 -; RV32IBT-NEXT: addi t3, t2, -32 -; RV32IBT-NEXT: srai a1, t3, 31 -; RV32IBT-NEXT: and a1, a1, t4 -; RV32IBT-NEXT: or a1, t0, a1 -; RV32IBT-NEXT: fsri a2, a2, a3, 1 -; RV32IBT-NEXT: srl a2, a2, a5 -; RV32IBT-NEXT: sub a3, a6, t2 -; RV32IBT-NEXT: slli a5, a7, 1 -; RV32IBT-NEXT: sll a3, a5, a3 -; RV32IBT-NEXT: or a2, a2, a3 -; RV32IBT-NEXT: srl a3, a7, t3 -; RV32IBT-NEXT: slti a5, t3, 0 -; RV32IBT-NEXT: cmov a2, a5, a2, a3 -; RV32IBT-NEXT: sll a0, a0, a4 -; RV32IBT-NEXT: srai a3, t1, 31 -; RV32IBT-NEXT: and a0, a3, a0 -; RV32IBT-NEXT: or a0, a0, a2 -; RV32IBT-NEXT: ret +; RV32B-LABEL: fshl_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: sll a7, a1, a4 +; RV32B-NEXT: andi a5, a4, 63 +; RV32B-NEXT: addi a6, zero, 31 +; RV32B-NEXT: sub t0, a6, a5 +; RV32B-NEXT: srli a1, a0, 1 +; RV32B-NEXT: srl a1, a1, t0 +; RV32B-NEXT: or a7, a7, a1 +; RV32B-NEXT: addi t1, a5, -32 +; RV32B-NEXT: sll t0, a0, t1 +; RV32B-NEXT: slti a1, t1, 0 +; RV32B-NEXT: cmov t0, a1, a7, t0 +; RV32B-NEXT: not a7, a4 +; RV32B-NEXT: srli t4, a3, 1 +; RV32B-NEXT: srl t2, t4, a7 +; RV32B-NEXT: addi a1, zero, 63 +; RV32B-NEXT: andn t3, a1, a4 +; RV32B-NEXT: addi a5, t3, -32 +; RV32B-NEXT: srai a1, a5, 31 +; RV32B-NEXT: and a1, a1, t2 +; RV32B-NEXT: or a1, t0, a1 +; RV32B-NEXT: fsri a2, a2, a3, 1 +; RV32B-NEXT: srl a7, a2, a7 +; RV32B-NEXT: sub a3, a6, t3 +; RV32B-NEXT: slli a2, t4, 1 +; RV32B-NEXT: sll a2, a2, a3 +; RV32B-NEXT: or a2, a7, a2 +; RV32B-NEXT: srl a3, t4, a5 +; RV32B-NEXT: slti a5, a5, 0 +; 
RV32B-NEXT: cmov a2, a5, a2, a3
+; RV32B-NEXT: sll a0, a0, a4
+; RV32B-NEXT: srai a3, t1, 31
+; RV32B-NEXT: and a0, a3, a0
+; RV32B-NEXT: or a0, a0, a2
+; RV32B-NEXT: ret
+;
+; RV32ZBT-LABEL: fshl_i64:
+; RV32ZBT: # %bb.0:
+; RV32ZBT-NEXT: sll a7, a1, a4
+; RV32ZBT-NEXT: andi a5, a4, 63
+; RV32ZBT-NEXT: addi a6, zero, 31
+; RV32ZBT-NEXT: sub t0, a6, a5
+; RV32ZBT-NEXT: srli a1, a0, 1
+; RV32ZBT-NEXT: srl a1, a1, t0
+; RV32ZBT-NEXT: or a7, a7, a1
+; RV32ZBT-NEXT: addi t1, a5, -32
+; RV32ZBT-NEXT: sll t0, a0, t1
+; RV32ZBT-NEXT: slti a1, t1, 0
+; RV32ZBT-NEXT: cmov t0, a1, a7, t0
+; RV32ZBT-NEXT: not a5, a4
+; RV32ZBT-NEXT: srli a7, a3, 1
+; RV32ZBT-NEXT: srl t4, a7, a5
+; RV32ZBT-NEXT: andi t2, a5, 63
+; RV32ZBT-NEXT: addi t3, t2, -32
+; RV32ZBT-NEXT: srai a1, t3, 31
+; RV32ZBT-NEXT: and a1, a1, t4
+; RV32ZBT-NEXT: or a1, t0, a1
+; RV32ZBT-NEXT: fsri a2, a2, a3, 1
+; RV32ZBT-NEXT: srl a2, a2, a5
+; RV32ZBT-NEXT: sub a3, a6, t2
+; RV32ZBT-NEXT: slli a5, a7, 1
+; RV32ZBT-NEXT: sll a3, a5, a3
+; RV32ZBT-NEXT: or a2, a2, a3
+; RV32ZBT-NEXT: srl a3, a7, t3
+; RV32ZBT-NEXT: slti a5, t3, 0
+; RV32ZBT-NEXT: cmov a2, a5, a2, a3
+; RV32ZBT-NEXT: sll a0, a0, a4
+; RV32ZBT-NEXT: srai a3, t1, 31
+; RV32ZBT-NEXT: and a0, a3, a0
+; RV32ZBT-NEXT: or a0, a0, a2
+; RV32ZBT-NEXT: ret
 %1 = tail call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 %c)
 ret i64 %1
 }
@@ -573,17 +573,17 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: fshr_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: andi a2, a2, 31
-; RV32IB-NEXT: fsr a0, a1, a0, a2
-; RV32IB-NEXT: ret
-;
-; RV32IBT-LABEL: fshr_i32:
-; RV32IBT: # %bb.0:
-; RV32IBT-NEXT: andi a2, a2, 31
-; RV32IBT-NEXT: fsr a0, a1, a0, a2
-; RV32IBT-NEXT: ret
+; RV32B-LABEL: fshr_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: andi a2, a2, 31
+; RV32B-NEXT: fsr a0, a1, a0, a2
+; RV32B-NEXT: ret
+;
+; RV32ZBT-LABEL: fshr_i32:
+; RV32ZBT: # %bb.0:
+; RV32ZBT-NEXT: andi a2, a2, 31
+; RV32ZBT-NEXT: fsr a0, a1, a0, a2
+; RV32ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 %c)
 ret i32 %1
 }
@@ -643,82 +643,82 @@
 ; RV32I-NEXT: .LBB15_7:
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: fshr_i64:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: srl a7, a2, a4
-; RV32IB-NEXT: andi a5, a4, 63
-; RV32IB-NEXT: addi a6, zero, 31
-; RV32IB-NEXT: sub t0, a6, a5
-; RV32IB-NEXT: slli a2, a3, 1
-; RV32IB-NEXT: sll a2, a2, t0
-; RV32IB-NEXT: or a7, a7, a2
-; RV32IB-NEXT: addi t2, a5, -32
-; RV32IB-NEXT: srl t0, a3, t2
-; RV32IB-NEXT: slti a2, t2, 0
-; RV32IB-NEXT: cmov a7, a2, a7, t0
-; RV32IB-NEXT: not t3, a4
-; RV32IB-NEXT: slli t0, a0, 1
-; RV32IB-NEXT: sll t1, t0, t3
-; RV32IB-NEXT: addi a5, zero, 63
-; RV32IB-NEXT: andn t4, a5, a4
-; RV32IB-NEXT: addi a2, t4, -32
-; RV32IB-NEXT: srai a5, a2, 31
-; RV32IB-NEXT: and a5, a5, t1
-; RV32IB-NEXT: or a7, a5, a7
-; RV32IB-NEXT: fsri a1, a0, a1, 31
-; RV32IB-NEXT: sll a1, a1, t3
-; RV32IB-NEXT: sub a5, a6, t4
-; RV32IB-NEXT: bclri a0, a0, 31
-; RV32IB-NEXT: srl a0, a0, a5
-; RV32IB-NEXT: or a0, a1, a0
-; RV32IB-NEXT: sll a1, t0, a2
-; RV32IB-NEXT: slti a2, a2, 0
-; RV32IB-NEXT: cmov a0, a2, a0, a1
-; RV32IB-NEXT: srl a1, a3, a4
-; RV32IB-NEXT: srai a2, t2, 31
-; RV32IB-NEXT: and a1, a2, a1
-; RV32IB-NEXT: or a1, a0, a1
-; RV32IB-NEXT: mv a0, a7
-; RV32IB-NEXT: ret
-;
-; RV32IBT-LABEL: fshr_i64:
-; RV32IBT: # %bb.0:
-; RV32IBT-NEXT: srl a7, a2, a4
-; RV32IBT-NEXT: andi a5, a4, 63
-; RV32IBT-NEXT: addi a6, zero, 31
-; RV32IBT-NEXT: sub t0, a6, a5
-; RV32IBT-NEXT: slli a2, a3, 1
-; RV32IBT-NEXT: sll a2, a2, t0
-; RV32IBT-NEXT: or a7, a7, a2
-; RV32IBT-NEXT: addi t2, a5, -32
-; RV32IBT-NEXT: srl t0, a3, t2
-; RV32IBT-NEXT: slti a2, t2, 0
-; RV32IBT-NEXT: cmov a7, a2, a7, t0
-; RV32IBT-NEXT: not t4, a4
-; RV32IBT-NEXT: slli t0, a0, 1
-; RV32IBT-NEXT: sll t1, t0, t4
-; RV32IBT-NEXT: andi t3, t4, 63
-; RV32IBT-NEXT: addi a5, t3, -32
-; RV32IBT-NEXT: srai a2, a5, 31
-; RV32IBT-NEXT: and a2, a2, t1
-; RV32IBT-NEXT: or a7, a2, a7
-; RV32IBT-NEXT: lui a2, 524288
-; RV32IBT-NEXT: addi a2, a2, -1
-; RV32IBT-NEXT: and t1, a0, a2
-; RV32IBT-NEXT: sub a2, a6, t3
-; RV32IBT-NEXT: srl a2, t1, a2
-; RV32IBT-NEXT: fsri a0, a0, a1, 31
-; RV32IBT-NEXT: sll a0, a0, t4
-; RV32IBT-NEXT: or a0, a0, a2
-; RV32IBT-NEXT: sll a1, t0, a5
-; RV32IBT-NEXT: slti a2, a5, 0
-; RV32IBT-NEXT: cmov a0, a2, a0, a1
-; RV32IBT-NEXT: srl a1, a3, a4
-; RV32IBT-NEXT: srai a2, t2, 31
-; RV32IBT-NEXT: and a1, a2, a1
-; RV32IBT-NEXT: or a1, a0, a1
-; RV32IBT-NEXT: mv a0, a7
-; RV32IBT-NEXT: ret
+; RV32B-LABEL: fshr_i64:
+; RV32B: # %bb.0:
+; RV32B-NEXT: srl a7, a2, a4
+; RV32B-NEXT: andi a5, a4, 63
+; RV32B-NEXT: addi a6, zero, 31
+; RV32B-NEXT: sub t0, a6, a5
+; RV32B-NEXT: slli a2, a3, 1
+; RV32B-NEXT: sll a2, a2, t0
+; RV32B-NEXT: or a7, a7, a2
+; RV32B-NEXT: addi t2, a5, -32
+; RV32B-NEXT: srl t0, a3, t2
+; RV32B-NEXT: slti a2, t2, 0
+; RV32B-NEXT: cmov a7, a2, a7, t0
+; RV32B-NEXT: not t3, a4
+; RV32B-NEXT: slli t0, a0, 1
+; RV32B-NEXT: sll t1, t0, t3
+; RV32B-NEXT: addi a5, zero, 63
+; RV32B-NEXT: andn t4, a5, a4
+; RV32B-NEXT: addi a2, t4, -32
+; RV32B-NEXT: srai a5, a2, 31
+; RV32B-NEXT: and a5, a5, t1
+; RV32B-NEXT: or a7, a5, a7
+; RV32B-NEXT: fsri a1, a0, a1, 31
+; RV32B-NEXT: sll a1, a1, t3
+; RV32B-NEXT: sub a5, a6, t4
+; RV32B-NEXT: bclri a0, a0, 31
+; RV32B-NEXT: srl a0, a0, a5
+; RV32B-NEXT: or a0, a1, a0
+; RV32B-NEXT: sll a1, t0, a2
+; RV32B-NEXT: slti a2, a2, 0
+; RV32B-NEXT: cmov a0, a2, a0, a1
+; RV32B-NEXT: srl a1, a3, a4
+; RV32B-NEXT: srai a2, t2, 31
+; RV32B-NEXT: and a1, a2, a1
+; RV32B-NEXT: or a1, a0, a1
+; RV32B-NEXT: mv a0, a7
+; RV32B-NEXT: ret
+;
+; RV32ZBT-LABEL: fshr_i64:
+; RV32ZBT: # %bb.0:
+; RV32ZBT-NEXT: srl a7, a2, a4
+; RV32ZBT-NEXT: andi a5, a4, 63
+; RV32ZBT-NEXT: addi a6, zero, 31
+; RV32ZBT-NEXT: sub t0, a6, a5
+; RV32ZBT-NEXT: slli a2, a3, 1
+; RV32ZBT-NEXT: sll a2, a2, t0
+; RV32ZBT-NEXT: or a7, a7, a2
+; RV32ZBT-NEXT: addi t2, a5, -32
+; RV32ZBT-NEXT: srl t0, a3, t2
+; RV32ZBT-NEXT: slti a2, t2, 0
+; RV32ZBT-NEXT: cmov a7, a2, a7, t0
+; RV32ZBT-NEXT: not t4, a4
+; RV32ZBT-NEXT: slli t0, a0, 1
+; RV32ZBT-NEXT: sll t1, t0, t4
+; RV32ZBT-NEXT: andi t3, t4, 63
+; RV32ZBT-NEXT: addi a5, t3, -32
+; RV32ZBT-NEXT: srai a2, a5, 31
+; RV32ZBT-NEXT: and a2, a2, t1
+; RV32ZBT-NEXT: or a7, a2, a7
+; RV32ZBT-NEXT: lui a2, 524288
+; RV32ZBT-NEXT: addi a2, a2, -1
+; RV32ZBT-NEXT: and t1, a0, a2
+; RV32ZBT-NEXT: sub a2, a6, t3
+; RV32ZBT-NEXT: srl a2, t1, a2
+; RV32ZBT-NEXT: fsri a0, a0, a1, 31
+; RV32ZBT-NEXT: sll a0, a0, t4
+; RV32ZBT-NEXT: or a0, a0, a2
+; RV32ZBT-NEXT: sll a1, t0, a5
+; RV32ZBT-NEXT: slti a2, a5, 0
+; RV32ZBT-NEXT: cmov a0, a2, a0, a1
+; RV32ZBT-NEXT: srl a1, a3, a4
+; RV32ZBT-NEXT: srai a2, t2, 31
+; RV32ZBT-NEXT: and a1, a2, a1
+; RV32ZBT-NEXT: or a1, a0, a1
+; RV32ZBT-NEXT: mv a0, a7
+; RV32ZBT-NEXT: ret
 %1 = tail call i64 @llvm.fshr.i64(i64 %a, i64 %b, i64 %c)
 ret i64 %1
 }
@@ -731,15 +731,15 @@
 ; RV32I-NEXT: or a0, a0, a1
 ; RV32I-NEXT: ret
 ;
-; RV32IB-LABEL: fshri_i32:
-; RV32IB: # %bb.0:
-; RV32IB-NEXT: fsri a0, a1, a0, 5
-; RV32IB-NEXT: ret
+; RV32B-LABEL: fshri_i32:
+; RV32B: # %bb.0:
+; RV32B-NEXT: fsri a0, a1, a0, 5
+; RV32B-NEXT: ret
; -; RV32IBT-LABEL: fshri_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: fsri a0, a1, a0, 5 -; RV32IBT-NEXT: ret +; RV32ZBT-LABEL: fshri_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: fsri a0, a1, a0, 5 +; RV32ZBT-NEXT: ret %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 5) ret i32 %1 } @@ -756,19 +756,19 @@ ; RV32I-NEXT: mv a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: fshri_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: fsri a2, a2, a3, 5 -; RV32IB-NEXT: fsri a1, a3, a0, 5 -; RV32IB-NEXT: mv a0, a2 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: fshri_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: fsri a2, a2, a3, 5 -; RV32IBT-NEXT: fsri a1, a3, a0, 5 -; RV32IBT-NEXT: mv a0, a2 -; RV32IBT-NEXT: ret +; RV32B-LABEL: fshri_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: fsri a2, a2, a3, 5 +; RV32B-NEXT: fsri a1, a3, a0, 5 +; RV32B-NEXT: mv a0, a2 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: fshri_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: fsri a2, a2, a3, 5 +; RV32ZBT-NEXT: fsri a1, a3, a0, 5 +; RV32ZBT-NEXT: mv a0, a2 +; RV32ZBT-NEXT: ret %1 = tail call i64 @llvm.fshr.i64(i64 %a, i64 %b, i64 5) ret i64 %1 } @@ -781,15 +781,15 @@ ; RV32I-NEXT: or a0, a0, a1 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: fshli_i32: -; RV32IB: # %bb.0: -; RV32IB-NEXT: fsri a0, a1, a0, 27 -; RV32IB-NEXT: ret +; RV32B-LABEL: fshli_i32: +; RV32B: # %bb.0: +; RV32B-NEXT: fsri a0, a1, a0, 27 +; RV32B-NEXT: ret ; -; RV32IBT-LABEL: fshli_i32: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: fsri a0, a1, a0, 27 -; RV32IBT-NEXT: ret +; RV32ZBT-LABEL: fshli_i32: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: fsri a0, a1, a0, 27 +; RV32ZBT-NEXT: ret %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 5) ret i32 %1 } @@ -806,19 +806,19 @@ ; RV32I-NEXT: mv a0, a2 ; RV32I-NEXT: ret ; -; RV32IB-LABEL: fshli_i64: -; RV32IB: # %bb.0: -; RV32IB-NEXT: fsri a2, a3, a0, 27 -; RV32IB-NEXT: fsri a1, a0, a1, 27 -; RV32IB-NEXT: mv a0, a2 -; RV32IB-NEXT: ret -; -; RV32IBT-LABEL: fshli_i64: -; RV32IBT: # %bb.0: -; RV32IBT-NEXT: fsri a2, a3, a0, 27 -; RV32IBT-NEXT: fsri a1, a0, a1, 27 -; RV32IBT-NEXT: mv a0, a2 -; RV32IBT-NEXT: ret +; RV32B-LABEL: fshli_i64: +; RV32B: # %bb.0: +; RV32B-NEXT: fsri a2, a3, a0, 27 +; RV32B-NEXT: fsri a1, a0, a1, 27 +; RV32B-NEXT: mv a0, a2 +; RV32B-NEXT: ret +; +; RV32ZBT-LABEL: fshli_i64: +; RV32ZBT: # %bb.0: +; RV32ZBT-NEXT: fsri a2, a3, a0, 27 +; RV32ZBT-NEXT: fsri a1, a0, a1, 27 +; RV32ZBT-NEXT: mv a0, a2 +; RV32ZBT-NEXT: ret %1 = tail call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 5) ret i64 %1 } diff --git a/llvm/test/CodeGen/RISCV/rv64zba.ll b/llvm/test/CodeGen/RISCV/rv64zba.ll --- a/llvm/test/CodeGen/RISCV/rv64zba.ll +++ b/llvm/test/CodeGen/RISCV/rv64zba.ll @@ -2,9 +2,9 @@ ; RUN: llc -mtriple=riscv64 -mattr=+m -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV64I ; RUN: llc -mtriple=riscv64 -mattr=+m,+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IB +; RUN: | FileCheck %s -check-prefix=RV64B ; RUN: llc -mtriple=riscv64 -mattr=+m,+experimental-zba -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IBA +; RUN: | FileCheck %s -check-prefix=RV64ZBA define i64 @slliuw(i64 %a) nounwind { ; RV64I-LABEL: slliuw: @@ -13,15 +13,15 @@ ; RV64I-NEXT: srli a0, a0, 31 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: slliuw: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli.uw a0, a0, 1 -; RV64IB-NEXT: ret +; RV64B-LABEL: slliuw: +; RV64B: # %bb.0: +; RV64B-NEXT: slli.uw a0, a0, 1 +; RV64B-NEXT: ret ; -; RV64IBA-LABEL: slliuw: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli.uw a0, a0, 1 -; RV64IBA-NEXT: ret +; RV64ZBA-LABEL: 
slliuw: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli.uw a0, a0, 1 +; RV64ZBA-NEXT: ret %conv1 = shl i64 %a, 1 %shl = and i64 %conv1, 8589934590 ret i64 %shl @@ -37,21 +37,21 @@ ; RV64I-NEXT: ld a1, 8(a1) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: slliuw_2: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli.uw a0, a0, 4 -; RV64IB-NEXT: add a1, a1, a0 -; RV64IB-NEXT: ld a0, 0(a1) -; RV64IB-NEXT: ld a1, 8(a1) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: slliuw_2: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli.uw a0, a0, 4 -; RV64IBA-NEXT: add a1, a1, a0 -; RV64IBA-NEXT: ld a0, 0(a1) -; RV64IBA-NEXT: ld a1, 8(a1) -; RV64IBA-NEXT: ret +; RV64B-LABEL: slliuw_2: +; RV64B: # %bb.0: +; RV64B-NEXT: slli.uw a0, a0, 4 +; RV64B-NEXT: add a1, a1, a0 +; RV64B-NEXT: ld a0, 0(a1) +; RV64B-NEXT: ld a1, 8(a1) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: slliuw_2: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli.uw a0, a0, 4 +; RV64ZBA-NEXT: add a1, a1, a0 +; RV64ZBA-NEXT: ld a0, 0(a1) +; RV64ZBA-NEXT: ld a1, 8(a1) +; RV64ZBA-NEXT: ret %3 = zext i32 %0 to i64 %4 = getelementptr inbounds i128, i128* %1, i64 %3 %5 = load i128, i128* %4 @@ -66,15 +66,15 @@ ; RV64I-NEXT: add a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: adduw: -; RV64IB: # %bb.0: -; RV64IB-NEXT: add.uw a0, a1, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: adduw: +; RV64B: # %bb.0: +; RV64B-NEXT: add.uw a0, a1, a0 +; RV64B-NEXT: ret ; -; RV64IBA-LABEL: adduw: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: add.uw a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64ZBA-LABEL: adduw: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: add.uw a0, a1, a0 +; RV64ZBA-NEXT: ret %and = and i64 %b, 4294967295 %add = add i64 %and, %a ret i64 %add @@ -89,17 +89,17 @@ ; RV64I-NEXT: lb a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: adduw_2: -; RV64IB: # %bb.0: -; RV64IB-NEXT: add.uw a0, a0, a1 -; RV64IB-NEXT: lb a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: adduw_2: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: add.uw a0, a0, a1 -; RV64IBA-NEXT: lb a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: adduw_2: +; RV64B: # %bb.0: +; RV64B-NEXT: add.uw a0, a0, a1 +; RV64B-NEXT: lb a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: adduw_2: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: add.uw a0, a0, a1 +; RV64ZBA-NEXT: lb a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = zext i32 %0 to i64 %4 = getelementptr inbounds i8, i8* %1, i64 %3 %5 = load i8, i8* %4 @@ -113,15 +113,15 @@ ; RV64I-NEXT: srli a0, a0, 32 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: zextw_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zext.w a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: zextw_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: zext.w a0, a0 +; RV64B-NEXT: ret ; -; RV64IBA-LABEL: zextw_i64: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: zext.w a0, a0 -; RV64IBA-NEXT: ret +; RV64ZBA-LABEL: zextw_i64: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: zext.w a0, a0 +; RV64ZBA-NEXT: ret %and = and i64 %a, 4294967295 ret i64 %and } @@ -136,17 +136,17 @@ ; RV64I-NEXT: srli a0, a0, 32 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: zextw_demandedbits_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: ori a0, a0, 1 -; RV64IB-NEXT: zext.w a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: zextw_demandedbits_i64: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: ori a0, a0, 1 -; RV64IBA-NEXT: zext.w a0, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: zextw_demandedbits_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: ori a0, a0, 1 +; RV64B-NEXT: zext.w a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: zextw_demandedbits_i64: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: ori a0, a0, 1 +; RV64ZBA-NEXT: zext.w a0, a0 +; RV64ZBA-NEXT: ret %2 = and i64 %0, 4294967294 %3 = or i64 %2, 1 ret i64 %3 @@ -160,17 +160,17 
@@ ; RV64I-NEXT: lh a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh1add: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a0, a0, a1 -; RV64IB-NEXT: lh a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh1add: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a0, a0, a1 -; RV64IBA-NEXT: lh a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh1add: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a0, a0, a1 +; RV64B-NEXT: lh a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh1add: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a0, a0, a1 +; RV64ZBA-NEXT: lh a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = getelementptr inbounds i16, i16* %1, i64 %0 %4 = load i16, i16* %3 ret i16 %4 @@ -184,17 +184,17 @@ ; RV64I-NEXT: lw a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh2add: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a0, a0, a1 -; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh2add: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a0, a0, a1 -; RV64IBA-NEXT: lw a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh2add: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a0, a0, a1 +; RV64B-NEXT: lw a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh2add: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a0, a0, a1 +; RV64ZBA-NEXT: lw a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = getelementptr inbounds i32, i32* %1, i64 %0 %4 = load i32, i32* %3 ret i32 %4 @@ -208,17 +208,17 @@ ; RV64I-NEXT: ld a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh3add: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a1 -; RV64IB-NEXT: ld a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh3add: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a1 -; RV64IBA-NEXT: ld a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh3add: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a1 +; RV64B-NEXT: ld a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh3add: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a1 +; RV64ZBA-NEXT: ld a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = getelementptr inbounds i64, i64* %1, i64 %0 %4 = load i64, i64* %3 ret i64 %4 @@ -233,17 +233,17 @@ ; RV64I-NEXT: lh a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh1adduw: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add.uw a0, a0, a1 -; RV64IB-NEXT: lh a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh1adduw: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add.uw a0, a0, a1 -; RV64IBA-NEXT: lh a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh1adduw: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add.uw a0, a0, a1 +; RV64B-NEXT: lh a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh1adduw: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add.uw a0, a0, a1 +; RV64ZBA-NEXT: lh a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = zext i32 %0 to i64 %4 = getelementptr inbounds i16, i16* %1, i64 %3 %5 = load i16, i16* %4 @@ -258,15 +258,15 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh1adduw_2: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add.uw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sh1adduw_2: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add.uw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBA-LABEL: sh1adduw_2: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add.uw a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64ZBA-LABEL: sh1adduw_2: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add.uw a0, a0, a1 +; RV64ZBA-NEXT: ret %3 = shl i64 %0, 1 %4 = and i64 %3, 8589934590 %5 = add i64 %4, %1 @@ -282,17 +282,17 @@ ; RV64I-NEXT: lw a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh2adduw: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add.uw a0, a0, a1 -; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh2adduw: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: 
sh2add.uw a0, a0, a1 -; RV64IBA-NEXT: lw a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh2adduw: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add.uw a0, a0, a1 +; RV64B-NEXT: lw a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh2adduw: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add.uw a0, a0, a1 +; RV64ZBA-NEXT: lw a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = zext i32 %0 to i64 %4 = getelementptr inbounds i32, i32* %1, i64 %3 %5 = load i32, i32* %4 @@ -307,15 +307,15 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh2adduw_2: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add.uw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sh2adduw_2: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add.uw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBA-LABEL: sh2adduw_2: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add.uw a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64ZBA-LABEL: sh2adduw_2: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add.uw a0, a0, a1 +; RV64ZBA-NEXT: ret %3 = shl i64 %0, 2 %4 = and i64 %3, 17179869180 %5 = add i64 %4, %1 @@ -331,17 +331,17 @@ ; RV64I-NEXT: ld a0, 0(a0) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh3adduw: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add.uw a0, a0, a1 -; RV64IB-NEXT: ld a0, 0(a0) -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh3adduw: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add.uw a0, a0, a1 -; RV64IBA-NEXT: ld a0, 0(a0) -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh3adduw: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add.uw a0, a0, a1 +; RV64B-NEXT: ld a0, 0(a0) +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh3adduw: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add.uw a0, a0, a1 +; RV64ZBA-NEXT: ld a0, 0(a0) +; RV64ZBA-NEXT: ret %3 = zext i32 %0 to i64 %4 = getelementptr inbounds i64, i64* %1, i64 %3 %5 = load i64, i64* %4 @@ -356,15 +356,15 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh3adduw_2: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add.uw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sh3adduw_2: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add.uw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBA-LABEL: sh3adduw_2: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add.uw a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64ZBA-LABEL: sh3adduw_2: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add.uw a0, a0, a1 +; RV64ZBA-NEXT: ret %3 = shl i64 %0, 3 %4 = and i64 %3, 34359738360 %5 = add i64 %4, %1 @@ -379,17 +379,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul6: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a0, a0, a0 -; RV64IB-NEXT: sh1add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul6: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a0, a0, a0 -; RV64IBA-NEXT: sh1add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul6: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a0, a0, a0 +; RV64B-NEXT: sh1add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul6: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a0, a0, a0 +; RV64ZBA-NEXT: sh1add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 6 %d = add i64 %c, %b ret i64 %d @@ -403,17 +403,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul10: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a0, a0, a0 -; RV64IB-NEXT: sh1add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul10: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a0, a0, a0 -; RV64IBA-NEXT: sh1add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul10: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a0, a0, a0 +; RV64B-NEXT: sh1add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul10: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a0, a0, a0 +; RV64ZBA-NEXT: sh1add a0, a0, a1 +; 
RV64ZBA-NEXT: ret %c = mul i64 %a, 10 %d = add i64 %c, %b ret i64 %d @@ -427,17 +427,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul12: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a0, a0, a0 -; RV64IB-NEXT: sh2add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul12: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a0, a0, a0 -; RV64IBA-NEXT: sh2add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul12: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a0, a0, a0 +; RV64B-NEXT: sh2add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul12: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a0, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 12 %d = add i64 %c, %b ret i64 %d @@ -451,17 +451,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul18: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: sh1add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul18: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: sh1add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul18: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: sh1add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul18: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: sh1add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 18 %d = add i64 %c, %b ret i64 %d @@ -475,17 +475,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul20: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a0, a0, a0 -; RV64IB-NEXT: sh2add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul20: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a0, a0, a0 -; RV64IBA-NEXT: sh2add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul20: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a0, a0, a0 +; RV64B-NEXT: sh2add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul20: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a0, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 20 %d = add i64 %c, %b ret i64 %d @@ -499,17 +499,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul24: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a0, a0, a0 -; RV64IB-NEXT: sh3add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul24: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a0, a0, a0 -; RV64IBA-NEXT: sh3add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul24: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a0, a0, a0 +; RV64B-NEXT: sh3add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul24: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a0, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 24 %d = add i64 %c, %b ret i64 %d @@ -523,17 +523,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul36: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: sh2add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul36: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: sh2add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul36: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: sh2add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul36: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 36 %d = add i64 %c, %b ret i64 %d @@ -547,17 +547,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul40: -; RV64IB: # %bb.0: -; 
RV64IB-NEXT: sh2add a0, a0, a0 -; RV64IB-NEXT: sh3add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul40: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a0, a0, a0 -; RV64IBA-NEXT: sh3add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul40: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a0, a0, a0 +; RV64B-NEXT: sh3add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul40: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a0, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 40 %d = add i64 %c, %b ret i64 %d @@ -571,17 +571,17 @@ ; RV64I-NEXT: add a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: addmul72: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: sh3add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: addmul72: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: sh3add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: addmul72: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: sh3add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: addmul72: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 72 %d = add i64 %c, %b ret i64 %d @@ -594,17 +594,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul96: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a0, a0, a0 -; RV64IB-NEXT: slli a0, a0, 5 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul96: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a0, a0, a0 -; RV64IBA-NEXT: slli a0, a0, 5 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul96: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a0, a0, a0 +; RV64B-NEXT: slli a0, a0, 5 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul96: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a0, a0, a0 +; RV64ZBA-NEXT: slli a0, a0, 5 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 96 ret i64 %c } @@ -616,17 +616,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul160: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a0, a0, a0 -; RV64IB-NEXT: slli a0, a0, 5 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul160: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a0, a0, a0 -; RV64IBA-NEXT: slli a0, a0, 5 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul160: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a0, a0, a0 +; RV64B-NEXT: slli a0, a0, 5 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul160: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a0, a0, a0 +; RV64ZBA-NEXT: slli a0, a0, 5 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 160 ret i64 %c } @@ -638,17 +638,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul288: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: slli a0, a0, 5 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul288: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: slli a0, a0, 5 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul288: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: slli a0, a0, 5 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul288: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: slli a0, a0, 5 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 288 ret i64 %c } @@ -660,17 +660,17 @@ ; RV64I-NEXT: addi a0, a0, 5 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh1add_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli a0, a0, 1 -; RV64IB-NEXT: addi a0, a0, 5 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh1add_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli a0, a0, 1 -; RV64IBA-NEXT: addi a0, a0, 5 -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh1add_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: slli a0, a0, 1 +; 
RV64B-NEXT: addi a0, a0, 5 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh1add_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli a0, a0, 1 +; RV64ZBA-NEXT: addi a0, a0, 5 +; RV64ZBA-NEXT: ret %a = shl i64 %0, 1 %b = add i64 %a, 5 ret i64 %b @@ -683,17 +683,17 @@ ; RV64I-NEXT: addi a0, a0, -6 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh2add_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli a0, a0, 2 -; RV64IB-NEXT: addi a0, a0, -6 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh2add_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli a0, a0, 2 -; RV64IBA-NEXT: addi a0, a0, -6 -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh2add_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: slli a0, a0, 2 +; RV64B-NEXT: addi a0, a0, -6 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh2add_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli a0, a0, 2 +; RV64ZBA-NEXT: addi a0, a0, -6 +; RV64ZBA-NEXT: ret %a = shl i64 %0, 2 %b = add i64 %a, -6 ret i64 %b @@ -706,17 +706,17 @@ ; RV64I-NEXT: ori a0, a0, 7 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh3add_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli a0, a0, 3 -; RV64IB-NEXT: ori a0, a0, 7 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh3add_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli a0, a0, 3 -; RV64IBA-NEXT: ori a0, a0, 7 -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh3add_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: slli a0, a0, 3 +; RV64B-NEXT: ori a0, a0, 7 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh3add_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli a0, a0, 3 +; RV64ZBA-NEXT: ori a0, a0, 7 +; RV64ZBA-NEXT: ret %a = shl i64 %0, 3 %b = add i64 %a, 7 ret i64 %b @@ -730,17 +730,17 @@ ; RV64I-NEXT: addi a0, a0, 11 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh1adduw_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli.uw a0, a0, 1 -; RV64IB-NEXT: addi a0, a0, 11 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh1adduw_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli.uw a0, a0, 1 -; RV64IBA-NEXT: addi a0, a0, 11 -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh1adduw_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: slli.uw a0, a0, 1 +; RV64B-NEXT: addi a0, a0, 11 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh1adduw_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli.uw a0, a0, 1 +; RV64ZBA-NEXT: addi a0, a0, 11 +; RV64ZBA-NEXT: ret %a = zext i32 %0 to i64 %b = shl i64 %a, 1 %c = add i64 %b, 11 @@ -755,17 +755,17 @@ ; RV64I-NEXT: addi a0, a0, -12 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh2adduw_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli.uw a0, a0, 2 -; RV64IB-NEXT: addi a0, a0, -12 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh2adduw_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli.uw a0, a0, 2 -; RV64IBA-NEXT: addi a0, a0, -12 -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh2adduw_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: slli.uw a0, a0, 2 +; RV64B-NEXT: addi a0, a0, -12 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh2adduw_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli.uw a0, a0, 2 +; RV64ZBA-NEXT: addi a0, a0, -12 +; RV64ZBA-NEXT: ret %a = zext i32 %0 to i64 %b = shl i64 %a, 2 %c = add i64 %b, -12 @@ -780,17 +780,17 @@ ; RV64I-NEXT: addi a0, a0, 13 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sh3adduw_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli.uw a0, a0, 3 -; RV64IB-NEXT: addi a0, a0, 13 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: sh3adduw_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli.uw a0, a0, 3 -; RV64IBA-NEXT: addi a0, a0, 13 -; RV64IBA-NEXT: ret +; RV64B-LABEL: sh3adduw_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: slli.uw a0, a0, 3 +; RV64B-NEXT: addi a0, a0, 13 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: sh3adduw_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli.uw a0, a0, 3 +; RV64ZBA-NEXT: addi a0, a0, 13 +; 
RV64ZBA-NEXT: ret %a = zext i32 %0 to i64 %b = shl i64 %a, 3 %c = add i64 %b, 13 @@ -805,17 +805,17 @@ ; RV64I-NEXT: addi a0, a0, 5 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: adduw_imm: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zext.w a0, a0 -; RV64IB-NEXT: addi a0, a0, 5 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: adduw_imm: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: zext.w a0, a0 -; RV64IBA-NEXT: addi a0, a0, 5 -; RV64IBA-NEXT: ret +; RV64B-LABEL: adduw_imm: +; RV64B: # %bb.0: +; RV64B-NEXT: zext.w a0, a0 +; RV64B-NEXT: addi a0, a0, 5 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: adduw_imm: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: zext.w a0, a0 +; RV64ZBA-NEXT: addi a0, a0, 5 +; RV64ZBA-NEXT: ret %a = zext i32 %0 to i64 %b = add i64 %a, 5 ret i64 %b @@ -828,17 +828,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul258: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 258 -; RV64IB-NEXT: mul a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul258: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a1, zero, 258 -; RV64IBA-NEXT: mul a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul258: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 258 +; RV64B-NEXT: mul a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul258: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a1, zero, 258 +; RV64ZBA-NEXT: mul a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 258 ret i64 %c } @@ -850,17 +850,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul260: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 260 -; RV64IB-NEXT: mul a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul260: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a1, zero, 260 -; RV64IBA-NEXT: mul a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul260: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 260 +; RV64B-NEXT: mul a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul260: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a1, zero, 260 +; RV64ZBA-NEXT: mul a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 260 ret i64 %c } @@ -872,17 +872,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul264: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 264 -; RV64IB-NEXT: mul a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul264: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a1, zero, 264 -; RV64IBA-NEXT: mul a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul264: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 264 +; RV64B-NEXT: mul a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul264: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a1, zero, 264 +; RV64ZBA-NEXT: mul a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 264 ret i64 %c } @@ -895,17 +895,17 @@ ; RV64I-NEXT: addi a0, a0, -2 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: imm_zextw: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a0, zero, -2 -; RV64IB-NEXT: zext.w a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: imm_zextw: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a0, zero, -2 -; RV64IBA-NEXT: zext.w a0, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: imm_zextw: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a0, zero, -2 +; RV64B-NEXT: zext.w a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: imm_zextw: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a0, zero, -2 +; RV64ZBA-NEXT: zext.w a0, a0 +; RV64ZBA-NEXT: ret ret i64 4294967294 ; -2 in 32 bits. 
} @@ -918,19 +918,19 @@ ; RV64I-NEXT: addi a0, a0, -1366 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: imm_zextw2: -; RV64IB: # %bb.0: -; RV64IB-NEXT: lui a0, 699051 -; RV64IB-NEXT: addiw a0, a0, -1366 -; RV64IB-NEXT: zext.w a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: imm_zextw2: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: lui a0, 699051 -; RV64IBA-NEXT: addiw a0, a0, -1366 -; RV64IBA-NEXT: zext.w a0, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: imm_zextw2: +; RV64B: # %bb.0: +; RV64B-NEXT: lui a0, 699051 +; RV64B-NEXT: addiw a0, a0, -1366 +; RV64B-NEXT: zext.w a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: imm_zextw2: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: lui a0, 699051 +; RV64ZBA-NEXT: addiw a0, a0, -1366 +; RV64ZBA-NEXT: zext.w a0, a0 +; RV64ZBA-NEXT: ret ret i64 2863311530 ; 0xAAAAAAAA } @@ -941,17 +941,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul11: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a1, a0, a0 -; RV64IB-NEXT: sh1add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul11: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a1, a0, a0 -; RV64IBA-NEXT: sh1add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul11: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a1, a0, a0 +; RV64B-NEXT: sh1add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul11: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a1, a0, a0 +; RV64ZBA-NEXT: sh1add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 11 ret i64 %c } @@ -963,17 +963,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul19: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a1, a0, a0 -; RV64IB-NEXT: sh1add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul19: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a1, a0, a0 -; RV64IBA-NEXT: sh1add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul19: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a1, a0, a0 +; RV64B-NEXT: sh1add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul19: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a1, a0, a0 +; RV64ZBA-NEXT: sh1add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 19 ret i64 %c } @@ -985,17 +985,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul13: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a1, a0, a0 -; RV64IB-NEXT: sh2add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul13: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a1, a0, a0 -; RV64IBA-NEXT: sh2add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul13: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a1, a0, a0 +; RV64B-NEXT: sh2add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul13: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a1, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 13 ret i64 %c } @@ -1007,17 +1007,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul21: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a1, a0, a0 -; RV64IB-NEXT: sh2add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul21: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a1, a0, a0 -; RV64IBA-NEXT: sh2add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul21: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a1, a0, a0 +; RV64B-NEXT: sh2add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul21: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a1, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 21 ret i64 %c } @@ -1029,17 +1029,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul37: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a1, a0, a0 -; RV64IB-NEXT: sh2add a0, a1, a0 -; 
RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul37: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a1, a0, a0 -; RV64IBA-NEXT: sh2add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul37: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a1, a0, a0 +; RV64B-NEXT: sh2add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul37: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a1, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 37 ret i64 %c } @@ -1051,17 +1051,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul25: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh1add a1, a0, a0 -; RV64IB-NEXT: sh3add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul25: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh1add a1, a0, a0 -; RV64IBA-NEXT: sh3add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul25: +; RV64B: # %bb.0: +; RV64B-NEXT: sh1add a1, a0, a0 +; RV64B-NEXT: sh3add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul25: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh1add a1, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 25 ret i64 %c } @@ -1073,17 +1073,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul41: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh2add a1, a0, a0 -; RV64IB-NEXT: sh3add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul41: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh2add a1, a0, a0 -; RV64IBA-NEXT: sh3add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul41: +; RV64B: # %bb.0: +; RV64B-NEXT: sh2add a1, a0, a0 +; RV64B-NEXT: sh3add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul41: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh2add a1, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 41 ret i64 %c } @@ -1095,17 +1095,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul73: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a1, a0, a0 -; RV64IB-NEXT: sh3add a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul73: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a1, a0, a0 -; RV64IBA-NEXT: sh3add a0, a1, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul73: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a1, a0, a0 +; RV64B-NEXT: sh3add a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul73: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a1, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a1, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 73 ret i64 %c } @@ -1117,17 +1117,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul27: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: sh1add a0, a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul27: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: sh1add a0, a0, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul27: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: sh1add a0, a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul27: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: sh1add a0, a0, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 27 ret i64 %c } @@ -1139,17 +1139,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul45: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: sh2add a0, a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul45: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: sh2add a0, a0, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul45: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: sh2add a0, a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul45: +; RV64ZBA: # 
%bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: sh2add a0, a0, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 45 ret i64 %c } @@ -1161,17 +1161,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul81: -; RV64IB: # %bb.0: -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: sh3add a0, a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul81: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: sh3add a0, a0, a0 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul81: +; RV64B: # %bb.0: +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: sh3add a0, a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul81: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: sh3add a0, a0, a0 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 81 ret i64 %c } @@ -1184,17 +1184,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul4098: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli a1, a0, 12 -; RV64IB-NEXT: sh1add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul4098: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli a1, a0, 12 -; RV64IBA-NEXT: sh1add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul4098: +; RV64B: # %bb.0: +; RV64B-NEXT: slli a1, a0, 12 +; RV64B-NEXT: sh1add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul4098: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli a1, a0, 12 +; RV64ZBA-NEXT: sh1add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 4098 ret i64 %c } @@ -1207,17 +1207,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul4100: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli a1, a0, 12 -; RV64IB-NEXT: sh2add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul4100: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli a1, a0, 12 -; RV64IBA-NEXT: sh2add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul4100: +; RV64B: # %bb.0: +; RV64B-NEXT: slli a1, a0, 12 +; RV64B-NEXT: sh2add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul4100: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli a1, a0, 12 +; RV64ZBA-NEXT: sh2add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 4100 ret i64 %c } @@ -1230,17 +1230,17 @@ ; RV64I-NEXT: mul a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mul4104: -; RV64IB: # %bb.0: -; RV64IB-NEXT: slli a1, a0, 12 -; RV64IB-NEXT: sh3add a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mul4104: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: slli a1, a0, 12 -; RV64IBA-NEXT: sh3add a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mul4104: +; RV64B: # %bb.0: +; RV64B-NEXT: slli a1, a0, 12 +; RV64B-NEXT: sh3add a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mul4104: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: slli a1, a0, 12 +; RV64ZBA-NEXT: sh3add a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i64 %a, 4104 ret i64 %c } @@ -1252,17 +1252,17 @@ ; RV64I-NEXT: mulw a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mulw192: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 192 -; RV64IB-NEXT: mulw a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mulw192: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a1, zero, 192 -; RV64IBA-NEXT: mulw a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mulw192: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 192 +; RV64B-NEXT: mulw a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mulw192: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a1, zero, 192 +; RV64ZBA-NEXT: mulw a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i32 %a, 192 ret i32 %c } @@ -1274,17 +1274,17 @@ ; RV64I-NEXT: mulw a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mulw320: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 320 -; RV64IB-NEXT: mulw a0, a0, a1 
-; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mulw320: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a1, zero, 320 -; RV64IBA-NEXT: mulw a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mulw320: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 320 +; RV64B-NEXT: mulw a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mulw320: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a1, zero, 320 +; RV64ZBA-NEXT: mulw a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i32 %a, 320 ret i32 %c } @@ -1296,17 +1296,17 @@ ; RV64I-NEXT: mulw a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: mulw576: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 576 -; RV64IB-NEXT: mulw a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBA-LABEL: mulw576: -; RV64IBA: # %bb.0: -; RV64IBA-NEXT: addi a1, zero, 576 -; RV64IBA-NEXT: mulw a0, a0, a1 -; RV64IBA-NEXT: ret +; RV64B-LABEL: mulw576: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 576 +; RV64B-NEXT: mulw a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBA-LABEL: mulw576: +; RV64ZBA: # %bb.0: +; RV64ZBA-NEXT: addi a1, zero, 576 +; RV64ZBA-NEXT: mulw a0, a0, a1 +; RV64ZBA-NEXT: ret %c = mul i32 %a, 576 ret i32 %c } diff --git a/llvm/test/CodeGen/RISCV/rv64zbb-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv64zbb-intrinsic.ll --- a/llvm/test/CodeGen/RISCV/rv64zbb-intrinsic.ll +++ b/llvm/test/CodeGen/RISCV/rv64zbb-intrinsic.ll @@ -1,22 +1,22 @@ ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IB +; RUN: | FileCheck %s -check-prefix=RV64B ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbb -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IBB +; RUN: | FileCheck %s -check-prefix=RV64ZBB declare i32 @llvm.riscv.orc.b.i32(i32) define signext i32 @orcb32(i32 signext %a) nounwind { -; RV64IB-LABEL: orcb32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: gorciw a0, a0, 7 -; RV64IB-NEXT: ret +; RV64B-LABEL: orcb32: +; RV64B: # %bb.0: +; RV64B-NEXT: gorciw a0, a0, 7 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: orcb32: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: orc.b a0, a0 -; RV64IBB-NEXT: sext.w a0, a0 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: orcb32: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: orc.b a0, a0 +; RV64ZBB-NEXT: sext.w a0, a0 +; RV64ZBB-NEXT: ret %tmp = call i32 @llvm.riscv.orc.b.i32(i32 %a) ret i32 %tmp } @@ -24,15 +24,15 @@ declare i64 @llvm.riscv.orc.b.i64(i64) define i64 @orcb64(i64 %a) nounwind { -; RV64IB-LABEL: orcb64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: orcb64: +; RV64B: # %bb.0: +; RV64B-NEXT: orc.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: orcb64: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: orc.b a0, a0 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: orcb64: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: orc.b a0, a0 +; RV64ZBB-NEXT: ret %tmp = call i64 @llvm.riscv.orc.b.i64(i64 %a) ret i64 %tmp } diff --git a/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll b/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll --- a/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll +++ b/llvm/test/CodeGen/RISCV/rv64zbb-zbp.ll @@ -2,11 +2,11 @@ ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV64I ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IB +; RUN: | FileCheck %s -check-prefix=RV64B ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbb -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IBB +; RUN: | FileCheck %s -check-prefix=RV64ZBB ; RUN: llc 
-mtriple=riscv64 -mattr=+experimental-zbp -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IBP +; RUN: | FileCheck %s -check-prefix=RV64ZBP define signext i32 @andn_i32(i32 signext %a, i32 signext %b) nounwind { ; RV64I-LABEL: andn_i32: @@ -15,20 +15,20 @@ ; RV64I-NEXT: and a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: andn_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: andn a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: andn_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: andn a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: andn_i32: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: andn a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: andn_i32: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: andn a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: andn_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: andn a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: andn_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: andn a0, a0, a1 +; RV64ZBP-NEXT: ret %neg = xor i32 %b, -1 %and = and i32 %neg, %a ret i32 %and @@ -41,20 +41,20 @@ ; RV64I-NEXT: and a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: andn_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: andn a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: andn_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: andn a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: andn_i64: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: andn a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: andn_i64: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: andn a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: andn_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: andn a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: andn_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: andn a0, a0, a1 +; RV64ZBP-NEXT: ret %neg = xor i64 %b, -1 %and = and i64 %neg, %a ret i64 %and @@ -67,20 +67,20 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: orn_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orn a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: orn_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: orn a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: orn_i32: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: orn a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: orn_i32: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: orn a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: orn_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orn a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: orn_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orn a0, a0, a1 +; RV64ZBP-NEXT: ret %neg = xor i32 %b, -1 %or = or i32 %neg, %a ret i32 %or @@ -93,20 +93,20 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: orn_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orn a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: orn_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: orn a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: orn_i64: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: orn a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: orn_i64: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: orn a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: orn_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orn a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: orn_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orn a0, a0, a1 +; RV64ZBP-NEXT: ret %neg = xor i64 %b, -1 %or = or i64 %neg, %a ret i64 %or @@ -119,20 +119,20 @@ ; RV64I-NEXT: not a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: xnor_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: xnor a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: xnor_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: xnor a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: xnor_i32: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: xnor a0, a0, a1 -; RV64IBB-NEXT: ret +; 
RV64ZBB-LABEL: xnor_i32: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: xnor a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: xnor_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: xnor a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: xnor_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: xnor a0, a0, a1 +; RV64ZBP-NEXT: ret %neg = xor i32 %a, -1 %xor = xor i32 %neg, %b ret i32 %xor @@ -145,20 +145,20 @@ ; RV64I-NEXT: not a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: xnor_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: xnor a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: xnor_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: xnor a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: xnor_i64: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: xnor a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: xnor_i64: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: xnor a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: xnor_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: xnor a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: xnor_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: xnor a0, a0, a1 +; RV64ZBP-NEXT: ret %neg = xor i64 %a, -1 %xor = xor i64 %neg, %b ret i64 %xor @@ -175,20 +175,20 @@ ; RV64I-NEXT: or a0, a2, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: rol_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rolw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: rol_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: rolw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: rol_i32: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: rolw a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: rol_i32: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: rolw a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: rol_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rolw a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: rol_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rolw a0, a0, a1 +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 %b) ret i32 %1 } @@ -204,23 +204,23 @@ ; RV64I-NEXT: sw a0, 0(a2) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: rol_i32_nosext: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rolw a0, a0, a1 -; RV64IB-NEXT: sw a0, 0(a2) -; RV64IB-NEXT: ret -; -; RV64IBB-LABEL: rol_i32_nosext: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: rolw a0, a0, a1 -; RV64IBB-NEXT: sw a0, 0(a2) -; RV64IBB-NEXT: ret -; -; RV64IBP-LABEL: rol_i32_nosext: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rolw a0, a0, a1 -; RV64IBP-NEXT: sw a0, 0(a2) -; RV64IBP-NEXT: ret +; RV64B-LABEL: rol_i32_nosext: +; RV64B: # %bb.0: +; RV64B-NEXT: rolw a0, a0, a1 +; RV64B-NEXT: sw a0, 0(a2) +; RV64B-NEXT: ret +; +; RV64ZBB-LABEL: rol_i32_nosext: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: rolw a0, a0, a1 +; RV64ZBB-NEXT: sw a0, 0(a2) +; RV64ZBB-NEXT: ret +; +; RV64ZBP-LABEL: rol_i32_nosext: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rolw a0, a0, a1 +; RV64ZBP-NEXT: sw a0, 0(a2) +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 %b) store i32 %1, i32* %x ret void @@ -236,23 +236,23 @@ ; RV64I-NEXT: or a0, a2, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: rol_i32_neg_constant_rhs: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, -2 -; RV64IB-NEXT: rolw a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBB-LABEL: rol_i32_neg_constant_rhs: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: addi a1, zero, -2 -; RV64IBB-NEXT: rolw a0, a1, a0 -; RV64IBB-NEXT: ret -; -; RV64IBP-LABEL: rol_i32_neg_constant_rhs: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: addi a1, zero, -2 -; RV64IBP-NEXT: rolw a0, a1, a0 -; RV64IBP-NEXT: ret +; RV64B-LABEL: rol_i32_neg_constant_rhs: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, -2 +; RV64B-NEXT: rolw a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBB-LABEL: 
rol_i32_neg_constant_rhs: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: addi a1, zero, -2 +; RV64ZBB-NEXT: rolw a0, a1, a0 +; RV64ZBB-NEXT: ret +; +; RV64ZBP-LABEL: rol_i32_neg_constant_rhs: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: addi a1, zero, -2 +; RV64ZBP-NEXT: rolw a0, a1, a0 +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.fshl.i32(i32 -2, i32 -2, i32 %a) ret i32 %1 } @@ -268,20 +268,20 @@ ; RV64I-NEXT: or a0, a2, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: rol_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rol a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: rol_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rol a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: rol_i64: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: rol a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: rol_i64: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: rol a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: rol_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rol a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: rol_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rol a0, a0, a1 +; RV64ZBP-NEXT: ret %or = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 %b) ret i64 %or } @@ -297,20 +297,20 @@ ; RV64I-NEXT: or a0, a2, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: ror_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rorw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: ror_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: rorw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBB-LABEL: ror_i32: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: rorw a0, a0, a1 -; RV64IBB-NEXT: ret +; RV64ZBB-LABEL: ror_i32: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: rorw a0, a0, a1 +; RV64ZBB-NEXT: ret ; -; RV64IBP-LABEL: ror_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rorw a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: ror_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rorw a0, a0, a1 +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 %b) ret i32 %1 } @@ -326,23 +326,23 @@ ; RV64I-NEXT: sw a0, 0(a2) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: ror_i32_nosext: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rorw a0, a0, a1 -; RV64IB-NEXT: sw a0, 0(a2) -; RV64IB-NEXT: ret -; -; RV64IBB-LABEL: ror_i32_nosext: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: rorw a0, a0, a1 -; RV64IBB-NEXT: sw a0, 0(a2) -; RV64IBB-NEXT: ret -; -; RV64IBP-LABEL: ror_i32_nosext: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rorw a0, a0, a1 -; RV64IBP-NEXT: sw a0, 0(a2) -; RV64IBP-NEXT: ret +; RV64B-LABEL: ror_i32_nosext: +; RV64B: # %bb.0: +; RV64B-NEXT: rorw a0, a0, a1 +; RV64B-NEXT: sw a0, 0(a2) +; RV64B-NEXT: ret +; +; RV64ZBB-LABEL: ror_i32_nosext: +; RV64ZBB: # %bb.0: +; RV64ZBB-NEXT: rorw a0, a0, a1 +; RV64ZBB-NEXT: sw a0, 0(a2) +; RV64ZBB-NEXT: ret +; +; RV64ZBP-LABEL: ror_i32_nosext: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rorw a0, a0, a1 +; RV64ZBP-NEXT: sw a0, 0(a2) +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 %b) store i32 %1, i32* %x ret void @@ -358,23 +358,23 @@ ; RV64I-NEXT: or a0, a2, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: ror_i32_neg_constant_rhs: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, -2 -; RV64IB-NEXT: rorw a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBB-LABEL: ror_i32_neg_constant_rhs: -; RV64IBB: # %bb.0: -; RV64IBB-NEXT: addi a1, zero, -2 -; RV64IBB-NEXT: rorw a0, a1, a0 -; RV64IBB-NEXT: ret -; -; RV64IBP-LABEL: ror_i32_neg_constant_rhs: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: addi a1, zero, -2 -; RV64IBP-NEXT: rorw a0, a1, a0 -; RV64IBP-NEXT: ret +; RV64B-LABEL: ror_i32_neg_constant_rhs: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, -2 +; RV64B-NEXT: rorw a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBB-LABEL: 
ror_i32_neg_constant_rhs:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: addi a1, zero, -2
+; RV64ZBB-NEXT: rorw a0, a1, a0
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: ror_i32_neg_constant_rhs:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: addi a1, zero, -2
+; RV64ZBP-NEXT: rorw a0, a1, a0
+; RV64ZBP-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 -2, i32 -2, i32 %a)
 ret i32 %1
 }
@@ -390,20 +390,20 @@
 ; RV64I-NEXT: or a0, a2, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ror_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ror a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: ror_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ror a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: ror_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: ror a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: ror_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: ror a0, a0, a1
+; RV64ZBB-NEXT: ret
 ;
-; RV64IBP-LABEL: ror_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: ror a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: ror_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: ror a0, a0, a1
+; RV64ZBP-NEXT: ret
 %or = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 %b)
 ret i64 %or
 }
@@ -417,20 +417,20 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: rori_i32_fshl:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: roriw a0, a0, 1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: rori_i32_fshl:
+; RV64B: # %bb.0:
+; RV64B-NEXT: roriw a0, a0, 1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: rori_i32_fshl:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: roriw a0, a0, 1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: rori_i32_fshl:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: roriw a0, a0, 1
+; RV64ZBB-NEXT: ret
 ;
-; RV64IBP-LABEL: rori_i32_fshl:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: roriw a0, a0, 1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: rori_i32_fshl:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: roriw a0, a0, 1
+; RV64ZBP-NEXT: ret
 %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 31)
 ret i32 %1
 }
@@ -445,23 +445,23 @@
 ; RV64I-NEXT: sw a0, 0(a1)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: rori_i32_fshl_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: roriw a0, a0, 1
-; RV64IB-NEXT: sw a0, 0(a1)
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: rori_i32_fshl_nosext:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: roriw a0, a0, 1
-; RV64IBB-NEXT: sw a0, 0(a1)
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: rori_i32_fshl_nosext:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: roriw a0, a0, 1
-; RV64IBP-NEXT: sw a0, 0(a1)
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: rori_i32_fshl_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: roriw a0, a0, 1
+; RV64B-NEXT: sw a0, 0(a1)
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: rori_i32_fshl_nosext:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: roriw a0, a0, 1
+; RV64ZBB-NEXT: sw a0, 0(a1)
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: rori_i32_fshl_nosext:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: roriw a0, a0, 1
+; RV64ZBP-NEXT: sw a0, 0(a1)
+; RV64ZBP-NEXT: ret
 %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 31)
 store i32 %1, i32* %x
 ret void
@@ -476,20 +476,20 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: rori_i32_fshr:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: roriw a0, a0, 31
-; RV64IB-NEXT: ret
+; RV64B-LABEL: rori_i32_fshr:
+; RV64B: # %bb.0:
+; RV64B-NEXT: roriw a0, a0, 31
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: rori_i32_fshr:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: roriw a0, a0, 31
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: rori_i32_fshr:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: roriw a0, a0, 31
+; RV64ZBB-NEXT: ret
 ;
-; RV64IBP-LABEL: rori_i32_fshr:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: roriw a0, a0, 31
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: rori_i32_fshr:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: roriw a0, a0, 31
+; RV64ZBP-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 31)
 ret i32 %1
 }
@@ -504,23 +504,23 @@
 ; RV64I-NEXT: sw a0, 0(a1)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: rori_i32_fshr_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: roriw a0, a0, 31
-; RV64IB-NEXT: sw a0, 0(a1)
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: rori_i32_fshr_nosext:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: roriw a0, a0, 31
-; RV64IBB-NEXT: sw a0, 0(a1)
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: rori_i32_fshr_nosext:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: roriw a0, a0, 31
-; RV64IBP-NEXT: sw a0, 0(a1)
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: rori_i32_fshr_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: roriw a0, a0, 31
+; RV64B-NEXT: sw a0, 0(a1)
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: rori_i32_fshr_nosext:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: roriw a0, a0, 31
+; RV64ZBB-NEXT: sw a0, 0(a1)
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: rori_i32_fshr_nosext:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: roriw a0, a0, 31
+; RV64ZBP-NEXT: sw a0, 0(a1)
+; RV64ZBP-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 31)
 store i32 %1, i32* %x
 ret void
@@ -538,29 +538,29 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: not_rori_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: slli a0, a0, 31
-; RV64IB-NEXT: srliw a1, a1, 1
-; RV64IB-NEXT: or a0, a0, a1
-; RV64IB-NEXT: sext.w a0, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: not_rori_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: slli a0, a0, 31
-; RV64IBB-NEXT: srliw a1, a1, 1
-; RV64IBB-NEXT: or a0, a0, a1
-; RV64IBB-NEXT: sext.w a0, a0
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: not_rori_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: slli a0, a0, 31
-; RV64IBP-NEXT: srliw a1, a1, 1
-; RV64IBP-NEXT: or a0, a0, a1
-; RV64IBP-NEXT: sext.w a0, a0
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: not_rori_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: slli a0, a0, 31
+; RV64B-NEXT: srliw a1, a1, 1
+; RV64B-NEXT: or a0, a0, a1
+; RV64B-NEXT: sext.w a0, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: not_rori_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: slli a0, a0, 31
+; RV64ZBB-NEXT: srliw a1, a1, 1
+; RV64ZBB-NEXT: or a0, a0, a1
+; RV64ZBB-NEXT: sext.w a0, a0
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: not_rori_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: slli a0, a0, 31
+; RV64ZBP-NEXT: srliw a1, a1, 1
+; RV64ZBP-NEXT: or a0, a0, a1
+; RV64ZBP-NEXT: sext.w a0, a0
+; RV64ZBP-NEXT: ret
 %a = shl i32 %x, 31
 %b = lshr i32 %y, 1
 %c = or i32 %a, %b
@@ -581,35 +581,35 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: roriw_bug:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: slli a1, a0, 31
-; RV64IB-NEXT: andi a0, a0, -2
-; RV64IB-NEXT: srli a2, a0, 1
-; RV64IB-NEXT: or a1, a1, a2
-; RV64IB-NEXT: sext.w a1, a1
-; RV64IB-NEXT: xor a0, a0, a1
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: roriw_bug:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: slli a1, a0, 31
-; RV64IBB-NEXT: andi a0, a0, -2
-; RV64IBB-NEXT: srli a2, a0, 1
-; RV64IBB-NEXT: or a1, a1, a2
-; RV64IBB-NEXT: sext.w a1, a1
-; RV64IBB-NEXT: xor a0, a0, a1
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: roriw_bug:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: slli a1, a0, 31
-; RV64IBP-NEXT: andi a0, a0, -2
-; RV64IBP-NEXT: srli a2, a0, 1
-; RV64IBP-NEXT: or a1, a1, a2
-; RV64IBP-NEXT: sext.w a1, a1
-; RV64IBP-NEXT: xor a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: roriw_bug:
+; RV64B: # %bb.0:
+; RV64B-NEXT: slli a1, a0, 31
+; RV64B-NEXT: andi a0, a0, -2
+; RV64B-NEXT: srli a2, a0, 1
+; RV64B-NEXT: or a1, a1, a2
+; RV64B-NEXT: sext.w a1, a1
+; RV64B-NEXT: xor a0, a0, a1
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: roriw_bug:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: slli a1, a0, 31
+; RV64ZBB-NEXT: andi a0, a0, -2
+; RV64ZBB-NEXT: srli a2, a0, 1
+; RV64ZBB-NEXT: or a1, a1, a2
+; RV64ZBB-NEXT: sext.w a1, a1
+; RV64ZBB-NEXT: xor a0, a0, a1
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: roriw_bug:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: slli a1, a0, 31
+; RV64ZBP-NEXT: andi a0, a0, -2
+; RV64ZBP-NEXT: srli a2, a0, 1
+; RV64ZBP-NEXT: or a1, a1, a2
+; RV64ZBP-NEXT: sext.w a1, a1
+; RV64ZBP-NEXT: xor a0, a0, a1
+; RV64ZBP-NEXT: ret
 %a = shl i64 %x, 31
 %b = and i64 %x, 18446744073709551614
 %c = lshr i64 %b, 1
@@ -628,20 +628,20 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: rori_i64_fshl:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: rori a0, a0, 1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: rori_i64_fshl:
+; RV64B: # %bb.0:
+; RV64B-NEXT: rori a0, a0, 1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: rori_i64_fshl:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: rori a0, a0, 1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: rori_i64_fshl:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: rori a0, a0, 1
+; RV64ZBB-NEXT: ret
 ;
-; RV64IBP-LABEL: rori_i64_fshl:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: rori a0, a0, 1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: rori_i64_fshl:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: rori a0, a0, 1
+; RV64ZBP-NEXT: ret
 %1 = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 63)
 ret i64 %1
 }
@@ -654,20 +654,20 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: rori_i64_fshr:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: rori a0, a0, 63
-; RV64IB-NEXT: ret
+; RV64B-LABEL: rori_i64_fshr:
+; RV64B: # %bb.0:
+; RV64B-NEXT: rori a0, a0, 63
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: rori_i64_fshr:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: rori a0, a0, 63
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: rori_i64_fshr:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: rori a0, a0, 63
+; RV64ZBB-NEXT: ret
 ;
-; RV64IBP-LABEL: rori_i64_fshr:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: rori a0, a0, 63
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: rori_i64_fshr:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: rori a0, a0, 63
+; RV64ZBP-NEXT: ret
 %1 = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 63)
 ret i64 %1
 }
@@ -679,23 +679,23 @@
 ; RV64I-NEXT: srli a0, a0, 6
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: srli_i8:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a0, a0, 192
-; RV64IB-NEXT: srli a0, a0, 6
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: srli_i8:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: andi a0, a0, 192
-; RV64IBB-NEXT: srli a0, a0, 6
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: srli_i8:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: andi a0, a0, 192
-; RV64IBP-NEXT: srli a0, a0, 6
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: srli_i8:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a0, a0, 192
+; RV64B-NEXT: srli a0, a0, 6
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: srli_i8:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: andi a0, a0, 192
+; RV64ZBB-NEXT: srli a0, a0, 6
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: srli_i8:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: andi a0, a0, 192
+; RV64ZBP-NEXT: srli a0, a0, 6
+; RV64ZBP-NEXT: ret
 %1 = lshr i8 %a, 6
 ret i8 %1
 }
@@ -707,23 +707,23 @@
 ; RV64I-NEXT: srai a0, a0, 61
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: srai_i8:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.b a0, a0
-; RV64IB-NEXT: srai a0, a0, 5
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: srai_i8:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.b a0, a0
-; RV64IBB-NEXT: srai a0, a0, 5
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: srai_i8:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: slli a0, a0, 56
-; RV64IBP-NEXT: srai a0, a0, 61
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: srai_i8:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.b a0, a0
+; RV64B-NEXT: srai a0, a0, 5
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: srai_i8:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.b a0, a0
+; RV64ZBB-NEXT: srai a0, a0, 5
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: srai_i8:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: slli a0, a0, 56
+; RV64ZBP-NEXT: srai a0, a0, 61
+; RV64ZBP-NEXT: ret
 %1 = ashr i8 %a, 5
 ret i8 %1
 }
@@ -735,23 +735,23 @@
 ; RV64I-NEXT: srli a0, a0, 54
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: srli_i16:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: zext.h a0, a0
-; RV64IB-NEXT: srli a0, a0, 6
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: srli_i16:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: zext.h a0, a0
-; RV64IBB-NEXT: srli a0, a0, 6
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: srli_i16:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: zext.h a0, a0
-; RV64IBP-NEXT: srli a0, a0, 6
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: srli_i16:
+; RV64B: # %bb.0:
+; RV64B-NEXT: zext.h a0, a0
+; RV64B-NEXT: srli a0, a0, 6
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: srli_i16:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: zext.h a0, a0
+; RV64ZBB-NEXT: srli a0, a0, 6
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: srli_i16:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: zext.h a0, a0
+; RV64ZBP-NEXT: srli a0, a0, 6
+; RV64ZBP-NEXT: ret
 %1 = lshr i16 %a, 6
 ret i16 %1
 }
@@ -763,23 +763,23 @@
 ; RV64I-NEXT: srai a0, a0, 57
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: srai_i16:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.h a0, a0
-; RV64IB-NEXT: srai a0, a0, 9
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: srai_i16:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.h a0, a0
-; RV64IBB-NEXT: srai a0, a0, 9
-; RV64IBB-NEXT: ret
-;
-; RV64IBP-LABEL: srai_i16:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: slli a0, a0, 48
-; RV64IBP-NEXT: srai a0, a0, 57
-; RV64IBP-NEXT: ret
+; RV64B-LABEL: srai_i16:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.h a0, a0
+; RV64B-NEXT: srai a0, a0, 9
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: srai_i16:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.h a0, a0
+; RV64ZBB-NEXT: srai a0, a0, 9
+; RV64ZBB-NEXT: ret
+;
+; RV64ZBP-LABEL: srai_i16:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: slli a0, a0, 48
+; RV64ZBP-NEXT: srai a0, a0, 57
+; RV64ZBP-NEXT: ret
 %1 = ashr i16 %a, 9
 ret i16 %1
 }
diff --git a/llvm/test/CodeGen/RISCV/rv64zbb.ll b/llvm/test/CodeGen/RISCV/rv64zbb.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbb.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbb.ll
@@ -2,9 +2,9 @@
 ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
 ; RUN: | FileCheck %s -check-prefix=RV64I
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IB
+; RUN: | FileCheck %s -check-prefix=RV64B
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbb -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IBB
+; RUN: | FileCheck %s -check-prefix=RV64ZBB

 declare i32 @llvm.ctlz.i32(i32, i1)

@@ -81,15 +81,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ctlz_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clzw a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: ctlz_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clzw a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: ctlz_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: clzw a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: ctlz_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: clzw a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.ctlz.i32(i32 %a, i1 false)
 ret i32 %1
 }
@@ -169,19 +169,19 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: log2_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clzw a0, a0
-; RV64IB-NEXT: addi a1, zero, 31
-; RV64IB-NEXT: sub a0, a1, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: log2_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: clzw a0, a0
-; RV64IBB-NEXT: addi a1, zero, 31
-; RV64IBB-NEXT: sub a0, a1, a0
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: log2_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clzw a0, a0
+; RV64B-NEXT: addi a1, zero, 31
+; RV64B-NEXT: sub a0, a1, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: log2_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: clzw a0, a0
+; RV64ZBB-NEXT: addi a1, zero, 31
+; RV64ZBB-NEXT: sub a0, a1, a0
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.ctlz.i32(i32 %a, i1 false)
 %2 = sub i32 31, %1
 ret i32 %2
@@ -263,21 +263,21 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: log2_ceil_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: addi a0, a0, -1
-; RV64IB-NEXT: clzw a0, a0
-; RV64IB-NEXT: addi a1, zero, 32
-; RV64IB-NEXT: sub a0, a1, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: log2_ceil_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: addi a0, a0, -1
-; RV64IBB-NEXT: clzw a0, a0
-; RV64IBB-NEXT: addi a1, zero, 32
-; RV64IBB-NEXT: sub a0, a1, a0
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: log2_ceil_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: addi a0, a0, -1
+; RV64B-NEXT: clzw a0, a0
+; RV64B-NEXT: addi a1, zero, 32
+; RV64B-NEXT: sub a0, a1, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: log2_ceil_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: addi a0, a0, -1
+; RV64ZBB-NEXT: clzw a0, a0
+; RV64ZBB-NEXT: addi a1, zero, 32
+; RV64ZBB-NEXT: sub a0, a1, a0
+; RV64ZBB-NEXT: ret
 %1 = sub i32 %a, 1
 %2 = call i32 @llvm.ctlz.i32(i32 %1, i1 false)
 %3 = sub i32 32, %2
@@ -360,24 +360,24 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: findLastSet_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clzw a1, a0
-; RV64IB-NEXT: xori a1, a1, 31
-; RV64IB-NEXT: addi a2, zero, -1
-; RV64IB-NEXT: cmov a0, a0, a1, a2
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: findLastSet_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: mv a1, a0
-; RV64IBB-NEXT: addi a0, zero, -1
-; RV64IBB-NEXT: beqz a1, .LBB3_2
-; RV64IBB-NEXT: # %bb.1:
-; RV64IBB-NEXT: clzw a0, a1
-; RV64IBB-NEXT: xori a0, a0, 31
-; RV64IBB-NEXT: .LBB3_2:
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: findLastSet_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clzw a1, a0
+; RV64B-NEXT: xori a1, a1, 31
+; RV64B-NEXT: addi a2, zero, -1
+; RV64B-NEXT: cmov a0, a0, a1, a2
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: findLastSet_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: mv a1, a0
+; RV64ZBB-NEXT: addi a0, zero, -1
+; RV64ZBB-NEXT: beqz a1, .LBB3_2
+; RV64ZBB-NEXT: # %bb.1:
+; RV64ZBB-NEXT: clzw a0, a1
+; RV64ZBB-NEXT: xori a0, a0, 31
+; RV64ZBB-NEXT: .LBB3_2:
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.ctlz.i32(i32 %a, i1 true)
 %2 = xor i32 31, %1
 %3 = icmp eq i32 %a, 0
@@ -459,17 +459,17 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ctlz_lshr_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: srliw a0, a0, 1
-; RV64IB-NEXT: clzw a0, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: ctlz_lshr_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: srliw a0, a0, 1
-; RV64IBB-NEXT: clzw a0, a0
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: ctlz_lshr_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: srliw a0, a0, 1
+; RV64B-NEXT: clzw a0, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: ctlz_lshr_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: srliw a0, a0, 1
+; RV64ZBB-NEXT: clzw a0, a0
+; RV64ZBB-NEXT: ret
 %1 = lshr i32 %a, 1
 %2 = call i32 @llvm.ctlz.i32(i32 %1, i1 false)
 ret i32 %2
@@ -547,15 +547,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ctlz_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clz a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: ctlz_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clz a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: ctlz_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: clz a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: ctlz_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: clz a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i64 @llvm.ctlz.i64(i64 %a, i1 false)
 ret i64 %1
 }
@@ -622,15 +622,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cttz_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ctzw a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cttz_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ctzw a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: cttz_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: ctzw a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: cttz_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: ctzw a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.cttz.i32(i32 %a, i1 false)
 ret i32 %1
 }
@@ -689,15 +689,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cttz_zero_undef_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ctzw a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cttz_zero_undef_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ctzw a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: cttz_zero_undef_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: ctzw a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: cttz_zero_undef_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: ctzw a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.cttz.i32(i32 %a, i1 true)
 ret i32 %1
 }
@@ -764,22 +764,22 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: findFirstSet_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ctzw a1, a0
-; RV64IB-NEXT: addi a2, zero, -1
-; RV64IB-NEXT: cmov a0, a0, a1, a2
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: findFirstSet_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: mv a1, a0
-; RV64IBB-NEXT: addi a0, zero, -1
-; RV64IBB-NEXT: beqz a1, .LBB8_2
-; RV64IBB-NEXT: # %bb.1:
-; RV64IBB-NEXT: ctzw a0, a1
-; RV64IBB-NEXT: .LBB8_2:
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: findFirstSet_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ctzw a1, a0
+; RV64B-NEXT: addi a2, zero, -1
+; RV64B-NEXT: cmov a0, a0, a1, a2
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: findFirstSet_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: mv a1, a0
+; RV64ZBB-NEXT: addi a0, zero, -1
+; RV64ZBB-NEXT: beqz a1, .LBB8_2
+; RV64ZBB-NEXT: # %bb.1:
+; RV64ZBB-NEXT: ctzw a0, a1
+; RV64ZBB-NEXT: .LBB8_2:
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.cttz.i32(i32 %a, i1 true)
 %2 = icmp eq i32 %a, 0
 %3 = select i1 %2, i32 -1, i32 %1
@@ -849,23 +849,23 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ffs_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ctzw a1, a0
-; RV64IB-NEXT: addi a1, a1, 1
-; RV64IB-NEXT: cmov a0, a0, a1, zero
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: ffs_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: mv a1, a0
-; RV64IBB-NEXT: mv a0, zero
-; RV64IBB-NEXT: beqz a1, .LBB9_2
-; RV64IBB-NEXT: # %bb.1:
-; RV64IBB-NEXT: ctzw a0, a1
-; RV64IBB-NEXT: addi a0, a0, 1
-; RV64IBB-NEXT: .LBB9_2:
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: ffs_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ctzw a1, a0
+; RV64B-NEXT: addi a1, a1, 1
+; RV64B-NEXT: cmov a0, a0, a1, zero
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: ffs_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: mv a1, a0
+; RV64ZBB-NEXT: mv a0, zero
+; RV64ZBB-NEXT: beqz a1, .LBB9_2
+; RV64ZBB-NEXT: # %bb.1:
+; RV64ZBB-NEXT: ctzw a0, a1
+; RV64ZBB-NEXT: addi a0, a0, 1
+; RV64ZBB-NEXT: .LBB9_2:
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.cttz.i32(i32 %a, i1 true)
 %2 = add i32 %1, 1
 %3 = icmp eq i32 %a, 0
@@ -935,15 +935,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cttz_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ctz a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cttz_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ctz a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: cttz_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: ctz a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: cttz_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: ctz a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i64 @llvm.cttz.i64(i64 %a, i1 false)
 ret i64 %1
 }
@@ -997,15 +997,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ctpop_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: cpopw a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: ctpop_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: cpopw a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: ctpop_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: cpopw a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: ctpop_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: cpopw a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i32 @llvm.ctpop.i32(i32 %a)
 ret i32 %1
 }
@@ -1056,17 +1056,17 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ctpop_i32_load:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: lwu a0, 0(a0)
-; RV64IB-NEXT: cpopw a0, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: ctpop_i32_load:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: lwu a0, 0(a0)
-; RV64IBB-NEXT: cpopw a0, a0
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: ctpop_i32_load:
+; RV64B: # %bb.0:
+; RV64B-NEXT: lwu a0, 0(a0)
+; RV64B-NEXT: cpopw a0, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: ctpop_i32_load:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: lwu a0, 0(a0)
+; RV64ZBB-NEXT: cpopw a0, a0
+; RV64ZBB-NEXT: ret
 %a = load i32, i32* %p
 %1 = call i32 @llvm.ctpop.i32(i32 %a)
 ret i32 %1
@@ -1125,15 +1125,15 @@
 ; RV64I-NEXT: addi sp, sp, 16
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: ctpop_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: cpop a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: ctpop_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: cpop a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: ctpop_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: cpop a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: ctpop_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: cpop a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i64 @llvm.ctpop.i64(i64 %a)
 ret i64 %1
 }
@@ -1145,15 +1145,15 @@
 ; RV64I-NEXT: srai a0, a0, 56
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sextb_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.b a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sextb_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.b a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: sextb_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.b a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: sextb_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.b a0, a0
+; RV64ZBB-NEXT: ret
 %shl = shl i32 %a, 24
 %shr = ashr exact i32 %shl, 24
 ret i32 %shr
@@ -1166,15 +1166,15 @@
 ; RV64I-NEXT: srai a0, a0, 56
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sextb_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.b a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sextb_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.b a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: sextb_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.b a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: sextb_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.b a0, a0
+; RV64ZBB-NEXT: ret
 %shl = shl i64 %a, 56
 %shr = ashr exact i64 %shl, 56
 ret i64 %shr
@@ -1187,15 +1187,15 @@
 ; RV64I-NEXT: srai a0, a0, 48
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sexth_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.h a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sexth_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.h a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: sexth_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.h a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: sexth_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.h a0, a0
+; RV64ZBB-NEXT: ret
 %shl = shl i32 %a, 16
 %shr = ashr exact i32 %shl, 16
 ret i32 %shr
@@ -1208,15 +1208,15 @@
 ; RV64I-NEXT: srai a0, a0, 48
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sexth_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.h a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sexth_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.h a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: sexth_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.h a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: sexth_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.h a0, a0
+; RV64ZBB-NEXT: ret
 %shl = shl i64 %a, 48
 %shr = ashr exact i64 %shl, 48
 ret i64 %shr
@@ -1231,15 +1231,15 @@
 ; RV64I-NEXT: .LBB18_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: min_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: min a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: min_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: min a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: min_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: min a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: min_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: min a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp slt i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -1254,15 +1254,15 @@
 ; RV64I-NEXT: .LBB19_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: min_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: min a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: min_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: min a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: min_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: min a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: min_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: min a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp slt i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -1277,15 +1277,15 @@
 ; RV64I-NEXT: .LBB20_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: max_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: max a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: max_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: max a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: max_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: max a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: max_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: max a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp sgt i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -1300,15 +1300,15 @@
 ; RV64I-NEXT: .LBB21_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: max_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: max a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: max_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: max a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: max_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: max a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: max_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: max a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp sgt i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -1323,15 +1323,15 @@
 ; RV64I-NEXT: .LBB22_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: minu_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: minu a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: minu_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: minu a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: minu_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: minu a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: minu_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: minu a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp ult i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -1346,15 +1346,15 @@
 ; RV64I-NEXT: .LBB23_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: minu_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: minu a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: minu_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: minu a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: minu_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: minu a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: minu_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: minu a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp ult i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -1369,15 +1369,15 @@
 ; RV64I-NEXT: .LBB24_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: maxu_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: maxu a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: maxu_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: maxu a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: maxu_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: maxu a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: maxu_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: maxu a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp ugt i32 %a, %b
 %cond = select i1 %cmp, i32 %a, i32 %b
 ret i32 %cond
@@ -1392,15 +1392,15 @@
 ; RV64I-NEXT: .LBB25_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: maxu_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: maxu a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: maxu_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: maxu a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: maxu_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: maxu a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: maxu_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: maxu a0, a0, a1
+; RV64ZBB-NEXT: ret
 %cmp = icmp ugt i64 %a, %b
 %cond = select i1 %cmp, i64 %a, i64 %b
 ret i64 %cond
@@ -1417,19 +1417,19 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: abs_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sext.w a0, a0
-; RV64IB-NEXT: neg a1, a0
-; RV64IB-NEXT: max a0, a0, a1
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: abs_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: sext.w a0, a0
-; RV64IBB-NEXT: neg a1, a0
-; RV64IBB-NEXT: max a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: abs_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sext.w a0, a0
+; RV64B-NEXT: neg a1, a0
+; RV64B-NEXT: max a0, a0, a1
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: abs_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: sext.w a0, a0
+; RV64ZBB-NEXT: neg a1, a0
+; RV64ZBB-NEXT: max a0, a0, a1
+; RV64ZBB-NEXT: ret
 %abs = tail call i32 @llvm.abs.i32(i32 %x, i1 true)
 ret i32 %abs
 }
@@ -1444,17 +1444,17 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: abs_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: neg a1, a0
-; RV64IB-NEXT: max a0, a0, a1
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: abs_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: neg a1, a0
-; RV64IBB-NEXT: max a0, a0, a1
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: abs_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: neg a1, a0
+; RV64B-NEXT: max a0, a0, a1
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: abs_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: neg a1, a0
+; RV64ZBB-NEXT: max a0, a0, a1
+; RV64ZBB-NEXT: ret
 %abs = tail call i64 @llvm.abs.i64(i64 %x, i1 true)
 ret i64 %abs
 }
@@ -1467,15 +1467,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: zexth_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: zext.h a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: zexth_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: zext.h a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: zexth_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: zext.h a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: zexth_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: zext.h a0, a0
+; RV64ZBB-NEXT: ret
 %and = and i32 %a, 65535
 ret i32 %and
 }
@@ -1488,15 +1488,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: zexth_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: zext.h a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: zexth_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: zext.h a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: zexth_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: zext.h a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: zexth_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: zext.h a0, a0
+; RV64ZBB-NEXT: ret
 %and = and i64 %a, 65535
 ret i64 %and
 }
@@ -1521,16 +1521,16 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: bswap_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: greviw a0, a0, 24
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bswap_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: greviw a0, a0, 24
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: bswap_i32:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: rev8 a0, a0
-; RV64IBB-NEXT: srai a0, a0, 32
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: bswap_i32:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: rev8 a0, a0
+; RV64ZBB-NEXT: srai a0, a0, 32
+; RV64ZBB-NEXT: ret
 %1 = tail call i32 @llvm.bswap.i32(i32 %a)
 ret i32 %1
 }
@@ -1554,18 +1554,18 @@
 ; RV64I-NEXT: sw a0, 0(a1)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: bswap_i32_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: greviw a0, a0, 24
-; RV64IB-NEXT: sw a0, 0(a1)
-; RV64IB-NEXT: ret
-;
-; RV64IBB-LABEL: bswap_i32_nosext:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: rev8 a0, a0
-; RV64IBB-NEXT: srli a0, a0, 32
-; RV64IBB-NEXT: sw a0, 0(a1)
-; RV64IBB-NEXT: ret
+; RV64B-LABEL: bswap_i32_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: greviw a0, a0, 24
+; RV64B-NEXT: sw a0, 0(a1)
+; RV64B-NEXT: ret
+;
+; RV64ZBB-LABEL: bswap_i32_nosext:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: rev8 a0, a0
+; RV64ZBB-NEXT: srli a0, a0, 32
+; RV64ZBB-NEXT: sw a0, 0(a1)
+; RV64ZBB-NEXT: ret
 %1 = tail call i32 @llvm.bswap.i32(i32 %a)
 store i32 %1, i32* %x
 ret void
@@ -1607,15 +1607,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: bswap_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: rev8 a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bswap_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: rev8 a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBB-LABEL: bswap_i64:
-; RV64IBB: # %bb.0:
-; RV64IBB-NEXT: rev8 a0, a0
-; RV64IBB-NEXT: ret
+; RV64ZBB-LABEL: bswap_i64:
+; RV64ZBB: # %bb.0:
+; RV64ZBB-NEXT: rev8 a0, a0
+; RV64ZBB-NEXT: ret
 %1 = call i64 @llvm.bswap.i64(i64 %a)
 ret i64 %1
 }
diff --git a/llvm/test/CodeGen/RISCV/rv64zbc-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv64zbc-intrinsic.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbc-intrinsic.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbc-intrinsic.ll
@@ -1,21 +1,21 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IB
+; RUN: | FileCheck %s -check-prefix=RV64B
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbc -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IBC
+; RUN: | FileCheck %s -check-prefix=RV64BC

 declare i64 @llvm.riscv.clmul.i64(i64 %a, i64 %b)

 define i64 @clmul64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: clmul64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clmul a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: clmul64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clmul a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBC-LABEL: clmul64:
-; RV64IBC: # %bb.0:
-; RV64IBC-NEXT: clmul a0, a0, a1
-; RV64IBC-NEXT: ret
+; RV64BC-LABEL: clmul64:
+; RV64BC: # %bb.0:
+; RV64BC-NEXT: clmul a0, a0, a1
+; RV64BC-NEXT: ret
 %tmp = call i64 @llvm.riscv.clmul.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
@@ -23,15 +23,15 @@
 declare i64 @llvm.riscv.clmulh.i64(i64 %a, i64 %b)

 define i64 @clmul64h(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: clmul64h:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clmulh a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: clmul64h:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clmulh a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBC-LABEL: clmul64h:
-; RV64IBC: # %bb.0:
-; RV64IBC-NEXT: clmulh a0, a0, a1
-; RV64IBC-NEXT: ret
+; RV64BC-LABEL: clmul64h:
+; RV64BC: # %bb.0:
+; RV64BC-NEXT: clmulh a0, a0, a1
+; RV64BC-NEXT: ret
 %tmp = call i64 @llvm.riscv.clmulh.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
@@ -39,15 +39,15 @@
 declare i64 @llvm.riscv.clmulr.i64(i64 %a, i64 %b)

 define i64 @clmul64r(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: clmul64r:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: clmulr a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: clmul64r:
+; RV64B: # %bb.0:
+; RV64B-NEXT: clmulr a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBC-LABEL: clmul64r:
-; RV64IBC: # %bb.0:
-; RV64IBC-NEXT: clmulr a0, a0, a1
-; RV64IBC-NEXT: ret
+; RV64BC-LABEL: clmul64r:
+; RV64BC: # %bb.0:
+; RV64BC-NEXT: clmulr a0, a0, a1
+; RV64BC-NEXT: ret
 %tmp = call i64 @llvm.riscv.clmulr.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
diff --git a/llvm/test/CodeGen/RISCV/rv64zbe-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv64zbe-intrinsic.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbe-intrinsic.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbe-intrinsic.ll
@@ -1,39 +1,39 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IB
+; RUN: | FileCheck %s -check-prefix=RV64B
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbe -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IBE
+; RUN: | FileCheck %s -check-prefix=RV64ZBE

 declare i32 @llvm.riscv.bcompress.i32(i32 %a, i32 %b)

 define signext i32 @bcompress32(i32 signext %a, i32 signext %b) nounwind {
-; RV64IB-LABEL: bcompress32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bcompressw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bcompress32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bcompressw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBE-LABEL: bcompress32:
-; RV64IBE: # %bb.0:
-; RV64IBE-NEXT: bcompressw a0, a0, a1
-; RV64IBE-NEXT: ret
+; RV64ZBE-LABEL: bcompress32:
+; RV64ZBE: # %bb.0:
+; RV64ZBE-NEXT: bcompressw a0, a0, a1
+; RV64ZBE-NEXT: ret
 %tmp = call i32 @llvm.riscv.bcompress.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define signext i32 @bcompress32_demandedbits(i32 signext %a, i32 signext %b, i32 signext %c, i32 signext %d) nounwind {
-; RV64IB-LABEL: bcompress32_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: add a0, a0, a1
-; RV64IB-NEXT: add a1, a2, a3
-; RV64IB-NEXT: bcompressw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bcompress32_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: add a0, a0, a1
+; RV64B-NEXT: add a1, a2, a3
+; RV64B-NEXT: bcompressw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBE-LABEL: bcompress32_demandedbits:
-; RV64IBE: # %bb.0:
-; RV64IBE-NEXT: add a0, a0, a1
-; RV64IBE-NEXT: add a1, a2, a3
-; RV64IBE-NEXT: bcompressw a0, a0, a1
-; RV64IBE-NEXT: ret
+; RV64ZBE-LABEL: bcompress32_demandedbits:
+; RV64ZBE: # %bb.0:
+; RV64ZBE-NEXT: add a0, a0, a1
+; RV64ZBE-NEXT: add a1, a2, a3
+; RV64ZBE-NEXT: bcompressw a0, a0, a1
+; RV64ZBE-NEXT: ret
 %e = add i32 %a, %b
 %f = add i32 %c, %d
 %tmp = call i32 @llvm.riscv.bcompress.i32(i32 %e, i32 %f)
@@ -43,33 +43,33 @@
 declare i32 @llvm.riscv.bdecompress.i32(i32 %a, i32 %b)

 define signext i32 @bdecompress32(i32 signext %a, i32 signext %b) nounwind {
-; RV64IB-LABEL: bdecompress32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bdecompressw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bdecompress32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bdecompressw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBE-LABEL: bdecompress32:
-; RV64IBE: # %bb.0:
-; RV64IBE-NEXT: bdecompressw a0, a0, a1
-; RV64IBE-NEXT: ret
+; RV64ZBE-LABEL: bdecompress32:
+; RV64ZBE: # %bb.0:
+; RV64ZBE-NEXT: bdecompressw a0, a0, a1
+; RV64ZBE-NEXT: ret
 %tmp = call i32 @llvm.riscv.bdecompress.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define signext i32 @bdecompress32_demandedbits(i32 signext %a, i32 signext %b, i32 signext %c, i32 signext %d) nounwind {
-; RV64IB-LABEL: bdecompress32_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: add a0, a0, a1
-; RV64IB-NEXT: add a1, a2, a3
-; RV64IB-NEXT: bdecompressw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bdecompress32_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: add a0, a0, a1
+; RV64B-NEXT: add a1, a2, a3
+; RV64B-NEXT: bdecompressw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBE-LABEL: bdecompress32_demandedbits:
-; RV64IBE: # %bb.0:
-; RV64IBE-NEXT: add a0, a0, a1
-; RV64IBE-NEXT: add a1, a2, a3
-; RV64IBE-NEXT: bdecompressw a0, a0, a1
-; RV64IBE-NEXT: ret
+; RV64ZBE-LABEL: bdecompress32_demandedbits:
+; RV64ZBE: # %bb.0:
+; RV64ZBE-NEXT: add a0, a0, a1
+; RV64ZBE-NEXT: add a1, a2, a3
+; RV64ZBE-NEXT: bdecompressw a0, a0, a1
+; RV64ZBE-NEXT: ret
 %e = add i32 %a, %b
 %f = add i32 %c, %d
 %tmp = call i32 @llvm.riscv.bdecompress.i32(i32 %e, i32 %f)
@@ -79,15 +79,15 @@
 declare i64 @llvm.riscv.bcompress.i64(i64 %a, i64 %b)

 define i64 @bcompress64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: bcompress64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bcompress a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bcompress64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bcompress a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBE-LABEL: bcompress64:
-; RV64IBE: # %bb.0:
-; RV64IBE-NEXT: bcompress a0, a0, a1
-; RV64IBE-NEXT: ret
+; RV64ZBE-LABEL: bcompress64:
+; RV64ZBE: # %bb.0:
+; RV64ZBE-NEXT: bcompress a0, a0, a1
+; RV64ZBE-NEXT: ret
 %tmp = call i64 @llvm.riscv.bcompress.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
@@ -95,15 +95,15 @@
 declare i64 @llvm.riscv.bdecompress.i64(i64 %a, i64 %b)

 define i64 @bdecompress64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: bdecompress64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bdecompress a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: bdecompress64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bdecompress a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBE-LABEL: bdecompress64:
-; RV64IBE: # %bb.0:
-; RV64IBE-NEXT: bdecompress a0, a0, a1
-; RV64IBE-NEXT: ret
+; RV64ZBE-LABEL: bdecompress64:
+; RV64ZBE: # %bb.0:
+; RV64ZBE-NEXT: bdecompress a0, a0, a1
+; RV64ZBE-NEXT: ret
 %tmp = call i64 @llvm.riscv.bdecompress.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
diff --git a/llvm/test/CodeGen/RISCV/rv64zbp-intrinsic.ll b/llvm/test/CodeGen/RISCV/rv64zbp-intrinsic.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbp-intrinsic.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbp-intrinsic.ll
@@ -1,37 +1,37 @@
 ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IB
+; RUN: | FileCheck %s -check-prefix=RV64B
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbp -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IBP
+; RUN: | FileCheck %s -check-prefix=RV64ZBP

 declare i32 @llvm.riscv.grev.i32(i32 %a, i32 %b)

 define signext i32 @grev32(i32 signext %a, i32 signext %b) nounwind {
-; RV64IB-LABEL: grev32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: grevw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: grev32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: grevw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: grev32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: grevw a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: grev32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: grevw a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.grev.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define signext i32 @grev32_demandedbits(i32 signext %a, i32 signext %b, i32 signext %c) nounwind {
-; RV64IB-LABEL: grev32_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: add a0, a0, a1
-; RV64IB-NEXT: grevw a0, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: grev32_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: add a0, a0, a1
+; RV64B-NEXT: grevw a0, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: grev32_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: add a0, a0, a1
-; RV64IBP-NEXT: grevw a0, a0, a2
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: grev32_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: add a0, a0, a1
+; RV64ZBP-NEXT: grevw a0, a0, a2
+; RV64ZBP-NEXT: ret
 %d = add i32 %a, %b
 %e = and i32 %c, 31
 %tmp = call i32 @llvm.riscv.grev.i32(i32 %d, i32 %e)
@@ -41,15 +41,15 @@
 declare i32 @llvm.riscv.grevi.i32(i32 %a)

 define signext i32 @grevi32(i32 signext %a) nounwind {
-; RV64IB-LABEL: grevi32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: greviw a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: grevi32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: greviw a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: grevi32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: greviw a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: grevi32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: greviw a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.grev.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -57,31 +57,31 @@
 declare i32 @llvm.riscv.gorc.i32(i32 %a, i32 %b)

 define signext i32 @gorc32(i32 signext %a, i32 signext %b) nounwind {
-; RV64IB-LABEL: gorc32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorcw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorcw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorcw a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorcw a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.gorc.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define signext i32 @gorc32_demandedbits(i32 signext %a, i32 signext %b, i32 signext %c) nounwind {
-; RV64IB-LABEL: gorc32_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: add a0, a0, a1
-; RV64IB-NEXT: gorcw a0, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc32_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: add a0, a0, a1
+; RV64B-NEXT: gorcw a0, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc32_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: add a0, a0, a1
-; RV64IBP-NEXT: gorcw a0, a0, a2
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc32_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: add a0, a0, a1
+; RV64ZBP-NEXT: gorcw a0, a0, a2
+; RV64ZBP-NEXT: ret
 %d = add i32 %a, %b
 %e = and i32 %c, 31
 %tmp = call i32 @llvm.riscv.gorc.i32(i32 %d, i32 %e)
@@ -89,15 +89,15 @@
 }

 define signext i32 @gorci32(i32 signext %a) nounwind {
-; RV64IB-LABEL: gorci32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorci32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorci32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorci32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.gorc.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -105,31 +105,31 @@
 declare i32 @llvm.riscv.shfl.i32(i32 %a, i32 %b)

 define signext i32 @shfl32(i32 signext %a, i32 signext %b) nounwind {
-; RV64IB-LABEL: shfl32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: shflw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: shfl32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: shflw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: shfl32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: shflw a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: shfl32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: shflw a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.shfl.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define signext i32 @shfl32_demandedbits(i32 signext %a, i32 signext %b, i32 signext %c) nounwind {
-; RV64IB-LABEL: shfl32_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: add a0, a0, a1
-; RV64IB-NEXT: shflw a0, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: shfl32_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: add a0, a0, a1
+; RV64B-NEXT: shflw a0, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: shfl32_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: add a0, a0, a1
-; RV64IBP-NEXT: shflw a0, a0, a2
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: shfl32_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: add a0, a0, a1
+; RV64ZBP-NEXT: shflw a0, a0, a2
+; RV64ZBP-NEXT: ret
 %d = add i32 %a, %b
 %e = and i32 %c, 15
 %tmp = call i32 @llvm.riscv.shfl.i32(i32 %d, i32 %e)
@@ -137,15 +137,15 @@
 }

 define signext i32 @shfli32(i32 signext %a) nounwind {
-; RV64IB-LABEL: shfli32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: shfli a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: shfli32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: shfli a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: shfli32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: shfli a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: shfli32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: shfli a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.shfl.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -153,31 +153,31 @@
 declare i32 @llvm.riscv.unshfl.i32(i32 %a, i32 %b)

 define signext i32 @unshfl32(i32 signext %a, i32 signext %b) nounwind {
-; RV64IB-LABEL: unshfl32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: unshflw a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: unshfl32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: unshflw a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: unshfl32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: unshflw a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: unshfl32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: unshflw a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.unshfl.i32(i32 %a, i32 %b)
 ret i32 %tmp
 }

 define signext i32 @unshfl32_demandedbits(i32 signext %a, i32 signext %b, i32 signext %c) nounwind {
-; RV64IB-LABEL: unshfl32_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: add a0, a0, a1
-; RV64IB-NEXT: unshflw a0, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: unshfl32_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: add a0, a0, a1
+; RV64B-NEXT: unshflw a0, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: unshfl32_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: add a0, a0, a1
-; RV64IBP-NEXT: unshflw a0, a0, a2
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: unshfl32_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: add a0, a0, a1
+; RV64ZBP-NEXT: unshflw a0, a0, a2
+; RV64ZBP-NEXT: ret
 %d = add i32 %a, %b
 %e = and i32 %c, 15
 %tmp = call i32 @llvm.riscv.unshfl.i32(i32 %d, i32 %e)
@@ -185,15 +185,15 @@
 }

 define signext i32 @unshfli32(i32 signext %a) nounwind {
-; RV64IB-LABEL: unshfli32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: unshfli a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: unshfli32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: unshfli a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: unshfli32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: unshfli a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: unshfli32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: unshfli a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i32 @llvm.riscv.unshfl.i32(i32 %a, i32 13)
 ret i32 %tmp
 }
@@ -201,44 +201,44 @@
 declare i64 @llvm.riscv.grev.i64(i64 %a, i64 %b)

 define i64 @grev64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: grev64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: grev a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: grev64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: grev a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: grev64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: grev a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: grev64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: grev a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.grev.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }

 define i64 @grev64_demandedbits(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: grev64_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: grev a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: grev64_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: grev a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: grev64_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: grev a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: grev64_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: grev a0, a0, a1
+; RV64ZBP-NEXT: ret
 %c = and i64 %b, 63
 %tmp = call i64 @llvm.riscv.grev.i64(i64 %a, i64 %c)
 ret i64 %tmp
 }

 define i64 @grevi64(i64 %a) nounwind {
-; RV64IB-LABEL: grevi64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: grevi a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: grevi64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: grevi a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: grevi64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: grevi a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: grevi64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: grevi a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.grev.i64(i64 %a, i64 13)
 ret i64 %tmp
 }
@@ -246,29 +246,29 @@
 declare i64 @llvm.riscv.gorc.i64(i64 %a, i64 %b)

 define i64 @gorc64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: gorc64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorc a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorc a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorc a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorc a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.gorc.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }

 define i64 @gorc64_demandedbits(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: gorc64_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorc a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc64_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorc a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc64_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorc a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc64_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorc a0, a0, a1
+; RV64ZBP-NEXT: ret
 %c = and i64 %b, 63
 %tmp = call i64 @llvm.riscv.gorc.i64(i64 %a, i64 %c)
 ret i64 %tmp
@@ -277,15 +277,15 @@
 declare i64 @llvm.riscv.gorci.i64(i64 %a)

 define i64 @gorci64(i64 %a) nounwind {
-; RV64IB-LABEL: gorci64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorci a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorci64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorci a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorci64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorci a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorci64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorci a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.gorc.i64(i64 %a, i64 13)
 ret i64 %tmp
 }
@@ -293,44 +293,44 @@
 declare i64 @llvm.riscv.shfl.i64(i64 %a, i64 %b)

 define i64 @shfl64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: shfl64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: shfl a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: shfl64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: shfl a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: shfl64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: shfl a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: shfl64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: shfl a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.shfl.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }

 define i64 @shfl64_demandedbits(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: shfl64_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: shfl a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: shfl64_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: shfl a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: shfl64_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: shfl a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: shfl64_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: shfl a0, a0, a1
+; RV64ZBP-NEXT: ret
 %c = and i64 %b, 31
 %tmp = call i64 @llvm.riscv.shfl.i64(i64 %a, i64 %c)
 ret i64 %tmp
 }

 define i64 @shfli64(i64 %a) nounwind {
-; RV64IB-LABEL: shfli64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: shfli a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: shfli64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: shfli a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: shfli64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: shfli a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: shfli64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: shfli a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.shfl.i64(i64 %a, i64 13)
 ret i64 %tmp
 }
@@ -338,44 +338,44 @@
 declare i64 @llvm.riscv.unshfl.i64(i64 %a, i64 %b)

 define i64 @unshfl64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: unshfl64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: unshfl a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: unshfl64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: unshfl a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: unshfl64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: unshfl a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: unshfl64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: unshfl a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.unshfl.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }

 define i64 @unshfl64_demandedbits(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: unshfl64_demandedbits:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: unshfl a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: unshfl64_demandedbits:
+; RV64B: # %bb.0:
+; RV64B-NEXT: unshfl a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: unshfl64_demandedbits:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: unshfl a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: unshfl64_demandedbits:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: unshfl a0, a0, a1
+; RV64ZBP-NEXT: ret
 %c = and i64 %b, 31
 %tmp = call i64 @llvm.riscv.unshfl.i64(i64 %a, i64 %c)
 ret i64 %tmp
 }

 define i64 @unshfli64(i64 %a) nounwind {
-; RV64IB-LABEL: unshfli64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: unshfli a0, a0, 13
-; RV64IB-NEXT: ret
+; RV64B-LABEL: unshfli64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: unshfli a0, a0, 13
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: unshfli64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: unshfli a0, a0, 13
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: unshfli64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: unshfli a0, a0, 13
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.unshfl.i64(i64 %a, i64 13)
 ret i64 %tmp
 }
@@ -383,15 +383,15 @@
 declare i64 @llvm.riscv.xperm.n.i64(i64 %a, i64 %b)

 define i64 @xpermn64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: xpermn64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xperm.n a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: xpermn64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xperm.n a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: xpermn64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: xperm.n a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: xpermn64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: xperm.n a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.xperm.n.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
@@ -399,15 +399,15 @@
 declare i64 @llvm.riscv.xperm.b.i64(i64 %a, i64 %b)

 define i64 @xpermb64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: xpermb64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xperm.b a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: xpermb64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xperm.b a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: xpermb64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: xperm.b a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: xpermb64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: xperm.b a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.xperm.b.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
@@ -415,15 +415,15 @@
 declare i64 @llvm.riscv.xperm.h.i64(i64 %a, i64 %b)

 define i64 @xpermh64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: xpermh64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xperm.h a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: xpermh64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xperm.h a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: xpermh64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: xperm.h a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: xpermh64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: xperm.h a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.xperm.h.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
@@ -431,15 +431,15 @@
 declare i64 @llvm.riscv.xperm.w.i64(i64 %a, i64 %b)

 define i64 @xpermw64(i64 %a, i64 %b) nounwind {
-; RV64IB-LABEL: xpermw64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xperm.w a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: xpermw64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xperm.w a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: xpermw64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: xperm.w a0, a0, a1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: xpermw64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: xperm.w a0, a0, a1
+; RV64ZBP-NEXT: ret
 %tmp = call i64 @llvm.riscv.xperm.w.i64(i64 %a, i64 %b)
 ret i64 %tmp
 }
diff --git a/llvm/test/CodeGen/RISCV/rv64zbp.ll b/llvm/test/CodeGen/RISCV/rv64zbp.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbp.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbp.ll
@@ -2,9 +2,9 @@
 ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
 ; RUN: | FileCheck %s -check-prefix=RV64I
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IB
+; RUN: | FileCheck %s -check-prefix=RV64B
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbp -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IBP
+; RUN: | FileCheck %s -check-prefix=RV64ZBP

 define signext i32 @gorc1_i32(i32 signext %a) nounwind {
 ; RV64I-LABEL: gorc1_i32:
@@ -22,15 +22,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc1_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc1_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 1
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc1_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 1
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc1_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 1
+; RV64ZBP-NEXT: ret
 %and = shl i32 %a, 1
 %shl = and i32 %and, -1431655766
 %and1 = lshr i32 %a, 1
@@ -67,15 +67,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc1_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc.p a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc1_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc.p a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc1_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc.p a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc1_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc.p a0, a0
+; RV64ZBP-NEXT: ret
 %and = shl i64 %a, 1
 %shl = and i64 %and, -6148914691236517206
 %and1 = lshr i64 %a, 1
@@ -101,15 +101,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc2_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc2_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 2
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc2_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 2
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc2_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 2
+; RV64ZBP-NEXT: ret
 %and = shl i32 %a, 2
 %shl = and i32 %and, -858993460
 %and1 = lshr i32 %a, 2
@@ -146,15 +146,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc2_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc2.n a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc2_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc2.n a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc2_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc2.n a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc2_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc2.n a0, a0
+; RV64ZBP-NEXT: ret
 %and = shl i64 %a, 2
 %shl = and i64 %and, -3689348814741910324
 %and1 = lshr i64 %a, 2
@@ -190,15 +190,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc3_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 3
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc3_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 3
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc3_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 3
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc3_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 3
+; RV64ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -263,15 +263,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc3_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc.n a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc3_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc.n a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc3_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc.n a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc3_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc.n a0, a0
+; RV64ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -303,15 +303,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc4_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 4
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc4_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 4
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc4_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 4
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc4_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 4
+; RV64ZBP-NEXT: ret
 %and = shl i32 %a, 4
 %shl = and i32 %and, -252645136
 %and1 = lshr i32 %a, 4
@@ -348,15 +348,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc4_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc4.b a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc4_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc4.b a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc4_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc4.b a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc4_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc4.b a0, a0
+; RV64ZBP-NEXT: ret
 %and = shl i64 %a, 4
 %shl = and i64 %and, -1085102592571150096
 %and1 = lshr i64 %a, 4
@@ -392,15 +392,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc5_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 5
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc5_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 5
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc5_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 5
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc5_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 5
+; RV64ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -465,15 +465,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc5_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorci a0, a0, 5
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc5_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorci a0, a0, 5
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc5_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorci a0, a0, 5
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc5_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorci a0, a0, 5
+; RV64ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -515,15 +515,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc6_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 6
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc6_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 6
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc6_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 6
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc6_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 6
+; RV64ZBP-NEXT: ret
 %and1 = shl i32 %a, 2
 %shl1 = and i32 %and1, -858993460
 %and1b = lshr i32 %a, 2
@@ -588,15 +588,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc6_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc2.b a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc6_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc2.b a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc6_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc2.b a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc6_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc2.b a0, a0
+; RV64ZBP-NEXT: ret
 %and1 = shl i64 %a, 2
 %shl1 = and i64 %and1, -3689348814741910324
 %and1b = lshr i64 %a, 2
@@ -648,15 +648,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc7_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 7
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc7_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 7
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc7_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 7
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc7_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 7
+; RV64ZBP-NEXT: ret
 %and1 = shl i32 %a, 1
 %shl1 = and i32 %and1, -1431655766
 %and1b = lshr i32 %a, 1
@@ -749,15 +749,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc7_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc.b a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc7_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc.b a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc7_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc.b a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc7_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc.b a0, a0
+; RV64ZBP-NEXT: ret
 %and1 = shl i64 %a, 1
 %shl1 = and i64 %and1, -6148914691236517206
 %and1b = lshr i64 %a, 1
@@ -795,15 +795,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc8_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 8
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc8_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 8
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc8_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 8
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc8_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 8
+; RV64ZBP-NEXT: ret
 %and = shl i32 %a, 8
 %shl = and i32 %and, -16711936
 %and1 = lshr i32 %a, 8
@@ -836,15 +836,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc8_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: orc8.h a0, a0
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc8_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: orc8.h a0, a0
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc8_i64:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: orc8.h a0, a0
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc8_i64:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: orc8.h a0, a0
+; RV64ZBP-NEXT: ret
 %and = shl i64 %a, 8
 %shl = and i64 %and, -71777214294589696
 %and1 = lshr i64 %a, 8
@@ -864,15 +864,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc16_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 16
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc16_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 16
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc16_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 16
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc16_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 16
+; RV64ZBP-NEXT: ret
 %shl = shl i32 %a, 16
 %shr = lshr i32 %a, 16
 %or = or i32 %shr, %a
@@ -889,15 +889,15 @@
 ; RV64I-NEXT: or a0, a1, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc16_rotl_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 16
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc16_rotl_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 16
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc16_rotl_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 16
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc16_rotl_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 16
+; RV64ZBP-NEXT: ret
 %rot = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 16)
 %or = or i32 %rot, %a
 ret i32 %or
@@ -912,15 +912,15 @@
 ; RV64I-NEXT: or a0, a1, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: gorc16_rotr_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: gorciw a0, a0, 16
-; RV64IB-NEXT: ret
+; RV64B-LABEL: gorc16_rotr_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: gorciw a0, a0, 16
+; RV64B-NEXT: ret
 ;
-; RV64IBP-LABEL: gorc16_rotr_i32:
-; RV64IBP: # %bb.0:
-; RV64IBP-NEXT: gorciw a0, a0, 16
-; RV64IBP-NEXT: ret
+; RV64ZBP-LABEL: gorc16_rotr_i32:
+; RV64ZBP: # %bb.0:
+; RV64ZBP-NEXT: gorciw a0, a0, 16
+; RV64ZBP-NEXT: ret
 %rot = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 16)
%or = or i32 %rot, %a ret i32 %or @@ -944,15 +944,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc16_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc16.w a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc16_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: orc16.w a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc16_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orc16.w a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc16_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orc16.w a0, a0 +; RV64ZBP-NEXT: ret %and = shl i64 %a, 16 %shl = and i64 %and, -281470681808896 %and1 = lshr i64 %a, 16 @@ -971,15 +971,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc32 a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc32: +; RV64B: # %bb.0: +; RV64B-NEXT: orc32 a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orc32 a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orc32 a0, a0 +; RV64ZBP-NEXT: ret %shl = shl i64 %a, 32 %shr = lshr i64 %a, 32 %or = or i64 %shr, %a @@ -1010,15 +1010,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc2b_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: gorciw a0, a0, 2 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc2b_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: gorciw a0, a0, 2 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc2b_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: gorciw a0, a0, 2 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc2b_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: gorciw a0, a0, 2 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 2 %shl1 = and i32 %and1, -858993460 %and1b = lshr i32 %a, 2 @@ -1068,15 +1068,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc2b_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc2.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc2b_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: orc2.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc2b_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orc2.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc2b_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orc2.n a0, a0 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 2 %shl1 = and i64 %and1, -3689348814741910324 %and1b = lshr i64 %a, 2 @@ -1125,15 +1125,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc3b_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: gorciw a0, a0, 3 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc3b_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: gorciw a0, a0, 3 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc3b_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: gorciw a0, a0, 3 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc3b_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: gorciw a0, a0, 3 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 1 %shl1 = and i32 %and1, -1431655766 %and1b = lshr i32 %a, 1 @@ -1211,15 +1211,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc3b_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc3b_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: orc.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc3b_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orc.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc3b_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orc.n a0, a0 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 1 %shl1 = and i64 %and1, -6148914691236517206 %and1b = lshr i64 %a, 1 @@ -1250,15 +1250,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc32_rotl: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc32 a0, a0 -; RV64IB-NEXT: ret +; 
RV64B-LABEL: gorc32_rotl: +; RV64B: # %bb.0: +; RV64B-NEXT: orc32 a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc32_rotl: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orc32 a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc32_rotl: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orc32 a0, a0 +; RV64ZBP-NEXT: ret %rot = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 32) %or = or i64 %rot, %a ret i64 %or @@ -1273,15 +1273,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: gorc32_rotr: -; RV64IB: # %bb.0: -; RV64IB-NEXT: orc32 a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: gorc32_rotr: +; RV64B: # %bb.0: +; RV64B-NEXT: orc32 a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: gorc32_rotr: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: orc32 a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: gorc32_rotr: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: orc32 a0, a0 +; RV64ZBP-NEXT: ret %rot = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 32) %or = or i64 %rot, %a ret i64 %or @@ -1302,15 +1302,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev1_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 1 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev1_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev1_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev1_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 1 +; RV64ZBP-NEXT: ret %and = shl i32 %a, 1 %shl = and i32 %and, -1431655766 %and1 = lshr i32 %a, 1 @@ -1345,15 +1345,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev1_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev.p a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev1_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev.p a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev1_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev.p a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev1_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev.p a0, a0 +; RV64ZBP-NEXT: ret %and = shl i64 %a, 1 %shl = and i64 %and, -6148914691236517206 %and1 = lshr i64 %a, 1 @@ -1377,15 +1377,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev2_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 2 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev2_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 2 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev2_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 2 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev2_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 2 +; RV64ZBP-NEXT: ret %and = shl i32 %a, 2 %shl = and i32 %and, -858993460 %and1 = lshr i32 %a, 2 @@ -1420,15 +1420,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev2_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev2.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev2_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev2.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev2_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev2.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev2_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev2.n a0, a0 +; RV64ZBP-NEXT: ret %and = shl i64 %a, 2 %shl = and i64 %and, -3689348814741910324 %and1 = lshr i64 %a, 2 @@ -1461,15 +1461,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev3_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 3 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev3_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 3 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev3_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 3 -; 
RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev3_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 3 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 1 %shl1 = and i32 %and1, -1431655766 %and1b = lshr i32 %a, 1 @@ -1530,15 +1530,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev3_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev3_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev3_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev3_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev.n a0, a0 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 1 %shl1 = and i64 %and1, -6148914691236517206 %and1b = lshr i64 %a, 1 @@ -1567,15 +1567,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev4_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 4 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev4_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 4 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev4_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 4 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev4_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 4 +; RV64ZBP-NEXT: ret %and = shl i32 %a, 4 %shl = and i32 %and, -252645136 %and1 = lshr i32 %a, 4 @@ -1610,15 +1610,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev4_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev4.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev4_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev4.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev4_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev4.b a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev4_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev4.b a0, a0 +; RV64ZBP-NEXT: ret %and = shl i64 %a, 4 %shl = and i64 %and, -1085102592571150096 %and1 = lshr i64 %a, 4 @@ -1651,15 +1651,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev5_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 5 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev5_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 5 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev5_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 5 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev5_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 5 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 1 %shl1 = and i32 %and1, -1431655766 %and1b = lshr i32 %a, 1 @@ -1720,15 +1720,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev5_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: grevi a0, a0, 5 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev5_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: grevi a0, a0, 5 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev5_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: grevi a0, a0, 5 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev5_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: grevi a0, a0, 5 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 1 %shl1 = and i64 %and1, -6148914691236517206 %and1b = lshr i64 %a, 1 @@ -1767,15 +1767,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev6_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 6 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev6_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 6 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev6_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 6 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev6_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 6 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 2 %shl1 = and i32 %and1, -858993460 %and1b = 
lshr i32 %a, 2 @@ -1836,15 +1836,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev6_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev2.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev6_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev2.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev6_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev2.b a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev6_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev2.b a0, a0 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 2 %shl1 = and i64 %and1, -3689348814741910324 %and1b = lshr i64 %a, 2 @@ -1891,15 +1891,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev7_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 7 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev7_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 7 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev7_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 7 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev7_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 7 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 1 %shl1 = and i32 %and1, -1431655766 %and1b = lshr i32 %a, 1 @@ -1986,15 +1986,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev7_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev7_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev7_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev.b a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev7_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev.b a0, a0 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 1 %shl1 = and i64 %and1, -6148914691236517206 %and1b = lshr i64 %a, 1 @@ -2028,15 +2028,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev8_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 8 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev8_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 8 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev8_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 8 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev8_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 8 +; RV64ZBP-NEXT: ret %and = shl i32 %a, 8 %shl = and i32 %and, -16711936 %and1 = lshr i32 %a, 8 @@ -2067,15 +2067,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev8_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev8.h a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev8_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev8.h a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev8_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev8.h a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev8_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev8.h a0, a0 +; RV64ZBP-NEXT: ret %and = shl i64 %a, 8 %shl = and i64 %and, -71777214294589696 %and1 = lshr i64 %a, 8 @@ -2093,15 +2093,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev16_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 16 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev16_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 16 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev16_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 16 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev16_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 16 +; RV64ZBP-NEXT: ret %shl = shl i32 %a, 16 %shr = lshr i32 %a, 16 %or = or i32 %shl, %shr @@ -2120,15 +2120,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev16_i32_fshl: -; RV64IB: # %bb.0: -; RV64IB-NEXT: roriw a0, a0, 16 -; RV64IB-NEXT: ret +; 
RV64B-LABEL: grev16_i32_fshl: +; RV64B: # %bb.0: +; RV64B-NEXT: roriw a0, a0, 16 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev16_i32_fshl: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: roriw a0, a0, 16 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev16_i32_fshl: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: roriw a0, a0, 16 +; RV64ZBP-NEXT: ret %or = tail call i32 @llvm.fshl.i32(i32 %a, i32 %a, i32 16) ret i32 %or } @@ -2142,15 +2142,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev16_i32_fshr: -; RV64IB: # %bb.0: -; RV64IB-NEXT: roriw a0, a0, 16 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev16_i32_fshr: +; RV64B: # %bb.0: +; RV64B-NEXT: roriw a0, a0, 16 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev16_i32_fshr: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: roriw a0, a0, 16 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev16_i32_fshr: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: roriw a0, a0, 16 +; RV64ZBP-NEXT: ret %or = tail call i32 @llvm.fshr.i32(i32 %a, i32 %a, i32 16) ret i32 %or } @@ -2172,15 +2172,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev16_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev16.w a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev16_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev16.w a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev16_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev16.w a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev16_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev16.w a0, a0 +; RV64ZBP-NEXT: ret %and = shl i64 %a, 16 %shl = and i64 %and, -281470681808896 %and1 = lshr i64 %a, 16 @@ -2197,15 +2197,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rori a0, a0, 32 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev32: +; RV64B: # %bb.0: +; RV64B-NEXT: rori a0, a0, 32 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rori a0, a0, 32 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rori a0, a0, 32 +; RV64ZBP-NEXT: ret %shl = shl i64 %a, 32 %shr = lshr i64 %a, 32 %or = or i64 %shl, %shr @@ -2236,15 +2236,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev3b_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 3 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev3b_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 3 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev3b_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 3 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev3b_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 3 +; RV64ZBP-NEXT: ret %and2 = shl i32 %a, 2 %shl2 = and i32 %and2, -858993460 %and2b = lshr i32 %a, 2 @@ -2305,15 +2305,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev3b_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev3b_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev3b_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev3b_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev.n a0, a0 +; RV64ZBP-NEXT: ret %and2 = shl i64 %a, 2 %shl2 = and i64 %and2, -3689348814741910324 %and2b = lshr i64 %a, 2 @@ -2357,15 +2357,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev2b_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 2 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev2b_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 2 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev2b_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, 
a0, 2 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev2b_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 2 +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 1 %shl1 = and i32 %and1, -1431655766 %and1b = lshr i32 %a, 1 @@ -2437,15 +2437,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev2b_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev2.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev2b_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev2.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev2b_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev2.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev2b_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev2.n a0, a0 +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 1 %shl1 = and i64 %and1, -6148914691236517206 %and1b = lshr i64 %a, 1 @@ -2499,13 +2499,13 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev0_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: ret +; RV64B-LABEL: grev0_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev0_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev0_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: ret %and1 = shl i32 %a, 1 %shl1 = and i32 %and1, -1431655766 %and1b = lshr i32 %a, 1 @@ -2587,13 +2587,13 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev0_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: ret +; RV64B-LABEL: grev0_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev0_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev0_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: ret %and1 = shl i64 %a, 1 %shl1 = and i64 %and1, -6148914691236517206 %and1b = lshr i64 %a, 1 @@ -2628,15 +2628,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev32_fshl: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rori a0, a0, 32 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev32_fshl: +; RV64B: # %bb.0: +; RV64B-NEXT: rori a0, a0, 32 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev32_fshl: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rori a0, a0, 32 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev32_fshl: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rori a0, a0, 32 +; RV64ZBP-NEXT: ret %or = tail call i64 @llvm.fshl.i64(i64 %a, i64 %a, i64 32) ret i64 %or } @@ -2649,15 +2649,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: grev32_fshr: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rori a0, a0, 32 -; RV64IB-NEXT: ret +; RV64B-LABEL: grev32_fshr: +; RV64B: # %bb.0: +; RV64B-NEXT: rori a0, a0, 32 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: grev32_fshr: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rori a0, a0, 32 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: grev32_fshr: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rori a0, a0, 32 +; RV64ZBP-NEXT: ret %or = tail call i64 @llvm.fshr.i64(i64 %a, i64 %a, i64 32) ret i64 %or } @@ -2675,15 +2675,15 @@ ; RV64I-NEXT: and a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bswap_i16: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 8 -; RV64IB-NEXT: ret +; RV64B-LABEL: bswap_i16: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 8 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bswap_i16: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 8 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bswap_i16: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 8 +; RV64ZBP-NEXT: ret %1 = tail call i16 @llvm.bswap.i16(i16 %a) ret i16 %1 } @@ -2708,15 +2708,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bswap_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 24 -; RV64IB-NEXT: ret +; RV64B-LABEL: bswap_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 
24 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bswap_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 24 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bswap_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 24 +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.bswap.i32(i32 %a) ret i32 %1 } @@ -2740,17 +2740,17 @@ ; RV64I-NEXT: sw a0, 0(a1) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bswap_i32_nosext: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 24 -; RV64IB-NEXT: sw a0, 0(a1) -; RV64IB-NEXT: ret +; RV64B-LABEL: bswap_i32_nosext: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 24 +; RV64B-NEXT: sw a0, 0(a1) +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bswap_i32_nosext: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 24 -; RV64IBP-NEXT: sw a0, 0(a1) -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bswap_i32_nosext: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 24 +; RV64ZBP-NEXT: sw a0, 0(a1) +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.bswap.i32(i32 %a) store i32 %1, i32* %x ret void @@ -2792,15 +2792,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bswap_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev8 a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: bswap_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev8 a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bswap_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev8 a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bswap_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev8 a0, a0 +; RV64ZBP-NEXT: ret %1 = call i64 @llvm.bswap.i64(i64 %a) ret i64 %1 } @@ -2826,15 +2826,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_i8: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 7 -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_i8: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 7 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bitreverse_i8: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 7 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_i8: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 7 +; RV64ZBP-NEXT: ret %1 = tail call i8 @llvm.bitreverse.i8(i8 %a) ret i8 %1 } @@ -2876,15 +2876,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_i16: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 15 -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_i16: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 15 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bitreverse_i16: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 15 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_i16: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 15 +; RV64ZBP-NEXT: ret %1 = tail call i16 @llvm.bitreverse.i16(i16 %a) ret i16 %1 } @@ -2938,15 +2938,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 31 -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 31 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bitreverse_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 31 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 31 +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.bitreverse.i32(i32 %a) ret i32 %1 } @@ -2999,17 +2999,17 @@ ; RV64I-NEXT: sw a0, 0(a1) ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_i32_nosext: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 31 -; RV64IB-NEXT: sw a0, 0(a1) -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_i32_nosext: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 31 +; RV64B-NEXT: sw a0, 0(a1) +; RV64B-NEXT: ret ; -; 
RV64IBP-LABEL: bitreverse_i32_nosext: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 31 -; RV64IBP-NEXT: sw a0, 0(a1) -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_i32_nosext: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 31 +; RV64ZBP-NEXT: sw a0, 0(a1) +; RV64ZBP-NEXT: ret %1 = tail call i32 @llvm.bitreverse.i32(i32 %a) store i32 %1, i32* %x ret void @@ -3114,15 +3114,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bitreverse_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev a0, a0 +; RV64ZBP-NEXT: ret %1 = call i64 @llvm.bitreverse.i64(i64 %a) ret i64 %1 } @@ -3147,15 +3147,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bswap_rotr_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 8 -; RV64IB-NEXT: ret +; RV64B-LABEL: bswap_rotr_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 8 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bswap_rotr_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 8 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bswap_rotr_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 8 +; RV64ZBP-NEXT: ret %1 = call i32 @llvm.bswap.i32(i32 %a) %2 = call i32 @llvm.fshr.i32(i32 %1, i32 %1, i32 16) ret i32 %2 @@ -3181,15 +3181,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bswap_rotl_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 8 -; RV64IB-NEXT: ret +; RV64B-LABEL: bswap_rotl_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 8 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bswap_rotl_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 8 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bswap_rotl_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 8 +; RV64ZBP-NEXT: ret %1 = call i32 @llvm.bswap.i32(i32 %a) %2 = call i32 @llvm.fshl.i32(i32 %1, i32 %1, i32 16) ret i32 %2 @@ -3250,15 +3250,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_bswap_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: greviw a0, a0, 7 -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_bswap_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: greviw a0, a0, 7 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bitreverse_bswap_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: greviw a0, a0, 7 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_bswap_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: greviw a0, a0, 7 +; RV64ZBP-NEXT: ret %1 = call i32 @llvm.bitreverse.i32(i32 %a) %2 = call i32 @llvm.bswap.i32(i32 %1) ret i32 %2 @@ -3382,15 +3382,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: bitreverse_bswap_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: rev.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: bitreverse_bswap_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: rev.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: bitreverse_bswap_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: rev.b a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: bitreverse_bswap_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: rev.b a0, a0 +; RV64ZBP-NEXT: ret %1 = call i64 @llvm.bitreverse.i64(i64 %a) %2 = call i64 @llvm.bswap.i64(i64 %1) ret i64 %2 @@ -3414,15 +3414,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl1_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl1_i32: +; RV64B: # %bb.0: +; 
RV64B-NEXT: zip.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl1_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl1_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip.n a0, a0 +; RV64ZBP-NEXT: ret %and = and i32 %a, -1717986919 %shl = shl i32 %a, 1 %and1 = and i32 %shl, 1145324612 @@ -3463,15 +3463,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl1_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip.n a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl1_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: zip.n a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl1_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip.n a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl1_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip.n a0, a0 +; RV64ZBP-NEXT: ret %and = and i64 %a, -7378697629483820647 %shl = shl i64 %a, 1 %and1 = and i64 %shl, 4919131752989213764 @@ -3500,15 +3500,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl2_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip2.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl2_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: zip2.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl2_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip2.b a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl2_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip2.b a0, a0 +; RV64ZBP-NEXT: ret %and = and i32 %a, -1010580541 %shl = shl i32 %a, 2 %and1 = and i32 %shl, 808464432 @@ -3550,15 +3550,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl2_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip2.b a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl2_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: zip2.b a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl2_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip2.b a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl2_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip2.b a0, a0 +; RV64ZBP-NEXT: ret %and = and i64 %a, -4340410370284600381 %shl = shl i64 %a, 2 %and1 = and i64 %shl, 3472328296227680304 @@ -3587,15 +3587,15 @@ ; RV64I-NEXT: or a0, a0, a2 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl4_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip4.h a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl4_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: zip4.h a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl4_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip4.h a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl4_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip4.h a0, a0 +; RV64ZBP-NEXT: ret %and = and i32 %a, -267390961 %shl = shl i32 %a, 4 %and1 = and i32 %shl, 251662080 @@ -3638,15 +3638,15 @@ ; RV64I-NEXT: or a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl4_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip4.h a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl4_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: zip4.h a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl4_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip4.h a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl4_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip4.h a0, a0 +; RV64ZBP-NEXT: ret %and = and i64 %a, -1148435428713435121 %shl = shl i64 %a, 4 %and1 = and i64 %shl, 1080880403494997760 @@ -3674,15 +3674,15 @@ ; RV64I-NEXT: or a0, a0, a2 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl8_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip8.w a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl8_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: zip8.w a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl8_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip8.w a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl8_i32: +; 
RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip8.w a0, a0 +; RV64ZBP-NEXT: ret %and = and i32 %a, -16776961 %shl = shl i32 %a, 8 %and1 = and i32 %shl, 16711680 @@ -3718,15 +3718,15 @@ ; RV64I-NEXT: or a0, a2, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl8_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip8.w a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl8_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: zip8.w a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl8_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip8.w a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl8_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip8.w a0, a0 +; RV64ZBP-NEXT: ret %and = and i64 %a, -72056494543077121 %shl = shl i64 %a, 8 %and1 = and i64 %shl, 71776119077928960 @@ -3757,15 +3757,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: shfl16: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zip16 a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: shfl16: +; RV64B: # %bb.0: +; RV64B-NEXT: zip16 a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: shfl16: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zip16 a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: shfl16: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zip16 a0, a0 +; RV64ZBP-NEXT: ret %and = and i64 %a, -281474976645121 %shl = shl i64 %a, 16 %and1 = and i64 %shl, 281470681743360 @@ -3787,15 +3787,15 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: pack_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: packw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: pack_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: packw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: pack_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: packw a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: pack_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: packw a0, a0, a1 +; RV64ZBP-NEXT: ret %shl = and i32 %a, 65535 %shl1 = shl i32 %b, 16 %or = or i32 %shl1, %shl @@ -3811,15 +3811,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: pack_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: pack a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: pack_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: pack a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: pack_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: pack a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: pack_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: pack a0, a0, a1 +; RV64ZBP-NEXT: ret %shl = and i64 %a, 4294967295 %shl1 = shl i64 %b, 32 %or = or i64 %shl1, %shl @@ -3835,15 +3835,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: packu_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: packuw a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: packu_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: packuw a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: packu_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: packuw a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: packu_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: packuw a0, a0, a1 +; RV64ZBP-NEXT: ret %shr = lshr i32 %a, 16 %shr1 = and i32 %b, -65536 %or = or i32 %shr1, %shr @@ -3860,15 +3860,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: packu_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: packu a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: packu_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: packu a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: packu_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: packu a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: packu_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: packu a0, a0, a1 +; RV64ZBP-NEXT: ret %shr = lshr i64 %a, 32 %shr1 = and i64 %b, -4294967296 %or = or i64 %shr1, %shr @@ -3884,15 +3884,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; 
RV64I-NEXT: ret ; -; RV64IB-LABEL: packh_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: packh a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: packh_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: packh a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: packh_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: packh a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: packh_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: packh a0, a0, a1 +; RV64ZBP-NEXT: ret %and = and i32 %a, 255 %and1 = shl i32 %b, 8 %shl = and i32 %and1, 65280 @@ -3909,15 +3909,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: packh_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: packh a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: packh_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: packh a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: packh_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: packh a0, a0, a1 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: packh_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: packh a0, a0, a1 +; RV64ZBP-NEXT: ret %and = and i64 %a, 255 %and1 = shl i64 %b, 8 %shl = and i64 %and1, 65280 @@ -3933,15 +3933,15 @@ ; RV64I-NEXT: and a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: zexth_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zext.h a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: zexth_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: zext.h a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: zexth_i32: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zext.h a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: zexth_i32: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zext.h a0, a0 +; RV64ZBP-NEXT: ret %and = and i32 %a, 65535 ret i32 %and } @@ -3954,15 +3954,15 @@ ; RV64I-NEXT: and a0, a0, a1 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: zexth_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: zext.h a0, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: zexth_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: zext.h a0, a0 +; RV64B-NEXT: ret ; -; RV64IBP-LABEL: zexth_i64: -; RV64IBP: # %bb.0: -; RV64IBP-NEXT: zext.h a0, a0 -; RV64IBP-NEXT: ret +; RV64ZBP-LABEL: zexth_i64: +; RV64ZBP: # %bb.0: +; RV64ZBP-NEXT: zext.h a0, a0 +; RV64ZBP-NEXT: ret %and = and i64 %a, 65535 ret i64 %and } diff --git a/llvm/test/CodeGen/RISCV/rv64zbs.ll b/llvm/test/CodeGen/RISCV/rv64zbs.ll --- a/llvm/test/CodeGen/RISCV/rv64zbs.ll +++ b/llvm/test/CodeGen/RISCV/rv64zbs.ll @@ -2,9 +2,9 @@ ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \ ; RUN: | FileCheck %s -check-prefix=RV64I ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IB +; RUN: | FileCheck %s -check-prefix=RV64B ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbs -verify-machineinstrs < %s \ -; RUN: | FileCheck %s -check-prefix=RV64IBS +; RUN: | FileCheck %s -check-prefix=RV64ZBS define signext i32 @sbclr_i32(i32 signext %a, i32 signext %b) nounwind { ; RV64I-LABEL: sbclr_i32: @@ -15,20 +15,20 @@ ; RV64I-NEXT: and a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbclr_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: andn a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbclr_i32: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: not a1, a1 -; RV64IBS-NEXT: and a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbclr_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: andn a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbclr_i32: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: not a1, a1 +; 
RV64ZBS-NEXT: and a0, a1, a0 +; RV64ZBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and %neg = xor i32 %shl, -1 @@ -45,20 +45,20 @@ ; RV64I-NEXT: and a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbclr_i32_no_mask: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: andn a0, a0, a1 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbclr_i32_no_mask: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: not a1, a1 -; RV64IBS-NEXT: and a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbclr_i32_no_mask: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: andn a0, a0, a1 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbclr_i32_no_mask: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: not a1, a1 +; RV64ZBS-NEXT: and a0, a1, a0 +; RV64ZBS-NEXT: ret %shl = shl i32 1, %b %neg = xor i32 %shl, -1 %and1 = and i32 %neg, %a @@ -76,24 +76,24 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbclr_i32_load: -; RV64IB: # %bb.0: -; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: andn a0, a0, a1 -; RV64IB-NEXT: sext.w a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbclr_i32_load: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: lw a0, 0(a0) -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: not a1, a1 -; RV64IBS-NEXT: and a0, a1, a0 -; RV64IBS-NEXT: sext.w a0, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbclr_i32_load: +; RV64B: # %bb.0: +; RV64B-NEXT: lw a0, 0(a0) +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: andn a0, a0, a1 +; RV64B-NEXT: sext.w a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbclr_i32_load: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: lw a0, 0(a0) +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: not a1, a1 +; RV64ZBS-NEXT: and a0, a1, a0 +; RV64ZBS-NEXT: sext.w a0, a0 +; RV64ZBS-NEXT: ret %a = load i32, i32* %p %shl = shl i32 1, %b %neg = xor i32 %shl, -1 @@ -110,15 +110,15 @@ ; RV64I-NEXT: and a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbclr_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: bclr a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sbclr_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: bclr a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBS-LABEL: sbclr_i64: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: bclr a0, a0, a1 -; RV64IBS-NEXT: ret +; RV64ZBS-LABEL: sbclr_i64: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: bclr a0, a0, a1 +; RV64ZBS-NEXT: ret %and = and i64 %b, 63 %shl = shl nuw i64 1, %and %neg = xor i64 %shl, -1 @@ -135,15 +135,15 @@ ; RV64I-NEXT: and a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbclr_i64_no_mask: -; RV64IB: # %bb.0: -; RV64IB-NEXT: bclr a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sbclr_i64_no_mask: +; RV64B: # %bb.0: +; RV64B-NEXT: bclr a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBS-LABEL: sbclr_i64_no_mask: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: bclr a0, a0, a1 -; RV64IBS-NEXT: ret +; RV64ZBS-LABEL: sbclr_i64_no_mask: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: bclr a0, a0, a1 +; RV64ZBS-NEXT: ret %shl = shl i64 1, %b %neg = xor i64 %shl, -1 %and1 = and i64 %neg, %a @@ -158,19 +158,19 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: or a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbset_i32: -; RV64IBS: # %bb.0: 
-; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: or a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbset_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: or a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbset_i32: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: or a0, a1, a0 +; RV64ZBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and %or = or i32 %shl, %a @@ -185,19 +185,19 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i32_no_mask: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: or a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbset_i32_no_mask: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: or a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbset_i32_no_mask: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: or a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbset_i32_no_mask: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: or a0, a1, a0 +; RV64ZBS-NEXT: ret %shl = shl i32 1, %b %or = or i32 %shl, %a ret i32 %or @@ -213,23 +213,23 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i32_load: -; RV64IB: # %bb.0: -; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: or a0, a1, a0 -; RV64IB-NEXT: sext.w a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbset_i32_load: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: lw a0, 0(a0) -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: or a0, a1, a0 -; RV64IBS-NEXT: sext.w a0, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbset_i32_load: +; RV64B: # %bb.0: +; RV64B-NEXT: lw a0, 0(a0) +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: or a0, a1, a0 +; RV64B-NEXT: sext.w a0, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbset_i32_load: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: lw a0, 0(a0) +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: or a0, a1, a0 +; RV64ZBS-NEXT: sext.w a0, a0 +; RV64ZBS-NEXT: ret %a = load i32, i32* %p %shl = shl i32 1, %b %or = or i32 %shl, %a @@ -244,17 +244,17 @@ ; RV64I-NEXT: sllw a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i32_zero: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a1, zero, 1 -; RV64IB-NEXT: sllw a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbset_i32_zero: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a1, zero, 1 -; RV64IBS-NEXT: sllw a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbset_i32_zero: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a1, zero, 1 +; RV64B-NEXT: sllw a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbset_i32_zero: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a1, zero, 1 +; RV64ZBS-NEXT: sllw a0, a1, a0 +; RV64ZBS-NEXT: ret %shl = shl i32 1, %a ret i32 %shl } @@ -267,15 +267,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i64: -; RV64IB: # %bb.0: -; RV64IB-NEXT: bset a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sbset_i64: +; RV64B: # %bb.0: +; RV64B-NEXT: bset a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBS-LABEL: sbset_i64: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: bset a0, a0, a1 -; RV64IBS-NEXT: ret +; RV64ZBS-LABEL: sbset_i64: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: bset a0, a0, a1 +; RV64ZBS-NEXT: ret %conv = 
and i64 %b, 63 %shl = shl nuw i64 1, %conv %or = or i64 %shl, %a @@ -290,15 +290,15 @@ ; RV64I-NEXT: or a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i64_no_mask: -; RV64IB: # %bb.0: -; RV64IB-NEXT: bset a0, a0, a1 -; RV64IB-NEXT: ret +; RV64B-LABEL: sbset_i64_no_mask: +; RV64B: # %bb.0: +; RV64B-NEXT: bset a0, a0, a1 +; RV64B-NEXT: ret ; -; RV64IBS-LABEL: sbset_i64_no_mask: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: bset a0, a0, a1 -; RV64IBS-NEXT: ret +; RV64ZBS-LABEL: sbset_i64_no_mask: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: bset a0, a0, a1 +; RV64ZBS-NEXT: ret %shl = shl i64 1, %b %or = or i64 %shl, %a ret i64 %or @@ -312,15 +312,15 @@ ; RV64I-NEXT: sll a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbset_i64_zero: -; RV64IB: # %bb.0: -; RV64IB-NEXT: bset a0, zero, a0 -; RV64IB-NEXT: ret +; RV64B-LABEL: sbset_i64_zero: +; RV64B: # %bb.0: +; RV64B-NEXT: bset a0, zero, a0 +; RV64B-NEXT: ret ; -; RV64IBS-LABEL: sbset_i64_zero: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: bset a0, zero, a0 -; RV64IBS-NEXT: ret +; RV64ZBS-LABEL: sbset_i64_zero: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: bset a0, zero, a0 +; RV64ZBS-NEXT: ret %shl = shl i64 1, %a ret i64 %shl } @@ -333,19 +333,19 @@ ; RV64I-NEXT: xor a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbinv_i32: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: xor a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbinv_i32: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: xor a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbinv_i32: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: xor a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbinv_i32: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: xor a0, a1, a0 +; RV64ZBS-NEXT: ret %and = and i32 %b, 31 %shl = shl nuw i32 1, %and %xor = xor i32 %shl, %a @@ -360,19 +360,19 @@ ; RV64I-NEXT: xor a0, a1, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbinv_i32_no_mask: -; RV64IB: # %bb.0: -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: xor a0, a1, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbinv_i32_no_mask: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: xor a0, a1, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbinv_i32_no_mask: +; RV64B: # %bb.0: +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: xor a0, a1, a0 +; RV64B-NEXT: ret +; +; RV64ZBS-LABEL: sbinv_i32_no_mask: +; RV64ZBS: # %bb.0: +; RV64ZBS-NEXT: addi a2, zero, 1 +; RV64ZBS-NEXT: sllw a1, a2, a1 +; RV64ZBS-NEXT: xor a0, a1, a0 +; RV64ZBS-NEXT: ret %shl = shl i32 1, %b %xor = xor i32 %shl, %a ret i32 %xor @@ -388,23 +388,23 @@ ; RV64I-NEXT: sext.w a0, a0 ; RV64I-NEXT: ret ; -; RV64IB-LABEL: sbinv_i32_load: -; RV64IB: # %bb.0: -; RV64IB-NEXT: lw a0, 0(a0) -; RV64IB-NEXT: addi a2, zero, 1 -; RV64IB-NEXT: sllw a1, a2, a1 -; RV64IB-NEXT: xor a0, a1, a0 -; RV64IB-NEXT: sext.w a0, a0 -; RV64IB-NEXT: ret -; -; RV64IBS-LABEL: sbinv_i32_load: -; RV64IBS: # %bb.0: -; RV64IBS-NEXT: lw a0, 0(a0) -; RV64IBS-NEXT: addi a2, zero, 1 -; RV64IBS-NEXT: sllw a1, a2, a1 -; RV64IBS-NEXT: xor a0, a1, a0 -; RV64IBS-NEXT: sext.w a0, a0 -; RV64IBS-NEXT: ret +; RV64B-LABEL: sbinv_i32_load: +; RV64B: # %bb.0: +; RV64B-NEXT: lw a0, 0(a0) +; RV64B-NEXT: addi a2, zero, 1 +; RV64B-NEXT: sllw a1, a2, a1 +; RV64B-NEXT: xor a0, a1, a0 +; RV64B-NEXT: sext.w a0, a0 +; 
RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbinv_i32_load:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: lw a0, 0(a0)
+; RV64ZBS-NEXT: addi a2, zero, 1
+; RV64ZBS-NEXT: sllw a1, a2, a1
+; RV64ZBS-NEXT: xor a0, a1, a0
+; RV64ZBS-NEXT: sext.w a0, a0
+; RV64ZBS-NEXT: ret
 %a = load i32, i32* %p
 %shl = shl i32 1, %b
 %xor = xor i32 %shl, %a
@@ -419,15 +419,15 @@
 ; RV64I-NEXT: xor a0, a1, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinv_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binv a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinv_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binv a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinv_i64:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binv a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinv_i64:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binv a0, a0, a1
+; RV64ZBS-NEXT: ret
 %conv = and i64 %b, 63
 %shl = shl nuw i64 1, %conv
 %xor = xor i64 %shl, %a
@@ -442,15 +442,15 @@
 ; RV64I-NEXT: xor a0, a1, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinv_i64_no_mask:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binv a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinv_i64_no_mask:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binv a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinv_i64_no_mask:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binv a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinv_i64_no_mask:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binv a0, a0, a1
+; RV64ZBS-NEXT: ret
 %shl = shl nuw i64 1, %b
 %xor = xor i64 %shl, %a
 ret i64 %xor
@@ -463,17 +463,17 @@
 ; RV64I-NEXT: andi a0, a0, 1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbext_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: srlw a0, a0, a1
-; RV64IB-NEXT: andi a0, a0, 1
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbext_i32:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: srlw a0, a0, a1
-; RV64IBS-NEXT: andi a0, a0, 1
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbext_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: srlw a0, a0, a1
+; RV64B-NEXT: andi a0, a0, 1
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbext_i32:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: srlw a0, a0, a1
+; RV64ZBS-NEXT: andi a0, a0, 1
+; RV64ZBS-NEXT: ret
 %and = and i32 %b, 31
 %shr = lshr i32 %a, %and
 %and1 = and i32 %shr, 1
@@ -487,17 +487,17 @@
 ; RV64I-NEXT: andi a0, a0, 1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbext_i32_no_mask:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: srlw a0, a0, a1
-; RV64IB-NEXT: andi a0, a0, 1
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbext_i32_no_mask:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: srlw a0, a0, a1
-; RV64IBS-NEXT: andi a0, a0, 1
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbext_i32_no_mask:
+; RV64B: # %bb.0:
+; RV64B-NEXT: srlw a0, a0, a1
+; RV64B-NEXT: andi a0, a0, 1
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbext_i32_no_mask:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: srlw a0, a0, a1
+; RV64ZBS-NEXT: andi a0, a0, 1
+; RV64ZBS-NEXT: ret
 %shr = lshr i32 %a, %b
 %and1 = and i32 %shr, 1
 ret i32 %and1
@@ -510,15 +510,15 @@
 ; RV64I-NEXT: andi a0, a0, 1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbext_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bext a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbext_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bext a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbext_i64:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bext a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbext_i64:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bext a0, a0, a1
+; RV64ZBS-NEXT: ret
 %conv = and i64 %b, 63
 %shr = lshr i64 %a, %conv
 %and1 = and i64 %shr, 1
@@ -532,15 +532,15 @@
 ; RV64I-NEXT: andi a0, a0, 1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbext_i64_no_mask:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bext a0, a0, a1
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbext_i64_no_mask:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bext a0, a0, a1
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbext_i64_no_mask:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bext a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbext_i64_no_mask:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bext a0, a0, a1
+; RV64ZBS-NEXT: ret
 %shr = lshr i64 %a, %b
 %and1 = and i64 %shr, 1
 ret i64 %and1
@@ -553,15 +553,15 @@
 ; RV64I-NEXT: andi a0, a0, 1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbexti_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bexti a0, a0, 5
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbexti_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bexti a0, a0, 5
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbexti_i32:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bexti a0, a0, 5
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbexti_i32:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bexti a0, a0, 5
+; RV64ZBS-NEXT: ret
 %shr = lshr i32 %a, 5
 %and = and i32 %shr, 1
 ret i32 %and
@@ -574,15 +574,15 @@
 ; RV64I-NEXT: andi a0, a0, 1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbexti_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bexti a0, a0, 5
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbexti_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bexti a0, a0, 5
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbexti_i64:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bexti a0, a0, 5
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbexti_i64:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bexti a0, a0, 5
+; RV64ZBS-NEXT: ret
 %shr = lshr i64 %a, 5
 %and = and i64 %shr, 1
 ret i64 %and
@@ -594,15 +594,15 @@
 ; RV64I-NEXT: andi a0, a0, -1025
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i32_10:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a0, a0, -1025
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i32_10:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a0, a0, -1025
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i32_10:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: andi a0, a0, -1025
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i32_10:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: andi a0, a0, -1025
+; RV64ZBS-NEXT: ret
 %and = and i32 %a, -1025
 ret i32 %and
 }
@@ -615,15 +615,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i32_11:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 11
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i32_11:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 11
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i32_11:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 11
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i32_11:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 11
+; RV64ZBS-NEXT: ret
 %and = and i32 %a, -2049
 ret i32 %and
 }
@@ -636,15 +636,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i32_30:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 30
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i32_30:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 30
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i32_30:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 30
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i32_30:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 30
+; RV64ZBS-NEXT: ret
 %and = and i32 %a, -1073741825
 ret i32 %and
 }
@@ -657,19 +657,19 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i32_31:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: lui a1, 524288
-; RV64IB-NEXT: addiw a1, a1, -1
-; RV64IB-NEXT: and a0, a0, a1
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbclri_i32_31:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: lui a1, 524288
-; RV64IBS-NEXT: addiw a1, a1, -1
-; RV64IBS-NEXT: and a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbclri_i32_31:
+; RV64B: # %bb.0:
+; RV64B-NEXT: lui a1, 524288
+; RV64B-NEXT: addiw a1, a1, -1
+; RV64B-NEXT: and a0, a0, a1
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbclri_i32_31:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: lui a1, 524288
+; RV64ZBS-NEXT: addiw a1, a1, -1
+; RV64ZBS-NEXT: and a0, a0, a1
+; RV64ZBS-NEXT: ret
 %and = and i32 %a, -2147483649
 ret i32 %and
 }
@@ -680,15 +680,15 @@
 ; RV64I-NEXT: andi a0, a0, -1025
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_10:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a0, a0, -1025
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i64_10:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a0, a0, -1025
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i64_10:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: andi a0, a0, -1025
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i64_10:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: andi a0, a0, -1025
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -1025
 ret i64 %and
 }
@@ -701,15 +701,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_11:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 11
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i64_11:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 11
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i64_11:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 11
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i64_11:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 11
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -2049
 ret i64 %and
 }
@@ -722,15 +722,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_30:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 30
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i64_30:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 30
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i64_30:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 30
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i64_30:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 30
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -1073741825
 ret i64 %and
 }
@@ -744,15 +744,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_31:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 31
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i64_31:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 31
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i64_31:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 31
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i64_31:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 31
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -2147483649
 ret i64 %and
 }
@@ -766,15 +766,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_62:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 62
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i64_62:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 62
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i64_62:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 62
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i64_62:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 62
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -4611686018427387905
 ret i64 %and
 }
@@ -787,15 +787,15 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_63:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 63
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbclri_i64_63:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 63
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbclri_i64_63:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 63
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbclri_i64_63:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 63
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -9223372036854775809
 ret i64 %and
 }
@@ -808,17 +808,17 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_large0:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a0, a0, -256
-; RV64IB-NEXT: bclri a0, a0, 24
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbclri_i64_large0:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: andi a0, a0, -256
-; RV64IBS-NEXT: bclri a0, a0, 24
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbclri_i64_large0:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a0, a0, -256
+; RV64B-NEXT: bclri a0, a0, 24
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbclri_i64_large0:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: andi a0, a0, -256
+; RV64ZBS-NEXT: bclri a0, a0, 24
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -16777472
 ret i64 %and
 }
@@ -831,17 +831,17 @@
 ; RV64I-NEXT: and a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbclri_i64_large1:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bclri a0, a0, 16
-; RV64IB-NEXT: bclri a0, a0, 24
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbclri_i64_large1:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bclri a0, a0, 16
-; RV64IBS-NEXT: bclri a0, a0, 24
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbclri_i64_large1:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bclri a0, a0, 16
+; RV64B-NEXT: bclri a0, a0, 24
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbclri_i64_large1:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bclri a0, a0, 16
+; RV64ZBS-NEXT: bclri a0, a0, 24
+; RV64ZBS-NEXT: ret
 %and = and i64 %a, -16842753
 ret i64 %and
 }
@@ -852,15 +852,15 @@
 ; RV64I-NEXT: ori a0, a0, 1024
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i32_10:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ori a0, a0, 1024
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i32_10:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ori a0, a0, 1024
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i32_10:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: ori a0, a0, 1024
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i32_10:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: ori a0, a0, 1024
+; RV64ZBS-NEXT: ret
 %or = or i32 %a, 1024
 ret i32 %or
 }
@@ -873,15 +873,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i32_11:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 11
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i32_11:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 11
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i32_11:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 11
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i32_11:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 11
+; RV64ZBS-NEXT: ret
 %or = or i32 %a, 2048
 ret i32 %or
 }
@@ -893,15 +893,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i32_30:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 30
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i32_30:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 30
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i32_30:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 30
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i32_30:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 30
+; RV64ZBS-NEXT: ret
 %or = or i32 %a, 1073741824
 ret i32 %or
 }
@@ -913,17 +913,17 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i32_31:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: lui a1, 524288
-; RV64IB-NEXT: or a0, a0, a1
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbseti_i32_31:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: lui a1, 524288
-; RV64IBS-NEXT: or a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbseti_i32_31:
+; RV64B: # %bb.0:
+; RV64B-NEXT: lui a1, 524288
+; RV64B-NEXT: or a0, a0, a1
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbseti_i32_31:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: lui a1, 524288
+; RV64ZBS-NEXT: or a0, a0, a1
+; RV64ZBS-NEXT: ret
 %or = or i32 %a, 2147483648
 ret i32 %or
 }
@@ -934,15 +934,15 @@
 ; RV64I-NEXT: ori a0, a0, 1024
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i64_10:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ori a0, a0, 1024
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i64_10:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ori a0, a0, 1024
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i64_10:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: ori a0, a0, 1024
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i64_10:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: ori a0, a0, 1024
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 1024
 ret i64 %or
 }
@@ -955,15 +955,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i64_11:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 11
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i64_11:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 11
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i64_11:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 11
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i64_11:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 11
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 2048
 ret i64 %or
 }
@@ -975,15 +975,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i64_30:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 30
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i64_30:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 30
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i64_30:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 30
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i64_30:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 30
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 1073741824
 ret i64 %or
 }
@@ -996,15 +996,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i64_31:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 31
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i64_31:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 31
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i64_31:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 31
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i64_31:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 31
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 2147483648
 ret i64 %or
 }
@@ -1017,15 +1017,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i64_62:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 62
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i64_62:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 62
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i64_62:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 62
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i64_62:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 62
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 4611686018427387904
 ret i64 %or
 }
@@ -1038,15 +1038,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbseti_i64_63:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 63
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbseti_i64_63:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 63
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbseti_i64_63:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 63
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbseti_i64_63:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 63
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 9223372036854775808
 ret i64 %or
 }
@@ -1057,15 +1057,15 @@
 ; RV64I-NEXT: xori a0, a0, 1024
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i32_10:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xori a0, a0, 1024
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i32_10:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xori a0, a0, 1024
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i32_10:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: xori a0, a0, 1024
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i32_10:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: xori a0, a0, 1024
+; RV64ZBS-NEXT: ret
 %xor = xor i32 %a, 1024
 ret i32 %xor
 }
@@ -1078,15 +1078,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i32_11:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 11
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i32_11:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 11
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i32_11:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 11
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i32_11:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 11
+; RV64ZBS-NEXT: ret
 %xor = xor i32 %a, 2048
 ret i32 %xor
 }
@@ -1098,15 +1098,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i32_30:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 30
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i32_30:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 30
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i32_30:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 30
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i32_30:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 30
+; RV64ZBS-NEXT: ret
 %xor = xor i32 %a, 1073741824
 ret i32 %xor
 }
@@ -1118,17 +1118,17 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i32_31:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: lui a1, 524288
-; RV64IB-NEXT: xor a0, a0, a1
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: sbinvi_i32_31:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: lui a1, 524288
-; RV64IBS-NEXT: xor a0, a0, a1
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: sbinvi_i32_31:
+; RV64B: # %bb.0:
+; RV64B-NEXT: lui a1, 524288
+; RV64B-NEXT: xor a0, a0, a1
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: sbinvi_i32_31:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: lui a1, 524288
+; RV64ZBS-NEXT: xor a0, a0, a1
+; RV64ZBS-NEXT: ret
 %xor = xor i32 %a, 2147483648
 ret i32 %xor
 }
@@ -1139,15 +1139,15 @@
 ; RV64I-NEXT: xori a0, a0, 1024
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i64_10:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xori a0, a0, 1024
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i64_10:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xori a0, a0, 1024
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i64_10:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: xori a0, a0, 1024
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i64_10:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: xori a0, a0, 1024
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 1024
 ret i64 %xor
 }
@@ -1160,15 +1160,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i64_11:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 11
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i64_11:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 11
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i64_11:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 11
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i64_11:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 11
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 2048
 ret i64 %xor
 }
@@ -1180,15 +1180,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i64_30:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 30
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i64_30:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 30
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i64_30:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 30
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i64_30:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 30
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 1073741824
 ret i64 %xor
 }
@@ -1201,15 +1201,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i64_31:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 31
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i64_31:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 31
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i64_31:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 31
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i64_31:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 31
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 2147483648
 ret i64 %xor
 }
@@ -1222,15 +1222,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i64_62:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 62
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i64_62:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 62
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i64_62:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 62
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i64_62:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 62
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 4611686018427387904
 ret i64 %xor
 }
@@ -1243,15 +1243,15 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: sbinvi_i64_63:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 63
-; RV64IB-NEXT: ret
+; RV64B-LABEL: sbinvi_i64_63:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 63
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: sbinvi_i64_63:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 63
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: sbinvi_i64_63:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 63
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 9223372036854775808
 ret i64 %xor
 }
@@ -1265,17 +1265,17 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: xor_i64_large:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: binvi a0, a0, 0
-; RV64IB-NEXT: binvi a0, a0, 32
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: xor_i64_large:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: binvi a0, a0, 0
-; RV64IBS-NEXT: binvi a0, a0, 32
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: xor_i64_large:
+; RV64B: # %bb.0:
+; RV64B-NEXT: binvi a0, a0, 0
+; RV64B-NEXT: binvi a0, a0, 32
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: xor_i64_large:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: binvi a0, a0, 0
+; RV64ZBS-NEXT: binvi a0, a0, 32
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 4294967297
 ret i64 %xor
 }
@@ -1288,17 +1288,17 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: xor_i64_4099:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xori a0, a0, 3
-; RV64IB-NEXT: binvi a0, a0, 12
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: xor_i64_4099:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: xori a0, a0, 3
-; RV64IBS-NEXT: binvi a0, a0, 12
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: xor_i64_4099:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xori a0, a0, 3
+; RV64B-NEXT: binvi a0, a0, 12
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: xor_i64_4099:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: xori a0, a0, 3
+; RV64ZBS-NEXT: binvi a0, a0, 12
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 4099
 ret i64 %xor
 }
@@ -1309,15 +1309,15 @@
 ; RV64I-NEXT: xori a0, a0, 96
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: xor_i64_96:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xori a0, a0, 96
-; RV64IB-NEXT: ret
+; RV64B-LABEL: xor_i64_96:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xori a0, a0, 96
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: xor_i64_96:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: xori a0, a0, 96
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: xor_i64_96:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: xori a0, a0, 96
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 96
 ret i64 %xor
 }
@@ -1331,17 +1331,17 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: or_i64_large:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: bseti a0, a0, 0
-; RV64IB-NEXT: bseti a0, a0, 32
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: or_i64_large:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: bseti a0, a0, 0
-; RV64IBS-NEXT: bseti a0, a0, 32
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: or_i64_large:
+; RV64B: # %bb.0:
+; RV64B-NEXT: bseti a0, a0, 0
+; RV64B-NEXT: bseti a0, a0, 32
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: or_i64_large:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: bseti a0, a0, 0
+; RV64ZBS-NEXT: bseti a0, a0, 32
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 4294967297
 ret i64 %or
 }
@@ -1354,17 +1354,17 @@
 ; RV64I-NEXT: xor a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: xor_i64_66901:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: xori a0, a0, 1365
-; RV64IB-NEXT: binvi a0, a0, 16
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: xor_i64_66901:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: xori a0, a0, 1365
-; RV64IBS-NEXT: binvi a0, a0, 16
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: xor_i64_66901:
+; RV64B: # %bb.0:
+; RV64B-NEXT: xori a0, a0, 1365
+; RV64B-NEXT: binvi a0, a0, 16
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: xor_i64_66901:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: xori a0, a0, 1365
+; RV64ZBS-NEXT: binvi a0, a0, 16
+; RV64ZBS-NEXT: ret
 %xor = xor i64 %a, 66901
 ret i64 %xor
 }
@@ -1377,17 +1377,17 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: or_i64_4099:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ori a0, a0, 3
-; RV64IB-NEXT: bseti a0, a0, 12
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: or_i64_4099:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: ori a0, a0, 3
-; RV64IBS-NEXT: bseti a0, a0, 12
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: or_i64_4099:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ori a0, a0, 3
+; RV64B-NEXT: bseti a0, a0, 12
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: or_i64_4099:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: ori a0, a0, 3
+; RV64ZBS-NEXT: bseti a0, a0, 12
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 4099
 ret i64 %or
 }
@@ -1398,15 +1398,15 @@
 ; RV64I-NEXT: ori a0, a0, 96
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: or_i64_96:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ori a0, a0, 96
-; RV64IB-NEXT: ret
+; RV64B-LABEL: or_i64_96:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ori a0, a0, 96
+; RV64B-NEXT: ret
 ;
-; RV64IBS-LABEL: or_i64_96:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: ori a0, a0, 96
-; RV64IBS-NEXT: ret
+; RV64ZBS-LABEL: or_i64_96:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: ori a0, a0, 96
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 96
 ret i64 %or
 }
@@ -1419,17 +1419,17 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: or_i64_66901:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: ori a0, a0, 1365
-; RV64IB-NEXT: bseti a0, a0, 16
-; RV64IB-NEXT: ret
-;
-; RV64IBS-LABEL: or_i64_66901:
-; RV64IBS: # %bb.0:
-; RV64IBS-NEXT: ori a0, a0, 1365
-; RV64IBS-NEXT: bseti a0, a0, 16
-; RV64IBS-NEXT: ret
+; RV64B-LABEL: or_i64_66901:
+; RV64B: # %bb.0:
+; RV64B-NEXT: ori a0, a0, 1365
+; RV64B-NEXT: bseti a0, a0, 16
+; RV64B-NEXT: ret
+;
+; RV64ZBS-LABEL: or_i64_66901:
+; RV64ZBS: # %bb.0:
+; RV64ZBS-NEXT: ori a0, a0, 1365
+; RV64ZBS-NEXT: bseti a0, a0, 16
+; RV64ZBS-NEXT: ret
 %or = or i64 %a, 66901
 ret i64 %or
 }
diff --git a/llvm/test/CodeGen/RISCV/rv64zbt.ll b/llvm/test/CodeGen/RISCV/rv64zbt.ll
--- a/llvm/test/CodeGen/RISCV/rv64zbt.ll
+++ b/llvm/test/CodeGen/RISCV/rv64zbt.ll
@@ -2,9 +2,9 @@
 ; RUN: llc -mtriple=riscv64 -verify-machineinstrs < %s \
 ; RUN: | FileCheck %s -check-prefix=RV64I
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-b -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IB
+; RUN: | FileCheck %s -check-prefix=RV64B
 ; RUN: llc -mtriple=riscv64 -mattr=+experimental-zbt -verify-machineinstrs < %s \
-; RUN: | FileCheck %s -check-prefix=RV64IBT
+; RUN: | FileCheck %s -check-prefix=RV64ZBT
 
 define signext i32 @cmix_i32(i32 signext %a, i32 signext %b, i32 signext %c) nounwind {
 ; RV64I-LABEL: cmix_i32:
@@ -15,15 +15,15 @@
 ; RV64I-NEXT: or a0, a1, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmix_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: cmix a0, a1, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cmix_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: cmix a0, a1, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: cmix_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: cmix a0, a1, a0, a2
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: cmix_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: cmix a0, a1, a0, a2
+; RV64ZBT-NEXT: ret
 %and = and i32 %b, %a
 %neg = xor i32 %b, -1
 %and1 = and i32 %neg, %c
@@ -40,15 +40,15 @@
 ; RV64I-NEXT: or a0, a1, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmix_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: cmix a0, a1, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cmix_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: cmix a0, a1, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: cmix_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: cmix a0, a1, a0, a2
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: cmix_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: cmix a0, a1, a0, a2
+; RV64ZBT-NEXT: ret
 %and = and i64 %b, %a
 %neg = xor i64 %b, -1
 %and1 = and i64 %neg, %c
@@ -66,15 +66,15 @@
 ; RV64I-NEXT: mv a0, a2
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: cmov a0, a1, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cmov_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: cmov a0, a1, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: cmov_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: cmov a0, a1, a0, a2
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: cmov_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: cmov a0, a1, a0, a2
+; RV64ZBT-NEXT: ret
 %tobool.not = icmp eq i32 %b, 0
 %cond = select i1 %tobool.not, i32 %c, i32 %a
 ret i32 %cond
@@ -89,17 +89,17 @@
 ; RV64I-NEXT: .LBB3_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_sle_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: slt a1, a2, a1
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_sle_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: slt a1, a2, a1
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_sle_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: slt a1, a2, a1
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_sle_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: slt a1, a2, a1
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp sle i32 %b, %c
 %cond = select i1 %tobool, i32 %a, i32 %d
 ret i32 %cond
@@ -114,17 +114,17 @@
 ; RV64I-NEXT: .LBB4_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_sge_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: slt a1, a1, a2
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_sge_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: slt a1, a1, a2
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_sge_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: slt a1, a1, a2
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_sge_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: slt a1, a1, a2
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp sge i32 %b, %c
 %cond = select i1 %tobool, i32 %a, i32 %d
 ret i32 %cond
@@ -139,17 +139,17 @@
 ; RV64I-NEXT: .LBB5_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_ule_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sltu a1, a2, a1
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_ule_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: sltu a1, a2, a1
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_ule_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sltu a1, a2, a1
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_ule_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: sltu a1, a2, a1
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp ule i32 %b, %c
 %cond = select i1 %tobool, i32 %a, i32 %d
 ret i32 %cond
@@ -164,17 +164,17 @@
 ; RV64I-NEXT: .LBB6_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_uge_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sltu a1, a1, a2
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_uge_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: sltu a1, a1, a2
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_uge_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sltu a1, a1, a2
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_uge_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: sltu a1, a1, a2
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp uge i32 %b, %c
 %cond = select i1 %tobool, i32 %a, i32 %d
 ret i32 %cond
@@ -190,15 +190,15 @@
 ; RV64I-NEXT: mv a0, a2
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: cmov a0, a1, a0, a2
-; RV64IB-NEXT: ret
+; RV64B-LABEL: cmov_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: cmov a0, a1, a0, a2
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: cmov_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: cmov a0, a1, a0, a2
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: cmov_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: cmov a0, a1, a0, a2
+; RV64ZBT-NEXT: ret
 %tobool.not = icmp eq i64 %b, 0
 %cond = select i1 %tobool.not, i64 %c, i64 %a
 ret i64 %cond
@@ -213,17 +213,17 @@
 ; RV64I-NEXT: .LBB8_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_sle_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: slt a1, a2, a1
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_sle_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: slt a1, a2, a1
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_sle_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: slt a1, a2, a1
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_sle_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: slt a1, a2, a1
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp sle i64 %b, %c
 %cond = select i1 %tobool, i64 %a, i64 %d
 ret i64 %cond
@@ -238,17 +238,17 @@
 ; RV64I-NEXT: .LBB9_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_sge_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: slt a1, a1, a2
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_sge_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: slt a1, a1, a2
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_sge_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: slt a1, a1, a2
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_sge_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: slt a1, a1, a2
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp sge i64 %b, %c
 %cond = select i1 %tobool, i64 %a, i64 %d
 ret i64 %cond
@@ -263,17 +263,17 @@
 ; RV64I-NEXT: .LBB10_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_ule_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sltu a1, a2, a1
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_ule_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: sltu a1, a2, a1
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_ule_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sltu a1, a2, a1
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_ule_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: sltu a1, a2, a1
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp ule i64 %b, %c
 %cond = select i1 %tobool, i64 %a, i64 %d
 ret i64 %cond
@@ -288,17 +288,17 @@
 ; RV64I-NEXT: .LBB11_2:
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: cmov_uge_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: sltu a1, a1, a2
-; RV64IB-NEXT: cmov a0, a1, a3, a0
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: cmov_uge_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: sltu a1, a1, a2
-; RV64IBT-NEXT: cmov a0, a1, a3, a0
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: cmov_uge_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: sltu a1, a1, a2
+; RV64B-NEXT: cmov a0, a1, a3, a0
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: cmov_uge_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: sltu a1, a1, a2
+; RV64ZBT-NEXT: cmov a0, a1, a3, a0
+; RV64ZBT-NEXT: ret
 %tobool = icmp uge i64 %b, %c
 %cond = select i1 %tobool, i64 %a, i64 %d
 ret i64 %cond
@@ -318,17 +318,17 @@
 ; RV64I-NEXT: srai a0, a0, 32
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshl_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a2, a2, 31
-; RV64IB-NEXT: fslw a0, a0, a1, a2
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshl_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: andi a2, a2, 31
-; RV64IBT-NEXT: fslw a0, a0, a1, a2
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshl_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a2, a2, 31
+; RV64B-NEXT: fslw a0, a0, a1, a2
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshl_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: andi a2, a2, 31
+; RV64ZBT-NEXT: fslw a0, a0, a1, a2
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 %c)
 ret i32 %1
 }
@@ -347,19 +347,19 @@
 ; RV64I-NEXT: sw a0, 0(a3)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshl_i32_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a2, a2, 31
-; RV64IB-NEXT: fslw a0, a0, a1, a2
-; RV64IB-NEXT: sw a0, 0(a3)
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshl_i32_nosext:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: andi a2, a2, 31
-; RV64IBT-NEXT: fslw a0, a0, a1, a2
-; RV64IBT-NEXT: sw a0, 0(a3)
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshl_i32_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a2, a2, 31
+; RV64B-NEXT: fslw a0, a0, a1, a2
+; RV64B-NEXT: sw a0, 0(a3)
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshl_i32_nosext:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: andi a2, a2, 31
+; RV64ZBT-NEXT: fslw a0, a0, a1, a2
+; RV64ZBT-NEXT: sw a0, 0(a3)
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 %c)
 store i32 %1, i32* %x
 ret void
@@ -377,17 +377,17 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshl_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a2, a2, 63
-; RV64IB-NEXT: fsl a0, a0, a1, a2
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshl_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: andi a2, a2, 63
-; RV64IBT-NEXT: fsl a0, a0, a1, a2
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshl_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a2, a2, 63
+; RV64B-NEXT: fsl a0, a0, a1, a2
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshl_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: andi a2, a2, 63
+; RV64ZBT-NEXT: fsl a0, a0, a1, a2
+; RV64ZBT-NEXT: ret
 %1 = tail call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 %c)
 ret i64 %1
 }
@@ -406,17 +406,17 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshr_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a2, a2, 31
-; RV64IB-NEXT: fsrw a0, a1, a0, a2
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshr_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: andi a2, a2, 31
-; RV64IBT-NEXT: fsrw a0, a1, a0, a2
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshr_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a2, a2, 31
+; RV64B-NEXT: fsrw a0, a1, a0, a2
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshr_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: andi a2, a2, 31
+; RV64ZBT-NEXT: fsrw a0, a1, a0, a2
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 %c)
 ret i32 %1
 }
@@ -434,19 +434,19 @@
 ; RV64I-NEXT: sw a0, 0(a3)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshr_i32_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a2, a2, 31
-; RV64IB-NEXT: fsrw a0, a1, a0, a2
-; RV64IB-NEXT: sw a0, 0(a3)
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshr_i32_nosext:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: andi a2, a2, 31
-; RV64IBT-NEXT: fsrw a0, a1, a0, a2
-; RV64IBT-NEXT: sw a0, 0(a3)
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshr_i32_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a2, a2, 31
+; RV64B-NEXT: fsrw a0, a1, a0, a2
+; RV64B-NEXT: sw a0, 0(a3)
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshr_i32_nosext:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: andi a2, a2, 31
+; RV64ZBT-NEXT: fsrw a0, a1, a0, a2
+; RV64ZBT-NEXT: sw a0, 0(a3)
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 %c)
 store i32 %1, i32* %x
 ret void
@@ -464,17 +464,17 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshr_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: andi a2, a2, 63
-; RV64IB-NEXT: fsr a0, a1, a0, a2
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshr_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: andi a2, a2, 63
-; RV64IBT-NEXT: fsr a0, a1, a0, a2
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshr_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: andi a2, a2, 63
+; RV64B-NEXT: fsr a0, a1, a0, a2
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshr_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: andi a2, a2, 63
+; RV64ZBT-NEXT: fsr a0, a1, a0, a2
+; RV64ZBT-NEXT: ret
 %1 = tail call i64 @llvm.fshr.i64(i64 %a, i64 %b, i64 %c)
 ret i64 %1
 }
@@ -488,15 +488,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshri_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: fsriw a0, a1, a0, 5
-; RV64IB-NEXT: ret
+; RV64B-LABEL: fshri_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: fsriw a0, a1, a0, 5
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: fshri_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: fsriw a0, a1, a0, 5
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: fshri_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: fsriw a0, a1, a0, 5
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 5)
 ret i32 %1
 }
@@ -511,17 +511,17 @@
 ; RV64I-NEXT: sw a0, 0(a2)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshri_i32_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: fsriw a0, a1, a0, 5
-; RV64IB-NEXT: sw a0, 0(a2)
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshri_i32_nosext:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: fsriw a0, a1, a0, 5
-; RV64IBT-NEXT: sw a0, 0(a2)
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshri_i32_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: fsriw a0, a1, a0, 5
+; RV64B-NEXT: sw a0, 0(a2)
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshri_i32_nosext:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: fsriw a0, a1, a0, 5
+; RV64ZBT-NEXT: sw a0, 0(a2)
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshr.i32(i32 %a, i32 %b, i32 5)
 store i32 %1, i32* %x
 ret void
@@ -535,15 +535,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshri_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: fsri a0, a1, a0, 5
-; RV64IB-NEXT: ret
+; RV64B-LABEL: fshri_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: fsri a0, a1, a0, 5
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: fshri_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: fsri a0, a1, a0, 5
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: fshri_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: fsri a0, a1, a0, 5
+; RV64ZBT-NEXT: ret
 %1 = tail call i64 @llvm.fshr.i64(i64 %a, i64 %b, i64 5)
 ret i64 %1
 }
@@ -557,15 +557,15 @@
 ; RV64I-NEXT: sext.w a0, a0
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshli_i32:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: fsriw a0, a1, a0, 27
-; RV64IB-NEXT: ret
+; RV64B-LABEL: fshli_i32:
+; RV64B: # %bb.0:
+; RV64B-NEXT: fsriw a0, a1, a0, 27
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: fshli_i32:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: fsriw a0, a1, a0, 27
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: fshli_i32:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: fsriw a0, a1, a0, 27
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 5)
 ret i32 %1
 }
@@ -580,17 +580,17 @@
 ; RV64I-NEXT: sw a0, 0(a2)
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshli_i32_nosext:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: fsriw a0, a1, a0, 27
-; RV64IB-NEXT: sw a0, 0(a2)
-; RV64IB-NEXT: ret
-;
-; RV64IBT-LABEL: fshli_i32_nosext:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: fsriw a0, a1, a0, 27
-; RV64IBT-NEXT: sw a0, 0(a2)
-; RV64IBT-NEXT: ret
+; RV64B-LABEL: fshli_i32_nosext:
+; RV64B: # %bb.0:
+; RV64B-NEXT: fsriw a0, a1, a0, 27
+; RV64B-NEXT: sw a0, 0(a2)
+; RV64B-NEXT: ret
+;
+; RV64ZBT-LABEL: fshli_i32_nosext:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: fsriw a0, a1, a0, 27
+; RV64ZBT-NEXT: sw a0, 0(a2)
+; RV64ZBT-NEXT: ret
 %1 = tail call i32 @llvm.fshl.i32(i32 %a, i32 %b, i32 5)
 store i32 %1, i32* %x
 ret void
@@ -604,15 +604,15 @@
 ; RV64I-NEXT: or a0, a0, a1
 ; RV64I-NEXT: ret
 ;
-; RV64IB-LABEL: fshli_i64:
-; RV64IB: # %bb.0:
-; RV64IB-NEXT: fsri a0, a1, a0, 59
-; RV64IB-NEXT: ret
+; RV64B-LABEL: fshli_i64:
+; RV64B: # %bb.0:
+; RV64B-NEXT: fsri a0, a1, a0, 59
+; RV64B-NEXT: ret
 ;
-; RV64IBT-LABEL: fshli_i64:
-; RV64IBT: # %bb.0:
-; RV64IBT-NEXT: fsri a0, a1, a0, 59
-; RV64IBT-NEXT: ret
+; RV64ZBT-LABEL: fshli_i64:
+; RV64ZBT: # %bb.0:
+; RV64ZBT-NEXT: fsri a0, a1, a0, 59
+; RV64ZBT-NEXT: ret
 %1 = tail call i64 @llvm.fshl.i64(i64 %a, i64 %b, i64 5)
 ret i64 %1
 }