diff --git a/llvm/test/CodeGen/RISCV/rotl-rotr.ll b/llvm/test/CodeGen/RISCV/rotl-rotr.ll
--- a/llvm/test/CodeGen/RISCV/rotl-rotr.ll
+++ b/llvm/test/CodeGen/RISCV/rotl-rotr.ll
@@ -322,6 +322,92 @@
   ret i32 %d
 }
 
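+; Tests for rotate idioms in which the shift amounts were already masked
+; before the shifts. In the and-variants the masks keep every bit the shifts
+; demand; in the or-variants the amounts have bits at or above the bit width
+; forced on, so both shift amounts are out of range.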
+define i32 @rotl_32_mask_and_63_and_31(i32 %x, i32 %y) nounwind {
+; RV32I-LABEL: rotl_32_mask_and_63_and_31:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    sll a2, a0, a1
+; RV32I-NEXT:    neg a1, a1
+; RV32I-NEXT:    srl a0, a0, a1
+; RV32I-NEXT:    or a0, a2, a0
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotl_32_mask_and_63_and_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    sllw a2, a0, a1
+; RV64I-NEXT:    negw a1, a1
+; RV64I-NEXT:    srlw a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotl_32_mask_and_63_and_31:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    sll a2, a0, a1
+; RV32ZBB-NEXT:    neg a1, a1
+; RV32ZBB-NEXT:    srl a0, a0, a1
+; RV32ZBB-NEXT:    or a0, a2, a0
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotl_32_mask_and_63_and_31:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    sllw a2, a0, a1
+; RV64ZBB-NEXT:    negw a1, a1
+; RV64ZBB-NEXT:    srlw a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = and i32 %y, 63
+  %b = shl i32 %x, %a
+  %c = sub i32 0, %y
+  %d = and i32 %c, 31
+  %e = lshr i32 %x, %d
+  %f = or i32 %b, %e
+  ret i32 %f
+}
+
+define i32 @rotl_32_mask_or_64_or_32(i32 %x, i32 %y) nounwind {
+; RV32I-LABEL: rotl_32_mask_or_64_or_32:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    ori a2, a1, 64
+; RV32I-NEXT:    sll a2, a0, a2
+; RV32I-NEXT:    neg a1, a1
+; RV32I-NEXT:    ori a1, a1, 32
+; RV32I-NEXT:    srl a0, a0, a1
+; RV32I-NEXT:    or a0, a2, a0
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotl_32_mask_or_64_or_32:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    sllw a2, a0, a1
+; RV64I-NEXT:    negw a1, a1
+; RV64I-NEXT:    srlw a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotl_32_mask_or_64_or_32:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    ori a2, a1, 64
+; RV32ZBB-NEXT:    sll a2, a0, a2
+; RV32ZBB-NEXT:    neg a1, a1
+; RV32ZBB-NEXT:    ori a1, a1, 32
+; RV32ZBB-NEXT:    srl a0, a0, a1
+; RV32ZBB-NEXT:    or a0, a2, a0
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotl_32_mask_or_64_or_32:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    sllw a2, a0, a1
+; RV64ZBB-NEXT:    negw a1, a1
+; RV64ZBB-NEXT:    srlw a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = or i32 %y, 64
+  %b = shl i32 %x, %a
+  %c = sub i32 0, %y
+  %d = or i32 %c, 32
+  %e = lshr i32 %x, %d
+  %f = or i32 %b, %e
+  ret i32 %f
+}
+
 define i32 @rotr_32_mask(i32 %x, i32 %y) nounwind {
 ; RV32I-LABEL: rotr_32_mask:
 ; RV32I:       # %bb.0:
@@ -356,31 +442,117 @@
   ret i32 %d
 }
 
+define i32 @rotr_32_mask_and_63_and_31(i32 %x, i32 %y) nounwind {
+; RV32I-LABEL: rotr_32_mask_and_63_and_31:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    srl a2, a0, a1
+; RV32I-NEXT:    neg a1, a1
+; RV32I-NEXT:    sll a0, a0, a1
+; RV32I-NEXT:    or a0, a2, a0
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotr_32_mask_and_63_and_31:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    srlw a2, a0, a1
+; RV64I-NEXT:    negw a1, a1
+; RV64I-NEXT:    sllw a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotr_32_mask_and_63_and_31:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    srl a2, a0, a1
+; RV32ZBB-NEXT:    neg a1, a1
+; RV32ZBB-NEXT:    sll a0, a0, a1
+; RV32ZBB-NEXT:    or a0, a2, a0
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotr_32_mask_and_63_and_31:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    srlw a2, a0, a1
+; RV64ZBB-NEXT:    negw a1, a1
+; RV64ZBB-NEXT:    sllw a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = and i32 %y, 63
+  %b = lshr i32 %x, %a
+  %c = sub i32 0, %y
+  %d = and i32 %c, 31
+  %e = shl i32 %x, %d
+  %f = or i32 %b, %e
+  ret i32 %f
+}
+
+define i32 @rotr_32_mask_or_64_or_32(i32 %x, i32 %y) nounwind {
+; RV32I-LABEL: rotr_32_mask_or_64_or_32:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    ori a2, a1, 64
+; RV32I-NEXT:    srl a2, a0, a2
+; RV32I-NEXT:    neg a1, a1
+; RV32I-NEXT:    ori a1, a1, 32
+; RV32I-NEXT:    sll a0, a0, a1
+; RV32I-NEXT:    or a0, a2, a0
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotr_32_mask_or_64_or_32:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    srlw a2, a0, a1
+; RV64I-NEXT:    negw a1, a1
+; RV64I-NEXT:    sllw a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotr_32_mask_or_64_or_32:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    ori a2, a1, 64
+; RV32ZBB-NEXT:    srl a2, a0, a2
+; RV32ZBB-NEXT:    neg a1, a1
+; RV32ZBB-NEXT:    ori a1, a1, 32
+; RV32ZBB-NEXT:    sll a0, a0, a1
+; RV32ZBB-NEXT:    or a0, a2, a0
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotr_32_mask_or_64_or_32:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    srlw a2, a0, a1
+; RV64ZBB-NEXT:    negw a1, a1
+; RV64ZBB-NEXT:    sllw a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = or i32 %y, 64
+  %b = lshr i32 %x, %a
+  %c = sub i32 0, %y
+  %d = or i32 %c, 32
+  %e = shl i32 %x, %d
+  %f = or i32 %b, %e
+  ret i32 %f
+}
+
 define i64 @rotl_64_mask(i64 %x, i64 %y) nounwind {
 ; RV32I-LABEL: rotl_64_mask:
 ; RV32I:       # %bb.0:
 ; RV32I-NEXT:    addi a5, a2, -32
 ; RV32I-NEXT:    mv a3, a1
-; RV32I-NEXT:    bltz a5, .LBB6_2
+; RV32I-NEXT:    bltz a5, .LBB10_2
 ; RV32I-NEXT:    # %bb.1:
 ; RV32I-NEXT:    sll a1, a0, a5
-; RV32I-NEXT:    j .LBB6_3
-; RV32I-NEXT:  .LBB6_2:
+; RV32I-NEXT:    j .LBB10_3
+; RV32I-NEXT:  .LBB10_2:
 ; RV32I-NEXT:    sll a1, a3, a2
 ; RV32I-NEXT:    xori a4, a2, 31
 ; RV32I-NEXT:    srli a6, a0, 1
 ; RV32I-NEXT:    srl a4, a6, a4
 ; RV32I-NEXT:    or a1, a1, a4
-; RV32I-NEXT:  .LBB6_3:
+; RV32I-NEXT:  .LBB10_3:
 ; RV32I-NEXT:    neg a6, a2
 ; RV32I-NEXT:    andi a4, a6, 63
 ; RV32I-NEXT:    addi a7, a4, -32
-; RV32I-NEXT:    bltz a7, .LBB6_5
+; RV32I-NEXT:    bltz a7, .LBB10_5
 ; RV32I-NEXT:    # %bb.4:
 ; RV32I-NEXT:    srl a4, a3, a7
-; RV32I-NEXT:    bltz a5, .LBB6_6
-; RV32I-NEXT:    j .LBB6_7
-; RV32I-NEXT:  .LBB6_5:
+; RV32I-NEXT:    bltz a5, .LBB10_6
+; RV32I-NEXT:    j .LBB10_7
+; RV32I-NEXT:  .LBB10_5:
 ; RV32I-NEXT:    srl a7, a0, a6
 ; RV32I-NEXT:    xori a4, a4, 31
 ; RV32I-NEXT:    slli t0, a3, 1
@@ -388,11 +560,11 @@
 ; RV32I-NEXT:    or a4, a7, a4
 ; RV32I-NEXT:    srl a3, a3, a6
 ; RV32I-NEXT:    or a1, a1, a3
-; RV32I-NEXT:    bgez a5, .LBB6_7
-; RV32I-NEXT:  .LBB6_6:
+; RV32I-NEXT:    bgez a5, .LBB10_7
+; RV32I-NEXT:  .LBB10_6:
 ; RV32I-NEXT:    sll a0, a0, a2
 ; RV32I-NEXT:    or a4, a4, a0
-; RV32I-NEXT:  .LBB6_7:
+; RV32I-NEXT:  .LBB10_7:
 ; RV32I-NEXT:    mv a0, a4
 ; RV32I-NEXT:    ret
 ;
@@ -408,26 +580,26 @@
 ; RV32ZBB:       # %bb.0:
 ; RV32ZBB-NEXT:    addi a5, a2, -32
 ; RV32ZBB-NEXT:    mv a3, a1
-; RV32ZBB-NEXT:    bltz a5, .LBB6_2
+; RV32ZBB-NEXT:    bltz a5, .LBB10_2
 ; RV32ZBB-NEXT:    # %bb.1:
 ; RV32ZBB-NEXT:    sll a1, a0, a5
-; RV32ZBB-NEXT:    j .LBB6_3
-; RV32ZBB-NEXT:  .LBB6_2:
+; RV32ZBB-NEXT:    j .LBB10_3
+; RV32ZBB-NEXT:  .LBB10_2:
 ; RV32ZBB-NEXT:    sll a1, a3, a2
 ; RV32ZBB-NEXT:    xori a4, a2, 31
 ; RV32ZBB-NEXT:    srli a6, a0, 1
 ; RV32ZBB-NEXT:    srl a4, a6, a4
 ; RV32ZBB-NEXT:    or a1, a1, a4
-; RV32ZBB-NEXT:  .LBB6_3:
+; RV32ZBB-NEXT:  .LBB10_3:
 ; RV32ZBB-NEXT:    neg a6, a2
 ; RV32ZBB-NEXT:    andi a4, a6, 63
 ; RV32ZBB-NEXT:    addi a7, a4, -32
-; RV32ZBB-NEXT:    bltz a7, .LBB6_5
+; RV32ZBB-NEXT:    bltz a7, .LBB10_5
 ; RV32ZBB-NEXT:    # %bb.4:
 ; RV32ZBB-NEXT:    srl a4, a3, a7
-; RV32ZBB-NEXT:    bltz a5, .LBB6_6
-; RV32ZBB-NEXT:    j .LBB6_7
-; RV32ZBB-NEXT:  .LBB6_5:
+; RV32ZBB-NEXT:    bltz a5, .LBB10_6
+; RV32ZBB-NEXT:    j .LBB10_7
+; RV32ZBB-NEXT:  .LBB10_5:
 ; RV32ZBB-NEXT:    srl a7, a0, a6
 ; RV32ZBB-NEXT:    xori a4, a4, 31
 ; RV32ZBB-NEXT:    slli t0, a3, 1
@@ -435,11 +607,11 @@
 ; RV32ZBB-NEXT:    or a4, a7, a4
 ; RV32ZBB-NEXT:    srl a3, a3, a6
 ; RV32ZBB-NEXT:    or a1, a1, a3
-; RV32ZBB-NEXT:    bgez a5, .LBB6_7
-; RV32ZBB-NEXT:  .LBB6_6:
+; RV32ZBB-NEXT:    bgez a5, .LBB10_7
+; RV32ZBB-NEXT:  .LBB10_6:
 ; RV32ZBB-NEXT:    sll a0, a0, a2
 ; RV32ZBB-NEXT:    or a4, a4, a0
-; RV32ZBB-NEXT:  .LBB6_7:
+; RV32ZBB-NEXT:  .LBB10_7:
 ; RV32ZBB-NEXT:    mv a0, a4
 ; RV32ZBB-NEXT:    ret
 ;
@@ -455,31 +627,181 @@
   ret i64 %d
 }
 
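+; 64-bit analogue of rotl_32_mask_and_63_and_31: the i64 shift amounts are
+; masked with 127 and 63 before the shifts.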
+define i64 @rotl_64_mask_and_127_and_63(i64 %x, i64 %y) nounwind {
+; RV32I-LABEL: rotl_64_mask_and_127_and_63:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a4, a2, 127
+; RV32I-NEXT:    addi a5, a4, -32
+; RV32I-NEXT:    mv a3, a1
+; RV32I-NEXT:    bltz a5, .LBB11_2
+; RV32I-NEXT:    # %bb.1:
+; RV32I-NEXT:    sll a1, a0, a5
+; RV32I-NEXT:    j .LBB11_3
+; RV32I-NEXT:  .LBB11_2:
+; RV32I-NEXT:    sll a1, a3, a2
+; RV32I-NEXT:    srli a6, a0, 1
+; RV32I-NEXT:    xori a4, a4, 31
+; RV32I-NEXT:    srl a4, a6, a4
+; RV32I-NEXT:    or a1, a1, a4
+; RV32I-NEXT:  .LBB11_3:
+; RV32I-NEXT:    neg a6, a2
+; RV32I-NEXT:    andi a4, a6, 63
+; RV32I-NEXT:    addi a7, a4, -32
+; RV32I-NEXT:    bltz a7, .LBB11_5
+; RV32I-NEXT:    # %bb.4:
+; RV32I-NEXT:    srl a4, a3, a7
+; RV32I-NEXT:    bltz a5, .LBB11_6
+; RV32I-NEXT:    j .LBB11_7
+; RV32I-NEXT:  .LBB11_5:
+; RV32I-NEXT:    srl a7, a0, a6
+; RV32I-NEXT:    xori a4, a4, 31
+; RV32I-NEXT:    slli t0, a3, 1
+; RV32I-NEXT:    sll a4, t0, a4
+; RV32I-NEXT:    or a4, a7, a4
+; RV32I-NEXT:    srl a3, a3, a6
+; RV32I-NEXT:    or a1, a1, a3
+; RV32I-NEXT:    bgez a5, .LBB11_7
+; RV32I-NEXT:  .LBB11_6:
+; RV32I-NEXT:    sll a0, a0, a2
+; RV32I-NEXT:    or a4, a4, a0
+; RV32I-NEXT:  .LBB11_7:
+; RV32I-NEXT:    mv a0, a4
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotl_64_mask_and_127_and_63:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    sll a2, a0, a1
+; RV64I-NEXT:    neg a1, a1
+; RV64I-NEXT:    srl a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotl_64_mask_and_127_and_63:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    andi a4, a2, 127
+; RV32ZBB-NEXT:    addi a5, a4, -32
+; RV32ZBB-NEXT:    mv a3, a1
+; RV32ZBB-NEXT:    bltz a5, .LBB11_2
+; RV32ZBB-NEXT:    # %bb.1:
+; RV32ZBB-NEXT:    sll a1, a0, a5
+; RV32ZBB-NEXT:    j .LBB11_3
+; RV32ZBB-NEXT:  .LBB11_2:
+; RV32ZBB-NEXT:    sll a1, a3, a2
+; RV32ZBB-NEXT:    srli a6, a0, 1
+; RV32ZBB-NEXT:    xori a4, a4, 31
+; RV32ZBB-NEXT:    srl a4, a6, a4
+; RV32ZBB-NEXT:    or a1, a1, a4
+; RV32ZBB-NEXT:  .LBB11_3:
+; RV32ZBB-NEXT:    neg a6, a2
+; RV32ZBB-NEXT:    andi a4, a6, 63
+; RV32ZBB-NEXT:    addi a7, a4, -32
+; RV32ZBB-NEXT:    bltz a7, .LBB11_5
+; RV32ZBB-NEXT:    # %bb.4:
+; RV32ZBB-NEXT:    srl a4, a3, a7
+; RV32ZBB-NEXT:    bltz a5, .LBB11_6
+; RV32ZBB-NEXT:    j .LBB11_7
+; RV32ZBB-NEXT:  .LBB11_5:
+; RV32ZBB-NEXT:    srl a7, a0, a6
+; RV32ZBB-NEXT:    xori a4, a4, 31
+; RV32ZBB-NEXT:    slli t0, a3, 1
+; RV32ZBB-NEXT:    sll a4, t0, a4
+; RV32ZBB-NEXT:    or a4, a7, a4
+; RV32ZBB-NEXT:    srl a3, a3, a6
+; RV32ZBB-NEXT:    or a1, a1, a3
+; RV32ZBB-NEXT:    bgez a5, .LBB11_7
+; RV32ZBB-NEXT:  .LBB11_6:
+; RV32ZBB-NEXT:    sll a0, a0, a2
+; RV32ZBB-NEXT:    or a4, a4, a0
+; RV32ZBB-NEXT:  .LBB11_7:
+; RV32ZBB-NEXT:    mv a0, a4
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotl_64_mask_and_127_and_63:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    sll a2, a0, a1
+; RV64ZBB-NEXT:    neg a1, a1
+; RV64ZBB-NEXT:    srl a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = and i64 %y, 127
+  %b = shl i64 %x, %a
+  %c = sub i64 0, %y
+  %d = and i64 %c, 63
+  %e = lshr i64 %x, %d
+  %f = or i64 %b, %e
+  ret i64 %f
+}
+
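+; As above, but the i64 shift amounts have bit 7 (bit 6 on the negated side)
+; forced on, so both amounts are out of range for an i64 shift.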
+define i64 @rotl_64_mask_or_128_or_64(i64 %x, i64 %y) nounwind {
+; RV32I-LABEL: rotl_64_mask_or_128_or_64:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    sll a3, a0, a2
+; RV32I-NEXT:    neg a0, a2
+; RV32I-NEXT:    srl a0, a1, a0
+; RV32I-NEXT:    mv a1, a3
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotl_64_mask_or_128_or_64:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    ori a2, a1, 128
+; RV64I-NEXT:    sll a2, a0, a2
+; RV64I-NEXT:    neg a1, a1
+; RV64I-NEXT:    ori a1, a1, 64
+; RV64I-NEXT:    srl a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotl_64_mask_or_128_or_64:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    sll a3, a0, a2
+; RV32ZBB-NEXT:    neg a0, a2
+; RV32ZBB-NEXT:    srl a0, a1, a0
+; RV32ZBB-NEXT:    mv a1, a3
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotl_64_mask_or_128_or_64:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    ori a2, a1, 128
+; RV64ZBB-NEXT:    sll a2, a0, a2
+; RV64ZBB-NEXT:    neg a1, a1
+; RV64ZBB-NEXT:    ori a1, a1, 64
+; RV64ZBB-NEXT:    srl a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = or i64 %y, 128
+  %b = shl i64 %x, %a
+  %c = sub i64 0, %y
+  %d = or i64 %c, 64
+  %e = lshr i64 %x, %d
+  %f = or i64 %b, %e
+  ret i64 %f
+}
+
 define i64 @rotr_64_mask(i64 %x, i64 %y) nounwind {
 ; RV32I-LABEL: rotr_64_mask:
 ; RV32I:       # %bb.0:
 ; RV32I-NEXT:    addi a5, a2, -32
 ; RV32I-NEXT:    mv a3, a0
-; RV32I-NEXT:    bltz a5, .LBB7_2
+; RV32I-NEXT:    bltz a5, .LBB13_2
 ; RV32I-NEXT:    # %bb.1:
 ; RV32I-NEXT:    srl a0, a1, a5
-; RV32I-NEXT:    j .LBB7_3
-; RV32I-NEXT:  .LBB7_2:
+; RV32I-NEXT:    j .LBB13_3
+; RV32I-NEXT:  .LBB13_2:
 ; RV32I-NEXT:    srl a0, a3, a2
 ; RV32I-NEXT:    xori a4, a2, 31
 ; RV32I-NEXT:    slli a6, a1, 1
 ; RV32I-NEXT:    sll a4, a6, a4
 ; RV32I-NEXT:    or a0, a0, a4
-; RV32I-NEXT:  .LBB7_3:
+; RV32I-NEXT:  .LBB13_3:
 ; RV32I-NEXT:    neg a6, a2
 ; RV32I-NEXT:    andi a4, a6, 63
 ; RV32I-NEXT:    addi a7, a4, -32
-; RV32I-NEXT:    bltz a7, .LBB7_5
+; RV32I-NEXT:    bltz a7, .LBB13_5
 ; RV32I-NEXT:    # %bb.4:
 ; RV32I-NEXT:    sll a4, a3, a7
-; RV32I-NEXT:    bltz a5, .LBB7_6
-; RV32I-NEXT:    j .LBB7_7
-; RV32I-NEXT:  .LBB7_5:
+; RV32I-NEXT:    bltz a5, .LBB13_6
+; RV32I-NEXT:    j .LBB13_7
+; RV32I-NEXT:  .LBB13_5:
 ; RV32I-NEXT:    sll a7, a1, a6
 ; RV32I-NEXT:    xori a4, a4, 31
 ; RV32I-NEXT:    srli t0, a3, 1
@@ -487,11 +809,11 @@
 ; RV32I-NEXT:    or a4, a7, a4
 ; RV32I-NEXT:    sll a3, a3, a6
 ; RV32I-NEXT:    or a0, a0, a3
-; RV32I-NEXT:    bgez a5, .LBB7_7
-; RV32I-NEXT:  .LBB7_6:
+; RV32I-NEXT:    bgez a5, .LBB13_7
+; RV32I-NEXT:  .LBB13_6:
 ; RV32I-NEXT:    srl a1, a1, a2
 ; RV32I-NEXT:    or a4, a4, a1
-; RV32I-NEXT:  .LBB7_7:
+; RV32I-NEXT:  .LBB13_7:
 ; RV32I-NEXT:    mv a1, a4
 ; RV32I-NEXT:    ret
 ;
@@ -507,26 +829,26 @@
 ; RV32ZBB:       # %bb.0:
 ; RV32ZBB-NEXT:    addi a5, a2, -32
 ; RV32ZBB-NEXT:    mv a3, a0
-; RV32ZBB-NEXT:    bltz a5, .LBB7_2
+; RV32ZBB-NEXT:    bltz a5, .LBB13_2
 ; RV32ZBB-NEXT:    # %bb.1:
 ; RV32ZBB-NEXT:    srl a0, a1, a5
-; RV32ZBB-NEXT:    j .LBB7_3
-; RV32ZBB-NEXT:  .LBB7_2:
+; RV32ZBB-NEXT:    j .LBB13_3
+; RV32ZBB-NEXT:  .LBB13_2:
 ; RV32ZBB-NEXT:    srl a0, a3, a2
 ; RV32ZBB-NEXT:    xori a4, a2, 31
 ; RV32ZBB-NEXT:    slli a6, a1, 1
 ; RV32ZBB-NEXT:    sll a4, a6, a4
 ; RV32ZBB-NEXT:    or a0, a0, a4
-; RV32ZBB-NEXT:  .LBB7_3:
+; RV32ZBB-NEXT:  .LBB13_3:
 ; RV32ZBB-NEXT:    neg a6, a2
 ; RV32ZBB-NEXT:    andi a4, a6, 63
 ; RV32ZBB-NEXT:    addi a7, a4, -32
-; RV32ZBB-NEXT:    bltz a7, .LBB7_5
+; RV32ZBB-NEXT:    bltz a7, .LBB13_5
 ; RV32ZBB-NEXT:    # %bb.4:
 ; RV32ZBB-NEXT:    sll a4, a3, a7
-; RV32ZBB-NEXT:    bltz a5, .LBB7_6
-; RV32ZBB-NEXT:    j .LBB7_7
-; RV32ZBB-NEXT:  .LBB7_5:
+; RV32ZBB-NEXT:    bltz a5, .LBB13_6
+; RV32ZBB-NEXT:    j .LBB13_7
+; RV32ZBB-NEXT:  .LBB13_5:
 ; RV32ZBB-NEXT:    sll a7, a1, a6
 ; RV32ZBB-NEXT:    xori a4, a4, 31
 ; RV32ZBB-NEXT:    srli t0, a3, 1
@@ -534,11 +856,11 @@
 ; RV32ZBB-NEXT:    or a4, a7, a4
 ; RV32ZBB-NEXT:    sll a3, a3, a6
 ; RV32ZBB-NEXT:    or a0, a0, a3
-; RV32ZBB-NEXT:    bgez a5, .LBB7_7
-; RV32ZBB-NEXT:  .LBB7_6:
+; RV32ZBB-NEXT:    bgez a5, .LBB13_7
+; RV32ZBB-NEXT:  .LBB13_6:
 ; RV32ZBB-NEXT:    srl a1, a1, a2
 ; RV32ZBB-NEXT:    or a4, a4, a1
-; RV32ZBB-NEXT:  .LBB7_7:
+; RV32ZBB-NEXT:  .LBB13_7:
 ; RV32ZBB-NEXT:    mv a1, a4
 ; RV32ZBB-NEXT:    ret
 ;
@@ -554,6 +876,156 @@
   ret i64 %d
 }
 
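+; Rotate-right version of rotl_64_mask_and_127_and_63.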
+define i64 @rotr_64_mask_and_127_and_63(i64 %x, i64 %y) nounwind {
+; RV32I-LABEL: rotr_64_mask_and_127_and_63:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    andi a4, a2, 127
+; RV32I-NEXT:    addi a5, a4, -32
+; RV32I-NEXT:    mv a3, a0
+; RV32I-NEXT:    bltz a5, .LBB14_2
+; RV32I-NEXT:    # %bb.1:
+; RV32I-NEXT:    srl a0, a1, a5
+; RV32I-NEXT:    j .LBB14_3
+; RV32I-NEXT:  .LBB14_2:
+; RV32I-NEXT:    srl a0, a3, a2
+; RV32I-NEXT:    slli a6, a1, 1
+; RV32I-NEXT:    xori a4, a4, 31
+; RV32I-NEXT:    sll a4, a6, a4
+; RV32I-NEXT:    or a0, a0, a4
+; RV32I-NEXT:  .LBB14_3:
+; RV32I-NEXT:    neg a6, a2
+; RV32I-NEXT:    andi a4, a6, 63
+; RV32I-NEXT:    addi a7, a4, -32
+; RV32I-NEXT:    bltz a7, .LBB14_5
+; RV32I-NEXT:    # %bb.4:
+; RV32I-NEXT:    sll a4, a3, a7
+; RV32I-NEXT:    bltz a5, .LBB14_6
+; RV32I-NEXT:    j .LBB14_7
+; RV32I-NEXT:  .LBB14_5:
+; RV32I-NEXT:    sll a7, a1, a6
+; RV32I-NEXT:    xori a4, a4, 31
+; RV32I-NEXT:    srli t0, a3, 1
+; RV32I-NEXT:    srl a4, t0, a4
+; RV32I-NEXT:    or a4, a7, a4
+; RV32I-NEXT:    sll a3, a3, a6
+; RV32I-NEXT:    or a0, a0, a3
+; RV32I-NEXT:    bgez a5, .LBB14_7
+; RV32I-NEXT:  .LBB14_6:
+; RV32I-NEXT:    srl a1, a1, a2
+; RV32I-NEXT:    or a4, a4, a1
+; RV32I-NEXT:  .LBB14_7:
+; RV32I-NEXT:    mv a1, a4
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotr_64_mask_and_127_and_63:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    srl a2, a0, a1
+; RV64I-NEXT:    neg a1, a1
+; RV64I-NEXT:    sll a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotr_64_mask_and_127_and_63:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    andi a4, a2, 127
+; RV32ZBB-NEXT:    addi a5, a4, -32
+; RV32ZBB-NEXT:    mv a3, a0
+; RV32ZBB-NEXT:    bltz a5, .LBB14_2
+; RV32ZBB-NEXT:    # %bb.1:
+; RV32ZBB-NEXT:    srl a0, a1, a5
+; RV32ZBB-NEXT:    j .LBB14_3
+; RV32ZBB-NEXT:  .LBB14_2:
+; RV32ZBB-NEXT:    srl a0, a3, a2
+; RV32ZBB-NEXT:    slli a6, a1, 1
+; RV32ZBB-NEXT:    xori a4, a4, 31
+; RV32ZBB-NEXT:    sll a4, a6, a4
+; RV32ZBB-NEXT:    or a0, a0, a4
+; RV32ZBB-NEXT:  .LBB14_3:
+; RV32ZBB-NEXT:    neg a6, a2
+; RV32ZBB-NEXT:    andi a4, a6, 63
+; RV32ZBB-NEXT:    addi a7, a4, -32
+; RV32ZBB-NEXT:    bltz a7, .LBB14_5
+; RV32ZBB-NEXT:    # %bb.4:
+; RV32ZBB-NEXT:    sll a4, a3, a7
+; RV32ZBB-NEXT:    bltz a5, .LBB14_6
+; RV32ZBB-NEXT:    j .LBB14_7
+; RV32ZBB-NEXT:  .LBB14_5:
+; RV32ZBB-NEXT:    sll a7, a1, a6
+; RV32ZBB-NEXT:    xori a4, a4, 31
+; RV32ZBB-NEXT:    srli t0, a3, 1
+; RV32ZBB-NEXT:    srl a4, t0, a4
+; RV32ZBB-NEXT:    or a4, a7, a4
+; RV32ZBB-NEXT:    sll a3, a3, a6
+; RV32ZBB-NEXT:    or a0, a0, a3
+; RV32ZBB-NEXT:    bgez a5, .LBB14_7
+; RV32ZBB-NEXT:  .LBB14_6:
+; RV32ZBB-NEXT:    srl a1, a1, a2
+; RV32ZBB-NEXT:    or a4, a4, a1
+; RV32ZBB-NEXT:  .LBB14_7:
+; RV32ZBB-NEXT:    mv a1, a4
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotr_64_mask_and_127_and_63:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    srl a2, a0, a1
+; RV64ZBB-NEXT:    neg a1, a1
+; RV64ZBB-NEXT:    sll a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = and i64 %y, 127
+  %b = lshr i64 %x, %a
+  %c = sub i64 0, %y
+  %d = and i64 %c, 63
+  %e = shl i64 %x, %d
+  %f = or i64 %b, %e
+  ret i64 %f
+}
+
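+; Rotate-right version of rotl_64_mask_or_128_or_64.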
+define i64 @rotr_64_mask_or_128_or_64(i64 %x, i64 %y) nounwind {
+; RV32I-LABEL: rotr_64_mask_or_128_or_64:
+; RV32I:       # %bb.0:
+; RV32I-NEXT:    srl a3, a1, a2
+; RV32I-NEXT:    neg a1, a2
+; RV32I-NEXT:    sll a1, a0, a1
+; RV32I-NEXT:    mv a0, a3
+; RV32I-NEXT:    ret
+;
+; RV64I-LABEL: rotr_64_mask_or_128_or_64:
+; RV64I:       # %bb.0:
+; RV64I-NEXT:    ori a2, a1, 128
+; RV64I-NEXT:    srl a2, a0, a2
+; RV64I-NEXT:    neg a1, a1
+; RV64I-NEXT:    ori a1, a1, 64
+; RV64I-NEXT:    sll a0, a0, a1
+; RV64I-NEXT:    or a0, a2, a0
+; RV64I-NEXT:    ret
+;
+; RV32ZBB-LABEL: rotr_64_mask_or_128_or_64:
+; RV32ZBB:       # %bb.0:
+; RV32ZBB-NEXT:    srl a3, a1, a2
+; RV32ZBB-NEXT:    neg a1, a2
+; RV32ZBB-NEXT:    sll a1, a0, a1
+; RV32ZBB-NEXT:    mv a0, a3
+; RV32ZBB-NEXT:    ret
+;
+; RV64ZBB-LABEL: rotr_64_mask_or_128_or_64:
+; RV64ZBB:       # %bb.0:
+; RV64ZBB-NEXT:    ori a2, a1, 128
+; RV64ZBB-NEXT:    srl a2, a0, a2
+; RV64ZBB-NEXT:    neg a1, a1
+; RV64ZBB-NEXT:    ori a1, a1, 64
+; RV64ZBB-NEXT:    sll a0, a0, a1
+; RV64ZBB-NEXT:    or a0, a2, a0
+; RV64ZBB-NEXT:    ret
+  %a = or i64 %y, 128
+  %b = lshr i64 %x, %a
+  %c = sub i64 0, %y
+  %d = or i64 %c, 64
+  %e = shl i64 %x, %d
+  %f = or i64 %b, %e
+  ret i64 %f
+}
+
 ; Test that we're able to remove a mask on the rotate amount that has more than
 ; one use.
 define signext i32 @rotl_32_mask_shared(i32 signext %a, i32 signext %b, i32 signext %amt) nounwind {
@@ -604,16 +1076,16 @@
 ; RV32I-NEXT:    slli a5, a4, 26
 ; RV32I-NEXT:    srli a5, a5, 31
 ; RV32I-NEXT:    mv a7, a0
-; RV32I-NEXT:    bnez a5, .LBB9_2
+; RV32I-NEXT:    bnez a5, .LBB17_2
 ; RV32I-NEXT:    # %bb.1:
 ; RV32I-NEXT:    mv a7, a1
-; RV32I-NEXT:  .LBB9_2:
+; RV32I-NEXT:  .LBB17_2:
 ; RV32I-NEXT:    andi a6, a4, 63
 ; RV32I-NEXT:    sll t0, a7, a4
-; RV32I-NEXT:    bnez a5, .LBB9_4
+; RV32I-NEXT:    bnez a5, .LBB17_4
 ; RV32I-NEXT:    # %bb.3:
 ; RV32I-NEXT:    mv a1, a0
-; RV32I-NEXT:  .LBB9_4:
+; RV32I-NEXT:  .LBB17_4:
 ; RV32I-NEXT:    srli a0, a1, 1
 ; RV32I-NEXT:    not t1, a4
 ; RV32I-NEXT:    srl a0, a0, t1
@@ -623,12 +1095,12 @@
 ; RV32I-NEXT:    srl a7, a0, t1
 ; RV32I-NEXT:    addi a0, a6, -32
 ; RV32I-NEXT:    or a1, a1, a7
-; RV32I-NEXT:    bltz a0, .LBB9_6
+; RV32I-NEXT:    bltz a0, .LBB17_6
 ; RV32I-NEXT:    # %bb.5:
 ; RV32I-NEXT:    sll a3, a2, a0
 ; RV32I-NEXT:    mv a0, a1
-; RV32I-NEXT:    j .LBB9_7
-; RV32I-NEXT:  .LBB9_6:
+; RV32I-NEXT:    j .LBB17_7
+; RV32I-NEXT:  .LBB17_6:
 ; RV32I-NEXT:    sll a0, a3, a4
 ; RV32I-NEXT:    srli a3, a2, 1
 ; RV32I-NEXT:    xori a6, a6, 31
@@ -636,7 +1108,7 @@
 ; RV32I-NEXT:    or a3, a0, a3
 ; RV32I-NEXT:    sll a0, a2, a4
 ; RV32I-NEXT:    add a0, a1, a0
-; RV32I-NEXT:  .LBB9_7:
+; RV32I-NEXT:  .LBB17_7:
 ; RV32I-NEXT:    sltu a1, a0, a1
 ; RV32I-NEXT:    add a2, a5, a3
 ; RV32I-NEXT:    add a1, a2, a1
@@ -657,16 +1129,16 @@
 ; RV32ZBB-NEXT:    slli a5, a4, 26
 ; RV32ZBB-NEXT:    srli a5, a5, 31
 ; RV32ZBB-NEXT:    mv a7, a0
-; RV32ZBB-NEXT:    bnez a5, .LBB9_2
+; RV32ZBB-NEXT:    bnez a5, .LBB17_2
 ; RV32ZBB-NEXT:    # %bb.1:
 ; RV32ZBB-NEXT:    mv a7, a1
-; RV32ZBB-NEXT:  .LBB9_2:
+; RV32ZBB-NEXT:  .LBB17_2:
 ; RV32ZBB-NEXT:    andi a6, a4, 63
 ; RV32ZBB-NEXT:    sll t0, a7, a4
-; RV32ZBB-NEXT:    bnez a5, .LBB9_4
+; RV32ZBB-NEXT:    bnez a5, .LBB17_4
 ; RV32ZBB-NEXT:    # %bb.3:
 ; RV32ZBB-NEXT:    mv a1, a0
-; RV32ZBB-NEXT:  .LBB9_4:
+; RV32ZBB-NEXT:  .LBB17_4:
 ; RV32ZBB-NEXT:    srli a0, a1, 1
 ; RV32ZBB-NEXT:    not t1, a4
 ; RV32ZBB-NEXT:    srl a0, a0, t1
@@ -676,12 +1148,12 @@
 ; RV32ZBB-NEXT:    srl a7, a0, t1
 ; RV32ZBB-NEXT:    addi a0, a6, -32
 ; RV32ZBB-NEXT:    or a1, a1, a7
-; RV32ZBB-NEXT:    bltz a0, .LBB9_6
+; RV32ZBB-NEXT:    bltz a0, .LBB17_6
 ; RV32ZBB-NEXT:    # %bb.5:
 ; RV32ZBB-NEXT:    sll a3, a2, a0
 ; RV32ZBB-NEXT:    mv a0, a1
-; RV32ZBB-NEXT:    j .LBB9_7
-; RV32ZBB-NEXT:  .LBB9_6:
+; RV32ZBB-NEXT:    j .LBB17_7
+; RV32ZBB-NEXT:  .LBB17_6:
 ; RV32ZBB-NEXT:    sll a0, a3, a4
 ; RV32ZBB-NEXT:    srli a3, a2, 1
 ; RV32ZBB-NEXT:    xori a6, a6, 31
@@ -689,7 +1161,7 @@
 ; RV32ZBB-NEXT:    or a3, a0, a3
 ; RV32ZBB-NEXT:    sll a0, a2, a4
 ; RV32ZBB-NEXT:    add a0, a1, a0
-; RV32ZBB-NEXT:  .LBB9_7:
+; RV32ZBB-NEXT:  .LBB17_7:
 ; RV32ZBB-NEXT:    sltu a1, a0, a1
 ; RV32ZBB-NEXT:    add a2, a5, a3
 ; RV32ZBB-NEXT:    add a1, a2, a1
@@ -756,16 +1228,16 @@
 ; RV32I:       # %bb.0:
 ; RV32I-NEXT:    andi a7, a4, 32
 ; RV32I-NEXT:    mv a6, a1
-; RV32I-NEXT:    beqz a7, .LBB11_2
+; RV32I-NEXT:    beqz a7, .LBB19_2
 ; RV32I-NEXT:    # %bb.1:
 ; RV32I-NEXT:    mv a6, a0
-; RV32I-NEXT:  .LBB11_2:
+; RV32I-NEXT:  .LBB19_2:
 ; RV32I-NEXT:    andi a5, a4, 63
 ; RV32I-NEXT:    srl t0, a6, a4
-; RV32I-NEXT:    beqz a7, .LBB11_4
+; RV32I-NEXT:    beqz a7, .LBB19_4
 ; RV32I-NEXT:    # %bb.3:
 ; RV32I-NEXT:    mv a0, a1
-; RV32I-NEXT:  .LBB11_4:
+; RV32I-NEXT:  .LBB19_4:
 ; RV32I-NEXT:    slli a1, a0, 1
 ; RV32I-NEXT:    not a7, a4
 ; RV32I-NEXT:    sll a1, a1, a7
@@ -775,12 +1247,12 @@
 ; RV32I-NEXT:    sll a6, a0, a7
 ; RV32I-NEXT:    addi a0, a5, -32
 ; RV32I-NEXT:    or a6, a6, t0
-; RV32I-NEXT:    bltz a0, .LBB11_6
+; RV32I-NEXT:    bltz a0, .LBB19_6
 ; RV32I-NEXT:    # %bb.5:
 ; RV32I-NEXT:    sll a3, a2, a0
 ; RV32I-NEXT:    mv a0, a6
-; RV32I-NEXT:    j .LBB11_7
-; RV32I-NEXT:  .LBB11_6:
+; RV32I-NEXT:    j .LBB19_7
+; RV32I-NEXT:  .LBB19_6:
 ; RV32I-NEXT:    sll a0, a3, a4
 ; RV32I-NEXT:    srli a3, a2, 1
 ; RV32I-NEXT:    xori a5, a5, 31
@@ -788,7 +1260,7 @@
 ; RV32I-NEXT:    or a3, a0, a3
 ; RV32I-NEXT:    sll a0, a2, a4
 ; RV32I-NEXT:    add a0, a6, a0
-; RV32I-NEXT:  .LBB11_7:
+; RV32I-NEXT:  .LBB19_7:
 ; RV32I-NEXT:    sltu a2, a0, a6
 ; RV32I-NEXT:    add a1, a1, a3
 ; RV32I-NEXT:    add a1, a1, a2
@@ -808,16 +1280,16 @@
 ; RV32ZBB:       # %bb.0:
 ; RV32ZBB-NEXT:    andi a7, a4, 32
 ; RV32ZBB-NEXT:    mv a6, a1
-; RV32ZBB-NEXT:    beqz a7, .LBB11_2
+; RV32ZBB-NEXT:    beqz a7, .LBB19_2
 ; RV32ZBB-NEXT:    # %bb.1:
 ; RV32ZBB-NEXT:    mv a6, a0
-; RV32ZBB-NEXT:  .LBB11_2:
+; RV32ZBB-NEXT:  .LBB19_2:
 ; RV32ZBB-NEXT:    andi a5, a4, 63
 ; RV32ZBB-NEXT:    srl t0, a6, a4
-; RV32ZBB-NEXT:    beqz a7, .LBB11_4
+; RV32ZBB-NEXT:    beqz a7, .LBB19_4
 ; RV32ZBB-NEXT:    # %bb.3:
 ; RV32ZBB-NEXT:    mv a0, a1
-; RV32ZBB-NEXT:  .LBB11_4:
+; RV32ZBB-NEXT:  .LBB19_4:
 ; RV32ZBB-NEXT:    slli a1, a0, 1
 ; RV32ZBB-NEXT:    not a7, a4
 ; RV32ZBB-NEXT:    sll a1, a1, a7
@@ -827,12 +1299,12 @@
 ; RV32ZBB-NEXT:    sll a6, a0, a7
 ; RV32ZBB-NEXT:    addi a0, a5, -32
 ; RV32ZBB-NEXT:    or a6, a6, t0
-; RV32ZBB-NEXT:    bltz a0, .LBB11_6
+; RV32ZBB-NEXT:    bltz a0, .LBB19_6
 ; RV32ZBB-NEXT:    # %bb.5:
 ; RV32ZBB-NEXT:    sll a3, a2, a0
 ; RV32ZBB-NEXT:    mv a0, a6
-; RV32ZBB-NEXT:    j .LBB11_7
-; RV32ZBB-NEXT:  .LBB11_6:
+; RV32ZBB-NEXT:    j .LBB19_7
+; RV32ZBB-NEXT:  .LBB19_6:
 ; RV32ZBB-NEXT:    sll a0, a3, a4
 ; RV32ZBB-NEXT:    srli a3, a2, 1
 ; RV32ZBB-NEXT:    xori a5, a5, 31
@@ -840,7 +1312,7 @@
 ; RV32ZBB-NEXT:    or a3, a0, a3
 ; RV32ZBB-NEXT:    sll a0, a2, a4
 ; RV32ZBB-NEXT:    add a0, a6, a0
-; RV32ZBB-NEXT:  .LBB11_7:
+; RV32ZBB-NEXT:  .LBB19_7:
 ; RV32ZBB-NEXT:    sltu a2, a0, a6
 ; RV32ZBB-NEXT:    add a1, a1, a3
 ; RV32ZBB-NEXT:    add a1, a1, a2
@@ -911,14 +1383,14 @@
 ; RV32I-NEXT:    slli a5, a4, 26
 ; RV32I-NEXT:    srli a5, a5, 31
 ; RV32I-NEXT:    mv a6, a1
-; RV32I-NEXT:    bnez a5, .LBB13_2
+; RV32I-NEXT:    bnez a5, .LBB21_2
 ; RV32I-NEXT:    # %bb.1:
 ; RV32I-NEXT:    mv a6, a0
-; RV32I-NEXT:  .LBB13_2:
-; RV32I-NEXT:    bnez a5, .LBB13_4
+; RV32I-NEXT:  .LBB21_2:
+; RV32I-NEXT:    bnez a5, .LBB21_4
 ; RV32I-NEXT:    # %bb.3:
 ; RV32I-NEXT:    mv a0, a1
-; RV32I-NEXT:  .LBB13_4:
+; RV32I-NEXT:  .LBB21_4:
 ; RV32I-NEXT:    sll a7, a6, a4
 ; RV32I-NEXT:    srli t0, a0, 1
 ; RV32I-NEXT:    not a1, a4
@@ -927,17 +1399,17 @@
 ; RV32I-NEXT:    srli a0, a6, 1
 ; RV32I-NEXT:    srl t2, a0, a1
 ; RV32I-NEXT:    mv a0, a3
-; RV32I-NEXT:    bnez a5, .LBB13_6
+; RV32I-NEXT:    bnez a5, .LBB21_6
 ; RV32I-NEXT:    # %bb.5:
 ; RV32I-NEXT:    mv a0, a2
-; RV32I-NEXT:  .LBB13_6:
+; RV32I-NEXT:  .LBB21_6:
 ; RV32I-NEXT:    or a6, a7, t0
 ; RV32I-NEXT:    or a7, t1, t2
 ; RV32I-NEXT:    sll t0, a0, a4
-; RV32I-NEXT:    bnez a5, .LBB13_8
+; RV32I-NEXT:    bnez a5, .LBB21_8
 ; RV32I-NEXT:    # %bb.7:
 ; RV32I-NEXT:    mv a2, a3
-; RV32I-NEXT:  .LBB13_8:
+; RV32I-NEXT:  .LBB21_8:
 ; RV32I-NEXT:    srli a3, a2, 1
 ; RV32I-NEXT:    srl a3, a3, a1
 ; RV32I-NEXT:    or a3, t0, a3
@@ -968,14 +1440,14 @@
 ; RV32ZBB-NEXT:    slli a5, a4, 26
 ; RV32ZBB-NEXT:    srli a5, a5, 31
 ; RV32ZBB-NEXT:    mv a6, a1
-; RV32ZBB-NEXT:    bnez a5, .LBB13_2
+; RV32ZBB-NEXT:    bnez a5, .LBB21_2
 ; RV32ZBB-NEXT:    # %bb.1:
 ; RV32ZBB-NEXT:    mv a6, a0
-; RV32ZBB-NEXT:  .LBB13_2:
-; RV32ZBB-NEXT:    bnez a5, .LBB13_4
+; RV32ZBB-NEXT:  .LBB21_2:
+; RV32ZBB-NEXT:    bnez a5, .LBB21_4
 ; RV32ZBB-NEXT:    # %bb.3:
 ; RV32ZBB-NEXT:    mv a0, a1
-; RV32ZBB-NEXT:  .LBB13_4:
+; RV32ZBB-NEXT:  .LBB21_4:
 ; RV32ZBB-NEXT:    sll a7, a6, a4
 ; RV32ZBB-NEXT:    srli t0, a0, 1
 ; RV32ZBB-NEXT:    not a1, a4
@@ -984,17 +1456,17 @@
 ; RV32ZBB-NEXT:    srli a0, a6, 1
 ; RV32ZBB-NEXT:    srl t2, a0, a1
 ; RV32ZBB-NEXT:    mv a0, a3
-; RV32ZBB-NEXT:    bnez a5, .LBB13_6
+; RV32ZBB-NEXT:    bnez a5, .LBB21_6
 ; RV32ZBB-NEXT:    # %bb.5:
 ; RV32ZBB-NEXT:    mv a0, a2
-; RV32ZBB-NEXT:  .LBB13_6:
+; RV32ZBB-NEXT:  .LBB21_6:
 ; RV32ZBB-NEXT:    or a6, a7, t0
 ; RV32ZBB-NEXT:    or a7, t1, t2
 ; RV32ZBB-NEXT:    sll t0, a0, a4
-; RV32ZBB-NEXT:    bnez a5, .LBB13_8
+; RV32ZBB-NEXT:    bnez a5, .LBB21_8
 ; RV32ZBB-NEXT:    # %bb.7:
 ; RV32ZBB-NEXT:    mv a2, a3
-; RV32ZBB-NEXT:  .LBB13_8:
+; RV32ZBB-NEXT:  .LBB21_8:
 ; RV32ZBB-NEXT:    srli a3, a2, 1
 ; RV32ZBB-NEXT:    srl a3, a3, a1
 ; RV32ZBB-NEXT:    or a3, t0, a3
@@ -1071,14 +1543,14 @@
 ; RV32I:       # %bb.0:
 ; RV32I-NEXT:    andi a5, a4, 32
 ; RV32I-NEXT:    mv a6, a0
-; RV32I-NEXT:    beqz a5, .LBB15_2
+; RV32I-NEXT:    beqz a5, .LBB23_2
 ; RV32I-NEXT:    # %bb.1:
 ; RV32I-NEXT:    mv a6, a1
-; RV32I-NEXT:  .LBB15_2:
-; RV32I-NEXT:    beqz a5, .LBB15_4
+; RV32I-NEXT:  .LBB23_2:
+; RV32I-NEXT:    beqz a5, .LBB23_4
 ; RV32I-NEXT:    # %bb.3:
 ; RV32I-NEXT:    mv a1, a0
-; RV32I-NEXT:  .LBB15_4:
+; RV32I-NEXT:  .LBB23_4:
 ; RV32I-NEXT:    srl a7, a6, a4
 ; RV32I-NEXT:    slli t0, a1, 1
 ; RV32I-NEXT:    not a0, a4
@@ -1087,17 +1559,17 @@
 ; RV32I-NEXT:    slli a1, a6, 1
 ; RV32I-NEXT:    sll t2, a1, a0
 ; RV32I-NEXT:    mv a6, a2
-; RV32I-NEXT:    beqz a5, .LBB15_6
+; RV32I-NEXT:    beqz a5, .LBB23_6
 ; RV32I-NEXT:    # %bb.5:
 ; RV32I-NEXT:    mv a6, a3
-; RV32I-NEXT:  .LBB15_6:
+; RV32I-NEXT:  .LBB23_6:
 ; RV32I-NEXT:    or a1, t0, a7
 ; RV32I-NEXT:    or a7, t2, t1
 ; RV32I-NEXT:    srl t0, a6, a4
-; RV32I-NEXT:    beqz a5, .LBB15_8
+; RV32I-NEXT:    beqz a5, .LBB23_8
 ; RV32I-NEXT:    # %bb.7:
 ; RV32I-NEXT:    mv a3, a2
-; RV32I-NEXT:  .LBB15_8:
+; RV32I-NEXT:  .LBB23_8:
 ; RV32I-NEXT:    slli a2, a3, 1
 ; RV32I-NEXT:    sll a2, a2, a0
 ; RV32I-NEXT:    or a2, a2, t0
@@ -1127,14 +1599,14 @@
 ; RV32ZBB:       # %bb.0:
 ; RV32ZBB-NEXT:    andi a5, a4, 32
 ; RV32ZBB-NEXT:    mv a6, a0
-; RV32ZBB-NEXT:    beqz a5, .LBB15_2
+; RV32ZBB-NEXT:    beqz a5, .LBB23_2
 ; RV32ZBB-NEXT:    # %bb.1:
 ; RV32ZBB-NEXT:    mv a6, a1
-; RV32ZBB-NEXT:  .LBB15_2:
-; RV32ZBB-NEXT:    beqz a5, .LBB15_4
+; RV32ZBB-NEXT:  .LBB23_2:
+; RV32ZBB-NEXT:    beqz a5, .LBB23_4
 ; RV32ZBB-NEXT:    # %bb.3:
 ; RV32ZBB-NEXT:    mv a1, a0
-; RV32ZBB-NEXT:  .LBB15_4:
+; RV32ZBB-NEXT:  .LBB23_4:
 ; RV32ZBB-NEXT:    srl a7, a6, a4
 ; RV32ZBB-NEXT:    slli t0, a1, 1
 ; RV32ZBB-NEXT:    not a0, a4
@@ -1143,17 +1615,17 @@
 ; RV32ZBB-NEXT:    slli a1, a6, 1
 ; RV32ZBB-NEXT:    sll t2, a1, a0
 ; RV32ZBB-NEXT:    mv a6, a2
-; RV32ZBB-NEXT:    beqz a5, .LBB15_6
+; RV32ZBB-NEXT:    beqz a5, .LBB23_6
 ; RV32ZBB-NEXT:    # %bb.5:
 ; RV32ZBB-NEXT:    mv a6, a3
-; RV32ZBB-NEXT:  .LBB15_6:
+; RV32ZBB-NEXT:  .LBB23_6:
 ; RV32ZBB-NEXT:    or a1, t0, a7
 ; RV32ZBB-NEXT:    or a7, t2, t1
 ; RV32ZBB-NEXT:    srl t0, a6, a4
-; RV32ZBB-NEXT:    beqz a5, .LBB15_8
+; RV32ZBB-NEXT:    beqz a5, .LBB23_8
 ; RV32ZBB-NEXT:    # %bb.7:
 ; RV32ZBB-NEXT:    mv a3, a2
-; RV32ZBB-NEXT:  .LBB15_8:
+; RV32ZBB-NEXT:  .LBB23_8:
 ; RV32ZBB-NEXT:    slli a2, a3, 1
 ; RV32ZBB-NEXT:    sll a2, a2, a0
 ; RV32ZBB-NEXT:    or a2, a2, t0