diff --git a/llvm/test/CodeGen/AMDGPU/combine_andor_with_cmps.ll b/llvm/test/CodeGen/AMDGPU/combine_andor_with_cmps.ll new file mode 100644 --- /dev/null +++ b/llvm/test/CodeGen/AMDGPU/combine_andor_with_cmps.ll @@ -0,0 +1,1251 @@ +; NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py UTC_ARGS: --version 2 +; RUN: llc -mtriple=amdgcn-amd-amdhsa -mcpu=gfx1100 -verify-machineinstrs -stop-after=si-fix-sgpr-copies < %s | FileCheck %s + +; The tests check the following optimization of DAGCombiner: +; CMP(A,C)||CMP(B,C) => CMP(MIN/MAX(A,B), C) +; CMP(A,C)&&CMP(B,C) => CMP(MIN/MAX(A,B), C) + +define i1 @test1(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test1 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp slt i32 %arg1, 1000 + %cmp2 = icmp slt i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test2(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test2 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ult i32 %arg1, 1000 + %cmp2 = icmp ult i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test3(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test3 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1001 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed 
[[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sle i32 %arg1, 1000 + %cmp2 = icmp sle i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test4(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test4 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1001 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ule i32 %arg1, 1000 + %cmp2 = icmp ule i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test5(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test5 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_I32_e64_]], killed [[V_CMP_GT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sgt i32 %arg1, 1000 + %cmp2 = icmp sgt i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test6(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test6 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ugt i32 %arg1, 1000 + %cmp2 = icmp ugt i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test7(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test7 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, 
$vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 999 + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_I32_e64_]], killed [[V_CMP_GT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sge i32 %arg1, 1000 + %cmp2 = icmp sge i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test8(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test8 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 999 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp uge i32 %arg1, 1000 + %cmp2 = icmp uge i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test9(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test9 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp slt i32 %arg1, %arg3 + %cmp2 = icmp slt i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test10(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test10 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: 
[[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ult i32 %arg1, %arg3 + %cmp2 = icmp ult i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test11(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test11 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LE_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LE_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LE_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LE_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LE_I32_e64_]], killed [[V_CMP_LE_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sle i32 %arg1, %arg3 + %cmp2 = icmp sle i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test12(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test12 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LE_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LE_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LE_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LE_U32_e64_]], killed [[V_CMP_LE_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ule i32 %arg1, %arg3 + %cmp2 = icmp ule i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test13(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test13 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_I32_e64_]], killed [[V_CMP_GT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec 
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sgt i32 %arg1, %arg3 + %cmp2 = icmp sgt i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test14(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test14 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ugt i32 %arg1, %arg3 + %cmp2 = icmp ugt i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test15(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test15 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GE_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GE_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GE_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GE_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GE_I32_e64_]], killed [[V_CMP_GE_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sge i32 %arg1, %arg3 + %cmp2 = icmp sge i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test16(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test16 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GE_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GE_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GE_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GE_U32_e64_]], killed [[V_CMP_GE_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp uge i32 %arg1, %arg3 + %cmp2 = icmp uge i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test17(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test17 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; 
CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp slt i32 %arg1, 1000 + %cmp2 = icmp slt i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test18(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test18 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ult i32 %arg1, 1000 + %cmp2 = icmp ult i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test19(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test19 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1001 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sle i32 %arg1, 1000 + %cmp2 = icmp sle i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test20(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test20 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1001 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: 
[[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ule i32 %arg1, 1000 + %cmp2 = icmp ule i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test21(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test21 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_I32_e64_]], killed [[V_CMP_GT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sgt i32 %arg1, 1000 + %cmp2 = icmp sgt i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test22(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test22 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ugt i32 %arg1, 1000 + %cmp2 = icmp ugt i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test23(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test23 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 999 + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_I32_e64_]], killed [[V_CMP_GT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed 
[[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sge i32 %arg1, 1000 + %cmp2 = icmp sge i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test24(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test24 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 999 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp uge i32 %arg1, 1000 + %cmp2 = icmp uge i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test25(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test25 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp slt i32 %arg1, %arg3 + %cmp2 = icmp slt i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test26(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test26 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ult i32 %arg1, %arg3 + %cmp2 = icmp ult i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test27(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test27 + ; CHECK: bb.0 
(%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LE_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LE_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LE_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LE_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LE_I32_e64_]], killed [[V_CMP_LE_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sle i32 %arg1, %arg3 + %cmp2 = icmp sle i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test28(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test28 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LE_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LE_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LE_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LE_U32_e64_]], killed [[V_CMP_LE_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ule i32 %arg1, %arg3 + %cmp2 = icmp ule i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test29(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test29 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_I32_e64_]], killed [[V_CMP_GT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sgt i32 %arg1, %arg3 + %cmp2 = icmp sgt i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test30(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test30 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY2]], [[COPY]], 
implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ugt i32 %arg1, %arg3 + %cmp2 = icmp ugt i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test31(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test31 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GE_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GE_I32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GE_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GE_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GE_I32_e64_]], killed [[V_CMP_GE_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp sge i32 %arg1, %arg3 + %cmp2 = icmp sge i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test32(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test32 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GE_U32_e64 [[COPY2]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GE_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GE_U32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GE_U32_e64_]], killed [[V_CMP_GE_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp uge i32 %arg1, %arg3 + %cmp2 = icmp uge i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + ret i1 %and +} + +define i1 @test33(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test33 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], [[COPY]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY1]], killed [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = 
V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp slt i32 %arg1, %arg2 + %cmp2 = icmp slt i32 %arg1, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test34(i32 %arg1, i64 %arg2) #0 { + ; CHECK-LABEL: name: test34 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF + ; CHECK-NEXT: [[DEF1:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF + ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY1]], %subreg.sub0, [[COPY]], %subreg.sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_LT_I32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I32_e64 [[COPY2]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sreg_32 = S_MOV_B32 0 + ; CHECK-NEXT: [[REG_SEQUENCE1:%[0-9]+]]:sreg_64 = REG_SEQUENCE [[S_MOV_B32_]], %subreg.sub0, killed [[S_MOV_B32_1]], %subreg.sub1 + ; CHECK-NEXT: [[V_CMP_LT_I64_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_I64_e64 killed [[REG_SEQUENCE]], killed [[REG_SEQUENCE1]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_I32_e64_]], killed [[V_CMP_LT_I64_e64_]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp slt i32 %arg1, 1000 + %cmp2 = icmp slt i64 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test35(i32 %arg1, i64 %arg2) #0 { + ; CHECK-LABEL: name: test35 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF + ; CHECK-NEXT: [[DEF1:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF + ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY1]], %subreg.sub0, [[COPY]], %subreg.sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_EQ_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_EQ_U32_e64 [[COPY2]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sreg_32 = S_MOV_B32 0 + ; CHECK-NEXT: [[REG_SEQUENCE1:%[0-9]+]]:sreg_64 = REG_SEQUENCE [[S_MOV_B32_]], %subreg.sub0, killed [[S_MOV_B32_1]], %subreg.sub1 + ; CHECK-NEXT: [[V_CMP_EQ_U64_e64_:%[0-9]+]]:sreg_32 = V_CMP_EQ_U64_e64 killed [[REG_SEQUENCE]], killed [[REG_SEQUENCE1]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_EQ_U32_e64_]], killed [[V_CMP_EQ_U64_e64_]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp eq i32 %arg1, 1000 + %cmp2 = icmp eq i64 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test36(i32 %arg1, i64 %arg2) #0 { + ; CHECK-LABEL: name: test36 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, 
$vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF + ; CHECK-NEXT: [[DEF1:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF + ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY1]], %subreg.sub0, [[COPY]], %subreg.sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: [[V_CMP_NE_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_NE_U32_e64 [[COPY2]], [[S_MOV_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sreg_32 = S_MOV_B32 0 + ; CHECK-NEXT: [[REG_SEQUENCE1:%[0-9]+]]:sreg_64 = REG_SEQUENCE [[S_MOV_B32_]], %subreg.sub0, killed [[S_MOV_B32_1]], %subreg.sub1 + ; CHECK-NEXT: [[V_CMP_NE_U64_e64_:%[0-9]+]]:sreg_32 = V_CMP_NE_U64_e64 killed [[REG_SEQUENCE]], killed [[REG_SEQUENCE1]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_NE_U32_e64_]], killed [[V_CMP_NE_U64_e64_]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ne i32 %arg1, 1000 + %cmp2 = icmp ne i64 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define amdgpu_kernel void @test37(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test37 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s64) from %ir.arg1.kernarg.offset1, align 16, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub0 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1001 + ; CHECK-NEXT: S_CMP_LT_I32 killed [[COPY1]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_LT_I32 killed [[COPY2]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[COPY3]], killed [[COPY4]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY5]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp sle i32 %arg1, 1000 + %cmp2 = icmp sle i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + store volatile i1 %or, ptr addrspace(1) null + ret void +} + +define amdgpu_kernel void @test38(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test38 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 
+ ; CHECK-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s64) from %ir.arg1.kernarg.offset1, align 16, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub0 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1000 + ; CHECK-NEXT: S_CMP_GT_I32 killed [[COPY1]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_GT_I32 killed [[COPY2]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[COPY3]], killed [[COPY4]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY5]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp sgt i32 %arg1, 1000 + %cmp2 = icmp sgt i32 %arg2, 1000 + %or = or i1 %cmp1, %cmp2 + store volatile i1 %or, ptr addrspace(1) null + ret void +} + +define amdgpu_kernel void @test39(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test39 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s128) from %ir.arg1.kernarg.offset1, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub2 + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub0 + ; CHECK-NEXT: S_CMP_LT_U32 killed [[COPY3]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_LT_U32 killed [[COPY1]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[COPY4]], killed [[COPY5]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY6:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY6]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp ult i32 %arg1, %arg3 + %cmp2 = icmp ult i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + store volatile i1 %or, ptr addrspace(1) null + ret void +} + +define 
amdgpu_kernel void @test40(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test40 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s128) from %ir.arg1.kernarg.offset1, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub2 + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub0 + ; CHECK-NEXT: S_CMP_GE_I32 killed [[COPY3]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_GE_I32 killed [[COPY1]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[COPY4]], killed [[COPY5]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY6:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY6]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp sge i32 %arg1, %arg3 + %cmp2 = icmp sge i32 %arg2, %arg3 + %or = or i1 %cmp1, %cmp2 + store volatile i1 %or, ptr addrspace(1) null + ret void +} + +define amdgpu_kernel void @test41(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test41 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s64) from %ir.arg1.kernarg.offset1, align 16, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub0 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 1001 + ; CHECK-NEXT: S_CMP_LT_U32 killed [[COPY1]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_LT_U32 killed [[COPY2]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[COPY3]], killed [[COPY4]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY5]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) 
into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp ule i32 %arg1, 1000 + %cmp2 = icmp ule i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + store volatile i1 %and, ptr addrspace(1) null + ret void +} + +define amdgpu_kernel void @test42(i32 %arg1, i32 %arg2) #0 { + ; CHECK-LABEL: name: test42 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX2_IMM:%[0-9]+]]:sreg_64_xexec = S_LOAD_DWORDX2_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s64) from %ir.arg1.kernarg.offset1, align 16, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub0 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX2_IMM]].sub1 + ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 999 + ; CHECK-NEXT: S_CMP_GT_I32 killed [[COPY1]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_GT_I32 killed [[COPY2]], [[S_MOV_B32_]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[COPY3]], killed [[COPY4]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY5]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp sge i32 %arg1, 1000 + %cmp2 = icmp sge i32 %arg2, 1000 + %and = and i1 %cmp1, %cmp2 + store volatile i1 %and, ptr addrspace(1) null + ret void +} + +define amdgpu_kernel void @test43(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test43 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s128) from %ir.arg1.kernarg.offset1, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub2 + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub0 + ; CHECK-NEXT: S_CMP_LE_I32 killed [[COPY3]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_LE_I32 killed [[COPY1]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[COPY4]], killed [[COPY5]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; 
CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY6:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY6]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp sle i32 %arg1, %arg3 + %cmp2 = icmp sle i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + store volatile i1 %and, ptr addrspace(1) null + ret void +} + +define amdgpu_kernel void @test44(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test44 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $sgpr0_sgpr1 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:sgpr_64(p4) = COPY $sgpr0_sgpr1 + ; CHECK-NEXT: [[S_LOAD_DWORDX4_IMM:%[0-9]+]]:sgpr_128 = S_LOAD_DWORDX4_IMM [[COPY]](p4), 0, 0 :: (dereferenceable invariant load (s128) from %ir.arg1.kernarg.offset1, addrspace 4) + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub2 + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:sreg_32 = COPY [[S_LOAD_DWORDX4_IMM]].sub0 + ; CHECK-NEXT: S_CMP_GE_U32 killed [[COPY3]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY4:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_]] + ; CHECK-NEXT: S_CMP_GE_U32 killed [[COPY1]], [[COPY2]], implicit-def $scc + ; CHECK-NEXT: [[S_CSELECT_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_CSELECT_B32 -1, 0, implicit $scc + ; CHECK-NEXT: [[COPY5:%[0-9]+]]:sreg_32 = COPY [[S_CSELECT_B32_1]] + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[COPY4]], killed [[COPY5]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: [[S_MOV_B64_:%[0-9]+]]:sreg_64 = S_MOV_B64 0 + ; CHECK-NEXT: [[COPY6:%[0-9]+]]:vreg_64 = COPY [[S_MOV_B64_]] + ; CHECK-NEXT: GLOBAL_STORE_BYTE killed [[COPY6]], killed [[V_CNDMASK_B32_e64_]], 0, 0, implicit $exec :: (volatile store (s8) into `ptr addrspace(1) null`, addrspace 1) + ; CHECK-NEXT: S_ENDPGM 0 + %cmp1 = icmp uge i32 %arg1, %arg3 + %cmp2 = icmp uge i32 %arg2, %arg3 + %and = and i1 %cmp1, %cmp2 + store volatile i1 %and, ptr addrspace(1) null + ret void +} + +define i1 @test45(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test45 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[COPY2]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[COPY1]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ult i32 %arg3, %arg1 + %cmp2 = icmp ult i32 %arg3, %arg2 + %or = and i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test46(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test46 + ; CHECK: bb.0 
(%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[COPY2]], implicit $exec + ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[COPY1]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ult i32 %arg3, %arg1 + %cmp2 = icmp ult i32 %arg3, %arg2 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + + +define i1 @test47(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test47 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[COPY2]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[COPY1]], implicit $exec + ; CHECK-NEXT: [[S_AND_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_AND_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_AND_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ugt i32 %arg3, %arg1 + %cmp2 = icmp ugt i32 %arg3, %arg2 + %or = and i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test48(i32 %arg1, i32 %arg2, i32 %arg3) #0 { + ; CHECK-LABEL: name: test48 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0 + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[COPY2]], implicit $exec + ; CHECK-NEXT: [[V_CMP_GT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[COPY1]], implicit $exec + ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_GT_U32_e64_1]], implicit-def dead $scc + ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec + ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]] + ; CHECK-NEXT: SI_RETURN implicit $vgpr0 + %cmp1 = icmp ugt i32 %arg3, %arg1 + %cmp2 = icmp ugt i32 %arg3, %arg2 + %or = or i1 %cmp1, %cmp2 + ret i1 %or +} + +define i1 @test49(i64 %arg1, i64 %arg2, i64 %arg3) #0 { + ; CHECK-LABEL: name: test49 + ; CHECK: bb.0 (%ir-block.0): + ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5 + ; CHECK-NEXT: {{ $}} + ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr5 + ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr4 + ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr3 + ; CHECK-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr2 + ; CHECK-NEXT: 
+define i1 @test49(i64 %arg1, i64 %arg2, i64 %arg3) #0 {
+ ; CHECK-LABEL: name: test49
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr5
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr4
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr3
+ ; CHECK-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY4:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[DEF1:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY3]], %subreg.sub0, [[COPY2]], %subreg.sub1
+ ; CHECK-NEXT: [[DEF2:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[DEF3:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[REG_SEQUENCE1:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY1]], %subreg.sub0, [[COPY]], %subreg.sub1
+ ; CHECK-NEXT: [[DEF4:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[DEF5:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[REG_SEQUENCE2:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY5]], %subreg.sub0, [[COPY4]], %subreg.sub1
+ ; CHECK-NEXT: [[V_CMP_LT_U64_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U64_e64 killed [[REG_SEQUENCE2]], [[REG_SEQUENCE1]], implicit $exec
+ ; CHECK-NEXT: [[V_CMP_LT_U64_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U64_e64 killed [[REG_SEQUENCE]], [[REG_SEQUENCE1]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U64_e64_]], killed [[V_CMP_LT_U64_e64_1]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = icmp ult i64 %arg1, %arg3
+ %cmp2 = icmp ult i64 %arg2, %arg3
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
+; The optimization does not apply to the following tests.
+
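+; fcmp ult is an unordered compare: it is also true if either operand is NaN.
+; Rewriting (A ult C) || (B ult C) into (min(A, B) ult C) is therefore not
+; NaN-safe: if A is NaN the original is true, while a NaN-ignoring min could
+; return B and the combined compare could be false. Without fast-math flags
+; the float/double tests below must keep both compares.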
+define i1 @test50(float %arg1, float %arg2, float %arg3) #0 {
+ ; CHECK-LABEL: name: test50
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[V_CMP_NGE_F32_e64_:%[0-9]+]]:sreg_32 = nofpexcept V_CMP_NGE_F32_e64 0, [[COPY2]], 0, [[COPY]], 0, implicit $mode, implicit $exec
+ ; CHECK-NEXT: [[V_CMP_NGE_F32_e64_1:%[0-9]+]]:sreg_32 = nofpexcept V_CMP_NGE_F32_e64 0, [[COPY1]], 0, [[COPY]], 0, implicit $mode, implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_NGE_F32_e64_]], killed [[V_CMP_NGE_F32_e64_1]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = fcmp ult float %arg1, %arg3
+ %cmp2 = fcmp ult float %arg2, %arg3
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
+define i1 @test51(double %arg1, double %arg2, double %arg3) #0 {
+ ; CHECK-LABEL: name: test51
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3, $vgpr4, $vgpr5
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr5
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr4
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr3
+ ; CHECK-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY4:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY5:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[DEF:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[DEF1:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[REG_SEQUENCE:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY3]], %subreg.sub0, [[COPY2]], %subreg.sub1
+ ; CHECK-NEXT: [[DEF2:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[DEF3:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[REG_SEQUENCE1:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY1]], %subreg.sub0, [[COPY]], %subreg.sub1
+ ; CHECK-NEXT: [[DEF4:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[DEF5:%[0-9]+]]:sgpr_32 = IMPLICIT_DEF
+ ; CHECK-NEXT: [[REG_SEQUENCE2:%[0-9]+]]:vreg_64 = REG_SEQUENCE [[COPY5]], %subreg.sub0, [[COPY4]], %subreg.sub1
+ ; CHECK-NEXT: [[V_CMP_NGE_F64_e64_:%[0-9]+]]:sreg_32 = nofpexcept V_CMP_NGE_F64_e64 0, killed [[REG_SEQUENCE2]], 0, [[REG_SEQUENCE1]], 0, implicit $mode, implicit $exec
+ ; CHECK-NEXT: [[V_CMP_NGE_F64_e64_1:%[0-9]+]]:sreg_32 = nofpexcept V_CMP_NGE_F64_e64 0, killed [[REG_SEQUENCE]], 0, [[REG_SEQUENCE1]], 0, implicit $mode, implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_NGE_F64_e64_]], killed [[V_CMP_NGE_F64_e64_1]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = fcmp ult double %arg1, %arg3
+ %cmp2 = fcmp ult double %arg2, %arg3
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
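+; The remaining tests break the pattern in other ways: test52 and test54 put
+; %arg3 on opposite sides of the two compares, test53 mixes ult with ugt,
+; test55 compares an i16 against an i32 (no common type for a single MIN/MAX),
+; and test56 chains a third compare onto the disjunction.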
+define i1 @test52(i32 %arg1, i32 %arg2, i32 %arg3) #0 {
+ ; CHECK-LABEL: name: test52
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY2]], [[COPY]], implicit $exec
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], [[COPY1]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = icmp ult i32 %arg1, %arg3
+ %cmp2 = icmp ult i32 %arg3, %arg2
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
+define i1 @test53(i32 %arg1, i32 %arg2, i32 %arg3) #0 {
+ ; CHECK-LABEL: name: test53
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY2]], [[COPY]], implicit $exec
+ ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY1]], [[COPY]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_GT_U32_e64_]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = icmp ult i32 %arg1, %arg3
+ %cmp2 = icmp ugt i32 %arg2, %arg3
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
+define i1 @test54(i32 %arg1, i32 %arg2, i32 %arg3) #0 {
+ ; CHECK-LABEL: name: test54
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[V_CMP_GT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_GT_U32_e64 [[COPY]], [[COPY2]], implicit $exec
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY1]], [[COPY]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_GT_U32_e64_]], killed [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = icmp ugt i32 %arg3, %arg1
+ %cmp2 = icmp ult i32 %arg2, %arg3
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
+define i1 @test55(i16 %arg1, i32 %arg2) #0 {
+ ; CHECK-LABEL: name: test55
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[S_MOV_B32_:%[0-9]+]]:sreg_32 = S_MOV_B32 10
+ ; CHECK-NEXT: [[V_CMP_LT_U16_t16_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U16_t16_e64 [[COPY1]], killed [[S_MOV_B32_]], implicit $exec
+ ; CHECK-NEXT: [[S_MOV_B32_1:%[0-9]+]]:sreg_32 = S_MOV_B32 10
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY]], killed [[S_MOV_B32_1]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U16_t16_e64_]], killed [[V_CMP_LT_U32_e64_]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = icmp ult i16 %arg1, 10
+ %cmp2 = icmp ult i32 %arg2, 10
+ %or = or i1 %cmp1, %cmp2
+ ret i1 %or
+}
+
+define i1 @test56(i32 %arg1, i32 %arg2, i32 %arg3, i32 %arg4) #0 {
+ ; CHECK-LABEL: name: test56
+ ; CHECK: bb.0 (%ir-block.0):
+ ; CHECK-NEXT: liveins: $vgpr0, $vgpr1, $vgpr2, $vgpr3
+ ; CHECK-NEXT: {{ $}}
+ ; CHECK-NEXT: [[COPY:%[0-9]+]]:vgpr_32 = COPY $vgpr3
+ ; CHECK-NEXT: [[COPY1:%[0-9]+]]:vgpr_32 = COPY $vgpr2
+ ; CHECK-NEXT: [[COPY2:%[0-9]+]]:vgpr_32 = COPY $vgpr1
+ ; CHECK-NEXT: [[COPY3:%[0-9]+]]:vgpr_32 = COPY $vgpr0
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY3]], [[COPY1]], implicit $exec
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_1:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY2]], [[COPY1]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_:%[0-9]+]]:sreg_32 = S_OR_B32 killed [[V_CMP_LT_U32_e64_]], killed [[V_CMP_LT_U32_e64_1]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CMP_LT_U32_e64_2:%[0-9]+]]:sreg_32 = V_CMP_LT_U32_e64 [[COPY3]], [[COPY]], implicit $exec
+ ; CHECK-NEXT: [[S_OR_B32_1:%[0-9]+]]:sreg_32_xm0_xexec = S_OR_B32 killed [[V_CMP_LT_U32_e64_2]], killed [[S_OR_B32_]], implicit-def dead $scc
+ ; CHECK-NEXT: [[V_CNDMASK_B32_e64_:%[0-9]+]]:vgpr_32 = V_CNDMASK_B32_e64 0, 0, 0, 1, killed [[S_OR_B32_1]], implicit $exec
+ ; CHECK-NEXT: $vgpr0 = COPY [[V_CNDMASK_B32_e64_]]
+ ; CHECK-NEXT: SI_RETURN implicit $vgpr0
+ %cmp1 = icmp ult i32 %arg1, %arg3
+ %cmp2 = icmp ult i32 %arg2, %arg3
+ %or1 = or i1 %cmp1, %cmp2
+ %cmp3 = icmp ult i32 %arg1, %arg4
+ %or2 = or i1 %cmp3, %or1
+ ret i1 %or2
+}
+
+attributes #0 = { nounwind readnone }