diff --git a/llvm/test/CodeGen/X86/setcc-combine.ll b/llvm/test/CodeGen/X86/setcc-combine.ll
--- a/llvm/test/CodeGen/X86/setcc-combine.ll
+++ b/llvm/test/CodeGen/X86/setcc-combine.ll
@@ -499,3 +499,218 @@
   %r = select i1 %cmp, double %x, double %neg
   ret double %r
 }
+
+define i64 @cmp_sgt(i64 %a, i64 %b) {
+; CHECK-LABEL: cmp_sgt:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    notq %rsi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq %rsi, %rdi
+; CHECK-NEXT:    setg %al
+; CHECK-NEXT:    negq %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %nb = xor i64 %b, -1
+  %c = icmp sgt i64 %na, %nb
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define i64 @cmp_sgt_constant(i64 %a) {
+; CHECK-LABEL: cmp_sgt_constant:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq $43, %rdi
+; CHECK-NEXT:    setge %al
+; CHECK-NEXT:    negq %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %c = icmp sgt i64 %na, 42
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define <2 x i64> @cmp_sgt_vec(<2 x i64> %a, <2 x i64> %b) {
+; CHECK-LABEL: cmp_sgt_vec:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movdqa {{.*#+}} xmm2 = [18446744071562067967,18446744071562067967]
+; CHECK-NEXT:    pxor %xmm2, %xmm1
+; CHECK-NEXT:    pxor %xmm2, %xmm0
+; CHECK-NEXT:    movdqa %xmm0, %xmm2
+; CHECK-NEXT:    pcmpgtd %xmm1, %xmm2
+; CHECK-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
+; CHECK-NEXT:    pcmpeqd %xmm1, %xmm0
+; CHECK-NEXT:    pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
+; CHECK-NEXT:    pand %xmm3, %xmm1
+; CHECK-NEXT:    pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
+; CHECK-NEXT:    por %xmm1, %xmm0
+; CHECK-NEXT:    retq
+  %na = xor <2 x i64> %a, <i64 -1, i64 -1>
+  %nb = xor <2 x i64> %b, <i64 -1, i64 -1>
+  %c = icmp sgt <2 x i64> %na, %nb
+  %r = sext <2 x i1> %c to <2 x i64>
+  ret <2 x i64> %r
+}
+
+define i64 @cmp_ugt(i64 %a, i64 %b) {
+; CHECK-LABEL: cmp_ugt:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    addq %rsi, %rdi
+; CHECK-NEXT:    sbbq %rax, %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %nb = xor i64 %b, -1
+  %c = icmp ugt i64 %na, %nb
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define i64 @cmp_ugt_constant(i64 %a) {
+; CHECK-LABEL: cmp_ugt_constant:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq $43, %rdi
+; CHECK-NEXT:    adcq $-1, %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %c = icmp ugt i64 %na, 42
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define <2 x i64> @cmp_ugt_vec(<2 x i64> %a, <2 x i64> %b) {
+; CHECK-LABEL: cmp_ugt_vec:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movdqa {{.*#+}} xmm2 = [9223372034707292159,9223372034707292159]
+; CHECK-NEXT:    pxor %xmm2, %xmm1
+; CHECK-NEXT:    pxor %xmm2, %xmm0
+; CHECK-NEXT:    movdqa %xmm0, %xmm2
+; CHECK-NEXT:    pcmpgtd %xmm1, %xmm2
+; CHECK-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
+; CHECK-NEXT:    pcmpeqd %xmm1, %xmm0
+; CHECK-NEXT:    pshufd {{.*#+}} xmm1 = xmm0[1,1,3,3]
+; CHECK-NEXT:    pand %xmm3, %xmm1
+; CHECK-NEXT:    pshufd {{.*#+}} xmm0 = xmm2[1,1,3,3]
+; CHECK-NEXT:    por %xmm1, %xmm0
+; CHECK-NEXT:    retq
+  %na = xor <2 x i64> %a, <i64 -1, i64 -1>
+  %nb = xor <2 x i64> %b, <i64 -1, i64 -1>
+  %c = icmp ugt <2 x i64> %na, %nb
+  %r = sext <2 x i1> %c to <2 x i64>
+  ret <2 x i64> %r
+}
+
+define i64 @cmp_sle(i64 %a, i64 %b) {
+; CHECK-LABEL: cmp_sle:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    notq %rsi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq %rsi, %rdi
+; CHECK-NEXT:    setle %al
+; CHECK-NEXT:    negq %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %nb = xor i64 %b, -1
+  %c = icmp sle i64 %na, %nb
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define i64 @cmp_sle_constant(i64 %a) {
+; CHECK-LABEL: cmp_sle_constant:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq $43, %rdi
+; CHECK-NEXT:    setl %al
+; CHECK-NEXT:    negq %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %c = icmp sle i64 %na, 42
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define <2 x i64> @cmp_sle_vec(<2 x i64> %a, <2 x i64> %b) {
+; CHECK-LABEL: cmp_sle_vec:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movdqa {{.*#+}} xmm2 = [9223372034707292159,9223372034707292159]
+; CHECK-NEXT:    pxor %xmm2, %xmm1
+; CHECK-NEXT:    pxor %xmm2, %xmm0
+; CHECK-NEXT:    movdqa %xmm0, %xmm2
+; CHECK-NEXT:    pcmpgtd %xmm1, %xmm2
+; CHECK-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
+; CHECK-NEXT:    pcmpeqd %xmm1, %xmm0
+; CHECK-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
+; CHECK-NEXT:    pand %xmm3, %xmm0
+; CHECK-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
+; CHECK-NEXT:    por %xmm0, %xmm1
+; CHECK-NEXT:    pcmpeqd %xmm0, %xmm0
+; CHECK-NEXT:    pxor %xmm1, %xmm0
+; CHECK-NEXT:    retq
+  %na = xor <2 x i64> %a, <i64 -1, i64 -1>
+  %nb = xor <2 x i64> %b, <i64 -1, i64 -1>
+  %c = icmp ule <2 x i64> %na, %nb
+  %r = sext <2 x i1> %c to <2 x i64>
+  ret <2 x i64> %r
+}
+
+define i64 @cmp_ule(i64 %a, i64 %b) {
+; CHECK-LABEL: cmp_ule:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    notq %rsi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq %rdi, %rsi
+; CHECK-NEXT:    adcq $-1, %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %nb = xor i64 %b, -1
+  %c = icmp ule i64 %na, %nb
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define i64 @cmp_ule_constant(i64 %a) {
+; CHECK-LABEL: cmp_ule_constant:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    notq %rdi
+; CHECK-NEXT:    xorl %eax, %eax
+; CHECK-NEXT:    cmpq $43, %rdi
+; CHECK-NEXT:    sbbq %rax, %rax
+; CHECK-NEXT:    retq
+  %na = xor i64 %a, -1
+  %c = icmp ule i64 %na, 42
+  %r = sext i1 %c to i64
+  ret i64 %r
+}
+
+define <2 x i64> @cmp_ule_vec(<2 x i64> %a, <2 x i64> %b) {
+; CHECK-LABEL: cmp_ule_vec:
+; CHECK:       # %bb.0:
+; CHECK-NEXT:    movdqa {{.*#+}} xmm2 = [9223372034707292159,9223372034707292159]
+; CHECK-NEXT:    pxor %xmm2, %xmm1
+; CHECK-NEXT:    pxor %xmm2, %xmm0
+; CHECK-NEXT:    movdqa %xmm0, %xmm2
+; CHECK-NEXT:    pcmpgtd %xmm1, %xmm2
+; CHECK-NEXT:    pshufd {{.*#+}} xmm3 = xmm2[0,0,2,2]
+; CHECK-NEXT:    pcmpeqd %xmm1, %xmm0
+; CHECK-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[1,1,3,3]
+; CHECK-NEXT:    pand %xmm3, %xmm0
+; CHECK-NEXT:    pshufd {{.*#+}} xmm1 = xmm2[1,1,3,3]
+; CHECK-NEXT:    por %xmm0, %xmm1
+; CHECK-NEXT:    pcmpeqd %xmm0, %xmm0
+; CHECK-NEXT:    pxor %xmm1, %xmm0
+; CHECK-NEXT:    retq
+  %na = xor <2 x i64> %a, <i64 -1, i64 -1>
+  %nb = xor <2 x i64> %b, <i64 -1, i64 -1>
+  %c = icmp ule <2 x i64> %na, %nb
+  %r = sext <2 x i1> %c to <2 x i64>
+  ret <2 x i64> %r
+}