Index: lib/CodeGen/SelectionDAG/DAGCombiner.cpp
===================================================================
--- lib/CodeGen/SelectionDAG/DAGCombiner.cpp
+++ lib/CodeGen/SelectionDAG/DAGCombiner.cpp
@@ -9414,10 +9414,13 @@
   // fold (sext_in_reg (sext x)) -> (sext x)
   // fold (sext_in_reg (aext x)) -> (sext x)
-  // if x is small enough.
+  // if x is small enough or if we know that x has more than 1 sign bit and the
+  // sign_extend_inreg is extending from one of them.
   if (N0.getOpcode() == ISD::SIGN_EXTEND || N0.getOpcode() == ISD::ANY_EXTEND) {
     SDValue N00 = N0.getOperand(0);
-    if (N00.getScalarValueSizeInBits() <= EVTBits &&
+    unsigned N00Bits = N00.getScalarValueSizeInBits();
+    if ((N00Bits <= EVTBits ||
+         (N00Bits - DAG.ComputeNumSignBits(N00)) < EVTBits) &&
         (!LegalOperations || TLI.isOperationLegal(ISD::SIGN_EXTEND, VT)))
       return DAG.getNode(ISD::SIGN_EXTEND, SDLoc(N), VT, N00);
   }
Index: test/CodeGen/PowerPC/f128-truncateNconv.ll
===================================================================
--- test/CodeGen/PowerPC/f128-truncateNconv.ll
+++ test/CodeGen/PowerPC/f128-truncateNconv.ll
@@ -355,7 +355,7 @@
 ; CHECK-NEXT: lxv v2, 0(r3)
 ; CHECK-NEXT: xscvqpswz v2, v2
 ; CHECK-NEXT: mfvsrwz r3, v2
-; CHECK-NEXT: extsh r3, r3
+; CHECK-NEXT: extsw r3, r3
 ; CHECK-NEXT: blr
 entry:
   %0 = load fp128, fp128* %a, align 16
@@ -393,7 +393,7 @@
 ; CHECK-NEXT: xsaddqp v2, v2, v3
 ; CHECK-NEXT: xscvqpswz v2, v2
 ; CHECK-NEXT: mfvsrwz r3, v2
-; CHECK-NEXT: extsh r3, r3
+; CHECK-NEXT: extsw r3, r3
 ; CHECK-NEXT: blr
 entry:
   %0 = load fp128, fp128* %a, align 16
@@ -509,7 +509,7 @@
 ; CHECK-NEXT: lxv v2, 0(r3)
 ; CHECK-NEXT: xscvqpswz v2, v2
 ; CHECK-NEXT: mfvsrwz r3, v2
-; CHECK-NEXT: extsb r3, r3
+; CHECK-NEXT: extsw r3, r3
 ; CHECK-NEXT: blr
 entry:
   %0 = load fp128, fp128* %a, align 16
@@ -547,7 +547,7 @@
 ; CHECK-NEXT: xsaddqp v2, v2, v3
 ; CHECK-NEXT: xscvqpswz v2, v2
 ; CHECK-NEXT: mfvsrwz r3, v2
-; CHECK-NEXT: extsb r3, r3
+; CHECK-NEXT: extsw r3, r3
 ; CHECK-NEXT: blr
 entry:
   %0 = load fp128, fp128* %a, align 16
Index: test/CodeGen/PowerPC/ppc64-P9-setb.ll
===================================================================
--- test/CodeGen/PowerPC/ppc64-P9-setb.ll
+++ test/CodeGen/PowerPC/ppc64-P9-setb.ll
@@ -744,14 +744,14 @@
 ; CHECK-NOT: addic
 ; CHECK-NOT: subfe
 ; CHECK-NOT: isel
-; CHECK: extsh
+; CHECK: extsw
 ; CHECK: blr
 ; CHECK-PWR8-LABEL: setb28
 ; CHECK-PWR8-DAG: cmpd
 ; CHECK-PWR8-DAG: addic
 ; CHECK-PWR8-DAG: subfe
 ; CHECK-PWR8: isel
-; CHECK-PWR8: extsh
+; CHECK-PWR8: extsw
 ; CHECK-PWR8: blr
 }
Index: test/CodeGen/X86/setcc-combine.ll
===================================================================
--- test/CodeGen/X86/setcc-combine.ll
+++ test/CodeGen/X86/setcc-combine.ll
@@ -7,8 +7,8 @@
 ; CHECK-NEXT: pcmpgtd %xmm0, %xmm1
 ; CHECK-NEXT: pcmpeqd %xmm0, %xmm0
 ; CHECK-NEXT: pxor %xmm1, %xmm0
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm0, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %A, %B
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -22,8 +22,8 @@
 ; CHECK-LABEL: test_ne_1:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: pcmpgtd %xmm0, %xmm1
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm1, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %A, %B
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -52,8 +52,8 @@
 ; CHECK-NEXT: pcmpgtd %xmm0, %xmm1
 ; CHECK-NEXT: pcmpeqd %xmm0, %xmm0
 ; CHECK-NEXT: pxor %xmm1, %xmm0
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm0, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %A, %B
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -67,8 +67,8 @@
 ; CHECK-LABEL: test_lt_1:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: pcmpgtd %xmm0, %xmm1
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm1, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %A, %B
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -97,8 +97,8 @@
 ; CHECK-NEXT: pcmpgtd %xmm1, %xmm0
 ; CHECK-NEXT: pcmpeqd %xmm1, %xmm1
 ; CHECK-NEXT: pxor %xmm0, %xmm1
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm1, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %B, %A
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -112,8 +112,8 @@
 ; CHECK-LABEL: test_ne_2:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: pcmpgtd %xmm1, %xmm0
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm0, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %B, %A
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -129,8 +129,8 @@
 ; CHECK-NEXT: pcmpgtd %xmm1, %xmm0
 ; CHECK-NEXT: pcmpeqd %xmm1, %xmm1
 ; CHECK-NEXT: pxor %xmm0, %xmm1
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm1[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm1, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %B, %A
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -157,8 +157,8 @@
 ; CHECK-LABEL: test_lt_2:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: pcmpgtd %xmm1, %xmm0
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm0, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %B, %A
   %sext = sext <4 x i1> %cmp to <4 x i32>
@@ -172,8 +172,8 @@
 ; CHECK-LABEL: test_gt_2:
 ; CHECK: # %bb.0:
 ; CHECK-NEXT: pcmpgtd %xmm1, %xmm0
-; CHECK-NEXT: pshufd {{.*#+}} xmm0 = xmm0[1,1,2,3]
-; CHECK-NEXT: movd %xmm0, %eax
+; CHECK-NEXT: pextrw $2, %xmm0, %eax
+; CHECK-NEXT: movsbl %al, %eax
 ; CHECK-NEXT: retq
   %cmp = icmp slt <4 x i32> %B, %A
   %sext = sext <4 x i1> %cmp to <4 x i32>
Index: test/CodeGen/X86/vsel-cmp-load.ll
===================================================================
--- test/CodeGen/X86/vsel-cmp-load.ll
+++ test/CodeGen/X86/vsel-cmp-load.ll
@@ -11,10 +11,9 @@
 ; AVX1-NEXT: vpmovzxbw {{.*#+}} xmm2 = mem[0],zero,mem[1],zero,mem[2],zero,mem[3],zero,mem[4],zero,mem[5],zero,mem[6],zero,mem[7],zero
 ; AVX1-NEXT: vpxor %xmm3, %xmm3, %xmm3
 ; AVX1-NEXT: vpcmpeqw %xmm3, %xmm2, %xmm2
-; AVX1-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
-; AVX1-NEXT: vpslld $24, %xmm3, %xmm3
-; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
-; AVX1-NEXT: vpslld $24, %xmm2, %xmm2
+; AVX1-NEXT: vpmovsxwd %xmm2, %xmm3
+; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
+; AVX1-NEXT: vpmovsxwd %xmm2, %xmm2
 ; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
 ; AVX1-NEXT: vblendvps %ymm2, %ymm0, %ymm1, %ymm0
 ; AVX1-NEXT: retq
@@ -115,12 +114,9 @@
 ; AVX1-LABEL: slt_zero:
 ; AVX1: # %bb.0:
 ; AVX1-NEXT: vpmovsxbw (%rdi), %xmm2
-; AVX1-NEXT: vpxor %xmm3, %xmm3, %xmm3
-; AVX1-NEXT: vpcmpgtw %xmm2, %xmm3, %xmm2
-; AVX1-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
-; AVX1-NEXT: vpslld $24, %xmm3, %xmm3
-; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
-; AVX1-NEXT: vpslld $24, %xmm2, %xmm2
+; AVX1-NEXT: vpmovsxwd %xmm2, %xmm3
+; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
+; AVX1-NEXT: vpmovsxwd %xmm2, %xmm2
 ; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
 ; AVX1-NEXT: vblendvps %ymm2, %ymm0, %ymm1, %ymm0
 ; AVX1-NEXT: retq
@@ -184,10 +180,9 @@
 ; AVX1-NEXT: vpcmpeqw %xmm3, %xmm2, %xmm2
 ; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3
 ; AVX1-NEXT: vpxor %xmm3, %xmm2, %xmm2
-; AVX1-NEXT: vpmovzxwd {{.*#+}} xmm3 = xmm2[0],zero,xmm2[1],zero,xmm2[2],zero,xmm2[3],zero
-; AVX1-NEXT: vpslld $24, %xmm3, %xmm3
-; AVX1-NEXT: vpunpckhwd {{.*#+}} xmm2 = xmm2[4],xmm0[4],xmm2[5],xmm0[5],xmm2[6],xmm0[6],xmm2[7],xmm0[7]
-; AVX1-NEXT: vpslld $24, %xmm2, %xmm2
+; AVX1-NEXT: vpmovsxwd %xmm2, %xmm3
+; AVX1-NEXT: vpshufd {{.*#+}} xmm2 = xmm2[2,3,0,1]
+; AVX1-NEXT: vpmovsxwd %xmm2, %xmm2
 ; AVX1-NEXT: vinsertf128 $1, %xmm2, %ymm3, %ymm2
 ; AVX1-NEXT: vblendvps %ymm2, %ymm0, %ymm1, %ymm0
 ; AVX1-NEXT: retq
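Note (not part of the patch): the reasoning behind the new condition in DAGCombiner.cpp is that if x is N00Bits wide and ComputeNumSignBits(N00) of its top bits are known copies of the sign bit, then every bit at position >= N00Bits - ComputeNumSignBits(N00) is such a copy. When N00Bits - ComputeNumSignBits(N00) < EVTBits, bit EVTBits-1 is one of those copies, so re-extending from it changes nothing and the sext_in_reg can be folded into a plain sign_extend. The standalone C++ sketch below brute-forces this claim for all 8-bit values of x; sextInReg and numSignBits are illustrative stand-ins for the corresponding DAG queries, not LLVM APIs.

// Standalone sanity check of the new fold condition; not part of the patch.
#include <cassert>
#include <cstdint>

// Sign-extend the low FromBits bits of V into a full int64_t
// (scalar model of what ISD::SIGN_EXTEND_INREG computes).
static int64_t sextInReg(int64_t V, unsigned FromBits) {
  unsigned Shift = 64 - FromBits;
  return (int64_t)((uint64_t)V << Shift) >> Shift;
}

// Number of known sign bits of the Width-bit value V
// (stand-in for SelectionDAG::ComputeNumSignBits).
static unsigned numSignBits(int64_t V, unsigned Width) {
  unsigned N = 1;
  while (N < Width &&
         ((V >> (Width - 1 - N)) & 1) == ((V >> (Width - 1)) & 1))
    ++N;
  return N;
}

int main() {
  const unsigned N00Bits = 8; // width of the inner value x
  for (unsigned EVTBits = 1; EVTBits <= 16; ++EVTBits) {
    for (int X = -128; X < 128; ++X) {
      int64_t Sext = X; // (sext x): already sign-extended to 64 bits
      if (N00Bits <= EVTBits ||
          N00Bits - numSignBits(Sext, N00Bits) < EVTBits)
        // The patch's condition holds: the extra sign_extend_inreg is a
        // no-op on the sign-extended value, so dropping it is safe.
        assert(sextInReg(Sext, EVTBits) == Sext);
    }
  }
  return 0;
}

Running this exercises every 8-bit input against every in-reg width from 1 to 16 and asserts that whenever the patched condition fires, sign_extend_inreg leaves the sign-extended value unchanged; this matches the test diffs above, where the narrow extsh/extsb re-extensions become a single extsw and the X86 shift-based lane widening becomes direct vpmovsxwd sign extensions.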