Index: llvm/trunk/lib/Target/X86/X86ISelLowering.cpp =================================================================== --- llvm/trunk/lib/Target/X86/X86ISelLowering.cpp +++ llvm/trunk/lib/Target/X86/X86ISelLowering.cpp @@ -22968,7 +22968,8 @@ return Op; } -static SDValue LowerCTTZ(SDValue Op, SelectionDAG &DAG) { +static SDValue LowerCTTZ(SDValue Op, const X86Subtarget &Subtarget, + SelectionDAG &DAG) { MVT VT = Op.getSimpleValueType(); unsigned NumBits = VT.getScalarSizeInBits(); SDLoc dl(Op); @@ -22977,21 +22978,24 @@ SDValue N0 = Op.getOperand(0); SDValue Zero = DAG.getConstant(0, dl, VT); - // lsb(x) = (x & -x) - SDValue LSB = DAG.getNode(ISD::AND, dl, VT, N0, - DAG.getNode(ISD::SUB, dl, VT, Zero, N0)); + // Decompose 256-bit ops into smaller 128-bit ops. + if (VT.is256BitVector() && !Subtarget.hasInt256()) + return Lower256IntUnary(Op, DAG); - // cttz_undef(x) = (width - 1) - ctlz(lsb) + // cttz_undef(x) = (width - 1) - ctlz(x & -x) if (Op.getOpcode() == ISD::CTTZ_ZERO_UNDEF) { SDValue WidthMinusOne = DAG.getConstant(NumBits - 1, dl, VT); + SDValue LSB = DAG.getNode(ISD::AND, dl, VT, N0, + DAG.getNode(ISD::SUB, dl, VT, Zero, N0)); return DAG.getNode(ISD::SUB, dl, VT, WidthMinusOne, DAG.getNode(ISD::CTLZ, dl, VT, LSB)); } - // cttz(x) = ctpop(lsb - 1) + // cttz(x) = ctpop(~x & (x - 1)) SDValue One = DAG.getConstant(1, dl, VT); return DAG.getNode(ISD::CTPOP, dl, VT, - DAG.getNode(ISD::SUB, dl, VT, LSB, One)); + DAG.getNode(ISD::AND, dl, VT, DAG.getNOT(dl, N0, VT), + DAG.getNode(ISD::SUB, dl, VT, N0, One))); } assert(Op.getOpcode() == ISD::CTTZ && @@ -25918,7 +25922,7 @@ case ISD::CTLZ: case ISD::CTLZ_ZERO_UNDEF: return LowerCTLZ(Op, Subtarget, DAG); case ISD::CTTZ: - case ISD::CTTZ_ZERO_UNDEF: return LowerCTTZ(Op, DAG); + case ISD::CTTZ_ZERO_UNDEF: return LowerCTTZ(Op, Subtarget, DAG); case ISD::MUL: return LowerMUL(Op, Subtarget, DAG); case ISD::MULHS: case ISD::MULHU: return LowerMULH(Op, Subtarget, DAG); Index: llvm/trunk/test/CodeGen/X86/vec_ctbits.ll =================================================================== --- llvm/trunk/test/CodeGen/X86/vec_ctbits.ll +++ llvm/trunk/test/CodeGen/X86/vec_ctbits.ll @@ -8,27 +8,26 @@ define <2 x i64> @footz(<2 x i64> %a) nounwind { ; CHECK-LABEL: footz: ; CHECK: # %bb.0: -; CHECK-NEXT: pxor %xmm1, %xmm1 -; CHECK-NEXT: pxor %xmm2, %xmm2 -; CHECK-NEXT: psubq %xmm0, %xmm2 -; CHECK-NEXT: pand %xmm0, %xmm2 -; CHECK-NEXT: pcmpeqd %xmm3, %xmm3 -; CHECK-NEXT: paddq %xmm2, %xmm3 -; CHECK-NEXT: movdqa %xmm3, %xmm0 -; CHECK-NEXT: psrlw $1, %xmm0 -; CHECK-NEXT: pand {{.*}}(%rip), %xmm0 -; CHECK-NEXT: psubb %xmm0, %xmm3 -; CHECK-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; CHECK-NEXT: movdqa %xmm3, %xmm2 -; CHECK-NEXT: pand %xmm0, %xmm2 -; CHECK-NEXT: psrlw $2, %xmm3 -; CHECK-NEXT: pand %xmm0, %xmm3 -; CHECK-NEXT: paddb %xmm2, %xmm3 -; CHECK-NEXT: movdqa %xmm3, %xmm0 -; CHECK-NEXT: psrlw $4, %xmm0 -; CHECK-NEXT: paddb %xmm3, %xmm0 -; CHECK-NEXT: pand {{.*}}(%rip), %xmm0 -; CHECK-NEXT: psadbw %xmm1, %xmm0 +; CHECK-NEXT: pcmpeqd %xmm1, %xmm1 +; CHECK-NEXT: paddq %xmm0, %xmm1 +; CHECK-NEXT: pandn %xmm1, %xmm0 +; CHECK-NEXT: movdqa %xmm0, %xmm1 +; CHECK-NEXT: psrlw $1, %xmm1 +; CHECK-NEXT: pand {{.*}}(%rip), %xmm1 +; CHECK-NEXT: psubb %xmm1, %xmm0 +; CHECK-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; CHECK-NEXT: movdqa %xmm0, %xmm2 +; CHECK-NEXT: pand %xmm1, %xmm2 +; CHECK-NEXT: psrlw $2, %xmm0 +; CHECK-NEXT: pand %xmm1, %xmm0 +; CHECK-NEXT: paddb %xmm2, %xmm0 +; CHECK-NEXT: movdqa 
%xmm0, %xmm1 +; CHECK-NEXT: psrlw $4, %xmm1 +; CHECK-NEXT: paddb %xmm0, %xmm1 +; CHECK-NEXT: pand {{.*}}(%rip), %xmm1 +; CHECK-NEXT: pxor %xmm0, %xmm0 +; CHECK-NEXT: psadbw %xmm0, %xmm1 +; CHECK-NEXT: movdqa %xmm1, %xmm0 ; CHECK-NEXT: retq %c = call <2 x i64> @llvm.cttz.v2i64(<2 x i64> %a, i1 true) ret <2 x i64> %c @@ -112,27 +111,26 @@ ; CHECK-LABEL: promtz: ; CHECK: # %bb.0: ; CHECK-NEXT: por {{.*}}(%rip), %xmm0 -; CHECK-NEXT: pxor %xmm1, %xmm1 -; CHECK-NEXT: pxor %xmm2, %xmm2 -; CHECK-NEXT: psubq %xmm0, %xmm2 -; CHECK-NEXT: pand %xmm0, %xmm2 -; CHECK-NEXT: pcmpeqd %xmm3, %xmm3 -; CHECK-NEXT: paddq %xmm2, %xmm3 -; CHECK-NEXT: movdqa %xmm3, %xmm0 -; CHECK-NEXT: psrlw $1, %xmm0 -; CHECK-NEXT: pand {{.*}}(%rip), %xmm0 -; CHECK-NEXT: psubb %xmm0, %xmm3 -; CHECK-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; CHECK-NEXT: movdqa %xmm3, %xmm2 -; CHECK-NEXT: pand %xmm0, %xmm2 -; CHECK-NEXT: psrlw $2, %xmm3 -; CHECK-NEXT: pand %xmm0, %xmm3 -; CHECK-NEXT: paddb %xmm2, %xmm3 -; CHECK-NEXT: movdqa %xmm3, %xmm0 -; CHECK-NEXT: psrlw $4, %xmm0 -; CHECK-NEXT: paddb %xmm3, %xmm0 -; CHECK-NEXT: pand {{.*}}(%rip), %xmm0 -; CHECK-NEXT: psadbw %xmm1, %xmm0 +; CHECK-NEXT: pcmpeqd %xmm1, %xmm1 +; CHECK-NEXT: paddq %xmm0, %xmm1 +; CHECK-NEXT: pandn %xmm1, %xmm0 +; CHECK-NEXT: movdqa %xmm0, %xmm1 +; CHECK-NEXT: psrlw $1, %xmm1 +; CHECK-NEXT: pand {{.*}}(%rip), %xmm1 +; CHECK-NEXT: psubb %xmm1, %xmm0 +; CHECK-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; CHECK-NEXT: movdqa %xmm0, %xmm2 +; CHECK-NEXT: pand %xmm1, %xmm2 +; CHECK-NEXT: psrlw $2, %xmm0 +; CHECK-NEXT: pand %xmm1, %xmm0 +; CHECK-NEXT: paddb %xmm2, %xmm0 +; CHECK-NEXT: movdqa %xmm0, %xmm1 +; CHECK-NEXT: psrlw $4, %xmm1 +; CHECK-NEXT: paddb %xmm0, %xmm1 +; CHECK-NEXT: pand {{.*}}(%rip), %xmm1 +; CHECK-NEXT: pxor %xmm0, %xmm0 +; CHECK-NEXT: psadbw %xmm0, %xmm1 +; CHECK-NEXT: movdqa %xmm1, %xmm0 ; CHECK-NEXT: retq %c = call <2 x i32> @llvm.cttz.v2i32(<2 x i32> %a, i1 false) ret <2 x i32> %c Index: llvm/trunk/test/CodeGen/X86/vector-tzcnt-128.ll =================================================================== --- llvm/trunk/test/CodeGen/X86/vector-tzcnt-128.ll +++ llvm/trunk/test/CodeGen/X86/vector-tzcnt-128.ll @@ -18,121 +18,112 @@ define <2 x i64> @testv2i64(<2 x i64> %in) nounwind { ; SSE2-LABEL: testv2i64: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: pxor %xmm2, %xmm2 -; SSE2-NEXT: psubq %xmm0, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE2-NEXT: paddq %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $1, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psubb %xmm0, %xmm3 -; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE2-NEXT: movdqa %xmm3, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: psrlw $2, %xmm3 -; SSE2-NEXT: pand %xmm0, %xmm3 -; SSE2-NEXT: paddb %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $4, %xmm0 -; SSE2-NEXT: paddb %xmm3, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psadbw %xmm1, %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddq %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $1, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: psubb %xmm1, %xmm0 +; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE2-NEXT: movdqa %xmm0, %xmm2 +; SSE2-NEXT: pand %xmm1, %xmm2 +; SSE2-NEXT: psrlw $2, %xmm0 +; 
SSE2-NEXT: pand %xmm1, %xmm0 +; SSE2-NEXT: paddb %xmm2, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $4, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: pxor %xmm0, %xmm0 +; SSE2-NEXT: psadbw %xmm0, %xmm1 +; SSE2-NEXT: movdqa %xmm1, %xmm0 ; SSE2-NEXT: retq ; ; SSE3-LABEL: testv2i64: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: pxor %xmm2, %xmm2 -; SSE3-NEXT: psubq %xmm0, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE3-NEXT: paddq %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $1, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psubb %xmm0, %xmm3 -; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE3-NEXT: movdqa %xmm3, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: psrlw $2, %xmm3 -; SSE3-NEXT: pand %xmm0, %xmm3 -; SSE3-NEXT: paddb %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $4, %xmm0 -; SSE3-NEXT: paddb %xmm3, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psadbw %xmm1, %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddq %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $1, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: psubb %xmm1, %xmm0 +; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE3-NEXT: movdqa %xmm0, %xmm2 +; SSE3-NEXT: pand %xmm1, %xmm2 +; SSE3-NEXT: psrlw $2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm0 +; SSE3-NEXT: paddb %xmm2, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $4, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: pxor %xmm0, %xmm0 +; SSE3-NEXT: psadbw %xmm0, %xmm1 +; SSE3-NEXT: movdqa %xmm1, %xmm0 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: testv2i64: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: pxor %xmm2, %xmm2 -; SSSE3-NEXT: psubq %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm0, %xmm2 -; SSSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSSE3-NEXT: paddq %xmm2, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddq %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm2 +; SSSE3-NEXT: pand %xmm1, %xmm2 +; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] ; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pand %xmm2, %xmm4 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm0, %xmm5 -; SSSE3-NEXT: pshufb %xmm4, %xmm5 -; SSSE3-NEXT: psrlw $4, %xmm3 -; SSSE3-NEXT: pand %xmm2, %xmm3 -; SSSE3-NEXT: pshufb %xmm3, %xmm0 -; SSSE3-NEXT: paddb %xmm5, %xmm0 -; SSSE3-NEXT: psadbw %xmm1, %xmm0 +; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: psrlw $4, %xmm0 +; SSSE3-NEXT: pand %xmm1, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm3 +; SSSE3-NEXT: paddb %xmm4, %xmm3 +; SSSE3-NEXT: pxor %xmm0, %xmm0 +; SSSE3-NEXT: psadbw %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: testv2i64: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: pxor %xmm2, %xmm2 -; SSE41-NEXT: psubq %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm0, %xmm2 -; SSE41-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE41-NEXT: paddq %xmm2, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddq %xmm0, 
%xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm2 +; SSE41-NEXT: pand %xmm1, %xmm2 +; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] ; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pand %xmm2, %xmm4 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm0, %xmm5 -; SSE41-NEXT: pshufb %xmm4, %xmm5 -; SSE41-NEXT: psrlw $4, %xmm3 -; SSE41-NEXT: pand %xmm2, %xmm3 -; SSE41-NEXT: pshufb %xmm3, %xmm0 -; SSE41-NEXT: paddb %xmm5, %xmm0 -; SSE41-NEXT: psadbw %xmm1, %xmm0 +; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: psrlw $4, %xmm0 +; SSE41-NEXT: pand %xmm1, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm3 +; SSE41-NEXT: paddb %xmm4, %xmm3 +; SSE41-NEXT: pxor %xmm0, %xmm0 +; SSE41-NEXT: psadbw %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX-LABEL: testv2i64: ; AVX: # %bb.0: -; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; AVX-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX-NEXT: vpaddq %xmm2, %xmm0, %xmm0 -; AVX-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; AVX-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: testv2i64: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubq %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddq %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: vzeroupper @@ -140,55 +131,50 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv2i64: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubq %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddq %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv2i64: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG_NOVLX-NEXT: vpaddq %xmm2, %xmm0, %xmm0 +; 
BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG_NOVLX-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vzeroupper ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv2i64: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG-NEXT: vpaddq %xmm2, %xmm0, %xmm0 +; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq ; ; X32-SSE-LABEL: testv2i64: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: pxor %xmm2, %xmm2 -; X32-SSE-NEXT: psubq %xmm0, %xmm2 -; X32-SSE-NEXT: pand %xmm0, %xmm2 -; X32-SSE-NEXT: psubq {{\.LCPI.*}}, %xmm2 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-SSE-NEXT: movdqa %xmm2, %xmm4 -; X32-SSE-NEXT: pand %xmm3, %xmm4 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-SSE-NEXT: movdqa %xmm0, %xmm5 -; X32-SSE-NEXT: pshufb %xmm4, %xmm5 -; X32-SSE-NEXT: psrlw $4, %xmm2 -; X32-SSE-NEXT: pand %xmm3, %xmm2 -; X32-SSE-NEXT: pshufb %xmm2, %xmm0 -; X32-SSE-NEXT: paddb %xmm5, %xmm0 -; X32-SSE-NEXT: psadbw %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa %xmm0, %xmm1 +; X32-SSE-NEXT: psubq {{\.LCPI.*}}, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-SSE-NEXT: movdqa %xmm0, %xmm2 +; X32-SSE-NEXT: pand %xmm1, %xmm2 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-SSE-NEXT: movdqa %xmm3, %xmm4 +; X32-SSE-NEXT: pshufb %xmm2, %xmm4 +; X32-SSE-NEXT: psrlw $4, %xmm0 +; X32-SSE-NEXT: pand %xmm1, %xmm0 +; X32-SSE-NEXT: pshufb %xmm0, %xmm3 +; X32-SSE-NEXT: paddb %xmm4, %xmm3 +; X32-SSE-NEXT: pxor %xmm0, %xmm0 +; X32-SSE-NEXT: psadbw %xmm3, %xmm0 ; X32-SSE-NEXT: retl %out = call <2 x i64> @llvm.cttz.v2i64(<2 x i64> %in, i1 0) ret <2 x i64> %out @@ -197,129 +183,121 @@ define <2 x i64> @testv2i64u(<2 x i64> %in) nounwind { ; SSE2-LABEL: testv2i64u: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: pxor %xmm2, %xmm2 -; SSE2-NEXT: psubq %xmm0, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE2-NEXT: paddq %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $1, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psubb %xmm0, %xmm3 -; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE2-NEXT: movdqa %xmm3, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: psrlw $2, %xmm3 -; SSE2-NEXT: pand %xmm0, %xmm3 -; SSE2-NEXT: paddb %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $4, %xmm0 -; SSE2-NEXT: paddb %xmm3, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psadbw %xmm1, %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddq %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $1, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: psubb %xmm1, %xmm0 +; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE2-NEXT: 
movdqa %xmm0, %xmm2 +; SSE2-NEXT: pand %xmm1, %xmm2 +; SSE2-NEXT: psrlw $2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm0 +; SSE2-NEXT: paddb %xmm2, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $4, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: pxor %xmm0, %xmm0 +; SSE2-NEXT: psadbw %xmm0, %xmm1 +; SSE2-NEXT: movdqa %xmm1, %xmm0 ; SSE2-NEXT: retq ; ; SSE3-LABEL: testv2i64u: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: pxor %xmm2, %xmm2 -; SSE3-NEXT: psubq %xmm0, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE3-NEXT: paddq %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $1, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psubb %xmm0, %xmm3 -; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE3-NEXT: movdqa %xmm3, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: psrlw $2, %xmm3 -; SSE3-NEXT: pand %xmm0, %xmm3 -; SSE3-NEXT: paddb %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $4, %xmm0 -; SSE3-NEXT: paddb %xmm3, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psadbw %xmm1, %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddq %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $1, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: psubb %xmm1, %xmm0 +; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE3-NEXT: movdqa %xmm0, %xmm2 +; SSE3-NEXT: pand %xmm1, %xmm2 +; SSE3-NEXT: psrlw $2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm0 +; SSE3-NEXT: paddb %xmm2, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $4, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: pxor %xmm0, %xmm0 +; SSE3-NEXT: psadbw %xmm0, %xmm1 +; SSE3-NEXT: movdqa %xmm1, %xmm0 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: testv2i64u: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: pxor %xmm2, %xmm2 -; SSSE3-NEXT: psubq %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm0, %xmm2 -; SSSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSSE3-NEXT: paddq %xmm2, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddq %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm2 +; SSSE3-NEXT: pand %xmm1, %xmm2 +; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] ; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pand %xmm2, %xmm4 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm0, %xmm5 -; SSSE3-NEXT: pshufb %xmm4, %xmm5 -; SSSE3-NEXT: psrlw $4, %xmm3 -; SSSE3-NEXT: pand %xmm2, %xmm3 -; SSSE3-NEXT: pshufb %xmm3, %xmm0 -; SSSE3-NEXT: paddb %xmm5, %xmm0 -; SSSE3-NEXT: psadbw %xmm1, %xmm0 +; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: psrlw $4, %xmm0 +; SSSE3-NEXT: pand %xmm1, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm3 +; SSSE3-NEXT: paddb %xmm4, %xmm3 +; SSSE3-NEXT: pxor %xmm0, %xmm0 +; SSSE3-NEXT: psadbw %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: testv2i64u: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: pxor %xmm2, %xmm2 -; SSE41-NEXT: psubq %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm0, %xmm2 -; SSE41-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE41-NEXT: paddq %xmm2, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm2 = 
[15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddq %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm2 +; SSE41-NEXT: pand %xmm1, %xmm2 +; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] ; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pand %xmm2, %xmm4 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm0, %xmm5 -; SSE41-NEXT: pshufb %xmm4, %xmm5 -; SSE41-NEXT: psrlw $4, %xmm3 -; SSE41-NEXT: pand %xmm2, %xmm3 -; SSE41-NEXT: pshufb %xmm3, %xmm0 -; SSE41-NEXT: paddb %xmm5, %xmm0 -; SSE41-NEXT: psadbw %xmm1, %xmm0 +; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: psrlw $4, %xmm0 +; SSE41-NEXT: pand %xmm1, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm3 +; SSE41-NEXT: paddb %xmm4, %xmm3 +; SSE41-NEXT: pxor %xmm0, %xmm0 +; SSE41-NEXT: psadbw %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: testv2i64u: ; AVX1: # %bb.0: -; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX1-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpaddq %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX1-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX1-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX1-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv2i64u: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX2-NEXT: vpaddq %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX2-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX2-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX2-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: retq ; @@ -346,11 +324,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: 
testv2i64u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubq %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddq %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: vzeroupper @@ -358,55 +334,50 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv2i64u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubq %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddq %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv2i64u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG_NOVLX-NEXT: vpaddq %xmm2, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG_NOVLX-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vzeroupper ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv2i64u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubq %xmm0, %xmm1, %xmm2 -; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG-NEXT: vpaddq %xmm2, %xmm0, %xmm0 +; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG-NEXT: vpaddq %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq ; ; X32-SSE-LABEL: testv2i64u: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: pxor %xmm2, %xmm2 -; X32-SSE-NEXT: psubq %xmm0, %xmm2 -; X32-SSE-NEXT: pand %xmm0, %xmm2 -; X32-SSE-NEXT: psubq {{\.LCPI.*}}, %xmm2 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-SSE-NEXT: movdqa %xmm2, %xmm4 -; X32-SSE-NEXT: pand %xmm3, %xmm4 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-SSE-NEXT: movdqa %xmm0, %xmm5 -; X32-SSE-NEXT: pshufb %xmm4, %xmm5 -; X32-SSE-NEXT: psrlw $4, %xmm2 -; X32-SSE-NEXT: pand %xmm3, %xmm2 -; X32-SSE-NEXT: pshufb %xmm2, %xmm0 -; X32-SSE-NEXT: paddb %xmm5, %xmm0 -; X32-SSE-NEXT: psadbw %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa %xmm0, %xmm1 +; X32-SSE-NEXT: psubq {{\.LCPI.*}}, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-SSE-NEXT: movdqa %xmm0, %xmm2 +; X32-SSE-NEXT: pand %xmm1, %xmm2 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-SSE-NEXT: movdqa %xmm3, %xmm4 +; X32-SSE-NEXT: pshufb %xmm2, %xmm4 +; X32-SSE-NEXT: psrlw $4, %xmm0 +; X32-SSE-NEXT: pand %xmm1, %xmm0 +; X32-SSE-NEXT: pshufb %xmm0, %xmm3 +; 
X32-SSE-NEXT: paddb %xmm4, %xmm3 +; X32-SSE-NEXT: pxor %xmm0, %xmm0 +; X32-SSE-NEXT: psadbw %xmm3, %xmm0 ; X32-SSE-NEXT: retl %out = call <2 x i64> @llvm.cttz.v2i64(<2 x i64> %in, i1 -1) ret <2 x i64> %out @@ -415,130 +386,124 @@ define <4 x i32> @testv4i32(<4 x i32> %in) nounwind { ; SSE2-LABEL: testv4i32: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: pxor %xmm2, %xmm2 -; SSE2-NEXT: psubd %xmm0, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE2-NEXT: paddd %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $1, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psubb %xmm0, %xmm3 -; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE2-NEXT: movdqa %xmm3, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: psrlw $2, %xmm3 -; SSE2-NEXT: pand %xmm0, %xmm3 -; SSE2-NEXT: paddb %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $4, %xmm0 -; SSE2-NEXT: paddb %xmm3, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddd %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $1, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: psubb %xmm1, %xmm0 +; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] ; SSE2-NEXT: movdqa %xmm0, %xmm2 -; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3] -; SSE2-NEXT: psadbw %xmm1, %xmm2 -; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] -; SSE2-NEXT: psadbw %xmm1, %xmm0 -; SSE2-NEXT: packuswb %xmm2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm2 +; SSE2-NEXT: psrlw $2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm0 +; SSE2-NEXT: paddb %xmm2, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $4, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: pxor %xmm0, %xmm0 +; SSE2-NEXT: movdqa %xmm1, %xmm2 +; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3] +; SSE2-NEXT: psadbw %xmm0, %xmm2 +; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1] +; SSE2-NEXT: psadbw %xmm0, %xmm1 +; SSE2-NEXT: packuswb %xmm2, %xmm1 +; SSE2-NEXT: movdqa %xmm1, %xmm0 ; SSE2-NEXT: retq ; ; SSE3-LABEL: testv4i32: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: pxor %xmm2, %xmm2 -; SSE3-NEXT: psubd %xmm0, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE3-NEXT: paddd %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $1, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psubb %xmm0, %xmm3 -; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE3-NEXT: movdqa %xmm3, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: psrlw $2, %xmm3 -; SSE3-NEXT: pand %xmm0, %xmm3 -; SSE3-NEXT: paddb %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $4, %xmm0 -; SSE3-NEXT: paddb %xmm3, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddd %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $1, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: psubb %xmm1, %xmm0 +; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] ; SSE3-NEXT: movdqa %xmm0, %xmm2 -; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3] -; SSE3-NEXT: psadbw %xmm1, %xmm2 -; SSE3-NEXT: punpckldq {{.*#+}} xmm0 = 
xmm0[0],xmm1[0],xmm0[1],xmm1[1] -; SSE3-NEXT: psadbw %xmm1, %xmm0 -; SSE3-NEXT: packuswb %xmm2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm2 +; SSE3-NEXT: psrlw $2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm0 +; SSE3-NEXT: paddb %xmm2, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $4, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: pxor %xmm0, %xmm0 +; SSE3-NEXT: movdqa %xmm1, %xmm2 +; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3] +; SSE3-NEXT: psadbw %xmm0, %xmm2 +; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1] +; SSE3-NEXT: psadbw %xmm0, %xmm1 +; SSE3-NEXT: packuswb %xmm2, %xmm1 +; SSE3-NEXT: movdqa %xmm1, %xmm0 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: testv4i32: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: pxor %xmm2, %xmm2 -; SSSE3-NEXT: psubd %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm0, %xmm2 -; SSSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSSE3-NEXT: paddd %xmm2, %xmm3 +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddd %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 ; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pand %xmm2, %xmm4 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm0, %xmm5 -; SSSE3-NEXT: pshufb %xmm4, %xmm5 -; SSSE3-NEXT: psrlw $4, %xmm3 +; SSSE3-NEXT: movdqa %xmm0, %xmm3 ; SSSE3-NEXT: pand %xmm2, %xmm3 -; SSSE3-NEXT: pshufb %xmm3, %xmm0 -; SSSE3-NEXT: paddb %xmm5, %xmm0 -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3] -; SSSE3-NEXT: psadbw %xmm1, %xmm2 -; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] -; SSSE3-NEXT: psadbw %xmm1, %xmm0 -; SSSE3-NEXT: packuswb %xmm2, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 +; SSSE3-NEXT: psrlw $4, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pxor %xmm0, %xmm0 +; SSSE3-NEXT: movdqa %xmm1, %xmm2 +; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3] +; SSSE3-NEXT: psadbw %xmm0, %xmm2 +; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1] +; SSSE3-NEXT: psadbw %xmm0, %xmm1 +; SSSE3-NEXT: packuswb %xmm2, %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: testv4i32: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: pxor %xmm2, %xmm2 -; SSE41-NEXT: psubd %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm0, %xmm2 -; SSE41-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE41-NEXT: paddd %xmm2, %xmm0 -; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm3 -; SSE41-NEXT: pand %xmm2, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm4, %xmm5 -; SSE41-NEXT: pshufb %xmm3, %xmm5 +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddd %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm2 +; SSE41-NEXT: pand %xmm1, %xmm2 +; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm3, %xmm4 +; SSE41-NEXT: pshufb %xmm2, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm2, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm4 -; SSE41-NEXT: paddb 
%xmm5, %xmm4 -; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm4[0],zero,xmm4[1],zero -; SSE41-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm1[2],xmm4[3],xmm1[3] -; SSE41-NEXT: psadbw %xmm1, %xmm4 +; SSE41-NEXT: pand %xmm1, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm3 +; SSE41-NEXT: paddb %xmm4, %xmm3 +; SSE41-NEXT: pxor %xmm1, %xmm1 +; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero +; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3] +; SSE41-NEXT: psadbw %xmm1, %xmm3 ; SSE41-NEXT: psadbw %xmm1, %xmm0 -; SSE41-NEXT: packuswb %xmm4, %xmm0 +; SSE41-NEXT: packuswb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: testv4i32: ; AVX1: # %bb.0: -; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX1-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpaddd %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX1-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX1-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -548,19 +513,18 @@ ; ; AVX2-LABEL: testv4i32: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX2-NEXT: vpaddd %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX2-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX2-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX2-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -570,19 +534,18 @@ ; ; AVX512CDVL-LABEL: testv4i32: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; AVX512CDVL-NEXT: 
vpand %xmm2, %xmm0, %xmm0 -; AVX512CDVL-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX512CDVL-NEXT: vpaddd %xmm2, %xmm0, %xmm0 -; AVX512CDVL-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CDVL-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX512CDVL-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CDVL-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX512CDVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX512CDVL-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX512CDVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX512CDVL-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CDVL-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX512CDVL-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CDVL-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX512CDVL-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX512CDVL-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX512CDVL-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX512CDVL-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX512CDVL-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX512CDVL-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX512CDVL-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CDVL-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; AVX512CDVL-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; AVX512CDVL-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] @@ -592,19 +555,18 @@ ; ; AVX512CD-LABEL: testv4i32: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; AVX512CD-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX512CD-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX512CD-NEXT: vpaddd %xmm2, %xmm0, %xmm0 -; AVX512CD-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CD-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX512CD-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CD-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX512CD-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX512CD-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX512CD-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX512CD-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CD-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX512CD-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CD-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX512CD-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX512CD-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX512CD-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX512CD-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX512CD-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX512CD-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX512CD-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CD-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; AVX512CD-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; AVX512CD-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -614,11 +576,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv4i32: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubd %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddd %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: vzeroupper @@ -626,22 +586,19 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv4i32: ; AVX512VPOPCNTDQVL: # 
%bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubd %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddd %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv4i32: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG_NOVLX-NEXT: vpaddd %xmm2, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG_NOVLX-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -652,12 +609,11 @@ ; ; BITALG-LABEL: testv4i32: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG-NEXT: vpaddd %xmm2, %xmm0, %xmm0 +; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; BITALG-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] @@ -667,27 +623,25 @@ ; ; X32-SSE-LABEL: testv4i32: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: pxor %xmm2, %xmm2 -; X32-SSE-NEXT: psubd %xmm0, %xmm2 -; X32-SSE-NEXT: pand %xmm0, %xmm2 -; X32-SSE-NEXT: pcmpeqd %xmm0, %xmm0 -; X32-SSE-NEXT: paddd %xmm2, %xmm0 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-SSE-NEXT: movdqa %xmm0, %xmm3 -; X32-SSE-NEXT: pand %xmm2, %xmm3 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-SSE-NEXT: movdqa %xmm4, %xmm5 -; X32-SSE-NEXT: pshufb %xmm3, %xmm5 +; X32-SSE-NEXT: pcmpeqd %xmm1, %xmm1 +; X32-SSE-NEXT: paddd %xmm0, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-SSE-NEXT: movdqa %xmm0, %xmm2 +; X32-SSE-NEXT: pand %xmm1, %xmm2 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-SSE-NEXT: movdqa %xmm3, %xmm4 +; X32-SSE-NEXT: pshufb %xmm2, %xmm4 ; X32-SSE-NEXT: psrlw $4, %xmm0 -; X32-SSE-NEXT: pand %xmm2, %xmm0 -; X32-SSE-NEXT: pshufb %xmm0, %xmm4 -; X32-SSE-NEXT: paddb %xmm5, %xmm4 -; X32-SSE-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm4[0],zero,xmm4[1],zero -; X32-SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm1[2],xmm4[3],xmm1[3] -; X32-SSE-NEXT: psadbw %xmm1, %xmm4 +; X32-SSE-NEXT: pand %xmm1, %xmm0 +; X32-SSE-NEXT: pshufb %xmm0, %xmm3 +; X32-SSE-NEXT: paddb %xmm4, %xmm3 +; X32-SSE-NEXT: pxor %xmm1, %xmm1 +; X32-SSE-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero +; X32-SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3] +; X32-SSE-NEXT: psadbw %xmm1, 
%xmm3 ; X32-SSE-NEXT: psadbw %xmm1, %xmm0 -; X32-SSE-NEXT: packuswb %xmm4, %xmm0 +; X32-SSE-NEXT: packuswb %xmm3, %xmm0 ; X32-SSE-NEXT: retl %out = call <4 x i32> @llvm.cttz.v4i32(<4 x i32> %in, i1 0) ret <4 x i32> %out @@ -696,130 +650,124 @@ define <4 x i32> @testv4i32u(<4 x i32> %in) nounwind { ; SSE2-LABEL: testv4i32u: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: pxor %xmm2, %xmm2 -; SSE2-NEXT: psubd %xmm0, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE2-NEXT: paddd %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $1, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psubb %xmm0, %xmm3 -; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE2-NEXT: movdqa %xmm3, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: psrlw $2, %xmm3 -; SSE2-NEXT: pand %xmm0, %xmm3 -; SSE2-NEXT: paddb %xmm2, %xmm3 -; SSE2-NEXT: movdqa %xmm3, %xmm0 -; SSE2-NEXT: psrlw $4, %xmm0 -; SSE2-NEXT: paddb %xmm3, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddd %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $1, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: psubb %xmm1, %xmm0 +; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] ; SSE2-NEXT: movdqa %xmm0, %xmm2 -; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3] -; SSE2-NEXT: psadbw %xmm1, %xmm2 -; SSE2-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] -; SSE2-NEXT: psadbw %xmm1, %xmm0 -; SSE2-NEXT: packuswb %xmm2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm2 +; SSE2-NEXT: psrlw $2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm0 +; SSE2-NEXT: paddb %xmm2, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $4, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: pxor %xmm0, %xmm0 +; SSE2-NEXT: movdqa %xmm1, %xmm2 +; SSE2-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3] +; SSE2-NEXT: psadbw %xmm0, %xmm2 +; SSE2-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1] +; SSE2-NEXT: psadbw %xmm0, %xmm1 +; SSE2-NEXT: packuswb %xmm2, %xmm1 +; SSE2-NEXT: movdqa %xmm1, %xmm0 ; SSE2-NEXT: retq ; ; SSE3-LABEL: testv4i32u: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: pxor %xmm2, %xmm2 -; SSE3-NEXT: psubd %xmm0, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSE3-NEXT: paddd %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $1, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psubb %xmm0, %xmm3 -; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE3-NEXT: movdqa %xmm3, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: psrlw $2, %xmm3 -; SSE3-NEXT: pand %xmm0, %xmm3 -; SSE3-NEXT: paddb %xmm2, %xmm3 -; SSE3-NEXT: movdqa %xmm3, %xmm0 -; SSE3-NEXT: psrlw $4, %xmm0 -; SSE3-NEXT: paddb %xmm3, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddd %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $1, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: psubb %xmm1, %xmm0 +; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] ; SSE3-NEXT: movdqa %xmm0, %xmm2 -; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3] -; SSE3-NEXT: psadbw %xmm1, %xmm2 -; SSE3-NEXT: punpckldq {{.*#+}} 
xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] -; SSE3-NEXT: psadbw %xmm1, %xmm0 -; SSE3-NEXT: packuswb %xmm2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm2 +; SSE3-NEXT: psrlw $2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm0 +; SSE3-NEXT: paddb %xmm2, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $4, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: pxor %xmm0, %xmm0 +; SSE3-NEXT: movdqa %xmm1, %xmm2 +; SSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3] +; SSE3-NEXT: psadbw %xmm0, %xmm2 +; SSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1] +; SSE3-NEXT: psadbw %xmm0, %xmm1 +; SSE3-NEXT: packuswb %xmm2, %xmm1 +; SSE3-NEXT: movdqa %xmm1, %xmm0 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: testv4i32u: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: pxor %xmm2, %xmm2 -; SSSE3-NEXT: psubd %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm0, %xmm2 -; SSSE3-NEXT: pcmpeqd %xmm3, %xmm3 -; SSSE3-NEXT: paddd %xmm2, %xmm3 +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddd %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 ; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pand %xmm2, %xmm4 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm0, %xmm5 -; SSSE3-NEXT: pshufb %xmm4, %xmm5 -; SSSE3-NEXT: psrlw $4, %xmm3 +; SSSE3-NEXT: movdqa %xmm0, %xmm3 ; SSSE3-NEXT: pand %xmm2, %xmm3 -; SSSE3-NEXT: pshufb %xmm3, %xmm0 -; SSSE3-NEXT: paddb %xmm5, %xmm0 -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm1[2],xmm2[3],xmm1[3] -; SSSE3-NEXT: psadbw %xmm1, %xmm2 -; SSSE3-NEXT: punpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] -; SSSE3-NEXT: psadbw %xmm1, %xmm0 -; SSSE3-NEXT: packuswb %xmm2, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 +; SSSE3-NEXT: psrlw $4, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pxor %xmm0, %xmm0 +; SSSE3-NEXT: movdqa %xmm1, %xmm2 +; SSSE3-NEXT: punpckhdq {{.*#+}} xmm2 = xmm2[2],xmm0[2],xmm2[3],xmm0[3] +; SSSE3-NEXT: psadbw %xmm0, %xmm2 +; SSSE3-NEXT: punpckldq {{.*#+}} xmm1 = xmm1[0],xmm0[0],xmm1[1],xmm0[1] +; SSSE3-NEXT: psadbw %xmm0, %xmm1 +; SSSE3-NEXT: packuswb %xmm2, %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: testv4i32u: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: pxor %xmm2, %xmm2 -; SSE41-NEXT: psubd %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm0, %xmm2 -; SSE41-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE41-NEXT: paddd %xmm2, %xmm0 -; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm3 -; SSE41-NEXT: pand %xmm2, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm4, %xmm5 -; SSE41-NEXT: pshufb %xmm3, %xmm5 +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddd %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm2 +; SSE41-NEXT: pand %xmm1, %xmm2 +; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm3, %xmm4 +; SSE41-NEXT: pshufb %xmm2, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm2, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm4 -; 
SSE41-NEXT: paddb %xmm5, %xmm4 -; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm4[0],zero,xmm4[1],zero -; SSE41-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm1[2],xmm4[3],xmm1[3] -; SSE41-NEXT: psadbw %xmm1, %xmm4 +; SSE41-NEXT: pand %xmm1, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm3 +; SSE41-NEXT: paddb %xmm4, %xmm3 +; SSE41-NEXT: pxor %xmm1, %xmm1 +; SSE41-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero +; SSE41-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3] +; SSE41-NEXT: psadbw %xmm1, %xmm3 ; SSE41-NEXT: psadbw %xmm1, %xmm0 -; SSE41-NEXT: packuswb %xmm4, %xmm0 +; SSE41-NEXT: packuswb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: testv4i32u: ; AVX1: # %bb.0: -; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX1-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpaddd %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX1-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX1-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; AVX1-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -829,19 +777,18 @@ ; ; AVX2-LABEL: testv4i32u: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; AVX2-NEXT: vpaddd %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vmovdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm3 -; AVX2-NEXT: vmovdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %xmm3, %xmm4, %xmm3 +; AVX2-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX2-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX2-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm2 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %xmm2, %xmm3, %xmm2 ; AVX2-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX2-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpshufb %xmm0, %xmm4, %xmm0 -; AVX2-NEXT: vpaddb %xmm3, %xmm0, %xmm0 +; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 +; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; AVX2-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; AVX2-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -872,11 +819,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv4i32u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubd 
%xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddd %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: vzeroupper @@ -884,22 +829,19 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv4i32u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubd %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddd %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv4i32u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; BITALG_NOVLX-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG_NOVLX-NEXT: vpaddd %xmm2, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG_NOVLX-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; BITALG_NOVLX-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; BITALG_NOVLX-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero @@ -910,12 +852,11 @@ ; ; BITALG-LABEL: testv4i32u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubd %xmm0, %xmm1, %xmm2 -; BITALG-NEXT: vpand %xmm2, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 -; BITALG-NEXT: vpaddd %xmm2, %xmm0, %xmm0 +; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; BITALG-NEXT: vpaddd %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm1[2],xmm0[3],xmm1[3] ; BITALG-NEXT: vpsadbw %xmm1, %xmm2, %xmm2 ; BITALG-NEXT: vpunpckldq {{.*#+}} xmm0 = xmm0[0],xmm1[0],xmm0[1],xmm1[1] @@ -925,27 +866,25 @@ ; ; X32-SSE-LABEL: testv4i32u: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: pxor %xmm2, %xmm2 -; X32-SSE-NEXT: psubd %xmm0, %xmm2 -; X32-SSE-NEXT: pand %xmm0, %xmm2 -; X32-SSE-NEXT: pcmpeqd %xmm0, %xmm0 -; X32-SSE-NEXT: paddd %xmm2, %xmm0 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-SSE-NEXT: movdqa %xmm0, %xmm3 -; X32-SSE-NEXT: pand %xmm2, %xmm3 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-SSE-NEXT: movdqa %xmm4, %xmm5 -; X32-SSE-NEXT: pshufb %xmm3, %xmm5 +; X32-SSE-NEXT: pcmpeqd %xmm1, %xmm1 +; X32-SSE-NEXT: paddd %xmm0, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-SSE-NEXT: movdqa %xmm0, %xmm2 +; X32-SSE-NEXT: pand %xmm1, %xmm2 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-SSE-NEXT: movdqa %xmm3, %xmm4 +; X32-SSE-NEXT: pshufb %xmm2, %xmm4 ; X32-SSE-NEXT: psrlw $4, %xmm0 -; X32-SSE-NEXT: pand %xmm2, %xmm0 -; X32-SSE-NEXT: pshufb %xmm0, %xmm4 -; X32-SSE-NEXT: 
paddb %xmm5, %xmm4 -; X32-SSE-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm4[0],zero,xmm4[1],zero -; X32-SSE-NEXT: punpckhdq {{.*#+}} xmm4 = xmm4[2],xmm1[2],xmm4[3],xmm1[3] -; X32-SSE-NEXT: psadbw %xmm1, %xmm4 +; X32-SSE-NEXT: pand %xmm1, %xmm0 +; X32-SSE-NEXT: pshufb %xmm0, %xmm3 +; X32-SSE-NEXT: paddb %xmm4, %xmm3 +; X32-SSE-NEXT: pxor %xmm1, %xmm1 +; X32-SSE-NEXT: pmovzxdq {{.*#+}} xmm0 = xmm3[0],zero,xmm3[1],zero +; X32-SSE-NEXT: punpckhdq {{.*#+}} xmm3 = xmm3[2],xmm1[2],xmm3[3],xmm1[3] +; X32-SSE-NEXT: psadbw %xmm1, %xmm3 ; X32-SSE-NEXT: psadbw %xmm1, %xmm0 -; X32-SSE-NEXT: packuswb %xmm4, %xmm0 +; X32-SSE-NEXT: packuswb %xmm3, %xmm0 ; X32-SSE-NEXT: retl %out = call <4 x i32> @llvm.cttz.v4i32(<4 x i32> %in, i1 -1) ret <4 x i32> %out @@ -954,11 +893,9 @@ define <8 x i16> @testv8i16(<8 x i16> %in) nounwind { ; SSE2-LABEL: testv8i16: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: psubw %xmm0, %xmm1 -; SSE2-NEXT: pand %xmm0, %xmm1 -; SSE2-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE2-NEXT: paddw %xmm1, %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddw %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 ; SSE2-NEXT: movdqa %xmm0, %xmm1 ; SSE2-NEXT: psrlw $1, %xmm1 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 @@ -981,11 +918,9 @@ ; ; SSE3-LABEL: testv8i16: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: psubw %xmm0, %xmm1 -; SSE3-NEXT: pand %xmm0, %xmm1 -; SSE3-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE3-NEXT: paddw %xmm1, %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddw %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 ; SSE3-NEXT: movdqa %xmm0, %xmm1 ; SSE3-NEXT: psrlw $1, %xmm1 ; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 @@ -1008,11 +943,9 @@ ; ; SSSE3-LABEL: testv8i16: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: psubw %xmm0, %xmm1 -; SSSE3-NEXT: pand %xmm0, %xmm1 -; SSSE3-NEXT: pcmpeqd %xmm0, %xmm0 -; SSSE3-NEXT: paddw %xmm1, %xmm0 +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddw %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 ; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; SSSE3-NEXT: movdqa %xmm0, %xmm2 ; SSSE3-NEXT: pand %xmm1, %xmm2 @@ -1031,11 +964,9 @@ ; ; SSE41-LABEL: testv8i16: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: psubw %xmm0, %xmm1 -; SSE41-NEXT: pand %xmm0, %xmm1 -; SSE41-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE41-NEXT: paddw %xmm1, %xmm0 +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddw %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 ; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; SSE41-NEXT: movdqa %xmm0, %xmm2 ; SSE41-NEXT: pand %xmm1, %xmm2 @@ -1054,11 +985,9 @@ ; ; AVX-LABEL: testv8i16: ; AVX: # %bb.0: -; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; AVX-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX-NEXT: vpand %xmm1, %xmm0, %xmm2 ; AVX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1074,11 +1003,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv8i16: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: 
vpaddw %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0 @@ -1088,11 +1015,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv8i16: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0 @@ -1101,11 +1026,9 @@ ; ; BITALG_NOVLX-LABEL: testv8i16: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; BITALG_NOVLX-NEXT: vzeroupper @@ -1113,21 +1036,17 @@ ; ; BITALG-LABEL: testv8i16: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; BITALG-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 ; BITALG-NEXT: retq ; ; X32-SSE-LABEL: testv8i16: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: psubw %xmm0, %xmm1 -; X32-SSE-NEXT: pand %xmm0, %xmm1 -; X32-SSE-NEXT: pcmpeqd %xmm0, %xmm0 -; X32-SSE-NEXT: paddw %xmm1, %xmm0 +; X32-SSE-NEXT: pcmpeqd %xmm1, %xmm1 +; X32-SSE-NEXT: paddw %xmm0, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; X32-SSE-NEXT: movdqa %xmm0, %xmm2 ; X32-SSE-NEXT: pand %xmm1, %xmm2 @@ -1150,11 +1069,9 @@ define <8 x i16> @testv8i16u(<8 x i16> %in) nounwind { ; SSE2-LABEL: testv8i16u: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: psubw %xmm0, %xmm1 -; SSE2-NEXT: pand %xmm0, %xmm1 -; SSE2-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE2-NEXT: paddw %xmm1, %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddw %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 ; SSE2-NEXT: movdqa %xmm0, %xmm1 ; SSE2-NEXT: psrlw $1, %xmm1 ; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 @@ -1177,11 +1094,9 @@ ; ; SSE3-LABEL: testv8i16u: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: psubw %xmm0, %xmm1 -; SSE3-NEXT: pand %xmm0, %xmm1 -; SSE3-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE3-NEXT: paddw %xmm1, %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddw %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 ; SSE3-NEXT: movdqa %xmm0, %xmm1 ; SSE3-NEXT: psrlw $1, %xmm1 ; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 @@ -1204,11 +1119,9 @@ ; ; SSSE3-LABEL: testv8i16u: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor 
%xmm1, %xmm1 -; SSSE3-NEXT: psubw %xmm0, %xmm1 -; SSSE3-NEXT: pand %xmm0, %xmm1 -; SSSE3-NEXT: pcmpeqd %xmm0, %xmm0 -; SSSE3-NEXT: paddw %xmm1, %xmm0 +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddw %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 ; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; SSSE3-NEXT: movdqa %xmm0, %xmm2 ; SSSE3-NEXT: pand %xmm1, %xmm2 @@ -1227,11 +1140,9 @@ ; ; SSE41-LABEL: testv8i16u: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: psubw %xmm0, %xmm1 -; SSE41-NEXT: pand %xmm0, %xmm1 -; SSE41-NEXT: pcmpeqd %xmm0, %xmm0 -; SSE41-NEXT: paddw %xmm1, %xmm0 +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddw %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 ; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; SSE41-NEXT: movdqa %xmm0, %xmm2 ; SSE41-NEXT: pand %xmm1, %xmm2 @@ -1250,11 +1161,9 @@ ; ; AVX-LABEL: testv8i16u: ; AVX: # %bb.0: -; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; AVX-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX-NEXT: vpand %xmm1, %xmm0, %xmm2 ; AVX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1270,11 +1179,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv8i16u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0 @@ -1284,11 +1191,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv8i16u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} ymm0 = xmm0[0],zero,xmm0[1],zero,xmm0[2],zero,xmm0[3],zero,xmm0[4],zero,xmm0[5],zero,xmm0[6],zero,xmm0[7],zero ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovdw %ymm0, %xmm0 @@ -1297,11 +1202,9 @@ ; ; BITALG_NOVLX-LABEL: testv8i16u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; BITALG_NOVLX-NEXT: vzeroupper @@ -1309,21 +1212,17 @@ ; ; BITALG-LABEL: testv8i16u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; 
BITALG-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpaddw %xmm1, %xmm0, %xmm0 +; BITALG-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 ; BITALG-NEXT: retq ; ; X32-SSE-LABEL: testv8i16u: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: psubw %xmm0, %xmm1 -; X32-SSE-NEXT: pand %xmm0, %xmm1 -; X32-SSE-NEXT: pcmpeqd %xmm0, %xmm0 -; X32-SSE-NEXT: paddw %xmm1, %xmm0 +; X32-SSE-NEXT: pcmpeqd %xmm1, %xmm1 +; X32-SSE-NEXT: paddw %xmm0, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 ; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; X32-SSE-NEXT: movdqa %xmm0, %xmm2 ; X32-SSE-NEXT: pand %xmm1, %xmm2 @@ -1346,95 +1245,89 @@ define <16 x i8> @testv16i8(<16 x i8> %in) nounwind { ; SSE2-LABEL: testv16i8: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: psubb %xmm0, %xmm1 -; SSE2-NEXT: pand %xmm0, %xmm1 -; SSE2-NEXT: pcmpeqd %xmm2, %xmm2 -; SSE2-NEXT: paddb %xmm1, %xmm2 -; SSE2-NEXT: movdqa %xmm2, %xmm0 -; SSE2-NEXT: psrlw $1, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psubb %xmm0, %xmm2 -; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE2-NEXT: movdqa %xmm2, %xmm1 -; SSE2-NEXT: pand %xmm0, %xmm1 -; SSE2-NEXT: psrlw $2, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: paddb %xmm1, %xmm2 -; SSE2-NEXT: movdqa %xmm2, %xmm0 -; SSE2-NEXT: psrlw $4, %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $1, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: psubb %xmm1, %xmm0 +; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE2-NEXT: movdqa %xmm0, %xmm2 +; SSE2-NEXT: pand %xmm1, %xmm2 +; SSE2-NEXT: psrlw $2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm0 ; SSE2-NEXT: paddb %xmm2, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $4, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: movdqa %xmm1, %xmm0 ; SSE2-NEXT: retq ; ; SSE3-LABEL: testv16i8: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: psubb %xmm0, %xmm1 -; SSE3-NEXT: pand %xmm0, %xmm1 -; SSE3-NEXT: pcmpeqd %xmm2, %xmm2 -; SSE3-NEXT: paddb %xmm1, %xmm2 -; SSE3-NEXT: movdqa %xmm2, %xmm0 -; SSE3-NEXT: psrlw $1, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psubb %xmm0, %xmm2 -; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE3-NEXT: movdqa %xmm2, %xmm1 -; SSE3-NEXT: pand %xmm0, %xmm1 -; SSE3-NEXT: psrlw $2, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: paddb %xmm1, %xmm2 -; SSE3-NEXT: movdqa %xmm2, %xmm0 -; SSE3-NEXT: psrlw $4, %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $1, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: psubb %xmm1, %xmm0 +; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE3-NEXT: movdqa %xmm0, %xmm2 +; SSE3-NEXT: pand %xmm1, %xmm2 +; SSE3-NEXT: psrlw $2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm0 ; SSE3-NEXT: paddb %xmm2, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $4, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pand 
{{.*}}(%rip), %xmm1 +; SSE3-NEXT: movdqa %xmm1, %xmm0 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: testv16i8: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: psubb %xmm0, %xmm1 -; SSSE3-NEXT: pand %xmm0, %xmm1 -; SSSE3-NEXT: pcmpeqd %xmm2, %xmm2 -; SSSE3-NEXT: paddb %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm2, %xmm3 -; SSSE3-NEXT: pand %xmm1, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm0, %xmm4 +; SSSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddb %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 ; SSSE3-NEXT: pshufb %xmm3, %xmm4 -; SSSE3-NEXT: psrlw $4, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: pshufb %xmm2, %xmm0 -; SSSE3-NEXT: paddb %xmm4, %xmm0 +; SSSE3-NEXT: psrlw $4, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: testv16i8: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: psubb %xmm0, %xmm1 -; SSE41-NEXT: pand %xmm0, %xmm1 -; SSE41-NEXT: pcmpeqd %xmm2, %xmm2 -; SSE41-NEXT: paddb %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm2, %xmm3 -; SSE41-NEXT: pand %xmm1, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm0, %xmm4 +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddb %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 ; SSE41-NEXT: pshufb %xmm3, %xmm4 -; SSE41-NEXT: psrlw $4, %xmm2 -; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: pshufb %xmm2, %xmm0 -; SSE41-NEXT: paddb %xmm4, %xmm0 +; SSE41-NEXT: psrlw $4, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX-LABEL: testv16i8: ; AVX: # %bb.0: -; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; AVX-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; AVX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX-NEXT: vpand %xmm1, %xmm0, %xmm2 ; AVX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1447,11 +1340,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv16i8: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = 
xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0 @@ -1460,11 +1351,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv16i8: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0 @@ -1473,11 +1362,9 @@ ; ; BITALG_NOVLX-LABEL: testv16i8: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; BITALG_NOVLX-NEXT: vzeroupper @@ -1485,31 +1372,28 @@ ; ; BITALG-LABEL: testv16i8: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; BITALG-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: retq ; ; X32-SSE-LABEL: testv16i8: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: psubb %xmm0, %xmm1 -; X32-SSE-NEXT: pand %xmm0, %xmm1 -; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2 -; X32-SSE-NEXT: paddb %xmm1, %xmm2 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-SSE-NEXT: movdqa %xmm2, %xmm3 -; X32-SSE-NEXT: pand %xmm1, %xmm3 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-SSE-NEXT: movdqa %xmm0, %xmm4 +; X32-SSE-NEXT: pcmpeqd %xmm1, %xmm1 +; X32-SSE-NEXT: paddb %xmm0, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-SSE-NEXT: movdqa %xmm0, %xmm3 +; X32-SSE-NEXT: pand %xmm2, %xmm3 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-SSE-NEXT: movdqa %xmm1, %xmm4 ; X32-SSE-NEXT: pshufb %xmm3, %xmm4 -; X32-SSE-NEXT: psrlw $4, %xmm2 -; X32-SSE-NEXT: pand %xmm1, %xmm2 -; X32-SSE-NEXT: pshufb %xmm2, %xmm0 -; X32-SSE-NEXT: paddb %xmm4, %xmm0 +; X32-SSE-NEXT: psrlw $4, %xmm0 +; 
X32-SSE-NEXT: pand %xmm2, %xmm0 +; X32-SSE-NEXT: pshufb %xmm0, %xmm1 +; X32-SSE-NEXT: paddb %xmm4, %xmm1 +; X32-SSE-NEXT: movdqa %xmm1, %xmm0 ; X32-SSE-NEXT: retl %out = call <16 x i8> @llvm.cttz.v16i8(<16 x i8> %in, i1 0) ret <16 x i8> %out @@ -1518,95 +1402,89 @@ define <16 x i8> @testv16i8u(<16 x i8> %in) nounwind { ; SSE2-LABEL: testv16i8u: ; SSE2: # %bb.0: -; SSE2-NEXT: pxor %xmm1, %xmm1 -; SSE2-NEXT: psubb %xmm0, %xmm1 -; SSE2-NEXT: pand %xmm0, %xmm1 -; SSE2-NEXT: pcmpeqd %xmm2, %xmm2 -; SSE2-NEXT: paddb %xmm1, %xmm2 -; SSE2-NEXT: movdqa %xmm2, %xmm0 -; SSE2-NEXT: psrlw $1, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE2-NEXT: psubb %xmm0, %xmm2 -; SSE2-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE2-NEXT: movdqa %xmm2, %xmm1 -; SSE2-NEXT: pand %xmm0, %xmm1 -; SSE2-NEXT: psrlw $2, %xmm2 -; SSE2-NEXT: pand %xmm0, %xmm2 -; SSE2-NEXT: paddb %xmm1, %xmm2 -; SSE2-NEXT: movdqa %xmm2, %xmm0 -; SSE2-NEXT: psrlw $4, %xmm0 +; SSE2-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pandn %xmm1, %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $1, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: psubb %xmm1, %xmm0 +; SSE2-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE2-NEXT: movdqa %xmm0, %xmm2 +; SSE2-NEXT: pand %xmm1, %xmm2 +; SSE2-NEXT: psrlw $2, %xmm0 +; SSE2-NEXT: pand %xmm1, %xmm0 ; SSE2-NEXT: paddb %xmm2, %xmm0 -; SSE2-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE2-NEXT: movdqa %xmm0, %xmm1 +; SSE2-NEXT: psrlw $4, %xmm1 +; SSE2-NEXT: paddb %xmm0, %xmm1 +; SSE2-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE2-NEXT: movdqa %xmm1, %xmm0 ; SSE2-NEXT: retq ; ; SSE3-LABEL: testv16i8u: ; SSE3: # %bb.0: -; SSE3-NEXT: pxor %xmm1, %xmm1 -; SSE3-NEXT: psubb %xmm0, %xmm1 -; SSE3-NEXT: pand %xmm0, %xmm1 -; SSE3-NEXT: pcmpeqd %xmm2, %xmm2 -; SSE3-NEXT: paddb %xmm1, %xmm2 -; SSE3-NEXT: movdqa %xmm2, %xmm0 -; SSE3-NEXT: psrlw $1, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 -; SSE3-NEXT: psubb %xmm0, %xmm2 -; SSE3-NEXT: movdqa {{.*#+}} xmm0 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] -; SSE3-NEXT: movdqa %xmm2, %xmm1 -; SSE3-NEXT: pand %xmm0, %xmm1 -; SSE3-NEXT: psrlw $2, %xmm2 -; SSE3-NEXT: pand %xmm0, %xmm2 -; SSE3-NEXT: paddb %xmm1, %xmm2 -; SSE3-NEXT: movdqa %xmm2, %xmm0 -; SSE3-NEXT: psrlw $4, %xmm0 +; SSE3-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pandn %xmm1, %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $1, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: psubb %xmm1, %xmm0 +; SSE3-NEXT: movdqa {{.*#+}} xmm1 = [51,51,51,51,51,51,51,51,51,51,51,51,51,51,51,51] +; SSE3-NEXT: movdqa %xmm0, %xmm2 +; SSE3-NEXT: pand %xmm1, %xmm2 +; SSE3-NEXT: psrlw $2, %xmm0 +; SSE3-NEXT: pand %xmm1, %xmm0 ; SSE3-NEXT: paddb %xmm2, %xmm0 -; SSE3-NEXT: pand {{.*}}(%rip), %xmm0 +; SSE3-NEXT: movdqa %xmm0, %xmm1 +; SSE3-NEXT: psrlw $4, %xmm1 +; SSE3-NEXT: paddb %xmm0, %xmm1 +; SSE3-NEXT: pand {{.*}}(%rip), %xmm1 +; SSE3-NEXT: movdqa %xmm1, %xmm0 ; SSE3-NEXT: retq ; ; SSSE3-LABEL: testv16i8u: ; SSSE3: # %bb.0: -; SSSE3-NEXT: pxor %xmm1, %xmm1 -; SSSE3-NEXT: psubb %xmm0, %xmm1 -; SSSE3-NEXT: pand %xmm0, %xmm1 -; SSSE3-NEXT: pcmpeqd %xmm2, %xmm2 -; SSSE3-NEXT: paddb %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm2, %xmm3 -; SSSE3-NEXT: pand %xmm1, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm0, %xmm4 +; SSSE3-NEXT: 
pcmpeqd %xmm1, %xmm1 +; SSSE3-NEXT: paddb %xmm0, %xmm1 +; SSSE3-NEXT: pandn %xmm1, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 ; SSSE3-NEXT: pshufb %xmm3, %xmm4 -; SSSE3-NEXT: psrlw $4, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: pshufb %xmm2, %xmm0 -; SSSE3-NEXT: paddb %xmm4, %xmm0 +; SSSE3-NEXT: psrlw $4, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: testv16i8u: ; SSE41: # %bb.0: -; SSE41-NEXT: pxor %xmm1, %xmm1 -; SSE41-NEXT: psubb %xmm0, %xmm1 -; SSE41-NEXT: pand %xmm0, %xmm1 -; SSE41-NEXT: pcmpeqd %xmm2, %xmm2 -; SSE41-NEXT: paddb %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm2, %xmm3 -; SSE41-NEXT: pand %xmm1, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm0, %xmm4 +; SSE41-NEXT: pcmpeqd %xmm1, %xmm1 +; SSE41-NEXT: paddb %xmm0, %xmm1 +; SSE41-NEXT: pandn %xmm1, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 ; SSE41-NEXT: pshufb %xmm3, %xmm4 -; SSE41-NEXT: psrlw $4, %xmm2 -; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: pshufb %xmm2, %xmm0 -; SSE41-NEXT: paddb %xmm4, %xmm0 +; SSE41-NEXT: psrlw $4, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX-LABEL: testv16i8u: ; AVX: # %bb.0: -; AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; AVX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; AVX-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; AVX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX-NEXT: vpand %xmm1, %xmm0, %xmm2 ; AVX-NEXT: vmovdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1619,11 +1497,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv16i8u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovdb %zmm0, %xmm0 @@ -1632,11 +1508,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv16i8u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; 
AVX512VPOPCNTDQVL-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovzxbd {{.*#+}} zmm0 = xmm0[0],zero,zero,zero,xmm0[1],zero,zero,zero,xmm0[2],zero,zero,zero,xmm0[3],zero,zero,zero,xmm0[4],zero,zero,zero,xmm0[5],zero,zero,zero,xmm0[6],zero,zero,zero,xmm0[7],zero,zero,zero,xmm0[8],zero,zero,zero,xmm0[9],zero,zero,zero,xmm0[10],zero,zero,zero,xmm0[11],zero,zero,zero,xmm0[12],zero,zero,zero,xmm0[13],zero,zero,zero,xmm0[14],zero,zero,zero,xmm0[15],zero,zero,zero ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovdb %zmm0, %xmm0 @@ -1645,11 +1519,9 @@ ; ; BITALG_NOVLX-LABEL: testv16i8u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; BITALG_NOVLX-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; BITALG_NOVLX-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $xmm0 killed $xmm0 killed $zmm0 ; BITALG_NOVLX-NEXT: vzeroupper @@ -1657,31 +1529,28 @@ ; ; BITALG-LABEL: testv16i8u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubb %xmm0, %xmm1, %xmm1 -; BITALG-NEXT: vpand %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpaddb %xmm1, %xmm0, %xmm0 +; BITALG-NEXT: vpaddb %xmm1, %xmm0, %xmm1 +; BITALG-NEXT: vpandn %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: retq ; ; X32-SSE-LABEL: testv16i8u: ; X32-SSE: # %bb.0: -; X32-SSE-NEXT: pxor %xmm1, %xmm1 -; X32-SSE-NEXT: psubb %xmm0, %xmm1 -; X32-SSE-NEXT: pand %xmm0, %xmm1 -; X32-SSE-NEXT: pcmpeqd %xmm2, %xmm2 -; X32-SSE-NEXT: paddb %xmm1, %xmm2 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-SSE-NEXT: movdqa %xmm2, %xmm3 -; X32-SSE-NEXT: pand %xmm1, %xmm3 -; X32-SSE-NEXT: movdqa {{.*#+}} xmm0 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-SSE-NEXT: movdqa %xmm0, %xmm4 +; X32-SSE-NEXT: pcmpeqd %xmm1, %xmm1 +; X32-SSE-NEXT: paddb %xmm0, %xmm1 +; X32-SSE-NEXT: pandn %xmm1, %xmm0 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-SSE-NEXT: movdqa %xmm0, %xmm3 +; X32-SSE-NEXT: pand %xmm2, %xmm3 +; X32-SSE-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-SSE-NEXT: movdqa %xmm1, %xmm4 ; X32-SSE-NEXT: pshufb %xmm3, %xmm4 -; X32-SSE-NEXT: psrlw $4, %xmm2 -; X32-SSE-NEXT: pand %xmm1, %xmm2 -; X32-SSE-NEXT: pshufb %xmm2, %xmm0 -; X32-SSE-NEXT: paddb %xmm4, %xmm0 +; X32-SSE-NEXT: psrlw $4, %xmm0 +; X32-SSE-NEXT: pand %xmm2, %xmm0 +; X32-SSE-NEXT: pshufb %xmm0, %xmm1 +; X32-SSE-NEXT: paddb %xmm4, %xmm1 +; X32-SSE-NEXT: movdqa %xmm1, %xmm0 ; X32-SSE-NEXT: retl %out = call <16 x i8> @llvm.cttz.v16i8(<16 x i8> %in, i1 -1) ret <16 x i8> %out Index: llvm/trunk/test/CodeGen/X86/vector-tzcnt-256.ll =================================================================== --- llvm/trunk/test/CodeGen/X86/vector-tzcnt-256.ll +++ llvm/trunk/test/CodeGen/X86/vector-tzcnt-256.ll @@ -15,144 +15,132 @@ ; AVX1-LABEL: testv4i64: ; AVX1: # %bb.0: ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1 -; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2 
-; AVX1-NEXT: vpsubq %xmm1, %xmm2, %xmm3 -; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddq %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 +; AVX1-NEXT: vpaddq %xmm2, %xmm1, %xmm3 +; AVX1-NEXT: vpandn %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 -; AVX1-NEXT: vpaddb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpsadbw %xmm2, %xmm1, %xmm1 -; AVX1-NEXT: vpsubq %xmm0, %xmm2, %xmm5 -; AVX1-NEXT: vpand %xmm5, %xmm0, %xmm0 -; AVX1-NEXT: vpaddq %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm3 -; AVX1-NEXT: vpshufb %xmm3, %xmm6, %xmm3 +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 +; AVX1-NEXT: vpaddb %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpxor %xmm4, %xmm4, %xmm4 +; AVX1-NEXT: vpsadbw %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpaddq %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpsadbw %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpsadbw %xmm4, %xmm0, %xmm0 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv4i64: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX2-NEXT: vpaddq %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX2-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX2-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX2-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: retq ; ; AVX512CDVL-LABEL: testv4i64: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; AVX512CDVL-NEXT: 
vpand %ymm2, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX512CDVL-NEXT: vpaddq %ymm2, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CDVL-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CDVL-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX512CDVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX512CDVL-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX512CDVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CDVL-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX512CDVL-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX512CDVL-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX512CDVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CDVL-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: retq ; ; AVX512CD-LABEL: testv4i64: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; AVX512CD-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CD-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX512CD-NEXT: vpaddq %ymm2, %ymm0, %ymm0 -; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CD-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CD-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX512CD-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX512CD-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX512CD-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CD-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX512CD-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX512CD-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CD-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX512CD-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX512CD-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CD-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: testv4i64: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubq %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddq %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: testv4i64: ; AVX512VPOPCNTDQVL: # %bb.0: 
-; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubq %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddq %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv4i64: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG_NOVLX-NEXT: vpaddq %ymm2, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG_NOVLX-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv4i64: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG-NEXT: vpaddq %ymm2, %ymm0, %ymm0 +; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq ; ; X32-AVX-LABEL: testv4i64: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpsubq {{\.LCPI.*}}, %ymm0, %ymm0 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm3 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-AVX-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; X32-AVX-NEXT: vpsubq {{\.LCPI.*}}, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-AVX-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; X32-AVX-NEXT: vpsrlw $4, %ymm0, %ymm0 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; X32-AVX-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; X32-AVX-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; X32-AVX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: retl %out = call <4 x i64> @llvm.cttz.v4i64(<4 x i64> %in, i1 0) @@ -163,48 +151,45 @@ ; AVX1-LABEL: testv4i64u: ; AVX1: # %bb.0: ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1 -; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpsubq %xmm1, %xmm2, %xmm3 -; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddq %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 
= [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 +; AVX1-NEXT: vpaddq %xmm2, %xmm1, %xmm3 +; AVX1-NEXT: vpandn %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 -; AVX1-NEXT: vpaddb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpsadbw %xmm2, %xmm1, %xmm1 -; AVX1-NEXT: vpsubq %xmm0, %xmm2, %xmm5 -; AVX1-NEXT: vpand %xmm5, %xmm0, %xmm0 -; AVX1-NEXT: vpaddq %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm3 -; AVX1-NEXT: vpshufb %xmm3, %xmm6, %xmm3 +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 +; AVX1-NEXT: vpaddb %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpxor %xmm4, %xmm4, %xmm4 +; AVX1-NEXT: vpsadbw %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpaddq %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpsadbw %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpsadbw %xmm4, %xmm0, %xmm0 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv4i64u: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX2-NEXT: vpaddq %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX2-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX2-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX2-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: retq ; @@ -230,61 +215,54 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv4i64u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubq %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddq %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 
; AVX512VPOPCNTDQ-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: testv4i64u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubq %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddq %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv4i64u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG_NOVLX-NEXT: vpaddq %ymm2, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG_NOVLX-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv4i64u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG-NEXT: vpaddq %ymm2, %ymm0, %ymm0 +; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG-NEXT: vpaddq %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq ; ; X32-AVX-LABEL: testv4i64u: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubq %ymm0, %ymm1, %ymm2 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpsubq {{\.LCPI.*}}, %ymm0, %ymm0 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm3 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-AVX-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; X32-AVX-NEXT: vpsubq {{\.LCPI.*}}, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-AVX-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; X32-AVX-NEXT: vpsrlw $4, %ymm0, %ymm0 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; X32-AVX-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; X32-AVX-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; X32-AVX-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: retl %out = call <4 x i64> @llvm.cttz.v4i64(<4 x i64> %in, i1 -1) @@ -295,56 +273,53 @@ ; AVX1-LABEL: testv8i32: ; AVX1: # %bb.0: ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1 -; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpsubd %xmm1, %xmm2, %xmm3 -; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddd %xmm3, 
%xmm1, %xmm1 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 +; AVX1-NEXT: vpaddd %xmm2, %xmm1, %xmm3 +; AVX1-NEXT: vpandn %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 -; AVX1-NEXT: vpaddb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm5 = xmm1[2],xmm2[2],xmm1[3],xmm2[3] -; AVX1-NEXT: vpsadbw %xmm2, %xmm5, %xmm5 +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 +; AVX1-NEXT: vpaddb %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpxor %xmm4, %xmm4, %xmm4 +; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm6 = xmm1[2],xmm4[2],xmm1[3],xmm4[3] +; AVX1-NEXT: vpsadbw %xmm4, %xmm6, %xmm6 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero -; AVX1-NEXT: vpsadbw %xmm2, %xmm1, %xmm1 -; AVX1-NEXT: vpackuswb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpsubd %xmm0, %xmm2, %xmm5 -; AVX1-NEXT: vpand %xmm5, %xmm0, %xmm0 -; AVX1-NEXT: vpaddd %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm3 -; AVX1-NEXT: vpshufb %xmm3, %xmm6, %xmm3 +; AVX1-NEXT: vpsadbw %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpackuswb %xmm6, %xmm1, %xmm1 +; AVX1-NEXT: vpaddd %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm3 = xmm0[2],xmm2[2],xmm0[3],xmm2[3] -; AVX1-NEXT: vpsadbw %xmm2, %xmm3, %xmm3 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm4[2],xmm0[3],xmm4[3] +; AVX1-NEXT: vpsadbw %xmm4, %xmm2, %xmm2 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero -; AVX1-NEXT: vpsadbw %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpackuswb %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpsadbw %xmm4, %xmm0, %xmm0 +; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv8i32: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX2-NEXT: vpaddd %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX2-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = 
[0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX2-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX2-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; AVX2-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; AVX2-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -354,19 +329,18 @@ ; ; AVX512CDVL-LABEL: testv8i32: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; AVX512CDVL-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX512CDVL-NEXT: vpaddd %ymm2, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CDVL-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CDVL-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX512CDVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX512CDVL-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX512CDVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CDVL-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX512CDVL-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CDVL-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX512CDVL-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX512CDVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CDVL-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; AVX512CDVL-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; AVX512CDVL-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -376,19 +350,18 @@ ; ; AVX512CD-LABEL: testv8i32: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; AVX512CD-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CD-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX512CD-NEXT: vpaddd %ymm2, %ymm0, %ymm0 -; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CD-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CD-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX512CD-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX512CD-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX512CD-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CD-NEXT: 
vpshufb %ymm2, %ymm3, %ymm2 ; AVX512CD-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX512CD-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX512CD-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX512CD-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX512CD-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CD-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; AVX512CD-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; AVX512CD-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -398,33 +371,28 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv8i32: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubd %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddd %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: testv8i32: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubd %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddd %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: testv8i32: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG_NOVLX-NEXT: vpaddd %ymm2, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG_NOVLX-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; BITALG_NOVLX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -434,12 +402,11 @@ ; ; BITALG-LABEL: testv8i32: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG-NEXT: vpaddd %ymm2, %ymm0, %ymm0 +; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; BITALG-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -449,19 +416,18 @@ ; ; X32-AVX-LABEL: testv8i32: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: 
vpcmpeqd %ymm2, %ymm2, %ymm2 -; X32-AVX-NEXT: vpaddd %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm3 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-AVX-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; X32-AVX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; X32-AVX-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-AVX-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; X32-AVX-NEXT: vpsrlw $4, %ymm0, %ymm0 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; X32-AVX-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; X32-AVX-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; X32-AVX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; X32-AVX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; X32-AVX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -476,56 +442,53 @@ ; AVX1-LABEL: testv8i32u: ; AVX1: # %bb.0: ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1 -; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpsubd %xmm1, %xmm2, %xmm3 -; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddd %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 +; AVX1-NEXT: vpaddd %xmm2, %xmm1, %xmm3 +; AVX1-NEXT: vpandn %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 -; AVX1-NEXT: vpaddb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm5 = xmm1[2],xmm2[2],xmm1[3],xmm2[3] -; AVX1-NEXT: vpsadbw %xmm2, %xmm5, %xmm5 +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 +; AVX1-NEXT: vpaddb %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpxor %xmm4, %xmm4, %xmm4 +; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm6 = xmm1[2],xmm4[2],xmm1[3],xmm4[3] +; AVX1-NEXT: vpsadbw %xmm4, %xmm6, %xmm6 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm1 = xmm1[0],zero,xmm1[1],zero -; AVX1-NEXT: vpsadbw %xmm2, %xmm1, %xmm1 -; AVX1-NEXT: vpackuswb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpsubd %xmm0, %xmm2, %xmm5 -; AVX1-NEXT: vpand %xmm5, %xmm0, %xmm0 -; AVX1-NEXT: vpaddd %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm3 -; AVX1-NEXT: vpshufb %xmm3, %xmm6, %xmm3 +; AVX1-NEXT: vpsadbw %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpackuswb %xmm6, %xmm1, %xmm1 +; AVX1-NEXT: vpaddd %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, 
%xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm3 = xmm0[2],xmm2[2],xmm0[3],xmm2[3] -; AVX1-NEXT: vpsadbw %xmm2, %xmm3, %xmm3 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpunpckhdq {{.*#+}} xmm2 = xmm0[2],xmm4[2],xmm0[3],xmm4[3] +; AVX1-NEXT: vpsadbw %xmm4, %xmm2, %xmm2 ; AVX1-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero -; AVX1-NEXT: vpsadbw %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpackuswb %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpsadbw %xmm4, %xmm0, %xmm0 +; AVX1-NEXT: vpackuswb %xmm2, %xmm0, %xmm0 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv8i32u: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; AVX2-NEXT: vpaddd %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm3 -; AVX2-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX2-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; AVX2-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX2-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; AVX2-NEXT: vpsrlw $4, %ymm0, %ymm0 -; AVX2-NEXT: vpand %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; AVX2-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX2-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; AVX2-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; AVX2-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -555,33 +518,28 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv8i32u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubd %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddd %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: testv8i32u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubd %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddd %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; 
BITALG_NOVLX-LABEL: testv8i32u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; BITALG_NOVLX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG_NOVLX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG_NOVLX-NEXT: vpaddd %ymm2, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG_NOVLX-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG_NOVLX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; BITALG_NOVLX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; BITALG_NOVLX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -591,12 +549,11 @@ ; ; BITALG-LABEL: testv8i32u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; BITALG-NEXT: vpand %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; BITALG-NEXT: vpaddd %ymm2, %ymm0, %ymm0 +; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; BITALG-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; BITALG-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -606,19 +563,18 @@ ; ; X32-AVX-LABEL: testv8i32u: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubd %ymm0, %ymm1, %ymm2 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2 -; X32-AVX-NEXT: vpaddd %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm3 -; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; X32-AVX-NEXT: vpshufb %ymm3, %ymm4, %ymm3 +; X32-AVX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 +; X32-AVX-NEXT: vpaddd %ymm1, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 +; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; X32-AVX-NEXT: vpshufb %ymm2, %ymm3, %ymm2 ; X32-AVX-NEXT: vpsrlw $4, %ymm0, %ymm0 -; X32-AVX-NEXT: vpand %ymm2, %ymm0, %ymm0 -; X32-AVX-NEXT: vpshufb %ymm0, %ymm4, %ymm0 -; X32-AVX-NEXT: vpaddb %ymm3, %ymm0, %ymm0 +; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpshufb %ymm0, %ymm3, %ymm0 +; X32-AVX-NEXT: vpaddb %ymm2, %ymm0, %ymm0 +; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; X32-AVX-NEXT: vpunpckhdq {{.*#+}} ymm2 = ymm0[2],ymm1[2],ymm0[3],ymm1[3],ymm0[6],ymm1[6],ymm0[7],ymm1[7] ; X32-AVX-NEXT: vpsadbw %ymm1, %ymm2, %ymm2 ; X32-AVX-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] @@ -632,31 +588,28 @@ define <16 x i16> @testv16i16(<16 x i16> %in) nounwind { ; AVX1-LABEL: testv16i16: ; AVX1: # %bb.0: -; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX1-NEXT: vpsubw %xmm0, %xmm1, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm2 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; 
AVX1-NEXT: vpaddw %xmm3, %xmm2, %xmm2 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm2, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX1-NEXT: vpaddw %xmm1, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm2, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm2, %xmm2 -; AVX1-NEXT: vpand %xmm4, %xmm2, %xmm2 -; AVX1-NEXT: vpshufb %xmm2, %xmm6, %xmm2 -; AVX1-NEXT: vpaddb %xmm5, %xmm2, %xmm2 -; AVX1-NEXT: vpsllw $8, %xmm2, %xmm5 -; AVX1-NEXT: vpaddb %xmm2, %xmm5, %xmm2 +; AVX1-NEXT: vpand %xmm3, %xmm2, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 +; AVX1-NEXT: vpaddb %xmm4, %xmm2, %xmm2 +; AVX1-NEXT: vpsllw $8, %xmm2, %xmm4 +; AVX1-NEXT: vpaddb %xmm2, %xmm4, %xmm2 ; AVX1-NEXT: vpsrlw $8, %xmm2, %xmm2 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm0 -; AVX1-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 -; AVX1-NEXT: vpaddw %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 +; AVX1-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX1-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 ; AVX1-NEXT: vpaddb %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1 ; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0 @@ -666,11 +619,9 @@ ; ; AVX2-LABEL: testv16i16: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX2-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -686,11 +637,9 @@ ; ; AVX512CDVL-LABEL: testv16i16: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512CDVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -706,11 +655,9 @@ ; ; AVX512CD-LABEL: testv16i16: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CD-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512CD-NEXT: vpandn 
%ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -726,11 +673,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv16i16: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0 @@ -738,11 +683,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv16i16: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovdw %zmm0, %ymm0 @@ -750,32 +693,26 @@ ; ; BITALG_NOVLX-LABEL: testv16i16: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv16i16: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; BITALG-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 ; BITALG-NEXT: retq ; ; X32-AVX-LABEL: testv16i16: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; X32-AVX-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] 
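
Note on the pattern above (an explanatory aside, not part of the patch): every updated hunk in these tzcnt tests swaps the old cttz expansion ctpop((x & -x) - 1) — materialize zero with vpxor, negate with vpsubB/W/D/Q, vpand, then add all-ones — for ctpop(~x & (x - 1)), where the NOT and AND fold into a single vpandn/vpandnq and the zero vector is only materialized afterwards, where the vpsadbw reduction still needs it. The two expansions agree for every input. Below is a minimal standalone C++ sketch of the identity; cttz_ref and popcount_ref are ad-hoc reference helpers written for this note, not LLVM APIs.

#include <cassert>
#include <cstdint>

// Reference count-trailing-zeros for nonzero x (bit-at-a-time, illustration only).
static unsigned cttz_ref(uint32_t x) {
  unsigned n = 0;
  while ((x & 1u) == 0u) { x >>= 1; ++n; }
  return n;
}

// Reference popcount (Kernighan's loop clears one set bit per iteration).
static unsigned popcount_ref(uint32_t x) {
  unsigned n = 0;
  for (; x != 0; x &= x - 1u) ++n;
  return n;
}

int main() {
  const uint32_t samples[] = {1u, 2u, 6u, 0x80u, 0x12340000u, 0xFFFFFFFFu};
  for (uint32_t x : samples) {
    // Old expansion: isolate the lowest set bit, subtract one, popcount.
    assert(popcount_ref((x & (0u - x)) - 1u) == cttz_ref(x));
    // New expansion: x - 1 flips the trailing zeros and the lowest set bit;
    // ~x & (x - 1) keeps exactly the flipped trailing zeros -- a single
    // (V)PANDN on x86, so the zero register and the negate disappear.
    assert(popcount_ref(~x & (x - 1u)) == cttz_ref(x));
  }
  return 0;
}
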
@@ -795,31 +732,28 @@ define <16 x i16> @testv16i16u(<16 x i16> %in) nounwind { ; AVX1-LABEL: testv16i16u: ; AVX1: # %bb.0: -; AVX1-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX1-NEXT: vpsubw %xmm0, %xmm1, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm2 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddw %xmm3, %xmm2, %xmm2 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm2, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm1, %xmm1, %xmm1 +; AVX1-NEXT: vpaddw %xmm1, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm2, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm2, %xmm2 -; AVX1-NEXT: vpand %xmm4, %xmm2, %xmm2 -; AVX1-NEXT: vpshufb %xmm2, %xmm6, %xmm2 -; AVX1-NEXT: vpaddb %xmm5, %xmm2, %xmm2 -; AVX1-NEXT: vpsllw $8, %xmm2, %xmm5 -; AVX1-NEXT: vpaddb %xmm2, %xmm5, %xmm2 +; AVX1-NEXT: vpand %xmm3, %xmm2, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 +; AVX1-NEXT: vpaddb %xmm4, %xmm2, %xmm2 +; AVX1-NEXT: vpsllw $8, %xmm2, %xmm4 +; AVX1-NEXT: vpaddb %xmm2, %xmm4, %xmm2 ; AVX1-NEXT: vpsrlw $8, %xmm2, %xmm2 ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm0 -; AVX1-NEXT: vpsubw %xmm0, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 -; AVX1-NEXT: vpaddw %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 +; AVX1-NEXT: vpaddw %xmm1, %xmm0, %xmm1 +; AVX1-NEXT: vpandn %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 ; AVX1-NEXT: vpaddb %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpsllw $8, %xmm0, %xmm1 ; AVX1-NEXT: vpaddb %xmm0, %xmm1, %xmm0 @@ -829,11 +763,9 @@ ; ; AVX2-LABEL: testv16i16u: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX2-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -849,11 +781,9 @@ ; ; AVX512CDVL-LABEL: testv16i16u: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512CDVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -869,11 +799,9 @@ ; ; AVX512CD-LABEL: testv16i16u: ; AVX512CD: # %bb.0: -; 
AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CD-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512CD-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -889,11 +817,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv16i16u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0 @@ -901,11 +827,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv16i16u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQVL-NEXT: vpmovdw %zmm0, %ymm0 @@ -913,32 +837,26 @@ ; ; BITALG_NOVLX-LABEL: testv16i16u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntw %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv16i16u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; BITALG-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 ; BITALG-NEXT: retq ; ; X32-AVX-LABEL: testv16i16u: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubw %ymm0, %ymm1, %ymm1 -; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; X32-AVX-NEXT: vpaddw %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpaddw %ymm1, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, 
%ymm0, %ymm0 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -959,38 +877,33 @@ ; AVX1-LABEL: testv32i8: ; AVX1: # %bb.0: ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1 -; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpsubb %xmm1, %xmm2, %xmm3 -; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddb %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 +; AVX1-NEXT: vpaddb %xmm2, %xmm1, %xmm3 +; AVX1-NEXT: vpandn %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 -; AVX1-NEXT: vpaddb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpsubb %xmm0, %xmm2, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm2 -; AVX1-NEXT: vpshufb %xmm2, %xmm6, %xmm2 +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 +; AVX1-NEXT: vpaddb %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv32i8: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX2-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1003,11 +916,9 @@ ; ; AVX512CDVL-LABEL: testv32i8: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512CDVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1020,11 +931,9 @@ ; ; AVX512CD-LABEL: testv32i8: ; AVX512CD: # 
%bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CD-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512CD-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1037,11 +946,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv32i8: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1054,11 +961,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv32i8: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1071,32 +976,26 @@ ; ; BITALG_NOVLX-LABEL: testv32i8: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv32i8: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; BITALG-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: retq ; ; X32-AVX-LABEL: testv32i8: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; X32-AVX-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = 
[15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1114,38 +1013,33 @@ ; AVX1-LABEL: testv32i8u: ; AVX1: # %bb.0: ; AVX1-NEXT: vextractf128 $1, %ymm0, %xmm1 -; AVX1-NEXT: vpxor %xmm2, %xmm2, %xmm2 -; AVX1-NEXT: vpsubb %xmm1, %xmm2, %xmm3 -; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vpcmpeqd %xmm3, %xmm3, %xmm3 -; AVX1-NEXT: vpaddb %xmm3, %xmm1, %xmm1 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm5 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX1-NEXT: vpshufb %xmm5, %xmm6, %xmm5 +; AVX1-NEXT: vpcmpeqd %xmm2, %xmm2, %xmm2 +; AVX1-NEXT: vpaddb %xmm2, %xmm1, %xmm3 +; AVX1-NEXT: vpandn %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm4 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX1-NEXT: vpshufb %xmm4, %xmm5, %xmm4 ; AVX1-NEXT: vpsrlw $4, %xmm1, %xmm1 -; AVX1-NEXT: vpand %xmm4, %xmm1, %xmm1 -; AVX1-NEXT: vpshufb %xmm1, %xmm6, %xmm1 -; AVX1-NEXT: vpaddb %xmm5, %xmm1, %xmm1 -; AVX1-NEXT: vpsubb %xmm0, %xmm2, %xmm2 -; AVX1-NEXT: vpand %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpaddb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm2 -; AVX1-NEXT: vpshufb %xmm2, %xmm6, %xmm2 +; AVX1-NEXT: vpand %xmm3, %xmm1, %xmm1 +; AVX1-NEXT: vpshufb %xmm1, %xmm5, %xmm1 +; AVX1-NEXT: vpaddb %xmm4, %xmm1, %xmm1 +; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm2 +; AVX1-NEXT: vpandn %xmm2, %xmm0, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm2 +; AVX1-NEXT: vpshufb %xmm2, %xmm5, %xmm2 ; AVX1-NEXT: vpsrlw $4, %xmm0, %xmm0 -; AVX1-NEXT: vpand %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vpshufb %xmm0, %xmm6, %xmm0 +; AVX1-NEXT: vpand %xmm3, %xmm0, %xmm0 +; AVX1-NEXT: vpshufb %xmm0, %xmm5, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 ; AVX1-NEXT: vinsertf128 $1, %xmm1, %ymm0, %ymm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: testv32i8u: ; AVX2: # %bb.0: -; AVX2-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX2-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX2-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX2-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX2-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1158,11 +1052,9 @@ ; ; AVX512CDVL-LABEL: testv32i8u: ; AVX512CDVL: # %bb.0: -; AVX512CDVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDVL-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CDVL-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512CDVL-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512CDVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CDVL-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CDVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1175,11 +1067,9 @@ ; ; AVX512CD-LABEL: testv32i8u: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, 
%xmm1 -; AVX512CD-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512CD-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512CD-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512CD-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CD-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1192,11 +1082,9 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv32i8u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQ-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1209,11 +1097,9 @@ ; ; AVX512VPOPCNTDQVL-LABEL: testv32i8u: ; AVX512VPOPCNTDQVL: # %bb.0: -; AVX512VPOPCNTDQVL-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQVL-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; AVX512VPOPCNTDQVL-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm2 ; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] @@ -1226,32 +1112,26 @@ ; ; BITALG_NOVLX-LABEL: testv32i8u: ; BITALG_NOVLX: # %bb.0: -; BITALG_NOVLX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG_NOVLX-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG_NOVLX-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; BITALG_NOVLX-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; BITALG_NOVLX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG_NOVLX-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG_NOVLX-NEXT: # kill: def $ymm0 killed $ymm0 killed $zmm0 ; BITALG_NOVLX-NEXT: retq ; ; BITALG-LABEL: testv32i8u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; BITALG-NEXT: vpand %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; BITALG-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; BITALG-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; BITALG-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: retq ; ; X32-AVX-LABEL: testv32i8u: ; X32-AVX: # %bb.0: -; X32-AVX-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; X32-AVX-NEXT: vpsubb %ymm0, %ymm1, %ymm1 -; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vpcmpeqd %ymm1, %ymm1, %ymm1 -; X32-AVX-NEXT: vpaddb %ymm1, %ymm0, %ymm0 +; X32-AVX-NEXT: vpaddb %ymm1, %ymm0, %ymm1 +; X32-AVX-NEXT: vpandn %ymm1, %ymm0, %ymm0 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm1 = 
[15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; X32-AVX-NEXT: vpand %ymm1, %ymm0, %ymm2 ; X32-AVX-NEXT: vmovdqa {{.*#+}} ymm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] Index: llvm/trunk/test/CodeGen/X86/vector-tzcnt-512.ll =================================================================== --- llvm/trunk/test/CodeGen/X86/vector-tzcnt-512.ll +++ llvm/trunk/test/CodeGen/X86/vector-tzcnt-512.ll @@ -8,11 +8,9 @@ define <8 x i64> @testv8i64(<8 x i64> %in) nounwind { ; AVX512CD-LABEL: testv8i64: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubq %zmm0, %zmm1, %zmm1 -; AVX512CD-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512CD-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 -; AVX512CD-NEXT: vpaddq %zmm1, %zmm0, %zmm0 +; AVX512CD-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; AVX512CD-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; AVX512CD-NEXT: vextracti64x4 $1, %zmm0, %ymm1 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CD-NEXT: vpand %ymm2, %ymm1, %ymm3 @@ -36,58 +34,53 @@ ; ; AVX512CDBW-LABEL: testv8i64: ; AVX512CDBW: # %bb.0: -; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDBW-NEXT: vpsubq %zmm0, %zmm1, %zmm2 -; AVX512CDBW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; AVX512CDBW-NEXT: vpaddq %zmm2, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CDBW-NEXT: vpandq %zmm2, %zmm0, %zmm3 -; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CDBW-NEXT: vpshufb %zmm3, %zmm4, %zmm3 +; AVX512CDBW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; AVX512CDBW-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; AVX512CDBW-NEXT: vpandnq %zmm1, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm2 +; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CDBW-NEXT: vpshufb %zmm2, %zmm3, %zmm2 ; AVX512CDBW-NEXT: vpsrlw $4, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vpshufb %zmm0, %zmm4, %zmm0 -; AVX512CDBW-NEXT: vpaddb %zmm3, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 +; AVX512CDBW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CDBW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512CDBW-NEXT: retq ; ; AVX512BW-LABEL: testv8i64: ; AVX512BW: # %bb.0: -; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512BW-NEXT: vpsubq %zmm0, %zmm1, %zmm2 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; AVX512BW-NEXT: vpaddq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = 
[15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm3 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3 +; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; AVX512BW-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2 ; AVX512BW-NEXT: vpsrlw $4, %zmm0, %zmm0 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0 -; AVX512BW-NEXT: vpaddb %zmm3, %zmm0, %zmm0 +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 +; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 +; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: testv8i64: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubq %zmm0, %zmm1, %zmm1 -; AVX512VPOPCNTDQ-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddq %zmm1, %zmm0, %zmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; AVX512VPOPCNTDQ-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; BITALG-LABEL: testv8i64: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubq %zmm0, %zmm1, %zmm2 -; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; BITALG-NEXT: vpaddq %zmm2, %zmm0, %zmm0 +; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; BITALG-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: retq %out = call <8 x i64> @llvm.cttz.v8i64(<8 x i64> %in, i1 0) @@ -117,40 +110,36 @@ ; ; AVX512BW-LABEL: testv8i64u: ; AVX512BW: # %bb.0: -; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512BW-NEXT: vpsubq %zmm0, %zmm1, %zmm2 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; AVX512BW-NEXT: vpaddq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm3 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3 +; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; AVX512BW-NEXT: vpaddq 
%zmm1, %zmm0, %zmm1 +; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2 ; AVX512BW-NEXT: vpsrlw $4, %zmm0, %zmm0 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0 -; AVX512BW-NEXT: vpaddb %zmm3, %zmm0, %zmm0 +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 +; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 +; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: testv8i64u: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubq %zmm0, %zmm1, %zmm1 -; AVX512VPOPCNTDQ-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddq %zmm1, %zmm0, %zmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; AVX512VPOPCNTDQ-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; BITALG-LABEL: testv8i64u: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubq %zmm0, %zmm1, %zmm2 -; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; BITALG-NEXT: vpaddq %zmm2, %zmm0, %zmm0 +; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; BITALG-NEXT: vpaddq %zmm1, %zmm0, %zmm1 +; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: retq %out = call <8 x i64> @llvm.cttz.v8i64(<8 x i64> %in, i1 -1) @@ -160,11 +149,9 @@ define <16 x i32> @testv16i32(<16 x i32> %in) nounwind { ; AVX512CD-LABEL: testv16i32: ; AVX512CD: # %bb.0: -; AVX512CD-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CD-NEXT: vpsubd %zmm0, %zmm1, %zmm1 -; AVX512CD-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512CD-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 -; AVX512CD-NEXT: vpaddd %zmm1, %zmm0, %zmm0 +; AVX512CD-NEXT: vpaddd %zmm1, %zmm0, %zmm1 +; AVX512CD-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; AVX512CD-NEXT: vextracti64x4 $1, %zmm0, %ymm1 ; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] ; AVX512CD-NEXT: vpand %ymm2, %ymm1, %ymm3 @@ -196,19 +183,18 @@ ; ; AVX512CDBW-LABEL: testv16i32: ; AVX512CDBW: # %bb.0: -; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512CDBW-NEXT: vpsubd %zmm0, %zmm1, %zmm2 -; AVX512CDBW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; AVX512CDBW-NEXT: vpaddd %zmm2, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512CDBW-NEXT: vpandq %zmm2, %zmm0, %zmm3 -; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm4 = 
[0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512CDBW-NEXT: vpshufb %zmm3, %zmm4, %zmm3 +; AVX512CDBW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; AVX512CDBW-NEXT: vpaddd %zmm1, %zmm0, %zmm1 +; AVX512CDBW-NEXT: vpandnq %zmm1, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm2 +; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512CDBW-NEXT: vpshufb %zmm2, %zmm3, %zmm2 ; AVX512CDBW-NEXT: vpsrlw $4, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512CDBW-NEXT: vpshufb %zmm0, %zmm4, %zmm0 -; AVX512CDBW-NEXT: vpaddb %zmm3, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 +; AVX512CDBW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 +; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512CDBW-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15] ; AVX512CDBW-NEXT: vpsadbw %zmm1, %zmm2, %zmm2 ; AVX512CDBW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] @@ -218,19 +204,18 @@ ; ; AVX512BW-LABEL: testv16i32: ; AVX512BW: # %bb.0: -; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512BW-NEXT: vpsubd %zmm0, %zmm1, %zmm2 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; AVX512BW-NEXT: vpaddd %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm3 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3 +; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; AVX512BW-NEXT: vpaddd %zmm1, %zmm0, %zmm1 +; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2 ; AVX512BW-NEXT: vpsrlw $4, %zmm0, %zmm0 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0 -; AVX512BW-NEXT: vpaddb %zmm3, %zmm0, %zmm0 +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 +; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 +; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpunpckhdq {{.*#+}} zmm2 = 
zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm2, %zmm2 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] @@ -240,22 +225,19 @@ ; ; AVX512VPOPCNTDQ-LABEL: testv16i32: ; AVX512VPOPCNTDQ: # %bb.0: -; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512VPOPCNTDQ-NEXT: vpsubd %zmm0, %zmm1, %zmm1 -; AVX512VPOPCNTDQ-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 -; AVX512VPOPCNTDQ-NEXT: vpaddd %zmm1, %zmm0, %zmm0 +; AVX512VPOPCNTDQ-NEXT: vpaddd %zmm1, %zmm0, %zmm1 +; AVX512VPOPCNTDQ-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; BITALG-LABEL: testv16i32: ; BITALG: # %bb.0: -; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; BITALG-NEXT: vpsubd %zmm0, %zmm1, %zmm2 -; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; BITALG-NEXT: vpaddd %zmm2, %zmm0, %zmm0 +; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; BITALG-NEXT: vpaddd %zmm1, %zmm0, %zmm1 +; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 +; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15] ; BITALG-NEXT: vpsadbw %zmm1, %zmm2, %zmm2 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] @@ -289,19 +271,18 @@ ; ; AVX512BW-LABEL: testv16i32u: ; AVX512BW: # %bb.0: -; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 -; AVX512BW-NEXT: vpsubd %zmm0, %zmm1, %zmm2 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2 -; AVX512BW-NEXT: vpaddd %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm3 -; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm4 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; AVX512BW-NEXT: vpshufb %zmm3, %zmm4, %zmm3 +; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1 +; AVX512BW-NEXT: vpaddd %zmm1, %zmm0, %zmm1 +; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2 +; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; AVX512BW-NEXT: vpshufb %zmm2, %zmm3, %zmm2 ; AVX512BW-NEXT: vpsrlw $4, %zmm0, %zmm0 -; AVX512BW-NEXT: vpandq %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpshufb %zmm0, %zmm4, %zmm0 -; AVX512BW-NEXT: vpaddb %zmm3, %zmm0, %zmm0 +; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 +; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 +; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, 
%zmm0
+; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; AVX512BW-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm2, %zmm2
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
@@ -311,22 +292,19 @@
 ;
 ; AVX512VPOPCNTDQ-LABEL: testv16i32u:
 ; AVX512VPOPCNTDQ: # %bb.0:
-; AVX512VPOPCNTDQ-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512VPOPCNTDQ-NEXT: vpsubd %zmm0, %zmm1, %zmm1
-; AVX512VPOPCNTDQ-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512VPOPCNTDQ-NEXT: vpaddd %zmm1, %zmm0, %zmm0
+; AVX512VPOPCNTDQ-NEXT: vpaddd %zmm1, %zmm0, %zmm1
+; AVX512VPOPCNTDQ-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
 ; BITALG-LABEL: testv16i32u:
 ; BITALG: # %bb.0:
-; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; BITALG-NEXT: vpsubd %zmm0, %zmm1, %zmm2
-; BITALG-NEXT: vpandq %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpternlogd $255, %zmm2, %zmm2, %zmm2
-; BITALG-NEXT: vpaddd %zmm2, %zmm0, %zmm0
+; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
+; BITALG-NEXT: vpaddd %zmm1, %zmm0, %zmm1
+; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0
+; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpunpckhdq {{.*#+}} zmm2 = zmm0[2],zmm1[2],zmm0[3],zmm1[3],zmm0[6],zmm1[6],zmm0[7],zmm1[7],zmm0[10],zmm1[10],zmm0[11],zmm1[11],zmm0[14],zmm1[14],zmm0[15],zmm1[15]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm2, %zmm2
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
@@ -340,30 +318,27 @@
 define <32 x i16> @testv32i16(<32 x i16> %in) nounwind {
 ; AVX512CD-LABEL: testv32i16:
 ; AVX512CD: # %bb.0:
-; AVX512CD-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512CD-NEXT: vpsubw %ymm0, %ymm2, %ymm3
-; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512CD-NEXT: vpaddw %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm5
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512CD-NEXT: vpshufb %ymm5, %ymm6, %ymm5
+; AVX512CD-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512CD-NEXT: vpaddw %ymm2, %ymm0, %ymm3
+; AVX512CD-NEXT: vpandn %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm4
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512CD-NEXT: vpshufb %ymm4, %ymm5, %ymm4
 ; AVX512CD-NEXT: vpsrlw $4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpshufb %ymm0, %ymm6, %ymm0
-; AVX512CD-NEXT: vpaddb %ymm5, %ymm0, %ymm0
-; AVX512CD-NEXT: vpsllw $8, %ymm0, %ymm5
-; AVX512CD-NEXT: vpaddb %ymm0, %ymm5, %ymm0
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vpshufb %ymm0, %ymm5, %ymm0
+; AVX512CD-NEXT: vpaddb %ymm4, %ymm0, %ymm0
+; AVX512CD-NEXT: vpsllw $8, %ymm0, %ymm4
+; AVX512CD-NEXT: vpaddb %ymm0, %ymm4, %ymm0
 ; AVX512CD-NEXT: vpsrlw $8, %ymm0, %ymm0
-; AVX512CD-NEXT: vpsubw %ymm1, %ymm2, %ymm2
-; AVX512CD-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512CD-NEXT: vpaddw %ymm3, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm2
-; AVX512CD-NEXT: vpshufb %ymm2, %ymm6, %ymm2
+; AVX512CD-NEXT: vpaddw %ymm2, %ymm1, %ymm2
+; AVX512CD-NEXT: vpandn %ymm2, %ymm1, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm2
+; AVX512CD-NEXT: vpshufb %ymm2, %ymm5, %ymm2
 ; AVX512CD-NEXT: vpsrlw $4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpshufb %ymm1, %ymm6, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm1
+; AVX512CD-NEXT: vpshufb %ymm1, %ymm5, %ymm1
 ; AVX512CD-NEXT: vpaddb %ymm2, %ymm1, %ymm1
 ; AVX512CD-NEXT: vpsllw $8, %ymm1, %ymm2
 ; AVX512CD-NEXT: vpaddb %ymm1, %ymm2, %ymm1
@@ -372,11 +347,9 @@
 ;
 ; AVX512CDBW-LABEL: testv32i16:
 ; AVX512CDBW: # %bb.0:
-; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512CDBW-NEXT: vpsubw %zmm0, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vpaddw %zmm1, %zmm0, %zmm1
+; AVX512CDBW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -392,11 +365,9 @@
 ;
 ; AVX512BW-LABEL: testv32i16:
 ; AVX512BW: # %bb.0:
-; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512BW-NEXT: vpsubw %zmm0, %zmm1, %zmm1
-; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512BW-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; AVX512BW-NEXT: vpaddw %zmm1, %zmm0, %zmm1
+; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -412,17 +383,14 @@
 ;
 ; AVX512VPOPCNTDQ-LABEL: testv32i16:
 ; AVX512VPOPCNTDQ: # %bb.0:
-; AVX512VPOPCNTDQ-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512VPOPCNTDQ-NEXT: vpsubw %ymm0, %ymm2, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm3, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm2, %ymm0, %ymm3
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm3, %ymm0, %ymm0
 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpsubw %ymm1, %ymm2, %ymm2
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm3, %ymm1, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm2, %ymm1, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm2, %ymm1, %ymm1
 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm1, %zmm1
 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm1, %ymm1
@@ -430,11 +398,9 @@
 ;
 ; BITALG-LABEL: testv32i16:
 ; BITALG: # %bb.0:
-; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; BITALG-NEXT: vpsubw %zmm0, %zmm1, %zmm1
-; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; BITALG-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpaddw %zmm1, %zmm0, %zmm1
+; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpopcntw %zmm0, %zmm0
 ; BITALG-NEXT: retq
 %out = call <32 x i16> @llvm.cttz.v32i16(<32 x i16> %in, i1 0)
@@ -444,30 +410,27 @@
 define <32 x i16> @testv32i16u(<32 x i16> %in) nounwind {
 ; AVX512CD-LABEL: testv32i16u:
 ; AVX512CD: # %bb.0:
-; AVX512CD-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512CD-NEXT: vpsubw %ymm0, %ymm2, %ymm3
-; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512CD-NEXT: vpaddw %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm5
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512CD-NEXT: vpshufb %ymm5, %ymm6, %ymm5
+; AVX512CD-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512CD-NEXT: vpaddw %ymm2, %ymm0, %ymm3
+; AVX512CD-NEXT: vpandn %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm4
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512CD-NEXT: vpshufb %ymm4, %ymm5, %ymm4
 ; AVX512CD-NEXT: vpsrlw $4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpshufb %ymm0, %ymm6, %ymm0
-; AVX512CD-NEXT: vpaddb %ymm5, %ymm0, %ymm0
-; AVX512CD-NEXT: vpsllw $8, %ymm0, %ymm5
-; AVX512CD-NEXT: vpaddb %ymm0, %ymm5, %ymm0
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vpshufb %ymm0, %ymm5, %ymm0
+; AVX512CD-NEXT: vpaddb %ymm4, %ymm0, %ymm0
+; AVX512CD-NEXT: vpsllw $8, %ymm0, %ymm4
+; AVX512CD-NEXT: vpaddb %ymm0, %ymm4, %ymm0
 ; AVX512CD-NEXT: vpsrlw $8, %ymm0, %ymm0
-; AVX512CD-NEXT: vpsubw %ymm1, %ymm2, %ymm2
-; AVX512CD-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512CD-NEXT: vpaddw %ymm3, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm2
-; AVX512CD-NEXT: vpshufb %ymm2, %ymm6, %ymm2
+; AVX512CD-NEXT: vpaddw %ymm2, %ymm1, %ymm2
+; AVX512CD-NEXT: vpandn %ymm2, %ymm1, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm2
+; AVX512CD-NEXT: vpshufb %ymm2, %ymm5, %ymm2
 ; AVX512CD-NEXT: vpsrlw $4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpshufb %ymm1, %ymm6, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm1
+; AVX512CD-NEXT: vpshufb %ymm1, %ymm5, %ymm1
 ; AVX512CD-NEXT: vpaddb %ymm2, %ymm1, %ymm1
 ; AVX512CD-NEXT: vpsllw $8, %ymm1, %ymm2
 ; AVX512CD-NEXT: vpaddb %ymm1, %ymm2, %ymm1
@@ -476,11 +439,9 @@
 ;
 ; AVX512CDBW-LABEL: testv32i16u:
 ; AVX512CDBW: # %bb.0:
-; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512CDBW-NEXT: vpsubw %zmm0, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vpaddw %zmm1, %zmm0, %zmm1
+; AVX512CDBW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -496,11 +457,9 @@
 ;
 ; AVX512BW-LABEL: testv32i16u:
 ; AVX512BW: # %bb.0:
-; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512BW-NEXT: vpsubw %zmm0, %zmm1, %zmm1
-; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512BW-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; AVX512BW-NEXT: vpaddw %zmm1, %zmm0, %zmm1
+; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -516,17 +475,14 @@
 ;
 ; AVX512VPOPCNTDQ-LABEL: testv32i16u:
 ; AVX512VPOPCNTDQ: # %bb.0:
-; AVX512VPOPCNTDQ-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512VPOPCNTDQ-NEXT: vpsubw %ymm0, %ymm2, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm3, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm2, %ymm0, %ymm3
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm3, %ymm0, %ymm0
 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} zmm0 = ymm0[0],zero,ymm0[1],zero,ymm0[2],zero,ymm0[3],zero,ymm0[4],zero,ymm0[5],zero,ymm0[6],zero,ymm0[7],zero,ymm0[8],zero,ymm0[9],zero,ymm0[10],zero,ymm0[11],zero,ymm0[12],zero,ymm0[13],zero,ymm0[14],zero,ymm0[15],zero
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpsubw %ymm1, %ymm2, %ymm2
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm3, %ymm1, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpaddw %ymm2, %ymm1, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm2, %ymm1, %ymm1
 ; AVX512VPOPCNTDQ-NEXT: vpmovzxwd {{.*#+}} zmm1 = ymm1[0],zero,ymm1[1],zero,ymm1[2],zero,ymm1[3],zero,ymm1[4],zero,ymm1[5],zero,ymm1[6],zero,ymm1[7],zero,ymm1[8],zero,ymm1[9],zero,ymm1[10],zero,ymm1[11],zero,ymm1[12],zero,ymm1[13],zero,ymm1[14],zero,ymm1[15],zero
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm1, %zmm1
 ; AVX512VPOPCNTDQ-NEXT: vpmovdw %zmm1, %ymm1
@@ -534,11 +490,9 @@
 ;
 ; BITALG-LABEL: testv32i16u:
 ; BITALG: # %bb.0:
-; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; BITALG-NEXT: vpsubw %zmm0, %zmm1, %zmm1
-; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; BITALG-NEXT: vpaddw %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpaddw %zmm1, %zmm0, %zmm1
+; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpopcntw %zmm0, %zmm0
 ; BITALG-NEXT: retq
 %out = call <32 x i16> @llvm.cttz.v32i16(<32 x i16> %in, i1 -1)
@@ -548,37 +502,32 @@
 define <64 x i8> @testv64i8(<64 x i8> %in) nounwind {
 ; AVX512CD-LABEL: testv64i8:
 ; AVX512CD: # %bb.0:
-; AVX512CD-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512CD-NEXT: vpsubb %ymm0, %ymm2, %ymm3
-; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512CD-NEXT: vpaddb %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm5
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512CD-NEXT: vpshufb %ymm5, %ymm6, %ymm5
+; AVX512CD-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512CD-NEXT: vpaddb %ymm2, %ymm0, %ymm3
+; AVX512CD-NEXT: vpandn %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm4
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512CD-NEXT: vpshufb %ymm4, %ymm5, %ymm4
 ; AVX512CD-NEXT: vpsrlw $4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpshufb %ymm0, %ymm6, %ymm0
-; AVX512CD-NEXT: vpaddb %ymm5, %ymm0, %ymm0
-; AVX512CD-NEXT: vpsubb %ymm1, %ymm2, %ymm2
-; AVX512CD-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512CD-NEXT: vpaddb %ymm3, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm2
-; AVX512CD-NEXT: vpshufb %ymm2, %ymm6, %ymm2
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vpshufb %ymm0, %ymm5, %ymm0
+; AVX512CD-NEXT: vpaddb %ymm4, %ymm0, %ymm0
+; AVX512CD-NEXT: vpaddb %ymm2, %ymm1, %ymm2
+; AVX512CD-NEXT: vpandn %ymm2, %ymm1, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm2
+; AVX512CD-NEXT: vpshufb %ymm2, %ymm5, %ymm2
 ; AVX512CD-NEXT: vpsrlw $4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpshufb %ymm1, %ymm6, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm1
+; AVX512CD-NEXT: vpshufb %ymm1, %ymm5, %ymm1
 ; AVX512CD-NEXT: vpaddb %ymm2, %ymm1, %ymm1
 ; AVX512CD-NEXT: retq
 ;
 ; AVX512CDBW-LABEL: testv64i8:
 ; AVX512CDBW: # %bb.0:
-; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512CDBW-NEXT: vpsubb %zmm0, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vpaddb %zmm1, %zmm0, %zmm1
+; AVX512CDBW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -591,11 +540,9 @@
 ;
 ; AVX512BW-LABEL: testv64i8:
 ; AVX512BW: # %bb.0:
-; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512BW-NEXT: vpsubb %zmm0, %zmm1, %zmm1
-; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512BW-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; AVX512BW-NEXT: vpaddb %zmm1, %zmm0, %zmm1
+; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -608,37 +555,32 @@
 ;
 ; AVX512VPOPCNTDQ-LABEL: testv64i8:
 ; AVX512VPOPCNTDQ: # %bb.0:
-; AVX512VPOPCNTDQ-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512VPOPCNTDQ-NEXT: vpsubb %ymm0, %ymm2, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm0, %ymm5
-; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm5, %ymm6, %ymm5
+; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm3
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm3, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm4
+; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm4, %ymm5, %ymm4
 ; AVX512VPOPCNTDQ-NEXT: vpsrlw $4, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm6, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm5, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpsubb %ymm1, %ymm2, %ymm2
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm3, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm1, %ymm2
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm2, %ymm6, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm5, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm4, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm1, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm2, %ymm1, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm1, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm2, %ymm5, %ymm2
 ; AVX512VPOPCNTDQ-NEXT: vpsrlw $4, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm1, %ymm6, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm1, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm1, %ymm5, %ymm1
 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm1, %ymm1
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
 ; BITALG-LABEL: testv64i8:
 ; BITALG: # %bb.0:
-; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; BITALG-NEXT: vpsubb %zmm0, %zmm1, %zmm1
-; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; BITALG-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpaddb %zmm1, %zmm0, %zmm1
+; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0
 ; BITALG-NEXT: retq
 %out = call <64 x i8> @llvm.cttz.v64i8(<64 x i8> %in, i1 0)
@@ -648,37 +590,32 @@
 define <64 x i8> @testv64i8u(<64 x i8> %in) nounwind {
 ; AVX512CD-LABEL: testv64i8u:
 ; AVX512CD: # %bb.0:
-; AVX512CD-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512CD-NEXT: vpsubb %ymm0, %ymm2, %ymm3
-; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512CD-NEXT: vpaddb %ymm3, %ymm0, %ymm0
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm5
-; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512CD-NEXT: vpshufb %ymm5, %ymm6, %ymm5
+; AVX512CD-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512CD-NEXT: vpaddb %ymm2, %ymm0, %ymm3
+; AVX512CD-NEXT: vpandn %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm4
+; AVX512CD-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512CD-NEXT: vpshufb %ymm4, %ymm5, %ymm4
 ; AVX512CD-NEXT: vpsrlw $4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpand %ymm4, %ymm0, %ymm0
-; AVX512CD-NEXT: vpshufb %ymm0, %ymm6, %ymm0
-; AVX512CD-NEXT: vpaddb %ymm5, %ymm0, %ymm0
-; AVX512CD-NEXT: vpsubb %ymm1, %ymm2, %ymm2
-; AVX512CD-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512CD-NEXT: vpaddb %ymm3, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm2
-; AVX512CD-NEXT: vpshufb %ymm2, %ymm6, %ymm2
+; AVX512CD-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512CD-NEXT: vpshufb %ymm0, %ymm5, %ymm0
+; AVX512CD-NEXT: vpaddb %ymm4, %ymm0, %ymm0
+; AVX512CD-NEXT: vpaddb %ymm2, %ymm1, %ymm2
+; AVX512CD-NEXT: vpandn %ymm2, %ymm1, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm2
+; AVX512CD-NEXT: vpshufb %ymm2, %ymm5, %ymm2
 ; AVX512CD-NEXT: vpsrlw $4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpand %ymm4, %ymm1, %ymm1
-; AVX512CD-NEXT: vpshufb %ymm1, %ymm6, %ymm1
+; AVX512CD-NEXT: vpand %ymm3, %ymm1, %ymm1
+; AVX512CD-NEXT: vpshufb %ymm1, %ymm5, %ymm1
 ; AVX512CD-NEXT: vpaddb %ymm2, %ymm1, %ymm1
 ; AVX512CD-NEXT: retq
 ;
 ; AVX512CDBW-LABEL: testv64i8u:
 ; AVX512CDBW: # %bb.0:
-; AVX512CDBW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512CDBW-NEXT: vpsubb %zmm0, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512CDBW-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; AVX512CDBW-NEXT: vpaddb %zmm1, %zmm0, %zmm1
+; AVX512CDBW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512CDBW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512CDBW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -691,11 +628,9 @@
 ;
 ; AVX512BW-LABEL: testv64i8u:
 ; AVX512BW: # %bb.0:
-; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; AVX512BW-NEXT: vpsubb %zmm0, %zmm1, %zmm1
-; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; AVX512BW-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; AVX512BW-NEXT: vpaddb %zmm1, %zmm0, %zmm1
+; AVX512BW-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
 ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm2
 ; AVX512BW-NEXT: vmovdqa64 {{.*#+}} zmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
@@ -708,37 +643,32 @@
 ;
 ; AVX512VPOPCNTDQ-LABEL: testv64i8u:
 ; AVX512VPOPCNTDQ: # %bb.0:
-; AVX512VPOPCNTDQ-NEXT: vpxor %xmm2, %xmm2, %xmm2
-; AVX512VPOPCNTDQ-NEXT: vpsubb %ymm0, %ymm2, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm3, %ymm3, %ymm3
-; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm4 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm0, %ymm5
-; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm6 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm5, %ymm6, %ymm5
+; AVX512VPOPCNTDQ-NEXT: vpcmpeqd %ymm2, %ymm2, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm3
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm3, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm3 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15]
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm4
+; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm5 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4,0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4]
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm4, %ymm5, %ymm4
 ; AVX512VPOPCNTDQ-NEXT: vpsrlw $4, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm6, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm5, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NEXT: vpsubb %ymm1, %ymm2, %ymm2
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm2, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm3, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm1, %ymm2
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm2, %ymm6, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm5, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm4, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm1, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpandn %ymm2, %ymm1, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm1, %ymm2
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm2, %ymm5, %ymm2
 ; AVX512VPOPCNTDQ-NEXT: vpsrlw $4, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpand %ymm4, %ymm1, %ymm1
-; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm1, %ymm6, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpand %ymm3, %ymm1, %ymm1
+; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm1, %ymm5, %ymm1
 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm1, %ymm1
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
 ; BITALG-LABEL: testv64i8u:
 ; BITALG: # %bb.0:
-; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
-; BITALG-NEXT: vpsubb %zmm0, %zmm1, %zmm1
-; BITALG-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpternlogd $255, %zmm1, %zmm1, %zmm1
-; BITALG-NEXT: vpaddb %zmm1, %zmm0, %zmm0
+; BITALG-NEXT: vpaddb %zmm1, %zmm0, %zmm1
+; BITALG-NEXT: vpandnq %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0
 ; BITALG-NEXT: retq
 %out = call <64 x i8> @llvm.cttz.v64i8(<64 x i8> %in, i1 -1)
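Editorial note (not part of the patch): every updated CHECK sequence above has the same shape per element -- materialize all-ones (vpcmpeqd or vpternlogd $255), add it to compute x - 1, fold the NOT and AND into a single vpandn/vpandnq to form ~x & (x - 1), then popcount the result. A minimal scalar sanity check of that identity, written as illustrative C++20 (the exhaustive 16-bit loop is our own choice, not anything the tests run):

// Illustrative only: checks cttz(x) == popcount(~x & (x - 1)) for every
// 16-bit value, including x == 0, where both sides yield the full bit width.
#include <bit>
#include <cassert>
#include <cstdint>

int main() {
  for (uint32_t v = 0; v <= 0xFFFF; ++v) {
    uint16_t x = static_cast<uint16_t>(v);
    // x - 1 is what adding the all-ones vector computes; the combined
    // NOT-and-AND is what a single vpandn/vpandnq performs.
    uint16_t mask = static_cast<uint16_t>(~x & (x - 1u));
    assert(std::popcount(mask) == std::countr_zero(x));
  }
  return 0;
}

This also shows why the non-undef cttz path needs no zero guard: for x == 0 the mask is all ones, so the popcount naturally returns the element width.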