diff --git a/llvm/lib/Target/X86/X86ISelLowering.cpp b/llvm/lib/Target/X86/X86ISelLowering.cpp
--- a/llvm/lib/Target/X86/X86ISelLowering.cpp
+++ b/llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -24582,7 +24582,8 @@
 /// incremented or decremented. If incrementing or decrementing would result in
 /// unsigned overflow or underflow or this is not a simple vector constant,
 /// return an empty value.
-static SDValue incDecVectorConstant(SDValue V, SelectionDAG &DAG, bool IsInc) {
+static SDValue incDecVectorConstant(SDValue V, SelectionDAG &DAG, bool IsInc,
+                                    bool NSW) {
   auto *BV = dyn_cast<BuildVectorSDNode>(V.getNode());
   if (!BV)
     return SDValue();
@@ -24601,6 +24602,9 @@
     const APInt &EltC = Elt->getAPIntValue();
     if ((IsInc && EltC.isMaxValue()) || (!IsInc && EltC.isZero()))
       return SDValue();
+    if (NSW && ((IsInc && EltC.isMaxSignedValue()) ||
+                (!IsInc && EltC.isMinSignedValue())))
+      return SDValue();
     NewVecC.push_back(DAG.getConstant(EltC + (IsInc ? 1 : -1), DL, EltVT));
   }
@@ -24634,7 +24638,8 @@
     // Only do this pre-AVX since vpcmp* is no longer destructive.
     if (Subtarget.hasAVX())
       return SDValue();
-    SDValue ULEOp1 = incDecVectorConstant(Op1, DAG, /*IsInc*/false);
+    SDValue ULEOp1 =
+        incDecVectorConstant(Op1, DAG, /*IsInc*/ false, /*NSW*/ false);
     if (!ULEOp1)
       return SDValue();
     Op1 = ULEOp1;
@@ -24645,7 +24650,8 @@
     // This is beneficial because materializing a constant 0 for the PCMPEQ is
     // probably cheaper than XOR+PCMPGT using 2 different vector constants:
     // cmpgt (xor X, SignMaskC) CmpC --> cmpeq (usubsat (CmpC+1), X), 0
-    SDValue UGEOp1 = incDecVectorConstant(Op1, DAG, /*IsInc*/true);
+    SDValue UGEOp1 =
+        incDecVectorConstant(Op1, DAG, /*IsInc*/ true, /*NSW*/ false);
     if (!UGEOp1)
       return SDValue();
     Op1 = Op0;
@@ -24938,14 +24944,16 @@
   // condition to avoid an invert.
   if (Cond == ISD::SETUGT) {
     // X > C --> X >= (C+1) --> X == umax(X, C+1)
-    if (SDValue UGTOp1 = incDecVectorConstant(Op1, DAG, /*IsInc*/true)) {
+    if (SDValue UGTOp1 =
+            incDecVectorConstant(Op1, DAG, /*IsInc*/ true, /*NSW*/ false)) {
       Op1 = UGTOp1;
       Cond = ISD::SETUGE;
     }
   }
   if (Cond == ISD::SETULT) {
     // X < C --> X <= (C-1) --> X == umin(X, C-1)
-    if (SDValue ULTOp1 = incDecVectorConstant(Op1, DAG, /*IsInc*/false)) {
+    if (SDValue ULTOp1 =
+            incDecVectorConstant(Op1, DAG, /*IsInc*/ false, /*NSW*/ false)) {
       Op1 = ULTOp1;
       Cond = ISD::SETULE;
     }
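A quick illustration (not from the patch; the function name is hypothetical) of why the NSW variant must refuse signed-boundary constants: folds such as `x >=s C --> x >s (C - 1)` are only sound when adjusting `C` cannot wrap in the signed domain.

; For i8, decrementing the splat -128 would wrap to +127, turning this
; always-true compare into an always-false one, so incDecVectorConstant
; with NSW=true returns an empty SDValue here and the fold is skipped.
define <4 x i1> @sge_smin(<4 x i8> %x) {
  %c = icmp sge <4 x i8> %x, <i8 -128, i8 -128, i8 -128, i8 -128>
  ret <4 x i1> %c
}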
@@ -53265,6 +53273,25 @@
   return SDValue();
 }
 
+/// If we have AVX512, but not BWI and this is a vXi16/vXi8 setcc, just
+/// pre-promote its result type since vXi1 vectors don't get promoted
+/// during type legalization.
+/// NOTE: The element count check is to ignore operand types that need to
+/// go through type promotion to a 128-bit vector.
+static SDValue truncateAVX512SetCCNoBWI(EVT VT, EVT OpVT, SDValue LHS,
+                                        SDValue RHS, ISD::CondCode CC, SDLoc DL,
+                                        SelectionDAG &DAG,
+                                        const X86Subtarget &Subtarget) {
+  if (Subtarget.hasAVX512() && !Subtarget.hasBWI() && VT.isVector() &&
+      VT.getVectorElementType() == MVT::i1 &&
+      (OpVT.getVectorElementType() == MVT::i8 ||
+       OpVT.getVectorElementType() == MVT::i16)) {
+    SDValue Setcc = DAG.getSetCC(DL, OpVT, LHS, RHS, CC);
+    return DAG.getNode(ISD::TRUNCATE, DL, VT, Setcc);
+  }
+  return SDValue();
+}
+
 static SDValue combineSetCC(SDNode *N, SelectionDAG &DAG,
                             TargetLowering::DAGCombinerInfo &DCI,
                             const X86Subtarget &Subtarget) {
@@ -53377,19 +53404,81 @@
     }
   }
 
-  // If we have AVX512, but not BWI and this is a vXi16/vXi8 setcc, just
-  // pre-promote its result type since vXi1 vectors don't get promoted
-  // during type legalization.
-  // NOTE: The element count check is to ignore operand types that need to
-  // go through type promotion to a 128-bit vector.
-  if (Subtarget.hasAVX512() && !Subtarget.hasBWI() && VT.isVector() &&
-      VT.getVectorElementType() == MVT::i1 &&
-      (OpVT.getVectorElementType() == MVT::i8 ||
-       OpVT.getVectorElementType() == MVT::i16)) {
-    SDValue Setcc = DAG.getSetCC(DL, OpVT, LHS, RHS, CC);
-    return DAG.getNode(ISD::TRUNCATE, DL, VT, Setcc);
+  // Try to make an unsigned vector comparison signed. On pre-AVX512 targets
+  // there are only signed comparisons (`PCMPGT`), and on AVX512 it's often
+  // better to use `PCMPGT` if the result is meant to stay in a vector (and if
+  // it's going to a mask, there are signed AVX512 comparisons).
+  if (VT.isVector() && OpVT.isVector()) {
+    if (OpVT.getVectorElementType().isScalarInteger()) {
+      bool CanMakeSigned = false;
+      if (ISD::isUnsignedIntSetCC(CC)) {
+        KnownBits CmpKnown = KnownBits::commonBits(DAG.computeKnownBits(LHS),
+                                                   DAG.computeKnownBits(RHS));
+        // If we know LHS/RHS share the same sign bit at each element we can
+        // make this signed.
+        // NOTE: `computeKnownBits` on a vector type aggregates common bits
+        // across all lanes. So a pattern where the sign varies from lane to
+        // lane, but at each lane Sign(LHS) is known to equal Sign(RHS), will be
+        // missed. We could get around this by demanding each lane
+        // independently, but this isn't the most important optimization and
+        // that may eat into compile time.
+        CanMakeSigned =
+            CmpKnown.Zero.isSignBitSet() || CmpKnown.One.isSignBitSet();
+      }
+      if (CanMakeSigned || ISD::isSignedIntSetCC(CC)) {
+        SDValue LHSOut = LHS;
+        SDValue RHSOut = RHS;
+        ISD::CondCode NewCC = CC;
+        switch (CC) {
+        case ISD::SETGE:
+        case ISD::SETUGE:
+          if (SDValue NewLHS = incDecVectorConstant(LHS, DAG, /*IsInc*/ true,
+                                                    /*NSW*/ true))
+            LHSOut = NewLHS;
+          else if (SDValue NewRHS = incDecVectorConstant(
+                       RHS, DAG, /*IsInc*/ false, /*NSW*/ true))
+            RHSOut = NewRHS;
+          else
+            break;
+
+          [[fallthrough]];
+        case ISD::SETUGT:
+          NewCC = ISD::SETGT;
+          break;
+
+        case ISD::SETLE:
+        case ISD::SETULE:
+          if (SDValue NewLHS = incDecVectorConstant(LHS, DAG, /*IsInc*/ false,
+                                                    /*NSW*/ true))
+            LHSOut = NewLHS;
+          else if (SDValue NewRHS = incDecVectorConstant(
+                       RHS, DAG, /*IsInc*/ true, /*NSW*/ true))
+            RHSOut = NewRHS;
+          else
+            break;
+
+          [[fallthrough]];
+        case ISD::SETULT:
+          // Will be swapped to SETGT in LowerVSETCC*.
+          NewCC = ISD::SETLT;
+          break;
+        default:
+          break;
+        }
+        if (NewCC != CC) {
+          if (SDValue R = truncateAVX512SetCCNoBWI(VT, OpVT, LHSOut, RHSOut,
+                                                   NewCC, DL, DAG, Subtarget))
+            return R;
+          return DAG.getSetCC(DL, VT, LHSOut, RHSOut, NewCC);
+        }
+      }
+    }
   }
 
+  if (SDValue R =
+          truncateAVX512SetCCNoBWI(VT, OpVT, LHS, RHS, CC, DL, DAG, Subtarget))
+    return R;
+
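The known-bits gate above is what fires on the popcount tests below: a `ctpop` result is provably non-negative on both sides of the compare, so the unsigned predicate can be rewritten to its signed form and lowered as `PCMPGT`. A condensed sketch (the function name is illustrative; the real coverage is the `vector-popcnt-128-ult-ugt.ll` diffs that follow):

; vpopcntb results lie in [0, 8], so the sign bit of both operands is known
; zero and "icmp ugt" can be emitted directly as a signed vpcmpgtb against
; the splat-5 constant, with no k-register round trip on BITALG targets.
define <16 x i8> @ugt_5(<16 x i8> %x) {
  %p = call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %x)
  %c = icmp ugt <16 x i8> %p, <i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5, i8 5>
  %r = sext <16 x i1> %c to <16 x i8>
  ret <16 x i8> %r
}
declare <16 x i8> @llvm.ctpop.v16i8(<16 x i8>)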
  // For an SSE1-only target, lower a comparison of v4f32 to X86ISD::CMPP early
  // to avoid scalarization via legalization because v4i32 is not a legal type.
  if (Subtarget.hasSSE1() && !Subtarget.hasSSE2() && VT == MVT::v4i32 &&
diff --git a/llvm/test/CodeGen/X86/urem-seteq-illegal-types.ll b/llvm/test/CodeGen/X86/urem-seteq-illegal-types.ll
--- a/llvm/test/CodeGen/X86/urem-seteq-illegal-types.ll
+++ b/llvm/test/CodeGen/X86/urem-seteq-illegal-types.ll
@@ -246,7 +246,7 @@
 ; AVX512VL-NEXT:    vpand %xmm2, %xmm0, %xmm0
 ; AVX512VL-NEXT:    vpsrlvd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
 ; AVX512VL-NEXT:    vpternlogd $200, %xmm1, %xmm2, %xmm0
-; AVX512VL-NEXT:    vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0
+; AVX512VL-NEXT:    vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0
 ; AVX512VL-NEXT:    kshiftrw $1, %k0, %k1
 ; AVX512VL-NEXT:    kmovw %k1, %edx
 ; AVX512VL-NEXT:    kshiftrw $2, %k0, %k1
diff --git a/llvm/test/CodeGen/X86/vector-compare-simplify.ll b/llvm/test/CodeGen/X86/vector-compare-simplify.ll
--- a/llvm/test/CodeGen/X86/vector-compare-simplify.ll
+++ b/llvm/test/CodeGen/X86/vector-compare-simplify.ll
@@ -36,9 +36,7 @@
 define <4 x i32> @sle_min(<4 x i32> %x) {
 ; CHECK-LABEL: sle_min:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    pcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
-; CHECK-NEXT:    pcmpeqd %xmm1, %xmm1
-; CHECK-NEXT:    pxor %xmm1, %xmm0
+; CHECK-NEXT:    pcmpeqd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
 ; CHECK-NEXT:    retq
   %cmp = icmp sle <4 x i32> %x, <i32 -2147483648, i32 -2147483648, i32 -2147483648, i32 -2147483648>
   %r = sext <4 x i1> %cmp to <4 x i32>
@@ -80,10 +78,7 @@
 define <4 x i32> @sge_max(<4 x i32> %x) {
 ; CHECK-LABEL: sge_max:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    movdqa {{.*#+}} xmm1 = [2147483647,2147483647,2147483647,2147483647]
-; CHECK-NEXT:    pcmpgtd %xmm0, %xmm1
-; CHECK-NEXT:    pcmpeqd %xmm0, %xmm0
-; CHECK-NEXT:    pxor %xmm1, %xmm0
+; CHECK-NEXT:    pcmpeqd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
 ; CHECK-NEXT:    retq
   %cmp = icmp sge <4 x i32> %x, <i32 2147483647, i32 2147483647, i32 2147483647, i32 2147483647>
   %r = sext <4 x i1> %cmp to <4 x i32>
@@ -197,10 +192,7 @@
 define <4 x i32> @sge_min_plus1(<4 x i32> %x) {
 ; CHECK-LABEL: sge_min_plus1:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    movdqa {{.*#+}} xmm1 = [2147483649,2147483649,2147483649,2147483649]
-; CHECK-NEXT:    pcmpgtd %xmm0, %xmm1
-; CHECK-NEXT:    pcmpeqd %xmm0, %xmm0
-; CHECK-NEXT:    pxor %xmm1, %xmm0
+; CHECK-NEXT:    pcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
 ; CHECK-NEXT:    retq
   %cmp = icmp sge <4 x i32> %x, <i32 -2147483647, i32 -2147483647, i32 -2147483647, i32 -2147483647>
   %r = sext <4 x i1> %cmp to <4 x i32>
@@ -220,9 +212,9 @@
 define <4 x i32> @sle_max_minus1(<4 x i32> %x) {
 ; CHECK-LABEL: sle_max_minus1:
 ; CHECK:       # %bb.0:
-; CHECK-NEXT:    pcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0
-; CHECK-NEXT:    pcmpeqd %xmm1, %xmm1
-; CHECK-NEXT:    pxor %xmm1, %xmm0
+; CHECK-NEXT:    movdqa {{.*#+}} xmm1 = [2147483647,2147483647,2147483647,2147483647]
+; CHECK-NEXT:    pcmpgtd %xmm0, %xmm1
+; CHECK-NEXT:    movdqa %xmm1, %xmm0
 ; CHECK-NEXT:    retq
   %cmp = icmp sle <4 x i32> %x, <i32 2147483646, i32 2147483646, i32 2147483646, i32 2147483646>
   %r = sext <4 x i1> %cmp to <4 x i32>
diff --git a/llvm/test/CodeGen/X86/vector-popcnt-128-ult-ugt.ll b/llvm/test/CodeGen/X86/vector-popcnt-128-ult-ugt.ll
--- a/llvm/test/CodeGen/X86/vector-popcnt-128-ult-ugt.ll
+++ b/llvm/test/CodeGen/X86/vector-popcnt-128-ult-ugt.ll
@@ -76,8 +76,7 @@
 ; BITALG-LABEL: ugt_1_v16i8:
 ; BITALG:       # %bb.0:
 ; BITALG-NEXT:    vpopcntb %xmm0, %xmm0
-; BITALG-NEXT:    vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0
-; BITALG-NEXT:    vpmovm2b %k0, %xmm0
+; BITALG-NEXT:    vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0
 ; BITALG-NEXT:    retq
   %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0)
   %3 = icmp ugt <16 x i8> %2, <i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1, i8 1>
@@ -116,8 +115,8 @@
 ; BITALG-LABEL: ult_2_v16i8:
 ; BITALG:       # %bb.0:
 ; BITALG-NEXT:    vpopcntb %xmm0, %xmm0
-; BITALG-NEXT:    vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0
-; BITALG-NEXT:    vpmovm2b %k0, %xmm0
+;
BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] +; BITALG-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ult <16 x i8> %2, @@ -166,36 +165,34 @@ ; ; SSSE3-LABEL: ugt_2_v16i8: ; SSSE3: # %bb.0: -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 ; SSSE3-NEXT: psrlw $4, %xmm0 -; SSSE3-NEXT: pand %xmm1, %xmm0 -; SSSE3-NEXT: pshufb %xmm0, %xmm3 -; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; SSSE3-NEXT: pmaxub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ugt_2_v16i8: ; SSE41: # %bb.0: -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 +; SSE41-NEXT: pshufb %xmm3, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm1, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm3 -; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; SSE41-NEXT: pmaxub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ugt_2_v16i8: @@ -208,8 +205,7 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ugt_2_v16i8: @@ -222,8 +218,7 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_2_v16i8: @@ -255,8 +250,7 @@ ; BITALG-LABEL: ugt_2_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), 
%xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ugt <16 x i8> %2, @@ -317,9 +311,8 @@ ; SSSE3-NEXT: pand %xmm1, %xmm0 ; SSSE3-NEXT: pshufb %xmm0, %xmm3 ; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] -; SSSE3-NEXT: pminub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; SSSE3-NEXT: pcmpgtb %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ult_3_v16i8: @@ -334,9 +327,8 @@ ; SSE41-NEXT: pand %xmm1, %xmm0 ; SSE41-NEXT: pshufb %xmm0, %xmm3 ; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] -; SSE41-NEXT: pminub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; SSE41-NEXT: pcmpgtb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ult_3_v16i8: @@ -349,8 +341,8 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ult_3_v16i8: @@ -363,8 +355,8 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX2-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_3_v16i8: @@ -399,8 +391,8 @@ ; BITALG-LABEL: ult_3_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; BITALG-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ult <16 x i8> %2, @@ -449,36 +441,34 @@ ; ; SSSE3-LABEL: ugt_3_v16i8: ; SSSE3: # %bb.0: -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 ; SSSE3-NEXT: psrlw $4, %xmm0 -; SSSE3-NEXT: pand %xmm1, %xmm0 -; SSSE3-NEXT: pshufb %xmm0, %xmm3 -; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; SSSE3-NEXT: pmaxub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ugt_3_v16i8: ; SSE41: # %bb.0: -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm2 
-; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 +; SSE41-NEXT: pshufb %xmm3, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm1, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm3 -; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; SSE41-NEXT: pmaxub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ugt_3_v16i8: @@ -491,8 +481,7 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ugt_3_v16i8: @@ -505,8 +494,7 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_3_v16i8: @@ -538,8 +526,7 @@ ; BITALG-LABEL: ugt_3_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ugt <16 x i8> %2, @@ -600,9 +587,8 @@ ; SSSE3-NEXT: pand %xmm1, %xmm0 ; SSSE3-NEXT: pshufb %xmm0, %xmm3 ; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; SSSE3-NEXT: pminub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; SSSE3-NEXT: pcmpgtb %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ult_4_v16i8: @@ -617,9 +603,8 @@ ; SSE41-NEXT: pand %xmm1, %xmm0 ; SSE41-NEXT: pshufb %xmm0, %xmm3 ; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; SSE41-NEXT: pminub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; SSE41-NEXT: pcmpgtb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ult_4_v16i8: @@ -632,8 +617,8 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ult_4_v16i8: @@ -646,8 +631,8 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpminub 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX2-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_4_v16i8: @@ -682,8 +667,8 @@ ; BITALG-LABEL: ult_4_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; BITALG-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ult <16 x i8> %2, @@ -732,36 +717,34 @@ ; ; SSSE3-LABEL: ugt_4_v16i8: ; SSSE3: # %bb.0: -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 ; SSSE3-NEXT: psrlw $4, %xmm0 -; SSSE3-NEXT: pand %xmm1, %xmm0 -; SSSE3-NEXT: pshufb %xmm0, %xmm3 -; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; SSSE3-NEXT: pmaxub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ugt_4_v16i8: ; SSE41: # %bb.0: -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 +; SSE41-NEXT: pshufb %xmm3, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm1, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm3 -; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; SSE41-NEXT: pmaxub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ugt_4_v16i8: @@ -774,8 +757,7 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ugt_4_v16i8: @@ -788,8 +770,7 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpmaxub 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_4_v16i8: @@ -821,8 +802,7 @@ ; BITALG-LABEL: ugt_4_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ugt <16 x i8> %2, @@ -883,9 +863,8 @@ ; SSSE3-NEXT: pand %xmm1, %xmm0 ; SSSE3-NEXT: pshufb %xmm0, %xmm3 ; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; SSSE3-NEXT: pminub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; SSSE3-NEXT: pcmpgtb %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ult_5_v16i8: @@ -900,9 +879,8 @@ ; SSE41-NEXT: pand %xmm1, %xmm0 ; SSE41-NEXT: pshufb %xmm0, %xmm3 ; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; SSE41-NEXT: pminub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; SSE41-NEXT: pcmpgtb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ult_5_v16i8: @@ -915,8 +893,8 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ult_5_v16i8: @@ -929,8 +907,8 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX2-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_5_v16i8: @@ -965,8 +943,8 @@ ; BITALG-LABEL: ult_5_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; BITALG-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ult <16 x i8> %2, @@ -1015,36 +993,34 @@ ; ; SSSE3-LABEL: ugt_5_v16i8: ; SSSE3: # %bb.0: -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 ; SSSE3-NEXT: psrlw $4, %xmm0 -; SSSE3-NEXT: pand %xmm1, %xmm0 -; SSSE3-NEXT: pshufb %xmm0, %xmm3 -; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = 
[6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; SSSE3-NEXT: pmaxub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ugt_5_v16i8: ; SSE41: # %bb.0: -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 +; SSE41-NEXT: pshufb %xmm3, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm1, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm3 -; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; SSE41-NEXT: pmaxub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ugt_5_v16i8: @@ -1057,8 +1033,7 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ugt_5_v16i8: @@ -1071,8 +1046,7 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_5_v16i8: @@ -1104,8 +1078,7 @@ ; BITALG-LABEL: ugt_5_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ugt <16 x i8> %2, @@ -1166,9 +1139,8 @@ ; SSSE3-NEXT: pand %xmm1, %xmm0 ; SSSE3-NEXT: pshufb %xmm0, %xmm3 ; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; SSSE3-NEXT: pminub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; SSSE3-NEXT: pcmpgtb %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ult_6_v16i8: @@ -1183,9 +1155,8 @@ ; SSE41-NEXT: pand %xmm1, %xmm0 ; SSE41-NEXT: pshufb %xmm0, %xmm3 ; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; SSE41-NEXT: pminub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; SSE41-NEXT: pcmpgtb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ult_6_v16i8: @@ -1198,8 +1169,8 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, 
%xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ult_6_v16i8: @@ -1212,8 +1183,8 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX2-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_6_v16i8: @@ -1248,8 +1219,8 @@ ; BITALG-LABEL: ult_6_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; BITALG-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ult <16 x i8> %2, @@ -1298,36 +1269,34 @@ ; ; SSSE3-LABEL: ugt_6_v16i8: ; SSSE3: # %bb.0: -; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSSE3-NEXT: movdqa %xmm0, %xmm2 -; SSSE3-NEXT: pand %xmm1, %xmm2 -; SSSE3-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSSE3-NEXT: movdqa %xmm3, %xmm4 -; SSSE3-NEXT: pshufb %xmm2, %xmm4 +; SSSE3-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSSE3-NEXT: movdqa %xmm0, %xmm3 +; SSSE3-NEXT: pand %xmm2, %xmm3 +; SSSE3-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSSE3-NEXT: movdqa %xmm1, %xmm4 +; SSSE3-NEXT: pshufb %xmm3, %xmm4 ; SSSE3-NEXT: psrlw $4, %xmm0 -; SSSE3-NEXT: pand %xmm1, %xmm0 -; SSSE3-NEXT: pshufb %xmm0, %xmm3 -; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] -; SSSE3-NEXT: pmaxub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: pand %xmm2, %xmm0 +; SSSE3-NEXT: pshufb %xmm0, %xmm1 +; SSSE3-NEXT: paddb %xmm4, %xmm1 +; SSSE3-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSSE3-NEXT: movdqa %xmm1, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ugt_6_v16i8: ; SSE41: # %bb.0: -; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] -; SSE41-NEXT: movdqa %xmm0, %xmm2 -; SSE41-NEXT: pand %xmm1, %xmm2 -; SSE41-NEXT: movdqa {{.*#+}} xmm3 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] -; SSE41-NEXT: movdqa %xmm3, %xmm4 -; SSE41-NEXT: pshufb %xmm2, %xmm4 +; SSE41-NEXT: movdqa {{.*#+}} xmm2 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; SSE41-NEXT: movdqa %xmm0, %xmm3 +; SSE41-NEXT: pand %xmm2, %xmm3 +; SSE41-NEXT: movdqa {{.*#+}} xmm1 = [0,1,1,2,1,2,2,3,1,2,2,3,2,3,3,4] +; SSE41-NEXT: movdqa %xmm1, %xmm4 +; SSE41-NEXT: pshufb %xmm3, %xmm4 ; SSE41-NEXT: psrlw $4, %xmm0 -; SSE41-NEXT: pand %xmm1, %xmm0 -; SSE41-NEXT: pshufb %xmm0, %xmm3 -; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] -; SSE41-NEXT: pmaxub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: pand %xmm2, %xmm0 +; SSE41-NEXT: pshufb %xmm0, %xmm1 +; SSE41-NEXT: paddb %xmm4, %xmm1 +; SSE41-NEXT: pcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm1 +; SSE41-NEXT: movdqa %xmm1, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ugt_6_v16i8: @@ -1340,8 +1309,7 @@ ; AVX1-NEXT: vpand 
%xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ugt_6_v16i8: @@ -1354,8 +1322,7 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_6_v16i8: @@ -1387,8 +1354,7 @@ ; BITALG-LABEL: ugt_6_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ugt <16 x i8> %2, @@ -1449,9 +1415,8 @@ ; SSSE3-NEXT: pand %xmm1, %xmm0 ; SSSE3-NEXT: pshufb %xmm0, %xmm3 ; SSSE3-NEXT: paddb %xmm4, %xmm3 -; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; SSSE3-NEXT: pminub %xmm3, %xmm0 -; SSSE3-NEXT: pcmpeqb %xmm3, %xmm0 +; SSSE3-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; SSSE3-NEXT: pcmpgtb %xmm3, %xmm0 ; SSSE3-NEXT: retq ; ; SSE41-LABEL: ult_7_v16i8: @@ -1466,9 +1431,8 @@ ; SSE41-NEXT: pand %xmm1, %xmm0 ; SSE41-NEXT: pshufb %xmm0, %xmm3 ; SSE41-NEXT: paddb %xmm4, %xmm3 -; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; SSE41-NEXT: pminub %xmm3, %xmm0 -; SSE41-NEXT: pcmpeqb %xmm3, %xmm0 +; SSE41-NEXT: movdqa {{.*#+}} xmm0 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; SSE41-NEXT: pcmpgtb %xmm3, %xmm0 ; SSE41-NEXT: retq ; ; AVX1-LABEL: ult_7_v16i8: @@ -1481,8 +1445,8 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX1-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX1-NEXT: retq ; ; AVX2-LABEL: ult_7_v16i8: @@ -1495,8 +1459,8 @@ ; AVX2-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX2-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX2-NEXT: vpaddb %xmm2, %xmm0, %xmm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm1 -; AVX2-NEXT: vpcmpeqb %xmm1, %xmm0, %xmm0 +; AVX2-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX2-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_7_v16i8: @@ -1531,8 +1495,8 @@ ; BITALG-LABEL: ult_7_v16i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; BITALG-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <16 x i8> @llvm.ctpop.v16i8(<16 x i8> %0) %3 = icmp ult <16 x i8> %2, @@ -1605,8 +1569,7 @@ ; BITALG-LABEL: ugt_1_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> 
@llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -1645,8 +1608,8 @@ ; BITALG-LABEL: ult_2_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -1800,8 +1763,7 @@ ; BITALG-LABEL: ugt_2_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -1964,8 +1926,8 @@ ; BITALG-LABEL: ult_3_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -2119,8 +2081,7 @@ ; BITALG-LABEL: ugt_3_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -2283,8 +2244,8 @@ ; BITALG-LABEL: ult_4_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -2438,8 +2399,7 @@ ; BITALG-LABEL: ugt_4_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -2602,8 +2562,8 @@ ; BITALG-LABEL: ult_5_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -2757,8 +2717,7 @@ ; BITALG-LABEL: ugt_5_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -2921,8 +2880,8 @@ ; BITALG-LABEL: ult_6_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = 
[6,6,6,6,6,6,6,6] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -3076,8 +3035,7 @@ ; BITALG-LABEL: ugt_6_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -3240,8 +3198,8 @@ ; BITALG-LABEL: ult_7_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -3395,8 +3353,7 @@ ; BITALG-LABEL: ugt_7_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -3559,8 +3516,8 @@ ; BITALG-LABEL: ult_8_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [8,8,8,8,8,8,8,8] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -3714,8 +3671,7 @@ ; BITALG-LABEL: ugt_8_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -3878,8 +3834,8 @@ ; BITALG-LABEL: ult_9_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [9,9,9,9,9,9,9,9] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -4033,8 +3989,7 @@ ; BITALG-LABEL: ugt_9_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -4197,8 +4152,8 @@ ; BITALG-LABEL: ult_10_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [10,10,10,10,10,10,10,10] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -4352,8 +4307,7 @@ ; BITALG-LABEL: ugt_10_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -4516,8 +4470,8 @@ ; BITALG-LABEL: ult_11_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [11,11,11,11,11,11,11,11] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -4671,8 +4625,7 @@ ; BITALG-LABEL: ugt_11_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -4835,8 +4788,8 @@ ; BITALG-LABEL: ult_12_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [12,12,12,12,12,12,12,12] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -4990,8 +4943,7 @@ ; BITALG-LABEL: ugt_12_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -5154,8 +5106,8 @@ ; BITALG-LABEL: ult_13_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [13,13,13,13,13,13,13,13] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -5309,8 +5261,7 @@ ; BITALG-LABEL: ugt_13_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -5473,8 +5424,8 @@ ; BITALG-LABEL: ult_14_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [14,14,14,14,14,14,14,14] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -5628,8 +5579,7 @@ ; BITALG-LABEL: ugt_14_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ugt <8 x i16> %2, @@ -5792,8 +5742,8 @@ ; 
BITALG-LABEL: ult_15_v8i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %xmm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %xmm0 +; BITALG-NEXT: vmovdqa {{.*#+}} xmm1 = [15,15,15,15,15,15,15,15] +; BITALG-NEXT: vpcmpgtw %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <8 x i16> @llvm.ctpop.v8i16(<8 x i16> %0) %3 = icmp ult <8 x i16> %2, @@ -5845,9 +5795,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_1_v4i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [1,1,1,1] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_1_v4i32: @@ -5917,9 +5866,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_2_v4i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_2_v4i32: @@ -6093,9 +6041,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_2_v4i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_2_v4i32: @@ -6122,9 +6069,8 @@ ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [2,2,2,2] +; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0) %3 = icmp ugt <4 x i32> %2, @@ -6284,9 +6230,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_3_v4i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_3_v4i32: @@ -6313,9 +6258,8 @@ ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3] +; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq 
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 3, i32 3, i32 3, i32 3>
@@ -6471,9 +6415,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_3_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_3_v4i32:
@@ -6500,9 +6443,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [3,3,3,3]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 3, i32 3, i32 3, i32 3>
@@ -6662,9 +6604,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_4_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_4_v4i32:
@@ -6691,9 +6632,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 4, i32 4, i32 4, i32 4>
@@ -6849,9 +6789,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_4_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_4_v4i32:
@@ -6878,9 +6817,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [4,4,4,4]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 4, i32 4, i32 4, i32 4>
@@ -7040,9 +6978,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_5_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_5_v4i32:
@@ -7069,9 +7006,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 5, i32 5, i32 5, i32 5>
@@ -7227,9 +7163,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_5_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_5_v4i32:
@@ -7256,9 +7191,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [5,5,5,5]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 5, i32 5, i32 5, i32 5>
@@ -7418,9 +7352,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_6_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_6_v4i32:
@@ -7447,9 +7380,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 6, i32 6, i32 6, i32 6>
@@ -7605,9 +7537,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_6_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_6_v4i32:
@@ -7634,9 +7565,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [6,6,6,6]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 6, i32 6, i32 6, i32 6>
@@ -7796,9 +7726,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_7_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_7_v4i32:
@@ -7825,9 +7754,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 7, i32 7, i32 7, i32 7>
@@ -7983,9 +7911,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_7_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_7_v4i32:
@@ -8012,9 +7939,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [7,7,7,7]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 7, i32 7, i32 7, i32 7>
@@ -8174,9 +8100,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_8_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_8_v4i32:
@@ -8203,9 +8128,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 8, i32 8, i32 8, i32 8>
@@ -8361,9 +8285,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_8_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_8_v4i32:
@@ -8390,9 +8313,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [8,8,8,8]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 8, i32 8, i32 8, i32 8>
@@ -8552,9 +8474,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_9_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_9_v4i32:
@@ -8581,9 +8502,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 9, i32 9, i32 9, i32 9>
@@ -8739,9 +8659,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_9_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_9_v4i32:
@@ -8768,9 +8687,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [9,9,9,9]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 9, i32 9, i32 9, i32 9>
@@ -8930,9 +8848,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_10_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_10_v4i32:
@@ -8959,9 +8876,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 10, i32 10, i32 10, i32 10>
@@ -9117,9 +9033,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_10_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_10_v4i32:
@@ -9146,9 +9061,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [10,10,10,10]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 10, i32 10, i32 10, i32 10>
@@ -9308,9 +9222,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_11_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_11_v4i32:
@@ -9337,9 +9250,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 11, i32 11, i32 11, i32 11>
@@ -9495,9 +9407,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_11_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_11_v4i32:
@@ -9524,9 +9435,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [11,11,11,11]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 11, i32 11, i32 11, i32 11>
@@ -9686,9 +9596,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_12_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_12_v4i32:
@@ -9715,9 +9624,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 12, i32 12, i32 12, i32 12>
@@ -9873,9 +9781,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_12_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_12_v4i32:
@@ -9902,9 +9809,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [12,12,12,12]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 12, i32 12, i32 12, i32 12>
@@ -10064,9 +9970,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_13_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_13_v4i32:
@@ -10093,9 +9998,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 13, i32 13, i32 13, i32 13>
@@ -10251,9 +10155,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_13_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_13_v4i32:
@@ -10280,9 +10183,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [13,13,13,13]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 13, i32 13, i32 13, i32 13>
@@ -10442,9 +10344,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_14_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_14_v4i32:
@@ -10471,9 +10372,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 14, i32 14, i32 14, i32 14>
@@ -10629,9 +10529,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_14_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_14_v4i32:
@@ -10658,9 +10557,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [14,14,14,14]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 14, i32 14, i32 14, i32 14>
@@ -10820,9 +10718,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_15_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_15_v4i32:
@@ -10849,9 +10746,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 15, i32 15, i32 15, i32 15>
@@ -11007,9 +10903,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_15_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_15_v4i32:
@@ -11036,9 +10931,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [15,15,15,15]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 15, i32 15, i32 15, i32 15>
@@ -11198,9 +11092,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_16_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_16_v4i32:
@@ -11227,9 +11120,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 16, i32 16, i32 16, i32 16>
@@ -11385,9 +11277,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_16_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_16_v4i32:
@@ -11414,9 +11305,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [16,16,16,16]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 16, i32 16, i32 16, i32 16>
@@ -11576,9 +11466,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_17_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_17_v4i32:
@@ -11605,9 +11494,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 17, i32 17, i32 17, i32 17>
@@ -11763,9 +11651,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_17_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_17_v4i32:
@@ -11792,9 +11679,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [17,17,17,17]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 17, i32 17, i32 17, i32 17>
@@ -11954,9 +11840,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_18_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_18_v4i32:
@@ -11983,9 +11868,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 18, i32 18, i32 18, i32 18>
@@ -12141,9 +12025,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_18_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_18_v4i32:
@@ -12170,9 +12053,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [18,18,18,18]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 18, i32 18, i32 18, i32 18>
@@ -12332,9 +12214,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_19_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_19_v4i32:
@@ -12361,9 +12242,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 19, i32 19, i32 19, i32 19>
@@ -12519,9 +12399,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_19_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_19_v4i32:
@@ -12548,9 +12427,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [19,19,19,19]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 19, i32 19, i32 19, i32 19>
@@ -12710,9 +12588,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_20_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_20_v4i32:
@@ -12739,9 +12616,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 20, i32 20, i32 20, i32 20>
@@ -12897,9 +12773,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_20_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_20_v4i32:
@@ -12926,9 +12801,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [20,20,20,20]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 20, i32 20, i32 20, i32 20>
@@ -13088,9 +12962,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_21_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_21_v4i32:
@@ -13117,9 +12990,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 21, i32 21, i32 21, i32 21>
@@ -13275,9 +13147,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_21_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_21_v4i32:
@@ -13304,9 +13175,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [21,21,21,21]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 21, i32 21, i32 21, i32 21>
@@ -13466,9 +13336,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_22_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_22_v4i32:
@@ -13495,9 +13364,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 22, i32 22, i32 22, i32 22>
@@ -13653,9 +13521,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_22_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_22_v4i32:
@@ -13682,9 +13549,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [22,22,22,22]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 22, i32 22, i32 22, i32 22>
@@ -13844,9 +13710,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_23_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_23_v4i32:
@@ -13873,9 +13738,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 23, i32 23, i32 23, i32 23>
@@ -14031,9 +13895,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_23_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_23_v4i32:
@@ -14060,9 +13923,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [23,23,23,23]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 23, i32 23, i32 23, i32 23>
@@ -14222,9 +14084,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_24_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_24_v4i32:
@@ -14251,9 +14112,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 24, i32 24, i32 24, i32 24>
@@ -14409,9 +14269,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_24_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_24_v4i32:
@@ -14438,9 +14297,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [24,24,24,24]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 24, i32 24, i32 24, i32 24>
@@ -14600,9 +14458,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_25_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_25_v4i32:
@@ -14629,9 +14486,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 25, i32 25, i32 25, i32 25>
@@ -14787,9 +14643,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_25_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_25_v4i32:
@@ -14816,9 +14671,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [25,25,25,25]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 25, i32 25, i32 25, i32 25>
@@ -14978,9 +14832,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_26_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_26_v4i32:
@@ -15007,9 +14860,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 26, i32 26, i32 26, i32 26>
@@ -15165,9 +15017,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_26_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_26_v4i32:
@@ -15194,9 +15045,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [26,26,26,26]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 26, i32 26, i32 26, i32 26>
@@ -15356,9 +15206,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_27_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_27_v4i32:
@@ -15385,9 +15234,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 27, i32 27, i32 27, i32 27>
@@ -15543,9 +15391,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_27_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_27_v4i32:
@@ -15572,9 +15419,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [27,27,27,27]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 27, i32 27, i32 27, i32 27>
@@ -15734,9 +15580,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_28_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_28_v4i32:
@@ -15763,9 +15608,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 28, i32 28, i32 28, i32 28>
@@ -15921,9 +15765,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_28_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_28_v4i32:
@@ -15950,9 +15793,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [28,28,28,28]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 28, i32 28, i32 28, i32 28>
@@ -16112,9 +15954,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_29_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_29_v4i32:
@@ -16141,9 +15982,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 29, i32 29, i32 29, i32 29>
@@ -16299,9 +16139,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_29_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_29_v4i32:
@@ -16328,9 +16167,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [29,29,29,29]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 29, i32 29, i32 29, i32 29>
@@ -16490,9 +16328,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_30_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_30_v4i32:
@@ -16519,9 +16356,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 30, i32 30, i32 30, i32 30>
@@ -16677,9 +16513,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_30_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_30_v4i32:
@@ -16706,9 +16541,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [30,30,30,30]
+; BITALG-NEXT: vpcmpgtd %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ugt <4 x i32> %2, <i32 30, i32 30, i32 30, i32 30>
@@ -16868,9 +16702,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_31_v4i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} xmm1 = [31,31,31,31]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_31_v4i32:
@@ -16897,9 +16730,8 @@
 ; BITALG-NEXT: vpmovzxdq {{.*#+}} xmm0 = xmm0[0],zero,xmm0[1],zero
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: vpackuswb %xmm2, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa32 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} xmm1 = [31,31,31,31]
+; BITALG-NEXT: vpcmpgtd %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i32> @llvm.ctpop.v4i32(<4 x i32> %0)
 %3 = icmp ult <4 x i32> %2, <i32 31, i32 31, i32 31, i32 31>
@@ -16989,9 +16821,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_1_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [1,1]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_1_v2i64:
@@ -17094,9 +16925,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_2_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [2,2]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_2_v2i64:
@@ -17268,9 +17098,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_2_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [2,2]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_2_v2i64:
@@ -17288,9 +17117,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [2,2]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 2, i64 2>
@@ -17451,9 +17279,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_3_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [3,3]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_3_v2i64:
@@ -17472,9 +17299,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [3,3]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 3, i64 3>
@@ -17628,9 +17454,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_3_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [3,3]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_3_v2i64:
@@ -17648,9 +17473,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [3,3]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 3, i64 3>
@@ -17811,9 +17635,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_4_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [4,4]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_4_v2i64:
@@ -17832,9 +17655,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [4,4]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 4, i64 4>
@@ -17988,9 +17810,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_4_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [4,4]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_4_v2i64:
@@ -18008,9 +17829,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [4,4]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 4, i64 4>
@@ -18171,9 +17991,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_5_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [5,5]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_5_v2i64:
@@ -18192,9 +18011,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [5,5]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 5, i64 5>
@@ -18348,9 +18166,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_5_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [5,5]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_5_v2i64:
@@ -18368,9 +18185,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [5,5]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 5, i64 5>
@@ -18531,9 +18347,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_6_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [6,6]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_6_v2i64:
@@ -18552,9 +18367,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [6,6]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 6, i64 6>
@@ -18708,9 +18522,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_6_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [6,6]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_6_v2i64:
@@ -18728,9 +18541,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [6,6]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 6, i64 6>
@@ -18891,9 +18703,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_7_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [7,7]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_7_v2i64:
@@ -18912,9 +18723,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [7,7]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 7, i64 7>
@@ -19068,9 +18878,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_7_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [7,7]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_7_v2i64:
@@ -19088,9 +18897,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [7,7]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 7, i64 7>
@@ -19251,9 +19059,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_8_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [8,8]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_8_v2i64:
@@ -19272,9 +19079,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ;
BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [8,8] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -19428,9 +19234,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_8_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [8,8] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_8_v2i64: @@ -19448,9 +19253,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [8,8] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -19611,9 +19415,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_9_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [9,9] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_9_v2i64: @@ -19632,9 +19435,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [9,9] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -19788,9 +19590,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_9_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [9,9] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_9_v2i64: @@ -19808,9 +19609,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [9,9] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x 
i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -19971,9 +19771,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_10_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [10,10] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_10_v2i64: @@ -19992,9 +19791,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [10,10] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -20148,9 +19946,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_10_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [10,10] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_10_v2i64: @@ -20168,9 +19965,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [10,10] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -20331,9 +20127,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_11_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [11,11] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_11_v2i64: @@ -20352,9 +20147,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [11,11] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -20508,9 +20302,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_11_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; 
AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [11,11] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_11_v2i64: @@ -20528,9 +20321,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [11,11] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -20691,9 +20483,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_12_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [12,12] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_12_v2i64: @@ -20712,9 +20503,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [12,12] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -20868,9 +20658,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_12_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [12,12] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_12_v2i64: @@ -20888,9 +20677,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [12,12] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -21051,9 +20839,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_13_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [13,13] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_13_v2i64: @@ -21072,9 +20859,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, 
%xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [13,13] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -21228,9 +21014,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_13_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [13,13] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_13_v2i64: @@ -21248,9 +21033,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [13,13] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -21411,9 +21195,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_14_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [14,14] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_14_v2i64: @@ -21432,9 +21215,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [14,14] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -21588,9 +21370,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_14_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [14,14] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_14_v2i64: @@ -21608,9 +21389,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [14,14] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -21771,9 +21551,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_15_v2i64: ; AVX512VPOPCNTDQVL: # 
%bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [15,15] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_15_v2i64: @@ -21792,9 +21571,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [15,15] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -21948,9 +21726,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_15_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [15,15] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_15_v2i64: @@ -21968,9 +21745,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [15,15] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -22131,9 +21907,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_16_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [16,16] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_16_v2i64: @@ -22152,9 +21927,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [16,16] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -22308,9 +22082,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_16_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [16,16] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; 
AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_16_v2i64: @@ -22328,9 +22101,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [16,16] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -22491,9 +22263,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_17_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [17,17] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_17_v2i64: @@ -22512,9 +22283,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [17,17] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -22668,9 +22438,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_17_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [17,17] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_17_v2i64: @@ -22688,9 +22457,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [17,17] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -22851,9 +22619,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_18_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [18,18] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_18_v2i64: @@ -22872,9 +22639,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: 
vpbroadcastq {{.*#+}} xmm1 = [18,18] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -23028,9 +22794,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_18_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [18,18] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_18_v2i64: @@ -23048,9 +22813,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [18,18] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -23211,9 +22975,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_19_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [19,19] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_19_v2i64: @@ -23232,9 +22995,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [19,19] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -23388,9 +23150,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_19_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [19,19] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_19_v2i64: @@ -23408,9 +23169,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [19,19] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -23571,9 +23331,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_20_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [20,20] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_20_v2i64: @@ -23592,9 +23351,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [20,20] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -23748,9 +23506,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_20_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [20,20] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_20_v2i64: @@ -23768,9 +23525,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [20,20] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -23931,9 +23687,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_21_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [21,21] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_21_v2i64: @@ -23952,9 +23707,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [21,21] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -24108,9 +23862,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_21_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [21,21] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_21_v2i64: @@ -24128,9 +23881,8 @@ ; BITALG-NEXT: 
vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [21,21] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -24291,9 +24043,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_22_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [22,22] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_22_v2i64: @@ -24312,9 +24063,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [22,22] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -24468,9 +24218,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_22_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [22,22] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_22_v2i64: @@ -24488,9 +24237,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [22,22] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -24651,9 +24399,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_23_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [23,23] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_23_v2i64: @@ -24672,9 +24419,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [23,23] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = 
tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -24828,9 +24574,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_23_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [23,23] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_23_v2i64: @@ -24848,9 +24593,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [23,23] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -25011,9 +24755,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_24_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [24,24] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_24_v2i64: @@ -25032,9 +24775,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [24,24] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -25188,9 +24930,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_24_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [24,24] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_24_v2i64: @@ -25208,9 +24949,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [24,24] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -25371,9 +25111,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_25_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: 
vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [25,25] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_25_v2i64: @@ -25392,9 +25131,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [25,25] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -25548,9 +25286,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_25_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [25,25] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_25_v2i64: @@ -25568,9 +25305,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [25,25] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -25731,9 +25467,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_26_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [26,26] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_26_v2i64: @@ -25752,9 +25487,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [26,26] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -25908,9 +25642,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_26_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [26,26] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_26_v2i64: @@ -25928,9 +25661,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [26,26] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -26091,9 +25823,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_27_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [27,27] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_27_v2i64: @@ -26112,9 +25843,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [27,27] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -26268,9 +25998,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_27_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [27,27] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_27_v2i64: @@ -26288,9 +26017,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [27,27] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -26451,9 +26179,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_28_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [28,28] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_28_v2i64: @@ -26472,9 +26199,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [28,28] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -26628,9 +26354,8 @@ ; AVX512VPOPCNTDQVL-LABEL: 
ugt_28_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [28,28] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_28_v2i64: @@ -26648,9 +26373,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [28,28] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -26811,9 +26535,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_29_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [29,29] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_29_v2i64: @@ -26832,9 +26555,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [29,29] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -26988,9 +26710,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_29_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [29,29] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_29_v2i64: @@ -27008,9 +26729,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [29,29] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -27171,9 +26891,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_30_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [30,30] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq 
%xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_30_v2i64: @@ -27192,9 +26911,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [30,30] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -27348,9 +27066,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_30_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [30,30] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_30_v2i64: @@ -27368,9 +27085,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [30,30] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -27531,9 +27247,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_31_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [31,31] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_31_v2i64: @@ -27552,9 +27267,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [31,31] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -27708,9 +27422,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_31_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [31,31] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_31_v2i64: @@ -27728,9 +27441,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 
{%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [31,31] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -27891,9 +27603,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_32_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [32,32] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_32_v2i64: @@ -27912,9 +27623,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [32,32] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -28068,9 +27778,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_32_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [32,32] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_32_v2i64: @@ -28088,9 +27797,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [32,32] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -28251,9 +27959,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_33_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [33,33] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_33_v2i64: @@ -28272,9 +27979,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [33,33] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -28428,9 +28134,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_33_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq 
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [33,33]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_33_v2i64:
@@ -28448,9 +28153,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [33,33]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 33, i64 33>
@@ -28611,9 +28315,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_34_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [34,34]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_34_v2i64:
@@ -28632,9 +28335,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [34,34]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 34, i64 34>
@@ -28788,9 +28490,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_34_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [34,34]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_34_v2i64:
@@ -28808,9 +28509,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [34,34]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 34, i64 34>
@@ -28971,9 +28671,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_35_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [35,35]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_35_v2i64:
@@ -28992,9 +28691,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [35,35]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 35, i64 35>
@@ -29148,9 +28846,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_35_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [35,35]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_35_v2i64:
@@ -29168,9 +28865,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [35,35]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 35, i64 35>
@@ -29331,9 +29027,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_36_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [36,36]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_36_v2i64:
@@ -29352,9 +29047,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [36,36]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 36, i64 36>
@@ -29508,9 +29202,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_36_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [36,36]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_36_v2i64:
@@ -29528,9 +29221,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [36,36]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 36, i64 36>
@@ -29691,9 +29383,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_37_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [37,37]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_37_v2i64:
@@ -29712,9 +29403,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [37,37]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 37, i64 37>
@@ -29868,9 +29558,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_37_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [37,37]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_37_v2i64:
@@ -29888,9 +29577,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [37,37]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 37, i64 37>
@@ -30051,9 +29739,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_38_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [38,38]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_38_v2i64:
@@ -30072,9 +29759,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [38,38]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 38, i64 38>
@@ -30228,9 +29914,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_38_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [38,38]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_38_v2i64:
@@ -30248,9 +29933,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [38,38]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 38, i64 38>
@@ -30411,9 +30095,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_39_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [39,39]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_39_v2i64:
@@ -30432,9 +30115,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [39,39]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 39, i64 39>
@@ -30588,9 +30270,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_39_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [39,39]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_39_v2i64:
@@ -30608,9 +30289,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [39,39]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 39, i64 39>
@@ -30771,9 +30451,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_40_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [40,40]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_40_v2i64:
@@ -30792,9 +30471,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [40,40]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 40, i64 40>
@@ -30948,9 +30626,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_40_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [40,40]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_40_v2i64:
@@ -30968,9 +30645,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [40,40]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 40, i64 40>
@@ -31131,9 +30807,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_41_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [41,41]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_41_v2i64:
@@ -31152,9 +30827,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [41,41]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 41, i64 41>
@@ -31308,9 +30982,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_41_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [41,41]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_41_v2i64:
@@ -31328,9 +31001,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [41,41]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 41, i64 41>
@@ -31491,9 +31163,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_42_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [42,42]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_42_v2i64:
@@ -31512,9 +31183,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [42,42]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 42, i64 42>
@@ -31668,9 +31338,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_42_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [42,42]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_42_v2i64:
@@ -31688,9 +31357,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [42,42]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 42, i64 42>
@@ -31851,9 +31519,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_43_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [43,43]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_43_v2i64:
@@ -31872,9 +31539,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [43,43]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 43, i64 43>
@@ -32028,9 +31694,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_43_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [43,43]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_43_v2i64:
@@ -32048,9 +31713,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [43,43]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 43, i64 43>
@@ -32211,9 +31875,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_44_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [44,44]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_44_v2i64:
@@ -32232,9 +31895,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [44,44]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 44, i64 44>
@@ -32388,9 +32050,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_44_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [44,44]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_44_v2i64:
@@ -32408,9 +32069,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [44,44]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 44, i64 44>
@@ -32571,9 +32231,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_45_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [45,45]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_45_v2i64:
@@ -32592,9 +32251,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [45,45]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 45, i64 45>
@@ -32748,9 +32406,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_45_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [45,45]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_45_v2i64:
@@ -32768,9 +32425,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [45,45]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 45, i64 45>
@@ -32931,9 +32587,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_46_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [46,46]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_46_v2i64:
@@ -32952,9 +32607,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [46,46]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 46, i64 46>
@@ -33108,9 +32762,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_46_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [46,46]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_46_v2i64:
@@ -33128,9 +32781,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [46,46]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 46, i64 46>
@@ -33291,9 +32943,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_47_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [47,47]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_47_v2i64:
@@ -33312,9 +32963,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [47,47]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 47, i64 47>
@@ -33468,9 +33118,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_47_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [47,47]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_47_v2i64:
@@ -33488,9 +33137,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [47,47]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 47, i64 47>
@@ -33651,9 +33299,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_48_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [48,48]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_48_v2i64:
@@ -33672,9 +33319,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [48,48]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 48, i64 48>
@@ -33828,9 +33474,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_48_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [48,48]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_48_v2i64:
@@ -33848,9 +33493,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [48,48]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 48, i64 48>
@@ -34011,9 +33655,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_49_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [49,49]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_49_v2i64:
@@ -34032,9 +33675,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [49,49]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 49, i64 49>
@@ -34188,9 +33830,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_49_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [49,49]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_49_v2i64:
@@ -34208,9 +33849,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [49,49]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 49, i64 49>
@@ -34371,9 +34011,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_50_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [50,50]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_50_v2i64:
@@ -34392,9 +34031,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [50,50]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 50, i64 50>
@@ -34548,9 +34186,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_50_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [50,50]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_50_v2i64:
@@ -34568,9 +34205,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [50,50]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 50, i64 50>
@@ -34731,9 +34367,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_51_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [51,51]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_51_v2i64:
@@ -34752,9 +34387,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [51,51]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 51, i64 51>
@@ -34908,9 +34542,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_51_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [51,51]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_51_v2i64:
@@ -34928,9 +34561,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [51,51]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 51, i64 51>
@@ -35091,9 +34723,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_52_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [52,52]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_52_v2i64:
@@ -35112,9 +34743,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [52,52]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 52, i64 52>
@@ -35268,9 +34898,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_52_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [52,52]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_52_v2i64:
@@ -35288,9 +34917,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [52,52]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 52, i64 52>
@@ -35451,9 +35079,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_53_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [53,53]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_53_v2i64:
@@ -35472,9 +35099,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [53,53]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 53, i64 53>
@@ -35628,9 +35254,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_53_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [53,53]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_53_v2i64:
@@ -35648,9 +35273,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [53,53]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 53, i64 53>
@@ -35811,9 +35435,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_54_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [54,54]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_54_v2i64:
@@ -35832,9 +35455,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [54,54]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 54, i64 54>
@@ -35988,9 +35610,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_54_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [54,54]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_54_v2i64:
@@ -36008,9 +35629,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [54,54]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 54, i64 54>
@@ -36171,9 +35791,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_55_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [55,55]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_55_v2i64:
@@ -36192,9 +35811,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [55,55]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 55, i64 55>
@@ -36348,9 +35966,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_55_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [55,55]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_55_v2i64:
@@ -36368,9 +35985,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [55,55]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 55, i64 55>
@@ -36531,9 +36147,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_56_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [56,56]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_56_v2i64:
@@ -36552,9 +36167,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [56,56]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 56, i64 56>
@@ -36708,9 +36322,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_56_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [56,56]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_56_v2i64:
@@ -36728,9 +36341,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [56,56]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 56, i64 56>
@@ -36891,9 +36503,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_57_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [57,57]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_57_v2i64:
@@ -36912,9 +36523,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [57,57]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 57, i64 57>
@@ -37068,9 +36678,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_57_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [57,57]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_57_v2i64:
@@ -37088,9 +36697,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [57,57]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 57, i64 57>
@@ -37251,9 +36859,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_58_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [58,58]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_58_v2i64:
@@ -37272,9 +36879,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [58,58]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 58, i64 58>
@@ -37428,9 +37034,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_58_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [58,58]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_58_v2i64:
@@ -37448,9 +37053,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [58,58]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 58, i64 58>
@@ -37611,9 +37215,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_59_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [59,59]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_59_v2i64:
@@ -37632,9 +37235,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [59,59]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 59, i64 59>
@@ -37788,9 +37390,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_59_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [59,59]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_59_v2i64:
@@ -37808,9 +37409,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [59,59]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 59, i64 59>
@@ -37971,9 +37571,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_60_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [60,60]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_60_v2i64:
@@ -37992,9 +37591,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [60,60]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 60, i64 60>
@@ -38148,9 +37746,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_60_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [60,60]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_60_v2i64:
@@ -38168,9 +37765,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [60,60]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 60, i64 60>
@@ -38331,9 +37927,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_61_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [61,61]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_61_v2i64:
@@ -38352,9 +37947,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [61,61]
+; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ult <2 x i64> %2, <i64 61, i64 61>
@@ -38508,9 +38102,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_61_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [61,61]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_61_v2i64:
@@ -38528,9 +38121,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [61,61]
+; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0
 ; BITALG-NEXT: retq
 %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0)
 %3 = icmp ugt <2 x i64> %2, <i64 61, i64 61>
@@ -38691,9 +38283,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_62_v2i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [62,62]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_62_v2i64:
@@ -38712,9 +38303,8 @@
 ; BITALG-NEXT: vpopcntb %xmm0, %xmm0
vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [62,62] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, @@ -38868,9 +38458,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_62_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [62,62] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_62_v2i64: @@ -38888,9 +38477,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [62,62] +; BITALG-NEXT: vpcmpgtq %xmm1, %xmm0, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ugt <2 x i64> %2, @@ -39051,9 +38639,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_63_v2i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} xmm1 = [63,63] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_63_v2i64: @@ -39072,9 +38659,8 @@ ; BITALG-NEXT: vpopcntb %xmm0, %xmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %xmm1, %xmm0, %xmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to2}, %xmm0, %k1 -; BITALG-NEXT: vpcmpeqd %xmm0, %xmm0, %xmm0 -; BITALG-NEXT: vmovdqa64 %xmm0, %xmm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} xmm1 = [63,63] +; BITALG-NEXT: vpcmpgtq %xmm0, %xmm1, %xmm0 ; BITALG-NEXT: retq %2 = tail call <2 x i64> @llvm.ctpop.v2i64(<2 x i64> %0) %3 = icmp ult <2 x i64> %2, diff --git a/llvm/test/CodeGen/X86/vector-popcnt-256-ult-ugt.ll b/llvm/test/CodeGen/X86/vector-popcnt-256-ult-ugt.ll --- a/llvm/test/CodeGen/X86/vector-popcnt-256-ult-ugt.ll +++ b/llvm/test/CodeGen/X86/vector-popcnt-256-ult-ugt.ll @@ -65,8 +65,7 @@ ; BITALG-LABEL: ugt_1_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ugt <32 x i8> %2, @@ -127,8 +126,8 @@ ; BITALG-LABEL: ult_2_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] +; BITALG-NEXT: vpcmpgtb %ymm0, %ymm1, 
%ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ult <32 x i8> %2, @@ -154,11 +153,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm2, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -172,8 +169,7 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_2_v32i8: @@ -186,8 +182,7 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ugt_2_v32i8: @@ -200,8 +195,7 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_2_v32i8: @@ -214,8 +208,7 @@ ; BITALG-LABEL: ugt_2_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ugt <32 x i8> %2, @@ -241,11 +234,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] -; AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpminub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm2, %xmm1, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -259,8 +250,8 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX2-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_3_v32i8: @@ -273,8 +264,8 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; 
AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ult_3_v32i8: @@ -287,8 +278,8 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_3_v32i8: @@ -302,8 +293,8 @@ ; BITALG-LABEL: ult_3_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; BITALG-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ult <32 x i8> %2, @@ -329,11 +320,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm2, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -347,8 +336,7 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_3_v32i8: @@ -361,8 +349,7 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ugt_3_v32i8: @@ -375,8 +362,7 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_3_v32i8: @@ -389,8 +375,7 @@ ; BITALG-LABEL: ugt_3_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleub 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ugt <32 x i8> %2, @@ -416,11 +401,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpminub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm2, %xmm1, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -434,8 +417,8 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX2-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_4_v32i8: @@ -448,8 +431,8 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ult_4_v32i8: @@ -462,8 +445,8 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_4_v32i8: @@ -477,8 +460,8 @@ ; BITALG-LABEL: ult_4_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; BITALG-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ult <32 x i8> %2, @@ -504,11 +487,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm2, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -522,8 +503,7 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 
; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_4_v32i8: @@ -536,8 +516,7 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ugt_4_v32i8: @@ -550,8 +529,7 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_4_v32i8: @@ -564,8 +542,7 @@ ; BITALG-LABEL: ugt_4_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ugt <32 x i8> %2, @@ -591,11 +568,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpminub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm2, %xmm1, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -609,8 +584,8 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX2-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_5_v32i8: @@ -623,8 +598,8 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ult_5_v32i8: @@ -637,8 +612,8 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; 
AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_5_v32i8: @@ -652,8 +627,8 @@ ; BITALG-LABEL: ult_5_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; BITALG-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ult <32 x i8> %2, @@ -679,11 +654,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm2, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -697,8 +670,7 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_5_v32i8: @@ -711,8 +683,7 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ugt_5_v32i8: @@ -725,8 +696,7 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_5_v32i8: @@ -739,8 +709,7 @@ ; BITALG-LABEL: ugt_5_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ugt <32 x i8> %2, @@ -766,11 +735,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpminub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm2, %xmm1, %xmm1 ; AVX1-NEXT: 
vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -784,8 +751,8 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX2-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_6_v32i8: @@ -798,8 +765,8 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ult_6_v32i8: @@ -812,8 +779,8 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_6_v32i8: @@ -827,8 +794,8 @@ ; BITALG-LABEL: ult_6_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; BITALG-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ult <32 x i8> %2, @@ -854,11 +821,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] -; AVX1-NEXT: vpmaxub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpmaxub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm0, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm1, %xmm2, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -872,8 +837,7 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_6_v32i8: @@ -886,8 +850,7 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ugt_6_v32i8: @@ -900,8 +863,7 @@ ; 
AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpmaxub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_6_v32i8: @@ -914,8 +876,7 @@ ; BITALG-LABEL: ugt_6_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ugt <32 x i8> %2, @@ -941,11 +902,9 @@ ; AVX1-NEXT: vpand %xmm1, %xmm0, %xmm0 ; AVX1-NEXT: vpshufb %xmm0, %xmm3, %xmm0 ; AVX1-NEXT: vpaddb %xmm4, %xmm0, %xmm0 -; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; AVX1-NEXT: vpminub %xmm1, %xmm0, %xmm3 -; AVX1-NEXT: vpcmpeqb %xmm3, %xmm0, %xmm0 -; AVX1-NEXT: vpminub %xmm1, %xmm2, %xmm1 -; AVX1-NEXT: vpcmpeqb %xmm1, %xmm2, %xmm1 +; AVX1-NEXT: vmovdqa {{.*#+}} xmm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX1-NEXT: vpcmpgtb %xmm0, %xmm1, %xmm0 +; AVX1-NEXT: vpcmpgtb %xmm2, %xmm1, %xmm1 ; AVX1-NEXT: vinsertf128 $1, %xmm0, %ymm1, %ymm0 ; AVX1-NEXT: retq ; @@ -959,8 +918,8 @@ ; AVX2-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX2-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX2-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX2-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX2-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX2-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX2-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX2-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_7_v32i8: @@ -973,8 +932,8 @@ ; AVX512VPOPCNTDQ-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQ-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX512VPOPCNTDQ-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQ-NEXT: retq ; ; AVX512VPOPCNTDQVL-LABEL: ult_7_v32i8: @@ -987,8 +946,8 @@ ; AVX512VPOPCNTDQVL-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: vpaddb %ymm2, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpminub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQVL-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_7_v32i8: @@ -1002,8 +961,8 @@ ; BITALG-LABEL: ult_7_v32i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2b %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; BITALG-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <32 x i8> @llvm.ctpop.v32i8(<32 x i8> %0) %3 = icmp ult <32 x i8> %2, @@ -1069,8 +1028,7 @@ ; BITALG-LABEL: ugt_1_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: 
vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -1131,8 +1089,8 @@ ; BITALG-LABEL: ult_2_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -1212,8 +1170,7 @@ ; BITALG-LABEL: ugt_2_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -1297,8 +1254,8 @@ ; BITALG-LABEL: ult_3_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -1378,8 +1335,7 @@ ; BITALG-LABEL: ugt_3_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -1463,8 +1419,8 @@ ; BITALG-LABEL: ult_4_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -1544,8 +1500,7 @@ ; BITALG-LABEL: ugt_4_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -1629,8 +1584,8 @@ ; BITALG-LABEL: ult_5_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -1710,8 +1665,7 @@ ; BITALG-LABEL: ugt_5_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq 
%2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -1795,8 +1749,8 @@ ; BITALG-LABEL: ult_6_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -1876,8 +1830,7 @@ ; BITALG-LABEL: ugt_6_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -1961,8 +1914,8 @@ ; BITALG-LABEL: ult_7_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -2042,8 +1995,7 @@ ; BITALG-LABEL: ugt_7_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -2127,8 +2079,8 @@ ; BITALG-LABEL: ult_8_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [8,8,8,8,8,8,8,8,8,8,8,8,8,8,8,8] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -2208,8 +2160,7 @@ ; BITALG-LABEL: ugt_8_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -2293,8 +2244,8 @@ ; BITALG-LABEL: ult_9_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -2374,8 +2325,7 @@ ; BITALG-LABEL: ugt_9_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -2459,8 +2409,8 @@ ; BITALG-LABEL: ult_10_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: 
vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [10,10,10,10,10,10,10,10,10,10,10,10,10,10,10,10] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -2540,8 +2490,7 @@ ; BITALG-LABEL: ugt_10_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -2625,8 +2574,8 @@ ; BITALG-LABEL: ult_11_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [11,11,11,11,11,11,11,11,11,11,11,11,11,11,11,11] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -2706,8 +2655,7 @@ ; BITALG-LABEL: ugt_11_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -2791,8 +2739,8 @@ ; BITALG-LABEL: ult_12_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [12,12,12,12,12,12,12,12,12,12,12,12,12,12,12,12] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -2872,8 +2820,7 @@ ; BITALG-LABEL: ugt_12_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -2957,8 +2904,8 @@ ; BITALG-LABEL: ult_13_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [13,13,13,13,13,13,13,13,13,13,13,13,13,13,13,13] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -3038,8 +2985,7 @@ ; BITALG-LABEL: ugt_13_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -3123,8 +3069,8 @@ ; BITALG-LABEL: ult_14_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = 
[14,14,14,14,14,14,14,14,14,14,14,14,14,14,14,14] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -3204,8 +3150,7 @@ ; BITALG-LABEL: ugt_14_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ugt <16 x i16> %2, @@ -3289,8 +3234,8 @@ ; BITALG-LABEL: ult_15_v16i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %ymm0, %k0 -; BITALG-NEXT: vpmovm2w %k0, %ymm0 +; BITALG-NEXT: vmovdqa {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15,15,15,15,15,15,15,15,15] +; BITALG-NEXT: vpcmpgtw %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <16 x i16> @llvm.ctpop.v16i16(<16 x i16> %0) %3 = icmp ult <16 x i16> %2, @@ -3336,9 +3281,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_1_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [1,1,1,1,1,1,1,1] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_1_v8i32: @@ -3402,9 +3346,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_2_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_2_v8i32: @@ -3496,9 +3439,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_2_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_2_v8i32: @@ -3524,9 +3466,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -3600,9 +3541,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_3_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, 
%ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_3_v8i32: @@ -3628,9 +3568,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -3704,9 +3643,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_3_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_3_v8i32: @@ -3732,9 +3670,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -3808,9 +3745,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_4_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_4_v8i32: @@ -3836,9 +3772,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -3912,9 +3847,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_4_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = 
[4,4,4,4,4,4,4,4] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_4_v8i32: @@ -3940,9 +3874,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -4016,9 +3949,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_5_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_5_v8i32: @@ -4044,9 +3976,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -4120,9 +4051,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_5_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_5_v8i32: @@ -4148,9 +4078,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -4224,9 +4153,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_6_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: 
ult_6_v8i32: @@ -4252,9 +4180,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -4328,9 +4255,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_6_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_6_v8i32: @@ -4356,9 +4282,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -4432,9 +4357,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_7_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_7_v8i32: @@ -4460,9 +4384,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -4536,9 +4459,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_7_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_7_v8i32: @@ -4564,9 +4486,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = 
ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -4640,9 +4561,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_8_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [8,8,8,8,8,8,8,8] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_8_v8i32: @@ -4668,9 +4588,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [8,8,8,8,8,8,8,8] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -4744,9 +4663,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_8_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [8,8,8,8,8,8,8,8] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_8_v8i32: @@ -4772,9 +4690,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [8,8,8,8,8,8,8,8] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -4848,9 +4765,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_9_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [9,9,9,9,9,9,9,9] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_9_v8i32: @@ -4876,9 +4792,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb 
%ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [9,9,9,9,9,9,9,9] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -4952,9 +4867,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_9_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [9,9,9,9,9,9,9,9] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_9_v8i32: @@ -4980,9 +4894,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [9,9,9,9,9,9,9,9] +; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ugt <8 x i32> %2, @@ -5056,9 +4969,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_10_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [10,10,10,10,10,10,10,10] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_10_v8i32: @@ -5084,9 +4996,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [10,10,10,10,10,10,10,10] +; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0) %3 = icmp ult <8 x i32> %2, @@ -5160,9 +5071,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_10_v8i32: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [10,10,10,10,10,10,10,10] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_10_v8i32: @@ -5188,9 +5098,8 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5] ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1 -; 
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [10,10,10,10,10,10,10,10]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -5264,9 +5173,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_11_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [11,11,11,11,11,11,11,11]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_11_v8i32:
@@ -5292,9 +5200,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [11,11,11,11,11,11,11,11]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -5368,9 +5275,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_11_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [11,11,11,11,11,11,11,11]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_11_v8i32:
@@ -5396,9 +5302,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [11,11,11,11,11,11,11,11]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -5472,9 +5377,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_12_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [12,12,12,12,12,12,12,12]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_12_v8i32:
@@ -5500,9 +5404,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [12,12,12,12,12,12,12,12]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -5576,9 +5479,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_12_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [12,12,12,12,12,12,12,12]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_12_v8i32:
@@ -5604,9 +5506,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [12,12,12,12,12,12,12,12]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -5680,9 +5581,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_13_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [13,13,13,13,13,13,13,13]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_13_v8i32:
@@ -5708,9 +5608,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [13,13,13,13,13,13,13,13]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -5784,9 +5683,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_13_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [13,13,13,13,13,13,13,13]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_13_v8i32:
@@ -5812,9 +5710,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [13,13,13,13,13,13,13,13]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -5888,9 +5785,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_14_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [14,14,14,14,14,14,14,14]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_14_v8i32:
@@ -5916,9 +5812,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [14,14,14,14,14,14,14,14]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -5992,9 +5887,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_14_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [14,14,14,14,14,14,14,14]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_14_v8i32:
@@ -6020,9 +5914,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [14,14,14,14,14,14,14,14]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -6096,9 +5989,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_15_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_15_v8i32:
@@ -6124,9 +6016,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -6200,9 +6091,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_15_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_15_v8i32:
@@ -6228,9 +6118,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [15,15,15,15,15,15,15,15]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -6304,9 +6193,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_16_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [16,16,16,16,16,16,16,16]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_16_v8i32:
@@ -6332,9 +6220,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [16,16,16,16,16,16,16,16]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -6408,9 +6295,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_16_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [16,16,16,16,16,16,16,16]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_16_v8i32:
@@ -6436,9 +6322,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [16,16,16,16,16,16,16,16]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -6512,9 +6397,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_17_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [17,17,17,17,17,17,17,17]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_17_v8i32:
@@ -6540,9 +6424,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [17,17,17,17,17,17,17,17]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -6616,9 +6499,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_17_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [17,17,17,17,17,17,17,17]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_17_v8i32:
@@ -6644,9 +6526,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [17,17,17,17,17,17,17,17]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -6720,9 +6601,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_18_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [18,18,18,18,18,18,18,18]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_18_v8i32:
@@ -6748,9 +6628,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [18,18,18,18,18,18,18,18]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -6824,9 +6703,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_18_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [18,18,18,18,18,18,18,18]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_18_v8i32:
@@ -6852,9 +6730,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [18,18,18,18,18,18,18,18]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -6928,9 +6805,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_19_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [19,19,19,19,19,19,19,19]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_19_v8i32:
@@ -6956,9 +6832,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [19,19,19,19,19,19,19,19]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -7032,9 +6907,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_19_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [19,19,19,19,19,19,19,19]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_19_v8i32:
@@ -7060,9 +6934,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [19,19,19,19,19,19,19,19]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -7136,9 +7009,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_20_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [20,20,20,20,20,20,20,20]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_20_v8i32:
@@ -7164,9 +7036,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [20,20,20,20,20,20,20,20]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -7240,9 +7111,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_20_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [20,20,20,20,20,20,20,20]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_20_v8i32:
@@ -7268,9 +7138,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [20,20,20,20,20,20,20,20]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -7344,9 +7213,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_21_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [21,21,21,21,21,21,21,21]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_21_v8i32:
@@ -7372,9 +7240,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [21,21,21,21,21,21,21,21]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -7448,9 +7315,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_21_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [21,21,21,21,21,21,21,21]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_21_v8i32:
@@ -7476,9 +7342,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [21,21,21,21,21,21,21,21]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -7552,9 +7417,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_22_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [22,22,22,22,22,22,22,22]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_22_v8i32:
@@ -7580,9 +7444,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [22,22,22,22,22,22,22,22]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -7656,9 +7519,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_22_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [22,22,22,22,22,22,22,22]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_22_v8i32:
@@ -7684,9 +7546,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [22,22,22,22,22,22,22,22]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -7760,9 +7621,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_23_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [23,23,23,23,23,23,23,23]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_23_v8i32:
@@ -7788,9 +7648,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [23,23,23,23,23,23,23,23]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -7864,9 +7723,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_23_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [23,23,23,23,23,23,23,23]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_23_v8i32:
@@ -7892,9 +7750,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [23,23,23,23,23,23,23,23]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -7968,9 +7825,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_24_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [24,24,24,24,24,24,24,24]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_24_v8i32:
@@ -7996,9 +7852,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [24,24,24,24,24,24,24,24]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -8072,9 +7927,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_24_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [24,24,24,24,24,24,24,24]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_24_v8i32:
@@ -8100,9 +7954,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [24,24,24,24,24,24,24,24]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -8176,9 +8029,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_25_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [25,25,25,25,25,25,25,25]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_25_v8i32:
@@ -8204,9 +8056,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [25,25,25,25,25,25,25,25]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -8280,9 +8131,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_25_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [25,25,25,25,25,25,25,25]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_25_v8i32:
@@ -8308,9 +8158,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [25,25,25,25,25,25,25,25]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -8384,9 +8233,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_26_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [26,26,26,26,26,26,26,26]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_26_v8i32:
@@ -8412,9 +8260,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [26,26,26,26,26,26,26,26]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -8488,9 +8335,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_26_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [26,26,26,26,26,26,26,26]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_26_v8i32:
@@ -8516,9 +8362,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [26,26,26,26,26,26,26,26]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -8592,9 +8437,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_27_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [27,27,27,27,27,27,27,27]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_27_v8i32:
@@ -8620,9 +8464,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [27,27,27,27,27,27,27,27]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -8696,9 +8539,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_27_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [27,27,27,27,27,27,27,27]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_27_v8i32:
@@ -8724,9 +8566,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [27,27,27,27,27,27,27,27]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -8800,9 +8641,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_28_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [28,28,28,28,28,28,28,28]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_28_v8i32:
@@ -8828,9 +8668,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [28,28,28,28,28,28,28,28]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -8904,9 +8743,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_28_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [28,28,28,28,28,28,28,28]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_28_v8i32:
@@ -8932,9 +8770,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [28,28,28,28,28,28,28,28]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -9008,9 +8845,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_29_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [29,29,29,29,29,29,29,29]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_29_v8i32:
@@ -9036,9 +8872,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [29,29,29,29,29,29,29,29]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -9112,9 +8947,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_29_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [29,29,29,29,29,29,29,29]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_29_v8i32:
@@ -9140,9 +8974,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [29,29,29,29,29,29,29,29]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -9216,9 +9049,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_30_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [30,30,30,30,30,30,30,30]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_30_v8i32:
@@ -9244,9 +9076,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [30,30,30,30,30,30,30,30]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -9320,9 +9151,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_30_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [30,30,30,30,30,30,30,30]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_30_v8i32:
@@ -9348,9 +9178,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [30,30,30,30,30,30,30,30]
+; BITALG-NEXT: vpcmpgtd %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ugt <8 x i32> %2,
@@ -9424,9 +9253,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_31_v8i32:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntd %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastd {{.*#+}} ymm1 = [31,31,31,31,31,31,31,31]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_31_v8i32:
@@ -9452,9 +9280,8 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} ymm0 = ymm0[0],ymm1[0],ymm0[1],ymm1[1],ymm0[4],ymm1[4],ymm0[5],ymm1[5]
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa32 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastd {{.*#+}} ymm1 = [31,31,31,31,31,31,31,31]
+; BITALG-NEXT: vpcmpgtd %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <8 x i32> @llvm.ctpop.v8i32(<8 x i32> %0)
 %3 = icmp ult <8 x i32> %2,
@@ -9500,9 +9327,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_1_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [1,1,1,1]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_1_v4i64:
@@ -9566,9 +9392,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_2_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [2,2,2,2]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_2_v4i64:
@@ -9648,9 +9473,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_2_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [2,2,2,2]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_2_v4i64:
@@ -9668,9 +9492,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [2,2,2,2]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2,
@@ -9732,9 +9555,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_3_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [3,3,3,3]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_3_v4i64:
@@ -9752,9 +9574,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [3,3,3,3]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2,
@@ -9816,9 +9637,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_3_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [3,3,3,3]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_3_v4i64:
@@ -9836,9 +9656,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [3,3,3,3]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2,
@@ -9900,9 +9719,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_4_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [4,4,4,4]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_4_v4i64:
@@ -9920,9 +9738,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [4,4,4,4]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2,
@@ -9984,9 +9801,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_4_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [4,4,4,4]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_4_v4i64:
@@ -10004,9 +9820,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [4,4,4,4]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2,
@@ -10068,9 +9883,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_5_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [5,5,5,5]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_5_v4i64:
@@ -10088,9 +9902,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [5,5,5,5]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2,
@@ -10152,9 +9965,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_5_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [5,5,5,5]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_5_v4i64:
@@ -10172,9 +9984,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [5,5,5,5]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2,
@@ -10236,9 +10047,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_6_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [6,6,6,6]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_6_v4i64:
@@ -10256,9 +10066,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [6,6,6,6]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2,
@@ -10320,9 +10129,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_6_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [6,6,6,6]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_6_v4i64:
@@ -10340,9 +10148,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [6,6,6,6]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2,
@@ -10404,9 +10211,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_7_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [7,7,7,7]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_7_v4i64:
@@ -10424,9 +10230,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [7,7,7,7]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2,
@@ -10488,9 +10293,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_7_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [7,7,7,7]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_7_v4i64:
@@ -10508,9 +10312,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [7,7,7,7]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
@llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -10572,9 +10375,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_8_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [8,8,8,8] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_8_v4i64: @@ -10592,9 +10394,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [8,8,8,8] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -10656,9 +10457,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_8_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [8,8,8,8] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_8_v4i64: @@ -10676,9 +10476,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [8,8,8,8] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -10740,9 +10539,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_9_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [9,9,9,9] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_9_v4i64: @@ -10760,9 +10558,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [9,9,9,9] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -10824,9 +10621,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_9_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, 
%ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [9,9,9,9] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_9_v4i64: @@ -10844,9 +10640,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [9,9,9,9] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -10908,9 +10703,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_10_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [10,10,10,10] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_10_v4i64: @@ -10928,9 +10722,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [10,10,10,10] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -10992,9 +10785,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_10_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [10,10,10,10] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_10_v4i64: @@ -11012,9 +10804,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [10,10,10,10] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -11076,9 +10867,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_11_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [11,11,11,11] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_11_v4i64: @@ -11096,9 +10886,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: 
vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [11,11,11,11] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -11160,9 +10949,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_11_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [11,11,11,11] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_11_v4i64: @@ -11180,9 +10968,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [11,11,11,11] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -11244,9 +11031,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_12_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [12,12,12,12] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_12_v4i64: @@ -11264,9 +11050,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [12,12,12,12] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -11328,9 +11113,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_12_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [12,12,12,12] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_12_v4i64: @@ -11348,9 +11132,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [12,12,12,12] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, 
@@ -11412,9 +11195,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_13_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [13,13,13,13] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_13_v4i64: @@ -11432,9 +11214,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [13,13,13,13] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -11496,9 +11277,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_13_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [13,13,13,13] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_13_v4i64: @@ -11516,9 +11296,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [13,13,13,13] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -11580,9 +11359,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_14_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [14,14,14,14] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_14_v4i64: @@ -11600,9 +11378,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [14,14,14,14] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -11664,9 +11441,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_14_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; 
AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [14,14,14,14] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_14_v4i64: @@ -11684,9 +11460,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [14,14,14,14] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -11748,9 +11523,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_15_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [15,15,15,15] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_15_v4i64: @@ -11768,9 +11542,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [15,15,15,15] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -11832,9 +11605,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_15_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [15,15,15,15] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_15_v4i64: @@ -11852,9 +11624,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [15,15,15,15] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -11916,9 +11687,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_16_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [16,16,16,16] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_16_v4i64: @@ -11936,9 +11706,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [16,16,16,16] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -12000,9 +11769,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_16_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [16,16,16,16] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_16_v4i64: @@ -12020,9 +11788,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [16,16,16,16] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -12084,9 +11851,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_17_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [17,17,17,17] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_17_v4i64: @@ -12104,9 +11870,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [17,17,17,17] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -12168,9 +11933,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_17_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [17,17,17,17] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_17_v4i64: @@ -12188,9 +11952,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [17,17,17,17] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ 
-12252,9 +12015,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_18_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [18,18,18,18] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_18_v4i64: @@ -12272,9 +12034,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [18,18,18,18] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -12336,9 +12097,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_18_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [18,18,18,18] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_18_v4i64: @@ -12356,9 +12116,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [18,18,18,18] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -12420,9 +12179,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_19_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [19,19,19,19] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_19_v4i64: @@ -12440,9 +12198,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [19,19,19,19] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -12504,9 +12261,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_19_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; 
AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [19,19,19,19] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_19_v4i64: @@ -12524,9 +12280,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [19,19,19,19] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -12588,9 +12343,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_20_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [20,20,20,20] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_20_v4i64: @@ -12608,9 +12362,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [20,20,20,20] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -12672,9 +12425,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_20_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [20,20,20,20] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_20_v4i64: @@ -12692,9 +12444,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [20,20,20,20] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -12756,9 +12507,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_21_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [21,21,21,21] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_21_v4i64: @@ -12776,9 +12526,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [21,21,21,21] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -12840,9 +12589,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_21_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [21,21,21,21] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_21_v4i64: @@ -12860,9 +12608,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [21,21,21,21] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -12924,9 +12671,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_22_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [22,22,22,22] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_22_v4i64: @@ -12944,9 +12690,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [22,22,22,22] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -13008,9 +12753,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_22_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [22,22,22,22] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_22_v4i64: @@ -13028,9 +12772,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [22,22,22,22] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ 
-13092,9 +12835,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_23_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [23,23,23,23] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_23_v4i64: @@ -13112,9 +12854,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [23,23,23,23] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -13176,9 +12917,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_23_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [23,23,23,23] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_23_v4i64: @@ -13196,9 +12936,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [23,23,23,23] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -13260,9 +12999,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_24_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [24,24,24,24] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_24_v4i64: @@ -13280,9 +13018,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [24,24,24,24] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -13344,9 +13081,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_24_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; 
AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [24,24,24,24] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_24_v4i64: @@ -13364,9 +13100,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [24,24,24,24] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -13428,9 +13163,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_25_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [25,25,25,25] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_25_v4i64: @@ -13448,9 +13182,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [25,25,25,25] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -13512,9 +13245,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_25_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [25,25,25,25] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_25_v4i64: @@ -13532,9 +13264,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [25,25,25,25] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -13596,9 +13327,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_26_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [26,26,26,26] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_26_v4i64: @@ -13616,9 +13346,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [26,26,26,26] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -13680,9 +13409,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_26_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [26,26,26,26] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_26_v4i64: @@ -13700,9 +13428,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [26,26,26,26] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -13764,9 +13491,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_27_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [27,27,27,27] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_27_v4i64: @@ -13784,9 +13510,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [27,27,27,27] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -13848,9 +13573,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_27_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [27,27,27,27] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_27_v4i64: @@ -13868,9 +13592,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [27,27,27,27] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ 
-13932,9 +13655,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_28_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [28,28,28,28] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_28_v4i64: @@ -13952,9 +13674,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [28,28,28,28] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -14016,9 +13737,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_28_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [28,28,28,28] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_28_v4i64: @@ -14036,9 +13756,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [28,28,28,28] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -14100,9 +13819,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_29_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [29,29,29,29] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_29_v4i64: @@ -14120,9 +13838,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [29,29,29,29] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -14184,9 +13901,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_29_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; 
AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [29,29,29,29] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_29_v4i64: @@ -14204,9 +13920,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [29,29,29,29] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -14268,9 +13983,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_30_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [30,30,30,30] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_30_v4i64: @@ -14288,9 +14002,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [30,30,30,30] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -14352,9 +14065,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_30_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [30,30,30,30] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_30_v4i64: @@ -14372,9 +14084,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [30,30,30,30] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -14436,9 +14147,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_31_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [31,31,31,31] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_31_v4i64: @@ -14456,9 +14166,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [31,31,31,31] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -14520,9 +14229,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_31_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [31,31,31,31] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_31_v4i64: @@ -14540,9 +14248,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [31,31,31,31] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -14604,9 +14311,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_32_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [32,32,32,32] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_32_v4i64: @@ -14624,9 +14330,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [32,32,32,32] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -14688,9 +14393,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_32_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [32,32,32,32] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_32_v4i64: @@ -14708,9 +14412,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [32,32,32,32] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ 
-14772,9 +14475,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_33_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [33,33,33,33] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_33_v4i64: @@ -14792,9 +14494,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [33,33,33,33] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -14856,9 +14557,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_33_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [33,33,33,33] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ugt_33_v4i64: @@ -14876,9 +14576,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [33,33,33,33] +; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ugt <4 x i64> %2, @@ -14940,9 +14639,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ult_34_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [34,34,34,34] +; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; AVX512VPOPCNTDQVL-NEXT: retq ; ; BITALG_NOVLX-LABEL: ult_34_v4i64: @@ -14960,9 +14658,8 @@ ; BITALG-NEXT: vpopcntb %ymm0, %ymm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [34,34,34,34] +; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0 ; BITALG-NEXT: retq %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0) %3 = icmp ult <4 x i64> %2, @@ -15024,9 +14721,8 @@ ; AVX512VPOPCNTDQVL-LABEL: ugt_34_v4i64: ; AVX512VPOPCNTDQVL: # %bb.0: ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1 -; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0 -; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z} +; 
AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [34,34,34,34]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_34_v4i64:
@@ -15044,9 +14740,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [34,34,34,34]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 34, i64 34, i64 34, i64 34>
@@ -15108,9 +14803,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_35_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [35,35,35,35]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_35_v4i64:
@@ -15128,9 +14822,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [35,35,35,35]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 35, i64 35, i64 35, i64 35>
@@ -15192,9 +14885,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_35_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [35,35,35,35]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_35_v4i64:
@@ -15212,9 +14904,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [35,35,35,35]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 35, i64 35, i64 35, i64 35>
@@ -15276,9 +14967,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_36_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [36,36,36,36]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_36_v4i64:
@@ -15296,9 +14986,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [36,36,36,36]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 36, i64 36, i64 36, i64 36>
@@ -15360,9 +15049,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_36_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [36,36,36,36]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_36_v4i64:
@@ -15380,9 +15068,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [36,36,36,36]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 36, i64 36, i64 36, i64 36>
@@ -15444,9 +15131,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_37_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [37,37,37,37]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_37_v4i64:
@@ -15464,9 +15150,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [37,37,37,37]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 37, i64 37, i64 37, i64 37>
@@ -15528,9 +15213,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_37_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [37,37,37,37]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_37_v4i64:
@@ -15548,9 +15232,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [37,37,37,37]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 37, i64 37, i64 37, i64 37>
@@ -15612,9 +15295,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_38_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [38,38,38,38]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_38_v4i64:
@@ -15632,9 +15314,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [38,38,38,38]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 38, i64 38, i64 38, i64 38>
@@ -15696,9 +15377,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_38_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [38,38,38,38]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_38_v4i64:
@@ -15716,9 +15396,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [38,38,38,38]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 38, i64 38, i64 38, i64 38>
@@ -15780,9 +15459,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_39_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [39,39,39,39]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_39_v4i64:
@@ -15800,9 +15478,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [39,39,39,39]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 39, i64 39, i64 39, i64 39>
@@ -15864,9 +15541,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_39_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [39,39,39,39]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_39_v4i64:
@@ -15884,9 +15560,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [39,39,39,39]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 39, i64 39, i64 39, i64 39>
@@ -15948,9 +15623,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_40_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [40,40,40,40]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_40_v4i64:
@@ -15968,9 +15642,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [40,40,40,40]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 40, i64 40, i64 40, i64 40>
@@ -16032,9 +15705,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_40_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [40,40,40,40]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_40_v4i64:
@@ -16052,9 +15724,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [40,40,40,40]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 40, i64 40, i64 40, i64 40>
@@ -16116,9 +15787,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_41_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [41,41,41,41]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_41_v4i64:
@@ -16136,9 +15806,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [41,41,41,41]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 41, i64 41, i64 41, i64 41>
@@ -16200,9 +15869,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_41_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [41,41,41,41]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_41_v4i64:
@@ -16220,9 +15888,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [41,41,41,41]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 41, i64 41, i64 41, i64 41>
@@ -16284,9 +15951,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_42_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [42,42,42,42]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_42_v4i64:
@@ -16304,9 +15970,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [42,42,42,42]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 42, i64 42, i64 42, i64 42>
@@ -16368,9 +16033,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_42_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [42,42,42,42]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_42_v4i64:
@@ -16388,9 +16052,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [42,42,42,42]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 42, i64 42, i64 42, i64 42>
@@ -16452,9 +16115,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_43_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [43,43,43,43]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_43_v4i64:
@@ -16472,9 +16134,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [43,43,43,43]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 43, i64 43, i64 43, i64 43>
@@ -16536,9 +16197,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_43_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [43,43,43,43]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_43_v4i64:
@@ -16556,9 +16216,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [43,43,43,43]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 43, i64 43, i64 43, i64 43>
@@ -16620,9 +16279,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_44_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [44,44,44,44]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_44_v4i64:
@@ -16640,9 +16298,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [44,44,44,44]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 44, i64 44, i64 44, i64 44>
@@ -16704,9 +16361,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_44_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [44,44,44,44]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_44_v4i64:
@@ -16724,9 +16380,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [44,44,44,44]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 44, i64 44, i64 44, i64 44>
@@ -16788,9 +16443,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_45_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [45,45,45,45]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_45_v4i64:
@@ -16808,9 +16462,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [45,45,45,45]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 45, i64 45, i64 45, i64 45>
@@ -16872,9 +16525,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_45_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [45,45,45,45]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_45_v4i64:
@@ -16892,9 +16544,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [45,45,45,45]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 45, i64 45, i64 45, i64 45>
@@ -16956,9 +16607,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_46_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [46,46,46,46]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_46_v4i64:
@@ -16976,9 +16626,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [46,46,46,46]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 46, i64 46, i64 46, i64 46>
@@ -17040,9 +16689,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_46_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [46,46,46,46]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_46_v4i64:
@@ -17060,9 +16708,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [46,46,46,46]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 46, i64 46, i64 46, i64 46>
@@ -17124,9 +16771,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_47_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [47,47,47,47]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_47_v4i64:
@@ -17144,9 +16790,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [47,47,47,47]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 47, i64 47, i64 47, i64 47>
@@ -17208,9 +16853,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_47_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [47,47,47,47]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_47_v4i64:
@@ -17228,9 +16872,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [47,47,47,47]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 47, i64 47, i64 47, i64 47>
@@ -17292,9 +16935,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_48_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [48,48,48,48]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_48_v4i64:
@@ -17312,9 +16954,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [48,48,48,48]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 48, i64 48, i64 48, i64 48>
@@ -17376,9 +17017,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_48_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [48,48,48,48]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_48_v4i64:
@@ -17396,9 +17036,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [48,48,48,48]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 48, i64 48, i64 48, i64 48>
@@ -17460,9 +17099,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_49_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [49,49,49,49]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_49_v4i64:
@@ -17480,9 +17118,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [49,49,49,49]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 49, i64 49, i64 49, i64 49>
@@ -17544,9 +17181,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_49_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [49,49,49,49]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_49_v4i64:
@@ -17564,9 +17200,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [49,49,49,49]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 49, i64 49, i64 49, i64 49>
@@ -17628,9 +17263,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_50_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [50,50,50,50]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_50_v4i64:
@@ -17648,9 +17282,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [50,50,50,50]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 50, i64 50, i64 50, i64 50>
@@ -17712,9 +17345,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_50_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [50,50,50,50]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_50_v4i64:
@@ -17732,9 +17364,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [50,50,50,50]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 50, i64 50, i64 50, i64 50>
@@ -17796,9 +17427,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_51_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [51,51,51,51]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_51_v4i64:
@@ -17816,9 +17446,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [51,51,51,51]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 51, i64 51, i64 51, i64 51>
@@ -17880,9 +17509,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_51_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [51,51,51,51]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_51_v4i64:
@@ -17900,9 +17528,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [51,51,51,51]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 51, i64 51, i64 51, i64 51>
@@ -17964,9 +17591,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_52_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [52,52,52,52]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_52_v4i64:
@@ -17984,9 +17610,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [52,52,52,52]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 52, i64 52, i64 52, i64 52>
@@ -18048,9 +17673,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_52_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [52,52,52,52]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_52_v4i64:
@@ -18068,9 +17692,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [52,52,52,52]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 52, i64 52, i64 52, i64 52>
@@ -18132,9 +17755,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_53_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [53,53,53,53]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_53_v4i64:
@@ -18152,9 +17774,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [53,53,53,53]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 53, i64 53, i64 53, i64 53>
@@ -18216,9 +17837,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_53_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [53,53,53,53]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_53_v4i64:
@@ -18236,9 +17856,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [53,53,53,53]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 53, i64 53, i64 53, i64 53>
@@ -18300,9 +17919,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_54_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [54,54,54,54]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_54_v4i64:
@@ -18320,9 +17938,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [54,54,54,54]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 54, i64 54, i64 54, i64 54>
@@ -18384,9 +18001,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_54_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [54,54,54,54]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_54_v4i64:
@@ -18404,9 +18020,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [54,54,54,54]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 54, i64 54, i64 54, i64 54>
@@ -18468,9 +18083,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_55_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [55,55,55,55]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_55_v4i64:
@@ -18488,9 +18102,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [55,55,55,55]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 55, i64 55, i64 55, i64 55>
@@ -18552,9 +18165,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_55_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [55,55,55,55]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_55_v4i64:
@@ -18572,9 +18184,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [55,55,55,55]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 55, i64 55, i64 55, i64 55>
@@ -18636,9 +18247,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_56_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [56,56,56,56]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_56_v4i64:
@@ -18656,9 +18266,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [56,56,56,56]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 56, i64 56, i64 56, i64 56>
@@ -18720,9 +18329,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_56_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [56,56,56,56]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_56_v4i64:
@@ -18740,9 +18348,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [56,56,56,56]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 56, i64 56, i64 56, i64 56>
@@ -18804,9 +18411,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_57_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [57,57,57,57]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_57_v4i64:
@@ -18824,9 +18430,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [57,57,57,57]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 57, i64 57, i64 57, i64 57>
@@ -18888,9 +18493,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_57_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [57,57,57,57]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_57_v4i64:
@@ -18908,9 +18512,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [57,57,57,57]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 57, i64 57, i64 57, i64 57>
@@ -18972,9 +18575,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_58_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [58,58,58,58]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_58_v4i64:
@@ -18992,9 +18594,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [58,58,58,58]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 58, i64 58, i64 58, i64 58>
@@ -19056,9 +18657,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_58_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [58,58,58,58]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_58_v4i64:
@@ -19076,9 +18676,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [58,58,58,58]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 58, i64 58, i64 58, i64 58>
@@ -19140,9 +18739,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_59_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [59,59,59,59]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_59_v4i64:
@@ -19160,9 +18758,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [59,59,59,59]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 59, i64 59, i64 59, i64 59>
@@ -19224,9 +18821,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_59_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [59,59,59,59]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_59_v4i64:
@@ -19244,9 +18840,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [59,59,59,59]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 59, i64 59, i64 59, i64 59>
@@ -19308,9 +18903,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_60_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [60,60,60,60]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_60_v4i64:
@@ -19328,9 +18922,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [60,60,60,60]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 60, i64 60, i64 60, i64 60>
@@ -19392,9 +18985,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_60_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [60,60,60,60]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_60_v4i64:
@@ -19412,9 +19004,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [60,60,60,60]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 60, i64 60, i64 60, i64 60>
@@ -19476,9 +19067,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_61_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [61,61,61,61]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_61_v4i64:
@@ -19496,9 +19086,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [61,61,61,61]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 61, i64 61, i64 61, i64 61>
@@ -19560,9 +19149,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_61_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [61,61,61,61]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_61_v4i64:
@@ -19580,9 +19168,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [61,61,61,61]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 61, i64 61, i64 61, i64 61>
@@ -19644,9 +19231,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_62_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [62,62,62,62]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_62_v4i64:
@@ -19664,9 +19250,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [62,62,62,62]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 62, i64 62, i64 62, i64 62>
@@ -19728,9 +19313,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ugt_62_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [62,62,62,62]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ugt_62_v4i64:
@@ -19748,9 +19332,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [62,62,62,62]
+; BITALG-NEXT: vpcmpgtq %ymm1, %ymm0, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ugt <4 x i64> %2, <i64 62, i64 62, i64 62, i64 62>
@@ -19812,9 +19395,8 @@
 ; AVX512VPOPCNTDQVL-LABEL: ult_63_v4i64:
 ; AVX512VPOPCNTDQVL: # %bb.0:
 ; AVX512VPOPCNTDQVL-NEXT: vpopcntq %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; AVX512VPOPCNTDQVL-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; AVX512VPOPCNTDQVL-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; AVX512VPOPCNTDQVL-NEXT: vpbroadcastq {{.*#+}} ymm1 = [63,63,63,63]
+; AVX512VPOPCNTDQVL-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; AVX512VPOPCNTDQVL-NEXT: retq
 ;
 ; BITALG_NOVLX-LABEL: ult_63_v4i64:
@@ -19832,9 +19414,8 @@
 ; BITALG-NEXT: vpopcntb %ymm0, %ymm0
 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1
 ; BITALG-NEXT: vpsadbw %ymm1, %ymm0, %ymm0
-; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to4}, %ymm0, %k1
-; BITALG-NEXT: vpcmpeqd %ymm0, %ymm0, %ymm0
-; BITALG-NEXT: vmovdqa64 %ymm0, %ymm0 {%k1} {z}
+; BITALG-NEXT: vpbroadcastq {{.*#+}} ymm1 = [63,63,63,63]
+; BITALG-NEXT: vpcmpgtq %ymm0, %ymm1, %ymm0
 ; BITALG-NEXT: retq
 %2 = tail call <4 x i64> @llvm.ctpop.v4i64(<4 x i64> %0)
 %3 = icmp ult <4 x i64> %2, <i64 63, i64 63, i64 63, i64 63>
diff --git a/llvm/test/CodeGen/X86/vector-popcnt-512-ult-ugt.ll b/llvm/test/CodeGen/X86/vector-popcnt-512-ult-ugt.ll
--- a/llvm/test/CodeGen/X86/vector-popcnt-512-ult-ugt.ll
+++ b/llvm/test/CodeGen/X86/vector-popcnt-512-ult-ugt.ll
@@ -56,7 +56,7 @@
 ; BITALG-LABEL: ugt_1_v64i8:
 ; BITALG: # %bb.0:
 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; BITALG-NEXT: vpmovm2b %k0, %zmm0
 ; BITALG-NEXT: retq
 %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0)
@@ -113,7 +113,7 @@
 ; BITALG-LABEL: ult_2_v64i8:
 ; BITALG: # %bb.0:
 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; BITALG-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; BITALG-NEXT: vpmovm2b %k0, %zmm0
 ; BITALG-NEXT: retq
 %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0)
@@ -140,11 +140,9 @@
 ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0
 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0
 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0
-; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
-; AVX512F-NEXT: vpmaxub %ymm1, %ymm0, %ymm3
-; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0
-; AVX512F-NEXT: vpmaxub %ymm1, %ymm2, %ymm1
-; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1
+; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
+; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0
+; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512F-NEXT: retq
 ;
@@ -158,7 +156,7 @@
 ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
 ; AVX512BW-NEXT: retq
 ;
@@ -179,11 +177,9 @@
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm0, %ymm3
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm2, %ymm1
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1
+; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
+; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0
+; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq
 ;
@@ -197,14 +193,14 @@
 ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0
-; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: retq
 ;
 ; BITALG-LABEL: ugt_2_v64i8:
 ; BITALG: # %bb.0:
 ; BITALG-NEXT: vpopcntb %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; BITALG-NEXT: vpmovm2b %k0, %zmm0
 ; BITALG-NEXT: retq
 %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0)
@@ -231,11 +227,9 @@
 ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0
 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0
 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0
-; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
-; AVX512F-NEXT: vpminub %ymm1, %ymm0, %ymm3
-; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0
-; AVX512F-NEXT: vpminub %ymm1, %ymm2, %ymm1
-; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1
+; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
+; AVX512F-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0
+; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512F-NEXT: retq
 ;
@@ -249,7 +243,7 @@
 ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0
 ; AVX512BW-NEXT: retq
 ;
@@ -270,11 +264,9 @@
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2]
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm0, %ymm3
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm2, %ymm1
-; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1
+; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3]
+; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0
+; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1
 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0
 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq
 ;
@@ -288,14 +280,14 @@
 ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0
-; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: retq
 ;
 ; BITALG-LABEL:
ult_3_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -322,11 +314,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; AVX512F-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -340,7 +330,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -361,11 +351,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -379,14 +367,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_3_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -413,11 +401,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; AVX512F-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = 
[4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX512F-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -431,7 +417,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -452,11 +438,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -470,14 +454,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_4_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -504,11 +488,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; AVX512F-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -522,7 +504,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -543,11 +525,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = 
[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -561,14 +541,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_4_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -595,11 +575,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; AVX512F-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX512F-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -613,7 +591,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -634,11 +612,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -652,14 +628,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; 
AVX512VPOPCNTDQ-BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_5_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -686,11 +662,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; AVX512F-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -704,7 +678,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -725,11 +699,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -743,14 +715,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_5_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -777,11 +749,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; 
AVX512F-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX512F-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -795,7 +765,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -816,11 +786,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -834,14 +802,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_6_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -868,11 +836,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] -; AVX512F-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -886,7 +852,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -907,11 +873,9 @@ ; 
AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpmaxub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm0, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm1, %ymm2, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -925,14 +889,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_6_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -959,11 +923,9 @@ ; AVX512F-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512F-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512F-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; AVX512F-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512F-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512F-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512F-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512F-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX512F-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512F-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512F-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; AVX512F-NEXT: retq ; @@ -977,7 +939,7 @@ ; AVX512BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -998,11 +960,9 @@ ; AVX512VPOPCNTDQ-NOBW-NEXT: vpand %ymm1, %ymm0, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpshufb %ymm0, %ymm3, %ymm0 ; AVX512VPOPCNTDQ-NOBW-NEXT: vpaddb %ymm4, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6] -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm0, %ymm3 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm3, %ymm0, %ymm0 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpminub %ymm1, %ymm2, %ymm1 -; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpeqb %ymm1, %ymm2, %ymm1 +; AVX512VPOPCNTDQ-NOBW-NEXT: vmovdqa {{.*#+}} ymm1 = [7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7] +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm0, %ymm1, %ymm0 +; AVX512VPOPCNTDQ-NOBW-NEXT: vpcmpgtb %ymm2, %ymm1, %ymm1 ; AVX512VPOPCNTDQ-NOBW-NEXT: vinserti64x4 $1, %ymm0, %zmm1, %zmm0 ; 
AVX512VPOPCNTDQ-NOBW-NEXT: retq ; @@ -1016,14 +976,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpandq %zmm1, %zmm0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpshufb %zmm0, %zmm3, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2b %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_7_v64i8: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltub {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltb {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2b %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <64 x i8> @llvm.ctpop.v64i8(<64 x i8> %0) @@ -1082,7 +1042,7 @@ ; BITALG-LABEL: ugt_1_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1139,7 +1099,7 @@ ; BITALG-LABEL: ult_2_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1191,7 +1151,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1223,14 +1183,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_2_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1282,7 +1242,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1314,14 +1274,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_3_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, 
%k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1373,7 +1333,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1405,14 +1365,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_3_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1464,7 +1424,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1496,14 +1456,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_4_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1555,7 +1515,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1587,14 +1547,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_4_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1646,7 +1606,7 @@ ; 
AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1678,14 +1638,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_5_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1737,7 +1697,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1769,14 +1729,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_5_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1828,7 +1788,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1860,14 +1820,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_6_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -1919,7 +1879,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: 
vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -1951,14 +1911,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_6_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2010,7 +1970,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2042,14 +2002,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_7_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2101,7 +2061,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2133,14 +2093,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_7_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2192,7 +2152,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2224,14 +2184,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: 
vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_8_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2283,7 +2243,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2315,14 +2275,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_8_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2374,7 +2334,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2406,14 +2366,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_9_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2465,7 +2425,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2497,14 +2457,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_9_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2556,7 +2516,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2588,14 +2548,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_10_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2647,7 +2607,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2679,14 +2639,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_10_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2738,7 +2698,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2770,14 +2730,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_11_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; 
BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2829,7 +2789,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2861,14 +2821,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_11_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -2920,7 +2880,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -2952,14 +2912,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_12_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -3011,7 +2971,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -3043,14 +3003,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_12_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> 
@llvm.ctpop.v32i16(<32 x i16> %0) @@ -3102,7 +3062,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -3134,14 +3094,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_13_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -3193,7 +3153,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -3225,14 +3185,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ugt_13_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -3284,7 +3244,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512BW-NEXT: retq ; @@ -3316,14 +3276,14 @@ ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0 ; AVX512VPOPCNTDQ-BW-NEXT: retq ; ; BITALG-LABEL: ult_14_v32i16: ; BITALG: # %bb.0: ; BITALG-NEXT: vpopcntw %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 +; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0 ; BITALG-NEXT: vpmovm2w %k0, %zmm0 ; BITALG-NEXT: retq %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0) @@ -3375,7 +3335,7 @@ ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuw 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
 ; AVX512BW-NEXT: retq
 ;
@@ -3407,14 +3367,14 @@
 ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1
 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0
-; AVX512VPOPCNTDQ-BW-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512VPOPCNTDQ-BW-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: retq
 ;
 ; BITALG-LABEL: ugt_14_v32i16:
 ; BITALG: # %bb.0:
 ; BITALG-NEXT: vpopcntw %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; BITALG-NEXT: vpcmpgtw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; BITALG-NEXT: vpmovm2w %k0, %zmm0
 ; BITALG-NEXT: retq
 %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0)
@@ -3466,7 +3426,7 @@
 ; AVX512BW-NEXT: vpsllw $8, %zmm0, %zmm1
 ; AVX512BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0
 ; AVX512BW-NEXT: vpsrlw $8, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512BW-NEXT: vpmovm2w %k0, %zmm0
 ; AVX512BW-NEXT: retq
 ;
@@ -3498,14 +3458,14 @@
 ; AVX512VPOPCNTDQ-BW-NEXT: vpsllw $8, %zmm0, %zmm1
 ; AVX512VPOPCNTDQ-BW-NEXT: vpaddb %zmm0, %zmm1, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpsrlw $8, %zmm0, %zmm0
-; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; AVX512VPOPCNTDQ-BW-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; AVX512VPOPCNTDQ-BW-NEXT: vpmovm2w %k0, %zmm0
 ; AVX512VPOPCNTDQ-BW-NEXT: retq
 ;
 ; BITALG-LABEL: ult_15_v32i16:
 ; BITALG: # %bb.0:
 ; BITALG-NEXT: vpopcntw %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltuw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
+; BITALG-NEXT: vpcmpltw {{\.?LCPI[0-9]+_[0-9]+}}(%rip), %zmm0, %k0
 ; BITALG-NEXT: vpmovm2w %k0, %zmm0
 ; BITALG-NEXT: retq
 %2 = tail call <32 x i16> @llvm.ctpop.v32i16(<32 x i16> %0)
@@ -3534,7 +3494,7 @@
 ; AVX512VPOPCNTDQ-LABEL: ugt_1_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3571,7 +3531,7 @@
 ; AVX512VPOPCNTDQ-LABEL: ult_2_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3618,7 +3578,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -3638,14 +3598,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_2_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3658,7 +3618,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -3697,7 +3657,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -3717,14 +3677,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_3_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3737,7 +3697,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -3776,7 +3736,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -3796,14 +3756,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_3_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3816,7 +3776,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -3855,7 +3815,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -3875,14 +3835,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_4_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3895,7 +3855,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -3934,7 +3894,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -3954,14 +3914,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_4_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -3974,7 +3934,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4013,7 +3973,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4033,14 +3993,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_5_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4053,7 +4013,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4092,7 +4052,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4112,14 +4072,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_5_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4132,7 +4092,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4171,7 +4131,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4191,14 +4151,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_6_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4211,7 +4171,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4250,7 +4210,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4270,14 +4230,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_6_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4290,7 +4250,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4329,7 +4289,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4349,14 +4309,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_7_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4369,7 +4329,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4408,7 +4368,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4428,14 +4388,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_7_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4448,7 +4408,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4487,7 +4447,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4507,14 +4467,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_8_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4527,7 +4487,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4566,7 +4526,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4586,14 +4546,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_8_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4606,7 +4566,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4645,7 +4605,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4665,14 +4625,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_9_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4685,7 +4645,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4724,7 +4684,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4744,14 +4704,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_9_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4764,7 +4724,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4803,7 +4763,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4823,14 +4783,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_10_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4843,7 +4803,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4882,7 +4842,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4902,14 +4862,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_10_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -4922,7 +4882,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -4961,7 +4921,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -4981,14 +4941,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_11_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5001,7 +4961,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5040,7 +5000,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5060,14 +5020,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_11_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5080,7 +5040,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5119,7 +5079,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5139,14 +5099,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_12_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5159,7 +5119,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5198,7 +5158,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5218,14 +5178,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_12_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5238,7 +5198,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5277,7 +5237,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5297,14 +5257,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_13_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5317,7 +5277,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5356,7 +5316,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5376,14 +5336,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_13_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5396,7 +5356,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5435,7 +5395,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5455,14 +5415,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_14_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5475,7 +5435,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5514,7 +5474,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5534,14 +5494,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_14_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5554,7 +5514,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5593,7 +5553,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5613,14 +5573,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_15_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5633,7 +5593,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5672,7 +5632,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5692,14 +5652,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_15_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5712,7 +5672,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5751,7 +5711,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5771,14 +5731,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_16_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5791,7 +5751,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5830,7 +5790,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5850,14 +5810,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_16_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5870,7 +5830,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5909,7 +5869,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -5929,14 +5889,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_17_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -5949,7 +5909,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -5988,7 +5948,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6008,14 +5968,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_17_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6028,7 +5988,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6067,7 +6027,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6087,14 +6047,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_18_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6107,7 +6067,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6146,7 +6106,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6166,14 +6126,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_18_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6186,7 +6146,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6225,7 +6185,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6245,14 +6205,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_19_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6265,7 +6225,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6304,7 +6264,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6324,14 +6284,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_19_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6344,7 +6304,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6383,7 +6343,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6403,14 +6363,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_20_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6423,7 +6383,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6462,7 +6422,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6482,14 +6442,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ugt_20_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512VPOPCNTDQ-NEXT: retq
 ;
@@ -6502,7 +6462,7 @@
 ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; BITALG-NEXT: retq
 %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0)
@@ -6541,7 +6501,7 @@
 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0
 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0
 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0
-; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512F-NEXT: retq
 ;
@@ -6561,14 +6521,14 @@
 ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13]
 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0
 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0
-; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
+; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1
 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z}
 ; AVX512BW-NEXT: retq
 ;
 ; AVX512VPOPCNTDQ-LABEL: ult_21_v16i32:
 ; AVX512VPOPCNTDQ: # %bb.0:
 ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0
-; AVX512VPOPCNTDQ-NEXT:
vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -6581,7 +6541,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -6620,7 +6580,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -6640,14 +6600,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_21_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -6660,7 +6620,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -6699,7 +6659,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -6719,14 +6679,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_22_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -6739,7 +6699,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -6778,7 +6738,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -6798,14 +6758,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_22_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -6818,7 +6778,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -6857,7 +6817,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -6877,14 +6837,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = 
zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_23_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -6897,7 +6857,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -6936,7 +6896,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -6956,14 +6916,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_23_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -6976,7 +6936,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7015,7 +6975,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7035,14 +6995,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_24_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7055,7 +7015,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7094,7 +7054,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7114,14 +7074,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_24_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7134,7 +7094,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq 
%2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7173,7 +7133,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7193,14 +7153,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_25_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7213,7 +7173,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7252,7 +7212,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7272,14 +7232,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_25_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7292,7 +7252,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb 
%zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7331,7 +7291,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7351,14 +7311,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_26_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7371,7 +7331,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7410,7 +7370,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7430,14 +7390,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_26_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7450,7 
+7410,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7489,7 +7449,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7509,14 +7469,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_27_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7529,7 +7489,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7568,7 +7528,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7588,14 +7548,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_27_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: 
vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7608,7 +7568,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7647,7 +7607,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7667,14 +7627,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_28_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7687,7 +7647,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7726,7 +7686,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7746,14 +7706,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_28_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7766,7 +7726,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7805,7 +7765,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7825,14 +7785,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_29_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7845,7 +7805,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7884,7 +7844,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7904,14 +7864,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = 
zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_29_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -7924,7 +7884,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -7963,7 +7923,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -7983,14 +7943,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_30_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8003,7 +7963,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -8042,7 +8002,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleud 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8062,14 +8022,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_30_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8082,7 +8042,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -8121,7 +8081,7 @@ ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vpackuswb %ymm2, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8141,14 +8101,14 @@ ; AVX512BW-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; AVX512BW-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_31_v16i32: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntd %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8161,7 +8121,7 @@ ; BITALG-NEXT: vpunpckldq {{.*#+}} zmm0 = zmm0[0],zmm1[0],zmm0[1],zmm1[1],zmm0[4],zmm1[4],zmm0[5],zmm1[5],zmm0[8],zmm1[8],zmm0[9],zmm1[9],zmm0[12],zmm1[12],zmm0[13],zmm1[13] ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 ; BITALG-NEXT: vpackuswb %zmm2, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltud {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltd {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to16}, %zmm0, %k1 ; BITALG-NEXT: vpternlogd $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq 
%2 = tail call <16 x i32> @llvm.ctpop.v16i32(<16 x i32> %0) @@ -8190,7 +8150,7 @@ ; AVX512VPOPCNTDQ-LABEL: ugt_1_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8227,7 +8187,7 @@ ; AVX512VPOPCNTDQ-LABEL: ult_2_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8266,7 +8226,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8282,14 +8242,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_2_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8298,7 +8258,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8329,7 +8289,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8345,14 +8305,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_3_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 
; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8361,7 +8321,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8392,7 +8352,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8408,14 +8368,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_3_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8424,7 +8384,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8455,7 +8415,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8471,14 +8431,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_4_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8487,7 +8447,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; 
BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8518,7 +8478,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8534,14 +8494,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_4_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8550,7 +8510,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8581,7 +8541,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8597,14 +8557,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_5_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8613,7 +8573,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8644,7 +8604,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: 
vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8660,14 +8620,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_5_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8676,7 +8636,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8707,7 +8667,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8723,14 +8683,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_6_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8739,7 +8699,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8770,7 +8730,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8786,14 +8746,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, 
%zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_6_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8802,7 +8762,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8833,7 +8793,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8849,14 +8809,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_7_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8865,7 +8825,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8896,7 +8856,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8912,14 +8872,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; 
AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_7_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8928,7 +8888,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -8959,7 +8919,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -8975,14 +8935,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_8_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -8991,7 +8951,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9022,7 +8982,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9038,14 +8998,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_8_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: 
vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9054,7 +9014,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9085,7 +9045,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9101,14 +9061,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_9_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9117,7 +9077,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9148,7 +9108,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9164,14 +9124,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_9_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9180,7 +9140,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9211,7 +9171,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9227,14 +9187,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_10_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9243,7 +9203,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9274,7 +9234,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9290,14 +9250,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_10_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9306,7 +9266,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9337,7 +9297,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, 
%ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9353,14 +9313,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_11_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9369,7 +9329,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9400,7 +9360,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9416,14 +9376,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_11_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9432,7 +9392,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9463,7 +9423,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9479,14 +9439,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; 
AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_12_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9495,7 +9455,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9526,7 +9486,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9542,14 +9502,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_12_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9558,7 +9518,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9589,7 +9549,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9605,14 +9565,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; 
AVX512VPOPCNTDQ-LABEL: ult_13_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9621,7 +9581,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9652,7 +9612,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9668,14 +9628,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_13_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9684,7 +9644,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9715,7 +9675,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9731,14 +9691,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_14_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, 
%zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9747,7 +9707,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9778,7 +9738,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9794,14 +9754,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_14_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9810,7 +9770,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9841,7 +9801,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9857,14 +9817,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_15_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9873,7 +9833,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, 
%zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9904,7 +9864,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9920,14 +9880,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_15_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9936,7 +9896,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -9967,7 +9927,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -9983,14 +9943,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_16_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -9999,7 +9959,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10030,7 +9990,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: 
vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10046,14 +10006,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_16_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10062,7 +10022,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10093,7 +10053,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10109,14 +10069,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_17_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10125,7 +10085,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10156,7 +10116,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10172,14 +10132,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, 
%xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_17_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10188,7 +10148,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10219,7 +10179,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10235,14 +10195,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_18_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10251,7 +10211,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10282,7 +10242,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10298,14 +10258,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; 
AVX512VPOPCNTDQ-LABEL: ugt_18_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10314,7 +10274,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10345,7 +10305,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10361,14 +10321,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_19_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10377,7 +10337,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10408,7 +10368,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10424,14 +10384,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_19_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, 
%zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10440,7 +10400,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10471,7 +10431,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10487,14 +10447,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_20_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10503,7 +10463,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10534,7 +10494,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10550,14 +10510,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_20_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10566,7 +10526,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10597,7 +10557,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10613,14 +10573,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_21_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10629,7 +10589,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10660,7 +10620,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10676,14 +10636,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_21_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10692,7 +10652,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10723,7 +10683,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: 
vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10739,14 +10699,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_22_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10755,7 +10715,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10786,7 +10746,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10802,14 +10762,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_22_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10818,7 +10778,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10849,7 +10809,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10865,14 +10825,14 @@ ; AVX512BW-NEXT: 
vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_23_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10881,7 +10841,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10912,7 +10872,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10928,14 +10888,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_23_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -10944,7 +10904,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -10975,7 +10935,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -10991,14 +10951,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, 
%zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_24_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11007,7 +10967,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11038,7 +10998,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11054,14 +11014,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_24_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11070,7 +11030,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11101,7 +11061,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11117,14 +11077,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_25_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 
; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11133,7 +11093,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11164,7 +11124,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11180,14 +11140,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_25_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11196,7 +11156,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11227,7 +11187,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11243,14 +11203,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_26_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11259,7 +11219,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, 
%k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11290,7 +11250,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11306,14 +11266,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_26_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11322,7 +11282,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11353,7 +11313,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11369,14 +11329,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_27_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11385,7 +11345,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11416,7 +11376,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, 
%ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11432,14 +11392,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_27_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11448,7 +11408,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11479,7 +11439,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11495,14 +11455,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_28_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11511,7 +11471,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11542,7 +11502,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11558,14 
+11518,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_28_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11574,7 +11534,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11605,7 +11565,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11621,14 +11581,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_29_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11637,7 +11597,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11668,7 +11628,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11684,14 +11644,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: 
vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_29_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11700,7 +11660,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11731,7 +11691,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11747,14 +11707,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_30_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11763,7 +11723,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11794,7 +11754,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11810,14 +11770,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_30_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11826,7 +11786,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11857,7 +11817,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11873,14 +11833,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_31_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11889,7 +11849,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11920,7 +11880,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11936,14 +11896,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_31_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -11952,7 +11912,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: 
vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -11983,7 +11943,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -11999,14 +11959,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_32_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12015,7 +11975,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12046,7 +12006,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12062,14 +12022,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_32_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12078,7 +12038,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12109,7 +12069,7 @@ ; AVX512F-NEXT: vpaddb 
%ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12125,14 +12085,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_33_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12141,7 +12101,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12172,7 +12132,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12188,14 +12148,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_33_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12204,7 +12164,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12235,7 +12195,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 
{%k1} {z} ; AVX512F-NEXT: retq ; @@ -12251,14 +12211,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_34_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12267,7 +12227,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12298,7 +12258,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12314,14 +12274,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_34_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12330,7 +12290,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12361,7 +12321,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12377,14 +12337,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_35_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12393,7 +12353,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12424,7 +12384,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12440,14 +12400,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_35_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12456,7 +12416,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12487,7 +12447,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12503,14 +12463,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_36_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12519,7 +12479,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12550,7 +12510,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12566,14 +12526,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_36_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12582,7 +12542,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12613,7 +12573,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12629,14 +12589,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_37_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12645,7 +12605,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor 
%xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12676,7 +12636,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12692,14 +12652,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_37_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12708,7 +12668,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12739,7 +12699,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12755,14 +12715,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_38_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12771,7 +12731,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> 
@llvm.ctpop.v8i64(<8 x i64> %0) @@ -12802,7 +12762,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12818,14 +12778,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_38_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12834,7 +12794,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12865,7 +12825,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12881,14 +12841,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_39_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12897,7 +12857,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12928,7 +12888,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -12944,14 +12904,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_39_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -12960,7 +12920,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -12991,7 +12951,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13007,14 +12967,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_40_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13023,7 +12983,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13054,7 +13014,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13070,14 +13030,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: 
vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_40_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13086,7 +13046,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13117,7 +13077,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13133,14 +13093,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_41_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13149,7 +13109,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13180,7 +13140,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13196,14 +13156,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_41_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: 
vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13212,7 +13172,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13243,7 +13203,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13259,14 +13219,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_42_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13275,7 +13235,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13306,7 +13266,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13322,14 +13282,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_42_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13338,7 +13298,7 @@ ; 
BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13369,7 +13329,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13385,14 +13345,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_43_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13401,7 +13361,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13432,7 +13392,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13448,14 +13408,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_43_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13464,7 +13424,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 
{%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13495,7 +13455,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13511,14 +13471,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_44_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13527,7 +13487,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13558,7 +13518,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13574,14 +13534,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_44_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13590,7 +13550,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13621,7 +13581,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, 
%zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13637,14 +13597,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_45_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13653,7 +13613,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13684,7 +13644,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13700,14 +13660,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_45_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13716,7 +13676,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13747,7 +13707,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13763,14 +13723,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, 
%zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_46_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13779,7 +13739,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13810,7 +13770,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13826,14 +13786,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_46_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13842,7 +13802,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13873,7 +13833,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13889,14 +13849,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_47_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: 
; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13905,7 +13865,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13936,7 +13896,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -13952,14 +13912,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_47_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -13968,7 +13928,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -13999,7 +13959,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14015,14 +13975,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_48_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ 
-14031,7 +13991,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14062,7 +14022,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14078,14 +14038,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_48_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14094,7 +14054,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14125,7 +14085,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14141,14 +14101,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_49_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14157,7 +14117,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq 
$255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14188,7 +14148,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14204,14 +14164,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_49_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14220,7 +14180,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14251,7 +14211,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14267,14 +14227,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_50_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14283,7 +14243,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14314,7 +14274,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14330,14 +14290,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_50_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14346,7 +14306,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14377,7 +14337,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14393,14 +14353,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_51_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14409,7 +14369,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14440,7 +14400,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14456,14 +14416,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, 
%xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_51_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14472,7 +14432,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14503,7 +14463,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14519,14 +14479,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_52_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14535,7 +14495,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14566,7 +14526,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14582,14 +14542,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: 
ugt_52_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14598,7 +14558,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14629,7 +14589,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14645,14 +14605,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_53_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14661,7 +14621,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14692,7 +14652,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14708,14 +14668,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_53_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 
{%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14724,7 +14684,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14755,7 +14715,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14771,14 +14731,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_54_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14787,7 +14747,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14818,7 +14778,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14834,14 +14794,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_54_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14850,7 +14810,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq 
{{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14881,7 +14841,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14897,14 +14857,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_55_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14913,7 +14873,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -14944,7 +14904,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -14960,14 +14920,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_55_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -14976,7 +14936,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15007,7 +14967,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: 
vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15023,14 +14983,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_56_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15039,7 +14999,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15070,7 +15030,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15086,14 +15046,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_56_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15102,7 +15062,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15133,7 +15093,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15149,14 +15109,14 @@ ; AVX512BW-NEXT: 
vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_57_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15165,7 +15125,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15196,7 +15156,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15212,14 +15172,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_57_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15228,7 +15188,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15259,7 +15219,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15275,14 +15235,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, 
%zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_58_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15291,7 +15251,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15322,7 +15282,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15338,14 +15298,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_58_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15354,7 +15314,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15385,7 +15345,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15401,14 +15361,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_59_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 
; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15417,7 +15377,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15448,7 +15408,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15464,14 +15424,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_59_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15480,7 +15440,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15511,7 +15471,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15527,14 +15487,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_60_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15543,7 +15503,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, 
%k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15574,7 +15534,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15590,14 +15550,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_60_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15606,7 +15566,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15637,7 +15597,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15653,14 +15613,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_61_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15669,7 +15629,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15700,7 +15660,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, 
%ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15716,14 +15676,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_61_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15732,7 +15692,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15763,7 +15723,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15779,14 +15739,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_62_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15795,7 +15755,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15826,7 +15786,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15842,14 
+15802,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ugt_62_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15858,7 +15818,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpnleuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpgtq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0) @@ -15889,7 +15849,7 @@ ; AVX512F-NEXT: vpaddb %ymm5, %ymm0, %ymm0 ; AVX512F-NEXT: vpsadbw %ymm3, %ymm0, %ymm0 ; AVX512F-NEXT: vinserti64x4 $1, %ymm1, %zmm0, %zmm0 -; AVX512F-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512F-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512F-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512F-NEXT: retq ; @@ -15905,14 +15865,14 @@ ; AVX512BW-NEXT: vpaddb %zmm2, %zmm0, %zmm0 ; AVX512BW-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; AVX512BW-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; AVX512BW-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512BW-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512BW-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512BW-NEXT: retq ; ; AVX512VPOPCNTDQ-LABEL: ult_63_v8i64: ; AVX512VPOPCNTDQ: # %bb.0: ; AVX512VPOPCNTDQ-NEXT: vpopcntq %zmm0, %zmm0 -; AVX512VPOPCNTDQ-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; AVX512VPOPCNTDQ-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; AVX512VPOPCNTDQ-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; AVX512VPOPCNTDQ-NEXT: retq ; @@ -15921,7 +15881,7 @@ ; BITALG-NEXT: vpopcntb %zmm0, %zmm0 ; BITALG-NEXT: vpxor %xmm1, %xmm1, %xmm1 ; BITALG-NEXT: vpsadbw %zmm1, %zmm0, %zmm0 -; BITALG-NEXT: vpcmpltuq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 +; BITALG-NEXT: vpcmpltq {{\.?LCPI[0-9]+_[0-9]+}}(%rip){1to8}, %zmm0, %k1 ; BITALG-NEXT: vpternlogq $255, %zmm0, %zmm0, %zmm0 {%k1} {z} ; BITALG-NEXT: retq %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0)
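For reference, every function updated in the hunks above has the same shape: a ctpop intrinsic feeding an unsigned vector compare against a splat constant, with the resulting i1 mask sign-extended back to the element type. Below is a minimal sketch of one such test in LLVM IR, reconstructed from the CHECK lines; the splat value and the sext are assumptions inferred from the ult_43_v8i64 label and the masked all-ones vpternlogq pattern, not a verbatim copy of the test file.

declare <8 x i64> @llvm.ctpop.v8i64(<8 x i64>)

define <8 x i64> @ult_43_v8i64(<8 x i64> %0) {
  ; Each lane of ctpop.v8i64 is in [0, 64], so the sign bit of both
  ; compare operands is known zero and the unsigned predicate is
  ; equivalent to the signed one. That is why the regenerated checks
  ; above swap vpcmpltuq for vpcmpltq here, and vpcmpnleuq for
  ; vpcmpgtq in the matching ugt_* tests.
  %2 = tail call <8 x i64> @llvm.ctpop.v8i64(<8 x i64> %0)
  %3 = icmp ult <8 x i64> %2, <i64 43, i64 43, i64 43, i64 43, i64 43, i64 43, i64 43, i64 43>
  ; The vpternlogq $255, ..., {%k1} {z} lines correspond to this sext
  ; of the k-mask back to a vector of all-ones/all-zeros lanes.
  %4 = sext <8 x i1> %3 to <8 x i64>
  ret <8 x i64> %4
}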