Index: llvm/lib/Target/X86/X86ISelLowering.cpp
===================================================================
--- llvm/lib/Target/X86/X86ISelLowering.cpp
+++ llvm/lib/Target/X86/X86ISelLowering.cpp
@@ -45002,9 +45002,11 @@
   unsigned NumElts = VecVT.getVectorNumElements();
   unsigned NumEltBits = VecVT.getScalarSizeInBits();
 
-  bool IsAnyOf = CmpOpcode == X86ISD::CMP && CmpVal.isZero();
+  bool IsAnyOf = CmpOpcode == X86ISD::CMP && CmpVal.isZero() &&
+                 CmpOp.getNode()->hasOneUse();
   bool IsAllOf = (CmpOpcode == X86ISD::SUB || CmpOpcode == X86ISD::CMP) &&
-                 NumElts <= CmpBits && CmpVal.isMask(NumElts);
+                 NumElts <= CmpBits && CmpVal.isMask(NumElts) &&
+                 CmpOp.getNode()->hasOneUse();
   if (!IsAnyOf && !IsAllOf)
     return SDValue();
 
Index: llvm/test/CodeGen/X86/vector-compare-all_of.ll
===================================================================
--- llvm/test/CodeGen/X86/vector-compare-all_of.ll
+++ llvm/test/CodeGen/X86/vector-compare-all_of.ll
@@ -881,7 +881,7 @@
   ret i8 %11
 }
 
-; FIXME: Should not "MOVMSK(PCMPEQ(..)) -> PTESTZ(..)" when cmp result has muti-uses.
+; Should not fold "MOVMSK(PCMPEQ(..)) -> PTESTZ(..)" when the cmp result has multiple uses.
 define i32 @test_v32i8_muti_uses(<32 x i8> %x, <32 x i8>%y, i32 %z) {
 ; SSE-LABEL: test_v32i8_muti_uses:
 ; SSE:       # %bb.0:
@@ -914,10 +914,9 @@
 ;
 ; AVX2-LABEL: test_v32i8_muti_uses:
 ; AVX2:       # %bb.0:
-; AVX2-NEXT:    vpcmpeqb %ymm1, %ymm0, %ymm2
-; AVX2-NEXT:    vpmovmskb %ymm2, %ecx
-; AVX2-NEXT:    vpsubb %ymm1, %ymm0, %ymm0
-; AVX2-NEXT:    vptest %ymm0, %ymm0
+; AVX2-NEXT:    vpcmpeqb %ymm1, %ymm0, %ymm0
+; AVX2-NEXT:    vpmovmskb %ymm0, %ecx
+; AVX2-NEXT:    cmpl $-1, %ecx
 ; AVX2-NEXT:    movl $16, %eax
 ; AVX2-NEXT:    cmovnel %ecx, %eax
 ; AVX2-NEXT:    vzeroupper
Index: llvm/test/CodeGen/X86/vector-compare-any_of.ll
===================================================================
--- llvm/test/CodeGen/X86/vector-compare-any_of.ll
+++ llvm/test/CodeGen/X86/vector-compare-any_of.ll
@@ -1358,11 +1358,9 @@
 ; AVX2:       # %bb.0:
 ; AVX2-NEXT:    vpcmpeqb %xmm1, %xmm0, %xmm0
 ; AVX2-NEXT:    vpcmpeqb %xmm1, %xmm2, %xmm1
-; AVX2-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm2
-; AVX2-NEXT:    vpmovmskb %ymm2, %eax
-; AVX2-NEXT:    vpor %xmm1, %xmm0, %xmm0
-; AVX2-NEXT:    vpmovmskb %xmm0, %ecx
-; AVX2-NEXT:    testl %ecx, %ecx
+; AVX2-NEXT:    vinserti128 $1, %xmm1, %ymm0, %ymm0
+; AVX2-NEXT:    vpmovmskb %ymm0, %eax
+; AVX2-NEXT:    testl %eax, %eax
 ; AVX2-NEXT:    sete %dl
 ; AVX2-NEXT:    vzeroupper
 ; AVX2-NEXT:    retq
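
Note: a standalone sketch of why the new one-use guard matters. The Node struct and shouldFold helper below are hypothetical stand-ins, not LLVM's real SelectionDAG API (the actual check is CmpOp.getNode()->hasOneUse() inside the MOVMSK setcc combine): when the vector compare feeding the MOVMSK has other consumers, the MOVMSK result must stay live anyway, so rewriting the reduction through PTEST/SUB only duplicates work -- exactly what the updated AVX2 CHECK lines above show being avoided.

    // one_use_guard.cpp -- illustrative only; not LLVM's real API.
    #include <iostream>

    struct Node {
      unsigned NumUses;                              // consumers of this value
      bool hasOneUse() const { return NumUses == 1; }
    };

    // Mirrors the gating added above: an any-of/all-of reduction fold is
    // only profitable when the compare has a single use, because otherwise
    // the original MOVMSK is still needed and the fold adds instructions
    // instead of removing any.
    bool shouldFold(const Node &CmpOp, bool CmpValIsZero) {
      bool IsAnyOf = CmpValIsZero && CmpOp.hasOneUse();
      return IsAnyOf;
    }

    int main() {
      Node SingleUse{1}, MultiUse{2};
      std::cout << shouldFold(SingleUse, true) << '\n'; // 1 -> fold to PTEST
      std::cout << shouldFold(MultiUse, true) << '\n';  // 0 -> keep MOVMSK
      return 0;
    }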