Index: llvm/lib/Transforms/InstCombine/InstCombineCompares.cpp
===================================================================
--- llvm/lib/Transforms/InstCombine/InstCombineCompares.cpp
+++ llvm/lib/Transforms/InstCombine/InstCombineCompares.cpp
@@ -3228,12 +3228,22 @@
   case Intrinsic::fshl:
   case Intrinsic::fshr:
-    // (rot X, ?) == 0/-1 --> X == 0/-1
-    // TODO: This transform is safe to re-use undef elts in a vector, but
-    // the constant value passed in by the caller doesn't allow that.
-    if (C.isNullValue() || C.isAllOnesValue())
-      if (II->getArgOperand(0) == II->getArgOperand(1))
+    if (II->getArgOperand(0) == II->getArgOperand(1)) {
+      // (rot X, ?) == 0/-1 --> X == 0/-1
+      // TODO: This transform is safe to re-use undef elts in a vector, but
+      // the constant value passed in by the caller doesn't allow that.
+      if (C.isNullValue() || C.isAllOnesValue())
         return new ICmpInst(Pred, II->getArgOperand(0), Cmp.getOperand(1));
+
+      const APInt *RotAmt;
+      // ror(X, RotAmt) == C --> X == rol(C, RotAmt)
+      // rol(X, RotAmt) == C --> X == ror(C, RotAmt)
+      if (match(II->getArgOperand(2), m_APInt(RotAmt)))
+        return new ICmpInst(Pred, II->getArgOperand(0),
+                            II->getIntrinsicID() == Intrinsic::fshl
+                                ? ConstantInt::get(Ty, C.rotr(*RotAmt))
+                                : ConstantInt::get(Ty, C.rotl(*RotAmt)));
+    }
     break;
 
   case Intrinsic::uadd_sat: {
Index: llvm/test/Transforms/InstCombine/icmp-rotate.ll
===================================================================
--- llvm/test/Transforms/InstCombine/icmp-rotate.ll
+++ llvm/test/Transforms/InstCombine/icmp-rotate.ll
@@ -5,6 +5,7 @@
 declare i8 @llvm.fshr.i8(i8, i8, i8)
 declare <2 x i5> @llvm.fshl.v2i5(<2 x i5>, <2 x i5>, <2 x i5>)
 declare <2 x i5> @llvm.fshr.v2i5(<2 x i5>, <2 x i5>, <2 x i5>)
+declare <32 x i32> @llvm.fshl.v32i32(<32 x i32>, <32 x i32>, <32 x i32>)
 declare void @use(i8)
 
 define i1 @rol_eq(i8 %x, i8 %y, i8 %z) {
@@ -105,6 +106,59 @@
   ret <2 x i1> %r
 }
 
+
+define i1 @rol_eq_cst(i8 %x) {
+; CHECK-LABEL: @rol_eq_cst(
+; CHECK-NEXT:    [[R:%.*]] = icmp eq i8 [[X:%.*]], 32
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %f = tail call i8 @llvm.fshl.i8(i8 %x, i8 %x, i8 28)
+  %r = icmp eq i8 %f, 2
+  ret i1 %r
+}
+
+define i1 @ror_eq_cst(i8 %x) {
+; CHECK-LABEL: @ror_eq_cst(
+; CHECK-NEXT:    [[R:%.*]] = icmp eq i8 [[X:%.*]], 16
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %f = tail call i8 @llvm.fshr.i8(i8 %x, i8 %x, i8 27)
+  %r = icmp eq i8 %f, 2
+  ret i1 %r
+}
+
+define i1 @ror_ne_cst(i8 %x) {
+; CHECK-LABEL: @ror_ne_cst(
+; CHECK-NEXT:    [[R:%.*]] = icmp ne i8 [[X:%.*]], 24
+; CHECK-NEXT:    ret i1 [[R]]
+;
+  %f = tail call i8 @llvm.fshr.i8(i8 %x, i8 %x, i8 27)
+  %r = icmp ne i8 %f, 3
+  ret i1 %r
+}
+
+define <2 x i1> @rol_eq_cst_vec(<2 x i5> %x) {
+; CHECK-LABEL: @rol_eq_cst_vec(
+; CHECK-NEXT:    [[R:%.*]] = icmp eq <2 x i5> [[X:%.*]],
+; CHECK-NEXT:    ret <2 x i1> [[R]]
+;
+  %f = tail call <2 x i5> @llvm.fshr.v2i5(<2 x i5> %x, <2 x i5> %x, <2 x i5> )
+  %r = icmp eq <2 x i5> %f,
+  ret <2 x i1> %r
+}
+
+define <2 x i1> @rol_eq_cst_undef(<2 x i5> %x) {
+; CHECK-LABEL: @rol_eq_cst_undef(
+; CHECK-NEXT:    [[F:%.*]] = call <2 x i5> @llvm.fshl.v2i5(<2 x i5> [[X:%.*]], <2 x i5> [[X]], <2 x i5> )
+; CHECK-NEXT:    [[R:%.*]] = icmp eq <2 x i5> [[F]],
+; CHECK-NEXT:    ret <2 x i1> [[R]]
+;
+  %f = tail call <2 x i5> @llvm.fshr.v2i5(<2 x i5> %x, <2 x i5> %x, <2 x i5> )
+  %r = icmp eq <2 x i5> %f,
+  ret <2 x i1> %r
+}
+
+
 ; negative test - not a rotate
 define i1 @no_rotate(i8 %x, i8 %y, i8 %z) {
 ; CHECK-LABEL: @no_rotate(
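
For reference, the scalar test constants follow directly from the rewrite "rol(X, N) == C  <=>  X == ror(C, N)" with the rotate amount taken modulo the bit width. The sketch below is not part of the patch; rotl8/rotr8 are illustrative stand-ins for the i8 funnel-shift-with-equal-operands (rotate) semantics, and it simply brute-forces the three scalar tests to confirm the expected constants 32, 16, and 24.

// Standalone sketch (not part of the patch): verifies the scalar test
// expectations by exhaustive check over i8. rotl8/rotr8 are illustrative
// helpers, not LLVM APIs.
#include <cstdint>
#include <cstdio>

static uint8_t rotl8(uint8_t X, unsigned R) {
  R &= 7; // rotate amount is taken modulo the bit width, as for fshl/fshr
  return (uint8_t)((uint8_t)(X << R) | (X >> ((8 - R) & 7)));
}

static uint8_t rotr8(uint8_t X, unsigned R) {
  R &= 7;
  return (uint8_t)((X >> R) | (uint8_t)(X << ((8 - R) & 7)));
}

int main() {
  // @rol_eq_cst: (fshl x, x, 28) == 2  <=>  x == rotr(2, 28 % 8) == 32
  // @ror_eq_cst: (fshr x, x, 27) == 2  <=>  x == rotl(2, 27 % 8) == 16
  // @ror_ne_cst: (fshr x, x, 27) != 3  <=>  x != rotl(3, 27 % 8) == 24
  for (unsigned V = 0; V < 256; ++V) {
    uint8_t X = (uint8_t)V;
    if ((rotl8(X, 28) == 2) != (X == 32)) { puts("rol_eq_cst mismatch"); return 1; }
    if ((rotr8(X, 27) == 2) != (X == 16)) { puts("ror_eq_cst mismatch"); return 1; }
    if ((rotr8(X, 27) != 3) != (X != 24)) { puts("ror_ne_cst mismatch"); return 1; }
  }
  puts("all scalar test constants check out");
  return 0;
}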