Index: llvm/lib/Transforms/InstCombine/InstCombineMulDivRem.cpp =================================================================== --- llvm/lib/Transforms/InstCombine/InstCombineMulDivRem.cpp +++ llvm/lib/Transforms/InstCombine/InstCombineMulDivRem.cpp @@ -555,22 +555,30 @@ } } - // exp(X) * exp(Y) -> exp(X + Y) - if (match(Op0, m_Intrinsic<Intrinsic::exp>(m_Value(X))) && - match(Op1, m_Intrinsic<Intrinsic::exp>(m_Value(Y))) && - I.isOnlyUserOfAnyOperand()) { - Value *XY = Builder.CreateFAddFMF(X, Y, &I); - Value *Exp = Builder.CreateUnaryIntrinsic(Intrinsic::exp, XY, &I); - return replaceInstUsesWith(I, Exp); - } + if (I.isOnlyUserOfAnyOperand()) { + // pow(x, y) * pow(x, z) -> pow(x, y + z) + if (match(Op0, m_Intrinsic<Intrinsic::pow>(m_Value(X), m_Value(Y))) && + match(Op1, m_Intrinsic<Intrinsic::pow>(m_Specific(X), m_Value(Z)))) { + auto *YZ = Builder.CreateFAddFMF(Y, Z, &I); + auto *NewPow = Builder.CreateBinaryIntrinsic(Intrinsic::pow, X, YZ, &I); + return replaceInstUsesWith(I, NewPow); + } - // exp2(X) * exp2(Y) -> exp2(X + Y) - if (match(Op0, m_Intrinsic<Intrinsic::exp2>(m_Value(X))) && - match(Op1, m_Intrinsic<Intrinsic::exp2>(m_Value(Y))) && - I.isOnlyUserOfAnyOperand()) { - Value *XY = Builder.CreateFAddFMF(X, Y, &I); - Value *Exp2 = Builder.CreateUnaryIntrinsic(Intrinsic::exp2, XY, &I); - return replaceInstUsesWith(I, Exp2); + // exp(X) * exp(Y) -> exp(X + Y) + if (match(Op0, m_Intrinsic<Intrinsic::exp>(m_Value(X))) && + match(Op1, m_Intrinsic<Intrinsic::exp>(m_Value(Y)))) { + Value *XY = Builder.CreateFAddFMF(X, Y, &I); + Value *Exp = Builder.CreateUnaryIntrinsic(Intrinsic::exp, XY, &I); + return replaceInstUsesWith(I, Exp); + } + + // exp2(X) * exp2(Y) -> exp2(X + Y) + if (match(Op0, m_Intrinsic<Intrinsic::exp2>(m_Value(X))) && + match(Op1, m_Intrinsic<Intrinsic::exp2>(m_Value(Y)))) { + Value *XY = Builder.CreateFAddFMF(X, Y, &I); + Value *Exp2 = Builder.CreateUnaryIntrinsic(Intrinsic::exp2, XY, &I); + return replaceInstUsesWith(I, Exp2); + } } // (X*Y) * X => (X*X) * Y where Y != X Index: llvm/test/Transforms/InstCombine/fmul-pow.ll =================================================================== --- 
llvm/test/Transforms/InstCombine/fmul-pow.ll +++ llvm/test/Transforms/InstCombine/fmul-pow.ll @@ -79,10 +79,9 @@ define double @pow_ab_x_pow_ac_reassoc(double %a, double %b, double %c) { ; CHECK-LABEL: @pow_ab_x_pow_ac_reassoc( -; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) -; CHECK-NEXT: [[TMP2:%.*]] = call double @llvm.pow.f64(double [[A]], double [[C:%.*]]) -; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[TMP2]], [[TMP1]] -; CHECK-NEXT: ret double [[MUL]] +; CHECK-NEXT: [[ADD:%.*]] = fadd reassoc double [[C:%.*]], [[B:%.*]] +; CHECK-NEXT: [[POW:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[ADD]]) +; CHECK-NEXT: ret double [[POW]] ; %1 = call double @llvm.pow.f64(double %a, double %b) %2 = call double @llvm.pow.f64(double %a, double %c) @@ -90,12 +89,11 @@ ret double %mul } - define double @pow_ab_reassoc(double %a, double %b) { ; CHECK-LABEL: @pow_ab_reassoc( -; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) -; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[TMP1]], [[TMP1]] -; CHECK-NEXT: ret double [[MUL]] +; CHECK-NEXT: [[ADD:%.*]] = fadd reassoc double [[B:%.*]], [[B]] +; CHECK-NEXT: [[POW:%.*]] = call reassoc double @llvm.pow.f64(double [[A:%.*]], double [[ADD]]) +; CHECK-NEXT: ret double [[POW]] ; %1 = call double @llvm.pow.f64(double %a, double %b) %mul = fmul reassoc double %1, %1 @@ -104,9 +102,9 @@ define double @pow_ab_reassoc_extra_use(double %a, double %b) { ; CHECK-LABEL: @pow_ab_reassoc_extra_use( -; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) -; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[TMP1]], [[TMP1]] -; CHECK-NEXT: call void @use(double [[TMP1]]) +; CHECK-NEXT: [[POW:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) +; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[POW]], [[POW]] +; CHECK-NEXT: call void @use(double [[POW]]) ; CHECK-NEXT: ret double [[MUL]] ; %1 
= call double @llvm.pow.f64(double %a, double %b) @@ -117,11 +115,11 @@ define double @pow_ab_x_pow_ac_reassoc_extra_use(double %a, double %b, double %c) { ; CHECK-LABEL: @pow_ab_x_pow_ac_reassoc_extra_use( -; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) -; CHECK-NEXT: [[TMP2:%.*]] = call double @llvm.pow.f64(double [[A]], double [[C:%.*]]) -; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[TMP1]], [[TMP2]] -; CHECK-NEXT: call void @use(double [[TMP1]]) -; CHECK-NEXT: ret double [[MUL]] +; CHECK-NEXT: [[POW1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) +; CHECK-NEXT: [[ADD:%.*]] = fadd reassoc double [[B]], [[C:%.*]] +; CHECK-NEXT: [[POW2:%.*]] = call reassoc double @llvm.pow.f64(double [[A]], double [[ADD]]) +; CHECK-NEXT: call void @use(double [[POW1]]) +; CHECK-NEXT: ret double [[POW2]] ; %1 = call double @llvm.pow.f64(double %a, double %b) %2 = call double @llvm.pow.f64(double %a, double %c) @@ -132,11 +130,11 @@ define double @pow_ab_x_pow_ac_reassoc_multiple_uses(double %a, double %b, double %c) { ; CHECK-LABEL: @pow_ab_x_pow_ac_reassoc_multiple_uses( -; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) -; CHECK-NEXT: [[TMP2:%.*]] = call double @llvm.pow.f64(double [[A]], double [[C:%.*]]) -; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[TMP1]], [[TMP2]] -; CHECK-NEXT: call void @use(double [[TMP1]]) -; CHECK-NEXT: call void @use(double [[TMP2]]) +; CHECK-NEXT: [[POW1:%.*]] = call double @llvm.pow.f64(double [[A:%.*]], double [[B:%.*]]) +; CHECK-NEXT: [[POW2:%.*]] = call double @llvm.pow.f64(double [[A]], double [[C:%.*]]) +; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[POW1]], [[POW2]] +; CHECK-NEXT: call void @use(double [[POW1]]) +; CHECK-NEXT: call void @use(double [[POW2]]) ; CHECK-NEXT: ret double [[MUL]] ; %1 = call double @llvm.pow.f64(double %a, double %b)