Index: llvm/lib/Transforms/InstCombine/InstCombineMulDivRem.cpp =================================================================== --- llvm/lib/Transforms/InstCombine/InstCombineMulDivRem.cpp +++ llvm/lib/Transforms/InstCombine/InstCombineMulDivRem.cpp @@ -564,6 +564,16 @@ return replaceInstUsesWith(I, NewPow); } + // powi(x, y) * powi(x, z) -> powi(x, y + z) + if (match(Op0, m_Intrinsic<Intrinsic::powi>(m_Value(X), m_Value(Y))) && + match(Op1, m_Intrinsic<Intrinsic::powi>(m_Specific(X), m_Value(Z)))) { + auto *YZ = Builder.CreateAdd(Y, Z); + Function *F = Intrinsic::getDeclaration(I.getModule(), Intrinsic::powi, + {X->getType(), YZ->getType()}); + auto *NewPow = Builder.CreateCall(F, {X, YZ}); + return replaceInstUsesWith(I, NewPow); + } + // exp(X) * exp(Y) -> exp(X + Y) if (match(Op0, m_Intrinsic<Intrinsic::exp>(m_Value(X))) && match(Op1, m_Intrinsic<Intrinsic::exp>(m_Value(Y)))) { Index: llvm/test/Transforms/InstCombine/powi.ll =================================================================== --- llvm/test/Transforms/InstCombine/powi.ll +++ llvm/test/Transforms/InstCombine/powi.ll @@ -4,6 +4,7 @@ declare double @llvm.powi.f64.i32(double, i32) declare double @llvm.fabs.f64(double) declare double @llvm.copysign.f64(double, double) +declare void @use(double) define double @powi_fneg_even_int(double %x) { ; CHECK-LABEL: @powi_fneg_even_int( @@ -79,3 +80,123 @@ %r = tail call double @llvm.powi.f64.i32(double %cs, i32 5) ret double %r } + +define double @powi_fmul_arg0_no_reassoc(double %x, i32 %i) { +; CHECK-LABEL: @powi_fmul_arg0_no_reassoc( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]]) +; CHECK-NEXT: [[MUL:%.*]] = fmul double [[POW]], [[X]] +; CHECK-NEXT: ret double [[MUL]] +; +entry: + %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i) + %mul = fmul double %pow, %x + ret double %mul +} + + +define double @powi_fmul_arg0(double %x, i32 %i) { +; CHECK-LABEL: @powi_fmul_arg0( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[POW:%.*]] = tail call double 
@llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]]) +; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[POW]], [[X]] +; CHECK-NEXT: ret double [[MUL]] +; +entry: + %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i) + %mul = fmul reassoc double %pow, %x + ret double %mul +} + +define double @powi_fmul_arg0_use(double %x, i32 %i) { +; CHECK-LABEL: @powi_fmul_arg0_use( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[POW:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[I:%.*]]) +; CHECK-NEXT: tail call void @use(double [[POW]]) +; CHECK-NEXT: [[MUL:%.*]] = fmul reassoc double [[POW]], [[X]] +; CHECK-NEXT: ret double [[MUL]] +; +entry: + %pow = tail call double @llvm.powi.f64.i32(double %x, i32 %i) + tail call void @use(double %pow) + %mul = fmul reassoc double %pow, %x + ret double %mul +} + +define double @powi_fmul_powi_no_reassoc(double %x, i32 %y, i32 %z) { +; CHECK-LABEL: @powi_fmul_powi_no_reassoc( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]]) +; CHECK-NEXT: [[P2:%.*]] = tail call double @llvm.powi.f64.i32(double [[X]], i32 [[Z:%.*]]) +; CHECK-NEXT: [[MUL:%.*]] = fmul double [[P2]], [[P1]] +; CHECK-NEXT: ret double [[MUL]] +; +entry: + %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y) + %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z) + %mul = fmul double %p2, %p1 + ret double %mul +} + + +define double @powi_fmul_powi(double %x, i32 %y, i32 %z) { +; CHECK-LABEL: @powi_fmul_powi( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Z:%.*]], [[Y:%.*]] +; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]]) +; CHECK-NEXT: ret double [[TMP1]] +; +entry: + %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y) + %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z) + %mul = fmul reassoc double %p2, %p1 + ret double %mul +} + +define double @powi_fmul_powi_same_power(double %x, i32 %y, i32 %z) { 
+; CHECK-LABEL: @powi_fmul_powi_same_power( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[TMP0:%.*]] = shl i32 [[Y:%.*]], 1 +; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[TMP0]]) +; CHECK-NEXT: ret double [[TMP1]] +; +entry: + %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y) + %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %y) + %mul = fmul reassoc double %p2, %p1 + ret double %mul +} + +define double @powi_fmul_powi_use_first(double %x, i32 %y, i32 %z) { +; CHECK-LABEL: @powi_fmul_powi_use_first( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Y:%.*]]) +; CHECK-NEXT: tail call void @use(double [[P1]]) +; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Z:%.*]], [[Y]] +; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.powi.f64.i32(double [[X]], i32 [[TMP0]]) +; CHECK-NEXT: ret double [[TMP1]] +; +entry: + %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %y) + tail call void @use(double %p1) + %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %z) + %mul = fmul reassoc double %p2, %p1 + ret double %mul +} + +define double @powi_fmul_powi_use_second(double %x, i32 %y, i32 %z) { +; CHECK-LABEL: @powi_fmul_powi_use_second( +; CHECK-NEXT: entry: +; CHECK-NEXT: [[P1:%.*]] = tail call double @llvm.powi.f64.i32(double [[X:%.*]], i32 [[Z:%.*]]) +; CHECK-NEXT: tail call void @use(double [[P1]]) +; CHECK-NEXT: [[TMP0:%.*]] = add i32 [[Y:%.*]], [[Z]] +; CHECK-NEXT: [[TMP1:%.*]] = call double @llvm.powi.f64.i32(double [[X]], i32 [[TMP0]]) +; CHECK-NEXT: ret double [[TMP1]] +; +entry: + %p1 = tail call double @llvm.powi.f64.i32(double %x, i32 %z) + tail call void @use(double %p1) + %p2 = tail call double @llvm.powi.f64.i32(double %x, i32 %y) + %mul = fmul reassoc double %p2, %p1 + ret double %mul +}