Index: llvm/trunk/include/llvm/Analysis/TargetTransformInfo.h
===================================================================
--- llvm/trunk/include/llvm/Analysis/TargetTransformInfo.h
+++ llvm/trunk/include/llvm/Analysis/TargetTransformInfo.h
@@ -208,18 +208,21 @@
   /// This is the most basic query for estimating call cost: it only knows the
   /// function type and (potentially) the number of arguments at the call site.
   /// The latter is only interesting for varargs function types.
-  int getCallCost(FunctionType *FTy, int NumArgs = -1) const;
+  int getCallCost(FunctionType *FTy, int NumArgs = -1,
+                  const User *U = nullptr) const;
 
   /// Estimate the cost of calling a specific function when lowered.
   ///
   /// This overload adds the ability to reason about the particular function
   /// being called in the event it is a library call with special lowering.
-  int getCallCost(const Function *F, int NumArgs = -1) const;
+  int getCallCost(const Function *F, int NumArgs = -1,
+                  const User *U = nullptr) const;
 
   /// Estimate the cost of calling a specific function when lowered.
   ///
   /// This overload allows specifying a set of candidate argument values.
-  int getCallCost(const Function *F, ArrayRef<const Value *> Arguments) const;
+  int getCallCost(const Function *F, ArrayRef<const Value *> Arguments,
+                  const User *U = nullptr) const;
 
   /// \returns A value by which our inlining threshold should be multiplied.
   /// This is primarily used to bump up the inlining threshold wholesale on
@@ -233,13 +236,15 @@
   ///
   /// Mirrors the \c getCallCost method but uses an intrinsic identifier.
   int getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                       ArrayRef<Type *> ParamTys) const;
+                       ArrayRef<Type *> ParamTys,
+                       const User *U = nullptr) const;
 
   /// Estimate the cost of an intrinsic when lowered.
   ///
   /// Mirrors the \c getCallCost method but uses an intrinsic identifier.
   int getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                       ArrayRef<const Value *> Arguments) const;
+                       ArrayRef<const Value *> Arguments,
+                       const User *U = nullptr) const;
 
   /// \return The estimated number of case clusters when lowering \p 'SI'.
   /// \p JTSize Set a jump table size only when \p SI is suitable for a jump
@@ -1038,15 +1043,16 @@
   virtual int getGEPCost(Type *PointeeType, const Value *Ptr,
                          ArrayRef<const Value *> Operands) = 0;
   virtual int getExtCost(const Instruction *I, const Value *Src) = 0;
-  virtual int getCallCost(FunctionType *FTy, int NumArgs) = 0;
-  virtual int getCallCost(const Function *F, int NumArgs) = 0;
+  virtual int getCallCost(FunctionType *FTy, int NumArgs, const User *U) = 0;
+  virtual int getCallCost(const Function *F, int NumArgs, const User *U) = 0;
   virtual int getCallCost(const Function *F,
-                          ArrayRef<const Value *> Arguments) = 0;
+                          ArrayRef<const Value *> Arguments, const User *U) = 0;
   virtual unsigned getInliningThresholdMultiplier() = 0;
   virtual int getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                               ArrayRef<Type *> ParamTys) = 0;
+                               ArrayRef<Type *> ParamTys, const User *U) = 0;
   virtual int getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                               ArrayRef<const Value *> Arguments) = 0;
+                               ArrayRef<const Value *> Arguments,
+                               const User *U) = 0;
   virtual unsigned getEstimatedNumberOfCaseClusters(const SwitchInst &SI,
                                                     unsigned &JTSize) = 0;
   virtual int
@@ -1239,26 +1245,27 @@
   int getExtCost(const Instruction *I, const Value *Src) override {
     return Impl.getExtCost(I, Src);
   }
-  int getCallCost(FunctionType *FTy, int NumArgs) override {
-    return Impl.getCallCost(FTy, NumArgs);
+  int getCallCost(FunctionType *FTy, int NumArgs, const User *U) override {
+    return Impl.getCallCost(FTy, NumArgs, U);
   }
-  int getCallCost(const Function *F, int NumArgs) override {
-    return Impl.getCallCost(F, NumArgs);
+  int getCallCost(const Function *F, int NumArgs, const User *U) override {
+    return Impl.getCallCost(F, NumArgs, U);
   }
   int getCallCost(const Function *F,
-                  ArrayRef<const Value *> Arguments) override {
-    return Impl.getCallCost(F, Arguments);
+                  ArrayRef<const Value *> Arguments, const User *U) override {
+    return Impl.getCallCost(F, Arguments, U);
   }
   unsigned getInliningThresholdMultiplier() override {
     return Impl.getInliningThresholdMultiplier();
   }
   int getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                       ArrayRef<Type *> ParamTys) override {
-    return Impl.getIntrinsicCost(IID, RetTy, ParamTys);
+                       ArrayRef<Type *> ParamTys,
+                       const User *U = nullptr) override {
+    return Impl.getIntrinsicCost(IID, RetTy, ParamTys, U);
   }
   int getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                       ArrayRef<const Value *> Arguments) override {
-    return Impl.getIntrinsicCost(IID, RetTy, Arguments);
+                       ArrayRef<const Value *> Arguments,
+                       const User *U = nullptr) override {
+    return Impl.getIntrinsicCost(IID, RetTy, Arguments, U);
   }
   int getUserCost(const User *U, ArrayRef<const Value *> Operands) override {
     return Impl.getUserCost(U, Operands);
Index: llvm/trunk/include/llvm/Analysis/TargetTransformInfoImpl.h
===================================================================
--- llvm/trunk/include/llvm/Analysis/TargetTransformInfoImpl.h
+++ llvm/trunk/include/llvm/Analysis/TargetTransformInfoImpl.h
@@ -123,7 +123,7 @@
     return TTI::TCC_Basic;
   }
 
-  unsigned getCallCost(FunctionType *FTy, int NumArgs) {
+  unsigned getCallCost(FunctionType *FTy, int NumArgs, const User *U) {
     assert(FTy && "FunctionType must be provided to this routine.");
 
     // The target-independent implementation just measures the size of the
@@ -141,7 +141,7 @@
   unsigned getInliningThresholdMultiplier() { return 1; }
 
   unsigned getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                            ArrayRef<Type *> ParamTys) {
+                            ArrayRef<Type *> ParamTys, const User *U) {
     switch (IID) {
     default:
       // Intrinsics rarely (if ever) have normal argument setup constraints.
@@ -680,7 +680,7 @@
 public:
   using BaseT::getCallCost;
 
-  unsigned getCallCost(const Function *F, int NumArgs) {
+  unsigned getCallCost(const Function *F, int NumArgs, const User *U) {
     assert(F && "A concrete function must be provided to this routine.");
 
     if (NumArgs < 0)
@@ -692,21 +692,22 @@
       FunctionType *FTy = F->getFunctionType();
       SmallVector<Type *, 8> ParamTys(FTy->param_begin(), FTy->param_end());
       return static_cast<T *>(this)
-          ->getIntrinsicCost(IID, FTy->getReturnType(), ParamTys);
+          ->getIntrinsicCost(IID, FTy->getReturnType(), ParamTys, U);
     }
 
     if (!static_cast<T *>(this)->isLoweredToCall(F))
       return TTI::TCC_Basic; // Give a basic cost if it will be lowered
                              // directly.
 
-    return static_cast<T *>(this)->getCallCost(F->getFunctionType(), NumArgs);
+    return static_cast<T *>(this)->getCallCost(F->getFunctionType(), NumArgs, U);
   }
 
-  unsigned getCallCost(const Function *F, ArrayRef<const Value *> Arguments) {
+  unsigned getCallCost(const Function *F, ArrayRef<const Value *> Arguments,
+                       const User *U) {
     // Simply delegate to generic handling of the call.
     // FIXME: We should use instsimplify or something else to catch calls which
     // will constant fold with these arguments.
-    return static_cast<T *>(this)->getCallCost(F, Arguments.size());
+    return static_cast<T *>(this)->getCallCost(F, Arguments.size(), U);
   }
 
   using BaseT::getGEPCost;
@@ -777,7 +778,7 @@
   using BaseT::getIntrinsicCost;
 
   unsigned getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                            ArrayRef<const Value *> Arguments) {
+                            ArrayRef<const Value *> Arguments, const User *U) {
     // Delegate to the generic intrinsic handling code. This mostly provides an
     // opportunity for targets to (for example) special case the cost of
     // certain intrinsics based on constants used as arguments.
@@ -785,7 +786,7 @@
     ParamTys.reserve(Arguments.size());
     for (unsigned Idx = 0, Size = Arguments.size(); Idx != Size; ++Idx)
       ParamTys.push_back(Arguments[Idx]->getType());
-    return static_cast<T *>(this)->getIntrinsicCost(IID, RetTy, ParamTys);
+    return static_cast<T *>(this)->getIntrinsicCost(IID, RetTy, ParamTys, U);
   }
 
   unsigned getUserCost(const User *U, ArrayRef<const Value *> Operands) {
@@ -809,11 +810,11 @@
         // Just use the called value type.
         Type *FTy = CS.getCalledValue()->getType()->getPointerElementType();
         return static_cast<T *>(this)
-            ->getCallCost(cast<FunctionType>(FTy), CS.arg_size());
+            ->getCallCost(cast<FunctionType>(FTy), CS.arg_size(), U);
       }
 
       SmallVector<const Value *, 8> Arguments(CS.arg_begin(), CS.arg_end());
-      return static_cast<T *>(this)->getCallCost(F, Arguments);
+      return static_cast<T *>(this)->getCallCost(F, Arguments, U);
     }
 
     if (const CastInst *CI = dyn_cast<CastInst>(U)) {
Index: llvm/trunk/include/llvm/CodeGen/BasicTTIImpl.h
===================================================================
--- llvm/trunk/include/llvm/CodeGen/BasicTTIImpl.h
+++ llvm/trunk/include/llvm/CodeGen/BasicTTIImpl.h
@@ -292,12 +292,12 @@
   }
 
   unsigned getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                            ArrayRef<const Value *> Arguments) {
-    return BaseT::getIntrinsicCost(IID, RetTy, Arguments);
+                            ArrayRef<const Value *> Arguments, const User *U) {
+    return BaseT::getIntrinsicCost(IID, RetTy, Arguments, U);
   }
 
   unsigned getIntrinsicCost(Intrinsic::ID IID, Type *RetTy,
-                            ArrayRef<Type *> ParamTys) {
+                            ArrayRef<Type *> ParamTys, const User *U) {
     if (IID == Intrinsic::cttz) {
       if (getTLI()->isCheapToSpeculateCttz())
         return TargetTransformInfo::TCC_Basic;
@@ -310,7 +310,7 @@
       return TargetTransformInfo::TCC_Expensive;
     }
 
-    return BaseT::getIntrinsicCost(IID, RetTy, ParamTys);
+    return BaseT::getIntrinsicCost(IID, RetTy, ParamTys, U);
   }
 
   unsigned getEstimatedNumberOfCaseClusters(const SwitchInst &SI,
Index: llvm/trunk/lib/Analysis/TargetTransformInfo.cpp
===================================================================
--- llvm/trunk/lib/Analysis/TargetTransformInfo.cpp
+++ llvm/trunk/lib/Analysis/TargetTransformInfo.cpp
@@ -60,15 +60,17 @@
   return Cost;
 }
 
-int TargetTransformInfo::getCallCost(FunctionType *FTy, int NumArgs) const {
-  int Cost = TTIImpl->getCallCost(FTy, NumArgs);
+int TargetTransformInfo::getCallCost(FunctionType *FTy, int NumArgs,
+                                     const User *U) const {
+  int Cost = TTIImpl->getCallCost(FTy, NumArgs, U);
   assert(Cost >= 0 && "TTI should not produce negative costs!");
   return Cost;
 }
 
 int TargetTransformInfo::getCallCost(const Function *F,
-                                     ArrayRef<const Value *> Arguments) const {
-  int Cost = TTIImpl->getCallCost(F, Arguments);
+                                     ArrayRef<const Value *> Arguments,
+                                     const User *U) const {
+  int Cost = TTIImpl->getCallCost(F, Arguments, U);
   assert(Cost >= 0 && "TTI should not produce negative costs!");
   return Cost;
 }
@@ -88,8 +90,9 @@
 }
 
 int TargetTransformInfo::getIntrinsicCost(
-    Intrinsic::ID IID, Type *RetTy, ArrayRef<const Value *> Arguments) const {
-  int Cost = TTIImpl->getIntrinsicCost(IID, RetTy, Arguments);
+    Intrinsic::ID IID, Type *RetTy, ArrayRef<const Value *> Arguments,
+    const User *U) const {
+  int Cost = TTIImpl->getIntrinsicCost(IID, RetTy, Arguments, U);
  assert(Cost >= 0 && "TTI should not produce negative costs!");
  return Cost;
}
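
Usage note (illustrative, not part of the patch): the new trailing const User *U
parameter defaults to nullptr on the public TargetTransformInfo overloads, so
existing callers keep compiling, while the Impl/virtual layers take U as a plain
parameter and simply forward it. The sketch below is a hypothetical helper
(estimateCallSiteCost is not an LLVM API); it mirrors the getUserCost code in
TargetTransformInfoImpl.h above to show how a caller that already has a call
site might pass it through as the User so a target can specialize the cost for
that particular site.

// Hypothetical caller-side sketch; only the TTI overloads changed by this
// patch are real LLVM API, the helper itself is illustrative.
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/CallSite.h"

using namespace llvm;

static int estimateCallSiteCost(const TargetTransformInfo &TTI,
                                ImmutableCallSite CS) {
  const User *U = CS.getInstruction(); // the call site itself is the User
  if (const Function *F = CS.getCalledFunction()) {
    SmallVector<const Value *, 8> Args(CS.arg_begin(), CS.arg_end());
    // New with this patch: forwarding U lets the target take the surrounding
    // context into account when pricing the call.
    return TTI.getCallCost(F, Args, U);
  }
  // Indirect call: fall back to the FunctionType-based overload.
  Type *FTy = CS.getCalledValue()->getType()->getPointerElementType();
  return TTI.getCallCost(cast<FunctionType>(FTy), CS.arg_size(), U);
}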