diff --git a/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp b/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
--- a/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
+++ b/llvm/lib/Analysis/DevelopmentModeInlineAdvisor.cpp
@@ -71,14 +71,14 @@
 /// lines up with how TF SequenceExample represents it.
 class TrainingLogger final {
 public:
-  TrainingLogger();
+  TrainingLogger(StringRef LogFileName);
 
   /// Log one inlining event.
   void logInlineEvent(const InlineEvent &Event,
                       const MLModelRunner &ModelRunner);
 
   /// Print the stored tensors.
-  void print(raw_fd_ostream &OutFile);
+  void print();
 
 private:
   /// Write the values of one tensor as a list.
@@ -156,6 +156,7 @@
     OutFile << " }\n";
   }
 
+  StringRef LogFileName;
   std::vector<InlineFeatures> Features;
   std::vector<int64_t> DefaultDecisions;
   std::vector<int64_t> Decisions;
@@ -193,7 +194,8 @@
   DevelopmentModeMLInlineAdvisor(
       Module &M, ModuleAnalysisManager &MAM,
       std::unique_ptr<MLModelRunner> ModelRunner,
-      std::function<bool(CallBase &)> GetDefaultAdvice, bool IsDoingInference);
+      std::function<bool(CallBase &)> GetDefaultAdvice, bool IsDoingInference,
+      std::unique_ptr<TrainingLogger> Logger);
 
   size_t getTotalSizeEstimate();
 
@@ -211,10 +213,10 @@
   size_t getNativeSizeEstimate(const Function &F) const;
 
 private:
-  bool isLogging() const { return !TrainingLog.empty(); }
+  bool isLogging() const { return !!Logger; }
 
   std::function<bool(CallBase &)> GetDefaultAdvice;
-  TrainingLogger Logger;
+  std::unique_ptr<TrainingLogger> Logger;
   const bool IsDoingInference;
 
   const int32_t InitialNativeSize;
@@ -346,7 +348,8 @@
 };
 } // namespace
 
-TrainingLogger::TrainingLogger() {
+TrainingLogger::TrainingLogger(StringRef LogFileName)
+    : LogFileName(LogFileName) {
   for (size_t I = 0; I < NumberOfFeatures; ++I) {
     Features.push_back(InlineFeatures());
   }
@@ -364,7 +367,9 @@
   DefaultDecisions.push_back(Event.DefaultDecision);
 }
 
-void TrainingLogger::print(raw_fd_ostream &OutFile) {
+void TrainingLogger::print() {
+  std::error_code ErrorCode;
+  raw_fd_ostream OutFile(LogFileName, ErrorCode);
   size_t NumberOfRecords = Decisions.size();
   if (NumberOfRecords == 0)
     return;
@@ -392,9 +397,11 @@
 DevelopmentModeMLInlineAdvisor::DevelopmentModeMLInlineAdvisor(
     Module &M, ModuleAnalysisManager &MAM,
     std::unique_ptr<MLModelRunner> ModelRunner,
-    std::function<bool(CallBase &)> GetDefaultAdvice, bool IsDoingInference)
+    std::function<bool(CallBase &)> GetDefaultAdvice, bool IsDoingInference,
+    std::unique_ptr<TrainingLogger> Logger)
     : MLInlineAdvisor(M, MAM, std::move(ModelRunner)),
-      GetDefaultAdvice(GetDefaultAdvice), IsDoingInference(IsDoingInference),
+      GetDefaultAdvice(GetDefaultAdvice), Logger(std::move(Logger)),
+      IsDoingInference(IsDoingInference),
       InitialNativeSize(isLogging() ? getTotalSizeEstimate() : 0),
       CurrentNativeSize(InitialNativeSize) {
   // We cannot have the case of neither inference nor logging.
@@ -402,11 +409,8 @@
 }
 
 DevelopmentModeMLInlineAdvisor::~DevelopmentModeMLInlineAdvisor() {
-  if (TrainingLog.empty())
-    return;
-  std::error_code ErrorCode;
-  raw_fd_ostream OutFile(TrainingLog, ErrorCode);
-  Logger.print(OutFile);
+  if (isLogging())
+    Logger->print();
 }
 
 size_t
@@ -428,7 +432,7 @@
     return MLInlineAdvisor::getMandatoryAdvice(CB, ORE);
   return std::make_unique<LoggingMLInlineAdvice>(
       /*Advisor=*/this,
-      /*CB=*/CB, /*ORE=*/ORE, /*Recommendation=*/true, /*Logger=*/Logger,
+      /*CB=*/CB, /*ORE=*/ORE, /*Recommendation=*/true, /*Logger=*/*Logger,
      /*CallerSizeEstimateBefore=*/getNativeSizeEstimate(*CB.getCaller()),
       /*CalleeSizeEstimateBefore=*/
       getNativeSizeEstimate(*CB.getCalledFunction()),
@@ -446,7 +450,7 @@
   return std::make_unique<LoggingMLInlineAdvice>(
       /*Advisor=*/this,
       /*CB=*/CB, /*ORE=*/ORE, /*Recommendation=*/Recommendation,
-      /*Logger=*/Logger,
+      /*Logger=*/*Logger,
       /*CallerSizeEstimateBefore=*/getNativeSizeEstimate(*CB.getCaller()),
       /*CalleeSizeEstimateBefore=*/
       getNativeSizeEstimate(*CB.getCalledFunction()),
@@ -531,7 +535,12 @@
     }
     IsDoingInference = true;
   }
+  std::unique_ptr<TrainingLogger> Logger;
+  if (!TrainingLog.empty())
+    Logger = std::make_unique<TrainingLogger>(TrainingLog);
+
   return std::make_unique<DevelopmentModeMLInlineAdvisor>(
-      M, MAM, std::move(Runner), GetDefaultAdvice, IsDoingInference);
+      M, MAM, std::move(Runner), GetDefaultAdvice, IsDoingInference,
+      std::move(Logger));
 }
 #endif // defined(LLVM_HAVE_TF_API)