diff --git a/llvm/CMakeLists.txt b/llvm/CMakeLists.txt
--- a/llvm/CMakeLists.txt
+++ b/llvm/CMakeLists.txt
@@ -832,6 +832,21 @@
 ${LLVM_INCLUDE_DIR}/llvm/Config/Targets.def
 )
 
+# For up-to-date instructions for installing the Tensorflow dependency, refer to
+# the bot setup script: https://github.com/google/ml-compiler-opt/blob/master/buildbot/buildbot_init.sh
+# In this case, the latest C API library is available for download from
+# https://www.tensorflow.org/install/lang_c.
+# We will expose the conditional compilation variable, LLVM_HAVE_TF_API,
+# through llvm-config.h, so that a user of the LLVM library may also
+# leverage the dependency.
+set(TENSORFLOW_C_LIB_PATH "" CACHE PATH "Path to TensorFlow C library install")
+find_library(tensorflow_c_api tensorflow PATHS ${TENSORFLOW_C_LIB_PATH}/lib)
+
+if (tensorflow_c_api)
+  set(LLVM_HAVE_TF_API "ON" CACHE BOOL "Full Tensorflow API available")
+  include_directories(${TENSORFLOW_C_LIB_PATH}/include)
+endif()
+
 # Configure the three LLVM configuration header files.
 configure_file(
 ${LLVM_MAIN_INCLUDE_DIR}/llvm/Config/config.h.cmake
@@ -972,27 +987,18 @@
 
 if (NOT TENSORFLOW_AOT_PATH STREQUAL "")
   set(LLVM_HAVE_TF_AOT "ON" CACHE BOOL "Tensorflow AOT available")
-  set(TENSORFLOW_AOT_COMPILER
-    "${TENSORFLOW_AOT_PATH}/../../../../bin/saved_model_cli"
-    CACHE PATH "Path to the Tensorflow AOT compiler")
+  set(TENSORFLOW_AOT_COMPILER
+    "${TENSORFLOW_AOT_PATH}/../../../../bin/saved_model_cli"
+    CACHE PATH "Path to the Tensorflow AOT compiler")
+  # Unlike the LLVM_HAVE_TF_API case, we don't need to expose this through
+  # llvm-config.h, because it's an internal implementation detail. A user of
+  # the LLVM library wanting the TF AOT compiler may use a custom build step.
   add_definitions("-DLLVM_HAVE_TF_AOT")
   include_directories(${TENSORFLOW_AOT_PATH}/include)
   add_subdirectory(${TENSORFLOW_AOT_PATH}/xla_aot_runtime_src
   ${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}/tf_runtime)
 endif()
 
-set(TENSORFLOW_C_LIB_PATH "" CACHE PATH "Path to TensorFlow C library install")
-find_library(tensorflow_c_api tensorflow PATHS ${TENSORFLOW_C_LIB_PATH}/lib)
-
-# Similar to the above Tensorflow dependency, please refer to the same script.
-# In this case, the latest C API library is available for download from
-# https://www.tensorflow.org/install/lang_c
-if (tensorflow_c_api)
-  set(LLVM_HAVE_TF_API "ON" CACHE BOOL "Full Tensorflow API available")
-  add_definitions("-DLLVM_HAVE_TF_API")
-  include_directories(${TENSORFLOW_C_LIB_PATH}/include)
-endif()
-
 # Put this before tblgen. Else we have a circular dependence.
 add_subdirectory(lib/Demangle)
 add_subdirectory(lib/Support)
diff --git a/llvm/include/llvm/Analysis/Utils/TFUtils.h b/llvm/include/llvm/Analysis/Utils/TFUtils.h
--- a/llvm/include/llvm/Analysis/Utils/TFUtils.h
+++ b/llvm/include/llvm/Analysis/Utils/TFUtils.h
@@ -9,6 +9,8 @@
 #ifndef LLVM_ANALYSIS_UTILS_TFUTILS_H
 #define LLVM_ANALYSIS_UTILS_TFUTILS_H
 
+#include "llvm/Config/config.h"
+
 #ifdef LLVM_HAVE_TF_API
 #include "tensorflow/c/c_api.h"
 #include "llvm/IR/LLVMContext.h"
diff --git a/llvm/include/llvm/Config/llvm-config.h.cmake b/llvm/include/llvm/Config/llvm-config.h.cmake
--- a/llvm/include/llvm/Config/llvm-config.h.cmake
+++ b/llvm/include/llvm/Config/llvm-config.h.cmake
@@ -79,4 +79,7 @@
  */
 #cmakedefine01 LLVM_FORCE_ENABLE_STATS
 
+/* Define if LLVM was built with a dependency on the libtensorflow dynamic library */
+#cmakedefine LLVM_HAVE_TF_API
+
 #endif
diff --git a/llvm/unittests/Analysis/InlineSizeEstimatorAnalysisTest.cpp b/llvm/unittests/Analysis/InlineSizeEstimatorAnalysisTest.cpp
--- a/llvm/unittests/Analysis/InlineSizeEstimatorAnalysisTest.cpp
+++ b/llvm/unittests/Analysis/InlineSizeEstimatorAnalysisTest.cpp
@@ -26,7 +26,7 @@
 extern const char *TestMainArgv0;
 extern cl::opt<std::string> TFIR2NativeModelPath;
 
-#if LLVM_HAVE_TF_API
+#ifdef LLVM_HAVE_TF_API
 static std::string getModelPath() {
   SmallString<128> InputsDir = unittest::getInputFileDirectory(TestMainArgv0);
   llvm::sys::path::append(InputsDir, "ir2native_x86_64_model");
@@ -87,13 +87,13 @@
 )IR");
 
   FunctionAnalysisManager FAM = buildFAM();
-#if LLVM_HAVE_TF_API
+#ifdef LLVM_HAVE_TF_API
   TFIR2NativeModelPath = getModelPath();
 #endif
 
   InlineSizeEstimatorAnalysis FA;
   auto SizeEstimate = FA.run(*M->getFunction("branches"), FAM);
-#if LLVM_HAVE_TF_API
+#ifdef LLVM_HAVE_TF_API
   EXPECT_GT(*SizeEstimate, 0);
 #else
   EXPECT_FALSE(SizeEstimate.hasValue());