diff --git a/mlir/CMakeLists.txt b/mlir/CMakeLists.txt
--- a/mlir/CMakeLists.txt
+++ b/mlir/CMakeLists.txt
@@ -95,25 +95,26 @@
 # Requires:
 #   The pybind11 library can be found (set with -DPYBIND_DIR=...)
 #   The python executable is correct (set with -DPython3_EXECUTABLE=...)
-#
-# Version locking
-# ---------------
-# By default, python extensions are version locked to specific Python libraries.
-# This linking mode is somewhat more consistent across platforms and surfaces
-# undefined symbols at link time (vs runtime). It is suitable for development
-# workflows but can be disabled for more flexible deployment by
-# setting -DMLIR_BINDINGS_PYTHON_LOCK_VERSION=OFF
 #-------------------------------------------------------------------------------
 set(MLIR_ENABLE_BINDINGS_PYTHON 0 CACHE BOOL
        "Enables building of Python bindings.")
-set(MLIR_BINDINGS_PYTHON_LOCK_VERSION 1 CACHE BOOL
-       "Links to specific python libraries, resolving all symbols.")

 if(MLIR_ENABLE_BINDINGS_PYTHON)
   include(MLIRDetectPythonEnv)
+  # After CMake 3.18, we are able to limit the scope of the search to just
+  # Development.Module. Searching for Development will fail in situations where
+  # the Python libraries are not available. When possible, limit to just
+  # Development.Module.
+  # See https://pybind11.readthedocs.io/en/stable/compiling.html#findpython-mode
+  if(CMAKE_VERSION VERSION_LESS "3.18.0")
+    set(_python_development_component Development)
+  else()
+    set(_python_development_component Development.Module)
+  endif()
   find_package(Python3 ${LLVM_MINIMUM_PYTHON_VERSION}
-    COMPONENTS Interpreter Development NumPy REQUIRED)
+    COMPONENTS Interpreter ${_python_development_component} NumPy REQUIRED)
+  unset(_python_development_component)
   message(STATUS "Found python include dirs: ${Python3_INCLUDE_DIRS}")
   message(STATUS "Found python libraries: ${Python3_LIBRARIES}")
   message(STATUS "Found numpy v${Python3_NumPy_VERSION}: ${Python3_NumPy_INCLUDE_DIRS}")
diff --git a/mlir/cmake/modules/AddMLIRPython.cmake b/mlir/cmake/modules/AddMLIRPython.cmake
--- a/mlir/cmake/modules/AddMLIRPython.cmake
+++ b/mlir/cmake/modules/AddMLIRPython.cmake
@@ -419,62 +419,19 @@
     message(FATAL_ERROR " Missing SOURCES argument to add_mlir_python_extension(${libname}, ...")
   endif()

-  # Build-time RPath layouts require to be a directory one up from the
-  # binary root.
-  # TODO: Don't reference the LLVM_BINARY_DIR here: the invariant is that
-  # the output directory must be at the same level of the lib directory
-  # where libMLIR.so is installed. This is presently not optimal from a
-  # project separation perspective and a discussion on how to better
-  # segment MLIR libraries needs to happen.
-  # TODO: Remove this when downstreams are moved off of it.
-  if(NOT ARG_OUTPUT_DIRECTORY)
-    set(ARG_OUTPUT_DIRECTORY ${LLVM_BINARY_DIR}/python)
-  endif()
-
-  # Normally on unix-like platforms, extensions are built as "MODULE" libraries
-  # and do not explicitly link to the python shared object. This allows for
-  # some greater deployment flexibility since the extension will bind to
-  # symbols in the python interpreter on load. However, it also keeps the
-  # linker from erroring on undefined symbols, leaving this to (usually obtuse)
-  # runtime errors. Building in "SHARED" mode with an explicit link to the
-  # python libraries allows us to build with the expectation of no undefined
-  # symbols, which is better for development. Note that not all python
-  # configurations provide build-time libraries to link against, in which
-  # case, we fall back to MODULE linking.
-  if(Python3_LIBRARIES STREQUAL "" OR NOT MLIR_BINDINGS_PYTHON_LOCK_VERSION)
-    set(PYEXT_LINK_MODE MODULE)
-    set(PYEXT_LIBADD)
-  else()
-    set(PYEXT_LINK_MODE SHARED)
-    set(PYEXT_LIBADD ${Python3_LIBRARIES})
-  endif()
-
   # The actual extension library produces a shared-object or DLL and has
   # sources that must be compiled in accordance with pybind11 needs (RTTI and
   # exceptions).
-  add_library(${libname}
-    ${PYEXT_LINK_MODE}
+  pybind11_add_module(${libname}
     ${ARG_SOURCES}
   )

-  target_include_directories(${libname} PRIVATE
-    "${Python3_INCLUDE_DIRS}"
-    "${pybind11_INCLUDE_DIR}"
-  )
-
-  target_link_directories(${libname} PRIVATE
-    "${Python3_LIBRARY_DIRS}"
-  )
-
   # The extension itself must be compiled with RTTI and exceptions enabled.
   # Also, some warning classes triggered by pybind11 are disabled.
   target_compile_options(${libname} PRIVATE
     $<$<OR:$<CXX_COMPILER_ID:Clang>,$<CXX_COMPILER_ID:AppleClang>,$<CXX_COMPILER_ID:GNU>>:
     # Enable RTTI and exceptions.
     -frtti -fexceptions
-    # Noisy pybind warnings
-    -Wno-unused-value
-    -Wno-covered-switch-default
     >
     $<$<CXX_COMPILER_ID:MSVC>:
     # Enable RTTI and exceptions.
@@ -486,8 +443,6 @@
     ${libname} PROPERTIES
       LIBRARY_OUTPUT_DIRECTORY ${ARG_OUTPUT_DIRECTORY}
       OUTPUT_NAME "${extname}"
-      PREFIX "${PYTHON_MODULE_PREFIX}"
-      SUFFIX "${PYTHON_MODULE_SUFFIX}${PYTHON_MODULE_EXTENSION}"
       NO_SONAME ON
     )

@@ -501,13 +456,6 @@
     )
   endif()

-  # pybind11 requires binding code to be compiled with -fvisibility=hidden
-  # For static linkage, better code can be generated if the entire project
-  # compiles that way, but that is not enforced here. Instead, include a linker
-  # script that explicitly hides anything but the PyInit_* symbols, allowing gc
-  # to take place.
-  set_target_properties(${libname} PROPERTIES CXX_VISIBILITY_PRESET "hidden")
-
   # Python extensions depends *only* on the public API and LLVMSupport unless
   # if further dependencies are added explicitly.
   target_link_libraries(${libname}
diff --git a/mlir/docs/Bindings/Python.md b/mlir/docs/Bindings/Python.md
--- a/mlir/docs/Bindings/Python.md
+++ b/mlir/docs/Bindings/Python.md
@@ -25,14 +25,6 @@
   multiple Python implementations, setting this explicitly to the preferred
   `python3` executable is strongly recommended.

-* **`MLIR_BINDINGS_PYTHON_LOCK_VERSION`**`:BOOL`
-
-  Links the native extension against the Python runtime library, which is
-  optional on some platforms. While setting this to `OFF` can yield some greater
-  deployment flexibility, linking in this way allows the linker to report
-  compile time errors for unresolved symbols on all platforms, which makes for a
-  smoother development workflow. Defaults to `ON`.
-
 ### Recommended development practices

 It is recommended to use a python virtual environment. Many ways exist for this,
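Note for downstream users of `add_mlir_python_extension`: the patch delegates extension building to pybind11's own CMake support. `pybind11_add_module` already builds a MODULE library with the platform-appropriate prefix/suffix, hidden symbol visibility, and interpreter linking, which is why the hand-rolled PREFIX/SUFFIX properties, the CXX_VISIBILITY_PRESET setting, and the MODULE/SHARED selection are deleted above. A minimal standalone sketch of the same find-Python-then-pybind11 pattern follows; the project name, target name, and source file (`toy-ext`, `_toyext`, `ToyExtModule.cpp`) are illustrative assumptions, not part of this change.

```cmake
cmake_minimum_required(VERSION 3.13.4)
project(toy-ext LANGUAGES CXX)

# Mirror the mlir/CMakeLists.txt change: on CMake >= 3.18 the Development.Module
# component is sufficient to build an extension module and does not require the
# full Python development libraries to be installed (e.g. in manylinux-style
# build containers that ship no libpython).
if(CMAKE_VERSION VERSION_LESS "3.18.0")
  set(_python_dev_component Development)
else()
  set(_python_dev_component Development.Module)
endif()
find_package(Python3 COMPONENTS Interpreter ${_python_dev_component} REQUIRED)
unset(_python_dev_component)

# pybind11's CMake package supplies pybind11_add_module; recent releases detect
# and reuse the Python3 installation found above.
find_package(pybind11 CONFIG REQUIRED)

# pybind11_add_module replaces the manual add_library / PREFIX / SUFFIX /
# CXX_VISIBILITY_PRESET handling removed by this patch: it builds a MODULE
# library, applies the correct extension suffix, compiles with hidden
# visibility, and links the interpreter only where the platform requires it.
pybind11_add_module(_toyext ToyExtModule.cpp)
```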