[CoreML][OSS] Integrate with CMake (#64523)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/64523

- Build PyTorch with the CoreML delegate - `USE_COREML_DELEGATE=1 python setup.py install --cmake`
- Build iOS static libs - `IOS_PLATFORM=SIMULATOR USE_COREML_DELEGATE=1  ./scripts/build_ios.sh`
ghstack-source-id: 138324216

Test Plan:
- Test the HelloWorld example

{F657778559}

Reviewed By: iseeyuan

Differential Revision: D30594041

fbshipit-source-id: 8cece0b2d4b3ef82d3ef4da8c1054919148beb16
This commit is contained in:
Tao Xu 2021-09-17 10:14:40 -07:00 committed by Facebook GitHub Bot
parent c1415a0a72
commit 18fa58c4e9
5 changed files with 36 additions and 0 deletions

View File

@ -320,6 +320,8 @@ cmake_dependent_option(
"BUILD_PYTHON" OFF)
cmake_dependent_option(USE_CCACHE "Attempt using CCache to wrap the compilation" ON "UNIX" OFF)
option(WERROR "Build with -Werror supported by the compiler" OFF)
# Off by default; turns on the CoreML backend delegate (also settable via
# the USE_COREML_DELEGATE env var when building through scripts/build_ios.sh).
option(USE_COREML_DELEGATE "Use the CoreML backend through delegate APIs" OFF)
if(USE_CCACHE)
find_program(CCACHE_PROGRAM ccache)
@ -703,6 +705,10 @@ if(USE_LITE_INTERPRETER_PROFILER)
string(APPEND CMAKE_CXX_FLAGS " -DEDGE_PROFILER_USE_KINETO")
endif()
# Expose the CoreML delegate option to C++ as a preprocessor define.
# NOTE(review): this appends to the global CMAKE_CXX_FLAGS, so every target
# configured after this point sees -DUSE_COREML_DELEGATE, not just the
# delegate sources.
if(USE_COREML_DELEGATE)
string(APPEND CMAKE_CXX_FLAGS " -DUSE_COREML_DELEGATE")
endif()
# ---[ Allowlist file if allowlist is specified
include(cmake/Allowlist.cmake)

View File

@ -519,6 +519,22 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
endforeach()
endif()
# Mobile backend delegate srcs: add the delegate-API sources to the mobile
# (non-Caffe2) build of TORCH_SRCS.
if(INTERN_BUILD_MOBILE AND NOT BUILD_CAFFE2_MOBILE)
# Backend-delegate plumbing shared by all mobile delegates.
set(DELEGATE_SRCS
${TORCH_SRC_DIR}/csrc/jit/backends/backend_debug_info.cpp
${TORCH_SRC_DIR}/csrc/jit/backends/backend_interface.cpp
)
list(APPEND TORCH_SRCS ${DELEGATE_SRCS})
# CoreML runtime sources are Objective-C++ (.mm), so they are compiled only
# for iOS builds that enable the CoreML delegate.
if(IOS AND USE_COREML_DELEGATE)
set(COREML_DELEGATE_SRCS
${TORCH_SRC_DIR}/csrc/jit/backends/coreml/objc/PTMCoreMLBackend.mm
${TORCH_SRC_DIR}/csrc/jit/backends/coreml/objc/PTMCoreMLExecutor.mm
)
list(APPEND TORCH_SRCS ${COREML_DELEGATE_SRCS})
endif()
endif()
# Required workaround for LLVM 9 includes.
if(NOT MSVC)
set_source_files_properties(${TORCH_SRC_DIR}/csrc/jit/tensorexpr/llvm_jit.cpp PROPERTIES COMPILE_FLAGS -Wno-noexcept-type)

View File

@ -182,4 +182,6 @@ function(caffe2_print_configuration_summary)
message(STATUS " USE_BREAKPAD : ${USE_BREAKPAD}")
message(STATUS " Public Dependencies : ${Caffe2_PUBLIC_DEPENDENCY_LIBS}")
message(STATUS " Private Dependencies : ${Caffe2_DEPENDENCY_LIBS}")
# CoreML: report whether the delegate backend was enabled in this configure.
message(STATUS "  USE_COREML_DELEGATE     : ${USE_COREML_DELEGATE}")
endfunction()

View File

@ -106,6 +106,11 @@ if [ "${USE_PYTORCH_METAL:-}" == "1" ]; then
CMAKE_ARGS+=("-DUSE_PYTORCH_METAL=ON")
fi
# Core ML: forward the USE_COREML_DELEGATE environment toggle to CMake.
# Use the ":-" default so an unset variable does not abort the script under
# `set -u`, matching the "${USE_PYTORCH_METAL:-}" check above.
if [ "${USE_COREML_DELEGATE:-}" == "1" ]; then
CMAKE_ARGS+=("-DUSE_COREML_DELEGATE=ON")
fi
# pthreads
CMAKE_ARGS+=("-DCMAKE_THREAD_LIBS_INIT=-lpthread")
CMAKE_ARGS+=("-DCMAKE_HAVE_THREADS_LIBRARY=1")

View File

@ -335,6 +335,13 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
set_source_files_properties(${TORCH_SRC_DIR}/csrc/utils/throughput_benchmark.cpp PROPERTIES COMPILE_FLAGS -Wno-attributes)
endif()
# CoreML: add the delegate's host-side sources to torch_python
# (presumably backend registration and model preprocessing, going by the
# file names — confirm against the coreml/cpp sources).
if(USE_COREML_DELEGATE)
list(APPEND TORCH_PYTHON_SRCS ${TORCH_SRC_DIR}/csrc/jit/backends/coreml/cpp/backend.cpp)
list(APPEND TORCH_PYTHON_SRCS ${TORCH_SRC_DIR}/csrc/jit/backends/coreml/cpp/preprocess.cpp)
endif()
add_library(torch_python SHARED ${TORCH_PYTHON_SRCS})
if(HAVE_SOVERSION)
set_target_properties(torch_python PROPERTIES