Mirror of https://github.com/zebrajr/pytorch.git, synced 2025-12-06 12:20:52 +01:00
[Split Build] Use same package (#127934)
This PR removes the second separate package we were using for the libtorch wheel. To test that this works, we will use the PRs above this one in the stack. As a sanity check, these are the wheels produced by running:

```
python setup.py clean && \
  BUILD_LIBTORCH_WHL=1 with-proxy python setup.py bdist_wheel && \
  BUILD_PYTHON_ONLY=1 with-proxy python setup.py bdist_wheel --cmake
```

```
sahanp@devgpu086 ~/pytorch ((5f15e171…))> ls -al dist/          (pytorch-3.10)
total 677236
drwxr-xr-x 1 sahanp users       188 Jun  4 12:19 ./
drwxr-xr-x 1 sahanp users      1696 Jun  4 12:59 ../
-rw-r--r-- 1 sahanp users  81405742 Jun  4 12:19 torch-2.4.0a0+gitca0a73c-cp310-cp310-linux_x86_64.whl
-rw-r--r-- 1 sahanp users 612076919 Jun  4 12:19 libtorch-2.4.0a0+gitca0a73c-py3-none-any.whl
```

Pull Request resolved: https://github.com/pytorch/pytorch/pull/127934
Approved by: https://github.com/atalman
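For orientation, the flag handling that the command above exercises can be reduced to a short, self-contained Python sketch. The environment variables `BUILD_LIBTORCH_WHL`, `BUILD_PYTHON_ONLY`, `TORCH_PACKAGE_NAME`, and `LIBTORCH_PACKAGE_NAME` and the conflict check all appear in the `setup.py` diff below; everything else in this snippet is illustrative only, not the actual build code:

```python
import os

# Flags read by setup.py to decide which of the two wheels is being built
# (mirrors the BUILD_LIBTORCH_WHL / BUILD_PYTHON_ONLY handling in the diff below).
BUILD_LIBTORCH_WHL = os.getenv("BUILD_LIBTORCH_WHL", "0") == "1"
BUILD_PYTHON_ONLY = os.getenv("BUILD_PYTHON_ONLY", "0") == "1"

if BUILD_LIBTORCH_WHL and BUILD_PYTHON_ONLY:
    # setup.py raises a similar conflict error when both flags are set.
    raise RuntimeError(
        "Conflict: 'BUILD_LIBTORCH_WHL' and 'BUILD_PYTHON_ONLY' can't both be 1."
    )

# The python-only wheel keeps the normal "torch" name; the libtorch wheel is
# published under LIBTORCH_PACKAGE_NAME (default "libtorch"), as in the diff.
package_name = os.getenv("TORCH_PACKAGE_NAME", "torch")
if BUILD_LIBTORCH_WHL:
    package_name = os.getenv("LIBTORCH_PACKAGE_NAME", "libtorch")

print(f"Building wheel for package: {package_name}")
```

Running it once with `BUILD_LIBTORCH_WHL=1` and once with `BUILD_PYTHON_ONLY=1` corresponds to the two wheels in the `dist/` listing above.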
This commit is contained in:
Parent: ffb50fb691
Commit: 7d33ff59ba
@@ -284,13 +284,27 @@ else
      # Which should be backward compatible with Numpy-1.X
      python -mpip install --pre numpy==2.0.0rc1
    fi

    WERROR=1 python setup.py bdist_wheel

    WERROR=1 python setup.py clean

    if [[ "$USE_SPLIT_BUILD" == "true" ]]; then
      BUILD_LIBTORCH_WHL=1 BUILD_PYTHON_ONLY=0 python setup.py bdist_wheel
      BUILD_LIBTORCH_WHL=0 BUILD_PYTHON_ONLY=1 python setup.py bdist_wheel --cmake
    else
      WERROR=1 python setup.py bdist_wheel
    fi
  else
    python setup.py clean
    if [[ "$BUILD_ENVIRONMENT" == *xla* ]]; then
      source .ci/pytorch/install_cache_xla.sh
    fi
    if [[ "$USE_SPLIT_BUILD" == "true" ]]; then
      echo "USE_SPLIT_BUILD cannot be used with xla or rocm"
      exit 1
    else
      python setup.py bdist_wheel
    fi
  fi
  pip_install_whl "$(echo dist/*.whl)"

  # TODO: I'm not sure why, but somehow we lose verbose commands
@@ -328,9 +342,10 @@ else
  CUSTOM_OP_TEST="$PWD/test/custom_operator"
  python --version
  SITE_PACKAGES="$(python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())')"

  mkdir -p "$CUSTOM_OP_BUILD"
  pushd "$CUSTOM_OP_BUILD"
  cmake "$CUSTOM_OP_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES/torch" -DPython_EXECUTABLE="$(which python)" \
  cmake "$CUSTOM_OP_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES/torch;$SITE_PACKAGES" -DPython_EXECUTABLE="$(which python)" \
    -DCMAKE_MODULE_PATH="$CUSTOM_TEST_MODULE_PATH" -DUSE_ROCM="$CUSTOM_TEST_USE_ROCM"
  make VERBOSE=1
  popd

@@ -343,7 +358,7 @@ else
  SITE_PACKAGES="$(python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())')"
  mkdir -p "$JIT_HOOK_BUILD"
  pushd "$JIT_HOOK_BUILD"
  cmake "$JIT_HOOK_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES/torch" -DPython_EXECUTABLE="$(which python)" \
  cmake "$JIT_HOOK_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES/torch;$SITE_PACKAGES" -DPython_EXECUTABLE="$(which python)" \
    -DCMAKE_MODULE_PATH="$CUSTOM_TEST_MODULE_PATH" -DUSE_ROCM="$CUSTOM_TEST_USE_ROCM"
  make VERBOSE=1
  popd

@@ -355,7 +370,7 @@ else
  python --version
  mkdir -p "$CUSTOM_BACKEND_BUILD"
  pushd "$CUSTOM_BACKEND_BUILD"
  cmake "$CUSTOM_BACKEND_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES/torch" -DPython_EXECUTABLE="$(which python)" \
  cmake "$CUSTOM_BACKEND_TEST" -DCMAKE_PREFIX_PATH="$SITE_PACKAGES/torch;$SITE_PACKAGES" -DPython_EXECUTABLE="$(which python)" \
    -DCMAKE_MODULE_PATH="$CUSTOM_TEST_MODULE_PATH" -DUSE_ROCM="$CUSTOM_TEST_USE_ROCM"
  make VERBOSE=1
  popd
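The three hunks above widen `CMAKE_PREFIX_PATH` from `$SITE_PACKAGES/torch` to `$SITE_PACKAGES/torch;$SITE_PACKAGES`, presumably so CMake can also search site-packages itself once the build artifacts are split across two wheels (that rationale is an inference, not stated in the diff). A minimal Python sketch of how the value is assembled; `sysconfig` is used here as a stand-in for the `distutils.sysconfig.get_python_lib()` call the script shells out to:

```python
import os
import sysconfig

# The CI script obtains this via
# `python -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())'`;
# sysconfig gives a close equivalent and also works where distutils has been removed.
site_packages = sysconfig.get_paths()["purelib"]

# Old value: only the installed torch package directory.
old_prefix = os.path.join(site_packages, "torch")

# New value: torch first, then site-packages itself; CMake treats
# CMAKE_PREFIX_PATH as a semicolon-separated list of search roots.
new_prefix = ";".join([os.path.join(site_packages, "torch"), site_packages])

print("old CMAKE_PREFIX_PATH:", old_prefix)
print("new CMAKE_PREFIX_PATH:", new_prefix)
```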
setup.py (93 lines changed)
@@ -199,7 +199,6 @@
# Builds pytorch as a wheel using libtorch.so from a seperate wheel

import os
import pkgutil
import sys

if sys.platform == "win32" and sys.maxsize.bit_length() == 31:

@@ -210,19 +209,6 @@ if sys.platform == "win32" and sys.maxsize.bit_length() == 31:
import platform


def _get_package_path(package_name):
    loader = pkgutil.find_loader(package_name)
    if loader:
        # The package might be a namespace package, so get_data may fail
        try:
            file_path = loader.get_filename()
            return os.path.dirname(file_path)
        except AttributeError:
            pass
    return None


BUILD_LIBTORCH_WHL = os.getenv("BUILD_LIBTORCH_WHL", "0") == "1"
BUILD_PYTHON_ONLY = os.getenv("BUILD_PYTHON_ONLY", "0") == "1"

@@ -237,6 +223,7 @@ if sys.version_info < python_min_version:
import filecmp
import glob
import importlib
import importlib.util
import json
import shutil
import subprocess

@@ -253,15 +240,24 @@ from setuptools.dist import Distribution
from tools.build_pytorch_libs import build_caffe2
from tools.generate_torch_version import get_torch_version
from tools.setup_helpers.cmake import CMake
from tools.setup_helpers.env import (
    build_type,
    IS_DARWIN,
    IS_LINUX,
    IS_WINDOWS,
    LIBTORCH_PKG_NAME,
)
from tools.setup_helpers.env import build_type, IS_DARWIN, IS_LINUX, IS_WINDOWS
from tools.setup_helpers.generate_linker_script import gen_linker_script


def _get_package_path(package_name):
    spec = importlib.util.find_spec(package_name)
    if spec:
        # The package might be a namespace package, so get_data may fail
        try:
            loader = spec.loader
            if loader is not None:
                file_path = loader.get_filename()  # type: ignore[attr-defined]
                return os.path.dirname(file_path)
        except AttributeError:
            pass
    return None


# set up appropriate env variables
if BUILD_LIBTORCH_WHL:
    # Set up environment variables for ONLY building libtorch.so and not libtorch_python.so
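The new `_get_package_path` above switches from `pkgutil.find_loader` to `importlib.util.find_spec`. A self-contained sketch of the same lookup, using only the standard library; the `package_dir` helper name and the use of `spec.origin` instead of `loader.get_filename()` are choices made for this example, not code from the PR:

```python
import importlib.util
import os
from typing import Optional


def package_dir(package_name: str) -> Optional[str]:
    """Return the directory of an installed package, or None if it cannot be found."""
    spec = importlib.util.find_spec(package_name)
    if spec is None or spec.origin is None:
        # Not installed, or a namespace package without a single origin file.
        return None
    # For a regular package, spec.origin is the path of its __init__.py.
    return os.path.dirname(spec.origin)


print(package_dir("email"))            # e.g. /usr/lib/python3.11/email
print(package_dir("no_such_package"))  # None
```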
@@ -271,7 +267,7 @@ if BUILD_LIBTORCH_WHL:

if BUILD_PYTHON_ONLY:
    os.environ["BUILD_LIBTORCHLESS"] = "ON"
    os.environ["LIBTORCH_LIB_PATH"] = f"{_get_package_path(LIBTORCH_PKG_NAME)}/lib"
    os.environ["LIBTORCH_LIB_PATH"] = f"{_get_package_path('torch')}/lib"

################################################################################
# Parameters parsed from environment

@@ -347,9 +343,12 @@ cmake_python_include_dir = sysconfig.get_path("include")
# Version, create_version_file, and package_name
################################################################################

DEFAULT_PACKAGE_NAME = LIBTORCH_PKG_NAME if BUILD_LIBTORCH_WHL else "torch"
package_name = os.getenv("TORCH_PACKAGE_NAME", "torch")
LIBTORCH_PKG_NAME = os.getenv("LIBTORCH_PACKAGE_NAME", "libtorch")
if BUILD_LIBTORCH_WHL:
    package_name = LIBTORCH_PKG_NAME


package_name = os.getenv("TORCH_PACKAGE_NAME", DEFAULT_PACKAGE_NAME)
package_type = os.getenv("PACKAGE_TYPE", "wheel")
version = get_torch_version()
report(f"Building wheel {package_name}-{version}")

@@ -472,7 +471,6 @@ def build_deps():
    check_submodules()
    check_pydep("yaml", "pyyaml")
    build_python = not BUILD_LIBTORCH_WHL

    build_caffe2(
        version=version,
        cmake_python_library=cmake_python_library,

@@ -1125,8 +1123,6 @@ def main():
        raise RuntimeError(
            "Conflict: 'BUILD_LIBTORCH_WHL' and 'BUILD_PYTHON_ONLY' can't both be 1. Set one to 0 and rerun."
        )

    # the list of runtime dependencies required by this built package
    install_requires = [
        "filelock",
        "typing-extensions>=4.8.0",

@@ -1141,7 +1137,7 @@ def main():
        install_requires.append("setuptools")

    if BUILD_PYTHON_ONLY:
        install_requires.append(LIBTORCH_PKG_NAME)
        install_requires.append(f"{LIBTORCH_PKG_NAME}=={get_torch_version()}")

    use_prioritized_text = str(os.getenv("USE_PRIORITIZED_TEXT_FOR_LD", ""))
    if (
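The last hunk above pins the python-only wheel's runtime dependency on the libtorch wheel to an exact version, so a `torch` wheel can only be installed against the libtorch wheel built from the same commit. A trivial sketch of the resulting requirement string; the name and version literals below are taken from the defaults and the `dist/` listing in the commit message, not computed:

```python
# Stand-ins: setup.py uses LIBTORCH_PKG_NAME and get_torch_version() here.
libtorch_pkg_name = "libtorch"
torch_version = "2.4.0a0+gitca0a73c"

requirement = f"{libtorch_pkg_name}=={torch_version}"
print(requirement)  # libtorch==2.4.0a0+gitca0a73c
```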
@@ -1190,7 +1186,6 @@ def main():
        entry_points,
        extra_install_requires,
    ) = configure_extension_build()

    install_requires += extra_install_requires

    extras_require = {

@@ -1219,6 +1214,7 @@ def main():
        "utils/data/*.pyi",
        "utils/data/datapipes/*.pyi",
        "lib/*.pdb",
        "lib/*shm*",
        "lib/torch_shm_manager",
        "lib/*.h",
        "include/*.h",

@@ -1383,15 +1379,15 @@ def main():
        "utils/model_dump/*.mjs",
    ]

    if BUILD_PYTHON_ONLY:
    if not BUILD_LIBTORCH_WHL:
        torch_package_data.extend(
            [
                "lib/libtorch_python*",
                "lib/*shm*",
                "lib/libtorch_global_deps*",
                "lib/libtorch_python.so",
                "lib/libtorch_python.dylib",
                "lib/libtorch_python.dll",
            ]
        )
    else:
    if not BUILD_PYTHON_ONLY:
        torch_package_data.extend(
            [
                "lib/*.so*",
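The lists above are setuptools `package_data` globs; they decide which shared libraries are bundled into the wheel being built. As a rough illustration of how such patterns select files, here is an `fnmatch`-based approximation (setuptools does its own glob handling, and the file names below are made up for the example):

```python
import fnmatch

# Hypothetical contents of torch/lib in a build tree.
lib_files = [
    "lib/libtorch_python.so",
    "lib/libtorch_global_deps.so",
    "lib/libshm.so",
    "lib/libtorch_cpu.so",
]

# Patterns taken from the diff above.
patterns = ["lib/libtorch_python*", "lib/*shm*", "lib/libtorch_global_deps*"]

selected = sorted(
    {f for pattern in patterns for f in fnmatch.filter(lib_files, pattern)}
)
print(selected)
# ['lib/libshm.so', 'lib/libtorch_global_deps.so', 'lib/libtorch_python.so']
```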
@@ -1442,29 +1438,19 @@ def main():
        "packaged/autograd/*",
        "packaged/autograd/templates/*",
    ]

    if BUILD_LIBTORCH_WHL:
        modified_packages = []
        for package in packages:
            parts = package.split(".")
            if parts[0] == "torch":
                modified_packages.append(DEFAULT_PACKAGE_NAME + package[len("torch") :])
        packages = modified_packages
        package_dir = {LIBTORCH_PKG_NAME: "torch"}
        torch_package_dir_name = LIBTORCH_PKG_NAME
        package_data = {LIBTORCH_PKG_NAME: torch_package_data}
        extensions = []
    else:
        torch_package_dir_name = "torch"
        package_dir = {}
    package_data = {
        "torch": torch_package_data,
        "torchgen": torchgen_package_data,
        "caffe2": [
            "python/serialized_test/data/operator_test/*.zip",
        ],
    }

    if not BUILD_LIBTORCH_WHL:
        package_data["torchgen"] = torchgen_package_data
        package_data["caffe2"] = [
            "python/serialized_test/data/operator_test/*.zip",
        ]
    else:
        # no extensions in BUILD_LIBTORCH_WHL mode
        extensions = []

    setup(
        name=package_name,
        version=version,

@@ -1481,7 +1467,6 @@ def main():
        install_requires=install_requires,
        extras_require=extras_require,
        package_data=package_data,
        package_dir=package_dir,
        url="https://pytorch.org/",
        download_url="https://github.com/pytorch/pytorch/tags",
        author="PyTorch Team",
@@ -21,8 +21,6 @@ IS_64BIT = struct.calcsize("P") == 8

BUILD_DIR = "build"

LIBTORCH_PKG_NAME = "libtorchsplit"


def check_env_flag(name: str, default: str = "") -> bool:
    return os.getenv(name, default).upper() in ["ON", "1", "YES", "TRUE", "Y"]
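For reference, the `check_env_flag` helper kept in this hunk (the `LIBTORCH_PKG_NAME = "libtorchsplit"` constant next to it is what the PR removes) accepts several spellings as truthy. A quick standalone check of that behavior, with made-up flag names:

```python
import os


def check_env_flag(name: str, default: str = "") -> bool:
    # Same truthy set as the helper shown in the hunk above.
    return os.getenv(name, default).upper() in ["ON", "1", "YES", "TRUE", "Y"]


os.environ["USE_SPLIT_BUILD"] = "on"
print(check_env_flag("USE_SPLIT_BUILD"))    # True  ("on" -> "ON")
print(check_env_flag("MISSING_FLAG"))       # False (unset, empty default)
print(check_env_flag("MISSING_FLAG", "1"))  # True  (default applies)
```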
@@ -309,6 +309,51 @@ if(HAVE_SOVERSION)
  set_target_properties(torch_python PROPERTIES
      VERSION ${TORCH_VERSION} SOVERSION ${TORCH_SOVERSION})
endif()

# in case of the split build we need to add compile definitions
if(BUILD_LIBTORCHLESS)
  if(USE_UCC AND USE_C10D_UCC)
    target_compile_definitions(torch_python PRIVATE USE_C10D_UCC)
  endif()

  if(USE_UCC AND USE_C10D_NCCL)
    target_compile_definitions(torch_python PRIVATE USE_C10D_NCCL)
  endif()

  if(USE_DISTRIBUTED)
    target_compile_definitions(torch_python PRIVATE USE_DISTRIBUTED)
  endif()

  if(USE_MPI AND USE_C10D_MPI)
    target_compile_definitions(torch_python PRIVATE USE_C10D_MPI)
  endif()

  if(USE_GLOO AND USE_C10D_GLOO)
    target_compile_definitions(torch_python PRIVATE USE_C10D_GLOO)
  endif()

  if(NOT WIN32)
    target_compile_definitions(torch_python PRIVATE USE_RPC)
  endif()

  if(USE_TENSORPIPE)
    target_compile_definitions(torch_python PRIVATE USE_TENSORPIPE)
  endif()

  set(EXPERIMENTAL_SINGLE_THREAD_POOL "0" CACHE STRING
      "Experimental option to use a single thread pool for inter- and intra-op parallelism")
  if("${EXPERIMENTAL_SINGLE_THREAD_POOL}")
    target_compile_definitions(torch_python PRIVATE "-DAT_EXPERIMENTAL_SINGLE_THREAD_POOL=1")
  endif()

  if(MSVC AND NOT BUILD_SHARED_LIBS)
    target_compile_definitions(torch_python PRIVATE "AT_CORE_STATIC_WINDOWS=1")
  endif()


endif()

add_dependencies(torch_python torch_python_stubs)
add_dependencies(torch_python flatbuffers)
@@ -271,38 +271,6 @@ def _preload_cuda_deps(lib_folder, lib_name):

# See Note [Global dependencies]
def _load_global_deps() -> None:
    LIBTORCH_PKG_NAME = "libtorchsplit"

    def find_package_path(package_name):
        spec = importlib.util.find_spec(package_name)
        if spec:
            # The package might be a namespace package, so get_data may fail
            try:
                loader = spec.loader
                if loader is not None:
                    file_path = loader.get_filename()  # type: ignore[attr-defined]
                    return os.path.dirname(file_path)
            except AttributeError:
                pass
        return None

    def load_shared_libraries(library_path):
        lib_dir = os.path.join(library_path, "lib")
        if not os.path.exists(lib_dir):
            return

        # Find all shared library files with the appropriate extension
        library_files = [f for f in os.listdir(lib_dir) if f.endswith(lib_ext)]
        if not library_files:
            return

        for lib_file in library_files:
            lib_path = os.path.join(lib_dir, lib_file)
            try:
                ctypes.CDLL(lib_path, mode=ctypes.RTLD_GLOBAL)
            except OSError as err:
                print(f"Failed to load {lib_path}: {err}")

    if _running_with_deploy() or platform.system() == "Windows":
        return

@@ -312,11 +280,6 @@ def _load_global_deps() -> None:
    here = os.path.abspath(__file__)
    global_deps_lib_path = os.path.join(os.path.dirname(here), "lib", lib_name)

    split_build_lib_name = LIBTORCH_PKG_NAME
    library_path = find_package_path(split_build_lib_name)

    if library_path:
        global_deps_lib_path = os.path.join(library_path, "lib", lib_name)
    try:
        ctypes.CDLL(global_deps_lib_path, mode=ctypes.RTLD_GLOBAL)
    except OSError as err:

@@ -344,10 +307,6 @@ def _load_global_deps() -> None:
            _preload_cuda_deps(lib_folder, lib_name)
        ctypes.CDLL(global_deps_lib_path, mode=ctypes.RTLD_GLOBAL)

    if library_path:
        # loading libtorch_global_deps first due its special logic
        load_shared_libraries(library_path)


if (USE_RTLD_GLOBAL_WITH_LIBTORCH or os.getenv("TORCH_USE_RTLD_GLOBAL")) and (
    _running_with_deploy() or platform.system() != "Windows"
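The code removed above looked up the separate `libtorchsplit` package and loaded every shared library in its `lib/` directory with `RTLD_GLOBAL`; since the split wheels now share the `torch` package path (see the `LIBTORCH_LIB_PATH` change in `setup.py`), that separate lookup goes away. For reference, a minimal standalone version of that loading pattern; the `lib` directory and the `.so` suffix are placeholder assumptions (macOS would use `.dylib`):

```python
import ctypes
import glob
import os

# Placeholder directory; in torch this was the lib/ folder inside the installed package.
lib_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "lib")

# Load every shared object with RTLD_GLOBAL so its symbols are visible to
# libraries loaded afterwards, which is the point of the removed helper above.
for lib_path in glob.glob(os.path.join(lib_dir, "*.so")):
    try:
        ctypes.CDLL(lib_path, mode=ctypes.RTLD_GLOBAL)
    except OSError as err:
        print(f"Failed to load {lib_path}: {err}")
```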