[BE][Easy] use pathlib.Path instead of dirname / ".." / pardir (#129374)
Changes, in apply order:
1. Replace all `".."` and `os.pardir` uses with `os.path.dirname(...)`.
2. Replace nested `os.path.dirname(os.path.dirname(...))` calls with `str(Path(...).parent.parent)`.
3. Reorder `.absolute()` ~~/ `.resolve()`~~ and `.parent`: always resolve the path first.
   `.parent{...}.absolute()` -> `.absolute().parent{...}`
4. Replace chained `.parent x N` with `.parents[${N - 1}]`: the code is easier to read (see 5).
   `.parent.parent.parent.parent` -> `.parents[3]`
5. ~~Replace `.parents[${N - 1}]` with `.parents[${N} - 1]`: the code is easier to read and does not introduce any runtime overhead.~~
   ~~`.parents[3]` -> `.parents[4 - 1]`~~
6. ~~Replace `.parents[2 - 1]` with `.parent.parent`: because the code is shorter and easier to read.~~
Pull Request resolved: https://github.com/pytorch/pytorch/pull/129374
Approved by: https://github.com/justinchuby, https://github.com/malfet
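In practice, rules 2-4 come down to two habits: make the path absolute (or resolved) before walking upward, and collapse a chain of `.parent` calls into a single `.parents[...]` index. A minimal sketch of why the ordering and the off-by-one matter (the file paths below are invented for illustration, not taken from this PR):

```python
from pathlib import Path

# N chained .parent calls collapse to .parents[N - 1], because parents[0]
# is already the first parent.
p = Path("/repo/tools/stats/upload_stats.py")   # hypothetical layout
assert p.parent.parent.parent == p.parents[2] == Path("/repo")

# Why .absolute() has to come before .parent: .parent is purely lexical,
# so on a relative path the upward walk silently goes nowhere.
rel = Path("script.py")                  # e.g. run as `python script.py`
print(rel.parent.parent.absolute())      # the current directory; both .parent calls were lost
print(rel.absolute().parent.parent)      # the parent of the current directory, as intended
```

The same reasoning applies to `.resolve()`: walking upward only behaves as intended once the starting path is absolute.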
This commit is contained in:
parent 7101b8ca35
commit b6bdb67f82
@@ -7,7 +7,7 @@ import yaml


 # Need to import modules that lie on an upward-relative path
-sys.path.append(os.path.join(sys.path[0], ".."))
+sys.path.append(os.path.dirname(sys.path[0]))

 import cimodel.lib.miniyaml as miniyaml

.github/scripts/delete_old_branches.py (vendored, 2 lines changed)
@@ -22,7 +22,7 @@ TOKEN = os.environ["GITHUB_TOKEN"]
 if not TOKEN:
     raise Exception("GITHUB_TOKEN is not set")  # noqa: TRY002

-REPO_ROOT = Path(__file__).parent.parent.parent
+REPO_ROOT = Path(__file__).parents[2]

 # Query for all PRs instead of just closed/merged because it's faster
 GRAPHQL_ALL_PRS_BY_UPDATED_AT = """

@@ -6,7 +6,7 @@ from pathlib import Path
 import yaml


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]
 WORKFLOWS = REPO_ROOT / ".github" / "workflows"
 EXPECTED_GROUP_PREFIX = (
     "${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }}"
@@ -94,7 +94,7 @@ def get_nccl_submodule_version() -> str:
     from pathlib import Path

     nccl_version_mk = (
-        Path(__file__).absolute().parent.parent.parent
+        Path(__file__).absolute().parents[2]
         / "third_party"
         / "nccl"
         / "nccl"
.github/scripts/gitutils.py (vendored, 2 lines changed)
@@ -32,7 +32,7 @@ def get_git_remote_name() -> str:
 def get_git_repo_dir() -> str:
     from pathlib import Path

-    return os.getenv("GIT_REPO_DIR", str(Path(__file__).resolve().parent.parent.parent))
+    return os.getenv("GIT_REPO_DIR", str(Path(__file__).resolve().parents[2]))


 def fuzzy_list_to_dict(items: List[Tuple[str, str]]) -> Dict[str, List[str]]:

.github/scripts/lint_native_functions.py (vendored, 2 lines changed)
@@ -26,7 +26,7 @@ def fn(base: str) -> str:
     return str(base / Path("aten/src/ATen/native/native_functions.yaml"))


-with open(Path(__file__).parent.parent.parent / fn(".")) as f:
+with open(Path(__file__).parents[2] / fn(".")) as f:
     contents = f.read()

 yaml = ruamel.yaml.YAML()  # type: ignore[attr-defined]
.github/scripts/test_gitutils.py (vendored, 2 lines changed)
@@ -68,7 +68,7 @@ class TestRetriesDecorator(TestCase):

 class TestGitRepo(TestCase):
     def setUp(self) -> None:
-        repo_dir = BASE_DIR.parent.parent.absolute()
+        repo_dir = BASE_DIR.absolute().parent.parent
         if not (repo_dir / ".git").is_dir():
             raise SkipTest(
                 "Can't find git directory, make sure to run this test on real repo checkout"
@@ -71,7 +71,7 @@ ARTIFACTS_QUERY_URL = (
     "c1cdfadc-6bb2-4a91-bbf9-3d19e1981cd4/run?format=JSON"
 )
 CSV_LINTER = str(
-    Path(__file__).absolute().parent.parent.parent.parent
+    Path(__file__).absolute().parents[3]
     / "tools/linter/adapters/no_merge_conflict_csv_linter.py"
 )
@@ -7,9 +7,9 @@ import torch._prims as prims
 from torchgen.gen import parse_native_yaml


-ROOT = Path(__file__).absolute().parent.parent.parent.parent
-NATIVE_FUNCTION_YAML_PATH = ROOT / Path("aten/src/ATen/native/native_functions.yaml")
-TAGS_YAML_PATH = ROOT / Path("aten/src/ATen/native/tags.yaml")
+ROOT = Path(__file__).absolute().parents[3]
+NATIVE_FUNCTION_YAML_PATH = ROOT / "aten/src/ATen/native/native_functions.yaml"
+TAGS_YAML_PATH = ROOT / "aten/src/ATen/native/tags.yaml"

 BUILD_DIR = "build/ir"
 ATEN_OPS_CSV_FILE = "aten_ops.csv"
@@ -15,7 +15,7 @@ from torch.ao.quantization.backend_config.utils import (

 # Create a directory for the images, if it doesn't exist
 QUANTIZATION_BACKEND_CONFIG_IMAGE_PATH = os.path.join(
-    os.path.realpath(os.path.join(__file__, "..")), "quantization_backend_configs"
+    os.path.realpath(os.path.dirname(__file__)), "quantization_backend_configs"
 )

 if not os.path.exists(QUANTIZATION_BACKEND_CONFIG_IMAGE_PATH):
@@ -11,9 +11,9 @@ from torch.export import export


 PWD = Path(__file__).absolute().parent
-ROOT = Path(__file__).absolute().parent.parent.parent.parent
-SOURCE = ROOT / Path("source")
-EXPORTDB_SOURCE = SOURCE / Path("generated") / Path("exportdb")
+ROOT = Path(__file__).absolute().parents[3]
+SOURCE = ROOT / "source"
+EXPORTDB_SOURCE = SOURCE / "generated" / "exportdb"


 def generate_example_rst(example_case: ExportCase):
@@ -194,7 +194,7 @@ if __name__ == "__main__":
         "filename",
         nargs="?",
         default=str(
-            Path(__file__).absolute().parent.parent.parent
+            Path(__file__).absolute().parents[2]
             / "torch/testing/_internal/dynamo_test_failures.py"
         ),
         help="Optional path to dynamo_test_failures.py",

@@ -203,7 +203,7 @@ if __name__ == "__main__":
     parser.add_argument(
         "test_dir",
         nargs="?",
-        default=str(Path(__file__).absolute().parent.parent.parent / "test"),
+        default=str(Path(__file__).absolute().parents[2] / "test"),
         help="Optional path to test folder",
     )
     parser.add_argument(
@@ -41,7 +41,7 @@ Inherits most tests from TestNNAPI, which loads Android NNAPI models
 without the delegate API.
 """
 # First skip is needed for IS_WINDOWS or IS_MACOS to skip the tests.
-torch_root = Path(__file__).resolve().parent.parent.parent
+torch_root = Path(__file__).resolve().parents[2]
 lib_path = torch_root / "build" / "lib" / "libnnapi_backend.so"

@@ -7,6 +7,7 @@ import dataclasses
 import os
 import sys
 import unittest
+from pathlib import Path
 from typing import Tuple

 import onnxruntime

@@ -24,7 +25,8 @@ from torch.testing._internal import common_utils
 from torch.testing._internal.common_utils import skipIfNNModuleInlined


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 import onnx_test_common

@@ -45,8 +45,7 @@ _InputArgsType = Optional[
 _OutputsType = Sequence[_NumericType]

 onnx_model_dir = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)),
-    os.pardir,
+    os.path.dirname(os.path.dirname(os.path.realpath(__file__))),
     "repos",
     "onnx",
     "onnx",

@@ -54,11 +53,7 @@ onnx_model_dir = os.path.join(
     "test",
     "data",
 )

 pytorch_converted_dir = os.path.join(onnx_model_dir, "pytorch-converted")

 pytorch_operator_dir = os.path.join(onnx_model_dir, "pytorch-operator")

@@ -1,8 +1,8 @@
 # Owner(s): ["module: onnx"]
 from __future__ import annotations

-import os
 import sys
+from pathlib import Path

 import torch
 import torch.onnx

@@ -10,7 +10,8 @@ from torch.testing._internal import common_utils
 from torch.utils import _pytree as torch_pytree


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 import onnx_test_common

@@ -51,7 +51,7 @@ class TestQuantizationDocs(QuantizationTestCase):
                 "been updated to have the correct relative path between "
                 "test_docs.py and the docs."
             )
-            pytorch_root = core_dir.parent.parent.parent
+            pytorch_root = core_dir.parents[2]
             return pytorch_root / path_from_pytorch

         path_to_file = get_correct_path(path_from_pytorch)
@@ -30,7 +30,7 @@ DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "typing"))
 REVEAL_DIR = os.path.join(DATA_DIR, "reveal")
 PASS_DIR = os.path.join(DATA_DIR, "pass")
 FAIL_DIR = os.path.join(DATA_DIR, "fail")
-MYPY_INI = os.path.join(DATA_DIR, os.pardir, os.pardir, "mypy.ini")
+MYPY_INI = os.path.join(os.path.dirname(os.path.dirname(DATA_DIR)), "mypy.ini")
 CACHE_DIR = os.path.join(DATA_DIR, ".mypy_cache")

@@ -4,15 +4,16 @@
 import argparse
 import os
 import sys
+from pathlib import Path

-sys.path.append(
-    os.path.realpath(
-        os.path.join(
-            __file__, os.path.pardir, os.path.pardir, os.path.pardir, "torch", "utils"
-        )
-    )
-)
+# NOTE: `tools/amd_build/build_amd.py` could be a symlink.
+# The behavior of `symlink / '..'` is different from `symlink.parent`.
+# Use `pardir` three times rather than using `path.parents[2]`.
+REPO_ROOT = (
+    Path(__file__).absolute() / os.path.pardir / os.path.pardir / os.path.pardir
+).resolve()
+sys.path.append(str(REPO_ROOT / "torch" / "utils"))

 from hipify import hipify_python  # type: ignore[import]

@@ -53,8 +54,9 @@ parser.add_argument(

 args = parser.parse_args()

+# NOTE: `tools/amd_build/build_amd.py` could be a symlink.
 amd_build_dir = os.path.dirname(os.path.realpath(__file__))
-proj_dir = os.path.join(os.path.dirname(os.path.dirname(amd_build_dir)))
+proj_dir = os.path.dirname(os.path.dirname(amd_build_dir))

 if args.project_directory:
     proj_dir = args.project_directory
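The NOTE comments in the hunk above rest on a pathlib subtlety: `.parent` and `.parents[...]` are purely lexical, while appending `os.pardir` and calling `.resolve()` afterwards follows a symlink first and walks up from its target. A small sketch with made-up paths (not taken from the repository):

```python
import os
from pathlib import Path

# Suppose the script is reached through a symlink:
#   /work/checkout/tools/amd_build/build_amd.py -> /srv/pytorch/tools/amd_build/build_amd.py
link = Path("/work/checkout/tools/amd_build/build_amd.py")

# Lexical walk: never touches the filesystem, so it stays on the symlink's side.
print(link.parents[2])                                    # /work/checkout

# Appending os.pardir and resolving afterwards follows the symlink before the
# ".." components are applied, so the walk happens from the target's directory.
repo_root = (link / os.pardir / os.pardir / os.pardir).resolve()
print(repo_root)                                          # /srv/pytorch (when the link actually exists)
```

That difference is exactly why this file keeps the `pardir`-then-`resolve()` spelling instead of switching to `parents[2]`.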
@@ -1,13 +1,13 @@
 import argparse
 import sys
-from os.path import abspath, dirname
+from pathlib import Path


-# By appending pytorch_root to sys.path, this module can import other torch
+# By appending REPO_ROOT to sys.path, this module can import other torch
 # modules even when run as a standalone script. i.e., it's okay either you
 # do `python build_libtorch.py` or `python -m tools.build_libtorch`.
-pytorch_root = dirname(dirname(abspath(__file__)))
-sys.path.append(pytorch_root)
+REPO_ROOT = Path(__file__).absolute().parent.parent
+sys.path.append(str(REPO_ROOT))

 from tools.build_pytorch_libs import build_pytorch
 from tools.setup_helpers.cmake import CMake
@@ -43,9 +43,7 @@ def get_llvm_tool_path() -> str:
 def get_pytorch_folder() -> str:
     # TOOLS_FOLDER in oss: pytorch/tools/code_coverage
     return os.path.abspath(
-        os.environ.get(
-            "PYTORCH_FOLDER", os.path.join(TOOLS_FOLDER, os.path.pardir, os.path.pardir)
-        )
+        os.environ.get("PYTORCH_FOLDER", os.path.dirname(os.path.dirname(TOOLS_FOLDER)))
     )

@@ -2,13 +2,12 @@ from __future__ import annotations

 import os
 from enum import Enum
+from pathlib import Path


 # <project folder>
 HOME_DIR = os.environ["HOME"]
-TOOLS_FOLDER = os.path.join(
-    os.path.dirname(os.path.realpath(__file__)), os.path.pardir, os.path.pardir
-)
+TOOLS_FOLDER = str(Path(__file__).resolve().parents[2])

 # <profile folder>
@@ -10,24 +10,28 @@ import glob
 import io
 import os
 import re
-import sys
-from itertools import product
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
 import subprocess
+import sys
 import textwrap
 from dataclasses import dataclass
+from itertools import product
+from pathlib import Path
 from typing import Any

 import yaml
 from yaml.constructor import ConstructorError
 from yaml.nodes import MappingNode

 try:
     from yaml import CLoader as Loader
 except ImportError:
     from yaml import Loader  # type: ignore[assignment, misc]


+REPO_ROOT = Path(__file__).absolute().parent.parent
+sys.path.append(str(REPO_ROOT))

 CPP_H_NAME = "spv.h"
 CPP_SRC_NAME = "spv.cpp"
@@ -26,10 +26,7 @@ try:
     PYTORCH_ROOT = result.stdout.decode("utf-8").strip()
 except subprocess.CalledProcessError:
     # If git is not installed, compute repo root as 3 folders up from this file
-    path_ = os.path.abspath(__file__)
-    for _ in range(4):
-        path_ = os.path.dirname(path_)
-    PYTORCH_ROOT = path_
+    PYTORCH_ROOT = str(Path(__file__).absolute().parents[3])

 DRY_RUN = False

@@ -30,7 +30,7 @@ def read_sub_write(path: str, prefix_pat: str, new_default: int) -> None:


 def main(args: Any) -> None:
-    pytorch_dir = Path(__file__).parent.parent.parent.resolve()
+    pytorch_dir = Path(__file__).parents[2].resolve()
     onnx_dir = pytorch_dir / "third_party" / "onnx"
     os.chdir(onnx_dir)
@@ -8,6 +8,7 @@ import platform
 import sys
 import sysconfig
 from distutils.version import LooseVersion
+from pathlib import Path
 from subprocess import CalledProcessError, check_call, check_output
 from typing import Any, cast

@@ -173,9 +174,7 @@ class CMake:
            toolset_expr = ",".join([f"{k}={v}" for k, v in toolset_dict.items()])
            args.append("-T" + toolset_expr)

-        base_dir = os.path.dirname(
-            os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-        )
+        base_dir = str(Path(__file__).absolute().parents[2])
         install_dir = os.path.join(base_dir, "torch")

         _mkdir_p(install_dir)
@@ -1,11 +1,11 @@
 # Little stub file to get BUILD.bazel to play along

-import os.path
 import sys
+from pathlib import Path


-root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-sys.path.insert(0, root)
+REPO_ROOT = Path(__file__).absolute().parents[2]
+sys.path.insert(0, str(REPO_ROOT))

 import torchgen.gen

@@ -1,11 +1,11 @@
 # Little stub file to get BUILD.bazel to play along

-import os.path
 import sys
+from pathlib import Path


-root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-sys.path.insert(0, root)
+REPO_ROOT = Path(__file__).absolute().parents[2]
+sys.path.insert(0, str(REPO_ROOT))

 import tools.jit.gen_unboxing
@@ -15,6 +15,7 @@ try:
 except ImportError:
     from yaml import SafeLoader as YamlLoader  # type: ignore[assignment, misc]


 NATIVE_FUNCTIONS_PATH = "aten/src/ATen/native/native_functions.yaml"
 TAGS_PATH = "aten/src/ATen/native/tags.yaml"

@@ -110,8 +111,9 @@ def get_selector(
     operators_yaml_path: str | None,
 ) -> Any:
     # cwrap depends on pyyaml, so we can't import it earlier
-    root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
-    sys.path.insert(0, root)
+    REPO_ROOT = Path(__file__).absolute().parents[2]
+    sys.path.insert(0, str(REPO_ROOT))

     from torchgen.selective_build.selector import SelectiveBuilder

     assert not (
@@ -2,8 +2,9 @@ import sys
 from pathlib import Path


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]
 sys.path.append(str(REPO_ROOT))

 from tools.stats.import_test_stats import get_test_class_times, get_test_times

@@ -11,7 +11,7 @@ from typing import Any, Callable, cast, Dict
 from urllib.request import urlopen


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]


 def get_disabled_issues() -> list[str]:
@@ -10,7 +10,7 @@ from typing import Any
 from unittest import mock


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[3]
 sys.path.append(str(REPO_ROOT))

 from tools.test.heuristics.test_interface import TestTD

@@ -6,7 +6,7 @@ from pathlib import Path
 from typing import Any


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[3]
 sys.path.append(str(REPO_ROOT))

 import tools.testing.target_determination.heuristics.interface as interface

@@ -6,7 +6,7 @@ from pathlib import Path
 from typing import Any


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[3]
 sys.path.append(str(REPO_ROOT))

 import tools.testing.target_determination.heuristics.utils as utils
@@ -2,7 +2,6 @@

 from __future__ import annotations

-import os
 import tempfile
 import unittest

@@ -12,10 +11,6 @@ from torchgen.gen import _GLOBAL_PARSE_NATIVE_YAML_CACHE  # noqa: F401
 from torchgen.gen_backend_stubs import run


-path = os.path.dirname(os.path.realpath(__file__))
-gen_backend_stubs_path = os.path.join(path, "../torchgen/gen_backend_stubs.py")
-
-
 # gen_backend_stubs.py is an integration point that is called directly by external backends.
 # The tests here are to confirm that badly formed inputs result in reasonable error messages.
 class TestGenBackendStubs(expecttest.TestCase):
@@ -3,7 +3,7 @@ import unittest
 from pathlib import Path


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))

@@ -8,7 +8,7 @@ from collections import defaultdict
 from pathlib import Path


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]
 try:
     # using tools/ to optimize test run.
     sys.path.append(str(REPO_ROOT))
@@ -10,7 +10,7 @@ from typing import Any
 from unittest import mock


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]
 sys.path.insert(0, str(REPO_ROOT))

 from tools.stats.upload_metrics import add_global_metric, emit_metric, global_metrics

@@ -9,7 +9,7 @@ from pathlib import Path
 CPP_TEST_PREFIX = "cpp"
 CPP_TEST_PATH = "build/bin"
 CPP_TESTS_DIR = os.path.abspath(os.getenv("CPP_TESTS_DIR", default=CPP_TEST_PATH))
-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]


 def parse_test_module(test: str) -> str:
@@ -4,7 +4,7 @@ import sys
 from pathlib import Path


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]
 sys.path.insert(0, str(REPO_ROOT))

 from tools.stats.import_test_stats import (

@@ -12,7 +12,7 @@ from typing import Any
 import yaml


-REPO_ROOT = Path(__file__).parent.parent.parent
+REPO_ROOT = Path(__file__).parents[2]
 CONFIG_YML = REPO_ROOT / ".circleci" / "config.yml"
 WORKFLOWS_DIR = REPO_ROOT / ".github" / "workflows"
@@ -8,7 +8,7 @@ from pathlib import Path
 from typing import Any


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]

 # These tests are slow enough that it's worth calculating whether the patch
 # touched any related files first. This list was manually generated, but for every

@@ -6,7 +6,7 @@ from pathlib import Path
 from typing import Any


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[3]


 def gen_ci_artifact(included: list[Any], excluded: list[Any]) -> None:
@@ -17,7 +17,7 @@ from tools.testing.target_determination.heuristics.utils import (
 from tools.testing.test_run import TestRun


-REPO_ROOT = Path(__file__).parent.parent.parent.parent
+REPO_ROOT = Path(__file__).parents[3]

 keyword_synonyms: dict[str, list[str]] = {
     "amp": ["mixed_precision"],

@@ -16,7 +16,7 @@ from tools.testing.target_determination.heuristics.utils import normalize_rating
 from tools.testing.test_run import TestRun


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[4]


 class LLM(HeuristicInterface):
@@ -20,7 +20,7 @@ from tools.testing.target_determination.heuristics.utils import (
 from tools.testing.test_run import TestRun


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[4]


 class PreviouslyFailedInPR(HeuristicInterface):

@@ -15,7 +15,8 @@ from warnings import warn
 if TYPE_CHECKING:
     from tools.testing.test_run import TestRun

-REPO_ROOT = Path(__file__).resolve().parent.parent.parent.parent.parent
+
+REPO_ROOT = Path(__file__).resolve().parents[4]


 def python_test_file_to_test_name(tests: set[str]) -> set[str]:
@@ -14,7 +14,7 @@ if TYPE_CHECKING:
     from collections.abc import Sequence


-REPO_ROOT = Path(__file__).resolve().parent.parent.parent
+REPO_ROOT = Path(__file__).resolve().parents[2]

 IS_MEM_LEAK_CHECK = os.getenv("PYTORCH_TEST_CUDA_MEM_LEAK_CHECK", "0") == "1"
 BUILD_ENVIRONMENT = os.getenv("BUILD_ENVIRONMENT", "")
@@ -5,6 +5,7 @@ import functools
 import os
 import sys
 import warnings
+from pathlib import Path
 from types import ModuleType
 from typing import Any, Callable, Dict

@@ -51,15 +52,13 @@ def _reload_python_module(key, path):
 def _set_triton_ptxas_path() -> None:
     if os.environ.get("TRITON_PTXAS_PATH") is not None:
         return
-    ptxas_path = os.path.abspath(
-        os.path.join(os.path.dirname(__file__), "..", "bin", "ptxas")
-    )
-    if not os.path.exists(ptxas_path):
+    ptxas = Path(__file__).absolute().parents[1] / "bin" / "ptxas"
+    if not ptxas.exists():
         return
-    if os.path.isfile(ptxas_path) and os.access(ptxas_path, os.X_OK):
-        os.environ["TRITON_PTXAS_PATH"] = ptxas_path
+    if ptxas.is_file() and os.access(ptxas, os.X_OK):
+        os.environ["TRITON_PTXAS_PATH"] = str(ptxas)
     else:
-        warnings.warn(f"{ptxas_path} exists but is not an executable")
+        warnings.warn(f"{ptxas} exists but is not an executable")


 def _worker_compile_triton(load_kernel: Callable[[], Any], extra_env: Dict[str, str]):
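As an aside, the os.path idioms removed in the hunk above have direct pathlib counterparts, except the executable check: pathlib has no permission helper, so `os.access` stays (it accepts `Path` objects). A standalone sketch, with the ptxas location assumed rather than taken from a real install:

```python
import os
from pathlib import Path

ptxas = Path(__file__).absolute().parents[1] / "bin" / "ptxas"   # assumed layout

print(ptxas.exists())               # replaces os.path.exists(ptxas_path)
print(ptxas.is_file())              # replaces os.path.isfile(ptxas_path)
print(os.access(ptxas, os.X_OK))    # unchanged: pathlib has no equivalent for access checks
```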
@@ -5008,7 +5008,7 @@ def find_library_location(lib_name: str) -> Path:
     path = torch_root / 'lib' / lib_name
     if os.path.exists(path):
         return path
-    torch_root = Path(__file__).resolve().parent.parent.parent
+    torch_root = Path(__file__).resolve().parents[2]
     return torch_root / 'build' / 'lib' / lib_name

 def skip_but_pass_in_sandcastle(reason):
@@ -1,13 +1,12 @@
 # mypy: ignore-errors
-import os
 import random
 import sys
-
-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from pathlib import Path

 from typing import Any


+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from benchmark_runner import BenchmarkRunner  # type: ignore[import-not-found]
 from benchmark_utils import (  # type: ignore[import-not-found]
     fits_in_memory,

@@ -1,12 +1,12 @@
-import os
 import sys
 import unittest
-
-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from pathlib import Path

 from expecttest import TestCase


+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from test_utils import read_file_to_string, run_bash  # type: ignore[import-not-found]
@@ -1,9 +1,9 @@
 # mypy: ignore-errors
-import os
 import sys
+from pathlib import Path


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from train_decision import AHTrainDecisionTree

@@ -1,13 +1,12 @@
 import itertools
-import os
 import random
 import sys
-
-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from pathlib import Path

 from typing import Any


+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from benchmark_runner import BenchmarkRunner  # type: ignore[import-not-found]
 from benchmark_utils import (  # type: ignore[import-not-found]
     fits_in_memory,
@@ -1,11 +1,11 @@
 # mypy: ignore-errors
-import os
 import sys
+from pathlib import Path

 import pandas as pd  # type: ignore[import-untyped]


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from train_decision import AHTrainDecisionTree

@@ -1,12 +1,11 @@
-import os
 import random
 import sys
-
-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from pathlib import Path

 from typing import Any


+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from benchmark_runner import BenchmarkRunner  # type: ignore[import-not-found]
 from benchmark_utils import (  # type: ignore[import-not-found]
     fits_in_memory,
@@ -1,12 +1,12 @@
-import os
 import sys
 import unittest
-
-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+from pathlib import Path

 from expecttest import TestCase


+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from test_utils import read_file_to_string, run_bash  # type: ignore[import-not-found]

@@ -1,9 +1,9 @@
 # mypy: ignore-errors
-import os
 import sys
+from pathlib import Path


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from train_decision import AHTrainDecisionTree
@@ -1,9 +1,9 @@
 # mypy: ignore-errors
-import os
 import sys
+from pathlib import Path


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from train_regression import AHTrainRegressionTree

@@ -1,9 +1,9 @@
 # mypy: ignore-errors
-import os
 import sys
+from pathlib import Path


-sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.path.append(str(Path(__file__).absolute().parents[1]))

 from train_regression import AHTrainRegressionTree
@@ -533,7 +533,7 @@ def run(
     source_yaml: str, output_dir: str, dry_run: bool, impl_path: str | None = None
 ) -> None:
     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    pytorch_root = Path(__file__).parent.parent.absolute()
+    pytorch_root = Path(__file__).absolute().parent.parent
     template_dir = os.path.join(pytorch_root, "aten/src/ATen/templates")

     def make_file_manager(install_dir: str) -> FileManager:

@@ -256,7 +256,7 @@ def main() -> None:
     options = parser.parse_args()

     # Assumes that this file lives at PYTORCH_ROOT/torchgen/gen_backend_stubs.py
-    torch_root = Path(__file__).parent.parent.parent.absolute()
+    torch_root = Path(__file__).absolute().parents[2]
     aten_path = str(torch_root / "aten" / "src" / "ATen")
     lazy_ir_generator: type[GenLazyIR] = default_args.lazy_ir_generator
     if options.gen_ts_lowerings: