Summary: Fixed an XLC build failure by generating an always-return-false
default CanBeReused method.
This reverts commit 3cade9d454.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/77513
Approved by: https://github.com/alanwaketan
21 lines · 878 B · Python
from typing import List

from torch import Tensor

# Defined in torch/csrc/lazy/python/init.cpp
def _mark_step(device: str, devices: List[str], wait: bool): ...
def _wait_device_ops(devices: List[str]): ...
def _reset_metrics(): ...
def _counter_names() -> List[str]: ...
def _counter_value(name: str) -> int: ...
def _get_graph_hash(tensors: List[Tensor]) -> str: ...
def _sync_multi(tensors: List[Tensor], devices: List[str], wait: bool = True, sync_ltc_data: bool = True): ...
def _get_tensor_id(tensor: Tensor) -> int: ...
def _get_tensors_text(tensors: List[Tensor]) -> str: ...
def _get_tensors_dot(tensors: List[Tensor]) -> str: ...
def _get_tensors_backend(tensors: List[Tensor]) -> str: ...
def _get_force_fallback() -> str: ...
def _set_force_fallback(newval: str): ...
def _clear_ir_cache(): ...
def _dump_ir_cache(filename: str): ...
def _set_reuse_ir(val: bool): ...
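The stubs above describe the bindings that torch/csrc/lazy/python/init.cpp installs under torch._C._lazy; user code normally reaches them through the torch._lazy helpers rather than calling them directly. The following is a minimal usage sketch, not part of the stub file: it assumes the TorchScript lazy backend is available and registered via torch._lazy.ts_backend.init(), and it treats an empty device string in _mark_step as "the default lazy device", which is an assumption rather than something the stubs guarantee.

import torch
import torch._lazy.ts_backend  # assumed helper that registers the TorchScript lazy backend

torch._lazy.ts_backend.init()

# Operations on "lazy" tensors are recorded into an IR graph instead of running eagerly.
x = torch.randn(2, 2, device="lazy")
y = (x @ x).relu()

# Text dump of the IR that currently backs `y`.
print(torch._C._lazy._get_tensors_text([y]))

# _mark_step(device, devices, wait): compile and execute everything pending on the
# lazy device; "" is assumed to select the default device, and wait=True blocks
# on the CPU-side work (it does not synchronize the accelerator).
torch._C._lazy._mark_step("", [], True)

# Metrics introspection: list the counters recorded by the lazy tensor core and read their values.
for name in torch._C._lazy._counter_names():
    print(name, torch._C._lazy._counter_value(name))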