mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-07 12:21:27 +01:00
Summary: I have no idea how to run distributed tests locally so I'll let CI do this. Hopefully everything still works with `IntEnum`. cc mcarilli Pull Request resolved: https://github.com/pytorch/pytorch/pull/11715 Reviewed By: pietern Differential Revision: D9889646 Pulled By: SsnL fbshipit-source-id: 1e2a487cb6fe0bd4cc67501c9d72a295c35693e2
18 lines
460 B
Python
18 lines
460 B
Python
import torch
|
|
|
|
|
|
def is_available():
    """Return ``True`` if PyTorch was built with c10d distributed support.

    Detection is purely structural: the C extension exposes the
    ``_c10d_init`` hook only when distributed support was compiled in.
    """
    c_extension = torch._C
    return hasattr(c_extension, "_c10d_init")
|
|
|
|
|
|
# Eagerly run the c10d initialization hook at import time. If the C
# extension advertises distributed support but its init hook reports
# failure, fail the import loudly rather than leaving a half-initialized
# `torch.distributed` module behind.
if is_available() and not torch._C._c10d_init():
    raise RuntimeError("Failed to initialize PyTorch distributed support")
|
|
|
|
|
|
# Only populate the public API when distributed support was compiled in;
# on builds without c10d, `torch.distributed` imports cleanly but stays
# empty apart from `is_available()`.
if is_available():
    from .distributed_c10d import *
    # Variables prefixed with underscore are not auto imported
    # See the comment in `distributed_c10d.py` above `_backend` on why we expose
    # this.
    from .distributed_c10d import _backend
|