Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/37021

Test Plan: Imported from OSS

Differential Revision: D21164318

Pulled By: mrshenli

fbshipit-source-id: 08a446af342cbe54f3eb4994956ffa7ef4922bcf
29 lines · 939 B · Python
from __future__ import absolute_import, division, print_function, unicode_literals

import torch


def is_available():
    """
    Returns ``True`` if the distributed package is available. Otherwise,
    ``torch.distributed`` does not expose any other APIs. Currently,
    ``torch.distributed`` is available on Linux and MacOS. Set
    ``USE_DISTRIBUTED=1`` to enable it when building PyTorch from source.
    Currently, the default value is ``USE_DISTRIBUTED=1`` for Linux and
    ``USE_DISTRIBUTED=0`` for MacOS.
    """
    return hasattr(torch._C, "_c10d_init")


if is_available() and not torch._C._c10d_init():
    raise RuntimeError("Failed to initialize torch.distributed")


if is_available():
    from .distributed_c10d import *
    # Variables prefixed with underscore are not auto imported
    # See the comment in `distributed_c10d.py` above `_backend` on why we expose
    # this.
    from .distributed_c10d import _backend
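The docstring above is the only user-facing documentation in this file: ``torch.distributed`` exposes its APIs only in builds with distributed support, so callers are expected to guard setup code behind ``is_available()``. Below is a minimal usage sketch of that guard; the single-process "gloo" configuration (init_method, rank, world_size) is an illustrative assumption, not something this file prescribes.

import torch.distributed as dist

if dist.is_available():
    # init_process_group, get_rank, get_world_size, and destroy_process_group
    # are re-exported here from distributed_c10d via the star import above.
    dist.init_process_group(
        backend="gloo",                       # assumed backend for this sketch
        init_method="tcp://127.0.0.1:29500",  # assumed rendezvous address
        rank=0,
        world_size=1,
    )
    print(dist.get_rank(), dist.get_world_size())
    dist.destroy_process_group()
else:
    print("torch.distributed is not available in this build")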