[BE] document distributed apis (#165194)
This PR documents some `torch.distributed.distributed_c10d` APIs. Below are some screenshots of the rendered docs.

<img width="909" height="527" alt="Screenshot 2025-10-10 at 10 18 40 PM" src="https://github.com/user-attachments/assets/555ae886-bead-47f3-8c67-9bc91c14bd11" />
<img width="885" height="548" alt="Screenshot 2025-10-10 at 10 18 47 PM" src="https://github.com/user-attachments/assets/1d6f7af1-db28-40f9-927e-5c47668a1a88" />

Pull Request resolved: https://github.com/pytorch/pytorch/pull/165194
Approved by: https://github.com/janeyx99
This commit is contained in:
parent a71ca4dcb9
commit fa95882093
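For reference, the lifecycle and introspection helpers this PR documents can be exercised with a minimal, single-process sketch like the one below (the gloo backend and the env:// rendezvous settings are illustrative only, not part of this change):

```python
import os

import torch.distributed as dist

# Backend/launcher introspection helpers covered by the new docs.
print(dist.is_nccl_available(), dist.is_gloo_available(), dist.is_mpi_available())
print(dist.is_torchelastic_launched())

# A throwaway single-process group, just to show the lifecycle APIs.
os.environ.setdefault("MASTER_ADDR", "127.0.0.1")
os.environ.setdefault("MASTER_PORT", "29500")
dist.init_process_group(backend="gloo", rank=0, world_size=1)
assert dist.is_initialized()
print(dist.get_backend(), dist.get_rank(), dist.get_world_size())
dist.destroy_process_group()
```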
```diff
@@ -534,42 +534,6 @@ coverage_ignore_functions = [
     # torch.distributed.checkpoint.utils
     "find_state_dict_object",
     "find_tensor_shard",
-    # torch.distributed.collective_utils
-    "all_gather",
-    "all_gather_object_enforce_type",
-    "broadcast",
-    # torch.distributed.distributed_c10d
-    "all_gather",
-    "all_gather_coalesced",
-    "all_gather_into_tensor",
-    "all_gather_object",
-    "all_reduce",
-    "all_reduce_coalesced",
-    "all_to_all",
-    "all_to_all_single",
-    "barrier",
-    "batch_isend_irecv",
-    "broadcast",
-    "broadcast_object_list",
-    "destroy_process_group",
-    "gather",
-    "gather_object",
-    "get_backend",
-    "get_backend_config",
-    "get_global_rank",
-    "get_group_rank",
-    "get_process_group_ranks",
-    "get_rank",
-    "get_world_size",
-    "init_process_group",
-    "irecv",
-    "is_backend_available",
-    "is_gloo_available",
-    "is_initialized",
-    "is_mpi_available",
-    "is_nccl_available",
-    "is_torchelastic_launched",
-    "is_ucc_available",
     "isend",
     "monitored_barrier",
     "new_group",
```
```diff
@@ -643,15 +607,8 @@ coverage_ignore_functions = [
     "transformer_auto_wrap_policy",
     "wrap",
     # torch.distributed.nn.functional
-    "all_gather",
-    "all_reduce",
     "all_to_all",
     "all_to_all_single",
-    "broadcast",
-    "gather",
-    "reduce",
-    "reduce_scatter",
-    "scatter",
     # torch.distributed.nn.jit.instantiator
     "get_arg_return_types_from_interface",
     "instantiate_non_scriptable_remote_module_template",
```
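The two hunks above shrink the docs-coverage ignore list. The sketch below shows the general shape of such a list in a Sphinx `conf.py` using `sphinx.ext.coverage`; it is an illustrative stand-in, not PyTorch's actual configuration:

```python
# Illustrative Sphinx conf.py fragment (assumed setup, not PyTorch's real one).
# Names listed in coverage_ignore_functions are skipped by the docs-coverage
# check, so deleting an entry forces that function to be documented, which is
# what this PR does for the distributed_c10d helpers.
extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.coverage",
]

coverage_ignore_functions = [
    # still undocumented, so still ignored
    "find_state_dict_object",
    "find_tensor_shard",
    # "is_initialized",  # removed: now documented via an autofunction entry
]
```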
```diff
@@ -10,6 +10,7 @@ torch.cpu
     current_device
     current_stream
     is_available
+    is_initialized
     synchronize
     stream
     set_device
```
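Since `is_initialized` joins the `torch.cpu` autosummary above, here is a small hedged sketch of that device-generic CPU API surface (return values are not asserted; check the rendered docs for specifics):

```python
import torch

# torch.cpu mirrors the accelerator-style device APIs so device-generic code
# also runs on CPU-only builds.
print(torch.cpu.is_available())
print(torch.cpu.is_initialized())   # the entry added to the docs list above
print(torch.cpu.current_device())

# Stream APIs are effectively no-ops on CPU but keep the code path uniform.
s = torch.cpu.Stream()
with torch.cpu.stream(s):
    x = torch.ones(4) * 2
torch.cpu.synchronize()
print(x)
```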
````diff
@@ -221,6 +221,16 @@ inconsistent 'UUID' assignment across ranks, and to prevent races during initial
 
 ```{eval-rst}
 .. autofunction:: torch.distributed.distributed_c10d.is_xccl_available
+.. autofunction:: torch.distributed.distributed_c10d.batch_isend_irecv
+.. autofunction:: torch.distributed.distributed_c10d.destroy_process_group
+.. autofunction:: torch.distributed.distributed_c10d.is_backend_available
+.. autofunction:: torch.distributed.distributed_c10d.irecv
+.. autofunction:: torch.distributed.distributed_c10d.is_gloo_available
+.. autofunction:: torch.distributed.distributed_c10d.is_initialized
+.. autofunction:: torch.distributed.distributed_c10d.is_mpi_available
+.. autofunction:: torch.distributed.distributed_c10d.is_nccl_available
+.. autofunction:: torch.distributed.distributed_c10d.is_torchelastic_launched
+.. autofunction:: torch.distributed.distributed_c10d.is_ucc_available
 ```
 
 ```{eval-rst}
````
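Two of the newly documented functions, `batch_isend_irecv` and `irecv`, are point-to-point primitives. A hedged ring-exchange sketch, assuming an already initialized process group with at least two ranks:

```python
import torch
import torch.distributed as dist

def ring_exchange(rank: int, world_size: int) -> torch.Tensor:
    """Send this rank's tensor to the next rank, receive from the previous one."""
    send_buf = torch.full((4,), float(rank))
    recv_buf = torch.empty(4)
    ops = [
        dist.P2POp(dist.isend, send_buf, (rank + 1) % world_size),
        dist.P2POp(dist.irecv, recv_buf, (rank - 1) % world_size),
    ]
    # batch_isend_irecv launches all ops and returns their async work handles.
    reqs = dist.batch_isend_irecv(ops)
    for req in reqs:
        req.wait()
    return recv_buf
```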