pytorch/torch/_C/_distributed_autograd.pyi
Xu Zhao fe77ded48a Add Python declaration of torch._C and torch._C._autograd modules. (#46622)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/46622

Test Plan: Imported from OSS

Reviewed By: malfet

Differential Revision: D24761503

Pulled By: xuzhao9

fbshipit-source-id: c7ff9a9e46480a83bf6961e09972b5d20bdeb67b
2020-11-06 01:25:47 -08:00

26 lines
908 B
Python

import torch
from typing import Dict, List, Set, Any
# This module is defined in torch/csrc/distributed/autograd/init.cpp
class DistAutogradContext:
    """Stub for the per-pass distributed autograd context.

    The real class is bound from C++ in
    torch/csrc/distributed/autograd/init.cpp; this declares its Python
    surface for type checkers only. Body indentation restored — the
    original extract had the method stubs at column 0, which is not
    valid Python.
    """

    def _context_id(self) -> int:
        # Integer id of this context (used as the key for
        # _release_context / _retrieve_context below).
        ...

    def _recv_functions(self) -> Dict[int, Any]:
        # NOTE(review): value type is opaque from here (Any) — the
        # concrete recv-function type lives in the C++ binding.
        ...

    def _send_functions(self) -> Dict[int, Any]:
        # Same shape as _recv_functions; see C++ binding for semantics.
        ...

    def _known_worker_ids(self) -> Set[int]:
        # Worker ids known to this context — presumably participants in
        # the distributed backward pass; verify against init.cpp.
        ...
def _new_context() -> DistAutogradContext: ...
def _release_context(context_id: int) -> None: ...
def _get_max_id() -> int: ...
def _is_valid_context(worker_id: int) -> bool: ...
def _retrieve_context(context_id: int) -> DistAutogradContext: ...
def _current_context() -> DistAutogradContext: ...
def _init(worker_id: int) -> None: ...
def _get_debug_info() -> Dict[str, str]: ...
def backward(
    context_id: int,
    roots: List[torch.Tensor],
    retain_graph: bool = False,
) -> None:
    """Stub for the distributed backward pass (implemented in C++).

    Fixes over the original stub: ``retain_graph`` now carries its
    ``bool`` annotation (it previously had only a bare ``= False``
    default, which defeats the purpose of a .pyi declaration), and the
    continuation lines are conventionally indented. The default value
    and all parameter names are unchanged, so existing keyword callers
    are unaffected.
    """
    ...
def get_gradients(context_id: int) -> Dict[torch.Tensor, torch.Tensor]: ...