pytorch/torch/distributed/_checkpointable.py

# Copyright (c) Meta Platforms, Inc. and affiliates
from typing import List, Protocol, runtime_checkable

import torch


@runtime_checkable
class _Checkpointable(Protocol):  # noqa: PYI046
    """
    Interface for checkpointable objects.

    Because this is a Protocol, implicit subtyping is supported: subclasses do not
    need to inherit from it explicitly. This allows arbitrary objects and tensor
    subclasses to hook into DCP seamlessly by implementing the interface.
    """

    def __create_write_items__(self, fqn: str, object: object) -> List[object]:
        """
        Return a list of WriteItems based on object's contents.
        """
        raise NotImplementedError(
            "_Checkpointable.__create_write_items__ is not implemented"
        )

    def __create_chunk_list__(self) -> List[object]:
        """
        Return a list of `ChunkStorageMetadata` based on object's contents.
        """
        raise NotImplementedError(
            "_Checkpointable.__create_chunk_list__ is not implemented"
        )

    def __get_tensor_shard__(self, index: int) -> torch.Tensor:
        """
        Return a `torch.Tensor` shard based on the given `MetadataIndex`.
        """
        raise NotImplementedError(
            "_Checkpointable.__get_tensor_shard__ is not implemented"
        )
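

# --- Usage sketch (illustrative, not part of the upstream file) --------------
# Because `_Checkpointable` is a runtime-checkable Protocol, any object that
# defines the three dunder methods satisfies `isinstance` checks against it
# without inheriting from it. The class below and its placeholder return
# values are hypothetical; a real implementation would return `WriteItem` and
# `ChunkStorageMetadata` objects from `torch.distributed.checkpoint`.
class _ToyShardedState:
    """Hypothetical object that exposes its shards to DCP via the protocol."""

    def __init__(self, shards: List[torch.Tensor]) -> None:
        self._shards = shards

    def __create_write_items__(self, fqn: str, object: object) -> List[object]:
        # A real implementation would build one WriteItem per shard here.
        return [f"{fqn}.shard_{i}" for i in range(len(self._shards))]

    def __create_chunk_list__(self) -> List[object]:
        # A real implementation would describe each shard's offset/size here.
        return list(range(len(self._shards)))

    def __get_tensor_shard__(self, index: int) -> torch.Tensor:
        return self._shards[index]


# Implicit subtyping: no inheritance from _Checkpointable is required.
assert isinstance(_ToyShardedState([torch.zeros(2)]), _Checkpointable)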