Ports over the step closure functionality from PyTorch/XLA to Lazy Tensor Core.

References:
- 205ae574c0/torch_xla/core/xla_model.py (L852-L900)
- 205ae574c0/torch_xla/utils/closures.py (L7-L83)

CC: @wconstab @JackCaoG @Krovatkin

Pull Request resolved: https://github.com/pytorch/pytorch/pull/84300
Approved by: https://github.com/JackCaoG, https://github.com/wconstab
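In PyTorch/XLA, a step closure queues a host-side callback (logging, metric reporting) to run at the step boundary instead of forcing a device sync in the middle of tracing; this PR brings that pattern to Lazy Tensor Core. The sketch below is only a minimal illustration of the pattern under that assumption: the names add_step_closure, run_step_closures, and _step_closures echo the XLA naming but are hypothetical and not taken verbatim from the ported module.

from typing import Any, Callable, List, Tuple

# Hypothetical module-level queue of (closure, args) pairs for the current step.
_step_closures: List[Tuple[Callable[..., Any], Tuple[Any, ...]]] = []


def add_step_closure(closure: Callable[..., Any], args: Tuple[Any, ...] = ()) -> None:
    # Queue a host-side callback (e.g. printing a loss value) so it runs at the
    # end of the step rather than forcing synchronization immediately.
    _step_closures.append((closure, args))


def run_step_closures() -> None:
    # Invoked at the step boundary (mark_step in XLA terms): drain the queue and
    # call each registered closure with the arguments it was queued with.
    global _step_closures
    closures, _step_closures = _step_closures, []
    for closure, args in closures:
        closure(*args)


# Example: report a metric once per step without blocking the lazy graph build.
add_step_closure(print, args=("step finished",))
run_step_closures()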
26 lines · 634 B · Python
import threading
from typing import Any, Dict

import torch._C._lazy


class DeviceContext:
    # Process-wide registry of per-device contexts, guarded by a lock so
    # contexts can be created lazily from multiple threads.
    _CONTEXTS: Dict[str, Any] = dict()
    _CONTEXTS_LOCK = threading.Lock()

    def __init__(self, device):
        self.device = device


def get_device_context(device=None):
    # Default to the backend's default device type when no device is given;
    # otherwise normalize the device to its string form for use as a key.
    if device is None:
        device = torch._C._lazy._get_default_device_type()
    else:
        device = str(device)
    with DeviceContext._CONTEXTS_LOCK:
        devctx = DeviceContext._CONTEXTS.get(device, None)
        if devctx is None:
            devctx = DeviceContext(device)
            DeviceContext._CONTEXTS[device] = devctx
        return devctx
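A minimal usage sketch for the registry above, assuming the module is importable as torch._lazy.device_context and that "lazy:0" is a valid device string for the backend; both are assumptions based on this file view rather than confirmed API, and the pending_closures attribute is purely illustrative.

import torch._lazy.device_context as device_context

# Same device string -> same cached DeviceContext instance.
ctx_a = device_context.get_device_context("lazy:0")
ctx_b = device_context.get_device_context("lazy:0")
assert ctx_a is ctx_b

# Passing no device falls back to the backend's default device type.
default_ctx = device_context.get_device_context()
print(default_ctx.device)

# Callers can hang per-device state (e.g. pending step closures) off the
# shared context object; the attribute name here is illustrative only.
ctx_a.pending_closures = []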