mirror of https://github.com/zebrajr/pytorch.git
synced 2025-12-07 12:21:27 +01:00
[dist_optim] add warning to distributed optimizer (#50630)
Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/50630

Add a warning log to the distributed optimizer, to warn the user when the optimizer is created without TorchScript support.

Test Plan: Imported from OSS

Reviewed By: rohan-varma

Differential Revision: D25932777

Pulled By: wanchaol

fbshipit-source-id: 8db3b98bdd27fc04c5a3b8d910b028c0c37f138d
This commit is contained in:
parent 6dda0363bb
commit 3562ca2da2
torch/distributed/optim/optimizer.py

@@ -1,4 +1,5 @@
 from typing import List, Optional
+import logging
 
 import torch.distributed.rpc as rpc
 import torch.optim as optim
@@ -18,6 +19,7 @@ import torch.distributed.autograd as dist_autograd
 from collections import defaultdict
 from threading import Lock
 
+logger = logging.getLogger(__name__)
 
 # XXX: we define a _ScriptModuleOptimizer here to explicitly
 # compile the FunctionalOptimizer class into TorchScript
@@ -207,6 +209,13 @@ class DistributedOptimizer:
         if self.is_functional_optim:
             optimizer_new_func = _new_script_local_optimizer
         else:
+            logger.warn(
+                f"Creating the optimizer {optimizer_class} without TorchScript support, "
+                "this might result in slow computation time in multithreading environment"
+                "(i.e. Distributed Model Parallel training on CPU) due to the Python's "
+                "Global Interpreter Lock (GIL). Please file an issue if you need this "
+                "optimizer in TorchScript. "
+            )
             optimizer_new_func = _new_local_optimizer
 
         remote_optim_futs = []
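For context, a minimal sketch of how the new warning surfaces, modeled on the standard DistributedOptimizer usage pattern. The single-process RPC setup, worker name, address/port, and tensor shapes are illustrative assumptions, as is the premise that a stock torch.optim class such as optim.SGD takes the non-TorchScript else branch (as it does at this commit):

import os

import torch
import torch.distributed.autograd as dist_autograd
import torch.distributed.rpc as rpc
import torch.optim as optim
from torch.distributed.optim import DistributedOptimizer

# Single-process RPC setup so the sketch is self-contained; a real
# deployment would span multiple workers. Address and port are illustrative.
os.environ.setdefault("MASTER_ADDR", "localhost")
os.environ.setdefault("MASTER_PORT", "29500")
rpc.init_rpc("worker0", rank=0, world_size=1)

# Parameters wrapped in RRefs, owned locally here for simplicity.
rref1 = rpc.RRef(torch.rand((3, 3), requires_grad=True))
rref2 = rpc.RRef(torch.rand((3, 3), requires_grad=True))

with dist_autograd.context() as context_id:
    loss = rref1.to_here() + rref2.to_here()
    dist_autograd.backward(context_id, [loss.sum()])

    # optim.SGD is not a TorchScript functional optimizer here, so this
    # construction takes the else branch above and logs the new warning
    # before falling back to _new_local_optimizer.
    dist_optim = DistributedOptimizer(optim.SGD, [rref1, rref2], lr=0.05)
    dist_optim.step(context_id)

rpc.shutdown()

One aside on the added call itself: Logger.warn is a deprecated alias of Logger.warning in the Python standard library, so logger.warning(...) would be the idiomatic spelling here.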
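The else branch matters because the TorchScript path compiles a functional optimizer whose step can run without holding the Python GIL, which is exactly the multithreaded-CPU bottleneck the warning describes. A heavily simplified, hypothetical sketch of that idea follows; the class name and update rule are stand-ins, not the actual _FunctionalOptimizer API:

from typing import List

import torch


@torch.jit.script
class _FunctionalSGDSketch(object):
    # Hypothetical stand-in for a TorchScript-compiled functional
    # optimizer; not the actual torch.distributed.optim implementation.
    def __init__(self, lr: float):
        self.lr = lr

    def step(self, params: List[torch.Tensor], grads: List[torch.Tensor]):
        # A plain SGD update. Because this method executes as TorchScript,
        # it does not hold the Python GIL while updating parameters.
        with torch.no_grad():
            for i in range(len(params)):
                params[i].sub_(grads[i] * self.lr)

The XXX comment in the second hunk refers to the real version of this pattern: _ScriptModuleOptimizer explicitly compiles the FunctionalOptimizer class into TorchScript.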