mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-06 12:20:52 +01:00
Avoid Host & Device Sync In LR Scheduler (#133663)
Fixes #133662. Pull Request resolved: https://github.com/pytorch/pytorch/pull/133663 Approved by: https://github.com/janeyx99, https://github.com/eqy Co-authored-by: Jane (Yuan) Xu <31798555+janeyx99@users.noreply.github.com>
This commit is contained in:
parent
e847b6bb9b
commit
de06345e9b
|
|
@@ -247,8 +247,7 @@ class LRScheduler:
|
|||
for i, data in enumerate(zip(self.optimizer.param_groups, values)):
|
||||
param_group, lr = data
|
||||
if isinstance(param_group["lr"], Tensor):
|
||||
lr_val = lr.item() if isinstance(lr, Tensor) else lr # type: ignore[attr-defined]
|
||||
param_group["lr"].fill_(lr_val)
|
||||
param_group["lr"].fill_(lr)
|
||||
else:
|
||||
param_group["lr"] = lr
|
||||
|
||||
|
|
|
|||
Loading…
Reference in New Issue
Block a user