Mirror of https://github.com/zebrajr/pytorch.git, synced 2025-12-07 12:21:27 +01:00
Fixes https://github.com/pytorch/pytorch/issues/114089

Set lr to a default of 1e-3 in SGD to make the input signatures of the optimizers more consistent. @janeyx99 This is intended to replace the redacted PR #114434.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/114467
Approved by: https://github.com/janeyx99
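A minimal usage sketch of what the new default means for callers, assuming the 1e-3 default lands in torch.optim.SGD as described above (the model below is a placeholder, not part of the PR):

from torch import nn, optim

model = nn.Linear(4, 2)

# Before this change, SGD required an explicit learning rate:
opt_explicit = optim.SGD(model.parameters(), lr=1e-3)

# With the default in place, SGD can be constructed like Adam and the other
# optimizers, without naming lr (it is assumed to fall back to 1e-3):
opt_default = optim.SGD(model.parameters())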
13 lines · 294 B · Python
from .optimizer import Optimizer, ParamsT

class SGD(Optimizer):
    def __init__(
        self,
        params: ParamsT,
        lr: float = ...,
        momentum: float = ...,
        dampening: float = ...,
        weight_decay: float = ...,
        nesterov: bool = ...,
    ) -> None: ...
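In a .pyi stub, "= ..." only records that a parameter has a default; the actual values live in torch/optim/sgd.py. A hedged sketch of the runtime signature implied by the stub, using the 1e-3 value from the commit message and the long-documented SGD defaults for the remaining parameters (an illustration, not the verbatim sgd.py source):

class SGD:  # illustrative skeleton only
    def __init__(
        self,
        params,
        lr: float = 1e-3,           # default introduced by this PR
        momentum: float = 0.0,      # documented SGD default
        dampening: float = 0.0,     # documented SGD default
        weight_decay: float = 0.0,  # documented SGD default
        nesterov: bool = False,     # documented SGD default
    ) -> None:
        ...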