"""
|
|
:mod:`torch.optim` is a package implementing various optimization algorithms.
|
|
Most commonly used methods are already supported, and the interface is general
|
|
enough, so that more sophisticated ones can be also easily integrated in the
|
|
future.
|
|
"""
from .adadelta import Adadelta
from .adagrad import Adagrad
from .adam import Adam
from .sparse_adam import SparseAdam
from .adamax import Adamax
from .asgd import ASGD
from .sgd import SGD
from .rprop import Rprop
from .rmsprop import RMSprop
from .optimizer import Optimizer
from .lbfgs import LBFGS
from . import lr_scheduler
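# Remove the submodule names from the package namespace so that only the
# optimizer classes themselves (plus ``lr_scheduler``, which is kept as a
# submodule) are exposed as public attributes of ``torch.optim``.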
del adadelta
del adagrad
del adam
del sparse_adam
del adamax
del asgd
del sgd
del rprop
del rmsprop
del optimizer
del lbfgs