| File | Last commit | Date |
|------|-------------|------|
| __init__.py | enable torch.optim.swa_utils.SWALR (#42574) | 2020-08-05 12:37:45 -07:00 |
| __init__.pyi | enable torch.optim.swa_utils.SWALR (#42574) | 2020-08-05 12:37:45 -07:00 |
| adadelta.py | End of the .data removal in torch/optim (#34211) | 2020-03-09 06:40:39 -07:00 |
| adadelta.pyi | | |
| adagrad.py | End of the .data removal in torch/optim (#34211) | 2020-03-09 06:40:39 -07:00 |
| adagrad.pyi | | |
| adam.py | Update Adam documentation (#41679) | 2020-07-23 09:25:41 -07:00 |
| adam.pyi | | |
| adamax.py | End of the .data removal in torch/optim (#34211) | 2020-03-09 06:40:39 -07:00 |
| adamax.pyi | | |
| adamw.py | Fix exception message of torch.optim.AdamW. (#36088) | 2020-04-09 08:02:10 -07:00 |
| adamw.pyi | | |
| asgd.py | Fix HTTP links in documentation to HTTPS (#40878) | 2020-07-06 20:05:21 -07:00 |
| asgd.pyi | | |
| lbfgs.py | Avoid zero division in _cubic_interpolate (#42093) | 2020-07-28 08:32:00 -07:00 |
| lbfgs.pyi | | |
| lr_scheduler.py | Fix "non-negative integer" error messages (#42734) | 2020-08-10 19:39:37 -07:00 |
| lr_scheduler.pyi | Fix type annotation for CosineAnnealingLR (#41866) | 2020-07-23 15:56:50 -07:00 |
| optimizer.py | grad detach_ only when it has grad_fn in zero_grad call (#41283) | 2020-07-29 11:40:13 -07:00 |
| optimizer.pyi | Fix minor issue in type stub for Optimizer (#38067) | 2020-05-07 20:11:40 -07:00 |
| rmsprop.py | Fix HTTP links in documentation to HTTPS (#40878) | 2020-07-06 20:05:21 -07:00 |
| rmsprop.pyi | | |
| rprop.py | End of the .data removal in torch/optim (#34211) | 2020-03-09 06:40:39 -07:00 |
| rprop.pyi | | |
| sgd.py | End of the .data removal in torch/optim (#34211) | 2020-03-09 06:40:39 -07:00 |
| sgd.pyi | | |
| sparse_adam.py | End of the .data removal in torch/optim (#34211) | 2020-03-09 06:40:39 -07:00 |
| sparse_adam.pyi | | |
| swa_utils.py | typo fixes (#41632) | 2020-07-20 07:23:00 -07:00 |
| swa_utils.pyi | Add SWA to PyTorch mainline (#35032) | 2020-04-27 07:42:19 -07:00 |