pytorch/torch/optim/swa_utils.pyi
Gal Rotem 11c6a98bca [torch] add use_buffers to swa_utils interface (#109078)
Summary: As titled; use_buffers already exists in swa_utils.py, so this change adds it to the swa_utils.pyi type stub as well.

Differential Revision: D49155243

Pull Request resolved: https://github.com/pytorch/pytorch/pull/109078
Approved by: https://github.com/janeyx99
2023-09-19 21:30:59 +00:00
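
For reference, use_buffers is the AveragedModel flag that, when True, averages the model's buffers (for example BatchNorm running statistics) together with its parameters, which can make a separate update_bn pass at the end of training unnecessary. A minimal sketch, with net standing in for a real model:

import torch
from torch.optim.swa_utils import AveragedModel

net = torch.nn.Sequential(torch.nn.Linear(8, 8), torch.nn.BatchNorm1d(8))
# use_buffers=True averages buffers (e.g. BatchNorm running stats) alongside
# parameters, keeping the averaged model's statistics in sync.
swa_net = AveragedModel(net, use_buffers=True)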

from typing import Any, Callable, Iterable, Union

from torch import device, Tensor
from torch.nn.modules import Module

from .lr_scheduler import _LRScheduler
from .optimizer import Optimizer

class AveragedModel(Module):
    def __init__(
        self,
        model: Module,
        device: Union[int, device] = ...,
        avg_fn: Callable[[Tensor, Tensor, int], Tensor] = ...,
        use_buffers: bool = ...,
    ) -> None: ...
    def update_parameters(self, model: Module) -> None: ...

def update_bn(
    loader: Iterable[Any],
    model: Module,
    device: Union[int, device] = ...,
) -> None: ...

class SWALR(_LRScheduler):
    def __init__(
        self,
        optimizer: Optimizer,
        swa_lr: float,
        anneal_epochs: int,
        anneal_strategy: str,
        last_epoch: int = ...,
    ) -> None: ...
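
For context, a sketch of how the names stubbed above are typically combined in an SWA training loop, loosely following the torch.optim.swa_utils documentation; the model, data, and hyperparameters below are placeholders:

import torch
from torch.optim.swa_utils import AveragedModel, SWALR, update_bn

model = torch.nn.Linear(16, 2)
loader = [(torch.randn(4, 16), torch.randint(2, (4,))) for _ in range(8)]
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
loss_fn = torch.nn.CrossEntropyLoss()

swa_model = AveragedModel(model)
# SWALR anneals each param group's lr toward swa_lr over anneal_epochs steps.
swa_scheduler = SWALR(optimizer, swa_lr=0.05, anneal_epochs=5, anneal_strategy="cos")

for epoch in range(10):
    for x, y in loader:
        optimizer.zero_grad()
        loss_fn(model(x), y).backward()
        optimizer.step()
    if epoch >= 5:  # start averaging after a warm-up phase
        swa_model.update_parameters(model)
        swa_scheduler.step()

# Recompute BatchNorm running statistics for the averaged model; this is a
# no-op for the BatchNorm-free placeholder above, and redundant when the
# model was wrapped with use_buffers=True.
update_bn(loader, swa_model)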