Summary: As title, this already exists in swa_utils.py
Differential Revision: D49155243
Pull Request resolved: https://github.com/pytorch/pytorch/pull/109078
Approved by: https://github.com/janeyx99
33 lines · 830 B · Python
from typing import Any, Callable, Iterable, Union

from torch import device, Tensor
from torch.nn.modules import Module

from .lr_scheduler import _LRScheduler
from .optimizer import Optimizer

class AveragedModel(Module):
    def __init__(
        self,
        model: Module,
        device: Union[int, device] = ...,
        avg_fn: Callable[[Tensor, Tensor, int], Tensor] = ...,
        use_buffers: bool = ...,
    ) -> None: ...
    def update_parameters(self, model: Module) -> None: ...

def update_bn(
    loader: Iterable[Any],
    model: Module,
    device: Union[int, device] = ...,
) -> None: ...

class SWALR(_LRScheduler):
    def __init__(
        self,
        optimizer: Optimizer,
        swa_lr: float,
        anneal_epochs: int,
        anneal_strategy: str,
        last_epoch: int = ...,
    ) -> None: ...
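For context, a minimal usage sketch of the runtime counterparts of these stubs in torch.optim.swa_utils. The model, loader, optimizer, and loss function below are hypothetical placeholders chosen for illustration and are not part of this stub file.

# Illustrative SWA sketch only; the model, loader, optimizer, and loss_fn
# here are placeholder assumptions, not taken from the stub above.
import torch
from torch.optim.swa_utils import AveragedModel, SWALR, update_bn

model = torch.nn.Sequential(                 # placeholder model with a BatchNorm layer
    torch.nn.Linear(10, 16),
    torch.nn.BatchNorm1d(16),
    torch.nn.ReLU(),
    torch.nn.Linear(16, 2),
)
loader = [(torch.randn(8, 10), torch.randint(0, 2, (8,))) for _ in range(4)]
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
loss_fn = torch.nn.CrossEntropyLoss()

swa_model = AveragedModel(model)             # keeps a running average of the weights
swa_scheduler = SWALR(optimizer, swa_lr=0.05, anneal_epochs=10, anneal_strategy="cos")

swa_start = 5
for epoch in range(10):
    for inputs, targets in loader:
        optimizer.zero_grad()
        loss_fn(model(inputs), targets).backward()
        optimizer.step()
    if epoch >= swa_start:
        swa_model.update_parameters(model)   # fold the current weights into the average
        swa_scheduler.step()                 # anneal the learning rate toward swa_lr

update_bn(loader, swa_model)                 # recompute BatchNorm statistics for the averaged model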