Summary: Closes https://github.com/pytorch/pytorch/issues/18724
Pull Request resolved: https://github.com/pytorch/pytorch/pull/19089
Differential Revision: D16073654
Pulled By: ezyang
fbshipit-source-id: 5642179651ce45ab7c5a46cc1fcc4fd6b37fa71c
59 lines · 1.4 KiB · Python
from .module import Module
from typing import Any, Union, List
from ... import Tensor, Size
from .. import Parameter


class LocalResponseNorm(Module):
    size: int = ...
    alpha: float = ...
    beta: float = ...
    k: float = ...

    def __init__(self, size: int, alpha: float = ..., beta: float = ..., k: float = ...) -> None: ...

    def forward(self, input: Tensor) -> Tensor: ...

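# Illustrative usage (not part of the stub): the module declared above is exposed
# publicly as torch.nn.LocalResponseNorm; the defaults shown below follow the
# PyTorch documentation (alpha=1e-4, beta=0.75, k=1.0).
#
#   import torch
#   import torch.nn as nn
#
#   lrn = nn.LocalResponseNorm(size=2)        # normalize over 2 neighbouring channels
#   signal = torch.randn(4, 16, 32, 32)       # (N, C, H, W)
#   out = lrn(signal)                         # same shape as the input
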
class CrossMapLRN2d(Module):
    size: int = ...
    alpha: float = ...
    beta: float = ...
    k: float = ...

    def __init__(self, size: int, alpha: float = ..., beta: float = ..., k: float = ...) -> None: ...

    def forward(self, input: Tensor) -> Tensor: ...


_shape_t = Union[int, List[int], Size]

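# _shape_t mirrors the accepted forms for LayerNorm's normalized_shape, e.g.
# 64, [10, 64], or torch.Size([10, 64]).
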
class LayerNorm(Module):
    normalized_shape: _shape_t = ...
    eps: float = ...
    elementwise_affine: bool = ...
    weight: Parameter = ...
    bias: Parameter = ...

    def __init__(self, normalized_shape: _shape_t, eps: float = ..., elementwise_affine: bool = ...) -> None: ...

    def reset_parameters(self) -> None: ...

    def forward(self, input: Tensor) -> Tensor: ...

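# Illustrative usage (not part of the stub): torch.nn.LayerNorm normalizes over
# the trailing normalized_shape dimensions of the input; public defaults are
# eps=1e-5 and elementwise_affine=True. torch and nn as imported in the sketch
# further up.
#
#   ln = nn.LayerNorm(64)                     # normalize the last dimension of size 64
#   x = torch.randn(8, 10, 64)                # (batch, seq_len, embed_dim)
#   y = ln(x)                                 # per-position mean ~0, variance ~1
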
class GroupNorm(Module):
    num_groups: int = ...
    num_channels: int = ...
    eps: float = ...
    affine: bool = ...
    weight: Parameter = ...
    bias: Parameter = ...

    def __init__(self, num_groups: int, num_channels: int, eps: float = ..., affine: bool = ...) -> None: ...

    def reset_parameters(self) -> None: ...

    def forward(self, input: Tensor) -> Tensor: ...

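# Illustrative usage (not part of the stub): torch.nn.GroupNorm splits the channel
# dimension into num_groups groups and normalizes within each group, so
# num_channels must be divisible by num_groups; public defaults are eps=1e-5 and
# affine=True. torch and nn as imported in the sketch further up.
#
#   gn = nn.GroupNorm(num_groups=4, num_channels=16)
#   feats = torch.randn(2, 16, 8, 8)          # (N, C, H, W)
#   normed = gn(feats)                        # statistics computed per sample and group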