Mirror of https://github.com/zebrajr/pytorch.git (synced 2025-12-06 12:20:52 +01:00)
Summary:
* Deletes all weak script decorators / associated data structures / methods
* In order to keep supporting the standard library in script, this enables recursive script on any function defined in `torch.nn`
* Most changes in `torch/nn` are the result of `ag -Q "weak" torch/nn/ -l | xargs sed -i '/weak/d'`; only `rnn.py` needed manual editing to use `ignore` and `export` to continue supporting the overloaded `forward` methods
* `Sequential`/`ModuleList` no longer need to be added to constants since they are compiled on demand

This should also fix https://github.com/pytorch/pytorch/issues/22212

Pull Request resolved: https://github.com/pytorch/pytorch/pull/22212
Differential Revision: D15988346
Pulled By: driazati
fbshipit-source-id: af223e3ad0580be895377312949997a70e988e4f
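For orientation (this is not from the commit itself), a minimal sketch of the mechanism the summary describes, assuming the documented behavior of `torch.jit.script`, `@torch.jit.export`, and `@torch.jit.ignore`; the module and method names (`Block`, `scaled`, `debug`) are made up for illustration:

import torch
import torch.nn as nn

class Block(nn.Module):
    def __init__(self):
        super(Block, self).__init__()
        # With recursive scripting, Sequential no longer has to be listed in
        # __constants__; it is compiled on demand when the module is scripted.
        self.layers = nn.Sequential(nn.Linear(4, 4), nn.ReLU())

    @torch.jit.export
    def scaled(self, x, alpha):
        # export: compiled in addition to forward
        return self.layers(x) * alpha

    @torch.jit.ignore
    def debug(self, x):
        # ignore: left as plain Python and skipped by the compiler
        print(x.shape)

    def forward(self, x):
        return self.layers(x)

scripted = torch.jit.script(Block())  # nn.Sequential / nn.Linear / nn.ReLU are compiled recursively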
44 lines
1.2 KiB
Python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from .. import functional as F
from ...modules.module import Module


class ReLU(Module):
    r"""Applies quantized rectified linear unit function element-wise:

    :math:`\text{ReLU}(x) = \max(x_0, x)`, where :math:`x_0` is the zero point.

    Please see https://pytorch.org/docs/stable/nn.html#torch.nn.ReLU
    for more documentation on ReLU.

    Args:
        inplace: (Currently not supported) can optionally do the operation in-place.

    Shape:
        - Input: :math:`(N, *)` where `*` means any number of additional
          dimensions
        - Output: :math:`(N, *)`, same shape as the input

    Examples::

        >>> m = nn.quantized.ReLU()
        >>> input = torch.randn(2)
        >>> input = torch.quantize_linear(input, 1.0, 0, dtype=torch.qint32)
        >>> output = m(input)
    """
    def __init__(self, inplace=False):
        # Module.__init__ takes no arguments, so `inplace` is not forwarded
        # to it; it is only validated here.
        super(ReLU, self).__init__()
        assert not inplace, 'torch.nn.quantized.ReLU does not support inplace'

    def forward(self, input):
        return F.relu(input)

    @staticmethod
    def from_float(mod):
        return ReLU(mod.inplace)
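Not part of the file above, but as a usage sketch of `from_float`: it swaps a float `torch.nn.ReLU` for its quantized counterpart. This assumes `torch.nn.quantized` exposes the `ReLU` defined here and reuses the `torch.quantize_linear` call from the docstring's example:

import torch
import torch.nn as nn
import torch.nn.quantized as nnq

float_relu = nn.ReLU()                    # float module, e.g. taken from a float model
q_relu = nnq.ReLU.from_float(float_relu)  # quantized replacement built via from_float

x = torch.randn(2)
qx = torch.quantize_linear(x, 1.0, 0, dtype=torch.qint32)  # quantize the input as in the docstring
out = q_relu(qx)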