Summary: Fix https://github.com/pytorch/pytorch/issues/29712 (#29668); add argument checking, documentation, and support for the reflection and replication padding modes.
Pull Request resolved: https://github.com/pytorch/pytorch/pull/31784
Differential Revision: D19301974
Pulled By: ezyang
fbshipit-source-id: a0ed4815c0c22e416b16e256bba04324e376b2f8
from torch._six import container_abcs
from itertools import repeat


def _ntuple(n):
    # Return a parser that broadcasts a scalar into an n-tuple and passes
    # iterables through unchanged.
    def parse(x):
        if isinstance(x, container_abcs.Iterable):
            return x
        return tuple(repeat(x, n))
    return parse


_single = _ntuple(1)
_pair = _ntuple(2)
_triple = _ntuple(3)
_quadruple = _ntuple(4)

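# Illustrative usage sketch of the helpers above (example values): an int is
# broadcast to an n-tuple, while an iterable is returned unchanged.
#
#     _pair(3)         # -> (3, 3)
#     _pair((1, 2))    # -> (1, 2)
#     _triple(4)       # -> (4, 4, 4)

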
def _repeat_tuple(t, n):
    r"""Repeat each element of `t` `n` times.

    This can be used to translate the padding argument used by Conv and Pooling
    modules to the one used by `F.pad`.
    """
    return tuple(x for x in t for _ in range(n))

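# Illustrative sketch of `_repeat_tuple` (example values): each element is
# repeated in place, expanding a per-dimension value into a per-side one.
#
#     _repeat_tuple((1, 2), 2)   # -> (1, 1, 2, 2)
#     _repeat_tuple((0,), 4)     # -> (0, 0, 0, 0)

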
def _list_with_default(out_size, defaults):
    # Accept either a single int or a per-dimension list in which None entries
    # fall back to the corresponding trailing entries of `defaults`.
    if isinstance(out_size, int):
        return out_size
    if len(defaults) <= len(out_size):
        raise ValueError('Input dimension should be at least {}'.format(len(out_size) + 1))
    return [v if v is not None else d for v, d in zip(out_size, defaults[-len(out_size):])]
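
# Illustrative sketch of `_list_with_default` (example values): an int passes
# through unchanged, and None entries fall back to the trailing defaults.
#
#     _list_with_default(7, [4, 5, 6])           # -> 7
#     _list_with_default([None, 8], [4, 5, 6])   # -> [5, 8]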