Fix typing for setup_context in autograd (#101464)
The original annotation, `Tuple[Any]`, matches only a tuple of length 1, but it is intended to match tuples of any length. The new annotation, `Tuple[Any, ...]`, also aligns with the docstring at L320:
d5cba0618a/torch/autograd/function.py (L320)
Pull Request resolved: https://github.com/pytorch/pytorch/pull/101464
Approved by: https://github.com/soulitzer, https://github.com/kit1980
This commit is contained in:
parent: eac5f2a8e4
commit: e7681b53e3
@@ -339,7 +339,7 @@ class _SingleLevelFunction(_C._FunctionBase, FunctionCtx, _HookMixin, metaclass=
                 " autograd.Function.")

     @staticmethod
-    def setup_context(ctx: Any, inputs: Tuple[Any], output: Any) -> Any:
+    def setup_context(ctx: Any, inputs: Tuple[Any, ...], output: Any) -> Any:
         r"""There are two ways to define the forward pass of an autograd.Function.

         Either:
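
For illustration, a minimal sketch (independent of PyTorch, with hypothetical function names) of how a type checker distinguishes the two annotations:

from typing import Any, Tuple

def fixed(inputs: Tuple[Any]) -> None:
    # Tuple[Any] denotes a tuple of exactly one element.
    ...

def variadic(inputs: Tuple[Any, ...]) -> None:
    # Tuple[Any, ...] denotes a tuple of any length.
    ...

fixed((1,))        # OK
fixed((1, 2))      # mypy/pyright error: expected a tuple of length 1
variadic(())       # OK
variadic((1, 2))   # OK

Since `setup_context` receives all of `forward`'s inputs as a tuple of arbitrary arity, the variadic form is the correct annotation.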