mirror of
https://github.com/zebrajr/pytorch.git
synced 2025-12-07 00:21:07 +01:00
Changes: - #95200 1. Recognize `.py.in` and `.pyi.in` files as Python in VS Code for a better development experience. 2. Fix deep setting merge in `tools/vscode_settings.py`. - #95267 3. Use `Namedtuple` rather than `namedtuple + __annotations__` for `torch.nn.utils.rnn.PackedSequence_`: `namedtuple + __annotations__`: ```python PackedSequence_ = namedtuple('PackedSequence_', ['data', 'batch_sizes', 'sorted_indices', 'unsorted_indices']) # type annotation for PackedSequence_ to make it compatible with TorchScript PackedSequence_.__annotations__ = {'data': torch.Tensor, 'batch_sizes': torch.Tensor, 'sorted_indices': Optional[torch.Tensor], 'unsorted_indices': Optional[torch.Tensor]} ``` `Namedtuple`: Python 3.6+ ```python class PackedSequence_(NamedTuple): data: torch.Tensor batch_sizes: torch.Tensor sorted_indices: Optional[torch.Tensor] unsorted_indices: Optional[torch.Tensor] ``` - => this PR: #95268 4. Sort import statements and remove unnecessary imports in `.pyi`, `.pyi.in` files. 5. Format `.pyi`, `.pyi.in` files and remove unnecessary ellipsis `...` in type stubs. Pull Request resolved: https://github.com/pytorch/pytorch/pull/95268 Approved by: https://github.com/huydhn
16 lines
583 B
Python
16 lines
583 B
Python
from . import lr_scheduler as lr_scheduler, swa_utils as swa_utils
from .adadelta import Adadelta as Adadelta
from .adagrad import Adagrad as Adagrad
from .adam import Adam as Adam
from .adamax import Adamax as Adamax
from .adamw import AdamW as AdamW
from .asgd import ASGD as ASGD
from .lbfgs import LBFGS as LBFGS
from .nadam import NAdam as NAdam
from .optimizer import Optimizer as Optimizer
from .radam import RAdam as RAdam
from .rmsprop import RMSprop as RMSprop
from .rprop import Rprop as Rprop
from .sgd import SGD as SGD
from .sparse_adam import SparseAdam as SparseAdam