[5/N] Apply ruff UP035 rule (#164423)

Continues the code migration needed to enable the ruff `UP035` rule. Most of the changes move the `Callable` import from `typing` to `collections.abc`.
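The typical change across these files follows the before/after pattern sketched below; this is an illustrative sketch only (the `register_hook` function and its parameters are hypothetical, not taken from any file in the diff):

```python
# Flagged by UP035: typing.Callable (like the other typing aliases of
# collections.abc types) has been deprecated since Python 3.9.
#   from typing import Any, Callable, Optional
#
# Preferred form after the migration:
from collections.abc import Callable
from typing import Any, Optional


def register_hook(hook: Callable[[Any], None], retries: Optional[int] = None) -> None:
    # Callable supports the same subscription syntax when imported
    # from collections.abc, so annotations are unchanged.
    ...
```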

Pull Request resolved: https://github.com/pytorch/pytorch/pull/164423
Approved by: https://github.com/ezyang
Authored by Yuanyuan Chen on 2025-10-02 07:31:11 +00:00; committed by PyTorch MergeBot
commit a43c4c3972 (parent bcafea5c92)
113 changed files with 243 additions and 179 deletions
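
A few files in the diff (for example those already using `from __future__ import annotations`) instead move the import under an `if TYPE_CHECKING:` guard, so `Callable` is only imported for static type checking. A minimal sketch of that variant, with a hypothetical `apply_twice` function for illustration:

```python
from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by type checkers; not imported at runtime.
    from collections.abc import Callable


def apply_twice(fn: Callable[[int], int], x: int) -> int:
    # With `from __future__ import annotations`, annotations are strings at
    # runtime, so the guarded import above is sufficient.
    return fn(fn(x))
```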

View File

@ -1,6 +1,7 @@
# mypy: allow-untyped-defs
from collections.abc import Callable
from enum import Enum
from typing import Any, Callable
from typing import Any
import torch
from torch._C._profiler import (

View File

@ -1,4 +1,4 @@
from typing import Callable
from collections.abc import Callable
from torch import Tensor
from torch._dynamo.compiled_autograd import AutogradCompilerInstance

View File

@ -1,6 +1,6 @@
import enum
from typing import Any, Callable, Optional
from typing_extensions import TypeAlias
from collections.abc import Callable
from typing import Any, Optional, TypeAlias
import torch

View File

@ -1,9 +1,9 @@
# Defined in torch/csrc/monitor/python_init.cpp
import datetime
from collections.abc import Callable
from enum import Enum
from types import TracebackType
from typing import Callable
class Aggregation(Enum):
VALUE = ...

View File

@ -1,6 +1,5 @@
from enum import Enum
from typing import Literal
from typing_extensions import TypeAlias
from typing import Literal, TypeAlias
from torch._C import device, dtype, layout

View File

@ -1,10 +1,10 @@
# mypy: allow-untyped-defs
import inspect
from collections import defaultdict
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from functools import lru_cache, partial, wraps
from itertools import chain
from typing import Callable, Optional, TYPE_CHECKING, TypeVar, Union
from typing import Optional, TYPE_CHECKING, TypeVar, Union
from typing_extensions import ParamSpec

View File

@ -5,12 +5,12 @@ import itertools
import numbers
import operator
import sys
from collections.abc import Iterable
from collections.abc import Callable, Iterable
from contextlib import nullcontext
from enum import Enum
from functools import partial, reduce
from itertools import chain, product
from typing import Any, Callable, cast, Optional, Union
from typing import Any, cast, Optional, Union
import torch
import torch._meta_registrations

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-decorators
# mypy: allow-untyped-defs
import inspect
from typing import Callable, Optional
from collections.abc import Callable
from typing import Optional
import torch
import torch._decomp

View File

@ -2,7 +2,7 @@
# mypy: allow-untyped-defs
import functools
from collections import defaultdict
from typing import Callable
from collections.abc import Callable
import torch
import torch._decomp as decomp

View File

@ -1,9 +1,9 @@
# mypy: allow-untyped-defs
import itertools
import unittest.mock
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from contextlib import contextmanager
from typing import Callable, TypeVar, Union
from typing import TypeVar, Union
from typing_extensions import ParamSpec
import torch

View File

@ -16,7 +16,8 @@ from collections import OrderedDict
from contextlib import contextmanager
from functools import lru_cache
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
from collections.abc import Callable
from unittest.mock import patch
import torch

View File

@ -4,9 +4,9 @@ import logging
import operator
import typing
import warnings
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from contextlib import contextmanager
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
import torch.export._trace

View File

@ -6,9 +6,9 @@ import inspect
import logging
import math
from collections import defaultdict
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from contextlib import contextmanager
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
import torch
import torch.utils._pytree as pytree

View File

@ -2,8 +2,9 @@
import operator
import traceback
import typing
from collections.abc import Callable
from contextlib import nullcontext
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
from torch import fx

View File

@ -3,7 +3,7 @@ import math
import operator
import traceback
from functools import partial
from typing import Callable, NamedTuple
from typing import NamedTuple, TYPE_CHECKING
import sympy
@ -15,6 +15,10 @@ from torch.utils._sympy.numbers import int_oo
from torch.utils._sympy.value_ranges import ValueRanges
if TYPE_CHECKING:
from collections.abc import Callable
__all__ = ["InputDim"]

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-defs
import collections
from collections import defaultdict
from typing import Any, Callable, Optional
from collections.abc import Callable
from typing import Any, Optional
import torch
import torch.utils._pytree as pytree

View File

@ -4,7 +4,7 @@ from __future__ import annotations
import contextlib
import copy
import operator
from typing import Callable, Optional, TYPE_CHECKING
from typing import Optional, TYPE_CHECKING
import torch
@ -12,6 +12,8 @@ from ..utils import node_replace_, nodes_map
if TYPE_CHECKING:
from collections.abc import Callable
from torch._ops import HigherOrderOperator
from torch.export.graph_signature import ExportGraphSignature

View File

@ -14,11 +14,11 @@ import operator
import traceback
import typing
from collections import namedtuple, OrderedDict
from collections.abc import Iterable, Iterator, Sequence
from collections.abc import Callable, Iterable, Iterator, Sequence
from contextlib import contextmanager
from dataclasses import dataclass, field
from enum import Enum
from typing import Annotated, Any, Callable, cast, final, Optional, Union
from typing import Annotated, Any, cast, final, Optional, Union
import sympy

View File

@ -9,10 +9,10 @@ import math
import operator
import re
from collections import defaultdict
from collections.abc import Iterable
from collections.abc import Callable, Iterable
from contextlib import contextmanager
from inspect import ismethod, Parameter
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
import torch
from torch._guards import detect_fake_mode

View File

@ -1,6 +1,6 @@
import operator
from collections import deque
from typing import Callable
from collections.abc import Callable
import networkx as nx

View File

@ -16,9 +16,10 @@ import shutil
import time
import traceback
from abc import ABC, abstractmethod
from collections.abc import Callable
from copy import copy
from dataclasses import dataclass
from typing import Any, Callable, Generic, Optional, TYPE_CHECKING, TypeVar, Union
from typing import Any, Generic, Optional, TYPE_CHECKING, TypeVar, Union
from typing_extensions import override
import torch

View File

@ -11,7 +11,8 @@ a functionalized version of the graph under compilation.
import collections
import contextlib
import logging
from typing import Callable, Optional
from collections.abc import Callable
from typing import Optional
import torch
import torch.utils._pytree as pytree

View File

@ -12,9 +12,10 @@ It does so by:
"""
import warnings
from collections.abc import Callable
from contextlib import AbstractContextManager, contextmanager, ExitStack, nullcontext
from dataclasses import dataclass
from typing import Any, Callable, cast, Optional, TypeVar, Union
from typing import Any, cast, Optional, TypeVar, Union
from unittest.mock import patch
import torch

View File

@ -17,8 +17,9 @@ import operator
import time
import traceback
from collections import defaultdict
from collections.abc import Callable
from contextlib import nullcontext
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:

View File

@ -14,10 +14,11 @@ import copy
import functools
import itertools
import pprint
from collections.abc import Callable
from contextlib import AbstractContextManager, nullcontext
from dataclasses import dataclass, field
from functools import wraps
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
if TYPE_CHECKING:

View File

@ -11,16 +11,7 @@ import functools
import itertools
from dataclasses import dataclass, field
from enum import Enum
from typing import (
Any,
Callable,
NewType,
Optional,
Protocol,
TYPE_CHECKING,
TypeVar,
Union,
)
from typing import Any, NewType, Optional, Protocol, TYPE_CHECKING, TypeVar, Union
import torch
import torch.utils._pytree as pytree
@ -37,7 +28,7 @@ from .utils import strict_zip
if TYPE_CHECKING:
import contextlib
from collections.abc import Iterable, Sequence
from collections.abc import Callable, Iterable, Sequence
from torch._guards import Source
from torch._inductor.output_code import OutputCode

View File

@ -7,9 +7,8 @@ and this includes tensor subclasses that implement __torch_dispatch__.
import collections
import typing
from collections.abc import Iterable
from typing import Any, Callable, Optional, TypeVar, Union
from typing_extensions import TypeGuard
from collections.abc import Callable, Iterable
from typing import Any, Optional, TypeGuard, TypeVar, Union
import torch
import torch.utils._pytree as pytree

View File

@ -6,9 +6,10 @@ Contains various utils for AOTAutograd, including those for handling collections
import dataclasses
import operator
import warnings
from collections.abc import Callable
from contextlib import nullcontext
from functools import wraps
from typing import Any, Callable, Optional, TypeVar, Union
from typing import Any, Optional, TypeVar, Union
from typing_extensions import ParamSpec
import torch

View File

@ -2,9 +2,10 @@
import contextlib
import itertools
from collections.abc import Callable
from contextlib import nullcontext
from functools import wraps
from typing import Any, Callable, Optional
from typing import Any, Optional
from unittest.mock import patch
import torch

View File

@ -2,7 +2,7 @@
import operator
from typing import Callable
from collections.abc import Callable
import sympy

View File

@ -5,9 +5,10 @@ import logging
import os
import pickle
import random
from collections.abc import Callable
from contextlib import contextmanager
from functools import partial
from typing import Callable, Union
from typing import Union
import sympy

View File

@ -10,7 +10,8 @@ documentation.
import textwrap
import warnings
from typing import Any, Callable, Optional, Union
from collections.abc import Callable
from typing import Any, Optional, Union
import torch._functorch.apis as apis
import torch._functorch.eager_transforms as _impl

View File

@ -7,8 +7,9 @@
# LICENSE file in the root directory of this source tree.
import contextlib
from collections.abc import Callable
from functools import partial, wraps
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
import torch.autograd.forward_ad as fwAD

View File

@ -4,9 +4,9 @@ import copy
import math
import os
import sys
from collections.abc import Callable
from dataclasses import dataclass
from functools import partial, wraps
from typing import Callable
import torch
import torch.fx as fx

View File

@ -6,8 +6,8 @@
# LICENSE file in the root directory of this source tree.
import copy
from collections.abc import Iterable, Sequence
from typing import Any, Callable, NoReturn, Union
from collections.abc import Callable, Iterable, Sequence
from typing import Any, NoReturn, Union
import torch
import torch.nn as nn

View File

@ -11,8 +11,9 @@ import os
import os.path
import re
from collections import defaultdict
from collections.abc import Callable
from dataclasses import dataclass, replace
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
import torch
import torch._inductor.inductor_prims

View File

@ -9,8 +9,9 @@
import contextlib
import functools
import itertools
from collections.abc import Callable
from functools import partial
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
from torch import Tensor

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-defs
import functools
import itertools
from typing import Any, Callable
from collections.abc import Callable
from typing import Any
import torch
import torch._prims_common as utils

View File

@ -1,9 +1,9 @@
# mypy: allow-untyped-defs
import warnings
from abc import ABC, abstractmethod
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from dataclasses import dataclass
from typing import Any, Callable, get_args, Optional, Union
from typing import Any, get_args, Optional, Union
import torch
import torch._library.utils as library_utils

View File

@ -4,7 +4,8 @@ import contextlib
import functools
import logging
import warnings
from typing import Any, Callable, Optional, Union
from collections.abc import Callable
from typing import Any, Optional, Union
import torch
import torch.utils._pytree as pytree

View File

@ -1,6 +1,6 @@
# mypy: allow-untyped-defs
from collections.abc import Callable
from dataclasses import dataclass
from typing import Callable
import torch
import torch.fx.node

View File

@ -1,6 +1,6 @@
import math
from collections.abc import Sequence
from typing import Any, Callable, Optional, Union
from collections.abc import Callable, Sequence
from typing import Any, Optional, Union
import torch
import torch.utils._pytree as pytree

View File

@ -1,6 +1,7 @@
# mypy: allow-untyped-decorators
# mypy: allow-untyped-defs
from typing import Any, Callable
from collections.abc import Callable
from typing import Any
from torch._higher_order_ops.base_hop import BaseHOP, FunctionWithNoFreeVars

View File

@ -3,7 +3,7 @@
import contextlib
from contextlib import nullcontext
from dataclasses import dataclass, field
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, TYPE_CHECKING, Union
import torch
import torch.utils._pytree as pytree
@ -36,6 +36,10 @@ from torch.fx.graph_module import GraphModule
from torch.fx.passes.runtime_assert import insert_deferred_runtime_asserts
if TYPE_CHECKING:
from collections.abc import Callable
invoke_subgraph_counter = 0

View File

@ -6,9 +6,9 @@
# NOTE: this file may be removed once we move to a dynamo frontend
import functools
from collections.abc import Generator
from collections.abc import Callable, Generator
from contextlib import contextmanager
from typing import Any, Callable, Optional
from typing import Any, Optional
import torch
import torch.utils._pytree as pytree

View File

@ -1,6 +1,7 @@
# mypy: allow-untyped-defs
import functools
from typing import Callable, Union
from collections.abc import Callable
from typing import Union
from typing_extensions import TypeVarTuple
import torch

View File

@ -1,5 +1,6 @@
import logging
from typing import Any, Callable, Union
from collections.abc import Callable
from typing import Any, Union
import torch
from torch._higher_order_ops.utils import create_bw_fn, materialize_as_graph

View File

@ -3,7 +3,8 @@ import enum
import functools
import itertools
import logging
from typing import Any, Callable
from collections.abc import Callable
from typing import Any
import torch
import torch._prims_common as utils

View File

@ -1,5 +1,5 @@
# mypy: allow-untyped-defs
from typing import Any, Callable, Union
from typing import Any, TYPE_CHECKING, Union
import torch
import torch._subclasses.functional_tensor
@ -20,6 +20,10 @@ from torch.fx.experimental.proxy_tensor import (
from torch.utils._python_dispatch import _get_current_dispatch_mode
if TYPE_CHECKING:
from collections.abc import Callable
@exposed_in("torch")
def strict_mode(callable, operands):
from torch._dynamo.backends.debugging import (

View File

@ -8,8 +8,8 @@ import logging
import operator
import threading
from collections import defaultdict
from collections.abc import Sequence
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from collections.abc import Callable, Sequence
from typing import Any, Optional, TYPE_CHECKING, Union
from typing_extensions import Never
import sympy

View File

@ -1,10 +1,10 @@
# mypy: allow-untyped-defs
import contextlib
import functools
from collections.abc import Iterable, Sequence
from collections.abc import Callable, Iterable, Sequence
from contextlib import AbstractContextManager, contextmanager, ExitStack, nullcontext
from dataclasses import dataclass
from typing import Any, Callable, Optional, overload, TypeVar, Union
from typing import Any, Optional, overload, TypeVar, Union
import torch
import torch.fx.traceback as fx_traceback

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-defs
import contextlib
import functools
from typing import Any, Callable, Union
from collections.abc import Callable
from typing import Any, Union
import torch
import torch.utils._pytree as pytree

View File

@ -3,7 +3,8 @@ import copy
import dataclasses
import itertools
import os
from typing import Any, Callable
from collections.abc import Callable
from typing import Any
import torch
import torch._lazy as lazy

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-defs
import dataclasses
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any, Callable, Optional, Protocol
from typing import Any, Optional, Protocol
from torch import _C, _ops, autograd, Tensor
from torch.utils import _pytree

View File

@ -3,9 +3,9 @@ import collections
import inspect
import logging
import weakref
from collections.abc import Iterable, Sequence
from collections.abc import Callable, Iterable, Sequence
from contextlib import contextmanager
from typing import Any, Callable, Optional, overload, Union
from typing import Any, Optional, overload, Union
import torch
from torch import _C, _ops, Tensor

View File

@ -1,7 +1,7 @@
# mypy: allow-untyped-defs
import contextlib
import functools
from typing import Callable
from collections.abc import Callable
from typing_extensions import deprecated
import torch

View File

@ -2,9 +2,9 @@ import contextlib
import io
import logging
import os
from collections.abc import Generator
from collections.abc import Callable, Generator
from dataclasses import dataclass
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
from torch._library.custom_ops import _maybe_get_opdef

View File

@ -1,4 +1,5 @@
from typing import Any, Callable, Optional
from collections.abc import Callable
from typing import Any, Optional
from .fake_impl import FakeImplHolder
from .utils import RegistrationHandle

View File

@ -2,8 +2,8 @@ import ast
import contextlib
import inspect
import threading
from collections.abc import Generator, Iterable
from typing import Any, Callable, Optional, Union
from collections.abc import Callable, Generator, Iterable
from typing import Any, Optional, Union
from torch.utils._exposed_in import exposed_in

View File

@ -2,8 +2,8 @@
import dataclasses
import inspect
import sys
from collections.abc import Iterable, Iterator
from typing import Any, Callable, Literal, Optional, overload, Union
from collections.abc import Callable, Iterable, Iterator
from typing import Any, Literal, Optional, overload, Union
import torch
import torch.utils._pytree as pytree

View File

@ -15,8 +15,9 @@ import tempfile
import time
import warnings
from collections import defaultdict
from collections.abc import Callable
from dataclasses import dataclass, field
from typing import Any, Callable, Generic, Optional, Union
from typing import Any, Generic, Optional, Union
from typing_extensions import ParamSpec
from weakref import WeakSet

View File

@ -1,5 +1,5 @@
from typing import Callable, Union
from typing_extensions import TypeAlias
from collections.abc import Callable
from typing import TypeAlias, Union
try:

View File

@ -1,9 +1,9 @@
# mypy: allow-untyped-defs
import operator
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from enum import Enum
from functools import partial, reduce
from typing import Callable, Optional, Union
from typing import Optional, Union
import torch
import torch._prims_common as utils

View File

@ -2,12 +2,12 @@ from __future__ import annotations
import functools
from contextlib import nullcontext
from typing import Any, Callable, TYPE_CHECKING, TypeVar
from typing import Any, TYPE_CHECKING, TypeVar
from typing_extensions import ParamSpec
if TYPE_CHECKING:
from collections.abc import Sequence
from collections.abc import Callable, Sequence
import torch
import torch._decomp

View File

@ -1,4 +1,5 @@
from typing import Any, Callable, Optional, TypeVar
from collections.abc import Callable
from typing import Any, Optional, TypeVar
from typing_extensions import ParamSpec, TypeVarTuple, Unpack
from torch._prims.context import TorchRefsMode

View File

@ -4,22 +4,23 @@ from __future__ import annotations
import operator
import typing
import warnings
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from contextlib import AbstractContextManager, nullcontext
from enum import Enum
from functools import reduce
from typing import (
Any,
Callable,
cast,
NamedTuple,
Optional,
overload,
TYPE_CHECKING,
TypeAlias,
TypeGuard,
TypeVar,
Union,
)
from typing_extensions import deprecated, TypeAlias, TypeGuard
from typing_extensions import deprecated
import torch
from torch import sym_float, sym_int, sym_max

View File

@ -2,10 +2,10 @@
import inspect
import types
import warnings
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from functools import wraps
from types import GenericAlias
from typing import Callable, NamedTuple, Optional, overload, TypeVar, Union
from typing import NamedTuple, Optional, overload, TypeVar, Union
from typing_extensions import ParamSpec
import torch

View File

@ -7,10 +7,10 @@ import itertools
import math
import operator
import warnings
from collections.abc import Iterable, Sequence
from collections.abc import Callable, Iterable, Sequence
from enum import Enum
from functools import partial, reduce, singledispatch, wraps
from typing import Any, Callable, cast, Optional, overload, Union
from typing import Any, cast, Optional, overload, Union
import torch
import torch._prims as prims

View File

@ -1,9 +1,10 @@
# mypy: allow-untyped-decorators
# mypy: allow-untyped-defs
import math
from collections.abc import Callable
from functools import wraps
from typing import Callable, Optional, TypeVar, Union
from typing_extensions import Concatenate, ParamSpec
from typing import Concatenate, Optional, TypeVar, Union
from typing_extensions import ParamSpec
import torch
import torch._prims as prims

View File

@ -6,10 +6,10 @@ import os
import re
import subprocess
import time
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from threading import Lock
from timeit import default_timer as timer
from typing import Any, Callable, Optional, TypeVar
from typing import Any, Optional, TypeVar
from typing_extensions import ParamSpec

View File

@ -5,8 +5,9 @@ import itertools
import math
import operator
import sys
from collections.abc import Callable
from functools import reduce
from typing import Callable, Optional, Union
from typing import Optional, Union
import torch
import torch._custom_op

View File

@ -15,8 +15,17 @@ import typing
import weakref
from collections import defaultdict
from dataclasses import dataclass
from typing import Any, Callable, cast, Literal, Optional, TYPE_CHECKING, TypeVar, Union
from typing_extensions import Self, TypeGuard
from typing import (
Any,
cast,
Literal,
Optional,
TYPE_CHECKING,
TypeGuard,
TypeVar,
Union,
)
from typing_extensions import Self
from weakref import ReferenceType
import torch
@ -53,7 +62,7 @@ from ._fake_tensor_utils import _CacheKeyState, _PySymInputStub, _SymIntOutputSt
if TYPE_CHECKING:
from collections.abc import Generator, Iterable, Mapping, Sequence
from collections.abc import Callable, Generator, Iterable, Mapping, Sequence
from types import TracebackType
from torch._guards import Source

View File

@ -2,7 +2,8 @@
import functools
import warnings
from typing import Any, Callable, Union
from collections.abc import Callable
from typing import Any, Union
import torch
import torch.utils._pytree as pytree

View File

@ -3,8 +3,9 @@ import contextlib
import warnings
import weakref
from abc import ABC, abstractmethod
from collections.abc import Callable
from contextlib import AbstractContextManager
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
import torch.utils._pytree as pytree

View File

@ -11,17 +11,17 @@ from contextlib import AbstractContextManager, contextmanager
from dataclasses import dataclass
from typing import (
Any,
Callable,
ClassVar,
Generic,
NewType,
Optional,
Protocol,
TYPE_CHECKING,
TypeGuard,
TypeVar,
Union,
)
from typing_extensions import override, TypedDict, TypeGuard, TypeIs, Unpack
from typing_extensions import override, TypedDict, TypeIs, Unpack
import torch
from torch._C._autograd import CreationMeta
@ -46,7 +46,7 @@ from torch.utils.weak import WeakIdKeyDictionary
if TYPE_CHECKING:
from collections.abc import Generator
from collections.abc import Callable, Generator
from torch._C._functorch import CInterpreter
from torch._guards import Source

View File

@ -1,6 +1,7 @@
# mypy: allow-untyped-defs
import io
from typing import Any, Callable, Optional, TYPE_CHECKING, TypeVar, Union
from collections.abc import Callable
from typing import Any, Optional, TYPE_CHECKING, TypeVar, Union
from typing_extensions import ParamSpec
import torch

View File

@ -1,6 +1,7 @@
# mypy: allow-untyped-defs
from typing import Any, Callable, Optional
from collections.abc import Callable
from typing import Any, Optional
r"""

View File

@ -1,8 +1,8 @@
# mypy: allow-untyped-defs
import math
import warnings
from collections.abc import Callable
from functools import total_ordering
from typing import Callable
import torch
from torch import inf, Tensor

View File

@ -1,6 +1,6 @@
from collections.abc import Sequence
from collections.abc import Callable, Sequence
from functools import update_wrapper
from typing import Any, Callable, Final, Generic, Optional, overload, TypeVar, Union
from typing import Any, Final, Generic, Optional, overload, TypeVar, Union
import torch
import torch.nn.functional as F

View File

@ -2,8 +2,8 @@ import logging
import os
import warnings
import zipfile
from collections.abc import Mapping
from typing import Any, Callable, Optional, Union
from collections.abc import Callable, Mapping
from typing import Any, Optional, Union
from typing_extensions import deprecated
import torch

View File

@ -5,10 +5,10 @@ import os
import re
import tempfile
import time
from collections.abc import Mapping
from collections.abc import Callable, Mapping
from dataclasses import dataclass
from enum import IntEnum
from typing import Any, Callable, Optional, Union
from typing import Any, Optional, Union
import torch
import torch._logging._internal

View File

@ -8,9 +8,9 @@ import re
import sys
import time
import warnings
from collections.abc import Callable
from contextlib import contextmanager, nullcontext
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing_extensions import TypeAlias
from typing import Any, Optional, TYPE_CHECKING, TypeAlias, Union
if TYPE_CHECKING:

View File

@ -1,4 +1,5 @@
from typing import Any, Callable, Optional
from collections.abc import Callable
from typing import Any, Optional
from torch.utils._pytree import Context, TreeSpec

View File

@ -1,5 +1,5 @@
# mypy: allow-untyped-defs
from typing import Callable
from collections.abc import Callable
import torch
from torch._export.utils import (

View File

@ -4,8 +4,9 @@ import inspect
import logging
import sys
from collections import defaultdict
from collections.abc import Callable
from enum import auto, Enum
from typing import Any, Callable, Optional, TYPE_CHECKING, Union
from typing import Any, Optional, TYPE_CHECKING, Union
import torch
from torch.utils._pytree import (

View File

@ -8,9 +8,9 @@ import operator
import types
import warnings
from collections import defaultdict
from collections.abc import Iterator
from collections.abc import Callable, Iterator
from contextlib import contextmanager
from typing import Any, Callable, final, NamedTuple, Optional, TYPE_CHECKING, Union
from typing import Any, final, NamedTuple, Optional, TYPE_CHECKING, Union
from torch._guards import tracing, TracingContext
from torch._higher_order_ops.utils import autograd_not_implemented

View File

@ -6,8 +6,7 @@ import os
import tempfile
import zipfile
from dataclasses import dataclass
from typing import Any, IO, Optional, TYPE_CHECKING, Union
from typing_extensions import TypeAlias
from typing import Any, IO, Optional, TYPE_CHECKING, TypeAlias, Union
import torch
import torch.utils._pytree as pytree

View File

@ -5,11 +5,12 @@ import logging
import operator
import re
from collections import defaultdict
from collections.abc import Callable
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass
from enum import Enum
from typing import Any, Callable, cast, Optional, Union
from typing import Any, cast, Optional, Union
import torch
import torch.fx._pytree as fx_pytree

View File

@ -1,11 +1,15 @@
# mypy: allow-untyped-defs
from __future__ import annotations
from typing import Callable, cast, Generic, Optional, TypeVar, Union
from typing import cast, Generic, Optional, TYPE_CHECKING, TypeVar, Union
import torch
if TYPE_CHECKING:
from collections.abc import Callable
__all__ = ["Future", "collect_all", "wait_all"]

View File

@ -4,8 +4,8 @@ import ast
import dataclasses
import inspect
import os
from collections.abc import Callable
from functools import partial
from typing import Callable
from torch._jit_internal import FAKE_FILENAME_PREFIX, is_optional
from torch._sources import ParsedDef, SourceContext

View File

@ -6,7 +6,8 @@ from torch import Tensor
aten = torch.ops.aten
import inspect
import warnings
from typing import Callable, Optional, TypeVar
from collections.abc import Callable
from typing import Optional, TypeVar
from typing_extensions import ParamSpec
from torch.types import Number

View File

@ -14,7 +14,8 @@ import functools
import inspect
import pickle
import warnings
from typing import Any, Callable, Union
from collections.abc import Callable
from typing import Any, Union
from typing_extensions import deprecated
import torch

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-defs
# mypy: disable-error-code="type-arg"
from typing import Any, Callable, NamedTuple, overload, TypeVar
from typing_extensions import Never, TypeAlias
from collections.abc import Callable
from typing import Any, NamedTuple, overload, TypeAlias, TypeVar
from typing_extensions import Never
from _typeshed import Incomplete

View File

@ -16,8 +16,9 @@ import inspect
import os
import re
import warnings
from collections.abc import Callable
from enum import Enum
from typing import Any, Callable, Optional, TypeVar
from typing import Any, Optional, TypeVar
from typing_extensions import ParamSpec
import torch

View File

@ -1,7 +1,8 @@
# mypy: allow-untyped-defs
import warnings
from typing import Any, Callable, Optional, TYPE_CHECKING, TypeVar, Union
from typing_extensions import ParamSpec, TypeAlias
from collections.abc import Callable
from typing import Any, Optional, TYPE_CHECKING, TypeAlias, TypeVar, Union
from typing_extensions import ParamSpec
import torch
from torch import sym_float, Tensor

View File

@ -1,8 +1,9 @@
# mypy: allow-untyped-defs
# Copyright (c) Meta Platforms, Inc. and affiliates
from collections.abc import Callable
from functools import partial
from typing import Any, Callable, TYPE_CHECKING
from typing import Any, TYPE_CHECKING
import torch

View File

@ -5,7 +5,8 @@ This package enables an interface for accessing MTIA backend in python
import threading
import warnings
from typing import Any, Callable, Optional, Union
from collections.abc import Callable
from typing import Any, Optional, Union
import torch
from torch import device as _device, Tensor

View File

@ -1,12 +1,12 @@
import os
import traceback
from collections import defaultdict
from collections.abc import Iterable, Iterator
from collections.abc import Callable, Iterable, Iterator
from contextlib import contextmanager
from dataclasses import asdict, dataclass
from enum import Enum
from logging import getLogger
from typing import Callable, Optional, TypeVar
from typing import Optional, TypeVar
import torch
from torch._utils_internal import signpost_event

View File

@ -7,7 +7,7 @@ import dataclasses
import difflib
import io
import sys
from typing import Any, Callable, TYPE_CHECKING
from typing import Any, TYPE_CHECKING
import torch
import torch.fx
@ -15,6 +15,8 @@ from torch._subclasses.fake_tensor import unset_fake_temporarily
if TYPE_CHECKING:
from collections.abc import Callable
from torch._subclasses import fake_tensor

View File

@ -6,7 +6,7 @@ import abc
import dataclasses
import inspect
import logging
from typing import Any, Callable, TYPE_CHECKING
from typing import Any, TYPE_CHECKING
import torch
import torch._dispatch.python
@ -26,7 +26,7 @@ from torch.utils import _python_dispatch, _pytree
if TYPE_CHECKING:
from collections.abc import Mapping, Sequence
from collections.abc import Callable, Mapping, Sequence
from types import ModuleType
from torch._subclasses import fake_tensor

Some files were not shown because too many files have changed in this diff.