[BE][Easy][18/19] enforce style for empty lines in import segments in torch/d*/ (#129770)

See https://github.com/pytorch/pytorch/pull/129751#issue-2380881501. Most changes are auto-generated by the linter.

You can review these PRs, with whitespace-only changes hidden, via:

```bash
git diff --ignore-all-space --ignore-blank-lines HEAD~1
```
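
For context, the style being enforced looks roughly like the sketch below: imports form one contiguous segment with no stray blank lines inside a group, a single blank line between import groups, and two blank lines between the last import and the first top-level statement. This is a minimal sketch inferred from the hunks in this commit; the module and names (`apply_twice`, the logger) are hypothetical, and the authoritative rule is the repo's ufmt/usort configuration.

```python
import logging
from typing import Any, Callable  # stdlib imports: one contiguous segment

import torch  # a single blank line separates stdlib from third-party imports


# Two blank lines separate the import section from module-level code.
__all__ = ["apply_twice"]

logger = logging.getLogger(__name__)


def apply_twice(fn: Callable[[Any], Any], x: Any) -> Any:
    """Apply ``fn`` twice; a stand-in body so the sketch runs as a module."""
    logger.debug("applying %s twice", fn)
    return fn(fn(x))


if __name__ == "__main__":
    print(apply_twice(torch.sigmoid, torch.zeros(2)))
```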

Pull Request resolved: https://github.com/pytorch/pytorch/pull/129770
Approved by: https://github.com/wconstab
Author: Xuehai Pan
Date: 2024-07-31 20:34:38 +08:00
Committed by: PyTorch MergeBot
Parent: bc7ed1fbdc
Commit: b25ef91bf1
58 changed files with 56 additions and 18 deletions

```diff
@@ -54,7 +54,6 @@ ISORT_SKIPLIST = re.compile(
     # torch/[a-c]*/**
     "torch/[a-c]*/**",
     # torch/d*/**
-    "torch/d*/**",
     # torch/[e-n]*/**
     "torch/[e-n]*/**",
     # torch/[o-z]*/**
```
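
This first hunk is what turns enforcement on: paths matching `ISORT_SKIPLIST` are exempt from the import-style check, so deleting the `"torch/d*/**"` entry brings `torch/d*/` under the rule. Below is a minimal sketch of that gating mechanism, assuming fnmatch-style globs; `SKIPPED_GLOBS` and `should_lint` are hypothetical names for illustration, not PyTorch's actual linter adapter.

```python
import re
from fnmatch import translate

# Glob skiplist, sketched after the hunk above. Removing "torch/d*/**"
# (commented out here) is what enables enforcement for torch/d*/.
SKIPPED_GLOBS = (
    "torch/[a-c]*/**",
    # "torch/d*/**",  # removed by this PR: torch/d*/ is now checked
    "torch/[e-n]*/**",
)
ISORT_SKIPLIST = re.compile("|".join(translate(g) for g in SKIPPED_GLOBS))


def should_lint(path: str) -> bool:
    # Lint every file that is not matched by the skiplist.
    return ISORT_SKIPLIST.match(path) is None


print(should_lint("torch/distributed/run.py"))  # True: no longer skipped
print(should_lint("torch/ao/quantization/fx.py"))  # False: still skipped
```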

```diff
@@ -2,7 +2,6 @@
 # mypy: allow-untyped-defs
 import functools
 import logging
-
 from typing import (
     Any,
     Callable,
```

```diff
@@ -23,6 +23,7 @@ import torch.distributed as dist
 import torch.nn.functional as F
 from torch.distributed._functional_collectives import AsyncCollectiveTensor
 
+
 if dist.is_available() or TYPE_CHECKING:
     from torch.distributed import distributed_c10d
     from torch.distributed._shard.sharded_tensor import ShardedTensor
```

```diff
@@ -1,7 +1,6 @@
 # mypy: allow-untyped-decorators
 import socket
 import uuid
-
 from contextlib import contextmanager
 from datetime import timedelta
 from functools import partial
@@ -12,6 +11,7 @@ import torch.distributed._functional_collectives as funcol
 import torch.distributed.distributed_c10d as c10d
 from torch._C._distributed_c10d import _SymmetricMemory, Work as _Work
 
+
 _group_name_to_store: Dict[str, c10d.Store] = {}
```

```diff
@@ -20,6 +20,7 @@ from torch.distributed.checkpoint.planner import (
     WriteItemType,
 )
 
+
 aten = (
     torch.ops.aten
 )  # pyre-ignore[5]: Globally accessible variable `aten` has no type specified.
```

```diff
@@ -3,18 +3,15 @@ import copy
 import json
 import re
 import weakref
-
 from collections import defaultdict
 from typing import Any, Dict
 
 import torch
 import torch.nn
+from torch._guards import detect_fake_mode
 from torch.autograd.graph import register_multi_grad_hook
 from torch.distributed._tensor.api import DTensor
 from torch.distributed._tools.mod_tracker import ModTracker
-
 from torch.nn.modules.module import (
     register_module_forward_hook,
     register_module_forward_pre_hook,
@@ -23,10 +20,9 @@ from torch.nn.modules.module import (
 from torch.utils._python_dispatch import TorchDispatchMode
 from torch.utils._pytree import tree_flatten
 
 funcol_native = torch.ops._c10d_functional
 funcol_py = torch.ops.c10d_functional
-from torch._guards import detect_fake_mode
 funcol_autograd = torch.ops._c10d_functional_autograd
 c10d_ops = torch.ops.c10d
```

```diff
@@ -1,14 +1,11 @@
 import os
 from typing import Callable, Dict, Union
 
 import torch
 import torch.nn as nn
-
 from torch.distributed._tensor import DeviceMesh
 from torch.distributed._tensor.debug import CommDebugMode
-
 from torch.distributed._tensor.examples.comm_mode_features_example_argparser import args
-
 from torch.distributed.tensor.parallel import (
     ColwiseParallel,
     parallelize_module,
@@ -21,7 +18,6 @@ from torch.testing._internal.distributed._tensor.common_dtensor import (
     NUM_DEVICES,
     Transformer,
 )
-
 from torch.utils.checkpoint import checkpoint
```

```diff
@@ -1,5 +1,6 @@
 import argparse
 
+
 parser = argparse.ArgumentParser(
     description="comm_mode_feature examples",
     formatter_class=argparse.RawTextHelpFormatter,
```

```diff
@@ -21,6 +21,7 @@ from torch.utils._python_dispatch import TorchDispatchMode
 from torch.utils._pytree import tree_map_only
 from torch.utils.weak import WeakIdKeyDictionary, weakref
 
+
 _TOTAL_KEY = "Total"
 
 __all__ = ["FSDPMemTracker"]
```

```diff
@@ -17,7 +17,6 @@ from typing import (
     TYPE_CHECKING,
     Union,
 )
-
 from typing_extensions import Self
 
 import torch
@@ -32,9 +31,9 @@ from torch.utils._python_dispatch import (
     TorchDispatchMode,
 )
 from torch.utils._pytree import tree_flatten, tree_map_only
-
 from torch.utils.weak import WeakIdKeyDictionary, weakref
 
+
 if TYPE_CHECKING:
     from torch.utils.hooks import RemovableHandle
```

```diff
@@ -2,13 +2,13 @@
 import functools
 import time
 from typing import Any, Callable, Dict, List, TypeVar
 from typing_extensions import ParamSpec
-
 from uuid import uuid4
 
 import torch.distributed.c10d_logger as c10d_logger
 from torch.distributed.checkpoint.logging_handlers import DCP_LOGGER_NAME
 
+
 __all__: List[str] = []
 
 global _dcp_logger
```

```diff
@@ -6,7 +6,6 @@ from typing import Optional
 
 import torch
 import torch.distributed as dist
 from torch._utils import _get_device_module
-
 from torch.distributed import distributed_c10d
 from torch.distributed._shard.sharded_tensor import (
```

```diff
@@ -7,6 +7,7 @@ from typing import List, Tuple, Union
 
 import torch
 from torch import fx
 
+
 logger = logging.getLogger(__name__)
```

```diff
@@ -71,6 +71,7 @@ derivative would be as follows::
     loss.backward()
 """
 
+from . import transforms
 from .bernoulli import Bernoulli
 from .beta import Beta
 from .binomial import Binomial
@@ -111,12 +112,12 @@ from .relaxed_categorical import RelaxedOneHotCategorical
 from .studentT import StudentT
 from .transformed_distribution import TransformedDistribution
 from .transforms import *  # noqa: F403
-from . import transforms
 from .uniform import Uniform
 from .von_mises import VonMises
 from .weibull import Weibull
 from .wishart import Wishart
 
+
 _add_kl_info()
 del _add_kl_info
```

```diff
@@ -13,6 +13,7 @@ from torch.distributions.utils import (
 )
 from torch.nn.functional import binary_cross_entropy_with_logits
 
+
 __all__ = ["Bernoulli"]
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions.dirichlet import Dirichlet
 from torch.distributions.exp_family import ExponentialFamily
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Beta"]
```

```diff
@@ -9,6 +9,7 @@ from torch.distributions.utils import (
     probs_to_logits,
 )
 
+
 __all__ = ["Binomial"]
```

```diff
@@ -5,6 +5,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import lazy_property, logits_to_probs, probs_to_logits
 
+
 __all__ = ["Categorical"]
```

```diff
@@ -8,6 +8,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Cauchy"]
```

```diff
@@ -2,6 +2,7 @@
 from torch.distributions import constraints
 from torch.distributions.gamma import Gamma
 
+
 __all__ = ["Chi2"]
```

```diff
@@ -70,6 +70,7 @@ import numbers
 from torch.distributions import constraints, transforms
 
+
 __all__ = [
     "ConstraintRegistry",
     "biject_to",
```

```diff
@@ -34,6 +34,7 @@ The following constraints are implemented:
 import torch
 
+
 __all__ = [
     "Constraint",
     "boolean",
```

```diff
@@ -14,6 +14,7 @@ from torch.distributions.utils import (
 )
 from torch.nn.functional import binary_cross_entropy_with_logits
 
+
 __all__ = ["ContinuousBernoulli"]
```

```diff
@@ -5,6 +5,7 @@ from torch.autograd.function import once_differentiable
 from torch.distributions import constraints
 from torch.distributions.exp_family import ExponentialFamily
 
+
 __all__ = ["Dirichlet"]
```

```diff
@@ -8,6 +8,7 @@ from torch.distributions import constraints
 from torch.distributions.utils import lazy_property
 from torch.types import _size
 
+
 __all__ = ["Distribution"]
```

```diff
@@ -2,6 +2,7 @@
 import torch
 from torch.distributions.distribution import Distribution
 
+
 __all__ = ["ExponentialFamily"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions import constraints
 from torch.distributions.exp_family import ExponentialFamily
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Exponential"]
```

```diff
@@ -8,6 +8,7 @@ from torch.distributions.distribution import Distribution
 from torch.distributions.gamma import Gamma
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["FisherSnedecor"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions import constraints
 from torch.distributions.exp_family import ExponentialFamily
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Gamma"]
```

```diff
@@ -12,6 +12,7 @@ from torch.distributions.utils import (
 )
 from torch.nn.functional import binary_cross_entropy_with_logits
 
+
 __all__ = ["Geometric"]
```

```diff
@@ -9,6 +9,7 @@ from torch.distributions.transforms import AffineTransform, ExpTransform
 from torch.distributions.uniform import Uniform
 from torch.distributions.utils import broadcast_all, euler_constant
 
+
 __all__ = ["Gumbel"]
```

```diff
@@ -8,6 +8,7 @@ from torch.distributions.cauchy import Cauchy
 from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import AbsTransform
 
+
 __all__ = ["HalfCauchy"]
```

```diff
@@ -8,6 +8,7 @@ from torch.distributions.normal import Normal
 from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import AbsTransform
 
+
 __all__ = ["HalfNormal"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import _sum_rightmost
 
+
 __all__ = ["Independent"]
```

```diff
@@ -37,6 +37,7 @@ from .transformed_distribution import TransformedDistribution
 from .uniform import Uniform
 from .utils import _sum_rightmost, euler_constant as _euler_gamma
 
+
 _KL_REGISTRY: Dict[
     Tuple[Type, Type], Callable
 ] = {}  # Source of truth mapping a few general (type, type) pairs to functions.
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions.transforms import AffineTransform, PowerTransform
 from torch.distributions.uniform import Uniform
 from torch.distributions.utils import broadcast_all, euler_constant
 
+
 __all__ = ["Kumaraswamy"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Laplace"]
```

```diff
@@ -15,6 +15,7 @@ from torch.distributions import Beta, constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["LKJCholesky"]
```

```diff
@@ -4,6 +4,7 @@ from torch.distributions.normal import Normal
 from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import ExpTransform
 
+
 __all__ = ["LogNormal"]
```

```diff
@@ -4,6 +4,7 @@ from torch.distributions.normal import Normal
 from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import StickBreakingTransform
 
+
 __all__ = ["LogisticNormal"]
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions.distribution import Distribution
 from torch.distributions.multivariate_normal import _batch_mahalanobis, _batch_mv
 from torch.distributions.utils import _standard_normal, lazy_property
 
+
 __all__ = ["LowRankMultivariateNormal"]
```

```diff
@@ -5,6 +5,7 @@ import torch
 from torch.distributions import Categorical, constraints
 from torch.distributions.distribution import Distribution
 
+
 __all__ = ["MixtureSameFamily"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions.binomial import Binomial
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Multinomial"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import _standard_normal, lazy_property
 
+
 __all__ = ["MultivariateNormal"]
```

```diff
@@ -10,6 +10,7 @@ from torch.distributions.utils import (
     probs_to_logits,
 )
 
+
 __all__ = ["NegativeBinomial"]
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions import constraints
 from torch.distributions.exp_family import ExponentialFamily
 from torch.distributions.utils import _standard_normal, broadcast_all
 
+
 __all__ = ["Normal"]
```

```diff
@@ -4,6 +4,7 @@ from torch.distributions import constraints
 from torch.distributions.categorical import Categorical
 from torch.distributions.distribution import Distribution
 
+
 __all__ = ["OneHotCategorical", "OneHotCategoricalStraightThrough"]
```

```diff
@@ -5,6 +5,7 @@ from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import AffineTransform, ExpTransform
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Pareto"]
```

```diff
@@ -6,6 +6,7 @@ from torch.distributions import constraints
 from torch.distributions.exp_family import ExponentialFamily
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Poisson"]
```

```diff
@@ -14,6 +14,7 @@ from torch.distributions.utils import (
     probs_to_logits,
 )
 
+
 __all__ = ["LogitRelaxedBernoulli", "RelaxedBernoulli"]
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import ExpTransform
 from torch.distributions.utils import broadcast_all, clamp_probs
 
+
 __all__ = ["ExpRelaxedCategorical", "RelaxedOneHotCategorical"]
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions import Chi2, constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import _standard_normal, broadcast_all
 
+
 __all__ = ["StudentT"]
```

```diff
@@ -8,6 +8,7 @@ from torch.distributions.independent import Independent
 from torch.distributions.transforms import ComposeTransform, Transform
 from torch.distributions.utils import _sum_rightmost
 
+
 __all__ = ["TransformedDistribution"]
```

```diff
@@ -18,6 +18,7 @@ from torch.distributions.utils import (
 )
 from torch.nn.functional import pad, softplus
 
+
 __all__ = [
     "AbsTransform",
     "AffineTransform",
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Uniform"]
```

```diff
@@ -7,6 +7,7 @@ import torch
 import torch.nn.functional as F
 from torch.overrides import is_tensor_like
 
+
 euler_constant = 0.57721566490153286060  # Euler Mascheroni Constant
 
 __all__ = [
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions import constraints
 from torch.distributions.distribution import Distribution
 from torch.distributions.utils import broadcast_all, lazy_property
 
+
 __all__ = ["VonMises"]
```

```diff
@@ -7,6 +7,7 @@ from torch.distributions.transformed_distribution import TransformedDistribution
 from torch.distributions.transforms import AffineTransform, PowerTransform
 from torch.distributions.utils import broadcast_all
 
+
 __all__ = ["Weibull"]
```