From 8cee0a25bd5444d633347beab61473db3ae3f25a Mon Sep 17 00:00:00 2001
From: Aryan Gupta
Date: Tue, 7 Nov 2023 21:38:13 +0000
Subject: [PATCH] fix: Flake8-BugBear code B-026 for PyTorch (#111362)

Fixes #106571

I have fixed the B-026 error code reported by the Flake8 checks across the
codebase and removed it from the ignore lists in `.flake8` and
`pyproject.toml`. Please review and let me know if anything else needs to be
done. Thanks, and I'm excited to make this first contribution to PyTorch. I
also refer to the issue that introduced
[B-026](https://github.com/PyCQA/flake8-bugbear/issues/286) in
`flake8-bugbear`, which discusses the error code. A minimal sketch of the
before/after call pattern appears after the diff.

Pull Request resolved: https://github.com/pytorch/pytorch/pull/111362
Approved by: https://github.com/Skylion007
---
 .flake8                                          | 2 +-
 pyproject.toml                                   | 2 +-
 test/distributed/fsdp/test_fsdp_checkpoint.py    | 2 +-
 test/distributed/fsdp/test_fsdp_state_dict.py    | 4 ++--
 test/onnx/test_operators.py                      | 2 +-
 test/test_ops.py                                 | 2 +-
 torch/_functorch/aot_autograd.py                 | 2 +-
 torch/utils/data/datapipes/iter/combinatorics.py | 2 +-
 8 files changed, 9 insertions(+), 9 deletions(-)

diff --git a/.flake8 b/.flake8
index 5da4f8e575e..5d69dc54fd1 100644
--- a/.flake8
+++ b/.flake8
@@ -14,7 +14,7 @@ ignore =
     # to line this up with executable bit
     EXE001,
     # these ignores are from flake8-bugbear; please fix!
-    B007,B008,B017,B019,B023,B026,B028,B903,B904,B905,B906,B907
+    B007,B008,B017,B019,B023,B028,B903,B904,B905,B906,B907
     # these ignores are from flake8-comprehensions; please fix!
     C407,
     # these ignores are from flake8-logging-format; please fix!
diff --git a/pyproject.toml b/pyproject.toml
index 3585c372018..076bc91bd56 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -29,7 +29,7 @@ ignore = [
     "B007", "B008", "B017",
     "B018", # Useless expression
     "B019",
-    "B023", "B026",
+    "B023",
     "B028", # No explicit `stacklevel` keyword argument found
     "B904",
     "E402",
diff --git a/test/distributed/fsdp/test_fsdp_checkpoint.py b/test/distributed/fsdp/test_fsdp_checkpoint.py
index 7f99d95af24..57708a94acb 100644
--- a/test/distributed/fsdp/test_fsdp_checkpoint.py
+++ b/test/distributed/fsdp/test_fsdp_checkpoint.py
@@ -100,7 +100,7 @@ class TestFSDPCheckpoint(FSDPTest):
             l3 = ckpt_wrapper(l3)
 
         fsdp_wrapper = partial(
-            _maybe_wrap_fsdp, wrap_fsdp=wrap_fsdp, *fsdp_args, **fsdp_kwargs
+            _maybe_wrap_fsdp, *fsdp_args, wrap_fsdp=wrap_fsdp, **fsdp_kwargs
         )
         self.ffn = nn.Sequential(
             fsdp_wrapper(l1),
diff --git a/test/distributed/fsdp/test_fsdp_state_dict.py b/test/distributed/fsdp/test_fsdp_state_dict.py
index 3bdd5b4da00..de4ae8b0e64 100644
--- a/test/distributed/fsdp/test_fsdp_state_dict.py
+++ b/test/distributed/fsdp/test_fsdp_state_dict.py
@@ -230,8 +230,8 @@ class TestFSDPStateDict(FSDPTest):
             bn1 = checkpoint_wrapper(bn1)
             lin2 = checkpoint_wrapper(lin2)
         seq = nn.Sequential(
-            FSDP(lin1, mixed_precision=lin_mp, *fsdp_args, **fsdp_kwargs),
-            FSDP(bn1, mixed_precision=bn_mp, *fsdp_args, **fsdp_kwargs),
+            FSDP(lin1, *fsdp_args, mixed_precision=lin_mp, **fsdp_kwargs),
+            FSDP(bn1, *fsdp_args, mixed_precision=bn_mp, **fsdp_kwargs),
             lin2,
         )
         if checkpoint_wrap:
diff --git a/test/onnx/test_operators.py b/test/onnx/test_operators.py
index c0861a9c32f..d456c6a30cf 100644
--- a/test/onnx/test_operators.py
+++ b/test/onnx/test_operators.py
@@ -48,7 +48,7 @@ _onnx_dep = True  # flag to import onnx package.
 
 def export_to_pbtxt(model, inputs, *args, **kwargs):
     return torch.onnx.export_to_pretty_string(
-        model, inputs, google_printer=True, *args, **kwargs
+        model, inputs, *args, google_printer=True, **kwargs
     )
 
 
diff --git a/test/test_ops.py b/test/test_ops.py
index 017854439ed..14e327a39e2 100644
--- a/test/test_ops.py
+++ b/test/test_ops.py
@@ -986,7 +986,7 @@ class TestCommon(TestCase):
             try:
                 if with_out:
                     out = torch.empty(0, dtype=torch.int32, device=device)
-                    op_to_test(inputs, out=out, *args, **kwargs)
+                    op_to_test(inputs, *args, out=out, **kwargs)
                 else:
                     out = op_to_test(inputs, *args, **kwargs)
                 self.assertFalse(expectFail)
diff --git a/torch/_functorch/aot_autograd.py b/torch/_functorch/aot_autograd.py
index b9f82270c15..4fc58d405f6 100644
--- a/torch/_functorch/aot_autograd.py
+++ b/torch/_functorch/aot_autograd.py
@@ -4716,7 +4716,7 @@ def aot_module(mod: nn.Module, *args, **kwargs) -> nn.Module:
     named_buffers = dict(mod.named_buffers(remove_duplicate=False))
     num_params_buffers = len(named_params) + len(named_buffers)
     compiled_f = aot_function(
-        functional_call, num_params_buffers=num_params_buffers, *args, **kwargs
+        functional_call, *args, num_params_buffers=num_params_buffers, **kwargs
     )
 
     class AOTModule(nn.Module):
diff --git a/torch/utils/data/datapipes/iter/combinatorics.py b/torch/utils/data/datapipes/iter/combinatorics.py
index fc2f5c68e97..16d2f5444dc 100644
--- a/torch/utils/data/datapipes/iter/combinatorics.py
+++ b/torch/utils/data/datapipes/iter/combinatorics.py
@@ -40,7 +40,7 @@ class SamplerIterDataPipe(IterDataPipe[T_co]):
         self.sampler_args = () if sampler_args is None else sampler_args
         self.sampler_kwargs = {} if sampler_kwargs is None else sampler_kwargs
         # https://github.com/python/mypy/pull/9629 will solve
-        self.sampler = sampler(data_source=self.datapipe, *self.sampler_args, **self.sampler_kwargs)  # type: ignore[misc]
+        self.sampler = sampler(*self.sampler_args, data_source=self.datapipe, **self.sampler_kwargs)  # type: ignore[misc]
 
     def __iter__(self) -> Iterator[T_co]:
         return iter(self.sampler)
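
For reviewers unfamiliar with the rule, below is a minimal, self-contained sketch of the call pattern that B-026 flags and of the reordering this patch applies. The `configure` function, its parameters, and the `extra` tuple are hypothetical illustrations, not code from this repository.

```python
# Minimal sketch of flake8-bugbear B026 (hypothetical names, not PyTorch code).

def configure(name, *flags, verbose=False):
    """Toy function with a keyword-only `verbose` parameter."""
    print(name, flags, verbose)

extra = ("--fast", "--quiet")

# Flagged by B026: star-arg unpacking after a keyword argument.
# The call happens to work here only because `verbose` is keyword-only; if
# `verbose` could also be filled positionally, the unpacked tuple would
# collide with it and raise a TypeError.
configure("resnet", verbose=True, *extra)

# Reordered form used throughout this patch: unpack the positional arguments
# first, then pass keyword arguments. The argument binding is identical; only
# the source order (and expression evaluation order) changes.
configure("resnet", *extra, verbose=True)
```

Both calls print `resnet ('--fast', '--quiet') True`, which is why the reordering in this patch is purely a readability/lint fix rather than a behavior change.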