Fix B902 lint error: invalid first argument. (#18181)

Summary:
Pull Request resolved: https://github.com/pytorch/pytorch/pull/18181
ghimport-source-id: 9c23551584a1a1b0b7ac246367f3a7ae1c50b315

Stack from [ghstack](https://github.com/ezyang/ghstack):
* #18184 Fix B903 lint: save memory for data classes with slots/namedtuple
* **#18181 Fix B902 lint error: invalid first argument.**
* #18178 Fix B006 lint errors: using mutable structure in default argument.
* #18177 Fix lstrip bug revealed by B005 lint

A variety of sins were committed:
- Some code was dead
- Some code was actually a staticmethod
- Some code simply named the first argument the wrong way
- Some code was purposely testing the omitted case

Signed-off-by: Edward Z. Yang <ezyang@fb.com>

Differential Revision: D14530876

fbshipit-source-id: 292a371d9a76ddc7bfcfd38b6f0da9165290a58e
This commit is contained in:
Edward Yang 2019-03-21 09:06:30 -07:00 committed by Facebook Github Bot
parent 0654c7d4a7
commit ba81074c40
8 changed files with 26 additions and 26 deletions

View File

@ -6,5 +6,5 @@ max-line-length = 120
ignore = ignore =
E203,E305,E402,E501,E721,E741,F401,F403,F405,F821,F841,F999,W503,W504,C408, E203,E305,E402,E501,E721,E741,F401,F403,F405,F821,F841,F999,W503,W504,C408,
# ignores below are temporary, fix them and remove please! # ignores below are temporary, fix them and remove please!
B007,B008,B902,B903 B007,B008,B903
exclude = docs/src,venv,third_party,caffe2,scripts,docs/caffe2,tools/amd_build/pyHIPIFY,torch/lib/include,torch/lib/tmp_install,build,torch/include exclude = docs/src,venv,third_party,caffe2,scripts,docs/caffe2,tools/amd_build/pyHIPIFY,torch/lib/include,torch/lib/tmp_install,build,torch/include

View File

@ -176,7 +176,7 @@ class Errors(object):
if self.errors: if self.errors:
self.fail() self.fail()
def recover(parent_self): def recover(self):
""" """
Returns a context manager which can be used to recover in case of Returns a context manager which can be used to recover in case of
an error. Example usage: an error. Example usage:
@ -184,6 +184,8 @@ class Errors(object):
>>> with errs.recover(): >>> with errs.recover():
>>> ... >>> ...
""" """
parent_self = self
class Recover(object): class Recover(object):
def __enter__(self): def __enter__(self):
pass pass
@ -193,7 +195,7 @@ class Errors(object):
return True return True
return Recover() return Recover()
def addErrCtxt(parent_self, msg): def addErrCtxt(self, msg):
""" """
Returns a context manager which encloses a fragment of code with Returns a context manager which encloses a fragment of code with
an extra contextual message, e.g., where an error occurred, or a hint an extra contextual message, e.g., where an error occurred, or a hint
@ -202,6 +204,8 @@ class Errors(object):
>>> with errs.addErrCtx("Some text"): >>> with errs.addErrCtx("Some text"):
>>> ... >>> ...
""" """
parent_self = self
class AddContext(object): class AddContext(object):
def __enter__(self): def __enter__(self):
parent_self.context.append(msg) parent_self.context.append(msg)

View File

@ -8331,7 +8331,7 @@ a")
def test_method_no_self(self): def test_method_no_self(self):
with self.assertRaisesRegex(RuntimeError, 'methods must have a self argument'): with self.assertRaisesRegex(RuntimeError, 'methods must have a self argument'):
class MethodNoSelf(torch.jit.ScriptModule): class MethodNoSelf(torch.jit.ScriptModule):
@torch.jit.script_method @torch.jit.script_method # noqa: B902
def forward(): def forward():
return torch.zeros(3, 4) return torch.zeros(3, 4)

View File

@ -402,6 +402,7 @@ class TestMultiprocessing(TestCase):
self.assertEqual(list(tensor), [4, 4, 4, 4]) self.assertEqual(list(tensor), [4, 4, 4, 4])
p.join() p.join()
@staticmethod
def _test_event_multiprocess_child(event, p2c, c2p): def _test_event_multiprocess_child(event, p2c, c2p):
c2p.put(0) # notify parent child is ready c2p.put(0) # notify parent child is ready
p2c.get() # wait for record in parent p2c.get() # wait for record in parent
@ -457,6 +458,7 @@ class TestMultiprocessing(TestCase):
# create handle on different device from recorded event # create handle on different device from recorded event
e1.ipc_handle() e1.ipc_handle()
@staticmethod
def _test_event_handle_importer_consumer(handle, p2c, c2p): def _test_event_handle_importer_consumer(handle, p2c, c2p):
e1 = torch.cuda.Event.from_ipc_handle(0, handle) e1 = torch.cuda.Event.from_ipc_handle(0, handle)
c2p.put(0) # notify parent child is ready c2p.put(0) # notify parent child is ready
@ -491,6 +493,7 @@ class TestMultiprocessing(TestCase):
p2c.put(1) # notify child that parent is done p2c.put(1) # notify child that parent is done
p.join() p.join()
@staticmethod
def _test_event_handle_exporter_consumer(handle, p2c, c2p): def _test_event_handle_exporter_consumer(handle, p2c, c2p):
stream = torch.cuda.Stream() stream = torch.cuda.Stream()
with torch.cuda.stream(stream): with torch.cuda.stream(stream):

View File

@ -587,7 +587,8 @@ class TestNN(NNTestCase):
self.par = nn.ParameterList() self.par = nn.ParameterList()
self.par.append(nn.Parameter(torch.randn(10))) self.par.append(nn.Parameter(torch.randn(10)))
def forward(inp): def forward(self, inp):
# NB: dead code
return inp.clone() return inp.clone()
net = Net() net = Net()

View File

@ -100,16 +100,6 @@ class BytesIOContext(io.BytesIO):
# This is intentionally prefixed by an underscore. Otherwise pytest will try to # This is intentionally prefixed by an underscore. Otherwise pytest will try to
# run its methods as test cases. # run its methods as test cases.
class _TestTorchMixin(object): class _TestTorchMixin(object):
def _check_sum_dim(tensors, dim):
for tensor in tensors:
expected = tensor.numpy().sum(dim)
actual = tensor.sum(dim)
self.assertEqual(expected.shape, actual.shape)
if actual.dtype == torch.float:
self.assertTrue(np.allclose(expected, actual.numpy(), rtol=1e-03, atol=1e-05))
else:
self.assertTrue(np.allclose(expected, actual.numpy()))
def _make_tensors(self, shape, val_range=(-100, 100), use_floating=True, use_integral=True): def _make_tensors(self, shape, val_range=(-100, 100), use_floating=True, use_integral=True):
float_types = [torch.double, float_types = [torch.double,
torch.float] torch.float]

View File

@ -3,7 +3,7 @@ from torch._six import with_metaclass
class VariableMeta(type): class VariableMeta(type):
def __instancecheck__(self, other): def __instancecheck__(cls, other):
return isinstance(other, torch.Tensor) return isinstance(other, torch.Tensor)

View File

@ -979,21 +979,23 @@ class ScriptMeta(type(torch._C.ScriptModule)):
# this has to inherit from pybind11's metaclass otherwise we get # this has to inherit from pybind11's metaclass otherwise we get
# issues because ScriptModule inherits from torch._C.ScriptModule, # issues because ScriptModule inherits from torch._C.ScriptModule,
# a pybind11 type # a pybind11 type
def __init__(cls, name, bases, attrs): def __init__(self, name, bases, attrs):
# find all the script methods # find all the script methods
cls._original_methods = {} self._original_methods = {}
methods = [] methods = []
for k, v in sorted(attrs.items()): for k, v in sorted(attrs.items()):
if isinstance(v, ScriptMethodStub): if isinstance(v, ScriptMethodStub):
delattr(cls, k) delattr(self, k)
methods.append(v) methods.append(v)
cls._original_methods[v.original_method.__name__] = v.original_method self._original_methods[v.original_method.__name__] = v.original_method
# after the user's __init__ register all the script methods # after the user's __init__ register all the script methods
# with the module # with the module
original_init = getattr(cls, '__init__', lambda self: None) original_init = getattr(self, '__init__', lambda self: None)
super_constants = getattr(super(cls), '_constants_set', set()) super_constants = getattr(super(self), '_constants_set', set())
cls._constants_set = set(getattr(cls, '__constants__', ())).union(super_constants) self._constants_set = set(getattr(self, '__constants__', ())).union(super_constants)
cls._overloads = dict(getattr(cls, '__overloads__', {})) self._overloads = dict(getattr(self, '__overloads__', {}))
cls = self
@functools.wraps(original_init) @functools.wraps(original_init)
def init_then_register(self, *args, **kwargs): def init_then_register(self, *args, **kwargs):
@ -1005,8 +1007,8 @@ class ScriptMeta(type(torch._C.ScriptModule)):
original_init(self, *args, **kwargs) original_init(self, *args, **kwargs)
_create_methods_from_stubs(self, methods) _create_methods_from_stubs(self, methods)
cls.__init__ = init_then_register self.__init__ = init_then_register
return super(ScriptMeta, cls).__init__(name, bases, attrs) return super(ScriptMeta, self).__init__(name, bases, attrs)
if _enabled: if _enabled: