From: Edward Yang Date: Wed, 27 Mar 2019 15:01:15 +0000 (-0700) Subject: Upgrade flake8-bugbear to master, fix the new lints. (#18507) X-Git-Tag: accepted/tizen/6.5/unified/20211028.231830~600 X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=81e030d9a6e8d1d5c3d83628f37f2d49bbed597d;p=platform%2Fupstream%2Fpytorch.git Upgrade flake8-bugbear to master, fix the new lints. (#18507) Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/18507 ghimport-source-id: 1c3642befad2da78a7e5f39d6d58732b85c76267 Stack from [ghstack](https://github.com/ezyang/ghstack): * **#18507 Upgrade flake8-bugbear to master, fix the new lints.** It turns out Facebook is internally using the unreleased master flake8-bugbear, so upgrading it grabs a few more lints that Phabricator was complaining about but we didn't get in open source. A few of the getattr sites that I fixed look very suspicious (they're written as if Python were a lazy language), but I didn't look more closely into the matter. Signed-off-by: Edward Z. Yang Differential Revision: D14633682 fbshipit-source-id: fc3f97c87dca40bbda943a1d1061953490dbacf8 --- diff --git a/.flake8 b/.flake8 index 9137fa6..a80b1b7 100644 --- a/.flake8 +++ b/.flake8 @@ -7,4 +7,4 @@ ignore = E203,E305,E402,E501,E721,E741,F401,F403,F405,F821,F841,F999,W503,W504,C408, # ignores below are temporary, fix them and remove please! 
B007,B008 -exclude = docs/src,venv,third_party,caffe2,scripts,docs/caffe2,tools/amd_build/pyHIPIFY,torch/lib/include,torch/lib/tmp_install,build,torch/include +exclude = docs/src,venv,third_party,caffe2,scripts,docs/caffe2,tools/amd_build/pyHIPIFY,torch/lib/include,torch/lib/tmp_install,build,torch/include,torch/__init__.pyi diff --git a/.travis.yml b/.travis.yml index a46fdb6..acd83be 100644 --- a/.travis.yml +++ b/.travis.yml @@ -24,7 +24,10 @@ matrix: python: "3.7" dist: xenial # required for Python 3.7 (travis-ci/travis-ci#9069) sudo: required # required for Python 3.7 (travis-ci/travis-ci#9069) - install: pip install flake8-mypy flake8-bugbear + install: + - pip install flake8-mypy + # Apparently Facebook runs master (not released) + - pip install git+https://github.com/PyCQA/flake8-bugbear.git@d9444713a51a9fb6ee8cd2d88fca85e9ff0c2d58 script: flake8 - name: "MyPy typecheck" python: "3.6" diff --git a/docs/cpp/source/conf.py b/docs/cpp/source/conf.py index 5aa4c57..5569a39 100644 --- a/docs/cpp/source/conf.py +++ b/docs/cpp/source/conf.py @@ -195,7 +195,7 @@ def setup(app): # In Sphinx 1.8 it was renamed to `add_css_file`, 1.7 and prior it is # `add_stylesheet` (deprecated in 1.8). - add_css = getattr(app, 'add_css_file', getattr(app, 'add_stylesheet')) + add_css = getattr(app, 'add_css_file', app.add_stylesheet) for css_file in html_css_files: add_css(css_file) diff --git a/docs/source/conf.py b/docs/source/conf.py index 358f62c..97d4ade 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -172,7 +172,7 @@ def setup(app): # In Sphinx 1.8 it was renamed to `add_css_file`, 1.7 and prior it is # `add_stylesheet` (deprecated in 1.8). 
- add_css = getattr(app, 'add_css_file', getattr(app, 'add_stylesheet')) + add_css = getattr(app, 'add_css_file', app.add_stylesheet) for css_file in html_css_files: add_css(css_file) diff --git a/test/test_torch.py b/test/test_torch.py index 52ba153..93da20c 100644 --- a/test/test_torch.py +++ b/test/test_torch.py @@ -60,11 +60,11 @@ with warnings.catch_warnings(record=True) as warns: class FilelikeMock(object): def __init__(self, data, has_fileno=True, has_readinto=False): if has_readinto: - setattr(self, 'readinto', self.readinto_opt) + self.readinto = self.readinto_opt if has_fileno: # Python 2's StringIO.StringIO has no fileno attribute. # This is used to test that. - setattr(self, 'fileno', self.fileno_opt) + self.fileno = self.fileno_opt self.calls = set() self.bytesio = io.BytesIO(data) @@ -1052,7 +1052,7 @@ class _TestTorchMixin(object): "std", "sum", "var", "max", "min"] def normfn_attr(t, dim, keepdim=False, out=None): - attr = getattr(torch, "norm") + attr = torch.norm return attr(t, 2, dim, keepdim, out=out) for fn_name in dim_red_fns: @@ -3837,14 +3837,14 @@ class _TestTorchMixin(object): for lhs in lhsTensors: lhs_expanded = lhs.expand(*(torch.Size(full_batch_dims) + torch.Size(lhs_mat_dims))) - lhs_expanded_matmul_fn = getattr(lhs_expanded, "matmul") + lhs_expanded_matmul_fn = lhs_expanded.matmul for rhs in rhsTensors: rhs_expanded = ((rhs if len(rhs_dims) != 1 else rhs.unsqueeze(-1)). 
expand(*(torch.Size(full_batch_dims) + torch.Size(rhs_mat_dims)))) truth = maybe_squeeze_result(lhs_expanded, rhs_expanded, lhs_expanded_matmul_fn(rhs_expanded)) for l in (lhs, lhs_expanded): for r in (rhs, rhs_expanded): - l_matmul_fn = getattr(l, "matmul") + l_matmul_fn = l.matmul result = maybe_squeeze_result(l, r, l_matmul_fn(r)) self.assertEqual(truth, result) # test torch.matmul function as well diff --git a/torch/_jit_internal.py b/torch/_jit_internal.py index 3667cfe..7bf3f60 100644 --- a/torch/_jit_internal.py +++ b/torch/_jit_internal.py @@ -204,20 +204,26 @@ except ImportError: return TupleInstance(types) class TupleInstance(object): + __slots__ = ['__args__'] + def __init__(self, types): - setattr(self, '__args__', types) + self.__args__ = types class ListInstance(object): + __slots__ = ['__args__'] + def __init__(self, types): - setattr(self, '__args__', types) + self.__args__ = types class ListCls(object): def __getitem__(self, types): return TupleInstance(types) class DictInstance(object): + __slots__ = ['__args__'] + def __init__(self, types): - setattr(self, '__args__', types) + self.__args__ = types class DictCls(object): def __getitem__(self, types): diff --git a/torch/_six.py b/torch/_six.py index f23fe61..5eeb18a 100644 --- a/torch/_six.py +++ b/torch/_six.py @@ -80,7 +80,8 @@ else: if PY3: import builtins - exec_ = getattr(builtins, "exec") + # See https://github.com/PyCQA/flake8-bugbear/issues/64 + exec_ = getattr(builtins, "exec") # noqa: B009 else: def exec_(_code_, _globs_=None, _locs_=None): """Execute code in a namespace.""" diff --git a/torch/autograd/function.py b/torch/autograd/function.py index 89e930a..0fe2c07 100644 --- a/torch/autograd/function.py +++ b/torch/autograd/function.py @@ -94,14 +94,14 @@ class FunctionMeta(type): has_static_forward = isinstance(forward, staticmethod) or isinstance(forward, classmethod) break - setattr(cls, '_is_legacy', not has_static_forward) + cls._is_legacy = not has_static_forward # old-style 
functions if not has_static_forward: return super(FunctionMeta, cls).__init__(name, bases, attrs) backward_fn = type(name + 'Backward', (BackwardCFunction,), {'_forward_cls': cls}) - setattr(cls, '_backward_cls', backward_fn) + cls._backward_cls = backward_fn return super(FunctionMeta, cls).__init__(name, bases, attrs)