Fix flake8 issues
Author: Igor Fedan <ifedan@fb.com>
Wed, 3 Apr 2019 04:10:22 +0000 (21:10 -0700)
Committer: Facebook Github Bot <facebook-github-bot@users.noreply.github.com>
Wed, 3 Apr 2019 04:18:01 +0000 (21:18 -0700)
Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/18762

Reviewed By: houseroad

Differential Revision: D14734152

Pulled By: ifedan

fbshipit-source-id: 5adf123f88273895ad34ee9041896358d686de08
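Note: the changes below are whitespace-only fixes of the kind flake8 (pycodestyle) reports, e.g. E231 "missing whitespace after ','". A minimal before/after sketch of the rule being enforced (this standalone snippet and the E231 reference are illustrative, not part of the PR):

    import torch

    # Flagged by flake8 E231 (missing whitespace after ','):
    bad = torch.tensor([10,11])

    # Accepted once the space after the comma is added:
    good = torch.tensor([10, 11])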

test/test_torch.py
torch/onnx/symbolic.py

diff --git a/test/test_torch.py b/test/test_torch.py
index 2f8d1fc..d2e8f38 100644
--- a/test/test_torch.py
+++ b/test/test_torch.py
@@ -9863,7 +9863,7 @@ tensor([[[1., 1., 1.,  ..., 1., 1., 1.],
 
         def _get_tensors(**kwargs):
             return [
-                torch.tensor([10,11], **kwargs),
+                torch.tensor([10, 11], **kwargs),
                 torch.randn(3, 5, **kwargs),
                 torch.rand(3, **kwargs),
                 # torch.randint(3,5, **kwargs), // unsupported
@@ -9872,7 +9872,7 @@ tensor([[[1., 1., 1.,  ..., 1., 1., 1.],
                 torch.empty(6, **kwargs),
                 torch.ones(6, **kwargs),
                 torch.eye(6, **kwargs),
-                torch.arange(3, 5, **kwargs),]
+                torch.arange(3, 5, **kwargs), ]
 
         pinned_tensors = _get_tensors(pin_memory=True) + _get_like(torch.empty(5, dtype=torch.float64), pin_memory=True)
         for x in pinned_tensors:
diff --git a/torch/onnx/symbolic.py b/torch/onnx/symbolic.py
index 383db27..1edc190 100644
--- a/torch/onnx/symbolic.py
+++ b/torch/onnx/symbolic.py
@@ -1320,7 +1320,7 @@ def full(g, sizes, value, dtype, layout, device, pin_memory=False):
                     value_t=torch.tensor([const_value], dtype=scalar_type_to_pytorch_type[dtype], pin_memory=pin_memory))
 
 
-@parse_args('v', 'f', 'i', 'v', 'v','b')
+@parse_args('v', 'f', 'i', 'v', 'v', 'b')
 def full_like(g, input, fill_value, dtype, layout, device, pin_memory=False):
     shape = g.op("Shape", input)
     return g.op("ConstantOfShape", shape,