[TEST] Temporarily disable fp16 type_as test for PyTorch Frontend (#5799)
authorTianqi Chen <tqchen@users.noreply.github.com>
Sat, 13 Jun 2020 19:18:17 +0000 (12:18 -0700)
committerGitHub <noreply@github.com>
Sat, 13 Jun 2020 19:18:17 +0000 (12:18 -0700)
tests/python/frontend/pytorch/test_forward.py

index f8fb57f..96e9144 100644 (file)
@@ -842,13 +842,13 @@ def test_forward_size():
 def test_type_as():
     torch.set_grad_enabled(False)
     input_shape = [1, 3]
-    
+
     def _create_module(dtype):
         class TypeAs(Module):
             def forward(self, *args):
                 expected_type_tensor = torch.zeros(1, 3, dtype=dtype)
                 return args[0].type_as(expected_type_tensor)
-        
+
         return TypeAs()
 
     input_data = torch.randn(input_shape).float()
@@ -868,7 +868,10 @@ def test_type_as():
         except Exception as e:
             # If GPU is not enabled in TVM, skip the fp16 test.
             pass
-        
+
+        # Temporary disable fp16 test
+        check_fp16 = False
+
         if check_fp16:
             verify_model(_create_module(torch.float16), input_data=input_data)