From: Wanchao Liang
Date: Thu, 14 Feb 2019 23:37:42 +0000 (-0800)
Subject: fix test_jit canonicalize_tensor_iterator
X-Git-Tag: accepted/tizen/6.5/unified/20211028.231830~1278
X-Git-Url: http://review.tizen.org/git/?a=commitdiff_plain;h=f3dd5563e44616bc2be5317cdfd7947849e2efd8;p=platform%2Fupstream%2Fpytorch.git

fix test_jit canonicalize_tensor_iterator

Summary: Pull Request resolved: https://github.com/pytorch/pytorch/pull/17104

Differential Revision: D14089928

Pulled By: wanchaol

fbshipit-source-id: 8b288514ab9ee8d24a11d39b75eef95783f28f20
---

diff --git a/test/test_jit.py b/test/test_jit.py
index 926501f..344ad68 100644
--- a/test/test_jit.py
+++ b/test/test_jit.py
@@ -886,9 +886,9 @@ class TestJit(JitTestCase):
         traced = torch.jit.trace(f, (x,))
         f(x)
         graph = traced.graph_for(x)
-        # There should be 4 int constants for the right sides of operators, plus two
-        # for alpha arguments for add and sub
-        self.assertTrue(str(traced.graph_for(x)).count(': int = prim::Constant'), 6)
+        # There should be 4 int constants for the right sides of operators, plus one
+        # for the alpha argument for add and sub
+        self.assertTrue(str(traced.graph_for(x)).count(': int = prim::Constant') == 5)

     # TODO: adapt this test to check that GraphExecutor treats them differently
     @unittest.skip("Need to be adjusted to Graph Executor")
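
For context, the corrected assertion counts four int constants for the scalar right-hand sides plus a single alpha constant shared by add and sub after canonicalization, for a total of five. Below is a minimal, hypothetical sketch of a function with that shape; the function body and scalar values are illustrative assumptions, not the exact body of the test this commit fixes, and the observed constant count depends on the canonicalization passes in the build.

# Hypothetical sketch: each scalar right-hand side (2, 4, 6, 8) is traced as an
# int prim::Constant, and add/sub are assumed to share one alpha constant after
# canonicalization, giving the five ': int = prim::Constant' nodes the test expects.
import torch

def f(x):
    y = x + 2   # int constant 2 (plus alpha for add)
    y = y - 4   # int constant 4 (alpha assumed shared with add)
    y = y * 6   # int constant 6
    y = y / 8   # int constant 8
    return y

x = torch.rand(4, 4)
traced = torch.jit.trace(f, (x,))
graph = traced.graph_for(x)
print(str(graph).count(': int = prim::Constant'))  # expected 5 under these assumptions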