Make timeout configurable in performance test runner.
author machenbach@chromium.org <machenbach@chromium.org>
Mon, 15 Sep 2014 13:00:32 +0000 (13:00 +0000)
committer machenbach@chromium.org <machenbach@chromium.org>
Mon, 15 Sep 2014 13:00:32 +0000 (13:00 +0000)
BUG=374740
LOG=n
TBR=bmeurer@chromium.org

Review URL: https://codereview.chromium.org/569213002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@23945 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

tools/run_perf.py
tools/unittests/run_perf_test.py

index 0022c8e..920c18d 100755 (executable)
@@ -164,6 +164,7 @@ class DefaultSentinel(Node):
     super(DefaultSentinel, self).__init__()
     self.binary = "d8"
     self.run_count = 10
+    self.timeout = 60
     self.path = []
     self.graphs = []
     self.flags = []
@@ -198,6 +199,7 @@ class Graph(Node):
     self.binary = suite.get("binary", parent.binary)
     self.run_count = suite.get("run_count", parent.run_count)
     self.run_count = suite.get("run_count_%s" % arch, self.run_count)
+    self.timeout = suite.get("timeout", parent.timeout)
     self.units = suite.get("units", parent.units)
     self.total = suite.get("total", parent.total)
 
@@ -463,15 +465,18 @@ def Main(args):
       def Runner():
         """Output generator that reruns several times."""
         for i in xrange(0, max(1, runnable.run_count)):
-          # TODO(machenbach): Make timeout configurable in the suite definition.
-          # Allow timeout per arch like with run_count per arch.
-          output = commands.Execute(runnable.GetCommand(shell_dir), timeout=60)
+          # TODO(machenbach): Allow timeout per arch like with run_count per
+          # arch.
+          output = commands.Execute(runnable.GetCommand(shell_dir),
+                                    timeout=runnable.timeout)
           print ">>> Stdout (#%d):" % (i + 1)
           print output.stdout
           if output.stderr:  # pragma: no cover
             # Print stderr for debugging.
             print ">>> Stderr (#%d):" % (i + 1)
             print output.stderr
+          if output.timed_out:
+            print ">>> Test timed out after %ss." % runnable.timeout
           yield output.stdout
 
       # Let runnable iterate over all runs and handle output.
index 86c8199..76e8d23 100644 (file)
@@ -77,7 +77,7 @@ V8_GENERIC_JSON = {
   "units": "ms",
 }
 
-Output = namedtuple("Output", "stdout, stderr")
+Output = namedtuple("Output", "stdout, stderr, timed_out")
 
 class PerfTest(unittest.TestCase):
   @classmethod
@@ -113,9 +113,12 @@ class PerfTest(unittest.TestCase):
     with open(self._test_input, "w") as f:
       f.write(json.dumps(json_content))
 
-  def _MockCommand(self, *args):
+  def _MockCommand(self, *args, **kwargs):
     # Fake output for each test run.
-    test_outputs = [Output(stdout=arg, stderr=None) for arg in args[1]]
+    test_outputs = [Output(stdout=arg,
+                           stderr=None,
+                           timed_out=kwargs.get("timed_out", False))
+                    for arg in args[1]]
     def execute(*args, **kwargs):
       return test_outputs.pop()
     commands.Execute = MagicMock(side_effect=execute)
@@ -151,17 +154,18 @@ class PerfTest(unittest.TestCase):
   def _VerifyErrors(self, errors):
     self.assertEquals(errors, self._LoadResults()["errors"])
 
-  def _VerifyMock(self, binary, *args):
+  def _VerifyMock(self, binary, *args, **kwargs):
     arg = [path.join(path.dirname(self.base), binary)]
     arg += args
-    commands.Execute.assert_called_with(arg, timeout=60)
+    commands.Execute.assert_called_with(
+        arg, timeout=kwargs.get("timeout", 60))
 
-  def _VerifyMockMultiple(self, *args):
+  def _VerifyMockMultiple(self, *args, **kwargs):
     expected = []
     for arg in args:
       a = [path.join(path.dirname(self.base), arg[0])]
       a += arg[1:]
-      expected.append(((a,), {"timeout": 60}))
+      expected.append(((a,), {"timeout": kwargs.get("timeout", 60)}))
     self.assertEquals(expected, commands.Execute.call_args_list)
 
   def testOneRun(self):
@@ -347,3 +351,20 @@ class PerfTest(unittest.TestCase):
     ])
     self._VerifyErrors([])
     self._VerifyMock(path.join("out", "x64.release", "cc"), "--flag", "")
+
+  def testOneRunTimingOut(self):
+    test_input = dict(V8_JSON)
+    test_input["timeout"] = 70
+    self._WriteTestInput(test_input)
+    self._MockCommand(["."], [""], timed_out=True)
+    self.assertEquals(1, self._CallMain())
+    self._VerifyResults("test", "score", [
+      {"name": "Richards", "results": [], "stddev": ""},
+      {"name": "DeltaBlue", "results": [], "stddev": ""},
+    ])
+    self._VerifyErrors([
+      "Regexp \"^Richards: (.+)$\" didn't match for test Richards.",
+      "Regexp \"^DeltaBlue: (.+)$\" didn't match for test DeltaBlue.",
+    ])
+    self._VerifyMock(
+        path.join("out", "x64.release", "d7"), "--flag", "run.js", timeout=70)