+2012-04-12 Ryosuke Niwa <rniwa@webkit.org>
+
+ Enable SVG page loading performance tests
+ https://bugs.webkit.org/show_bug.cgi?id=83861
+
+ Reviewed by Adam Barth.
+
+ Unskip the PageLoad directory, and skip the tests that load too fast (in less than 100ms) and are
+ therefore unstable, as well as the ones that take too long to load (more than 20s).
+
+ * Skipped:
+
2012-04-04 Ilya Tikhonovsky <loislo@chromium.org>
Web Inspector: Unreviewed. temporary disable broken inspector tests.
# Not enabled by default on some ports
Mutation
+# Unstable SVG page loading tests
+PageLoad/svg/files/33041-Samurai.svg
+PageLoad/svg/files/Sierpinski_carpet_6.svg # Takes too long to load
+PageLoad/svg/files/az-lizard_benji_park_01.svg
+PageLoad/svg/files/france.svg
+PageLoad/svg/files/francobollo_gnome_ezechi_02.svg
+PageLoad/svg/files/gearflowers.svg
+PageLoad/svg/files/hereGear4.svg
+PageLoad/svg/files/mtsthelens.svg # Takes 22s on a MacBook Pro
+
# Do not conform to WebKit or Chromium perf test styles
-PageLoad
SunSpider
XSSAuditor
+2012-04-12 Ryosuke Niwa <rniwa@webkit.org>
+
+ Enable SVG page loading performance tests
+ https://bugs.webkit.org/show_bug.cgi?id=83861
+
+ Reviewed by Adam Barth.
+
+ Add support for page loading tests. These are tests without a test runner harness; we use
+ DriverOutput's test_time to measure the loading time.
+
+ Extracted PerfTest.run and PerfTest.run_failed from PerfTestsRunner._run_single_test.
+
+ * Scripts/webkitpy/performance_tests/perftest.py:
+ (PerfTest.run):
+ (PerfTest):
+ (PerfTest.run_failed):
+ (PerfTest.parse_output):
+ (PerfTest.output_statistics):
+ (ChromiumStylePerfTest.parse_output):
+ (PageLoadingPerfTest): Added.
+ (PageLoadingPerfTest.__init__):
+ (PageLoadingPerfTest.run): Run the driver 20 times to get the statistics for the loading time.
+ * Scripts/webkitpy/performance_tests/perftest_unittest.py: Added.
+ (MockPrinter):
+ (MockPrinter.__init__):
+ (MockPrinter.write):
+ (MainTest):
+ (MainTest.test_parse_output):
+ (MainTest.test_parse_output_with_failing_line):
+ (TestPageLoadingPerfTest):
+ (TestPageLoadingPerfTest.assertWritten):
+ (TestPageLoadingPerfTest.MockDriver):
+ (TestPageLoadingPerfTest.MockDriver.__init__):
+ (TestPageLoadingPerfTest.MockDriver.run_test):
+ (TestPageLoadingPerfTest.test_run):
+ (TestPageLoadingPerfTest.test_run_with_bad_output):
+ * Scripts/webkitpy/performance_tests/perftestsrunner.py:
+ (PerfTestsRunner):
+ (PerfTestsRunner._collect_tests._is_test_file): Now recognizes SVG tests.
+ (PerfTestsRunner._collect_tests):
+ (PerfTestsRunner._run_tests_set):
+ (PerfTestsRunner._run_single_test):
+ * Scripts/webkitpy/performance_tests/perftestsrunner_unittest.py:
+ (test_collect_tests_with_page_load_svg):
+ (test_collect_tests_with_page_load_svg.add_file):
+
2012-04-10 Brady Eidson <beidson@apple.com>
<rdar://problem/11176921> and https://bugs.webkit.org/show_bug.cgi?id=83600
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import math
import re
+from webkitpy.layout_tests.port.driver import DriverInput
+
class PerfTest(object):
def __init__(self, test_name, dirname, path_or_url):
def path_or_url(self):
return self._path_or_url
+ def run(self, driver, timeout_ms, printer, buildbot_output):
+ output = driver.run_test(DriverInput(self.path_or_url(), timeout_ms, None, False))
+ if self.run_failed(output, printer):
+ return None
+ return self.parse_output(output, printer, buildbot_output)
+
+ def run_failed(self, output, printer):
+ if output.text is None or output.error:
+ pass
+ elif output.timeout:
+ printer.write('timeout: %s' % self.test_name())
+ elif output.crash:
+ printer.write('crash: %s' % self.test_name())
+ else:
+ return False
+
+ if output.error:
+ printer.write('error: %s\n%s' % (self.test_name(), output.error))
+
+ return True
+
_lines_to_ignore_in_parser_result = [
re.compile(r'^Running \d+ times$'),
re.compile(r'^Ignoring warm-up '),
re.compile(re.escape("""frame "<!--framePath //<!--frame0-->-->" - has 1 onunload handler(s)""")),
re.compile(re.escape("""frame "<!--framePath //<!--frame0-->/<!--frame0-->-->" - has 1 onunload handler(s)"""))]
+ _statistics_keys = ['avg', 'median', 'stdev', 'min', 'max']
+
def _should_ignore_line_in_parser_test_result(self, line):
if not line:
return True
got_a_result = False
test_failed = False
results = {}
- keys = ['avg', 'median', 'stdev', 'min', 'max']
- score_regex = re.compile(r'^(?P<key>' + r'|'.join(keys) + r')\s+(?P<value>[0-9\.]+)\s*(?P<unit>.*)')
+ score_regex = re.compile(r'^(?P<key>' + r'|'.join(self._statistics_keys) + r')\s+(?P<value>[0-9\.]+)\s*(?P<unit>.*)')
unit = "ms"
for line in re.split('\n', output.text):
test_failed = True
printer.write("%s" % line)
- if test_failed or set(keys) != set(results.keys()):
+ if test_failed or set(self._statistics_keys) != set(results.keys()):
return None
results['unit'] = unit
test_name = re.sub(r'\.\w+$', '', self._test_name)
- buildbot_output.write('RESULT %s= %s %s\n' % (test_name.replace('/', ': '), results['avg'], unit))
- buildbot_output.write(', '.join(['%s= %s %s' % (key, results[key], unit) for key in keys[1:]]) + '\n')
+ self.output_statistics(test_name, results, buildbot_output)
return {test_name: results}
+ def output_statistics(self, test_name, results, buildbot_output):
+ unit = results['unit']
+ buildbot_output.write('RESULT %s= %s %s\n' % (test_name.replace('/', ': '), results['avg'], unit))
+ buildbot_output.write(', '.join(['%s= %s %s' % (key, results[key], unit) for key in self._statistics_keys[1:]]) + '\n')
+
class ChromiumStylePerfTest(PerfTest):
_chromium_style_result_regex = re.compile(r'^RESULT\s+(?P<name>[^=]+)\s*=\s+(?P<value>\d+(\.\d+)?)\s*(?P<unit>\w+)$')
test_failed = True
printer.write("%s" % line)
return results if results and not test_failed else None
+
+
+class PageLoadingPerfTest(PerfTest):
+ def __init__(self, test_name, dirname, path_or_url):
+ super(PageLoadingPerfTest, self).__init__(test_name, dirname, path_or_url)
+
+ def run(self, driver, timeout_ms, printer, buildbot_output):
+ test_times = []
+
+ for i in range(0, 20):
+ output = driver.run_test(DriverInput(self.path_or_url(), timeout_ms, None, False))
+ if self.run_failed(output, printer):
+ return None
+ test_times.append(output.test_time * 1000)
+
+ test_times = sorted(test_times)
+
+ # Compute the mean and variance using a numerically stable algorithm.
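+ # Welford's online update: each pass folds one sample into the running mean, and
+ # squareSum accumulates the sum of squared deviations from that mean; its square
+ # root is what gets reported as 'stdev' below.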
+ squareSum = 0
+ mean = 0
+ for i, time in enumerate(test_times):
+ delta = time - mean
+ sweep = i + 1.0
+ mean += delta / sweep
+ squareSum += delta * delta * (i / sweep)
+
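+ # With the samples sorted, the median is the middle element for an odd count, or
+ # the mean of the two middle elements for an even count.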
+ middle = int(len(test_times) / 2)
+ results = {'avg': mean,
+ 'min': min(test_times),
+ 'max': max(test_times),
+ 'median': test_times[middle] if len(test_times) % 2 else (test_times[middle - 1] + test_times[middle]) / 2,
+ 'stdev': math.sqrt(squareSum),
+ 'unit': 'ms'}
+ self.output_statistics(self.test_name(), results, buildbot_output)
+ return {self.test_name(): results}
--- /dev/null
+#!/usr/bin/python
+# Copyright (C) 2012 Google Inc. All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import StringIO
+import math
+import unittest
+
+from webkitpy.layout_tests.port.driver import DriverOutput
+from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
+from webkitpy.performance_tests.perftest import PageLoadingPerfTest
+from webkitpy.performance_tests.perftest import PerfTest
+
+
+class MockPrinter(object):
+ def __init__(self):
+ self.written_lines = []
+
+ def write(self, line):
+ self.written_lines.append(line)
+
+
+class MainTest(unittest.TestCase):
+ def test_parse_output(self):
+ printer = MockPrinter()
+ buildbot_output = StringIO.StringIO()
+ output = DriverOutput('\n'.join([
+ 'Running 20 times',
+ 'Ignoring warm-up run (1115)',
+ '',
+ 'avg 1100',
+ 'median 1101',
+ 'stdev 11',
+ 'min 1080',
+ 'max 1120']), image=None, image_hash=None, audio=None)
+ test = PerfTest('some-test', 'some-dir/some-test', '/path/some-dir/some-test')
+ self.assertEqual(test.parse_output(output, printer, buildbot_output),
+ {'some-test': {'avg': 1100.0, 'median': 1101.0, 'min': 1080.0, 'max': 1120.0, 'stdev': 11.0, 'unit': 'ms'}})
+ self.assertEqual(printer.written_lines, [])
+
+ def test_parse_output_with_failing_line(self):
+ printer = MockPrinter()
+ buildbot_output = StringIO.StringIO()
+ output = DriverOutput('\n'.join([
+ 'Running 20 times',
+ 'Ignoring warm-up run (1115)',
+ '',
+ 'some-unrecognizable-line',
+ '',
+ 'avg 1100',
+ 'median 1101',
+ 'stdev 11',
+ 'min 1080',
+ 'max 1120']), image=None, image_hash=None, audio=None)
+ test = PerfTest('some-test', 'some-dir/some-test', '/path/some-dir/some-test')
+ self.assertEqual(test.parse_output(output, printer, buildbot_output), None)
+ self.assertEqual(printer.written_lines, ['some-unrecognizable-line'])
+
+
+class TestPageLoadingPerfTest(unittest.TestCase):
+ def assertWritten(self, stream, contents):
+ self.assertEqual(stream.buflist, contents)
+
+ class MockDriver(object):
+ def __init__(self, values):
+ self._values = values
+ self._index = 0
+
+ def run_test(self, input):
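+ # A string entry simulates a run that produced error output; a numeric entry is the test time in seconds.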
+ value = self._values[self._index]
+ self._index += 1
+ if isinstance(value, str):
+ return DriverOutput('some output', image=None, image_hash=None, audio=None, error=value)
+ else:
+ return DriverOutput('some output', image=None, image_hash=None, audio=None, test_time=value)
+
+ def test_run(self):
+ printer = MockPrinter()
+ buildbot_output = StringIO.StringIO()
+ test = PageLoadingPerfTest('some-test', 'some-dir/some-test', '/path/some-dir/some-test')
+ driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
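+ # For test times of 1000, 2000, ..., 20000 ms the mean is 10500 ms and the sum of
+ # squared deviations is 665 * 1000 * 1000 ms^2, hence the expected stdev below.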
+ self.assertEqual(test.run(driver, None, printer, buildbot_output),
+ {'some-test': {'max': 20000, 'avg': 10500.0, 'median': 10500, 'stdev': math.sqrt(665 * 1000 * 1000), 'min': 1000, 'unit': 'ms'}})
+ self.assertEqual(printer.written_lines, [])
+ self.assertWritten(buildbot_output, ['RESULT some-test= 10500.0 ms\n', 'median= 10500 ms, stdev= 25787.5939165 ms, min= 1000 ms, max= 20000 ms\n'])
+
+ def test_run_with_bad_output(self):
+ printer = MockPrinter()
+ buildbot_output = StringIO.StringIO()
+ test = PageLoadingPerfTest('some-test', 'some-dir/some-test', '/path/some-dir/some-test')
+ driver = TestPageLoadingPerfTest.MockDriver([1, 2, 3, 4, 5, 6, 7, 'some error', 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20])
+ self.assertEqual(test.run(driver, None, printer, buildbot_output), None)
+ self.assertEqual(printer.written_lines, ['error: some-test\nsome error'])
+
+if __name__ == '__main__':
+ unittest.main()
from webkitpy.common import find_files
from webkitpy.common.host import Host
from webkitpy.common.net.file_uploader import FileUploader
-from webkitpy.layout_tests.port.driver import DriverInput
from webkitpy.layout_tests.views import printing
from webkitpy.performance_tests.perftest import ChromiumStylePerfTest
+from webkitpy.performance_tests.perftest import PageLoadingPerfTest
from webkitpy.performance_tests.perftest import PerfTest
+
_log = logging.getLogger(__name__)
class PerfTestsRunner(object):
- _test_directories_for_chromium_style_tests = ['inspector']
+ _pattern_for_chromium_style_tests = re.compile('^inspector/')
+ _pattern_for_page_loading_tests = re.compile('^PageLoad/')
_default_branch = 'webkit-trunk'
_EXIT_CODE_BAD_BUILD = -1
_EXIT_CODE_BAD_JSON = -2
"""Return the list of tests found."""
def _is_test_file(filesystem, dirname, filename):
- return filename.endswith('.html')
+ return filesystem.splitext(filename)[1] in ['.html', '.svg']
filesystem = self._host.filesystem
continue
test_name = relative_path.replace('\\', '/')
dirname = filesystem.dirname(path)
- if self._host.filesystem.dirname(relative_path) in self._test_directories_for_chromium_style_tests:
+ if self._pattern_for_chromium_style_tests.match(relative_path):
tests.append(ChromiumStylePerfTest(test_name, dirname, path))
+ elif self._pattern_for_page_loading_tests.match(relative_path):
+ tests.append(PageLoadingPerfTest(test_name, dirname, path))
else:
tests.append(PerfTest(test_name, dirname, path))
return unexpected
- _inspector_result_regex = re.compile(r'^RESULT\s+(?P<name>[^=]+)\s*=\s+(?P<value>\d+(\.\d+)?)\s*(?P<unit>\w+)$')
-
- def _process_chromium_style_test_result(self, test, output):
- test_failed = False
- got_a_result = False
- for line in re.split('\n', output.text):
- resultLine = self._inspector_result_regex.match(line)
- if resultLine:
- # FIXME: Store the unit
- self._results[resultLine.group('name').replace(' ', '')] = float(resultLine.group('value'))
- self._buildbot_output.write("%s\n" % line)
- got_a_result = True
- elif not len(line) == 0:
- test_failed = True
- self._printer.write("%s" % line)
- return test_failed or not got_a_result
-
def _run_single_test(self, test, driver):
start_time = time.time()
- output = driver.run_test(DriverInput(test.path_or_url(), self._options.time_out_ms, None, False))
- new_results = None
-
- if output.text == None:
- pass
- elif output.timeout:
- self._printer.write('timeout: %s' % test.test_name())
- elif output.crash:
- self._printer.write('crash: %s' % test.test_name())
- else:
- new_results = test.parse_output(output, self._printer, self._buildbot_output)
-
- if len(output.error):
- self._printer.write('error:\n%s' % output.error)
- new_results = None
-
+ new_results = test.run(driver, self._options.time_out_ms, self._printer, self._buildbot_output)
if new_results:
self._results.update(new_results)
else:
runner._port.skipped_perf_tests = lambda: ['inspector/unsupported_test1.html', 'unsupported']
self.assertEqual(self._collect_tests_and_sort_test_name(runner), ['inspector/test1.html', 'inspector/test2.html'])
+ def test_collect_tests_with_page_load_svg(self):
+ runner = self.create_runner()
+
+ def add_file(dirname, filename, content=True):
+ dirname = runner._host.filesystem.join(runner._base_path, dirname) if dirname else runner._base_path
+ runner._host.filesystem.maybe_make_directory(dirname)
+ runner._host.filesystem.files[runner._host.filesystem.join(dirname, filename)] = content
+
+ add_file('PageLoad', 'some-svg-test.svg')
+ tests = runner._collect_tests()
+ self.assertEqual(len(tests), 1)
+ self.assertEqual(tests[0].__class__.__name__, 'PageLoadingPerfTest')
+
def test_parse_args(self):
runner = self.create_runner()
options, args = PerfTestsRunner._parse_args([