1 # Copyright (C) 2010 Google Inc. All rights reserved.
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 from jsonresults import *
33 print "ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH"
36 from handlers import master_config
42 from google.appengine.ext import blobstore
43 from google.appengine.ext import db
44 from google.appengine.ext import testbed
46 FULL_RESULT_EXAMPLE = """ADD_RESULTS({
47 "seconds_since_epoch": 1368146629,
51 "encrypted-media-v2-events.html": {
52 "bugs": ["crbug.com/1234"],
53 "expected": "TIMEOUT",
57 "encrypted-media-v2-syntax.html": {
58 "expected": "TIMEOUT",
62 "progress-events-generated-correctly.html": {
63 "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
70 "src_removal_does_not_trigger_loadstart.html": {
79 "src_removal_does_not_trigger_loadstart.html": {
92 "unexpected-skip.html": {
96 "unexpected-fail.html": {
100 "flaky-failed.html": {
101 "expected": "PASS FAIL",
104 "media-document-audio-repaint.html": {
109 "unexpected-leak.html": {
116 "num_regressions": 0,
118 "interrupted": false,
119 "layout_tests_dir": "\/tmp\/cr\/src\/third_party\/WebKit\/LayoutTests",
121 "builder_name": "Webkit",
123 "pixel_tests_enabled": true,
124 "blink_revision": "1234",
125 "has_pretty_patch": true,
128 "num_failures_by_type": {
142 "chromium_revision": "5678"
145 JSON_RESULTS_OLD_TEMPLATE = (
146 '{"[BUILDER_NAME]":{'
147 '"allFixableCount":[[TESTDATA_COUNT]],'
148 '"blinkRevision":[[TESTDATA_WEBKITREVISION]],'
149 '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
150 '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
152 '"fixableCount":[[TESTDATA_COUNT]],'
153 '"fixableCounts":[[TESTDATA_COUNTS]],'
154 '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
155 '"tests":{[TESTDATA_TESTS]}'
157 '"version":[VERSION]'
158 '}') % json.dumps(CHAR_TO_FAILURE)
# Expected JSON object mapping each failure-type name (the values of
# CHAR_TO_FAILURE) to a "[TESTDATA_COUNT]" placeholder list.
# str.join accepts the dict view directly; no intermediate list is needed.
JSON_RESULTS_COUNTS = '{"%s":[[TESTDATA_COUNT]]}' % '":[[TESTDATA_COUNT]],"'.join(CHAR_TO_FAILURE.values())
162 JSON_RESULTS_TEMPLATE = (
163 '{"[BUILDER_NAME]":{'
164 '"blinkRevision":[[TESTDATA_WEBKITREVISION]],'
165 '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
166 '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
168 '"num_failures_by_type":%s,'
169 '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
170 '"tests":{[TESTDATA_TESTS]}'
172 '"version":[VERSION]'
173 '}') % (json.dumps(CHAR_TO_FAILURE), JSON_RESULTS_COUNTS)
# Template version of the counts object, keyed by the single-character
# failure codes (the keys of CHAR_TO_FAILURE); iterating the dict yields
# its keys, so no list comprehension is required.
JSON_RESULTS_COUNTS_TEMPLATE = '{"%s":[TESTDATA]}' % '":[TESTDATA],"'.join(CHAR_TO_FAILURE)
# Template for the test-name-list response; "{[TESTDATA_TESTS]}" is replaced
# with a JSON-encoded tests mapping (see _test_get_test_list below).
JSON_RESULTS_TEST_LIST_TEMPLATE = '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}'
180 class MockFile(object):
182 def file_information(self):
183 return "master: %s, builder: %s, test_type: %s, build_number: %r, name: %s." % (
184 self.master, self.builder, self.test_type, self.build_number, self.name)
186 def __init__(self, name='results.json', data=''):
187 self.master = 'MockMasterName'
188 self.builder = 'MockBuilderName'
189 self.test_type = 'MockTestType'
190 self.build_number = 0
194 def save(self, data):
199 class JsonResultsTest(unittest.TestCase):
201 self._builder = "Webkit"
202 self.old_log_level = logging.root.level
203 logging.root.setLevel(logging.ERROR)
206 logging.root.setLevel(self.old_log_level)
208 # Use this to get better error messages than just string compare gives.
209 def assert_json_equal(self, a, b):
211 a = json.loads(a) if isinstance(a, str) else a
212 b = json.loads(b) if isinstance(b, str) else b
213 self.assertEqual(a, b)
215 def test_strip_prefix_suffix(self):
216 json = "['contents']"
217 self.assertEqual(JsonResults._strip_prefix_suffix("ADD_RESULTS(" + json + ");"), json)
218 self.assertEqual(JsonResults._strip_prefix_suffix(json), json)
220 def _make_test_json(self, test_data, json_string=JSON_RESULTS_TEMPLATE, builder_name="Webkit"):
224 builds = test_data["builds"]
225 tests = test_data["tests"]
226 if not builds or not tests:
235 counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
236 build_numbers.append("1000%s" % build)
237 webkit_revision.append("2000%s" % build)
238 chrome_revision.append("3000%s" % build)
239 times.append("100000%s000" % build)
241 json_string = json_string.replace("[BUILDER_NAME]", builder_name)
242 json_string = json_string.replace("[TESTDATA_COUNTS]", ",".join(counts))
243 json_string = json_string.replace("[TESTDATA_COUNT]", ",".join(builds))
244 json_string = json_string.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
245 json_string = json_string.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
246 json_string = json_string.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
247 json_string = json_string.replace("[TESTDATA_TIMES]", ",".join(times))
249 version = str(test_data["version"]) if "version" in test_data else "4"
250 json_string = json_string.replace("[VERSION]", version)
251 json_string = json_string.replace("{[TESTDATA_TESTS]}", json.dumps(tests, separators=(',', ':'), sort_keys=True))
254 def _test_merge(self, aggregated_data, incremental_data, expected_data, max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
255 aggregated_results = self._make_test_json(aggregated_data, builder_name=self._builder)
256 incremental_results = self._make_test_json(incremental_data, builder_name=self._builder)
257 incremental_json, _ = JsonResults._get_incremental_json(self._builder, JsonResults._load_json(aggregated_results), is_full_results_format=False)
258 merged_results, status_code = JsonResults.merge(self._builder, aggregated_results, JsonResults._load_json(incremental_results), num_runs=max_builds, sort_keys=True)
261 expected_results = self._make_test_json(expected_data, builder_name=self._builder)
262 self.assert_json_equal(merged_results, expected_results)
263 self.assertEqual(status_code, 200)
265 self.assertTrue(status_code != 200)
267 def _test_get_test_list(self, input_data, expected_data):
268 input_results = self._make_test_json(input_data)
269 expected_results = JSON_RESULTS_TEST_LIST_TEMPLATE.replace("{[TESTDATA_TESTS]}", json.dumps(expected_data, separators=(',', ':')))
270 actual_results = JsonResults.get_test_list(self._builder, input_results)
271 self.assert_json_equal(actual_results, expected_results)
273 def test_update_files_empty_aggregate_data(self):
274 small_file = MockFile(name='results-small.json')
275 large_file = MockFile(name='results.json')
278 "builds": ["2", "1"],
281 "results": [[200, TEXT]],
286 incremental_string = self._make_test_json(incremental_data, builder_name=small_file.builder)
287 incremental_json = JsonResults._load_json(incremental_string)
289 self.assertTrue(JsonResults.update_files(small_file.builder, incremental_json, small_file, large_file, is_full_results_format=False))
290 self.assert_json_equal(small_file.data, incremental_string)
291 self.assert_json_equal(large_file.data, incremental_string)
293 def test_update_files_null_incremental_data(self):
294 small_file = MockFile(name='results-small.json')
295 large_file = MockFile(name='results.json')
298 "builds": ["2", "1"],
301 "results": [[200, TEXT]],
306 aggregated_string = self._make_test_json(aggregated_data, builder_name=small_file.builder)
308 small_file.data = large_file.data = aggregated_string
310 incremental_string = ""
312 self.assertEqual(JsonResults.update_files(small_file.builder, incremental_string, small_file, large_file, is_full_results_format=False),
313 ('No incremental JSON data to merge.', 403))
314 self.assert_json_equal(small_file.data, aggregated_string)
315 self.assert_json_equal(large_file.data, aggregated_string)
317 def test_update_files_empty_incremental_data(self):
318 small_file = MockFile(name='results-small.json')
319 large_file = MockFile(name='results.json')
322 "builds": ["2", "1"],
325 "results": [[200, TEXT]],
330 aggregated_string = self._make_test_json(aggregated_data, builder_name=small_file.builder)
332 small_file.data = large_file.data = aggregated_string
338 incremental_string = self._make_test_json(incremental_data, builder_name=small_file.builder)
340 self.assertEqual(JsonResults.update_files(small_file.builder, incremental_string, small_file, large_file, is_full_results_format=False),
341 ('No incremental JSON data to merge.', 403))
342 self.assert_json_equal(small_file.data, aggregated_string)
343 self.assert_json_equal(large_file.data, aggregated_string)
345 def test_merge_with_empty_aggregated_results(self):
347 "builds": ["2", "1"],
350 "results": [[200, TEXT]],
355 incremental_json = JsonResults._load_json(self._make_test_json(incremental_data))
356 incremental_results, _ = JsonResults._get_incremental_json(self._builder, incremental_json, is_full_results_format=False)
357 aggregated_results = ""
358 merged_results, _ = JsonResults.merge(self._builder, aggregated_results, incremental_results, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS, sort_keys=True)
359 self.assert_json_equal(merged_results, incremental_results)
361 def test_failures_by_type_added(self):
362 aggregated_results = self._make_test_json({
363 "builds": ["2", "1"],
366 "results": [[100, TEXT], [100, FAIL]],
370 }, json_string=JSON_RESULTS_OLD_TEMPLATE)
371 incremental_results = self._make_test_json({
375 "results": [[1, TEXT]],
379 }, json_string=JSON_RESULTS_OLD_TEMPLATE)
380 incremental_json, _ = JsonResults._get_incremental_json(self._builder, JsonResults._load_json(incremental_results), is_full_results_format=False)
381 merged_results, _ = JsonResults.merge(self._builder, aggregated_results, incremental_json, num_runs=201, sort_keys=True)
382 self.assert_json_equal(merged_results, self._make_test_json({
383 "builds": ["3", "2", "1"],
386 "results": [[101, TEXT], [100, FAIL]],
392 def test_merge_full_results_format(self):
393 expected_incremental_results = {
395 "blinkRevision": ["1234"],
396 "buildNumbers": ["3"],
397 "chromeRevision": ["5678"],
398 "failure_map": CHAR_TO_FAILURE,
399 "num_failures_by_type": {"AUDIO": [0], "CRASH": [3], "FAIL": [2], "IMAGE": [1], "IMAGE+TEXT": [0], "MISSING": [0], "PASS": [10], "SKIP": [2], "TEXT": [3], "TIMEOUT": [16], "LEAK": [1]},
400 "secondsSinceEpoch": [1368146629],
406 "src_removal_does_not_trigger_loadstart.html": {
407 "results": [[1, PASS]],
414 "encrypted-media-v2-events.html": {
415 "bugs": ["crbug.com/1234"],
416 "expected": "TIMEOUT",
417 "results": [[1, TIMEOUT]],
420 "encrypted-media-v2-syntax.html": {
421 "expected": "TIMEOUT",
422 "results": [[1, TIMEOUT]],
426 "media-document-audio-repaint.html": {
428 "results": [[1, IMAGE]],
431 "progress-events-generated-correctly.html": {
432 "expected": "PASS FAIL IMAGE TIMEOUT CRASH MISSING",
433 "results": [[1, TIMEOUT]],
436 "flaky-failed.html": {
437 "expected": "PASS FAIL",
438 "results": [[1, FAIL]],
441 "unexpected-fail.html": {
442 "results": [[1, FAIL]],
445 "unexpected-leak.html": {
446 "results": [[1, LEAK]],
455 aggregated_results = ""
456 incremental_json, _ = JsonResults._get_incremental_json(self._builder, JsonResults._load_json(FULL_RESULT_EXAMPLE), is_full_results_format=True)
457 merged_results, _ = JsonResults.merge("Webkit", aggregated_results, incremental_json, num_runs=jsonresults.JSON_RESULTS_MAX_BUILDS, sort_keys=True)
458 self.assert_json_equal(merged_results, expected_incremental_results)
460 def test_merge_empty_aggregated_results(self):
461 # No existing aggregated results.
462 # Merged results == new incremental results.
466 # Incremental results
467 {"builds": ["2", "1"],
468 "tests": {"001.html": {
469 "results": [[200, TEXT]],
470 "times": [[200, 0]]}}},
472 {"builds": ["2", "1"],
473 "tests": {"001.html": {
474 "results": [[200, TEXT]],
475 "times": [[200, 0]]}}})
477 def test_merge_duplicate_build_number(self):
480 {"builds": ["2", "1"],
481 "tests": {"001.html": {
482 "results": [[100, TEXT]],
483 "times": [[100, 0]]}}},
484 # Incremental results
486 "tests": {"001.html": {
487 "results": [[1, TEXT]],
488 "times": [[1, 0]]}}},
492 def test_merge_incremental_single_test_single_run_same_result(self):
493 # Incremental results has the latest build and same test results for
495 # Insert the incremental results at the first place and sum number
496 # of runs for TEXT (200 + 1) to get merged results.
499 {"builds": ["2", "1"],
500 "tests": {"001.html": {
501 "results": [[200, TEXT]],
502 "times": [[200, 0]]}}},
503 # Incremental results
505 "tests": {"001.html": {
506 "results": [[1, TEXT]],
507 "times": [[1, 0]]}}},
509 {"builds": ["3", "2", "1"],
510 "tests": {"001.html": {
511 "results": [[201, TEXT]],
512 "times": [[201, 0]]}}})
514 def test_merge_single_test_single_run_different_result(self):
515 # Incremental results has the latest build but different test results
517 # Insert the incremental results at the first place.
520 {"builds": ["2", "1"],
521 "tests": {"001.html": {
522 "results": [[200, TEXT]],
523 "times": [[200, 0]]}}},
524 # Incremental results
526 "tests": {"001.html": {
527 "results": [[1, IMAGE]],
528 "times": [[1, 1]]}}},
530 {"builds": ["3", "2", "1"],
531 "tests": {"001.html": {
532 "results": [[1, IMAGE], [200, TEXT]],
533 "times": [[1, 1], [200, 0]]}}})
535 def test_merge_single_test_single_run_result_changed(self):
536 # Incremental results has the latest build but results which differ from
537 # the latest result (but are the same as an older result).
540 {"builds": ["2", "1"],
541 "tests": {"001.html": {
542 "results": [[200, TEXT], [10, IMAGE]],
543 "times": [[200, 0], [10, 1]]}}},
544 # Incremental results
546 "tests": {"001.html": {
547 "results": [[1, IMAGE]],
548 "times": [[1, 1]]}}},
550 {"builds": ["3", "2", "1"],
551 "tests": {"001.html": {
552 "results": [[1, IMAGE], [200, TEXT], [10, IMAGE]],
553 "times": [[1, 1], [200, 0], [10, 1]]}}})
555 def test_merge_multiple_tests_single_run(self):
556 # All tests have incremental updates.
559 {"builds": ["2", "1"],
560 "tests": {"001.html": {
561 "results": [[200, TEXT]],
562 "times": [[200, 0]]},
564 "results": [[100, IMAGE]],
565 "times": [[100, 1]]}}},
566 # Incremental results
568 "tests": {"001.html": {
569 "results": [[1, TEXT]],
572 "results": [[1, IMAGE]],
573 "times": [[1, 1]]}}},
575 {"builds": ["3", "2", "1"],
576 "tests": {"001.html": {
577 "results": [[201, TEXT]],
578 "times": [[201, 0]]},
580 "results": [[101, IMAGE]],
581 "times": [[101, 1]]}}})
583 def test_merge_multiple_tests_single_run_one_no_result(self):
586 {"builds": ["2", "1"],
587 "tests": {"001.html": {
588 "results": [[200, TEXT]],
589 "times": [[200, 0]]},
591 "results": [[100, IMAGE]],
592 "times": [[100, 1]]}}},
593 # Incremental results
595 "tests": {"002.html": {
596 "results": [[1, IMAGE]],
597 "times": [[1, 1]]}}},
599 {"builds": ["3", "2", "1"],
600 "tests": {"001.html": {
601 "results": [[1, NO_DATA], [200, TEXT]],
602 "times": [[201, 0]]},
604 "results": [[101, IMAGE]],
605 "times": [[101, 1]]}}})
607 def test_merge_single_test_multiple_runs(self):
610 {"builds": ["2", "1"],
611 "tests": {"001.html": {
612 "results": [[200, TEXT]],
613 "times": [[200, 0]]}}},
614 # Incremental results
615 {"builds": ["4", "3"],
616 "tests": {"001.html": {
617 "results": [[2, IMAGE], [1, FAIL]],
618 "times": [[3, 2]]}}},
620 {"builds": ["4", "3", "2", "1"],
621 "tests": {"001.html": {
622 "results": [[1, FAIL], [2, IMAGE], [200, TEXT]],
623 "times": [[3, 2], [200, 0]]}}})
625 def test_merge_multiple_tests_multiple_runs(self):
628 {"builds": ["2", "1"],
629 "tests": {"001.html": {
630 "results": [[200, TEXT]],
631 "times": [[200, 0]]},
633 "results": [[10, IMAGE_PLUS_TEXT]],
634 "times": [[10, 0]]}}},
635 # Incremental results
636 {"builds": ["4", "3"],
637 "tests": {"001.html": {
638 "results": [[2, IMAGE]],
641 "results": [[1, CRASH]],
642 "times": [[1, 1]]}}},
644 {"builds": ["4", "3", "2", "1"],
645 "tests": {"001.html": {
646 "results": [[2, IMAGE], [200, TEXT]],
647 "times": [[2, 2], [200, 0]]},
649 "results": [[1, CRASH], [10, IMAGE_PLUS_TEXT]],
650 "times": [[1, 1], [10, 0]]}}})
652 def test_merge_incremental_result_older_build(self):
653 # Test the build in incremental results is older than the most recent
654 # build in aggregated results.
657 {"builds": ["3", "1"],
658 "tests": {"001.html": {
659 "results": [[5, TEXT]],
660 "times": [[5, 0]]}}},
661 # Incremental results
663 "tests": {"001.html": {
664 "results": [[1, TEXT]],
665 "times": [[1, 0]]}}},
666 # Expected no merge happens.
667 {"builds": ["2", "3", "1"],
668 "tests": {"001.html": {
669 "results": [[6, TEXT]],
670 "times": [[6, 0]]}}})
672 def test_merge_incremental_result_same_build(self):
673 # Test the build in incremental results is same as the build in
674 # aggregated results.
677 {"builds": ["2", "1"],
678 "tests": {"001.html": {
679 "results": [[5, TEXT]],
680 "times": [[5, 0]]}}},
681 # Incremental results
682 {"builds": ["3", "2"],
683 "tests": {"001.html": {
684 "results": [[2, TEXT]],
685 "times": [[2, 0]]}}},
686 # Expected no merge happens.
687 {"builds": ["3", "2", "2", "1"],
688 "tests": {"001.html": {
689 "results": [[7, TEXT]],
690 "times": [[7, 0]]}}})
692 def test_merge_remove_new_test(self):
695 {"builds": ["2", "1"],
696 "tests": {"001.html": {
697 "results": [[199, TEXT]],
698 "times": [[199, 0]]},
700 # Incremental results
702 "tests": {"001.html": {
703 "results": [[1, TEXT]],
706 "results": [[1, PASS]],
709 "results": [[1, NOTRUN]],
712 "results": [[1, NO_DATA]],
716 {"builds": ["3", "2", "1"],
717 "tests": {"001.html": {
718 "results": [[200, TEXT]],
719 "times": [[200, 0]]},
723 def test_merge_remove_test(self):
727 "builds": ["2", "1"],
732 "results": [[200, PASS]],
738 "results": [[10, TEXT]],
742 "results": [[190, PASS], [9, NO_DATA], [1, TEXT]],
747 # Incremental results
754 "results": [[1, PASS]],
760 "results": [[1, PASS]],
764 "results": [[1, PASS]],
771 "builds": ["3", "2", "1"],
774 "results": [[1, PASS], [10, TEXT]],
781 def test_merge_updates_expected(self):
785 "builds": ["2", "1"],
791 "results": [[200, PASS]],
797 "bugs": ["crbug.com/1234"],
799 "results": [[10, TEXT]],
804 "results": [[190, PASS], [9, NO_DATA], [1, TEXT]],
808 "results": [[199, PASS], [1, TEXT]],
813 # Incremental results
819 "results": [[1, PASS]],
823 "expected": "TIMEOUT",
824 "results": [[1, PASS]],
828 "bugs": ["crbug.com/1234"],
829 "results": [[1, PASS]],
836 "builds": ["3", "2", "1"],
839 "results": [[1, PASS], [10, TEXT]],
843 "expected": "TIMEOUT",
844 "results": [[191, PASS], [9, NO_DATA]],
848 "bugs": ["crbug.com/1234"],
849 "results": [[200, PASS]],
857 def test_merge_keep_test_with_all_pass_but_slow_time(self):
860 {"builds": ["2", "1"],
861 "tests": {"001.html": {
862 "results": [[200, PASS]],
863 "times": [[200, jsonresults.JSON_RESULTS_MIN_TIME]]},
865 "results": [[10, TEXT]],
866 "times": [[10, 0]]}}},
867 # Incremental results
869 "tests": {"001.html": {
870 "results": [[1, PASS]],
873 "results": [[1, PASS]],
874 "times": [[1, 0]]}}},
876 {"builds": ["3", "2", "1"],
877 "tests": {"001.html": {
878 "results": [[201, PASS]],
879 "times": [[1, 1], [200, jsonresults.JSON_RESULTS_MIN_TIME]]},
881 "results": [[1, PASS], [10, TEXT]],
882 "times": [[11, 0]]}}})
884 def test_merge_pruning_slow_tests_for_debug_builders(self):
885 self._builder = "MockBuilder(dbg)"
888 {"builds": ["2", "1"],
889 "tests": {"001.html": {
890 "results": [[200, PASS]],
891 "times": [[200, 3 * jsonresults.JSON_RESULTS_MIN_TIME]]},
893 "results": [[10, TEXT]],
894 "times": [[10, 0]]}}},
895 # Incremental results
897 "tests": {"001.html": {
898 "results": [[1, PASS]],
901 "results": [[1, PASS]],
904 "results": [[1, PASS]],
905 "times": [[1, jsonresults.JSON_RESULTS_MIN_TIME]]}}},
907 {"builds": ["3", "2", "1"],
908 "tests": {"001.html": {
909 "results": [[201, PASS]],
910 "times": [[1, 1], [200, 3 * jsonresults.JSON_RESULTS_MIN_TIME]]},
912 "results": [[1, PASS], [10, TEXT]],
913 "times": [[11, 0]]}}})
915 def test_merge_prune_extra_results(self):
916 # Remove items from test results and times that exceed the max number
917 # of builds to track.
918 max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS
921 {"builds": ["2", "1"],
922 "tests": {"001.html": {
923 "results": [[max_builds, TEXT], [1, IMAGE]],
924 "times": [[max_builds, 0], [1, 1]]}}},
925 # Incremental results
927 "tests": {"001.html": {
928 "results": [[1, TIMEOUT]],
929 "times": [[1, 1]]}}},
931 {"builds": ["3", "2", "1"],
932 "tests": {"001.html": {
933 "results": [[1, TIMEOUT], [max_builds, TEXT]],
934 "times": [[1, 1], [max_builds, 0]]}}})
936 def test_merge_prune_extra_results_small(self):
937 # Remove items from test results and times that exceed the max number
938 # of builds to track, using smaller threshold.
939 max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
942 {"builds": ["2", "1"],
943 "tests": {"001.html": {
944 "results": [[max_builds, TEXT], [1, IMAGE]],
945 "times": [[max_builds, 0], [1, 1]]}}},
946 # Incremental results
948 "tests": {"001.html": {
949 "results": [[1, TIMEOUT]],
950 "times": [[1, 1]]}}},
952 {"builds": ["3", "2", "1"],
953 "tests": {"001.html": {
954 "results": [[1, TIMEOUT], [max_builds, TEXT]],
955 "times": [[1, 1], [max_builds, 0]]}}},
958 def test_merge_prune_extra_results_with_new_result_of_same_type(self):
959 # Test that merging in a new result of the same type as the last result
960 # causes old results to fall off.
961 max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
964 {"builds": ["2", "1"],
965 "tests": {"001.html": {
966 "results": [[max_builds, TEXT], [1, NO_DATA]],
967 "times": [[max_builds, 0], [1, 1]]}}},
968 # Incremental results
970 "tests": {"001.html": {
971 "results": [[1, TEXT]],
972 "times": [[1, 0]]}}},
974 {"builds": ["3", "2", "1"],
975 "tests": {"001.html": {
976 "results": [[max_builds, TEXT]],
977 "times": [[max_builds, 0]]}}},
980 def test_merge_build_directory_hierarchy(self):
983 {"builds": ["2", "1"],
984 "tests": {"bar": {"baz": {
986 "results": [[25, TEXT]],
987 "times": [[25, 0]]}}},
990 "results": [[50, TEXT]],
993 "results": [[100, IMAGE]],
994 "times": [[100, 0]]}}},
996 # Incremental results
1000 "results": [[1, IMAGE]],
1001 "times": [[1, 0]]}},
1004 "results": [[1, TEXT]],
1007 "results": [[1, IMAGE]],
1008 "times": [[1, 0]]}}},
1011 {"builds": ["3", "2", "1"],
1012 "tests": {"bar": {"baz": {
1014 "results": [[1, NO_DATA], [25, TEXT]],
1015 "times": [[26, 0]]}}},
1018 "results": [[1, IMAGE]],
1019 "times": [[1, 0]]}},
1022 "results": [[51, TEXT]],
1023 "times": [[51, 0]]},
1025 "results": [[101, IMAGE]],
1026 "times": [[101, 0]]}}},
1029 # FIXME(aboxhall): Add some tests for xhtml/svg test results.
1031 def test_get_test_name_list(self):
1032 # Get test name list only. Don't include non-test-list data and
1033 # of test result details.
1034 # FIXME: This also tests a temporary bug in the data where directory-level
1035 # results have a results and times values. Once that bug is fixed,
1036 # remove this test-case and assert we don't ever hit it.
1037 self._test_get_test_list(
1039 {"builds": ["3", "2", "1"],
1042 "results": [[200, PASS]],
1043 "times": [[200, 0]]},
1044 "results": [[1, NO_DATA]],
1047 "results": [[10, TEXT]],
1048 "times": [[10, 0]]}}},
1050 {"foo": {"001.html": {}}, "002.html": {}})
1052 def test_gtest(self):
1054 # Aggregated results
1055 {"builds": ["2", "1"],
1056 "tests": {"foo.bar": {
1057 "results": [[50, TEXT]],
1058 "times": [[50, 0]]},
1060 "results": [[100, IMAGE]],
1061 "times": [[100, 0]]},
1063 "results": [[5, FAIL]],
1067 # Incremental results
1069 "tests": {"foo.bar2": {
1070 "results": [[1, IMAGE]],
1073 "results": [[1, TEXT]],
1076 "results": [[5, FAIL]],
1081 {"builds": ["3", "2", "1"],
1082 "tests": {"foo.bar": {
1083 "results": [[1, NO_DATA], [50, TEXT]],
1084 "times": [[51, 0]]},
1086 "results": [[101, IMAGE]],
1087 "times": [[101, 0]]},
1089 "results": [[1, TEXT]],
1092 "results": [[10, FAIL]],
1093 "times": [[10, 0]]},
1097 def test_deprecated_master_name(self):
1098 tb = testbed.Testbed()
1100 tb.init_datastore_v3_stub()
1101 tb.init_blobstore_stub()
1103 master = master_config.getMaster('chromium.chromiumos')
1104 builder = 'test-builder'
1105 test_type = 'test-type'
1110 'Test1.testproc1': {
1116 'build_number': '123',
1117 'version': JSON_RESULTS_HIERARCHICAL_VERSION,
1118 'builder_name': builder,
1119 'blink_revision': '12345',
1120 'seconds_since_epoch': 1406123456,
1121 'num_failures_by_type': {
1126 'chromium_revision': '67890',
1130 'Test2.testproc2': {
1136 'build_number': '456',
1137 'version': JSON_RESULTS_HIERARCHICAL_VERSION,
1138 'builder_name': builder,
1139 'blink_revision': '54321',
1140 'seconds_since_epoch': 1406654321,
1141 'num_failures_by_type': {
1146 'chromium_revision': '98765',
1150 # Upload a file using old master name
1152 # Seed results files using the old name.
1153 JsonResults.update(master['name'], builder, test_type, test_data[0], None, True)
1154 # Update results files using the new name.
1155 JsonResults.update(master['url_name'], builder, test_type, test_data[1], master['name'], True)
1156 # Verify that the file keyed by url_name contains both sets of results.
1157 files = TestFile.get_files(master['url_name'], builder, test_type, None, None, limit=3)
1158 self.assertEqual(len(files), 2)
1160 j = json.loads(f.data)
1161 self.assertItemsEqual(j[builder]['blinkRevision'], ['12345', '54321'])
1165 def test_normalize_results_with_top_level_results_key_does_not_crash(self):
1168 'results': {'foo': {'results': [(1, 'P')],
1169 'times': [(1, 1)]}},
1172 JsonResults._normalize_results(aggregated_json, 1, 2)
1174 if __name__ == '__main__':