1 # Copyright (C) 2010 Google Inc. All rights reserved.
3 # Redistribution and use in source and binary forms, with or without
4 # modification, are permitted provided that the following conditions are
7 # * Redistributions of source code must retain the above copyright
8 # notice, this list of conditions and the following disclaimer.
9 # * Redistributions in binary form must reproduce the above
10 # copyright notice, this list of conditions and the following disclaimer
11 # in the documentation and/or other materials provided with the
13 # * Neither the name of Google Inc. nor the names of its
14 # contributors may be used to endorse or promote products derived from
15 # this software without specific prior written permission.
17 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
18 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
19 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
20 # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
21 # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
22 # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
23 # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
24 # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
25 # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
26 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 from jsonresults import JsonResults
33 print "ERROR: Add the TestResultServer, google_appengine and yaml/lib directories to your PYTHONPATH"
36 from django.utils import simplejson
# Module-level JSON string templates used by JsonResultsTest._make_test_json to
# fabricate aggregated/incremental results documents. Placeholders of the form
# [TESTDATA_*] / [VERSION] are substituted via str.replace before comparison.
# NOTE(review): this excerpt appears truncated — the embedded original line
# numbers jump (e.g. 52 -> 57), so the opening '{"[BUILDER]":{' prefix of
# JSON_RESULTS_TEMPLATE and the body of JSON_RESULTS_COUNTS_TEMPLATE are
# missing here; do not assume these literals are complete. TODO confirm
# against the original jsonresults_unittest.py.
41 JSON_RESULTS_TEMPLATE = (
43 '"allFixableCount":[[TESTDATA_COUNT]],'
44 '"buildNumbers":[[TESTDATA_BUILDNUMBERS]],'
45 '"chromeRevision":[[TESTDATA_CHROMEREVISION]],'
46 '"deferredCounts":[[TESTDATA_COUNTS]],'
47 '"fixableCount":[[TESTDATA_COUNT]],'
48 '"fixableCounts":[[TESTDATA_COUNTS]],'
49 '"secondsSinceEpoch":[[TESTDATA_TIMES]],'
50 '"tests":{[TESTDATA_TESTS]},'
51 '"webkitRevision":[[TESTDATA_WEBKITREVISION]],'
52 '"wontfixCounts":[[TESTDATA_COUNTS]]'
# Per-build counts fragment; [TESTDATA] is replaced with the build id.
# NOTE(review): the template body (lines 58-66 of the original) is elided here.
57 JSON_RESULTS_COUNTS_TEMPLATE = (
# One directory-level entry: directory name mapped to its nested test data.
68 JSON_RESULTS_DIRECTORY_TEMPLATE = '[[TESTDATA_DIRECTORY]]:{[TESTDATA_DATA]}'
# One leaf test entry: test name mapped to its "results" and "times" run-length
# encoded arrays.
70 JSON_RESULTS_TESTS_TEMPLATE = (
71 '[[TESTDATA_TEST_NAME]]:{'
72 '"results":[[TESTDATA_TEST_RESULTS]],'
73 '"times":[[TESTDATA_TEST_TIMES]]}')
# Expected shape of JsonResults.get_test_list output for the "Webkit" builder.
75 JSON_RESULTS_TEST_LIST_TEMPLATE = (
76 '{"Webkit":{"tests":{[TESTDATA_TESTS]}}}')
# Unit tests for JsonResults.merge / JsonResults.get_test_list. Test data is
# expressed as Python dicts ({"builds": [...], "tests": {...}}) and rendered to
# JSON strings via _make_test_json before being fed to the code under test.
# Results/times use run-length encoding: [[count, value], ...].
# NOTE(review): this excerpt is heavily elided — the embedded original line
# numbers skip throughout, so method bodies below are missing lines (e.g. the
# "def setUp" line, the "self._test_merge(" call lines, and many closing dict
# fragments). Comments below describe intent only where the visible code or
# its original comments establish it; confirm against the complete file.
79 class JsonResultsTest(unittest.TestCase):
# setUp fragment: fixes the builder name used by all tests.
# NOTE(review): the enclosing "def setUp(self):" line is elided in this excerpt.
81 self._builder = "Webkit"
# _strip_prefix_suffix should remove the ADD_RESULTS(...); JSONP wrapper and
# leave unwrapped input untouched.
# NOTE(review): the line defining the local "json" fixture is elided here.
83 def test_strip_prefix_suffix(self):
85 self.assertEqual(JsonResults._strip_prefix_suffix("ADD_RESULTS(" + json + ");"), json)
86 self.assertEqual(JsonResults._strip_prefix_suffix(json), json)
# Render a test-data dict into a results-JSON string by substituting the
# module-level templates. Returns a falsy value when builds or tests is empty
# (the truncated "if not builds or not tests:" branch presumably returns "" —
# TODO confirm; its body is elided).
88 def _make_test_json(self, test_data):
92 builds = test_data["builds"]
93 tests = test_data["tests"]
94 if not builds or not tests:
97 json = JSON_RESULTS_TEMPLATE
# Per-build synthetic metadata: build numbers, revisions and timestamps are
# derived from the build id string so expected output is deterministic.
# NOTE(review): the "for build in builds:" loop header and the lists'
# initializations are elided in this excerpt.
105 counts.append(JSON_RESULTS_COUNTS_TEMPLATE.replace("[TESTDATA]", build))
106 build_numbers.append("1000%s" % build)
107 webkit_revision.append("2000%s" % build)
108 chrome_revision.append("3000%s" % build)
109 times.append("100000%s000" % build)
111 json = json.replace("[TESTDATA_COUNTS]", ",".join(counts))
112 json = json.replace("[TESTDATA_COUNT]", ",".join(builds))
113 json = json.replace("[TESTDATA_BUILDNUMBERS]", ",".join(build_numbers))
114 json = json.replace("[TESTDATA_WEBKITREVISION]", ",".join(webkit_revision))
115 json = json.replace("[TESTDATA_CHROMEREVISION]", ",".join(chrome_revision))
116 json = json.replace("[TESTDATA_TIMES]", ",".join(times))
# Default results-format version is "4" unless the test data overrides it.
118 version = str(test_data["version"]) if "version" in test_data else "4"
119 json = json.replace("[VERSION]", version)
# Tests dict is serialized compactly and sorted so string comparison is stable.
120 json = json.replace("{[TESTDATA_TESTS]}", simplejson.dumps(tests, separators=(',', ':'), sort_keys=True))
# Merge helper: renders all three data dicts to JSON, runs JsonResults.merge,
# and asserts the merged output equals the expected JSON; when expected_data
# is falsy the merge result must be falsy too (the branching "if/else" lines
# around the two assertions are elided in this excerpt).
123 def _test_merge(self, aggregated_data, incremental_data, expected_data, max_builds=jsonresults.JSON_RESULTS_MAX_BUILDS):
124 aggregated_results = self._make_test_json(aggregated_data)
125 incremental_results = self._make_test_json(incremental_data)
126 merged_results = JsonResults.merge(self._builder, aggregated_results, incremental_results, max_builds, sort_keys=True)
129 expected_results = self._make_test_json(expected_data)
130 self.assertEquals(merged_results, expected_results)
132 self.assertFalse(merged_results)
# get_test_list helper: the returned JSON must contain only test names (no
# result details), matching JSON_RESULTS_TEST_LIST_TEMPLATE.
134 def _test_get_test_list(self, input_data, expected_data):
135 input_results = self._make_test_json(input_data)
136 expected_results = JSON_RESULTS_TEST_LIST_TEMPLATE.replace("{[TESTDATA_TESTS]}", simplejson.dumps(expected_data, separators=(',', ':')))
137 actual_results = JsonResults.get_test_list(self._builder, input_results)
138 self.assertEquals(actual_results, expected_results)
# NOTE(review): in each test below, the "self._test_merge(" call line and some
# argument/closing lines are elided; the dict fragments shown are the
# aggregated / incremental / expected payloads in that order.
140 def test_merge_null_incremental_results(self):
141 # Empty incremental results json.
145 {"builds": ["2", "1"],
146 "tests": {"001.html": {
147 "results": [[200,"F"]],
148 "times": [[200,0]]}}},
149 # Incremental results
151 # Expect no merge happens.
154 def test_merge_empty_incremental_results(self):
155 # No actual incremental test results (only prefix and suffix) to merge.
159 {"builds": ["2", "1"],
160 "tests": {"001.html": {
161 "results": [[200,"F"]],
162 "times": [[200,0]]}}},
163 # Incremental results
166 # Expected no merge happens.
169 def test_merge_empty_aggregated_results(self):
170 # No existing aggregated results.
171 # Merged results == new incremental results.
175 # Incremental results
177 {"builds": ["2", "1"],
178 "tests": {"001.html": {
179 "results": [[200,"F"]],
180 "times": [[200,0]]}}},
182 {"builds": ["2", "1"],
183 "tests": {"001.html": {
184 "results": [[200,"F"]],
185 "times": [[200,0]]}}})
187 def test_merge_incremental_single_test_single_run_same_result(self):
188 # Incremental results has the latest build and same test results for
190 # Insert the incremental results at the first place and sum number
191 # of runs for "F" (200 + 1) to get merged results.
194 {"builds": ["2", "1"],
195 "tests": {"001.html": {
196 "results": [[200,"F"]],
197 "times": [[200,0]]}}},
198 # Incremental results
200 "tests": {"001.html": {
201 "results": [[1,"F"]],
204 {"builds": ["3", "2", "1"],
205 "tests": {"001.html": {
206 "results": [[201,"F"]],
207 "times": [[201,0]]}}})
209 def test_merge_single_test_single_run_different_result(self):
210 # Incremental results has the latest build but different test results
212 # Insert the incremental results at the first place.
215 {"builds": ["2", "1"],
216 "tests": {"001.html": {
217 "results": [[200,"F"]],
218 "times": [[200,0]]}}},
219 # Incremental results
221 "tests": {"001.html": {
222 "results": [[1, "I"]],
225 {"builds": ["3", "2", "1"],
226 "tests": {"001.html": {
227 "results": [[1,"I"],[200,"F"]],
228 "times": [[1,1],[200,0]]}}})
230 def test_merge_single_test_single_run_result_changed(self):
231 # Incremental results has the latest build but results which differ from
232 # the latest result (but are the same as an older result).
235 {"builds": ["2", "1"],
236 "tests": {"001.html": {
237 "results": [[200,"F"],[10,"I"]],
238 "times": [[200,0],[10,1]]}}},
239 # Incremental results
241 "tests": {"001.html": {
242 "results": [[1,"I"]],
245 {"builds": ["3", "2", "1"],
246 "tests": {"001.html": {
247 "results": [[1,"I"],[200,"F"],[10,"I"]],
248 "times": [[1,1],[200,0],[10,1]]}}})
250 def test_merge_multiple_tests_single_run(self):
251 # All tests have incremental updates.
254 {"builds": ["2", "1"],
255 "tests": {"001.html": {
256 "results": [[200,"F"]],
259 "results": [[100,"I"]],
260 "times": [[100,1]]}}},
261 # Incremental results
263 "tests": {"001.html": {
264 "results": [[1,"F"]],
267 "results": [[1,"I"]],
270 {"builds": ["3", "2", "1"],
271 "tests": {"001.html": {
272 "results": [[201,"F"]],
275 "results": [[101,"I"]],
276 "times": [[101,1]]}}})
278 def test_merge_multiple_tests_single_run_one_no_result(self):
281 {"builds": ["2", "1"],
282 "tests": {"001.html": {
283 "results": [[200,"F"]],
286 "results": [[100,"I"]],
287 "times": [[100,1]]}}},
288 # Incremental results
290 "tests": {"002.html": {
291 "results": [[1,"I"]],
# A test absent from the incremental run gets an "N" (no data) entry prepended.
294 {"builds": ["3", "2", "1"],
295 "tests": {"001.html": {
296 "results": [[1,"N"],[200,"F"]],
299 "results": [[101,"I"]],
300 "times": [[101,1]]}}})
302 def test_merge_single_test_multiple_runs(self):
305 {"builds": ["2", "1"],
306 "tests": {"001.html": {
307 "results": [[200,"F"]],
308 "times": [[200,0]]}}},
309 # Incremental results
310 {"builds": ["4", "3"],
311 "tests": {"001.html": {
312 "results": [[2, "I"]],
315 {"builds": ["4", "3", "2", "1"],
316 "tests": {"001.html": {
317 "results": [[2,"I"],[200,"F"]],
318 "times": [[2,2],[200,0]]}}})
320 def test_merge_multiple_tests_multiple_runs(self):
323 {"builds": ["2", "1"],
324 "tests": {"001.html": {
325 "results": [[200,"F"]],
328 "results": [[10,"Z"]],
329 "times": [[10,0]]}}},
330 # Incremental results
331 {"builds": ["4", "3"],
332 "tests": {"001.html": {
333 "results": [[2, "I"]],
336 "results": [[1,"C"]],
339 {"builds": ["4", "3", "2", "1"],
340 "tests": {"001.html": {
341 "results": [[2,"I"],[200,"F"]],
342 "times": [[2,2],[200,0]]},
344 "results": [[1,"C"],[10,"Z"]],
345 "times": [[1,1],[10,0]]}}})
347 def test_merge_incremental_result_older_build(self):
348 # Test the build in incremental results is older than the most recent
349 # build in aggregated results.
352 {"builds": ["3", "1"],
353 "tests": {"001.html": {
354 "results": [[5,"F"]],
356 # Incremental results
358 "tests": {"001.html": {
359 "results": [[1, "F"]],
361 # Expected no merge happens.
362 {"builds": ["2", "3", "1"],
363 "tests": {"001.html": {
364 "results": [[6,"F"]],
367 def test_merge_incremental_result_same_build(self):
368 # Test the build in incremental results is same as the build in
369 # aggregated results.
372 {"builds": ["2", "1"],
373 "tests": {"001.html": {
374 "results": [[5,"F"]],
376 # Incremental results
377 {"builds": ["3", "2"],
378 "tests": {"001.html": {
379 "results": [[2, "F"]],
381 # Expected no merge happens.
382 {"builds": ["3", "2", "2", "1"],
383 "tests": {"001.html": {
384 "results": [[7,"F"]],
387 def test_merge_remove_test_with_no_data(self):
388 # Remove test where there is no data in all runs.
391 {"builds": ["2", "1"],
392 "tests": {"001.html": {
393 "results": [[200,"N"]],
396 "results": [[10,"F"]],
397 "times": [[10,0]]}}},
398 # Incremental results
400 "tests": {"001.html": {
401 "results": [[1,"N"]],
404 "results": [[1,"P"]],
# 001.html is all-"N" across runs, so it is dropped from the merged output.
407 {"builds": ["3", "2", "1"],
408 "tests": {"002.html": {
409 "results": [[1,"P"],[10,"F"]],
410 "times": [[11,0]]}}})
412 def test_merge_remove_test_with_all_pass(self):
413 # Remove test where all run pass and max running time < 1 seconds
416 {"builds": ["2", "1"],
417 "tests": {"001.html": {
418 "results": [[200,"P"]],
421 "results": [[10,"F"]],
422 "times": [[10,0]]}}},
423 # Incremental results
425 "tests": {"001.html": {
426 "results": [[1,"P"]],
429 "results": [[1,"P"]],
432 {"builds": ["3", "2", "1"],
433 "tests": {"002.html": {
434 "results": [[1,"P"],[10,"F"]],
435 "times": [[11,0]]}}})
437 def test_merge_keep_test_with_all_pass_but_slow_time(self):
438 # Do not remove test where all run pass but max running time >= 1 seconds
441 {"builds": ["2", "1"],
442 "tests": {"001.html": {
443 "results": [[200,"P"]],
446 "results": [[10,"F"]],
447 "times": [[10,0]]}}},
448 # Incremental results
450 "tests": {"001.html": {
451 "results": [[1,"P"]],
454 "results": [[1,"P"]],
457 {"builds": ["3", "2", "1"],
458 "tests": {"001.html": {
459 "results": [[201,"P"]],
460 "times": [[1,1],[200,0]]},
462 "results": [[1,"P"],[10,"F"]],
463 "times": [[11,0]]}}})
465 def test_merge_prune_extra_results(self):
466 # Remove items from test results and times that exceed the max number
467 # of builds to track.
468 max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS
471 {"builds": ["2", "1"],
472 "tests": {"001.html": {
473 "results": [[max_builds,"F"],[1,"I"]],
474 "times": [[max_builds,0],[1,1]]}}},
475 # Incremental results
477 "tests": {"001.html": {
478 "results": [[1,"T"]],
481 {"builds": ["3", "2", "1"],
482 "tests": {"001.html": {
483 "results": [[1,"T"],[max_builds,"F"]],
484 "times": [[1,1],[max_builds,0]]}}})
486 def test_merge_prune_extra_results_small(self):
487 # Remove items from test results and times that exceed the max number
488 # of builds to track, using smaller threshold.
489 max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
492 {"builds": ["2", "1"],
493 "tests": {"001.html": {
494 "results": [[max_builds,"F"],[1,"I"]],
495 "times": [[max_builds,0],[1,1]]}}},
496 # Incremental results
498 "tests": {"001.html": {
499 "results": [[1,"T"]],
502 {"builds": ["3", "2", "1"],
503 "tests": {"001.html": {
504 "results": [[1,"T"],[max_builds,"F"]],
505 "times": [[1,1],[max_builds,0]]}}},
508 def test_merge_prune_extra_results_with_new_result_of_same_type(self):
509 # Test that merging in a new result of the same type as the last result
510 # causes old results to fall off.
511 max_builds = jsonresults.JSON_RESULTS_MAX_BUILDS_SMALL
514 {"builds": ["2", "1"],
515 "tests": {"001.html": {
516 "results": [[max_builds,"F"],[1,"N"]],
517 "times": [[max_builds,0],[1,1]]}}},
518 # Incremental results
520 "tests": {"001.html": {
521 "results": [[1,"F"]],
524 {"builds": ["3", "2", "1"],
525 "tests": {"001.html": {
526 "results": [[max_builds,"F"]],
527 "times": [[max_builds,0]]}}},
# Old flat "dir/test.html" keys should merge against the new nested hierarchy.
530 def test_merge_build_directory_hierarchy_old_version(self):
533 {"builds": ["2", "1"],
534 "tests": {"bar/003.html": {
535 "results": [[25,"F"]],
538 "results": [[50,"F"]],
541 "results": [[100,"I"]],
542 "times": [[100,0]]}},
544 # Incremental results
548 "results": [[1,"I"]],
552 "results": [[1,"F"]],
555 "results": [[1,"I"]],
559 {"builds": ["3", "2", "1"],
562 "results": [[1,"N"],[25,"F"]],
566 "results": [[1,"I"]],
570 "results": [[51,"F"]],
573 "results": [[101,"I"]],
574 "times": [[101,0]]}}},
577 # FIXME: Some data got corrupted and has results and times at the directory level.
578 # Once we've purged this from all the data, we should throw an error on this case.
579 def test_merge_directory_hierarchy_extra_results_and_times(self):
582 {"builds": ["2", "1"],
585 "results": [[25,"F"]],
587 "results": [[25,"F"]],
589 # Incremental results
593 "results": [[1,"F"]],
594 "times": [[1,0]]}}}},
596 {"builds": ["3", "2", "1"],
599 "results": [[26,"F"]],
600 "times": [[26,0]]}}},
603 def test_merge_build_directory_hierarchy(self):
606 {"builds": ["2", "1"],
607 "tests": {"bar": {"baz": {
609 "results": [[25,"F"]],
610 "times": [[25,0]]}}},
613 "results": [[50,"F"]],
616 "results": [[100,"I"]],
617 "times": [[100,0]]}}},
619 # Incremental results
623 "results": [[1,"I"]],
627 "results": [[1,"F"]],
630 "results": [[1,"I"]],
634 {"builds": ["3", "2", "1"],
635 "tests": {"bar": {"baz": {
637 "results": [[1,"N"],[25,"F"]],
638 "times": [[26,0]]}}},
641 "results": [[1,"I"]],
645 "results": [[51,"F"]],
648 "results": [[101,"I"]],
649 "times": [[101,0]]}}},
652 # FIXME(aboxhall): Add some tests for xhtml/svg test results.
654 def test_get_test_name_list(self):
655 # Get test name list only. Don't include non-test-list data and
656 # of test result details.
657 # FIXME: This also tests a temporary bug in the data where directory-level
658 # results have a results and times values. Once that bug is fixed,
659 # remove this test-case and assert we don't ever hit it.
660 self._test_get_test_list(
662 {"builds": ["3", "2", "1"],
665 "results": [[200,"P"]],
667 "results": [[1,"N"]],
670 "results": [[10,"F"]],
671 "times": [[10,0]]}}},
673 {"foo": {"001.html":{}}, "002.html":{}})
# gtest modifier prefixes (DISABLED_, FLAKY_, FAILS_, ...) on incremental test
# names should be stripped/normalized when merging — presumably per the gtest
# naming convention; confirm against JsonResults implementation.
675 def test_remove_gtest_modifiers(self):
678 {"builds": ["2", "1"],
679 "tests": {"foo.bar": {
680 "results": [[50,"F"]],
683 "results": [[100,"I"]],
686 "results": [[100,"I"]],
690 # Incremental results
692 "tests": {"foo.DISABLED_bar": {
693 "results": [[1,"F"]],
696 "results": [[1,"N"]],
699 "results": [[1,"I"]],
702 "results": [[1,"N"]],
705 "results": [[1,"I"]],
708 "results": [[1,"I"]],
712 {"builds": ["3", "2", "1"],
713 "tests": {"foo.FAILS_bar3": {
714 "results": [[1,"N"],[100,"I"]],
717 "results": [[51,"F"]],
720 "results": [[101,"I"]],
723 "results": [[1,"I"]],
726 "results": [[1,"I"]],
# Script entry point guard.
# NOTE(review): the guarded suite is elided in this excerpt — presumably
# "unittest.main()" on the following original line; confirm against the
# complete file before running.
730 if __name__ == '__main__':