# Copyright 2011 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Contains two functions that run different test cases and the same test
case in parallel repeatedly to identify flaky tests.
"""
from __future__ import print_function

import os
import re
import subprocess
import time
# Defaults for FindShardingFlakiness().
FF_DATA_SUFFIX = '_flakies'       # appended to the test name to build data_path
FF_SLEEP_INTERVAL = 10.0          # seconds between iterations (allows clean kill)
FF_NUM_ITERATIONS = 100
FF_SUPERVISOR_ARGS = ['-r3', '--random-seed']

# Defaults for FindUnaryFlakiness().
FF_OUTPUT_SUFFIX = '_purges'      # appended to the test name to build output_path
# NOTE(review): these three are referenced by the driver code below but were
# absent from this copy of the file; values restored from upstream defaults —
# TODO confirm against the canonical tools/flakiness/find_flakiness.py.
FF_NUM_PROCS = 20                 # simultaneous processes per test case
FF_NUM_REPEATS = 10               # --gtest_repeat value per process
FF_TIMEOUT = 600                  # seconds before remaining processes are killed
31 def FindShardingFlakiness(test_path, data_path, supervisor_args):
32 """Finds flaky test cases by sharding and running a test for the specified
33 number of times. The data file is read at the beginning of each run to find
34 the last known counts and is overwritten at the end of each run with the new
35 counts. There is an optional sleep interval between each run so the script can
36 be killed without losing the data, useful for overnight (or weekend!) runs.
40 # Read a previously written data file.
41 if os.path.exists(data_path):
42 data_file = open(data_path, 'r')
43 num_runs = int(data_file.readline().split(' ')[0])
44 num_passes = int(data_file.readline().split(' ')[0])
45 for line in data_file:
47 split_line = line.split(' -> ')
48 failed_tests[split_line[0]] = int(split_line[1])
56 args = ['python', '../sharding_supervisor/sharding_supervisor.py']
57 args.extend(supervisor_args + [test_path])
58 proc = subprocess.Popen(args, stderr=subprocess.PIPE)
60 # Shard the test and collect failures.
62 line = proc.stderr.readline()
64 if proc.poll() is not None:
70 if line in failed_tests:
71 failed_tests[line] += 1
73 failed_tests[line] = 1
74 elif line.find('FAILED TESTS:') >= 0:
77 if proc.returncode == 0:
80 # Write the data file and print results.
81 data_file = open(data_path, 'w')
82 print('%i runs' % num_runs)
83 data_file.write('%i runs\n' % num_runs)
84 print('%i passes' % num_passes)
85 data_file.write('%i passes\n' % num_passes)
86 for (test, count) in failed_tests.iteritems():
87 print('%s -> %i' % (test, count))
88 data_file.write('%s -> %i\n' % (test, count))
def FindUnaryFlakiness(test_path, output_path, num_procs, num_repeats, timeout):
  """Runs all the test cases in a given test in parallel with itself, to get
  at those that hold on to shared resources. The idea is that if a test uses
  a unary resource, then running many instances of this test will purge out
  some of them as failures or timeouts.

  Args:
    test_path: path of the gtest binary to run.
    output_path: file to write the per-test failure summary into.
    num_procs: number of simultaneous processes per test case.
    num_repeats: --gtest_repeat value passed to each process.
    timeout: seconds to wait before terminating remaining processes.
  """
  # Matches gtest '[ RUN      ] Suite.Test' output lines; optionally
  # 'Prefix/Suite.Test/0' for parameterized tests. Raw strings so the regex
  # escapes are not mangled by Python 3 invalid-escape handling.
  test_name_regex = r'((\w+/)?\w+\.\w+(/\d+)?)'
  test_start = re.compile(r'\[\s+RUN\s+\] ' + test_name_regex)
  test_list = []

  # Run the test once to discover all the test cases.
  proc = subprocess.Popen([test_path], stdout=subprocess.PIPE,
                          universal_newlines=True)
  while True:
    line = proc.stdout.readline()
    if not line:
      if proc.poll() is not None:
        break
      continue
    results = test_start.search(line)
    if results:
      test_list.append(results.group(1))

  failures = []
  total = len(test_list)

  # Run each test case in parallel with itself.
  for index, test_name in enumerate(test_list, 1):
    num_fails = 0
    num_terminated = 0
    procs = []
    args = [test_path, '--gtest_filter=' + test_name,
            '--gtest_repeat=%i' % num_repeats]
    while len(procs) < num_procs:
      procs.append(subprocess.Popen(args))

    seconds = 0
    while procs:
      # Collect finished processes into a fresh list instead of removing
      # entries from `procs` while iterating it.
      still_running = []
      for proc in procs:
        if proc.poll() is not None:
          if proc.returncode != 0:
            num_fails += 1
        else:
          still_running.append(proc)
      procs = still_running
      # Timeout exceeded, kill the remaining processes and make a note.
      if procs and seconds > timeout:
        num_fails += len(procs)
        num_terminated = len(procs)
        while procs:
          procs.pop().terminate()
      if procs:
        time.sleep(1.0)
        seconds += 1

    if num_fails:
      line = '%s: %i failed' % (test_name, num_fails)
      if num_terminated:
        line += ' (%i terminated)' % num_terminated
      failures.append(line)
    print('%s (%i / %i): %i failed' % (test_name, index, total, num_fails))

  # Print the results and write the data file.
  with open(output_path, 'w') as data_file:
    for line in failures:
      data_file.write(line + '\n')
163 parser.error('You must specify a path to test!')
164 if not os.path.exists(args[0]):
165 parser.error('%s does not exist!' % args[0])
167 data_path = os.path.basename(args[0]) + FF_DATA_SUFFIX
168 output_path = os.path.basename(args[0]) + FF_OUTPUT_SUFFIX
170 for i in range(FF_NUM_ITERATIONS):
171 FindShardingFlakiness(args[0], data_path, FF_SUPERVISOR_ARGS)
172 print('That was just iteration %i of %i.' % (i + 1, FF_NUM_ITERATIONS))
173 time.sleep(FF_SLEEP_INTERVAL)
176 args[0], output_path, FF_NUM_PROCS, FF_NUM_REPEATS, FF_TIMEOUT)
179 if __name__ == '__main__':