2 # Copyright 2012 The Swarming Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 that
4 # can be found in the LICENSE file.
"""Runs the whole set of unit tests on swarm.
8 This is done in a few steps:
9 - Archive the whole directory as a single .isolated file.
10 - Create one test-specific .isolated for each test to run. The file is created
11 directly and archived manually with isolateserver.py.
12 - Trigger each of these test-specific .isolated file per OS.
13 - Get all results out of order.
28 BASE_DIR = os.path.dirname(os.path.abspath(__file__))
29 ROOT_DIR = os.path.dirname(BASE_DIR)
31 sys.path.insert(0, ROOT_DIR)
33 from utils import threading_utils
34 from utils import tools
def check_output(cmd, cwd):
  """Runs |cmd| under the current Python interpreter and returns its stdout.

  Raises subprocess.CalledProcessError if the child exits non-zero.
  """
  full_cmd = [sys.executable] + cmd
  return subprocess.check_output(full_cmd, cwd=cwd)
def capture(cmd, cwd):
  """Runs |cmd| under the current Python interpreter, capturing its stdout.

  Unlike check_output(), a non-zero exit code is not an error; it is returned
  to the caller.

  Returns:
    tuple of (exit code, stdout, duration in seconds).
  """
  # Fix: |start| was read below but never initialized in this copy of the
  # function; the timer must start before the child process is spawned.
  start = time.time()
  p = subprocess.Popen([sys.executable] + cmd, cwd=cwd, stdout=subprocess.PIPE)
  out = p.communicate()[0]
  return p.returncode, out, time.time() - start
def archive_tree(root, isolate_server):
  """Archives a whole tree and returns the hash of its .isolated file.

  Manually creates a temporary isolated file and archives it.
  """
  logging.info('archive_tree(%s)', root)
  cmd = [
      'isolateserver.py', 'archive', '--isolate-server', isolate_server, root,
  ]
  # Propagate verbosity to the child when INFO logging is enabled.
  if logging.getLogger().isEnabledFor(logging.INFO):
    cmd.append('--verbose')
  out = check_output(cmd, root)
  # isolateserver.py prints one "<hash> <path>" line per archived item; the
  # first token is the hash the docstring promises to return.
  # NOTE(review): the original return statement is missing from this excerpt;
  # this reconstruction follows the documented contract — confirm against the
  # original file.
  return out.split()[0]
def archive_isolated_triggers(cwd, isolate_server, tree_isolated, tests):
  """Creates and archives all the .isolated files for the tests at once.

  Archiving them in one batch is faster than archiving each file individually.
  Also the .isolated files can be reused across OSes, reducing the amount of
  uploads.

  Returns:
    list of (test, sha1) tuples.
  """
  # NOTE(review): several lines are missing from this excerpt — the
  # `logging.info(` opener, the `for test in tests:` loop header, the
  # `isolated = {` dict opener/closer, the `cmd = [` construction, and the
  # `try:`/`finally:` that must pair with the trailing shutil.rmtree().
  # As shown, the block is not syntactically complete; restore from the
  # original file before relying on it.
      'archive_isolated_triggers(%s, %s, %s)', cwd, tree_isolated, tests)
  # Scratch directory that holds the generated per-test .isolated files.
  tempdir = tempfile.mkdtemp(prefix='run_swarm_tests_on_swarm_')
  test_name = os.path.basename(test)
  # Creates a manual .isolated file. See
  # https://code.google.com/p/swarming/wiki/IsolatedDesign for more details.
  # Each .isolated runs one test under python and includes the whole tree.
  'command': ['python', test],
  'includes': [tree_isolated],
  v = os.path.join(tempdir, test_name + '.isolated')
  tools.write_json(v, isolated, True)
  # Batch-upload all generated files to the isolate server in one call.
  'isolateserver.py', 'archive', '--isolate-server', isolate_server,
  if logging.getLogger().isEnabledFor(logging.INFO):
    cmd.append('--verbose')
  # isolateserver.py prints one "<hash> <path>" line per uploaded file.
  items = [i.split() for i in check_output(cmd, cwd).splitlines()]
  assert len(items) == len(tests)
  # Sanity check: uploads come back in the same order as |tests|.
  items[i][1].endswith(os.path.basename(tests[i]) + '.isolated')
  for i in xrange(len(tests)))
  return zip(tests, [i[0] for i in items])
  # Presumably under a `finally:` in the original — cleans up |tempdir|
  # whether or not the upload succeeded. TODO confirm.
  shutil.rmtree(tempdir)
# NOTE(review): the `def trigger(` header line is missing from this excerpt,
# as are the `cmd = [` opener and the trailing list items (presumably the
# isolated hash positional argument and the closing bracket).
    cwd, swarm_server, isolate_server, task_name, platform, isolated_hash):
  """Triggers a specified .isolated file."""
  # Shells out to swarming.py to schedule the task on the requested OS
  # dimension under the given task name.
  '--swarming', swarm_server,
  '--isolate-server', isolate_server,
  '--dimension', 'os', platform,
  '--task-name', task_name,
  # Returns the (returncode, stdout, duration) tuple from capture().
  return capture(cmd, cwd)
def collect(cwd, swarm_server, task_name):
  # Fetches results for a previously triggered swarming task by shelling
  # out to swarming.py.
  # NOTE(review): the docstring, the `cmd = [` opener and the remaining
  # arguments are missing from this excerpt; restore from the original file.
  '--swarming', swarm_server,
  # Returns the (returncode, stdout, duration) tuple from capture().
  return capture(cmd, cwd)
class Runner(object):
  """Triggers swarming tasks and chains their result collection on a pool.

  NOTE(review): several lines are missing from this excerpt — the assignment
  target of the prefix expression in __init__, the argument list of the
  trigger() call, and the `if returncode:` failure guards in both methods.
  Restore from the original file; as shown the class is not syntactically
  complete.
  """

  def __init__(self, isolate_server, swarm_server, add_task, progress):
    self.isolate_server = isolate_server
    self.swarm_server = swarm_server
    # Callable used to enqueue follow-up work on the thread pool.
    self.add_task = add_task
    self.progress = progress
    # Unique-per-run prefix (user + timestamp); its assignment target is
    # missing from this excerpt — presumably `self.prefix = (`.
    getpass.getuser() + '-' + datetime.datetime.now().isoformat() + '-')

  def trigger(self, task_name, platform, isolated_hash):
    # Triggers the task; on success, schedules collect() as a follow-up task.
    returncode, stdout, duration = trigger(
    step_name = '%s (%3.2fs)' % (task_name, duration)
    # Failure path: report and bail out without scheduling a collect.
    line = 'Failed to trigger %s\n%s' % (step_name, stdout)
    self.progress.update_item(line, index=1)
    self.progress.update_item('Triggered %s' % step_name, index=1)
    self.add_task(0, self.collect, task_name, platform)

  def collect(self, task_name, platform):
    # Waits for the task and reports pass/fail through |progress|.
    returncode, stdout, duration = collect(
        ROOT_DIR, self.swarm_server, task_name)
    step_name = '%s (%3.2fs)' % (task_name, duration)
    # Only print the output for failures, successes are unexciting.
    self.progress.update_item(
        'Failed %s:\n%s' % (step_name, stdout), index=1)
    # Returning a tuple surfaces this task as a failure via iter_results().
    return (task_name, platform, stdout)
    self.progress.update_item('Passed %s' % step_name, index=1)
def run_swarm_tests_on_swarm(swarm_server, isolate_server, oses, tests, logs):
  """Archives, triggers swarming jobs and gets results."""
  # NOTE(review): several lines are missing from this excerpt — the
  # `start = time.time()` initialization, `failed_tests = []`, the
  # `total = ...` computation, the os.makedirs()/f.write() calls inside the
  # log-writing branch, and the `if failed_tests:` guard near the end.
  # Restore from the original file.
  # First, archive the whole tree.
  tree_isolated = archive_tree(ROOT_DIR, isolate_server)
  # Create and archive all the .isolated files.
  isolateds = archive_isolated_triggers(
      ROOT_DIR, isolate_server, tree_isolated, tests)
  logging.debug('%s', isolateds)
  print('Archival took %3.2fs' % (time.time() - start))
  # Trigger all the jobs and get results. This is parallelized in worker
  # threads; one trigger (and chained collect) per (test, OS) pair.
  runs = len(isolateds) * len(oses)
  columns = [('index', 0), ('size', total)]
  progress = threading_utils.Progress(columns)
  progress.use_cr_only = False
  with threading_utils.ThreadPoolWithProgress(
      progress, runs, runs, total) as pool:
    runner = Runner(isolate_server, swarm_server, pool.add_task, progress)
    for test_path, isolated in isolateds:
      test_name = os.path.basename(test_path).split('.')[0]
      for platform in oses:
        task_name = '%s/%s/%s' % (test_name, platform, isolated)
        pool.add_task(0, runner.trigger, task_name, platform, isolated)
    for failed_test in pool.iter_results():
      # collect() only returns test case failures.
      test_name, platform, stdout = failed_test
      failed_tests.append(test_name)
      # Write the logs as they are retrieved.
      if not os.path.isdir(logs):
        name = '%s_%s.log' % (platform, test_name.split('/', 1)[0])
        with open(os.path.join(logs, name), 'wb') as f:
  duration = time.time() - start
  print('\nCompleted in %3.2fs' % duration)
  print('Detected the following failures:')
  for test in sorted(failed_tests):
  # Non-zero (truthy) means at least one test failed.
  return bool(failed_tests)
  # NOTE(review): the enclosing `def main():` header falls in a gap of this
  # excerpt, along with a number of statements: several `parser.add_option(`
  # openers, the `if args:` guard, the `if options.verbose:` guard before
  # ISOLATE_DEBUG, the `tests = sorted(` opener, the `parser.error(` openers
  # for the --test/--os checks, and the tail of the final
  # run_swarm_tests_on_swarm() call. Restore from the original file.
  parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
  '-I', '--isolate-server',
  metavar='URL', default='',
  help='Isolate server to use')
  metavar='URL', default='',
  help='Swarming server to use')
  help='Destination where to store the failure logs (recommended)')
  parser.add_option('-o', '--os', help='Run tests only on this OS')
  '-t', '--test', action='append',
  help='Run only these test, can be specified multiple times')
  parser.add_option('-v', '--verbose', action='store_true')
  options, args = parser.parse_args()
  parser.error('Unsupported argument %s' % args)
  # Propagate debugging to the child isolate/swarming scripts.
  os.environ['ISOLATE_DEBUG'] = '1'
  if not options.isolate_server:
    parser.error('--isolate-server is required.')
  if not options.swarming:
    parser.error('--swarming is required.')
  logging.basicConfig(level=logging.DEBUG if options.verbose else logging.ERROR)
  # OS dimensions tasks are triggered on by default.
  oses = ['Linux', 'Mac', 'Windows']
  os.path.relpath(i, ROOT_DIR)
  glob.glob(os.path.join(ROOT_DIR, 'tests', '*_test.py')) +
  glob.glob(os.path.join(ROOT_DIR, 'googletest', 'tests', '*_test.py')))
  # Base names must be unique: they become task and log file names.
  valid_tests = sorted(map(os.path.basename, tests))
  assert len(valid_tests) == len(set(valid_tests)), (
      'Can\'t have 2 tests with the same base name')
  for t in options.test:
    if not t in valid_tests:
      '--test %s is unknown. Valid values are:\n%s' % (
          t, '\n'.join(' ' + i for i in valid_tests)))
  # Keep only the tests explicitly requested via --test.
  filters = tuple(os.path.sep + t for t in options.test)
  tests = [t for t in tests if t.endswith(filters)]
  if options.os not in oses:
    '--os %s is unknown. Valid values are %s' % (
        options.os, ', '.join(sorted(oses))))
  if sys.platform in ('win32', 'cygwin'):
    # If we are on Windows, don't generate the tests for Linux and Mac since
    # they use symlinks and we can't create symlinks on windows.
    if options.os != 'win32':
      print('Linux and Mac tests skipped since running on Windows.')
  return run_swarm_tests_on_swarm(
      options.isolate_server,
292 if __name__ == '__main__':