2 # Copyright 2013 The Swarming Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 that
4 # can be found in the LICENSE file.
6 """Traces each test cases of a google-test executable individually.
8 Gives detailed information about each test case. The logs can be read afterward
9 with ./trace_inputs.py read -l /path/to/executable.logs
13 import multiprocessing
18 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
19 if not ROOT_DIR in sys.path:
20 sys.path.insert(0, ROOT_DIR)
23 from googletest import run_test_cases
24 from utils import threading_utils
25 from utils import tools
# Maps a gtest test case name to a filesystem-safe trace-file name by
# replacing '/' (which appears in parameterized/typed test names) with '-'.
# NOTE(review): SOURCE is sampled; the docstring's closing lines (orig. 30-31)
# fall in an elided span.
28 def sanitize_test_case_name(test_case):
29 """Removes characters that are valid as test case names but invalid as file
32 return test_case.replace('/', '-')
# Methods of class Tracer (the `class` statement itself falls in an elided
# span). Binds a trace_inputs tracer, the command line to run, its working
# directory, and a progress reporter; map() is handed to a thread pool so
# each test case is traced on a worker thread.
36 def __init__(self, tracer, cmd, cwd_dir, progress):
# NOTE(review): the self.tracer / self.cmd assignments (orig. 37-39) are
# elided; map() reads self.tracer, self.cwd_dir and self.progress below.
40 self.cwd_dir = cwd_dir
41 self.progress = progress
43 def map(self, test_case):
44 """Traces a single test case and returns its output."""
# `cmd` is presumably a per-call copy of self.cmd built in the elided line 45
# — TODO confirm; appending the filter here restricts the run to one case.
46 cmd.append('--gtest_filter=%s' % test_case)
47 tracename = sanitize_test_case_name(test_case)
# Retry the trace up to 5 times; a reliable validity check on the produced
# strace log is still an open TODO (below).
50 for retry in range(5):
52 returncode, output = self.tracer.trace(cmd, self.cwd_dir, tracename, True)
53 duration = time.time() - start
54 # TODO(maruel): Define a way to detect if a strace log is valid.
# Per-attempt result record (construction partly elided, orig. 55-57).
58 'test_case': test_case,
59 'tracename': tracename,
60 'returncode': returncode,
66 'Tracing %s done: %d, %.1fs' % (test_case, returncode, duration))
# On retry the progress item carries the retry count; `size=int(not valid)`
# grows the expected total by one when this attempt's trace was not valid.
68 self.progress.update_item(
69 '%s - %d' % (test_case, retry), index=1, size=int(not valid))
71 self.progress.update_item(test_case, index=1, size=int(not valid))
# Fans every test case of the gtest binary out to a thread pool, tracing each
# one individually into `logname`, and returns the pool's collected results.
77 def trace_test_cases(cmd, cwd_dir, test_cases, jobs, logname):
78 """Traces each test cases individually but all in parallel."""
79 assert os.path.isabs(cwd_dir) and os.path.isdir(cwd_dir), cwd_dir
84 # Resolve any symlink.
85 cwd_dir = os.path.realpath(cwd_dir)
86 assert os.path.isdir(cwd_dir)
88 api = trace_inputs.get_api()
# Drop any stale trace log from a previous run before starting.
89 api.clean_trace(logname)
# Default the parallelism to the machine's CPU count when jobs is falsy.
91 jobs = jobs or multiprocessing.cpu_count()
92 # Try to do black magic here by guessing a few of the run_test_cases.py
93 # flags. It's cheezy but it works.
94 for i, v in enumerate(cmd):
95 if v.endswith('run_test_cases.py'):
96 # Found it. Process the arguments here.
97 _, options, _ = run_test_cases.process_args(cmd[i:])
98 # Always override with the lowest value.
99 jobs = min(options.jobs, jobs)
# Progress shows a running index against the total number of test cases.
102 columns = [('index', 0), ('size', len(test_cases))]
103 progress = threading_utils.Progress(columns)
104 with threading_utils.ThreadPoolWithProgress(
105 progress, jobs, jobs, len(test_cases)) as pool:
106 with api.get_tracer(logname) as tracer:
# One shared Tracer; its bound map() is the task executed per test case.
107 function = Tracer(tracer, cmd, cwd_dir, progress).map
108 for test_case in test_cases:
109 pool.add_task(0, function, test_case)
# join() blocks until all tasks finish and yields their map() results.
111 results = pool.join()
# Merges the per-test-case tracing results with the parsed trace logs and
# writes the combined details to `outfile` (body partly elided in this view).
116 def write_details(logname, outfile, root_dir, blacklist, results):
117 """Writes an .test_cases file with all the information about each test
120 api = trace_inputs.get_api()
# Index the parsed log entries by trace name; the dict(...) wrapper around
# this generator falls in an elided line (orig. 121).
122 (i.pop('trace'), i) for i in api.parse_log(logname, blacklist, None))
123 results_processed = {}
125 for items in results:
129 log_dict = logs[item['tracename']]
# Remember only the first exception seen; it is re-raised after the loop.
130 if log_dict.get('exception'):
131 exception = exception or log_dict['exception']
133 trace_result = log_dict['results']
# root_dir is presumably optional — the guarding `if` (orig. 134) is elided;
# when set, paths are rewritten relative to it. TODO confirm.
135 trace_result = trace_result.strip_root(root_dir)
136 results_processed[item['test_case']] = {
137 'trace': trace_result.flatten(),
138 'duration': item['duration'],
139 'output': item['output'],
140 'returncode': item['returncode'],
143 # Make it dense if there is more than 20 results.
147 len(results_processed) > 20)
# Re-raise the deferred (type, value, traceback) triple with its original
# traceback — Python 2 three-expression raise syntax.
149 raise exception[0], exception[1], exception[2]
# Body of main() — the `def main()` line (orig. 152) falls in an elided span.
# Parses CLI options, resolves the traced executable, then runs
# trace_test_cases() and write_details().
153 """CLI frontend to validate arguments."""
154 tools.disable_buffering()
155 parser = run_test_cases.OptionParserTestCases(
156 usage='%prog <options> [gtest]')
# Keep the raw description instead of optparse's re-wrapped paragraph.
157 parser.format_description = lambda *_: parser.description
160 help='output file, defaults to <executable>.test_cases')
163 help='Root directory under which file access should be noted')
165 '--trace-blacklist', action='append', default=[],
166 help='List of regexp to use as blacklist filter')
167 # TODO(maruel): Add support for options.timeout.
168 parser.remove_option('--timeout')
169 options, args = parser.parse_args()
173 'Please provide the executable line to run, if you need fancy things '
174 'like xvfb, start this script from *inside* xvfb, it\'ll be much faster'
# Normalize the command line: explicit python invocation, absolute exe path.
177 cmd = tools.fix_python_path(args)
178 cmd[0] = os.path.abspath(cmd[0])
179 if not os.path.isfile(cmd[0]):
# NOTE(review): "doesn't exit" looks like a typo for "doesn't exist" —
# left untouched here since the message is runtime behavior.
180 parser.error('Tracing failed for: %s\nIt doesn\'t exit' % ' '.join(cmd))
# Default the output file next to the traced executable.
183 options.out = '%s.test_cases' % cmd[-1]
184 options.out = os.path.abspath(options.out)
186 options.root_dir = os.path.abspath(options.root_dir)
187 logname = options.out + '.log'
189 test_cases = parser.process_gtest_options(cmd, os.getcwd(), options)
# Trace everything, then post-process the logs into the .test_cases file
# (the argument list for this call, orig. 194-198, is elided).
193 results = trace_test_cases(
199 print('Reading trace logs...')
200 blacklist = tools.gen_blacklist(options.trace_blacklist)
201 write_details(logname, options.out, options.root_dir, blacklist, results)
205 if __name__ == '__main__':