2 # Copyright 2017 The Chromium Authors
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
from __future__ import print_function

import json
import os
import sys
# Warning text attached to the step when one or more shards produced no
# result. Interpolated with a comma-separated list of shard indices.
MISSING_SHARDS_MSG = r"""Missing results from the following shard(s): %s

This can happen in following cases:
  * Test failed to start (missing *.dll/*.so dependency for example)
  * Test crashed or hung
  * Task expired because there are not enough bots available and are all used
  * Swarming service experienced problems

Please examine logs to figure out what happened.
"""
def emit_warning(title, log=None):
  """Emits a buildbot warning annotation, optionally with an attached log.

  Args:
    title: Step/log title. Trailing whitespace is stripped before it is used
        as the log name, since '@' -delimited annotations are whitespace
        sensitive.
    log: Optional multi-line text to attach as a step log. When None or
        empty, only the warning annotation and title are printed.
  """
  print('@@@STEP_WARNINGS@@@')
  print(title)
  # Guard: 'log' defaults to None, and None has no splitlines(); only emit
  # the STEP_LOG_* annotations when there is actual log text.
  if log:
    title = title.rstrip()
    for line in log.splitlines():
      print('@@@STEP_LOG_LINE@%s@%s@@@' % (title, line.rstrip()))
    print('@@@STEP_LOG_END@%s@@@' % title)
def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Args:
    summary_json: Path to the swarming-produced summary.json describing the
        shards of the task.
    jsons_to_merge: Paths to the candidate per-shard output.json files.

  Returns:
    Dict with merged test output on success or None on failure. Emits
    buildbot annotations (via emit_warning) for every problem found.
  """
  # summary.json is produced by swarming client itself. We are mostly
  # interested in the number of shards.
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    emit_warning(
        'summary.json is missing or can not be read',
        'Something is seriously wrong with swarming client or the bot.')
    return None

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
      'test_locations': {},
  }
  for index, result in enumerate(summary['shards']):
    if result is None:
      merged['missing_shards'].append(index)
      continue

    # Author note: this code path doesn't trigger convert_to_old_format() in
    # client/swarming.py, which means the state enum is saved in its string
    # name form, not in the number form.
    state = result.get('state')
    if state == u'BOT_DIED':
      emit_warning('Shard #%d had a Swarming internal failure' % index)
    elif state == u'EXPIRED':
      emit_warning('There wasn\'t enough capacity to run your test')
    elif state == u'TIMED_OUT':
      emit_warning(
          'Test runtime exceeded allocated time',
          'Either it ran for too long (hard timeout) or it didn\'t produce '
          'I/O for an extended period of time (I/O timeout)')
    elif state != u'COMPLETED':
      emit_warning('Invalid Swarming task state: %s' % state)

    json_data, err_msg = load_shard_json(index, result.get('task_id'),
                                         jsons_to_merge)
    if json_data:
      # Set-valued keys accumulate the union across shards.
      # Fix: was `json_data.get(key), []` — the misplaced parenthesis made
      # .get() return None for absent keys and passed a stray [] to
      # set.update(), raising TypeError. The [] belongs inside .get().
      for key in ('all_tests', 'disabled_tests', 'global_tags'):
        merged[key].update(json_data.get(key, []))

      # Dict-valued keys merge per test name; later shards win on conflict.
      for key in ('test_locations',):
        merged[key].update(json_data.get(key, {}))

      # 'per_iteration_data' is a list of dicts. Dicts should be merged
      # together, not the 'per_iteration_data' list itself.
      merged['per_iteration_data'] = merge_list_of_dicts(
          merged['per_iteration_data'],
          json_data.get('per_iteration_data', []))
    else:
      merged['missing_shards'].append(index)
      emit_warning('No result was found: %s' % err_msg)

  # If some shards are missing, make it known. Continue parsing anyway. Step
  # should be red anyway, since swarming.py return non-zero exit code in that
  # case.
  if merged['missing_shards']:
    as_str = ', '.join(map(str, merged['missing_shards']))
    emit_warning(
        'some shards did not complete: %s' % as_str,
        MISSING_SHARDS_MSG % as_str)
    # Not all tests run, combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to jsonish dict (sets are not JSON-serializable).
  for key in ('all_tests', 'disabled_tests', 'global_tags'):
    merged[key] = sorted(merged[key])

  return merged
# Refuse to parse shard output files larger than this (100 MiB); a runaway
# test can emit pathologically large JSON that would exhaust memory here.
OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB


def load_shard_json(index, task_id, jsons_to_merge):
  """Reads JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for; with the old api the
        output lives in a directory named after this index.
    task_id: The Swarming task id of the shard; with the new api the output
        lives in a directory named after the task id.
    jsons_to_merge: Paths to the candidate JSON files produced by the task.

  Returns: A tuple containing:
    * The contents of path, deserialized into a python object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  # 'output.json' is set in swarming/api.py, gtest_task method. A path
  # matches when its parent directory is either the shard index (old api)
  # or the task id (new api).
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  if not matching_json_files:
    print('shard %s test output missing' % index, file=sys.stderr)
    return (None, 'shard %s test output was missing' % index)
  if len(matching_json_files) > 1:
    print('duplicate test output for shard %s' % index, file=sys.stderr)
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      print('output.json is %d bytes. Max size is %d' % (
          filesize, OUTPUT_JSON_SIZE_LIMIT), file=sys.stderr)
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)

    return (None, 'shard %s test output was missing or invalid' % index)
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc.

  When the lists differ in length the shorter one is padded with empty
  dicts. On key conflicts the value from 'right' wins. Inputs are not
  mutated; a new list of new dicts is returned.
  """
  output = []
  for i in range(max(len(left), len(right))):
    left_dict = left[i] if i < len(left) else {}
    right_dict = right[i] if i < len(right) else {}
    # copy() so the caller's dicts are never modified in place.
    merged_dict = left_dict.copy()
    merged_dict.update(right_dict)
    output.append(merged_dict)
  return output
def standard_gtest_merge(
    output_json, summary_json, jsons_to_merge):
  """Merges shard gtest results and writes them to output_json.

  Args:
    output_json: Path the combined JSON is written to.
    summary_json: Path to the swarming summary.json for the task.
    jsons_to_merge: Paths to per-shard output.json files.

  Returns:
    0, always. Merge problems surface as step warnings, not exit codes;
    note that on total failure merge_shard_results() returns None and
    'null' is what gets written to output_json.
  """
  output = merge_shard_results(summary_json, jsons_to_merge)
  with open(output_json, 'w') as f:
    json.dump(output, f)

  return 0
def main(raw_args):
  """Parses merge-script arguments and runs the standard gtest merge.

  Args:
    raw_args: Command-line arguments, excluding the program name.

  Returns:
    The exit code from standard_gtest_merge() (0 on success).
  """
  # NOTE(review): merge_api is a project-local module expected to declare
  # --output-json/--summary-json/positional jsons-to-merge arguments.
  parser = merge_api.ArgumentParser()
  args = parser.parse_args(raw_args)

  return standard_gtest_merge(
      args.output_json, args.summary_json, args.jsons_to_merge)
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit code.
  sys.exit(main(sys.argv[1:]))