2 # Copyright 2013 The Swarming Authors. All rights reserved.
3 # Use of this source code is governed under the Apache License, Version 2.0 that
4 # can be found in the LICENSE file.
6 """Triggers a ton of fake jobs to test its handling under high load.
8 Generates an histogram with the latencies to process the tasks and number of
21 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
23 sys.path.insert(0, ROOT_DIR)
25 from third_party import colorama
29 from utils import graph
31 from utils import threading_utils
33 import swarming_load_test_bot
# NOTE(review): this file is a numbered, partially-sampled dump -- the leading
# integer on each line is a fused original-file line number, and gaps in that
# numbering (39, 42, 44-45, 49-51 here) are lines missing from this view.
# Code is left byte-identical; comments only.
#
# Summarizes a finished load-test run: `results` holds float latencies for
# successful tasks and non-float markers (strings) for failures; prints a
# latency histogram (`columns` wide, `buckets` bins), totals, the average
# delay, and then the failure markers.
36 def print_results(results, columns, buckets):
# Partition successes (floats = elapsed seconds) from failure markers.
37 delays = [i for i in results if isinstance(i, float)]
38 failures = [i for i in results if not isinstance(i, float)]
40 graph.print_histogram(
41 graph.generate_histogram(delays, buckets), columns, '%5.3f')
43 print('Total items : %d' % len(results))
# NOTE(review): missing lines 44-45 presumably guard `delays` being non-empty
# before this division -- confirm against the full file.
46 average = sum(delays)/ len(delays)
47 print('Average delay: %s' % graph.to_units(average))
48 #print('Average overhead: %s' % graph.to_units(total_size / len(sizes)))
# Failures printed in red via colorama escape codes; missing lines 49-51
# presumably guard on `failures` being non-empty.
52 print('%sFAILURES%s:' % (colorama.Fore.RED, colorama.Fore.RESET))
53 print('\n'.join(' %s' % i for i in failures))
# NOTE(review): sampled dump -- leading integers are fused original line
# numbers; gaps are lines missing from this view (e.g. the `start = time.time()`
# assignment, the `if not response:` test before line 88, the opening of the
# `expected` dict literal before line 93, and the `for`-loop suite around
# lines 118-120). Code left byte-identical; comments only.
56 def trigger_task(swarming_url, dimensions, progress, unique, timeout, index):
57 """Triggers a Swarming job and collects results.
59 Returns the total amount of time to run a task remotely, including all the
# Task name is made unique per run (`unique`) and per task (`index`).
62 name = 'load-test-%d-%s' % (index, unique)
65 logging.info('trigger')
# Dummy isolate server/namespace: the task is inlined below, nothing is
# actually fetched from an isolate server.
66 manifest = swarming.Manifest(
67 isolate_server='http://localhost:1',
68 namespace='dummy-isolate',
73 dimensions=dimensions,
79 # TODO(maruel): Make output size configurable.
80 # TODO(maruel): Make number of shards configurable.
# The task itself just prints `output_size` characters of filler output.
82 cmd = ['python', '-c', 'print(\'1\'*%s)' % output_size]
83 manifest.add_task('echo stuff', cmd)
84 data = {'request': manifest.to_json()}
85 response = net.url_open(swarming_url + '/test', data=data)
87 # Failed to trigger. Return a failure.
# Failure markers are strings, distinguished from float latencies by
# print_results().
88 return 'failed_trigger'
89 result = json.load(response)
# pop() so `result` can be compared against `expected` below without the
# server-generated key.
90 test_key = result['test_keys'][0].pop('test_key')
93 'test_case_name': name,
# Old API uses hardcoded config name.
97 'config_name': 'isolated',
103 if result != expected:
104 # New API doesn't have concept of config name so it uses the task name.
105 expected['test_keys'][0]['config_name'] = name
106 assert result == expected, '%s\n%s' % (result, expected)
# Progress accounting: one more task now in-flight.
107 progress.update_item('%5d' % index, processing=1)
109 logging.info('collect')
110 test_keys = swarming.get_task_keys(swarming_url, name)
112 return 'no_test_keys'
113 assert test_keys == [test_key], test_keys
# Blocks up to `timeout` seconds waiting for the single shard's result.
116 for _index, output in swarming.yield_results(
117 swarming_url, test_keys, timeout, None, False, None)
# Strip bot-specific fields before comparing against the expected output.
121 out[0].pop('machine_tag')
122 out[0].pop('machine_id')
125 u'config_instance_index': 0,
127 u'num_config_instances': 1,
128 u'output': swarming_load_test_bot.TASK_OUTPUT,
131 assert out == expected, '\n%s\n%s' % (out, expected)
# Success: return total wall-clock latency (trigger + queue + run + collect).
# NOTE(review): `start` is assigned on a line missing from this view --
# presumably `start = time.time()` near the top; confirm against the full file.
132 return time.time() - start
# NOTE(review): reached via a missing `finally:` or fall-through -- confirm.
134 progress.update_item('%5d - done' % index, processing=-1, processed=1)
# NOTE(review): body of main() -- the `def main():` line (original ~138) falls
# in a sampling gap, as do many interior lines (the add_option(...) call heads,
# the `try:` opening the send loop, the per-iteration pool.add_task call around
# lines 211-219, etc.). Code left byte-identical; comments only.
#
# Parses CLI options, then drives the load test: trigger tasks at
# --send-rate for --duration seconds with at most --concurrent in flight,
# collect all results, print the latency histogram, optionally dump JSON.
139 parser = optparse.OptionParser(description=sys.modules[__name__].__doc__)
142 metavar='URL', default='',
143 help='Swarming server to use')
144 swarming.add_filter_options(parser)
# Default dimension: match bots spawned by the companion load-test bot script.
145 parser.set_defaults(dimensions=[('os', swarming_load_test_bot.OS_NAME)])
147 group = optparse.OptionGroup(parser, 'Load generated')
149 '-s', '--send-rate', type='float', default=16., metavar='RATE',
150 help='Rate (item/s) of sending requests as a float, default: %default')
152 '-D', '--duration', type='float', default=60., metavar='N',
153 help='Duration (s) of the sending phase of the load test, '
156 '-m', '--concurrent', type='int', default=200, metavar='N',
157 help='Maximum concurrent on-going requests, default: %default')
159 '-t', '--timeout', type='float', default=3600., metavar='N',
160 help='Timeout to get results, default: %default')
161 parser.add_option_group(group)
163 group = optparse.OptionGroup(parser, 'Display options')
165 '--columns', type='int', default=graph.get_console_width(), metavar='N',
166 help='For histogram display, default:%default')
168 '--buckets', type='int', default=20, metavar='N',
169 help='Number of buckets for histogram display, default:%default')
170 parser.add_option_group(group)
173 '--dump', metavar='FOO.JSON', help='Dumps to json file')
175 '-v', '--verbose', action='store_true', help='Enables logging')
177 options, args = parser.parse_args()
# Quiet by default (FATAL); -v enables INFO-level logging.
178 logging.basicConfig(level=logging.INFO if options.verbose else logging.FATAL)
# Positional arguments are not accepted.
180 parser.error('Unsupported args: %s' % args)
181 options.swarming = options.swarming.rstrip('/')
182 if not options.swarming:
183 parser.error('--swarming is required.')
184 if options.duration <= 0:
185 parser.error('Needs --duration > 0. 0.01 is a valid value.')
186 swarming.process_filter_options(parser, options)
# Expected total number of tasks to trigger over the whole run.
188 total = options.send_rate * options.duration
190 'Sending %.1f i/s for %ds with max %d parallel requests; timeout %.1fs; '
192 (options.send_rate, options.duration, options.concurrent,
193 options.timeout, total))
194 print('[processing/processed/todo]')
196 # This is used so there's no clash between runs and actual real usage.
197 unique = ''.join(random.choice(string.ascii_letters) for _ in range(8))
198 columns = [('processing', 0), ('processed', 0), ('todo', 0)]
199 progress = threading_utils.Progress(columns)
201 with threading_utils.ThreadPoolWithProgress(
202 progress, 1, options.concurrent, 0) as pool:
# Pace the trigger loop against wall-clock time so the effective send
# rate tracks --send-rate regardless of per-iteration overhead.
206 duration = time.time() - start
207 if duration > options.duration:
209 should_have_triggered_so_far = int(duration * options.send_rate)
# Catch-up loop: missing lines 211-219 presumably enqueue trigger_task
# onto the pool and increment `index` -- confirm against the full file.
210 while index < should_have_triggered_so_far:
220 progress.update_item('', todo=1)
222 progress.print_update()
# Ctrl-C during the send phase stops triggering but still collects
# results for tasks already in flight.
224 except KeyboardInterrupt:
225 aborted = pool.abort()
226 progress.update_item(
227 'Got Ctrl-C. Aborted %d unsent tasks.' % aborted,
230 progress.print_update()
232 # TODO(maruel): We could give up on collecting results for the on-going
233 # tasks but that would need to be optional.
234 progress.update_item('Getting results for on-going tasks.', raw=True)
# Blocks until every queued trigger_task finishes; sorting groups the
# string failure markers apart from the float latencies.
235 results = sorted(pool.join())
236 progress.print_update()
237 # At this point, progress is not used anymore.
239 print(' - Took %.1fs.' % (time.time() - start))
241 print_results(results, options.columns, options.buckets)
# Optional machine-readable dump of the raw results (guarded by a missing
# `if options.dump:` line -- confirm).
243 with open(options.dump, 'w') as f:
244 json.dump(results, f, separators=(',',':'))
248 if __name__ == '__main__':