Upstream version 9.38.198.0
[platform/framework/web/crosswalk.git] / src / tools / telemetry / telemetry / page / page_runner.py
1 # Copyright 2014 The Chromium Authors. All rights reserved.
2 # Use of this source code is governed by a BSD-style license that can be
3 # found in the LICENSE file.
4
5 import collections
6 import logging
7 import optparse
8 import os
9 import random
10 import sys
11 import tempfile
12 import time
13
14 from telemetry import decorators
15 from telemetry.core import browser_finder
16 from telemetry.core import browser_info
17 from telemetry.core import exceptions
18 from telemetry.core import util
19 from telemetry.core import wpr_modes
20 from telemetry.core.platform.profiler import profiler_finder
21 from telemetry.page import page_filter
22 from telemetry.page import page_test
23 from telemetry.page.actions import navigate
24 from telemetry.page.actions import page_action
25 from telemetry.results import results_options
26 from telemetry.util import cloud_storage
27 from telemetry.util import exception_formatter
28 from telemetry.value import failure
29 from telemetry.value import skip
30
31
class _RunState(object):
  """Mutable state shared across page runs within one Run() invocation.

  Owns the browser instance (restarted as needed between pages), tracks
  which WPR archive the browser is replaying, and manages profiler output.
  """

  def __init__(self):
    # The live browser backend, or None when no browser is running.
    self.browser = None

    # Becomes True once a browser has been stopped (see StopBrowser): when
    # recording, later browser starts must append to the existing WPR
    # archive instead of overwriting it.
    self._append_to_existing_wpr = False
    # Archive path most recently handed to the browser, so we only reset
    # the replay archive when it actually changes.
    self._last_archive_path = None
    # Gates the one-time OS/GPU diagnostics logging in StartBrowserIfNeeded.
    self._first_browser = True
    # page -> True until that page has completed one run; lets the runner
    # honor test.discard_first_result on a per-page basis.
    self.first_page = collections.defaultdict(lambda: True)
    # Lazily-created temp directory for profiler output files.
    self.profiler_dir = None

  def StartBrowserIfNeeded(self, test, page_set, page, possible_browser,
                           credentials_path, archive_path, finder_options):
    """Ensures a browser is running and configured for |page|.

    Starts a new browser (with per-page options and the WPR archive set up)
    when none is running; otherwise just switches the replay archive if the
    page's archive differs from the last one used. Also normalizes the open
    tabs and, for a freshly started browser, waits for the initial tab to
    finish committing.
    """
    started_browser = not self.browser
    # Create a browser.
    if not self.browser:
      test.CustomizeBrowserOptionsForSinglePage(page, finder_options)
      self.browser = possible_browser.Create()
      self.browser.credentials.credentials_path = credentials_path

      # Set up WPR path on the new browser.
      self.browser.SetReplayArchivePath(archive_path,
                                        self._append_to_existing_wpr,
                                        page_set.make_javascript_deterministic)
      self._last_archive_path = page.archive_path

      test.WillStartBrowser(self.browser)
      self.browser.Start()
      test.DidStartBrowser(self.browser)

      if self._first_browser:
        # One-time diagnostics: log OS, device model, and GPU details.
        self._first_browser = False
        self.browser.credentials.WarnIfMissingCredentials(page_set)
        logging.info('OS: %s %s',
                     self.browser.platform.GetOSName(),
                     self.browser.platform.GetOSVersionName())
        if self.browser.supports_system_info:
          system_info = self.browser.GetSystemInfo()
          if system_info.model_name:
            logging.info('Model: %s', system_info.model_name)
          if system_info.gpu:
            for i, device in enumerate(system_info.gpu.devices):
              logging.info('GPU device %d: %s', i, device)
            if system_info.gpu.aux_attributes:
              logging.info('GPU Attributes:')
              for k, v in sorted(system_info.gpu.aux_attributes.iteritems()):
                logging.info('  %-20s: %s', k, v)
            if system_info.gpu.feature_status:
              logging.info('Feature Status:')
              for k, v in sorted(system_info.gpu.feature_status.iteritems()):
                logging.info('  %-20s: %s', k, v)
            if system_info.gpu.driver_bug_workarounds:
              logging.info('Driver Bug Workarounds:')
              for workaround in system_info.gpu.driver_bug_workarounds:
                logging.info('  %s', workaround)
          else:
            logging.info('No GPU devices')
    else:
      # Set up WPR path if it changed.
      if page.archive_path and self._last_archive_path != page.archive_path:
        self.browser.SetReplayArchivePath(
            page.archive_path,
            self._append_to_existing_wpr,
            page_set.make_javascript_deterministic)
        self._last_archive_path = page.archive_path

    if self.browser.supports_tab_control and test.close_tabs_before_run:
      # Create a tab if there's none.
      if len(self.browser.tabs) == 0:
        self.browser.tabs.New()

      # Ensure only one tab is open, unless the test is a multi-tab test.
      if not test.is_multi_tab_test:
        while len(self.browser.tabs) > 1:
          self.browser.tabs[-1].Close()

      # Must wait for tab to commit otherwise it can commit after the next
      # navigation has begun and RenderFrameHostManager::DidNavigateMainFrame()
      # will cancel the next navigation because it's pending. This manifests as
      # the first navigation in a PageSet freezing indefinitly because the
      # navigation was silently cancelled when |self.browser.tabs[0]| was
      # committed. Only do this when we just started the browser, otherwise
      # there are cases where previous pages in a PageSet never complete
      # loading so we'll wait forever.
      if started_browser:
        self.browser.tabs[0].WaitForDocumentReadyStateToBeComplete()

  def StopBrowser(self):
    """Closes the browser (if any) and marks future WPR setup to append."""
    if self.browser:
      self.browser.Close()
      self.browser = None

      # Restarting the state will also restart the wpr server. If we're
      # recording, we need to continue adding into the same wpr archive,
      # not overwrite it.
      self._append_to_existing_wpr = True

  def StartProfiling(self, page, finder_options):
    """Starts the configured profiler, writing output to a per-page file."""
    if not self.profiler_dir:
      self.profiler_dir = tempfile.mkdtemp()
    output_file = os.path.join(self.profiler_dir, page.file_safe_name)
    # With repeats, number the output files so later runs don't clobber
    # earlier ones.
    is_repeating = (finder_options.page_repeat != 1 or
                    finder_options.pageset_repeat != 1)
    if is_repeating:
      output_file = util.GetSequentialFileName(output_file)
    self.browser.StartProfiling(finder_options.profiler, output_file)

  def StopProfiling(self):
    """Stops profiling if a browser is still alive."""
    if self.browser:
      self.browser.StopProfiling()
142
class PageState(object):
  """Per-page run state: owns the tab and handles server setup, login,
  implicit navigation, and cleanup for a single page."""

  def __init__(self, page, tab):
    self.page = page
    self.tab = tab
    # Tracks whether we logged in, so CleanUpPage knows to release
    # the credentials.
    self._did_login = False

  def PreparePage(self, test=None):
    """Starts the local HTTP server for file:// pages and performs login
    when the page declares credentials.

    Raises page_test.Failure if login does not succeed.
    """
    page = self.page
    browser = self.tab.browser

    if page.is_file:
      serving_dirs = page.page_set.serving_dirs | set([page.serving_dir])
      server_started = browser.SetHTTPServerDirectories(serving_dirs)
      if server_started and test:
        test.DidStartHTTPServer(self.tab)

    if page.credentials:
      logged_in = browser.credentials.LoginNeeded(self.tab, page.credentials)
      if not logged_in:
        raise page_test.Failure('Login as ' + page.credentials + ' failed')
      self._did_login = True

    if test and test.clear_cache_before_each_run:
      self.tab.ClearCache(force=True)

  def ImplicitPageNavigation(self, test=None):
    """Executes the implicit navigation that occurs for every page iteration.

    This function will be called once per page before any actions are executed.
    """
    if test:
      test.WillNavigateToPage(self.page, self.tab)
      test.RunNavigateSteps(self.page, self.tab)
      test.DidNavigateToPage(self.page, self.tab)
    else:
      navigate.NavigateAction().RunAction(self.page, self.tab, None)

  def CleanUpPage(self, test):
    """Runs the test's per-page cleanup and releases held credentials."""
    test.CleanUpAfterPage(self.page, self.tab)
    if self.page.credentials and self._did_login:
      self.tab.browser.credentials.LoginNoLongerNeeded(
          self.tab, self.page.credentials)
186
def AddCommandLineArgs(parser):
  """Registers all of page_runner's command-line options on |parser|."""
  page_filter.PageFilter.AddCommandLineArgs(parser)
  results_options.AddResultsOptions(parser)

  # Page set ordering/repeat options.
  repeat_group = optparse.OptionGroup(parser,
                                      'Page set ordering and repeat options')
  repeat_group.add_option(
      '--pageset-shuffle', action='store_true', dest='pageset_shuffle',
      help='Shuffle the order of pages within a pageset.')
  repeat_group.add_option(
      '--pageset-shuffle-order-file',
      dest='pageset_shuffle_order_file', default=None,
      help='Filename of an output of a previously run test on the current '
      'pageset. The tests will run in the same order again, overriding '
      'what is specified by --page-repeat and --pageset-repeat.')
  repeat_group.add_option(
      '--page-repeat', default=1, type='int',
      help='Number of times to repeat each individual page '
      'before proceeding with the next page in the pageset.')
  repeat_group.add_option(
      '--pageset-repeat', default=1, type='int',
      help='Number of times to repeat the entire pageset.')
  parser.add_option_group(repeat_group)

  # Web Page Replay options.
  wpr_group = optparse.OptionGroup(parser, 'Web Page Replay options')
  wpr_group.add_option(
      '--use-live-sites', dest='use_live_sites', action='store_true',
      help='Run against live sites and ignore the Web Page Replay archives.')
  parser.add_option_group(wpr_group)

  parser.add_option('-d', '--also-run-disabled-tests',
                    dest='run_disabled_tests',
                    action='store_true', default=False,
                    help='Ignore @Disabled and @Enabled restrictions.')
219
def ProcessCommandLineArgs(parser, args):
  """Validates page_runner's parsed options; calls parser.error() on bad
  combinations or non-positive repeat counts."""
  page_filter.PageFilter.ProcessCommandLineArgs(parser, args)

  # Page set options
  # An order file dictates the exact run order, which only applies in
  # shuffle mode.
  if args.pageset_shuffle_order_file and not args.pageset_shuffle:
    parser.error('--pageset-shuffle-order-file requires --pageset-shuffle.')

  if args.page_repeat < 1:
    parser.error('--page-repeat must be a positive integer.')
  if args.pageset_repeat < 1:
    parser.error('--pageset-repeat must be a positive integer.')
231
232
def _PrepareAndRunPage(test, page_set, expectations, finder_options,
                       browser_options, page, credentials_path,
                       possible_browser, results, state):
  """Runs |page| once, retrying up to test.attempts times on browser crash.

  Chooses the WPR mode for the page, (re)starts the browser as needed,
  brackets the run with profiling start/stop, and delegates the actual run
  to _RunPage. Re-raises BrowserGoneException once retries are exhausted,
  and re-raises tab/browser crashes for multi-tab tests, since those cannot
  be recovered mid-test.
  """
  # Pick the replay mode: live sites force WPR off; otherwise (unless we are
  # recording) replay when an archive file exists for this page.
  if finder_options.use_live_sites:
    browser_options.wpr_mode = wpr_modes.WPR_OFF
  elif browser_options.wpr_mode != wpr_modes.WPR_RECORD:
    browser_options.wpr_mode = (
        wpr_modes.WPR_REPLAY
        if page.archive_path and os.path.isfile(page.archive_path)
        else wpr_modes.WPR_OFF)

  max_attempts = test.attempts
  attempt_num = 0
  while attempt_num < max_attempts:
    attempt_num = attempt_num + 1
    try:
      results.WillAttemptPageRun(attempt_num, max_attempts)

      if test.RestartBrowserBeforeEachPage() or page.startup_url:
        state.StopBrowser()
        # If we are restarting the browser for each page customize the per page
        # options for just the current page before starting the browser.
      state.StartBrowserIfNeeded(test, page_set, page, possible_browser,
                                 credentials_path, page.archive_path,
                                 finder_options)
      if not page.CanRunOnBrowser(browser_info.BrowserInfo(state.browser)):
        logging.info('Skip test for page %s because browser is not supported.'
                     % page.url)
        return

      expectation = expectations.GetExpectationForPage(state.browser, page)

      _WaitForThermalThrottlingIfNeeded(state.browser.platform)

      if finder_options.profiler:
        state.StartProfiling(page, finder_options)

      try:
        _RunPage(test, page, state, expectation, results)
        _CheckThermalThrottling(state.browser.platform)
      except exceptions.TabCrashException as e:
        # A crashed tab aborts multi-tab tests; otherwise restart the
        # browser and let the failure _RunPage already recorded stand.
        if test.is_multi_tab_test:
          logging.error('Aborting multi-tab test after tab %s crashed',
                        page.url)
          raise
        logging.warning(str(e))
        state.StopBrowser()

      if finder_options.profiler:
        state.StopProfiling()

      if (test.StopBrowserAfterPage(state.browser, page)):
        state.StopBrowser()

      return
    except exceptions.BrowserGoneException as e:
      # Browser died: restart and retry, unless we are out of attempts or
      # this is a multi-tab test (which cannot resume mid-set).
      state.StopBrowser()
      if attempt_num == max_attempts:
        logging.error('Aborting after too many retries')
        raise
      if test.is_multi_tab_test:
        logging.error('Aborting multi-tab test after browser crashed')
        raise
      logging.warning(str(e))
297
298
299 def _UpdatePageSetArchivesIfChanged(page_set):
300   # Attempt to download the credentials file.
301   if page_set.credentials_path:
302     try:
303       cloud_storage.GetIfChanged(
304           os.path.join(page_set.base_dir, page_set.credentials_path))
305     except (cloud_storage.CredentialsError, cloud_storage.PermissionError,
306             cloud_storage.CloudStorageError) as e:
307       logging.warning('Cannot retrieve credential file %s due to cloud storage '
308                       'error %s', page_set.credentials_path, str(e))
309
310   # Scan every serving directory for .sha1 files
311   # and download them from Cloud Storage. Assume all data is public.
312   all_serving_dirs = page_set.serving_dirs.copy()
313   # Add individual page dirs to all serving dirs.
314   for page in page_set:
315     if page.is_file:
316       all_serving_dirs.add(page.serving_dir)
317   # Scan all serving dirs.
318   for serving_dir in all_serving_dirs:
319     if os.path.splitdrive(serving_dir)[1] == '/':
320       raise ValueError('Trying to serve root directory from HTTP server.')
321     for dirpath, _, filenames in os.walk(serving_dir):
322       for filename in filenames:
323         path, extension = os.path.splitext(
324             os.path.join(dirpath, filename))
325         if extension != '.sha1':
326           continue
327         cloud_storage.GetIfChanged(path, page_set.bucket)
328
329
def Run(test, page_set, expectations, finder_options, results):
  """Runs a given test against a given page_set with the given options.

  Finds a browser, filters/shuffles the pages, verifies WPR archives and
  credentials, then runs each page page_repeat times over pageset_repeat
  passes, recording outcomes into |results|. Exits the process if no
  suitable browser can be found.
  """
  test.ValidatePageSet(page_set)

  # Create a possible_browser with the given options.
  try:
    possible_browser = browser_finder.FindBrowser(finder_options)
  except browser_finder.BrowserTypeRequiredException, e:
    sys.stderr.write(str(e) + '\n')
    sys.exit(-1)
  if not possible_browser:
    sys.stderr.write(
        'No browser found. Available browsers:\n' +
        '\n'.join(browser_finder.GetAllAvailableBrowserTypes(finder_options)) +
        '\n')
    sys.exit(-1)

  browser_options = possible_browser.finder_options.browser_options
  browser_options.browser_type = possible_browser.browser_type
  test.CustomizeBrowserOptions(browser_options)

  # Tests disabled for this browser only run with --also-run-disabled-tests.
  should_run = decorators.IsEnabled(test, possible_browser)

  should_run = should_run or finder_options.run_disabled_tests

  if not should_run:
    logging.warning('You are trying to run a disabled test.')
    logging.warning('Pass --also-run-disabled-tests to squelch this message.')
    return

  # Reorder page set based on options.
  pages = _ShuffleAndFilterPageSet(page_set, finder_options)

  # When replaying (neither live nor recording), sync archives from cloud
  # storage and drop pages whose archives are missing.
  if (not finder_options.use_live_sites and
      browser_options.wpr_mode != wpr_modes.WPR_RECORD):
    _UpdatePageSetArchivesIfChanged(page_set)
    pages = _CheckArchives(page_set, pages, results)

  # Verify credentials path.
  credentials_path = None
  if page_set.credentials_path:
    credentials_path = os.path.join(os.path.dirname(page_set.file_path),
                                    page_set.credentials_path)
    if not os.path.exists(credentials_path):
      credentials_path = None

  # Set up user agent.
  browser_options.browser_user_agent_type = page_set.user_agent_type or None

  if finder_options.profiler:
    profiler_class = profiler_finder.FindProfiler(finder_options.profiler)
    profiler_class.CustomizeBrowserOptions(browser_options.browser_type,
                                           finder_options)

  # Record a skip for, and drop, every page the test says it cannot run.
  # Iterate over a copy since pages are removed from the list as we go.
  for page in list(pages):
    if not test.CanRunForPage(page):
      results.WillRunPage(page)
      logging.debug('Skipping test: it cannot run for %s', page.url)
      results.AddValue(skip.SkipValue(page, 'Test cannot run'))
      results.DidRunPage(page)
      pages.remove(page)

  if not pages:
    return

  state = _RunState()
  # TODO(dtu): Move results creation and results_for_current_run into RunState.

  try:
    test.WillRunTest(finder_options)
    for _ in xrange(0, finder_options.pageset_repeat):
      for page in pages:
        if test.IsExiting():
          break

        test.WillRunPageRepeats(page)
        for _ in xrange(0, finder_options.page_repeat):
          results.WillRunPage(page)
          try:
            _PrepareAndRunPage(
                test, page_set, expectations, finder_options, browser_options,
                page, credentials_path, possible_browser, results, state)
          finally:
            # Optionally discard each page's first result (e.g. cold caches),
            # per test.discard_first_result.
            discard_run = False
            if state.first_page[page]:
              state.first_page[page] = False
              if test.discard_first_result:
                discard_run = True
            results.DidRunPage(page, discard_run=discard_run)
        test.DidRunPageRepeats(page)
        # Bail out of the whole run once the failure budget is exhausted.
        if (not test.max_failures is None and
            len(results.failures) > test.max_failures):
          logging.error('Too many failures. Aborting.')
          test.RequestExit()

  finally:
    test.DidRunTest(state.browser, results)
    state.StopBrowser()

  return
430
431
432 def _ShuffleAndFilterPageSet(page_set, finder_options):
433   if finder_options.pageset_shuffle_order_file:
434     return page_set.ReorderPageSet(finder_options.pageset_shuffle_order_file)
435
436   pages = [page for page in page_set.pages[:]
437            if not page.disabled and page_filter.PageFilter.IsSelected(page)]
438
439   if finder_options.pageset_shuffle:
440     random.Random().shuffle(pages)
441
442   return pages
443
444
445 def _CheckArchives(page_set, pages, results):
446   """Returns a subset of pages that are local or have WPR archives.
447
448   Logs warnings if any are missing."""
449   page_set_has_live_sites = False
450   for page in pages:
451     if not page.is_local:
452       page_set_has_live_sites = True
453       break
454
455   # Potential problems with the entire page set.
456   if page_set_has_live_sites:
457     if not page_set.archive_data_file:
458       logging.warning('The page set is missing an "archive_data_file" '
459                       'property. Skipping any live sites. To include them, '
460                       'pass the flag --use-live-sites.')
461     if not page_set.wpr_archive_info:
462       logging.warning('The archive info file is missing. '
463                       'To fix this, either add svn-internal to your '
464                       '.gclient using http://goto/read-src-internal, '
465                       'or create a new archive using record_wpr.')
466
467   # Potential problems with individual pages.
468   pages_missing_archive_path = []
469   pages_missing_archive_data = []
470
471   for page in pages:
472     if page.is_local:
473       continue
474
475     if not page.archive_path:
476       pages_missing_archive_path.append(page)
477     elif not os.path.isfile(page.archive_path):
478       pages_missing_archive_data.append(page)
479
480   if pages_missing_archive_path:
481     logging.warning('The page set archives for some pages do not exist. '
482                     'Skipping those pages. To fix this, record those pages '
483                     'using record_wpr. To ignore this warning and run '
484                     'against live sites, pass the flag --use-live-sites.')
485   if pages_missing_archive_data:
486     logging.warning('The page set archives for some pages are missing. '
487                     'Someone forgot to check them in, or they were deleted. '
488                     'Skipping those pages. To fix this, record those pages '
489                     'using record_wpr. To ignore this warning and run '
490                     'against live sites, pass the flag --use-live-sites.')
491
492   for page in pages_missing_archive_path + pages_missing_archive_data:
493     results.WillRunPage(page)
494     results.AddValue(failure.FailureValue.FromMessage(
495         page, 'Page set archive doesn\'t exist.'))
496     results.DidRunPage(page)
497
498   return [page for page in pages if page not in
499           pages_missing_archive_path + pages_missing_archive_data]
500
501
def _RunPage(test, page, state, expectation, results):
  """Runs the test on one page in a tab of the existing browser.

  Maps each outcome to a result value: expected failures are logged but not
  recorded; tab/browser crashes are recorded and re-raised so the caller can
  relaunch; unsupported page actions become skips; other unexpected
  exceptions become recorded failures. Page cleanup always runs.
  """
  if expectation == 'skip':
    logging.debug('Skipping test: Skip expectation for %s', page.url)
    results.AddValue(skip.SkipValue(page, 'Skipped by test expectations'))
    return

  logging.info('Running %s', page.url)

  page_state = PageState(page, test.TabForPage(page, state.browser))

  def ProcessError():
    # Prints the exception; records a failure unless the page was expected
    # to fail.
    if expectation == 'fail':
      msg = 'Expected exception while running %s' % page.url
    else:
      msg = 'Exception while running %s' % page.url
      results.AddValue(failure.FailureValue(page, sys.exc_info()))
    exception_formatter.PrintFormattedException(msg=msg)

  try:
    page_state.PreparePage(test)
    page_state.ImplicitPageNavigation(test)
    test.RunPage(page, page_state.tab, results)
    util.CloseConnections(page_state.tab)
  except page_test.TestNotSupportedOnPlatformFailure:
    # Platform incompatibility is handled by the caller, not recorded here.
    raise
  except page_test.Failure:
    if expectation == 'fail':
      exception_formatter.PrintFormattedException(
          msg='Expected failure while running %s' % page.url)
    else:
      exception_formatter.PrintFormattedException(
          msg='Failure while running %s' % page.url)
      results.AddValue(failure.FailureValue(page, sys.exc_info()))
  except (util.TimeoutException, exceptions.LoginException,
          exceptions.ProfilingException):
    ProcessError()
  except (exceptions.TabCrashException, exceptions.BrowserGoneException):
    ProcessError()
    # Run() catches these exceptions to relaunch the tab/browser, so re-raise.
    raise
  except page_action.PageActionNotSupported as e:
    results.AddValue(skip.SkipValue(page, 'Unsupported page action: %s' % e))
  except Exception:
    exception_formatter.PrintFormattedException(
        msg='Unhandled exception while running %s' % page.url)
    results.AddValue(failure.FailureValue(page, sys.exc_info()))
  else:
    if expectation == 'fail':
      logging.warning('%s was expected to fail, but passed.\n', page.url)
  finally:
    page_state.CleanUpPage(test)
553
554
555 def _WaitForThermalThrottlingIfNeeded(platform):
556   if not platform.CanMonitorThermalThrottling():
557     return
558   thermal_throttling_retry = 0
559   while (platform.IsThermallyThrottled() and
560          thermal_throttling_retry < 3):
561     logging.warning('Thermally throttled, waiting (%d)...',
562                     thermal_throttling_retry)
563     thermal_throttling_retry += 1
564     time.sleep(thermal_throttling_retry * 2)
565
566   if thermal_throttling_retry and platform.IsThermallyThrottled():
567     logging.warning('Device is thermally throttled before running '
568                     'performance tests, results will vary.')
569
570
571 def _CheckThermalThrottling(platform):
572   if not platform.CanMonitorThermalThrottling():
573     return
574   if platform.HasBeenThermallyThrottled():
575     logging.warning('Device has been thermally throttled during '
576                     'performance tests, results will vary.')