# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
+
import collections
import copy
import glob
import logging
+import optparse
import os
+import random
import sys
import tempfile
import time
-import traceback
-import random
-from telemetry import exception_formatter
+from telemetry import decorators
from telemetry.core import browser_finder
from telemetry.core import exceptions
from telemetry.core import util
from telemetry.core import wpr_modes
from telemetry.core.platform.profiler import profiler_finder
-from telemetry.page import page_filter as page_filter_module
+from telemetry.page import cloud_storage
+from telemetry.page import page_filter
from telemetry.page import page_runner_repeat
from telemetry.page import page_test
from telemetry.page import results_options
from telemetry.page.actions import navigate
from telemetry.page.actions import page_action
+from telemetry.util import exception_formatter
class _RunState(object):
if self._first_browser:
self._first_browser = False
self.browser.credentials.WarnIfMissingCredentials(page_set)
+ logging.info('OS: %s %s',
+ self.browser.platform.GetOSName(),
+ self.browser.platform.GetOSVersionName())
if self.browser.supports_system_info:
system_info = self.browser.GetSystemInfo()
if system_info.model_name:
logging.info('Feature Status:')
for k, v in sorted(system_info.gpu.feature_status.iteritems()):
logging.info(' %-20s: %s', k, v)
+ if system_info.gpu.driver_bug_workarounds:
+ logging.info('Driver Bug Workarounds:')
+ for workaround in system_info.gpu.driver_bug_workarounds:
+ logging.info(' %s', workaround)
else:
logging.info('No GPU devices')
else:
if self.browser.supports_tab_control and test.close_tabs_before_run:
# Create a tab if there's none.
if len(self.browser.tabs) == 0:
+ # TODO(nduca/tonyg): Remove this line. Added as part of crbug.com/348337
+ # chasing.
+ logging.warning('Making a new tab\n')
self.browser.tabs.New()
# Ensure only one tab is open, unless the test is a multi-tab test.
if not self.profiler_dir:
self.profiler_dir = tempfile.mkdtemp()
output_file = os.path.join(self.profiler_dir, page.file_safe_name)
- if finder_options.repeat_options.IsRepeating():
+ is_repeating = (finder_options.page_repeat != 1 or
+ finder_options.pageset_repeat != 1)
+ if is_repeating:
output_file = _GetSequentialFileName(output_file)
self.browser.StartProfiling(finder_options.profiler, output_file)
if test:
if test.clear_cache_before_each_run:
- self.tab.ClearCache()
+ self.tab.ClearCache(force=True)
def ImplicitPageNavigation(self, test=None):
"""Executes the implicit navigation that occurs for every page iteration.
i = navigate.NavigateAction()
i.RunAction(self.page, self.tab, None)
- def CleanUpPage(self):
+ def CleanUpPage(self, test):
+ test.CleanUpAfterPage(self.page, self.tab)
if self.page.credentials and self._did_login:
self.tab.browser.credentials.LoginNoLongerNeeded(
self.tab, self.page.credentials)
- if self.tab:
- self.tab.Disconnect()
- self.tab = None
-
-def AddCommandLineOptions(parser):
- page_filter_module.PageFilter.AddCommandLineOptions(parser)
+def AddCommandLineArgs(parser):
+  """Registers page-runner command-line options on |parser|.
+
+  Adds page-filter and results options via their own helpers, plus two
+  optparse groups: page set ordering/repeat options and Web Page Replay
+  options. Validation of these options happens in ProcessCommandLineArgs.
+  """
+  page_filter.PageFilter.AddCommandLineArgs(parser)
  results_options.AddResultsOptions(parser)
+  # Page set options
+  group = optparse.OptionGroup(parser, 'Page set ordering and repeat options')
+  group.add_option('--pageset-shuffle', action='store_true',
+      dest='pageset_shuffle',
+      help='Shuffle the order of pages within a pageset.')
+  group.add_option('--pageset-shuffle-order-file',
+      dest='pageset_shuffle_order_file', default=None,
+      help='Filename of an output of a previously run test on the current '
+      'pageset. The tests will run in the same order again, overriding '
+      'what is specified by --page-repeat and --pageset-repeat.')
+  group.add_option('--page-repeat', default=1, type='int',
+      help='Number of times to repeat each individual page '
+      'before proceeding with the next page in the pageset.')
+  group.add_option('--pageset-repeat', default=1, type='int',
+      help='Number of times to repeat the entire pageset.')
+  parser.add_option_group(group)
+
+  # WPR options
+  group = optparse.OptionGroup(parser, 'Web Page Replay options')
+  group.add_option('--use-live-sites',
+      dest='use_live_sites', action='store_true',
+      help='Run against live sites and ignore the Web Page Replay archives.')
+  parser.add_option_group(group)
+
+
+def ProcessCommandLineArgs(parser, args):
+  """Validates parsed page-runner options; calls parser.error() on bad input.
+
+  Checks that --pageset-shuffle-order-file is only used together with
+  --pageset-shuffle and that both repeat counts are positive integers.
+  """
+  page_filter.PageFilter.ProcessCommandLineArgs(parser, args)
+
+  # Page set options
+  if args.pageset_shuffle_order_file and not args.pageset_shuffle:
+    parser.error('--pageset-shuffle-order-file requires --pageset-shuffle.')
+
+  if args.page_repeat < 1:
+    parser.error('--page-repeat must be a positive integer.')
+  if args.pageset_repeat < 1:
+    parser.error('--pageset-repeat must be a positive integer.')
+
def _LogStackTrace(title, browser):
if browser:
if page.archive_path and os.path.isfile(page.archive_path)
else wpr_modes.WPR_OFF)
- tries = 3
+ tries = test.attempts
while tries:
tries -= 1
try:
results_for_current_run = copy.copy(results)
results_for_current_run.StartTest(page)
- if test.RestartBrowserBeforeEachPage():
+ if test.RestartBrowserBeforeEachPage() or page.startup_url:
state.StopBrowser()
# If we are restarting the browser for each page customize the per page
# options for just the current page before starting the browser.
raise
+def _UpdatePageSetArchivesIfChanged(page_set):
+  """Fetches changed page-set data files from Cloud Storage.
+
+  Downloads the page set's credentials file (best effort), then walks every
+  serving directory and calls cloud_storage.GetIfChanged for each file that
+  has a .sha1 marker. Raises ValueError if a serving dir is the filesystem
+  root.
+  """
+  # Attempt to download the credentials file.
+  if page_set.credentials_path:
+    try:
+      cloud_storage.GetIfChanged(
+          os.path.join(page_set.base_dir, page_set.credentials_path))
+    except (cloud_storage.CredentialsError, cloud_storage.PermissionError):
+      # Best effort: only warn, since a missing credentials file degrades
+      # just the pages that need login rather than the whole run.
+      logging.warning('Cannot retrieve credential file: %s',
+                      page_set.credentials_path)
+  # Scan every serving directory for .sha1 files
+  # and download them from Cloud Storage. Assume all data is public.
+  all_serving_dirs = page_set.serving_dirs.copy()
+  # Add individual page dirs to all serving dirs.
+  for page in page_set:
+    if page.is_file:
+      all_serving_dirs.add(page.serving_dir)
+  # Scan all serving dirs.
+  for serving_dir in all_serving_dirs:
+    if os.path.splitdrive(serving_dir)[1] == '/':
+      raise ValueError('Trying to serve root directory from HTTP server.')
+    for dirpath, _, filenames in os.walk(serving_dir):
+      for filename in filenames:
+        path, extension = os.path.splitext(
+            os.path.join(dirpath, filename))
+        if extension != '.sha1':
+          continue
+        # splitext stripped the '.sha1', so |path| names the data file itself.
+        cloud_storage.GetIfChanged(path)
+
+
def Run(test, page_set, expectations, finder_options):
"""Runs a given test against a given page_set with the given options."""
results = results_options.PrepareResults(test, finder_options)
possible_browser = browser_finder.FindBrowser(finder_options)
except browser_finder.BrowserTypeRequiredException, e:
sys.stderr.write(str(e) + '\n')
- sys.exit(1)
+ sys.exit(-1)
if not possible_browser:
sys.stderr.write(
'No browser found. Available browsers:\n' +
'\n'.join(browser_finder.GetAllAvailableBrowserTypes(finder_options)) +
'\n')
- sys.exit(1)
+ sys.exit(-1)
browser_options.browser_type = possible_browser.browser_type
+ if not decorators.IsEnabled(
+ test, browser_options.browser_type, possible_browser.platform):
+ return results
+
# Reorder page set based on options.
pages = _ShuffleAndFilterPageSet(page_set, finder_options)
- if (not finder_options.allow_live_sites and
+ if (not finder_options.use_live_sites and
browser_options.wpr_mode != wpr_modes.WPR_RECORD):
+ _UpdatePageSetArchivesIfChanged(page_set)
pages = _CheckArchives(page_set, pages, results)
# Verify credentials path.
if page_set.user_agent_type:
browser_options.browser_user_agent_type = page_set.user_agent_type
- test.CustomizeBrowserOptionsForPageSet(page_set,
- possible_browser.finder_options)
if finder_options.profiler:
profiler_class = profiler_finder.FindProfiler(finder_options.profiler)
profiler_class.CustomizeBrowserOptions(possible_browser.browser_type,
# TODO(dtu): Move results creation and results_for_current_run into RunState.
try:
- test.WillRunTest()
+ test.WillRunTest(finder_options)
state.repeat_state = page_runner_repeat.PageRunnerRepeatState(
- finder_options.repeat_options)
+ finder_options)
state.repeat_state.WillRunPageSet()
while state.repeat_state.ShouldRepeatPageSet() and not test.IsExiting():
page, credentials_path, possible_browser, results, state)
state.repeat_state.DidRunPage()
test.DidRunPageRepeats(page)
+ if (not test.max_failures is None and
+ len(results.failures) > test.max_failures):
+ logging.error('Too many failures. Aborting.')
+ test.RequestExit()
+ if (not test.max_errors is None and
+ len(results.errors) > test.max_errors):
+ logging.error('Too many errors. Aborting.')
+ test.RequestExit()
if test.IsExiting():
break
state.repeat_state.DidRunPageSet()
def _ShuffleAndFilterPageSet(page_set, finder_options):
- if (finder_options.pageset_shuffle_order_file and
- not finder_options.pageset_shuffle):
- raise Exception('--pageset-shuffle-order-file requires --pageset-shuffle.')
-
if finder_options.pageset_shuffle_order_file:
return page_set.ReorderPageSet(finder_options.pageset_shuffle_order_file)
- page_filter = page_filter_module.PageFilter(finder_options)
pages = [page for page in page_set.pages[:]
- if not page.disabled and page_filter.IsSelected(page)]
+ if not page.disabled and page_filter.PageFilter.IsSelected(page)]
if finder_options.pageset_shuffle:
random.Random().shuffle(pages)
if not page_set.archive_data_file:
logging.warning('The page set is missing an "archive_data_file" '
'property. Skipping any live sites. To include them, '
- 'pass the flag --allow-live-sites.')
+ 'pass the flag --use-live-sites.')
if not page_set.wpr_archive_info:
logging.warning('The archive info file is missing. '
'To fix this, either add svn-internal to your '
logging.warning('The page set archives for some pages do not exist. '
'Skipping those pages. To fix this, record those pages '
'using record_wpr. To ignore this warning and run '
- 'against live sites, pass the flag --allow-live-sites.')
+ 'against live sites, pass the flag --use-live-sites.')
if pages_missing_archive_data:
logging.warning('The page set archives for some pages are missing. '
'Someone forgot to check them in, or they were deleted. '
'Skipping those pages. To fix this, record those pages '
'using record_wpr. To ignore this warning and run '
- 'against live sites, pass the flag --allow-live-sites.')
+ 'against live sites, pass the flag --use-live-sites.')
for page in pages_missing_archive_path + pages_missing_archive_data:
results.StartTest(page)
def _RunPage(test, page, state, expectation, results, finder_options):
if expectation == 'skip':
- logging.info('Skipped %s' % page.url)
+ logging.debug('Skipping test: Skip expectation for %s', page.url)
+ results.AddSkip(page, 'Skipped by test expectations')
return
logging.info('Running %s' % page.url)
page_state = PageState(page, test.TabForPage(page, state.browser))
- page_action.PageAction.ResetNextTimelineMarkerId()
-
def ProcessError():
- logging.error('%s:\n%s', page.url, traceback.format_exc())
+ logging.error('%s:', page.url)
+ exception_formatter.PrintFormattedException()
if expectation == 'fail':
logging.info('Error was expected\n')
results.AddSuccess(page)
if state.repeat_state.ShouldNavigate(
finder_options.skip_navigate_on_repeat):
page_state.ImplicitPageNavigation(test)
- test.Run(finder_options, page, page_state.tab, results)
+ test.RunPage(page, page_state.tab, results)
util.CloseConnections(page_state.tab)
+ except page_test.TestNotSupportedOnPlatformFailure:
+ raise
except page_test.Failure:
if expectation == 'fail':
- logging.info('%s:\n%s', page.url, traceback.format_exc())
+ logging.info('%s:', page.url)
+ exception_formatter.PrintFormattedException()
logging.info('Failure was expected\n')
results.AddSuccess(page)
else:
- logging.warning('%s:\n%s', page.url, traceback.format_exc())
+ logging.warning('%s:', page.url)
+ exception_formatter.PrintFormattedException()
results.AddFailure(page, sys.exc_info())
except (util.TimeoutException, exceptions.LoginException,
exceptions.ProfilingException):
ProcessError()
# Run() catches these exceptions to relaunch the tab/browser, so re-raise.
raise
+ except page_action.PageActionNotSupported as e:
+ results.AddSkip(page, 'Unsupported page action: %s' % e)
except Exception:
logging.warning('While running %s', page.url)
- exception_formatter.PrintFormattedException(*sys.exc_info())
+ exception_formatter.PrintFormattedException()
results.AddFailure(page, sys.exc_info())
else:
if expectation == 'fail':
logging.warning('%s was expected to fail, but passed.\n', page.url)
results.AddSuccess(page)
finally:
- page_state.CleanUpPage()
+ page_state.CleanUpPage(test)
def _GetSequentialFileName(base_name):