it will ask you whether it is good or bad before continuing the search.
"""
-# The root URL for storage.
-CHROMIUM_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
-WEBKIT_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-webkit-snapshots'
-
-# The root URL for official builds.
+# The base URL for stored build archives.
+CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
+ '/chromium-browser-snapshots')
+WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
+ '/chromium-webkit-snapshots')
+ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
+ '/chromium-browser-asan')
+
+# The base URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'
-# Changelogs URL.
-CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
- 'perf/dashboard/ui/changelog.html?' \
- 'url=/trunk/src&range=%d%%3A%d'
+# URL template for viewing changelogs between revisions.
+CHANGELOG_URL = ('http://build.chromium.org'
+ '/f/chromium/perf/dashboard/ui/changelog.html'
+ '?url=/trunk/src&range=%d%%3A%d')
-# Official Changelogs URL.
-OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/'\
- 'changelog?old_version=%s&new_version=%s'
+# URL template for viewing changelogs between official versions.
+OFFICIAL_CHANGELOG_URL = ('http://omahaproxy.appspot.com/changelog'
+ '?old_version=%s&new_version=%s')
# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'
-# Blink Changelogs URL.
-BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
- 'perf/dashboard/ui/changelog_blink.html?' \
- 'url=/trunk&range=%d%%3A%d'
-DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
- '(known good), but no later than %s (first known bad).'
-DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
- '(known bad), but no later than %s (first known good).'
+# Blink changelogs URL.
+BLINK_CHANGELOG_URL = ('http://build.chromium.org'
+ '/f/chromium/perf/dashboard/ui/changelog_blink.html'
+ '?url=/trunk&range=%d%%3A%d')
+
+DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
+ 'known good), but no later than %s (first known bad).')
+DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
+ 'known bad), but no later than %s (first known good).')
+
+CHROMIUM_GITHASH_TO_SVN_URL = (
+ 'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
+
+BLINK_GITHASH_TO_SVN_URL = (
+ 'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
+
+GITHASH_TO_SVN_URL = {
+ 'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
+ 'blink': BLINK_GITHASH_TO_SVN_URL,
+}
+
+# Search pattern to be matched in the JSON output from
+# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
+CHROMIUM_SEARCH_PATTERN = (
+ r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
+
+# Search pattern to be matched in the JSON output from
+# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
+BLINK_SEARCH_PATTERN = (
+ r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
+
+SEARCH_PATTERN = {
+ 'chromium': CHROMIUM_SEARCH_PATTERN,
+ 'blink': BLINK_SEARCH_PATTERN,
+}
###############################################################################
"""A PathContext is used to carry the information used to construct URLs and
paths when dealing with the storage server and archives."""
def __init__(self, base_url, platform, good_revision, bad_revision,
- is_official, is_aura, flash_path = None):
+ is_official, is_asan, use_local_repo, flash_path = None,
+ pdf_path = None):
super(PathContext, self).__init__()
# Store off the input parameters.
self.base_url = base_url
self.good_revision = good_revision
self.bad_revision = bad_revision
self.is_official = is_official
- self.is_aura = is_aura
+ self.is_asan = is_asan
+ self.build_type = 'release'
self.flash_path = flash_path
+    # Dictionary that stores the svn revision number as key and its
+ # corresponding git hash as value. This data is populated in
+ # _FetchAndParse and used later in GetDownloadURL while downloading
+ # the build.
+ self.githash_svn_dict = {}
+ self.pdf_path = pdf_path
# The name of the ZIP file in a revision directory on the server.
self.archive_name = None
+ # If the script is run from a local Chromium checkout,
+ # "--use-local-repo" option can be used to make the script run faster.
+ # It uses "git svn find-rev <SHA1>" command to convert git hash to svn
+ # revision number.
+ self.use_local_repo = use_local_repo
+
# Set some internal members:
# _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
# _archive_extract_dir = Uncompressed directory in the archive_name file.
self._listing_platform_dir = 'mac/'
self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
elif self.platform == 'win':
- if self.is_aura:
- self._listing_platform_dir = 'win-aura/'
- else:
- self._listing_platform_dir = 'win/'
+ self._listing_platform_dir = 'win/'
else:
if self.platform in ('linux', 'linux64', 'linux-arm'):
self.archive_name = 'chrome-linux.zip'
elif self.platform == 'win':
self._listing_platform_dir = 'Win/'
+ def GetASANPlatformDir(self):
+ """ASAN builds are in directories like "linux-release", or have filenames
+ like "asan-win32-release-277079.zip". This aligns to our platform names
+ except in the case of Windows where they use "win32" instead of "win"."""
+ if self.platform == 'win':
+ return 'win32'
+ else:
+ return self.platform
+
def GetListingURL(self, marker=None):
"""Returns the URL for a directory listing, with an optional marker."""
marker_param = ''
if marker:
marker_param = '&marker=' + str(marker)
- return self.base_url + '/?delimiter=/&prefix=' + \
- self._listing_platform_dir + marker_param
+ if self.is_asan:
+ prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
+ return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
+ else:
+ return (self.base_url + '/?delimiter=/&prefix=' +
+ self._listing_platform_dir + marker_param)
def GetDownloadURL(self, revision):
"""Gets the download URL for a build archive of a specific revision."""
+ if self.is_asan:
+ return '%s/%s-%s/%s-%d.zip' % (
+ ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
+ self.GetASANBaseName(), revision)
if self.is_official:
- return "%s/%s/%s%s" % (
+ return '%s/%s/%s%s' % (
OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
self.archive_name)
else:
- return "%s/%s%s/%s" % (self.base_url, self._listing_platform_dir,
+ if str(revision) in self.githash_svn_dict:
+ revision = self.githash_svn_dict[str(revision)]
+ return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
revision, self.archive_name)
def GetLastChangeURL(self):
"""Returns a URL to the LAST_CHANGE file."""
return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'
- def GetLaunchPath(self):
+ def GetASANBaseName(self):
+ """Returns the base name of the ASAN zip file."""
+ if 'linux' in self.platform:
+ return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
+ self.build_type)
+ else:
+ return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)
+
+ def GetLaunchPath(self, revision):
"""Returns a relative path (presumably from the archive extraction location)
that is used to run the executable."""
- return os.path.join(self._archive_extract_dir, self._binary_name)
-
- def IsAuraBuild(self, build):
- """Check the given build is Aura."""
- return build.split('.')[3] == '1'
-
- def IsASANBuild(self, build):
- """Check the given build is ASAN build."""
- return build.split('.')[3] == '2'
+ if self.is_asan:
+ extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
+ else:
+ extract_dir = self._archive_extract_dir
+ return os.path.join(extract_dir, self._binary_name)
def ParseDirectoryIndex(self):
"""Parses the Google Storage directory listing into a list of revision
root_tag = document.getroot().tag
end_ns_pos = root_tag.find('}')
if end_ns_pos == -1:
- raise Exception("Could not locate end namespace for directory index")
+ raise Exception('Could not locate end namespace for directory index')
namespace = root_tag[:end_ns_pos + 1]
# Find the prefix (_listing_platform_dir) and whether or not the list is
is_truncated = document.find(namespace + 'IsTruncated')
if is_truncated is not None and is_truncated.text.lower() == 'true':
next_marker = document.find(namespace + 'NextMarker').text
-
# Get a list of all the revisions.
- all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
- namespace + 'Prefix')
- # The <Prefix> nodes have content of the form of
- # |_listing_platform_dir/revision/|. Strip off the platform dir and the
- # trailing slash to just have a number.
revisions = []
- for prefix in all_prefixes:
- revnum = prefix.text[prefix_len:-1]
- try:
- revnum = int(revnum)
- revisions.append(revnum)
- except ValueError:
- pass
- return (revisions, next_marker)
+ githash_svn_dict = {}
+ if self.is_asan:
+ asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
+      # Non-ASAN builds are in a <revision> directory. The ASAN builds are
+      # flat.
+ all_prefixes = document.findall(namespace + 'Contents/' +
+ namespace + 'Key')
+ for prefix in all_prefixes:
+ m = asan_regex.match(prefix.text)
+ if m:
+ try:
+ revisions.append(int(m.group(1)))
+ except ValueError:
+ pass
+ else:
+ all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
+ namespace + 'Prefix')
+ # The <Prefix> nodes have content of the form of
+ # |_listing_platform_dir/revision/|. Strip off the platform dir and the
+ # trailing slash to just have a number.
+ for prefix in all_prefixes:
+ revnum = prefix.text[prefix_len:-1]
+ try:
+ if not revnum.isdigit():
+ git_hash = revnum
+ revnum = self.GetSVNRevisionFromGitHash(git_hash)
+ githash_svn_dict[revnum] = git_hash
+ if revnum is not None:
+ revnum = int(revnum)
+ revisions.append(revnum)
+ except ValueError:
+ pass
+ return (revisions, next_marker, githash_svn_dict)
# Fetch the first list of revisions.
- (revisions, next_marker) = _FetchAndParse(self.GetListingURL())
-
+ (revisions, next_marker, self.githash_svn_dict) = _FetchAndParse(
+ self.GetListingURL())
# If the result list was truncated, refetch with the next marker. Do this
# until an entire directory listing is done.
while next_marker:
next_url = self.GetListingURL(next_marker)
- (new_revisions, next_marker) = _FetchAndParse(next_url)
+ (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
revisions.extend(new_revisions)
+ self.githash_svn_dict.update(new_dict)
return revisions
+ def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
+ json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
+ response = urllib.urlopen(json_url)
+ if response.getcode() == 200:
+ try:
+ data = json.loads(response.read()[4:])
+ except ValueError:
+ print 'ValueError for JSON URL: %s' % json_url
+ raise ValueError
+ else:
+ raise ValueError
+ if 'message' in data:
+ message = data['message'].split('\n')
+ message = [line for line in message if line.strip()]
+ search_pattern = re.compile(SEARCH_PATTERN[depot])
+ result = search_pattern.search(message[len(message)-1])
+ if result:
+ return result.group(1)
+ print 'Failed to get svn revision number for %s' % git_sha1
+ raise ValueError
+
+ def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
+ def _RunGit(command, path):
+ command = ['git'] + command
+ if path:
+ original_path = os.getcwd()
+ os.chdir(path)
+ shell = sys.platform.startswith('win')
+ proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (output, _) = proc.communicate()
+
+ if path:
+ os.chdir(original_path)
+ return (output, proc.returncode)
+
+ path = None
+ if depot == 'blink':
+ path = os.path.join(os.getcwd(), 'third_party', 'WebKit')
+ if os.path.basename(os.getcwd()) == 'src':
+ command = ['svn', 'find-rev', git_sha1]
+ (git_output, return_code) = _RunGit(command, path)
+ if not return_code:
+ return git_output.strip('\n')
+ raise ValueError
+ else:
+ print ('Script should be run from src folder. ' +
+             'Eg: python tools/bisect-builds.py -g 280588 -b 280590 ' +
+ '--archive linux64 --use-local-repo')
+ sys.exit(1)
+
+ def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
+ if not self.use_local_repo:
+ return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
+ else:
+ return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)
+
def GetRevList(self):
"""Gets the list of revision numbers between self.good_revision and
self.bad_revision."""
i = 0
parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
for build_number in sorted(parsed_build_numbers):
- path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
- self._listing_platform_dir + self.archive_name
+ path = (OFFICIAL_BASE_URL + '/' + str(build_number) + '/' +
+ self._listing_platform_dir + self.archive_name)
i = i + 1
try:
connection = urllib.urlopen(path)
if build_number > maxrev:
break
if build_number >= minrev:
- # If we are bisecting Aura, we want to include only builds which
- # ends with ".1".
- if self.is_aura:
- if self.IsAuraBuild(str(build_number)):
- final_list.append(str(build_number))
- # If we are bisecting only official builds (without --aura),
- # we can not include builds which ends with '.1' or '.2' since
- # they have different folder hierarchy inside.
- elif (not self.IsAuraBuild(str(build_number)) and
- not self.IsASANBuild(str(build_number))):
- final_list.append(str(build_number))
- except urllib.HTTPError, e:
+ final_list.append(str(build_number))
+ except urllib.HTTPError:
pass
return final_list
"""
def ReportHook(blocknum, blocksize, totalsize):
if quit_event and quit_event.isSet():
- raise RuntimeError("Aborting download of revision %s" % str(rev))
+ raise RuntimeError('Aborting download of revision %s' % str(rev))
if progress_event and progress_event.isSet():
size = blocknum * blocksize
if totalsize == -1: # Total size not known.
- progress = "Received %d bytes" % size
+ progress = 'Received %d bytes' % size
else:
size = min(totalsize, size)
- progress = "Received %d of %d bytes, %.2f%%" % (
+ progress = 'Received %d of %d bytes, %.2f%%' % (
size, totalsize, 100.0 * size / totalsize)
# Send a \r to let all progress messages use just one line of output.
- sys.stdout.write("\r" + progress)
+ sys.stdout.write('\r' + progress)
sys.stdout.flush()
download_url = context.GetDownloadURL(rev)
urllib.urlretrieve(download_url, filename, ReportHook)
if progress_event and progress_event.isSet():
print
- except RuntimeError, e:
+ except RuntimeError:
pass
-def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
+def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
"""Given a zipped revision, unzip it and run the test."""
- print "Trying revision %s..." % str(revision)
+ print 'Trying revision %s...' % str(revision)
# Create a temp directory and unzip the revision into it.
cwd = os.getcwd()
tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
- UnzipFilenameToDir(zipfile, tempdir)
+ UnzipFilenameToDir(zip_file, tempdir)
os.chdir(tempdir)
# Run the build as many times as specified.
testargs = ['--user-data-dir=%s' % profile] + args
# The sandbox must be run as root on Official Chrome, so bypass it.
- if ((context.is_official or context.flash_path) and
+ if ((context.is_official or context.flash_path or context.pdf_path) and
context.platform.startswith('linux')):
testargs.append('--no-sandbox')
if context.flash_path:
# pass the correct version we just spoof it.
testargs.append('--ppapi-flash-version=99.9.999.999')
+ # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
+ if context.pdf_path:
+ shutil.copy(context.pdf_path,
+ os.path.dirname(context.GetLaunchPath(revision)))
+ testargs.append('--enable-print-preview')
+
runcommand = []
for token in shlex.split(command):
- if token == "%a":
+ if token == '%a':
runcommand.extend(testargs)
else:
- runcommand.append( \
- token.replace('%p', os.path.abspath(context.GetLaunchPath())) \
- .replace('%s', ' '.join(testargs)))
+ runcommand.append(
+ token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
+ replace('%s', ' '.join(testargs)))
results = []
- for i in range(0, num_runs):
+ for _ in range(num_runs):
subproc = subprocess.Popen(runcommand,
bufsize=-1,
stdout=subprocess.PIPE,
os.chdir(cwd)
try:
shutil.rmtree(tempdir, True)
- except Exception, e:
+ except Exception:
pass
for (returncode, stdout, stderr) in results:
return results[0]
+# The arguments official_builds, status, stdout and stderr are unused.
+# They are present here because this function is passed to Bisect which then
+# calls it with 5 arguments.
+# pylint: disable=W0613
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
- """Ask the user whether build |rev| is good or bad."""
+ """Asks the user whether build |rev| is good or bad."""
# Loop until we get a response that we can parse.
while True:
- response = raw_input('Revision %s is ' \
+ response = raw_input('Revision %s is '
'[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
str(rev))
if response and response in ('g', 'b', 'r', 'u'):
raise SystemExit()
+def IsGoodASANBuild(rev, official_builds, status, stdout, stderr):
+ """Determine if an ASAN build |rev| is good or bad
+
+ Will examine stderr looking for the error message emitted by ASAN. If not
+ found then will fallback to asking the user."""
+ if stderr:
+ bad_count = 0
+ for line in stderr.splitlines():
+ print line
+ if line.find('ERROR: AddressSanitizer:') != -1:
+ bad_count += 1
+ if bad_count > 0:
+ print 'Revision %d determined to be bad.' % rev
+ return 'b'
+ return AskIsGoodBuild(rev, official_builds, status, stdout, stderr)
+
class DownloadJob(object):
"""DownloadJob represents a task to download a given Chromium revision."""
- def __init__(self, context, name, rev, zipfile):
+
+ def __init__(self, context, name, rev, zip_file):
super(DownloadJob, self).__init__()
# Store off the input parameters.
self.context = context
self.name = name
self.rev = rev
- self.zipfile = zipfile
+ self.zip_file = zip_file
self.quit_event = threading.Event()
self.progress_event = threading.Event()
+ self.thread = None
def Start(self):
"""Starts the download."""
fetchargs = (self.context,
self.rev,
- self.zipfile,
+ self.zip_file,
self.quit_event,
self.progress_event)
self.thread = threading.Thread(target=FetchRevision,
def Stop(self):
"""Stops the download which must have been started previously."""
+ assert self.thread, 'DownloadJob must be started before Stop is called.'
self.quit_event.set()
self.thread.join()
- os.unlink(self.zipfile)
+ os.unlink(self.zip_file)
def WaitFor(self):
"""Prints a message and waits for the download to complete. The download
must have been started previously."""
- print "Downloading revision %s..." % str(self.rev)
+ assert self.thread, 'DownloadJob must be started before WaitFor is called.'
+ print 'Downloading revision %s...' % str(self.rev)
self.progress_event.set() # Display progress of download.
self.thread.join()
-def Bisect(base_url,
- platform,
- official_builds,
- is_aura,
- good_rev=0,
- bad_rev=0,
+def Bisect(context,
num_runs=1,
- command="%p %a",
+ command='%p %a',
try_args=(),
profile=None,
- flash_path=None,
interactive=True,
evaluate=AskIsGoodBuild):
"""Given known good and known bad revisions, run a binary search on all
archived revisions to determine the last known good revision.
- @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
- @param official_builds Specify build type (Chromium or Official build).
- @param good_rev Number/tag of the known good revision.
- @param bad_rev Number/tag of the known bad revision.
+ @param context PathContext object initialized with user provided parameters.
@param num_runs Number of times to run each build for asking good/bad.
@param try_args A tuple of arguments to pass to the test application.
@param profile The name of the user profile to run with.
if not profile:
profile = 'profile'
- context = PathContext(base_url, platform, good_rev, bad_rev,
- official_builds, is_aura, flash_path)
+ good_rev = context.good_revision
+ bad_rev = context.bad_revision
cwd = os.getcwd()
- print "Downloading list of known revisions..."
+ print 'Downloading list of known revisions...',
+ if not context.use_local_repo:
+ print '(use --use-local-repo for speed if you have a local checkout)'
+ else:
+ print
_GetDownloadPath = lambda rev: os.path.join(cwd,
'%s-%s' % (str(rev), context.archive_name))
- if official_builds:
+ if context.is_official:
revlist = context.GetOfficialBuildsList()
else:
revlist = context.GetRevList()
maxrev = len(revlist) - 1
pivot = maxrev / 2
rev = revlist[pivot]
- zipfile = _GetDownloadPath(rev)
- fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
+ zip_file = _GetDownloadPath(rev)
+ fetch = DownloadJob(context, 'initial_fetch', rev, zip_file)
fetch.Start()
fetch.WaitFor()
# Binary search time!
- while fetch and fetch.zipfile and maxrev - minrev > 1:
+ while fetch and fetch.zip_file and maxrev - minrev > 1:
if bad_rev < good_rev:
- min_str, max_str = "bad", "good"
+ min_str, max_str = 'bad', 'good'
else:
- min_str, max_str = "good", "bad"
- print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str, \
+ min_str, max_str = 'good', 'bad'
+ print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
revlist[maxrev], max_str)
# Pre-fetch next two possible pivots
try:
(status, stdout, stderr) = RunRevision(context,
rev,
- fetch.zipfile,
+ fetch.zip_file,
profile,
num_runs,
command,
answer = 'g'
print 'Good revision: %s' % rev
else:
- answer = evaluate(rev, official_builds, status, stdout, stderr)
- if answer == 'g' and good_rev < bad_rev or \
- answer == 'b' and bad_rev < good_rev:
+ answer = evaluate(rev, context.is_official, status, stdout, stderr)
+ if ((answer == 'g' and good_rev < bad_rev)
+ or (answer == 'b' and bad_rev < good_rev)):
fetch.Stop()
minrev = pivot
if down_fetch:
up_fetch.WaitFor()
pivot = up_pivot
fetch = up_fetch
- elif answer == 'b' and good_rev < bad_rev or \
- answer == 'g' and bad_rev < good_rev:
+ elif ((answer == 'b' and good_rev < bad_rev)
+ or (answer == 'g' and bad_rev < good_rev)):
fetch.Stop()
maxrev = pivot
if up_fetch:
pivot = up_pivot - 1 # Subtracts 1 because revlist was resized.
else:
pivot = down_pivot
- zipfile = fetch.zipfile
+ zip_file = fetch.zip_file
if down_fetch and fetch != down_fetch:
down_fetch.Stop()
if up_fetch and fetch != up_fetch:
up_fetch.Stop()
else:
- assert False, "Unexpected return value from evaluate(): " + answer
+ assert False, 'Unexpected return value from evaluate(): ' + answer
except SystemExit:
- print "Cleaning up..."
+ print 'Cleaning up...'
for f in [_GetDownloadPath(revlist[down_pivot]),
_GetDownloadPath(revlist[up_pivot])]:
try:
rev = revlist[pivot]
- return (revlist[minrev], revlist[maxrev])
+ return (revlist[minrev], revlist[maxrev], context)
def GetBlinkDEPSRevisionForChromiumRevision(rev):
if m:
return int(m.group(1))
else:
- raise Exception('Could not get Blink revision for Chromium rev %d'
- % rev)
+ raise Exception('Could not get Blink revision for Chromium rev %d' % rev)
-def GetBlinkRevisionForChromiumRevision(self, rev):
+def GetBlinkRevisionForChromiumRevision(context, rev):
"""Returns the blink revision that was in REVISIONS file at
chromium revision |rev|."""
- file_url = "%s/%s%d/REVISIONS" % (self.base_url,
- self._listing_platform_dir, rev)
+ def _IsRevisionNumber(revision):
+ if isinstance(revision, int):
+ return True
+ else:
+ return revision.isdigit()
+ if str(rev) in context.githash_svn_dict:
+ rev = context.githash_svn_dict[str(rev)]
+ file_url = '%s/%s%s/REVISIONS' % (context.base_url,
+ context._listing_platform_dir, rev)
url = urllib.urlopen(file_url)
- data = json.loads(url.read())
+ if url.getcode() == 200:
+ try:
+ data = json.loads(url.read())
+ except ValueError:
+ print 'ValueError for JSON URL: %s' % file_url
+ raise ValueError
+ else:
+ raise ValueError
url.close()
if 'webkit_revision' in data:
- return data['webkit_revision']
+ blink_rev = data['webkit_revision']
+ if not _IsRevisionNumber(blink_rev):
+ blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink'))
+ return blink_rev
else:
raise Exception('Could not get blink revision for cr rev %d' % rev)
+
def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
"""Returns the chromium revision that has the correct blink revision
for blink bisect, DEPS and REVISIONS file might not match since
blink snapshots point to tip of tree blink.
Note: The revisions_final variable might get modified to include
additional revisions."""
-
blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)
while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
revisions_final.sort()
return rev
-def GetChromiumRevision(url):
+
+def GetChromiumRevision(context, url):
"""Returns the chromium revision read from given URL."""
try:
# Location of the latest build revision number
- return int(urllib.urlopen(url).read())
- except Exception, e:
- print('Could not determine latest revision. This could be bad...')
+ latest_revision = urllib.urlopen(url).read()
+ if latest_revision.isdigit():
+ return int(latest_revision)
+ return context.GetSVNRevisionFromGitHash(latest_revision)
+ except Exception:
+ print 'Could not determine latest revision. This could be bad...'
return 999999999
choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
# linux-chromiumos lacks a continuous archive http://crbug.com/78158
parser.add_option('-a', '--archive',
- choices = choices,
- help = 'The buildbot archive to bisect [%s].' %
- '|'.join(choices))
- parser.add_option('-o', action="store_true", dest='official_builds',
- help = 'Bisect across official ' +
- 'Chrome builds (internal only) instead of ' +
- 'Chromium archives.')
- parser.add_option('-b', '--bad', type = 'str',
- help = 'A bad revision to start bisection. ' +
- 'May be earlier or later than the good revision. ' +
- 'Default is HEAD.')
- parser.add_option('-f', '--flash_path', type = 'str',
- help = 'Absolute path to a recent Adobe Pepper Flash ' +
- 'binary to be used in this bisection (e.g. ' +
- 'on Windows C:\...\pepflashplayer.dll and on Linux ' +
- '/opt/google/chrome/PepperFlash/libpepflashplayer.so).')
- parser.add_option('-g', '--good', type = 'str',
- help = 'A good revision to start bisection. ' +
- 'May be earlier or later than the bad revision. ' +
- 'Default is 0.')
- parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
- help = 'Profile to use; this will not reset every run. ' +
- 'Defaults to a clean profile.', default = 'profile')
- parser.add_option('-t', '--times', type = 'int',
- help = 'Number of times to run each build before asking ' +
- 'if it\'s good or bad. Temporary profiles are reused.',
- default = 1)
- parser.add_option('-c', '--command', type = 'str',
- help = 'Command to execute. %p and %a refer to Chrome ' +
- 'executable and specified extra arguments respectively. ' +
- 'Use %s to specify all extra arguments as one string. ' +
- 'Defaults to "%p %a". Note that any extra paths ' +
- 'specified should be absolute.',
- default = '%p %a')
- parser.add_option('-l', '--blink', action='store_true',
- help = 'Use Blink bisect instead of Chromium. ')
- parser.add_option('', '--not-interactive', action='store_true',
- help = 'Use command exit code to tell good/bad revision.',
- default=False)
- parser.add_option('--aura',
- dest='aura',
+ choices=choices,
+ help='The buildbot archive to bisect [%s].' %
+ '|'.join(choices))
+ parser.add_option('-o',
+ action='store_true',
+ dest='official_builds',
+ help='Bisect across official Chrome builds (internal '
+ 'only) instead of Chromium archives.')
+ parser.add_option('-b', '--bad',
+ type='str',
+ help='A bad revision to start bisection. '
+ 'May be earlier or later than the good revision. '
+ 'Default is HEAD.')
+ parser.add_option('-f', '--flash_path',
+ type='str',
+ help='Absolute path to a recent Adobe Pepper Flash '
+ 'binary to be used in this bisection (e.g. '
+ 'on Windows C:\...\pepflashplayer.dll and on Linux '
+ '/opt/google/chrome/PepperFlash/'
+ 'libpepflashplayer.so).')
+ parser.add_option('-d', '--pdf_path',
+ type='str',
+ help='Absolute path to a recent PDF plugin '
+ 'binary to be used in this bisection (e.g. '
+ 'on Windows C:\...\pdf.dll and on Linux '
+ '/opt/google/chrome/libpdf.so). Option also enables '
+ 'print preview.')
+ parser.add_option('-g', '--good',
+ type='str',
+ help='A good revision to start bisection. ' +
+ 'May be earlier or later than the bad revision. ' +
+ 'Default is 0.')
+ parser.add_option('-p', '--profile', '--user-data-dir',
+ type='str',
+ default='profile',
+ help='Profile to use; this will not reset every run. '
+ 'Defaults to a clean profile.')
+ parser.add_option('-t', '--times',
+ type='int',
+ default=1,
+ help='Number of times to run each build before asking '
+ 'if it\'s good or bad. Temporary profiles are reused.')
+ parser.add_option('-c', '--command',
+ type='str',
+ default='%p %a',
+ help='Command to execute. %p and %a refer to Chrome '
+ 'executable and specified extra arguments '
+ 'respectively. Use %s to specify all extra arguments '
+ 'as one string. Defaults to "%p %a". Note that any '
+ 'extra paths specified should be absolute.')
+ parser.add_option('-l', '--blink',
+ action='store_true',
+ help='Use Blink bisect instead of Chromium. ')
+ parser.add_option('', '--not-interactive',
action='store_true',
default=False,
- help='Allow the script to bisect aura builds')
+ help='Use command exit code to tell good/bad revision.')
+ parser.add_option('--asan',
+ dest='asan',
+ action='store_true',
+ default=False,
+ help='Allow the script to bisect ASAN builds')
+ parser.add_option('--use-local-repo',
+ dest='use_local_repo',
+ action='store_true',
+ default=False,
+ help='Allow the script to convert git SHA1 to SVN '
+ 'revision using "git svn find-rev <SHA1>" '
+ 'command from a Chromium checkout.')
(opts, args) = parser.parse_args()
parser.print_help()
return 1
- if opts.aura:
- if opts.archive != 'win' or not opts.official_builds:
- print 'Error: Aura is supported only on Windows platform '\
- 'and official builds.'
+ if opts.asan:
+ supported_platforms = ['linux', 'mac', 'win']
+ if opts.archive not in supported_platforms:
+ print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
+ '|'.join(supported_platforms))
+ return 1
+ if opts.official_builds:
+ print 'Error: Do not yet support bisecting official ASAN builds.'
return 1
- if opts.blink:
+ if opts.asan:
+ base_url = ASAN_BASE_URL
+ elif opts.blink:
base_url = WEBKIT_BASE_URL
else:
base_url = CHROMIUM_BASE_URL
# Create the context. Initialize 0 for the revisions as they are set below.
- context = PathContext(base_url, opts.archive, 0, 0,
- opts.official_builds, opts.aura, None)
+ context = PathContext(base_url, opts.archive, opts.good, opts.bad,
+ opts.official_builds, opts.asan, opts.use_local_repo,
+ opts.flash_path, opts.pdf_path)
# Pick a starting point, try to get HEAD for this.
- if opts.bad:
- bad_rev = opts.bad
- else:
- bad_rev = '999.0.0.0'
- if not opts.official_builds:
- bad_rev = GetChromiumRevision(context.GetLastChangeURL())
+ if not opts.bad:
+ context.bad_revision = '999.0.0.0'
+ context.bad_revision = GetChromiumRevision(
+ context, context.GetLastChangeURL())
# Find out when we were good.
- if opts.good:
- good_rev = opts.good
- else:
- good_rev = '0.0.0.0' if opts.official_builds else 0
+ if not opts.good:
+ context.good_revision = '0.0.0.0' if opts.official_builds else 0
if opts.flash_path:
- flash_path = opts.flash_path
- msg = 'Could not find Flash binary at %s' % flash_path
- assert os.path.exists(flash_path), msg
+ msg = 'Could not find Flash binary at %s' % opts.flash_path
+ assert os.path.exists(opts.flash_path), msg
+
+ if opts.pdf_path:
+ msg = 'Could not find PDF binary at %s' % opts.pdf_path
+ assert os.path.exists(opts.pdf_path), msg
if opts.official_builds:
- good_rev = LooseVersion(good_rev)
- bad_rev = LooseVersion(bad_rev)
+ context.good_revision = LooseVersion(context.good_revision)
+ context.bad_revision = LooseVersion(context.bad_revision)
else:
- good_rev = int(good_rev)
- bad_rev = int(bad_rev)
+ context.good_revision = int(context.good_revision)
+ context.bad_revision = int(context.bad_revision)
if opts.times < 1:
print('Number of times to run (%d) must be greater than or equal to 1.' %
parser.print_help()
return 1
- (min_chromium_rev, max_chromium_rev) = Bisect(
- base_url, opts.archive, opts.official_builds, opts.aura, good_rev,
- bad_rev, opts.times, opts.command, args, opts.profile, opts.flash_path,
- not opts.not_interactive)
+ if opts.asan:
+ evaluator = IsGoodASANBuild
+ else:
+ evaluator = AskIsGoodBuild
+
+ # Save these revision numbers to compare when showing the changelog URL
+ # after the bisect.
+ good_rev = context.good_revision
+ bad_rev = context.bad_revision
+
+ (min_chromium_rev, max_chromium_rev, context) = Bisect(
+ context, opts.times, opts.command, args, opts.profile,
+ not opts.not_interactive, evaluator)
# Get corresponding blink revisions.
try:
min_chromium_rev)
max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
max_chromium_rev)
- except Exception, e:
+ except Exception:
# Silently ignore the failure.
min_blink_rev, max_blink_rev = 0, 0
print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
str(max_chromium_rev))
if min_blink_rev != max_blink_rev:
- print ("NOTE: There is a Blink roll in the range, "
- "you might also want to do a Blink bisect.")
+ print ('NOTE: There is a Blink roll in the range, '
+ 'you might also want to do a Blink bisect.')
print 'CHANGELOG URL:'
if opts.official_builds:
else:
print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
+
if __name__ == '__main__':
sys.exit(main())