#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Snapshot Build Bisect Tool

This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""
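# Example invocation (the flags shown are illustrative; the option parser
# defined later in this script is the authoritative reference):
#   python tools/bisect-builds.py -a linux64 -g 330000 -b 330500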

# The base URL for stored build archives.
CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
                     '/chromium-browser-snapshots')
WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
                   '/chromium-webkit-snapshots')
ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
                 '/chromium-browser-asan')

# URL template for viewing changelogs between revisions.
CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/src/+log/%s..%s')

# URL to convert SVN revision to git hash.
CRREV_URL = ('https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/')

# DEPS file URL.
DEPS_FILE = ('https://chromium.googlesource.com/chromium/src/+/%s/DEPS')

# Blink changelogs URL.
BLINK_CHANGELOG_URL = ('http://build.chromium.org'
                       '/f/chromium/perf/dashboard/ui/changelog_blink.html'
                       '?url=/trunk&range=%d%%3A%d')

DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
                         'known good), but no later than %s (first known bad).')
DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
                         'known bad), but no later than %s (first known good).')

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')

BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')

GITHASH_TO_SVN_URL = {
    'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
    'blink': BLINK_GITHASH_TO_SVN_URL,
}

# Search pattern to be matched in the JSON output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN_OLD = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
CHROMIUM_SEARCH_PATTERN = (
    r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')
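# For example, CHROMIUM_SEARCH_PATTERN matches a commit-message footer such as
# "Cr-Commit-Position: refs/heads/master@{#334455}" (revision number
# illustrative).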

# Search pattern to be matched in the JSON output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
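# For example, BLINK_SEARCH_PATTERN matches a git-svn footer such as
# "git-svn-id: svn://svn.chromium.org/blink/trunk@181268 <repo-uuid>"
# (revision number and UUID illustrative).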

SEARCH_PATTERN = {
    'chromium': CHROMIUM_SEARCH_PATTERN,
    'blink': BLINK_SEARCH_PATTERN,
}

CREDENTIAL_ERROR_MESSAGE = ('You are attempting to access protected data with '
                            'no configured credentials')

###############################################################################

import glob
import httplib
import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile


class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""
  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_asan, use_local_cache, flash_path = None):
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_asan = is_asan
    self.build_type = 'release'
    self.flash_path = flash_path
    # Dictionary which stores svn revision number as key and its
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Whether to cache and use the list of known revisions in a local file to
    # speed up the initialization of the script at the next run.
    self.use_local_cache = use_local_cache

    # Locate the local checkout to speed up the script by using locally stored
    # metadata.
    abs_file_path = os.path.abspath(os.path.realpath(__file__))
    local_src_path = os.path.join(os.path.dirname(abs_file_path), '..')
    if abs_file_path.endswith(os.path.join('tools', 'bisect-builds.py')) and\
        os.path.exists(os.path.join(local_src_path, '.git')):
      self.local_src_path = os.path.normpath(local_src_path)
    else:
      self.local_src_path = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
      self._binary_name = 'chrome'
    elif self.platform in ('mac', 'mac64'):
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform in ('win', 'win64'):
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
      self.archive_name = 'chrome-linux.zip'
      self._archive_extract_dir = 'chrome-linux'
      if self.platform == 'linux':
        self._listing_platform_dir = 'Linux/'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'Linux_x64/'
      elif self.platform == 'linux-arm':
        self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'chromeos':
        self._listing_platform_dir = 'Linux_ChromiumOS_Full/'
    elif self.platform in ('mac', 'mac64'):
      self._listing_platform_dir = 'Mac/'
      self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
    elif self.platform == 'win':
      self._listing_platform_dir = 'Win/'
    elif self.platform == 'win64':
      self._listing_platform_dir = 'Win_x64/'

  def GetASANPlatformDir(self):
    """ASAN builds are in directories like "linux-release", or have filenames
    like "asan-win32-release-277079.zip". This aligns with our platform names
    except in the case of Windows, where they use "win32" instead of "win"."""
    if self.platform == 'win':
      return 'win32'
    else:
      return self.platform

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    if self.is_asan:
      prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
      return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
    else:
      return (self.base_url + '/?delimiter=/&prefix=' +
              self._listing_platform_dir + marker_param)

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
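    # For the linux64 archive, for example, this produces a URL like
    # <CHROMIUM_BASE_URL>/Linux_x64/330500/chrome-linux.zip (revision number
    # illustrative).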
    if self.is_asan:
      return '%s/%s-%s/%s-%d.zip' % (
          ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
          self.GetASANBaseName(), revision)
    if str(revision) in self.githash_svn_dict:
      revision = self.githash_svn_dict[str(revision)]
    return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
                           revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetASANBaseName(self):
    """Returns the base name of the ASAN zip file."""
    if 'linux' in self.platform:
      return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
                                        self.build_type)
    else:
      return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)

  def GetLaunchPath(self, revision):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    if self.is_asan:
      extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
    else:
      extract_dir = self._archive_extract_dir
    return os.path.join(extract_dir, self._binary_name)

  def ParseDirectoryIndex(self, last_known_rev):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _GetMarkerForRev(revision):
      if self.is_asan:
        return '%s-%s/%s-%d.zip' % (
            self.GetASANPlatformDir(), self.build_type,
            self.GetASANBaseName(), revision)
      return '%s%d' % (self._listing_platform_dir, revision)

    def _FetchAndParse(url):
      """Fetches a URL and returns a 3-tuple of ([revisions], next-marker,
      git-hash dictionary). If next-marker is not None, then the listing is a
      partial listing and another fetch should be performed with next-marker
      being the marker= GET parameter."""
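      # The listing is a GCS/S3-style <ListBucketResult> XML document, roughly
      # of the form (element values illustrative):
      #   <ListBucketResult xmlns="...">
      #     <Prefix>Linux_x64/</Prefix>
      #     <IsTruncated>true</IsTruncated>
      #     <NextMarker>Linux_x64/330500</NextMarker>
      #     <CommonPrefixes><Prefix>Linux_x64/330000/</Prefix></CommonPrefixes>
      #   </ListBucketResult>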
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception('Could not locate end namespace for directory index')
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      revisions = []
      githash_svn_dict = {}
      if self.is_asan:
        asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
        # Non-ASAN builds are in a <revision> directory; the ASAN builds are
        # stored flat.
        all_prefixes = document.findall(namespace + 'Contents/' +
                                        namespace + 'Key')
        for prefix in all_prefixes:
          m = asan_regex.match(prefix.text)
          if m:
            try:
              revisions.append(int(m.group(1)))
            except ValueError:
              pass
      else:
        all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                        namespace + 'Prefix')
        # The <Prefix> nodes have content of the form of
        # |_listing_platform_dir/revision/|. Strip off the platform dir and the
        # trailing slash to just have a number.
        for prefix in all_prefixes:
          revnum = prefix.text[prefix_len:-1]
          try:
            revnum = int(revnum)
            revisions.append(revnum)
            # Note: git hashes in chromium-browser-snapshots are ignored here,
            # as they are invalid and fetching pages such as
            # https://chromium.googlesource.com/chromium/src/+/[rev_hash]
            # results in 404 errors.
          except ValueError:
            pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    if last_known_rev:
      revisions = []
      # Optimization: Start paging at the last known revision (local cache).
      next_marker = _GetMarkerForRev(last_known_rev)
      # Optimization: Stop paging at the last known revision (remote).
      last_change_rev = GetChromiumRevision(self, self.GetLastChangeURL())
      if last_known_rev == last_change_rev:
        return []
    else:
      (revisions, next_marker, new_dict) = _FetchAndParse(self.GetListingURL())
      self.githash_svn_dict.update(new_dict)
      last_change_rev = None

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      sys.stdout.write('\rFetching revisions at marker %s' % next_marker)
      sys.stdout.flush()

      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
      if last_change_rev and last_change_rev in new_revisions:
        break
    sys.stdout.write('\r')
    sys.stdout.flush()
    return revisions

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    response = urllib.urlopen(json_url)
    if response.getcode() == 200:
      try:
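        # Gitiles prepends an anti-XSSI prefix (")]}'") to its JSON responses;
        # the [4:] slice below skips it before parsing.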
        data = json.loads(response.read()[4:])
      except ValueError:
        print 'ValueError for JSON URL: %s' % json_url
        raise ValueError
    else:
      raise ValueError
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
      else:
        if depot == 'chromium':
          result = re.search(CHROMIUM_SEARCH_PATTERN_OLD,
                             message[len(message)-1])
          if result:
            return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    raise ValueError

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    def _RunGit(command, path):
      command = ['git'] + command
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, cwd=path)
      (output, _) = proc.communicate()
      return (output, proc.returncode)

    path = self.local_src_path
    if depot == 'blink':
      path = os.path.join(self.local_src_path, 'third_party', 'WebKit')
    revision = None
    try:
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        revision = git_output.strip('\n')
    except ValueError:
      pass
    if not revision:
      command = ['log', '-n1', '--format=%s', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        revision = re.match('SVN changes up to revision ([0-9]+)', git_output)
        revision = revision.group(1) if revision else None
    if revision:
      return revision
    raise ValueError

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    if not self.local_src_path:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""

    cache = {}
    # The cache is stored in the same directory as bisect-builds.py
    cache_filename = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        '.bisect-builds-cache.json')
    cache_dict_key = self.GetListingURL()
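    # The cache file maps each bucket listing URL to its list of revision
    # numbers, plus a 'githash_svn_dict' entry holding the git-hash mapping
    # (see _SaveBucketToCache below).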

    def _LoadBucketFromCache():
      if self.use_local_cache:
        try:
          with open(cache_filename) as cache_file:
            for (key, value) in json.load(cache_file).items():
              cache[key] = value
            revisions = cache.get(cache_dict_key, [])
            githash_svn_dict = cache.get('githash_svn_dict', {})
            if revisions:
              print 'Loaded revisions %d-%d from %s' % (revisions[0],
                  revisions[-1], cache_filename)
            return (revisions, githash_svn_dict)
        except (EnvironmentError, ValueError):
          pass
      return ([], {})

    def _SaveBucketToCache():
      """Save the list of revisions and the git-svn mappings to a file.
      The list of revisions is assumed to be sorted."""
      if self.use_local_cache:
        cache[cache_dict_key] = revlist_all
        cache['githash_svn_dict'] = self.githash_svn_dict
        try:
          with open(cache_filename, 'w') as cache_file:
            json.dump(cache, cache_file)
          print 'Saved revisions %d-%d to %s' % (
              revlist_all[0], revlist_all[-1], cache_filename)
        except EnvironmentError:
          pass

    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)

    (revlist_all, self.githash_svn_dict) = _LoadBucketFromCache()
    last_known_rev = revlist_all[-1] if revlist_all else 0
    if last_known_rev < maxrev:
      revlist_all.extend(map(int, self.ParseDirectoryIndex(last_known_rev)))
      revlist_all = list(set(revlist_all))
      revlist_all.sort()
      _SaveBucketToCache()

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
    if self.base_url == WEBKIT_BASE_URL:
      revlist_all.sort()
      self.good_revision = FixChromiumRevForBlink(revlist,
                                                  revlist_all,
                                                  self,
                                                  self.good_revision)
      self.bad_revision = FixChromiumRevForBlink(revlist,
                                                 revlist_all,
                                                 self,
                                                 self.bad_revision)
    return revlist


def IsMac():
  return sys.platform.startswith('darwin')


def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|."""
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)

  # The Python ZipFile does not support symbolic links, which makes it
  # unsuitable for Mac builds, so use ditto instead.
  if IsMac():
    unzip_cmd = ['ditto', '-x', '-k', filename, '.']
    proc = subprocess.Popen(unzip_cmd, bufsize=0, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    proc.communicate()
    os.chdir(cwd)
    return

  zf = zipfile.ZipFile(filename)
  # Extract files.
  for info in zf.infolist():
    name = info.filename
    if name.endswith('/'):  # dir
      if not os.path.isdir(name):
        os.makedirs(name)
    else:  # file
      directory = os.path.dirname(name)
      if not os.path.isdir(directory):
        os.makedirs(directory)
      out = open(name, 'wb')
      out.write(zf.read(name))
      out.close()
    # Set permissions. Permission info in external_attr is shifted 16 bits.
    os.chmod(name, info.external_attr >> 16L)
  os.chdir(cwd)


def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads revision |rev| to |filename|.
  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master
                        thread to indicate that the progress of the download
                        should be displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    if quit_event and quit_event.isSet():
      raise RuntimeError('Aborting download of revision %s' % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = 'Received %d bytes' % size
      else:
        size = min(totalsize, size)
        progress = 'Received %d of %d bytes, %.2f%%' % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write('\r' + progress)
      sys.stdout.flush()
  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print

  except RuntimeError:
    pass


def CopyMissingFileFromCurrentSource(src_glob, dst):
  """Work around missing files in archives.
  This happens when archives of Chrome don't contain all of the files
  needed to build it. In many cases we can work around this using
  files from the current checkout. The source is in the form of a glob
  so that it can try to look for possible sources of the file in
  multiple locations, but we just arbitrarily try the first match.

  Silently fail if this doesn't work because we don't yet have clear
  markers for builds that require certain files or a way to test
  whether or not launching Chrome succeeded.
  """
  if not os.path.exists(dst):
    matches = glob.glob(src_glob)
    if matches:
      shutil.copy2(matches[0], dst)


def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print 'Trying revision %s...' % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zip_file, tempdir)

  # Hack: Some Chrome OS archives are missing some files; try to copy them
  # from the local directory.
  if context.platform == 'chromeos':
    CopyMissingFileFromCurrentSource('third_party/icu/common/icudtl.dat',
                                     '%s/chrome-linux/icudtl.dat' % tempdir)
    CopyMissingFileFromCurrentSource('*out*/*/libminigbm.so',
                                     '%s/chrome-linux/libminigbm.so' % tempdir)

  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if (context.flash_path and context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently need not
    # be correct. Instead of requiring the user of the script to figure out and
    # pass the correct version, we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

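  # Expand the command template: '%p' is replaced with the path to the
  # downloaded binary, '%a' expands to the test arguments, and '%s' substitutes
  # the test arguments as a single string. With the default command '%p %a'
  # this yields [<binary path>, '--user-data-dir=<profile>', <extra args>...].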
  runcommand = []
  for token in shlex.split(command):
    if token == '%a':
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
          replace('%s', ' '.join(testargs)))

  results = []
  for _ in range(num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))
  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    pass

  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]


# The arguments exit_status, stdout and stderr are unused.
# They are present here because this function is passed to Bisect, which then
# calls it with four arguments.
# pylint: disable=W0613
def AskIsGoodBuild(rev, exit_status, stdout, stderr):
  """Asks the user whether build |rev| is good or bad."""
  # Loop until we get a response that we can parse.
  while True:
    response = raw_input('Revision %s is '
                         '[(g)ood/(b)ad/(r)etry/(u)nknown/(s)tdout/(q)uit]: ' %
                         str(rev))
    if response in ('g', 'b', 'r', 'u'):
      return response
    if response == 'q':
      raise SystemExit()
    if response == 's':
      print stdout
      print stderr


def IsGoodASANBuild(rev, exit_status, stdout, stderr):
  """Determine if an ASAN build |rev| is good or bad.

  Will examine stderr looking for the error message emitted by ASAN. If not
  found, fall back to asking the user."""
  if stderr:
    bad_count = 0
    for line in stderr.splitlines():
      print line
      if line.find('ERROR: AddressSanitizer:') != -1:
        bad_count += 1
    if bad_count > 0:
      print 'Revision %d determined to be bad.' % rev
      return 'b'
  return AskIsGoodBuild(rev, exit_status, stdout, stderr)


def DidCommandSucceed(rev, exit_status, stdout, stderr):
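  """Returns 'b' if the test command exited with a nonzero status, else 'g'."""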
  if exit_status:
    print 'Bad revision: %s' % rev
    return 'b'
  else:
    print 'Good revision: %s' % rev
    return 'g'


class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zip_file):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name
    self.rev = rev
    self.zip_file = zip_file
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()
    self.thread = None

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zip_file,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    assert self.thread, 'DownloadJob must be started before Stop is called.'
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zip_file)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    assert self.thread, 'DownloadJob must be started before WaitFor is called.'
    print 'Downloading revision %s...' % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    try:
      while self.thread.isAlive():
        # The parameter to join is needed to keep the main thread responsive to
        # signals. Without it, the program will not respond to interruptions.
        self.thread.join(1)
    except (KeyboardInterrupt, SystemExit):
      self.Stop()
      raise


def VerifyEndpoint(fetch, context, rev, profile, num_runs, command, try_args,
                   evaluate, expected_answer):
  fetch.WaitFor()
  exit_status = None
  stdout = None
  stderr = None
  try:
    (exit_status, stdout, stderr) = RunRevision(
        context, rev, fetch.zip_file, profile, num_runs, command, try_args)
  except Exception, e:
    print >> sys.stderr, e
  if (evaluate(rev, exit_status, stdout, stderr) != expected_answer):
    print 'Unexpected result at a range boundary! Your range is not correct.'
    raise SystemExit


def Bisect(context,
           num_runs=1,
           command='%p %a',
           try_args=(),
           profile=None,
           evaluate=AskIsGoodBuild,
           verify_range=False):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param context PathContext object initialized with user provided parameters.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.
  @param verify_range If true, tests the first and last revisions in the range
                      before proceeding with the bisect.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

  - If rev 50 is good, the download of rev 25 is cancelled, and the next test
    is run on rev 75.

  - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
    is run on rev 25.
  """

  if not profile:
    profile = 'profile'

  good_rev = context.good_revision
  bad_rev = context.bad_revision
  cwd = os.getcwd()

  print 'Downloading list of known revisions...',
  if not context.use_local_cache:
    print '(use --use-local-cache to cache and re-use the list of revisions)'
  else:
    print
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2
  rev = revlist[pivot]
  fetch = DownloadJob(context, 'initial_fetch', rev, _GetDownloadPath(rev))
  fetch.Start()

  if verify_range:
    minrev_fetch = DownloadJob(
        context, 'minrev_fetch', revlist[minrev],
        _GetDownloadPath(revlist[minrev]))
    maxrev_fetch = DownloadJob(
        context, 'maxrev_fetch', revlist[maxrev],
        _GetDownloadPath(revlist[maxrev]))
    minrev_fetch.Start()
    maxrev_fetch.Start()
    try:
      VerifyEndpoint(minrev_fetch, context, revlist[minrev], profile, num_runs,
                     command, try_args, evaluate,
                     'b' if bad_rev < good_rev else 'g')
      VerifyEndpoint(maxrev_fetch, context, revlist[maxrev], profile, num_runs,
                     command, try_args, evaluate,
                     'g' if bad_rev < good_rev else 'b')
    except (KeyboardInterrupt, SystemExit):
      print 'Cleaning up...'
      fetch.Stop()
      sys.exit(0)
    finally:
      minrev_fetch.Stop()
      maxrev_fetch.Stop()

  fetch.WaitFor()

  # Binary search time!
  while fetch and fetch.zip_file and maxrev - minrev > 1:
    if bad_rev < good_rev:
      min_str, max_str = 'bad', 'good'
    else:
      min_str, max_str = 'good', 'bad'
    print ('Bisecting range [%s (%s), %s (%s)], '
           'roughly %d steps left.') % (revlist[minrev], min_str,
                                        revlist[maxrev], max_str,
                                        int(maxrev - minrev).bit_length())

    # Pre-fetch next two possible pivots
    #   - down_pivot is the next revision to check if the current revision
    #     turns out to be bad.
    #   - up_pivot is the next revision to check if the current revision
    #     turns out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    exit_status = None
    stdout = None
    stderr = None
    try:
      (exit_status, stdout, stderr) = RunRevision(
          context, rev, fetch.zip_file, profile, num_runs, command, try_args)
    except Exception, e:
      print >> sys.stderr, e

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      answer = evaluate(rev, exit_status, stdout, stderr)
      if ((answer == 'g' and good_rev < bad_rev)
          or (answer == 'b' and bad_rev < good_rev)):
        fetch.Stop()
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
        fetch = None
        if up_fetch:
          up_fetch.WaitFor()
          pivot = up_pivot
          fetch = up_fetch
      elif ((answer == 'b' and good_rev < bad_rev)
            or (answer == 'g' and bad_rev < good_rev)):
        fetch.Stop()
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
        fetch = None
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          fetch = down_fetch
      elif answer == 'r':
| 879 | pass # Retry requires no changes. |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 880 | elif answer == 'u': |
| 881 | # Nuke the revision from the revlist and choose a new pivot. |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 882 | fetch.Stop() |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 883 | revlist.pop(pivot) |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 884 | maxrev -= 1 # Assumes maxrev >= pivot. |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 885 | |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 886 | if maxrev - minrev > 1: |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 887 | # Alternate between using down_pivot or up_pivot for the new pivot |
| 888 | # point, without affecting the range. Do this instead of setting the |
| 889 | # pivot to the midpoint of the new range because adjacent revisions |
| 890 | # are likely affected by the same issue that caused the (u)nknown |
| 891 | # response. |
| 892 | if up_fetch and down_fetch: |
| 893 | fetch = [up_fetch, down_fetch][len(revlist) % 2] |
| 894 | elif up_fetch: |
| 895 | fetch = up_fetch |
| 896 | else: |
| 897 | fetch = down_fetch |
| 898 | fetch.WaitFor() |
| 899 | if fetch == up_fetch: |
| 900 | pivot = up_pivot - 1 # Subtracts 1 because revlist was resized. |
| 901 | else: |
| 902 | pivot = down_pivot |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 903 | |
| 904 | if down_fetch and fetch != down_fetch: |
| 905 | down_fetch.Stop() |
| 906 | if up_fetch and fetch != up_fetch: |
| 907 | up_fetch.Stop() |
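        # Illustrative walk-through: if the pop left revlist with an even
        # length, index 0 picks up_fetch and the pivot moves up (minus one
        # because revlist shrank); an odd length picks down_fetch and the
        # pivot moves down.  The prefetch that was not chosen is stopped
        # just above.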
| 908 | else: |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 909 | assert False, 'Unexpected return value from evaluate(): ' + answer |
skobes | 21b5cdfb | 2016-03-21 23:13:02 | [diff] [blame] | 910 | except (KeyboardInterrupt, SystemExit): |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 911 | print 'Cleaning up...' |
skobes | 21b5cdfb | 2016-03-21 23:13:02 | [diff] [blame] | 912 | for f in [_GetDownloadPath(rev), |
| 913 | _GetDownloadPath(revlist[down_pivot]), |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 914 | _GetDownloadPath(revlist[up_pivot])]: |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 915 | try: |
| 916 | os.unlink(f) |
| 917 | except OSError: |
| 918 | pass |
| 919 | sys.exit(0) |
| 920 | |
| 921 | rev = revlist[pivot] |
| 922 | |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 923 | return (revlist[minrev], revlist[maxrev], context) |
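  # Interpretation note: revlist[minrev] and revlist[maxrev] bracket the
  # behavior change; which endpoint is "good" and which is "bad" depends on
  # whether the bad revision given was older or newer than the good one.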
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 924 | |
| 925 | |
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 926 | def GetBlinkDEPSRevisionForChromiumRevision(self, rev): |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 927 |   """Returns the Blink revision that was in the DEPS file at
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 928 |   chromium revision |rev|."""
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 929 | |
| 930 | def _GetBlinkRev(url, blink_re): |
| 931 | m = blink_re.search(url.read()) |
| 932 | url.close() |
| 933 | if m: |
fmalita | a898d22 | 2016-07-12 22:29:03 | [diff] [blame] | 934 | return m.group(1) |
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 935 | |
Di Mu | 08c5968 | 2016-07-11 23:05:07 | [diff] [blame] | 936 | url = urllib.urlopen(DEPS_FILE % GetGitHashFromSVNRevision(rev)) |
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 937 | if url.getcode() == 200: |
Di Mu | 08c5968 | 2016-07-11 23:05:07 | [diff] [blame] | 938 | blink_re = re.compile(r'webkit_revision\D*\d+;\D*\d+;(\w+)') |
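    # Illustrative only (not a real DEPS excerpt): the pattern captures the
    # token after two "number;" groups, e.g.
    #   blink_re.search('webkit_revision x1;2;deadbeef').group(1) == 'deadbeef'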
| 939 | blink_git_sha = _GetBlinkRev(url, blink_re) |
| 940 | return self.GetSVNRevisionFromGitHash(blink_git_sha, 'blink') |
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 941 | raise Exception('Could not get Blink revision for Chromium rev %d' % rev) |
[email protected] | 37ed317 | 2013-09-24 23:49:30 | [diff] [blame] | 942 | |
| 943 | |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 944 | def GetBlinkRevisionForChromiumRevision(context, rev): |
[email protected] | 37ed317 | 2013-09-24 23:49:30 | [diff] [blame] | 945 | """Returns the blink revision that was in REVISIONS file at |
| 946 | chromium revision |rev|.""" |
[email protected] | 3e7c8532 | 2014-06-27 20:27:36 | [diff] [blame] | 947 | def _IsRevisionNumber(revision): |
| 948 | if isinstance(revision, int): |
| 949 | return True |
| 950 | else: |
| 951 | return revision.isdigit() |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 952 | if str(rev) in context.githash_svn_dict: |
| 953 | rev = context.githash_svn_dict[str(rev)] |
| 954 | file_url = '%s/%s%s/REVISIONS' % (context.base_url, |
| 955 | context._listing_platform_dir, rev) |
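  # REVISIONS is a small JSON file; an illustrative (not verbatim) payload is
  #   {"chromium_revision": 123456, "webkit_revision": 167890}
  # where webkit_revision may instead be a git hash on newer builds, hence
  # the conversion below.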
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 956 | url = urllib.urlopen(file_url) |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 957 | if url.getcode() == 200: |
| 958 | try: |
| 959 | data = json.loads(url.read()) |
| 960 | except ValueError: |
| 961 | print 'ValueError for JSON URL: %s' % file_url |
| 962 | raise ValueError |
| 963 | else: |
| 964 | raise ValueError |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 965 | url.close() |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 966 | if 'webkit_revision' in data: |
[email protected] | 3e7c8532 | 2014-06-27 20:27:36 | [diff] [blame] | 967 | blink_rev = data['webkit_revision'] |
| 968 | if not _IsRevisionNumber(blink_rev): |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 969 | blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink')) |
[email protected] | 3e7c8532 | 2014-06-27 20:27:36 | [diff] [blame] | 970 | return blink_rev |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 971 | else: |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 972 | raise Exception('Could not get blink revision for cr rev %d' % rev) |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 973 | |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 974 | |
[email protected] | 37ed317 | 2013-09-24 23:49:30 | [diff] [blame] | 975 | def FixChromiumRevForBlink(revisions_final, revisions, self, rev): |
| 976 |   """Returns the chromium revision that has the correct blink revision
| 977 |   for a blink bisect; the DEPS and REVISIONS files might not match, since
| 978 |   blink snapshots point to tip-of-tree blink.
| 979 |   Note: The revisions_final variable might get modified to include
| 980 |   additional revisions."""
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 981 | blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(self, rev) |
[email protected] | 37ed317 | 2013-09-24 23:49:30 | [diff] [blame] | 982 | |
| 983 | while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev): |
| 984 | idx = revisions.index(rev) |
| 985 | if idx > 0: |
| 986 | rev = revisions[idx-1] |
| 987 | if rev not in revisions_final: |
| 988 | revisions_final.insert(0, rev) |
| 989 | |
| 990 | revisions_final.sort() |
| 991 | return rev |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 992 | |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 993 | |
[email protected] | 5980b75 | 2014-07-02 00:34:40 | [diff] [blame] | 994 | def GetChromiumRevision(context, url): |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 995 | """Returns the chromium revision read from given URL.""" |
| 996 | try: |
| 997 |     # Read the latest build revision (or git hash) from the given URL.
[email protected] | 5980b75 | 2014-07-02 00:34:40 | [diff] [blame] | 998 | latest_revision = urllib.urlopen(url).read() |
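    # Illustrative values: a plain number like '391730' (SVN-style) or a
    # 40-character git hash; the isdigit() check below distinguishes the two.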
| 999 | if latest_revision.isdigit(): |
| 1000 | return int(latest_revision) |
| 1001 | return context.GetSVNRevisionFromGitHash(latest_revision) |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1002 | except Exception: |
| 1003 | print 'Could not determine latest revision. This could be bad...' |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 1004 | return 999999999 |
| 1005 | |
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 1006 | def GetGitHashFromSVNRevision(svn_revision): |
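  """Returns the git hash for |svn_revision| via the crrev redirect service.

  Falls through (implicitly returning None) if the HTTP status is not 200 or
  the response has no 'git_sha' field.
  """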
| 1007 | crrev_url = CRREV_URL + str(svn_revision) |
| 1008 | url = urllib.urlopen(crrev_url) |
| 1009 | if url.getcode() == 200: |
| 1010 | data = json.loads(url.read()) |
| 1011 | if 'git_sha' in data: |
| 1012 | return data['git_sha'] |
| 1013 | |
pshenoy | 9ce271f | 2014-09-02 22:14:05 | [diff] [blame] | 1014 | def PrintChangeLog(min_chromium_rev, max_chromium_rev): |
| 1015 | """Prints the changelog URL.""" |
| 1016 | |
pshenoy | cd6bd68 | 2014-09-10 20:50:22 | [diff] [blame] | 1017 | print (' ' + CHANGELOG_URL % (GetGitHashFromSVNRevision(min_chromium_rev), |
| 1018 | GetGitHashFromSVNRevision(max_chromium_rev))) |
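  # The printed line is CHANGELOG_URL with the two git hashes substituted,
  # i.e. something like .../+log/<good_sha>..<bad_sha> (placeholders, not
  # real hashes).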
pshenoy | 9ce271f | 2014-09-02 22:14:05 | [diff] [blame] | 1019 | |
elawrence | 446bcc3 | 2017-04-14 17:18:51 | [diff] [blame] | 1020 | def error_internal_option(option, opt, value, parser): |
| 1021 | raise optparse.OptionValueError( |
| 1022 | 'The -o and -r options are only\navailable in the internal version of ' |
| 1023 |       'this script. Google\nemployees should visit http://go/bisect-builds '
| 1024 | 'for\nconfiguration instructions.') |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 1025 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 1026 | def main(): |
[email protected] | 2c1d273 | 2009-10-29 19:52:17 | [diff] [blame] | 1027 | usage = ('%prog [options] [-- chromium-options]\n' |
[email protected] | 887c918 | 2013-02-12 20:30:31 | [diff] [blame] | 1028 | 'Perform binary search on the snapshot builds to find a minimal\n' |
| 1029 | 'range of revisions where a behavior change happened. The\n' |
| 1030 | 'behaviors are described as "good" and "bad".\n' |
| 1031 | 'It is NOT assumed that the behavior of the later revision is\n' |
[email protected] | 09c58da | 2013-01-07 21:30:17 | [diff] [blame] | 1032 | 'the bad one.\n' |
[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 1033 | '\n' |
[email protected] | 887c918 | 2013-02-12 20:30:31 | [diff] [blame] | 1034 |          'Revision numbers should be\n'
[email protected] | 887c918 | 2013-02-12 20:30:31 | [diff] [blame] | 1035 |          '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
| 1036 |          '  Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
| 1037 |          '  for earlier revs.\n'
| 1038 |          '  Chrome\'s about: build number and omahaproxy branch_revision\n'
| 1039 |          '  are incorrect; they are from branches.\n'
| 1040 | '\n' |
[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 1041 | 'Tip: add "-- --no-first-run" to bypass the first run prompts.') |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 1042 | parser = optparse.OptionParser(usage=usage) |
[email protected] | 1a45d22 | 2009-09-19 01:58:57 | [diff] [blame] | 1043 | # Strangely, the default help output doesn't include the choice list. |
mikecase | a8cd284c | 2014-12-02 21:30:58 | [diff] [blame] | 1044 | choices = ['mac', 'mac64', 'win', 'win64', 'linux', 'linux64', 'linux-arm', |
dmazzoni | 76e907d | 2015-01-22 08:14:49 | [diff] [blame] | 1045 | 'chromeos'] |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 1046 | parser.add_option('-a', '--archive', |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1047 | choices=choices, |
| 1048 | help='The buildbot archive to bisect [%s].' % |
| 1049 | '|'.join(choices)) |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1050 | parser.add_option('-b', '--bad', |
| 1051 | type='str', |
| 1052 | help='A bad revision to start bisection. ' |
| 1053 | 'May be earlier or later than the good revision. ' |
| 1054 | 'Default is HEAD.') |
| 1055 | parser.add_option('-f', '--flash_path', |
| 1056 | type='str', |
| 1057 | help='Absolute path to a recent Adobe Pepper Flash ' |
| 1058 | 'binary to be used in this bisection (e.g. ' |
| 1059 | 'on Windows C:\...\pepflashplayer.dll and on Linux ' |
| 1060 | '/opt/google/chrome/PepperFlash/' |
| 1061 | 'libpepflashplayer.so).') |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1062 | parser.add_option('-g', '--good', |
| 1063 | type='str', |
| 1064 | help='A good revision to start bisection. ' + |
| 1065 | 'May be earlier or later than the bad revision. ' + |
| 1066 | 'Default is 0.') |
| 1067 | parser.add_option('-p', '--profile', '--user-data-dir', |
| 1068 | type='str', |
| 1069 | default='profile', |
| 1070 | help='Profile to use; this will not reset every run. ' |
| 1071 | 'Defaults to a clean profile.') |
| 1072 | parser.add_option('-t', '--times', |
| 1073 | type='int', |
| 1074 | default=1, |
| 1075 | help='Number of times to run each build before asking ' |
| 1076 | 'if it\'s good or bad. Temporary profiles are reused.') |
| 1077 | parser.add_option('-c', '--command', |
| 1078 | type='str', |
| 1079 | default='%p %a', |
| 1080 | help='Command to execute. %p and %a refer to Chrome ' |
| 1081 | 'executable and specified extra arguments ' |
| 1082 | 'respectively. Use %s to specify all extra arguments ' |
| 1083 | 'as one string. Defaults to "%p %a". Note that any ' |
| 1084 | 'extra paths specified should be absolute.') |
| 1085 | parser.add_option('-l', '--blink', |
| 1086 | action='store_true', |
| 1087 | help='Use Blink bisect instead of Chromium. ') |
| 1088 | parser.add_option('', '--not-interactive', |
| 1089 | action='store_true', |
| 1090 | default=False, |
| 1091 | help='Use command exit code to tell good/bad revision.') |
[email protected] | 01188669 | 2014-08-01 21:00:21 | [diff] [blame] | 1092 | parser.add_option('--asan', |
| 1093 | dest='asan', |
| 1094 | action='store_true', |
| 1095 | default=False, |
| 1096 | help='Allow the script to bisect ASAN builds') |
rob | 724c906 | 2015-01-22 00:26:42 | [diff] [blame] | 1097 | parser.add_option('--use-local-cache', |
| 1098 | dest='use_local_cache', |
[email protected] | 6a7a5d6 | 2014-07-09 04:45:50 | [diff] [blame] | 1099 | action='store_true', |
| 1100 | default=False, |
rob | 724c906 | 2015-01-22 00:26:42 | [diff] [blame] | 1101 | help='Use a local file in the current directory to cache ' |
| 1102 | 'a list of known revisions to speed up the ' |
| 1103 | 'initialization of this script.') |
skobes | 21b5cdfb | 2016-03-21 23:13:02 | [diff] [blame] | 1104 | parser.add_option('--verify-range', |
| 1105 | dest='verify_range', |
| 1106 | action='store_true', |
| 1107 | default=False, |
| 1108 | help='Test the first and last revisions in the range ' + |
| 1109 | 'before proceeding with the bisect.') |
elawrence | 446bcc3 | 2017-04-14 17:18:51 | [diff] [blame] | 1110 |   parser.add_option('-r', action='callback', callback=error_internal_option)
| 1111 |   parser.add_option('-o', action='callback', callback=error_internal_option)
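  # Illustrative -c usage (flag values are hypothetical): passing
  #   -c '%p --disable-extensions %a'
  # runs each downloaded build with --disable-extensions plus whatever
  # arguments were given after "--" on the bisect command line.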
[email protected] | b3b2051 | 2013-08-26 18:51:04 | [diff] [blame] | 1112 | |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 1113 | (opts, args) = parser.parse_args() |
| 1114 | |
| 1115 | if opts.archive is None: |
[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 1116 | print 'Error: missing required parameter: --archive' |
| 1117 | print |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 1118 | parser.print_help() |
| 1119 | return 1 |
| 1120 | |
[email protected] | 01188669 | 2014-08-01 21:00:21 | [diff] [blame] | 1121 | if opts.asan: |
| 1122 | supported_platforms = ['linux', 'mac', 'win'] |
| 1123 | if opts.archive not in supported_platforms: |
| 1124 | print 'Error: ASAN bisecting only supported on these platforms: [%s].' % ( |
| 1125 | '|'.join(supported_platforms)) |
| 1126 | return 1 |
[email protected] | 01188669 | 2014-08-01 21:00:21 | [diff] [blame] | 1127 | |
| 1128 | if opts.asan: |
| 1129 | base_url = ASAN_BASE_URL |
| 1130 | elif opts.blink: |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 1131 | base_url = WEBKIT_BASE_URL |
| 1132 | else: |
| 1133 | base_url = CHROMIUM_BASE_URL |
| 1134 | |
[email protected] | 183706d9 | 2011-06-10 13:06:22 | [diff] [blame] | 1135 |   # Create the context. Good/bad revisions not given on the command line are filled in below.
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 1136 | context = PathContext(base_url, opts.archive, opts.good, opts.bad, |
Jason Kersey | 97bb027a | 2016-05-11 20:10:43 | [diff] [blame] | 1137 | opts.asan, opts.use_local_cache, |
vitalybuka | 4d1e1e41 | 2015-07-06 17:21:06 | [diff] [blame] | 1138 | opts.flash_path) |
mikecase | a8cd284c | 2014-12-02 21:30:58 | [diff] [blame] | 1139 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 1140 | # Pick a starting point, try to get HEAD for this. |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 1141 | if not opts.bad: |
| 1142 | context.bad_revision = '999.0.0.0' |
| 1143 | context.bad_revision = GetChromiumRevision( |
| 1144 | context, context.GetLastChangeURL()) |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 1145 | |
| 1146 | # Find out when we were good. |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 1147 | if not opts.good: |
Jason Kersey | 97bb027a | 2016-05-11 20:10:43 | [diff] [blame] | 1148 | context.good_revision = 0 |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 1149 | |
[email protected] | fc3702e | 2013-11-09 04:23:00 | [diff] [blame] | 1150 | if opts.flash_path: |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 1151 | msg = 'Could not find Flash binary at %s' % opts.flash_path |
| 1152 | assert os.path.exists(opts.flash_path), msg |
[email protected] | fc3702e | 2013-11-09 04:23:00 | [diff] [blame] | 1153 | |
Jason Kersey | 97bb027a | 2016-05-11 20:10:43 | [diff] [blame] | 1154 | context.good_revision = int(context.good_revision) |
| 1155 | context.bad_revision = int(context.bad_revision) |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 1156 | |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 1157 | if opts.times < 1: |
| 1158 | print('Number of times to run (%d) must be greater than or equal to 1.' % |
| 1159 | opts.times) |
| 1160 | parser.print_help() |
| 1161 | return 1 |
| 1162 | |
skobes | 21b5cdfb | 2016-03-21 23:13:02 | [diff] [blame] | 1163 | if opts.not_interactive: |
| 1164 | evaluator = DidCommandSucceed |
| 1165 | elif opts.asan: |
[email protected] | 01188669 | 2014-08-01 21:00:21 | [diff] [blame] | 1166 | evaluator = IsGoodASANBuild |
| 1167 | else: |
| 1168 | evaluator = AskIsGoodBuild |
| 1169 | |
[email protected] | 2e0f267 | 2014-08-13 20:32:58 | [diff] [blame] | 1170 | # Save these revision numbers to compare when showing the changelog URL |
| 1171 | # after the bisect. |
| 1172 | good_rev = context.good_revision |
| 1173 | bad_rev = context.bad_revision |
| 1174 | |
| 1175 | (min_chromium_rev, max_chromium_rev, context) = Bisect( |
| 1176 | context, opts.times, opts.command, args, opts.profile, |
skobes | 21b5cdfb | 2016-03-21 23:13:02 | [diff] [blame] | 1177 | evaluator, opts.verify_range) |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 1178 | |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 1179 | # Get corresponding blink revisions. |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 1180 | try: |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 1181 | min_blink_rev = GetBlinkRevisionForChromiumRevision(context, |
| 1182 | min_chromium_rev) |
| 1183 | max_blink_rev = GetBlinkRevisionForChromiumRevision(context, |
| 1184 | max_chromium_rev) |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1185 | except Exception: |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 1186 | # Silently ignore the failure. |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 1187 | min_blink_rev, max_blink_rev = 0, 0 |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 1188 | |
[email protected] | 3bdaa475 | 2013-09-30 20:13:36 | [diff] [blame] | 1189 | if opts.blink: |
| 1190 | # We're done. Let the user know the results in an official manner. |
| 1191 | if good_rev > bad_rev: |
| 1192 | print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev)) |
| 1193 | else: |
| 1194 | print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev)) |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 1195 | |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 1196 | print 'BLINK CHANGELOG URL:' |
| 1197 | print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev) |
[email protected] | 3bdaa475 | 2013-09-30 20:13:36 | [diff] [blame] | 1198 | |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 1199 | else: |
[email protected] | 3bdaa475 | 2013-09-30 20:13:36 | [diff] [blame] | 1200 | # We're done. Let the user know the results in an official manner. |
| 1201 | if good_rev > bad_rev: |
| 1202 | print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev), |
| 1203 | str(max_chromium_rev)) |
| 1204 | else: |
| 1205 | print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev), |
| 1206 | str(max_chromium_rev)) |
| 1207 | if min_blink_rev != max_blink_rev: |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1208 | print ('NOTE: There is a Blink roll in the range, ' |
| 1209 | 'you might also want to do a Blink bisect.') |
[email protected] | 3bdaa475 | 2013-09-30 20:13:36 | [diff] [blame] | 1210 | |
| 1211 | print 'CHANGELOG URL:' |
Jason Kersey | 97bb027a | 2016-05-11 20:10:43 | [diff] [blame] | 1212 | PrintChangeLog(min_chromium_rev, max_chromium_rev) |
[email protected] | cb155a8 | 2011-11-29 17:25:34 | [diff] [blame] | 1213 | |
[email protected] | 4df583c | 2014-07-31 17:11:55 | [diff] [blame] | 1214 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 1215 | if __name__ == '__main__': |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 1216 | sys.exit(main()) |
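# Example session (illustrative; revisions are hypothetical and the script
# path may differ in your checkout):
#   python bisect-builds.py -a linux64 -g 391000 -b 391730 -- --no-first-run
# After narrowing the range, the script prints the DONE message and the
# CHANGELOG URL for the remaining revisions.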