[email protected] | cb155a8 | 2011-11-29 17:25:34 | [diff] [blame] | 1 | #!/usr/bin/env python |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 2 | # Copyright (c) 2012 The Chromium Authors. All rights reserved. |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 3 | # Use of this source code is governed by a BSD-style license that can be |
| 4 | # found in the LICENSE file. |
| 5 | |
| 6 | """Snapshot Build Bisect Tool |
| 7 | |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 8 | This script bisects a snapshot archive using binary search. It starts at |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 9 | a bad revision (it will try to guess HEAD) and asks for a last known-good |
| 10 | revision. It will then binary search across this revision range by downloading, |
| 11 | unzipping, and opening Chromium for you. After testing the specific revision, |
| 12 | it will ask you whether it is good or bad before continuing the search. |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 13 | """ |
| 14 | |
# The root URL for storage.
CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
                     '/chromium-browser-snapshots')
WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
                   '/chromium-webkit-snapshots')

# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

# Changelogs URL. Takes (start_revision, end_revision) ints via %.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                'perf/dashboard/ui/changelog.html?' \
                'url=/trunk/src&range=%d%%3A%d'

# Official Changelogs URL. Takes (old_version, new_version) strings via %.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/'\
                         'changelog?old_version=%s&new_version=%s'

# DEPS file URL. Takes a single revision int via %.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# Blink Changelogs URL. Takes (start_revision, end_revision) ints via %.
BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                      'perf/dashboard/ui/changelog_blink.html?' \
                      'url=/trunk&range=%d%%3A%d'

# Messages printed at the end of a bisect, depending on which direction
# (good->bad or bad->good) the search ran. Take two revision strings via %.
DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
                        '(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
                        '(known bad), but no later than %s (first known good).'
[email protected] | 05ff3fd | 2012-04-17 23:24:06 | [diff] [blame] | 42 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 43 | ############################################################################### |
| 44 | |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 45 | import json |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 46 | import math |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 47 | import optparse |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 48 | import os |
[email protected] | d4bf358 | 2009-09-20 00:56:38 | [diff] [blame] | 49 | import pipes |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 50 | import re |
| 51 | import shutil |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 52 | import subprocess |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 53 | import sys |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 54 | import tempfile |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 55 | import threading |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 56 | import urllib |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 57 | from distutils.version import LooseVersion |
[email protected] | 183706d9 | 2011-06-10 13:06:22 | [diff] [blame] | 58 | from xml.etree import ElementTree |
[email protected] | bd8dcb9 | 2010-03-31 01:05:24 | [diff] [blame] | 59 | import zipfile |
| 60 | |
[email protected] | cb155a8 | 2011-11-29 17:25:34 | [diff] [blame] | 61 | |
class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_aura):
    """Args:
      base_url: Root URL of the snapshot storage server.
      platform: Which build to use, as passed to the '-a/--archive' option
                ('mac', 'win', 'linux', 'linux64' or 'linux-arm').
      good_revision: Number/tag of the known-good revision.
      bad_revision: Number/tag of the known-bad revision.
      is_official: Whether to bisect official builds instead of snapshots.
      is_aura: Whether to bisect official Aura builds.

    Raises Exception for an unrecognized platform.
    """
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_aura = is_aura

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm'):
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32bit/'
        self.archive_name = 'chrome-precise32bit.zip'
        self._archive_extract_dir = 'chrome-precise32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64bit/'
        self.archive_name = 'chrome-precise64bit.zip'
        self._archive_extract_dir = 'chrome-precise64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        if self.is_aura:
          self._listing_platform_dir = 'win-aura/'
        else:
          self._listing_platform_dir = 'win/'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return self.base_url + '/?delimiter=/&prefix=' + \
        self._listing_platform_dir + marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    # Official builds group archives by build number first; snapshots group
    # them by platform directory first.
    if self.is_official:
      return "%s/%s/%s%s" % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      return "%s/%s%s/%s" % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def IsAuraBuild(self, build):
    """Check the given build is Aura (fourth version component is '1')."""
    return build.split('.')[3] == '1'

  def IsASANBuild(self, build):
    """Check the given build is ASAN build (fourth version component is '2')."""
    return build.split('.')[3] == '2'

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and another
      fetch should be performed with next-marker being the marker= GET
      parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text

      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form of
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        try:
          revisions.append(int(revnum))
        except ValueError:
          # Skip entries that are not plain revision numbers.
          pass
      return (revisions, next_marker)

    # Fetch the first list of revisions.
    (revisions, next_marker) = _FetchAndParse(self.GetListingURL())

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
    return revisions

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    # ParseDirectoryIndex() already yields ints; just filter and sort.
    revlist = [x for x in self.ParseDirectoryIndex()
               if int(minrev) <= x <= int(maxrev)]
    revlist.sort()
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
          self._listing_platform_dir + self.archive_name
      try:
        # Probe whether this build has an archive for our platform; builds
        # without one are skipped via the except clause below.
        connection = urllib.urlopen(path)
        connection.close()
        if build_number > maxrev:
          break
        if build_number >= minrev:
          # If we are bisecting Aura, we want to include only builds which
          # end with ".1".
          if self.is_aura:
            if self.IsAuraBuild(str(build_number)):
              final_list.append(str(build_number))
          # If we are bisecting only official builds (without --aura),
          # we can not include builds which end with '.1' or '.2' since
          # they have different folder hierarchy inside.
          elif (not self.IsAuraBuild(str(build_number)) and
                not self.IsASANBuild(str(build_number))):
            final_list.append(str(build_number))
      except IOError:
        # urllib.urlopen raises IOError on failure. (The original code caught
        # urllib.HTTPError, which does not exist -- HTTPError lives in urllib2
        # -- so a failed probe raised AttributeError instead of being skipped.)
        pass
    return final_list
[email protected] | bd8dcb9 | 2010-03-31 01:05:24 | [diff] [blame] | 266 | |
def UnzipFilenameToDir(filename, dir):
  """Unzip |filename| to directory |dir|.

  Restores each entry's permission bits (stored in the upper 16 bits of
  external_attr) and always returns to the original working directory,
  even if extraction fails.
  """
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(dir):
    os.mkdir(dir)
  os.chdir(dir)
  try:
    # Extract files.
    for info in zf.infolist():
      name = info.filename
      if name.endswith('/'):  # dir
        if not os.path.isdir(name):
          os.makedirs(name)
      else:  # file
        # Guard against empty dirname for top-level entries: the original
        # called os.makedirs('') for them, which raises OSError.
        subdir = os.path.dirname(name)
        if subdir and not os.path.isdir(subdir):
          os.makedirs(subdir)
        out = open(name, 'wb')
        try:
          out.write(zf.read(name))
        finally:
          out.close()
        # Set permissions. Permission info in external_attr is shifted 16 bits.
        os.chmod(name, info.external_attr >> 16)
  finally:
    zf.close()
    os.chdir(cwd)
[email protected] | bd8dcb9 | 2010-03-31 01:05:24 | [diff] [blame] | 293 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 294 | |
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads the build archive for revision |rev| into |filename|.
  @param context A PathContext instance used to construct the download URL.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    # Called by urlretrieve after each chunk; this is where we poll the
    # control events owned by the master thread.
    if quit_event and quit_event.is_set():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if not (progress_event and progress_event.is_set()):
      return
    received = blocknum * blocksize
    if totalsize == -1:  # Total size not known.
      progress = "Received %d bytes" % received
    else:
      received = min(totalsize, received)
      progress = "Received %d of %d bytes, %.2f%%" % (
          received, totalsize, 100.0 * received / totalsize)
    # Send a \r to let all progress messages use just one line of output.
    sys.stdout.write("\r" + progress)
    sys.stdout.flush()

  try:
    urllib.urlretrieve(context.GetDownloadURL(rev), filename, ReportHook)
    if progress_event and progress_event.is_set():
      print("")
  except RuntimeError:
    # Raised by ReportHook when the master thread requested an abort.
    pass
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 328 | |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 329 | |
def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test."""
  print("Trying revision %s..." % str(revision))

  # Unpack the archive into a scratch directory and make it the working dir.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Arguments handed to the browser under test.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if context.is_official and context.platform.startswith('linux'):
    testargs.append('--no-sandbox')

  # Expand the command template: '%a' becomes the test args as separate
  # tokens; inside other tokens, '%p' is the launch path and '%s' is the
  # test args joined into a single string.
  runcommand = []
  for token in command.split():
    if token == "%a":
      runcommand.extend(testargs)
    else:
      expanded = token.replace('%p', context.GetLaunchPath())
      runcommand.append(expanded.replace('%s', ' '.join(testargs)))

  # Run the build as many times as specified.
  for _ in range(num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    # Best-effort cleanup; leftover temp dirs are harmless.
    pass

  return (subproc.returncode, stdout, stderr)
[email protected] | 79f1474 | 2010-03-10 01:01:57 | [diff] [blame] | 369 | |
[email protected] | cb155a8 | 2011-11-29 17:25:34 | [diff] [blame] | 370 | |
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad.

  Returns one of 'g', 'b', 'r' or 'u'; raises SystemExit when the user
  chooses to quit. Loops until an answer it can parse is entered.
  """
  prompt = ('Revision %s is '
            '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' % str(rev))
  while True:
    answer = raw_input(prompt)
    if answer == 'q':
      raise SystemExit()
    if answer in ('g', 'b', 'r', 'u'):
      return answer
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 382 | |
[email protected] | cb155a8 | 2011-11-29 17:25:34 | [diff] [blame] | 383 | |
class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zipfile):
    super(DownloadJob, self).__init__()
    # Keep the download parameters around for Start().
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    # Events used by the master thread to signal the fetcher thread.
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()

  def Start(self):
    """Starts the download."""
    self.thread = threading.Thread(
        target=FetchRevision,
        name=self.name,
        args=(self.context, self.rev, self.zipfile, self.quit_event,
              self.progress_event))
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    print("Downloading revision %s..." % str(self.rev))
    self.progress_event.set()  # Display progress of download.
    self.thread.join()
| 420 | |
| 421 | |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 422 | def Bisect(base_url, |
| 423 | platform, |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 424 | official_builds, |
[email protected] | b3b2051 | 2013-08-26 18:51:04 | [diff] [blame] | 425 | is_aura, |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 426 | good_rev=0, |
| 427 | bad_rev=0, |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 428 | num_runs=1, |
[email protected] | 4646a75 | 2013-07-19 22:14:34 | [diff] [blame] | 429 | command="%p %a", |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 430 | try_args=(), |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 431 | profile=None, |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 432 | evaluate=AskIsGoodBuild): |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 433 | """Given known good and known bad revisions, run a binary search on all |
| 434 | archived revisions to determine the last known good revision. |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 435 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 436 | @param platform Which build to download/run ('mac', 'win', 'linux64', etc.). |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 437 | @param official_builds Specify build type (Chromium or Official build). |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 438 | @param good_rev Number/tag of the known good revision. |
| 439 | @param bad_rev Number/tag of the known bad revision. |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 440 | @param num_runs Number of times to run each build for asking good/bad. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 441 | @param try_args A tuple of arguments to pass to the test application. |
| 442 | @param profile The name of the user profile to run with. |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 443 | @param evaluate A function which returns 'g' if the argument build is good, |
| 444 | 'b' if it's bad or 'u' if unknown. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 445 | |
| 446 | Threading is used to fetch Chromium revisions in the background, speeding up |
| 447 | the user's experience. For example, suppose the bounds of the search are |
| 448 | good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on |
| 449 | whether revision 50 is good or bad, the next revision to check will be either |
| 450 | 25 or 75. So, while revision 50 is being checked, the script will download |
| 451 | revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is |
| 452 | known: |
| 453 | |
| 454 | - If rev 50 is good, the download of rev 25 is cancelled, and the next test |
| 455 | is run on rev 75. |
| 456 | |
| 457 | - If rev 50 is bad, the download of rev 75 is cancelled, and the next test |
| 458 | is run on rev 25. |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 459 | """ |
| 460 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 461 | if not profile: |
| 462 | profile = 'profile' |
| 463 | |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 464 | context = PathContext(base_url, platform, good_rev, bad_rev, |
| 465 | official_builds, is_aura) |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 466 | cwd = os.getcwd() |
| 467 | |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 468 | print "Downloading list of known revisions..." |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 469 | _GetDownloadPath = lambda rev: os.path.join(cwd, |
| 470 | '%s-%s' % (str(rev), context.archive_name)) |
| 471 | if official_builds: |
| 472 | revlist = context.GetOfficialBuildsList() |
| 473 | else: |
| 474 | revlist = context.GetRevList() |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 475 | |
| 476 | # Get a list of revisions to bisect across. |
| 477 | if len(revlist) < 2: # Don't have enough builds to bisect. |
| 478 | msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist |
| 479 | raise RuntimeError(msg) |
| 480 | |
| 481 | # Figure out our bookends and first pivot point; fetch the pivot revision. |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 482 | minrev = 0 |
| 483 | maxrev = len(revlist) - 1 |
| 484 | pivot = maxrev / 2 |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 485 | rev = revlist[pivot] |
| 486 | zipfile = _GetDownloadPath(rev) |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 487 | fetch = DownloadJob(context, 'initial_fetch', rev, zipfile) |
| 488 | fetch.Start() |
| 489 | fetch.WaitFor() |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 490 | |
| 491 | # Binary search time! |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 492 | while fetch and fetch.zipfile and maxrev - minrev > 1: |
| 493 | if bad_rev < good_rev: |
| 494 | min_str, max_str = "bad", "good" |
| 495 | else: |
| 496 | min_str, max_str = "good", "bad" |
| 497 | print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str, \ |
| 498 | revlist[maxrev], max_str) |
| 499 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 500 | # Pre-fetch next two possible pivots |
| 501 | # - down_pivot is the next revision to check if the current revision turns |
| 502 | # out to be bad. |
| 503 | # - up_pivot is the next revision to check if the current revision turns |
| 504 | # out to be good. |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 505 | down_pivot = int((pivot - minrev) / 2) + minrev |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 506 | down_fetch = None |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 507 | if down_pivot != pivot and down_pivot != minrev: |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 508 | down_rev = revlist[down_pivot] |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 509 | down_fetch = DownloadJob(context, 'down_fetch', down_rev, |
| 510 | _GetDownloadPath(down_rev)) |
| 511 | down_fetch.Start() |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 512 | |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 513 | up_pivot = int((maxrev - pivot) / 2) + pivot |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 514 | up_fetch = None |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 515 | if up_pivot != pivot and up_pivot != maxrev: |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 516 | up_rev = revlist[up_pivot] |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 517 | up_fetch = DownloadJob(context, 'up_fetch', up_rev, |
| 518 | _GetDownloadPath(up_rev)) |
| 519 | up_fetch.Start() |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 520 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 521 | # Run test on the pivot revision. |
[email protected] | e29c08c | 2012-09-17 20:50:50 | [diff] [blame] | 522 | status = None |
| 523 | stdout = None |
| 524 | stderr = None |
| 525 | try: |
| 526 | (status, stdout, stderr) = RunRevision(context, |
| 527 | rev, |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 528 | fetch.zipfile, |
[email protected] | e29c08c | 2012-09-17 20:50:50 | [diff] [blame] | 529 | profile, |
| 530 | num_runs, |
[email protected] | 4646a75 | 2013-07-19 22:14:34 | [diff] [blame] | 531 | command, |
[email protected] | e29c08c | 2012-09-17 20:50:50 | [diff] [blame] | 532 | try_args) |
| 533 | except Exception, e: |
| 534 | print >>sys.stderr, e |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 535 | |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 536 | # Call the evaluate function to see if the current revision is good or bad. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 537 | # On that basis, kill one of the background downloads and complete the |
| 538 | # other, as described in the comments above. |
| 539 | try: |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 540 | answer = evaluate(rev, official_builds, status, stdout, stderr) |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 541 | if answer == 'g' and good_rev < bad_rev or \ |
| 542 | answer == 'b' and bad_rev < good_rev: |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 543 | fetch.Stop() |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 544 | minrev = pivot |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 545 | if down_fetch: |
| 546 | down_fetch.Stop() # Kill the download of the older revision. |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 547 | fetch = None |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 548 | if up_fetch: |
| 549 | up_fetch.WaitFor() |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 550 | pivot = up_pivot |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 551 | fetch = up_fetch |
| 552 | elif answer == 'b' and good_rev < bad_rev or \ |
| 553 | answer == 'g' and bad_rev < good_rev: |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 554 | fetch.Stop() |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 555 | maxrev = pivot |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 556 | if up_fetch: |
| 557 | up_fetch.Stop() # Kill the download of the newer revision. |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 558 | fetch = None |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 559 | if down_fetch: |
| 560 | down_fetch.WaitFor() |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 561 | pivot = down_pivot |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 562 | fetch = down_fetch |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 563 | elif answer == 'r': |
| 564 | pass # Retry requires no changes. |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 565 | elif answer == 'u': |
| 566 | # Nuke the revision from the revlist and choose a new pivot. |
[email protected] | 1d4a0624 | 2013-08-20 22:53:12 | [diff] [blame] | 567 | fetch.Stop() |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 568 | revlist.pop(pivot) |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 569 | maxrev -= 1 # Assumes maxrev >= pivot. |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 570 | |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 571 | if maxrev - minrev > 1: |
[email protected] | 53bb634 | 2012-06-01 04:11:00 | [diff] [blame] | 572 | # Alternate between using down_pivot or up_pivot for the new pivot |
| 573 | # point, without affecting the range. Do this instead of setting the |
| 574 | # pivot to the midpoint of the new range because adjacent revisions |
| 575 | # are likely affected by the same issue that caused the (u)nknown |
| 576 | # response. |
| 577 | if up_fetch and down_fetch: |
| 578 | fetch = [up_fetch, down_fetch][len(revlist) % 2] |
| 579 | elif up_fetch: |
| 580 | fetch = up_fetch |
| 581 | else: |
| 582 | fetch = down_fetch |
| 583 | fetch.WaitFor() |
| 584 | if fetch == up_fetch: |
| 585 | pivot = up_pivot - 1 # Subtracts 1 because revlist was resized. |
| 586 | else: |
| 587 | pivot = down_pivot |
| 588 | zipfile = fetch.zipfile |
| 589 | |
| 590 | if down_fetch and fetch != down_fetch: |
| 591 | down_fetch.Stop() |
| 592 | if up_fetch and fetch != up_fetch: |
| 593 | up_fetch.Stop() |
| 594 | else: |
| 595 | assert False, "Unexpected return value from evaluate(): " + answer |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 596 | except SystemExit: |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 597 | print "Cleaning up..." |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 598 | for f in [_GetDownloadPath(revlist[down_pivot]), |
| 599 | _GetDownloadPath(revlist[up_pivot])]: |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 600 | try: |
| 601 | os.unlink(f) |
| 602 | except OSError: |
| 603 | pass |
| 604 | sys.exit(0) |
| 605 | |
| 606 | rev = revlist[pivot] |
| 607 | |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 608 | return (revlist[minrev], revlist[maxrev]) |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 609 | |
| 610 | |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 611 | def GetBlinkRevisionForChromiumRevision(self, rev): |
| 612 | """Returns the blink revision that was in REVISIONS file at |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 613 | chromium revision |rev|.""" |
| 614 | # . doesn't match newlines without re.DOTALL, so this is safe. |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 615 | file_url = "%s/%s%d/REVISIONS" % (self.base_url, |
| 616 | self._listing_platform_dir, rev) |
| 617 | url = urllib.urlopen(file_url) |
| 618 | data = json.loads(url.read()) |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 619 | url.close() |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 620 | if 'webkit_revision' in data: |
| 621 | return data['webkit_revision'] |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 622 | else: |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 623 | raise Exception('Could not get blink revision for cr rev %d' % rev) |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 624 | |
| 625 | |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 626 | def GetChromiumRevision(url): |
| 627 | """Returns the chromium revision read from given URL.""" |
| 628 | try: |
| 629 | # Location of the latest build revision number |
| 630 | return int(urllib.urlopen(url).read()) |
| 631 | except Exception, e: |
| 632 | print('Could not determine latest revision. This could be bad...') |
| 633 | return 999999999 |
| 634 | |
| 635 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 636 | def main(): |
[email protected] | 2c1d273 | 2009-10-29 19:52:17 | [diff] [blame] | 637 | usage = ('%prog [options] [-- chromium-options]\n' |
[email protected] | 887c918 | 2013-02-12 20:30:31 | [diff] [blame] | 638 | 'Perform binary search on the snapshot builds to find a minimal\n' |
| 639 | 'range of revisions where a behavior change happened. The\n' |
| 640 | 'behaviors are described as "good" and "bad".\n' |
| 641 | 'It is NOT assumed that the behavior of the later revision is\n' |
[email protected] | 09c58da | 2013-01-07 21:30:17 | [diff] [blame] | 642 | 'the bad one.\n' |
[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 643 | '\n' |
[email protected] | 887c918 | 2013-02-12 20:30:31 | [diff] [blame] | 644 | 'Revision numbers should use\n' |
| 645 | ' Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n' |
| 646 | ' SVN revisions (e.g. 123456) for chromium builds, from trunk.\n' |
| 647 | ' Use base_trunk_revision from http://omahaproxy.appspot.com/\n' |
| 648 | ' for earlier revs.\n' |
| 649 | ' Chrome\'s about: build number and omahaproxy branch_revision\n' |
| 650 | ' are incorrect, they are from branches.\n' |
| 651 | '\n' |
[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 652 | 'Tip: add "-- --no-first-run" to bypass the first run prompts.') |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 653 | parser = optparse.OptionParser(usage=usage) |
[email protected] | 1a45d22 | 2009-09-19 01:58:57 | [diff] [blame] | 654 | # Strangely, the default help output doesn't include the choice list. |
[email protected] | 7aec9e8 | 2013-05-09 05:09:23 | [diff] [blame] | 655 | choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm'] |
[email protected] | 4082b18 | 2011-05-02 20:30:17 | [diff] [blame] | 656 | # linux-chromiumos lacks a continuous archive http://crbug.com/78158 |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 657 | parser.add_option('-a', '--archive', |
[email protected] | 1a45d22 | 2009-09-19 01:58:57 | [diff] [blame] | 658 | choices = choices, |
| 659 | help = 'The buildbot archive to bisect [%s].' % |
| 660 | '|'.join(choices)) |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 661 | parser.add_option('-o', action="store_true", dest='official_builds', |
| 662 | help = 'Bisect across official ' + |
| 663 | 'Chrome builds (internal only) instead of ' + |
| 664 | 'Chromium archives.') |
| 665 | parser.add_option('-b', '--bad', type = 'str', |
[email protected] | 09c58da | 2013-01-07 21:30:17 | [diff] [blame] | 666 | help = 'A bad revision to start bisection. ' + |
| 667 | 'May be earlier or later than the good revision. ' + |
| 668 | 'Default is HEAD.') |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 669 | parser.add_option('-g', '--good', type = 'str', |
[email protected] | 09c58da | 2013-01-07 21:30:17 | [diff] [blame] | 670 | help = 'A good revision to start bisection. ' + |
| 671 | 'May be earlier or later than the bad revision. ' + |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 672 | 'Default is 0.') |
[email protected] | d4bf358 | 2009-09-20 00:56:38 | [diff] [blame] | 673 | parser.add_option('-p', '--profile', '--user-data-dir', type = 'str', |
| 674 | help = 'Profile to use; this will not reset every run. ' + |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 675 | 'Defaults to a clean profile.', default = 'profile') |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 676 | parser.add_option('-t', '--times', type = 'int', |
| 677 | help = 'Number of times to run each build before asking ' + |
| 678 | 'if it\'s good or bad. Temporary profiles are reused.', |
| 679 | default = 1) |
[email protected] | 4646a75 | 2013-07-19 22:14:34 | [diff] [blame] | 680 | parser.add_option('-c', '--command', type = 'str', |
| 681 | help = 'Command to execute. %p and %a refer to Chrome ' + |
| 682 | 'executable and specified extra arguments respectively. ' + |
| 683 | 'Use %s to specify all extra arguments as one string. ' + |
| 684 | 'Defaults to "%p %a". Note that any extra paths ' + |
| 685 | 'specified should be absolute.', |
| 686 | default = '%p %a'); |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 687 | parser.add_option('-l', '--blink', action='store_true', |
| 688 | help = 'Use Blink bisect instead of Chromium. ') |
[email protected] | b3b2051 | 2013-08-26 18:51:04 | [diff] [blame] | 689 | parser.add_option('--aura', |
| 690 | dest='aura', |
| 691 | action='store_true', |
| 692 | default=False, |
| 693 | help='Allow the script to bisect aura builds') |
| 694 | |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 695 | (opts, args) = parser.parse_args() |
| 696 | |
| 697 | if opts.archive is None: |
[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 698 | print 'Error: missing required parameter: --archive' |
| 699 | print |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 700 | parser.print_help() |
| 701 | return 1 |
| 702 | |
[email protected] | b3b2051 | 2013-08-26 18:51:04 | [diff] [blame] | 703 | if opts.aura: |
| 704 | if opts.archive != 'win' or not opts.official_builds: |
| 705 | print 'Error: Aura is supported only on Windows platform '\ |
| 706 | 'and official builds.' |
| 707 | return 1 |
| 708 | |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 709 | if opts.blink: |
| 710 | base_url = WEBKIT_BASE_URL |
| 711 | else: |
| 712 | base_url = CHROMIUM_BASE_URL |
| 713 | |
[email protected] | 183706d9 | 2011-06-10 13:06:22 | [diff] [blame] | 714 | # Create the context. Initialize 0 for the revisions as they are set below. |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 715 | context = PathContext(base_url, opts.archive, 0, 0, |
| 716 | opts.official_builds, opts.aura) |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 717 | # Pick a starting point, try to get HEAD for this. |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 718 | if opts.bad: |
| 719 | bad_rev = opts.bad |
| 720 | else: |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 721 | bad_rev = '999.0.0.0' |
| 722 | if not opts.official_builds: |
| 723 | bad_rev = GetChromiumRevision(context.GetLastChangeURL()) |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 724 | |
| 725 | # Find out when we were good. |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 726 | if opts.good: |
| 727 | good_rev = opts.good |
| 728 | else: |
[email protected] | 801fb65 | 2012-07-20 20:13:50 | [diff] [blame] | 729 | good_rev = '0.0.0.0' if opts.official_builds else 0 |
| 730 | |
| 731 | if opts.official_builds: |
| 732 | good_rev = LooseVersion(good_rev) |
| 733 | bad_rev = LooseVersion(bad_rev) |
| 734 | else: |
| 735 | good_rev = int(good_rev) |
| 736 | bad_rev = int(bad_rev) |
| 737 | |
[email protected] | 5e93cf16 | 2012-01-28 02:16:56 | [diff] [blame] | 738 | if opts.times < 1: |
| 739 | print('Number of times to run (%d) must be greater than or equal to 1.' % |
| 740 | opts.times) |
| 741 | parser.print_help() |
| 742 | return 1 |
| 743 | |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 744 | (min_chromium_rev, max_chromium_rev) = Bisect( |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 745 | base_url, opts.archive, opts.official_builds, opts.aura, good_rev, |
| 746 | bad_rev, opts.times, opts.command, args, opts.profile) |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 747 | |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 748 | # Get corresponding blink revisions. |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 749 | try: |
[email protected] | 4c6fec6b | 2013-09-17 17:44:08 | [diff] [blame] | 750 | min_blink_rev = GetBlinkRevisionForChromiumRevision(context, |
| 751 | min_chromium_rev) |
| 752 | max_blink_rev = GetBlinkRevisionForChromiumRevision(context, |
| 753 | max_chromium_rev) |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 754 | except Exception, e: |
| 755 | # Silently ignore the failure. |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 756 | min_blink_rev, max_blink_rev = 0, 0 |
[email protected] | b2fe7f2 | 2011-10-25 22:58:31 | [diff] [blame] | 757 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 758 | # We're done. Let the user know the results in an official manner. |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 759 | if good_rev > bad_rev: |
| 760 | print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev), str(max_chromium_rev)) |
| 761 | else: |
| 762 | print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev), str(max_chromium_rev)) |
| 763 | |
[email protected] | ff50d1c | 2013-04-17 18:49:36 | [diff] [blame] | 764 | if min_blink_rev != max_blink_rev: |
| 765 | print 'BLINK CHANGELOG URL:' |
| 766 | print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev) |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 767 | print 'CHANGELOG URL:' |
| 768 | if opts.official_builds: |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 769 | print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev) |
[email protected] | d0149c5c | 2012-05-29 21:12:11 | [diff] [blame] | 770 | else: |
[email protected] | eadd95d | 2012-11-02 22:42:09 | [diff] [blame] | 771 | print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev) |
[email protected] | cb155a8 | 2011-11-29 17:25:34 | [diff] [blame] | 772 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 773 | if __name__ == '__main__': |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 774 | sys.exit(main()) |