[email protected] | 178aab7 | 2010-10-08 17:21:38 | [diff] [blame] | 1 | #!/usr/bin/python |
[email protected] | 4082b18 | 2011-05-02 20:30:17 | [diff] [blame] | 2 | # Copyright (c) 2011 The Chromium Authors. All rights reserved. |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 3 | # Use of this source code is governed by a BSD-style license that can be |
| 4 | # found in the LICENSE file. |
| 5 | |
| 6 | """Snapshot Build Bisect Tool |
| 7 | |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 8 | This script bisects a snapshot archive using binary search. It starts at |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 9 | a bad revision (it will try to guess HEAD) and asks for a last known-good |
| 10 | revision. It will then binary search across this revision range by downloading, |
| 11 | unzipping, and opening Chromium for you. After testing the specific revision, |
| 12 | it will ask you whether it is good or bad before continuing the search. |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 13 | """ |
| 14 | |
# The root URL for storage.
BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'

# URL to the ViewVC commit page. Takes one %d: the revision number.
BUILD_VIEWVC_URL = 'http://src.chromium.org/viewvc/chrome?view=rev&revision=%d'

# Changelogs URL. Takes two %d: the (good, bad) revision range.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
                'perf/dashboard/ui/changelog.html?url=/trunk/src&range=%d:%d'

# DEPS file URL. Takes one %d: the Chromium revision number.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# WebKit Changelogs URL. Takes two %d: the (bad, good) WebKit revisions.
WEBKIT_CHANGELOG_URL = 'http://trac.webkit.org/log/' \
                       'trunk/?rev=%d&stop_rev=%d&verbose=on'
| 31 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 32 | ############################################################################### |
| 33 | |
| 34 | import math |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 35 | import optparse |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 36 | import os |
[email protected] | d4bf358 | 2009-09-20 00:56:38 | [diff] [blame] | 37 | import pipes |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 38 | import re |
| 39 | import shutil |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 40 | import subprocess |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 41 | import sys |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 42 | import tempfile |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 43 | import threading |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 44 | import urllib |
[email protected] | 183706d9 | 2011-06-10 13:06:22 | [diff] [blame] | 45 | from xml.etree import ElementTree |
[email protected] | bd8dcb9 | 2010-03-31 01:05:24 | [diff] [blame] | 46 | import zipfile |
| 47 | |
class PathContext(object):
  """Carries the information needed to construct URLs and local paths when
  talking to the snapshot storage server and handling its archives."""

  def __init__(self, platform, good_revision, bad_revision):
    super(PathContext, self).__init__()
    # Remember the raw inputs: the '-a/--archive' value and revision bounds.
    self.platform = platform
    self.good_revision = good_revision
    self.bad_revision = bad_revision

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Per-platform settings, in order:
    #   _listing_platform_dir = directory that holds revisions (trailing '/'),
    #   archive_name          = ZIP file name in each revision directory,
    #   _archive_extract_dir  = top-level directory inside the archive,
    #   _binary_name          = executable path inside the extract dir.
    # Linux and Linux x64 differ only in the listing directory.
    per_platform = {
        'linux': ('Linux/', 'chrome-linux.zip', 'chrome-linux', 'chrome'),
        'linux64': ('Linux_x64/', 'chrome-linux.zip', 'chrome-linux',
                    'chrome'),
        'mac': ('Mac/', 'chrome-mac.zip', 'chrome-mac',
                'Chromium.app/Contents/MacOS/Chromium'),
        'win': ('Win/', 'chrome-win32.zip', 'chrome-win32', 'chrome.exe'),
    }
    if self.platform not in per_platform:
      raise Exception('Invalid platform: %s' % self.platform)
    (self._listing_platform_dir,
     self.archive_name,
     self._archive_extract_dir,
     self._binary_name) = per_platform[self.platform]

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    url = BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir
    if marker:
      url += '&marker=' + str(marker)
    return url

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    return "%s/%s%d/%s" % (
        BASE_URL, self._listing_platform_dir, revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction
    location) that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def ParseDirectoryIndex(self):
    """Fetches the Google Storage directory listing (following pagination
    markers as needed) and returns every revision number found in it."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker).
      If next-marker is not None, the listing was partial and another fetch
      should be performed with next-marker as the marker= GET parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # Every node in the tree is namespaced; etree renders tags as
      # |{namespace}tag|, so recover the namespace from the root's tag.
      root_tag = document.getroot().tag
      ns_end = root_tag.find('}')
      if ns_end == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:ns_end + 1]

      # The length of the prefix (_listing_platform_dir) lets us strip it off
      # each entry below.
      prefix_len = len(document.find(namespace + 'Prefix').text)

      # A truncated listing carries the marker for the next page.
      next_marker = None
      truncated = document.find(namespace + 'IsTruncated')
      if truncated is not None and truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text

      # <Prefix> nodes look like |_listing_platform_dir/revision/|; strip the
      # platform dir and trailing slash, keeping only numeric entries.
      found = []
      for node in document.findall(namespace + 'CommonPrefixes/' +
                                   namespace + 'Prefix'):
        try:
          found.append(int(node.text[prefix_len:-1]))
        except ValueError:
          pass
      return (found, next_marker)

    # First page, then keep refetching with the marker until the listing is
    # no longer truncated.
    (revisions, next_marker) = _FetchAndParse(self.GetListingURL())
    while next_marker:
      (more, next_marker) = _FetchAndParse(self.GetListingURL(next_marker))
      revisions.extend(more)
    return revisions

  def GetRevList(self):
    """Gets the sorted list of revision numbers between self.good_revision
    and self.bad_revision, inclusive."""
    minrev = self.good_revision
    maxrev = self.bad_revision
    return sorted(rev for rev in map(int, self.ParseDirectoryIndex())
                  if minrev <= rev <= maxrev)
| 175 | |
[email protected] | bd8dcb9 | 2010-03-31 01:05:24 | [diff] [blame] | 176 | |
def UnzipFilenameToDir(filename, dir):
  """Unzip |filename| to directory |dir|, preserving unix permission bits.

  On any extraction error, prints the error to stderr and exits the process
  with status 1 (matching the original best-effort behavior of this tool).
  The current working directory is always restored.
  """
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  try:
    # Make the destination and work inside it so archive-relative names can
    # be used directly.
    if not os.path.isdir(dir):
      os.mkdir(dir)
    os.chdir(dir)
    for info in zf.infolist():
      name = info.filename
      if name.endswith('/'):  # Directory entry.
        if not os.path.isdir(name):
          os.makedirs(name)
      else:  # File entry.
        subdir = os.path.dirname(name)
        # Guard against empty dirname for zip-root files: os.makedirs('')
        # raises.
        if subdir and not os.path.isdir(subdir):
          os.makedirs(subdir)
        out = open(name, 'wb')
        try:
          out.write(zf.read(name))
        finally:
          out.close()
        # Permission info in external_attr is shifted 16 bits. Entries
        # written without unix attributes have 0 there; chmod'ing to 0 would
        # make the file unreadable, so only apply a non-zero mode.
        mode = info.external_attr >> 16
        if mode:
          os.chmod(name, mode)
  except Exception as e:
    sys.stderr.write('%s\n' % str(e))
    sys.exit(1)
  finally:
    zf.close()
    os.chdir(cwd)
| 207 | |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 208 | |
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads the build archive for revision |rev| into |filename|.

  (The unzipping happens later, in RunRevision.)

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    # Aborting is implemented by raising out of the urlretrieve callback; the
    # RuntimeError is swallowed below, silently abandoning the download.
    if quit_event and quit_event.is_set():
      raise RuntimeError("Aborting download of revision %d" % rev)
    if progress_event and progress_event.is_set():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.is_set():
      # Finish the single-line progress display with a newline. (Under
      # Python 2 the previous |print()| printed the empty tuple "()".)
      sys.stdout.write('\n')
  except RuntimeError:
    # Raised by ReportHook when quit_event fires; the partial download is
    # intentionally discarded by the caller.
    pass
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 242 | |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 243 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 244 | def RunRevision(context, revision, zipfile, profile, args): |
| 245 | """Given a zipped revision, unzip it and run the test.""" |
| 246 | print "Trying revision %d..." % revision |
[email protected] | 3ff00b7 | 2011-07-20 21:34:47 | [diff] [blame] | 247 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 248 | # Create a temp directory and unzip the revision into it. |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 249 | cwd = os.getcwd() |
| 250 | tempdir = tempfile.mkdtemp(prefix='bisect_tmp') |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 251 | UnzipFilenameToDir(zipfile, tempdir) |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 252 | os.chdir(tempdir) |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 253 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 254 | # Run the build. |
| 255 | testargs = [context.GetLaunchPath(), '--user-data-dir=%s' % profile] + args |
| 256 | subproc = subprocess.Popen(testargs, |
| 257 | bufsize=-1, |
| 258 | stdout=subprocess.PIPE, |
| 259 | stderr=subprocess.PIPE) |
| 260 | (stdout, stderr) = subproc.communicate() |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 261 | |
| 262 | os.chdir(cwd) |
[email protected] | 7ad66a7 | 2009-09-04 17:52:33 | [diff] [blame] | 263 | try: |
| 264 | shutil.rmtree(tempdir, True) |
| 265 | except Exception, e: |
| 266 | pass |
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 267 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 268 | return (subproc.returncode, stdout, stderr) |
[email protected] | 79f1474 | 2010-03-10 01:01:57 | [diff] [blame] | 269 | |
def AskIsGoodBuild(rev, status, stdout, stderr):
  """Prompt the user for a verdict on build |rev|; True means good.

  Typing 'q' aborts the bisect via SystemExit; anything unrecognized
  re-prompts."""
  while True:
    answer = raw_input('Revision %d is [(g)ood/(b)ad/(q)uit]: ' % int(rev))
    if answer == 'g':
      return True
    if answer == 'b':
      return False
    if answer == 'q':
      raise SystemExit()
[email protected] | 67e0bc6 | 2009-09-03 22:06:09 | [diff] [blame] | 279 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 280 | def Bisect(platform, |
| 281 | good_rev=0, |
| 282 | bad_rev=0, |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 283 | try_args=(), |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 284 | profile=None, |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 285 | predicate=AskIsGoodBuild): |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 286 | """Given known good and known bad revisions, run a binary search on all |
| 287 | archived revisions to determine the last known good revision. |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 288 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 289 | @param platform Which build to download/run ('mac', 'win', 'linux64', etc.). |
| 290 | @param good_rev Number/tag of the last known good revision. |
| 291 | @param bad_rev Number/tag of the first known bad revision. |
| 292 | @param try_args A tuple of arguments to pass to the test application. |
| 293 | @param profile The name of the user profile to run with. |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 294 | @param predicate A predicate function which returns True iff the argument |
| 295 | chromium revision is good. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 296 | |
| 297 | Threading is used to fetch Chromium revisions in the background, speeding up |
| 298 | the user's experience. For example, suppose the bounds of the search are |
| 299 | good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on |
| 300 | whether revision 50 is good or bad, the next revision to check will be either |
| 301 | 25 or 75. So, while revision 50 is being checked, the script will download |
| 302 | revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is |
| 303 | known: |
| 304 | |
| 305 | - If rev 50 is good, the download of rev 25 is cancelled, and the next test |
| 306 | is run on rev 75. |
| 307 | |
| 308 | - If rev 50 is bad, the download of rev 75 is cancelled, and the next test |
| 309 | is run on rev 25. |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 310 | """ |
| 311 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 312 | if not profile: |
| 313 | profile = 'profile' |
| 314 | |
| 315 | context = PathContext(platform, good_rev, bad_rev) |
| 316 | cwd = os.getcwd() |
| 317 | |
| 318 | _GetDownloadPath = lambda rev: os.path.join(cwd, |
| 319 | '%d-%s' % (rev, context.archive_name)) |
| 320 | |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 321 | print "Downloading list of known revisions..." |
| 322 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 323 | revlist = context.GetRevList() |
| 324 | |
| 325 | # Get a list of revisions to bisect across. |
| 326 | if len(revlist) < 2: # Don't have enough builds to bisect. |
| 327 | msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist |
| 328 | raise RuntimeError(msg) |
| 329 | |
| 330 | # Figure out our bookends and first pivot point; fetch the pivot revision. |
[email protected] | 3ff00b7 | 2011-07-20 21:34:47 | [diff] [blame] | 331 | good = 0 |
| 332 | bad = len(revlist) - 1 |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 333 | pivot = bad / 2 |
| 334 | rev = revlist[pivot] |
| 335 | zipfile = _GetDownloadPath(rev) |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 336 | progress_event = threading.Event() |
| 337 | progress_event.set() |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 338 | print "Downloading revision %d..." % rev |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 339 | FetchRevision(context, rev, zipfile, |
| 340 | quit_event=None, progress_event=progress_event) |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 341 | |
| 342 | # Binary search time! |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 343 | while zipfile and bad - good > 1: |
| 344 | # Pre-fetch next two possible pivots |
| 345 | # - down_pivot is the next revision to check if the current revision turns |
| 346 | # out to be bad. |
| 347 | # - up_pivot is the next revision to check if the current revision turns |
| 348 | # out to be good. |
| 349 | down_pivot = int((pivot - good) / 2) + good |
| 350 | down_thread = None |
| 351 | if down_pivot != pivot and down_pivot != good: |
| 352 | down_rev = revlist[down_pivot] |
| 353 | down_zipfile = _GetDownloadPath(down_rev) |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 354 | down_quit_event = threading.Event() |
| 355 | down_progress_event = threading.Event() |
| 356 | fetchargs = (context, |
| 357 | down_rev, |
| 358 | down_zipfile, |
| 359 | down_quit_event, |
| 360 | down_progress_event) |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 361 | down_thread = threading.Thread(target=FetchRevision, |
| 362 | name='down_fetch', |
| 363 | args=fetchargs) |
| 364 | down_thread.start() |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 365 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 366 | up_pivot = int((bad - pivot) / 2) + pivot |
| 367 | up_thread = None |
| 368 | if up_pivot != pivot and up_pivot != bad: |
| 369 | up_rev = revlist[up_pivot] |
| 370 | up_zipfile = _GetDownloadPath(up_rev) |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 371 | up_quit_event = threading.Event() |
| 372 | up_progress_event = threading.Event() |
| 373 | fetchargs = (context, |
| 374 | up_rev, |
| 375 | up_zipfile, |
| 376 | up_quit_event, |
| 377 | up_progress_event) |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 378 | up_thread = threading.Thread(target=FetchRevision, |
| 379 | name='up_fetch', |
| 380 | args=fetchargs) |
| 381 | up_thread.start() |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 382 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 383 | # Run test on the pivot revision. |
| 384 | (status, stdout, stderr) = RunRevision(context, |
| 385 | rev, |
| 386 | zipfile, |
| 387 | profile, |
| 388 | try_args) |
| 389 | os.unlink(zipfile) |
| 390 | zipfile = None |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 391 | |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 392 | # Call the predicate function to see if the current revision is good or bad. |
| 393 | # On that basis, kill one of the background downloads and complete the |
| 394 | # other, as described in the comments above. |
| 395 | try: |
| 396 | if predicate(rev, status, stdout, stderr): |
| 397 | good = pivot |
| 398 | if down_thread: |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 399 | down_quit_event.set() # Kill the download of older revision. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 400 | down_thread.join() |
| 401 | os.unlink(down_zipfile) |
| 402 | if up_thread: |
| 403 | print "Downloading revision %d..." % up_rev |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 404 | up_progress_event.set() # Display progress of download. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 405 | up_thread.join() # Wait for newer revision to finish downloading. |
| 406 | pivot = up_pivot |
| 407 | zipfile = up_zipfile |
| 408 | else: |
| 409 | bad = pivot |
| 410 | if up_thread: |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 411 | up_quit_event.set() # Kill download of newer revision. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 412 | up_thread.join() |
| 413 | os.unlink(up_zipfile) |
| 414 | if down_thread: |
| 415 | print "Downloading revision %d..." % down_rev |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 416 | down_progress_event.set() # Display progress of download. |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 417 | down_thread.join() # Wait for older revision to finish downloading. |
| 418 | pivot = down_pivot |
| 419 | zipfile = down_zipfile |
| 420 | except SystemExit: |
[email protected] | 468a977 | 2011-08-09 18:42:00 | [diff] [blame] | 421 | print "Cleaning up..." |
[email protected] | afe3066 | 2011-07-30 01:05:52 | [diff] [blame] | 422 | for f in [down_zipfile, up_zipfile]: |
| 423 | try: |
| 424 | os.unlink(f) |
| 425 | except OSError: |
| 426 | pass |
| 427 | sys.exit(0) |
| 428 | |
| 429 | rev = revlist[pivot] |
| 430 | |
| 431 | return (revlist[good], revlist[bad]) |
[email protected] | 60ac66e3 | 2011-07-18 16:08:25 | [diff] [blame] | 432 | |
| 433 | |
def GetWebKitRevisionForChromiumRevision(rev):
  """Returns the webkit revision that was in chromium's DEPS file at
  chromium revision |rev|."""
  handle = urllib.urlopen(DEPS_FILE % rev)
  deps_contents = handle.read()
  handle.close()
  # . doesn't match newlines without re.DOTALL, so this is safe.
  match = re.search(r'webkit_revision.:\D*(\d+)', deps_contents)
  if not match:
    raise Exception('Could not get webkit revision for cr rev %d' % rev)
  return int(match.group(1))
| 446 | |
| 447 | |
def main():
  # Interactive entry point: parse flags, establish the good/bad revision
  # bounds (prompting where needed), run the bisect, then print changelog
  # links for the narrowed-down range. Returns a process exit code.
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices = choices,
                    help = 'The buildbot archive to bisect [%s].' %
                           '|'.join(choices))
  parser.add_option('-b', '--bad', type = 'int',
                    help = 'The bad revision to bisect to.')
  parser.add_option('-g', '--good', type = 'int',
                    help = 'The last known good revision to bisect from.')
  parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
                    help = 'Profile to use; this will not reset every run. ' +
                           'Defaults to a clean profile.', default = 'profile')
  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  # Sanity check: bisection requires the good revision to precede the bad one.
  if opts.bad and opts.good and (opts.good > opts.bad):
    print ('The good revision (%d) must precede the bad revision (%d).\n' %
           (opts.good, opts.bad))
    parser.print_help()
    return 1

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(opts.archive, 0, 0)

  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = 0
    try:
      # Location of the latest build revision number
      nh = urllib.urlopen(context.GetLastChangeURL())
      latest = int(nh.read())
      nh.close()
      # An empty response at the prompt accepts the server-reported HEAD.
      bad_rev = raw_input('Bad revision [HEAD:%d]: ' % latest)
      if (bad_rev == ''):
        bad_rev = latest
      bad_rev = int(bad_rev)
    except Exception, e:
      # Network failure or unparsable input: fall back to an explicit prompt.
      print('Could not determine latest revision. This could be bad...')
      bad_rev = int(raw_input('Bad revision: '))

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = 0
    try:
      good_rev = int(raw_input('Last known good [0]: '))
    except Exception, e:
      # Anything unparsable (including just pressing enter) means revision 0.
      pass

  (last_known_good_rev, first_known_bad_rev) = Bisect(
      opts.archive, good_rev, bad_rev, args, opts.profile)

  # Get corresponding webkit revisions.
  try:
    last_known_good_webkit_rev = GetWebKitRevisionForChromiumRevision(
        last_known_good_rev)
    first_known_bad_webkit_rev = GetWebKitRevisionForChromiumRevision(
        first_known_bad_rev)
  except Exception, e:
    # Silently ignore the failure.
    last_known_good_webkit_rev, first_known_bad_webkit_rev = 0, 0

  # We're done. Let the user know the results in an official manner.
  print('You are probably looking for build %d.' % first_known_bad_rev)
  if last_known_good_webkit_rev != first_known_bad_webkit_rev:
    print 'WEBKIT CHANGELOG URL:'
    print WEBKIT_CHANGELOG_URL % (first_known_bad_webkit_rev,
                                  last_known_good_webkit_rev)
  print 'CHANGELOG URL:'
  print CHANGELOG_URL % (last_known_good_rev, first_known_bad_rev)
  print 'Built at revision:'
  print BUILD_VIEWVC_URL % first_known_bad_rev

if __name__ == '__main__':
  sys.exit(main())