blob: a6ae537d2d86eeb9d8f83d1571f18e1f45d57d9d [file] [log] [blame]
[email protected]cb155a82011-11-29 17:25:341#!/usr/bin/env python
[email protected]5e93cf162012-01-28 02:16:562# Copyright (c) 2012 The Chromium Authors. All rights reserved.
[email protected]67e0bc62009-09-03 22:06:093# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""Snapshot Build Bisect Tool
7
[email protected]7ad66a72009-09-04 17:52:338This script bisects a snapshot archive using binary search. It starts at
[email protected]67e0bc62009-09-03 22:06:099a bad revision (it will try to guess HEAD) and asks for a last known-good
10revision. It will then binary search across this revision range by downloading,
11unzipping, and opening Chromium for you. After testing the specific revision,
12it will ask you whether it is good or bad before continuing the search.
[email protected]67e0bc62009-09-03 22:06:0913"""
14
# The root URL for storage.
BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'

# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'

# Changelogs URL. Takes a (start, end) SVN revision pair.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
    'perf/dashboard/ui/changelog.html?url=/trunk/src&range=%d%%3A%d'

# Official Changelogs URL. Takes an (old_version, new_version) string pair.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/'\
    'changelog?old_version=%s&new_version=%s'

# DEPS file URL. Takes an SVN revision number.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# WebKit Changelogs URL. Takes a (start, stop) WebKit revision pair.
WEBKIT_CHANGELOG_URL = 'http://trac.webkit.org/log/' \
    'trunk/?rev=%d&stop_rev=%d&verbose=on&limit=10000'

DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
                        '(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
                        '(known bad), but no later than %s (first known good).'
[email protected]05ff3fd2012-04-17 23:24:0639
[email protected]67e0bc62009-09-03 22:06:0940###############################################################################
41
42import math
[email protected]7ad66a72009-09-04 17:52:3343import optparse
[email protected]67e0bc62009-09-03 22:06:0944import os
[email protected]d4bf3582009-09-20 00:56:3845import pipes
[email protected]67e0bc62009-09-03 22:06:0946import re
47import shutil
[email protected]afe30662011-07-30 01:05:5248import subprocess
[email protected]67e0bc62009-09-03 22:06:0949import sys
[email protected]7ad66a72009-09-04 17:52:3350import tempfile
[email protected]afe30662011-07-30 01:05:5251import threading
[email protected]67e0bc62009-09-03 22:06:0952import urllib
[email protected]d0149c5c2012-05-29 21:12:1153from distutils.version import LooseVersion
[email protected]183706d92011-06-10 13:06:2254from xml.etree import ElementTree
[email protected]bd8dcb92010-03-31 01:05:2455import zipfile
56
[email protected]cb155a82011-11-29 17:25:3457
[email protected]183706d92011-06-10 13:06:2258class PathContext(object):
59 """A PathContext is used to carry the information used to construct URLs and
60 paths when dealing with the storage server and archives."""
[email protected]d0149c5c2012-05-29 21:12:1161 def __init__(self, platform, good_revision, bad_revision, is_official):
[email protected]183706d92011-06-10 13:06:2262 super(PathContext, self).__init__()
63 # Store off the input parameters.
64 self.platform = platform # What's passed in to the '-a/--archive' option.
65 self.good_revision = good_revision
66 self.bad_revision = bad_revision
[email protected]d0149c5c2012-05-29 21:12:1167 self.is_official = is_official
[email protected]183706d92011-06-10 13:06:2268
69 # The name of the ZIP file in a revision directory on the server.
70 self.archive_name = None
71
72 # Set some internal members:
73 # _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
74 # _archive_extract_dir = Uncompressed directory in the archive_name file.
75 # _binary_name = The name of the executable to run.
[email protected]1960edd2011-07-01 16:53:5276 if self.platform == 'linux' or self.platform == 'linux64':
[email protected]183706d92011-06-10 13:06:2277 self._binary_name = 'chrome'
[email protected]183706d92011-06-10 13:06:2278 elif self.platform == 'mac':
[email protected]183706d92011-06-10 13:06:2279 self.archive_name = 'chrome-mac.zip'
80 self._archive_extract_dir = 'chrome-mac'
[email protected]183706d92011-06-10 13:06:2281 elif self.platform == 'win':
[email protected]183706d92011-06-10 13:06:2282 self.archive_name = 'chrome-win32.zip'
83 self._archive_extract_dir = 'chrome-win32'
84 self._binary_name = 'chrome.exe'
85 else:
[email protected]afe30662011-07-30 01:05:5286 raise Exception('Invalid platform: %s' % self.platform)
[email protected]183706d92011-06-10 13:06:2287
[email protected]d0149c5c2012-05-29 21:12:1188 if is_official:
89 if self.platform == 'linux':
90 self._listing_platform_dir = 'lucid32bit/'
91 self.archive_name = 'chrome-lucid32bit.zip'
92 self._archive_extract_dir = 'chrome-lucid32bit'
93 elif self.platform == 'linux64':
94 self._listing_platform_dir = 'lucid64bit/'
95 self.archive_name = 'chrome-lucid64bit.zip'
96 self._archive_extract_dir = 'chrome-lucid64bit'
97 elif self.platform == 'mac':
98 self._listing_platform_dir = 'mac/'
99 self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
100 elif self.platform == 'win':
101 self._listing_platform_dir = 'win/'
102 else:
103 if self.platform == 'linux' or self.platform == 'linux64':
104 self.archive_name = 'chrome-linux.zip'
105 self._archive_extract_dir = 'chrome-linux'
106 if self.platform == 'linux':
107 self._listing_platform_dir = 'Linux/'
108 elif self.platform == 'linux64':
109 self._listing_platform_dir = 'Linux_x64/'
110 elif self.platform == 'mac':
111 self._listing_platform_dir = 'Mac/'
112 self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
113 elif self.platform == 'win':
114 self._listing_platform_dir = 'Win/'
115
[email protected]183706d92011-06-10 13:06:22116 def GetListingURL(self, marker=None):
117 """Returns the URL for a directory listing, with an optional marker."""
118 marker_param = ''
119 if marker:
120 marker_param = '&marker=' + str(marker)
121 return BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir + \
122 marker_param
123
124 def GetDownloadURL(self, revision):
125 """Gets the download URL for a build archive of a specific revision."""
[email protected]d0149c5c2012-05-29 21:12:11126 if self.is_official:
127 return "%s/%s/%s%s" % (
128 OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
129 self.archive_name)
130 else:
131 return "%s/%s%s/%s" % (
132 BASE_URL, self._listing_platform_dir, revision, self.archive_name)
[email protected]183706d92011-06-10 13:06:22133
134 def GetLastChangeURL(self):
135 """Returns a URL to the LAST_CHANGE file."""
136 return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE'
137
138 def GetLaunchPath(self):
139 """Returns a relative path (presumably from the archive extraction location)
140 that is used to run the executable."""
141 return os.path.join(self._archive_extract_dir, self._binary_name)
142
[email protected]afe30662011-07-30 01:05:52143 def ParseDirectoryIndex(self):
144 """Parses the Google Storage directory listing into a list of revision
[email protected]eadd95d2012-11-02 22:42:09145 numbers."""
[email protected]afe30662011-07-30 01:05:52146
147 def _FetchAndParse(url):
148 """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
149 next-marker is not None, then the listing is a partial listing and another
150 fetch should be performed with next-marker being the marker= GET
151 parameter."""
152 handle = urllib.urlopen(url)
153 document = ElementTree.parse(handle)
154
155 # All nodes in the tree are namespaced. Get the root's tag name to extract
156 # the namespace. Etree does namespaces as |{namespace}tag|.
157 root_tag = document.getroot().tag
158 end_ns_pos = root_tag.find('}')
159 if end_ns_pos == -1:
160 raise Exception("Could not locate end namespace for directory index")
161 namespace = root_tag[:end_ns_pos + 1]
162
163 # Find the prefix (_listing_platform_dir) and whether or not the list is
164 # truncated.
165 prefix_len = len(document.find(namespace + 'Prefix').text)
166 next_marker = None
167 is_truncated = document.find(namespace + 'IsTruncated')
168 if is_truncated is not None and is_truncated.text.lower() == 'true':
169 next_marker = document.find(namespace + 'NextMarker').text
170
171 # Get a list of all the revisions.
172 all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
173 namespace + 'Prefix')
174 # The <Prefix> nodes have content of the form of
175 # |_listing_platform_dir/revision/|. Strip off the platform dir and the
176 # trailing slash to just have a number.
177 revisions = []
178 for prefix in all_prefixes:
179 revnum = prefix.text[prefix_len:-1]
180 try:
181 revnum = int(revnum)
182 revisions.append(revnum)
183 except ValueError:
184 pass
185 return (revisions, next_marker)
[email protected]d0149c5c2012-05-29 21:12:11186
[email protected]afe30662011-07-30 01:05:52187 # Fetch the first list of revisions.
188 (revisions, next_marker) = _FetchAndParse(self.GetListingURL())
189
190 # If the result list was truncated, refetch with the next marker. Do this
191 # until an entire directory listing is done.
192 while next_marker:
193 next_url = self.GetListingURL(next_marker)
194 (new_revisions, next_marker) = _FetchAndParse(next_url)
195 revisions.extend(new_revisions)
[email protected]afe30662011-07-30 01:05:52196 return revisions
197
198 def GetRevList(self):
199 """Gets the list of revision numbers between self.good_revision and
200 self.bad_revision."""
201 # Download the revlist and filter for just the range between good and bad.
[email protected]eadd95d2012-11-02 22:42:09202 minrev = min(self.good_revision, self.bad_revision)
203 maxrev = max(self.good_revision, self.bad_revision)
[email protected]afe30662011-07-30 01:05:52204 revlist = map(int, self.ParseDirectoryIndex())
[email protected]d0149c5c2012-05-29 21:12:11205 revlist = [x for x in revlist if x >= int(minrev) and x <= int(maxrev)]
[email protected]afe30662011-07-30 01:05:52206 revlist.sort()
207 return revlist
208
[email protected]d0149c5c2012-05-29 21:12:11209 def GetOfficialBuildsList(self):
210 """Gets the list of official build numbers between self.good_revision and
211 self.bad_revision."""
212 # Download the revlist and filter for just the range between good and bad.
[email protected]eadd95d2012-11-02 22:42:09213 minrev = min(self.good_revision, self.bad_revision)
214 maxrev = max(self.good_revision, self.bad_revision)
[email protected]d0149c5c2012-05-29 21:12:11215 handle = urllib.urlopen(OFFICIAL_BASE_URL)
216 dirindex = handle.read()
217 handle.close()
218 build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
219 final_list = []
[email protected]d0149c5c2012-05-29 21:12:11220 i = 0
[email protected]d0149c5c2012-05-29 21:12:11221 parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
222 for build_number in sorted(parsed_build_numbers):
223 path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
224 self._listing_platform_dir + self.archive_name
225 i = i + 1
226 try:
227 connection = urllib.urlopen(path)
228 connection.close()
[email protected]801fb652012-07-20 20:13:50229 if build_number > maxrev:
230 break
231 if build_number >= minrev:
232 final_list.append(str(build_number))
[email protected]d0149c5c2012-05-29 21:12:11233 except urllib.HTTPError, e:
234 pass
[email protected]801fb652012-07-20 20:13:50235 return final_list
[email protected]bd8dcb92010-03-31 01:05:24236
def UnzipFilenameToDir(filename, dir):
  """Unzip |filename| to directory |dir|.

  Creates |dir| if it does not already exist, recreates the archive's
  directory structure inside it, and restores the Unix permission bits
  stored in each entry's external_attr.
  """
  if not os.path.isabs(filename):
    filename = os.path.join(os.getcwd(), filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(dir):
    os.mkdir(dir)
  # Join each entry against |dir| instead of os.chdir()-ing into it, so the
  # process working directory is left untouched even if extraction raises.
  for info in zf.infolist():
    name = info.filename
    target = os.path.join(dir, name)
    if name.endswith('/'):  # Directory entry.
      if not os.path.isdir(target):
        os.makedirs(target)
    else:  # File entry.
      # Some archives list files without explicit directory entries; create
      # the parent directory on demand. The |parent| guard also avoids
      # os.makedirs('') blowing up on top-level files.
      parent = os.path.dirname(target)
      if parent and not os.path.isdir(parent):
        os.makedirs(parent)
      out = open(target, 'wb')
      try:
        out.write(zf.read(name))
      finally:
        out.close()
      # Set permissions. Permission info in external_attr is shifted 16 bits.
      os.chmod(target, info.external_attr >> 16)
  zf.close()
[email protected]bd8dcb92010-03-31 01:05:24263
[email protected]67e0bc62009-09-03 22:06:09264
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads the build archive for revision |rev| into |filename|.

  (Unzipping happens later, in RunRevision; this function only downloads.)

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    # Raising from the urlretrieve report hook is the only way to abort the
    # transfer early.
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print('')
  except RuntimeError:
    # Raised by ReportHook when the master thread asked us to quit; the
    # partial download is deleted by DownloadJob.Stop().
    pass
[email protected]7ad66a72009-09-04 17:52:33298
[email protected]7ad66a72009-09-04 17:52:33299
def RunRevision(context, revision, zipfile, profile, num_runs, args):
  """Given a zipped revision, unzip it and run the test.

  @param context A PathContext instance.
  @param revision The Chromium revision number/tag being tested.
  @param zipfile Path of the downloaded build archive.
  @param profile The user-data-dir profile name to launch with.
  @param num_runs Number of times to launch the build; must be >= 1 (the
                  return value comes from the last launch).
  @param args Extra command-line arguments passed through to the browser.

  Returns a (returncode, stdout, stderr) tuple from the last run.
  """
  print("Trying revision %s..." % str(revision))

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = [context.GetLaunchPath(), '--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if context.is_official and (context.platform == 'linux' or
                              context.platform == 'linux64'):
    testargs.append('--no-sandbox')

  for _ in range(0, num_runs):
    subproc = subprocess.Popen(testargs,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()

  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    # Best-effort cleanup; a leftover temp dir is not fatal.
    pass

  return (subproc.returncode, stdout, stderr)
[email protected]79f14742010-03-10 01:01:57331
[email protected]cb155a82011-11-29 17:25:34332
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad."""
  prompt = 'Revision %s is [(g)ood/(b)ad/(u)nknown/(q)uit]: ' % str(rev)
  # Keep prompting until the user types one of the recognized letters.
  while True:
    response = raw_input(prompt)
    if response in ('g', 'b', 'u'):
      return response
    if response == 'q':
      raise SystemExit()
[email protected]67e0bc62009-09-03 22:06:09343
[email protected]cb155a82011-11-29 17:25:34344
class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zipfile):
    """Stores the download parameters and creates the control events."""
    super(DownloadJob, self).__init__()
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    # Set to tell the fetch thread to abort the transfer.
    self.quit_event = threading.Event()
    # Set to tell the fetch thread to print download progress.
    self.progress_event = threading.Event()

  def Start(self):
    """Starts the download."""
    self.thread = threading.Thread(
        target=FetchRevision,
        name=self.name,
        args=(self.context, self.rev, self.zipfile,
              self.quit_event, self.progress_event))
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    print("Downloading revision %s..." % str(self.rev))
    self.progress_event.set()  # Display progress of download.
    self.thread.join()
381
382
def Bisect(platform,
           official_builds,
           good_rev=0,
           bad_rev=0,
           num_runs=1,
           try_args=(),
           profile=None,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param good_rev Number/tag of the known good revision.
  @param bad_rev Number/tag of the known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Returns a (min_rev, max_rev) pair bracketing the change once the range has
  been narrowed to adjacent revisions.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

  - If rev 50 is good, the download of rev 25 is cancelled, and the next test
    is run on rev 75.

  - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
    is run on rev 25.
  """

  if not profile:
    profile = 'profile'

  context = PathContext(platform, good_rev, bad_rev, official_builds)
  cwd = os.getcwd()

  print "Downloading list of known revisions..."
  # Downloads land in the original working directory, named after the
  # revision and the platform's archive name.
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  # minrev/maxrev/pivot are INDICES into revlist, not revision numbers.
  minrev = 0
  maxrev = len(revlist) - 1
  pivot = maxrev / 2  # Python 2 integer division.
  rev = revlist[pivot]
  # NOTE(review): this local shadows the |zipfile| module import; harmless
  # here since the module is not used inside Bisect, but worth renaming.
  zipfile = _GetDownloadPath(rev)
  fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
  fetch.Start()
  fetch.WaitFor()

  # Binary search time!
  # |fetch| becomes None when there is no next pivot left to test.
  while fetch and fetch.zipfile and maxrev - minrev > 1:
    # The search supports bisecting in either direction (good before bad or
    # bad before good); label the ends of the range accordingly.
    if bad_rev < good_rev:
      min_str, max_str = "bad", "good"
    else:
      min_str, max_str = "good", "bad"
    print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str, \
                                                   revlist[maxrev], max_str)

    # Pre-fetch next two possible pivots
    # - down_pivot is the next revision to check if the current revision turns
    #   out to be bad.
    # - up_pivot is the next revision to check if the current revision turns
    #   out to be good.
    down_pivot = int((pivot - minrev) / 2) + minrev
    down_fetch = None
    if down_pivot != pivot and down_pivot != minrev:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((maxrev - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != maxrev:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    status = None
    stdout = None
    stderr = None
    try:
      (status, stdout, stderr) = RunRevision(context,
                                             rev,
                                             fetch.zipfile,
                                             profile,
                                             num_runs,
                                             try_args)
    except Exception, e:
      print >>sys.stderr, e
    # Done testing this revision; Stop() joins the fetch thread and deletes
    # the downloaded archive for the pivot we just tested.
    fetch.Stop()
    fetch = None

    # Call the evaluate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      answer = evaluate(rev, official_builds, status, stdout, stderr)
      # "Good" answers move the lower bound up when bisecting forward, and
      # symmetrically move the upper bound down when good_rev > bad_rev.
      if answer == 'g' and good_rev < bad_rev or \
          answer == 'b' and bad_rev < good_rev:
        minrev = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
        if up_fetch:
          up_fetch.WaitFor()
        pivot = up_pivot
        fetch = up_fetch
      elif answer == 'b' and good_rev < bad_rev or \
          answer == 'g' and bad_rev < good_rev:
        maxrev = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
        if down_fetch:
          down_fetch.WaitFor()
        pivot = down_pivot
        fetch = down_fetch
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        revlist.pop(pivot)
        maxrev -= 1  # Assumes maxrev >= pivot.

        if maxrev - minrev > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zipfile = fetch.zipfile

        # Whichever prefetch was not chosen above is cancelled and deleted.
        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, "Unexpected return value from evaluate(): " + answer
    except SystemExit:
      # The user chose (q)uit in AskIsGoodBuild; remove any prefetched
      # archives before exiting.
      print "Cleaning up..."
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[minrev], revlist[maxrev])
[email protected]60ac66e32011-07-18 16:08:25562
563
def GetWebKitRevisionForChromiumRevision(rev):
  """Returns the webkit revision that was in chromium's DEPS file at
  chromium revision |rev|."""
  # '.' doesn't match newlines without re.DOTALL, so this is safe.
  pattern = re.compile(r'webkit_revision.:\D*(\d+)')
  handle = urllib.urlopen(DEPS_FILE % rev)
  deps_contents = handle.read()
  handle.close()
  match = pattern.search(deps_contents)
  if not match:
    raise Exception('Could not get webkit revision for cr rev %d' % rev)
  return int(match.group(1))
576
577
def GetChromiumRevision(url):
  """Returns the chromium revision read from given URL.

  Deliberately best-effort: on any fetch/parse failure it warns and returns a
  very large revision number (effectively HEAD) so bisection can proceed.
  """
  try:
    # Location of the latest build revision number
    return int(urllib.urlopen(url).read())
  except Exception:
    print('Could not determine latest revision. This could be bad...')
    return 999999999
586
587
[email protected]67e0bc62009-09-03 22:06:09588def main():
[email protected]2c1d2732009-10-29 19:52:17589 usage = ('%prog [options] [-- chromium-options]\n'
[email protected]887c9182013-02-12 20:30:31590 'Perform binary search on the snapshot builds to find a minimal\n'
591 'range of revisions where a behavior change happened. The\n'
592 'behaviors are described as "good" and "bad".\n'
593 'It is NOT assumed that the behavior of the later revision is\n'
[email protected]09c58da2013-01-07 21:30:17594 'the bad one.\n'
[email protected]178aab72010-10-08 17:21:38595 '\n'
[email protected]887c9182013-02-12 20:30:31596 'Revision numbers should use\n'
597 ' Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
598 ' SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
599 ' Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
600 ' for earlier revs.\n'
601 ' Chrome\'s about: build number and omahaproxy branch_revision\n'
602 ' are incorrect, they are from branches.\n'
603 '\n'
[email protected]178aab72010-10-08 17:21:38604 'Tip: add "-- --no-first-run" to bypass the first run prompts.')
[email protected]7ad66a72009-09-04 17:52:33605 parser = optparse.OptionParser(usage=usage)
[email protected]1a45d222009-09-19 01:58:57606 # Strangely, the default help output doesn't include the choice list.
[email protected]20105cf2011-05-10 18:16:45607 choices = ['mac', 'win', 'linux', 'linux64']
[email protected]4082b182011-05-02 20:30:17608 # linux-chromiumos lacks a continuous archive http://crbug.com/78158
[email protected]7ad66a72009-09-04 17:52:33609 parser.add_option('-a', '--archive',
[email protected]1a45d222009-09-19 01:58:57610 choices = choices,
611 help = 'The buildbot archive to bisect [%s].' %
612 '|'.join(choices))
[email protected]d0149c5c2012-05-29 21:12:11613 parser.add_option('-o', action="store_true", dest='official_builds',
614 help = 'Bisect across official ' +
615 'Chrome builds (internal only) instead of ' +
616 'Chromium archives.')
617 parser.add_option('-b', '--bad', type = 'str',
[email protected]09c58da2013-01-07 21:30:17618 help = 'A bad revision to start bisection. ' +
619 'May be earlier or later than the good revision. ' +
620 'Default is HEAD.')
[email protected]d0149c5c2012-05-29 21:12:11621 parser.add_option('-g', '--good', type = 'str',
[email protected]09c58da2013-01-07 21:30:17622 help = 'A good revision to start bisection. ' +
623 'May be earlier or later than the bad revision. ' +
[email protected]801fb652012-07-20 20:13:50624 'Default is 0.')
[email protected]d4bf3582009-09-20 00:56:38625 parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
626 help = 'Profile to use; this will not reset every run. ' +
[email protected]60ac66e32011-07-18 16:08:25627 'Defaults to a clean profile.', default = 'profile')
[email protected]5e93cf162012-01-28 02:16:56628 parser.add_option('-t', '--times', type = 'int',
629 help = 'Number of times to run each build before asking ' +
630 'if it\'s good or bad. Temporary profiles are reused.',
631 default = 1)
[email protected]7ad66a72009-09-04 17:52:33632 (opts, args) = parser.parse_args()
633
634 if opts.archive is None:
[email protected]178aab72010-10-08 17:21:38635 print 'Error: missing required parameter: --archive'
636 print
[email protected]7ad66a72009-09-04 17:52:33637 parser.print_help()
638 return 1
639
[email protected]183706d92011-06-10 13:06:22640 # Create the context. Initialize 0 for the revisions as they are set below.
[email protected]d0149c5c2012-05-29 21:12:11641 context = PathContext(opts.archive, 0, 0, opts.official_builds)
[email protected]67e0bc62009-09-03 22:06:09642 # Pick a starting point, try to get HEAD for this.
[email protected]7ad66a72009-09-04 17:52:33643 if opts.bad:
644 bad_rev = opts.bad
645 else:
[email protected]801fb652012-07-20 20:13:50646 bad_rev = '999.0.0.0'
647 if not opts.official_builds:
648 bad_rev = GetChromiumRevision(context.GetLastChangeURL())
[email protected]67e0bc62009-09-03 22:06:09649
650 # Find out when we were good.
[email protected]7ad66a72009-09-04 17:52:33651 if opts.good:
652 good_rev = opts.good
653 else:
[email protected]801fb652012-07-20 20:13:50654 good_rev = '0.0.0.0' if opts.official_builds else 0
655
656 if opts.official_builds:
657 good_rev = LooseVersion(good_rev)
658 bad_rev = LooseVersion(bad_rev)
659 else:
660 good_rev = int(good_rev)
661 bad_rev = int(bad_rev)
662
[email protected]5e93cf162012-01-28 02:16:56663 if opts.times < 1:
664 print('Number of times to run (%d) must be greater than or equal to 1.' %
665 opts.times)
666 parser.print_help()
667 return 1
668
[email protected]eadd95d2012-11-02 22:42:09669 (min_chromium_rev, max_chromium_rev) = Bisect(
[email protected]d0149c5c2012-05-29 21:12:11670 opts.archive, opts.official_builds, good_rev, bad_rev, opts.times, args,
671 opts.profile)
[email protected]67e0bc62009-09-03 22:06:09672
[email protected]b2fe7f22011-10-25 22:58:31673 # Get corresponding webkit revisions.
674 try:
[email protected]eadd95d2012-11-02 22:42:09675 min_webkit_rev = GetWebKitRevisionForChromiumRevision(min_chromium_rev)
676 max_webkit_rev = GetWebKitRevisionForChromiumRevision(max_chromium_rev)
[email protected]b2fe7f22011-10-25 22:58:31677 except Exception, e:
678 # Silently ignore the failure.
[email protected]eadd95d2012-11-02 22:42:09679 min_webkit_rev, max_webkit_rev = 0, 0
[email protected]b2fe7f22011-10-25 22:58:31680
[email protected]67e0bc62009-09-03 22:06:09681 # We're done. Let the user know the results in an official manner.
[email protected]eadd95d2012-11-02 22:42:09682 if good_rev > bad_rev:
683 print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev), str(max_chromium_rev))
684 else:
685 print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev), str(max_chromium_rev))
686
687 if min_webkit_rev != max_webkit_rev:
[email protected]b2fe7f22011-10-25 22:58:31688 print 'WEBKIT CHANGELOG URL:'
[email protected]eadd95d2012-11-02 22:42:09689 print ' ' + WEBKIT_CHANGELOG_URL % (max_webkit_rev, min_webkit_rev)
[email protected]d0149c5c2012-05-29 21:12:11690 print 'CHANGELOG URL:'
691 if opts.official_builds:
[email protected]eadd95d2012-11-02 22:42:09692 print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
[email protected]d0149c5c2012-05-29 21:12:11693 else:
[email protected]eadd95d2012-11-02 22:42:09694 print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
[email protected]cb155a82011-11-29 17:25:34695
[email protected]67e0bc62009-09-03 22:06:09696if __name__ == '__main__':
[email protected]7ad66a72009-09-04 17:52:33697 sys.exit(main())