blob: 307a7b99f3a1ecf82e3ad9893d66a656617beb3c [file] [log] [blame]
[email protected]cb155a82011-11-29 17:25:341#!/usr/bin/env python
[email protected]5e93cf162012-01-28 02:16:562# Copyright (c) 2012 The Chromium Authors. All rights reserved.
[email protected]67e0bc62009-09-03 22:06:093# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""Snapshot Build Bisect Tool
7
[email protected]7ad66a72009-09-04 17:52:338This script bisects a snapshot archive using binary search. It starts at
[email protected]67e0bc62009-09-03 22:06:099a bad revision (it will try to guess HEAD) and asks for a last known-good
10revision. It will then binary search across this revision range by downloading,
11unzipping, and opening Chromium for you. After testing the specific revision,
12it will ask you whether it is good or bad before continuing the search.
[email protected]67e0bc62009-09-03 22:06:0913"""
14
[email protected]183706d92011-06-10 13:06:2215# The root URL for storage.
[email protected]60ac66e32011-07-18 16:08:2516BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
[email protected]67e0bc62009-09-03 22:06:0917
[email protected]d0149c5c2012-05-29 21:12:1118# The root URL for official builds.
[email protected]b2905832012-07-19 21:28:4319OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'
[email protected]d0149c5c2012-05-29 21:12:1120
[email protected]183706d92011-06-10 13:06:2221# Changelogs URL.
[email protected]07247462010-12-24 07:45:5622CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
[email protected]3d6d9f82012-04-18 05:55:1323 'perf/dashboard/ui/changelog.html?url=/trunk/src&range=%d%%3A%d'
[email protected]f6a71a72009-10-08 19:55:3824
[email protected]d0149c5c2012-05-29 21:12:1125# Official Changelogs URL.
26OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/'\
27 'changelog?old_version=%s&new_version=%s'
28
[email protected]b2fe7f22011-10-25 22:58:3129# DEPS file URL.
30DEPS_FILE= 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'
[email protected]b2fe7f22011-10-25 22:58:3131# WebKit Changelogs URL.
32WEBKIT_CHANGELOG_URL = 'http://trac.webkit.org/log/' \
[email protected]9fc175ca2012-05-24 01:15:4233 'trunk/?rev=%d&stop_rev=%d&verbose=on&limit=10000'
[email protected]b2fe7f22011-10-25 22:58:3134
[email protected]05ff3fd2012-04-17 23:24:0635DONE_MESSAGE = 'You are probably looking for a change made after ' \
[email protected]d0149c5c2012-05-29 21:12:1136 '%s (known good), but no later than %s (first known bad).'
[email protected]05ff3fd2012-04-17 23:24:0637
[email protected]67e0bc62009-09-03 22:06:0938###############################################################################
39
40import math
[email protected]7ad66a72009-09-04 17:52:3341import optparse
[email protected]67e0bc62009-09-03 22:06:0942import os
[email protected]d4bf3582009-09-20 00:56:3843import pipes
[email protected]67e0bc62009-09-03 22:06:0944import re
45import shutil
[email protected]afe30662011-07-30 01:05:5246import subprocess
[email protected]67e0bc62009-09-03 22:06:0947import sys
[email protected]7ad66a72009-09-04 17:52:3348import tempfile
[email protected]afe30662011-07-30 01:05:5249import threading
[email protected]67e0bc62009-09-03 22:06:0950import urllib
[email protected]d0149c5c2012-05-29 21:12:1151from distutils.version import LooseVersion
[email protected]183706d92011-06-10 13:06:2252from xml.etree import ElementTree
[email protected]bd8dcb92010-03-31 01:05:2453import zipfile
54
[email protected]cb155a82011-11-29 17:25:3455
class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, platform, good_revision, bad_revision, is_official):
    """Initializes platform-specific archive/directory/binary names.

    Args:
      platform: Value of the '-a/--archive' option; one of 'linux',
          'linux64', 'mac' or 'win'.
      good_revision: Number/tag of the last known good revision.
      bad_revision: Number/tag of the first known bad revision.
      is_official: True to target official Chrome builds instead of the
          Chromium continuous snapshot archives.

    Raises:
      Exception: If |platform| is not one of the supported values.
    """
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform == 'linux' or self.platform == 'linux64':
      self._binary_name = 'chrome'
    elif self.platform == 'mac':
      # The Mac binary name differs between official and Chromium builds and
      # is filled in below.
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform == 'win':
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'lucid32bit/'
        self.archive_name = 'chrome-lucid32bit.zip'
        self._archive_extract_dir = 'chrome-lucid32bit'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'lucid64bit/'
        self.archive_name = 'chrome-lucid64bit.zip'
        self._archive_extract_dir = 'chrome-lucid64bit'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        self._listing_platform_dir = 'win/'
    else:
      if self.platform == 'linux' or self.platform == 'linux64':
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir + \
        marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    # Official builds group by revision first; snapshots group by platform.
    if self.is_official:
      return "%s/%s/%s%s" % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      return "%s/%s%s/%s" % (
          BASE_URL, self._listing_platform_dir, revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction
    location) that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers. Note: this returns the full listing; filtering to the
    good..bad range happens in GetRevList()."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and
      another fetch should be performed with next-marker being the marker=
      GET parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to
      # extract the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list
      # is truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text

      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form of
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        try:
          revnum = int(revnum)
          revisions.append(revnum)
        except ValueError:
          # Non-numeric entries (e.g. LAST_CHANGE) are skipped.
          pass
      return (revisions, next_marker)

    # Fetch the first list of revisions.
    (revisions, next_marker) = _FetchAndParse(self.GetListingURL())

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
    return revisions

  def GetRevList(self):
    """Gets the sorted list of revision numbers between self.good_revision
    and self.bad_revision (inclusive)."""
    # Convert the bounds once instead of per-element inside the filter.
    minrev = int(self.good_revision)
    maxrev = int(self.bad_revision)
    revlist = [x for x in self.ParseDirectoryIndex()
               if minrev <= x <= maxrev]
    revlist.sort()
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = self.good_revision
    maxrev = self.bad_revision
    handle = urllib.urlopen(OFFICIAL_BASE_URL)
    dirindex = handle.read()
    handle.close()
    build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
    final_list = []
    start_index = 0
    end_index = 0
    i = 0

    # Sort version strings numerically (e.g. 19.0.1084.0 < 20.0.1132.0).
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    for build_number in sorted(parsed_build_numbers):
      path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
             self._listing_platform_dir + self.archive_name
      i = i + 1
      try:
        # Probe for the archive; builds without one for this platform are
        # skipped.
        connection = urllib.urlopen(path)
        connection.close()
        final_list.append(str(build_number))
        if str(build_number) == minrev:
          start_index = i
        if str(build_number) == maxrev:
          end_index = i
      except IOError:
        # urllib.urlopen raises IOError on failure. The previous code caught
        # urllib.HTTPError, which does not exist in the urllib module (only
        # urllib2 defines it) and would itself raise AttributeError, aborting
        # the whole listing on the first unreachable build.
        pass
    return final_list[start_index:end_index]
[email protected]bd8dcb92010-03-31 01:05:24239
def UnzipFilenameToDir(filename, dir):
  """Unzip |filename| into directory |dir|, preserving file permissions.

  Directories are created as needed. On any extraction error the error is
  printed to stderr and the process exits with status 1 (matching the
  original best-effort behavior of this script).
  """
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  try:
    # Make base.
    if not os.path.isdir(dir):
      os.mkdir(dir)
    os.chdir(dir)
    # Extract files.
    for info in zf.infolist():
      name = info.filename
      if name.endswith('/'):  # A directory entry.
        if not os.path.isdir(name):
          os.makedirs(name)
      else:  # A file entry; ensure its parent directory exists first.
        # Guard against dirname('') for top-level entries: os.makedirs('')
        # raises OSError, which the old code would have hit.
        parent = os.path.dirname(name)
        if parent and not os.path.isdir(parent):
          os.makedirs(parent)
        out = open(name, 'wb')
        try:
          out.write(zf.read(name))
        finally:
          out.close()
        # Set permissions. Permission info in external_attr is shifted
        # 16 bits.
        os.chmod(name, info.external_attr >> 16)
  except Exception as e:
    sys.stderr.write('%s\n' % e)
    sys.exit(1)
  finally:
    # Always close the archive and restore the working directory, even when
    # extraction fails partway through.
    zf.close()
    os.chdir(cwd)
270
[email protected]67e0bc62009-09-03 22:06:09271
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Fetches the build archive for revision |rev| into |filename|.

  @param context A PathContext instance used to build the download URL.
  @param rev The Chromium revision number/tag to download.
  @param filename Destination path for the downloaded archive.
  @param quit_event Optional threading.Event; when set by the master thread,
                    the in-flight download is aborted.
  @param progress_event Optional threading.Event; when set by the master
                        thread, download progress is printed to stdout.
  """
  def _ReportHook(blocknum, blocksize, totalsize):
    # Raising from the hook is the only way to abort urlretrieve midstream.
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %s" % str(rev))
    if not (progress_event and progress_event.isSet()):
      return
    received = blocknum * blocksize
    if totalsize == -1:  # Server did not report a total size.
      message = "Received %d bytes" % received
    else:
      received = min(totalsize, received)
      message = "Received %d of %d bytes, %.2f%%" % (
          received, totalsize, 100.0 * received / totalsize)
    # A leading \r keeps every progress message on a single output line.
    sys.stdout.write("\r" + message)
    sys.stdout.flush()

  try:
    urllib.urlretrieve(context.GetDownloadURL(rev), filename, _ReportHook)
    if progress_event and progress_event.isSet():
      sys.stdout.write("\n")
  except RuntimeError:
    # Raised by _ReportHook when the master thread cancelled the download.
    pass
[email protected]7ad66a72009-09-04 17:52:33305
[email protected]7ad66a72009-09-04 17:52:33306
def RunRevision(context, revision, zipfile, profile, num_runs, args):
  """Given a zipped revision, unzip it and run the test.

  @param context A PathContext instance for the platform/build flavor.
  @param revision The revision number/tag being tested (used for logging).
  @param zipfile Path to the downloaded build archive.
  @param profile Name of the user-data-dir profile to launch with.
  @param num_runs Number of times to launch the build; only the output of
                  the last run is returned.
  @param args Extra command-line arguments passed through to the browser.

  Returns a (returncode, stdout, stderr) tuple from the last launched run.
  """
  print "Trying revision %s..." % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  # Run from inside the temp dir so GetLaunchPath()'s relative path resolves.
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = [context.GetLaunchPath(), '--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if context.is_official and (context.platform == 'linux' or
                              context.platform == 'linux64'):
    testargs.append('--no-sandbox')

  for i in range(0, num_runs):
    subproc = subprocess.Popen(testargs,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    # communicate() blocks until the browser exits before the next run.
    (stdout, stderr) = subproc.communicate()

  # Restore the working directory before deleting the temp dir. The removal
  # is best-effort: on Windows, files may still be locked briefly.
  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception, e:
    pass

  return (subproc.returncode, stdout, stderr)
[email protected]79f14742010-03-10 01:01:57338
[email protected]cb155a82011-11-29 17:25:34339
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Prompts the user to classify build |rev|.

  Returns 'g' (good), 'b' (bad) or 'u' (unknown); raises SystemExit when the
  user answers 'q'. The remaining arguments are unused here but keep the
  signature expected of Bisect()'s evaluate callback.
  """
  prompt = 'Revision %s is [(g)ood/(b)ad/(u)nknown/(q)uit]: ' % str(rev)
  # Keep prompting until the answer is one we understand.
  while True:
    answer = raw_input(prompt)
    if answer == 'q':
      raise SystemExit()
    if answer in ('g', 'b', 'u'):
      return answer
[email protected]67e0bc62009-09-03 22:06:09350
[email protected]cb155a82011-11-29 17:25:34351
class DownloadJob(object):
  """A background task that downloads one Chromium revision archive."""

  def __init__(self, context, name, rev, zipfile):
    super(DownloadJob, self).__init__()
    # Inputs forwarded to FetchRevision on the worker thread.
    self.context = context
    self.name = name
    self.rev = rev
    self.zipfile = zipfile
    # Signals consumed by FetchRevision: quit_event aborts the download;
    # progress_event turns on progress reporting.
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()

  def Start(self):
    """Kicks off the download on a background thread."""
    self.thread = threading.Thread(
        target=FetchRevision,
        name=self.name,
        args=(self.context, self.rev, self.zipfile, self.quit_event,
              self.progress_event))
    self.thread.start()

  def Stop(self):
    """Aborts a previously-started download and deletes the partial file."""
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zipfile)

  def WaitFor(self):
    """Blocks until a previously-started download finishes, with progress."""
    sys.stdout.write("Downloading revision %s...\n" % str(self.rev))
    self.progress_event.set()  # Ask the worker to display progress.
    self.thread.join()
388
389
def Bisect(platform,
           official_builds,
           good_rev=0,
           bad_rev=0,
           num_runs=1,
           try_args=(),
           profile=None,
           evaluate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param official_builds Specify build type (Chromium or Official build).
  @param good_rev Number/tag of the last known good revision.
  @param bad_rev Number/tag of the first known bad revision.
  @param num_runs Number of times to run each build for asking good/bad.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param evaluate A function which returns 'g' if the argument build is good,
                  'b' if it's bad or 'u' if unknown.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

    - If rev 50 is good, the download of rev 25 is cancelled, and the next test
      is run on rev 75.

    - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
      is run on rev 25.
  """

  if not profile:
    profile = 'profile'

  context = PathContext(platform, good_rev, bad_rev, official_builds)
  cwd = os.getcwd()

  print "Downloading list of known revisions..."
  # Archives are downloaded next to the script as "<rev>-<archive_name>".
  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%s-%s' % (str(rev), context.archive_name))
  if official_builds:
    revlist = context.GetOfficialBuildsList()
  else:
    revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  # good/bad/pivot are INDEXES into revlist, not revision numbers.
  good = 0
  bad = len(revlist) - 1
  pivot = bad / 2
  rev = revlist[pivot]
  zipfile = _GetDownloadPath(rev)
  initial_fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
  initial_fetch.Start()
  initial_fetch.WaitFor()

  # Binary search time!
  while zipfile and bad - good > 1:
    # Pre-fetch next two possible pivots
    #   - down_pivot is the next revision to check if the current revision
    #     turns out to be bad.
    #   - up_pivot is the next revision to check if the current revision turns
    #     out to be good.
    down_pivot = int((pivot - good) / 2) + good
    down_fetch = None
    if down_pivot != pivot and down_pivot != good:
      down_rev = revlist[down_pivot]
      down_fetch = DownloadJob(context, 'down_fetch', down_rev,
                               _GetDownloadPath(down_rev))
      down_fetch.Start()

    up_pivot = int((bad - pivot) / 2) + pivot
    up_fetch = None
    if up_pivot != pivot and up_pivot != bad:
      up_rev = revlist[up_pivot]
      up_fetch = DownloadJob(context, 'up_fetch', up_rev,
                             _GetDownloadPath(up_rev))
      up_fetch.Start()

    # Run test on the pivot revision.
    (status, stdout, stderr) = RunRevision(context,
                                           rev,
                                           zipfile,
                                           profile,
                                           num_runs,
                                           try_args)
    os.unlink(zipfile)
    zipfile = None

    # Call the evaluate function to see if the current revision is good or
    # bad. On that basis, kill one of the background downloads and complete
    # the other, as described in the comments above.
    try:
      answer = evaluate(rev, official_builds, status, stdout, stderr)
      if answer == 'g':
        good = pivot
        if down_fetch:
          down_fetch.Stop()  # Kill the download of the older revision.
        if up_fetch:
          # If up_fetch is None the range is already adjacent; zipfile stays
          # None and the loop exits.
          up_fetch.WaitFor()
          pivot = up_pivot
          zipfile = up_fetch.zipfile
      elif answer == 'b':
        bad = pivot
        if up_fetch:
          up_fetch.Stop()  # Kill the download of the newer revision.
        if down_fetch:
          down_fetch.WaitFor()
          pivot = down_pivot
          zipfile = down_fetch.zipfile
      elif answer == 'u':
        # Nuke the revision from the revlist and choose a new pivot.
        revlist.pop(pivot)
        bad -= 1  # Assumes bad >= pivot.

        fetch = None
        if bad - good > 1:
          # Alternate between using down_pivot or up_pivot for the new pivot
          # point, without affecting the range. Do this instead of setting the
          # pivot to the midpoint of the new range because adjacent revisions
          # are likely affected by the same issue that caused the (u)nknown
          # response.
          if up_fetch and down_fetch:
            fetch = [up_fetch, down_fetch][len(revlist) % 2]
          elif up_fetch:
            fetch = up_fetch
          else:
            fetch = down_fetch
          fetch.WaitFor()
          if fetch == up_fetch:
            pivot = up_pivot - 1  # Subtracts 1 because revlist was resized.
          else:
            pivot = down_pivot
          zipfile = fetch.zipfile

        # Cancel whichever prefetch we are not going to use.
        if down_fetch and fetch != down_fetch:
          down_fetch.Stop()
        if up_fetch and fetch != up_fetch:
          up_fetch.Stop()
      else:
        assert False, "Unexpected return value from evaluate(): " + answer
    except SystemExit:
      # The user chose (q)uit; best-effort removal of any prefetched archives.
      print "Cleaning up..."
      for f in [_GetDownloadPath(revlist[down_pivot]),
                _GetDownloadPath(revlist[up_pivot])]:
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[good], revlist[bad])
[email protected]60ac66e32011-07-18 16:08:25555
556
def GetWebKitRevisionForChromiumRevision(rev):
  """Returns the webkit revision that was in chromium's DEPS file at
  chromium revision |rev|."""
  handle = urllib.urlopen(DEPS_FILE % rev)
  deps_contents = handle.read()
  handle.close()
  # The '.' after webkit_revision matches the closing quote of the key;
  # without re.DOTALL it cannot run past the end of the line, so this is safe.
  match = re.search(r'webkit_revision.:\D*(\d+)', deps_contents)
  if not match:
    raise Exception('Could not get webkit revision for cr rev %d' % rev)
  return int(match.group(1))
569
570
def main():
  """Entry point: parses options, determines the good/bad revision range
  (prompting the user for anything missing), runs the bisect, and prints the
  resulting changelog URLs. Returns a process exit status."""
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices = choices,
                    help = 'The buildbot archive to bisect [%s].' %
                           '|'.join(choices))
  parser.add_option('-o', action="store_true", dest='official_builds',
                    help = 'Bisect across official ' +
                           'Chrome builds (internal only) instead of ' +
                           'Chromium archives.')
  parser.add_option('-b', '--bad', type = 'str',
                    help = 'The bad revision to bisect to.')
  parser.add_option('-g', '--good', type = 'str',
                    help = 'The last known good revision to bisect from.')
  parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
                    help = 'Profile to use; this will not reset every run. ' +
                           'Defaults to a clean profile.', default = 'profile')
  parser.add_option('-t', '--times', type = 'int',
                    help = 'Number of times to run each build before asking ' +
                           'if it\'s good or bad. Temporary profiles are ' +
                           'reused.',
                    default = 1)
  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  # NOTE(review): opts.good/opts.bad are strings here, so this comparison is
  # lexicographic ('100' < '99') and the %d formats below would raise
  # TypeError if this branch is hit — confirm intended behavior.
  if opts.bad and opts.good and (opts.good > opts.bad):
    print ('The good revision (%d) must precede the bad revision (%d).\n' %
           (opts.good, opts.bad))
    parser.print_help()
    return 1

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(opts.archive, 0, 0, opts.official_builds)

  if opts.official_builds and opts.bad is None:
    print >>sys.stderr, 'Bisecting official builds requires a bad build number.'
    parser.print_help()
    return 1

  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = 0
    try:
      # Location of the latest build revision number
      nh = urllib.urlopen(context.GetLastChangeURL())
      latest = int(nh.read())
      nh.close()
      bad_rev = raw_input('Bad revision [HEAD:%d]: ' % latest)
      if (bad_rev == ''):
        bad_rev = latest
      bad_rev = int(bad_rev)
    except Exception, e:
      # Network or parse failure: fall back to asking the user directly.
      print('Could not determine latest revision. This could be bad...')
      bad_rev = int(raw_input('Bad revision: '))

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = 0
    try:
      good_rev = int(raw_input('Last known good [0]: '))
    except Exception, e:
      # Empty/invalid input keeps the default of 0 (bisect from the start).
      pass

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  (last_known_good_rev, first_known_bad_rev) = Bisect(
      opts.archive, opts.official_builds, good_rev, bad_rev, opts.times, args,
      opts.profile)

  # Get corresponding webkit revisions.
  try:
    last_known_good_webkit_rev = GetWebKitRevisionForChromiumRevision(
        last_known_good_rev)
    first_known_bad_webkit_rev = GetWebKitRevisionForChromiumRevision(
        first_known_bad_rev)
  except Exception, e:
    # Silently ignore the failure.
    last_known_good_webkit_rev, first_known_bad_webkit_rev = 0, 0

  # We're done. Let the user know the results in an official manner.
  print DONE_MESSAGE % (str(last_known_good_rev), str(first_known_bad_rev))
  # Only mention WebKit when its revision actually changed across the range.
  if last_known_good_webkit_rev != first_known_bad_webkit_rev:
    print 'WEBKIT CHANGELOG URL:'
    print '  ' + WEBKIT_CHANGELOG_URL % (first_known_bad_webkit_rev,
                                         last_known_good_webkit_rev)
  print 'CHANGELOG URL:'
  if opts.official_builds:
    print OFFICIAL_CHANGELOG_URL % (last_known_good_rev, first_known_bad_rev)
  else:
    print '  ' + CHANGELOG_URL % (last_known_good_rev, first_known_bad_rev)
[email protected]cb155a82011-11-29 17:25:34680
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  sys.exit(main())