blob: e115b160accd3d525e9db37d76c297cd05ce3b3f [file] [log] [blame]
[email protected]178aab72010-10-08 17:21:381#!/usr/bin/python
[email protected]4082b182011-05-02 20:30:172# Copyright (c) 2011 The Chromium Authors. All rights reserved.
[email protected]67e0bc62009-09-03 22:06:093# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6"""Snapshot Build Bisect Tool
7
[email protected]7ad66a72009-09-04 17:52:338This script bisects a snapshot archive using binary search. It starts at
[email protected]67e0bc62009-09-03 22:06:099a bad revision (it will try to guess HEAD) and asks for a last known-good
10revision. It will then binary search across this revision range by downloading,
11unzipping, and opening Chromium for you. After testing the specific revision,
12it will ask you whether it is good or bad before continuing the search.
[email protected]67e0bc62009-09-03 22:06:0913"""
14
# The root URL for storage.
BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'

# URL to the ViewVC commit page. Takes one %d: the revision number.
BUILD_VIEWVC_URL = 'http://src.chromium.org/viewvc/chrome?view=rev&revision=%d'

# Changelogs URL. Takes two %d placeholders: (good_revision, bad_revision).
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
    'perf/dashboard/ui/changelog.html?url=/trunk/src&range=%d:%d'

# DEPS file URL. Takes one %d: the Chromium revision number.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'

# WebKit Changelogs URL. Takes two %d placeholders: (rev, stop_rev).
WEBKIT_CHANGELOG_URL = 'http://trac.webkit.org/log/' \
    'trunk/?rev=%d&stop_rev=%d&verbose=on'
31
[email protected]67e0bc62009-09-03 22:06:0932###############################################################################
33
34import math
[email protected]7ad66a72009-09-04 17:52:3335import optparse
[email protected]67e0bc62009-09-03 22:06:0936import os
[email protected]d4bf3582009-09-20 00:56:3837import pipes
[email protected]67e0bc62009-09-03 22:06:0938import re
39import shutil
[email protected]afe30662011-07-30 01:05:5240import subprocess
[email protected]67e0bc62009-09-03 22:06:0941import sys
[email protected]7ad66a72009-09-04 17:52:3342import tempfile
[email protected]afe30662011-07-30 01:05:5243import threading
[email protected]67e0bc62009-09-03 22:06:0944import urllib
[email protected]183706d92011-06-10 13:06:2245from xml.etree import ElementTree
[email protected]bd8dcb92010-03-31 01:05:2446import zipfile
47
class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, platform, good_revision, bad_revision):
    """Initializes the context for one platform and a [good, bad] revision
    range.

    Raises an Exception if |platform| is not one of 'linux', 'linux64',
    'mac', or 'win'.
    """
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision

    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform == 'linux' or self.platform == 'linux64':
      self._listing_platform_dir = 'Linux/'
      self.archive_name = 'chrome-linux.zip'
      self._archive_extract_dir = 'chrome-linux'
      self._binary_name = 'chrome'
      # Linux and x64 share all the same path data except for the archive dir.
      if self.platform == 'linux64':
        self._listing_platform_dir = 'Linux_x64/'
    elif self.platform == 'mac':
      self._listing_platform_dir = 'Mac/'
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
      self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
    elif self.platform == 'win':
      self._listing_platform_dir = 'Win/'
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker.

    The marker is the pagination token returned by a previous (truncated)
    listing; see ParseDirectoryIndex below.
    """
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    return BASE_URL + '/?delimiter=/&prefix=' + self._listing_platform_dir + \
        marker_param

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    return "%s/%s%d/%s" % (
        BASE_URL, self._listing_platform_dir, revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return BASE_URL + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetLaunchPath(self):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    return os.path.join(self._archive_extract_dir, self._binary_name)

  def ParseDirectoryIndex(self):
    """Parses the Google Storage directory listing into a list of revision
    numbers. The range starts with self.good_revision and goes until
    self.bad_revision."""

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and another
      fetch should be performed with next-marker being the marker= GET
      parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception("Could not locate end namespace for directory index")
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text

      # Get a list of all the revisions.
      all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                      namespace + 'Prefix')
      # The <Prefix> nodes have content of the form of
      # |_listing_platform_dir/revision/|. Strip off the platform dir and the
      # trailing slash to just have a number.
      revisions = []
      for prefix in all_prefixes:
        revnum = prefix.text[prefix_len:-1]
        try:
          revnum = int(revnum)
          revisions.append(revnum)
        except ValueError:
          # Skip non-numeric directory entries.
          pass
      return (revisions, next_marker)

    # Fetch the first list of revisions.
    (revisions, next_marker) = _FetchAndParse(self.GetListingURL())

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)

    return revisions

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""
    # Download the revlist and filter for just the range between good and bad.
    minrev = self.good_revision
    maxrev = self.bad_revision
    revlist = map(int, self.ParseDirectoryIndex())
    revlist = [x for x in revlist if x >= minrev and x <= maxrev]
    revlist.sort()
    return revlist
[email protected]bd8dcb92010-03-31 01:05:24176
def UnzipFilenameToDir(filename, dir):
  """Unzip |filename| to directory |dir|.

  Recreates the archive's directory structure and restores each entry's
  permission bits (stored in the upper 16 bits of external_attr). On any
  extraction error, prints the error to stderr and exits the process with
  status 1 (historical behavior of this script).
  """
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  try:
    if not os.path.isdir(dir):
      os.mkdir(dir)
    os.chdir(dir)
    # Extract files.
    for info in zf.infolist():
      name = info.filename
      if name.endswith('/'):  # dir
        if not os.path.isdir(name):
          os.makedirs(name)
      else:  # file
        # Guard against entries with no directory component; makedirs('')
        # would raise.
        parent = os.path.dirname(name)
        if parent and not os.path.isdir(parent):
          os.makedirs(parent)
        out = open(name, 'wb')
        try:
          out.write(zf.read(name))
        finally:
          out.close()
        # Set permissions. Permission info in external_attr is shifted 16 bits.
        os.chmod(name, info.external_attr >> 16)
  except Exception as e:
    sys.stderr.write('%s\n' % e)
    sys.exit(1)
  finally:
    # Always restore the working directory and release the archive handle,
    # even when extraction fails or the process is exiting.
    os.chdir(cwd)
    zf.close()
207
[email protected]67e0bc62009-09-03 22:06:09208
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads the build archive for revision |rev| into |filename|.

  (The unzipping happens later, in RunRevision.)

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    # urlretrieve's reporthook; raising here is the only way to abort the
    # transfer from the master thread.
    if quit_event and quit_event.isSet():
      raise RuntimeError("Aborting download of revision %d" % rev)
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = "Received %d bytes" % size
      else:
        size = min(totalsize, size)
        progress = "Received %d of %d bytes, %.2f%%" % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write("\r" + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      # Terminate the single-line progress display. NOTE: under Python 2
      # without print_function, a bare print() would emit the string "()",
      # so write the newline explicitly.
      sys.stdout.write("\n")
  except RuntimeError:
    # Raised by ReportHook on abort. The partially-downloaded file (if any)
    # is left for the caller to clean up.
    pass
[email protected]7ad66a72009-09-04 17:52:33242
[email protected]7ad66a72009-09-04 17:52:33243
def RunRevision(context, revision, zipfile, profile, args):
  """Given a zipped revision, unzip it and run the test.

  @param context A PathContext instance.
  @param revision The Chromium revision number being tried (for logging).
  @param zipfile Path to the downloaded build archive.
  @param profile The user-data-dir to launch the build with.
  @param args Extra command-line arguments (list or tuple) for the build.
  @return A (returncode, stdout, stderr) tuple from the launched build.
  """
  print("Trying revision %d..." % revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zipfile, tempdir)
  os.chdir(tempdir)

  # Run the build. list(args) accepts a tuple too (e.g. Bisect's default
  # try_args=()); a plain list + tuple concatenation would raise TypeError.
  testargs = [context.GetLaunchPath(), '--user-data-dir=%s' % profile] + \
      list(args)
  subproc = subprocess.Popen(testargs,
                             bufsize=-1,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
  (stdout, stderr) = subproc.communicate()

  # Restore the working directory and best-effort clean up the scratch dir.
  os.chdir(cwd)
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    pass

  return (subproc.returncode, stdout, stderr)
[email protected]79f14742010-03-10 01:01:57269
def AskIsGoodBuild(rev, status, stdout, stderr):
  """Ask the user whether build |rev| is good or bad.

  Returns True for good, False for bad, and raises SystemExit on 'q'.
  Any other response re-prompts.
  """
  prompt = 'Revision %d is [(g)ood/(b)ad/(q)uit]: ' % int(rev)
  while True:
    answer = raw_input(prompt)
    if answer == 'g' or answer == 'b':
      return answer == 'g'
    if answer == 'q':
      raise SystemExit()
[email protected]67e0bc62009-09-03 22:06:09279
def Bisect(platform,
           good_rev=0,
           bad_rev=0,
           try_args=(),
           profile=None,
           predicate=AskIsGoodBuild):
  """Given known good and known bad revisions, run a binary search on all
  archived revisions to determine the last known good revision.

  @param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
  @param good_rev Number/tag of the last known good revision.
  @param bad_rev Number/tag of the first known bad revision.
  @param try_args A tuple of arguments to pass to the test application.
  @param profile The name of the user profile to run with.
  @param predicate A predicate function which returns True iff the argument
                   chromium revision is good.
  @return A (last_known_good_rev, first_known_bad_rev) tuple.
  @raise RuntimeError when fewer than two archived builds are in range.

  Threading is used to fetch Chromium revisions in the background, speeding up
  the user's experience. For example, suppose the bounds of the search are
  good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
  whether revision 50 is good or bad, the next revision to check will be either
  25 or 75. So, while revision 50 is being checked, the script will download
  revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
  known:

    - If rev 50 is good, the download of rev 25 is cancelled, and the next test
      is run on rev 75.

    - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
      is run on rev 25.
  """

  if not profile:
    profile = 'profile'

  context = PathContext(platform, good_rev, bad_rev)
  cwd = os.getcwd()

  _GetDownloadPath = lambda rev: os.path.join(cwd,
      '%d-%s' % (rev, context.archive_name))

  print("Downloading list of known revisions...")

  revlist = context.GetRevList()

  # Get a list of revisions to bisect across.
  if len(revlist) < 2:  # Don't have enough builds to bisect.
    msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
    raise RuntimeError(msg)

  # Figure out our bookends and first pivot point; fetch the pivot revision.
  good = 0
  bad = len(revlist) - 1
  pivot = bad / 2
  rev = revlist[pivot]
  zipfile = _GetDownloadPath(rev)
  progress_event = threading.Event()
  progress_event.set()
  print("Downloading revision %d..." % rev)
  FetchRevision(context, rev, zipfile,
                quit_event=None, progress_event=progress_event)

  # Binary search time!
  while zipfile and bad - good > 1:
    # Pre-fetch next two possible pivots
    #   - down_pivot is the next revision to check if the current revision turns
    #     out to be bad.
    #   - up_pivot is the next revision to check if the current revision turns
    #     out to be good.
    # The *_zipfile paths are reset to None every iteration so the SystemExit
    # cleanup below never sees an unassigned name or a stale path from a
    # previous iteration.
    down_pivot = int((pivot - good) / 2) + good
    down_thread = None
    down_zipfile = None
    if down_pivot != pivot and down_pivot != good:
      down_rev = revlist[down_pivot]
      down_zipfile = _GetDownloadPath(down_rev)
      down_quit_event = threading.Event()
      down_progress_event = threading.Event()
      fetchargs = (context,
                   down_rev,
                   down_zipfile,
                   down_quit_event,
                   down_progress_event)
      down_thread = threading.Thread(target=FetchRevision,
                                     name='down_fetch',
                                     args=fetchargs)
      down_thread.start()

    up_pivot = int((bad - pivot) / 2) + pivot
    up_thread = None
    up_zipfile = None
    if up_pivot != pivot and up_pivot != bad:
      up_rev = revlist[up_pivot]
      up_zipfile = _GetDownloadPath(up_rev)
      up_quit_event = threading.Event()
      up_progress_event = threading.Event()
      fetchargs = (context,
                   up_rev,
                   up_zipfile,
                   up_quit_event,
                   up_progress_event)
      up_thread = threading.Thread(target=FetchRevision,
                                   name='up_fetch',
                                   args=fetchargs)
      up_thread.start()

    # Run test on the pivot revision.
    (status, stdout, stderr) = RunRevision(context,
                                           rev,
                                           zipfile,
                                           profile,
                                           try_args)
    os.unlink(zipfile)
    zipfile = None

    # Call the predicate function to see if the current revision is good or bad.
    # On that basis, kill one of the background downloads and complete the
    # other, as described in the comments above.
    try:
      if predicate(rev, status, stdout, stderr):
        good = pivot
        if down_thread:
          down_quit_event.set()  # Kill the download of older revision.
          down_thread.join()
          os.unlink(down_zipfile)
        if up_thread:
          print("Downloading revision %d..." % up_rev)
          up_progress_event.set()  # Display progress of download.
          up_thread.join()  # Wait for newer revision to finish downloading.
          pivot = up_pivot
          zipfile = up_zipfile
      else:
        bad = pivot
        if up_thread:
          up_quit_event.set()  # Kill download of newer revision.
          up_thread.join()
          os.unlink(up_zipfile)
        if down_thread:
          print("Downloading revision %d..." % down_rev)
          down_progress_event.set()  # Display progress of download.
          down_thread.join()  # Wait for older revision to finish downloading.
          pivot = down_pivot
          zipfile = down_zipfile
    except SystemExit:
      # The user asked to quit (predicate raised SystemExit): delete whatever
      # background downloads were actually started, then exit cleanly.
      print("Cleaning up...")
      for f in [down_zipfile, up_zipfile]:
        if f is None:
          continue  # No background fetch was started on this side.
        try:
          os.unlink(f)
        except OSError:
          pass
      sys.exit(0)

    rev = revlist[pivot]

  return (revlist[good], revlist[bad])
[email protected]60ac66e32011-07-18 16:08:25432
433
def GetWebKitRevisionForChromiumRevision(rev):
  """Returns the webkit revision that was in chromium's DEPS file at
  chromium revision |rev|.

  Raises an Exception when no webkit_revision entry can be found.
  """
  # . doesn't match newlines without re.DOTALL, so this is safe.
  pattern = re.compile(r'webkit_revision.:\D*(\d+)')
  deps_handle = urllib.urlopen(DEPS_FILE % rev)
  match = pattern.search(deps_handle.read())
  deps_handle.close()
  if not match:
    raise Exception('Could not get webkit revision for cr rev %d' % rev)
  return int(match.group(1))
446
447
def main():
  """Parses options, interactively fills in missing revision bounds, runs the
  bisection, and prints the resulting changelog URLs.

  Returns a non-zero exit status on bad command-line usage.
  """
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'win', 'linux', 'linux64']
  # linux-chromiumos lacks a continuous archive http://crbug.com/78158
  parser.add_option('-a', '--archive',
                    choices = choices,
                    help = 'The buildbot archive to bisect [%s].' %
                           '|'.join(choices))
  parser.add_option('-b', '--bad', type = 'int',
                    help = 'The bad revision to bisect to.')
  parser.add_option('-g', '--good', type = 'int',
                    help = 'The last known good revision to bisect from.')
  parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
                    help = 'Profile to use; this will not reset every run. ' +
                           'Defaults to a clean profile.', default = 'profile')
  (opts, args) = parser.parse_args()

  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  if opts.bad and opts.good and (opts.good > opts.bad):
    print ('The good revision (%d) must precede the bad revision (%d).\n' %
           (opts.good, opts.bad))
    parser.print_help()
    return 1

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(opts.archive, 0, 0)

  # Pick a starting point, try to get HEAD for this.
  if opts.bad:
    bad_rev = opts.bad
  else:
    bad_rev = 0
    try:
      # Location of the latest build revision number
      nh = urllib.urlopen(context.GetLastChangeURL())
      latest = int(nh.read())
      nh.close()
      # An empty response at the prompt means "use HEAD".
      bad_rev = raw_input('Bad revision [HEAD:%d]: ' % latest)
      if (bad_rev == ''):
        bad_rev = latest
      bad_rev = int(bad_rev)
    except Exception, e:
      # Network failure or non-numeric input: fall back to asking directly.
      print('Could not determine latest revision. This could be bad...')
      bad_rev = int(raw_input('Bad revision: '))

  # Find out when we were good.
  if opts.good:
    good_rev = opts.good
  else:
    good_rev = 0
    try:
      good_rev = int(raw_input('Last known good [0]: '))
    except Exception, e:
      # Any invalid input keeps the default of revision 0.
      pass

  (last_known_good_rev, first_known_bad_rev) = Bisect(
      opts.archive, good_rev, bad_rev, args, opts.profile)

  # Get corresponding webkit revisions.
  try:
    last_known_good_webkit_rev = GetWebKitRevisionForChromiumRevision(
        last_known_good_rev)
    first_known_bad_webkit_rev = GetWebKitRevisionForChromiumRevision(
        first_known_bad_rev)
  except Exception, e:
    # Silently ignore the failure.
    last_known_good_webkit_rev, first_known_bad_webkit_rev = 0, 0

  # We're done. Let the user know the results in an official manner.
  print('You are probably looking for build %d.' % first_known_bad_rev)
  if last_known_good_webkit_rev != first_known_bad_webkit_rev:
    print 'WEBKIT CHANGELOG URL:'
    print WEBKIT_CHANGELOG_URL % (first_known_bad_webkit_rev,
                                  last_known_good_webkit_rev)
  print 'CHANGELOG URL:'
  print CHANGELOG_URL % (last_known_good_rev, first_known_bad_rev)
  print 'Built at revision:'
  print BUILD_VIEWVC_URL % first_known_bad_rev
[email protected]67e0bc62009-09-03 22:06:09536
if __name__ == '__main__':
  # Propagate main()'s return value as the process exit status.
  sys.exit(main())