Adding a util function for setting a "do not backup" bit
[chromium-blink-merge.git] / tools / bisect-builds.py
blob 7e07e3d03761dac31b0fdefa4cc611cd46f12c8d
1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """Snapshot Build Bisect Tool
8 This script bisects a snapshot archive using binary search. It starts at
9 a bad revision (it will try to guess HEAD) and asks for a last known-good
10 revision. It will then binary search across this revision range by downloading,
11 unzipping, and opening Chromium for you. After testing the specific revision,
12 it will ask you whether it is good or bad before continuing the search.
13 """
15 # The base URL for stored build archives.
16 CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
17 '/chromium-browser-snapshots')
18 WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
19 '/chromium-webkit-snapshots')
20 ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
21 '/chromium-browser-asan')
23 # GS bucket name.
24 GS_BUCKET_NAME = 'chrome-unsigned/desktop-W15K3Y'
26 # Base URL for downloading official builds.
27 GOOGLE_APIS_URL = 'commondatastorage.googleapis.com'
29 # The base URL for official builds.
30 OFFICIAL_BASE_URL = 'http://%s/%s' % (GOOGLE_APIS_URL, GS_BUCKET_NAME)
32 # URL template for viewing changelogs between revisions.
33 CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/src/+log/%s..%s')
35 # URL to convert SVN revision to git hash.
36 CRREV_URL = ('https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/')
38 # URL template for viewing changelogs between official versions.
39 OFFICIAL_CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/'
40 'src/+log/%s..%s?pretty=full')
42 # DEPS file URL.
43 DEPS_FILE_OLD = ('http://src.chromium.org/viewvc/chrome/trunk/src/'
44 'DEPS?revision=%d')
45 DEPS_FILE_NEW = ('https://chromium.googlesource.com/chromium/src/+/%s/DEPS')
47 # Blink changelogs URL.
48 BLINK_CHANGELOG_URL = ('http://build.chromium.org'
49 '/f/chromium/perf/dashboard/ui/changelog_blink.html'
50 '?url=/trunk&range=%d%%3A%d')
52 DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
53 'known good), but no later than %s (first known bad).')
54 DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
55 'known bad), but no later than %s (first known good).')
57 CHROMIUM_GITHASH_TO_SVN_URL = (
58 'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
60 BLINK_GITHASH_TO_SVN_URL = (
61 'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')
63 GITHASH_TO_SVN_URL = {
64 'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
65 'blink': BLINK_GITHASH_TO_SVN_URL,
68 # Search pattern to be matched in the JSON output from
69 # CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
70 CHROMIUM_SEARCH_PATTERN_OLD = (
71 r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
72 CHROMIUM_SEARCH_PATTERN = (
73 r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')
75 # Search pattern to be matched in the json output from
76 # BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
77 BLINK_SEARCH_PATTERN = (
78 r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')
80 SEARCH_PATTERN = {
81 'chromium': CHROMIUM_SEARCH_PATTERN,
82 'blink': BLINK_SEARCH_PATTERN,
85 CREDENTIAL_ERROR_MESSAGE = ('You are attempting to access protected data with '
86 'no configured credentials')
88 ###############################################################################
90 import httplib
91 import json
92 import optparse
93 import os
94 import re
95 import shlex
96 import shutil
97 import subprocess
98 import sys
99 import tempfile
100 import threading
101 import urllib
102 from distutils.version import LooseVersion
103 from xml.etree import ElementTree
104 import zipfile
class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""

  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_asan, use_local_cache, flash_path = None,
               pdf_path = None):
    """Initializes the context.

    Args:
      base_url: Base URL of the build archive (one of the *_BASE_URL
          constants at the top of this file).
      platform: Value of the '-a/--archive' option, e.g. 'linux64' or 'mac'.
      good_revision: Known-good revision (or version, for official builds).
      bad_revision: Known-bad revision (or version, for official builds).
      is_official: Whether official release builds are being bisected.
      is_asan: Whether ASAN builds are being bisected.
      use_local_cache: Whether to cache the revision list in a local file.
      flash_path: Optional path to a Flash plugin to load.
      pdf_path: Optional path to a PDF plugin to load.
    """
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_asan = is_asan
    self.build_type = 'release'
    self.flash_path = flash_path
    # Dictionary which stores svn revision number as key and it's
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    self.pdf_path = pdf_path
    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Whether to cache and use the list of known revisions in a local file to
    # speed up the initialization of the script at the next run.
    self.use_local_cache = use_local_cache

    # Locate the local checkout to speed up the script by using locally stored
    # metadata.
    abs_file_path = os.path.abspath(os.path.realpath(__file__))
    local_src_path = os.path.join(os.path.dirname(abs_file_path), '..')
    if abs_file_path.endswith(os.path.join('tools', 'bisect-builds.py')) and\
        os.path.exists(os.path.join(local_src_path, '.git')):
      self.local_src_path = os.path.normpath(local_src_path)
    else:
      self.local_src_path = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
      self._binary_name = 'chrome'
    elif self.platform in ('mac', 'mac64'):
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform in ('win', 'win64'):
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    # Official builds live in a differently-structured bucket; override the
    # listing dir / archive names per platform.
    if is_official:
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32/'
        self.archive_name = 'chrome-precise32.zip'
        self._archive_extract_dir = 'chrome-precise32'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64/'
        self.archive_name = 'chrome-precise64.zip'
        self._archive_extract_dir = 'chrome-precise64'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'mac64':
        self._listing_platform_dir = 'mac64/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        self._listing_platform_dir = 'win/'
        self.archive_name = 'chrome-win.zip'
        self._archive_extract_dir = 'chrome-win'
      elif self.platform == 'win64':
        self._listing_platform_dir = 'win64/'
        self.archive_name = 'chrome-win64.zip'
        self._archive_extract_dir = 'chrome-win64'
    else:
      if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
        elif self.platform == 'chromeos':
          self._listing_platform_dir = 'Linux_ChromiumOS_Full/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetASANPlatformDir(self):
    """ASAN builds are in directories like "linux-release", or have filenames
    like "asan-win32-release-277079.zip". This aligns to our platform names
    except in the case of Windows where they use "win32" instead of "win"."""
    if self.platform == 'win':
      return 'win32'
    else:
      return self.platform

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    if self.is_asan:
      # ASAN listings are flat (no delimiter), keyed by a filename prefix.
      prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
      return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
    else:
      return (self.base_url + '/?delimiter=/&prefix=' +
              self._listing_platform_dir + marker_param)

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_asan:
      return '%s/%s-%s/%s-%d.zip' % (
          ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
          self.GetASANBaseName(), revision)
    if self.is_official:
      return '%s/%s/%s%s' % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      # Revisions recorded by git hash are stored under the hash, not the
      # numeric revision; translate using the mapping built in _FetchAndParse.
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetASANBaseName(self):
    """Returns the base name of the ASAN zip file."""
    if 'linux' in self.platform:
      return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
                                        self.build_type)
    else:
      return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)

  def GetLaunchPath(self, revision):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    if self.is_asan:
      extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
    else:
      extract_dir = self._archive_extract_dir
    return os.path.join(extract_dir, self._binary_name)

  def ParseDirectoryIndex(self, last_known_rev):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _GetMarkerForRev(revision):
      # Builds the listing marker= key that sorts immediately at |revision|,
      # so paging can resume from there.
      if self.is_asan:
        return '%s-%s/%s-%d.zip' % (
            self.GetASANPlatformDir(), self.build_type,
            self.GetASANBaseName(), revision)
      return '%s%d' % (self._listing_platform_dir, revision)

    def _FetchAndParse(url):
      """Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
      next-marker is not None, then the listing is a partial listing and another
      fetch should be performed with next-marker being the marker= GET
      parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception('Could not locate end namespace for directory index')
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      revisions = []
      githash_svn_dict = {}
      if self.is_asan:
        asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
        # Non ASAN builds are in a <revision> directory. The ASAN builds are
        # flat
        all_prefixes = document.findall(namespace + 'Contents/' +
                                        namespace + 'Key')
        for prefix in all_prefixes:
          m = asan_regex.match(prefix.text)
          if m:
            try:
              revisions.append(int(m.group(1)))
            except ValueError:
              pass
      else:
        all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                        namespace + 'Prefix')
        # The <Prefix> nodes have content of the form of
        # |_listing_platform_dir/revision/|. Strip off the platform dir and the
        # trailing slash to just have a number.
        for prefix in all_prefixes:
          revnum = prefix.text[prefix_len:-1]
          try:
            if not revnum.isdigit():
              # During the svn-git migration, some items were stored by hash.
              # These items may appear anywhere in the list of items.
              # If |last_known_rev| is set, assume that the full list has been
              # retrieved before (including the hashes), so we can safely skip
              # all git hashes and focus on the numeric revision numbers.
              if last_known_rev:
                revnum = None
              else:
                git_hash = revnum
                revnum = self.GetSVNRevisionFromGitHash(git_hash)
                githash_svn_dict[revnum] = git_hash
            if revnum is not None:
              revnum = int(revnum)
              revisions.append(revnum)
          except ValueError:
            pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    if last_known_rev:
      revisions = []
      # Optimization: Start paging at the last known revision (local cache).
      next_marker = _GetMarkerForRev(last_known_rev)
      # Optimization: Stop paging at the last known revision (remote).
      last_change_rev = GetChromiumRevision(self, self.GetLastChangeURL())
      if last_known_rev == last_change_rev:
        return []
    else:
      (revisions, next_marker, new_dict) = _FetchAndParse(self.GetListingURL())
      self.githash_svn_dict.update(new_dict)
      last_change_rev = None

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      sys.stdout.write('\rFetching revisions at marker %s' % next_marker)
      sys.stdout.flush()

      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
      if last_change_rev and last_change_rev in new_revisions:
        break
    sys.stdout.write('\r')
    sys.stdout.flush()
    return revisions

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    """Resolves |git_sha1| to an svn revision by querying the gitiles JSON
    endpoint for |depot| and scanning the commit message footers.
    Raises ValueError on any failure."""
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    response = urllib.urlopen(json_url)
    if response.getcode() == 200:
      try:
        # [4:] strips the ")]}'"-style XSSI protection prefix gitiles
        # prepends to JSON responses.
        data = json.loads(response.read()[4:])
      except ValueError:
        print 'ValueError for JSON URL: %s' % json_url
        raise ValueError
    else:
      raise ValueError
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      # The revision footer is expected on the last non-blank message line.
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
      else:
        if depot == 'chromium':
          # Fall back to the pre-migration git-svn-id footer format.
          result = re.search(CHROMIUM_SEARCH_PATTERN_OLD,
                             message[len(message)-1])
          if result:
            return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    raise ValueError

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    """Resolves |git_sha1| to an svn revision using the local git checkout
    (git svn find-rev, falling back to parsing the commit subject).
    Raises ValueError on failure."""
    def _RunGit(command, path):
      # Runs `git <command>` in |path| and returns (stdout, returncode).
      command = ['git'] + command
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, cwd=path)
      (output, _) = proc.communicate()
      return (output, proc.returncode)

    path = self.local_src_path
    if depot == 'blink':
      path = os.path.join(self.local_src_path, 'third_party', 'WebKit')
    revision = None
    try:
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        revision = git_output.strip('\n')
    except ValueError:
      pass
    if not revision:
      # Deps-roll commits carry the revision in their subject line.
      command = ['log', '-n1', '--format=%s', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        revision = re.match('SVN changes up to revision ([0-9]+)', git_output)
        revision = revision.group(1) if revision else None
    if revision:
      return revision
    raise ValueError

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    """Resolves |git_sha1| to an svn revision, preferring the local checkout
    when one was detected in __init__."""
    if not self.local_src_path:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""

    cache = {}
    # The cache is stored in the same directory as bisect-builds.py
    cache_filename = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        '.bisect-builds-cache.json')
    cache_dict_key = self.GetListingURL()

    def _LoadBucketFromCache():
      # Returns ([revisions], {githash: svn}) from the local cache file, or
      # empty containers when caching is off or the file is missing/corrupt.
      if self.use_local_cache:
        try:
          with open(cache_filename) as cache_file:
            for (key, value) in json.load(cache_file).items():
              cache[key] = value
            revisions = cache.get(cache_dict_key, [])
            githash_svn_dict = cache.get('githash_svn_dict', {})
            if revisions:
              print 'Loaded revisions %d-%d from %s' % (revisions[0],
                  revisions[-1], cache_filename)
            return (revisions, githash_svn_dict)
        except (EnvironmentError, ValueError):
          pass
      return ([], {})

    def _SaveBucketToCache():
      """Save the list of revisions and the git-svn mappings to a file.
      The list of revisions is assumed to be sorted."""
      if self.use_local_cache:
        cache[cache_dict_key] = revlist_all
        cache['githash_svn_dict'] = self.githash_svn_dict
        try:
          with open(cache_filename, 'w') as cache_file:
            json.dump(cache, cache_file)
          print 'Saved revisions %d-%d to %s' % (
              revlist_all[0], revlist_all[-1], cache_filename)
        except EnvironmentError:
          pass

    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)

    (revlist_all, self.githash_svn_dict) = _LoadBucketFromCache()
    last_known_rev = revlist_all[-1] if revlist_all else 0
    if last_known_rev < maxrev:
      # Cache is stale (or empty); fetch the missing tail from the server.
      revlist_all.extend(map(int, self.ParseDirectoryIndex(last_known_rev)))
      revlist_all = list(set(revlist_all))
      revlist_all.sort()
      _SaveBucketToCache()

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
    if self.base_url == WEBKIT_BASE_URL:
      revlist_all.sort()
      self.good_revision = FixChromiumRevForBlink(revlist,
                                                  revlist_all,
                                                  self,
                                                  self.good_revision)
      self.bad_revision = FixChromiumRevForBlink(revlist,
                                                 revlist_all,
                                                 self,
                                                 self.bad_revision)
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""

    def CheckDepotToolsInPath():
      # Returns the depot_tools directory found on $PATH, or None.
      delimiter = ';' if sys.platform.startswith('win') else ':'
      path_list = os.environ['PATH'].split(delimiter)
      for path in path_list:
        if path.rstrip(os.path.sep).endswith('depot_tools'):
          return path
      return None

    def RunGsutilCommand(args):
      # Runs depot_tools' gsutil with |args|; exits with instructions on
      # missing depot_tools or credential errors, raises on other failures.
      gsutil_path = CheckDepotToolsInPath()
      if gsutil_path is None:
        print ('Follow the instructions in this document '
               'http://dev.chromium.org/developers/how-tos/install-depot-tools'
               ' to install depot_tools and then try again.')
        sys.exit(1)
      gsutil_path = os.path.join(gsutil_path, 'third_party', 'gsutil', 'gsutil')
      gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                env=None)
      stdout, stderr = gsutil.communicate()
      if gsutil.returncode:
        if (re.findall(r'status[ |=]40[1|3]', stderr) or
            stderr.startswith(CREDENTIAL_ERROR_MESSAGE)):
          print ('Follow these steps to configure your credentials and try'
                 ' running the bisect-builds.py again.:\n'
                 ' 1. Run "python %s config" and follow its instructions.\n'
                 ' 2. If you have a @google.com account, use that account.\n'
                 ' 3. For the project-id, just enter 0.' % gsutil_path)
          sys.exit(1)
        else:
          raise Exception('Error running the gsutil command: %s' % stderr)
      return stdout

    def GsutilList(bucket):
      # Lists the top-level entries of gs://bucket/, stripped of the prefix.
      query = 'gs://%s/' % bucket
      stdout = RunGsutilCommand(['ls', query])
      return [url[len(query):].strip('/') for url in stdout.splitlines()]

    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    build_numbers = GsutilList(GS_BUCKET_NAME)
    revision_re = re.compile(r'(\d\d\.\d\.\d{4}\.\d+)')
    build_numbers = filter(lambda b: revision_re.search(b), build_numbers)
    final_list = []
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    connection = httplib.HTTPConnection(GOOGLE_APIS_URL)
    for build_number in sorted(parsed_build_numbers):
      if build_number > maxrev:
        break
      if build_number < minrev:
        continue
      # Issue a HEAD request to keep only versions that actually have an
      # archive for this platform.
      path = ('/' + GS_BUCKET_NAME + '/' + str(build_number) + '/' +
              self._listing_platform_dir + self.archive_name)
      connection.request('HEAD', path)
      response = connection.getresponse()
      if response.status == 200:
        final_list.append(str(build_number))
      response.read()
    connection.close()
    return final_list
def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| (a ZIP archive) into |directory|.

  Recreates the archive's directory structure and restores the Unix
  permission bits stored in each entry (when present).  The process's
  working directory is temporarily changed to |directory| while extracting
  and is restored afterwards, even if extraction fails.
  """
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  try:
    # Extract files.
    for info in zf.infolist():
      name = info.filename
      if name.endswith('/'):  # dir
        if not os.path.isdir(name):
          os.makedirs(name)
      else:  # file
        # Entries at the archive root have an empty dirname; only create
        # parent directories when there actually are any.
        parent_dir = os.path.dirname(name)
        if parent_dir and not os.path.isdir(parent_dir):
          os.makedirs(parent_dir)
        out = open(name, 'wb')
        try:
          out.write(zf.read(name))
        finally:
          out.close()
        # Set permissions. Permission info in external_attr is shifted 16 bits.
        # Skip entries with no Unix attributes (e.g. zips made on Windows) so
        # we don't chmod the file to 0.
        mode = info.external_attr >> 16
        if mode:
          os.chmod(name, mode)
  finally:
    zf.close()
    os.chdir(cwd)
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads the build archive for revision |rev| to |filename|.

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    # urlretrieve calls this after each chunk; raising from here is the only
    # way to abort the transfer once |quit_event| is set.
    if quit_event and quit_event.isSet():
      raise RuntimeError('Aborting download of revision %s' % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = 'Received %d bytes' % size
      else:
        size = min(totalsize, size)
        progress = 'Received %d of %d bytes, %.2f%%' % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write('\r' + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError:
    # Deliberate abort requested via |quit_event|; any partial file is left
    # behind for the caller (DownloadJob.Stop) to unlink.
    pass
def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test.

  Unzips |zip_file| into a fresh temp directory, launches |command| (with
  %p/%a/%s placeholders expanded) |num_runs| times, and returns one
  (returncode, stdout, stderr) tuple: the first failing run if any run
  failed, otherwise the first run's result.
  """
  print 'Trying revision %s...' % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zip_file, tempdir)

  # Hack: Chrome OS archives are missing icudtl.dat; try to copy it from
  # the local directory.
  if context.platform == 'chromeos':
    icudtl_path = 'third_party/icu/source/data/in/icudtl.dat'
    if not os.access(icudtl_path, os.F_OK):
      print 'Couldn\'t find: ' + icudtl_path
      sys.exit()
    os.system('cp %s %s/chrome-linux/' % (icudtl_path, tempdir))

  # NOTE(review): if anything below raises, the cwd is not restored to |cwd|
  # and |tempdir| is not removed — confirm callers tolerate this.
  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path or context.pdf_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently needs not
    # be correct. Instead of requiring the user of the script to figure out and
    # pass the correct version we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
  if context.pdf_path:
    shutil.copy(context.pdf_path,
                os.path.dirname(context.GetLaunchPath(revision)))
    testargs.append('--enable-print-preview')

  # Expand placeholders in |command|:
  #   %a -> the test args as separate argv tokens
  #   %p -> absolute path to the extracted executable
  #   %s -> the test args joined into one space-separated string
  runcommand = []
  for token in shlex.split(command):
    if token == '%a':
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
          replace('%s', ' '.join(testargs)))

  results = []
  for _ in range(num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))
  os.chdir(cwd)
  # Best-effort cleanup; on Windows the binary may still hold files open.
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    pass

  # Prefer reporting a failing run, if there was one.
  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]
# The arguments official_builds, status, stdout and stderr are unused.
# They are present here because this function is passed to Bisect which then
# calls it with 5 arguments.
# pylint: disable=W0613
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Prompts the user to classify build |rev|.

  Returns one of 'g', 'b', 'r' or 'u'; raises SystemExit if the user
  answers 'q'. Re-prompts on any other input."""
  prompt = ('Revision %s is '
            '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' % str(rev))
  while True:
    response = raw_input(prompt)
    if response == 'q':
      raise SystemExit()
    if response in ('g', 'b', 'r', 'u'):
      return response
def IsGoodASANBuild(rev, official_builds, status, stdout, stderr):
  """Determine if an ASAN build |rev| is good or bad.

  Echoes stderr to the console while scanning it for the AddressSanitizer
  error banner. If any banner is found the build is reported bad ('b')
  without consulting the user; otherwise falls back to AskIsGoodBuild."""
  if stderr:
    asan_error_count = 0
    for line in stderr.splitlines():
      print(line)
      if 'ERROR: AddressSanitizer:' in line:
        asan_error_count += 1
    if asan_error_count:
      print('Revision %d determined to be bad.' % rev)
      return 'b'
  return AskIsGoodBuild(rev, official_builds, status, stdout, stderr)
class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision.

  The actual transfer runs on a background thread (FetchRevision); Stop()
  aborts it and removes the partial file, while WaitFor() blocks until it
  completes, displaying progress."""

  def __init__(self, context, name, rev, zip_file):
    super(DownloadJob, self).__init__()
    self.context = context
    self.name = name
    self.rev = rev
    self.zip_file = zip_file
    # Events polled by FetchRevision: one to abort, one to show progress.
    self.quit_event = threading.Event()
    self.progress_event = threading.Event()
    self.thread = None

  def Start(self):
    """Starts the download on a background thread."""
    self.thread = threading.Thread(
        target=FetchRevision,
        name=self.name,
        args=(self.context, self.rev, self.zip_file, self.quit_event,
              self.progress_event))
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    assert self.thread, 'DownloadJob must be started before Stop is called.'
    self.quit_event.set()
    self.thread.join()
    os.unlink(self.zip_file)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    assert self.thread, 'DownloadJob must be started before WaitFor is called.'
    print('Downloading revision %s...' % str(self.rev))
    self.progress_event.set()  # Display progress of download.
    self.thread.join()
781 def Bisect(context,
782 num_runs=1,
783 command='%p %a',
784 try_args=(),
785 profile=None,
786 interactive=True,
787 evaluate=AskIsGoodBuild):
788 """Given known good and known bad revisions, run a binary search on all
789 archived revisions to determine the last known good revision.
791 @param context PathContext object initialized with user provided parameters.
792 @param num_runs Number of times to run each build for asking good/bad.
793 @param try_args A tuple of arguments to pass to the test application.
794 @param profile The name of the user profile to run with.
795 @param interactive If it is false, use command exit code for good or bad
796 judgment of the argument build.
797 @param evaluate A function which returns 'g' if the argument build is good,
798 'b' if it's bad or 'u' if unknown.
800 Threading is used to fetch Chromium revisions in the background, speeding up
801 the user's experience. For example, suppose the bounds of the search are
802 good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
803 whether revision 50 is good or bad, the next revision to check will be either
804 25 or 75. So, while revision 50 is being checked, the script will download
805 revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
806 known:
808 - If rev 50 is good, the download of rev 25 is cancelled, and the next test
809 is run on rev 75.
811 - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
812 is run on rev 25.
815 if not profile:
816 profile = 'profile'
818 good_rev = context.good_revision
819 bad_rev = context.bad_revision
820 cwd = os.getcwd()
822 print 'Downloading list of known revisions...',
823 if not context.use_local_cache and not context.is_official:
824 print '(use --use-local-cache to cache and re-use the list of revisions)'
825 else:
826 print
827 _GetDownloadPath = lambda rev: os.path.join(cwd,
828 '%s-%s' % (str(rev), context.archive_name))
829 if context.is_official:
830 revlist = context.GetOfficialBuildsList()
831 else:
832 revlist = context.GetRevList()
834 # Get a list of revisions to bisect across.
835 if len(revlist) < 2: # Don't have enough builds to bisect.
836 msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
837 raise RuntimeError(msg)
839 # Figure out our bookends and first pivot point; fetch the pivot revision.
840 minrev = 0
841 maxrev = len(revlist) - 1
842 pivot = maxrev / 2
843 rev = revlist[pivot]
844 zip_file = _GetDownloadPath(rev)
845 fetch = DownloadJob(context, 'initial_fetch', rev, zip_file)
846 fetch.Start()
847 fetch.WaitFor()
849 # Binary search time!
850 while fetch and fetch.zip_file and maxrev - minrev > 1:
851 if bad_rev < good_rev:
852 min_str, max_str = 'bad', 'good'
853 else:
854 min_str, max_str = 'good', 'bad'
855 print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
856 revlist[maxrev], max_str)
858 # Pre-fetch next two possible pivots
859 # - down_pivot is the next revision to check if the current revision turns
860 # out to be bad.
861 # - up_pivot is the next revision to check if the current revision turns
862 # out to be good.
863 down_pivot = int((pivot - minrev) / 2) + minrev
864 down_fetch = None
865 if down_pivot != pivot and down_pivot != minrev:
866 down_rev = revlist[down_pivot]
867 down_fetch = DownloadJob(context, 'down_fetch', down_rev,
868 _GetDownloadPath(down_rev))
869 down_fetch.Start()
871 up_pivot = int((maxrev - pivot) / 2) + pivot
872 up_fetch = None
873 if up_pivot != pivot and up_pivot != maxrev:
874 up_rev = revlist[up_pivot]
875 up_fetch = DownloadJob(context, 'up_fetch', up_rev,
876 _GetDownloadPath(up_rev))
877 up_fetch.Start()
879 # Run test on the pivot revision.
880 status = None
881 stdout = None
882 stderr = None
883 try:
884 (status, stdout, stderr) = RunRevision(context,
885 rev,
886 fetch.zip_file,
887 profile,
888 num_runs,
889 command,
890 try_args)
891 except Exception, e:
892 print >> sys.stderr, e
894 # Call the evaluate function to see if the current revision is good or bad.
895 # On that basis, kill one of the background downloads and complete the
896 # other, as described in the comments above.
897 try:
898 if not interactive:
899 if status:
900 answer = 'b'
901 print 'Bad revision: %s' % rev
902 else:
903 answer = 'g'
904 print 'Good revision: %s' % rev
905 else:
906 answer = evaluate(rev, context.is_official, status, stdout, stderr)
907 if ((answer == 'g' and good_rev < bad_rev)
908 or (answer == 'b' and bad_rev < good_rev)):
909 fetch.Stop()
910 minrev = pivot
911 if down_fetch:
912 down_fetch.Stop() # Kill the download of the older revision.
913 fetch = None
914 if up_fetch:
915 up_fetch.WaitFor()
916 pivot = up_pivot
917 fetch = up_fetch
918 elif ((answer == 'b' and good_rev < bad_rev)
919 or (answer == 'g' and bad_rev < good_rev)):
920 fetch.Stop()
921 maxrev = pivot
922 if up_fetch:
923 up_fetch.Stop() # Kill the download of the newer revision.
924 fetch = None
925 if down_fetch:
926 down_fetch.WaitFor()
927 pivot = down_pivot
928 fetch = down_fetch
929 elif answer == 'r':
930 pass # Retry requires no changes.
931 elif answer == 'u':
932 # Nuke the revision from the revlist and choose a new pivot.
933 fetch.Stop()
934 revlist.pop(pivot)
935 maxrev -= 1 # Assumes maxrev >= pivot.
937 if maxrev - minrev > 1:
938 # Alternate between using down_pivot or up_pivot for the new pivot
939 # point, without affecting the range. Do this instead of setting the
940 # pivot to the midpoint of the new range because adjacent revisions
941 # are likely affected by the same issue that caused the (u)nknown
942 # response.
943 if up_fetch and down_fetch:
944 fetch = [up_fetch, down_fetch][len(revlist) % 2]
945 elif up_fetch:
946 fetch = up_fetch
947 else:
948 fetch = down_fetch
949 fetch.WaitFor()
950 if fetch == up_fetch:
951 pivot = up_pivot - 1 # Subtracts 1 because revlist was resized.
952 else:
953 pivot = down_pivot
954 zip_file = fetch.zip_file
956 if down_fetch and fetch != down_fetch:
957 down_fetch.Stop()
958 if up_fetch and fetch != up_fetch:
959 up_fetch.Stop()
960 else:
961 assert False, 'Unexpected return value from evaluate(): ' + answer
962 except SystemExit:
963 print 'Cleaning up...'
964 for f in [_GetDownloadPath(revlist[down_pivot]),
965 _GetDownloadPath(revlist[up_pivot])]:
966 try:
967 os.unlink(f)
968 except OSError:
969 pass
970 sys.exit(0)
972 rev = revlist[pivot]
974 return (revlist[minrev], revlist[maxrev], context)
def GetBlinkDEPSRevisionForChromiumRevision(self, rev):
  """Returns the blink revision that was in REVISIONS file at
  chromium revision |rev|.

  Tries the legacy viewvc DEPS URL first (numeric webkit_revision); on a
  non-200 response falls back to the gitiles DEPS URL, whose webkit_revision
  is a git sha that must be mapped back to an SVN revision.

  Raises:
    Exception: if neither DEPS source yields a Blink revision.
  """

  def _GetBlinkRev(url, blink_re):
    # Returns the first capture group of |blink_re| in the response body,
    # or None when the pattern is absent. Always closes |url|.
    m = blink_re.search(url.read())
    url.close()
    if m:
      return m.group(1)
    return None

  url = urllib.urlopen(DEPS_FILE_OLD % rev)
  if url.getcode() == 200:
    # . doesn't match newlines without re.DOTALL, so this is safe.
    blink_rev = _GetBlinkRev(url, re.compile(r'webkit_revision\D*(\d+)'))
    # Previously a missing match fed None into int() and raised TypeError;
    # fall through to the explicit Exception below instead.
    if blink_rev is not None:
      return int(blink_rev)
  else:
    url = urllib.urlopen(DEPS_FILE_NEW % GetGitHashFromSVNRevision(rev))
    if url.getcode() == 200:
      blink_git_sha = _GetBlinkRev(
          url, re.compile(r'webkit_revision\D*\d+;\D*\d+;(\w+)'))
      # Likewise, don't pass a None sha on to the git-hash lookup.
      if blink_git_sha is not None:
        return self.GetSVNRevisionFromGitHash(blink_git_sha, 'blink')
  raise Exception('Could not get Blink revision for Chromium rev %d' % rev)
1001 def GetBlinkRevisionForChromiumRevision(context, rev):
1002 """Returns the blink revision that was in REVISIONS file at
1003 chromium revision |rev|."""
1004 def _IsRevisionNumber(revision):
1005 if isinstance(revision, int):
1006 return True
1007 else:
1008 return revision.isdigit()
1009 if str(rev) in context.githash_svn_dict:
1010 rev = context.githash_svn_dict[str(rev)]
1011 file_url = '%s/%s%s/REVISIONS' % (context.base_url,
1012 context._listing_platform_dir, rev)
1013 url = urllib.urlopen(file_url)
1014 if url.getcode() == 200:
1015 try:
1016 data = json.loads(url.read())
1017 except ValueError:
1018 print 'ValueError for JSON URL: %s' % file_url
1019 raise ValueError
1020 else:
1021 raise ValueError
1022 url.close()
1023 if 'webkit_revision' in data:
1024 blink_rev = data['webkit_revision']
1025 if not _IsRevisionNumber(blink_rev):
1026 blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink'))
1027 return blink_rev
1028 else:
1029 raise Exception('Could not get blink revision for cr rev %d' % rev)
def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for blink bisect, DEPS and REVISIONS file might not match since
  blink snapshots point to tip of tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions.

  Walks |rev| backwards through |revisions| until its snapshot Blink
  revision no longer exceeds the Blink revision pinned in DEPS, collecting
  the visited revisions into |revisions_final| (kept sorted).
  """
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(self, rev)

  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx > 0:
      rev = revisions[idx-1]
      if rev not in revisions_final:
        revisions_final.insert(0, rev)
    else:
      # |rev| is already the earliest known revision; previously this case
      # left |rev| unchanged and the loop spun forever.
      break

  revisions_final.sort()
  return rev
1051 def GetChromiumRevision(context, url):
1052 """Returns the chromium revision read from given URL."""
1053 try:
1054 # Location of the latest build revision number
1055 latest_revision = urllib.urlopen(url).read()
1056 if latest_revision.isdigit():
1057 return int(latest_revision)
1058 return context.GetSVNRevisionFromGitHash(latest_revision)
1059 except Exception:
1060 print 'Could not determine latest revision. This could be bad...'
1061 return 999999999
def GetGitHashFromSVNRevision(svn_revision):
  """Resolves an SVN revision to its git hash via the crrev redirect API.

  Returns the git sha string, or None when the lookup fails or the
  response carries no 'git_sha' field.
  """
  response = urllib.urlopen(CRREV_URL + str(svn_revision))
  if response.getcode() != 200:
    return None
  payload = json.loads(response.read())
  return payload.get('git_sha')
1071 def PrintChangeLog(min_chromium_rev, max_chromium_rev):
1072 """Prints the changelog URL."""
1074 print (' ' + CHANGELOG_URL % (GetGitHashFromSVNRevision(min_chromium_rev),
1075 GetGitHashFromSVNRevision(max_chromium_rev)))
def main():
  """Command-line entry point: parses options, runs the bisect, and prints
  the resulting revision range and changelog URLs.

  Returns the process exit code: 1 on invalid arguments, otherwise the
  implicit None (success).
  """
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           ' Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           ' SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           ' Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           ' for earlier revs.\n'
           ' Chrome\'s about: build number and omahaproxy branch_revision\n'
           ' are incorrect, they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'mac64', 'win', 'win64', 'linux', 'linux64', 'linux-arm',
             'chromeos']
  parser.add_option('-a', '--archive',
                    choices=choices,
                    help='The buildbot archive to bisect [%s].' %
                    '|'.join(choices))
  parser.add_option('-o',
                    action='store_true',
                    dest='official_builds',
                    help='Bisect across official Chrome builds (internal '
                    'only) instead of Chromium archives.')
  parser.add_option('-b', '--bad',
                    type='str',
                    help='A bad revision to start bisection. '
                    'May be earlier or later than the good revision. '
                    'Default is HEAD.')
  parser.add_option('-f', '--flash_path',
                    type='str',
                    help='Absolute path to a recent Adobe Pepper Flash '
                    'binary to be used in this bisection (e.g. '
                    'on Windows C:\...\pepflashplayer.dll and on Linux '
                    '/opt/google/chrome/PepperFlash/'
                    'libpepflashplayer.so).')
  parser.add_option('-d', '--pdf_path',
                    type='str',
                    help='Absolute path to a recent PDF plugin '
                    'binary to be used in this bisection (e.g. '
                    'on Windows C:\...\pdf.dll and on Linux '
                    '/opt/google/chrome/libpdf.so). Option also enables '
                    'print preview.')
  parser.add_option('-g', '--good',
                    type='str',
                    help='A good revision to start bisection. ' +
                    'May be earlier or later than the bad revision. ' +
                    'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir',
                    type='str',
                    default='profile',
                    help='Profile to use; this will not reset every run. '
                    'Defaults to a clean profile.')
  parser.add_option('-t', '--times',
                    type='int',
                    default=1,
                    help='Number of times to run each build before asking '
                    'if it\'s good or bad. Temporary profiles are reused.')
  parser.add_option('-c', '--command',
                    type='str',
                    default='%p %a',
                    help='Command to execute. %p and %a refer to Chrome '
                    'executable and specified extra arguments '
                    'respectively. Use %s to specify all extra arguments '
                    'as one string. Defaults to "%p %a". Note that any '
                    'extra paths specified should be absolute.')
  parser.add_option('-l', '--blink',
                    action='store_true',
                    help='Use Blink bisect instead of Chromium. ')
  parser.add_option('', '--not-interactive',
                    action='store_true',
                    default=False,
                    help='Use command exit code to tell good/bad revision.')
  parser.add_option('--asan',
                    dest='asan',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect ASAN builds')
  parser.add_option('--use-local-cache',
                    dest='use_local_cache',
                    action='store_true',
                    default=False,
                    help='Use a local file in the current directory to cache '
                    'a list of known revisions to speed up the '
                    'initialization of this script.')

  (opts, args) = parser.parse_args()

  # --archive is the only strictly required option.
  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  # ASAN builds exist only for a subset of platforms and never as official
  # builds, so reject unsupported combinations up front.
  if opts.asan:
    supported_platforms = ['linux', 'mac', 'win']
    if opts.archive not in supported_platforms:
      print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
          '|'.join(supported_platforms))
      return 1
    if opts.official_builds:
      print 'Error: Do not yet support bisecting official ASAN builds.'
      return 1

  # Pick the snapshot bucket that matches the requested build flavor.
  if opts.asan:
    base_url = ASAN_BASE_URL
  elif opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, opts.good, opts.bad,
                        opts.official_builds, opts.asan, opts.use_local_cache,
                        opts.flash_path, opts.pdf_path)

  # Pick a starting point, try to get HEAD for this.
  if not opts.bad:
    # NOTE(review): this first assignment is immediately overwritten below
    # and appears to be a dead store.
    context.bad_revision = '999.0.0.0'
    context.bad_revision = GetChromiumRevision(
        context, context.GetLastChangeURL())

  # Find out when we were good.
  if not opts.good:
    context.good_revision = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    msg = 'Could not find Flash binary at %s' % opts.flash_path
    assert os.path.exists(opts.flash_path), msg

  if opts.pdf_path:
    msg = 'Could not find PDF binary at %s' % opts.pdf_path
    assert os.path.exists(opts.pdf_path), msg

  # Official versions compare as dotted version strings (LooseVersion);
  # snapshot builds compare as plain integer SVN revisions.
  if opts.official_builds:
    context.good_revision = LooseVersion(context.good_revision)
    context.bad_revision = LooseVersion(context.bad_revision)
  else:
    context.good_revision = int(context.good_revision)
    context.bad_revision = int(context.bad_revision)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  # ASAN bisects judge each build from its own report instead of asking.
  if opts.asan:
    evaluator = IsGoodASANBuild
  else:
    evaluator = AskIsGoodBuild

  # Save these revision numbers to compare when showing the changelog URL
  # after the bisect.
  good_rev = context.good_revision
  bad_rev = context.bad_revision

  (min_chromium_rev, max_chromium_rev, context) = Bisect(
      context, opts.times, opts.command, args, opts.profile,
      not opts.not_interactive, evaluator)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ('NOTE: There is a Blink roll in the range, '
             'you might also want to do a Blink bisect.')

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      PrintChangeLog(min_chromium_rev, max_chromium_rev)
# Propagate main()'s return value as the process exit status.
if __name__ == '__main__':
  sys.exit(main())