Handle ListAccount fetches and watching the GAIA cookies from within the GaiaCookieMa...
[chromium-blink-merge.git] / tools / bisect-builds.py
blobc2105b8b6ddc75d1ac7f5a53a78dfa652dd50ab4
1 #!/usr/bin/env python
2 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 """Snapshot Build Bisect Tool
8 This script bisects a snapshot archive using binary search. It starts at
9 a bad revision (it will try to guess HEAD) and asks for a last known-good
10 revision. It will then binary search across this revision range by downloading,
11 unzipping, and opening Chromium for you. After testing the specific revision,
12 it will ask you whether it is good or bad before continuing the search.
13 """
# The base URL for stored build archives.
CHROMIUM_BASE_URL = ('http://commondatastorage.googleapis.com'
                     '/chromium-browser-snapshots')
WEBKIT_BASE_URL = ('http://commondatastorage.googleapis.com'
                   '/chromium-webkit-snapshots')
ASAN_BASE_URL = ('http://commondatastorage.googleapis.com'
                 '/chromium-browser-asan')

# GS bucket name.
GS_BUCKET_NAME = 'chrome-unsigned/desktop-W15K3Y'

# Base URL for downloading official builds.
GOOGLE_APIS_URL = 'commondatastorage.googleapis.com'

# The base URL for official builds.
OFFICIAL_BASE_URL = 'http://%s/%s' % (GOOGLE_APIS_URL, GS_BUCKET_NAME)

# URL template for viewing changelogs between revisions.
CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/src/+log/%s..%s')

# URL to convert SVN revision to git hash.
CRREV_URL = ('https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/')

# URL template for viewing changelogs between official versions.
OFFICIAL_CHANGELOG_URL = ('https://chromium.googlesource.com/chromium/'
                          'src/+log/%s..%s?pretty=full')

# DEPS file URL.
DEPS_FILE_OLD = ('http://src.chromium.org/viewvc/chrome/trunk/src/'
                 'DEPS?revision=%d')
DEPS_FILE_NEW = ('https://chromium.googlesource.com/chromium/src/+/%s/DEPS')

# Blink changelogs URL.
BLINK_CHANGELOG_URL = ('http://build.chromium.org'
                       '/f/chromium/perf/dashboard/ui/changelog_blink.html'
                       '?url=/trunk&range=%d%%3A%d')

DONE_MESSAGE_GOOD_MIN = ('You are probably looking for a change made after %s ('
                         'known good), but no later than %s (first known bad).')
DONE_MESSAGE_GOOD_MAX = ('You are probably looking for a change made after %s ('
                         'known bad), but no later than %s (first known good).')

CHROMIUM_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/src/+/%s?format=json')

BLINK_GITHASH_TO_SVN_URL = (
    'https://chromium.googlesource.com/chromium/blink/+/%s?format=json')

# Maps a depot name to the gitiles URL template used to resolve a git hash
# to its SVN revision number.
GITHASH_TO_SVN_URL = {
    'chromium': CHROMIUM_GITHASH_TO_SVN_URL,
    'blink': BLINK_GITHASH_TO_SVN_URL,
}

# Search pattern to be matched in the JSON output from
# CHROMIUM_GITHASH_TO_SVN_URL to get the chromium revision (svn revision).
CHROMIUM_SEARCH_PATTERN_OLD = (
    r'.*git-svn-id: svn://svn.chromium.org/chrome/trunk/src@(\d+) ')
CHROMIUM_SEARCH_PATTERN = (
    r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')

# Search pattern to be matched in the json output from
# BLINK_GITHASH_TO_SVN_URL to get the blink revision (svn revision).
BLINK_SEARCH_PATTERN = (
    r'.*git-svn-id: svn://svn.chromium.org/blink/trunk@(\d+) ')

# Maps a depot name to the commit-message pattern that yields its revision.
SEARCH_PATTERN = {
    'chromium': CHROMIUM_SEARCH_PATTERN,
    'blink': BLINK_SEARCH_PATTERN,
}

CREDENTIAL_ERROR_MESSAGE = ('You are attempting to access protected data with '
                            'no configured credentials')
88 ###############################################################################
90 import httplib
91 import json
92 import optparse
93 import os
94 import re
95 import shlex
96 import shutil
97 import subprocess
98 import sys
99 import tempfile
100 import threading
101 import urllib
102 from distutils.version import LooseVersion
103 from xml.etree import ElementTree
104 import zipfile
class PathContext(object):
  """A PathContext is used to carry the information used to construct URLs and
  paths when dealing with the storage server and archives."""
  def __init__(self, base_url, platform, good_revision, bad_revision,
               is_official, is_asan, use_local_cache, flash_path = None,
               pdf_path = None):
    """Initializes the context.

    Args:
      base_url: Base URL of the snapshot bucket (e.g. CHROMIUM_BASE_URL).
      platform: Value of the '-a/--archive' option ('linux', 'mac', 'win', ...).
      good_revision: Known-good revision (number or version string).
      bad_revision: Known-bad revision (number or version string).
      is_official: True when bisecting official (versioned) builds.
      is_asan: True when bisecting ASAN builds.
      use_local_cache: Cache the fetched revision list in a local JSON file.
      flash_path: Optional path to a PPAPI Flash plugin to pass to Chrome.
      pdf_path: Optional path to a PDF plugin copied next to the binary.
    """
    super(PathContext, self).__init__()
    # Store off the input parameters.
    self.base_url = base_url
    self.platform = platform  # What's passed in to the '-a/--archive' option.
    self.good_revision = good_revision
    self.bad_revision = bad_revision
    self.is_official = is_official
    self.is_asan = is_asan
    self.build_type = 'release'
    self.flash_path = flash_path
    # Dictionary which stores svn revision number as key and it's
    # corresponding git hash as value. This data is populated in
    # _FetchAndParse and used later in GetDownloadURL while downloading
    # the build.
    self.githash_svn_dict = {}
    self.pdf_path = pdf_path
    # The name of the ZIP file in a revision directory on the server.
    self.archive_name = None

    # Whether to cache and use the list of known revisions in a local file to
    # speed up the initialization of the script at the next run.
    self.use_local_cache = use_local_cache

    # Locate the local checkout to speed up the script by using locally stored
    # metadata.
    abs_file_path = os.path.abspath(os.path.realpath(__file__))
    local_src_path = os.path.join(os.path.dirname(abs_file_path), '..')
    if abs_file_path.endswith(os.path.join('tools', 'bisect-builds.py')) and\
        os.path.exists(os.path.join(local_src_path, '.git')):
      self.local_src_path = os.path.normpath(local_src_path)
    else:
      self.local_src_path = None

    # Set some internal members:
    #   _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
    #   _archive_extract_dir = Uncompressed directory in the archive_name file.
    #   _binary_name = The name of the executable to run.
    if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
      self._binary_name = 'chrome'
    elif self.platform in ('mac', 'mac64'):
      self.archive_name = 'chrome-mac.zip'
      self._archive_extract_dir = 'chrome-mac'
    elif self.platform in ('win', 'win64'):
      self.archive_name = 'chrome-win32.zip'
      self._archive_extract_dir = 'chrome-win32'
      self._binary_name = 'chrome.exe'
    else:
      raise Exception('Invalid platform: %s' % self.platform)

    if is_official:
      # Official builds use per-platform directory and archive names.
      if self.platform == 'linux':
        self._listing_platform_dir = 'precise32/'
        self.archive_name = 'chrome-precise32.zip'
        self._archive_extract_dir = 'chrome-precise32'
      elif self.platform == 'linux64':
        self._listing_platform_dir = 'precise64/'
        self.archive_name = 'chrome-precise64.zip'
        self._archive_extract_dir = 'chrome-precise64'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'mac/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'mac64':
        self._listing_platform_dir = 'mac64/'
        self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
      elif self.platform == 'win':
        self._listing_platform_dir = 'win/'
        self.archive_name = 'chrome-win.zip'
        self._archive_extract_dir = 'chrome-win'
      elif self.platform == 'win64':
        self._listing_platform_dir = 'win64/'
        self.archive_name = 'chrome-win64.zip'
        self._archive_extract_dir = 'chrome-win64'
    else:
      # Snapshot builds.
      if self.platform in ('linux', 'linux64', 'linux-arm', 'chromeos'):
        self.archive_name = 'chrome-linux.zip'
        self._archive_extract_dir = 'chrome-linux'
        if self.platform == 'linux':
          self._listing_platform_dir = 'Linux/'
        elif self.platform == 'linux64':
          self._listing_platform_dir = 'Linux_x64/'
        elif self.platform == 'linux-arm':
          self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
        elif self.platform == 'chromeos':
          self._listing_platform_dir = 'Linux_ChromiumOS_Full/'
      elif self.platform == 'mac':
        self._listing_platform_dir = 'Mac/'
        self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
      elif self.platform == 'win':
        self._listing_platform_dir = 'Win/'

  def GetASANPlatformDir(self):
    """ASAN builds are in directories like "linux-release", or have filenames
    like "asan-win32-release-277079.zip". This aligns to our platform names
    except in the case of Windows where they use "win32" instead of "win"."""
    if self.platform == 'win':
      return 'win32'
    else:
      return self.platform

  def GetListingURL(self, marker=None):
    """Returns the URL for a directory listing, with an optional marker."""
    marker_param = ''
    if marker:
      marker_param = '&marker=' + str(marker)
    if self.is_asan:
      # ASAN listings are flat (no delimiter), filtered by zip-name prefix.
      prefix = '%s-%s' % (self.GetASANPlatformDir(), self.build_type)
      return self.base_url + '/?delimiter=&prefix=' + prefix + marker_param
    else:
      return (self.base_url + '/?delimiter=/&prefix=' +
              self._listing_platform_dir + marker_param)

  def GetDownloadURL(self, revision):
    """Gets the download URL for a build archive of a specific revision."""
    if self.is_asan:
      return '%s/%s-%s/%s-%d.zip' % (
          ASAN_BASE_URL, self.GetASANPlatformDir(), self.build_type,
          self.GetASANBaseName(), revision)
    if self.is_official:
      return '%s/%s/%s%s' % (
          OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
          self.archive_name)
    else:
      # Some snapshots from the svn-git transition are stored under their git
      # hash rather than the svn revision; map back via githash_svn_dict.
      if str(revision) in self.githash_svn_dict:
        revision = self.githash_svn_dict[str(revision)]
      return '%s/%s%s/%s' % (self.base_url, self._listing_platform_dir,
                             revision, self.archive_name)

  def GetLastChangeURL(self):
    """Returns a URL to the LAST_CHANGE file."""
    return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'

  def GetASANBaseName(self):
    """Returns the base name of the ASAN zip file."""
    if 'linux' in self.platform:
      return 'asan-symbolized-%s-%s' % (self.GetASANPlatformDir(),
                                        self.build_type)
    else:
      return 'asan-%s-%s' % (self.GetASANPlatformDir(), self.build_type)

  def GetLaunchPath(self, revision):
    """Returns a relative path (presumably from the archive extraction location)
    that is used to run the executable."""
    if self.is_asan:
      extract_dir = '%s-%d' % (self.GetASANBaseName(), revision)
    else:
      extract_dir = self._archive_extract_dir
    return os.path.join(extract_dir, self._binary_name)

  def ParseDirectoryIndex(self, last_known_rev):
    """Parses the Google Storage directory listing into a list of revision
    numbers."""

    def _GetMarkerForRev(revision):
      # Builds the listing-API "marker" key at which paging should resume.
      if self.is_asan:
        return '%s-%s/%s-%d.zip' % (
            self.GetASANPlatformDir(), self.build_type,
            self.GetASANBaseName(), revision)
      return '%s%d' % (self._listing_platform_dir, revision)

    def _FetchAndParse(url):
      """Fetches a URL and returns a 3-Tuple of ([revisions], next-marker,
      {svn-revision: git-hash}). If next-marker is not None, then the listing
      is a partial listing and another fetch should be performed with
      next-marker being the marker= GET parameter."""
      handle = urllib.urlopen(url)
      document = ElementTree.parse(handle)

      # All nodes in the tree are namespaced. Get the root's tag name to extract
      # the namespace. Etree does namespaces as |{namespace}tag|.
      root_tag = document.getroot().tag
      end_ns_pos = root_tag.find('}')
      if end_ns_pos == -1:
        raise Exception('Could not locate end namespace for directory index')
      namespace = root_tag[:end_ns_pos + 1]

      # Find the prefix (_listing_platform_dir) and whether or not the list is
      # truncated.
      prefix_len = len(document.find(namespace + 'Prefix').text)
      next_marker = None
      is_truncated = document.find(namespace + 'IsTruncated')
      if is_truncated is not None and is_truncated.text.lower() == 'true':
        next_marker = document.find(namespace + 'NextMarker').text
      # Get a list of all the revisions.
      revisions = []
      githash_svn_dict = {}
      if self.is_asan:
        asan_regex = re.compile(r'.*%s-(\d+)\.zip$' % (self.GetASANBaseName()))
        # Non ASAN builds are in a <revision> directory. The ASAN builds are
        # flat
        all_prefixes = document.findall(namespace + 'Contents/' +
                                        namespace + 'Key')
        for prefix in all_prefixes:
          m = asan_regex.match(prefix.text)
          if m:
            try:
              revisions.append(int(m.group(1)))
            except ValueError:
              pass
      else:
        all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
                                        namespace + 'Prefix')
        # The <Prefix> nodes have content of the form of
        # |_listing_platform_dir/revision/|. Strip off the platform dir and the
        # trailing slash to just have a number.
        for prefix in all_prefixes:
          revnum = prefix.text[prefix_len:-1]
          try:
            if not revnum.isdigit():
              # During the svn-git migration, some items were stored by hash.
              # These items may appear anywhere in the list of items.
              # If |last_known_rev| is set, assume that the full list has been
              # retrieved before (including the hashes), so we can safely skip
              # all git hashes and focus on the numeric revision numbers.
              if last_known_rev:
                revnum = None
              else:
                git_hash = revnum
                revnum = self.GetSVNRevisionFromGitHash(git_hash)
                githash_svn_dict[revnum] = git_hash
            if revnum is not None:
              revnum = int(revnum)
              revisions.append(revnum)
          except ValueError:
            pass
      return (revisions, next_marker, githash_svn_dict)

    # Fetch the first list of revisions.
    if last_known_rev:
      revisions = []
      # Optimization: Start paging at the last known revision (local cache).
      next_marker = _GetMarkerForRev(last_known_rev)
      # Optimization: Stop paging at the last known revision (remote).
      last_change_rev = GetChromiumRevision(self, self.GetLastChangeURL())
      if last_known_rev == last_change_rev:
        return []
    else:
      (revisions, next_marker, new_dict) = _FetchAndParse(self.GetListingURL())
      self.githash_svn_dict.update(new_dict)
      last_change_rev = None

    # If the result list was truncated, refetch with the next marker. Do this
    # until an entire directory listing is done.
    while next_marker:
      sys.stdout.write('\rFetching revisions at marker %s' % next_marker)
      sys.stdout.flush()

      next_url = self.GetListingURL(next_marker)
      (new_revisions, next_marker, new_dict) = _FetchAndParse(next_url)
      revisions.extend(new_revisions)
      self.githash_svn_dict.update(new_dict)
      if last_change_rev and last_change_rev in new_revisions:
        break
    sys.stdout.write('\r')
    sys.stdout.flush()
    return revisions

  def _GetSVNRevisionFromGitHashWithoutGitCheckout(self, git_sha1, depot):
    # Resolves |git_sha1| to an SVN revision by fetching the commit message
    # from gitiles and scanning it for the revision footer.
    # Raises ValueError when the revision cannot be determined.
    json_url = GITHASH_TO_SVN_URL[depot] % git_sha1
    response = urllib.urlopen(json_url)
    if response.getcode() == 200:
      try:
        # The response body starts with a 4-character prefix before the JSON
        # payload; skip it before parsing (response.read()[4:]).
        data = json.loads(response.read()[4:])
      except ValueError:
        print 'ValueError for JSON URL: %s' % json_url
        raise ValueError
    else:
      raise ValueError
    if 'message' in data:
      message = data['message'].split('\n')
      message = [line for line in message if line.strip()]
      search_pattern = re.compile(SEARCH_PATTERN[depot])
      # The revision footer is expected on the last non-empty line.
      result = search_pattern.search(message[len(message)-1])
      if result:
        return result.group(1)
      else:
        if depot == 'chromium':
          # Fall back to the pre-migration git-svn-id footer format.
          result = re.search(CHROMIUM_SEARCH_PATTERN_OLD,
                             message[len(message)-1])
          if result:
            return result.group(1)
    print 'Failed to get svn revision number for %s' % git_sha1
    raise ValueError

  def _GetSVNRevisionFromGitHashFromGitCheckout(self, git_sha1, depot):
    # Resolves |git_sha1| using the local git checkout (self.local_src_path).
    # Raises ValueError when the revision cannot be determined.
    def _RunGit(command, path):
      # Runs a git command in |path|; returns (stdout, returncode).
      command = ['git'] + command
      shell = sys.platform.startswith('win')
      proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE, cwd=path)
      (output, _) = proc.communicate()
      return (output, proc.returncode)

    path = self.local_src_path
    if depot == 'blink':
      path = os.path.join(self.local_src_path, 'third_party', 'WebKit')
    revision = None
    try:
      # First try git-svn's own mapping.
      command = ['svn', 'find-rev', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        revision = git_output.strip('\n')
    except ValueError:
      pass
    if not revision:
      # Fall back to parsing the commit subject line.
      command = ['log', '-n1', '--format=%s', git_sha1]
      (git_output, return_code) = _RunGit(command, path)
      if not return_code:
        revision = re.match('SVN changes up to revision ([0-9]+)', git_output)
        revision = revision.group(1) if revision else None
    if revision:
      return revision
    raise ValueError

  def GetSVNRevisionFromGitHash(self, git_sha1, depot='chromium'):
    # Prefer the local checkout when available; otherwise query gitiles.
    if not self.local_src_path:
      return self._GetSVNRevisionFromGitHashWithoutGitCheckout(git_sha1, depot)
    else:
      return self._GetSVNRevisionFromGitHashFromGitCheckout(git_sha1, depot)

  def GetRevList(self):
    """Gets the list of revision numbers between self.good_revision and
    self.bad_revision."""

    cache = {}
    # The cache is stored in the same directory as bisect-builds.py
    cache_filename = os.path.join(
        os.path.abspath(os.path.dirname(__file__)),
        '.bisect-builds-cache.json')
    cache_dict_key = self.GetListingURL()

    def _LoadBucketFromCache():
      # Returns (revisions, githash_svn_dict) from the cache file, or
      # ([], {}) when caching is off or the file is missing/corrupt.
      if self.use_local_cache:
        try:
          with open(cache_filename) as cache_file:
            cache = json.load(cache_file)
            revisions = cache.get(cache_dict_key, [])
            githash_svn_dict = cache.get('githash_svn_dict', {})
            if revisions:
              print 'Loaded revisions %d-%d from %s' % (revisions[0],
                  revisions[-1], cache_filename)
            return (revisions, githash_svn_dict)
        except (EnvironmentError, ValueError):
          pass
      return ([], {})

    def _SaveBucketToCache():
      """Save the list of revisions and the git-svn mappings to a file.
      The list of revisions is assumed to be sorted."""
      if self.use_local_cache:
        cache[cache_dict_key] = revlist_all
        cache['githash_svn_dict'] = self.githash_svn_dict
        try:
          with open(cache_filename, 'w') as cache_file:
            json.dump(cache, cache_file)
          print 'Saved revisions %d-%d to %s' % (
              revlist_all[0], revlist_all[-1], cache_filename)
        except EnvironmentError:
          pass

    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)

    (revlist_all, self.githash_svn_dict) = _LoadBucketFromCache()
    last_known_rev = revlist_all[-1] if revlist_all else 0
    if last_known_rev < maxrev:
      # The cache does not cover the requested range; fetch the remainder.
      revlist_all.extend(map(int, self.ParseDirectoryIndex(last_known_rev)))
      revlist_all = list(set(revlist_all))
      revlist_all.sort()
      _SaveBucketToCache()

    revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]

    # Set good and bad revisions to be legit revisions.
    if revlist:
      if self.good_revision < self.bad_revision:
        self.good_revision = revlist[0]
        self.bad_revision = revlist[-1]
      else:
        self.bad_revision = revlist[0]
        self.good_revision = revlist[-1]

    # Fix chromium rev so that the deps blink revision matches REVISIONS file.
    if self.base_url == WEBKIT_BASE_URL:
      revlist_all.sort()
      self.good_revision = FixChromiumRevForBlink(revlist,
                                                  revlist_all,
                                                  self,
                                                  self.good_revision)
      self.bad_revision = FixChromiumRevForBlink(revlist,
                                                 revlist_all,
                                                 self,
                                                 self.bad_revision)
    return revlist

  def GetOfficialBuildsList(self):
    """Gets the list of official build numbers between self.good_revision and
    self.bad_revision."""

    def CheckDepotToolsInPath():
      # Returns the depot_tools directory found on PATH, or None.
      delimiter = ';' if sys.platform.startswith('win') else ':'
      path_list = os.environ['PATH'].split(delimiter)
      for path in path_list:
        if path.rstrip(os.path.sep).endswith('depot_tools'):
          return path
      return None

    def RunGsutilCommand(args):
      # Runs depot_tools' gsutil with |args|; exits with guidance on missing
      # depot_tools or credential errors, raises on other failures.
      gsutil_path = CheckDepotToolsInPath()
      if gsutil_path is None:
        print ('Follow the instructions in this document '
               'http://dev.chromium.org/developers/how-tos/install-depot-tools'
               ' to install depot_tools and then try again.')
        sys.exit(1)
      gsutil_path = os.path.join(gsutil_path, 'third_party', 'gsutil', 'gsutil')
      gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                env=None)
      stdout, stderr = gsutil.communicate()
      if gsutil.returncode:
        if (re.findall(r'status[ |=]40[1|3]', stderr) or
            stderr.startswith(CREDENTIAL_ERROR_MESSAGE)):
          print ('Follow these steps to configure your credentials and try'
                 ' running the bisect-builds.py again.:\n'
                 '  1. Run "python %s config" and follow its instructions.\n'
                 '  2. If you have a @google.com account, use that account.\n'
                 '  3. For the project-id, just enter 0.' % gsutil_path)
          sys.exit(1)
        else:
          raise Exception('Error running the gsutil command: %s' % stderr)
      return stdout

    def GsutilList(bucket):
      # Lists the top-level entries of gs://|bucket|/ (version directories).
      query = 'gs://%s/' % bucket
      stdout = RunGsutilCommand(['ls', query])
      return [url[len(query):].strip('/') for url in stdout.splitlines()]

    # Download the revlist and filter for just the range between good and bad.
    minrev = min(self.good_revision, self.bad_revision)
    maxrev = max(self.good_revision, self.bad_revision)
    build_numbers = GsutilList(GS_BUCKET_NAME)
    revision_re = re.compile(r'(\d\d\.\d\.\d{4}\.\d+)')
    build_numbers = filter(lambda b: revision_re.search(b), build_numbers)
    final_list = []
    parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
    connection = httplib.HTTPConnection(GOOGLE_APIS_URL)
    for build_number in sorted(parsed_build_numbers):
      if build_number > maxrev:
        break
      if build_number < minrev:
        continue
      # Probe with a HEAD request; only keep versions whose archive exists
      # for this platform.
      path = ('/' + GS_BUCKET_NAME + '/' + str(build_number) + '/' +
              self._listing_platform_dir + self.archive_name)
      connection.request('HEAD', path)
      response = connection.getresponse()
      if response.status == 200:
        final_list.append(str(build_number))
      # Consume the response so the next request can reuse the connection.
      response.read()
    connection.close()
    return final_list
def UnzipFilenameToDir(filename, directory):
  """Unzip |filename| to |directory|.

  Preserves the Unix permission bits stored in the archive and restores the
  original working directory even if extraction fails partway through.
  """
  cwd = os.getcwd()
  if not os.path.isabs(filename):
    filename = os.path.join(cwd, filename)
  zf = zipfile.ZipFile(filename)
  # Make base.
  if not os.path.isdir(directory):
    os.mkdir(directory)
  os.chdir(directory)
  try:
    # Extract files.
    for info in zf.infolist():
      name = info.filename
      if name.endswith('/'):  # dir
        if not os.path.isdir(name):
          os.makedirs(name)
      else:  # file
        # BUGFIX: don't shadow the |directory| parameter, and skip makedirs
        # for files at the archive root (os.makedirs('') raises OSError).
        parent_dir = os.path.dirname(name)
        if parent_dir and not os.path.isdir(parent_dir):
          os.makedirs(parent_dir)
        out = open(name, 'wb')
        try:
          out.write(zf.read(name))
        finally:
          out.close()
        # Set permissions. Permission info in external_attr is shifted 16 bits.
        os.chmod(name, info.external_attr >> 16)
  finally:
    # BUGFIX: always close the archive and restore the working directory,
    # even when extraction raises.
    zf.close()
    os.chdir(cwd)
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
  """Downloads the build archive for revision |rev| into |filename|.

  @param context A PathContext instance.
  @param rev The Chromium revision number/tag to download.
  @param filename The destination for the downloaded file.
  @param quit_event A threading.Event which will be set by the master thread to
                    indicate that the download should be aborted.
  @param progress_event A threading.Event which will be set by the master thread
                        to indicate that the progress of the download should be
                        displayed.
  """
  def ReportHook(blocknum, blocksize, totalsize):
    # Called by urlretrieve after each chunk; aborting the download is
    # implemented by raising from inside this hook.
    if quit_event and quit_event.isSet():
      raise RuntimeError('Aborting download of revision %s' % str(rev))
    if progress_event and progress_event.isSet():
      size = blocknum * blocksize
      if totalsize == -1:  # Total size not known.
        progress = 'Received %d bytes' % size
      else:
        size = min(totalsize, size)
        progress = 'Received %d of %d bytes, %.2f%%' % (
            size, totalsize, 100.0 * size / totalsize)
      # Send a \r to let all progress messages use just one line of output.
      sys.stdout.write('\r' + progress)
      sys.stdout.flush()

  download_url = context.GetDownloadURL(rev)
  try:
    urllib.urlretrieve(download_url, filename, ReportHook)
    if progress_event and progress_event.isSet():
      print
  except RuntimeError:
    # Raised by ReportHook when quit_event was set; the partially downloaded
    # file is simply abandoned.
    pass
def RunRevision(context, revision, zip_file, profile, num_runs, command, args):
  """Given a zipped revision, unzip it and run the test.

  Returns the (returncode, stdout, stderr) triple of the first failing run,
  or of the first run if all runs succeeded.
  """
  print 'Trying revision %s...' % str(revision)

  # Create a temp directory and unzip the revision into it.
  cwd = os.getcwd()
  tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
  UnzipFilenameToDir(zip_file, tempdir)

  # Hack: Chrome OS archives are missing icudtl.dat; try to copy it from
  # the local directory.
  if context.platform == 'chromeos':
    icudtl_path = 'third_party/icu/source/data/in/icudtl.dat'
    if not os.access(icudtl_path, os.F_OK):
      print 'Couldn\'t find: ' + icudtl_path
      sys.exit()
    os.system('cp %s %s/chrome-linux/' % (icudtl_path, tempdir))

  os.chdir(tempdir)

  # Run the build as many times as specified.
  testargs = ['--user-data-dir=%s' % profile] + args
  # The sandbox must be run as root on Official Chrome, so bypass it.
  if ((context.is_official or context.flash_path or context.pdf_path) and
      context.platform.startswith('linux')):
    testargs.append('--no-sandbox')
  if context.flash_path:
    testargs.append('--ppapi-flash-path=%s' % context.flash_path)
    # We have to pass a large enough Flash version, which currently needs not
    # be correct. Instead of requiring the user of the script to figure out and
    # pass the correct version we just spoof it.
    testargs.append('--ppapi-flash-version=99.9.999.999')

  # TODO(vitalybuka): Remove in the future. See crbug.com/395687.
  if context.pdf_path:
    shutil.copy(context.pdf_path,
                os.path.dirname(context.GetLaunchPath(revision)))
    testargs.append('--enable-print-preview')

  # Expand the user-supplied command template: %a -> test args (as separate
  # tokens), %p -> absolute path to the binary, %s -> test args (one string).
  runcommand = []
  for token in shlex.split(command):
    if token == '%a':
      runcommand.extend(testargs)
    else:
      runcommand.append(
          token.replace('%p', os.path.abspath(context.GetLaunchPath(revision))).
          replace('%s', ' '.join(testargs)))

  results = []
  for _ in range(num_runs):
    subproc = subprocess.Popen(runcommand,
                               bufsize=-1,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    (stdout, stderr) = subproc.communicate()
    results.append((subproc.returncode, stdout, stderr))
  os.chdir(cwd)
  # Best-effort cleanup of the extracted build; on Windows the browser may
  # still hold files open, so failures are ignored.
  try:
    shutil.rmtree(tempdir, True)
  except Exception:
    pass

  # Report the first failing run, if any; otherwise the first run.
  for (returncode, stdout, stderr) in results:
    if returncode:
      return (returncode, stdout, stderr)
  return results[0]
# The arguments official_builds, status, stdout and stderr are unused.
# They are present here because this function is passed to Bisect which then
# calls it with 5 arguments.
# pylint: disable=W0613
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
  """Prompts the user to classify build |rev|; returns 'g', 'b', 'r' or 'u'."""
  prompt = ('Revision %s is '
            '[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' % str(rev))
  # Keep asking until we get an answer we understand.
  while True:
    answer = raw_input(prompt)
    if answer == 'q':
      raise SystemExit()
    if answer in ('g', 'b', 'r', 'u'):
      return answer
722 def IsGoodASANBuild(rev, official_builds, status, stdout, stderr):
723 """Determine if an ASAN build |rev| is good or bad
725 Will examine stderr looking for the error message emitted by ASAN. If not
726 found then will fallback to asking the user."""
727 if stderr:
728 bad_count = 0
729 for line in stderr.splitlines():
730 print line
731 if line.find('ERROR: AddressSanitizer:') != -1:
732 bad_count += 1
733 if bad_count > 0:
734 print 'Revision %d determined to be bad.' % rev
735 return 'b'
736 return AskIsGoodBuild(rev, official_builds, status, stdout, stderr)
class DownloadJob(object):
  """DownloadJob represents a task to download a given Chromium revision."""

  def __init__(self, context, name, rev, zip_file):
    super(DownloadJob, self).__init__()
    # Store off the input parameters.
    self.context = context
    self.name = name  # Name given to the download thread.
    self.rev = rev
    self.zip_file = zip_file  # Destination path for the downloaded archive.
    # Set to tell the download thread (via FetchRevision) to abort.
    self.quit_event = threading.Event()
    # Set to make the download thread print progress to stdout.
    self.progress_event = threading.Event()
    # The worker thread; created by Start().
    self.thread = None

  def Start(self):
    """Starts the download."""
    fetchargs = (self.context,
                 self.rev,
                 self.zip_file,
                 self.quit_event,
                 self.progress_event)
    self.thread = threading.Thread(target=FetchRevision,
                                   name=self.name,
                                   args=fetchargs)
    self.thread.start()

  def Stop(self):
    """Stops the download which must have been started previously."""
    assert self.thread, 'DownloadJob must be started before Stop is called.'
    self.quit_event.set()
    self.thread.join()
    # Remove the (possibly partial) downloaded archive.
    os.unlink(self.zip_file)

  def WaitFor(self):
    """Prints a message and waits for the download to complete. The download
    must have been started previously."""
    assert self.thread, 'DownloadJob must be started before WaitFor is called.'
    print 'Downloading revision %s...' % str(self.rev)
    self.progress_event.set()  # Display progress of download.
    self.thread.join()
780 def Bisect(context,
781 num_runs=1,
782 command='%p %a',
783 try_args=(),
784 profile=None,
785 interactive=True,
786 evaluate=AskIsGoodBuild):
787 """Given known good and known bad revisions, run a binary search on all
788 archived revisions to determine the last known good revision.
790 @param context PathContext object initialized with user provided parameters.
791 @param num_runs Number of times to run each build for asking good/bad.
792 @param try_args A tuple of arguments to pass to the test application.
793 @param profile The name of the user profile to run with.
794 @param interactive If it is false, use command exit code for good or bad
795 judgment of the argument build.
796 @param evaluate A function which returns 'g' if the argument build is good,
797 'b' if it's bad or 'u' if unknown.
799 Threading is used to fetch Chromium revisions in the background, speeding up
800 the user's experience. For example, suppose the bounds of the search are
801 good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
802 whether revision 50 is good or bad, the next revision to check will be either
803 25 or 75. So, while revision 50 is being checked, the script will download
804 revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
805 known:
807 - If rev 50 is good, the download of rev 25 is cancelled, and the next test
808 is run on rev 75.
810 - If rev 50 is bad, the download of rev 75 is cancelled, and the next test
811 is run on rev 25.
814 if not profile:
815 profile = 'profile'
817 good_rev = context.good_revision
818 bad_rev = context.bad_revision
819 cwd = os.getcwd()
821 print 'Downloading list of known revisions...',
822 if not context.use_local_cache and not context.is_official:
823 print '(use --use-local-cache to cache and re-use the list of revisions)'
824 else:
825 print
826 _GetDownloadPath = lambda rev: os.path.join(cwd,
827 '%s-%s' % (str(rev), context.archive_name))
828 if context.is_official:
829 revlist = context.GetOfficialBuildsList()
830 else:
831 revlist = context.GetRevList()
833 # Get a list of revisions to bisect across.
834 if len(revlist) < 2: # Don't have enough builds to bisect.
835 msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
836 raise RuntimeError(msg)
838 # Figure out our bookends and first pivot point; fetch the pivot revision.
839 minrev = 0
840 maxrev = len(revlist) - 1
841 pivot = maxrev / 2
842 rev = revlist[pivot]
843 zip_file = _GetDownloadPath(rev)
844 fetch = DownloadJob(context, 'initial_fetch', rev, zip_file)
845 fetch.Start()
846 fetch.WaitFor()
848 # Binary search time!
849 while fetch and fetch.zip_file and maxrev - minrev > 1:
850 if bad_rev < good_rev:
851 min_str, max_str = 'bad', 'good'
852 else:
853 min_str, max_str = 'good', 'bad'
854 print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str,
855 revlist[maxrev], max_str)
857 # Pre-fetch next two possible pivots
858 # - down_pivot is the next revision to check if the current revision turns
859 # out to be bad.
860 # - up_pivot is the next revision to check if the current revision turns
861 # out to be good.
862 down_pivot = int((pivot - minrev) / 2) + minrev
863 down_fetch = None
864 if down_pivot != pivot and down_pivot != minrev:
865 down_rev = revlist[down_pivot]
866 down_fetch = DownloadJob(context, 'down_fetch', down_rev,
867 _GetDownloadPath(down_rev))
868 down_fetch.Start()
870 up_pivot = int((maxrev - pivot) / 2) + pivot
871 up_fetch = None
872 if up_pivot != pivot and up_pivot != maxrev:
873 up_rev = revlist[up_pivot]
874 up_fetch = DownloadJob(context, 'up_fetch', up_rev,
875 _GetDownloadPath(up_rev))
876 up_fetch.Start()
878 # Run test on the pivot revision.
879 status = None
880 stdout = None
881 stderr = None
882 try:
883 (status, stdout, stderr) = RunRevision(context,
884 rev,
885 fetch.zip_file,
886 profile,
887 num_runs,
888 command,
889 try_args)
890 except Exception, e:
891 print >> sys.stderr, e
893 # Call the evaluate function to see if the current revision is good or bad.
894 # On that basis, kill one of the background downloads and complete the
895 # other, as described in the comments above.
896 try:
897 if not interactive:
898 if status:
899 answer = 'b'
900 print 'Bad revision: %s' % rev
901 else:
902 answer = 'g'
903 print 'Good revision: %s' % rev
904 else:
905 answer = evaluate(rev, context.is_official, status, stdout, stderr)
906 if ((answer == 'g' and good_rev < bad_rev)
907 or (answer == 'b' and bad_rev < good_rev)):
908 fetch.Stop()
909 minrev = pivot
910 if down_fetch:
911 down_fetch.Stop() # Kill the download of the older revision.
912 fetch = None
913 if up_fetch:
914 up_fetch.WaitFor()
915 pivot = up_pivot
916 fetch = up_fetch
917 elif ((answer == 'b' and good_rev < bad_rev)
918 or (answer == 'g' and bad_rev < good_rev)):
919 fetch.Stop()
920 maxrev = pivot
921 if up_fetch:
922 up_fetch.Stop() # Kill the download of the newer revision.
923 fetch = None
924 if down_fetch:
925 down_fetch.WaitFor()
926 pivot = down_pivot
927 fetch = down_fetch
928 elif answer == 'r':
929 pass # Retry requires no changes.
930 elif answer == 'u':
931 # Nuke the revision from the revlist and choose a new pivot.
932 fetch.Stop()
933 revlist.pop(pivot)
934 maxrev -= 1 # Assumes maxrev >= pivot.
936 if maxrev - minrev > 1:
937 # Alternate between using down_pivot or up_pivot for the new pivot
938 # point, without affecting the range. Do this instead of setting the
939 # pivot to the midpoint of the new range because adjacent revisions
940 # are likely affected by the same issue that caused the (u)nknown
941 # response.
942 if up_fetch and down_fetch:
943 fetch = [up_fetch, down_fetch][len(revlist) % 2]
944 elif up_fetch:
945 fetch = up_fetch
946 else:
947 fetch = down_fetch
948 fetch.WaitFor()
949 if fetch == up_fetch:
950 pivot = up_pivot - 1 # Subtracts 1 because revlist was resized.
951 else:
952 pivot = down_pivot
953 zip_file = fetch.zip_file
955 if down_fetch and fetch != down_fetch:
956 down_fetch.Stop()
957 if up_fetch and fetch != up_fetch:
958 up_fetch.Stop()
959 else:
960 assert False, 'Unexpected return value from evaluate(): ' + answer
961 except SystemExit:
962 print 'Cleaning up...'
963 for f in [_GetDownloadPath(revlist[down_pivot]),
964 _GetDownloadPath(revlist[up_pivot])]:
965 try:
966 os.unlink(f)
967 except OSError:
968 pass
969 sys.exit(0)
971 rev = revlist[pivot]
973 return (revlist[minrev], revlist[maxrev], context)
def GetBlinkDEPSRevisionForChromiumRevision(self, rev):
  """Returns the Blink revision pinned by the DEPS file at chromium
  revision |rev|.

  Tries the old SVN-viewvc DEPS URL first; if that is unavailable, falls
  back to the git-based DEPS URL (resolving |rev| to a git hash first).

  Raises:
    Exception: if neither DEPS source yields a Blink revision.
  """

  def _GetBlinkRev(url, blink_re):
    # Reads the whole response, closes the handle, and returns the first
    # capture group, or None when the pattern is absent.
    m = blink_re.search(url.read())
    url.close()
    if m:
      return m.group(1)

  url = urllib.urlopen(DEPS_FILE_OLD % rev)
  if url.getcode() == 200:
    # . doesn't match newlines without re.DOTALL, so this is safe.
    blink_rev = _GetBlinkRev(url, re.compile(r'webkit_revision\D*(\d+)'))
    # Guard against int(None): fall through to the informative raise below
    # instead of an opaque TypeError when the pattern is missing.
    if blink_rev is not None:
      return int(blink_rev)
  else:
    url.close()  # The old-style handle is dead; don't leak it.
    url = urllib.urlopen(DEPS_FILE_NEW % GetGitHashFromSVNRevision(rev))
    if url.getcode() == 200:
      blink_re = re.compile(r'webkit_revision\D*\d+;\D*\d+;(\w+)')
      blink_git_sha = _GetBlinkRev(url, blink_re)
      if blink_git_sha is not None:
        return self.GetSVNRevisionFromGitHash(blink_git_sha, 'blink')
  raise Exception('Could not get Blink revision for Chromium rev %d' % rev)
1000 def GetBlinkRevisionForChromiumRevision(context, rev):
1001 """Returns the blink revision that was in REVISIONS file at
1002 chromium revision |rev|."""
1003 def _IsRevisionNumber(revision):
1004 if isinstance(revision, int):
1005 return True
1006 else:
1007 return revision.isdigit()
1008 if str(rev) in context.githash_svn_dict:
1009 rev = context.githash_svn_dict[str(rev)]
1010 file_url = '%s/%s%s/REVISIONS' % (context.base_url,
1011 context._listing_platform_dir, rev)
1012 url = urllib.urlopen(file_url)
1013 if url.getcode() == 200:
1014 try:
1015 data = json.loads(url.read())
1016 except ValueError:
1017 print 'ValueError for JSON URL: %s' % file_url
1018 raise ValueError
1019 else:
1020 raise ValueError
1021 url.close()
1022 if 'webkit_revision' in data:
1023 blink_rev = data['webkit_revision']
1024 if not _IsRevisionNumber(blink_rev):
1025 blink_rev = int(context.GetSVNRevisionFromGitHash(blink_rev, 'blink'))
1026 return blink_rev
1027 else:
1028 raise Exception('Could not get blink revision for cr rev %d' % rev)
def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
  """Returns the chromium revision that has the correct blink revision
  for blink bisect, DEPS and REVISIONS file might not match since
  blink snapshots point to tip of tree blink.
  Note: The revisions_final variable might get modified to include
  additional revisions."""
  blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(self, rev)

  # Walk backwards through |revisions| until the snapshot's Blink revision
  # no longer exceeds the DEPS-pinned one.
  while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
    idx = revisions.index(rev)
    if idx == 0:
      # No earlier revision to fall back to. Bail out instead of spinning
      # forever on the same |rev| (the original looped infinitely here,
      # re-fetching the same REVISIONS file on every iteration).
      break
    rev = revisions[idx - 1]
    if rev not in revisions_final:
      revisions_final.insert(0, rev)

  revisions_final.sort()
  return rev
1050 def GetChromiumRevision(context, url):
1051 """Returns the chromium revision read from given URL."""
1052 try:
1053 # Location of the latest build revision number
1054 latest_revision = urllib.urlopen(url).read()
1055 if latest_revision.isdigit():
1056 return int(latest_revision)
1057 return context.GetSVNRevisionFromGitHash(latest_revision)
1058 except Exception:
1059 print 'Could not determine latest revision. This could be bad...'
1060 return 999999999
def GetGitHashFromSVNRevision(svn_revision):
  """Resolves |svn_revision| to a git hash via the crrev redirect API.

  Returns the hash string, or None (implicitly) when the request fails or
  the response lacks a 'git_sha' field.
  """
  crrev_url = CRREV_URL + str(svn_revision)
  url = urllib.urlopen(crrev_url)
  try:
    if url.getcode() == 200:
      data = json.loads(url.read())
      if 'git_sha' in data:
        return data['git_sha']
  finally:
    url.close()  # The original leaked this handle on every path.
def PrintChangeLog(min_chromium_rev, max_chromium_rev):
  """Prints the changelog URL."""
  # Resolve both endpoints to git hashes before formatting the URL.
  min_hash = GetGitHashFromSVNRevision(min_chromium_rev)
  max_hash = GetGitHashFromSVNRevision(max_chromium_rev)
  print (' ' + CHANGELOG_URL % (min_hash, max_hash))
def main():
  """Parses command-line options, runs the bisection, and prints the
  resulting revision range plus changelog URLs.

  Returns 0 on success, 1 on invalid command-line usage.
  """
  usage = ('%prog [options] [-- chromium-options]\n'
           'Perform binary search on the snapshot builds to find a minimal\n'
           'range of revisions where a behavior change happened. The\n'
           'behaviors are described as "good" and "bad".\n'
           'It is NOT assumed that the behavior of the later revision is\n'
           'the bad one.\n'
           '\n'
           'Revision numbers should use\n'
           '  Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
           '  SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
           '    Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
           '    for earlier revs.\n'
           '    Chrome\'s about: build number and omahaproxy branch_revision\n'
           '    are incorrect, they are from branches.\n'
           '\n'
           'Tip: add "-- --no-first-run" to bypass the first run prompts.')
  parser = optparse.OptionParser(usage=usage)
  # Strangely, the default help output doesn't include the choice list.
  choices = ['mac', 'mac64', 'win', 'win64', 'linux', 'linux64', 'linux-arm',
             'chromeos']
  parser.add_option('-a', '--archive',
                    choices=choices,
                    help='The buildbot archive to bisect [%s].' %
                         '|'.join(choices))
  parser.add_option('-o',
                    action='store_true',
                    dest='official_builds',
                    help='Bisect across official Chrome builds (internal '
                         'only) instead of Chromium archives.')
  parser.add_option('-b', '--bad',
                    type='str',
                    help='A bad revision to start bisection. '
                         'May be earlier or later than the good revision. '
                         'Default is HEAD.')
  parser.add_option('-f', '--flash_path',
                    type='str',
                    help='Absolute path to a recent Adobe Pepper Flash '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pepflashplayer.dll and on Linux '
                         '/opt/google/chrome/PepperFlash/'
                         'libpepflashplayer.so).')
  parser.add_option('-d', '--pdf_path',
                    type='str',
                    help='Absolute path to a recent PDF plugin '
                         'binary to be used in this bisection (e.g. '
                         'on Windows C:\...\pdf.dll and on Linux '
                         '/opt/google/chrome/libpdf.so). Option also enables '
                         'print preview.')
  parser.add_option('-g', '--good',
                    type='str',
                    help='A good revision to start bisection. ' +
                         'May be earlier or later than the bad revision. ' +
                         'Default is 0.')
  parser.add_option('-p', '--profile', '--user-data-dir',
                    type='str',
                    default='profile',
                    help='Profile to use; this will not reset every run. '
                         'Defaults to a clean profile.')
  parser.add_option('-t', '--times',
                    type='int',
                    default=1,
                    help='Number of times to run each build before asking '
                         'if it\'s good or bad. Temporary profiles are reused.')
  parser.add_option('-c', '--command',
                    type='str',
                    default='%p %a',
                    help='Command to execute. %p and %a refer to Chrome '
                         'executable and specified extra arguments '
                         'respectively. Use %s to specify all extra arguments '
                         'as one string. Defaults to "%p %a". Note that any '
                         'extra paths specified should be absolute.')
  parser.add_option('-l', '--blink',
                    action='store_true',
                    help='Use Blink bisect instead of Chromium. ')
  parser.add_option('', '--not-interactive',
                    action='store_true',
                    default=False,
                    help='Use command exit code to tell good/bad revision.')
  parser.add_option('--asan',
                    dest='asan',
                    action='store_true',
                    default=False,
                    help='Allow the script to bisect ASAN builds')
  parser.add_option('--use-local-cache',
                    dest='use_local_cache',
                    action='store_true',
                    default=False,
                    help='Use a local file in the current directory to cache '
                         'a list of known revisions to speed up the '
                         'initialization of this script.')

  (opts, args) = parser.parse_args()

  # --archive is mandatory; everything downstream depends on it.
  if opts.archive is None:
    print 'Error: missing required parameter: --archive'
    print
    parser.print_help()
    return 1

  # ASAN builds are only published for a subset of platforms and never as
  # official builds.
  if opts.asan:
    supported_platforms = ['linux', 'mac', 'win']
    if opts.archive not in supported_platforms:
      print 'Error: ASAN bisecting only supported on these platforms: [%s].' % (
          '|'.join(supported_platforms))
      return 1
    if opts.official_builds:
      print 'Error: Do not yet support bisecting official ASAN builds.'
      return 1

  # Pick the snapshot bucket matching the requested bisect flavor.
  if opts.asan:
    base_url = ASAN_BASE_URL
  elif opts.blink:
    base_url = WEBKIT_BASE_URL
  else:
    base_url = CHROMIUM_BASE_URL

  # Create the context. Initialize 0 for the revisions as they are set below.
  context = PathContext(base_url, opts.archive, opts.good, opts.bad,
                        opts.official_builds, opts.asan, opts.use_local_cache,
                        opts.flash_path, opts.pdf_path)

  # Pick a starting point, try to get HEAD for this.
  if not opts.bad:
    # NOTE(review): this placeholder is immediately overwritten by the
    # fetched revision on the next statement.
    context.bad_revision = '999.0.0.0'
    context.bad_revision = GetChromiumRevision(
        context, context.GetLastChangeURL())

  # Find out when we were good.
  if not opts.good:
    context.good_revision = '0.0.0.0' if opts.official_builds else 0

  if opts.flash_path:
    msg = 'Could not find Flash binary at %s' % opts.flash_path
    assert os.path.exists(opts.flash_path), msg

  if opts.pdf_path:
    msg = 'Could not find PDF binary at %s' % opts.pdf_path
    assert os.path.exists(opts.pdf_path), msg

  # Official builds are version strings (compared as versions); snapshot
  # builds are plain integer SVN revisions.
  if opts.official_builds:
    context.good_revision = LooseVersion(context.good_revision)
    context.bad_revision = LooseVersion(context.bad_revision)
  else:
    context.good_revision = int(context.good_revision)
    context.bad_revision = int(context.bad_revision)

  if opts.times < 1:
    print('Number of times to run (%d) must be greater than or equal to 1.' %
          opts.times)
    parser.print_help()
    return 1

  # ASAN bisects are judged automatically from build output; otherwise the
  # user is asked interactively.
  if opts.asan:
    evaluator = IsGoodASANBuild
  else:
    evaluator = AskIsGoodBuild

  # Save these revision numbers to compare when showing the changelog URL
  # after the bisect.
  good_rev = context.good_revision
  bad_rev = context.bad_revision

  (min_chromium_rev, max_chromium_rev, context) = Bisect(
      context, opts.times, opts.command, args, opts.profile,
      not opts.not_interactive, evaluator)

  # Get corresponding blink revisions.
  try:
    min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        min_chromium_rev)
    max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
                                                        max_chromium_rev)
  except Exception:
    # Silently ignore the failure.
    min_blink_rev, max_blink_rev = 0, 0

  if opts.blink:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))

    print 'BLINK CHANGELOG URL:'
    print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)

  else:
    # We're done. Let the user know the results in an official manner.
    if good_rev > bad_rev:
      print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    else:
      print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
                                     str(max_chromium_rev))
    if min_blink_rev != max_blink_rev:
      print ('NOTE: There is a Blink roll in the range, '
             'you might also want to do a Blink bisect.')

    print 'CHANGELOG URL:'
    if opts.official_builds:
      print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
    else:
      PrintChangeLog(min_chromium_rev, max_chromium_rev)
# Standard script entry point: propagate main()'s return value as the
# process exit status.
if __name__ == '__main__':
  sys.exit(main())