Remember whether we already ran the unit-tests
[0release.git] / release.py
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

import os, sys, subprocess, shutil, tempfile
from zeroinstall import SafeException
from zeroinstall.injector import reader, model, qdom
from logging import info, warn

import support, compile
from scm import get_scm

XMLNS_RELEASE = 'http://zero-install.sourceforge.net/2007/namespaces/0release'

valid_phases = ['commit-release', 'generate-archive']

TMP_BRANCH_NAME = '0release-tmp'

def run_unit_tests(local_feed, impl):
    self_test = impl.metadata.get('self-test', None)
    if self_test is None:
        print "SKIPPING unit tests for %s (no 'self-test' attribute set)" % impl
        return
    self_test_dir = os.path.dirname(os.path.join(impl.id, self_test))
    print "Running self-test:", self_test
    exitstatus = subprocess.call(['0launch', '--main', self_test, local_feed], cwd = self_test_dir)
    if exitstatus:
        raise SafeException("Self-test failed with exit status %d" % exitstatus)
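
# Illustrative note (not part of the original file): run_unit_tests() above relies on
# the feed's <implementation> carrying a 'self-test' attribute. A hypothetical feed
# snippet might look like:
#
#   <implementation version="1.2" self-test="tests/runall.py" ...>
#
# The test is launched via '0launch --main' from the directory containing the
# self-test script, and any non-zero exit status aborts the release.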

def upload_archives(options, status, uploads):
    # For each binary or source archive in uploads, ensure it is available
    # from options.archive_dir_public_url

    # We try to do all the uploads together first, and then verify them all
    # afterwards. This is because we may have to wait for them to be moved
    # from an incoming queue before we can test them.

    # Ensure URL stem ends with a slash
    archive_dir_public_url = options.archive_dir_public_url
    if not archive_dir_public_url.endswith('/'):
        archive_dir_public_url += '/'

    def url(archive):
        return archive_dir_public_url + archive

    # Check that url exists and has the given size
    def is_uploaded(url, size):
        if url.startswith('http://TESTING/releases'):
            return True

        print "Testing URL %s..." % url
        try:
            actual_size = int(support.get_size(url))
        except Exception, ex:
            print "Can't get size of '%s': %s" % (url, ex)
            return False
        else:
            if actual_size == size:
                return True
            print "WARNING: %s exists, but size is %d, not %d!" % (url, actual_size, size)
            return False

    # status.verified_uploads is an array of status flags:
    description = {
        'N': 'Upload required',
        'A': 'Upload has been attempted, but we need to check whether it worked',
        'V': 'Upload has been checked (exists and has correct size)',
    }
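
    # For example, with three archives the flag string moves through states like
    # 'NNN' -> 'AAA' (uploads attempted) -> 'VNV' (the second upload failed
    # verification and is reset to 'N' for another try) -> 'VVV' (all verified).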

    if status.verified_uploads is None:
        # First time around; no point checking for existing uploads
        status.verified_uploads = 'N' * len(uploads)
        status.save()

    while True:
        print "\nUpload status:"
        for i, stat in enumerate(status.verified_uploads):
            print "- %s : %s" % (uploads[i], description[stat])
        print

        # Break if finished
        if status.verified_uploads == 'V' * len(uploads):
            break

        # Find all New archives
        to_upload = []
        for i, stat in enumerate(status.verified_uploads):
            assert stat in 'NAV'
            if stat == 'N':
                to_upload.append(uploads[i])
                print "Upload %s/%s as %s" % (status.release_version, uploads[i], url(uploads[i]))

        if to_upload:
            # Mark all New items as Attempted
            status.verified_uploads = status.verified_uploads.replace('N', 'A')
            status.save()

            # Upload them...
            cmd = options.archive_upload_command.strip()
            if cmd:
                support.show_and_run(cmd, to_upload)
            else:
                if len(to_upload) == 1:
                    print "No upload command is set => please upload the archive manually now"
                    raw_input('Press Return once the archive is uploaded.')
                else:
                    print "No upload command is set => please upload the archives manually now"
                    raw_input('Press Return once the %d archives are uploaded.' % len(to_upload))
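
        # A hypothetical upload command (set in your 'make-release' configuration;
        # the exact variable name and quoting are up to your setup) might look like
        #     scp "$@" user@example.com:/var/www/downloads/
        # on the assumption that support.show_and_run() passes the archive paths
        # through as positional arguments.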

        # Verify all Attempted uploads
        new_stat = ''
        for i, stat in enumerate(status.verified_uploads):
            assert stat in 'AV', status.verified_uploads
            if stat == 'A':
                if not is_uploaded(url(uploads[i]), os.path.getsize(uploads[i])):
                    print "** Archive '%s' still not uploaded! Try again..." % uploads[i]
                    stat = 'N'
                else:
                    stat = 'V'
            new_stat += stat

        status.verified_uploads = new_stat
        status.save()

def do_release(local_iface, options):
    assert options.master_feed_file
    options.master_feed_file = os.path.abspath(options.master_feed_file)

    if not options.archive_dir_public_url:
        raise SafeException("Downloads directory not set. Edit the 'make-release' script and try again.")

    status = support.Status()
    local_impl = support.get_singleton_impl(local_iface)

    local_impl_dir = local_impl.id
    assert local_impl_dir.startswith('/')
    local_impl_dir = os.path.realpath(local_impl_dir)
    assert os.path.isdir(local_impl_dir)
    assert local_iface.uri.startswith(local_impl_dir + '/')

    # From the impl directory to the feed
    # NOT relative to the archive root (in general)
    local_iface_rel_path = local_iface.uri[len(local_impl_dir) + 1:]
    assert not local_iface_rel_path.startswith('/')
    assert os.path.isfile(os.path.join(local_impl_dir, local_iface_rel_path))

    phase_actions = {}
    for phase in valid_phases:
        phase_actions[phase] = []  # List of <release:action> elements

    add_toplevel_dir = None
    release_management = local_iface.get_metadata(XMLNS_RELEASE, 'management')
    if len(release_management) == 1:
        info("Found <release:management> element.")
        release_management = release_management[0]
        for x in release_management.childNodes:
            if x.uri == XMLNS_RELEASE and x.name == 'action':
                phase = x.getAttribute('phase')
                if phase not in valid_phases:
                    raise SafeException("Invalid action phase '%s' in local feed %s. Valid actions are:\n%s" % (phase, local_iface.uri, '\n'.join(valid_phases)))
                phase_actions[phase].append(x.content)
            elif x.uri == XMLNS_RELEASE and x.name == 'add-toplevel-directory':
                add_toplevel_dir = local_iface.get_name()
            else:
                warn("Unknown <release:management> element: %s", x)
    elif len(release_management) > 1:
        raise SafeException("Multiple <release:management> sections in %s!" % local_iface)
    else:
        info("No <release:management> element found in local feed.")

    scm = get_scm(local_iface, options)

    # Path relative to the archive / SCM root
    local_iface_rel_root_path = local_iface.uri[len(scm.root_dir) + 1:]

    def run_hooks(phase, cwd, env):
        info("Running hooks for phase '%s'" % phase)
        full_env = os.environ.copy()
        full_env.update(env)
        for x in phase_actions[phase]:
            print "[%s]: %s" % (phase, x)
            support.check_call(x, shell = True, cwd = cwd, env = full_env)

    def set_to_release():
        print "Snapshot version is " + local_impl.get_version()
        suggested = support.suggest_release_version(local_impl.get_version())
        release_version = raw_input("Version number for new release [%s]: " % suggested)
        if not release_version:
            release_version = suggested

        scm.ensure_no_tag(release_version)

        status.head_before_release = scm.get_head_revision()
        status.save()

        working_copy = local_impl.id
        run_hooks('commit-release', cwd = working_copy, env = {'RELEASE_VERSION': release_version})

        print "Releasing version", release_version
        support.publish(local_iface.uri, set_released = 'today', set_version = release_version)

        support.backup_if_exists(release_version)
        os.mkdir(release_version)
        os.chdir(release_version)

        status.old_snapshot_version = local_impl.get_version()
        status.release_version = release_version
        status.head_at_release = scm.commit('Release %s' % release_version, branch = TMP_BRANCH_NAME, parent = 'HEAD')
        status.save()

    def set_to_snapshot(snapshot_version):
        assert snapshot_version.endswith('-post')
        support.publish(local_iface.uri, set_released = '', set_version = snapshot_version)
        scm.commit('Start development series %s' % snapshot_version, branch = TMP_BRANCH_NAME, parent = TMP_BRANCH_NAME)
        status.new_snapshot_version = scm.get_head_revision()
        status.save()
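
    # Version cycle sketch: if the working copy is currently at a snapshot such as
    # '1.1-post', set_to_release() publishes the release version the user picks
    # (support.suggest_release_version() proposes a default; its exact rule lives
    # in support.py and is not shown here) as a commit on the temporary branch,
    # and set_to_snapshot() then starts the next development series as
    # '<release_version>-post'.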

    def ensure_ready_to_release():
        if not options.master_feed_file:
            raise SafeException("Master feed file not set! Check your configuration")

        scm.ensure_committed()
        scm.ensure_versioned(os.path.abspath(local_iface.uri))
        info("No uncommitted changes. Good.")
        # Not needed for GIT. For SCMs where tagging is expensive (e.g. svn) this might be useful.
        #run_unit_tests(local_impl)

        scm.grep('\(^\\|[^=]\)\<\\(TODO\\|XXX\\|FIXME\\)\>')
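
    # The grep above flags TODO, XXX or FIXME appearing as a whole word at the
    # start of a line or after any character other than '='. For example a line
    # such as
    #     raise Exception("FIXME: not implemented")
    # would be reported, while something like 'mode=TODO' would not.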

    def create_feed(target_feed, local_iface_path, archive_file, archive_name, main):
        shutil.copyfile(local_iface_path, target_feed)

        support.publish(target_feed,
            set_main = main,
            archive_url = options.archive_dir_public_url + '/' + os.path.basename(archive_file),
            archive_file = archive_file,
            archive_extract = archive_name)

    def get_previous_release(this_version):
        """Return the highest numbered version in the master feed before this_version.
        @return: version, or None if there wasn't one"""
        parsed_release_version = model.parse_version(this_version)

        if os.path.exists(options.master_feed_file):
            master = model.Interface(os.path.realpath(options.master_feed_file))
            reader.update(master, master.uri, local = True)
            versions = [impl.version for impl in master.implementations.values() if impl.version < parsed_release_version]
            if versions:
                return model.format_version(max(versions))
        return None
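
    # Note that model.parse_version() is used so versions compare numerically by
    # component (e.g. '0.10' counts as later than '0.9'), rather than as plain
    # strings; the "highest release before this one" logic above depends on that.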

    def export_changelog(previous_release):
        changelog = file('changelog-%s' % status.release_version, 'w')
        try:
            try:
                scm.export_changelog(previous_release, status.head_before_release, changelog)
            except SafeException, ex:
                print "WARNING: Failed to generate changelog: " + str(ex)
            else:
                print "Wrote changelog from %s to here as %s" % (previous_release or 'start', changelog.name)
        finally:
            changelog.close()

    def fail_candidate(archive_file):
        cwd = os.getcwd()
        assert cwd.endswith(status.release_version)
        support.backup_if_exists(cwd)
        scm.delete_branch(TMP_BRANCH_NAME)
        os.unlink(support.release_status_file)
        print "Restored to state before starting release. Make your fixes and try again..."

    def accept_and_publish(archive_file, archive_name, src_feed_name):
        assert options.master_feed_file

        if not options.archive_dir_public_url:
            raise SafeException("Archive directory public URL is not set! Edit configuration and try again.")

        if status.tagged:
            print "Already tagged in SCM. Not re-tagging."
        else:
            scm.ensure_committed()
            head = scm.get_head_revision()
            if head != status.head_before_release:
                raise SafeException("Changes committed since we started!\n" +
                        "HEAD was " + status.head_before_release + "\n"
                        "HEAD now " + head)

            scm.tag(status.release_version, status.head_at_release)
            scm.reset_hard(TMP_BRANCH_NAME)
            scm.delete_branch(TMP_BRANCH_NAME)

            status.tagged = 'true'
            status.save()

        if status.updated_master_feed:
            print "Already added to master feed. Not changing."
        else:
            if os.path.exists(options.master_feed_file):
                # Check we haven't already released this version
                master = model.Interface(os.path.realpath(options.master_feed_file))
                reader.update(master, master.uri, local = True)
                existing_releases = [impl for impl in master.implementations.values() if impl.get_version() == status.release_version]
                if len(existing_releases):
                    raise SafeException("Master feed %s already contains an implementation with version number %s!" % (options.master_feed_file, status.release_version))

            # Merge the source and binary feeds together first, so
            # that we update the master feed atomically and only
            # have to sign it once.
            shutil.copyfile(src_feed_name, 'merged.xml')
            for b in compiler.get_binary_feeds():
                support.publish('merged.xml', local = b)

            support.publish(options.master_feed_file, local = 'merged.xml', xmlsign = True, key = options.key)
            os.unlink('merged.xml')

            status.updated_master_feed = 'true'
            status.save()

        # Copy files...
        uploads = [os.path.basename(archive_file)]
        for b in compiler.get_binary_feeds():
            stream = file(b)
            binary_feed = model.ZeroInstallFeed(qdom.parse(stream), local_path = b)
            stream.close()
            impl, = binary_feed.implementations.values()
            uploads.append(os.path.basename(impl.download_sources[0].url))

        upload_archives(options, status, uploads)

        assert len(local_iface.feed_for) == 1
        feed_base = os.path.dirname(local_iface.feed_for.keys()[0])
        feed_files = [options.master_feed_file]
        print "Upload %s into %s" % (', '.join(feed_files), feed_base)
        cmd = options.master_feed_upload_command.strip()
        if cmd:
            support.show_and_run(cmd, feed_files)
        else:
            print "NOTE: No feed upload command set => you'll have to upload them yourself!"

        print "Push changes to public SCM repository..."
        public_repos = options.public_scm_repository
        if public_repos:
            scm.push_head_and_release(status.release_version)
        else:
            print "NOTE: No public repository set => you'll have to push the tag and trunk yourself."

        os.unlink(support.release_status_file)

    if status.head_before_release:
        head = scm.get_head_revision()
        if status.release_version:
            print "RESUMING release of %s %s" % (local_iface.get_name(), status.release_version)
        elif head == status.head_before_release:
            print "Restarting release of %s (HEAD revision has not changed)" % local_iface.get_name()
        else:
            raise SafeException("Something went wrong with the last run:\n" +
                "HEAD revision for last run was " + status.head_before_release + "\n" +
                "HEAD revision now is " + head + "\n" +
                "You should revert your working copy to the previous head and try again.\n" +
                "If you're sure you want to release from the current head, delete '" + support.release_status_file + "'")
    else:
        print "Releasing", local_iface.get_name()

    ensure_ready_to_release()

    if status.release_version:
        if not os.path.isdir(status.release_version):
            raise SafeException("Can't resume; directory %s missing. Try deleting '%s'." % (status.release_version, support.release_status_file))
        os.chdir(status.release_version)
        need_set_snapshot = False
        if status.tagged:
            print "Already tagged. Resuming the publishing process..."
        elif status.new_snapshot_version:
            head = scm.get_head_revision()
            if head != status.head_before_release:
                raise SafeException("There are more commits since we started!\n"
                    "HEAD was " + status.head_before_release + "\n"
                    "HEAD now " + head + "\n"
                    "To include them, delete '" + support.release_status_file + "' and try again.\n"
                    "To leave them out, put them on a new branch and reset HEAD to the release version.")
        else:
            raise SafeException("Something went wrong previously when setting the new snapshot version.\n" +
                "Suggest you reset to the original HEAD of\n%s and delete '%s'." % (status.head_before_release, support.release_status_file))
    else:
        set_to_release()  # Changes directory
        assert status.release_version
        need_set_snapshot = True

    archive_name = support.make_archive_name(local_iface.get_name(), status.release_version)
    archive_file = archive_name + '.tar.bz2'
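
    # For example, releasing version 1.2 of a project called "myprog" would
    # (assuming support.make_archive_name() builds the usual "name-version" form)
    # give archive_name 'myprog-1.2' and archive_file 'myprog-1.2.tar.bz2'.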

    export_prefix = archive_name
    if add_toplevel_dir is not None:
        export_prefix += '/' + add_toplevel_dir

    if status.created_archive and os.path.isfile(archive_file):
        print "Archive already created"
    else:
        support.backup_if_exists(archive_file)
        scm.export(export_prefix, archive_file, status.head_at_release)

        has_submodules = scm.has_submodules()

        if phase_actions['generate-archive'] or has_submodules:
            try:
                support.unpack_tarball(archive_file)
                if has_submodules:
                    scm.export_submodules(archive_name)
                run_hooks('generate-archive', cwd = archive_name, env = {'RELEASE_VERSION': status.release_version})
409 info("Regenerating archive (may have been modified by generate-archive hooks...")
                support.check_call(['tar', 'cjf', archive_file, archive_name])
            except SafeException:
                scm.reset_hard(scm.get_current_branch())
                fail_candidate(archive_file)
                raise

        status.created_archive = 'true'
        status.save()

    if need_set_snapshot:
        set_to_snapshot(status.release_version + '-post')
        # Revert back to the original revision, so that any fixes the user makes
        # will get applied before the tag
        scm.reset_hard(scm.get_current_branch())

    #backup_if_exists(archive_name)
    support.unpack_tarball(archive_file)

    extracted_iface_path = os.path.abspath(os.path.join(export_prefix, local_iface_rel_root_path))
    assert os.path.isfile(extracted_iface_path), "Local feed not in archive! Is it under version control?"
    extracted_iface = model.Interface(extracted_iface_path)
    reader.update(extracted_iface, extracted_iface_path, local = True)
    extracted_impl = support.get_singleton_impl(extracted_iface)

    if extracted_impl.main:
        # Find main executable, relative to the archive root
        abs_main = os.path.join(os.path.dirname(extracted_iface_path), extracted_impl.main)
        main = support.relative_path(archive_name + '/', abs_main)
        if main != extracted_impl.main:
            print "(adjusting main: '%s' for the feed inside the archive, '%s' externally)" % (extracted_impl.main, main)
        if not os.path.exists(abs_main):
            raise SafeException("Main executable '%s' not found after unpacking archive!" % abs_main)
    else:
        main = None
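
    # Example of the 'main' adjustment above: if the feed inside the archive is
    # myprog-1.2/subdir/myprog.xml and declares main="myprog", the executable's
    # path relative to the archive root is 'subdir/myprog', so that is the value
    # written into the generated source feed.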

    try:
        if status.src_tests_passed:
            print "Unit-tests already passed - not running again"
        else:
            run_unit_tests(extracted_iface_path, extracted_impl)
            status.src_tests_passed = True
            status.save()
    except SafeException:
        print "(leaving extracted directory for examination)"
        fail_candidate(archive_file)
        raise
    # Unpack it again in case the unit-tests changed anything
    shutil.rmtree(archive_name)
    support.unpack_tarball(archive_file)

    # Generate feed for source
    stream = open(extracted_iface_path)
    src_feed_name = '%s.xml' % archive_name
    create_feed(src_feed_name, extracted_iface_path, archive_file, archive_name, main)
    print "Wrote source feed as %s" % src_feed_name

    # If it's a source package, compile the binaries now...
    compiler = compile.Compiler(options, os.path.abspath(src_feed_name))
    compiler.build_binaries()

    previous_release = get_previous_release(status.release_version)
    export_changelog(previous_release)

    print "\nCandidate release archive:", archive_file
    print "(extracted to %s for inspection)" % os.path.abspath(archive_name)

    print "\nPlease check candidate and select an action:"
    print "P) Publish candidate (accept)"
    print "F) Fail candidate (untag)"
    if previous_release:
        print "D) Diff against release archive for %s" % previous_release
        maybe_diff = ['Diff']
    else:
        maybe_diff = []
    print "(you can also hit CTRL-C and resume this script when done)"

    while True:
        choice = support.get_choice(['Publish', 'Fail'] + maybe_diff)
        if choice == 'Diff':
            previous_archive_name = support.make_archive_name(local_iface.get_name(), previous_release)
            previous_archive_file = '../%s/%s.tar.bz2' % (previous_release, previous_archive_name)

            # For archives created by older versions of 0release
            if not os.path.isfile(previous_archive_file):
                old_previous_archive_file = '../%s.tar.bz2' % previous_archive_name
                if os.path.isfile(old_previous_archive_file):
                    previous_archive_file = old_previous_archive_file

            if os.path.isfile(previous_archive_file):
                support.unpack_tarball(previous_archive_file)
                try:
                    support.show_diff(previous_archive_name, archive_name)
                finally:
                    shutil.rmtree(previous_archive_name)
            else:
                # TODO: download it?
                print "Sorry, archive file %s not found! Can't show diff." % previous_archive_file
        else:
            break

    info("Deleting extracted archive %s", archive_name)
    shutil.rmtree(archive_name)

    if choice == 'Publish':
        accept_and_publish(archive_file, archive_name, src_feed_name)
    else:
        assert choice == 'Fail'
        fail_candidate(archive_file)