Use 0repo's merge code rather than 0publish's
[0release.git] / release.py
blob2e068412f32e536956a112ee537429bbd2475ddd
1 # Copyright (C) 2009, Thomas Leonard
2 # See the README file for details, or visit http://0install.net.
4 import os, subprocess, shutil, sys
5 from xml.dom import minidom
6 from zeroinstall import SafeException
7 from zeroinstall.injector import model
8 from zeroinstall.support import ro_rmtree
9 from logging import info, warn
11 sys.path.insert(0, os.environ['RELEASE_0REPO'])
12 from repo import registry, merge
14 import support, compile
15 from scm import get_scm
# XML namespace for the <release:...> elements that configure 0release in a feed.
XMLNS_RELEASE = 'http://zero-install.sourceforge.net/2007/namespaces/0release'

# Phases for which a feed may register <release:action> hook commands.
valid_phases = ['commit-release', 'generate-archive']

# Temporary SCM branch holding the release commit until the user accepts or fails it.
TMP_BRANCH_NAME = '0release-tmp'

# Path to the 0test command; the launcher is expected to set this in the environment.
test_command = os.environ['0TEST']
25 def run_unit_tests(local_feed):
26 print "Running self-tests..."
27 exitstatus = subprocess.call([test_command, '--', local_feed])
28 if exitstatus == 2:
29 print "SKIPPED unit tests for %s (no 'self-test' attribute set)" % local_feed
30 return
31 if exitstatus:
32 raise SafeException("Self-test failed with exit status %d" % exitstatus)
34 def upload_archives(options, status, uploads):
35 # For each binary or source archive in uploads, ensure it is available
36 # from options.archive_dir_public_url
38 # We try to do all the uploads together first, and then verify them all
39 # afterwards. This is because we may have to wait for them to be moved
40 # from an incoming queue before we can test them.
42 def url(archive):
43 return support.get_archive_url(options, status.release_version, archive)
45 # Check that url exists and has the given size
46 def is_uploaded(url, size):
47 if url.startswith('http://TESTING/releases'):
48 return True
50 print "Testing URL %s..." % url
51 try:
52 actual_size = int(support.get_size(url))
53 except Exception, ex:
54 print "Can't get size of '%s': %s" % (url, ex)
55 return False
56 else:
57 if actual_size == size:
58 return True
59 print "WARNING: %s exists, but size is %d, not %d!" % (url, actual_size, size)
60 return False
62 # status.verified_uploads is an array of status flags:
63 description = {
64 'N': 'Upload required',
65 'A': 'Upload has been attempted, but we need to check whether it worked',
66 'V': 'Upload has been checked (exists and has correct size)',
69 if status.verified_uploads is None:
70 # First time around; no point checking for existing uploads
71 status.verified_uploads = 'N' * len(uploads)
72 status.save()
74 while True:
75 print "\nUpload status:"
76 for i, stat in enumerate(status.verified_uploads):
77 print "- %s : %s" % (uploads[i], description[stat])
78 print
80 # Break if finished
81 if status.verified_uploads == 'V' * len(uploads):
82 break
84 # Find all New archives
85 to_upload = []
86 for i, stat in enumerate(status.verified_uploads):
87 assert stat in 'NAV'
88 if stat == 'N':
89 to_upload.append(uploads[i])
90 print "Upload %s/%s as %s" % (status.release_version, uploads[i], url(uploads[i]))
92 cmd = options.archive_upload_command.strip()
94 if to_upload:
95 # Mark all New items as Attempted
96 status.verified_uploads = status.verified_uploads.replace('N', 'A')
97 status.save()
99 # Upload them...
100 if cmd:
101 support.show_and_run(cmd, to_upload)
102 else:
103 if len(to_upload) == 1:
104 print "No upload command is set => please upload the archive manually now"
105 raw_input('Press Return once the archive is uploaded.')
106 else:
107 print "No upload command is set => please upload the archives manually now"
108 raw_input('Press Return once the %d archives are uploaded.' % len(to_upload))
110 # Verify all Attempted uploads
111 new_stat = ''
112 for i, stat in enumerate(status.verified_uploads):
113 assert stat in 'AV', status.verified_uploads
114 if stat == 'A' :
115 if not is_uploaded(url(uploads[i]), os.path.getsize(uploads[i])):
116 print "** Archive '%s' still not uploaded! Try again..." % uploads[i]
117 stat = 'N'
118 else:
119 stat = 'V'
120 new_stat += stat
122 status.verified_uploads = new_stat
123 status.save()
125 if 'N' in new_stat and cmd:
126 raw_input('Press Return to try again.')
128 def do_release(local_feed, options):
129 if options.master_feed_file:
130 options.master_feed_file = os.path.abspath(options.master_feed_file)
132 if not local_feed.feed_for:
133 raise SafeException("Feed %s missing a <feed-for> element" % local_feed.local_path)
135 status = support.Status()
136 local_impl = support.get_singleton_impl(local_feed)
138 local_impl_dir = local_impl.id
139 assert local_impl_dir.startswith('/')
140 local_impl_dir = os.path.realpath(local_impl_dir)
141 assert os.path.isdir(local_impl_dir)
142 assert local_feed.local_path.startswith(local_impl_dir + '/')
144 # From the impl directory to the feed
145 # NOT relative to the archive root (in general)
146 local_iface_rel_path = local_feed.local_path[len(local_impl_dir) + 1:]
147 assert not local_iface_rel_path.startswith('/')
148 assert os.path.isfile(os.path.join(local_impl_dir, local_iface_rel_path))
150 phase_actions = {}
151 for phase in valid_phases:
152 phase_actions[phase] = [] # List of <release:action> elements
154 add_toplevel_dir = None
155 release_management = local_feed.get_metadata(XMLNS_RELEASE, 'management')
156 if len(release_management) == 1:
157 info("Found <release:management> element.")
158 release_management = release_management[0]
159 for x in release_management.childNodes:
160 if x.uri == XMLNS_RELEASE and x.name == 'action':
161 phase = x.getAttribute('phase')
162 if phase not in valid_phases:
163 raise SafeException("Invalid action phase '%s' in local feed %s. Valid actions are:\n%s" % (phase, local_feed.local_path, '\n'.join(valid_phases)))
164 phase_actions[phase].append(x.content)
165 elif x.uri == XMLNS_RELEASE and x.name == 'add-toplevel-directory':
166 add_toplevel_dir = local_feed.get_name()
167 else:
168 warn("Unknown <release:management> element: %s", x)
169 elif len(release_management) > 1:
170 raise SafeException("Multiple <release:management> sections in %s!" % local_feed)
171 else:
172 info("No <release:management> element found in local feed.")
174 scm = get_scm(local_feed, options)
176 # Path relative to the archive / SCM root
177 local_iface_rel_root_path = local_feed.local_path[len(scm.root_dir) + 1:]
179 def run_hooks(phase, cwd, env):
180 info("Running hooks for phase '%s'" % phase)
181 full_env = os.environ.copy()
182 full_env.update(env)
183 for x in phase_actions[phase]:
184 print "[%s]: %s" % (phase, x)
185 support.check_call(x, shell = True, cwd = cwd, env = full_env)
187 def set_to_release():
188 print "Snapshot version is " + local_impl.get_version()
189 release_version = options.release_version
190 if release_version is None:
191 suggested = support.suggest_release_version(local_impl.get_version())
192 release_version = raw_input("Version number for new release [%s]: " % suggested)
193 if not release_version:
194 release_version = suggested
196 scm.ensure_no_tag(release_version)
198 status.head_before_release = scm.get_head_revision()
199 status.save()
201 working_copy = local_impl.id
202 run_hooks('commit-release', cwd = working_copy, env = {'RELEASE_VERSION': release_version})
204 print "Releasing version", release_version
205 support.publish(local_feed.local_path, set_released = 'today', set_version = release_version)
207 support.backup_if_exists(release_version)
208 os.mkdir(release_version)
209 os.chdir(release_version)
211 status.old_snapshot_version = local_impl.get_version()
212 status.release_version = release_version
213 status.head_at_release = scm.commit('Release %s' % release_version, branch = TMP_BRANCH_NAME, parent = 'HEAD')
214 status.save()
216 def set_to_snapshot(snapshot_version):
217 assert snapshot_version.endswith('-post')
218 support.publish(local_feed.local_path, set_released = '', set_version = snapshot_version)
219 scm.commit('Start development series %s' % snapshot_version, branch = TMP_BRANCH_NAME, parent = TMP_BRANCH_NAME)
220 status.new_snapshot_version = scm.get_head_revision()
221 status.save()
223 def ensure_ready_to_release():
224 #if not options.master_feed_file:
225 # raise SafeException("Master feed file not set! Check your configuration")
227 scm.ensure_committed()
228 scm.ensure_versioned(os.path.abspath(local_feed.local_path))
229 info("No uncommitted changes. Good.")
230 # Not needed for GIT. For SCMs where tagging is expensive (e.g. svn) this might be useful.
231 #run_unit_tests(local_impl)
233 scm.grep('\(^\\|[^=]\)\<\\(TODO\\|XXX\\|FIXME\\)\>')
235 def create_feed(target_feed, local_iface_path, archive_file, archive_name, main):
236 shutil.copyfile(local_iface_path, target_feed)
238 support.publish(target_feed,
239 set_main = main,
240 archive_url = support.get_archive_url(options, status.release_version, os.path.basename(archive_file)),
241 archive_file = archive_file,
242 archive_extract = archive_name)
244 def get_previous_release(this_version):
245 """Return the highest numbered verison in the master feed before this_version.
246 @return: version, or None if there wasn't one"""
247 parsed_release_version = model.parse_version(this_version)
249 versions = [model.parse_version(version) for version in scm.get_tagged_versions()]
250 versions = [version for version in versions if version < parsed_release_version]
252 if versions:
253 return model.format_version(max(versions))
254 return None
256 def export_changelog(previous_release):
257 changelog = file('changelog-%s' % status.release_version, 'w')
258 try:
259 try:
260 scm.export_changelog(previous_release, status.head_before_release, changelog)
261 except SafeException, ex:
262 print "WARNING: Failed to generate changelog: " + str(ex)
263 else:
264 print "Wrote changelog from %s to here as %s" % (previous_release or 'start', changelog.name)
265 finally:
266 changelog.close()
268 def fail_candidate():
269 cwd = os.getcwd()
270 assert cwd.endswith(status.release_version)
271 support.backup_if_exists(cwd)
272 scm.delete_branch(TMP_BRANCH_NAME)
273 os.unlink(support.release_status_file)
274 print "Restored to state before starting release. Make your fixes and try again..."
276 def release_via_0repo(new_impls_feed):
277 import repo.cmd
278 support.make_archives_relative(new_impls_feed)
279 oldcwd = os.getcwd()
280 try:
281 repo.cmd.main(['0repo', 'add', '--', new_impls_feed])
282 finally:
283 os.chdir(oldcwd)
285 def release_without_0repo(archive_file, new_impls_feed):
286 assert options.master_feed_file
288 if not options.archive_dir_public_url:
289 raise SafeException("Archive directory public URL is not set! Edit configuration and try again.")
291 if status.updated_master_feed:
292 print "Already added to master feed. Not changing."
293 else:
294 publish_opts = {}
295 if os.path.exists(options.master_feed_file):
296 # Check we haven't already released this version
297 master = support.load_feed(os.path.realpath(options.master_feed_file))
298 existing_releases = [impl for impl in master.implementations.values() if impl.get_version() == status.release_version]
299 if len(existing_releases):
300 raise SafeException("Master feed %s already contains an implementation with version number %s!" % (options.master_feed_file, status.release_version))
302 previous_release = get_previous_release(status.release_version)
303 previous_testing_releases = [impl for impl in master.implementations.values() if impl.get_version() == previous_release
304 and impl.upstream_stability == model.stability_levels["testing"]]
305 if previous_testing_releases:
306 print "The previous release, version %s, is still marked as 'testing'. Set to stable?" % previous_release
307 if support.get_choice(['Yes', 'No']) == 'Yes':
308 publish_opts['select_version'] = previous_release
309 publish_opts['set_stability'] = "stable"
311 support.publish(options.master_feed_file, local = new_impls_feed, xmlsign = True, key = options.key, **publish_opts)
313 status.updated_master_feed = 'true'
314 status.save()
316 # Copy files...
317 uploads = [os.path.basename(archive_file)]
318 for b in compiler.get_binary_feeds():
319 binary_feed = support.load_feed(b)
320 impl, = binary_feed.implementations.values()
321 uploads.append(os.path.basename(impl.download_sources[0].url))
323 upload_archives(options, status, uploads)
325 feed_base = os.path.dirname(list(local_feed.feed_for)[0])
326 feed_files = [options.master_feed_file]
327 print "Upload %s into %s" % (', '.join(feed_files), feed_base)
328 cmd = options.master_feed_upload_command.strip()
329 if cmd:
330 support.show_and_run(cmd, feed_files)
331 else:
332 print "NOTE: No feed upload command set => you'll have to upload them yourself!"
334 def accept_and_publish(archive_file, src_feed_name):
335 if status.tagged:
336 print "Already tagged in SCM. Not re-tagging."
337 else:
338 scm.ensure_committed()
339 head = scm.get_head_revision()
340 if head != status.head_before_release:
341 raise SafeException("Changes committed since we started!\n" +
342 "HEAD was " + status.head_before_release + "\n"
343 "HEAD now " + head)
345 scm.tag(status.release_version, status.head_at_release)
346 scm.reset_hard(TMP_BRANCH_NAME)
347 scm.delete_branch(TMP_BRANCH_NAME)
349 status.tagged = 'true'
350 status.save()
352 assert len(local_feed.feed_for) == 1
354 # Merge the source and binary feeds together first, so
355 # that we update the master feed atomically and only
356 # have to sign it once.
357 with open(src_feed_name, 'rb') as stream:
358 doc = minidom.parse(stream)
359 for b in compiler.get_binary_feeds():
360 with open(b, 'rb') as stream:
361 bin_doc = minidom.parse(b)
362 merge.merge(doc, bin_doc)
363 new_impls_feed = 'merged.xml'
364 with open(new_impls_feed, 'wb') as stream:
365 doc.writexml(stream)
367 # TODO: support uploading to a sub-feed (requires support in 0repo too)
368 master_feed, = local_feed.feed_for
369 repository = registry.lookup(master_feed, missing_ok = True)
370 if repository:
371 release_via_0repo(new_impls_feed)
372 else:
373 release_without_0repo(archive_file, new_impls_feed)
375 os.unlink(new_impls_feed)
377 print "Push changes to public SCM repository..."
378 public_repos = options.public_scm_repository
379 if public_repos:
380 scm.push_head_and_release(status.release_version)
381 else:
382 print "NOTE: No public repository set => you'll have to push the tag and trunk yourself."
384 os.unlink(support.release_status_file)
386 if status.head_before_release:
387 head = scm.get_head_revision()
388 if status.release_version:
389 print "RESUMING release of %s %s" % (local_feed.get_name(), status.release_version)
390 if options.release_version and options.release_version != status.release_version:
391 raise SafeException("Can't start release of version %s; we are currently releasing %s.\nDelete the release-status file to abort the previous release." % (options.release_version, status.release_version))
392 elif head == status.head_before_release:
393 print "Restarting release of %s (HEAD revision has not changed)" % local_feed.get_name()
394 else:
395 raise SafeException("Something went wrong with the last run:\n" +
396 "HEAD revision for last run was " + status.head_before_release + "\n" +
397 "HEAD revision now is " + head + "\n" +
398 "You should revert your working copy to the previous head and try again.\n" +
399 "If you're sure you want to release from the current head, delete '" + support.release_status_file + "'")
400 else:
401 print "Releasing", local_feed.get_name()
403 ensure_ready_to_release()
405 if status.release_version:
406 if not os.path.isdir(status.release_version):
407 raise SafeException("Can't resume; directory %s missing. Try deleting '%s'." % (status.release_version, support.release_status_file))
408 os.chdir(status.release_version)
409 need_set_snapshot = False
410 if status.tagged:
411 print "Already tagged. Resuming the publishing process..."
412 elif status.new_snapshot_version:
413 head = scm.get_head_revision()
414 if head != status.head_before_release:
415 raise SafeException("There are more commits since we started!\n"
416 "HEAD was " + status.head_before_release + "\n"
417 "HEAD now " + head + "\n"
418 "To include them, delete '" + support.release_status_file + "' and try again.\n"
419 "To leave them out, put them on a new branch and reset HEAD to the release version.")
420 else:
421 raise SafeException("Something went wrong previously when setting the new snapshot version.\n" +
422 "Suggest you reset to the original HEAD of\n%s and delete '%s'." % (status.head_before_release, support.release_status_file))
423 else:
424 set_to_release() # Changes directory
425 assert status.release_version
426 need_set_snapshot = True
428 # May be needed by the upload command
429 os.environ['RELEASE_VERSION'] = status.release_version
431 archive_name = support.make_archive_name(local_feed.get_name(), status.release_version)
432 archive_file = archive_name + '.tar.bz2'
434 export_prefix = archive_name
435 if add_toplevel_dir is not None:
436 export_prefix += '/' + add_toplevel_dir
438 if status.created_archive and os.path.isfile(archive_file):
439 print "Archive already created"
440 else:
441 support.backup_if_exists(archive_file)
442 scm.export(export_prefix, archive_file, status.head_at_release)
444 has_submodules = scm.has_submodules()
446 if phase_actions['generate-archive'] or has_submodules:
447 try:
448 support.unpack_tarball(archive_file)
449 if has_submodules:
450 scm.export_submodules(archive_name)
451 run_hooks('generate-archive', cwd = archive_name, env = {'RELEASE_VERSION': status.release_version})
452 info("Regenerating archive (may have been modified by generate-archive hooks...")
453 support.check_call(['tar', 'cjf', archive_file, archive_name])
454 except SafeException:
455 scm.reset_hard(scm.get_current_branch())
456 fail_candidate()
457 raise
459 status.created_archive = 'true'
460 status.save()
462 if need_set_snapshot:
463 set_to_snapshot(status.release_version + '-post')
464 # Revert back to the original revision, so that any fixes the user makes
465 # will get applied before the tag
466 scm.reset_hard(scm.get_current_branch())
468 #backup_if_exists(archive_name)
469 support.unpack_tarball(archive_file)
471 extracted_feed_path = os.path.abspath(os.path.join(export_prefix, local_iface_rel_root_path))
472 assert os.path.isfile(extracted_feed_path), "Local feed not in archive! Is it under version control?"
473 extracted_feed = support.load_feed(extracted_feed_path)
474 extracted_impl = support.get_singleton_impl(extracted_feed)
476 if extracted_impl.main:
477 # Find main executable, relative to the archive root
478 abs_main = os.path.join(os.path.dirname(extracted_feed_path), extracted_impl.id, extracted_impl.main)
479 main = support.relative_path(archive_name + '/', abs_main)
480 if main != extracted_impl.main:
481 print "(adjusting main: '%s' for the feed inside the archive, '%s' externally)" % (extracted_impl.main, main)
482 # XXX: this is going to fail if the feed uses the new <command> syntax
483 if not os.path.exists(abs_main):
484 raise SafeException("Main executable '%s' not found after unpacking archive!" % abs_main)
485 if main == extracted_impl.main:
486 main = None # Don't change the main attribute
487 else:
488 main = None
490 try:
491 if status.src_tests_passed:
492 print "Unit-tests already passed - not running again"
493 else:
494 # Make directories read-only (checks tests don't write)
495 support.make_readonly_recursive(archive_name)
497 run_unit_tests(extracted_feed_path)
498 status.src_tests_passed = True
499 status.save()
500 except SafeException:
501 print "(leaving extracted directory for examination)"
502 fail_candidate()
503 raise
504 # Unpack it again in case the unit-tests changed anything
505 ro_rmtree(archive_name)
506 support.unpack_tarball(archive_file)
508 # Generate feed for source
509 src_feed_name = '%s.xml' % archive_name
510 create_feed(src_feed_name, extracted_feed_path, archive_file, archive_name, main)
511 print "Wrote source feed as %s" % src_feed_name
513 # If it's a source package, compile the binaries now...
514 compiler = compile.Compiler(options, os.path.abspath(src_feed_name), release_version = status.release_version)
515 compiler.build_binaries()
517 previous_release = get_previous_release(status.release_version)
518 export_changelog(previous_release)
520 if status.tagged:
521 raw_input('Already tagged. Press Return to resume publishing process...')
522 choice = 'Publish'
523 else:
524 print "\nCandidate release archive:", archive_file
525 print "(extracted to %s for inspection)" % os.path.abspath(archive_name)
527 print "\nPlease check candidate and select an action:"
528 print "P) Publish candidate (accept)"
529 print "F) Fail candidate (delete release-status file)"
530 if previous_release:
531 print "D) Diff against release archive for %s" % previous_release
532 maybe_diff = ['Diff']
533 else:
534 maybe_diff = []
535 print "(you can also hit CTRL-C and resume this script when done)"
537 while True:
538 choice = support.get_choice(['Publish', 'Fail'] + maybe_diff)
539 if choice == 'Diff':
540 previous_archive_name = support.make_archive_name(local_feed.get_name(), previous_release)
541 previous_archive_file = '../%s/%s.tar.bz2' % (previous_release, previous_archive_name)
543 # For archives created by older versions of 0release
544 if not os.path.isfile(previous_archive_file):
545 old_previous_archive_file = '../%s.tar.bz2' % previous_archive_name
546 if os.path.isfile(old_previous_archive_file):
547 previous_archive_file = old_previous_archive_file
549 if os.path.isfile(previous_archive_file):
550 support.unpack_tarball(previous_archive_file)
551 try:
552 support.show_diff(previous_archive_name, archive_name)
553 finally:
554 shutil.rmtree(previous_archive_name)
555 else:
556 # TODO: download it?
557 print "Sorry, archive file %s not found! Can't show diff." % previous_archive_file
558 else:
559 break
561 info("Deleting extracted archive %s", archive_name)
562 shutil.rmtree(archive_name)
564 if choice == 'Publish':
565 accept_and_publish(archive_file, src_feed_name)
566 else:
567 assert choice == 'Fail'
568 fail_candidate()