1 # Copyright (C) 2009, Thomas Leonard
2 # See the README file for details, or visit http://0install.net.
4 import os
, sys
, subprocess
, shutil
, tempfile
5 from zeroinstall
import SafeException
6 from zeroinstall
.injector
import reader
, model
, qdom
7 from logging
import info
, warn
9 import support
, compile
10 from scm
import get_scm
# XML namespace for 0release's own elements inside feeds
# (<release:management>, <release:action>, ...).
XMLNS_RELEASE = 'http://zero-install.sourceforge.net/2007/namespaces/0release'

# Phases a <release:action> hook may attach to.
valid_phases = ['commit-release', 'generate-archive']

# Temporary SCM branch used while building a release candidate.
TMP_BRANCH_NAME = '0release-tmp'
def run_unit_tests(local_feed, impl):
	"""Run the implementation's self-test, if it declares one.

	local_feed: path of the local feed file, passed to 0launch
	impl: the feed's single implementation; its 'self-test' metadata
	      attribute names the test executable, relative to the impl dir
	Raises SafeException if the self-test exits with a non-zero status.
	"""
	self_test = impl.metadata.get('self-test', None)
	# NOTE(review): this guard is reconstructed — the mangled text printed
	# the SKIP message unconditionally, contradicting the message itself.
	if not self_test:
		print("SKIPPING unit tests for %s (no 'self-test' attribute set)" % impl)
		return
	# Run the test from the directory containing the test executable.
	self_test_dir = os.path.dirname(os.path.join(impl.id, self_test))
	print("Running self-test: %s" % self_test)
	exitstatus = subprocess.call(['0launch', '--main', self_test, local_feed], cwd = self_test_dir)
	# NOTE(review): reconstructed guard — only a failing status is an error.
	if exitstatus:
		raise SafeException("Self-test failed with exit status %d" % exitstatus)
def get_archive_url(options, status, archive):
	"""Return the public download URL for *archive*.

	Expands '$VERSION' in options.archive_dir_public_url using the
	version being released, then appends the archive's filename,
	inserting a '/' separator if the directory URL lacks one.
	"""
	base_url = options.archive_dir_public_url.replace('$VERSION', status.release_version)
	if base_url.endswith('/'):
		return base_url + archive
	return base_url + '/' + archive
# NOTE(review): ensure every archive in `uploads` is available from
# options.archive_dir_public_url, tracking per-archive state in
# status.verified_uploads ('N'/'A'/'V'). This block reached us
# whitespace-mangled AND with source lines missing (gaps in the fused
# original numbering — e.g. the `try:`/`else:` lines and the
# `description = {` opener). Code left byte-identical; do not trust
# the visible control flow until the missing lines are recovered.
35 def upload_archives(options
, status
, uploads
):
36 # For each binary or source archive in uploads, ensure it is available
37 # from options.archive_dir_public_url
39 # We try to do all the uploads together first, and then verify them all
40 # afterwards. This is because we may have to wait for them to be moved
41 # from an incoming queue before we can test them.
# NOTE(review): this `return` belongs to a nested `def url(archive):`
# helper whose def-line is among the missing lines — TODO confirm.
44 return get_archive_url(options
, status
, archive
)
46 # Check that url exists and has the given size
47 def is_uploaded(url
, size
):
48 if url
.startswith('http://TESTING/releases'):
51 print "Testing URL %s..." % url
53 actual_size
= int(support
.get_size(url
))
# NOTE(review): `ex` below has no visible `except` binding — the
# enclosing try/except lines are among the missing lines.
55 print "Can't get size of '%s': %s" % (url
, ex
)
58 if actual_size
== size
:
60 print "WARNING: %s exists, but size is %d, not %d!" % (url
, actual_size
, size
)
63 # status.verified_uploads is an array of status flags:
# NOTE(review): the `description = {` opener for this mapping is among
# the missing lines.
65 'N': 'Upload required',
66 'A': 'Upload has been attempted, but we need to check whether it worked',
67 'V': 'Upload has been checked (exists and has correct size)',
70 if status
.verified_uploads
is None:
71 # First time around; no point checking for existing uploads
72 status
.verified_uploads
= 'N' * len(uploads
)
76 print "\nUpload status:"
77 for i
, stat
in enumerate(status
.verified_uploads
):
78 print "- %s : %s" % (uploads
[i
], description
[stat
])
# All verified: nothing left to do.
82 if status
.verified_uploads
== 'V' * len(uploads
):
85 # Find all New archives
87 for i
, stat
in enumerate(status
.verified_uploads
):
90 to_upload
.append(uploads
[i
])
91 print "Upload %s/%s as %s" % (status
.release_version
, uploads
[i
], url(uploads
[i
]))
93 cmd
= options
.archive_upload_command
.strip()
96 # Mark all New items as Attempted
97 status
.verified_uploads
= status
.verified_uploads
.replace('N', 'A')
102 support
.show_and_run(cmd
, to_upload
)
104 if len(to_upload
) == 1:
105 print "No upload command is set => please upload the archive manually now"
106 raw_input('Press Return once the archive is uploaded.')
108 print "No upload command is set => please upload the archives manually now"
109 raw_input('Press Return once the %d archives are uploaded.' % len(to_upload
))
111 # Verify all Attempted uploads
113 for i
, stat
in enumerate(status
.verified_uploads
):
114 assert stat
in 'AV', status
.verified_uploads
116 if not is_uploaded(url(uploads
[i
]), os
.path
.getsize(uploads
[i
])):
117 print "** Archive '%s' still not uploaded! Try again..." % uploads
[i
]
# NOTE(review): the loop that builds `new_stat` is among the missing lines.
123 status
.verified_uploads
= new_stat
126 if 'N' in new_stat
and cmd
:
127 raw_input('Press Return to try again.')
# Top-level driver: perform (or resume) a release of the program
# described by the local feed `local_iface`, using the settings in
# `options`. NOTE(review): whitespace-mangled with missing source lines
# (numbering gaps); code left byte-identical.
129 def do_release(local_iface
, options
):
130 assert options
.master_feed_file
131 options
.master_feed_file
= os
.path
.abspath(options
.master_feed_file
)
133 if not options
.archive_dir_public_url
:
134 raise SafeException("Downloads directory not set. Edit the 'make-release' script and try again.")
136 status
= support
.Status()
137 local_impl
= support
.get_singleton_impl(local_iface
)
139 local_impl_dir
= local_impl
.id
140 assert local_impl_dir
.startswith('/')
141 local_impl_dir
= os
.path
.realpath(local_impl_dir
)
142 assert os
.path
.isdir(local_impl_dir
)
143 assert local_iface
.uri
.startswith(local_impl_dir
+ '/')
145 # From the impl directory to the feed
146 # NOT relative to the archive root (in general)
147 local_iface_rel_path
= local_iface
.uri
[len(local_impl_dir
) + 1:]
148 assert not local_iface_rel_path
.startswith('/')
149 assert os
.path
.isfile(os
.path
.join(local_impl_dir
, local_iface_rel_path
))
# One list of hook commands per valid phase.
# NOTE(review): the `phase_actions = {}` initialiser is among the
# missing lines.
152 for phase
in valid_phases
:
153 phase_actions
[phase
] = [] # List of <release:action> elements
# Parse the optional <release:management> configuration from the feed.
155 add_toplevel_dir
= None
156 release_management
= local_iface
.get_metadata(XMLNS_RELEASE
, 'management')
157 if len(release_management
) == 1:
158 info("Found <release:management> element.")
159 release_management
= release_management
[0]
160 for x
in release_management
.childNodes
:
161 if x
.uri
== XMLNS_RELEASE
and x
.name
== 'action':
162 phase
= x
.getAttribute('phase')
163 if phase
not in valid_phases
:
164 raise SafeException("Invalid action phase '%s' in local feed %s. Valid actions are:\n%s" % (phase
, local_iface
.uri
, '\n'.join(valid_phases
)))
165 phase_actions
[phase
].append(x
.content
)
166 elif x
.uri
== XMLNS_RELEASE
and x
.name
== 'add-toplevel-directory':
167 add_toplevel_dir
= local_iface
.get_name()
169 warn("Unknown <release:management> element: %s", x
)
170 elif len(release_management
) > 1:
171 raise SafeException("Multiple <release:management> sections in %s!" % local_iface
)
173 info("No <release:management> element found in local feed.")
# Pick the SCM backend (e.g. git) for the checkout containing the feed.
175 scm
= get_scm(local_iface
, options
)
177 # Path relative to the archive / SCM root
178 local_iface_rel_root_path
= local_iface
.uri
[len(scm
.root_dir
) + 1:]
def run_hooks(phase, cwd, env):
	"""Run every <release:action> command registered for *phase*.

	cwd: directory to execute the hook commands in
	env: extra environment variables for the hooks, layered on top of
	     the current os.environ
	"""
	info("Running hooks for phase '%s'" % phase)
	full_env = os.environ.copy()
	# NOTE(review): reconstructed — the mangled text never used `env`,
	# which would leave the parameter dead.
	full_env.update(env)
	for x in phase_actions[phase]:
		print("[%s]: %s" % (phase, x))
		support.check_call(x, shell = True, cwd = cwd, env = full_env)
# Ask the user for the new release number, update the feed, commit it
# on the temporary release branch and cd into a fresh per-release
# directory. NOTE(review): whitespace-mangled with missing source
# lines; code left byte-identical.
188 def set_to_release():
189 print "Snapshot version is " + local_impl
.get_version()
190 suggested
= support
.suggest_release_version(local_impl
.get_version())
191 release_version
= raw_input("Version number for new release [%s]: " % suggested
)
# Empty input means: accept the suggested version.
192 if not release_version
:
193 release_version
= suggested
195 scm
.ensure_no_tag(release_version
)
197 status
.head_before_release
= scm
.get_head_revision()
# NOTE(review): the status.save() call that presumably followed is
# among the missing lines — TODO confirm.
200 working_copy
= local_impl
.id
201 run_hooks('commit-release', cwd
= working_copy
, env
= {'RELEASE_VERSION': release_version
})
203 print "Releasing version", release_version
204 support
.publish(local_iface
.uri
, set_released
= 'today', set_version
= release_version
)
206 support
.backup_if_exists(release_version
)
207 os
.mkdir(release_version
)
208 os
.chdir(release_version
)
210 status
.old_snapshot_version
= local_impl
.get_version()
211 status
.release_version
= release_version
212 status
.head_at_release
= scm
.commit('Release %s' % release_version
, branch
= TMP_BRANCH_NAME
, parent
= 'HEAD')
def set_to_snapshot(snapshot_version):
	"""Switch the feed back to a development ('-post') version and commit it."""
	# Development versions always carry the '-post' suffix.
	assert snapshot_version.endswith('-post')
	commit_message = 'Start development series %s' % snapshot_version
	support.publish(local_iface.uri, set_released = '', set_version = snapshot_version)
	scm.commit(commit_message, branch = TMP_BRANCH_NAME, parent = TMP_BRANCH_NAME)
	status.new_snapshot_version = scm.get_head_revision()
# Sanity checks before starting a release: configuration present, no
# uncommitted changes, feed under version control, and no stray
# TODO/XXX/FIXME markers in the tree.
222 def ensure_ready_to_release():
223 if not options
.master_feed_file
:
224 raise SafeException("Master feed file not set! Check your configuration")
226 scm
.ensure_committed()
227 scm
.ensure_versioned(os
.path
.abspath(local_iface
.uri
))
228 info("No uncommitted changes. Good.")
229 # Not needed for GIT. For SCMs where tagging is expensive (e.g. svn) this might be useful.
230 #run_unit_tests(local_impl)
# Warn about (or abort on) leftover TODO/XXX/FIXME markers — semantics
# depend on scm.grep, defined elsewhere.
232 scm
.grep('\(^\\|[^=]\)\<\\(TODO\\|XXX\\|FIXME\\)\>')
def create_feed(target_feed, local_iface_path, archive_file, archive_name, main):
	"""Create a public release feed from the extracted local feed.

	target_feed: path of the feed file to write
	local_iface_path: the (extracted) local feed to copy as a base
	archive_file: path of the release tarball
	archive_name: top-level directory name inside the tarball
	main: value for the 'main' attribute, relative to the archive root
	"""
	shutil.copyfile(local_iface_path, target_feed)

	support.publish(target_feed,
		# NOTE(review): set_main reconstructed — `main` was otherwise
		# an unused parameter in the mangled text.
		set_main = main,
		archive_url = get_archive_url(options, status, os.path.basename(archive_file)),
		archive_file = archive_file,
		archive_extract = archive_name)
def get_previous_release(this_version):
	"""Return the highest numbered verison in the master feed before this_version.
	@return: version, or None if there wasn't one"""
	parsed_release_version = model.parse_version(this_version)

	if os.path.exists(options.master_feed_file):
		master = model.Interface(os.path.realpath(options.master_feed_file))
		reader.update(master, master.uri, local = True)
		versions = [impl.version for impl in master.implementations.values() if impl.version < parsed_release_version]
		# max() on an empty sequence raises ValueError; no earlier
		# release simply means there is nothing to report.
		if versions:
			return model.format_version(max(versions))
	return None
256 def export_changelog(previous_release
):
257 changelog
= file('changelog-%s' % status
.release_version
, 'w')
260 scm
.export_changelog(previous_release
, status
.head_before_release
, changelog
)
261 except SafeException
, ex
:
262 print "WARNING: Failed to generate changelog: " + str(ex
)
264 print "Wrote changelog from %s to here as %s" % (previous_release
or 'start', changelog
.name
)
def fail_candidate(archive_file):
	"""Roll back a rejected release candidate.

	Backs up and abandons the per-release working directory, deletes the
	temporary release branch and removes the saved status file, returning
	the checkout to its state before the release was started.
	"""
	# NOTE(review): reconstructed — the mangled text referenced `cwd`
	# without ever defining it; the one missing line here fits exactly.
	cwd = os.getcwd()
	# We must currently be inside the directory named after the release.
	assert cwd.endswith(status.release_version)
	support.backup_if_exists(cwd)
	scm.delete_branch(TMP_BRANCH_NAME)
	os.unlink(support.release_status_file)
	print("Restored to state before starting release. Make your fixes and try again...")
# Accept the candidate: tag the release in the SCM, merge the source
# and binary feeds into the signed master feed, upload the archives and
# feeds, and push to the public repository. NOTE(review):
# whitespace-mangled with many missing source lines (numbering gaps —
# e.g. the `if status.tagged:`/`else:`/`try:` scaffolding); code left
# byte-identical.
276 def accept_and_publish(archive_file
, archive_name
, src_feed_name
):
277 assert options
.master_feed_file
279 if not options
.archive_dir_public_url
:
280 raise SafeException("Archive directory public URL is not set! Edit configuration and try again.")
# Tagging step (skipped when already done on a previous, resumed run).
283 print "Already tagged in SCM. Not re-tagging."
285 scm
.ensure_committed()
286 head
= scm
.get_head_revision()
287 if head
!= status
.head_before_release
:
288 raise SafeException("Changes committed since we started!\n" +
289 "HEAD was " + status
.head_before_release
+ "\n"
292 scm
.tag(status
.release_version
, status
.head_at_release
)
293 scm
.reset_hard(TMP_BRANCH_NAME
)
294 scm
.delete_branch(TMP_BRANCH_NAME
)
296 status
.tagged
= 'true'
# Master-feed update step.
299 if status
.updated_master_feed
:
300 print "Already added to master feed. Not changing."
302 if os
.path
.exists(options
.master_feed_file
):
303 # Check we haven't already released this version
304 master
= model
.Interface(os
.path
.realpath(options
.master_feed_file
))
305 reader
.update(master
, master
.uri
, local
= True)
306 existing_releases
= [impl
for impl
in master
.implementations
.values() if impl
.get_version() == status
.release_version
]
307 if len(existing_releases
):
308 raise SafeException("Master feed %s already contains an implementation with version number %s!" % (options
.master_feed_file
, status
.release_version
))
310 # Merge the source and binary feeds together first, so
311 # that we update the master feed atomically and only
312 # have to sign it once.
313 shutil
.copyfile(src_feed_name
, 'merged.xml')
314 for b
in compiler
.get_binary_feeds():
315 support
.publish('merged.xml', local
= b
)
317 support
.publish(options
.master_feed_file
, local
= 'merged.xml', xmlsign
= True, key
= options
.key
)
318 os
.unlink('merged.xml')
320 status
.updated_master_feed
= 'true'
# Collect the source archive plus each binary feed's archive for upload.
324 uploads
= [os
.path
.basename(archive_file
)]
325 for b
in compiler
.get_binary_feeds():
# NOTE(review): the `stream = file(b)` opener for this parse is among
# the missing lines — TODO confirm.
327 binary_feed
= model
.ZeroInstallFeed(qdom
.parse(stream
), local_path
= b
)
329 impl
, = binary_feed
.implementations
.values()
330 uploads
.append(os
.path
.basename(impl
.download_sources
[0].url
))
332 upload_archives(options
, status
, uploads
)
334 assert len(local_iface
.feed_for
) == 1
335 feed_base
= os
.path
.dirname(local_iface
.feed_for
.keys()[0])
336 feed_files
= [options
.master_feed_file
]
337 print "Upload %s into %s" % (', '.join(feed_files
), feed_base
)
338 cmd
= options
.master_feed_upload_command
.strip()
340 support
.show_and_run(cmd
, feed_files
)
342 print "NOTE: No feed upload command set => you'll have to upload them yourself!"
# Finally push the tag and trunk to the public repository, if configured.
344 print "Push changes to public SCM repository..."
345 public_repos
= options
.public_scm_repository
347 scm
.push_head_and_release(status
.release_version
)
349 print "NOTE: No public repository set => you'll have to push the tag and trunk yourself."
# Release finished: discard the saved status file.
351 os
.unlink(support
.release_status_file
)
# Main flow of do_release(): resume detection, tagging, archive
# creation, hook execution, unit tests, binary compilation and the
# final publish/fail menu. NOTE(review): whitespace-mangled with many
# missing source lines (numbering gaps — e.g. `else:`/`try:` lines and
# several guards); code left byte-identical.
353 if status
.head_before_release
:
354 head
= scm
.get_head_revision()
355 if status
.release_version
:
356 print "RESUMING release of %s %s" % (local_iface
.get_name(), status
.release_version
)
357 elif head
== status
.head_before_release
:
358 print "Restarting release of %s (HEAD revision has not changed)" % local_iface
.get_name()
360 raise SafeException("Something went wrong with the last run:\n" +
361 "HEAD revision for last run was " + status
.head_before_release
+ "\n" +
362 "HEAD revision now is " + head
+ "\n" +
363 "You should revert your working copy to the previous head and try again.\n" +
364 "If you're sure you want to release from the current head, delete '" + support
.release_status_file
+ "'")
366 print "Releasing", local_iface
.get_name()
368 ensure_ready_to_release()
# Either resume into the existing per-release directory, or start fresh.
370 if status
.release_version
:
371 if not os
.path
.isdir(status
.release_version
):
372 raise SafeException("Can't resume; directory %s missing. Try deleting '%s'." % (status
.release_version
, support
.release_status_file
))
373 os
.chdir(status
.release_version
)
374 need_set_snapshot
= False
376 print "Already tagged. Resuming the publishing process..."
377 elif status
.new_snapshot_version
:
378 head
= scm
.get_head_revision()
379 if head
!= status
.head_before_release
:
380 raise SafeException("There are more commits since we started!\n"
381 "HEAD was " + status
.head_before_release
+ "\n"
382 "HEAD now " + head
+ "\n"
383 "To include them, delete '" + support
.release_status_file
+ "' and try again.\n"
384 "To leave them out, put them on a new branch and reset HEAD to the release version.")
386 raise SafeException("Something went wrong previously when setting the new snapshot version.\n" +
387 "Suggest you reset to the original HEAD of\n%s and delete '%s'." % (status
.head_before_release
, support
.release_status_file
))
389 set_to_release() # Changes directory
390 assert status
.release_version
391 need_set_snapshot
= True
393 # May be needed by the upload command
394 os
.environ
['VERSION'] = status
.release_version
# Derive the archive name/path from the program name and version.
396 archive_name
= support
.make_archive_name(local_iface
.get_name(), status
.release_version
)
397 archive_file
= archive_name
+ '.tar.bz2'
399 export_prefix
= archive_name
400 if add_toplevel_dir
is not None:
401 export_prefix
+= '/' + add_toplevel_dir
403 if status
.created_archive
and os
.path
.isfile(archive_file
):
404 print "Archive already created"
406 support
.backup_if_exists(archive_file
)
407 scm
.export(export_prefix
, archive_file
, status
.head_at_release
)
409 has_submodules
= scm
.has_submodules()
411 if phase_actions
['generate-archive'] or has_submodules
:
# Unpack, run the generate-archive hooks, then re-pack the tarball.
413 support
.unpack_tarball(archive_file
)
415 scm
.export_submodules(archive_name
)
416 run_hooks('generate-archive', cwd
= archive_name
, env
= {'RELEASE_VERSION': status
.release_version
})
417 info("Regenerating archive (may have been modified by generate-archive hooks...")
418 support
.check_call(['tar', 'cjf', archive_file
, archive_name
])
419 except SafeException
:
420 scm
.reset_hard(scm
.get_current_branch())
421 fail_candidate(archive_file
)
# NOTE(review): the `raise` that presumably follows fail_candidate here
# is among the missing lines — TODO confirm.
424 status
.created_archive
= 'true'
427 if need_set_snapshot
:
428 set_to_snapshot(status
.release_version
+ '-post')
429 # Revert back to the original revision, so that any fixes the user makes
430 # will get applied before the tag
431 scm
.reset_hard(scm
.get_current_branch())
433 #backup_if_exists(archive_name)
434 support
.unpack_tarball(archive_file
)
# Verify the feed inside the archive and work out the 'main' attribute.
436 extracted_iface_path
= os
.path
.abspath(os
.path
.join(export_prefix
, local_iface_rel_root_path
))
437 assert os
.path
.isfile(extracted_iface_path
), "Local feed not in archive! Is it under version control?"
438 extracted_iface
= model
.Interface(extracted_iface_path
)
439 reader
.update(extracted_iface
, extracted_iface_path
, local
= True)
440 extracted_impl
= support
.get_singleton_impl(extracted_iface
)
442 if extracted_impl
.main
:
443 # Find main executable, relative to the archive root
444 abs_main
= os
.path
.join(os
.path
.dirname(extracted_iface_path
), extracted_impl
.id, extracted_impl
.main
)
445 main
= support
.relative_path(archive_name
+ '/', abs_main
)
446 if main
!= extracted_impl
.main
:
447 print "(adjusting main: '%s' for the feed inside the archive, '%s' externally)" % (extracted_impl
.main
, main
)
448 if not os
.path
.exists(abs_main
):
449 raise SafeException("Main executable '%s' not found after unpacking archive!" % abs_main
)
# Run the unit tests against the extracted copy (once).
454 if status
.src_tests_passed
:
455 print "Unit-tests already passed - not running again"
457 run_unit_tests(extracted_iface_path
, extracted_impl
)
458 status
.src_tests_passed
= True
460 except SafeException
:
461 print "(leaving extracted directory for examination)"
462 fail_candidate(archive_file
)
464 # Unpack it again in case the unit-tests changed anything
465 shutil
.rmtree(archive_name
)
466 support
.unpack_tarball(archive_file
)
468 # Generate feed for source
469 stream
= open(extracted_iface_path
)
470 src_feed_name
= '%s.xml' % archive_name
471 create_feed(src_feed_name
, extracted_iface_path
, archive_file
, archive_name
, main
)
472 print "Wrote source feed as %s" % src_feed_name
474 # If it's a source package, compile the binaries now...
475 compiler
= compile.Compiler(options
, os
.path
.abspath(src_feed_name
))
476 compiler
.build_binaries()
478 previous_release
= get_previous_release(status
.release_version
)
479 export_changelog(previous_release
)
# Interactive accept/fail/diff menu for the candidate archive.
482 raw_input('Already tagged. Press Return to resume publishing process...')
485 print "\nCandidate release archive:", archive_file
486 print "(extracted to %s for inspection)" % os
.path
.abspath(archive_name
)
488 print "\nPlease check candidate and select an action:"
489 print "P) Publish candidate (accept)"
490 print "F) Fail candidate (untag)"
492 print "D) Diff against release archive for %s" % previous_release
493 maybe_diff
= ['Diff']
496 print "(you can also hit CTRL-C and resume this script when done)"
499 choice
= support
.get_choice(['Publish', 'Fail'] + maybe_diff
)
# 'Diff' branch: unpack the previous release and show the differences.
501 previous_archive_name
= support
.make_archive_name(local_iface
.get_name(), previous_release
)
502 previous_archive_file
= '../%s/%s.tar.bz2' % (previous_release
, previous_archive_name
)
504 # For archives created by older versions of 0release
505 if not os
.path
.isfile(previous_archive_file
):
506 old_previous_archive_file
= '../%s.tar.bz2' % previous_archive_name
507 if os
.path
.isfile(old_previous_archive_file
):
508 previous_archive_file
= old_previous_archive_file
510 if os
.path
.isfile(previous_archive_file
):
511 support
.unpack_tarball(previous_archive_file
)
513 support
.show_diff(previous_archive_name
, archive_name
)
515 shutil
.rmtree(previous_archive_name
)
518 print "Sorry, archive file %s not found! Can't show diff." % previous_archive_file
# Clean up the extracted candidate before acting on the choice.
522 info("Deleting extracted archive %s", archive_name
)
523 shutil
.rmtree(archive_name
)
525 if choice
== 'Publish':
526 accept_and_publish(archive_file
, archive_name
, src_feed_name
)
528 assert choice
== 'Fail'
529 fail_candidate(archive_file
)