From b0d5231e7aaae88c9c9f9f0ac10aa04d9b732359 Mon Sep 17 00:00:00 2001 From: Thomas Leonard Date: Wed, 30 Jan 2008 20:27:24 +0000 Subject: [PATCH] Split out fetching code into fetch.py. --- tests/testautopolicy.py | 11 +- zeroinstall/0launch-gui/gui.py | 7 +- zeroinstall/0launch-gui/iface_browser.py | 2 +- zeroinstall/0launch-gui/mainwindow.py | 16 +- zeroinstall/injector/autopolicy.py | 28 +-- zeroinstall/injector/cli.py | 8 +- zeroinstall/injector/fetch.py | 212 +++++++++++++++++++++ zeroinstall/injector/handler.py | 16 +- zeroinstall/injector/iface_cache.py | 2 +- zeroinstall/injector/policy.py | 306 ++++++------------------------- zeroinstall/support/tasks.py | 7 +- 11 files changed, 311 insertions(+), 304 deletions(-) create mode 100644 zeroinstall/injector/fetch.py diff --git a/tests/testautopolicy.py b/tests/testautopolicy.py index 955c1e1..c6131bc 100755 --- a/tests/testautopolicy.py +++ b/tests/testautopolicy.py @@ -124,6 +124,7 @@ class TestAutoPolicy(BaseTest): policy.network_use = model.network_full policy.recalculate() assert policy.need_download() + assert policy.ready try: policy.execute([], main = 'NOTHING') assert False @@ -228,7 +229,7 @@ class TestAutoPolicy(BaseTest): policy.network_use = model.network_full policy.recalculate() assert policy.ready - foo_iface = policy.get_interface(foo_iface_uri) + foo_iface = iface_cache.iface_cache.get_interface(foo_iface_uri) self.assertEquals('sha1=123', policy.implementation[foo_iface].id) def testBadConfig(self): @@ -260,7 +261,7 @@ class TestAutoPolicy(BaseTest): download_only = False) policy.network_use = model.network_offline try: - policy.get_interface(foo_iface_uri) + iface_cache.iface_cache.get_interface(foo_iface_uri) assert False except reader.InvalidInterface, ex: assert 'Invalid feed URL' in str(ex) @@ -286,7 +287,7 @@ class TestAutoPolicy(BaseTest): except NeedDownload, ex: pass - iface = policy.get_interface(foo_iface_uri) + iface = iface_cache.iface_cache.get_interface(foo_iface_uri) iface._main_feed.feeds = [model.Feed('/BadFeed', None, False)] logger.setLevel(logging.ERROR) @@ -391,8 +392,8 @@ class TestAutoPolicy(BaseTest): #logger.setLevel(logging.DEBUG) policy.recalculate() #logger.setLevel(logging.WARN) - foo_iface = policy.get_interface(foo_iface_uri) - bar_iface = policy.get_interface('http://bar') + foo_iface = iface_cache.iface_cache.get_interface(foo_iface_uri) + bar_iface = iface_cache.iface_cache.get_interface('http://bar') assert policy.implementation[bar_iface].id == 'sha1=200' dep = policy.implementation[foo_iface].dependencies['http://bar'] diff --git a/zeroinstall/0launch-gui/gui.py b/zeroinstall/0launch-gui/gui.py index 47926ed..64e3f17 100644 --- a/zeroinstall/0launch-gui/gui.py +++ b/zeroinstall/0launch-gui/gui.py @@ -107,11 +107,6 @@ class GUIPolicy(Policy): return True # Need to download something; check first return False - def store_icon(self, interface, stream): - Policy.store_icon(self, interface, stream) - if self.window: - self.window.browser.build_tree() - def update_display(self): self.window.set_response_sensitive(gtk.RESPONSE_OK, self.ready) @@ -120,7 +115,7 @@ class GUIPolicy(Policy): # If we have feeds then treat this as an update check, # even if we've never seen the main interface before. # Used the first time the GUI is used, for example. 
- root = self.get_interface(self.root) + root = iface_cache.get_interface(self.root) if root.name is not None or root.feeds: self.checking = CheckingBox(root) diff --git a/zeroinstall/0launch-gui/iface_browser.py b/zeroinstall/0launch-gui/iface_browser.py index b8ed6e6..5df9472 100644 --- a/zeroinstall/0launch-gui/iface_browser.py +++ b/zeroinstall/0launch-gui/iface_browser.py @@ -296,7 +296,7 @@ class InterfaceBrowser: else: fetch = '(cached)' else: - src = policy.get_best_source(impl) + src = policy.fetcher.get_best_source(impl) if src: fetch = support.pretty_size(src.size) else: diff --git a/zeroinstall/0launch-gui/mainwindow.py b/zeroinstall/0launch-gui/mainwindow.py index 8386aa5..be4d8d9 100644 --- a/zeroinstall/0launch-gui/mainwindow.py +++ b/zeroinstall/0launch-gui/mainwindow.py @@ -84,15 +84,17 @@ class MainWindow: @tasks.async def download_and_run(self, run_button, cancelled): try: - downloaded = policy.download_impls() + downloaded = policy.download_uncached_implementations() - blockers = [downloaded, cancelled] - yield blockers - tasks.check(blockers) + if downloaded: + # We need to wait until everything is downloaded... + blockers = [downloaded, cancelled] + yield blockers + tasks.check(blockers) - if cancelled.happened: - policy.abort_all_downloads() - return + if cancelled.happened: + policy.abort_all_downloads() + return if policy.get_uncached_implementations(): dialog.alert('Not all downloads succeeded; cannot run program.') diff --git a/zeroinstall/injector/autopolicy.py b/zeroinstall/injector/autopolicy.py index 15483a6..de29235 100644 --- a/zeroinstall/injector/autopolicy.py +++ b/zeroinstall/injector/autopolicy.py @@ -10,7 +10,7 @@ is also the policy used to run the injector's GUI. # See the README file for details, or visit http://0install.net. import os -from logging import debug, info +from logging import debug, info, warn from zeroinstall.support import tasks from zeroinstall.injector import model, policy, run @@ -18,33 +18,23 @@ from zeroinstall.injector.handler import Handler from zeroinstall import NeedDownload class AutoPolicy(policy.Policy): - __slots__ = ['allow_downloads', 'download_only', 'dry_run'] + __slots__ = ['download_only'] def __init__(self, interface_uri, download_only = False, dry_run = False, src = False, handler = None): """@param handler: (new in 0.30) handler to use, or None to create a L{Handler}""" - policy.Policy.__init__(self, interface_uri, handler or Handler(), src = src) - self.dry_run = dry_run - self.allow_downloads = not dry_run + handler = handler or Handler() + if dry_run: + info("Note: dry_run is deprecated. 
Pass it to the handler instead!") + handler.dry_run = True + policy.Policy.__init__(self, interface_uri, handler, src = src) self.download_only = download_only - self.dry_run = dry_run - - def download_and_import_feed(self, feed_url, force = False): - if self.dry_run or not self.allow_downloads: - raise NeedDownload(feed_url) - else: - return policy.Policy.download_and_import_feed(self, feed_url, force) - - def download_archive(self, download_source, force = False): - if self.dry_run or not self.allow_downloads: - raise NeedDownload(download_source.url) - return policy.Policy.download_archive(self, download_source, force = force) def execute(self, prog_args, main = None, wrapper = None): - downloaded = self.download_impls() + downloaded = self.download_uncached_implementations() if downloaded: self.handler.wait_for_blocker(downloaded) if not self.download_only: - run.execute(self, prog_args, dry_run = self.dry_run, main = main, wrapper = wrapper) + run.execute(self, prog_args, dry_run = self.handler.dry_run, main = main, wrapper = wrapper) else: info("Downloads done (download-only mode)") diff --git a/zeroinstall/injector/cli.py b/zeroinstall/injector/cli.py index 2c36df8..6c66b4b 100755 --- a/zeroinstall/injector/cli.py +++ b/zeroinstall/injector/cli.py @@ -151,7 +151,7 @@ def _normal_mode(options, args): if options.main: raise model.SafeException("Can't use --main with --get-selections") - # Note that need_download() triggers a recalculate() + # Note that need_download() triggers a solve if options.refresh or options.gui: # We could run immediately, but the user asked us not to can_run_immediately = False @@ -176,7 +176,11 @@ def _normal_mode(options, args): if options.get_selections: _get_selections(policy) else: - policy.execute(args[1:], main = options.main, wrapper = options.wrapper) + if not options.download_only: + from zeroinstall.injector import run + run.execute(policy, args[1:], dry_run = options.dry_run, main = options.main, wrapper = options.wrapper) + else: + logging.info("Downloads done (download-only mode)") assert options.dry_run or options.download_only return diff --git a/zeroinstall/injector/fetch.py b/zeroinstall/injector/fetch.py new file mode 100644 index 0000000..14df77c --- /dev/null +++ b/zeroinstall/injector/fetch.py @@ -0,0 +1,212 @@ +""" +Chooses a set of implementations based on a policy. + +@deprecated: see L{solver} +""" + +# Copyright (C) 2008, Thomas Leonard +# See the README file for details, or visit http://0install.net. + +import os +from logging import info, debug, warn + +from zeroinstall.support import tasks, basedir +from zeroinstall.injector.namespaces import XMLNS_IFACE, config_site +from zeroinstall.injector.model import DownloadSource, Recipe, SafeException, network_offline, escape +from zeroinstall.injector.iface_cache import iface_cache, PendingFeed + +class Fetcher(object): + __slots__ = ['handler'] + + def __init__(self, handler): + self.handler = handler + + @tasks.async + def cook(self, required_digest, recipe, force = False): + """A Cook follows a Recipe. + @see: L{download_impl}""" + # Maybe we're taking this metaphor too far? + + # Start downloading all the ingredients. 
+ downloads = {} # Downloads that are not yet successful + streams = {} # Streams collected from successful downloads + + # Start a download for each ingredient + blockers = [] + for step in recipe.steps: + blocker, stream = self.download_archive(step, force = force) + assert stream + blockers.append(blocker) + streams[step] = stream + + while blockers: + yield blockers + tasks.check(blockers) + blockers = [b for b in blockers if not b.happened] + + from zeroinstall.zerostore import unpack + + # Create an empty directory for the new implementation + store = iface_cache.stores.stores[0] + tmpdir = store.get_tmp_dir_for(required_digest) + try: + # Unpack each of the downloaded archives into it in turn + for step in recipe.steps: + stream = streams[step] + stream.seek(0) + unpack.unpack_archive_over(step.url, stream, tmpdir, step.extract) + # Check that the result is correct and store it in the cache + store.check_manifest_and_rename(required_digest, tmpdir) + tmpdir = None + finally: + # If unpacking fails, remove the temporary directory + if tmpdir is not None: + from zeroinstall import support + support.ro_rmtree(tmpdir) + + def download_and_import_feed(self, feed_url, force = False): + """Download the feed, download any required keys, confirm trust if needed and import.""" + + debug("download_and_import_feed %s (force = %d)", feed_url, force) + assert not feed_url.startswith('/') + + dl = self.handler.get_download(feed_url, force = force) + + @tasks.named_async("fetch_feed " + feed_url) + def fetch_feed(): + stream = dl.tempfile + + yield dl.downloaded + tasks.check(dl.downloaded) + + pending = PendingFeed(feed_url, stream) + iface_cache.add_pending(pending) + + keys_downloaded = tasks.Task(pending.download_keys(self.handler), "download keys for " + feed_url) + yield keys_downloaded.finished + tasks.check(keys_downloaded.finished) + + iface = iface_cache.get_interface(pending.url) + if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml): + blocker = self.handler.confirm_trust_keys(iface, pending.sigs, pending.new_xml) + if blocker: + yield blocker + tasks.check(blocker) + if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml): + raise SafeException("No signing keys trusted; not importing") + + return fetch_feed() + + @tasks.async + def download_impl(self, impl, retrieval_method, force = False): + """Download impl, using retrieval_method.""" + assert impl + assert retrieval_method + + from zeroinstall.zerostore import manifest + alg = impl.id.split('=', 1)[0] + if alg not in manifest.algorithms: + raise SafeException("Unknown digest algorithm '%s' for '%s' version %s" % + (alg, impl.feed.get_name(), impl.get_version())) + + if isinstance(retrieval_method, DownloadSource): + blocker, stream = self.download_archive(retrieval_method, force = force) + yield blocker + tasks.check(blocker) + + stream.seek(0) + iface_cache.add_to_cache(retrieval_method, stream) + elif isinstance(retrieval_method, Recipe): + blocker = self.cook(impl.id, retrieval_method, force) + yield blocker + tasks.check(blocker) + else: + raise Exception("Unknown download type for '%s'" % retrieval_method) + + def download_archive(self, download_source, force = False): + """Fetch an archive. 
You should normally call L{begin_impl_download} + instead, since it handles other kinds of retrieval method too.""" + from zeroinstall.zerostore import unpack + mime_type = download_source.type + if not mime_type: + mime_type = unpack.type_from_url(download_source.url) + if not mime_type: + raise SafeException("No 'type' attribute on archive, and I can't guess from the name (%s)" % download_source.url) + unpack.check_type_ok(mime_type) + dl = self.handler.get_download(download_source.url, force = force) + dl.expected_size = download_source.size + (download_source.start_offset or 0) + return (dl.downloaded, dl.tempfile) + + def download_icon(self, interface, force = False): + """Download an icon for this interface and add it to the + icon cache. If the interface has no icon or we are offline, do nothing. + @return: the task doing the import, or None + @rtype: L{tasks.Task}""" + debug("download_icon %s (force = %d)", interface, force) + + # Find a suitable icon to download + for icon in interface.get_metadata(XMLNS_IFACE, 'icon'): + type = icon.getAttribute('type') + if type != 'image/png': + debug('Skipping non-PNG icon') + continue + source = icon.getAttribute('href') + if source: + break + warn('Missing "href" attribute on in %s', interface) + else: + info('No PNG icons found in %s', interface) + return + + dl = self.handler.get_download(source, force = force) + + @tasks.async + def download_and_add_icon(): + stream = dl.tempfile + yield dl.downloaded + try: + tasks.check(dl.downloaded) + stream.seek(0) + + import shutil + icons_cache = basedir.save_cache_path(config_site, 'interface_icons') + icon_file = file(os.path.join(icons_cache, escape(interface.uri)), 'w') + shutil.copyfileobj(stream, icon_file) + except Exception, ex: + self.handler.report_error(ex) + + return download_and_add_icon() + + def download_impls(self, implementations): + """Download the given implementations, choosing a suitable retrieval method for each.""" + blockers = [] + + for impl in implementations: + debug("start_downloading_impls: for %s get %s", impl.feed, impl) + source = self.get_best_source(impl) + if not source: + raise SafeException("Implementation " + impl.id + " of " + "interface " + impl.feed.get_name() + " cannot be " + "downloaded (no download locations given in " + "interface!)") + blockers.append(self.download_impl(impl, source)) + + if not blockers: + return None + + @tasks.async + def download_impls(blockers): + while blockers: + yield blockers + tasks.check(blockers) + + blockers = [b for b in blockers if not b.happened] + return download_impls(blockers) + + def get_best_source(self, impl): + """Return the best download source for this implementation. + @rtype: L{model.RetrievalMethod}""" + if impl.download_sources: + return impl.download_sources[0] + return None + diff --git a/zeroinstall/injector/handler.py b/zeroinstall/injector/handler.py index bff6a41..90eb263 100644 --- a/zeroinstall/injector/handler.py +++ b/zeroinstall/injector/handler.py @@ -13,6 +13,7 @@ To do this, you supply a L{Handler} to the L{policy}. 
import os, sys from logging import debug, info, warn +from zeroinstall import NeedDownload from zeroinstall.support import tasks from zeroinstall.injector import model, download from zeroinstall.injector.iface_cache import iface_cache @@ -25,12 +26,12 @@ class Handler(object): @type monitored_downloads: {URL: (error_stream, L{download.Download})} """ - __slots__ = ['monitored_downloads', '_loop', '_loop_errors'] + __slots__ = ['monitored_downloads', '_loop', 'dry_run'] - def __init__(self, mainloop = None): + def __init__(self, mainloop = None, dry_run = False): self.monitored_downloads = {} self._loop = None - self._loop_errors = None + self.dry_run = dry_run def monitor_download(self, dl): """Called when a new L{download} is started. @@ -79,6 +80,9 @@ class Handler(object): a new one. @rtype: L{download.Download} """ + if self.dry_run: + raise NeedDownload(url) + try: dl = self.monitored_downloads[url] if dl and force: @@ -133,8 +137,4 @@ class Handler(object): @param exception: the exception to report @type exception: L{SafeException} @since: 0.25""" - if self._loop_errors is None: - warn("%s", exception) - else: - self._loop_errors.append(str(exception)) - info("%s", exception) # (will get reported later) + warn("%s", exception) diff --git a/zeroinstall/injector/iface_cache.py b/zeroinstall/injector/iface_cache.py index 00253dd..437bd29 100644 --- a/zeroinstall/injector/iface_cache.py +++ b/zeroinstall/injector/iface_cache.py @@ -110,8 +110,8 @@ class PendingFeed(object): else: blockers.append(b) except Exception: - warn("Failed to import key for '%s': %s", self.url, str(ex)) _, exception, tb = sys.exc_info() + warn("Failed to import key for '%s': %s", self.url, str(exception)) if exception and not any_success: raise exception, None, tb diff --git a/zeroinstall/injector/policy.py b/zeroinstall/injector/policy.py index 38c2327..810e1f6 100644 --- a/zeroinstall/injector/policy.py +++ b/zeroinstall/injector/policy.py @@ -4,7 +4,7 @@ Chooses a set of implementations based on a policy. @deprecated: see L{solver} """ -# Copyright (C) 2007, Thomas Leonard +# Copyright (C) 2008, Thomas Leonard # See the README file for details, or visit http://0install.net. import time @@ -15,7 +15,6 @@ import arch from model import * from namespaces import * import ConfigParser -from zeroinstall import NeedDownload from zeroinstall.support import tasks, basedir from zeroinstall.injector.iface_cache import iface_cache, PendingFeed from zeroinstall.injector.trust import trust_db @@ -23,48 +22,6 @@ from zeroinstall.injector.trust import trust_db # If we started a check within this period, don't start another one: FAILED_CHECK_DELAY = 60 * 60 # 1 Hour -@tasks.async -def _cook(policy, required_digest, recipe, force = False): - """A Cook follows a Recipe.""" - # Maybe we're taking this metaphor too far? - - # Start downloading all the ingredients. 
- downloads = {} # Downloads that are not yet successful - streams = {} # Streams collected from successful downloads - - # Start a download for each ingredient - blockers = [] - for step in recipe.steps: - blocker, stream = policy.download_archive(step, force = force) - assert stream - blockers.append(blocker) - streams[step] = stream - - while blockers: - yield blockers - tasks.check(blockers) - blockers = [b for b in blockers if not b.happened] - - from zeroinstall.zerostore import unpack - - # Create an empty directory for the new implementation - store = iface_cache.stores.stores[0] - tmpdir = store.get_tmp_dir_for(required_digest) - try: - # Unpack each of the downloaded archives into it in turn - for step in recipe.steps: - stream = streams[step] - stream.seek(0) - unpack.unpack_archive_over(step.url, stream, tmpdir, step.extract) - # Check that the result is correct and store it in the cache - store.check_manifest_and_rename(required_digest, tmpdir) - tmpdir = None - finally: - # If unpacking fails, remove the temporary directory - if tmpdir is not None: - from zeroinstall import support - support.ro_rmtree(tmpdir) - class Policy(object): """Chooses a set of implementations based on a policy. Typical use: @@ -94,7 +51,7 @@ class Policy(object): """ __slots__ = ['root', 'watchers', 'freshness', 'handler', '_warned_offline', - 'src', 'stale_feeds', 'solver'] + 'src', 'stale_feeds', 'solver', '_fetcher'] help_with_testing = property(lambda self: self.solver.help_with_testing, lambda self, value: setattr(self.solver, 'help_with_testing', value)) @@ -128,6 +85,7 @@ class Policy(object): # If we need to download something but can't because we are offline, # warn the user. But only the first time. self._warned_offline = False + self._fetcher = None # (allow self for backwards compat) self.handler = handler or self @@ -153,6 +111,13 @@ class Policy(object): # Probably need weakrefs here... iface_cache.add_watcher(self) + @property + def fetcher(self): + if not self._fetcher: + import fetch + self._fetcher = fetch.Fetcher(self.handler) + return self._fetcher + def set_root(self, root): """Change the root interface URI.""" assert isinstance(root, (str, unicode)) @@ -174,15 +139,8 @@ class Policy(object): os.rename(path + '.new', path) def recalculate(self, fetch_stale_interfaces = True): - """Try to choose a set of implementations. - This may start downloading more interfaces, but will return immediately. - @param fetch_stale_interfaces: whether to begin downloading interfaces which are present but haven't - been checked within the L{freshness} period - @type fetch_stale_interfaces: bool - @postcondition: L{ready} indicates whether a possible set of implementations was chosen - @note: A policy may be ready before all feeds have been downloaded. As new feeds - arrive, the chosen versions may change. - @return: a list of tasks which will require a recalculation when complete + """Deprecated. 
+ @see: L{solve_with_downloads} """ self.stale_feeds = sets.Set() @@ -192,18 +150,24 @@ class Policy(object): host_arch = arch.SourceArchitecture(host_arch) self.solver.solve(self.root, host_arch) - for f in self.solver.feeds_used: - self.get_interface(f) # May start a download + if self.network_use == network_offline: + fetch_stale_interfaces = False - tasks = [] - if fetch_stale_interfaces and self.network_use != network_offline: - for stale in self.stale_feeds: - info("Checking for updates to stale feed %s", stale) - tasks.append(self.download_and_import_feed(stale, False)) + blockers = [] + for f in self.solver.feeds_used: + if f.startswith('/'): continue + feed = iface_cache.get_feed(f) + if feed is None or feed.last_modified is None: + self.download_and_import_feed_if_online(f) # Will start a download + elif self.is_stale(feed): + debug("Adding %s to stale set", f) + self.stale_feeds.add(iface_cache.get_interface(f)) # Legacy API + if fetch_stale_interfaces: + self.download_and_import_feed_if_online(f) # Will start a download for w in self.watchers: w() - return tasks + return blockers def usable_feeds(self, iface): """Generator for C{iface.feeds} that are valid for our architecture. @@ -246,163 +210,22 @@ class Policy(object): return True - def get_interface(self, uri): - """Get an interface from the L{iface_cache}. If it is missing start a new download. - If it is present but stale, add it to L{stale_feeds}. This should only be called - from L{recalculate}. - @see: iface_cache.iface_cache.get_interface - @rtype: L{model.Interface}""" - iface = iface_cache.get_interface(uri) - - if uri in iface_cache.pending: - # Don't start another download while one is pending - # TODO: unless the pending version is very old - return iface - - if not uri.startswith('/'): - if iface.last_modified is None: - if self.network_use != network_offline: - debug("Feed not cached and not off-line. Downloading...") - self.download_and_import_feed(iface.uri) - else: - if self._warned_offline: - debug("Nothing known about interface, but we are off-line.") - else: - if iface.feeds: - info("Nothing known about interface '%s' and off-line. 
Trying feeds only.", uri) - else: - warn("Nothing known about interface '%s', but we are in off-line mode " - "(so not fetching).", uri) - self._warned_offline = True - elif self.is_stale(iface._main_feed): - debug("Adding %s to stale set", iface) - self.stale_feeds.add(iface) - #else: debug("Local interface, so not checking staleness.") - - return iface - - def download_and_import_feed(self, feed_url, force = False): - """Download the feed, download any required keys, confirm trust if needed and import.""" - - debug("download_and_import_feed %s (force = %d)", feed_url, force) - assert not feed_url.startswith('/') - - dl = self.handler.get_download(feed_url, force = force) - - @tasks.named_async("fetch_feed " + feed_url) - def fetch_feed(): - stream = dl.tempfile - - yield dl.downloaded - tasks.check(dl.downloaded) - - pending = PendingFeed(feed_url, stream) - iface_cache.add_pending(pending) - - keys_downloaded = tasks.Task(pending.download_keys(self.handler), "download keys for " + feed_url) - yield keys_downloaded.finished - tasks.check(keys_downloaded.finished) - - iface = iface_cache.get_interface(pending.url) - if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml): - blocker = self.handler.confirm_trust_keys(iface, pending.sigs, pending.new_xml) - if blocker: - yield blocker - tasks.check(blocker) - if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml): - raise SafeException("No signing keys trusted; not importing") - - return fetch_feed() - - @tasks.async - def download_impl(self, impl, retrieval_method, force = False): - """Download impl, using retrieval_method.""" - assert impl - assert retrieval_method - - from zeroinstall.zerostore import manifest - alg = impl.id.split('=', 1)[0] - if alg not in manifest.algorithms: - raise SafeException("Unknown digest algorithm '%s' for '%s' version %s" % - (alg, impl.feed.get_name(), impl.get_version())) - - if isinstance(retrieval_method, DownloadSource): - blocker, stream = self.download_archive(retrieval_method, force = force) - yield blocker - tasks.check(blocker) - - stream.seek(0) - iface_cache.add_to_cache(retrieval_method, stream) - elif isinstance(retrieval_method, Recipe): - blocker = _cook(self, impl.id, retrieval_method, force) - yield blocker - tasks.check(blocker) - else: - raise Exception("Unknown download type for '%s'" % retrieval_method) - - def download_archive(self, download_source, force = False): - """Fetch an archive. You should normally call L{begin_impl_download} - instead, since it handles other kinds of retrieval method too.""" - from zeroinstall.zerostore import unpack - mime_type = download_source.type - if not mime_type: - mime_type = unpack.type_from_url(download_source.url) - if not mime_type: - raise SafeException("No 'type' attribute on archive, and I can't guess from the name (%s)" % download_source.url) - unpack.check_type_ok(mime_type) - dl = self.handler.get_download(download_source.url, force = force) - dl.expected_size = download_source.size + (download_source.start_offset or 0) - return (dl.downloaded, dl.tempfile) - - def download_icon(self, interface, force = False): - """Download an icon for this interface and add it to the - icon cache. If the interface has no icon or we are offline, do nothing. 
- @return: the task doing the import, or None - @rtype: L{tasks.Task}""" - debug("download_icon %s (force = %d)", interface, force) - - if self.network_use == network_offline: - info("No icon present for %s, but off-line so not downloading", interface) - return - - # Find a suitable icon to download - for icon in interface.get_metadata(XMLNS_IFACE, 'icon'): - type = icon.getAttribute('type') - if type != 'image/png': - debug('Skipping non-PNG icon') - continue - source = icon.getAttribute('href') - if source: - break - warn('Missing "href" attribute on in %s', interface) + def download_and_import_feed_if_online(self, feed_url): + """If we're online, call L{download_and_import_feed}. Otherwise, log a suitable warning.""" + if self.network_use != network_offline: + debug("Feed %s not cached and not off-line. Downloading...", feed_url) + return self.fetcher.download_and_import_feed(feed_url) else: - info('No PNG icons found in %s', interface) - return - - dl = self.handler.get_download(source, force = force) + if self._warned_offline: + debug("Not downloading feed '%s' because we are off-line.", feed_url) + elif feed_url == injector_gui_uri: + # Don't print a warning, because we always switch to off-line mode to + # run the GUI the first time. + info("Not downloading GUI feed '%s' because we are in off-line mode.", feed_url) + else: + warn("Not downloading feed '%s' because we are in off-line mode.", feed_url) + self._warned_offline = True - @tasks.async - def download_and_add_icon(): - stream = dl.tempfile - yield dl.downloaded - try: - tasks.check(dl.downloaded) - stream.seek(0) - self.store_icon(interface, stream) - except Exception, ex: - self.handler.report_error(ex) - - return download_and_add_icon() - - def store_icon(self, interface, stream): - """Called when an icon has been successfully downloaded. - Subclasses may wish to wrap this to repaint the display.""" - from zeroinstall.injector import basedir - import shutil - icons_cache = basedir.save_cache_path(config_site, 'interface_icons') - icon_file = file(os.path.join(icons_cache, escape(interface.uri)), 'w') - shutil.copyfileobj(stream, icon_file) - def get_implementation_path(self, impl): """Return the local path of impl. @rtype: str @@ -492,13 +315,6 @@ class Policy(object): warn("Warning: unknown interface '%s'" % feed_iface_uri) return [iface_cache.get_interface(uri) for uri in feed_targets] - def get_best_source(self, impl): - """Return the best download source for this implementation. 
- @rtype: L{model.RetrievalMethod}""" - if impl.download_sources: - return impl.download_sources[0] - return None - @tasks.async def solve_with_downloads(self, force = False): """Run the solver, then download any feeds that are missing or @@ -531,7 +347,7 @@ class Policy(object): if f.startswith('/'): continue feed = iface_cache.get_interface(f) - downloads_in_progress[f] = self.download_and_import_feed(f) + downloads_in_progress[f] = self.fetcher.download_and_import_feed(f) if not downloads_in_progress: break @@ -563,28 +379,20 @@ class Policy(object): return False - def download_impls(self): - """Download all implementations that are missing from the cache.""" - blockers = [] + def download_uncached_implementations(self): + """Download all implementations chosen by the solver that are missing from the cache.""" + assert self.solver.ready, "Solver is not ready!\n%s" % self.solver.selections + return self.fetcher.download_impls([impl for impl in self.solver.selections.values() if not self.get_cached(impl)]) + + def download_icon(self, interface, force = False): + """Download an icon for this interface and add it to the + icon cache. If the interface has no icon or we are offline, do nothing. + @return: the task doing the import, or None + @rtype: L{tasks.Task}""" + debug("download_icon %s (force = %d)", interface, force) + + if self.network_use == network_offline: + info("No icon present for %s, but off-line so not downloading", interface) + return - for iface, impl in self.get_uncached_implementations(): - debug("start_downloading_impls: for %s get %s", iface, impl) - source = self.get_best_source(impl) - if not source: - raise SafeException("Implementation " + impl.id + " of " - "interface " + iface.get_name() + " cannot be " - "downloaded (no download locations given in " - "interface!)") - blockers.append(self.download_impl(impl, source)) - - if not blockers: - return None - - @tasks.async - def download_impls(blockers): - while blockers: - yield blockers - tasks.check(blockers) - - blockers = [b for b in blockers if not b.happened] - return download_impls(blockers) + return self.fetcher.download_icon(interface, force) diff --git a/zeroinstall/support/tasks.py b/zeroinstall/support/tasks.py index c1ba5a8..e9f2b7f 100644 --- a/zeroinstall/support/tasks.py +++ b/zeroinstall/support/tasks.py @@ -119,11 +119,6 @@ class Blocker: #import traceback #traceback.print_exception(exception[0], None, exception[1]) - def check_exception(): - if self.exception: - self.exception_read = True - raise self.exception - def __del__(self): if self.exception and not self.exception_read: warn("Blocker %s garbage collected without having it's exception read: %s", self, self.exception) @@ -273,7 +268,7 @@ class Task: new_blockers = (new_blockers,) # Are we blocking on something that already happened? for blocker in new_blockers: - assert hasattr(blocker, 'happened'), "Not a Blocker: " + repr(blocker) + assert hasattr(blocker, 'happened'), "Not a Blocker: %s from %s" % (blocker, self) if blocker.happened: new_blockers = (_idle_blocker,) info("Task '%s' waiting on ready blocker %s!", self, blocker) -- 2.11.4.GIT