Implement --set-selections using new 0install command
[zeroinstall/zeroinstall-afb.git] / zeroinstall / injector / fetch.py
blobb816feb463cd20fe1977335ca32befc91c206c87
1 """
2 Downloads feeds, keys, packages and icons.
3 """
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
8 from zeroinstall import _
9 import os
10 from logging import info, debug, warn
12 from zeroinstall.support import tasks, basedir
13 from zeroinstall.injector.namespaces import XMLNS_IFACE, config_site
14 from zeroinstall.injector.model import DownloadSource, Recipe, SafeException, escape, DistributionSource
15 from zeroinstall.injector.iface_cache import PendingFeed, ReplayAttack
16 from zeroinstall.injector.handler import NoTrustedKeys
17 from zeroinstall.injector import download
19 DEFAULT_FEED_MIRROR = "http://roscidus.com/0mirror"
20 DEFAULT_KEY_LOOKUP_SERVER = 'https://keylookup.appspot.com'
22 def _escape_slashes(path):
23 return path.replace('/', '%23')
25 def _get_feed_dir(feed):
26 """The algorithm from 0mirror."""
27 if '#' in feed:
28 raise SafeException(_("Invalid URL '%s'") % feed)
29 scheme, rest = feed.split('://', 1)
30 assert '/' in rest, "Missing / in %s" % feed
31 domain, rest = rest.split('/', 1)
32 for x in [scheme, domain, rest]:
33 if not x or x.startswith(','):
34 raise SafeException(_("Invalid URL '%s'") % feed)
35 return os.path.join('feeds', scheme, domain, _escape_slashes(rest))
class KeyInfoFetcher:
	"""Fetches information about a GPG key from a key-info server.
	See L{Fetcher.fetch_key_info} for details.
	@since: 0.42

	Example:

	>>> kf = KeyInfoFetcher('https://server', fingerprint)
	>>> while True:
	print kf.info
	if kf.blocker is None: break
	print kf.status
	yield kf.blocker
	"""
	def __init__(self, server, fingerprint):
		# fingerprint: the GPG fingerprint being looked up
		# info: list of DOM nodes received so far (appended to as data arrives)
		# blocker: a tasks blocker while the fetch is in progress; None when done
		self.fingerprint = fingerprint
		self.info = []
		self.blocker = None

		# No server configured: leave blocker as None so callers see us as finished
		if server is None: return

		self.status = _('Fetching key information from %s...') % server

		# Query is a plain GET of <server>/key/<fingerprint>
		dl = download.Download(server + '/key/' + fingerprint)
		dl.start()

		from xml.dom import minidom

		@tasks.async
		def fetch_key_info():
			try:
				tempfile = dl.tempfile
				yield dl.downloaded
				# Clear blocker first: from here on we either have results or an error
				self.blocker = None
				tasks.check(dl.downloaded)
				tempfile.seek(0)
				doc = minidom.parse(tempfile)
				if doc.documentElement.localName != 'key-lookup':
					raise SafeException(_('Expected <key-lookup>, not <%s>') % doc.documentElement.localName)
				# Expose the server's child elements (e.g. <item vote="...">) directly
				self.info += doc.documentElement.childNodes
			except Exception, ex:
				# On any failure, report a synthetic "bad" vote rather than raising,
				# so the UI can still display something for this key
				doc = minidom.parseString('<item vote="bad"/>')
				root = doc.documentElement
				root.appendChild(doc.createTextNode(_('Error getting key information: %s') % ex))
				self.info.append(root)

		self.blocker = fetch_key_info()
class Fetcher(object):
	"""Downloads and stores various things.
	@ivar handler: handler to use for user-interaction
	@type handler: L{handler.Handler}
	@ivar key_info: caches information about GPG keys
	@type key_info: {str: L{KeyInfoFetcher}}
	@ivar key_info_server: the base URL of a key information server
	@type key_info_server: str
	@ivar feed_mirror: the base URL of a mirror site for keys and feeds
	@type feed_mirror: str | None
	"""
	__slots__ = ['handler', 'feed_mirror', 'key_info_server', 'key_info']

	def __init__(self, handler):
		# Defaults point at the public 0mirror / key-lookup services;
		# callers may override feed_mirror (or set it to None to disable mirroring)
		self.handler = handler
		self.feed_mirror = DEFAULT_FEED_MIRROR
		self.key_info_server = DEFAULT_KEY_LOOKUP_SERVER
		self.key_info = {}	# fingerprint -> KeyInfoFetcher cache; see fetch_key_info
	@tasks.async
	def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
		"""Follow a Recipe.
		@param required_digest: the expected manifest digest of the result
		@param recipe: the recipe whose steps (archives) should be fetched and unpacked
		@param stores: the archives are unpacked into the first store in this list
		@param force: whether to abort and restart existing downloads
		@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
		@see: L{download_impl} uses this method when appropriate"""
		# Maybe we're taking this metaphor too far?

		# Start downloading all the ingredients.
		downloads = {} # Downloads that are not yet successful
		# NOTE(review): 'downloads' is never used below — looks like dead code
		streams = {} # Streams collected from successful downloads

		# Start a download for each ingredient
		blockers = []
		for step in recipe.steps:
			blocker, stream = self.download_archive(step, force = force, impl_hint = impl_hint)
			assert stream
			blockers.append(blocker)
			streams[step] = stream

		# Wait for every download to complete (tasks.check raises on any failure)
		while blockers:
			yield blockers
			tasks.check(blockers)
			blockers = [b for b in blockers if not b.happened]

		from zeroinstall.zerostore import unpack

		# Create an empty directory for the new implementation
		store = stores.stores[0]
		tmpdir = store.get_tmp_dir_for(required_digest)
		try:
			# Unpack each of the downloaded archives into it in turn
			for step in recipe.steps:
				stream = streams[step]
				stream.seek(0)
				unpack.unpack_archive_over(step.url, stream, tmpdir, step.extract)
			# Check that the result is correct and store it in the cache
			store.check_manifest_and_rename(required_digest, tmpdir)
			# Ownership passed to the store; don't delete it in the finally block
			tmpdir = None
		finally:
			# If unpacking fails, remove the temporary directory
			if tmpdir is not None:
				from zeroinstall import support
				support.ro_rmtree(tmpdir)
148 def get_feed_mirror(self, url):
149 """Return the URL of a mirror for this feed."""
150 if self.feed_mirror is None:
151 return None
152 import urlparse
153 if urlparse.urlparse(url).hostname == 'localhost':
154 return None
155 return '%s/%s/latest.xml' % (self.feed_mirror, _get_feed_dir(url))
	@tasks.async
	def get_packagekit_feed(self, iface_cache, feed_url):
		"""Send a query to PackageKit (if available) for information about this package.
		On success, the result is added to iface_cache."""
		assert feed_url.startswith('distribution:'), feed_url
		# The part after 'distribution:' is the URL of the master feed
		master_feed = iface_cache.get_feed(feed_url.split(':', 1)[1])
		if master_feed:
			fetch = iface_cache.distro.fetch_candidates(master_feed)
			if fetch:
				yield fetch
				tasks.check(fetch)

			# Force feed to be regenerated with the new information
			iface_cache.get_feed(feed_url, force = True)
	def download_and_import_feed(self, feed_url, iface_cache, force = False):
		"""Download the feed, download any required keys, confirm trust if needed and import.
		If the primary location is slow or fails, a mirror download is started in
		parallel and whichever succeeds first wins.
		@param feed_url: the feed to be downloaded
		@type feed_url: str
		@param iface_cache: cache in which to store the feed
		@type iface_cache: L{iface_cache.IfaceCache}
		@param force: whether to abort and restart an existing download"""
		from download import DownloadAborted

		debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url, 'force': force})
		assert not os.path.isabs(feed_url)

		# distribution: feeds come from the local package manager, not the network
		if feed_url.startswith('distribution:'):
			return self.get_packagekit_feed(iface_cache, feed_url)

		primary = self._download_and_import_feed(feed_url, iface_cache, force, use_mirror = False)

		@tasks.named_async("monitor feed downloads for " + feed_url)
		def wait_for_downloads(primary):
			# Download just the upstream feed, unless it takes too long...
			timeout = tasks.TimeoutBlocker(5, 'Mirror timeout') # 5 seconds

			yield primary, timeout
			tasks.check(timeout)

			try:
				tasks.check(primary)
				if primary.happened:
					return # OK, primary succeeded!
				# OK, maybe it's just being slow...
				info("Feed download from %s is taking a long time.", feed_url)
				primary_ex = None
			except NoTrustedKeys, ex:
				raise # Don't bother trying the mirror if we have a trust problem
			except ReplayAttack, ex:
				raise # Don't bother trying the mirror if we have a replay attack
			except DownloadAborted, ex:
				raise # Don't bother trying the mirror if the user cancelled
			except SafeException, ex:
				# Primary failed
				primary = None
				primary_ex = ex
				warn(_("Feed download from %(url)s failed: %(exception)s"), {'url': feed_url, 'exception': ex})

			# Start downloading from mirror...
			mirror = self._download_and_import_feed(feed_url, iface_cache, force, use_mirror = True)

			# Wait until both mirror and primary tasks are complete...
			while True:
				blockers = filter(None, [primary, mirror])
				if not blockers:
					break
				yield blockers

				if primary:
					try:
						tasks.check(primary)
						if primary.happened:
							primary = None
							# No point carrying on with the mirror once the primary has succeeded
							if mirror:
								info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url)
								mirror.dl.abort()
					except SafeException, ex:
						primary = None
						primary_ex = ex
						info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url, 'exception': ex})

				if mirror:
					try:
						tasks.check(mirror)
						if mirror.happened:
							mirror = None
							if primary_ex:
								# We already warned; no need to raise an exception too,
								# as the mirror download succeeded.
								primary_ex = None
					except ReplayAttack, ex:
						# Mirror data is stale, not fatal: the cached copy is newer
						info(_("Version from mirror is older than cached version; ignoring it: %s"), ex)
						mirror = None
						primary_ex = None
					except SafeException, ex:
						info(_("Mirror download failed: %s"), ex)
						mirror = None

			# Only report the primary's failure if the mirror didn't save the day
			if primary_ex:
				raise primary_ex

		return wait_for_downloads(primary)
	def _download_and_import_feed(self, feed_url, iface_cache, force, use_mirror):
		"""Download and import a feed.
		@param use_mirror: False to use primary location; True to use mirror.
		@return: a named_async task (with a .dl attribute for the underlying
		download), or None if use_mirror is True but no mirror is available."""
		if use_mirror:
			url = self.get_feed_mirror(feed_url)
			if url is None: return None
			warn(_("Trying mirror server for feed %s") % feed_url)
		else:
			url = feed_url

		dl = self.handler.get_download(url, force = force, hint = feed_url)
		stream = dl.tempfile

		@tasks.named_async("fetch_feed " + url)
		def fetch_feed():
			yield dl.downloaded
			tasks.check(dl.downloaded)

			# Note: the feed is always recorded under its canonical URL,
			# even when fetched from the mirror
			pending = PendingFeed(feed_url, stream)

			if use_mirror:
				# If we got the feed from a mirror, get the key from there too
				key_mirror = self.feed_mirror + '/keys/'
			else:
				key_mirror = None

			keys_downloaded = tasks.Task(pending.download_keys(self.handler, feed_hint = feed_url, key_mirror = key_mirror), _("download keys for %s") % feed_url)
			yield keys_downloaded.finished
			tasks.check(keys_downloaded.finished)

			# First try with the keys we already trust; otherwise ask the user
			# to confirm the new keys and try once more
			if not iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml):
				blocker = self.handler.confirm_keys(pending, self.fetch_key_info)
				if blocker:
					yield blocker
					tasks.check(blocker)
				if not iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml):
					raise NoTrustedKeys(_("No signing keys trusted; not importing"))

		task = fetch_feed()
		task.dl = dl
		return task
305 def fetch_key_info(self, fingerprint):
306 try:
307 return self.key_info[fingerprint]
308 except KeyError:
309 self.key_info[fingerprint] = info = KeyInfoFetcher(self.key_info_server, fingerprint)
310 return info
	def download_impl(self, impl, retrieval_method, stores, force = False):
		"""Download an implementation.
		@param impl: the selected implementation
		@type impl: L{model.ZeroInstallImplementation}
		@param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
		@type retrieval_method: L{model.RetrievalMethod}
		@param stores: where to store the downloaded implementation
		@type stores: L{zerostore.Stores}
		@param force: whether to abort and restart an existing download
		@rtype: L{tasks.Blocker}"""
		assert impl
		assert retrieval_method

		# Distribution packages are installed by the native package manager
		if isinstance(retrieval_method, DistributionSource):
			return retrieval_method.install(self.handler)

		from zeroinstall.zerostore import manifest
		# Pick the strongest digest algorithm we understand from impl.digests
		# (each entry has the form "algname=value")
		best = None
		for digest in impl.digests:
			alg_name = digest.split('=', 1)[0]
			alg = manifest.algorithms.get(alg_name, None)
			if alg and (best is None or best.rating < alg.rating):
				best = alg
				required_digest = digest

		if best is None:
			if not impl.digests:
				raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
						{'implementation': impl.feed.get_name(), 'version': impl.get_version()})
			raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
					{'algorithms': impl.digests, 'implementation': impl.feed.get_name(), 'version': impl.get_version()})

		@tasks.async
		def download_impl():
			if isinstance(retrieval_method, DownloadSource):
				# Single archive: fetch it, then unpack into the cache
				blocker, stream = self.download_archive(retrieval_method, force = force, impl_hint = impl)
				yield blocker
				tasks.check(blocker)

				stream.seek(0)
				self._add_to_cache(required_digest, stores, retrieval_method, stream)
			elif isinstance(retrieval_method, Recipe):
				# Multi-step recipe: cook() downloads and combines the archives
				blocker = self.cook(required_digest, retrieval_method, stores, force, impl_hint = impl)
				yield blocker
				tasks.check(blocker)
			else:
				raise Exception(_("Unknown download type for '%s'") % retrieval_method)

			self.handler.impl_added_to_store(impl)
		return download_impl()
363 def _add_to_cache(self, required_digest, stores, retrieval_method, stream):
364 assert isinstance(retrieval_method, DownloadSource)
365 url = retrieval_method.url
366 stores.add_archive_to_cache(required_digest, stream, retrieval_method.url, retrieval_method.extract,
367 type = retrieval_method.type, start_offset = retrieval_method.start_offset or 0)
369 def download_archive(self, download_source, force = False, impl_hint = None):
370 """Fetch an archive. You should normally call L{download_impl}
371 instead, since it handles other kinds of retrieval method too."""
372 from zeroinstall.zerostore import unpack
374 url = download_source.url
375 if not (url.startswith('http:') or url.startswith('https:') or url.startswith('ftp:')):
376 raise SafeException(_("Unknown scheme in download URL '%s'") % url)
378 mime_type = download_source.type
379 if not mime_type:
380 mime_type = unpack.type_from_url(download_source.url)
381 if not mime_type:
382 raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
383 unpack.check_type_ok(mime_type)
384 dl = self.handler.get_download(download_source.url, force = force, hint = impl_hint)
385 dl.expected_size = download_source.size + (download_source.start_offset or 0)
386 return (dl.downloaded, dl.tempfile)
388 def download_icon(self, interface, force = False, modification_time = None):
389 """Download an icon for this interface and add it to the
390 icon cache. If the interface has no icon or we are offline, do nothing.
391 @return: the task doing the import, or None
392 @rtype: L{tasks.Task}"""
393 debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface, 'force': force})
395 # Find a suitable icon to download
396 for icon in interface.get_metadata(XMLNS_IFACE, 'icon'):
397 type = icon.getAttribute('type')
398 if type != 'image/png':
399 debug(_('Skipping non-PNG icon'))
400 continue
401 source = icon.getAttribute('href')
402 if source:
403 break
404 warn(_('Missing "href" attribute on <icon> in %s'), interface)
405 else:
406 info(_('No PNG icons found in %s'), interface)
407 return
409 try:
410 dl = self.handler.monitored_downloads[source]
411 if dl and force:
412 dl.abort()
413 raise KeyError
414 except KeyError:
415 dl = download.Download(source, hint = interface, modification_time = modification_time)
416 self.handler.monitor_download(dl)
418 @tasks.async
419 def download_and_add_icon():
420 stream = dl.tempfile
421 yield dl.downloaded
422 try:
423 tasks.check(dl.downloaded)
424 if dl.unmodified: return
425 stream.seek(0)
427 import shutil
428 icons_cache = basedir.save_cache_path(config_site, 'interface_icons')
429 icon_file = file(os.path.join(icons_cache, escape(interface.uri)), 'w')
430 shutil.copyfileobj(stream, icon_file)
431 except Exception, ex:
432 self.handler.report_error(ex)
434 return download_and_add_icon()
	def download_impls(self, implementations, stores):
		"""Download the given implementations, choosing a suitable retrieval method for each.
		If any of the retrieval methods are DistributionSources and
		need confirmation, handler.confirm is called to check that the
		installation should proceed.
		@return: the async task performing the downloads, or None if there is
		nothing to download"""
		unsafe_impls = []

		to_download = []
		for impl in implementations:
			debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl.feed, 'implementation': impl})
			source = self.get_best_source(impl)
			if not source:
				raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
					" cannot be downloaded (no download locations given in "
					"interface!)") % {'implementation_id': impl.id, 'interface': impl.feed.get_name()})
			to_download.append((impl, source))

			# Native packages run with extra privileges, so they need confirming
			if isinstance(source, DistributionSource) and source.needs_confirmation:
				unsafe_impls.append(source.package_id)

		@tasks.async
		def download_impls():
			if unsafe_impls:
				confirm = self.handler.confirm_install(_('The following components need to be installed using native packages. '
					'These come from your distribution, and should therefore be trustworthy, but they also '
					'run with extra privileges. In particular, installing them may run extra services on your '
					'computer or affect other users. You may be asked to enter a password to confirm. The '
					'packages are:\n\n') + ('\n'.join('- ' + x for x in unsafe_impls)))
				yield confirm
				tasks.check(confirm)

			blockers = []

			for impl, source in to_download:
				blockers.append(self.download_impl(impl, source, stores))

			# Record the first error; log the rest
			error = []
			def dl_error(ex, tb = None):
				if error:
					self.handler.report_error(ex)
				else:
					error.append(ex)
			while blockers:
				yield blockers
				tasks.check(blockers, dl_error)

				blockers = [b for b in blockers if not b.happened]
			if error:
				raise error[0]

		if not to_download:
			return None

		return download_impls()
493 def get_best_source(self, impl):
494 """Return the best download source for this implementation.
495 @rtype: L{model.RetrievalMethod}"""
496 if impl.download_sources:
497 return impl.download_sources[0]
498 return None