Merged '0install' command branch
[zeroinstall/zeroinstall-afb.git] / zeroinstall / injector / fetch.py
blob7eb754d5eeb65d592aee242b52f962d63f055ad1
1 """
2 Downloads feeds, keys, packages and icons.
3 """
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
8 from zeroinstall import _
9 import os
10 from logging import info, debug, warn
12 from zeroinstall.support import tasks, basedir
13 from zeroinstall.injector.namespaces import XMLNS_IFACE, config_site
14 from zeroinstall.injector.model import DownloadSource, Recipe, SafeException, escape, DistributionSource
15 from zeroinstall.injector.iface_cache import PendingFeed, ReplayAttack
16 from zeroinstall.injector.handler import NoTrustedKeys
17 from zeroinstall.injector import download
# Fallback mirror site used for feeds and GPG keys when the primary location fails
# or is slow (see Fetcher.feed_mirror / get_feed_mirror).
DEFAULT_FEED_MIRROR = "http://roscidus.com/0mirror"
# Base URL of the service queried for information about GPG keys (see KeyInfoFetcher).
DEFAULT_KEY_LOOKUP_SERVER = 'https://keylookup.appspot.com'
22 def _escape_slashes(path):
23 return path.replace('/', '%23')
def _get_feed_dir(feed):
    """Map a feed URL to its directory on a 0mirror server (the algorithm
    used by 0mirror itself). Raises SafeException for URLs that cannot be
    mirrored ('#' anywhere, or an empty / ','-prefixed component)."""
    if '#' in feed:
        raise SafeException(_("Invalid URL '%s'") % feed)
    scheme, rest = feed.split('://', 1)
    assert '/' in rest, "Missing / in %s" % feed
    domain, tail = rest.split('/', 1)
    for part in (scheme, domain, tail):
        if not part or part.startswith(','):
            raise SafeException(_("Invalid URL '%s'") % feed)
    # The path part is stored flattened, with '/' escaped as '%23'
    return os.path.join('feeds', scheme, domain, _escape_slashes(tail))
class KeyInfoFetcher:
    """Fetches information about a GPG key from a key-info server.
    See L{Fetcher.fetch_key_info} for details.
    @since: 0.42

    Example:

    >>> kf = KeyInfoFetcher('https://server', fingerprint)
    >>> while True:
        print kf.info
        if kf.blocker is None: break
        print kf.status
        yield kf.blocker
    """
    def __init__(self, server, fingerprint):
        # server: base URL of the key-info server, or None to disable lookups
        # fingerprint: hex fingerprint of the key to query
        self.fingerprint = fingerprint
        self.info = []          # list of DOM nodes describing the key (grows as results arrive)
        self.blocker = None     # blocker for the fetch in progress, or None when finished

        # No server configured: leave info empty and blocker None (nothing to wait for)
        if server is None: return

        self.status = _('Fetching key information from %s...') % server

        dl = download.Download(server + '/key/' + fingerprint)
        dl.start()

        from xml.dom import minidom

        @tasks.async
        def fetch_key_info():
            try:
                # Grab the tempfile reference before yielding; NOTE(review):
                # presumably dl may release it once the download completes — confirm.
                tempfile = dl.tempfile
                yield dl.downloaded
                # Clear blocker first so callers polling us see we are done
                # even if parsing below raises.
                self.blocker = None
                tasks.check(dl.downloaded)
                tempfile.seek(0)
                doc = minidom.parse(tempfile)
                if doc.documentElement.localName != 'key-lookup':
                    raise SafeException(_('Expected <key-lookup>, not <%s>') % doc.documentElement.localName)
                self.info += doc.documentElement.childNodes
            except Exception, ex:
                # On any failure, report a synthetic "bad" vote carrying the error text
                # rather than propagating the exception to the GUI.
                doc = minidom.parseString('<item vote="bad"/>')
                root = doc.documentElement
                root.appendChild(doc.createTextNode(_('Error getting key information: %s') % ex))
                self.info.append(root)

        self.blocker = fetch_key_info()
85 class Fetcher(object):
86 """Downloads and stores various things.
87 @ivar handler: handler to use for user-interaction
88 @type handler: L{handler.Handler}
89 @ivar key_info: caches information about GPG keys
90 @type key_info: {str: L{KeyInfoFetcher}}
91 @ivar key_info_server: the base URL of a key information server
92 @type key_info_server: str
93 @ivar feed_mirror: the base URL of a mirror site for keys and feeds
94 @type feed_mirror: str | None
95 """
96 __slots__ = ['handler', 'feed_mirror', 'key_info_server', 'key_info']
def __init__(self, handler):
    """Create a new Fetcher using the defaults for the mirror and
    key-lookup servers.
    @param handler: handler to use for user-interaction
    @type handler: L{handler.Handler}"""
    self.handler = handler
    self.key_info = {}
    self.feed_mirror = DEFAULT_FEED_MIRROR
    self.key_info_server = DEFAULT_KEY_LOOKUP_SERVER
@tasks.async
def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
    """Follow a Recipe.
    @param required_digest: digest the completed tree must match
    @param recipe: the recipe whose steps should be downloaded and unpacked
    @type recipe: L{Recipe}
    @param stores: where to store the finished implementation
    @param force: whether to abort and restart existing downloads
    @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
    @see: L{download_impl} uses this method when appropriate"""
    # Maybe we're taking this metaphor too far?

    # Start downloading all the ingredients.
    streams = {} # Streams collected from successful downloads

    # Start a download for each ingredient
    blockers = []
    for step in recipe.steps:
        blocker, stream = self.download_archive(step, force = force, impl_hint = impl_hint)
        assert stream
        blockers.append(blocker)
        streams[step] = stream

    # Wait for all the downloads; drop blockers as they complete.
    # tasks.check raises if any download failed.
    while blockers:
        yield blockers
        tasks.check(blockers)
        blockers = [b for b in blockers if not b.happened]

    from zeroinstall.zerostore import unpack

    # Create an empty directory for the new implementation
    store = stores.stores[0]
    tmpdir = store.get_tmp_dir_for(required_digest)
    try:
        # Unpack each of the downloaded archives into it in turn
        for step in recipe.steps:
            stream = streams[step]
            stream.seek(0)
            unpack.unpack_archive_over(step.url, stream, tmpdir, step.extract)
        # Check that the result is correct and store it in the cache
        store.check_manifest_and_rename(required_digest, tmpdir)
        tmpdir = None   # Ownership passed to the store; don't delete below
    finally:
        # If unpacking fails, remove the temporary directory
        if tmpdir is not None:
            from zeroinstall import support
            support.ro_rmtree(tmpdir)
def get_feed_mirror(self, url):
    """Return the URL of a mirror for this feed."""
    # Mirroring disabled entirely?
    if self.feed_mirror is None:
        return None
    import urlparse
    # Never query the mirror for feeds served from this machine
    hostname = urlparse.urlparse(url).hostname
    if hostname == 'localhost':
        return None
    return '%s/%s/latest.xml' % (self.feed_mirror, _get_feed_dir(url))
@tasks.async
def get_packagekit_feed(self, iface_cache, feed_url):
    """Send a query to PackageKit (if available) for information about this package.
    On success, the result is added to iface_cache."""
    assert feed_url.startswith('distribution:'), feed_url
    # The part after 'distribution:' is the URL of the master feed
    master_feed = iface_cache.get_feed(feed_url.split(':', 1)[1])
    if master_feed:
        fetch = iface_cache.distro.fetch_candidates(master_feed)
        if fetch:
            yield fetch
            tasks.check(fetch)

        # Force feed to be regenerated with the new information
        iface_cache.get_feed(feed_url, force = True)
def download_and_import_feed(self, feed_url, iface_cache, force = False):
    """Download the feed, download any required keys, confirm trust if needed and import.
    Races the primary server against the mirror: the mirror is started if the
    primary fails or takes more than 5 seconds.
    @param feed_url: the feed to be downloaded
    @type feed_url: str
    @param iface_cache: cache in which to store the feed
    @type iface_cache: L{iface_cache.IfaceCache}
    @param force: whether to abort and restart an existing download"""
    from download import DownloadAborted

    debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url, 'force': force})
    assert not os.path.isabs(feed_url)

    # 'distribution:' feeds come from the native package manager, not HTTP
    if feed_url.startswith('distribution:'):
        return self.get_packagekit_feed(iface_cache, feed_url)

    primary = self._download_and_import_feed(feed_url, iface_cache, force, use_mirror = False)

    @tasks.named_async("monitor feed downloads for " + feed_url)
    def wait_for_downloads(primary):
        # Download just the upstream feed, unless it takes too long...
        timeout = tasks.TimeoutBlocker(5, 'Mirror timeout')     # 5 seconds

        yield primary, timeout
        tasks.check(timeout)

        try:
            tasks.check(primary)
            if primary.happened:
                return              # OK, primary succeeded!
            # OK, maybe it's just being slow...
            info("Feed download from %s is taking a long time.", feed_url)
            primary_ex = None
        except NoTrustedKeys, ex:
            raise                   # Don't bother trying the mirror if we have a trust problem
        except ReplayAttack, ex:
            raise                   # Don't bother trying the mirror if we have a replay attack
        except DownloadAborted, ex:
            raise                   # Don't bother trying the mirror if the user cancelled
        except SafeException, ex:
            # Primary failed; remember the error so we can re-raise it if the
            # mirror fails too.
            primary = None
            primary_ex = ex
            warn(_("Feed download from %(url)s failed: %(exception)s"), {'url': feed_url, 'exception': ex})

        # Start downloading from mirror...
        mirror = self._download_and_import_feed(feed_url, iface_cache, force, use_mirror = True)

        # Wait until both mirror and primary tasks are complete...
        while True:
            blockers = filter(None, [primary, mirror])
            if not blockers:
                break
            yield blockers

            if primary:
                try:
                    tasks.check(primary)
                    if primary.happened:
                        primary = None
                        # No point carrying on with the mirror once the primary has succeeded
                        if mirror:
                            info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url)
                            mirror.dl.abort()
                except SafeException, ex:
                    primary = None
                    primary_ex = ex
                    info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url, 'exception': ex})

            if mirror:
                try:
                    tasks.check(mirror)
                    if mirror.happened:
                        mirror = None
                        if primary_ex:
                            # We already warned; no need to raise an exception too,
                            # as the mirror download succeeded.
                            primary_ex = None
                except ReplayAttack, ex:
                    info(_("Version from mirror is older than cached version; ignoring it: %s"), ex)
                    mirror = None
                    primary_ex = None
                except SafeException, ex:
                    info(_("Mirror download failed: %s"), ex)
                    mirror = None

        # Both sides finished; report the primary's failure unless the mirror saved us
        if primary_ex:
            raise primary_ex

    return wait_for_downloads(primary)
def _download_and_import_feed(self, feed_url, iface_cache, force, use_mirror):
    """Download and import a feed.
    @param feed_url: canonical URL of the feed (used for signing/caching even
    when fetching from the mirror)
    @param use_mirror: False to use primary location; True to use mirror.
    @return: the fetch task (with a .dl attribute for the download), or None
    if use_mirror is set but no mirror URL is available"""
    if use_mirror:
        url = self.get_feed_mirror(feed_url)
        if url is None: return None
        warn(_("Trying mirror server for feed %s") % feed_url)
    else:
        url = feed_url

    dl = self.handler.get_download(url, force = force, hint = feed_url)
    stream = dl.tempfile

    @tasks.named_async("fetch_feed " + url)
    def fetch_feed():
        yield dl.downloaded
        tasks.check(dl.downloaded)

        # Note: the pending feed records the canonical feed_url, not the mirror URL
        pending = PendingFeed(feed_url, stream)

        if use_mirror:
            # If we got the feed from a mirror, get the key from there too
            key_mirror = self.feed_mirror + '/keys/'
        else:
            key_mirror = None

        keys_downloaded = tasks.Task(pending.download_keys(self.handler, feed_hint = feed_url, key_mirror = key_mirror), _("download keys for %s") % feed_url)
        yield keys_downloaded.finished
        tasks.check(keys_downloaded.finished)

        # Import directly if we already trust a signing key; otherwise ask the
        # user to confirm the keys, then try again.
        if not iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml):
            blocker = self.handler.confirm_keys(pending, self.fetch_key_info)
            if blocker:
                yield blocker
                tasks.check(blocker)
            if not iface_cache.update_feed_if_trusted(pending.url, pending.sigs, pending.new_xml):
                raise NoTrustedKeys(_("No signing keys trusted; not importing"))

    task = fetch_feed()
    task.dl = dl        # Expose the download so callers can abort it
    return task
def fetch_key_info(self, fingerprint):
    """Return a (possibly cached) L{KeyInfoFetcher} for this key fingerprint,
    creating and caching a new one on first use."""
    fetcher = self.key_info.get(fingerprint)
    if fetcher is None:
        fetcher = KeyInfoFetcher(self.key_info_server, fingerprint)
        self.key_info[fingerprint] = fetcher
    return fetcher
def download_impl(self, impl, retrieval_method, stores, force = False):
    """Download an implementation.
    @param impl: the selected implementation
    @type impl: L{model.ZeroInstallImplementation}
    @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
    @type retrieval_method: L{model.RetrievalMethod}
    @param stores: where to store the downloaded implementation
    @type stores: L{zerostore.Stores}
    @param force: whether to abort and restart an existing download
    @rtype: L{tasks.Blocker}"""
    assert impl
    assert retrieval_method

    # Native packages are installed by the distribution, not unpacked by us
    if isinstance(retrieval_method, DistributionSource):
        return retrieval_method.install(self.handler)

    # Pick the digest whose algorithm we support with the highest rating
    from zeroinstall.zerostore import manifest
    best = None
    for digest in impl.digests:
        alg_name = digest.split('=', 1)[0]
        alg = manifest.algorithms.get(alg_name, None)
        if alg and (best is None or best.rating < alg.rating):
            best = alg
            required_digest = digest

    if best is None:
        if not impl.digests:
            raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
                    {'implementation': impl.feed.get_name(), 'version': impl.get_version()})
        raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
                {'algorithms': impl.digests, 'implementation': impl.feed.get_name(), 'version': impl.get_version()})

    @tasks.async
    def download_impl():
        if isinstance(retrieval_method, DownloadSource):
            blocker, stream = self.download_archive(retrieval_method, force = force, impl_hint = impl)
            yield blocker
            tasks.check(blocker)

            stream.seek(0)
            self._add_to_cache(required_digest, stores, retrieval_method, stream)
        elif isinstance(retrieval_method, Recipe):
            # A Recipe combines several downloads; cook() handles the whole thing
            blocker = self.cook(required_digest, retrieval_method, stores, force, impl_hint = impl)
            yield blocker
            tasks.check(blocker)
        else:
            raise Exception(_("Unknown download type for '%s'") % retrieval_method)

        # Tell the handler (e.g. so the GUI can update its display)
        self.handler.impl_added_to_store(impl)
    return download_impl()
def _add_to_cache(self, required_digest, stores, retrieval_method, stream):
    """Unpack the downloaded archive stream into the store, verifying it
    against required_digest."""
    assert isinstance(retrieval_method, DownloadSource)
    offset = retrieval_method.start_offset or 0
    stores.add_archive_to_cache(required_digest, stream,
                                retrieval_method.url,
                                retrieval_method.extract,
                                type = retrieval_method.type,
                                start_offset = offset)
def download_archive(self, download_source, force = False, impl_hint = None):
    """Fetch an archive. You should normally call L{download_impl}
    instead, since it handles other kinds of retrieval method too."""
    from zeroinstall.zerostore import unpack

    url = download_source.url
    if not url.startswith(('http:', 'https:', 'ftp:')):
        raise SafeException(_("Unknown scheme in download URL '%s'") % url)

    # Explicit type attribute wins; otherwise guess from the file extension
    mime_type = download_source.type or unpack.type_from_url(url)
    if not mime_type:
        raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % url)
    unpack.check_type_ok(mime_type)

    dl = self.handler.get_download(url, force = force, hint = impl_hint)
    dl.expected_size = download_source.size + (download_source.start_offset or 0)
    return (dl.downloaded, dl.tempfile)
386 def download_icon(self, interface, force = False, modification_time = None):
387 """Download an icon for this interface and add it to the
388 icon cache. If the interface has no icon or we are offline, do nothing.
389 @return: the task doing the import, or None
390 @rtype: L{tasks.Task}"""
391 debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface, 'force': force})
393 # Find a suitable icon to download
394 for icon in interface.get_metadata(XMLNS_IFACE, 'icon'):
395 type = icon.getAttribute('type')
396 if type != 'image/png':
397 debug(_('Skipping non-PNG icon'))
398 continue
399 source = icon.getAttribute('href')
400 if source:
401 break
402 warn(_('Missing "href" attribute on <icon> in %s'), interface)
403 else:
404 info(_('No PNG icons found in %s'), interface)
405 return
407 try:
408 dl = self.handler.monitored_downloads[source]
409 if dl and force:
410 dl.abort()
411 raise KeyError
412 except KeyError:
413 dl = download.Download(source, hint = interface, modification_time = modification_time)
414 self.handler.monitor_download(dl)
416 @tasks.async
417 def download_and_add_icon():
418 stream = dl.tempfile
419 yield dl.downloaded
420 try:
421 tasks.check(dl.downloaded)
422 if dl.unmodified: return
423 stream.seek(0)
425 import shutil
426 icons_cache = basedir.save_cache_path(config_site, 'interface_icons')
427 icon_file = file(os.path.join(icons_cache, escape(interface.uri)), 'w')
428 shutil.copyfileobj(stream, icon_file)
429 except Exception, ex:
430 self.handler.report_error(ex)
432 return download_and_add_icon()
def download_impls(self, implementations, stores):
    """Download the given implementations, choosing a suitable retrieval method for each.
    If any of the retrieval methods are DistributionSources and
    need confirmation, handler.confirm is called to check that the
    installation should proceed.
    @param implementations: the implementations to fetch
    @param stores: where to store the downloaded implementations
    @return: a blocker for the downloads, or None if nothing needs downloading
    @rtype: L{tasks.Blocker} | None"""
    unsafe_impls = []       # package ids needing explicit user confirmation

    # Pick a retrieval method for each implementation up front, so we fail
    # early if any has no download location at all.
    to_download = []
    for impl in implementations:
        debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl.feed, 'implementation': impl})
        source = self.get_best_source(impl)
        if not source:
            raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
                    " cannot be downloaded (no download locations given in "
                    "interface!)") % {'implementation_id': impl.id, 'interface': impl.feed.get_name()})
        to_download.append((impl, source))

        if isinstance(source, DistributionSource) and source.needs_confirmation:
            unsafe_impls.append(source.package_id)

    @tasks.async
    def download_impls():
        # Ask once, up front, about all native packages that need confirmation
        if unsafe_impls:
            confirm = self.handler.confirm_install(_('The following components need to be installed using native packages. '
                    'These come from your distribution, and should therefore be trustworthy, but they also '
                    'run with extra privileges. In particular, installing them may run extra services on your '
                    'computer or affect other users. You may be asked to enter a password to confirm. The '
                    'packages are:\n\n') + ('\n'.join('- ' + x for x in unsafe_impls)))
            yield confirm
            tasks.check(confirm)

        blockers = []

        for impl, source in to_download:
            blockers.append(self.download_impl(impl, source, stores))

        # Record the first error; log the rest
        error = []
        def dl_error(ex, tb = None):
            if error:
                self.handler.report_error(ex)
            else:
                error.append(ex)
        # Wait for all downloads, dropping blockers as they finish
        while blockers:
            yield blockers
            tasks.check(blockers, dl_error)

            blockers = [b for b in blockers if not b.happened]
        if error:
            raise error[0]

    if not to_download:
        return None

    return download_impls()
def get_best_source(self, impl):
    """Return the best download source for this implementation.
    @rtype: L{model.RetrievalMethod}"""
    sources = impl.download_sources
    if not sources:
        return None
    return sources[0]