2 Downloads feeds, keys, packages and icons.
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
8 from zeroinstall
import _
10 from logging
import info
, debug
, warn
12 from zeroinstall
.support
import tasks
, basedir
13 from zeroinstall
.injector
.namespaces
import XMLNS_IFACE
, config_site
14 from zeroinstall
.injector
.model
import DownloadSource
, Recipe
, SafeException
, escape
, DistributionSource
15 from zeroinstall
.injector
.iface_cache
import PendingFeed
, ReplayAttack
16 from zeroinstall
.injector
.handler
import NoTrustedKeys
17 from zeroinstall
.injector
import download
19 def _escape_slashes(path
):
20 return path
.replace('/', '%23')
def _get_feed_dir(feed):
	"""The algorithm from 0mirror.

	Map a feed URL to the mirror server's directory for that feed:
	feeds/<scheme>/<domain>/<escaped-rest-of-path>.
	@raise SafeException: if the URL contains a fragment or an empty/odd component."""
	# NOTE(review): the guard condition before the first raise was lost in the
	# garbled source; restored as the fragment check from upstream 0mirror.
	if '#' in feed:
		raise SafeException(_("Invalid URL '%s'") % feed)
	scheme, rest = feed.split('://', 1)
	assert '/' in rest, "Missing / in %s" % feed
	domain, rest = rest.split('/', 1)
	# Refuse empty components or ones starting with ',' (reserved by the mirror layout)
	for x in [scheme, domain, rest]:
		if not x or x.startswith(','):
			raise SafeException(_("Invalid URL '%s'") % feed)
	return os.path.join('feeds', scheme, domain, _escape_slashes(rest))
35 """Fetches information about a GPG key from a key-info server.
36 See L{Fetcher.fetch_key_info} for details.
41 >>> kf = KeyInfoFetcher(handler, 'https://server', fingerprint)
44 if kf.blocker is None: break
48 def __init__(self
, handler
, server
, fingerprint
):
49 self
.fingerprint
= fingerprint
53 if server
is None: return
55 self
.status
= _('Fetching key information from %s...') % server
57 dl
= handler
.get_download(server
+ '/key/' + fingerprint
)
59 from xml
.dom
import minidom
64 tempfile
= dl
.tempfile
67 tasks
.check(dl
.downloaded
)
69 doc
= minidom
.parse(tempfile
)
70 if doc
.documentElement
.localName
!= 'key-lookup':
71 raise SafeException(_('Expected <key-lookup>, not <%s>') % doc
.documentElement
.localName
)
72 self
.info
+= doc
.documentElement
.childNodes
74 doc
= minidom
.parseString('<item vote="bad"/>')
75 root
= doc
.documentElement
76 root
.appendChild(doc
.createTextNode(_('Error getting key information: %s') % ex
))
77 self
.info
.append(root
)
79 self
.blocker
= fetch_key_info()
class Fetcher(object):
	"""Downloads and stores various things.
	@ivar config: used to get handler, iface_cache and stores
	@type config: L{config.Config}
	@ivar key_info: caches information about GPG keys
	@type key_info: {str: L{KeyInfoFetcher}}
	"""
	__slots__ = ['config', 'key_info']

	def __init__(self, config):
		# NOTE(review): attribute assignments were lost in the garbled source;
		# restored to match __slots__ and the uses elsewhere in this class.
		assert config.handler, "API change!"
		self.config = config
		self.key_info = {}

	@property
	def handler(self):
		# Convenience shortcut; all downloads go through the config's handler.
		return self.config.handler
100 def cook(self
, required_digest
, recipe
, stores
, force
= False, impl_hint
= None):
102 @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
103 @see: L{download_impl} uses this method when appropriate"""
104 # Maybe we're taking this metaphor too far?
106 # Start downloading all the ingredients.
107 streams
= {} # Streams collected from successful downloads
109 # Start a download for each ingredient
111 for step
in recipe
.steps
:
112 blocker
, stream
= self
.download_archive(step
, force
= force
, impl_hint
= impl_hint
)
114 blockers
.append(blocker
)
115 streams
[step
] = stream
119 tasks
.check(blockers
)
120 blockers
= [b
for b
in blockers
if not b
.happened
]
122 from zeroinstall
.zerostore
import unpack
124 # Create an empty directory for the new implementation
125 store
= stores
.stores
[0]
126 tmpdir
= store
.get_tmp_dir_for(required_digest
)
128 # Unpack each of the downloaded archives into it in turn
129 for step
in recipe
.steps
:
130 stream
= streams
[step
]
132 unpack
.unpack_archive_over(step
.url
, stream
, tmpdir
, step
.extract
)
133 # Check that the result is correct and store it in the cache
134 store
.check_manifest_and_rename(required_digest
, tmpdir
)
137 # If unpacking fails, remove the temporary directory
138 if tmpdir
is not None:
139 from zeroinstall
import support
140 support
.ro_rmtree(tmpdir
)
142 def get_feed_mirror(self
, url
):
143 """Return the URL of a mirror for this feed."""
144 if self
.config
.feed_mirror
is None:
147 if urlparse
.urlparse(url
).hostname
== 'localhost':
149 return '%s/%s/latest.xml' % (self
.config
.feed_mirror
, _get_feed_dir(url
))
152 def get_packagekit_feed(self
, feed_url
):
153 """Send a query to PackageKit (if available) for information about this package.
154 On success, the result is added to iface_cache.
156 assert feed_url
.startswith('distribution:'), feed_url
157 master_feed
= self
.config
.iface_cache
.get_feed(feed_url
.split(':', 1)[1])
159 fetch
= self
.config
.iface_cache
.distro
.fetch_candidates(master_feed
)
164 # Force feed to be regenerated with the new information
165 self
.config
.iface_cache
.get_feed(feed_url
, force
= True)
167 def download_and_import_feed(self
, feed_url
, iface_cache
= None, force
= False):
168 """Download the feed, download any required keys, confirm trust if needed and import.
169 @param feed_url: the feed to be downloaded
171 @param iface_cache: (deprecated)
172 @param force: whether to abort and restart an existing download"""
173 from .download
import DownloadAborted
175 assert iface_cache
is None or iface_cache
is self
.config
.iface_cache
177 debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url
, 'force': force
})
178 assert not os
.path
.isabs(feed_url
)
180 if feed_url
.startswith('distribution:'):
181 return self
.get_packagekit_feed(feed_url
)
183 primary
= self
._download
_and
_import
_feed
(feed_url
, force
, use_mirror
= False)
185 @tasks.named_async("monitor feed downloads for " + feed_url
)
186 def wait_for_downloads(primary
):
187 # Download just the upstream feed, unless it takes too long...
188 timeout
= tasks
.TimeoutBlocker(5, 'Mirror timeout') # 5 seconds
190 yield primary
, timeout
196 return # OK, primary succeeded!
197 # OK, maybe it's just being slow...
198 info("Feed download from %s is taking a long time.", feed_url
)
200 except NoTrustedKeys
, ex
:
201 raise # Don't bother trying the mirror if we have a trust problem
202 except ReplayAttack
, ex
:
203 raise # Don't bother trying the mirror if we have a replay attack
204 except DownloadAborted
, ex
:
205 raise # Don't bother trying the mirror if the user cancelled
206 except SafeException
, ex
:
210 warn(_("Feed download from %(url)s failed: %(exception)s"), {'url': feed_url
, 'exception': ex
})
212 # Start downloading from mirror...
213 mirror
= self
._download
_and
_import
_feed
(feed_url
, force
, use_mirror
= True)
215 # Wait until both mirror and primary tasks are complete...
217 blockers
= filter(None, [primary
, mirror
])
227 # No point carrying on with the mirror once the primary has succeeded
229 info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url
)
231 except SafeException
, ex
:
234 info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url
, 'exception': ex
})
242 # We already warned; no need to raise an exception too,
243 # as the mirror download succeeded.
245 except ReplayAttack
, ex
:
246 info(_("Version from mirror is older than cached version; ignoring it: %s"), ex
)
249 except SafeException
, ex
:
250 info(_("Mirror download failed: %s"), ex
)
256 return wait_for_downloads(primary
)
258 def _download_and_import_feed(self
, feed_url
, force
, use_mirror
):
259 """Download and import a feed.
260 @param use_mirror: False to use primary location; True to use mirror."""
262 url
= self
.get_feed_mirror(feed_url
)
263 if url
is None: return None
264 info(_("Trying mirror server for feed %s") % feed_url
)
268 dl
= self
.handler
.get_download(url
, force
= force
, hint
= feed_url
)
271 @tasks.named_async("fetch_feed " + url
)
274 tasks
.check(dl
.downloaded
)
276 pending
= PendingFeed(feed_url
, stream
)
279 # If we got the feed from a mirror, get the key from there too
280 key_mirror
= self
.config
.feed_mirror
+ '/keys/'
284 keys_downloaded
= tasks
.Task(pending
.download_keys(self
.handler
, feed_hint
= feed_url
, key_mirror
= key_mirror
), _("download keys for %s") % feed_url
)
285 yield keys_downloaded
.finished
286 tasks
.check(keys_downloaded
.finished
)
288 if not self
.config
.iface_cache
.update_feed_if_trusted(pending
.url
, pending
.sigs
, pending
.new_xml
):
289 blocker
= self
.config
.trust_mgr
.confirm_keys(pending
)
293 if not self
.config
.iface_cache
.update_feed_if_trusted(pending
.url
, pending
.sigs
, pending
.new_xml
):
294 raise NoTrustedKeys(_("No signing keys trusted; not importing"))
300 def fetch_key_info(self
, fingerprint
):
302 return self
.key_info
[fingerprint
]
304 self
.key_info
[fingerprint
] = key_info
= KeyInfoFetcher(self
.handler
,
305 self
.config
.key_info_server
, fingerprint
)
308 def download_impl(self
, impl
, retrieval_method
, stores
, force
= False):
309 """Download an implementation.
310 @param impl: the selected implementation
311 @type impl: L{model.ZeroInstallImplementation}
312 @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
313 @type retrieval_method: L{model.RetrievalMethod}
314 @param stores: where to store the downloaded implementation
315 @type stores: L{zerostore.Stores}
316 @param force: whether to abort and restart an existing download
317 @rtype: L{tasks.Blocker}"""
319 assert retrieval_method
321 if isinstance(retrieval_method
, DistributionSource
):
322 return retrieval_method
.install(self
.handler
)
324 from zeroinstall
.zerostore
import manifest
326 for digest
in impl
.digests
:
327 alg_name
= digest
.split('=', 1)[0]
328 alg
= manifest
.algorithms
.get(alg_name
, None)
329 if alg
and (best
is None or best
.rating
< alg
.rating
):
331 required_digest
= digest
335 raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
336 {'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
337 raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
338 {'algorithms': impl
.digests
, 'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
342 if isinstance(retrieval_method
, DownloadSource
):
343 blocker
, stream
= self
.download_archive(retrieval_method
, force
= force
, impl_hint
= impl
)
348 self
._add
_to
_cache
(required_digest
, stores
, retrieval_method
, stream
)
349 elif isinstance(retrieval_method
, Recipe
):
350 blocker
= self
.cook(required_digest
, retrieval_method
, stores
, force
, impl_hint
= impl
)
354 raise Exception(_("Unknown download type for '%s'") % retrieval_method
)
356 self
.handler
.impl_added_to_store(impl
)
357 return download_impl()
359 def _add_to_cache(self
, required_digest
, stores
, retrieval_method
, stream
):
360 assert isinstance(retrieval_method
, DownloadSource
)
361 stores
.add_archive_to_cache(required_digest
, stream
, retrieval_method
.url
, retrieval_method
.extract
,
362 type = retrieval_method
.type, start_offset
= retrieval_method
.start_offset
or 0)
364 def download_archive(self
, download_source
, force
= False, impl_hint
= None):
365 """Fetch an archive. You should normally call L{download_impl}
366 instead, since it handles other kinds of retrieval method too."""
367 from zeroinstall
.zerostore
import unpack
369 url
= download_source
.url
370 if not (url
.startswith('http:') or url
.startswith('https:') or url
.startswith('ftp:')):
371 raise SafeException(_("Unknown scheme in download URL '%s'") % url
)
373 mime_type
= download_source
.type
375 mime_type
= unpack
.type_from_url(download_source
.url
)
377 raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source
.url
)
378 unpack
.check_type_ok(mime_type
)
379 dl
= self
.handler
.get_download(download_source
.url
, force
= force
, hint
= impl_hint
)
380 dl
.expected_size
= download_source
.size
+ (download_source
.start_offset
or 0)
381 return (dl
.downloaded
, dl
.tempfile
)
383 def download_icon(self
, interface
, force
= False):
384 """Download an icon for this interface and add it to the
385 icon cache. If the interface has no icon do nothing.
386 @return: the task doing the import, or None
387 @rtype: L{tasks.Task}"""
388 debug("download_icon %(interface)s (force = %(force)d)", {'interface': interface
, 'force': force
})
390 modification_time
= None
391 existing_icon
= self
.config
.iface_cache
.get_icon_path(interface
)
393 file_mtime
= os
.stat(existing_icon
).st_mtime
394 from email
.utils
import formatdate
395 modification_time
= formatdate(timeval
= file_mtime
, localtime
= False, usegmt
= True)
397 # Find a suitable icon to download
398 for icon
in interface
.get_metadata(XMLNS_IFACE
, 'icon'):
399 type = icon
.getAttribute('type')
400 if type != 'image/png':
401 debug(_('Skipping non-PNG icon'))
403 source
= icon
.getAttribute('href')
406 warn(_('Missing "href" attribute on <icon> in %s'), interface
)
408 info(_('No PNG icons found in %s'), interface
)
412 dl
= self
.handler
.monitored_downloads
[source
]
417 dl
= download
.Download(source
, hint
= interface
, modification_time
= modification_time
)
418 self
.handler
.monitor_download(dl
)
421 def download_and_add_icon():
425 tasks
.check(dl
.downloaded
)
426 if dl
.unmodified
: return
430 icons_cache
= basedir
.save_cache_path(config_site
, 'interface_icons')
431 icon_file
= file(os
.path
.join(icons_cache
, escape(interface
.uri
)), 'w')
432 shutil
.copyfileobj(stream
, icon_file
)
433 except Exception, ex
:
434 self
.handler
.report_error(ex
)
436 return download_and_add_icon()
438 def download_impls(self
, implementations
, stores
):
439 """Download the given implementations, choosing a suitable retrieval method for each.
440 If any of the retrieval methods are DistributionSources and
441 need confirmation, handler.confirm is called to check that the
442 installation should proceed.
447 for impl
in implementations
:
448 debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl
.feed
, 'implementation': impl
})
449 source
= self
.get_best_source(impl
)
451 raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
452 " cannot be downloaded (no download locations given in "
453 "interface!)") % {'implementation_id': impl
.id, 'interface': impl
.feed
.get_name()})
454 to_download
.append((impl
, source
))
456 if isinstance(source
, DistributionSource
) and source
.needs_confirmation
:
457 unsafe_impls
.append(source
.package_id
)
460 def download_impls():
462 confirm
= self
.handler
.confirm_install(_('The following components need to be installed using native packages. '
463 'These come from your distribution, and should therefore be trustworthy, but they also '
464 'run with extra privileges. In particular, installing them may run extra services on your '
465 'computer or affect other users. You may be asked to enter a password to confirm. The '
466 'packages are:\n\n') + ('\n'.join('- ' + x
for x
in unsafe_impls
)))
472 for impl
, source
in to_download
:
473 blockers
.append(self
.download_impl(impl
, source
, stores
))
475 # Record the first error log the rest
477 def dl_error(ex
, tb
= None):
479 self
.handler
.report_error(ex
)
484 tasks
.check(blockers
, dl_error
)
486 blockers
= [b
for b
in blockers
if not b
.happened
]
493 return download_impls()
495 def get_best_source(self
, impl
):
496 """Return the best download source for this implementation.
497 @rtype: L{model.RetrievalMethod}"""
498 if impl
.download_sources
:
499 return impl
.download_sources
[0]