"""
Downloads feeds, keys, packages and icons.
"""

# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
8 from zeroinstall
import _
10 from logging
import info
, debug
, warn
12 from zeroinstall
.support
import tasks
, basedir
13 from zeroinstall
.injector
.namespaces
import XMLNS_IFACE
, config_site
14 from zeroinstall
.injector
.model
import DownloadSource
, Recipe
, SafeException
, escape
, DistributionSource
15 from zeroinstall
.injector
.iface_cache
import PendingFeed
, ReplayAttack
16 from zeroinstall
.injector
.handler
import NoTrustedKeys
17 from zeroinstall
.injector
import download
# Base URL of the default mirror site; get_feed_mirror() builds
# fall-back feed URLs under this prefix.
DEFAULT_FEED_MIRROR = "http://roscidus.com/0mirror"

# Default server queried by KeyInfoFetcher for information about GPG keys.
DEFAULT_KEY_LOOKUP_SERVER = 'https://keylookup.appspot.com'
22 def _escape_slashes(path
):
23 return path
.replace('/', '%23')
def _get_feed_dir(feed):
	"""The algorithm from 0mirror.
	Map a feed URL to its relative directory on the mirror site:
	feeds/<scheme>/<domain>/<escaped-rest>.
	@raise SafeException: if the URL cannot be split into valid parts."""
	# NOTE(review): the guard condition was lost in the mangled source;
	# restored as the 0mirror rule that feed URLs may not contain '#'.
	if '#' in feed:
		raise SafeException(_("Invalid URL '%s'") % feed)
	scheme, rest = feed.split('://', 1)
	assert '/' in rest, "Missing / in %s" % feed
	domain, rest = rest.split('/', 1)
	# Reject empty components and ones that would sort oddly on the mirror
	for x in [scheme, domain, rest]:
		if not x or x.startswith(','):
			raise SafeException(_("Invalid URL '%s'") % feed)
	return os.path.join('feeds', scheme, domain, _escape_slashes(rest))
38 """Fetches information about a GPG key from a key-info server.
39 See L{Fetcher.fetch_key_info} for details.
44 >>> kf = KeyInfoFetcher(handler, 'https://server', fingerprint)
47 if kf.blocker is None: break
51 def __init__(self
, handler
, server
, fingerprint
):
52 self
.fingerprint
= fingerprint
56 if server
is None: return
58 self
.status
= _('Fetching key information from %s...') % server
60 dl
= handler
.get_download(server
+ '/key/' + fingerprint
)
62 from xml
.dom
import minidom
67 tempfile
= dl
.tempfile
70 tasks
.check(dl
.downloaded
)
72 doc
= minidom
.parse(tempfile
)
73 if doc
.documentElement
.localName
!= 'key-lookup':
74 raise SafeException(_('Expected <key-lookup>, not <%s>') % doc
.documentElement
.localName
)
75 self
.info
+= doc
.documentElement
.childNodes
77 doc
= minidom
.parseString('<item vote="bad"/>')
78 root
= doc
.documentElement
79 root
.appendChild(doc
.createTextNode(_('Error getting key information: %s') % ex
))
80 self
.info
.append(root
)
82 self
.blocker
= fetch_key_info()
84 class Fetcher(object):
85 """Downloads and stores various things.
86 @ivar handler: handler to use for user-interaction
87 @type handler: L{handler.Handler}
88 @ivar key_info: caches information about GPG keys
89 @type key_info: {str: L{KeyInfoFetcher}}
90 @ivar key_info_server: the base URL of a key information server
91 @type key_info_server: str
92 @ivar feed_mirror: the base URL of a mirror site for keys and feeds
93 @type feed_mirror: str | None
95 __slots__
= ['handler', 'feed_mirror', 'key_info_server', 'key_info']
97 def __init__(self
, handler
):
98 self
.handler
= handler
99 self
.feed_mirror
= DEFAULT_FEED_MIRROR
100 self
.key_info_server
= DEFAULT_KEY_LOOKUP_SERVER
104 def cook(self
, required_digest
, recipe
, stores
, force
= False, impl_hint
= None):
106 @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
107 @see: L{download_impl} uses this method when appropriate"""
108 # Maybe we're taking this metaphor too far?
110 # Start downloading all the ingredients.
111 streams
= {} # Streams collected from successful downloads
113 # Start a download for each ingredient
115 for step
in recipe
.steps
:
116 blocker
, stream
= self
.download_archive(step
, force
= force
, impl_hint
= impl_hint
)
118 blockers
.append(blocker
)
119 streams
[step
] = stream
123 tasks
.check(blockers
)
124 blockers
= [b
for b
in blockers
if not b
.happened
]
126 from zeroinstall
.zerostore
import unpack
128 # Create an empty directory for the new implementation
129 store
= stores
.stores
[0]
130 tmpdir
= store
.get_tmp_dir_for(required_digest
)
132 # Unpack each of the downloaded archives into it in turn
133 for step
in recipe
.steps
:
134 stream
= streams
[step
]
136 unpack
.unpack_archive_over(step
.url
, stream
, tmpdir
, step
.extract
)
137 # Check that the result is correct and store it in the cache
138 store
.check_manifest_and_rename(required_digest
, tmpdir
)
141 # If unpacking fails, remove the temporary directory
142 if tmpdir
is not None:
143 from zeroinstall
import support
144 support
.ro_rmtree(tmpdir
)
146 def get_feed_mirror(self
, url
):
147 """Return the URL of a mirror for this feed."""
148 if self
.feed_mirror
is None:
151 if urlparse
.urlparse(url
).hostname
== 'localhost':
153 return '%s/%s/latest.xml' % (self
.feed_mirror
, _get_feed_dir(url
))
156 def get_packagekit_feed(self
, iface_cache
, feed_url
):
157 """Send a query to PackageKit (if available) for information about this package.
158 On success, the result is added to iface_cache.
160 assert feed_url
.startswith('distribution:'), feed_url
161 master_feed
= iface_cache
.get_feed(feed_url
.split(':', 1)[1])
163 fetch
= iface_cache
.distro
.fetch_candidates(master_feed
)
168 # Force feed to be regenerated with the new information
169 iface_cache
.get_feed(feed_url
, force
= True)
171 def download_and_import_feed(self
, feed_url
, iface_cache
, force
= False):
172 """Download the feed, download any required keys, confirm trust if needed and import.
173 @param feed_url: the feed to be downloaded
175 @param iface_cache: cache in which to store the feed
176 @type iface_cache: L{iface_cache.IfaceCache}
177 @param force: whether to abort and restart an existing download"""
178 from download
import DownloadAborted
180 debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url
, 'force': force
})
181 assert not os
.path
.isabs(feed_url
)
183 if feed_url
.startswith('distribution:'):
184 return self
.get_packagekit_feed(iface_cache
, feed_url
)
186 primary
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= False)
188 @tasks.named_async("monitor feed downloads for " + feed_url
)
189 def wait_for_downloads(primary
):
190 # Download just the upstream feed, unless it takes too long...
191 timeout
= tasks
.TimeoutBlocker(5, 'Mirror timeout') # 5 seconds
193 yield primary
, timeout
199 return # OK, primary succeeded!
200 # OK, maybe it's just being slow...
201 info("Feed download from %s is taking a long time.", feed_url
)
203 except NoTrustedKeys
, ex
:
204 raise # Don't bother trying the mirror if we have a trust problem
205 except ReplayAttack
, ex
:
206 raise # Don't bother trying the mirror if we have a replay attack
207 except DownloadAborted
, ex
:
208 raise # Don't bother trying the mirror if the user cancelled
209 except SafeException
, ex
:
213 warn(_("Feed download from %(url)s failed: %(exception)s"), {'url': feed_url
, 'exception': ex
})
215 # Start downloading from mirror...
216 mirror
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= True)
218 # Wait until both mirror and primary tasks are complete...
220 blockers
= filter(None, [primary
, mirror
])
230 # No point carrying on with the mirror once the primary has succeeded
232 info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url
)
234 except SafeException
, ex
:
237 info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url
, 'exception': ex
})
245 # We already warned; no need to raise an exception too,
246 # as the mirror download succeeded.
248 except ReplayAttack
, ex
:
249 info(_("Version from mirror is older than cached version; ignoring it: %s"), ex
)
252 except SafeException
, ex
:
253 info(_("Mirror download failed: %s"), ex
)
259 return wait_for_downloads(primary
)
261 def _download_and_import_feed(self
, feed_url
, iface_cache
, force
, use_mirror
):
262 """Download and import a feed.
263 @param use_mirror: False to use primary location; True to use mirror."""
265 url
= self
.get_feed_mirror(feed_url
)
266 if url
is None: return None
267 warn(_("Trying mirror server for feed %s") % feed_url
)
271 dl
= self
.handler
.get_download(url
, force
= force
, hint
= feed_url
)
274 @tasks.named_async("fetch_feed " + url
)
277 tasks
.check(dl
.downloaded
)
279 pending
= PendingFeed(feed_url
, stream
)
282 # If we got the feed from a mirror, get the key from there too
283 key_mirror
= self
.feed_mirror
+ '/keys/'
287 keys_downloaded
= tasks
.Task(pending
.download_keys(self
.handler
, feed_hint
= feed_url
, key_mirror
= key_mirror
), _("download keys for %s") % feed_url
)
288 yield keys_downloaded
.finished
289 tasks
.check(keys_downloaded
.finished
)
291 if not iface_cache
.update_feed_if_trusted(pending
.url
, pending
.sigs
, pending
.new_xml
):
292 blocker
= self
.handler
.confirm_keys(pending
, self
.fetch_key_info
)
296 if not iface_cache
.update_feed_if_trusted(pending
.url
, pending
.sigs
, pending
.new_xml
):
297 raise NoTrustedKeys(_("No signing keys trusted; not importing"))
303 def fetch_key_info(self
, fingerprint
):
305 return self
.key_info
[fingerprint
]
307 self
.key_info
[fingerprint
] = key_info
= KeyInfoFetcher(self
.handler
,
308 self
.key_info_server
, fingerprint
)
311 def download_impl(self
, impl
, retrieval_method
, stores
, force
= False):
312 """Download an implementation.
313 @param impl: the selected implementation
314 @type impl: L{model.ZeroInstallImplementation}
315 @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
316 @type retrieval_method: L{model.RetrievalMethod}
317 @param stores: where to store the downloaded implementation
318 @type stores: L{zerostore.Stores}
319 @param force: whether to abort and restart an existing download
320 @rtype: L{tasks.Blocker}"""
322 assert retrieval_method
324 if isinstance(retrieval_method
, DistributionSource
):
325 return retrieval_method
.install(self
.handler
)
327 from zeroinstall
.zerostore
import manifest
329 for digest
in impl
.digests
:
330 alg_name
= digest
.split('=', 1)[0]
331 alg
= manifest
.algorithms
.get(alg_name
, None)
332 if alg
and (best
is None or best
.rating
< alg
.rating
):
334 required_digest
= digest
338 raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
339 {'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
340 raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
341 {'algorithms': impl
.digests
, 'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
345 if isinstance(retrieval_method
, DownloadSource
):
346 blocker
, stream
= self
.download_archive(retrieval_method
, force
= force
, impl_hint
= impl
)
351 self
._add
_to
_cache
(required_digest
, stores
, retrieval_method
, stream
)
352 elif isinstance(retrieval_method
, Recipe
):
353 blocker
= self
.cook(required_digest
, retrieval_method
, stores
, force
, impl_hint
= impl
)
357 raise Exception(_("Unknown download type for '%s'") % retrieval_method
)
359 self
.handler
.impl_added_to_store(impl
)
360 return download_impl()
362 def _add_to_cache(self
, required_digest
, stores
, retrieval_method
, stream
):
363 assert isinstance(retrieval_method
, DownloadSource
)
364 stores
.add_archive_to_cache(required_digest
, stream
, retrieval_method
.url
, retrieval_method
.extract
,
365 type = retrieval_method
.type, start_offset
= retrieval_method
.start_offset
or 0)
367 def download_archive(self
, download_source
, force
= False, impl_hint
= None):
368 """Fetch an archive. You should normally call L{download_impl}
369 instead, since it handles other kinds of retrieval method too."""
370 from zeroinstall
.zerostore
import unpack
372 url
= download_source
.url
373 if not (url
.startswith('http:') or url
.startswith('https:') or url
.startswith('ftp:')):
374 raise SafeException(_("Unknown scheme in download URL '%s'") % url
)
376 mime_type
= download_source
.type
378 mime_type
= unpack
.type_from_url(download_source
.url
)
380 raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source
.url
)
381 unpack
.check_type_ok(mime_type
)
382 dl
= self
.handler
.get_download(download_source
.url
, force
= force
, hint
= impl_hint
)
383 dl
.expected_size
= download_source
.size
+ (download_source
.start_offset
or 0)
384 return (dl
.downloaded
, dl
.tempfile
)
386 def download_icon(self
, interface
, force
= False, modification_time
= None):
387 """Download an icon for this interface and add it to the
388 icon cache. If the interface has no icon or we are offline, do nothing.
389 @return: the task doing the import, or None
390 @rtype: L{tasks.Task}"""
391 debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface
, 'force': force
})
393 # Find a suitable icon to download
394 for icon
in interface
.get_metadata(XMLNS_IFACE
, 'icon'):
395 type = icon
.getAttribute('type')
396 if type != 'image/png':
397 debug(_('Skipping non-PNG icon'))
399 source
= icon
.getAttribute('href')
402 warn(_('Missing "href" attribute on <icon> in %s'), interface
)
404 info(_('No PNG icons found in %s'), interface
)
408 dl
= self
.handler
.monitored_downloads
[source
]
413 dl
= download
.Download(source
, hint
= interface
, modification_time
= modification_time
)
414 self
.handler
.monitor_download(dl
)
417 def download_and_add_icon():
421 tasks
.check(dl
.downloaded
)
422 if dl
.unmodified
: return
426 icons_cache
= basedir
.save_cache_path(config_site
, 'interface_icons')
427 icon_file
= file(os
.path
.join(icons_cache
, escape(interface
.uri
)), 'w')
428 shutil
.copyfileobj(stream
, icon_file
)
429 except Exception, ex
:
430 self
.handler
.report_error(ex
)
432 return download_and_add_icon()
434 def download_impls(self
, implementations
, stores
):
435 """Download the given implementations, choosing a suitable retrieval method for each.
436 If any of the retrieval methods are DistributionSources and
437 need confirmation, handler.confirm is called to check that the
438 installation should proceed.
443 for impl
in implementations
:
444 debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl
.feed
, 'implementation': impl
})
445 source
= self
.get_best_source(impl
)
447 raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
448 " cannot be downloaded (no download locations given in "
449 "interface!)") % {'implementation_id': impl
.id, 'interface': impl
.feed
.get_name()})
450 to_download
.append((impl
, source
))
452 if isinstance(source
, DistributionSource
) and source
.needs_confirmation
:
453 unsafe_impls
.append(source
.package_id
)
456 def download_impls():
458 confirm
= self
.handler
.confirm_install(_('The following components need to be installed using native packages. '
459 'These come from your distribution, and should therefore be trustworthy, but they also '
460 'run with extra privileges. In particular, installing them may run extra services on your '
461 'computer or affect other users. You may be asked to enter a password to confirm. The '
462 'packages are:\n\n') + ('\n'.join('- ' + x
for x
in unsafe_impls
)))
468 for impl
, source
in to_download
:
469 blockers
.append(self
.download_impl(impl
, source
, stores
))
471 # Record the first error log the rest
473 def dl_error(ex
, tb
= None):
475 self
.handler
.report_error(ex
)
480 tasks
.check(blockers
, dl_error
)
482 blockers
= [b
for b
in blockers
if not b
.happened
]
489 return download_impls()
491 def get_best_source(self
, impl
):
492 """Return the best download source for this implementation.
493 @rtype: L{model.RetrievalMethod}"""
494 if impl
.download_sources
:
495 return impl
.download_sources
[0]