2 Downloads feeds, keys, packages and icons.
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
8 from zeroinstall
import _
10 from logging
import info
, debug
, warn
12 from zeroinstall
.support
import tasks
, basedir
13 from zeroinstall
.injector
.namespaces
import XMLNS_IFACE
, config_site
14 from zeroinstall
.injector
.model
import DownloadSource
, Recipe
, SafeException
, escape
, DistributionSource
15 from zeroinstall
.injector
.iface_cache
import PendingFeed
, ReplayAttack
16 from zeroinstall
.injector
.handler
import NoTrustedKeys
17 from zeroinstall
.injector
import download
# Base URL of the mirror site tried when a feed's primary location is
# slow or unavailable (see Fetcher.feed_mirror / get_feed_mirror).
DEFAULT_FEED_MIRROR = "http://roscidus.com/0mirror"
# Base URL of the service queried for information about GPG keys
# (see Fetcher.key_info_server and KeyInfoFetcher).
DEFAULT_KEY_LOOKUP_SERVER = 'https://keylookup.appspot.com'
22 def _escape_slashes(path
):
23 return path
.replace('/', '%23')
def _get_feed_dir(feed):
	"""The algorithm from 0mirror.
	Map a feed URL to its relative directory on a mirror site:
	'feeds/SCHEME/DOMAIN/ESCAPED-REST'.
	@param feed: the feed's full URL
	@return: the relative path on the mirror
	@raise SafeException: if the URL cannot be mapped safely"""
	# NOTE(review): this guard condition was reconstructed — confirm against upstream 0mirror.
	if '#' in feed:
		raise SafeException(_("Invalid URL '%s'") % feed)
	scheme, rest = feed.split('://', 1)
	domain, rest = rest.split('/', 1)
	# Refuse empty or suspicious components that could escape the feeds area
	for x in [scheme, domain, rest]:
		if not x or x.startswith(','):
			raise SafeException(_("Invalid URL '%s'") % feed)
	return os.path.join('feeds', scheme, domain, _escape_slashes(rest))
37 """Fetches information about a GPG key from a key-info server.
38 See L{Fetcher.fetch_key_info} for details.
43 >>> kf = KeyInfoFetcher('https://server', fingerprint)
46 if kf.blocker is None: break
50 def __init__(self
, server
, fingerprint
):
51 self
.fingerprint
= fingerprint
55 if server
is None: return
57 self
.status
= _('Fetching key information from %s...') % server
59 dl
= download
.Download(server
+ '/key/' + fingerprint
)
62 from xml
.dom
import minidom
67 tempfile
= dl
.tempfile
70 tasks
.check(dl
.downloaded
)
72 doc
= minidom
.parse(tempfile
)
73 if doc
.documentElement
.localName
!= 'key-lookup':
74 raise SafeException(_('Expected <key-lookup>, not <%s>') % doc
.documentElement
.localName
)
75 self
.info
+= doc
.documentElement
.childNodes
77 doc
= minidom
.parseString('<item vote="bad"/>')
78 root
= doc
.documentElement
79 root
.appendChild(doc
.createTextNode(_('Error getting key information: %s') % ex
))
80 self
.info
.append(root
)
82 self
.blocker
= fetch_key_info()
class Fetcher(object):
	"""Downloads and stores various things.
	@ivar handler: handler to use for user-interaction
	@type handler: L{handler.Handler}
	@ivar key_info: caches information about GPG keys
	@type key_info: {str: L{KeyInfoFetcher}}
	@ivar key_info_server: the base URL of a key information server
	@type key_info_server: str
	@ivar feed_mirror: the base URL of a mirror site for keys and feeds
	@type feed_mirror: str | None
	"""
	__slots__ = ['handler', 'feed_mirror', 'key_info_server', 'key_info']

	def __init__(self, handler):
		self.handler = handler
		self.feed_mirror = DEFAULT_FEED_MIRROR
		self.key_info_server = DEFAULT_KEY_LOOKUP_SERVER
		# fingerprint -> KeyInfoFetcher; declared in __slots__ and read by
		# fetch_key_info, so it must be initialized here
		self.key_info = {}
104 def cook(self
, required_digest
, recipe
, stores
, force
= False, impl_hint
= None):
106 @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
107 @see: L{download_impl} uses this method when appropriate"""
108 # Maybe we're taking this metaphor too far?
110 # Start downloading all the ingredients.
111 downloads
= {} # Downloads that are not yet successful
112 streams
= {} # Streams collected from successful downloads
114 # Start a download for each ingredient
116 for step
in recipe
.steps
:
117 blocker
, stream
= self
.download_archive(step
, force
= force
, impl_hint
= impl_hint
)
119 blockers
.append(blocker
)
120 streams
[step
] = stream
124 tasks
.check(blockers
)
125 blockers
= [b
for b
in blockers
if not b
.happened
]
127 from zeroinstall
.zerostore
import unpack
129 # Create an empty directory for the new implementation
130 store
= stores
.stores
[0]
131 tmpdir
= store
.get_tmp_dir_for(required_digest
)
133 # Unpack each of the downloaded archives into it in turn
134 for step
in recipe
.steps
:
135 stream
= streams
[step
]
137 unpack
.unpack_archive_over(step
.url
, stream
, tmpdir
, step
.extract
)
138 # Check that the result is correct and store it in the cache
139 store
.check_manifest_and_rename(required_digest
, tmpdir
)
142 # If unpacking fails, remove the temporary directory
143 if tmpdir
is not None:
144 from zeroinstall
import support
145 support
.ro_rmtree(tmpdir
)
147 def get_feed_mirror(self
, url
):
148 """Return the URL of a mirror for this feed."""
149 if self
.feed_mirror
is None:
152 if urlparse
.urlparse(url
).hostname
== 'localhost':
154 return '%s/%s/latest.xml' % (self
.feed_mirror
, _get_feed_dir(url
))
156 def download_and_import_feed(self
, feed_url
, iface_cache
, force
= False):
157 """Download the feed, download any required keys, confirm trust if needed and import.
158 @param feed_url: the feed to be downloaded
160 @param iface_cache: cache in which to store the feed
161 @type iface_cache: L{iface_cache.IfaceCache}
162 @param force: whether to abort and restart an existing download"""
163 from download
import DownloadAborted
165 debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url
, 'force': force
})
166 assert not feed_url
.startswith('/')
168 primary
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= False)
170 @tasks.named_async("monitor feed downloads for " + feed_url
)
171 def wait_for_downloads(primary
):
172 # Download just the upstream feed, unless it takes too long...
173 timeout
= tasks
.TimeoutBlocker(5, 'Mirror timeout') # 5 seconds
175 yield primary
, timeout
181 return # OK, primary succeeded!
182 # OK, maybe it's just being slow...
183 info("Feed download from %s is taking a long time.", feed_url
)
185 except NoTrustedKeys
, ex
:
186 raise # Don't bother trying the mirror if we have a trust problem
187 except ReplayAttack
, ex
:
188 raise # Don't bother trying the mirror if we have a replay attack
189 except DownloadAborted
, ex
:
190 raise # Don't bother trying the mirror if the user cancelled
191 except SafeException
, ex
:
195 warn(_("Feed download from %(url)s failed: %(exception)s"), {'url': feed_url
, 'exception': ex
})
197 # Start downloading from mirror...
198 mirror
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= True)
200 # Wait until both mirror and primary tasks are complete...
202 blockers
= filter(None, [primary
, mirror
])
212 # No point carrying on with the mirror once the primary has succeeded
214 info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url
)
216 except SafeException
, ex
:
219 info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url
, 'exception': ex
})
227 # We already warned; no need to raise an exception too,
228 # as the mirror download succeeded.
230 except ReplayAttack
, ex
:
231 info(_("Version from mirror is older than cached version; ignoring it: %s"), ex
)
234 except SafeException
, ex
:
235 info(_("Mirror download failed: %s"), ex
)
241 return wait_for_downloads(primary
)
243 def _download_and_import_feed(self
, feed_url
, iface_cache
, force
, use_mirror
):
244 """Download and import a feed.
245 @param use_mirror: False to use primary location; True to use mirror."""
247 url
= self
.get_feed_mirror(feed_url
)
248 if url
is None: return None
249 warn(_("Trying mirror server for feed %s") % feed_url
)
253 dl
= self
.handler
.get_download(url
, force
= force
, hint
= feed_url
)
256 @tasks.named_async("fetch_feed " + url
)
259 tasks
.check(dl
.downloaded
)
261 pending
= PendingFeed(feed_url
, stream
)
264 # If we got the feed from a mirror, get the key from there too
265 key_mirror
= self
.feed_mirror
+ '/keys/'
269 keys_downloaded
= tasks
.Task(pending
.download_keys(self
.handler
, feed_hint
= feed_url
, key_mirror
= key_mirror
), _("download keys for %s") % feed_url
)
270 yield keys_downloaded
.finished
271 tasks
.check(keys_downloaded
.finished
)
273 if not iface_cache
.update_feed_if_trusted(pending
.url
, pending
.sigs
, pending
.new_xml
):
274 blocker
= self
.handler
.confirm_keys(pending
, self
.fetch_key_info
)
278 if not iface_cache
.update_feed_if_trusted(pending
.url
, pending
.sigs
, pending
.new_xml
):
279 raise NoTrustedKeys(_("No signing keys trusted; not importing"))
285 def fetch_key_info(self
, fingerprint
):
287 return self
.key_info
[fingerprint
]
289 self
.key_info
[fingerprint
] = info
= KeyInfoFetcher(self
.key_info_server
, fingerprint
)
292 def download_impl(self
, impl
, retrieval_method
, stores
, force
= False):
293 """Download an implementation.
294 @param impl: the selected implementation
295 @type impl: L{model.ZeroInstallImplementation}
296 @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
297 @type retrieval_method: L{model.RetrievalMethod}
298 @param stores: where to store the downloaded implementation
299 @type stores: L{zerostore.Stores}
300 @param force: whether to abort and restart an existing download
301 @rtype: L{tasks.Blocker}"""
303 assert retrieval_method
305 if isinstance(retrieval_method
, DistributionSource
):
306 raise SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
307 "Please install it manually using your distribution's tools and try again.") % retrieval_method
.package_id
)
309 from zeroinstall
.zerostore
import manifest
311 for digest
in impl
.digests
:
312 alg_name
= digest
.split('=', 1)[0]
313 alg
= manifest
.algorithms
.get(alg_name
, None)
314 if alg
and (best
is None or best
.rating
< alg
.rating
):
316 required_digest
= digest
320 raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
321 {'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
322 raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
323 {'algorithms': impl
.digests
, 'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
327 if isinstance(retrieval_method
, DownloadSource
):
328 blocker
, stream
= self
.download_archive(retrieval_method
, force
= force
, impl_hint
= impl
)
333 self
._add
_to
_cache
(required_digest
, stores
, retrieval_method
, stream
)
334 elif isinstance(retrieval_method
, Recipe
):
335 blocker
= self
.cook(required_digest
, retrieval_method
, stores
, force
, impl_hint
= impl
)
339 raise Exception(_("Unknown download type for '%s'") % retrieval_method
)
341 self
.handler
.impl_added_to_store(impl
)
342 return download_impl()
344 def _add_to_cache(self
, required_digest
, stores
, retrieval_method
, stream
):
345 assert isinstance(retrieval_method
, DownloadSource
)
346 url
= retrieval_method
.url
347 stores
.add_archive_to_cache(required_digest
, stream
, retrieval_method
.url
, retrieval_method
.extract
,
348 type = retrieval_method
.type, start_offset
= retrieval_method
.start_offset
or 0)
350 def download_archive(self
, download_source
, force
= False, impl_hint
= None):
351 """Fetch an archive. You should normally call L{download_impl}
352 instead, since it handles other kinds of retrieval method too."""
353 from zeroinstall
.zerostore
import unpack
355 url
= download_source
.url
356 if not (url
.startswith('http:') or url
.startswith('https:') or url
.startswith('ftp:')):
357 raise SafeException(_("Unknown scheme in download URL '%s'") % url
)
359 mime_type
= download_source
.type
361 mime_type
= unpack
.type_from_url(download_source
.url
)
363 raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source
.url
)
364 unpack
.check_type_ok(mime_type
)
365 dl
= self
.handler
.get_download(download_source
.url
, force
= force
, hint
= impl_hint
)
366 dl
.expected_size
= download_source
.size
+ (download_source
.start_offset
or 0)
367 return (dl
.downloaded
, dl
.tempfile
)
369 def download_icon(self
, interface
, force
= False, modification_time
= None):
370 """Download an icon for this interface and add it to the
371 icon cache. If the interface has no icon or we are offline, do nothing.
372 @return: the task doing the import, or None
373 @rtype: L{tasks.Task}"""
374 debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface
, 'force': force
})
376 # Find a suitable icon to download
377 for icon
in interface
.get_metadata(XMLNS_IFACE
, 'icon'):
378 type = icon
.getAttribute('type')
379 if type not in ('image/png', 'image/svg+xml', 'image/svg+xml-compressed'):
380 debug(_('MIME type %(mime_type)s is not permited for feed icons'), {'mime_type': type})
382 source
= icon
.getAttribute('href')
385 warn(_('Missing "href" attribute on <icon> in %s'), interface
)
387 info(_('No PNG icons found in %s'), interface
)
391 dl
= self
.handler
.monitored_downloads
[source
]
396 dl
= download
.Download(source
, hint
= interface
, modification_time
= modification_time
)
397 self
.handler
.monitor_download(dl
)
400 def download_and_add_icon():
404 tasks
.check(dl
.downloaded
)
405 if dl
.unmodified
: return
409 icons_cache
= basedir
.save_cache_path(config_site
, 'interface_icons')
410 icon_file
= file(os
.path
.join(icons_cache
, escape(interface
.uri
)), 'w')
411 shutil
.copyfileobj(stream
, icon_file
)
412 except Exception, ex
:
413 self
.handler
.report_error(ex
)
415 return download_and_add_icon()
417 def download_impls(self
, implementations
, stores
):
418 """Download the given implementations, choosing a suitable retrieval method for each."""
422 for impl
in implementations
:
423 debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl
.feed
, 'implementation': impl
})
424 source
= self
.get_best_source(impl
)
426 raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
427 " cannot be downloaded (no download locations given in "
428 "interface!)") % {'implementation_id': impl
.id, 'interface': impl
.feed
.get_name()})
429 to_download
.append((impl
, source
))
431 for impl
, source
in to_download
:
432 blockers
.append(self
.download_impl(impl
, source
, stores
))
438 def download_impls(blockers
):
439 # Record the first error log the rest
441 def dl_error(ex
, tb
= None):
443 self
.handler
.report_error(ex
)
448 tasks
.check(blockers
, dl_error
)
450 blockers
= [b
for b
in blockers
if not b
.happened
]
454 return download_impls(blockers
)
456 def get_best_source(self
, impl
):
457 """Return the best download source for this implementation.
458 @rtype: L{model.RetrievalMethod}"""
459 if impl
.download_sources
:
460 return impl
.download_sources
[0]