2 Downloads feeds, keys, packages and icons.
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
8 from zeroinstall
import _
10 from logging
import info
, debug
, warn
12 from zeroinstall
.support
import tasks
, basedir
13 from zeroinstall
.injector
.namespaces
import XMLNS_IFACE
, config_site
14 from zeroinstall
.injector
.model
import DownloadSource
, Recipe
, SafeException
, escape
15 from zeroinstall
.injector
.iface_cache
import PendingFeed
, ReplayAttack
16 from zeroinstall
.injector
.handler
import NoTrustedKeys
17 from zeroinstall
.injector
import download
# Default key-information server queried for details about GPG key fingerprints
# (used by KeyInfoFetcher / Fetcher.key_info_server below).
19 DEFAULT_KEY_LOOKUP_SERVER
= 'https://keylookup.appspot.com'
21 def _escape_slashes(path
):
22 return path
.replace('/', '%23')
# Map a feed URL to its directory on a 0mirror site:
# 'feeds/<scheme>/<domain>/<rest-with-slashes-escaped>'.
24 def _get_feed_dir(feed
):
25 """The algorithm from 0mirror."""
# NOTE(review): the condition guarding this first raise (original line 26) is
# missing from this extract — confirm against the full source before editing.
27 raise SafeException(_("Invalid URL '%s'") % feed
)
# Split "scheme://domain/rest" into its three parts.
28 scheme
, rest
= feed
.split('://', 1)
29 domain
, rest
= rest
.split('/', 1)
# Reject empty components and components starting with ',' (which would clash
# with the escaping scheme used for the mirror layout).
30 for x
in [scheme
, domain
, rest
]:
31 if not x
or x
.startswith(','):
32 raise SafeException(_("Invalid URL '%s'") % feed
)
# NOTE(review): `os` is used here but no `import os` is visible in this extract.
33 return os
.path
.join('feeds', scheme
, domain
, _escape_slashes(rest
))
36 """Fetches information about a GPG key from a key-info server.
37 See L{Fetcher.fetch_key_info} for details.
42 >>> kf = KeyInfoFetcher('https://server', fingerprint)
45 if kf.blocker is None: break
# Start an asynchronous fetch of key information for `fingerprint` from `server`.
# Parses the <key-lookup> XML reply and accumulates its child nodes in self.info;
# on error, records a single <item vote="bad"> node with the error message.
# NOTE(review): this extract is missing several original lines (e.g. 51-53, 55,
# 57, 59-60, 62-65, 67-68, 70, 75, 80), including the definition of the nested
# fetch_key_info task and its try/except scaffolding — the `ex` used below has
# no visible binding here. Confirm against the full source before editing.
49 def __init__(self
, server
, fingerprint
):
50 self
.fingerprint
= fingerprint
# No server configured: nothing to fetch.
54 if server
is None: return
56 self
.status
= _('Fetching key information from %s...') % server
# Download '<server>/key/<fingerprint>'.
58 dl
= download
.Download(server
+ '/key/' + fingerprint
)
61 from xml
.dom
import minidom
66 tempfile
= dl
.tempfile
69 tasks
.check(dl
.downloaded
)
# Parse the downloaded XML reply.
71 doc
= minidom
.parse(tempfile
)
72 if doc
.documentElement
.localName
!= 'key-lookup':
73 raise SafeException(_('Expected <key-lookup>, not <%s>') % doc
.documentElement
.localName
)
74 self
.info
+= doc
.documentElement
.childNodes
# Error path: record a synthetic "bad" vote carrying the error text.
76 doc
= minidom
.parseString('<item vote="bad"/>')
77 root
= doc
.documentElement
78 root
.appendChild(doc
.createTextNode(_('Error getting key information: %s') % ex
))
79 self
.info
.append(root
)
81 self
.blocker
= fetch_key_info()
# Fetcher: downloads and stores feeds, keys, implementations and icons.
# NOTE(review): the docstring's closing quotes (original line 93) are missing
# from this extract, so the attribute documentation below appears unterminated.
83 class Fetcher(object):
84 """Downloads and stores various things.
85 @ivar handler: handler to use for user-interaction
86 @type handler: L{handler.Handler}
87 @ivar key_info: caches information about GPG keys
88 @type key_info: {str: L{KeyInfoFetcher}}
89 @ivar key_info_server: the base URL of a key information server
90 @type key_info_server: str
91 @ivar feed_mirror: the base URL of a mirror site for keys and feeds
92 @type feed_mirror: str | None
94 __slots__
= ['handler', 'feed_mirror', 'key_info_server', 'key_info']
def __init__(self, handler):
	"""Create a new fetcher.
	@param handler: handler to use for user-interaction"""
	self.handler = handler
	# Built-in defaults; callers may overwrite these attributes afterwards.
	self.key_info_server = DEFAULT_KEY_LOOKUP_SERVER
	self.feed_mirror = "http://roscidus.com/0mirror"
# Assemble an implementation from a Recipe: download each step's archive, unpack
# them in order over a fresh temporary directory, verify the resulting digest and
# move it into the store; on failure, delete the temporary directory.
# NOTE(review): this extract is missing several original lines (e.g. 104, 108,
# 112-114, 117, 120-122, 125, 127) — `blockers` is appended to before any
# visible initialisation, and `downloads` is never visibly used again. The
# try/finally (or except) scaffolding around unpacking also appears truncated.
103 def cook(self
, required_digest
, recipe
, stores
, force
= False, impl_hint
= None):
105 @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
106 @see: L{download_impl} uses this method when appropriate"""
107 # Maybe we're taking this metaphor too far?
109 # Start downloading all the ingredients.
110 downloads
= {} # Downloads that are not yet successful
111 streams
= {} # Streams collected from successful downloads
113 # Start a download for each ingredient
115 for step
in recipe
.steps
:
116 blocker
, stream
= self
.download_archive(step
, force
= force
, impl_hint
= impl_hint
)
118 blockers
.append(blocker
)
119 streams
[step
] = stream
# Wait for downloads; drop the blockers that have completed.
123 tasks
.check(blockers
)
124 blockers
= [b
for b
in blockers
if not b
.happened
]
126 from zeroinstall
.zerostore
import unpack
128 # Create an empty directory for the new implementation
129 store
= stores
.stores
[0]
130 tmpdir
= store
.get_tmp_dir_for(required_digest
)
132 # Unpack each of the downloaded archives into it in turn
133 for step
in recipe
.steps
:
134 stream
= streams
[step
]
136 unpack
.unpack_archive_over(step
.url
, stream
, tmpdir
, step
.extract
)
137 # Check that the result is correct and store it in the cache
138 store
.check_manifest_and_rename(required_digest
, tmpdir
)
141 # If unpacking fails, remove the temporary directory
142 if tmpdir
is not None:
143 from zeroinstall
import support
144 support
.ro_rmtree(tmpdir
)
# Build the mirror URL ('<feed_mirror>/<feed-dir>/latest.xml') for a feed.
# NOTE(review): this extract is missing original lines 149-150 and 152 — the
# bodies of both `if` statements (presumably early `return None` paths) and the
# import that brings `urlparse` into scope. Confirm before editing.
146 def get_feed_mirror(self
, url
):
147 """Return the URL of a mirror for this feed."""
148 if self
.feed_mirror
is None:
# Local servers (e.g. unit tests) are excluded from mirroring.
151 if urlparse
.urlparse(url
).hostname
== 'localhost':
153 return '%s/%s/latest.xml' % (self
.feed_mirror
, _get_feed_dir(url
))
# Download a feed from its primary location; if that is slow or fails with a
# generic SafeException (but NOT trust, replay or user-abort errors), also try
# the configured mirror, and reconcile whichever finishes.
# NOTE(review): this extract is missing many original lines (e.g. 163, 166, 168,
# 173, 175-179, 183, 191-193, 195, 198, 200, 202-210, 212, 214, 216-217,
# 219-225, 228, 231-232, 235-239) — notably the try blocks that the visible
# `except` clauses belong to and the loop/yield structure between them.
# Python 2 syntax throughout (`except E, ex`, `filter`).
155 def download_and_import_feed(self
, feed_url
, iface_cache
, force
= False):
156 """Download the feed, download any required keys, confirm trust if needed and import.
157 @param feed_url: the feed to be downloaded
159 @param iface_cache: cache in which to store the feed
160 @type iface_cache: L{iface_cache.IfaceCache}
161 @param force: whether to abort and restart an existing download"""
162 from download
import DownloadAborted
164 debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url
, 'force': force
})
165 assert not feed_url
.startswith('/')
# Kick off the primary (non-mirror) download/import.
167 primary
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= False)
169 @tasks.named_async("monitor feed downloads for " + feed_url
)
170 def wait_for_downloads(primary
):
171 # Download just the upstream feed, unless it takes too long...
172 timeout
= tasks
.TimeoutBlocker(5, 'Mirror timeout') # 5 seconds
174 yield primary
, timeout
180 return # OK, primary succeeded!
181 # OK, maybe it's just being slow...
182 info("Feed download from %s is taking a long time. Trying mirror too...", feed_url
)
# Errors that make the mirror pointless are re-raised immediately.
184 except NoTrustedKeys
, ex
:
185 raise # Don't bother trying the mirror if we have a trust problem
186 except ReplayAttack
, ex
:
187 raise # Don't bother trying the mirror if we have a replay attack
188 except DownloadAborted
, ex
:
189 raise # Don't bother trying the mirror if the user cancelled
190 except SafeException
, ex
:
194 warn(_("Trying mirror, as feed download from %(url)s failed: %(exception)s"), {'url': feed_url
, 'exception': ex
})
196 # Start downloading from mirror...
197 mirror
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= True)
199 # Wait until both mirror and primary tasks are complete...
201 blockers
= filter(None, [primary
, mirror
])
211 # No point carrying on with the mirror once the primary has succeeded
213 info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url
)
215 except SafeException
, ex
:
218 info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url
, 'exception': ex
})
226 # We already warned; no need to raise an exception too,
227 # as the mirror download succeeded.
229 except ReplayAttack
, ex
:
230 info(_("Version from mirror is older than cached version; ignoring it: %s"), ex
)
233 except SafeException
, ex
:
234 info(_("Mirror download failed: %s"), ex
)
240 return wait_for_downloads(primary
)
# Fetch one copy of a feed (primary location or mirror), download its signing
# keys, then import it if the signatures are trusted — asking the handler to
# confirm keys first when they are not. Raises NoTrustedKeys when, even after
# confirmation, no trusted signature is found.
# NOTE(review): this extract is missing many original lines (e.g. 245, 248-250,
# 252-253, 255-256, 258, 260-261, 264-266, 270, 274-276, 279-282) — including
# the non-mirror URL branch, the inner async function's def line, the binding of
# `stream`, and the end of the function. Confirm before editing.
242 def _download_and_import_feed(self
, feed_url
, iface_cache
, force
, use_mirror
):
243 """Download and import a feed.
244 @param use_mirror: False to use primary location; True to use mirror."""
# Mirror branch: translate the feed URL to its mirror location (may be None).
246 url
= self
.get_feed_mirror(feed_url
)
247 if url
is None: return None
251 dl
= self
.handler
.get_download(url
, force
= force
, hint
= feed_url
)
254 @tasks.named_async("fetch_feed " + url
)
257 tasks
.check(dl
.downloaded
)
259 pending
= PendingFeed(feed_url
, stream
)
262 # If we got the feed from a mirror, get the key from there too
263 key_mirror
= self
.feed_mirror
+ '/keys/'
# Download any GPG keys needed to check the feed's signatures.
267 keys_downloaded
= tasks
.Task(pending
.download_keys(self
.handler
, feed_hint
= feed_url
, key_mirror
= key_mirror
), _("download keys for %s") % feed_url
)
268 yield keys_downloaded
.finished
269 tasks
.check(keys_downloaded
.finished
)
# Import directly if already trusted; otherwise ask the user to confirm keys
# and then retry the trusted import.
271 iface
= iface_cache
.get_interface(pending
.url
)
272 if not iface_cache
.update_interface_if_trusted(iface
, pending
.sigs
, pending
.new_xml
):
273 blocker
= self
.handler
.confirm_keys(pending
, self
.fetch_key_info
)
277 if not iface_cache
.update_interface_if_trusted(iface
, pending
.sigs
, pending
.new_xml
):
278 raise NoTrustedKeys(_("No signing keys trusted; not importing"))
# Return the KeyInfoFetcher for a fingerprint, creating and caching one in
# self.key_info on first use.
# NOTE(review): original lines 285, 287 and 289 are missing from this extract —
# presumably the try/except KeyError around the cache lookup and the final
# `return info`; confirm against the full source before editing.
284 def fetch_key_info(self
, fingerprint
):
286 return self
.key_info
[fingerprint
]
288 self
.key_info
[fingerprint
] = info
= KeyInfoFetcher(self
.key_info_server
, fingerprint
)
# Download one implementation: pick the highest-rated digest algorithm we know,
# then dispatch on the retrieval method — DownloadSource goes through
# download_archive + _add_to_cache, Recipe goes through cook; anything else is
# an error. Finally notify the handler that the implementation was stored.
# NOTE(review): this extract is missing many original lines (e.g. 301, 303, 305,
# 310, 312-314, 319-321, 324-327, 331-333, 335) — `best` is read before any
# visible initialisation, both raise statements appear without their guarding
# conditions, and the inner async wrapper's scaffolding is truncated.
291 def download_impl(self
, impl
, retrieval_method
, stores
, force
= False):
292 """Download an implementation.
293 @param impl: the selected implementation
294 @type impl: L{model.ZeroInstallImplementation}
295 @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
296 @type retrieval_method: L{model.RetrievalMethod}
297 @param stores: where to store the downloaded implementation
298 @type stores: L{zerostore.Stores}
299 @param force: whether to abort and restart an existing download
300 @rtype: L{tasks.Blocker}"""
302 assert retrieval_method
304 from zeroinstall
.zerostore
import manifest
# Choose the best digest: scan 'alg=value' digests and keep the one whose
# algorithm has the highest rating in manifest.algorithms.
306 for digest
in impl
.digests
:
307 alg_name
= digest
.split('=', 1)[0]
308 alg
= manifest
.algorithms
.get(alg_name
, None)
309 if alg
and (best
is None or best
.rating
< alg
.rating
):
311 required_digest
= digest
315 raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
316 {'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
317 raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
318 {'algorithms': impl
.digests
, 'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
# Dispatch on the kind of retrieval method.
322 if isinstance(retrieval_method
, DownloadSource
):
323 blocker
, stream
= self
.download_archive(retrieval_method
, force
= force
, impl_hint
= impl
)
328 self
._add
_to
_cache
(required_digest
, stores
, retrieval_method
, stream
)
329 elif isinstance(retrieval_method
, Recipe
):
330 blocker
= self
.cook(required_digest
, retrieval_method
, stores
, force
, impl_hint
= impl
)
334 raise Exception(_("Unknown download type for '%s'") % retrieval_method
)
336 self
.handler
.impl_added_to_store(impl
)
337 return download_impl()
def _add_to_cache(self, required_digest, stores, retrieval_method, stream):
	"""Unpack a downloaded archive stream into the store.
	@param required_digest: digest the unpacked implementation must match
	@param stores: the L{zerostore.Stores} to add the implementation to
	@param retrieval_method: the L{DownloadSource} describing the archive
	@param stream: open stream containing the downloaded archive data"""
	assert isinstance(retrieval_method, DownloadSource)
	# Fix: `url` was assigned but unused; use it instead of re-reading the attribute.
	url = retrieval_method.url
	stores.add_archive_to_cache(required_digest, stream, url, retrieval_method.extract,
			type = retrieval_method.type, start_offset = retrieval_method.start_offset or 0)
# Start downloading a single archive: validate the URL scheme, work out the
# MIME type (explicit attribute, else guessed from the URL), check we can unpack
# that type, then hand the download to the handler. Returns (blocker, tempfile).
# NOTE(review): this extract is missing original lines 349, 353, 355 and 357 —
# presumably blank lines plus the `if not mime_type:` guards that make the
# type_from_url fallback and the "can't guess" raise conditional; as shown, the
# raise would be unconditional. Confirm against the full source before editing.
345 def download_archive(self
, download_source
, force
= False, impl_hint
= None):
346 """Fetch an archive. You should normally call L{download_impl}
347 instead, since it handles other kinds of retrieval method too."""
348 from zeroinstall
.zerostore
import unpack
# Only plain http/https/ftp archives are supported.
350 url
= download_source
.url
351 if not (url
.startswith('http:') or url
.startswith('https:') or url
.startswith('ftp:')):
352 raise SafeException(_("Unknown scheme in download URL '%s'") % url
)
354 mime_type
= download_source
.type
356 mime_type
= unpack
.type_from_url(download_source
.url
)
358 raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source
.url
)
359 unpack
.check_type_ok(mime_type
)
360 dl
= self
.handler
.get_download(download_source
.url
, force
= force
, hint
= impl_hint
)
# Expected size includes any leading data skipped via start_offset.
361 dl
.expected_size
= download_source
.size
+ (download_source
.start_offset
or 0)
362 return (dl
.downloaded
, dl
.tempfile
)
# Download the first PNG icon advertised by an interface and save it into the
# per-user icon cache, keyed by the escaped interface URI.
# NOTE(review): this extract is missing many original lines (e.g. 370, 376,
# 378-379, 381, 383-385, 387-390, 393-394, 396-398, 401-403, 409) — including
# the loop break/continue structure, the binding of `stream` used below, and
# the try scaffolding for the visible `except`. `file(...)` and
# `except Exception, ex` are Python 2 forms; `shutil` has no visible import in
# this extract. Confirm before editing.
364 def download_icon(self
, interface
, force
= False, modification_time
= None):
365 """Download an icon for this interface and add it to the
366 icon cache. If the interface has no icon or we are offline, do nothing.
367 @return: the task doing the import, or None
368 @rtype: L{tasks.Task}"""
369 debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface
, 'force': force
})
371 # Find a suitable icon to download
372 for icon
in interface
.get_metadata(XMLNS_IFACE
, 'icon'):
373 type = icon
.getAttribute('type')
374 if type != 'image/png':
375 debug(_('Skipping non-PNG icon'))
377 source
= icon
.getAttribute('href')
380 warn(_('Missing "href" attribute on <icon> in %s'), interface
)
382 info(_('No PNG icons found in %s'), interface
)
# Reuse an existing in-progress download for this source if there is one.
386 dl
= self
.handler
.monitored_downloads
[source
]
391 dl
= download
.Download(source
, hint
= interface
, modification_time
= modification_time
)
392 self
.handler
.monitor_download(dl
)
395 def download_and_add_icon():
399 tasks
.check(dl
.downloaded
)
# Server said the icon is unchanged; keep the cached copy.
400 if dl
.unmodified
: return
# Save the downloaded icon under the escaped interface URI.
404 icons_cache
= basedir
.save_cache_path(config_site
, 'interface_icons')
405 icon_file
= file(os
.path
.join(icons_cache
, escape(interface
.uri
)), 'w')
406 shutil
.copyfileobj(stream
, icon_file
)
407 except Exception, ex
:
408 self
.handler
.report_error(ex
)
410 return download_and_add_icon()
# Download a set of implementations: resolve a retrieval source for each (or
# fail if one has none), start all downloads, then wait for the blockers —
# reporting download errors through the handler via dl_error.
# NOTE(review): this extract is missing many original lines (e.g. 414-416, 420,
# 425, 428-432, 435, 437, 439-442, 444, 446-448) — `to_download` and `blockers`
# are used before any visible initialisation, the guard for the raise is
# missing, and the decorator/yield scaffolding of the inner async function is
# truncated. Confirm before editing.
412 def download_impls(self
, implementations
, stores
):
413 """Download the given implementations, choosing a suitable retrieval method for each."""
417 for impl
in implementations
:
418 debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl
.feed
, 'implementation': impl
})
419 source
= self
.get_best_source(impl
)
421 raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
422 " cannot be downloaded (no download locations given in "
423 "interface!)") % {'implementation_id': impl
.id, 'interface': impl
.feed
.get_name()})
424 to_download
.append((impl
, source
))
# Start every download before waiting on any of them.
426 for impl
, source
in to_download
:
427 blockers
.append(self
.download_impl(impl
, source
, stores
))
433 def download_impls(blockers
):
434 # Record the first error log the rest
436 def dl_error(ex
, tb
= None):
438 self
.handler
.report_error(ex
)
443 tasks
.check(blockers
, dl_error
)
# Drop completed blockers; keep waiting on the rest.
445 blockers
= [b
for b
in blockers
if not b
.happened
]
449 return download_impls(blockers
)
# Pick a retrieval method for an implementation: simply the first listed source.
# NOTE(review): the function continues past this extract (original line 456+),
# presumably with a fall-through return for the no-sources case — confirm.
451 def get_best_source(self
, impl
):
452 """Return the best download source for this implementation.
453 @rtype: L{model.RetrievalMethod}"""
454 if impl
.download_sources
:
455 return impl
.download_sources
[0]