"""Downloads feeds, keys, packages and icons."""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
8 from zeroinstall
import _
10 from logging
import info
, debug
, warn
12 from zeroinstall
.support
import tasks
, basedir
13 from zeroinstall
.injector
.namespaces
import XMLNS_IFACE
, config_site
14 from zeroinstall
.injector
.model
import DownloadSource
, Recipe
, SafeException
, escape
15 from zeroinstall
.injector
.iface_cache
import PendingFeed
, ReplayAttack
16 from zeroinstall
.injector
.handler
import NoTrustedKeys
17 from zeroinstall
.injector
import download
# Base URL of the default key-information service; it is queried as
# <server>/key/<fingerprint> (see KeyInfoFetcher).
DEFAULT_KEY_LOOKUP_SERVER = 'https://keylookup.appspot.com'
21 def _escape_slashes(path
):
22 return path
.replace('/', '%23')
def _get_feed_dir(feed):
	"""The algorithm from 0mirror.

	Map a feed URL to the relative directory a 0mirror site stores it under:
	feeds/<scheme>/<domain>/<rest-with-slashes-escaped>.
	@param feed: the feed's URL
	@return: the relative mirror path for this feed
	@raise SafeException: if the URL contains a fragment or malformed components"""
	# In the chunk as received the condition on this raise was lost, making it
	# unconditional; a feed URL must not contain a fragment.
	if '#' in feed:
		raise SafeException(_("Invalid URL '%s'") % feed)
	scheme, rest = feed.split('://', 1)
	domain, rest = rest.split('/', 1)
	# Reject empty components and ones that could break the mirror layout.
	for x in [scheme, domain, rest]:
		if not x or x.startswith(','):
			raise SafeException(_("Invalid URL '%s'") % feed)
	return os.path.join('feeds', scheme, domain, _escape_slashes(rest))
36 """Fetches information about a GPG key from a key-info server.
37 See L{Fetcher.fetch_key_info} for details.
42 >>> kf = KeyInfoFetcher('https://server', fingerprint)
45 if kf.blocker is None: break
def __init__(self, server, fingerprint):
	"""Start fetching <key-lookup> information about a GPG key.
	@param server: base URL of the key-info server, or None to do nothing
	@param fingerprint: fingerprint of the GPG key to look up"""
	self.fingerprint = fingerprint
	# No key-info server configured: leave this fetcher inert.
	if server is None: return
	# Progress message shown while the lookup is in flight.
	self.status = _('Fetching key information from %s...') % server
	# The server is queried at <server>/key/<fingerprint>.
	dl = download.Download(server + '/key/' + fingerprint)
	from xml.dom import minidom
	# NOTE(review): several lines appear to be missing from this chunk here —
	# the nested async task definition, its try block, and the yield waiting
	# on dl.downloaded. The statements below presumably run inside that task.
	tempfile = dl.tempfile
	tasks.check(dl.downloaded)
	doc = minidom.parse(tempfile)
	# The server must reply with a <key-lookup> document.
	if doc.documentElement.localName != 'key-lookup':
		raise SafeException(_('Expected <key-lookup>, not <%s>') % doc.documentElement.localName)
	self.info += doc.documentElement.childNodes
	# NOTE(review): the lines below look like an 'except' branch (the handler
	# line itself is missing): record a synthetic "bad" vote carrying the
	# error text so the UI still has something to show.
	doc = minidom.parseString('<item vote="bad"/>')
	root = doc.documentElement
	root.appendChild(doc.createTextNode(_('Error getting key information: %s') % ex))
	self.info.append(root)
	# Blocker triggers once the key information has been fetched (or failed).
	self.blocker = fetch_key_info()
83 class Fetcher(object):
84 """Downloads and stores various things.
85 @ivar handler: handler to use for user-interaction
86 @type handler: L{handler.Handler}
87 @ivar key_info: caches information about GPG keys
88 @type key_info: {str: L{KeyInfoFetcher}}
89 @ivar key_info_server: the base URL of a key information server
90 @type key_info_server: str
91 @ivar feed_mirror: the base URL of a mirror site for keys and feeds
92 @type feed_mirror: str
# Restrict Fetcher instances to exactly these attributes.
__slots__ = ['handler', 'feed_mirror', 'key_info_server', 'key_info']
def __init__(self, handler):
	"""Create a new Fetcher.
	@param handler: handler to use for user-interaction and downloads
	@type handler: L{handler.Handler}"""
	self.handler = handler
	# Default mirror site used for feeds and keys when the primary fails.
	self.feed_mirror = "http://roscidus.com/0mirror"
	self.key_info_server = DEFAULT_KEY_LOOKUP_SERVER
	# Cache of KeyInfoFetchers keyed by fingerprint; read by fetch_key_info.
	# This initialisation is missing from the chunk as received, yet the
	# attribute is declared in __slots__ and dereferenced by fetch_key_info.
	self.key_info = {}
def cook(self, required_digest, recipe, stores, force = False, impl_hint = None):
	"""Follow a Recipe: download every ingredient, unpack them all into one
	directory, then verify the digest and store the result.
	@param required_digest: digest the assembled implementation must match
	@param recipe: the recipe whose steps should be downloaded and unpacked
	@param stores: where to store the unpacked result
	@param force: whether to abort and restart existing downloads
	@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
	@see: L{download_impl} uses this method when appropriate"""
	# Maybe we're taking this metaphor too far?

	# Start downloading all the ingredients.
	downloads = {}	# Downloads that are not yet successful
	streams = {}	# Streams collected from successful downloads

	# Start a download for each ingredient
	# NOTE(review): the initialisation of 'blockers' (presumably blockers = [])
	# appears to be missing from this chunk.
	for step in recipe.steps:
		blocker, stream = self.download_archive(step, force = force, impl_hint = impl_hint)
		blockers.append(blocker)
		streams[step] = stream

	# NOTE(review): the loop that yields until every blocker has happened
	# appears to be missing here; only its body survives.
	tasks.check(blockers)
	blockers = [b for b in blockers if not b.happened]

	from zeroinstall.zerostore import unpack

	# Create an empty directory for the new implementation
	store = stores.stores[0]
	tmpdir = store.get_tmp_dir_for(required_digest)

	# Unpack each of the downloaded archives into it in turn
	for step in recipe.steps:
		stream = streams[step]
		unpack.unpack_archive_over(step.url, stream, tmpdir, step.extract)
	# Check that the result is correct and store it in the cache
	store.check_manifest_and_rename(required_digest, tmpdir)

	# NOTE(review): the block below looks like the cleanup ('except'/'finally')
	# path of a try statement whose opening lines are missing from this chunk.
	# If unpacking fails, remove the temporary directory
	if tmpdir is not None:
		from zeroinstall import support
		support.ro_rmtree(tmpdir)
def get_feed_mirror(self, url):
	"""Return the URL of a mirror for this feed.
	@param url: the primary URL of the feed
	@return: the mirror URL for this feed, or None for local feeds"""
	# Both this import and the 'return None' suite of the if are missing
	# from the chunk as received; without them the 'if' has no body.
	import urlparse
	if urlparse.urlparse(url).hostname == 'localhost':
		# A locally-served feed has no public mirror.
		return None
	return '%s/%s/latest.xml' % (self.feed_mirror, _get_feed_dir(url))
def download_and_import_feed(self, feed_url, iface_cache, force = False):
	"""Download the feed, download any required keys, confirm trust if needed and import.
	@param feed_url: the feed to be downloaded
	@param iface_cache: cache in which to store the feed
	@type iface_cache: L{iface_cache.IfaceCache}
	@param force: whether to abort and restart an existing download"""
	from download import DownloadAborted

	debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url, 'force': force})
	assert not feed_url.startswith('/')

	# Kick off the download from the primary location first.
	primary = self._download_and_import_feed(feed_url, iface_cache, force, use_mirror = False)

	@tasks.named_async("monitor feed downloads for " + feed_url)
	def wait_for_downloads(primary):
		# Download just the upstream feed, unless it takes too long...
		timeout = tasks.TimeoutBlocker(5, 'Mirror timeout')	# 5 seconds

		yield primary, timeout
		# NOTE(review): the try block and success check that follow the yield
		# appear to be missing from this chunk.
		return	# OK, primary succeeded!
		# OK, maybe it's just being slow...
		info("Feed download from %s is taking a long time. Trying mirror too...", feed_url)
		# NOTE(review): the 'try:' matching the except clauses below is
		# missing from this chunk.
		except NoTrustedKeys, ex:
			raise	# Don't bother trying the mirror if we have a trust problem
		except ReplayAttack, ex:
			raise	# Don't bother trying the mirror if we have a replay attack
		except DownloadAborted, ex:
			raise	# Don't bother trying the mirror if the user cancelled
		except SafeException, ex:
			# Primary failed for some other reason; fall back to the mirror.
			warn(_("Trying mirror, as feed download from %(url)s failed: %(exception)s"), {'url': feed_url, 'exception': ex})

		# Start downloading from mirror...
		mirror = self._download_and_import_feed(feed_url, iface_cache, force, use_mirror = True)

		# Wait until both mirror and primary tasks are complete...
		blockers = filter(None, [primary, mirror])
		# NOTE(review): the loop waiting on these blockers, and the 'try:'s
		# matching the except clauses below, are missing from this chunk.
		# No point carrying on with the mirror once the primary has succeeded
		info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url)
		except SafeException, ex:
			info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url, 'exception': ex})
		# We already warned; no need to raise an exception too,
		# as the mirror download succeeded.
		except ReplayAttack, ex:
			# Mirror is stale relative to our cache; not an error.
			info(_("Version from mirror is older than cached version; ignoring it: %s"), ex)
		except SafeException, ex:
			info(_("Mirror download failed: %s"), ex)

	return wait_for_downloads(primary)
def _download_and_import_feed(self, feed_url, iface_cache, force, use_mirror):
	"""Download and import a feed.
	@param feed_url: the feed to download
	@param iface_cache: cache in which to store the feed
	@param force: whether to abort and restart an existing download
	@param use_mirror: False to use primary location; True to use mirror."""
	# NOTE(review): this chunk always resolves the mirror URL; presumably an
	# 'if use_mirror:' / 'else: url = feed_url' choice is missing here.
	url = self.get_feed_mirror(feed_url)
	if url is None: return None

	dl = self.handler.get_download(url, force = force, hint = feed_url)

	@tasks.named_async("fetch_feed " + url)
	# NOTE(review): the decorated function's 'def' line and the yield waiting
	# on the download are missing; the statements below belong to that task,
	# and 'stream' is presumably the download's tempfile.
	tasks.check(dl.downloaded)

	pending = PendingFeed(feed_url, stream)

	# If we got the feed from a mirror, get the key from there too
	key_mirror = self.feed_mirror + '/keys/'

	keys_downloaded = tasks.Task(pending.download_keys(self.handler, feed_hint = feed_url, key_mirror = key_mirror), _("download keys for %s") % feed_url)
	yield keys_downloaded.finished
	tasks.check(keys_downloaded.finished)

	iface = iface_cache.get_interface(pending.url)
	if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml):
		# Not yet trusted: ask the handler (user) to confirm the keys, then retry.
		blocker = self.handler.confirm_keys(pending, self.fetch_key_info)
		# NOTE(review): the yield/check waiting on 'blocker' appears to be
		# missing from this chunk.
		if not iface_cache.update_interface_if_trusted(iface, pending.sigs, pending.new_xml):
			raise NoTrustedKeys(_("No signing keys trusted; not importing"))
282 def fetch_key_info(self
, fingerprint
):
284 return self
.key_info
[fingerprint
]
286 self
.key_info
[fingerprint
] = info
= KeyInfoFetcher(self
.key_info_server
, fingerprint
)
def download_impl(self, impl, retrieval_method, stores, force = False):
	"""Download an implementation.
	@param impl: the selected implementation
	@type impl: L{model.ZeroInstallImplementation}
	@param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
	@type retrieval_method: L{model.RetrievalMethod}
	@param stores: where to store the downloaded implementation
	@type stores: L{zerostore.Stores}
	@param force: whether to abort and restart an existing download
	@rtype: L{tasks.Blocker}"""
	assert retrieval_method

	from zeroinstall.zerostore import manifest
	# Pick a digest whose algorithm we know how to verify.
	for required_digest in impl.digests:
		alg = required_digest.split('=', 1)[0]
		if alg in manifest.algorithms:
			# NOTE(review): the suite for this branch (presumably a 'break')
			# and the 'else' structure around the raises below are missing
			# from this chunk.
	raise SafeException(_("No <manifest-digest> given for '%(implementation)s' version %(version)s") %
			{'implementation': impl.feed.get_name(), 'version': impl.get_version()})
	raise SafeException(_("Unknown digest algorithms '%(algorithms)s' for '%(implementation)s' version %(version)s") %
			{'algorithms': impl.digests, 'implementation': impl.feed.get_name(), 'version': impl.get_version()})

	if isinstance(retrieval_method, DownloadSource):
		blocker, stream = self.download_archive(retrieval_method, force = force, impl_hint = impl)
		# NOTE(review): the yield/check waiting on 'blocker' appears to be
		# missing before the archive is added to the cache.
		self._add_to_cache(required_digest, stores, retrieval_method, stream)
	elif isinstance(retrieval_method, Recipe):
		blocker = self.cook(required_digest, retrieval_method, stores, force, impl_hint = impl)
	# NOTE(review): an 'else:' presumably belongs before this raise.
	raise Exception(_("Unknown download type for '%s'") % retrieval_method)

	# Tell the handler the implementation is now available.
	self.handler.impl_added_to_store(impl)
	return download_impl()
def _add_to_cache(self, required_digest, stores, retrieval_method, stream):
	"""Unpack a downloaded archive into the store, verifying its digest.
	@param required_digest: digest the unpacked archive must match
	@param stores: stores to add the unpacked archive to
	@param retrieval_method: describes the archive (URL, extract dir, type, offset)
	@type retrieval_method: L{DownloadSource}
	@param stream: the downloaded archive's data"""
	assert isinstance(retrieval_method, DownloadSource)
	# Previously this local was assigned but the call re-read the attribute;
	# use the local for both clarity and consistency.
	url = retrieval_method.url
	stores.add_archive_to_cache(required_digest, stream, url, retrieval_method.extract,
				type = retrieval_method.type, start_offset = retrieval_method.start_offset or 0)
def download_archive(self, download_source, force = False, impl_hint = None):
	"""Fetch an archive. You should normally call L{download_impl}
	instead, since it handles other kinds of retrieval method too.
	@param download_source: the archive to fetch (url, size, type, start_offset)
	@param force: whether to abort and restart an existing download
	@param impl_hint: the Implementation this is for (if any) as a hint for the GUI
	@return: (downloaded blocker, tempfile stream) for the new download
	@raise SafeException: if the URL scheme or archive type is unsupported"""
	from zeroinstall.zerostore import unpack

	url = download_source.url
	if not (url.startswith('http:') or url.startswith('https:') or url.startswith('ftp:')):
		raise SafeException(_("Unknown scheme in download URL '%s'") % url)

	mime_type = download_source.type
	# The two 'if not mime_type:' guards below were missing from the chunk as
	# received, which made the explicit type be clobbered and the function
	# always raise. Restore them: only guess when no type was declared, and
	# only fail when guessing fails too.
	if not mime_type:
		mime_type = unpack.type_from_url(download_source.url)
	if not mime_type:
		raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source.url)
	unpack.check_type_ok(mime_type)
	dl = self.handler.get_download(download_source.url, force = force, hint = impl_hint)
	# Expected size covers the archive plus any offset within a larger file.
	dl.expected_size = download_source.size + (download_source.start_offset or 0)
	return (dl.downloaded, dl.tempfile)
def download_icon(self, interface, force = False, modification_time = None):
	"""Download an icon for this interface and add it to the
	icon cache. If the interface has no icon or we are offline, do nothing.
	@return: the task doing the import, or None
	@rtype: L{tasks.Task}"""
	debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface, 'force': force})

	# Find a suitable icon to download
	for icon in interface.get_metadata(XMLNS_IFACE, 'icon'):
		type = icon.getAttribute('type')	# NOTE: shadows the builtin 'type'
		if type != 'image/png':
			debug(_('Skipping non-PNG icon'))
			# NOTE(review): a 'continue' presumably belongs here.
		source = icon.getAttribute('href')
		# NOTE(review): the success exit for a usable icon and the guard for a
		# missing href appear to be missing from this chunk.
		warn(_('Missing "href" attribute on <icon> in %s'), interface)
	info(_('No PNG icons found in %s'), interface)

	# Reuse an existing monitored download of this icon if one is running.
	dl = self.handler.monitored_downloads[source]
	# NOTE(review): the line above looks like the body of a try whose
	# KeyError fallback starts a fresh download below.
	dl = download.Download(source, hint = interface, modification_time = modification_time)
	self.handler.monitor_download(dl)

	def download_and_add_icon():
		# NOTE(review): this task's decorator, the yield on the download and
		# the setup of 'stream' are missing from this chunk.
		tasks.check(dl.downloaded)
		# Server says the icon hasn't changed: nothing to store.
		if dl.unmodified: return
		# Save the fetched icon under an escaped form of the interface URI.
		icons_cache = basedir.save_cache_path(config_site, 'interface_icons')
		icon_file = file(os.path.join(icons_cache, escape(interface.uri)), 'w')
		shutil.copyfileobj(stream, icon_file)
		# NOTE(review): the 'try:' matching this except clause is missing.
		except Exception, ex:
			self.handler.report_error(ex)

	return download_and_add_icon()
def download_impls(self, implementations, stores):
	"""Download the given implementations, choosing a suitable retrieval method for each.
	@param implementations: the implementations to download
	@param stores: where to store the downloaded implementations"""
	# NOTE(review): the initialisations of 'to_download' and 'blockers'
	# (presumably empty lists) are missing from this chunk.
	for impl in implementations:
		debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl.feed, 'implementation': impl})
		source = self.get_best_source(impl)
		# NOTE(review): an 'if not source:' guard presumably belongs before this raise.
		raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
				" cannot be downloaded (no download locations given in "
				"interface!)") % {'implementation_id': impl.id, 'interface': impl.feed.get_name()})
		to_download.append((impl, source))

	# Start every download before waiting on any of them.
	for impl, source in to_download:
		blockers.append(self.download_impl(impl, source, stores))

	def download_impls(blockers):
		# Record the first error; log the rest.
		def dl_error(ex, tb = None):
			self.handler.report_error(ex)
		# NOTE(review): the loop yielding until all blockers have happened
		# (and this task's decorator) are missing from this chunk.
		tasks.check(blockers, dl_error)
		blockers = [b for b in blockers if not b.happened]

	return download_impls(blockers)
445 def get_best_source(self
, impl
):
446 """Return the best download source for this implementation.
447 @rtype: L{model.RetrievalMethod}"""
448 if impl
.download_sources
:
449 return impl
.download_sources
[0]