2 Downloads feeds, keys, packages and icons.
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
8 from zeroinstall
import _
10 from logging
import info
, debug
, warn
12 from zeroinstall
.support
import tasks
, basedir
13 from zeroinstall
.injector
.namespaces
import XMLNS_IFACE
, config_site
14 from zeroinstall
.injector
.model
import DownloadSource
, Recipe
, SafeException
, escape
15 from zeroinstall
.injector
.iface_cache
import PendingFeed
, ReplayAttack
16 from zeroinstall
.injector
.handler
import NoTrustedKeys
17 from zeroinstall
.injector
import download
# Base URL of the default key-information server queried by
# Fetcher.fetch_key_info() (see key_info_server attribute on Fetcher).
DEFAULT_KEY_LOOKUP_SERVER = 'https://keylookup.appspot.com'
21 def _escape_slashes(path
):
22 return path
.replace('/', '%23')
def _get_feed_dir(feed):
	"""The algorithm from 0mirror.

	Map a feed URL to the relative directory a 0mirror site uses for it:
	feeds/<scheme>/<domain>/<escaped rest of path>.
	@param feed: the feed's URL
	@type feed: str
	@raise SafeException: if the URL does not have the scheme://domain/path
	shape required by the mirror layout"""
	# NOTE(review): this guard line was elided in the reviewed copy; a feed
	# URL must not contain a fragment, so refuse it up front.
	if '#' in feed:
		raise SafeException(_("Invalid URL '%s'") % feed)
	scheme, rest = feed.split('://', 1)
	domain, rest = rest.split('/', 1)
	for x in [scheme, domain, rest]:
		# An empty component (or one starting with ',') would corrupt the
		# mirror's directory layout.
		if not x or x.startswith(','):
			raise SafeException(_("Invalid URL '%s'") % feed)
	return os.path.join('feeds', scheme, domain, _escape_slashes(rest))
36 """Fetches information about a GPG key from a key-info server.
37 @see: L{Fetcher.fetch_key_info}
40 >>> kf = KeyInfoFetcher('https://server', fingerprint)
43 if kf.blocker is None: break
47 def __init__(self
, server
, fingerprint
):
48 self
.fingerprint
= fingerprint
52 if server
is None: return
54 self
.status
= _('Fetching key information from %s...') % server
56 dl
= download
.Download(server
+ '/key/' + fingerprint
)
59 from xml
.dom
import minidom
64 tempfile
= dl
.tempfile
67 tasks
.check(dl
.downloaded
)
69 doc
= minidom
.parse(tempfile
)
70 if doc
.documentElement
.localName
!= 'key-lookup':
71 raise SafeException(_('Expected <key-lookup>, not <%s>') % doc
.documentElement
.localName
)
72 self
.info
+= doc
.documentElement
.childNodes
74 doc
= minidom
.parseString('<item vote="bad"/>')
75 root
= doc
.documentElement
76 root
.appendChild(doc
.createTextNode(_('Error getting key information: %s') % ex
))
77 self
.info
.append(root
)
79 self
.blocker
= fetch_key_info()
81 class Fetcher(object):
82 """Downloads and stores various things.
83 @ivar handler: handler to use for user-interaction
84 @type handler: L{handler.Handler}
85 @ivar key_info: caches information about GPG keys
86 @type key_info: {str: L{KeyInfoFetcher}}
87 @ivar key_info_server: the base URL of a key information server
88 @type key_info_server: str
89 @ivar feed_mirror: the base URL of a mirror site for keys and feeds
90 @type feed_mirror: str
92 __slots__
= ['handler', 'feed_mirror', 'key_info_server', 'key_info']
94 def __init__(self
, handler
):
95 self
.handler
= handler
96 self
.feed_mirror
= "http://roscidus.com/0mirror"
97 self
.key_info_server
= DEFAULT_KEY_LOOKUP_SERVER
101 def cook(self
, required_digest
, recipe
, stores
, force
= False, impl_hint
= None):
103 @param impl_hint: the Implementation this is for (if any) as a hint for the GUI
104 @see: L{download_impl} uses this method when appropriate"""
105 # Maybe we're taking this metaphor too far?
107 # Start downloading all the ingredients.
108 downloads
= {} # Downloads that are not yet successful
109 streams
= {} # Streams collected from successful downloads
111 # Start a download for each ingredient
113 for step
in recipe
.steps
:
114 blocker
, stream
= self
.download_archive(step
, force
= force
, impl_hint
= impl_hint
)
116 blockers
.append(blocker
)
117 streams
[step
] = stream
121 tasks
.check(blockers
)
122 blockers
= [b
for b
in blockers
if not b
.happened
]
124 from zeroinstall
.zerostore
import unpack
126 # Create an empty directory for the new implementation
127 store
= stores
.stores
[0]
128 tmpdir
= store
.get_tmp_dir_for(required_digest
)
130 # Unpack each of the downloaded archives into it in turn
131 for step
in recipe
.steps
:
132 stream
= streams
[step
]
134 unpack
.unpack_archive_over(step
.url
, stream
, tmpdir
, step
.extract
)
135 # Check that the result is correct and store it in the cache
136 store
.check_manifest_and_rename(required_digest
, tmpdir
)
139 # If unpacking fails, remove the temporary directory
140 if tmpdir
is not None:
141 from zeroinstall
import support
142 support
.ro_rmtree(tmpdir
)
144 def get_feed_mirror(self
, url
):
145 """Return the URL of a mirror for this feed."""
147 if urlparse
.urlparse(url
).hostname
== 'localhost':
149 return '%s/%s/latest.xml' % (self
.feed_mirror
, _get_feed_dir(url
))
151 def download_and_import_feed(self
, feed_url
, iface_cache
, force
= False):
152 """Download the feed, download any required keys, confirm trust if needed and import.
153 @param feed_url: the feed to be downloaded
155 @param iface_cache: cache in which to store the feed
156 @type iface_cache: L{iface_cache.IfaceCache}
157 @param force: whether to abort and restart an existing download"""
158 from download
import DownloadAborted
160 debug(_("download_and_import_feed %(url)s (force = %(force)d)"), {'url': feed_url
, 'force': force
})
161 assert not feed_url
.startswith('/')
163 primary
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= False)
165 @tasks.named_async("monitor feed downloads for " + feed_url
)
166 def wait_for_downloads(primary
):
167 # Download just the upstream feed, unless it takes too long...
168 timeout
= tasks
.TimeoutBlocker(5, 'Mirror timeout') # 5 seconds
170 yield primary
, timeout
176 return # OK, primary succeeded!
177 # OK, maybe it's just being slow...
178 info("Feed download from %s is taking a long time. Trying mirror too...", feed_url
)
180 except NoTrustedKeys
, ex
:
181 raise # Don't bother trying the mirror if we have a trust problem
182 except ReplayAttack
, ex
:
183 raise # Don't bother trying the mirror if we have a replay attack
184 except DownloadAborted
, ex
:
185 raise # Don't bother trying the mirror if the user cancelled
186 except SafeException
, ex
:
190 warn(_("Trying mirror, as feed download from %(url)s failed: %(exception)s"), {'url': feed_url
, 'exception': ex
})
192 # Start downloading from mirror...
193 mirror
= self
._download
_and
_import
_feed
(feed_url
, iface_cache
, force
, use_mirror
= True)
195 # Wait until both mirror and primary tasks are complete...
197 blockers
= filter(None, [primary
, mirror
])
207 # No point carrying on with the mirror once the primary has succeeded
209 info(_("Primary feed download succeeded; aborting mirror download for %s") % feed_url
)
211 except SafeException
, ex
:
214 info(_("Feed download from %(url)s failed; still trying mirror: %(exception)s"), {'url': feed_url
, 'exception': ex
})
222 # We already warned; no need to raise an exception too,
223 # as the mirror download succeeded.
225 except ReplayAttack
, ex
:
226 info(_("Version from mirror is older than cached version; ignoring it: %s"), ex
)
229 except SafeException
, ex
:
230 info(_("Mirror download failed: %s"), ex
)
236 return wait_for_downloads(primary
)
238 def _download_and_import_feed(self
, feed_url
, iface_cache
, force
, use_mirror
):
239 """Download and import a feed.
240 @param use_mirror: False to use primary location; True to use mirror."""
242 url
= self
.get_feed_mirror(feed_url
)
243 if url
is None: return None
247 dl
= self
.handler
.get_download(url
, force
= force
, hint
= feed_url
)
250 @tasks.named_async("fetch_feed " + url
)
253 tasks
.check(dl
.downloaded
)
255 pending
= PendingFeed(feed_url
, stream
)
258 # If we got the feed from a mirror, get the key from there too
259 key_mirror
= self
.feed_mirror
+ '/keys/'
263 keys_downloaded
= tasks
.Task(pending
.download_keys(self
.handler
, feed_hint
= feed_url
, key_mirror
= key_mirror
), _("download keys for %s") % feed_url
)
264 yield keys_downloaded
.finished
265 tasks
.check(keys_downloaded
.finished
)
267 iface
= iface_cache
.get_interface(pending
.url
)
268 if not iface_cache
.update_interface_if_trusted(iface
, pending
.sigs
, pending
.new_xml
):
269 blocker
= self
.handler
.confirm_keys(pending
, self
.fetch_key_info
)
273 if not iface_cache
.update_interface_if_trusted(iface
, pending
.sigs
, pending
.new_xml
):
274 raise NoTrustedKeys(_("No signing keys trusted; not importing"))
280 def fetch_key_info(self
, fingerprint
):
282 return self
.key_info
[fingerprint
]
284 self
.key_info
[fingerprint
] = info
= KeyInfoFetcher(self
.key_info_server
, fingerprint
)
287 def download_impl(self
, impl
, retrieval_method
, stores
, force
= False):
288 """Download an implementation.
289 @param impl: the selected implementation
290 @type impl: L{model.ZeroInstallImplementation}
291 @param retrieval_method: a way of getting the implementation (e.g. an Archive or a Recipe)
292 @type retrieval_method: L{model.RetrievalMethod}
293 @param stores: where to store the downloaded implementation
294 @type stores: L{zerostore.Stores}
295 @param force: whether to abort and restart an existing download
296 @rtype: L{tasks.Blocker}"""
298 assert retrieval_method
300 from zeroinstall
.zerostore
import manifest
301 alg
= impl
.id.split('=', 1)[0]
302 if alg
not in manifest
.algorithms
:
303 raise SafeException(_("Unknown digest algorithm '%(algorithm)s' for '%(implementation)s' version %(version)s") %
304 {'algorithm': alg
, 'implementation': impl
.feed
.get_name(), 'version': impl
.get_version()})
308 if isinstance(retrieval_method
, DownloadSource
):
309 blocker
, stream
= self
.download_archive(retrieval_method
, force
= force
, impl_hint
= impl
)
314 self
._add
_to
_cache
(stores
, retrieval_method
, stream
)
315 elif isinstance(retrieval_method
, Recipe
):
316 blocker
= self
.cook(impl
.id, retrieval_method
, stores
, force
, impl_hint
= impl
)
320 raise Exception(_("Unknown download type for '%s'") % retrieval_method
)
322 self
.handler
.impl_added_to_store(impl
)
323 return download_impl()
325 def _add_to_cache(self
, stores
, retrieval_method
, stream
):
326 assert isinstance(retrieval_method
, DownloadSource
)
327 required_digest
= retrieval_method
.implementation
.id
328 url
= retrieval_method
.url
329 stores
.add_archive_to_cache(required_digest
, stream
, retrieval_method
.url
, retrieval_method
.extract
,
330 type = retrieval_method
.type, start_offset
= retrieval_method
.start_offset
or 0)
332 def download_archive(self
, download_source
, force
= False, impl_hint
= None):
333 """Fetch an archive. You should normally call L{download_impl}
334 instead, since it handles other kinds of retrieval method too."""
335 from zeroinstall
.zerostore
import unpack
337 url
= download_source
.url
338 if not (url
.startswith('http:') or url
.startswith('https:') or url
.startswith('ftp:')):
339 raise SafeException(_("Unknown scheme in download URL '%s'") % url
)
341 mime_type
= download_source
.type
343 mime_type
= unpack
.type_from_url(download_source
.url
)
345 raise SafeException(_("No 'type' attribute on archive, and I can't guess from the name (%s)") % download_source
.url
)
346 unpack
.check_type_ok(mime_type
)
347 dl
= self
.handler
.get_download(download_source
.url
, force
= force
, hint
= impl_hint
)
348 dl
.expected_size
= download_source
.size
+ (download_source
.start_offset
or 0)
349 return (dl
.downloaded
, dl
.tempfile
)
351 def download_icon(self
, interface
, force
= False, modification_time
= None):
352 """Download an icon for this interface and add it to the
353 icon cache. If the interface has no icon or we are offline, do nothing.
354 @return: the task doing the import, or None
355 @rtype: L{tasks.Task}"""
356 debug(_("download_icon %(interface)s (force = %(force)d)"), {'interface': interface
, 'force': force
})
358 # Find a suitable icon to download
359 for icon
in interface
.get_metadata(XMLNS_IFACE
, 'icon'):
360 type = icon
.getAttribute('type')
361 if type != 'image/png':
362 debug(_('Skipping non-PNG icon'))
364 source
= icon
.getAttribute('href')
367 warn(_('Missing "href" attribute on <icon> in %s'), interface
)
369 info(_('No PNG icons found in %s'), interface
)
373 dl
= self
.handler
.monitored_downloads
[source
]
378 dl
= download
.Download(source
, hint
= interface
, modification_time
= modification_time
)
379 self
.handler
.monitor_download(dl
)
382 def download_and_add_icon():
386 tasks
.check(dl
.downloaded
)
387 if dl
.unmodified
: return
391 icons_cache
= basedir
.save_cache_path(config_site
, 'interface_icons')
392 icon_file
= file(os
.path
.join(icons_cache
, escape(interface
.uri
)), 'w')
393 shutil
.copyfileobj(stream
, icon_file
)
394 except Exception, ex
:
395 self
.handler
.report_error(ex
)
397 return download_and_add_icon()
399 def download_impls(self
, implementations
, stores
):
400 """Download the given implementations, choosing a suitable retrieval method for each."""
404 for impl
in implementations
:
405 debug(_("start_downloading_impls: for %(feed)s get %(implementation)s"), {'feed': impl
.feed
, 'implementation': impl
})
406 source
= self
.get_best_source(impl
)
408 raise SafeException(_("Implementation %(implementation_id)s of interface %(interface)s"
409 " cannot be downloaded (no download locations given in "
410 "interface!)") % {'implementation_id': impl
.id, 'interface': impl
.feed
.get_name()})
411 to_download
.append((impl
, source
))
413 for impl
, source
in to_download
:
414 blockers
.append(self
.download_impl(impl
, source
, stores
))
420 def download_impls(blockers
):
421 # Record the first error log the rest
423 def dl_error(ex
, tb
= None):
425 self
.handler
.report_error(ex
)
430 tasks
.check(blockers
, dl_error
)
432 blockers
= [b
for b
in blockers
if not b
.happened
]
436 return download_impls(blockers
)
	def get_best_source(self, impl):
		"""Return the best download source for this implementation.
		@rtype: L{model.RetrievalMethod}"""
		if impl.download_sources:
			# First source is taken as best — presumably sources are listed
			# in preference order; TODO confirm against model.py.
			return impl.download_sources[0]
		# Falls through to an implicit None when there are no sources.
		# NOTE(review): the source may be truncated here — an explicit
		# 'return None' could follow in the full file.