# Model information about distribution packages as separate feeds
# zeroinstall/zeroinstall-afb.git / zeroinstall/injector/iface_cache.py
# blob 4ec192c4bea51823d7bde91006bccaf7b5103651
1 """
2 Manages the feed cache.
4 @var iface_cache: A singleton cache object. You should normally use this rather than
5 creating new cache objects.
7 """
8 # Copyright (C) 2009, Thomas Leonard
9 # See the README file for details, or visit http://0install.net.
11 # Note:
13 # We need to know the modification time of each interface, because we refuse
14 # to update to an older version (this prevents an attack where the attacker
15 # sends back an old version which is correctly signed but has a known bug).
17 # The way we store this is a bit complicated due to backward compatibility:
19 # - GPG-signed interfaces have their signatures removed and a last-modified
20 # attribute is stored containing the date from the signature.
22 # - XML-signed interfaces are stored unmodified with their signatures. The
23 # date is extracted from the signature when needed.
25 # - Older versions used to add the last-modified attribute even to files
26 # with XML signatures - these files therefore have invalid signatures and
27 # we extract from the attribute for these.
29 # Eventually, support for the first and third cases will be removed.
31 import os, sys, time
32 from logging import debug, info, warn
33 from cStringIO import StringIO
35 from zeroinstall import _
36 from zeroinstall.support import basedir
37 from zeroinstall.injector import reader, model
38 from zeroinstall.injector.namespaces import config_site, config_prog
39 from zeroinstall.injector.model import Interface, escape, unescape
40 from zeroinstall import zerostore, SafeException
42 def _pretty_time(t):
43 assert isinstance(t, (int, long)), t
44 return time.strftime('%Y-%m-%d %H:%M:%S UTC', time.localtime(t))
class ReplayAttack(SafeException):
	"""Raised when a feed being imported is older than the copy already in
	the cache (this may indicate an attempt to roll us back to a version
	with a known bug)."""
class PendingFeed(object):
	"""A feed that has been downloaded but not yet added to the interface cache.
	Feeds remain in this state until the user confirms that they trust at least
	one of the signatures.
	@ivar url: URL for the feed
	@type url: str
	@ivar signed_data: the untrusted data
	@type signed_data: stream
	@ivar sigs: signatures extracted from signed_data
	@type sigs: [L{gpg.Signature}]
	@ivar new_xml: the payload of the signed_data, or the whole thing if XML
	@type new_xml: str
	@since: 0.25"""
	__slots__ = ['url', 'signed_data', 'sigs', 'new_xml']

	def __init__(self, url, signed_data):
		"""Downloaded data is a GPG-signed message.
		@param url: the URL of the downloaded feed
		@type url: str
		@param signed_data: the downloaded data (not yet trusted)
		@type signed_data: stream
		@raise SafeException: if the data is not signed, and logs the actual data"""
		self.url = url
		self.signed_data = signed_data
		# Sets self.sigs and self.new_xml (raises if the data isn't signed)
		self.recheck()

	def download_keys(self, handler, feed_hint = None, key_mirror = None):
		"""Download any required GPG keys not already on our keyring.
		When all downloads are done (successful or otherwise), add any new keys
		to the keyring, L{recheck}.
		This is a task generator: it yields lists of blockers to wait on.
		@param handler: handler to manage the download
		@type handler: L{handler.Handler}
		@param feed_hint: feed on whose behalf we're downloading (passed to the handler)
		@param key_mirror: URL of directory containing keys, or None to use feed's directory
		@type key_mirror: str
		"""
		downloads = {}	# maps each blocker -> (download, tempfile stream)
		blockers = []
		for x in self.sigs:
			key_id = x.need_key()
			if key_id:
				# Fetch "<KEYID>.gpg" from the mirror, or from the feed's own directory
				import urlparse
				key_url = urlparse.urljoin(key_mirror or self.url, '%s.gpg' % key_id)
				info(_("Fetching key from %s"), key_url)
				dl = handler.get_download(key_url, hint = feed_hint)
				downloads[dl.downloaded] = (dl, dl.tempfile)
				blockers.append(dl.downloaded)

		exception = None
		any_success = False

		from zeroinstall.support import tasks

		# Keep waiting until every download has either succeeded or failed
		while blockers:
			yield blockers

			old_blockers = blockers
			blockers = []

			for b in old_blockers:
				try:
					tasks.check(b)	# re-raises any error stored on the blocker
					if b.happened:
						dl, stream = downloads[b]
						stream.seek(0)
						self._downloaded_key(stream)
						any_success = True
					else:
						blockers.append(b)	# not finished yet; wait another round
				except Exception:
					# Remember the error but keep processing the remaining keys
					_type, exception, tb = sys.exc_info()
					warn(_("Failed to import key for '%(url)s': %(exception)s"), {'url': self.url, 'exception': str(exception)})

		if exception and not any_success:
			# Every key failed to import: re-raise the last error with its
			# original traceback (Python 2 three-argument raise)
			raise exception, None, tb

		self.recheck()

	def _downloaded_key(self, stream):
		"""Import a single downloaded GPG key into the user's keyring.
		@param stream: seekable stream containing the key data"""
		import shutil, tempfile
		from zeroinstall.injector import gpg

		info(_("Importing key for feed '%s'"), self.url)

		# Python2.4: can't call fileno() on stream, so save to tmp file instead
		tmpfile = tempfile.TemporaryFile(prefix = 'injector-dl-data-')
		try:
			shutil.copyfileobj(stream, tmpfile)
			tmpfile.flush()

			tmpfile.seek(0)
			gpg.import_key(tmpfile)
		finally:
			tmpfile.close()

	def recheck(self):
		"""Set new_xml and sigs by reading signed_data.
		You need to call this when previously-missing keys are added to the GPG keyring."""
		import gpg
		try:
			self.signed_data.seek(0)
			stream, sigs = gpg.check_stream(self.signed_data)
			assert sigs	# check_stream returns at least one (possibly invalid) signature

			data = stream.read()
			# check_stream may return the input stream itself (plain XML case);
			# only close it if it gave us a new one
			if stream is not self.signed_data:
				stream.close()

			self.new_xml = data
			self.sigs = sigs
		except:
			# Log the untrusted payload to aid debugging, then re-raise
			self.signed_data.seek(0)
			info(_("Failed to check GPG signature. Data received was:\n") + repr(self.signed_data.read()))
			raise
class IfaceCache(object):
	"""
	The interface cache stores downloaded and verified interfaces in
	~/.cache/0install.net/interfaces (by default).

	There are methods to query the cache, add to it, check signatures, etc.

	The cache is updated by L{fetch.Fetcher}.

	Confusingly, this class is really two caches combined: the in-memory
	cache of L{model.Interface} objects, and an on-disk cache of L{model.ZeroInstallFeed}s.
	It will probably be split into two in future.

	@ivar distro: the native distribution proxy
	@type distro: L{distro.Distribution}

	@see: L{iface_cache} - the singleton IfaceCache instance.
	"""

	__slots__ = ['_interfaces', 'stores', '_feeds', '_distro']

	def __init__(self, distro = None):
		"""@param distro: distribution used to fetch "distribution:" feeds (since 0.49)
		@type distro: L{distro.Distribution}, or None to use the host distribution"""
		self._interfaces = {}	# in-memory cache: uri -> L{model.Interface}
		self._feeds = {}	# in-memory cache: url -> L{model.ZeroInstallFeed} or None

		self.stores = zerostore.Stores()

		self._distro = distro	# None here means "detect lazily" (see distro property)

	@property
	def distro(self):
		# Detected lazily on first use so that merely constructing an
		# IfaceCache doesn't pay the cost of probing the host distribution.
		if self._distro is None:
			from zeroinstall.injector.distro import get_host_distribution
			self._distro = get_host_distribution()
		return self._distro

	def update_interface_if_trusted(self, interface, sigs, xml):
		"""@deprecated: use L{update_feed_if_trusted} instead"""
		import warnings
		warnings.warn("Use update_feed_if_trusted instead", DeprecationWarning, stacklevel = 2)
		return self.update_feed_if_trusted(interface.uri, sigs, xml)

	def update_feed_if_trusted(self, feed_url, sigs, xml):
		"""Update a cached feed (using L{update_feed_from_network})
		if we trust the signatures.
		If we don't trust any of the signatures, do nothing.
		@param feed_url: the feed being updated
		@type feed_url: str
		@param sigs: signatures from L{gpg.check_stream}
		@type sigs: [L{gpg.Signature}]
		@param xml: the downloaded replacement feed document
		@type xml: str
		@return: True if the feed was updated
		@rtype: bool
		@since: 0.48
		"""
		import trust
		# The timestamp of the oldest trusted signature becomes the feed's
		# effective modification time (see the replay-attack note at the top
		# of this file).
		updated = self._oldest_trusted(sigs, trust.domain_from_url(feed_url))
		if updated is None: return False	# None are trusted

		self.update_feed_from_network(feed_url, xml, updated)
		return True

	def update_interface_from_network(self, interface, new_xml, modified_time):
		"""@deprecated: use L{update_feed_from_network} instead"""
		import warnings
		warnings.warn("Use update_feed_from_network instead", DeprecationWarning, stacklevel = 2)
		self.update_feed_from_network(interface.uri, new_xml, modified_time)

	def update_feed_from_network(self, feed_url, new_xml, modified_time):
		"""Update a cached feed.
		Called by L{update_feed_if_trusted} if we trust this data.
		After a successful update, L{writer} is used to update the feed's
		last_checked time.
		@param feed_url: the feed being updated
		@type feed_url: str
		@param new_xml: the downloaded replacement feed document
		@type new_xml: str
		@param modified_time: the timestamp of the oldest trusted signature
		(used as an approximation to the feed's modification time)
		@type modified_time: long
		@raises ReplayAttack: if modified_time is older than the currently cached time
		@since: 0.48
		"""
		debug(_("Updating '%(interface)s' from network; modified at %(time)s") %
			{'interface': feed_url, 'time': _pretty_time(modified_time)})

		if '\n<!-- Base64 Signature' not in new_xml:
			# Only do this for old-style feeds without
			# signatures. Otherwise, we can get the time from the
			# signature, and adding this attribute just makes the
			# signature invalid.
			from xml.dom import minidom
			doc = minidom.parseString(new_xml)
			doc.documentElement.setAttribute('last-modified', str(modified_time))
			new_xml = StringIO()
			doc.writexml(new_xml)
			new_xml = new_xml.getvalue()

		self._import_new_feed(feed_url, new_xml, modified_time)

		feed = self.get_feed(feed_url)

		# Record when we last checked this feed, so we know when to re-check
		import writer
		feed.last_checked = long(time.time())
		writer.save_feed(feed)

		info(_("Updated feed cache entry for %(interface)s (modified %(time)s)"),
			{'interface': feed.get_name(), 'time': _pretty_time(modified_time)})

	def _import_new_feed(self, feed_url, new_xml, modified_time):
		"""Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one
		"""
		assert modified_time

		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, escape(feed_url))

		old_modified = None
		if os.path.exists(cached):
			# NOTE(review): the file object is never closed explicitly;
			# relies on CPython refcounting to close it promptly
			old_xml = file(cached).read()
			if old_xml == new_xml:
				debug(_("No change"))
				# Update in-memory copy, in case someone else updated the disk copy
				self.get_feed(feed_url, force = True)
				return
			old_modified = int(os.stat(cached).st_mtime)

		# Do we need to write this temporary file now?
		stream = file(cached + '.new', 'w')
		stream.write(new_xml)
		stream.close()
		# Stamp the temporary file with the signature's date, and confirm we
		# can parse it back before letting it replace the old copy
		os.utime(cached + '.new', (modified_time, modified_time))
		new_mtime = reader.check_readable(feed_url, cached + '.new')
		assert new_mtime == modified_time

		# Prefer the date from the old copy's signature over its file mtime
		old_modified = self._get_signature_date(feed_url) or old_modified

		if old_modified:
			if new_mtime < old_modified:
				# Refuse to replace the cached feed with an older one
				os.unlink(cached + '.new')
				raise ReplayAttack(_("New feed's modification time is "
					"before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n"
					"Refusing update.")
					% {'iface': feed_url, 'old_time': _pretty_time(old_modified), 'new_time': _pretty_time(new_mtime)})
			if new_mtime == old_modified:
				# You used to have to update the modification time manually.
				# Now it comes from the signature, this check isn't useful
				# and often causes problems when the stored format changes
				# (e.g., when we stopped writing last-modified attributes)
				pass
				#raise SafeException("Interface has changed, but modification time "
				#		    "hasn't! Refusing update.")
		os.rename(cached + '.new', cached)
		debug(_("Saved as %s") % cached)

		# Refresh the in-memory copy from the file we just wrote
		self.get_feed(feed_url, force = True)

	def get_feed(self, url, force = False):
		"""Get a feed from the cache.
		@param url: the URL of the feed
		@param force: load the file from disk again
		@return: the feed, or None if it isn't cached
		@rtype: L{model.ZeroInstallFeed}"""
		if not force:
			# False is the "not in memory" sentinel, because None is a valid
			# cached value (meaning "known not to be on disk")
			feed = self._feeds.get(url, False)
			if feed != False:
				return feed

		if url.startswith('distribution:'):
			# A "distribution:" feed wraps a master feed, adding package
			# implementations supplied by the native distribution
			master_feed = self.get_feed(url.split(':', 1)[1])
			if not master_feed:
				return None	# Can't happen?
			feed = self.distro.get_feed(master_feed)
		else:
			feed = reader.load_feed_from_cache(url)
		if feed:
			reader.update_user_feed_overrides(feed)
		self._feeds[url] = feed
		return feed

	def get_interface(self, uri):
		"""Get the interface for uri, creating a new one if required.
		New interfaces are initialised from the disk cache, but not from
		the network.
		@param uri: the URI of the interface to find
		@rtype: L{model.Interface}
		"""
		if type(uri) == str:
			uri = unicode(uri)	# normalise so cache keys are always unicode
		assert isinstance(uri, unicode)

		if uri in self._interfaces:
			return self._interfaces[uri]

		debug(_("Initialising new interface object for %s"), uri)
		self._interfaces[uri] = Interface(uri)
		reader.update_from_cache(self._interfaces[uri])
		return self._interfaces[uri]

	def list_all_interfaces(self):
		"""List all interfaces in the cache.
		@rtype: [str]
		"""
		all = set()
		for d in basedir.load_cache_paths(config_site, 'interfaces'):
			for leaf in os.listdir(d):
				if not leaf.startswith('.'):
					all.add(unescape(leaf))
		# Also include interfaces that only have user settings (no cached feed)
		for d in basedir.load_config_paths(config_site, config_prog, 'user_overrides'):
			for leaf in os.listdir(d):
				if not leaf.startswith('.'):
					all.add(unescape(leaf))
		return list(all)	# Why not just return the set?

	def get_icon_path(self, iface):
		"""Get the path of a cached icon for an interface.
		@param iface: interface whose icon we want
		@return: the path of the cached icon, or None if not cached.
		@rtype: str"""
		return basedir.load_first_cache(config_site, 'interface_icons',
						escape(iface.uri))

	def get_cached_signatures(self, uri):
		"""Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@type uri: str
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None
		@since: 0.25"""
		import gpg
		if os.path.isabs(uri):
			# Local feed: the path given is the file to check
			old_iface = uri
		else:
			old_iface = basedir.load_first_cache(config_site, 'interfaces', escape(uri))
			if old_iface is None:
				return None	# not in the cache at all
		try:
			return gpg.check_stream(file(old_iface))[1]
		except SafeException, ex:
			debug(_("No signatures (old-style interface): %s") % ex)
			return None

	def _get_signature_date(self, uri):
		"""Read the date-stamp from the signature of the cached interface.
		If the date-stamp is unavailable, returns None."""
		import trust
		sigs = self.get_cached_signatures(uri)
		if sigs:
			return self._oldest_trusted(sigs, trust.domain_from_url(uri))
		# implicitly returns None when there are no signatures

	def _oldest_trusted(self, sigs, domain):
		"""Return the date of the oldest trusted signature in the list, or None if there
		are no trusted sigs in the list."""
		trusted = [s.get_timestamp() for s in sigs if s.is_trusted(domain)]
		if trusted:
			return min(trusted)
		return None

	def mark_as_checking(self, url):
		"""Touch a 'last_check_attempt_timestamp' file for this feed.
		If url is a local path, nothing happens.
		This prevents us from repeatedly trying to download a failing feed many
		times in a short period."""
		if os.path.isabs(url):
			return
		feeds_dir = basedir.save_cache_path(config_site, config_prog, 'last-check-attempt')
		timestamp_path = os.path.join(feeds_dir, model._pretty_escape(url))
		# Create the file if missing (0644 = rw-r--r--)
		fd = os.open(timestamp_path, os.O_WRONLY | os.O_CREAT, 0644)
		os.close(fd)
		os.utime(timestamp_path, None)	# In case file already exists

	def get_last_check_attempt(self, url):
		"""Return the time of the most recent update attempt for a feed.
		@see: L{mark_as_checking}
		@return: The time, or None if none is recorded
		@rtype: float | None"""
		timestamp_path = basedir.load_first_cache(config_site, config_prog, 'last-check-attempt', model._pretty_escape(url))
		if timestamp_path:
			return os.stat(timestamp_path).st_mtime
		return None

	def get_feed_imports(self, iface):
		"""Get all feeds that add to this interface.
		This is the feeds explicitly added by the user, feeds added by the distribution,
		and feeds imported by a <feed> in the main feed (but not recursively, at present).
		@rtype: [L{Feed}]
		@since: 0.48"""
		main_feed = self.get_feed(iface.uri)
		if main_feed:
			return iface.extra_feeds + main_feed.feeds
		else:
			# Main feed not cached: only the user/distro-added feeds are known
			return iface.extra_feeds

	def get_feeds(self, iface):
		"""Get all feeds for this interface. This is a mapping from feed URLs
		to ZeroInstallFeeds. It includes the interface's main feed, plus the
		resolution of every feed returned by L{get_feed_imports}. Uncached
		feeds are indicated by a value of None.
		@rtype: {str: L{ZeroInstallFeed} | None}
		@since: 0.48"""
		main_feed = self.get_feed(iface.uri)
		results = {iface.uri: main_feed}
		for imp in iface.extra_feeds:
			results[imp.uri] = self.get_feed(imp.uri)
		if main_feed:
			for imp in main_feed.feeds:
				results[imp.uri] = self.get_feed(imp.uri)
		return results

	def get_implementations(self, iface):
		"""Return all implementations from all of iface's feeds.
		@rtype: [L{Implementation}]
		@since: 0.48"""
		impls = []
		for feed in self.get_feeds(iface).itervalues():
			if feed:	# skip uncached feeds (None values)
				impls += feed.implementations.values()
		return impls
# Singleton cache object (see module docstring); use this rather than
# creating new IfaceCache instances.
iface_cache = IfaceCache()