"""
Manages the feed cache.

@var iface_cache: A singleton cache object. You should normally use this rather than
creating new cache objects.
"""
# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.
# We need to know the modification time of each interface, because we refuse
# to update to an older version (this prevents an attack where the attacker
# sends back an old version which is correctly signed but has a known bug).
#
# The way we store this is a bit complicated due to backward compatibility:
#
# - GPG-signed interfaces have their signatures removed and a last-modified
#   attribute is stored containing the date from the signature.
#
# - XML-signed interfaces are stored unmodified with their signatures. The
#   date is extracted from the signature when needed.
#
# - Older versions used to add the last-modified attribute even to files
#   with XML signatures - these files therefore have invalid signatures and
#   we extract from the attribute for these.
#
# Eventually, support for the first and third cases will be removed.
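#
# For example, the date of an old-style (case 1 or 3) cached feed could be read
# back from that attribute roughly like this (an illustrative sketch only;
# 'cached_path' is a hypothetical path to the cached file, and the real parsing
# is handled by the L{reader} module):
#
#   from xml.dom import minidom
#   doc = minidom.parse(cached_path)
#   stamp = int(doc.documentElement.getAttribute('last-modified'))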
import os, sys, time
import urlparse
import warnings
from logging import debug, info, warn
from cStringIO import StringIO

from zeroinstall import _
from zeroinstall.support import basedir
from zeroinstall.injector import reader, model
from zeroinstall.injector.namespaces import config_site, config_prog
from zeroinstall.injector.model import Interface, escape, unescape
from zeroinstall import zerostore, SafeException
def _pretty_time(t):
	assert isinstance(t, (int, long)), t
	return time.strftime('%Y-%m-%d %H:%M:%S UTC', time.localtime(t))

class ReplayAttack(SafeException):
	"""Attempt to import a feed that's older than the one in the cache."""

class PendingFeed(object):
	"""A feed that has been downloaded but not yet added to the interface cache.
	Feeds remain in this state until the user confirms that they trust at least
	one of the signatures.
	@ivar url: URL for the feed
	@ivar signed_data: the untrusted data
	@type signed_data: stream
	@ivar sigs: signatures extracted from signed_data
	@type sigs: [L{gpg.Signature}]
	@ivar new_xml: the payload of the signed_data, or the whole thing if XML
	"""
	__slots__ = ['url', 'signed_data', 'sigs', 'new_xml']

	def __init__(self, url, signed_data):
		"""Downloaded data is a GPG-signed message.
		@param url: the URL of the downloaded feed
		@param signed_data: the downloaded data (not yet trusted)
		@type signed_data: stream
		@raise SafeException: if the data is not signed, and logs the actual data"""
		self.url = url
		self.signed_data = signed_data
		self.recheck()

	def download_keys(self, handler, feed_hint = None, key_mirror = None):
		"""Download any required GPG keys not already on our keyring.
		When all downloads are done (successful or otherwise), add any new keys
		to the keyring, then L{recheck}.
		@param handler: handler to manage the download
		@type handler: L{handler.Handler}
		@param key_mirror: URL of directory containing keys, or None to use feed's directory"""
		downloads = {}
		blockers = []
		for sig in self.sigs:
			key_id = sig.need_key()
			if key_id:
				key_url = urlparse.urljoin(key_mirror or self.url, '%s.gpg' % key_id)
				info(_("Fetching key from %s"), key_url)
				dl = handler.get_download(key_url, hint = feed_hint)
				downloads[dl.downloaded] = (dl, dl.tempfile)
				blockers.append(dl.downloaded)
		exception = None
		any_success = False

		from zeroinstall.support import tasks

		while blockers:
			yield blockers

			old_blockers = blockers
			blockers = []

			for b in old_blockers:
				try:
					tasks.check(b)
					if b.happened:
						dl, stream = downloads[b]
						stream.seek(0)
						self._downloaded_key(stream)
						any_success = True
					else:
						blockers.append(b)
				except Exception:
					_type, exception, tb = sys.exc_info()
					warn(_("Failed to import key for '%(url)s': %(exception)s"), {'url': self.url, 'exception': str(exception)})

		if exception and not any_success:
			raise exception, None, tb

	def _downloaded_key(self, stream):
		import shutil, tempfile
		from zeroinstall.injector import gpg

		info(_("Importing key for feed '%s'"), self.url)

		# Python2.4: can't call fileno() on stream, so save to tmp file instead
		tmpfile = tempfile.TemporaryFile(prefix = 'injector-dl-data-')
		try:
			shutil.copyfileobj(stream, tmpfile)
			tmpfile.flush()

			tmpfile.seek(0)
			gpg.import_key(tmpfile)
		finally:
			tmpfile.close()

	def recheck(self):
		"""Set new_xml and sigs by reading signed_data.
		You need to call this when previously-missing keys are added to the GPG keyring."""
		from zeroinstall.injector import gpg
		try:
			self.signed_data.seek(0)
			stream, sigs = gpg.check_stream(self.signed_data)
			assert sigs

			data = stream.read()
			if stream is not self.signed_data:
				stream.close()

			self.new_xml = data
			self.sigs = sigs
		except:
			self.signed_data.seek(0)
			info(_("Failed to check GPG signature. Data received was:\n") + repr(self.signed_data.read()))
			raise

class IfaceCache(object):
	"""
	The interface cache stores downloaded and verified interfaces in
	~/.cache/0install.net/interfaces (by default).

	There are methods to query the cache, add to it, check signatures, etc.

	The cache is updated by L{fetch.Fetcher}.

	Confusingly, this class is really two caches combined: the in-memory
	cache of L{model.Interface} objects, and an on-disk cache of L{model.ZeroInstallFeed}s.
	It will probably be split into two in future.

	@ivar distro: the native distribution proxy
	@type distro: L{distro.Distribution}

	@see: L{iface_cache} - the singleton IfaceCache instance.
	"""

	__slots__ = ['_interfaces', 'stores', '_feeds', '_distro']

	def __init__(self, distro = None):
		"""@param distro: the distribution used to resolve "distribution:" feeds (since 0.49)
		@type distro: L{distro.Distribution}, or None to use the host distribution"""
		self._interfaces = {}
		self._feeds = {}

		self.stores = zerostore.Stores()

		self._distro = distro

	@property
	def distro(self):
		if self._distro is None:
			from zeroinstall.injector.distro import get_host_distribution
			self._distro = get_host_distribution()
		return self._distro

	def update_interface_if_trusted(self, interface, sigs, xml):
		warnings.warn("Use update_feed_if_trusted instead", DeprecationWarning, stacklevel = 2)
		return self.update_feed_if_trusted(interface.uri, sigs, xml)

	def update_feed_if_trusted(self, feed_url, sigs, xml):
		"""Update a cached feed (using L{update_feed_from_network})
		if we trust the signatures.
		If we don't trust any of the signatures, do nothing.
		@param feed_url: the feed being updated
		@param sigs: signatures from L{gpg.check_stream}
		@type sigs: [L{gpg.Signature}]
		@param xml: the downloaded replacement feed document
		@return: True if the feed was updated"""
		from zeroinstall.injector import trust
		updated = self._oldest_trusted(sigs, trust.domain_from_url(feed_url))
		if updated is None: return False	# None are trusted

		self.update_feed_from_network(feed_url, xml, updated)
		return True

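	# Illustrative sketch of the trust check (not code from this module;
	# 'stream' is assumed to be the downloaded, GPG-signed feed data):
	#
	#   from zeroinstall.injector import gpg
	#   data, sigs = gpg.check_stream(stream)
	#   if iface_cache.update_feed_if_trusted(feed_url, sigs, data.read()):
	#       print "Feed updated"
	#   else:
	#       print "No trusted signatures; feed not imported"
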
	def update_interface_from_network(self, interface, new_xml, modified_time):
		warnings.warn("Use update_feed_from_network instead", DeprecationWarning, stacklevel = 2)
		self.update_feed_from_network(interface.uri, new_xml, modified_time)

	def update_feed_from_network(self, feed_url, new_xml, modified_time):
		"""Update a cached feed.
		Called by L{update_feed_if_trusted} if we trust this data.
		After a successful update, L{writer} is used to update the feed's
		last_checked time.
		@param feed_url: the feed being updated
		@type feed_url: str
		@param new_xml: the downloaded replacement feed document
		@param modified_time: the timestamp of the oldest trusted signature
		(used as an approximation to the feed's modification time)
		@type modified_time: long
		@raises ReplayAttack: if modified_time is older than the currently cached time"""
		debug(_("Updating '%(interface)s' from network; modified at %(time)s") %
			{'interface': feed_url, 'time': _pretty_time(modified_time)})

		if '\n<!-- Base64 Signature' not in new_xml:
			# Only do this for old-style feeds without
			# signatures. Otherwise, we can get the time from the
			# signature, and adding this attribute just makes the
			# signature invalid.
			from xml.dom import minidom
			doc = minidom.parseString(new_xml)
			doc.documentElement.setAttribute('last-modified', str(modified_time))
			new_xml = StringIO()
			doc.writexml(new_xml)
			new_xml = new_xml.getvalue()

		self._import_new_feed(feed_url, new_xml, modified_time)

		feed = self.get_feed(feed_url)

		from zeroinstall.injector import writer
		feed.last_checked = long(time.time())
		writer.save_feed(feed)

		info(_("Updated feed cache entry for %(interface)s (modified %(time)s)"),
			{'interface': feed.get_name(), 'time': _pretty_time(modified_time)})

	def _import_new_feed(self, feed_url, new_xml, modified_time):
		"""Write new_xml into the cache.
		@param feed_url: the URL for the feed being updated
		@param new_xml: the data to write
		@param modified_time: when new_xml was modified
		@raises ReplayAttack: if the new mtime is older than the current one"""
		upstream_dir = basedir.save_cache_path(config_site, 'interfaces')
		cached = os.path.join(upstream_dir, escape(feed_url))

		old_modified = None
		if os.path.exists(cached):
			old_xml = file(cached).read()
			if old_xml == new_xml:
				debug(_("No change"))
				# Update in-memory copy, in case someone else updated the disk copy
				self.get_feed(feed_url, force = True)
				return
			old_modified = int(os.stat(cached).st_mtime)

		# Do we need to write this temporary file now?
		stream = file(cached + '.new', 'w')
		stream.write(new_xml)
		stream.close()
		os.utime(cached + '.new', (modified_time, modified_time))
		new_mtime = reader.check_readable(feed_url, cached + '.new')
		assert new_mtime == modified_time

		old_modified = self._get_signature_date(feed_url) or old_modified

		if new_mtime < old_modified:
			os.unlink(cached + '.new')
			raise ReplayAttack(_("New feed's modification time is "
				"before old version!\nInterface: %(iface)s\nOld time: %(old_time)s\nNew time: %(new_time)s\n")
				% {'iface': feed_url, 'old_time': _pretty_time(old_modified), 'new_time': _pretty_time(new_mtime)})

		if new_mtime == old_modified:
			# You used to have to update the modification time manually.
			# Now it comes from the signature, so this check isn't useful
			# and often causes problems when the stored format changes
			# (e.g., when we stopped writing last-modified attributes).
			pass
			#raise SafeException("Interface has changed, but modification time "
			#	"hasn't! Refusing update.")

		os.rename(cached + '.new', cached)
		debug(_("Saved as %s") % cached)

		self.get_feed(feed_url, force = True)

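	# Replay-protection sketch (illustrative only; _import_new_feed is an
	# internal helper and the timestamps below are made up):
	#
	#   cache._import_new_feed(url, new_xml, 1260000000)
	#   cache._import_new_feed(url, older_xml, 1250000000)   # raises ReplayAttack
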
	def get_feed(self, url, force = False):
		"""Get a feed from the cache.
		@param url: the URL of the feed
		@param force: load the file from disk again
		@return: the feed, or None if it isn't cached
		@rtype: L{model.ZeroInstallFeed}"""
		if not force:
			feed = self._feeds.get(url, False)
			if feed is not False:
				return feed

		if url.startswith('distribution:'):
			master_feed = self.get_feed(url.split(':', 1)[1])
			if not master_feed:
				return None	# Can't happen?
			feed = self.distro.get_feed(master_feed)
		else:
			feed = reader.load_feed_from_cache(url)

		if feed:
			reader.update_user_feed_overrides(feed)

		self._feeds[url] = feed

		return feed

	def get_interface(self, uri):
		"""Get the interface for uri, creating a new one if required.
		New interfaces are initialised from the disk cache, but not from
		the network.
		@param uri: the URI of the interface to find
		@rtype: L{model.Interface}"""
		assert isinstance(uri, unicode)

		if uri in self._interfaces:
			return self._interfaces[uri]

		debug(_("Initialising new interface object for %s"), uri)
		self._interfaces[uri] = Interface(uri)
		reader.update_from_cache(self._interfaces[uri])
		return self._interfaces[uri]

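	# Example lookup (an illustrative sketch; note that get_interface expects a
	# unicode URI):
	#
	#   iface = iface_cache.get_interface(u'http://example.com/prog.xml')
	#   for feed in iface_cache.get_feed_imports(iface):
	#       print feed.uri
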
	def list_all_interfaces(self):
		"""List all interfaces in the cache."""
		all = set()
		for d in basedir.load_cache_paths(config_site, 'interfaces'):
			for leaf in os.listdir(d):
				if not leaf.startswith('.'):
					all.add(unescape(leaf))
		for d in basedir.load_config_paths(config_site, config_prog, 'user_overrides'):
			for leaf in os.listdir(d):
				if not leaf.startswith('.'):
					all.add(unescape(leaf))
		return list(all)	# Why not just return the set?

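	# For example (illustrative):
	#
	#   for uri in iface_cache.list_all_interfaces():
	#       print uri
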
	def get_icon_path(self, iface):
		"""Get the path of a cached icon for an interface.
		@param iface: interface whose icon we want
		@return: the path of the cached icon, or None if not cached."""
		return basedir.load_first_cache(config_site, 'interface_icons',
						escape(iface.uri))

	def get_cached_signatures(self, uri):
		"""Verify the cached interface using GPG.
		Only new-style XML-signed interfaces retain their signatures in the cache.
		@param uri: the feed to check
		@return: a list of signatures, or None
		@rtype: [L{gpg.Signature}] or None"""
		from zeroinstall.injector import gpg
		if os.path.isabs(uri):
			old_iface = uri
		else:
			old_iface = basedir.load_first_cache(config_site, 'interfaces', escape(uri))
			if old_iface is None:
				return None
		try:
			return gpg.check_stream(file(old_iface))[1]
		except SafeException, ex:
			debug(_("No signatures (old-style interface): %s") % ex)
			return None

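	# Sketch of checking those signatures for trust (illustrative only; assumes
	# the feed is cached and XML-signed):
	#
	#   from zeroinstall.injector import trust
	#   sigs = iface_cache.get_cached_signatures(feed_url)
	#   domain = trust.domain_from_url(feed_url)
	#   trusted = [s for s in (sigs or []) if s.is_trusted(domain)]
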
	def _get_signature_date(self, uri):
		"""Read the date-stamp from the signature of the cached interface.
		If the date-stamp is unavailable, returns None."""
		from zeroinstall.injector import trust
		sigs = self.get_cached_signatures(uri)
		if sigs:
			return self._oldest_trusted(sigs, trust.domain_from_url(uri))

	def _oldest_trusted(self, sigs, domain):
		"""Return the date of the oldest trusted signature in the list, or None if there
		are no trusted sigs in the list."""
		trusted = [s.get_timestamp() for s in sigs if s.is_trusted(domain)]
		if trusted:
			return min(trusted)
		return None

	def mark_as_checking(self, url):
		"""Touch a 'last_check_attempt_timestamp' file for this feed.
		If url is a local path, nothing happens.
		This prevents us from repeatedly trying to download a failing feed many
		times in a short period."""
		if os.path.isabs(url):
			return
		feeds_dir = basedir.save_cache_path(config_site, config_prog, 'last-check-attempt')
		timestamp_path = os.path.join(feeds_dir, model._pretty_escape(url))
		fd = os.open(timestamp_path, os.O_WRONLY | os.O_CREAT, 0644)
		os.close(fd)
		os.utime(timestamp_path, None)	# In case file already exists

	def get_last_check_attempt(self, url):
		"""Return the time of the most recent update attempt for a feed.
		@see: L{mark_as_checking}
		@return: The time, or None if none is recorded
		@rtype: float | None"""
		timestamp_path = basedir.load_first_cache(config_site, config_prog, 'last-check-attempt', model._pretty_escape(url))
		if timestamp_path:
			return os.stat(timestamp_path).st_mtime
		return None

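	# Rate-limiting sketch (illustrative; FAILED_CHECK_DELAY is a hypothetical
	# minimum interval in seconds, not defined in this module):
	#
	#   last = iface_cache.get_last_check_attempt(feed_url)
	#   if last is None or time.time() - last > FAILED_CHECK_DELAY:
	#       iface_cache.mark_as_checking(feed_url)
	#       # ...go ahead and download the feed...
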
	def get_feed_imports(self, iface):
		"""Get all feeds that add to this interface.
		This is the feeds explicitly added by the user, feeds added by the distribution,
		and feeds imported by a <feed> in the main feed (but not recursively, at present)."""
		main_feed = self.get_feed(iface.uri)
		if main_feed:
			return iface.extra_feeds + main_feed.feeds
		else:
			return iface.extra_feeds

	def get_feeds(self, iface):
		"""Get all feeds for this interface. This is a mapping from feed URLs
		to ZeroInstallFeeds. It includes the interface's main feed, plus the
		resolution of every feed returned by L{get_feed_imports}. Uncached
		feeds are indicated by a value of None.
		@rtype: {str: L{ZeroInstallFeed} | None}"""
		main_feed = self.get_feed(iface.uri)
		results = {iface.uri: main_feed}
		for imp in iface.extra_feeds:
			try:
				results[imp.uri] = self.get_feed(imp.uri)
			except SafeException, ex:
				warn("Failed to load feed '%s': %s", imp.uri, ex)
		if main_feed:
			for imp in main_feed.feeds:
				results[imp.uri] = self.get_feed(imp.uri)
		return results

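	# Illustrative use of the mapping returned by get_feeds:
	#
	#   for feed_url, feed in iface_cache.get_feeds(iface).iteritems():
	#       if feed is None:
	#           print "Not cached:", feed_url
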
	def get_implementations(self, iface):
		"""Return all implementations from all of iface's feeds.
		@rtype: [L{Implementation}]"""
		impls = []
		for feed in self.get_feeds(iface).itervalues():
			if feed:
				impls += feed.implementations.values()
		return impls

iface_cache = IfaceCache()
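
# Putting the pieces together with the singleton (an illustrative sketch; the
# URI is made up):
#
#   iface = iface_cache.get_interface(u'http://example.com/prog.xml')
#   for impl in iface_cache.get_implementations(iface):
#       print impl.get_version()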