Cache the GPG signatures to avoid rerunning gpg all the time
#!/usr/bin/env python
# Copyright (C) 2007, Thomas Leonard
# See the COPYING file for details, or visit http://0install.net.
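#
# 0mirror reads a list of feed URLs from feed-list, keeps a local mirror of
# each feed up to date, exports the GPG keys used to sign them, and publishes
# Atom feeds of recent updates (news-feed.xml) and problems (warnings.xml).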

from optparse import OptionParser
import os, sys, time, shutil, subprocess, signal, logging
from ConfigParser import RawConfigParser
from logging import info, debug, warn
from xml.dom import minidom
import codecs

signal.alarm(10 * 60)    # Abort after 10 minutes

from zeroinstall import SafeException
from zeroinstall.injector.iface_cache import iface_cache
from zeroinstall.injector import model, namespaces, config, gpg, handler
from zeroinstall.support import basedir, tasks

from atom import AtomFeed, set_element
from stats import Stats, sig_cache
from support import format_date, get_feed_dir, ensure_dirs

missing_set = set()

# Site configuration!

site_config_file = os.path.abspath('0mirror.ini')
FEED_TIMEOUT = 60    # Seconds to wait before giving up on a feed download

version = '0.1'

parser = OptionParser(usage="usage: %prog [options] PUBLIC-DIR")
parser.add_option("-v", "--verbose", help="more verbose output", action='count')
parser.add_option("-V", "--version", help="display version information", action='store_true')

(options, args) = parser.parse_args()

if options.version:
    print "0mirror (zero-install) " + version
    print "Copyright (C) 2010 Thomas Leonard"
    print "This program comes with ABSOLUTELY NO WARRANTY,"
    print "to the extent permitted by law."
    print "You may redistribute copies of this program"
    print "under the terms of the GNU General Public License."
    print "For more information about these matters, see the file named COPYING."
    sys.exit(0)

if options.verbose:
    logger = logging.getLogger()
    if options.verbose == 1:
        logger.setLevel(logging.INFO)
    else:
        logger.setLevel(logging.DEBUG)

if len(args) != 1:
    parser.print_help()
    sys.exit(1)

if not os.path.exists(site_config_file):
    print >>sys.stderr, "Configuration file '%s' not found!" % site_config_file
    sys.exit(1)
print "Reading configuration from", site_config_file

site_config = RawConfigParser()
site_config.read(site_config_file)

site_address = site_config.get('site', 'address')    # e.g. "http://localhost/0mirror"
if not site_address.endswith('/'):
    site_address += '/'

# Where we try if the primary site fails
my_mirror = site_config.get('fetching', 'upstream_mirror') or None

n_feeds_to_update = int(site_config.get('fetching', 'n_feeds_to_update'))

public_dir = args[0]

feed_file = os.path.join(public_dir, 'feed-list')
ignore_file = os.path.join(public_dir, 'ignore-list')
warnings_file = os.path.join(public_dir, 'warnings.xml')
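
# feed-list holds one feed URL per line; a leading '-' marks the feed as
# inactive. ignore-list holds subfeed URLs we deliberately don't mirror
# (it suppresses the "Missing subfeed" warning below).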

summary_xml = """
<summary type='xhtml'>
  <div xmlns="http://www.w3.org/1999/xhtml">
    <a href=""/> - <span/>
  </div>
</summary>
"""

warnings_xml = """
<summary type='xhtml'>
  <div xmlns="http://www.w3.org/1999/xhtml">
  </div>
</summary>
"""
unconfirmed_keys = []    # List of PendingFeeds
class NonInteractiveHandler(handler.Handler):
    def confirm_import_feed(self, pending, valid_sigs):
        for x in valid_sigs:
            warn("Need to check key %s for %s", x.fingerprint, pending.url)
        unconfirmed_keys.append(pending)
        return None
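
# Wait for 'blocker' to complete, but give up (and raise) after 'delay' seconds.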
@tasks.async
def wait_with_timeout(delay, blocker):
    timeout = tasks.TimeoutBlocker(delay, 'Mirror timeout')
    yield timeout, blocker
    tasks.check([timeout, blocker])
    if not blocker.happened:
        raise Exception("Timeout (waited %d seconds)" % delay)

warnings = []
def add_warning(title, msg):
    warn("%s: %s", title, msg)
    warnings.append((title, msg))

key_dir = os.path.join(public_dir, 'keys')
ensure_dirs(key_dir)
keys = set()
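# Export each signing key at most once per run, named after the last 16 hex
# digits of its fingerprint (the long key ID), so clients can fetch it from
# the 'keys' directory.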
def ensure_key(fingerprint):
    if fingerprint in keys:
        return
    key_path = os.path.join(key_dir, fingerprint[-16:] + '.gpg')
    child = subprocess.Popen(['gpg', '-a', '--export', fingerprint], stdout = subprocess.PIPE)
    keydata, unused = child.communicate()
    stream = file(key_path, 'w')
    stream.write(keydata)
    stream.close()
    print "Exported key", fingerprint
    keys.add(fingerprint)

feeds = []

now = format_date(time.time())
news_feed = AtomFeed(title = "Zero Install News Feed",
                     link = site_address + "/news-feed.xml",
                     updated = now,
                     author = "0mirror")
warnings_feed = AtomFeed(title = "0mirror Warnings Feed",
                         link = site_address + "/warnings.xml",
                         updated = now,
                         author = "0mirror",
                         source = warnings_file)
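
# Look feeds up in the 0install cache. Uncached feeds are expected here, so
# raise the "0install" log level while loading to avoid spurious warnings.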
def load_feed(url):
    return iface_cache.get_feed(url)

def load_feeds(feed_uris):
    logging.getLogger("0install").setLevel(logging.ERROR)
    try:
        feeds = {}

        for feed_url in feed_uris:
            feeds[feed_url] = load_feed(feed_url)
        return feeds
    finally:
        logging.getLogger("0install").setLevel(logging.WARNING)

feed = None
try:
    stats = Stats()
    if not os.path.isdir(public_dir):
        raise SafeException("Public directory '%s' does not exist. "
                "To set up a new site, create it as an empty directory now." % public_dir)
    if not os.path.isfile(feed_file):
        raise SafeException("File '%s' does not exist. It should contain a list of feed URLs, one per line" % feed_file)
    print "Reading", feed_file

    lines = filter(None, file(feed_file).read().split('\n'))
    feed_uris = [line for line in lines if not line.startswith('-')]
    feed_set = set(feed_uris)
    ignore_set = set(filter(None, file(ignore_file).read().split('\n')))
    inactive_set = set(line[1:] for line in lines if line.startswith('-'))

    known_set = feed_set | inactive_set

    stale_feeds = []    # [(last-checked, url, feed)]

    c = config.load_config()
    c.mirror = my_mirror

    feeds = load_feeds(feed_uris)
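    # feeds maps each URL to its cached ZeroInstallFeed, or None if we have
    # never successfully fetched it.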

    def last_checked(feed):
        if feed is None:
            # If we've never downloaded this feed, just keep trying (ignore last_check_attempt)
            return 0
        # Use the latest of the last successful check or the last failed check
        last_check_attempt = iface_cache.get_last_check_attempt(feed.url)
        if not last_check_attempt:
            return feed.last_checked
        return max(feed.last_checked or 0, last_check_attempt)

    # List all the feeds, starting with the most stale
    stale_feeds = [(last_checked(feed), url, feed) for url, feed in feeds.items()]
    stale_feeds.sort()

    # If we've got some completely new feeds, update all of them now
    while n_feeds_to_update < len(stale_feeds) and stale_feeds[n_feeds_to_update - 1][0] in (0, None):
        n_feeds_to_update += 1

    # Update the first few feeds in the list
    stale_feeds = stale_feeds[:n_feeds_to_update]
    for last_check, feed_url, feed in stale_feeds:
        if last_check > 0:
            ctime_str = time.strftime('%Y-%m-%d_%H:%M', time.gmtime(last_check))
            print "Feed %s last checked %s; updating..." % (feed_url, ctime_str)
        else:
            print "Feed %s is new; fetching..." % feed_url

        iface_cache.mark_as_checking(feed_url)
        blocker = c.fetcher.download_and_import_feed(feed_url, iface_cache)
        try:
            tasks.wait_for_blocker(wait_with_timeout(FEED_TIMEOUT, blocker))
        except Exception, ex:
            add_warning("Error fetching feed", "Error fetching '%s': %s" % (feed_url, ex))
            continue
        # Reload
        feed = feeds[feed_url] = load_feed(feed_url)
        #assert feed.last_checked, feed
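
    # Mirror every listed feed from the local cache, whether or not it was
    # refreshed this run.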
    for feed_url in feed_uris:
        info("Processing feed '%s'", feed_url)
        feed = feeds[feed_url]
        if feed is None:
            # Error during download?
            add_warning("Fetch failed", "Attempted to fetch '%s', but still not cached" % feed_url)
            continue

        feed_dir = os.path.join(public_dir, get_feed_dir(feed_url))
        ensure_dirs(feed_dir)

        cached = basedir.load_first_cache(namespaces.config_site, 'interfaces', model.escape(feed.url))
        assert cached is not None

        for subfeed in feed.feeds:
            if subfeed.uri not in known_set:
                if subfeed.uri.startswith('/'):
                    continue
                if subfeed.uri not in ignore_set:
                    add_warning("Missing subfeed", "WARNING: Subfeed %s of %s not in feeds list" % (subfeed.uri, feed.get_name()))

        # Check dependencies
        for impl in feed.implementations.values():
            for dep in impl.requires:
                if dep.interface not in known_set and dep.interface not in missing_set:
                    add_warning("Missing dependency", "Version %s of %s depends on %s, but that isn't being mirrored!" % (impl.get_version(), feed.url, dep.interface))
                    missing_set.add(dep.interface)
                    break
            else:
                continue
            break    # Once we've warned about one version, don't check any other versions

        style = os.path.join(feed_dir, 'interface.xsl')
        if not os.path.islink(style):
            os.symlink('../../../../feed_style.xsl', style)
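
        # Each version of the feed is published as YYYY-MM-DD_HH:MM.xml (named
        # from the cached file's mtime), with latest.xml pointing at the newest.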
        latest = os.path.join(feed_dir, 'latest.xml')

        last_modified = int(os.stat(cached).st_mtime)
        version_name = time.strftime('%Y-%m-%d_%H:%M.xml', time.gmtime(last_modified))
        version_path = os.path.join(feed_dir, version_name)

        if os.path.islink(latest) and os.readlink(latest) == version_name:
            if os.path.exists(version_path):
                continue
            warn("Broken symlink '%s'!", latest)

        # Get the keys
        stream = file(cached)
        unused, sigs = gpg.check_stream(stream)
        stream.close()

        for x in sigs:
            if isinstance(x, gpg.ValidSig):
                ensure_key(x.fingerprint)
            else:
                add_warning("Signature problem", x)
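
        # Publish the new version, switch latest.xml over atomically (symlink
        # to latest.xml.new, then rename), and refresh this feed's entry in
        # the signature cache so we don't have to rerun gpg next time.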
        shutil.copyfile(cached, version_path)
        latest_new = latest + '.new'
        if os.path.exists(latest_new):
            os.unlink(latest_new)
        os.symlink(version_name, latest_new)
        os.rename(latest_new, latest)
        sig_cache.update(feed.url)
        print "Updated %s to %s" % (feed, version_name)

    feed = None

    for feed_url in known_set:
        feed = load_feed(feed_url)
        if feed and feed.last_modified:
            stats.add_feed(feed, feed_url in feed_set)

    stats.write_summary(public_dir)

    for pending_feed in unconfirmed_keys:
        add_warning("Key awaiting confirmation",
            "Feed: {feed}, Fingerprint: {fingerprint}".format(
                feed = pending_feed.url,
                fingerprint = pending_feed.sigs[0].fingerprint))
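
    # Publish the warnings as an Atom feed, keeping only the 20 most recent.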
    if warnings:
        i = 0
        for (title, warning) in warnings:
            summary = minidom.parseString(warnings_xml)
            div = summary.getElementsByTagNameNS("http://www.w3.org/1999/xhtml", "div")[0]
            div.appendChild(summary.createTextNode(warning))
            warnings_feed.add_entry(title = title,
                link = site_address + "/warnings.xml",
                entry_id = "warning-" + now + '-%d' % i,
                updated = now,
                summary = summary.documentElement)
            i += 1
        warnings_feed.limit(20)
        with open(warnings_file, 'w') as stream:
            warnings_feed.save(stream)
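
    # Build the news feed from the 16 most recently modified feeds.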
    latest_feeds = [(feed.last_modified, feed) for feed in feeds.values() if feed]
    latest_feeds.sort()
    latest_feeds = reversed(latest_feeds[-16:])
    for date, feed in latest_feeds:
        summary = minidom.parseString(summary_xml)
        set_element(summary, "summary/div/a", feed.get_name())
        local_html_page = site_address + "/" + get_feed_dir(feed.url).replace('#', '%23') + "/feed.html"
        set_element(summary, "summary/div/a/@href", local_html_page)
        set_element(summary, "summary/div/span", feed.summary)
        news_feed.add_entry(title = "%s feed updated" % feed.get_name(),
            link = local_html_page,
            entry_id = feed.url,
            updated = format_date(date),
            summary = summary.documentElement)

    news_stream = codecs.open(os.path.join(public_dir, 'news-feed.xml'), 'w', encoding = 'utf-8')
    news_feed.save(news_stream)
    news_stream.close()

    if False:
        # Warn about possible missing feeds...
        child = subprocess.Popen(['0launch', '--list'], stdout = subprocess.PIPE)
        all_feeds, unused = child.communicate()
        all_feeds = set([x for x in all_feeds.split('\n') if x and not x.startswith('/')])
        unknown = all_feeds - known_set

        if unknown:
            print "\nUnknown feeds (add to known or ignore lists):"
            for feed in sorted(unknown):
                if '/tests/' in feed: continue
                print feed

    if missing_set:
        print "\nMissing feeds:"
        for x in missing_set:
            print x

except KeyboardInterrupt, ex:
    print >>sys.stderr, "Aborted at user's request"
    sys.exit(1)
except SafeException, ex:
    if options.verbose: raise
    print >>sys.stderr, ex
    if feed:
        print "(while processing %s)" % feed
    sys.exit(1)