2 # Copyright (C) 2007, Thomas Leonard
3 # See the COPYING file for details, or visit http://0install.net.
5 from optparse
import OptionParser
6 import os
, sys
, time
, shutil
, subprocess
, signal
7 from logging
import info
, debug
, warn
8 from xml
.dom
import minidom
# Safety net: schedule SIGALRM for 10 minutes from now so a hung download
# cannot block a mirror run forever (the default SIGALRM action kills the
# process).
10 signal
.alarm(10 * 60) # Abort after 10 minutes
12 from zeroinstall
import SafeException
13 from zeroinstall
.injector
.iface_cache
import iface_cache
14 from zeroinstall
.injector
import basedir
, model
, namespaces
, policy
, handler
, gpg
16 from atom
import AtomFeed
, set_element
# Base URL of the published mirror site, used to build absolute links below.
19 site_address
= "http://roscidus.com/0mirror"
# Command-line interface: one positional argument, the public output directory.
23 parser
= OptionParser(usage
="usage: %prog [options] PUBLIC-DIR")
24 parser
.add_option("-v", "--verbose", help="more verbose output", action
='count')
25 parser
.add_option("-V", "--version", help="display version information", action
='store_true')
27 (options
, args
) = parser
.parse_args()
# Version banner (Python 2 print statements).
# NOTE(review): 'version' and the "if options.version:" guard around these
# prints are on lines elided from this chunk — confirm against the full file.
30 print "0mirror (zero-install) " + version
31 print "Copyright (C) 2007 Thomas Leonard"
32 print "This program comes with ABSOLUTELY NO WARRANTY,"
33 print "to the extent permitted by law."
34 print "You may redistribute copies of this program"
35 print "under the terms of the GNU General Public License."
36 print "For more information about these matters, see the file named COPYING."
# Logging setup: -v selects INFO; the branch selecting DEBUG (line 44,
# presumably an elif for options.verbose > 1) is elided from this chunk.
# NOTE(review): the bare 'logging' module is referenced here, but only
# "from logging import info, debug, warn" is visible at the top of this
# chunk — an "import logging" presumably sits on an elided line; confirm.
41 logger
= logging
.getLogger()
42 if options
.verbose
== 1:
43 logger
.setLevel(logging
.INFO
)
45 logger
.setLevel(logging
.DEBUG
)
# Input file listing one feed URL per line, inside the public directory.
# NOTE(review): 'public_dir' is assigned from args on an elided line.
52 feed_file
= os
.path
.join(public_dir
, 'feed-list')
def escape_slashes(path):
    """Return *path* with every '/' replaced by '#'.

    Used so a URL path can be stored as a single flat file name on disk.
    """
    return '#'.join(path.split('/'))
# Ensure that directory 'path' exists.
# NOTE(review): the directory-creation statement (original line ~59,
# presumably os.makedirs(path)) is elided from this extraction — confirm
# against the full file.
57 def ensure_dirs(path
):
58 if not os
.path
.isdir(path
):
62 <summary type='xhtml'>
63 <div xmlns="http://www.w3.org/1999/xhtml">
64 <a href=""/> - <span/>
# Directory under the public tree where exported GPG keys are published.
69 key_dir
= os
.path
.join(public_dir
, 'keys')
# Export the GPG key with the given fingerprint into key_dir, once per run.
# 'keys' (defined on an elided line) caches fingerprints already exported.
# NOTE(review): the early return for a cached key (line 74) and the lines
# writing 'keydata' to 'stream' and closing it (lines 79-80) are elided
# from this extraction — confirm against the full file.
72 def ensure_key(fingerprint
):
73 if fingerprint
in keys
:
# File name uses the last 16 hex digits of the fingerprint (the key ID).
75 key_path
= os
.path
.join(key_dir
, fingerprint
[-16:] + '.gpg')
# Ask gpg for an ASCII-armoured ('-a') export of the key, captured on stdout.
76 child
= subprocess
.Popen(['gpg', '-a', '--export', fingerprint
], stdout
= subprocess
.PIPE
)
77 keydata
, unused
= child
.communicate()
78 stream
= file(key_path
, 'w')
81 print "Exported key", fingerprint
def format_date(date):
    """Format Unix timestamp *date* as an Atom (RFC 3339) UTC date string."""
    utc = time.gmtime(date)
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
# Map a feed URL to its on-disk mirror directory:
#   feeds/<scheme>/<domain>/<path-with-slashes-escaped>
# Raises SafeException for malformed URLs.
# NOTE(review): the parameter is shown as 'url' but the body only uses
# 'feed'; the guard condition on elided line 90 (which presumably binds or
# validates 'feed') is missing from this extraction — confirm upstream.
89 def get_feed_dir(url
):
91 raise SafeException("Invalid URL '%s'" % feed
)
92 scheme
, rest
= feed
.split('://', 1)
93 domain
, rest
= rest
.split('/', 1)
# NOTE(review): assert is stripped under python -O; a raise would be safer.
94 assert scheme
in ('http', 'https', 'ftp') # Just to check for mal-formed lines; add more as needed
# Reject empty components or ones starting with ',' as malformed.
95 for x
in [scheme
, domain
, rest
]:
96 if not x
or x
.startswith(','):
97 raise SafeException("Invalid URL '%s'" % feed
)
98 return os
.path
.join('feeds', scheme
, domain
, escape_slashes(rest
))
# Start of the main mirroring run (the enclosing try block and the
# remaining AtomFeed keyword arguments are on elided lines).
100 now
= format_date(time
.time())
101 news_feed
= AtomFeed(title
= "Zero Install News Feed",
102 link
= site_address
+ "/news-feed.xml",
# Sanity-check the output directory and the feed-list input file up front.
107 if not os
.path
.isdir(public_dir
):
108 raise SafeException("Public directory '%s' does not exist. "
109 "To setup a new site, create it as an empty directory now." % public_dir
)
110 if not os
.path
.isfile(feed_file
):
111 raise SafeException("File '%s' does not exist. It should contain a list of feed URLs, one per line" % feed_file
)
# filter(None, ...) drops blank lines (Python 2: returns a list of URLs).
112 feeds
= filter(None, file(feed_file
).read().split('\n'))
# NOTE(review): this rebinds the imported 'handler' MODULE name to a
# Handler instance — confusing shadowing, though code below relies on it.
113 handler
= handler
.Handler()
# Per-feed update.  This body sits inside a "for feed in feeds:" loop and a
# try block that are on lines elided from this chunk.
115 info("Processing feed '%s'", feed
)
116 feed_dir
= os
.path
.join(public_dir
, get_feed_dir(feed
))
117 ensure_dirs(feed_dir
)
119 #print "Updating", feed
# Build an injector policy for this feed; track stale feeds it notices.
120 p
= policy
.Policy(feed
, handler
)
121 p
.stale_feeds
= set()
122 iface
= p
.get_interface(feed
) # May start a download
# Force a refresh of any feeds the policy marked as stale.
124 for x
in p
.stale_feeds
:
125 print "Updating stale feed", feed
126 p
.begin_iface_download(x
)
# Block until all downloads started above have finished.
127 if handler
.monitored_downloads
:
128 print "Waiting for downloads for", feed
130 errors
= handler
.wait_for_downloads()
# Download failures are logged and the feed is skipped, not fatal.
131 except SafeException
, ex
:
132 warn("Error updating '%s': %s", feed
, str(ex
))
134 for error
in errors
or []:
# Locate the cached copy of the feed XML in the injector's cache.
137 cached
= basedir
.load_first_cache(namespaces
.config_site
, 'interfaces', model
.escape(feed
))
139 # Error during download?
140 warn("Attempted to fetch '%s', but still not cached", feed
)
# Each feed directory gets a symlink to the shared stylesheet.
143 style
= os
.path
.join(feed_dir
, 'interface.xsl')
144 if not os
.path
.islink(style
):
145 os
.symlink('../../../../feed_style.xsl', style
)
# 'latest.xml' is a symlink to the newest dated snapshot of the feed.
147 latest
= os
.path
.join(feed_dir
, 'latest.xml')
# Snapshot name is derived from the cached file's mtime, in UTC.
149 last_modified
= int(os
.stat(cached
).st_mtime
)
150 version_name
= time
.strftime('%Y-%m-%d_%H:%M.xml', time
.gmtime(last_modified
))
151 version_path
= os
.path
.join(feed_dir
, version_name
)
# If 'latest' already points at this snapshot and it exists, nothing to do.
153 if os
.path
.islink(latest
) and os
.readlink(latest
) == version_name
:
154 if os
.path
.exists(version_path
):
156 warn("Broken symlink '%s'!", latest
)
# Verify the feed's GPG signature and publish the signing key(s).
159 stream
= file(cached
)
160 unused
, sigs
= gpg
.check_stream(stream
)
164 if isinstance(x
, gpg
.ValidSig
):
165 ensure_key(x
.fingerprint
)
# NOTE(review): warn() gets an extra arg but no %s placeholder — the
# message string looks like it is missing a '%s'.
167 warn("Signature problem: ", x
)
# Publish the snapshot, then atomically repoint 'latest' via rename.
169 shutil
.copyfile(cached
, version_path
)
170 latest_new
= latest
+ '.new'
171 if os
.path
.exists(latest_new
):
172 os
.unlink(latest_new
)
173 os
.symlink(version_name
, latest_new
)
174 os
.rename(latest_new
, latest
)
175 print "Updated %s to %s" % (feed
, version_name
)
# Build the Atom news feed from the most recently modified interfaces.
# NOTE(review): 'ifaces' and the sort of latest_ifaces (elided line 178,
# presumably latest_ifaces.sort()) are missing from this extraction —
# [-16:] then reversed() suggests "newest 16, newest first"; confirm.
177 latest_ifaces
= [(iface
.last_modified
, iface
) for iface
in ifaces
]
179 latest_ifaces
= reversed(latest_ifaces
[-16:])
180 for date
, iface
in latest_ifaces
:
# Fill in the XHTML summary template ('summary_xml', defined earlier).
181 summary
= minidom
.parseString(summary_xml
)
182 set_element(summary
, "summary/div/a", iface
.get_name())
183 set_element(summary
, "summary/div/a/@href", iface
.uri
)
184 set_element(summary
, "summary/div/span", iface
.summary
)
# '#' must be %-escaped in the mirror URL since escape_slashes uses it.
185 news_feed
.add_entry(title
= "%s feed updated" % iface
.get_name(),
187 extra_links
= {'http://0install.net/2007/namespaces/0mirror/cached':
188 site_address
+ "/" + get_feed_dir(iface
.uri
).replace('#', '%23') + "/latest.xml"},
189 entry_id
= iface
.uri
,
190 updated
= format_date(date
),
191 summary
= summary
.documentElement
)
# Write the finished Atom feed into the public directory.
193 news_stream
= file(os
.path
.join(public_dir
, 'news-feed.xml'), 'w')
194 news_feed
.save(news_stream
)
# Top-level exception handlers for the whole run (Python 2 "except X, ex"
# syntax).  The matching try and any sys.exit calls are on elided lines.
197 except KeyboardInterrupt, ex
:
198 print >>sys
.stderr
, "Aborted at user's request"
# Expected errors: print the message only, unless -v asked for a traceback.
200 except SafeException
, ex
:
201 if options
.verbose
: raise
202 print >>sys
.stderr
, ex