from optparse import OptionParser
import sys, shutil, tempfile, urlparse
import urllib2, os, httplib
import ftplib, socket
import logging, time, traceback
from logging import info

from zeroinstall import SafeException
from zeroinstall.support import basedir, tasks
from zeroinstall.injector import model, gpg, namespaces, qdom
from zeroinstall.injector.config import load_config

from display import checking, result, error, highlight, error_new_line
# Shared zeroinstall injector configuration (cache paths, fetcher, app
# manager, network policy). Loaded once at import time.
config = load_config()

# One week, in seconds.  Used when judging the age of releases.
WEEK = 60 * 60 * 24 * 7
def host(address):
    """Return the hostname from a parsed address.

    address -- either a urlparse.urlparse() result (which has a .hostname
               attribute) or an old-style (scheme, netloc, ...) tuple,
               where the host must be split off any ':port' suffix.
    """
    if hasattr(address, 'hostname'):
        return address.hostname
    return address[1].split(':', 1)[0]
def port(address):
    """Return the port number from a parsed address, or None if no port
    was given.

    address -- either a urlparse.urlparse() result (which has a .port
               attribute) or an old-style (scheme, netloc, ...) tuple,
               where the port follows a ':' in the netloc.
    """
    if hasattr(address, 'port'):
        return address.port
    port = address[1].split(':', 1)[1:]
    if port:
        # The module-level self-test below requires an int here.
        return int(port[0])
    return None
# Import-time self-tests: the address helpers must agree for both tuple
# and urlparse-object forms.
assert port(('http', 'foo:81')) == 81
assert port(urlparse.urlparse('http://foo:81')) == 81
# Command-line interface.  The single positional argument is the feed
# (or registered app) to check.
parser = OptionParser(usage="usage: %prog [options] feed.xml")
parser.add_option("-d", "--dependencies", help="also check feeds for dependencies", action='store_true')
parser.add_option("-o", "--offline", help="only perform offline checks", action='store_true')
parser.add_option("-s", "--skip-archives", help="don't check the archives are OK", action='store_true')
parser.add_option("-v", "--verbose", help="more verbose output", action='count')
parser.add_option("-V", "--version", help="display version information", action='store_true')

(options, args) = parser.parse_args()
55 print "FeedLint (zero-install) " + version
56 print "Copyright (C) 2007 Thomas Leonard"
57 print "This program comes with ABSOLUTELY NO WARRANTY,"
58 print "to the extent permitted by law."
59 print "You may redistribute copies of this program"
60 print "under the terms of the GNU General Public License."
61 print "For more information about these matters, see the file named COPYING."
if options.offline:
    config.network_use = model.network_offline
    # Any direct network access in offline mode is a bug: point the HTTP
    # proxy at a dead local port so accidental requests fail fast.
    os.environ['http_proxy'] = 'http://localhost:9999/offline-mode'
# -v enables INFO logging; -vv (or more) enables full DEBUG output.
logger = logging.getLogger()
if options.verbose == 1:
    logger.setLevel(logging.INFO)
elif options.verbose:
    logger.setLevel(logging.DEBUG)
def arg_to_uri(arg):
    """Map a command-line argument to a feed URI.

    If arg names a registered 0install app, return that app's interface
    URI; otherwise treat arg as a feed URI (or local path) and
    canonicalise it.
    """
    app = config.app_mgr.lookup_app(arg, missing_ok = True)
    if app is not None:
        return app.get_requirements().interface_uri
    return model.canonical_iface_uri(arg)
90 to_check
= [arg_to_uri(a
) for a
in args
]
91 except SafeException
, ex
:
92 if options
.verbose
: raise
93 print >>sys
.stderr
, ex
def check_key(feed_url, keyid):
    """Check that the GPG key signing a feed is available.

    The key must be on the local keyring, and (unless in offline mode)
    the corresponding KEYID.gpg file must be downloadable from next to
    the feed.

    feed_url -- URL of the signed feed
    keyid    -- fingerprint/ID of the signing key

    Raises SafeException if the key is not on the keyring.
    """
    key_id = None
    for line in os.popen('gpg --with-colons --list-keys %s' % keyid):
        if line.startswith('pub:'):
            # Column 4 of gpg's colon-separated output is the key ID.
            key_id = line.split(':')[4]
            break
    if key_id is None:
        raise SafeException('Failed to find key "%s" on your keyring' % keyid)

    if options.offline: return

    key_url = urlparse.urljoin(feed_url, '%s.gpg' % key_id)

    if key_url in checked:
        info("(already checked key URL %s)", key_url)
    else:
        checking("Checking key %s" % key_url)
        # Fetch the whole key file to prove it is really there.
        urllib2.urlopen(key_url).read()
        result('OK')
        checked.add(key_url)
def get_http_size(url, ttl = 3):
    """Return the Content-Length reported by a HEAD request on url.

    Follows up to ttl HTTP redirects (301/302/303).  Returns the header
    value (a string), or None if the server sent no Content-Length.

    Raises SafeException on any other HTTP status, or when too many
    redirects occur.
    """
    assert not options.offline
    address = urlparse.urlparse(url)

    if url.lower().startswith('http://'):
        http = httplib.HTTPConnection(host(address), port(address) or 80)
    elif url.lower().startswith('https://'):
        http = httplib.HTTPSConnection(host(address), port(address) or 443)
    else:
        assert False, url

    # Everything after the third '/' is the request path.
    parts = url.split('/', 3)
    if len(parts) == 4:
        path = parts[3]
    else:
        path = ''

    http.request('HEAD', '/' + path, headers = {'Host': host(address)})
    response = http.getresponse()
    try:
        if response.status == 200:
            return response.getheader('Content-Length')
        elif response.status in (301, 302, 303):
            # Remember the target; the redirect is followed below, after
            # the connection has been closed.
            new_url_rel = response.getheader('Location') or response.getheader('URI')
            new_url = urlparse.urljoin(url, new_url_rel)
        else:
            raise SafeException("HTTP error: got status code %s" % response.status)
    finally:
        http.close()

    if ttl:
        result("Moved", 'YELLOW')
        checking("Checking new URL %s" % new_url)
        assert new_url
        return get_http_size(new_url, ttl - 1)
    else:
        raise SafeException('Too many redirections.')
def get_ftp_size(url):
    """Return the size in bytes of the file at an ftp:// URL, using an
    anonymous login."""
    address = urlparse.urlparse(url)
    ftp = ftplib.FTP(host(address))
    try:
        ftp.login()
        # Switch to binary mode so SIZE reports the true byte count.
        ftp.voidcmd('TYPE I')
        return ftp.size(url.split('/', 3)[3])
    finally:
        ftp.close()
def get_size(url, base_url = None):
    """Return the size of the resource at url, in bytes.

    url      -- the resource to measure; a URL, or (for local feeds) a
                path relative to the feed
    base_url -- the feed the reference came from, if any; local
                references are only allowed when this is a local path

    For HTTP(S) the value comes from a HEAD request and may be None if
    the server sent no Content-Length.  Raises SafeException for missing
    local files, local references in remote feeds, and unknown schemes.
    """
    if '://' not in url:
        # Not an absolute URL: treat as a local file reference.
        if base_url and base_url.startswith('/'):
            # Local feed; resolve relative to the feed's directory.
            local_path = os.path.join(os.path.dirname(base_url), url)
            if not os.path.exists(local_path):
                raise SafeException("Local file '%s' does not exist (should be a URL?)" % url)
            return os.path.getsize(local_path)
        if base_url is not None:
            raise SafeException("Local file reference '%s' in non-local feed '%s'" % (url, base_url))
        # Fall-through to Unknown scheme error

    scheme = urlparse.urlparse(url)[0].lower()
    if scheme.startswith('http') or scheme.startswith('https'):
        return get_http_size(url)
    elif scheme.startswith('ftp'):
        return get_ftp_size(url)
    else:
        raise SafeException("Unknown scheme '%s' in '%s'" % (scheme, url))
def check_source(feed_url, source):
    """Check that one retrieval method really exists and has the size the
    feed declares.

    feed_url -- URL (or local path) of the feed, used to resolve
                relative archive references
    source   -- an <archive> element (has .url/.size), or a <recipe>
                (has .steps), in which case each step is checked

    Raises SafeException if the server reports a different size.
    """
    if hasattr(source, 'url'):
        checking("Checking archive %s" % source.url)
        actual_size = get_size(source.url, feed_url)
        if actual_size is None:
            # HEAD request gave no Content-Length; nothing to compare.
            result("No Content-Length for archive; can't check", 'YELLOW')
        else:
            actual_size = int(actual_size)
            # The archive may start at an offset within the downloaded file.
            expected_size = source.size + (source.start_offset or 0)
            if actual_size != expected_size:
                raise SafeException("Expected archive to have a size of %d, but server says it is %d" %
                        (expected_size, actual_size))
            result('OK')
    elif hasattr(source, 'steps'):
        # A recipe: every step must check out.
        for step in source.steps:
            check_source(feed_url, step)
# URLs already confirmed to exist during this run.
existing_urls = set()

def check_exists(url):
    """Do a cheap existence check on url (a size query), caching successes
    so each URL is only checked once.  No-op in offline mode."""
    if url in existing_urls: return # Already checked
    if options.offline: return

    checking("Checking URL exists %s" % url)
    # A successful size query proves the resource is reachable.
    get_size(url)
    result('OK')
    existing_urls.add(url)
def scan_implementations(impls, dom):
    """Add each implementation in dom to impls. Error if duplicate.

    impls -- dict mapping implementation id -> element, updated in place
    dom   -- a feed (or <group>) element whose children are scanned

    Raises SafeException if two implementations share the same id.
    """
    for elem in dom.childNodes:
        # Skip elements from foreign XML namespaces.
        if elem.uri != namespaces.XMLNS_IFACE: continue
        if elem.name == 'implementation':
            impl_id = elem.attrs['id']
            if impl_id in impls:
                raise SafeException("Duplicate ID {id}!".format(id = impl_id))
            impls[impl_id] = elem
        elif elem.name == 'group':
            # Groups nest arbitrarily; recurse to collect their
            # implementations too.
            scan_implementations(impls, elem)
def check_gpg_sig(feed_url, stream):
    """Verify the GPG signature on a feed.

    feed_url -- the URL the feed claims to come from (used to locate the
                signing key next to the feed)
    stream   -- seekable stream containing the signed feed XML

    Returns the stream of unsigned feed data from gpg.check_stream.
    Raises SafeException if a signature cannot be verified.
    """
    # Peek at the first bytes to classify the signature style.
    start = stream.read(5)
    if start == '<?xml':
        result('Fetched')
    elif start == '-----':
        result('Old sig', colour = 'RED')
        error_new_line(' Feed has an old-style plain GPG signature. Use 0publish --xmlsign.',
                colour = 'YELLOW')
    else:
        result('Fetched')
        error_new_line(' Unknown format. File starts "%s"' % start)

    stream.seek(0)
    data, sigs = gpg.check_stream(stream)

    for s in sigs:
        if isinstance(s, gpg.ValidSig):
            check_key(feed_url, s.fingerprint)
        elif isinstance(s, gpg.ErrSig) and s.need_key():
            # Download missing key
            key = s.need_key()
            key_url = urlparse.urljoin(feed_url, '%s.gpg' % key)
            dl = config.fetcher.download_url(key_url)
            stream = dl.tempfile
            tasks.wait_for_blocker(dl.downloaded)

            stream.seek(0)
            gpg.import_key(stream)
            stream.seek(0)
            # Re-check now that the key is on the keyring.
            check_key(feed_url, key)
        else:
            raise SafeException("Can't check sig: %s" % s)

    return data
263 feed
= to_check
.pop()
265 info("Already checked feed %s", feed
)
270 checking("Checking " + feed
, indent
= 0)
273 if feed
.startswith('/'):
274 with
open(feed
) as stream
:
275 dom
= qdom
.parse(stream
)
277 if "uri" in dom
.attrs
:
280 check_gpg_sig(dom
.attrs
['uri'], stream
)
281 except SafeException
, ex
:
283 error_new_line(' %s' % ex
)
285 feed_obj
= model
.ZeroInstallFeed(dom
, local_path
= feed
if "uri" not in dom
.attrs
else None)
287 elif options
.offline
:
288 cached
= basedir
.load_first_cache(namespaces
.config_site
, 'interfaces', model
.escape(feed
))
290 raise SafeException('Not cached (offline-mode)')
291 with
open(cached
, 'rb') as stream
:
292 dom
= qdom
.parse(stream
)
293 feed_obj
= model
.ZeroInstallFeed(dom
)
296 tmp
= tempfile
.TemporaryFile(prefix
= 'feedlint-')
299 stream
= urllib2
.urlopen(feed
)
300 shutil
.copyfileobj(stream
, tmp
)
301 except Exception as ex
:
302 raise SafeException('Failed to fetch feed: {ex}'.format(ex
= ex
))
305 data
= check_gpg_sig(feed
, tmp
)
308 dom
= qdom
.parse(data
)
309 feed_obj
= model
.ZeroInstallFeed(dom
)
311 if feed_obj
.url
!= feed
:
312 raise SafeException('Incorrect URL "%s"' % feed_obj
.url
)
317 # Check for duplicate IDs
318 scan_implementations({}, dom
)
320 for f
in feed_obj
.feeds
:
321 info("Will check feed %s", f
.uri
)
322 to_check
.append(f
.uri
)
324 highest_version
= None
325 for impl
in sorted(feed_obj
.implementations
.values()):
326 if hasattr(impl
, 'dependencies'):
327 for r
in impl
.dependencies
.values():
328 if r
.interface
not in checked
:
329 info("Will check dependency %s", r
)
330 if options
.dependencies
:
331 to_check
.append(r
.interface
)
333 check_exists(r
.interface
)
334 if hasattr(impl
, 'download_sources') and not options
.skip_archives
:
335 if not options
.offline
:
336 for source
in impl
.download_sources
:
337 check_source(feed_obj
.url
, source
)
338 if impl
.local_path
is None:
340 raise SafeException("Version {version} has no digests".format(version
= impl
.get_version()))
341 stability
= impl
.upstream_stability
or model
.testing
342 if highest_version
is None or impl
.version
> highest_version
.version
:
343 highest_version
= impl
344 if stability
== model
.testing
:
346 if not impl
.released
:
347 if not impl
.local_path
:
348 testing_error
= "No release date on testing version"
351 released
= time
.strptime(impl
.released
, '%Y-%m-%d')
352 except ValueError, ex
:
353 testing_error
= "Can't parse date"
355 ago
= now
- time
.mktime(released
)
357 testing_error
= 'Release date is in the future!'
359 raise SafeException("Version %s: %s (released %s)" % (impl
.get_version(), testing_error
, impl
.released
))
361 # Old Windows versions use 32-bit integers to store versions. Newer versions use 64-bit ones, but in general
362 # keeping the numbers small is helpful.
363 for i
in range(0, len(impl
.version
), 2):
364 for x
in impl
.version
[i
]:
366 raise SafeException("Version %s: component %s won't fit in a 32-bit signed integer" % (impl
.get_version(), x
))
368 if highest_version
and (highest_version
.upstream_stability
or model
.testing
) is model
.testing
:
369 print highlight(' Highest version (%s) is still "testing"' % highest_version
.get_version(), 'YELLOW')
371 for homepage
in feed_obj
.get_metadata(namespaces
.XMLNS_IFACE
, 'homepage'):
372 check_exists(homepage
.content
)
374 for icon
in feed_obj
.get_metadata(namespaces
.XMLNS_IFACE
, 'icon'):
375 check_exists(icon
.getAttribute('href'))
377 except (urllib2
.HTTPError
, httplib
.BadStatusLine
, socket
.error
, ftplib
.error_perm
), ex
:
378 err_msg
= str(ex
).strip() or str(type(ex
))
379 error_new_line(' ' + err_msg
)
381 if options
.verbose
: traceback
.print_exc()
382 except SafeException
, ex
:
383 if options
.verbose
: raise
384 error_new_line(' ' + str(ex
))
390 print "\nERRORS FOUND:", n_errors