Start development series 0.4-post
[0export.git] / utils.py
blob67696a76d2900e6a8459dec09b984dfd9b3696d1
1 import os, subprocess, shutil
2 from logging import info, warn
4 from zeroinstall import SafeException
5 from zeroinstall.injector import model, namespaces, gpg, iface_cache
6 from zeroinstall.support import basedir, find_in_path
7 from zeroinstall.zerostore import manifest
def escape_slashes(path):
    """Return *path* with every '/' replaced by '#'.

    Used to flatten the path part of a feed URL into a single
    filename component (see get_feed_path)."""
    return '#'.join(path.split('/'))
12 # From 0mirror
def get_feed_path(feed):
    """Map a remote feed URL to a relative path under 'feeds/'.

    The path is feeds/<scheme>/<domain>/<rest-with-slashes-escaped>.
    Raises SafeException if the URL is malformed or contains components
    that could escape the feeds directory."""
    if '#' in feed:
        raise SafeException("Invalid URL '%s'" % feed)
    scheme, rest = feed.split('://', 1)
    domain, rest = rest.split('/', 1)
    assert scheme in ('http', 'https', 'ftp')	# Just to check for mal-formed lines; add more as needed
    for x in [scheme, domain, rest]:
        # Reject empty components and anything starting with '.' or ','.
        # The '.' check blocks path traversal (a domain of '..' would
        # otherwise map outside the feeds directory).
        if not x or x.startswith(('.', ',')):
            raise SafeException("Invalid URL '%s'" % feed)
    return os.path.join('feeds', scheme, domain, escape_slashes(rest))
def get_gpg():
    """Locate the GnuPG binary on $PATH, preferring 'gpg' over 'gpg2'.

    Returns the result of find_in_path for the first name found
    (None-like result if neither is installed)."""
    for name in ('gpg', 'gpg2'):
        found = find_in_path(name)
        if found:
            return found
    return found
def export_key(fingerprint, key_dir):
    """Export the GPG public key *fingerprint* in ASCII-armour to
    <key_dir>/<last-16-hex-digits>.gpg.

    Raises SafeException if gpg exits with a non-zero status (the
    original silently wrote whatever gpg produced, possibly an empty
    file)."""
    key_path = os.path.join(key_dir, fingerprint[-16:] + '.gpg')
    child = subprocess.Popen([get_gpg(), '-a', '--export', fingerprint], stdout = subprocess.PIPE)
    keydata, unused = child.communicate()
    if child.returncode != 0:
        raise SafeException("gpg --export failed for key %s (exit status %d)" % (fingerprint, child.returncode))
    # 'with' guarantees the file is closed even if the write fails
    # (the original used the Python-2-only file() and leaked on error).
    with open(key_path, 'w') as stream:
        stream.write(keydata)
    info("Exported key %s", fingerprint)
class NoLocalVersions:
    """Restriction rejecting local (uncached) implementations.

    allow_package controls whether 'package:' (distribution-provided)
    implementations are accepted; they are excluded from the produced
    bundle later, but accepting them here avoids rejecting
    implementations that depend on them."""

    def __init__(self, allow_package):
        self.allow_package = allow_package

    def meets_restriction(self, impl):
        """Return True if *impl* may be selected for export."""
        if isinstance(impl, model.ZeroInstallImplementation):
            # Local implementations have path-like IDs (absolute or relative).
            return not impl.id.startswith(('/', '.'))
        # Accept package implementations to not deny ones that depend on it.
        # Package implementations will be excluded from produced bundle later.
        return self.allow_package if impl.id.startswith('package:') else False
# Shared restriction instances:
#  no_local            - reject local implementations, but allow distribution packages
#  no_local_or_package - reject both local implementations and distribution packages
no_local = NoLocalVersions(True)
no_local_or_package = NoLocalVersions(False)
class NoLocalRestrictions(dict):
    """A {interface: [restriction]} mapping that applies a NoLocalVersions
    restriction to every interface.

    Interfaces whose URI is in *uris* (the roots being exported) also
    reject 'package:' implementations; all others merely reject local
    ones."""

    def __init__(self, uris):
        self.uris = uris

    # This restriction applies to all interfaces, so ignore key.
    # 'default' is now optional (and still unused) so this override stays
    # substitutable for dict.get, whose second argument is optional.
    def get(self, key, default = None):
        if key.uri in self.uris:
            return [no_local_or_package]
        else:
            return [no_local]
def export_feeds(export_dir, feeds, keys_used):
    """Copy each feed (and icon) in feeds from the cache to export_dir.
    Add all signing key fingerprints to keys_used.

    @param export_dir: root directory of the bundle being built
    @param feeds: iterable of feed URIs (local paths and 'distribution:'
           entries are skipped)
    @param keys_used: set, mutated in place with fingerprints of valid
           signatures found on the exported feeds"""
    for feed in feeds:
        # Local feeds have absolute paths as their URIs; nothing to export.
        if feed.startswith('/'):
            info("Skipping local feed %s", feed)
            continue
        # Distribution (native package) feeds are not cached XML either.
        if feed.startswith('distribution:'):
            info("Skipping distribution feed %s", feed)
            continue
        print "Exporting feed", feed
        # Store feed: look up the cached copy of the (escaped) feed URI.
        cached = basedir.load_first_cache(namespaces.config_site,
                          'interfaces',
                          model.escape(feed))
        if cached:
            # Mirror-style layout: feeds/<scheme>/<domain>/<escaped-rest>/latest.xml
            feed_dir = os.path.join(export_dir, get_feed_path(feed))
            feed_dst = os.path.join(feed_dir, 'latest.xml')
            if not os.path.isdir(feed_dir):
                os.makedirs(feed_dir)
            shutil.copyfile(cached, feed_dst)
            info("Exported feed %s", feed)

            # Copy the cached icon alongside the feed, if there is one.
            icon_path = iface_cache.iface_cache.get_icon_path(iface_cache.iface_cache.get_interface(feed))
            if icon_path:
                icon_dst = os.path.join(feed_dir, 'icon.png')
                shutil.copyfile(icon_path, icon_dst)

            # Get the keys: record which keys signed this feed so their
            # public keys can be bundled too (see export_key).
            # NOTE(review): stream is not closed if check_stream raises -
            # confirm acceptable for this tool's lifetime.
            stream = file(cached)
            unused, sigs = gpg.check_stream(stream)
            stream.close()
            for x in sigs:
                if isinstance(x, gpg.ValidSig):
                    keys_used.add(x.fingerprint)
                else:
                    # Invalid/unknown signatures are reported but do not
                    # abort the export.
                    warn("Signature problem: %s" % x)
        else:
            warn("Feed not cached: %s", feed)
def get_implementation_path(impl):
    """Return the local directory containing *impl*.

    A local implementation's ID is already an absolute path; otherwise
    the implementation is located in the 0install stores (by any of its
    digests on 0launch >= 0.45, by its ID on older versions)."""
    impl_id = impl.id
    if impl_id.startswith('/'):
        return impl_id
    stores = iface_cache.iface_cache.stores
    # 0launch >= 0.45 provides lookup_any (multiple digests per impl).
    lookup_any = getattr(stores, 'lookup_any', None)
    if lookup_any is not None:
        return lookup_any(impl.digests)
    return stores.lookup(impl_id)
113 # impls is a map {digest: Implementation}. Create an exported item called
114 # "digest" with the cached implemention (even if we cached it under a different
115 # digest).
116 def export_impls(export_dir, impls):
117 implementations = os.path.join(export_dir, 'implementations')
118 for digest, impl in impls.iteritems():
119 print "Exporting implementation %s (%s %s)" % (impl, impl.feed.get_name(), impl.get_version())
120 # Store implementation
121 src = get_implementation_path(impl)
122 dst = os.path.join(implementations, digest)
123 shutil.copytree(src, dst, symlinks = True)
125 # Regenerate the manifest, because it might be for a different algorithm
126 os.chmod(dst, 0755)
127 os.unlink(os.path.join(dst, '.manifest'))
128 alg_name, required_value = digest.split('=', 1)
129 alg = manifest.algorithms[alg_name]
130 actual = manifest.add_manifest_file(dst, alg).hexdigest()
131 assert actual == required_value, "Expected digest '%s', but found '%s'" % (required_value, actual)
133 for root, dirs, files in os.walk(dst):
134 os.chmod(root, 0755)
135 os.unlink(os.path.join(dst, '.manifest'))
136 info("Exported implementation %s", impl)