"""Code for managing the implementation cache."""
5 # Copyright (C) 2009, Thomas Leonard
6 # See the README file for details, or visit http://0install.net.
import os

from zeroinstall import _, logger
from zeroinstall.support import basedir
from zeroinstall import SafeException, support
class BadDigest(SafeException):
	"""Thrown if a digest is invalid (either syntactically or cryptographically)."""
class NotStored(SafeException):
	"""Thrown if a requested implementation isn't in the cache."""
class NonwritableStore(SafeException):
	"""Attempt to add to a non-writable store directory."""
24 def _copytree2(src
, dst
):
26 names
= os
.listdir(src
)
27 assert os
.path
.isdir(dst
)
29 srcname
= os
.path
.join(src
, name
)
30 dstname
= os
.path
.join(dst
, name
)
31 if os
.path
.islink(srcname
):
32 linkto
= os
.readlink(srcname
)
33 os
.symlink(linkto
, dstname
)
34 elif os
.path
.isdir(srcname
):
36 mtime
= int(os
.lstat(srcname
).st_mtime
)
37 _copytree2(srcname
, dstname
)
38 os
.utime(dstname
, (mtime
, mtime
))
40 shutil
.copy2(srcname
, dstname
)
42 def _validate_pair(value
):
45 value
.startswith('.'):
46 raise BadDigest("Invalid digest '{value}'".format(value
= value
))
def parse_algorithm_digest_pair(src):
	"""Break apart an algorithm/digest into in a tuple.
	Old algorithms use '=' as the separator, while newer ones use '_'.
	@param src: the combined string
	@type src: str
	@return: the parsed values (algorithm, digest)
	@raise BadDigest: if it can't be parsed"""
	# Legacy algorithms keep the '=' separator for compatibility
	if src.startswith(('sha1=', 'sha1new=', 'sha256=')):
		return src.split('=', 1)
	result = src.split('_', 1)
	if len(result) != 2:
		if '=' in src:
			raise BadDigest("Use '_' not '=' for new algorithms, in {src}".format(src = src))
		raise BadDigest("Can't parse digest {src}".format(src = src))
	return result
def format_algorithm_digest_pair(alg, digest):
	"""The opposite of L{parse_algorithm_digest_pair}.
	The result is suitable for use as a directory name (does not contain '/' characters).
	@param alg: the algorithm name
	@type alg: str
	@param digest: the digest value
	@type digest: str
	@return: the combined string
	@rtype: str
	@raise BadDigest: if the result is invalid"""
	if alg in ('sha1', 'sha1new', 'sha256'):
		# Legacy algorithms use '=' as the separator
		result = alg + '=' + digest
	else:
		result = alg + '_' + digest
	_validate_pair(result)
	return result
82 """A directory for storing implementations."""
84 def __init__(self
, dir, public
= False):
85 """Create a new Store.
86 @param dir: directory to contain the implementations
88 @param public: deprecated
93 return _("Store '%s'") % self
.dir
95 def lookup(self
, digest
):
96 alg
, value
= parse_algorithm_digest_pair(digest
)
97 dir = os
.path
.join(self
.dir, digest
)
98 if os
.path
.isdir(dir):
102 def get_tmp_dir_for(self
, required_digest
):
103 """Create a temporary directory in the directory where we would store an implementation
104 with the given digest. This is used to setup a new implementation before being renamed if
106 @raise NonwritableStore: if we can't create it"""
108 if not os
.path
.isdir(self
.dir):
109 os
.makedirs(self
.dir)
110 from tempfile
import mkdtemp
111 tmp
= mkdtemp(dir = self
.dir, prefix
= 'tmp-')
112 os
.chmod(tmp
, 0o755) # r-x for all; needed by 0store-helper
114 except OSError as ex
:
115 raise NonwritableStore(str(ex
))
117 def add_archive_to_cache(self
, required_digest
, data
, url
, extract
= None, type = None, start_offset
= 0, try_helper
= False):
120 if self
.lookup(required_digest
):
121 logger
.info(_("Not adding %s as it already exists!"), required_digest
)
124 tmp
= self
.get_tmp_dir_for(required_digest
)
126 unpack
.unpack_archive(url
, data
, tmp
, extract
, type = type, start_offset
= start_offset
)
133 self
.check_manifest_and_rename(required_digest
, tmp
, extract
, try_helper
= try_helper
)
135 #warn(_("Leaving extracted directory as %s"), tmp)
136 support
.ro_rmtree(tmp
)
139 def add_dir_to_cache(self
, required_digest
, path
, try_helper
= False):
140 """Copy the contents of path to the cache.
141 @param required_digest: the expected digest
142 @type required_digest: str
143 @param path: the root of the tree to copy
145 @param try_helper: attempt to use privileged helper before user cache (since 0.26)
146 @type try_helper: bool
147 @raise BadDigest: if the contents don't match the given digest."""
148 if self
.lookup(required_digest
):
149 logger
.info(_("Not adding %s as it already exists!"), required_digest
)
152 tmp
= self
.get_tmp_dir_for(required_digest
)
154 _copytree2(path
, tmp
)
155 self
.check_manifest_and_rename(required_digest
, tmp
, try_helper
= try_helper
)
157 logger
.warn(_("Error importing directory."))
158 logger
.warn(_("Deleting %s"), tmp
)
159 support
.ro_rmtree(tmp
)
162 def _add_with_helper(self
, required_digest
, path
):
163 """Use 0store-secure-add to copy 'path' to the system store.
164 @param required_digest: the digest for path
165 @type required_digest: str
166 @param path: root of implementation directory structure
168 @return: True iff the directory was copied into the system cache successfully
170 if required_digest
.startswith('sha1='):
171 return False # Old digest alg not supported
172 helper
= support
.find_in_path('0store-secure-add-helper')
174 logger
.info(_("'0store-secure-add-helper' command not found. Not adding to system cache."))
177 env
= os
.environ
.copy()
178 env
['ENV_NOT_CLEARED'] = 'Unclean' # (warn about insecure configurations)
179 env
['HOME'] = 'Unclean' # (warn about insecure configurations)
180 dev_null
= os
.open(os
.devnull
, os
.O_RDONLY
)
182 logger
.info(_("Trying to add to system cache using %s"), helper
)
183 child
= subprocess
.Popen([helper
, required_digest
],
187 exit_code
= child
.wait()
192 logger
.warn(_("0store-secure-add-helper failed."))
195 logger
.info(_("Added succcessfully."))
198 def check_manifest_and_rename(self
, required_digest
, tmp
, extract
= None, try_helper
= False):
199 """Check that tmp[/extract] has the required_digest.
200 On success, rename the checked directory to the digest, and
201 make the whole tree read-only.
202 @param try_helper: attempt to use privileged helper to import to system cache first (since 0.26)
203 @type try_helper: bool
204 @raise BadDigest: if the input directory doesn't match the given digest"""
206 extracted
= os
.path
.join(tmp
, extract
)
207 if not os
.path
.isdir(extracted
):
208 raise Exception(_('Directory %s not found in archive') % extract
)
212 from . import manifest
214 manifest
.fixup_permissions(extracted
)
216 alg
, required_value
= manifest
.splitID(required_digest
)
217 actual_digest
= alg
.getID(manifest
.add_manifest_file(extracted
, alg
))
218 if actual_digest
!= required_digest
:
219 raise BadDigest(_('Incorrect manifest -- archive is corrupted.\n'
220 'Required digest: %(required_digest)s\n'
221 'Actual digest: %(actual_digest)s\n') %
222 {'required_digest': required_digest
, 'actual_digest': actual_digest
})
225 if self
._add
_with
_helper
(required_digest
, extracted
):
226 support
.ro_rmtree(tmp
)
228 logger
.info(_("Can't add to system store. Trying user store instead."))
230 logger
.info(_("Caching new implementation (digest %s) in %s"), required_digest
, self
.dir)
232 final_name
= os
.path
.join(self
.dir, required_digest
)
233 if os
.path
.isdir(final_name
):
234 raise Exception(_("Item %s already stored.") % final_name
) # XXX: not really an error
236 # If we just want a subdirectory then the rename will change
237 # extracted/.. and so we'll need write permission on 'extracted'
239 os
.chmod(extracted
, 0o755)
240 os
.rename(extracted
, final_name
)
241 os
.chmod(final_name
, 0o555)
247 return "<store: %s>" % self
.dir
class Stores(object):
	"""A list of L{Store}s. All stores are searched when looking for an implementation.
	When storing, we use the first of the system caches (if writable), or the user's
	cache otherwise."""
	__slots__ = ['stores']

	def __init__(self):
		# The user cache is always first: it's the fallback when no system
		# store is writable.
		user_store = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
		self.stores = [Store(user_store)]

		# Optional config file listing system store directories, one per line;
		# blank lines and '#' comments are filtered out below.
		impl_dirs = basedir.load_first_config('0install.net', 'injector',
						      'implementation-dirs')
		logger.debug(_("Location of 'implementation-dirs' config file being used: '%s'"), impl_dirs)
		if impl_dirs:
			with open(impl_dirs, 'rt') as stream:
				dirs = stream.readlines()
		elif os.name == "nt":
			# Default Windows locations: per-user and all-users caches
			from win32com.shell import shell, shellcon
			localAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_LOCAL_APPDATA, 0, 0)
			commonAppData = shell.SHGetFolderPath(0, shellcon.CSIDL_COMMON_APPDATA, 0, 0)

			userCache = os.path.join(localAppData, "0install.net", "implementations")
			sharedCache = os.path.join(commonAppData, "0install.net", "implementations")
			dirs = [userCache, sharedCache]
		else:
			# Default POSIX system-wide cache
			dirs = ['/var/cache/0install.net/implementations']

		for directory in dirs:
			directory = directory.strip()
			if directory and not directory.startswith('#'):
				logger.debug(_("Added system store '%s'"), directory)
				self.stores.append(Store(directory))

	def lookup(self, digest):
		"""@deprecated: use lookup_any instead"""
		return self.lookup_any([digest])

	def lookup_any(self, digests):
		"""Search for digest in all stores.
		@param digests: digests to search for (any match is returned)
		@return: path of the matching implementation directory
		@rtype: str
		@raises NotStored: if not found"""
		path = self.lookup_maybe(digests)
		if path:
			return path
		raise NotStored(_("Item with digests '%(digests)s' not found in stores. Searched:\n- %(stores)s") %
			{'digests': digests, 'stores': '\n- '.join([s.dir for s in self.stores])})

	def lookup_maybe(self, digests):
		"""Like lookup_any, but return None if it isn't found.
		@return: path of the matching implementation directory, or None"""
		for digest in digests:
			_validate_pair(digest)
			for store in self.stores:
				path = store.lookup(digest)
				if path:
					return path
		return None

	def add_dir_to_cache(self, required_digest, dir):
		"""Add to the best writable cache.
		@see: L{Store.add_dir_to_cache}"""
		self._write_store(lambda store, **kwargs: store.add_dir_to_cache(required_digest, dir, **kwargs))

	def add_archive_to_cache(self, required_digest, data, url, extract = None, type = None, start_offset = 0):
		"""Add to the best writable cache.
		@see: L{Store.add_archive_to_cache}"""
		self._write_store(lambda store, **kwargs: store.add_archive_to_cache(required_digest,
						data, url, extract, type = type, start_offset = start_offset, **kwargs))

	def _write_store(self, fn):
		"""Call fn(first_system_store). If it's read-only, try again with the user store."""
		if len(self.stores) > 1:
			try:
				fn(self.get_first_system_store())
				return
			except NonwritableStore:
				logger.debug(_("%s not-writable. Trying helper instead."), self.get_first_system_store())
		# Fall back to the user store; try_helper may still reach the system
		# cache via the privileged helper.
		fn(self.stores[0], try_helper = True)

	def get_first_system_store(self):
		"""The first system store is the one we try writing to first.
		@rtype: L{Store}
		@raise SafeException: if no system stores have been configured"""
		try:
			return self.stores[1]
		except IndexError:
			raise SafeException(_("No system stores have been configured"))