1 # Copyright (C) 2006, Thomas Leonard
2 # See the README file for details, or visit http://0install.net.
import hashlib
import os
import shutil
from logging import debug, info, warn
from tempfile import mkdtemp, mkstemp

from zeroinstall import SafeException
from zeroinstall.injector import basedir
class BadDigest(SafeException):
	"""Raised when a digest string is malformed, or doesn't match the data."""
class NotStored(SafeException):
	"""Raised when a requested implementation digest is in none of the stores."""
def copytree2(src, dst):
	"""Recursively copy the contents of directory 'src' into directory 'dst'.

	Unlike shutil.copytree, 'dst' must already exist. Symlinks are
	recreated (not followed), directory mtimes are preserved, and
	regular files are copied with shutil.copy2 (which preserves
	permissions and timestamps).
	"""
	names = os.listdir(src)
	assert os.path.isdir(dst)
	for name in names:
		srcname = os.path.join(src, name)
		dstname = os.path.join(dst, name)
		if os.path.islink(srcname):
			linkto = os.readlink(srcname)
			os.symlink(linkto, dstname)
		elif os.path.isdir(srcname):
			os.mkdir(dstname)
			# Record the source mtime before recursing, and restore it
			# afterwards: creating entries inside dstname would otherwise
			# bump the directory's own mtime.
			mtime = os.lstat(srcname).st_mtime
			copytree2(srcname, dstname)
			os.utime(dstname, (mtime, mtime))
		else:
			shutil.copy2(srcname, dstname)
def __init__(self, dir):
	"""Create a Store for the implementation cache rooted at 'dir'.

	The directory need not exist yet; it is created on first add.
	"""
	# self.dir is read by lookup() and check_manifest_and_rename().
	self.dir = dir
def lookup(self, digest):
	"""Return the path of the cached implementation for 'digest', or None.

	'digest' must be in ALG=VALUE form with a hexadecimal value;
	malformed digests raise AssertionError/ValueError.
	"""
	alg, value = digest.split('=', 1)
	assert '/' not in value
	int(value, 16)	# Check valid format (must be hex)
	# 'path' rather than 'dir' to avoid shadowing the builtin.
	path = os.path.join(self.dir, digest)
	if os.path.isdir(path):
		return path
	return None
def add_archive_to_cache(self, required_digest, data, url, extract = None):
	"""Unpack the archive stream 'data' and store it as 'required_digest'.

	'url' is passed to the unpacker (presumably to choose the archive
	format from its name — confirm against unpack.unpack_archive).
	'extract', if given, names the subdirectory of the archive to use.
	Does nothing if the digest is already cached.
	"""
	import unpack	# NOTE(review): sibling project module; import location not visible in this view — confirm
	assert required_digest.startswith('sha1=')
	info("Caching new implementation (digest %s)", required_digest)

	if self.lookup(required_digest):
		info("Not adding %s as it already exists!", required_digest)
		return

	if not os.path.isdir(self.dir):
		os.makedirs(self.dir)
	tmp = mkdtemp(dir = self.dir, prefix = 'tmp-')

	unpack.unpack_archive(url, data, tmp, extract)

	try:
		self.check_manifest_and_rename(required_digest, tmp, extract)
	except Exception:
		# Keep the extracted tree around so the user can inspect
		# why verification failed; the exception still propagates.
		warn("Leaving extracted directory as %s", tmp)
		raise
def add_dir_to_cache(self, required_digest, path):
	"""Copy directory 'path' into the cache as 'required_digest'.

	The tree is copied to a temporary directory inside the store,
	verified against the digest, then renamed into place. On failure
	the temporary copy is deleted and the exception propagates.
	Does nothing if the digest is already cached.
	"""
	if self.lookup(required_digest):
		info("Not adding %s as it already exists!", required_digest)
		return

	if not os.path.isdir(self.dir):
		os.makedirs(self.dir)
	tmp = mkdtemp(dir = self.dir, prefix = 'tmp-')
	copytree2(path, tmp)	# NOTE(review): copy step not visible in this view — confirm against upstream
	try:
		self.check_manifest_and_rename(required_digest, tmp)
	except Exception:
		warn("Error importing directory.")
		warn("Deleting %s", tmp)
		shutil.rmtree(tmp)
		raise
def check_manifest_and_rename(self, required_digest, tmp, extract = None):
	"""Verify that 'tmp' matches 'required_digest', then move it into the store.

	If 'extract' is given, only that subdirectory of 'tmp' is verified
	and moved (and 'tmp' itself is removed afterwards).
	Raises BadDigest if the computed manifest digest does not match,
	and Exception if the target already exists in the store.
	"""
	if extract:
		extracted = os.path.join(tmp, extract)
		if not os.path.isdir(extracted):
			raise Exception('Directory %s not found in archive' % extract)
	else:
		extracted = tmp

	import manifest		# NOTE(review): sibling project module; import location not visible in this view — confirm
	# hashlib.sha1() replaces the removed Python 2 'sha.new()'; same algorithm.
	sha1 = 'sha1=' + manifest.add_manifest_file(extracted, hashlib.sha1()).hexdigest()
	if sha1 != required_digest:
		raise BadDigest('Incorrect manifest -- archive is corrupted.\n'
				'Required digest: %s\n'
				'Actual digest: %s\n' %
				(required_digest, sha1))

	final_name = os.path.join(self.dir, required_digest)
	if os.path.isdir(final_name):
		raise Exception("Item %s already stored." % final_name)
	if extract:
		os.rename(os.path.join(tmp, extract), final_name)
		os.rmdir(tmp)	# tmp should now be empty — TODO confirm
	else:
		os.rename(tmp, final_name)
class Stores(object):
	"""An ordered list of implementation Stores, searched in turn.

	The first store (under the user's XDG cache directory) is the one
	new implementations are added to; any extra system stores listed in
	the 'implementation-dirs' config file are search-only.
	"""
	__slots__ = ['stores']

	def __init__(self):
		# The user's own cache always comes first and receives additions.
		user_store = os.path.join(basedir.xdg_cache_home, '0install.net', 'implementations')
		self.stores = [Store(user_store)]

		impl_dirs = basedir.load_first_config('0install.net', 'injector',
						      'implementation-dirs')
		debug("Location of 'implementation-dirs' config file being used: '%s'", impl_dirs)
		if impl_dirs:
			# open() instead of the Python-2-only file() builtin; read the
			# lines eagerly and close the handle rather than leaking it.
			stream = open(impl_dirs)
			try:
				dirs = stream.readlines()
			finally:
				stream.close()
		else:
			dirs = ['/var/cache/0install.net/implementations']
		for directory in dirs:
			directory = directory.strip()
			# Blank lines and '#' comments are ignored.
			if directory and not directory.startswith('#'):
				if os.path.isdir(directory):
					self.stores.append(Store(directory))
					debug("Added system store '%s'", directory)
				else:
					info("Ignoring non-directory store '%s'", directory)

	def lookup(self, digest):
		"""Search for digest in all stores; return its path.

		Raises BadDigest for a syntactically invalid digest and
		NotStored if no store contains it.
		"""
		if '/' in digest or '=' not in digest:
			raise BadDigest('Syntax error in digest (use ALG=VALUE)')
		for store in self.stores:
			path = store.lookup(digest)
			if path:
				return path
		raise NotStored("Item with digest '%s' not found in stores. Searched:\n- %s" %
			(digest, '\n- '.join([s.dir for s in self.stores])))

	def add_dir_to_cache(self, required_digest, dir):
		"""Add a directory to the user store (see Store.add_dir_to_cache)."""
		self.stores[0].add_dir_to_cache(required_digest, dir)

	def add_archive_to_cache(self, required_digest, data, url, extract = None):
		"""Unpack an archive into the user store (see Store.add_archive_to_cache)."""
		self.stores[0].add_archive_to_cache(required_digest, data, url, extract)