# zeroinstall/injector/distro.py
1 """
2 Integration with native distribution package managers.
3 @since: 0.28
4 """
6 # Copyright (C) 2009, Thomas Leonard
7 # See the README file for details, or visit http://0install.net.
9 from zeroinstall import _
10 import os, platform, re, subprocess, sys
11 from logging import warn, info
12 from zeroinstall.injector import namespaces, model, arch
13 from zeroinstall.support import basedir

_dotted_ints = r'[0-9]+(?:\.[0-9]+)*'

# This matches a version number that would be a valid Zero Install version without modification
_zeroinstall_regexp = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints, _dotted_ints)

# This matches the interesting bits of distribution version numbers
# (the first group is for Java-style "6b17" syntax)
_version_regexp = '({ints}b)?({zero})(-r{ints})?'.format(zero = _zeroinstall_regexp, ints = _dotted_ints)

# We try to do updates atomically, without locking, but we don't worry too much about
# duplicate entries or being a little out of sync with the on-disk copy.
class Cache(object):
    def __init__(self, cache_leaf, source, format):
        """Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
        If the size or mtime of $source has changed, or the cache
        format version is different, reset the cache first."""
        self.cache_leaf = cache_leaf
        self.source = source
        self.format = format
        self.cache_dir = basedir.save_cache_path(namespaces.config_site,
                                                 namespaces.config_prog)
        self.cached_for = {}        # Attributes of source when cache was created
        try:
            self._load_cache()
        except Exception as ex:
            info(_("Failed to load cache (%s). Flushing..."), ex)
            self.flush()

    def flush(self):
        # Wipe the cache
        try:
            info = os.stat(self.source)
            mtime = int(info.st_mtime)
            size = info.st_size
        except Exception as ex:
            warn("Failed to stat %s: %s", self.source, ex)
            mtime = size = 0
        self.cache = {}
        import tempfile
        tmp, tmp_name = tempfile.mkstemp(dir = self.cache_dir)
        data = "mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format)
        while data:
            wrote = os.write(tmp, data)
            data = data[wrote:]
        os.close(tmp)        # Release the descriptor before renaming the file into place
        os.rename(tmp_name, os.path.join(self.cache_dir, self.cache_leaf))

        self._load_cache()

    # Populate self.cache from our saved cache file.
    # Throws an exception if the cache doesn't exist or has the wrong format.
    def _load_cache(self):
        self.cache = cache = {}
        stream = open(os.path.join(self.cache_dir, self.cache_leaf))
        try:
            for line in stream:
                line = line.strip()
                if not line:
                    break
                key, value = line.split('=', 1)
                if key in ('mtime', 'size', 'format'):
                    self.cached_for[key] = int(value)

            self._check_valid()

            for line in stream:
                key, value = line.split('=', 1)
                cache[key] = value[:-1]
        finally:
            stream.close()

    # Check the source file hasn't changed since we created the cache
    def _check_valid(self):
        info = os.stat(self.source)
        if self.cached_for['mtime'] != int(info.st_mtime):
            raise Exception("Modification time of %s has changed" % self.source)
        if self.cached_for['size'] != info.st_size:
            raise Exception("Size of %s has changed" % self.source)
        if self.cached_for.get('format', None) != self.format:
            raise Exception("Format of cache has changed")

    def get(self, key):
        try:
            self._check_valid()
        except Exception as ex:
            info(_("Cache needs to be refreshed: %s"), ex)
            self.flush()
            return None
        else:
            return self.cache.get(key, None)

    def put(self, key, value):
        cache_path = os.path.join(self.cache_dir, self.cache_leaf)
        self.cache[key] = value
        try:
            stream = open(cache_path, 'a')
            try:
                stream.write('%s=%s\n' % (key, value))
            finally:
                stream.close()
        except Exception as ex:
            warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)

def try_cleanup_distro_version(version):
    """Try to turn a distribution version string into one readable by Zero Install.
    We do this by stripping off anything we can't parse.
    @return: the part we understood, or None if we couldn't parse anything
    @rtype: str"""
    if ':' in version:
        version = version.split(':')[1]        # Skip 'epoch'
    version = version.replace('_', '-')
    match = re.match(_version_regexp, version)
    if match:
        major, version, revision = match.groups()
        if major is not None:
            version = major[:-1] + '.' + version
        if revision is None:
            return version
        else:
            return '%s-%s' % (version, revision[2:])
    return None
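
# Illustrative examples of the cleanup above (not test fixtures from the project; the
# expected results follow from the regexps defined at the top of this module):
#
#   try_cleanup_distro_version('1:2.6.0_rc1-r3')  ->  '2.6.0-rc1-3'   (epoch and '-rN' revision handled)
#   try_cleanup_distro_version('6b17')            ->  '6.17'          (Java-style prefix)
#   try_cleanup_distro_version('not-a-version')   ->  None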

class Distribution(object):
    """Represents a distribution with which we can integrate.
    Sub-classes should specialise this to integrate with the package managers of
    particular distributions. This base class ignores the native package manager.
    @since: 0.28
    """

    _packagekit = None

    def get_package_info(self, package, factory):
        """Get information about the given package.
        Add zero or more implementations using the factory (typically at most two
        will be added; the currently installed version and the latest available).
        @param package: package name (e.g. "gimp")
        @type package: str
        @param factory: function for creating new DistributionImplementation objects from IDs
        @type factory: str -> L{model.DistributionImplementation}
        """
        return

    def get_score(self, distribution):
        """Indicate how closely the host distribution matches this one.
        The <package-implementation> with the highest score is passed
        to L{Distribution.get_package_info}. If several elements get
        the same score, get_package_info is called for all of them.
        @param distribution: a distribution name
        @type distribution: str
        @return: an integer, or None if there is no match at all
        @rtype: int | None
        """
        return 0

    def get_feed(self, master_feed):
        """Generate a feed containing information about distribution packages.
        This should immediately return a feed containing an implementation for the
        package if it's already installed. Information about versions that could be
        installed using the distribution's package manager can be added asynchronously
        later (see L{fetch_candidates}).
        @param master_feed: feed containing the <package-implementation> elements
        @type master_feed: L{model.ZeroInstallFeed}
        @rtype: L{model.ZeroInstallFeed}"""

        feed = model.ZeroInstallFeed(None)
        feed.url = 'distribution:' + master_feed.url

        for item, item_attrs in master_feed.get_package_impls(self):
            package = item_attrs.get('package', None)
            if package is None:
                raise model.InvalidInterface(_("Missing 'package' attribute on %s") % item)

            def factory(id, only_if_missing = False, installed = True):
                assert id.startswith('package:')
                if id in feed.implementations:
                    if only_if_missing:
                        return None
                    warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
                impl = model.DistributionImplementation(feed, id, self)
                feed.implementations[id] = impl

                impl.installed = installed
                impl.metadata = item_attrs

                item_main = item_attrs.get('main', None)
                if item_main and not item_main.startswith('/'):
                    raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
                                                 item_main)
                impl.main = item_main
                impl.upstream_stability = model.packaged

                return impl

            self.get_package_info(package, factory)

        if master_feed.url == 'http://repo.roscidus.com/python/python' and all(not impl.installed for impl in feed.implementations.values()):
            # Hack: we can support Python on platforms with unsupported package managers
            # by adding the Python interpreter that is running us now to the list.
            python_version = '.'.join([str(v) for v in sys.version_info if isinstance(v, int)])
            impl_id = 'package:host:python:' + python_version
            assert impl_id not in feed.implementations
            impl = model.DistributionImplementation(feed, impl_id, self)
            impl.installed = True
            impl.version = model.parse_version(python_version)
            impl.main = sys.executable
            impl.upstream_stability = model.packaged
            impl.machine = host_machine        # (hopefully)
            feed.implementations[impl_id] = impl

        return feed

    def fetch_candidates(self, master_feed):
        """Collect information about versions we could install using
        the distribution's package manager. On success, the distribution
        feed in iface_cache is updated.
        @return: a L{tasks.Blocker} if the task is in progress, or None if not"""
        if self.packagekit.available:
            package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]
            return self.packagekit.fetch_candidates(package_names)

    @property
    def packagekit(self):
        """For use by subclasses.
        @rtype: L{packagekit.PackageKit}"""
        if not self._packagekit:
            from zeroinstall.injector import packagekit
            self._packagekit = packagekit.PackageKit()
        return self._packagekit

class CachedDistribution(Distribution):
    """For distributions where querying the package database is slow (e.g. requires running
    an external command), we cache the results.
    @since: 0.39
    @deprecated: use Cache instead
    """

    def __init__(self, db_status_file):
        """@param db_status_file: update the cache when the timestamp of this file changes"""
        self._status_details = os.stat(db_status_file)

        self.versions = {}
        self.cache_dir = basedir.save_cache_path(namespaces.config_site,
                                                 namespaces.config_prog)

        try:
            self._load_cache()
        except Exception as ex:
            info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
            try:
                self.generate_cache()
                self._load_cache()
            except Exception as ex:
                warn(_("Failed to regenerate distribution database cache: %s"), ex)

    def _load_cache(self):
        """Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
        Throws an exception if the cache should be (re)created."""
        stream = open(os.path.join(self.cache_dir, self.cache_leaf))

        cache_version = None
        for line in stream:
            if line == '\n':
                break
            name, value = line.split(': ')
            if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
                raise Exception(_("Modification time of package database file has changed"))
            if name == 'size' and int(value) != self._status_details.st_size:
                raise Exception(_("Size of package database file has changed"))
            if name == 'version':
                cache_version = int(value)
        else:
            # For-else: we never saw the blank line that terminates the header
            raise Exception(_('Invalid cache format (bad header)'))

        if cache_version is None:
            raise Exception(_('Old cache format'))

        versions = self.versions
        for line in stream:
            package, version, zi_arch = line[:-1].split('\t')
            versionarch = (version, intern(zi_arch))
            if package not in versions:
                versions[package] = [versionarch]
            else:
                versions[package].append(versionarch)

    def _write_cache(self, cache):
        #cache.sort()        # Might be useful later; currently we don't care
        import tempfile
        fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
                                       dir = self.cache_dir)
        try:
            stream = os.fdopen(fd, 'wb')
            stream.write('version: 2\n')
            stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
            stream.write('size: %d\n' % self._status_details.st_size)
            stream.write('\n')
            for line in cache:
                stream.write(line + '\n')
            stream.close()

            os.rename(tmpname,
                      os.path.join(self.cache_dir,
                                   self.cache_leaf))
        except:
            os.unlink(tmpname)
            raise

# Maps machine type names used in packages to their Zero Install versions
_canonical_machine = {
    'all' : '*',
    'any' : '*',
    'noarch' : '*',
    '(none)' : '*',
    'x86_64': 'x86_64',
    'amd64': 'x86_64',
    'i386': 'i386',
    'i486': 'i486',
    'i586': 'i586',
    'i686': 'i686',
    'ppc64': 'ppc64',
    'ppc': 'ppc',
}

host_machine = arch.canonicalize_machine(platform.uname()[4])

def canonical_machine(package_machine):
    machine = _canonical_machine.get(package_machine, None)
    if machine is None:
        # Safe default if we can't understand the arch
        return host_machine
    return machine
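
# Illustrative examples (the fallback value depends on the host running this code):
#   canonical_machine('amd64')   ->  'x86_64'
#   canonical_machine('noarch')  ->  '*'
#   canonical_machine('armv7l')  ->  host_machine (not in the table above, so we fall back)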

class DebianDistribution(Distribution):
    """A dpkg-based distribution."""

    cache_leaf = 'dpkg-status.cache'

    def __init__(self, dpkg_status, pkgcache):
        self.dpkg_cache = Cache('dpkg-status.cache', dpkg_status, 2)
        self.apt_cache = {}

    def _query_installed_package(self, package):
        null = os.open('/dev/null', os.O_WRONLY)
        child = subprocess.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package],
                                 stdout = subprocess.PIPE, stderr = null)
        os.close(null)
        stdout, stderr = child.communicate()
        child.wait()
        for line in stdout.split('\n'):
            if not line: continue
            version, debarch, status = line.split('\t', 2)
            if not status.endswith(' installed'): continue
            clean_version = try_cleanup_distro_version(version)
            if clean_version:
                return '%s\t%s' % (clean_version, canonical_machine(debarch.strip()))
            else:
                warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

        return '-'
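
    # Illustrative example (values made up): for an installed package, dpkg-query prints
    # a tab-separated line such as "2.8.2-1\tamd64\tinstall ok installed", which the
    # method above turns into the cached value "2.8.2-1\tx86_64"; "-" is cached when the
    # package is not installed at all.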

    def get_package_info(self, package, factory):
        # Add any already-installed package...
        installed_cached_info = self._get_dpkg_info(package)

        if installed_cached_info != '-':
            installed_version, machine = installed_cached_info.split('\t')
            impl = factory('package:deb:%s:%s:%s' % (package, installed_version, machine))
            impl.version = model.parse_version(installed_version)
            if machine != '*':
                impl.machine = machine
        else:
            installed_version = None

        # Add any uninstalled candidates (note: only one of these two methods will add anything)

        # From PackageKit...
        self.packagekit.get_candidates(package, factory, 'package:deb')

        # From apt-cache...
        cached = self.apt_cache.get(package, None)
        if cached:
            candidate_version = cached['version']
            candidate_arch = cached['arch']
            if candidate_version and candidate_version != installed_version:
                impl = factory('package:deb:%s:%s:%s' % (package, candidate_version, candidate_arch), installed = False)
                impl.version = model.parse_version(candidate_version)
                if candidate_arch != '*':
                    impl.machine = candidate_arch
                def install(handler):
                    raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
                                                "Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
                                                "use that to install it.") % package)
                impl.download_sources.append(model.DistributionSource(package, cached['size'], install, needs_confirmation = False))

    def get_score(self, disto_name):
        return int(disto_name == 'Debian')

    def _get_dpkg_info(self, package):
        installed_cached_info = self.dpkg_cache.get(package)
        if installed_cached_info is None:
            installed_cached_info = self._query_installed_package(package)
            self.dpkg_cache.put(package, installed_cached_info)

        return installed_cached_info

    def fetch_candidates(self, master_feed):
        package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]

        if self.packagekit.available:
            return self.packagekit.fetch_candidates(package_names)

        # No PackageKit. Use apt-cache directly.
        for package in package_names:
            # Check to see whether we could get a newer version using apt-get
            try:
                null = os.open('/dev/null', os.O_WRONLY)
                child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null)
                os.close(null)

                arch = version = size = None
                for line in child.stdout:
                    line = line.strip()
                    if line.startswith('Version: '):
                        version = line[9:]
                        version = try_cleanup_distro_version(version)
                    elif line.startswith('Architecture: '):
                        arch = canonical_machine(line[14:].strip())
                    elif line.startswith('Size: '):
                        size = int(line[6:].strip())
                if version and arch:
                    cached = {'version': version, 'arch': arch, 'size': size}
                else:
                    cached = None
                child.wait()
            except Exception as ex:
                warn("'apt-cache show %s' failed: %s", package, ex)
                cached = None

            # (multi-arch support? can there be multiple candidates?)
            self.apt_cache[package] = cached
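
# Illustrative sketch (values made up): for each package, the loop above keeps only the
# "Version:", "Architecture:" and "Size:" fields of the "apt-cache show" record, e.g.
# "Version: 2.8.2-1", "Architecture: amd64" and "Size: 4263682" would be stored as
# self.apt_cache['gimp'] = {'version': '2.8.2-1', 'arch': 'x86_64', 'size': 4263682}.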

class RPMDistribution(CachedDistribution):
    """An RPM-based distribution."""

    cache_leaf = 'rpm-status.cache'

    def generate_cache(self):
        cache = []

        for line in os.popen("rpm -qa --qf='%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n'"):
            package, version, rpmarch = line.split('\t', 2)
            if package == 'gpg-pubkey':
                continue
            zi_arch = canonical_machine(rpmarch.strip())
            clean_version = try_cleanup_distro_version(version)
            if clean_version:
                cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
            else:
                warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

        self._write_cache(cache)
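
    # Illustrative example (made-up package): with the --qf format above, rpm prints
    # tab-separated lines such as "make\t3.82-7.fc16\tx86_64", which become cache
    # entries like "make\t3.82-7\tx86_64" (try_cleanup_distro_version drops the
    # ".fc16" suffix it cannot parse).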

    def get_package_info(self, package, factory):
        # Add installed versions...
        versions = self.versions.get(package, [])

        for version, machine in versions:
            impl = factory('package:rpm:%s:%s:%s' % (package, version, machine))
            impl.version = model.parse_version(version)
            if machine != '*':
                impl.machine = machine

        # Add any uninstalled candidates found by PackageKit
        self.packagekit.get_candidates(package, factory, 'package:rpm')

    def get_score(self, disto_name):
        return int(disto_name == 'RPM')

class SlackDistribution(Distribution):
    """A Slack-based distribution."""

    def __init__(self, packages_dir):
        self._packages_dir = packages_dir

    def get_package_info(self, package, factory):
        # Add installed versions...
        for entry in os.listdir(self._packages_dir):
            name, version, arch, build = entry.rsplit('-', 3)
            if name == package:
                zi_arch = canonical_machine(arch)
                clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
                if not clean_version:
                    warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
                    continue

                impl = factory('package:slack:%s:%s:%s' %
                               (package, clean_version, zi_arch))
                impl.version = model.parse_version(clean_version)
                if zi_arch != '*':
                    impl.machine = zi_arch

        # Add any uninstalled candidates found by PackageKit
        self.packagekit.get_candidates(package, factory, 'package:slack')

    def get_score(self, disto_name):
        return int(disto_name == 'Slack')
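
# Illustrative example (made-up entry): a package database entry named
# "gimp-2.6.11-i486-1" is split into name 'gimp', version '2.6.11', arch 'i486' and
# build '1', giving the implementation id 'package:slack:gimp:2.6.11-1:i486'.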

class GentooDistribution(Distribution):
    def __init__(self, pkgdir):
        self._pkgdir = pkgdir

    def get_package_info(self, package, factory):
        # Add installed versions...
        _version_start_regexp = '-[0-9]'

        if package.count('/') != 1: return

        category, leafname = package.split('/')
        category_dir = os.path.join(self._pkgdir, category)
        match_prefix = leafname + '-'

        if not os.path.isdir(category_dir): return

        for filename in os.listdir(category_dir):
            if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
                name = open(os.path.join(category_dir, filename, 'PF')).readline().strip()

                match = re.search(_version_start_regexp, name)
                if match is None:
                    warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name})
                    continue
                else:
                    version = try_cleanup_distro_version(name[match.start() + 1:])

                if category == 'app-emulation' and name.startswith('emul-'):
                    __, __, machine, __ = name.split('-', 3)
                else:
                    machine, __ = open(os.path.join(category_dir, filename, 'CHOST')).readline().split('-', 1)
                machine = arch.canonicalize_machine(machine)

                impl = factory('package:gentoo:%s:%s:%s' %
                               (package, version, machine))
                impl.version = model.parse_version(version)
                impl.machine = machine

        # Add any uninstalled candidates found by PackageKit
        self.packagekit.get_candidates(package, factory, 'package:gentoo')

    def get_score(self, disto_name):
        return int(disto_name == 'Gentoo')
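
# Illustrative sketch of the layout GentooDistribution reads (paths are examples): each
# installed package has a directory such as /var/db/pkg/app-editors/vim-7.3.189/ whose
# PF file contains the full package name ('vim-7.3.189') and whose CHOST file contains a
# target triple such as 'x86_64-pc-linux-gnu', giving the implementation id
# 'package:gentoo:app-editors/vim:7.3.189:x86_64'.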

class PortsDistribution(Distribution):
    def __init__(self, pkgdir):
        self._pkgdir = pkgdir

    def get_package_info(self, package, factory):
        _name_version_regexp = '^(.+)-([^-]+)$'

        nameversion = re.compile(_name_version_regexp)
        for pkgname in os.listdir(self._pkgdir):
            pkgdir = os.path.join(self._pkgdir, pkgname)
            if not os.path.isdir(pkgdir): continue

            #contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

            match = nameversion.search(pkgname)
            if match is None:
                warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
                continue
            else:
                name = match.group(1)
                if name != package:
                    continue
                version = try_cleanup_distro_version(match.group(2))

            machine = host_machine

            impl = factory('package:ports:%s:%s:%s' %
                           (package, version, machine))
            impl.version = model.parse_version(version)
            impl.machine = machine

    def get_score(self, disto_name):
        return int(disto_name == 'Ports')
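
# Illustrative example (hypothetical entry): a package database directory named
# "libxml2-2.7.8_1" matches the regexp above as name 'libxml2' and version '2.7.8_1',
# which try_cleanup_distro_version turns into '2.7.8-1'; the machine is assumed to be
# the host's.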

class MacPortsDistribution(CachedDistribution):
    cache_leaf = 'macports-status.cache'

    def generate_cache(self):
        cache = []

        # for line in os.popen("port echo active"):
        for line in os.popen("port -v installed"):
            if not line.startswith(" "):
                continue
            if line.strip().count(" ") > 1:
                package, version, extra = line.split(None, 2)
            else:
                package, version = line.split()
                extra = ""
            if not extra.startswith("(active)"):
                continue
            version = version.lstrip('@')
            version = re.sub(r"\+.*", "", version)        # strip variants
            zi_arch = '*'
            clean_version = try_cleanup_distro_version(version)
            if clean_version:
                match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
                if match:
                    platform, major, archs = match.groups()
                    for arch in archs.split():
                        zi_arch = canonical_machine(arch)
                        cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
                else:
                    cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
            else:
                warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

        self._write_cache(cache)
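
    # Illustrative sketch (line format assumed from the parsing above, values made up):
    # "port -v installed" is expected to emit indented lines such as
    #     zlib @1.2.8_0 (active) platform='darwin 13' archs='x86_64'
    # from which the loop caches package "zlib" with version "1.2.8-0" (the leading "@"
    # and any "+variant" suffix are stripped, and "_" becomes "-" during cleanup).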

    def get_package_info(self, package, factory):
        # Add installed versions...
        versions = self.versions.get(package, [])

        for version, machine in versions:
            impl = factory('package:macports:%s:%s:%s' % (package, version, machine))
            impl.version = model.parse_version(version)
            if machine != '*':
                impl.machine = machine

    def get_score(self, disto_name):
        return int(disto_name == 'MacPorts')

_host_distribution = None
def get_host_distribution():
    """Get a Distribution suitable for the host operating system.
    Calling this twice will return the same object.
    @rtype: L{Distribution}"""
    global _host_distribution
    if not _host_distribution:
        dpkg_db_status = '/var/lib/dpkg/status'
        pkgcache = '/var/cache/apt/pkgcache.bin'
        _rpm_db = '/var/lib/rpm/Packages'
        _slack_db = '/var/log/packages'
        _pkg_db = '/var/db/pkg'
        _macports_db = '/opt/local/var/macports/registry/registry.db'

        if os.path.isdir(_pkg_db):
            if sys.platform.startswith("linux"):
                _host_distribution = GentooDistribution(_pkg_db)
            elif sys.platform.startswith("freebsd"):
                _host_distribution = PortsDistribution(_pkg_db)
        elif os.path.isfile(_macports_db):
            _host_distribution = MacPortsDistribution(_macports_db)
        elif os.access(dpkg_db_status, os.R_OK):
            _host_distribution = DebianDistribution(dpkg_db_status, pkgcache)
        elif os.path.isfile(_rpm_db):
            _host_distribution = RPMDistribution(_rpm_db)
        elif os.path.isdir(_slack_db):
            _host_distribution = SlackDistribution(_slack_db)
        else:
            _host_distribution = Distribution()

    return _host_distribution
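
# Minimal usage sketch (illustrative only, not part of the library): select the
# Distribution implementation for this host and report which one was chosen.
if __name__ == '__main__':
    distro = get_host_distribution()
    print("Host distribution handler: %s" % distro.__class__.__name__)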