1 """
2 Integration with native distribution package managers.
3 @since: 0.28
4 """
6 # Copyright (C) 2009, Thomas Leonard
7 # See the README file for details, or visit http://0install.net.
9 from zeroinstall import _
10 import os, platform, re, subprocess, sys
11 from logging import warn, info
12 from zeroinstall.injector import namespaces, model, arch
13 from zeroinstall.support import basedir
_dotted_ints = '[0-9]+(?:\.[0-9]+)*'

# This matches a version number that would be a valid Zero Install version without modification
_zeroinstall_regexp = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints, _dotted_ints)

# This matches the interesting bits of distribution version numbers
# (first bit is for Java-style 6b17 syntax)
_version_regexp = '({ints}b)?({zero})(-r{ints})?'.format(zero = _zeroinstall_regexp, ints = _dotted_ints)
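# Illustrative examples of what _version_regexp is meant to accept. The three groups are the
# optional Java-style "6b" prefix, the main version, and the optional Gentoo-style "-rN" suffix:
#   '1.2.3'    -> (None, '1.2.3', None)
#   '6b17'     -> ('6b', '17', None)
#   '1.2.3-r1' -> (None, '1.2.3', '-r1')
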
# We try to do updates atomically without locking, but we don't worry too much about
# duplicate entries or being a little out of sync with the on-disk copy.
class Cache(object):
	def __init__(self, cache_leaf, source, format):
		"""Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
		self.cache_leaf = cache_leaf
		self.source = source
		self.format = format
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)
		self.cached_for = {}	# Attributes of source when cache was created
		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load cache (%s). Flushing..."), ex)
			self.flush()
	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			warn("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp, tmp_name = tempfile.mkstemp(dir = self.cache_dir)
		data = "mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format)
		while data:
			wrote = os.write(tmp, data)
			data = data[wrote:]
		os.close(tmp)	# Don't leak the temporary file descriptor
		os.rename(tmp_name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()
	# Populate self.cache from our saved cache file.
	# Throws an exception if the cache doesn't exist or has the wrong format.
	def _load_cache(self):
		self.cache = cache = {}
		stream = file(os.path.join(self.cache_dir, self.cache_leaf))
		try:
			for line in stream:
				line = line.strip()
				if not line:
					break
				key, value = line.split('=', 1)
				if key in ('mtime', 'size', 'format'):
					self.cached_for[key] = int(value)

			self._check_valid()

			for line in stream:
				key, value = line.split('=', 1)
				cache[key] = value[:-1]
		finally:
			stream.close()
	# Check the source file hasn't changed since we created the cache
	def _check_valid(self):
		info = os.stat(self.source)
		if self.cached_for['mtime'] != int(info.st_mtime):
			raise Exception("Modification time of %s has changed" % self.source)
		if self.cached_for['size'] != info.st_size:
			raise Exception("Size of %s has changed" % self.source)
		if self.cached_for.get('format', None) != self.format:
			raise Exception("Format of cache has changed")
	def get(self, key):
		try:
			self._check_valid()
		except Exception as ex:
			info(_("Cache needs to be refreshed: %s"), ex)
			self.flush()
			return None
		else:
			return self.cache.get(key, None)

	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			stream = file(cache_path, 'a')
			try:
				stream.write('%s=%s\n' % (key, value))
			finally:
				stream.close()
		except Exception as ex:
			warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)
def try_cleanup_distro_version(version):
	"""Try to turn a distribution version string into one readable by Zero Install.
	We do this by stripping off anything we can't parse.
	@return: the part we understood, or None if we couldn't parse anything
	@rtype: str"""
	if ':' in version:
		version = version.split(':')[1]	# Skip 'epoch'
	version = version.replace('_', '-')
	match = re.match(_version_regexp, version)
	if match:
		major, version, revision = match.groups()
		if major is not None:
			version = major[:-1] + '.' + version
		if revision is None:
			return version
		else:
			return '%s-%s' % (version, revision[2:])
	return None
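# Rough examples of the cleanup (based on the regexps above; exact behaviour is defined by them):
#   try_cleanup_distro_version('1:0.3.1-1')   -> '0.3.1-1'   (epoch stripped)
#   try_cleanup_distro_version('6b17')        -> '6.17'      (Java-style prefix folded in)
#   try_cleanup_distro_version('1.2.3-r1')    -> '1.2.3-1'   (Gentoo revision kept as '-1')
#   try_cleanup_distro_version('unparseable') -> None
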
class Distribution(object):
	"""Represents a distribution with which we can integrate.
	Sub-classes should specialise this to integrate with the package managers of
	particular distributions. This base class ignores the native package manager.
	@since: 0.28
	"""

	_packagekit = None

	def get_package_info(self, package, factory):
		"""Get information about the given package.
		Add zero or more implementations using the factory (typically at most two
		will be added; the currently installed version and the latest available).
		@param package: package name (e.g. "gimp")
		@type package: str
		@param factory: function for creating new DistributionImplementation objects from IDs
		@type factory: str -> L{model.DistributionImplementation}
		"""
		return
	def get_score(self, distribution):
		"""Indicate how closely the host distribution matches this one.
		The <package-implementation> with the highest score is passed
		to L{Distribution.get_package_info}. If several elements get
		the same score, get_package_info is called for all of them.
		@param distribution: a distribution name
		@type distribution: str
		@return: an integer, or None if there is no match at all
		@rtype: int | None
		"""
		return 0
	def get_feed(self, master_feed):
		"""Generate a feed containing information about distribution packages.
		This should immediately return a feed containing an implementation for the
		package if it's already installed. Information about versions that could be
		installed using the distribution's package manager can be added asynchronously
		later (see L{fetch_candidates}).
		@param master_feed: feed containing the <package-implementation> elements
		@type master_feed: L{model.ZeroInstallFeed}
		@rtype: L{model.ZeroInstallFeed}"""

		feed = model.ZeroInstallFeed(None)
		feed.url = 'distribution:' + master_feed.url

		for item, item_attrs in master_feed.get_package_impls(self):
			package = item_attrs.get('package', None)
			if package is None:
				raise model.InvalidInterface(_("Missing 'package' attribute on %s") % item)

			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self)
				feed.implementations[id] = impl

				impl.installed = installed
				impl.metadata = item_attrs

				item_main = item_attrs.get('main', None)
				if item_main and not item_main.startswith('/'):
					raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
								item_main)
				impl.main = item_main
				impl.upstream_stability = model.packaged

				return impl

			self.get_package_info(package, factory)

		if master_feed.url == 'http://repo.roscidus.com/python/python' and all(not impl.installed for impl in feed.implementations.values()):
			# Hack: we can support Python on platforms with unsupported package managers
			# by adding the implementation of Python running us now to the list.
			python_version = '.'.join([str(v) for v in sys.version_info if isinstance(v, int)])
			impl_id = 'package:host:python:' + python_version
			assert impl_id not in feed.implementations
			impl = model.DistributionImplementation(feed, impl_id, self)
			impl.installed = True
			impl.version = model.parse_version(python_version)
			impl.main = sys.executable
			impl.upstream_stability = model.packaged
			impl.machine = host_machine	# (hopefully)
			feed.implementations[impl_id] = impl

		return feed
	def fetch_candidates(self, master_feed):
		"""Collect information about versions we could install using
		the distribution's package manager. On success, the distribution
		feed in iface_cache is updated.
		@return: a L{tasks.Blocker} if the task is in progress, or None if not"""
		if self.packagekit.available:
			package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]
			return self.packagekit.fetch_candidates(package_names)

	@property
	def packagekit(self):
		"""For use by subclasses.
		@rtype: L{packagekit.PackageKit}"""
		if not self._packagekit:
			from zeroinstall.injector import packagekit
			self._packagekit = packagekit.PackageKit()
		return self._packagekit
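# A minimal sketch of a hypothetical subclass, for illustration only (the real
# integrations are defined further down in this file):
#
#	class ExampleDistribution(Distribution):
#		def get_package_info(self, package, factory):
#			# Look the package up in the native database and register what we find
#			impl = factory('package:example:%s:1.0:%s' % (package, host_machine))
#			impl.version = model.parse_version('1.0')
#			impl.machine = host_machine
#
#		def get_score(self, disto_name):
#			return int(disto_name == 'Example')
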
class CachedDistribution(Distribution):
	"""For distributions where querying the package database is slow (e.g. requires running
	an external command), we cache the results.
	@since: 0.39
	@deprecated: use Cache instead
	"""

	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				warn(_("Failed to regenerate distribution database cache: %s"), ex)
	def _load_cache(self):
		"""Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
		Throws an exception if the cache should be (re)created."""
		stream = file(os.path.join(self.cache_dir, self.cache_leaf))

		cache_version = None
		for line in stream:
			if line == '\n':
				break
			name, value = line.split(': ')
			if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
				raise Exception(_("Modification time of package database file has changed"))
			if name == 'size' and int(value) != self._status_details.st_size:
				raise Exception(_("Size of package database file has changed"))
			if name == 'version':
				cache_version = int(value)
		else:
			raise Exception(_('Invalid cache format (bad header)'))

		if cache_version is None:
			raise Exception(_('Old cache format'))

		versions = self.versions
		for line in stream:
			package, version, zi_arch = line[:-1].split('\t')
			versionarch = (version, intern(zi_arch))
			if package not in versions:
				versions[package] = [versionarch]
			else:
				versions[package].append(versionarch)
	def _write_cache(self, cache):
		#cache.sort() 	# Might be useful later; currently we don't care
		import tempfile
		fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
					       dir = self.cache_dir)
		try:
			stream = os.fdopen(fd, 'wb')
			stream.write('version: 2\n')
			stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
			stream.write('size: %d\n' % self._status_details.st_size)
			stream.write('\n')
			for line in cache:
				stream.write(line + '\n')
			stream.close()

			os.rename(tmpname,
				  os.path.join(self.cache_dir,
					       self.cache_leaf))
		except:
			os.unlink(tmpname)
			raise
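# The on-disk cache written by _write_cache is a small header, a blank line, then one
# tab-separated record per installed package; for example (values illustrative):
#
#	version: 2
#	mtime: 1300000000
#	size: 1234567
#	<blank line>
#	gimp	2.6.10-1	x86_64
#
# Note this colon-separated header differs from the key=value format used by Cache above.
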
# Maps machine type names used in packages to their Zero Install versions
_canonical_machine = {
	'all' : '*',
	'any' : '*',
	'noarch' : '*',
	'(none)' : '*',
	'x86_64': 'x86_64',
	'amd64': 'x86_64',
	'i386': 'i386',
	'i486': 'i486',
	'i586': 'i586',
	'i686': 'i686',
	'ppc64': 'ppc64',
	'ppc': 'ppc',
}

host_machine = arch.canonicalize_machine(platform.uname()[4])
def canonical_machine(package_machine):
	machine = _canonical_machine.get(package_machine, None)
	if machine is None:
		# Safe default if we can't understand the arch
		return host_machine
	return machine
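# For example (straight from the table above):
#   canonical_machine('amd64')  -> 'x86_64'
#   canonical_machine('noarch') -> '*'
#   canonical_machine('armv7l') -> host_machine   (unknown names fall back to the host)
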
class DebianDistribution(Distribution):
	"""A dpkg-based distribution."""

	cache_leaf = 'dpkg-status.cache'

	def __init__(self, dpkg_status, pkgcache):
		self.dpkg_cache = Cache('dpkg-status.cache', dpkg_status, 2)
		self.apt_cache = {}

	def _query_installed_package(self, package):
		null = os.open('/dev/null', os.O_WRONLY)
		child = subprocess.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package],
					 stdout = subprocess.PIPE, stderr = null)
		os.close(null)
		stdout, stderr = child.communicate()
		child.wait()
		for line in stdout.split('\n'):
			if not line: continue
			version, debarch, status = line.split('\t', 2)
			if not status.endswith(' installed'): continue
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				return '%s\t%s' % (clean_version, canonical_machine(debarch.strip()))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		return '-'
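	# The dpkg-query call above emits one tab-separated line per matching package, roughly
	# of the form (example values, not real output):
	#   2.6.10-1<TAB>amd64<TAB>install ok installed
	# which the loop turns into the cached value '2.6.10-1\tx86_64' ('-' means not installed).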
	def get_package_info(self, package, factory):
		# Add any already-installed package...
		installed_cached_info = self._get_dpkg_info(package)

		if installed_cached_info != '-':
			installed_version, machine = installed_cached_info.split('\t')
			impl = factory('package:deb:%s:%s:%s' % (package, installed_version, machine))
			impl.version = model.parse_version(installed_version)
			if machine != '*':
				impl.machine = machine
		else:
			installed_version = None

		# Add any uninstalled candidates (note: only one of these two methods will add anything)

		# From PackageKit...
		self.packagekit.get_candidates(package, factory, 'package:deb')

		# From apt-cache...
		cached = self.apt_cache.get(package, None)
		if cached:
			candidate_version = cached['version']
			candidate_arch = cached['arch']
			if candidate_version and candidate_version != installed_version:
				impl = factory('package:deb:%s:%s:%s' % (package, candidate_version, candidate_arch), installed = False)
				impl.version = model.parse_version(candidate_version)
				if candidate_arch != '*':
					impl.machine = candidate_arch
				def install(handler):
					raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
							"Please install it manually using your distribution's tools and try again.") % package)
				impl.download_sources.append(model.DistributionSource(package, cached['size'], install, needs_confirmation = False))
	def get_score(self, disto_name):
		return int(disto_name == 'Debian')

	def _get_dpkg_info(self, package):
		installed_cached_info = self.dpkg_cache.get(package)
		if installed_cached_info is None:
			installed_cached_info = self._query_installed_package(package)
			self.dpkg_cache.put(package, installed_cached_info)

		return installed_cached_info
	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open('/dev/null', os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.wait()
			except Exception as ex:
				warn("'apt-cache show %s' failed: %s", package, ex)
				cached = None
			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached
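	# After fetch_candidates(), self.apt_cache maps each requested package name either to None
	# or to a small dict built from the apt-cache output, e.g. (values illustrative):
	#   self.apt_cache['gimp'] = {'version': '2.6.10-1', 'arch': 'x86_64', 'size': 4500000}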
class RPMDistribution(CachedDistribution):
	"""An RPM-based distribution."""

	cache_leaf = 'rpm-status.cache'

	def generate_cache(self):
		cache = []

		for line in os.popen("rpm -qa --qf='%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n'"):
			package, version, rpmarch = line.split('\t', 2)
			if package == 'gpg-pubkey':
				continue
			zi_arch = canonical_machine(rpmarch.strip())
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)
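	# Each line from the rpm query above looks roughly like (illustrative, not real output):
	#   gimp<TAB>2.6.10-1.fc14<TAB>x86_64
	# and becomes the cache entry 'gimp\t2.6.10-1\tx86_64' once the unparseable '.fc14'
	# suffix has been stripped by try_cleanup_distro_version().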
	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:rpm:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:rpm')

	def get_score(self, disto_name):
		return int(disto_name == 'RPM')
class SlackDistribution(Distribution):
	"""A Slack-based distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = packages_dir

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, arch, build = entry.rsplit('-', 3)
			if name == package:
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:slack:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:slack')

	def get_score(self, disto_name):
		return int(disto_name == 'Slack')
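# Slackware's package-log entries are named NAME-VERSION-ARCH-BUILD, which is what the
# rsplit('-', 3) above relies on; e.g. an entry such as 'gimp-2.6.10-x86_64-1' (illustrative)
# yields name='gimp', version='2.6.10', arch='x86_64', build='1'.
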
class GentooDistribution(Distribution):

	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		# Add installed versions...
		_version_start_reqexp = '-[0-9]'

		if package.count('/') != 1: return

		category, leafname = package.split('/')
		category_dir = os.path.join(self._pkgdir, category)
		match_prefix = leafname + '-'

		if not os.path.isdir(category_dir): return

		for filename in os.listdir(category_dir):
			if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
				name = file(os.path.join(category_dir, filename, 'PF')).readline().strip()

				match = re.search(_version_start_reqexp, name)
				if match is None:
					warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name})
					continue
				else:
					version = try_cleanup_distro_version(name[match.start() + 1:])

				if category == 'app-emulation' and name.startswith('emul-'):
					__, __, machine, __ = name.split('-', 3)
				else:
					machine, __ = file(os.path.join(category_dir, filename, 'CHOST')).readline().split('-', 1)
				machine = arch.canonicalize_machine(machine)

				impl = factory('package:gentoo:%s:%s:%s' % \
						(package, version, machine))
				impl.version = model.parse_version(version)
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:gentoo')

	def get_score(self, disto_name):
		return int(disto_name == 'Gentoo')
class PortsDistribution(Distribution):

	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		_name_version_regexp = '^(.+)-([^-]+)$'

		nameversion = re.compile(_name_version_regexp)
		for pkgname in os.listdir(self._pkgdir):
			pkgdir = os.path.join(self._pkgdir, pkgname)
			if not os.path.isdir(pkgdir): continue

			#contents = file(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

			match = nameversion.search(pkgname)
			if match is None:
				warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
				continue
			else:
				name = match.group(1)
				if name != package:
					continue
				version = try_cleanup_distro_version(match.group(2))

			machine = host_machine

			impl = factory('package:ports:%s:%s:%s' % \
					(package, version, machine))
			impl.version = model.parse_version(version)
			impl.machine = machine

	def get_score(self, disto_name):
		return int(disto_name == 'Ports')
class MacPortsDistribution(CachedDistribution):

	cache_leaf = 'macports-status.cache'

	def generate_cache(self):
		cache = []

		# for line in os.popen("port echo active"):
		for line in os.popen("port -v installed"):
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version)	# strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)
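	# For reference, 'port -v installed' lines are expected to look something like
	# (format assumed here, not taken from real output):
	#   gimp2 @2.6.10_0 (active) platform='darwin 10' archs='x86_64'
	# Only lines marked "(active)" are kept; the '@' prefix and any '+variant' suffixes
	# are stripped from the version before cleanup.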
	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:macports:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, disto_name):
		return int(disto_name == 'MacPorts')
_host_distribution = None
def get_host_distribution():
	"""Get a Distribution suitable for the host operating system.
	Calling this twice will return the same object.
	@rtype: L{Distribution}"""
	global _host_distribution
	if not _host_distribution:
		dpkg_db_status = '/var/lib/dpkg/status'
		pkgcache = '/var/cache/apt/pkgcache.bin'
		_rpm_db = '/var/lib/rpm/Packages'
		_slack_db = '/var/log/packages'
		_pkg_db = '/var/db/pkg'
		_macports_db = '/opt/local/var/macports/registry/registry.db'

		if os.path.isdir(_pkg_db):
			if sys.platform.startswith("linux"):
				_host_distribution = GentooDistribution(_pkg_db)
			elif sys.platform.startswith("freebsd"):
				_host_distribution = PortsDistribution(_pkg_db)
		elif os.path.isfile(_macports_db):
			_host_distribution = MacPortsDistribution(_macports_db)
		elif os.access(dpkg_db_status, os.R_OK):
			_host_distribution = DebianDistribution(dpkg_db_status, pkgcache)
		elif os.path.isfile(_rpm_db):
			_host_distribution = RPMDistribution(_rpm_db)
		elif os.path.isdir(_slack_db):
			_host_distribution = SlackDistribution(_slack_db)
		else:
			_host_distribution = Distribution()

	return _host_distribution
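# Typical use elsewhere in the injector (sketch): ask the singleton for the host's integration
# and let it build a distribution feed for a master feed's <package-implementation> elements:
#
#	distro = get_host_distribution()
#	distro_feed = distro.get_feed(master_feed)	# master_feed: a model.ZeroInstallFeed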