1 """
2 Integration with native distribution package managers.
3 @since: 0.28
4 """
6 # Copyright (C) 2009, Thomas Leonard
7 # See the README file for details, or visit http://0install.net.
9 from zeroinstall import _
10 import os, platform, re, subprocess, sys
11 from logging import warn, info
12 from zeroinstall.injector import namespaces, model, arch
13 from zeroinstall.support import basedir, portable_rename
15 _dotted_ints = '[0-9]+(?:\.[0-9]+)*'
17 # This matches a version number that would be a valid Zero Install version without modification
18 _zeroinstall_regexp = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints, _dotted_ints)
20 # This matches the interesting bits of distribution version numbers
21 # (first matching group is for Java-style 6b17 syntax, or "major")
22 _version_regexp = '(?:[a-z])?({ints}b)?({zero})(-r{ints})?'.format(zero = _zeroinstall_regexp, ints = _dotted_ints)
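# For example, '6b17' yields the groups ('6b', '17', None) and '2.6.1-r1' yields
# (None, '2.6.1', '-r1'); see try_cleanup_distro_version() below for how the
# groups are recombined.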
# We try to do updates atomically without locking, but we don't worry too much about
# duplicate entries or being a little out of sync with the on-disk copy.
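# On disk the cache is a small text file: a header recording the mtime, size and
# format version of the source file, a blank line, then one "key=value" entry per
# line appended by put(). An illustrative dpkg cache might look like:
#
#	mtime=1317070522
#	size=947217
#	format=2
#
#	gimp=2.6.11-1	x86_64
#	libfoo=-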
class Cache(object):
	def __init__(self, cache_leaf, source, format):
		"""Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
		self.cache_leaf = cache_leaf
		self.source = source
		self.format = format
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)
		self.cached_for = {}		# Attributes of source when cache was created
		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load cache (%s). Flushing..."), ex)
			self.flush()
	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			warn("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp = tempfile.NamedTemporaryFile(mode = 'wt', dir = self.cache_dir, delete = False)
		tmp.write("mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format))
		tmp.close()
		portable_rename(tmp.name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()

	# Populate self.cache from our saved cache file.
	# Throws an exception if the cache doesn't exist or has the wrong format.
	def _load_cache(self):
		self.cache = cache = {}
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			for line in stream:
				line = line.strip()
				if not line:
					break
				key, value = line.split('=', 1)
				if key in ('mtime', 'size', 'format'):
					self.cached_for[key] = int(value)

			self._check_valid()

			for line in stream:
				key, value = line.split('=', 1)
				cache[key] = value[:-1]

	# Check the source file hasn't changed since we created the cache
	def _check_valid(self):
		info = os.stat(self.source)
		if self.cached_for['mtime'] != int(info.st_mtime):
			raise Exception("Modification time of %s has changed" % self.source)
		if self.cached_for['size'] != info.st_size:
			raise Exception("Size of %s has changed" % self.source)
		if self.cached_for.get('format', None) != self.format:
			raise Exception("Format of cache has changed")

	def get(self, key):
		try:
			self._check_valid()
		except Exception as ex:
			info(_("Cache needs to be refreshed: %s"), ex)
			self.flush()
			return None
		else:
			return self.cache.get(key, None)

	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			with open(cache_path, 'a') as stream:
				stream.write('%s=%s\n' % (key, value))
		except Exception as ex:
			warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)

def try_cleanup_distro_version(version):
	"""Try to turn a distribution version string into one readable by Zero Install.
	We do this by stripping off anything we can't parse.
	@return: the part we understood, or None if we couldn't parse anything
	@rtype: str"""
	if ':' in version:
		version = version.split(':')[1]	# Skip 'epoch'
	version = version.replace('_', '-')
	match = re.match(_version_regexp, version)
	if match:
		major, version, revision = match.groups()
		if major is not None:
			version = major[:-1] + '.' + version
		if revision is None:
			return version
		else:
			return '%s-%s' % (version, revision[2:])
	return None
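# A few illustrative conversions:
#   try_cleanup_distro_version('1:2.30.2-1ubuntu1') -> '2.30.2-1'   (epoch skipped, unparseable tail stripped)
#   try_cleanup_distro_version('6b17')              -> '6.17'       (Java-style major prefix)
#   try_cleanup_distro_version('1.0_beta2')         -> '1.0'        (unparseable tail stripped)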

class Distribution(object):
	"""Represents a distribution with which we can integrate.
	Sub-classes should specialise this to integrate with the package managers of
	particular distributions. This base class ignores the native package manager.
	@since: 0.28
	"""

	_packagekit = None

	def get_package_info(self, package, factory):
		"""Get information about the given package.
		Add zero or more implementations using the factory (typically at most two
		will be added; the currently installed version and the latest available).
		@param package: package name (e.g. "gimp")
		@type package: str
		@param factory: function for creating new DistributionImplementation objects from IDs
		@type factory: str -> L{model.DistributionImplementation}
		"""
		return

	def get_score(self, distribution):
		"""Indicate how closely the host distribution matches this one.
		The <package-implementation> with the highest score is passed
		to L{Distribution.get_package_info}. If several elements get
		the same score, get_package_info is called for all of them.
		@param distribution: a distribution name
		@type distribution: str
		@return: an integer, or -1 if there is no match at all
		@rtype: int
		"""
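		# For example, DebianDistribution.get_score('Debian') returns 1, while an
		# unrelated name scores 0; this generic base class scores every name 0.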
		return 0
	def get_feed(self, master_feed):
		"""Generate a feed containing information about distribution packages.
		This should immediately return a feed containing an implementation for the
		package if it's already installed. Information about versions that could be
		installed using the distribution's package manager can be added asynchronously
		later (see L{fetch_candidates}).
		@param master_feed: feed containing the <package-implementation> elements
		@type master_feed: L{model.ZeroInstallFeed}
		@rtype: L{model.ZeroInstallFeed}"""
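		# The returned feed uses a synthetic URL such as 'distribution:http://example.com/gimp.xml',
		# and each implementation ID encodes the distribution, package, version and machine,
		# e.g. 'package:deb:gimp:2.6.11-1:x86_64' (illustrative values).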
		feed = model.ZeroInstallFeed(None)
		feed.url = 'distribution:' + master_feed.url

		for item, item_attrs in master_feed.get_package_impls(self):
			package = item_attrs.get('package', None)
			if package is None:
				raise model.InvalidInterface(_("Missing 'package' attribute on %s") % item)

			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self, item)
				feed.implementations[id] = impl

				impl.installed = installed
				impl.metadata = item_attrs

				if 'run' not in impl.commands:
					item_main = item_attrs.get('main', None)
					if item_main:
						if item_main.startswith('/'):
							impl.main = item_main
						else:
							raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
											item_main)
				impl.upstream_stability = model.packaged

				return impl

			self.get_package_info(package, factory)

		if master_feed.url == 'http://repo.roscidus.com/python/python' and all(not impl.installed for impl in feed.implementations.values()):
			# Hack: we can support Python on platforms with unsupported package managers
			# by adding the implementation of Python running us now to the list.
			python_version = '.'.join([str(v) for v in sys.version_info if isinstance(v, int)])
			impl_id = 'package:host:python:' + python_version
			assert impl_id not in feed.implementations
			impl = model.DistributionImplementation(feed, impl_id, self)
			impl.installed = True
			impl.version = model.parse_version(python_version)
			impl.main = sys.executable
			impl.upstream_stability = model.packaged
			impl.machine = host_machine	# (hopefully)
			feed.implementations[impl_id] = impl

		return feed
	def fetch_candidates(self, master_feed):
		"""Collect information about versions we could install using
		the distribution's package manager. On success, the distribution
		feed in iface_cache is updated.
		@return: a L{tasks.Blocker} if the task is in progress, or None if not"""
		if self.packagekit.available:
			package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]
			return self.packagekit.fetch_candidates(package_names)

	@property
	def packagekit(self):
		"""For use by subclasses.
		@rtype: L{packagekit.PackageKit}"""
		if not self._packagekit:
			from zeroinstall.injector import packagekit
			self._packagekit = packagekit.PackageKit()
		return self._packagekit

class WindowsDistribution(Distribution):
	def get_package_info(self, package, factory):
		def _is_64bit_windows():
			p = sys.platform
			from win32process import IsWow64Process
			if p == 'win64' or (p == 'win32' and IsWow64Process()): return True
			elif p == 'win32': return False
			else: raise Exception(_("WindowsDistribution may only be used on the Windows platform"))

		def _read_hklm_reg(key_name, value_name):
			from win32api import RegOpenKeyEx, RegQueryValueEx, RegCloseKey
			from win32con import HKEY_LOCAL_MACHINE, KEY_READ
			KEY_WOW64_64KEY = 0x0100
			KEY_WOW64_32KEY = 0x0200
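			# These flags select the 64-bit and 32-bit views of the registry on
			# 64-bit Windows; they are defined here rather than imported from win32con.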
			if _is_64bit_windows():
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_32KEY)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				try:
					key64 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_64KEY)
					(value64, _) = RegQueryValueEx(key64, value_name)
					RegCloseKey(key64)
				except:
					value64 = ''
			else:
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				value64 = ''
			return (value32, value64)

		def find_java(part, win_version, zero_version):
			reg_path = r"SOFTWARE\JavaSoft\{part}\{win_version}".format(part = part, win_version = win_version)
			(java32_home, java64_home) = _read_hklm_reg(reg_path, "JavaHome")

			for (home, arch) in [(java32_home, 'i486'),
					     (java64_home, 'x86_64')]:
				if os.path.isfile(home + r"\bin\java.exe"):
					impl = factory('package:windows:%s:%s:%s' % (package, zero_version, arch))
					impl.machine = arch
					impl.version = model.parse_version(zero_version)
					impl.upstream_stability = model.packaged
					impl.main = home + r"\bin\java.exe"

		if package == 'openjdk-6-jre':
			find_java("Java Runtime Environment", "1.6", '6')
		elif package == 'openjdk-6-jdk':
			find_java("Java Development Kit", "1.6", '6')
		elif package == 'openjdk-7-jre':
			find_java("Java Runtime Environment", "1.7", '7')
		elif package == 'openjdk-7-jdk':
			find_java("Java Development Kit", "1.7", '7')
	def get_score(self, distro_name):
		return int(distro_name == 'Windows')

class CachedDistribution(Distribution):
	"""For distributions where querying the package database is slow (e.g. requires running
	an external command), we cache the results.
	@since: 0.39
	@deprecated: use Cache instead
	"""

	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				warn(_("Failed to regenerate distribution database cache: %s"), ex)
	def _load_cache(self):
		"""Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
		Throws an exception if the cache should be (re)created."""
		with open(os.path.join(self.cache_dir, self.cache_leaf), 'rt') as stream:
			cache_version = None
			for line in stream:
				if line == '\n':
					break
				name, value = line.split(': ')
				if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
					raise Exception(_("Modification time of package database file has changed"))
				if name == 'size' and int(value) != self._status_details.st_size:
					raise Exception(_("Size of package database file has changed"))
				if name == 'version':
					cache_version = int(value)
			else:
				# (for/else: we never saw the blank line that ends the header)
				raise Exception(_('Invalid cache format (bad header)'))

			if cache_version is None:
				raise Exception(_('Old cache format'))

			versions = self.versions
			for line in stream:
				package, version, zi_arch = line[:-1].split('\t')
				versionarch = (version, model.intern(zi_arch))
				if package not in versions:
					versions[package] = [versionarch]
				else:
					versions[package].append(versionarch)
	def _write_cache(self, cache):
		#cache.sort() 	# Might be useful later; currently we don't care
		import tempfile
		fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
					       dir = self.cache_dir)
		try:
			stream = os.fdopen(fd, 'wt')
			stream.write('version: 2\n')
			stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
			stream.write('size: %d\n' % self._status_details.st_size)
			stream.write('\n')
			for line in cache:
				stream.write(line + '\n')
			stream.close()

			portable_rename(tmpname,
					os.path.join(self.cache_dir,
						     self.cache_leaf))
		except:
			os.unlink(tmpname)
			raise

# Maps machine type names used in packages to their Zero Install versions
_canonical_machine = {
	'all' : '*',
	'any' : '*',
	'noarch' : '*',
	'(none)' : '*',
	'x86_64': 'x86_64',
	'amd64': 'x86_64',
	'i386': 'i386',
	'i486': 'i486',
	'i586': 'i586',
	'i686': 'i686',
	'ppc64': 'ppc64',
	'ppc': 'ppc',
}

host_machine = arch.canonicalize_machine(platform.uname()[4])
def canonical_machine(package_machine):
	machine = _canonical_machine.get(package_machine, None)
	if machine is None:
		# Safe default if we can't understand the arch
		return host_machine
	return machine
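# For example, canonical_machine('amd64') == 'x86_64' and canonical_machine('noarch') == '*',
# while an unrecognised value such as 'armv7l' falls back to host_machine.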

class DebianDistribution(Distribution):
	"""A dpkg-based distribution."""

	cache_leaf = 'dpkg-status.cache'

	def __init__(self, dpkg_status):
		self.dpkg_cache = Cache('dpkg-status.cache', dpkg_status, 2)
		self.apt_cache = {}

	def _query_installed_package(self, package):
		null = os.open(os.devnull, os.O_WRONLY)
		child = subprocess.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package],
					 stdout = subprocess.PIPE, stderr = null,
					 universal_newlines = True)	# Needed for Python 3
		os.close(null)
		stdout, stderr = child.communicate()
		child.wait()
		for line in stdout.split('\n'):
			if not line: continue
			version, debarch, status = line.split('\t', 2)
			if not status.endswith(' installed'): continue
			clean_version = try_cleanup_distro_version(version)
			if debarch.find("-") != -1:
				debarch = debarch.split("-")[-1]
			if clean_version:
				return '%s\t%s' % (clean_version, canonical_machine(debarch.strip()))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		return '-'
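	# Values stored in dpkg_cache are either "version<TAB>machine" for an installed
	# package, or "-" to record that the package is known not to be installed.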
	def get_package_info(self, package, factory):
		# Add any already-installed package...
		installed_cached_info = self._get_dpkg_info(package)

		if installed_cached_info != '-':
			installed_version, machine = installed_cached_info.split('\t')
			impl = factory('package:deb:%s:%s:%s' % (package, installed_version, machine))
			impl.version = model.parse_version(installed_version)
			if machine != '*':
				impl.machine = machine
		else:
			installed_version = None

		# Add any uninstalled candidates (note: only one of these two methods will add anything)

		# From PackageKit...
		self.packagekit.get_candidates(package, factory, 'package:deb')

		# From apt-cache...
		cached = self.apt_cache.get(package, None)
		if cached:
			candidate_version = cached['version']
			candidate_arch = cached['arch']
			if candidate_version and candidate_version != installed_version:
				impl = factory('package:deb:%s:%s:%s' % (package, candidate_version, candidate_arch), installed = False)
				impl.version = model.parse_version(candidate_version)
				if candidate_arch != '*':
					impl.machine = candidate_arch
				def install(handler):
					raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
							"Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
							"use that to install it.") % package)
				impl.download_sources.append(model.DistributionSource(package, cached['size'], install, needs_confirmation = False))
	def get_score(self, distro_name):
		return int(distro_name == 'Debian')

	def _get_dpkg_info(self, package):
		installed_cached_info = self.dpkg_cache.get(package)
		if installed_cached_info is None:
			installed_cached_info = self._query_installed_package(package)
			self.dpkg_cache.put(package, installed_cached_info)

		return installed_cached_info
	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open(os.devnull, os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null, universal_newlines = True)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.stdout.close()
				child.wait()
			except Exception as ex:
				warn("'apt-cache show %s' failed: %s", package, ex)
				cached = None

			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached

class RPMDistribution(CachedDistribution):
	"""An RPM-based distribution."""

	cache_leaf = 'rpm-status.cache'

	def generate_cache(self):
		cache = []

		child = subprocess.Popen(["rpm", "-qa", "--qf=%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n"],
					 stdout = subprocess.PIPE, universal_newlines = True)
		for line in child.stdout:
			package, version, rpmarch = line.split('\t', 2)
			if package == 'gpg-pubkey':
				continue
			zi_arch = canonical_machine(rpmarch.strip())
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)
		child.stdout.close()
		child.wait()
	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:rpm:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:rpm')

	def get_score(self, distro_name):
		return int(distro_name == 'RPM')

class SlackDistribution(Distribution):
	"""A Slack-based distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = packages_dir

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, arch, build = entry.rsplit('-', 3)
			if name == package:
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:slack:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:slack')

	def get_score(self, distro_name):
		return int(distro_name == 'Slack')

class ArchDistribution(Distribution):
	"""An Arch Linux distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = os.path.join(packages_dir, "local")

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, build = entry.rsplit('-', 2)
			if name == package:
				gotarch = False
				with open(os.path.join(self._packages_dir, entry, "desc"), 'rt') as stream:
					for line in stream:
						if line == "%ARCH%\n":
							gotarch = True
							continue
						if gotarch:
							arch = line.strip()
							break
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:arch:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:arch')

	def get_score(self, distro_name):
		return int(distro_name == 'Arch')

class GentooDistribution(Distribution):
	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		# Add installed versions...
		_version_start_regexp = '-[0-9]'

		if package.count('/') != 1: return

		category, leafname = package.split('/')
		category_dir = os.path.join(self._pkgdir, category)
		match_prefix = leafname + '-'

		if not os.path.isdir(category_dir): return

		for filename in os.listdir(category_dir):
			if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
				with open(os.path.join(category_dir, filename, 'PF'), 'rt') as stream:
					name = stream.readline().strip()

				match = re.search(_version_start_regexp, name)
				if match is None:
					warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name})
					continue
				else:
					version = try_cleanup_distro_version(name[match.start() + 1:])

				if category == 'app-emulation' and name.startswith('emul-'):
					__, __, machine, __ = name.split('-', 3)
				else:
					with open(os.path.join(category_dir, filename, 'CHOST'), 'rt') as stream:
						machine, __ = stream.readline().split('-', 1)
				machine = arch.canonicalize_machine(machine)

				impl = factory('package:gentoo:%s:%s:%s' % \
						(package, version, machine))
				impl.version = model.parse_version(version)
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:gentoo')

	def get_score(self, distro_name):
		return int(distro_name == 'Gentoo')

class PortsDistribution(Distribution):
	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		_name_version_regexp = '^(.+)-([^-]+)$'

		nameversion = re.compile(_name_version_regexp)
		for pkgname in os.listdir(self._pkgdir):
			pkgdir = os.path.join(self._pkgdir, pkgname)
			if not os.path.isdir(pkgdir): continue

			#contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

			match = nameversion.search(pkgname)
			if match is None:
				warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
				continue
			else:
				name = match.group(1)
				if name != package:
					continue
				version = try_cleanup_distro_version(match.group(2))

			machine = host_machine

			impl = factory('package:ports:%s:%s:%s' % \
					(package, version, machine))
			impl.version = model.parse_version(version)
			impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'Ports')

class MacPortsDistribution(CachedDistribution):
	cache_leaf = 'macports-status.cache'

	def generate_cache(self):
		cache = []

		child = subprocess.Popen(["port", "-v", "installed"],
					 stdout = subprocess.PIPE, universal_newlines = True)
		for line in child.stdout:
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version)	# strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})
		self._write_cache(cache)
		child.stdout.close()
		child.wait()

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:macports:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'MacPorts')

class CygwinDistribution(CachedDistribution):
	"""A Cygwin-based distribution."""

	cache_leaf = 'cygcheck-status.cache'

	def generate_cache(self):
		cache = []

		zi_arch = host_machine	# cygcheck doesn't report a per-package architecture
		for line in os.popen("cygcheck -c -d"):
			if line == "Cygwin Package Information\r\n":
				continue
			if line == "\n":
				continue
			package, version = line.split()
			if package == "Package" and version == "Version":
				continue
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:cygwin:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'Cygwin')

_host_distribution = None
def get_host_distribution():
	"""Get a Distribution suitable for the host operating system.
	Calling this twice will return the same object.
	@rtype: L{Distribution}"""
	global _host_distribution
	if not _host_distribution:
		dpkg_db_status = '/var/lib/dpkg/status'
		rpm_db_packages = '/var/lib/rpm/Packages'
		_slack_db = '/var/log/packages'
		_arch_db = '/var/lib/pacman'
		_pkg_db = '/var/db/pkg'
		_macports_db = '/opt/local/var/macports/registry/registry.db'
		_cygwin_log = '/var/log/setup.log'

		if sys.prefix == "/sw":
			dpkg_db_status = os.path.join(sys.prefix, dpkg_db_status)
			rpm_db_packages = os.path.join(sys.prefix, rpm_db_packages)
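		# Probe for the host's package manager; the more specific checks come first.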
		if os.name == "nt":
			_host_distribution = WindowsDistribution()
		elif os.path.isdir(_pkg_db):
			if sys.platform.startswith("linux"):
				_host_distribution = GentooDistribution(_pkg_db)
			elif sys.platform.startswith("freebsd"):
				_host_distribution = PortsDistribution(_pkg_db)
		elif os.path.isfile(_macports_db) \
				and sys.prefix.startswith("/opt/local"):
			_host_distribution = MacPortsDistribution(_macports_db)
		elif os.path.isfile(_cygwin_log) and sys.platform == "cygwin":
			_host_distribution = CygwinDistribution(_cygwin_log)
		elif os.access(dpkg_db_status, os.R_OK) \
				and os.path.getsize(dpkg_db_status) > 0:
			_host_distribution = DebianDistribution(dpkg_db_status)
		elif os.path.isfile(rpm_db_packages):
			_host_distribution = RPMDistribution(rpm_db_packages)
		elif os.path.isdir(_slack_db):
			_host_distribution = SlackDistribution(_slack_db)
		elif os.path.isdir(_arch_db):
			_host_distribution = ArchDistribution(_arch_db)
		else:
			_host_distribution = Distribution()

	return _host_distribution