"""
Integration with native distribution package managers.
@since: 0.28
"""

# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

from zeroinstall import _
import os, platform, re, subprocess, sys
from logging import warn, info
from zeroinstall.injector import namespaces, model, arch
from zeroinstall.support import basedir

_dotted_ints = '[0-9]+(?:\.[0-9]+)*'

# This matches a version number that would be a valid Zero Install version without modification
_zeroinstall_regexp = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints, _dotted_ints)

# This matches the interesting bits of distribution version numbers
# (first matching group is for Java-style 6b17 syntax, or "major")
_version_regexp = '(?:[a-z])?({ints}b)?({zero})(-r{ints})?'.format(zero = _zeroinstall_regexp, ints = _dotted_ints)
# We try to do updates atomically without locking, but we don't worry too much about
# duplicate entries or being a little out of sync with the on-disk copy.
class Cache(object):
	def __init__(self, cache_leaf, source, format):
		"""Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
		self.cache_leaf = cache_leaf
		self.source = source
		self.format = format
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)
		self.cached_for = {}		# Attributes of source when cache was created
		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load cache (%s). Flushing..."), ex)
			self.flush()

	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			warn("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp, tmp_name = tempfile.mkstemp(dir = self.cache_dir)
		data = "mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format)
		while data:
			wrote = os.write(tmp, data)
			data = data[wrote:]
		os.close(tmp)
		os.rename(tmp_name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()

	# Populate self.cache from our saved cache file.
	# Throws an exception if the cache doesn't exist or has the wrong format.
	def _load_cache(self):
		self.cache = cache = {}
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			for line in stream:
				line = line.strip()
				if not line:
					break
				key, value = line.split('=', 1)
				if key in ('mtime', 'size', 'format'):
					self.cached_for[key] = int(value)

			self._check_valid()

			for line in stream:
				key, value = line.split('=', 1)
				cache[key] = value[:-1]

	# Check the source file hasn't changed since we created the cache
	def _check_valid(self):
		info = os.stat(self.source)
		if self.cached_for['mtime'] != int(info.st_mtime):
			raise Exception("Modification time of %s has changed" % self.source)
		if self.cached_for['size'] != info.st_size:
			raise Exception("Size of %s has changed" % self.source)
		if self.cached_for.get('format', None) != self.format:
			raise Exception("Format of cache has changed")

	def get(self, key):
		try:
			self._check_valid()
		except Exception as ex:
			info(_("Cache needs to be refreshed: %s"), ex)
			self.flush()
			return None
		else:
			return self.cache.get(key, None)

	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			with open(cache_path, 'a') as stream:
				stream.write('%s=%s\n' % (key, value))
		except Exception as ex:
			warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)
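# Illustrative sketch (not part of the original file): given the flush() and put() formats
# above, a cache file such as ~/.cache/0install.net/injector/dpkg-status.cache consists of a
# header block, a blank line, then one appended key=value line per query, e.g.:
#
#	mtime=1310392000
#	size=2593744
#	format=2
#
#	gimp=2.6.11-1	x86_64
#	libfoo=-
#
# ('-' is the value DebianDistribution caches for "not installed"; the numbers are made up.)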
def try_cleanup_distro_version(version):
	"""Try to turn a distribution version string into one readable by Zero Install.
	We do this by stripping off anything we can't parse.
	@return: the part we understood, or None if we couldn't parse anything
	@rtype: str"""
	if ':' in version:
		version = version.split(':')[1]	# Skip 'epoch'
	version = version.replace('_', '-')
	match = re.match(_version_regexp, version)
	if match:
		major, version, revision = match.groups()
		if major is not None:
			version = major[:-1] + '.' + version
		if revision is None:
			return version
		else:
			return '%s-%s' % (version, revision[2:])
	return None
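# Illustrative examples of the cleanup above (not part of the original file):
#	try_cleanup_distro_version('1:2.20.3-1ubuntu1') == '2.20.3-1'	# epoch and packaging suffix dropped
#	try_cleanup_distro_version('6b17') == '6.17'			# Java-style "major" syntax
#	try_cleanup_distro_version('1.2.3-r1') == '1.2.3-1'		# Gentoo-style revision
#	try_cleanup_distro_version('garbage') is None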
class Distribution(object):
	"""Represents a distribution with which we can integrate.
	Sub-classes should specialise this to integrate with the package managers of
	particular distributions. This base class ignores the native package manager.
	@since: 0.28
	"""

	_packagekit = None

	def get_package_info(self, package, factory):
		"""Get information about the given package.
		Add zero or more implementations using the factory (typically at most two
		will be added: the currently installed version and the latest available).
		@param package: package name (e.g. "gimp")
		@type package: str
		@param factory: function for creating new DistributionImplementation objects from IDs
		@type factory: str -> L{model.DistributionImplementation}
		"""
		return

	def get_score(self, distribution):
		"""Indicate how closely the host distribution matches this one.
		The <package-implementation> with the highest score is passed
		to L{Distribution.get_package_info}. If several elements get
		the same score, get_package_info is called for all of them.
		@param distribution: a distribution name
		@type distribution: str
		@return: an integer, or None if there is no match at all
		@rtype: int | None
		"""
		return 0

	def get_feed(self, master_feed):
		"""Generate a feed containing information about distribution packages.
		This should immediately return a feed containing an implementation for the
		package if it's already installed. Information about versions that could be
		installed using the distribution's package manager can be added asynchronously
		later (see L{fetch_candidates}).
		@param master_feed: feed containing the <package-implementation> elements
		@type master_feed: L{model.ZeroInstallFeed}
		@rtype: L{model.ZeroInstallFeed}"""

		feed = model.ZeroInstallFeed(None)
		feed.url = 'distribution:' + master_feed.url

		for item, item_attrs in master_feed.get_package_impls(self):
			package = item_attrs.get('package', None)
			if package is None:
				raise model.InvalidInterface(_("Missing 'package' attribute on %s") % item)

			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self, item)
				feed.implementations[id] = impl

				impl.installed = installed
				impl.metadata = item_attrs

				if 'run' not in impl.commands:
					item_main = item_attrs.get('main', None)
					if item_main:
						if item_main.startswith('/'):
							impl.main = item_main
						else:
							raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
											item_main)
				impl.upstream_stability = model.packaged

				return impl

			self.get_package_info(package, factory)

		if master_feed.url == 'http://repo.roscidus.com/python/python' and all(not impl.installed for impl in feed.implementations.values()):
			# Hack: we can support Python on platforms with unsupported package managers
			# by adding the implementation of Python running us now to the list.
			python_version = '.'.join([str(v) for v in sys.version_info if isinstance(v, int)])
			impl_id = 'package:host:python:' + python_version
			assert impl_id not in feed.implementations
			impl = model.DistributionImplementation(feed, impl_id, self)
			impl.installed = True
			impl.version = model.parse_version(python_version)
			impl.main = sys.executable
			impl.upstream_stability = model.packaged
			impl.machine = host_machine	# (hopefully)
			feed.implementations[impl_id] = impl

		return feed

	def fetch_candidates(self, master_feed):
		"""Collect information about versions we could install using
		the distribution's package manager. On success, the distribution
		feed in iface_cache is updated.
		@return: a L{tasks.Blocker} if the task is in progress, or None if not"""
		if self.packagekit.available:
			package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]
			return self.packagekit.fetch_candidates(package_names)

	@property
	def packagekit(self):
		"""For use by subclasses.
		@rtype: L{packagekit.PackageKit}"""
		if not self._packagekit:
			from zeroinstall.injector import packagekit
			self._packagekit = packagekit.PackageKit()
		return self._packagekit
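# Minimal sketch of a subclass (illustrative, not part of the original file): a hypothetical
# distribution that reports a single installed package through the factory protocol described
# in get_package_info() above. Real subclasses below follow the same pattern.
#
#	class ExampleDistribution(Distribution):
#		def get_package_info(self, package, factory):
#			if package == 'gimp':
#				impl = factory('package:example:gimp:2.6.11-1:x86_64')
#				impl.version = model.parse_version('2.6.11-1')
#				impl.machine = 'x86_64'
#
#		def get_score(self, distro_name):
#			return int(distro_name == 'Example')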
class WindowsDistribution(Distribution):
	def get_package_info(self, package, factory):
		def _is_64bit_windows():
			p = sys.platform
			from win32process import IsWow64Process
			if p == 'win64' or (p == 'win32' and IsWow64Process()): return True
			elif p == 'win32': return False
			else: raise Exception(_("WindowsDistribution may only be used on the Windows platform"))

		def _read_hklm_reg(key_name, value_name):
			from win32api import RegOpenKeyEx, RegQueryValueEx, RegCloseKey
			from win32con import HKEY_LOCAL_MACHINE, KEY_READ
			KEY_WOW64_64KEY = 0x0100
			KEY_WOW64_32KEY = 0x0200
			if _is_64bit_windows():
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_32KEY)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				try:
					key64 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_64KEY)
					(value64, _) = RegQueryValueEx(key64, value_name)
					RegCloseKey(key64)
				except:
					value64 = ''
			else:
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				value64 = ''
			return (value32, value64)

		if package == 'openjdk-6-jre':
			(java32_home, java64_home) = _read_hklm_reg(r"SOFTWARE\JavaSoft\Java Runtime Environment\1.6", "JavaHome")

			if os.path.isfile(java32_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'i486'))
				impl.machine = 'i486'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java32_home + r"\bin\java.exe"

			if os.path.isfile(java64_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'x86_64'))
				impl.machine = 'x86_64'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java64_home + r"\bin\java.exe"

		if package == 'openjdk-6-jdk':
			(java32_home, java64_home) = _read_hklm_reg(r"SOFTWARE\JavaSoft\Java Development Kit\1.6", "JavaHome")

			if os.path.isfile(java32_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'i486'))
				impl.machine = 'i486'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java32_home + r"\bin\java.exe"

			if os.path.isfile(java64_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'x86_64'))
				impl.machine = 'x86_64'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java64_home + r"\bin\java.exe"
class CachedDistribution(Distribution):
	"""For distributions where querying the package database is slow (e.g. requires running
	an external command), we cache the results.
	@since: 0.39
	@deprecated: use Cache instead
	"""

	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				warn(_("Failed to regenerate distribution database cache: %s"), ex)

	def _load_cache(self):
		"""Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
		Throws an exception if the cache should be (re)created."""
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			cache_version = None
			for line in stream:
				if line == '\n':
					break
				name, value = line.split(': ')
				if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
					raise Exception(_("Modification time of package database file has changed"))
				if name == 'size' and int(value) != self._status_details.st_size:
					raise Exception(_("Size of package database file has changed"))
				if name == 'version':
					cache_version = int(value)
			else:
				# for/else: we never hit the blank line terminating the header
				raise Exception(_('Invalid cache format (bad header)'))

			if cache_version is None:
				raise Exception(_('Old cache format'))

			versions = self.versions
			for line in stream:
				package, version, zi_arch = line[:-1].split('\t')
				versionarch = (version, intern(zi_arch))
				if package not in versions:
					versions[package] = [versionarch]
				else:
					versions[package].append(versionarch)

	def _write_cache(self, cache):
		#cache.sort()	# Might be useful later; currently we don't care
		import tempfile
		fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
					       dir = self.cache_dir)
		try:
			stream = os.fdopen(fd, 'wb')
			stream.write('version: 2\n')
			stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
			stream.write('size: %d\n' % self._status_details.st_size)
			stream.write('\n')
			for line in cache:
				stream.write(line + '\n')
			stream.close()

			os.rename(tmpname,
				  os.path.join(self.cache_dir,
					       self.cache_leaf))
		except:
			os.unlink(tmpname)
			raise
# Maps machine type names used in packages to their Zero Install versions
_canonical_machine = {
	'all' : '*',
	'any' : '*',
	'noarch' : '*',
	'(none)' : '*',
	'x86_64': 'x86_64',
	'amd64': 'x86_64',
	'i386': 'i386',
	'i486': 'i486',
	'i586': 'i586',
	'i686': 'i686',
	'ppc64': 'ppc64',
	'ppc': 'ppc',
}

host_machine = arch.canonicalize_machine(platform.uname()[4])
def canonical_machine(package_machine):
	machine = _canonical_machine.get(package_machine, None)
	if machine is None:
		# Safe default if we can't understand the arch
		return host_machine
	return machine
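# For example (illustrative, not part of the original file):
#	canonical_machine('amd64') == 'x86_64'
#	canonical_machine('noarch') == '*'
#	canonical_machine('armv7l') == host_machine	# unknown names fall back to the host's machine type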
class DebianDistribution(Distribution):
	"""A dpkg-based distribution."""

	cache_leaf = 'dpkg-status.cache'

	def __init__(self, dpkg_status):
		self.dpkg_cache = Cache('dpkg-status.cache', dpkg_status, 2)
		self.apt_cache = {}

	def _query_installed_package(self, package):
		null = os.open(os.devnull, os.O_WRONLY)
		child = subprocess.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package],
						stdout = subprocess.PIPE, stderr = null)
		os.close(null)
		stdout, stderr = child.communicate()
		child.wait()
		for line in stdout.split('\n'):
			if not line: continue
			version, debarch, status = line.split('\t', 2)
			if not status.endswith(' installed'): continue
			clean_version = try_cleanup_distro_version(version)
			if debarch.find("-") != -1:
				debarch = debarch.split("-")[-1]
			if clean_version:
				return '%s\t%s' % (clean_version, canonical_machine(debarch.strip()))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		return '-'
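	# Illustrative sketch (not part of the original file): a dpkg-query output line such as
	#	2.20.3-1ubuntu1	amd64	install ok installed
	# is reduced by the loop above to the cached string '2.20.3-1\tx86_64', while packages
	# that are not installed (or whose versions can't be parsed) yield '-'.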
	def get_package_info(self, package, factory):
		# Add any already-installed package...
		installed_cached_info = self._get_dpkg_info(package)

		if installed_cached_info != '-':
			installed_version, machine = installed_cached_info.split('\t')
			impl = factory('package:deb:%s:%s:%s' % (package, installed_version, machine))
			impl.version = model.parse_version(installed_version)
			if machine != '*':
				impl.machine = machine
		else:
			installed_version = None

		# Add any uninstalled candidates (note: only one of these two methods will add anything)

		# From PackageKit...
		self.packagekit.get_candidates(package, factory, 'package:deb')

		# From apt-cache...
		cached = self.apt_cache.get(package, None)
		if cached:
			candidate_version = cached['version']
			candidate_arch = cached['arch']
			if candidate_version and candidate_version != installed_version:
				impl = factory('package:deb:%s:%s:%s' % (package, candidate_version, candidate_arch), installed = False)
				impl.version = model.parse_version(candidate_version)
				if candidate_arch != '*':
					impl.machine = candidate_arch
				def install(handler):
					raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
							"Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
							"use that to install it.") % package)
				impl.download_sources.append(model.DistributionSource(package, cached['size'], install, needs_confirmation = False))

	def get_score(self, disto_name):
		return int(disto_name == 'Debian')

	def _get_dpkg_info(self, package):
		installed_cached_info = self.dpkg_cache.get(package)
		if installed_cached_info is None:
			installed_cached_info = self._query_installed_package(package)
			self.dpkg_cache.put(package, installed_cached_info)

		return installed_cached_info

	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open(os.devnull, os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.wait()
			except Exception as ex:
				warn("'apt-cache show %s' failed: %s", package, ex)
				cached = None

			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached
class RPMDistribution(CachedDistribution):
	"""An RPM-based distribution."""

	cache_leaf = 'rpm-status.cache'

	def generate_cache(self):
		cache = []

		for line in os.popen("rpm -qa --qf='%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n'"):
			package, version, rpmarch = line.split('\t', 2)
			if package == 'gpg-pubkey':
				continue
			zi_arch = canonical_machine(rpmarch.strip())
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:rpm:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:rpm')

	def get_score(self, disto_name):
		return int(disto_name == 'RPM')
class SlackDistribution(Distribution):
	"""A Slack-based distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = packages_dir

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, arch, build = entry.rsplit('-', 3)
			if name == package:
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:slack:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:slack')

	def get_score(self, disto_name):
		return int(disto_name == 'Slack')
class ArchDistribution(Distribution):
	"""An Arch Linux distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = os.path.join(packages_dir, "local")

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, build = entry.rsplit('-', 2)
			if name == package:
				gotarch = False
				for line in open(os.path.join(self._packages_dir, entry, "desc")):
					if line == "%ARCH%\n":
						gotarch = True
						continue
					if gotarch:
						arch = line.strip()
						break
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:arch:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:arch')

	def get_score(self, disto_name):
		return int(disto_name == 'Arch')
class GentooDistribution(Distribution):

	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		# Add installed versions...
		_version_start_reqexp = '-[0-9]'

		if package.count('/') != 1: return

		category, leafname = package.split('/')
		category_dir = os.path.join(self._pkgdir, category)
		match_prefix = leafname + '-'

		if not os.path.isdir(category_dir): return

		for filename in os.listdir(category_dir):
			if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
				name = open(os.path.join(category_dir, filename, 'PF')).readline().strip()

				match = re.search(_version_start_reqexp, name)
				if match is None:
					warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name})
					continue
				else:
					version = try_cleanup_distro_version(name[match.start() + 1:])

				if category == 'app-emulation' and name.startswith('emul-'):
					__, __, machine, __ = name.split('-', 3)
				else:
					machine, __ = open(os.path.join(category_dir, filename, 'CHOST')).readline().split('-', 1)
				machine = arch.canonicalize_machine(machine)

				impl = factory('package:gentoo:%s:%s:%s' % \
						(package, version, machine))
				impl.version = model.parse_version(version)
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:gentoo')

	def get_score(self, disto_name):
		return int(disto_name == 'Gentoo')
class PortsDistribution(Distribution):

	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		_name_version_regexp = '^(.+)-([^-]+)$'

		nameversion = re.compile(_name_version_regexp)
		for pkgname in os.listdir(self._pkgdir):
			pkgdir = os.path.join(self._pkgdir, pkgname)
			if not os.path.isdir(pkgdir): continue

			#contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

			match = nameversion.search(pkgname)
			if match is None:
				warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
				continue
			else:
				name = match.group(1)
				if name != package:
					continue
				version = try_cleanup_distro_version(match.group(2))

			machine = host_machine

			impl = factory('package:ports:%s:%s:%s' % \
					(package, version, machine))
			impl.version = model.parse_version(version)
			impl.machine = machine

	def get_score(self, disto_name):
		return int(disto_name == 'Ports')
class MacPortsDistribution(CachedDistribution):

	cache_leaf = 'macports-status.cache'

	def generate_cache(self):
		cache = []

		# for line in os.popen("port echo active"):
		for line in os.popen("port -v installed"):
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version)	# strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:macports:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, disto_name):
		return int(disto_name == 'MacPorts')
class CygwinDistribution(CachedDistribution):
	"""A Cygwin-based distribution."""

	cache_leaf = 'cygcheck-status.cache'

	def generate_cache(self):
		cache = []

		zi_arch = canonical_machine(arch)
		for line in os.popen("cygcheck -c -d"):
			if line == "Cygwin Package Information\r\n":
				continue
			if line == "\n":
				continue
			package, version = line.split()
			if package == "Package" and version == "Version":
				continue
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:cygwin:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, disto_name):
		return int(disto_name == 'Cygwin')
_host_distribution = None
def get_host_distribution():
	"""Get a Distribution suitable for the host operating system.
	Calling this twice will return the same object.
	@rtype: L{Distribution}"""
	global _host_distribution
	if not _host_distribution:
		dpkg_db_status = '/var/lib/dpkg/status'
		rpm_db_packages = '/var/lib/rpm/Packages'
		_slack_db = '/var/log/packages'
		_arch_db = '/var/lib/pacman'
		_pkg_db = '/var/db/pkg'
		_macports_db = '/opt/local/var/macports/registry/registry.db'
		_cygwin_log = '/var/log/setup.log'

		if sys.prefix == "/sw":
			dpkg_db_status = os.path.join(sys.prefix, dpkg_db_status)
			rpm_db_packages = os.path.join(sys.prefix, rpm_db_packages)

		if os.name == "nt":
			_host_distribution = WindowsDistribution()
		elif os.path.isdir(_pkg_db):
			if sys.platform.startswith("linux"):
				_host_distribution = GentooDistribution(_pkg_db)
			elif sys.platform.startswith("freebsd"):
				_host_distribution = PortsDistribution(_pkg_db)
		elif os.path.isfile(_macports_db) \
				and sys.prefix.startswith("/opt/local"):
			_host_distribution = MacPortsDistribution(_macports_db)
		elif os.path.isfile(_cygwin_log) and sys.platform == "cygwin":
			_host_distribution = CygwinDistribution(_cygwin_log)
		elif os.access(dpkg_db_status, os.R_OK) \
				and os.path.getsize(dpkg_db_status) > 0:
			_host_distribution = DebianDistribution(dpkg_db_status)
		elif os.path.isfile(rpm_db_packages):
			_host_distribution = RPMDistribution(rpm_db_packages)
		elif os.path.isdir(_slack_db):
			_host_distribution = SlackDistribution(_slack_db)
		elif os.path.isdir(_arch_db):
			_host_distribution = ArchDistribution(_arch_db)
		else:
			_host_distribution = Distribution()

	return _host_distribution
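# Typical usage (illustrative, not part of the original file):
#	from zeroinstall.injector import distro
#	host = distro.get_host_distribution()	# e.g. a DebianDistribution on Ubuntu
#	feed = host.get_feed(master_feed)	# master_feed: a model.ZeroInstallFeed with <package-implementation>s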