1 """
2 Integration with native distribution package managers.
3 @since: 0.28
4 """
6 # Copyright (C) 2009, Thomas Leonard
7 # See the README file for details, or visit http://0install.net.
9 from zeroinstall import _
10 import os, platform, re, subprocess, sys
11 from logging import warn, info
12 from zeroinstall.injector import namespaces, model, arch
13 from zeroinstall.support import basedir
15 _dotted_ints = '[0-9]+(?:\.[0-9]+)*'
17 # This matches a version number that would be a valid Zero Install version without modification
18 _zeroinstall_regexp = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints, _dotted_ints)
20 # This matches the interesting bits of distribution version numbers
21 # (first bit is for Java-style 6b17 syntax)
22 _version_regexp = '({ints}b)?({zero})(-r{ints})?'.format(zero = _zeroinstall_regexp, ints = _dotted_ints)
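
# Illustrative examples (not part of the original module) of how _version_regexp
# groups the strings that try_cleanup_distro_version() relies on below:
#   '1.2.3'    -> major=None, version='1.2.3', revision=None
#   '6b17'     -> major='6b', version='17',    revision=None   (Java-style build number)
#   '1.2.3-r1' -> major=None, version='1.2.3', revision='-r1'  (Gentoo-style revision)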

# We try to do updates atomically without locking, but we don't worry too much about
# duplicate entries or being a little out of sync with the on-disk copy.
class Cache(object):
	def __init__(self, cache_leaf, source, format):
		"""Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
		self.cache_leaf = cache_leaf
		self.source = source
		self.format = format
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)
		self.cached_for = {}		# Attributes of source when cache was created
		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load cache (%s). Flushing..."), ex)
			self.flush()

	def flush(self):
		# Wipe the cache
		try:
			info = os.stat(self.source)
			mtime = int(info.st_mtime)
			size = info.st_size
		except Exception as ex:
			warn("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp, tmp_name = tempfile.mkstemp(dir = self.cache_dir)
		data = "mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format)
		while data:
			wrote = os.write(tmp, data)
			data = data[wrote:]
		os.close(tmp)
		os.rename(tmp_name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()

	# Populate self.cache from our saved cache file.
	# Throws an exception if the cache doesn't exist or has the wrong format.
	def _load_cache(self):
		self.cache = cache = {}
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			for line in stream:
				line = line.strip()
				if not line:
					break
				key, value = line.split('=', 1)
				if key in ('mtime', 'size', 'format'):
					self.cached_for[key] = int(value)

			self._check_valid()

			for line in stream:
				key, value = line.split('=', 1)
				cache[key] = value[:-1]

	# Check the source file hasn't changed since we created the cache
	def _check_valid(self):
		info = os.stat(self.source)
		if self.cached_for['mtime'] != int(info.st_mtime):
			raise Exception("Modification time of %s has changed" % self.source)
		if self.cached_for['size'] != info.st_size:
			raise Exception("Size of %s has changed" % self.source)
		if self.cached_for.get('format', None) != self.format:
			raise Exception("Format of cache has changed")

	def get(self, key):
		try:
			self._check_valid()
		except Exception as ex:
			info(_("Cache needs to be refreshed: %s"), ex)
			self.flush()
			return None
		else:
			return self.cache.get(key, None)

	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			with open(cache_path, 'a') as stream:
				stream.write('%s=%s\n' % (key, value))
		except Exception as ex:
			warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)
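
# Minimal usage sketch (illustrative only; the file and key names here are made up):
#   cache = Cache('example-status.cache', '/var/lib/example/status', 1)
#   if cache.get('some-package') is None:
#       cache.put('some-package', '1.0\t*')
# Entries are appended to the cache file, and the whole cache is discarded whenever
# the mtime, size or format recorded in its header no longer matches the source file.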

def try_cleanup_distro_version(version):
	"""Try to turn a distribution version string into one readable by Zero Install.
	We do this by stripping off anything we can't parse.
	@return: the part we understood, or None if we couldn't parse anything
	@rtype: str"""
	if ':' in version:
		version = version.split(':')[1]	# Skip 'epoch'
	version = version.replace('_', '-')
	match = re.match(_version_regexp, version)
	if match:
		major, version, revision = match.groups()
		if major is not None:
			version = major[:-1] + '.' + version
		if revision is None:
			return version
		else:
			return '%s-%s' % (version, revision[2:])
	return None
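
# Illustrative examples (not in the original source), given the regexps above:
#   try_cleanup_distro_version('1:2.6.3-1ubuntu1')  # -> '2.6.3-1' (epoch and suffix dropped)
#   try_cleanup_distro_version('6b17')              # -> '6.17'    (Java-style build number)
#   try_cleanup_distro_version('1.2.3-r1')          # -> '1.2.3-1' (Gentoo-style revision)
#   try_cleanup_distro_version('not-a-version')     # -> None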

class Distribution(object):
	"""Represents a distribution with which we can integrate.
	Sub-classes should specialise this to integrate with the package managers of
	particular distributions. This base class ignores the native package manager.
	@since: 0.28
	"""

	_packagekit = None

	def get_package_info(self, package, factory):
		"""Get information about the given package.
		Add zero or more implementations using the factory (typically at most two
		will be added; the currently installed version and the latest available).
		@param package: package name (e.g. "gimp")
		@type package: str
		@param factory: function for creating new DistributionImplementation objects from IDs
		@type factory: str -> L{model.DistributionImplementation}
		"""
		return

	def get_score(self, distribution):
		"""Indicate how closely the host distribution matches this one.
		The <package-implementation> with the highest score is passed
		to L{Distribution.get_package_info}. If several elements get
		the same score, get_package_info is called for all of them.
		@param distribution: a distribution name
		@type distribution: str
		@return: an integer, or None if there is no match at all
		@rtype: int | None
		"""
		return 0

	def get_feed(self, master_feed):
		"""Generate a feed containing information about distribution packages.
		This should immediately return a feed containing an implementation for the
		package if it's already installed. Information about versions that could be
		installed using the distribution's package manager can be added asynchronously
		later (see L{fetch_candidates}).
		@param master_feed: feed containing the <package-implementation> elements
		@type master_feed: L{model.ZeroInstallFeed}
		@rtype: L{model.ZeroInstallFeed}"""

		feed = model.ZeroInstallFeed(None)
		feed.url = 'distribution:' + master_feed.url

		for item, item_attrs in master_feed.get_package_impls(self):
			package = item_attrs.get('package', None)
			if package is None:
				raise model.InvalidInterface(_("Missing 'package' attribute on %s") % item)

			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self, item)
				feed.implementations[id] = impl

				impl.installed = installed
				impl.metadata = item_attrs

				item_main = item_attrs.get('main', None)
				if item_main:
					if item_main.startswith('/'):
						impl.main = item_main
					else:
						raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
										item_main)
				impl.upstream_stability = model.packaged

				return impl

			self.get_package_info(package, factory)

		if master_feed.url == 'http://repo.roscidus.com/python/python' and all(not impl.installed for impl in feed.implementations.values()):
			# Hack: we can support Python on platforms with unsupported package managers
			# by adding the implementation of Python running us now to the list.
			python_version = '.'.join([str(v) for v in sys.version_info if isinstance(v, int)])
			impl_id = 'package:host:python:' + python_version
			assert impl_id not in feed.implementations
			impl = model.DistributionImplementation(feed, impl_id, self)
			impl.installed = True
			impl.version = model.parse_version(python_version)
			impl.main = sys.executable
			impl.upstream_stability = model.packaged
			impl.machine = host_machine	# (hopefully)
			feed.implementations[impl_id] = impl

		return feed

	def fetch_candidates(self, master_feed):
		"""Collect information about versions we could install using
		the distribution's package manager. On success, the distribution
		feed in iface_cache is updated.
		@return: a L{tasks.Blocker} if the task is in progress, or None if not"""
		if self.packagekit.available:
			package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]
			return self.packagekit.fetch_candidates(package_names)

	@property
	def packagekit(self):
		"""For use by subclasses.
		@rtype: L{packagekit.PackageKit}"""
		if not self._packagekit:
			from zeroinstall.injector import packagekit
			self._packagekit = packagekit.PackageKit()
		return self._packagekit
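
# Minimal usage sketch (illustrative; assumes a parsed feed with <package-implementation>
# elements is already available as 'master_feed'):
#   distro = get_host_distribution()
#   distro_feed = distro.get_feed(master_feed)
#   for impl in distro_feed.implementations.values():
#       print(impl.id, impl.version, impl.installed)
# All implementation IDs start with 'package:', and the generated feed's URL is the
# master feed's URL prefixed with 'distribution:'.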

class WindowsDistribution(Distribution):
	def get_package_info(self, package, factory):
		def _is_64bit_windows():
			p = sys.platform
			from win32process import IsWow64Process
			if p == 'win64' or (p == 'win32' and IsWow64Process()): return True
			elif p == 'win32': return False
			else: raise Exception(_("WindowsDistribution may only be used on the Windows platform"))

		def _read_hklm_reg(key_name, value_name):
			from win32api import RegOpenKeyEx, RegQueryValueEx, RegCloseKey
			from win32con import HKEY_LOCAL_MACHINE, KEY_READ
			KEY_WOW64_64KEY = 0x0100
			KEY_WOW64_32KEY = 0x0200
			if _is_64bit_windows():
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_32KEY)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				try:
					key64 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_64KEY)
					(value64, _) = RegQueryValueEx(key64, value_name)
					RegCloseKey(key64)
				except:
					value64 = ''
			else:
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				value64 = ''
			return (value32, value64)

		if package == 'openjdk-6-jre':
			(java32_home, java64_home) = _read_hklm_reg(r"SOFTWARE\JavaSoft\Java Runtime Environment\1.6", "JavaHome")

			if os.path.isfile(java32_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'i486'))
				impl.machine = 'i486'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java32_home + r"\bin\java.exe"

			if os.path.isfile(java64_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'x86_64'))
				impl.machine = 'x86_64'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java64_home + r"\bin\java.exe"

		if package == 'openjdk-6-jdk':
			(java32_home, java64_home) = _read_hklm_reg(r"SOFTWARE\JavaSoft\Java Development Kit\1.6", "JavaHome")

			if os.path.isfile(java32_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'i486'))
				impl.machine = 'i486'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java32_home + r"\bin\java.exe"

			if os.path.isfile(java64_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'x86_64'))
				impl.machine = 'x86_64'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java64_home + r"\bin\java.exe"

class CachedDistribution(Distribution):
	"""For distributions where querying the package database is slow (e.g. requires running
	an external command), we cache the results.
	@since: 0.39
	@deprecated: use Cache instead
	"""

	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				warn(_("Failed to regenerate distribution database cache: %s"), ex)

	def _load_cache(self):
		"""Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
		Throws an exception if the cache should be (re)created."""
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			cache_version = None
			for line in stream:
				if line == '\n':
					break
				name, value = line.split(': ')
				if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
					raise Exception(_("Modification time of package database file has changed"))
				if name == 'size' and int(value) != self._status_details.st_size:
					raise Exception(_("Size of package database file has changed"))
				if name == 'version':
					cache_version = int(value)
			else:
				raise Exception(_('Invalid cache format (bad header)'))

			if cache_version is None:
				raise Exception(_('Old cache format'))

			versions = self.versions
			for line in stream:
				package, version, zi_arch = line[:-1].split('\t')
				versionarch = (version, intern(zi_arch))
				if package not in versions:
					versions[package] = [versionarch]
				else:
					versions[package].append(versionarch)

	def _write_cache(self, cache):
		#cache.sort()	# Might be useful later; currently we don't care
		import tempfile
		fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
					       dir = self.cache_dir)
		try:
			stream = os.fdopen(fd, 'wb')
			stream.write('version: 2\n')
			stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
			stream.write('size: %d\n' % self._status_details.st_size)
			stream.write('\n')
			for line in cache:
				stream.write(line + '\n')
			stream.close()

			os.rename(tmpname,
				  os.path.join(self.cache_dir,
					       self.cache_leaf))
		except:
			os.unlink(tmpname)
			raise

# Maps machine type names used in packages to their Zero Install versions
_canonical_machine = {
	'all' : '*',
	'any' : '*',
	'noarch' : '*',
	'(none)' : '*',
	'x86_64': 'x86_64',
	'amd64': 'x86_64',
	'i386': 'i386',
	'i486': 'i486',
	'i586': 'i586',
	'i686': 'i686',
	'ppc64': 'ppc64',
	'ppc': 'ppc',
}

host_machine = arch.canonicalize_machine(platform.uname()[4])
def canonical_machine(package_machine):
	machine = _canonical_machine.get(package_machine, None)
	if machine is None:
		# Safe default if we can't understand the arch
		return host_machine
	return machine
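
# Illustrative examples (not in the original source):
#   canonical_machine('amd64')   # -> 'x86_64'
#   canonical_machine('noarch')  # -> '*' (architecture-independent)
#   canonical_machine('armhf')   # -> host_machine (unknown names fall back to the host)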

class DebianDistribution(Distribution):
	"""A dpkg-based distribution."""

	cache_leaf = 'dpkg-status.cache'

	def __init__(self, dpkg_status):
		self.dpkg_cache = Cache('dpkg-status.cache', dpkg_status, 2)
		self.apt_cache = {}

	def _query_installed_package(self, package):
		null = os.open('/dev/null', os.O_WRONLY)
		child = subprocess.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package],
						stdout = subprocess.PIPE, stderr = null)
		os.close(null)
		stdout, stderr = child.communicate()
		child.wait()
		for line in stdout.split('\n'):
			if not line: continue
			version, debarch, status = line.split('\t', 2)
			if not status.endswith(' installed'): continue
			clean_version = try_cleanup_distro_version(version)
			if debarch.find("-") != -1:
				debarch = debarch.split("-")[-1]
			if clean_version:
				return '%s\t%s' % (clean_version, canonical_machine(debarch.strip()))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		return '-'
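
	# Illustrative example (not in the original source) of what _query_installed_package
	# turns a dpkg-query line into, given the --showformat used above:
	#   "1:6b27-1.12.6-1ubuntu0.12.04.2\tamd64\tinstall ok installed"
	#   -> version cleaned to '6.27-1.12.6-1', architecture mapped to 'x86_64',
	#      returned as '6.27-1.12.6-1\tx86_64'
	# '-' is returned (and cached) when the package is not installed.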

	def get_package_info(self, package, factory):
		# Add any already-installed package...
		installed_cached_info = self._get_dpkg_info(package)

		if installed_cached_info != '-':
			installed_version, machine = installed_cached_info.split('\t')
			impl = factory('package:deb:%s:%s:%s' % (package, installed_version, machine))
			impl.version = model.parse_version(installed_version)
			if machine != '*':
				impl.machine = machine
		else:
			installed_version = None

		# Add any uninstalled candidates (note: only one of these two methods will add anything)

		# From PackageKit...
		self.packagekit.get_candidates(package, factory, 'package:deb')

		# From apt-cache...
		cached = self.apt_cache.get(package, None)
		if cached:
			candidate_version = cached['version']
			candidate_arch = cached['arch']
			if candidate_version and candidate_version != installed_version:
				impl = factory('package:deb:%s:%s:%s' % (package, candidate_version, candidate_arch), installed = False)
				impl.version = model.parse_version(candidate_version)
				if candidate_arch != '*':
					impl.machine = candidate_arch
				def install(handler):
					raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
							"Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
							"use that to install it.") % package)
				impl.download_sources.append(model.DistributionSource(package, cached['size'], install, needs_confirmation = False))

	def get_score(self, distro_name):
		return int(distro_name == 'Debian')

	def _get_dpkg_info(self, package):
		installed_cached_info = self.dpkg_cache.get(package)
		if installed_cached_info is None:
			installed_cached_info = self._query_installed_package(package)
			self.dpkg_cache.put(package, installed_cached_info)

		return installed_cached_info

	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open('/dev/null', os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.wait()
			except Exception as ex:
				warn("'apt-cache show %s' failed: %s", package, ex)
				cached = None
			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached
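
# Illustrative sketch (not in the original source) of the apt-cache fallback above:
# 'apt-cache show --no-all-versions -- <package>' prints RFC822-style fields, e.g.
#   Package: gimp
#   Version: 2.6.12-1ubuntu1
#   Architecture: amd64
#   Size: 4502382
# which DebianDistribution.fetch_candidates() reduces to
#   {'version': '2.6.12-1', 'arch': 'x86_64', 'size': 4502382}
# and get_package_info() later turns into a not-yet-installed candidate implementation.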

class RPMDistribution(CachedDistribution):
	"""An RPM-based distribution."""

	cache_leaf = 'rpm-status.cache'

	def generate_cache(self):
		cache = []

		for line in os.popen("rpm -qa --qf='%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n'"):
			package, version, rpmarch = line.split('\t', 2)
			if package == 'gpg-pubkey':
				continue
			zi_arch = canonical_machine(rpmarch.strip())
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:rpm:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:rpm')

	def get_score(self, distro_name):
		return int(distro_name == 'RPM')

class SlackDistribution(Distribution):
	"""A Slackware-based distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = packages_dir

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, arch, build = entry.rsplit('-', 3)
			if name == package:
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:slack:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:slack')

	def get_score(self, distro_name):
		return int(distro_name == 'Slack')
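
# Illustrative example (not in the original source): Slackware records installed packages
# as entries under /var/log/packages named '<name>-<version>-<arch>-<build>', e.g.
#   'gimp-2.6.11-i486-1' -> name 'gimp', version+build '2.6.11-1', machine 'i486'
# which is what the rsplit('-', 3) above relies on.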

class ArchDistribution(Distribution):
	"""An Arch Linux distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = os.path.join(packages_dir, "local")

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, build = entry.rsplit('-', 2)
			if name == package:
				gotarch = False
				for line in open(os.path.join(self._packages_dir, entry, "desc")):
					if line == "%ARCH%\n":
						gotarch = True
						continue
					if gotarch:
						arch = line.strip()
						break
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:arch:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:arch')

	def get_score(self, distro_name):
		return int(distro_name == 'Arch')

class GentooDistribution(Distribution):

	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		# Add installed versions...
		_version_start_regexp = '-[0-9]'

		if package.count('/') != 1: return

		category, leafname = package.split('/')
		category_dir = os.path.join(self._pkgdir, category)
		match_prefix = leafname + '-'

		if not os.path.isdir(category_dir): return

		for filename in os.listdir(category_dir):
			if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
				name = open(os.path.join(category_dir, filename, 'PF')).readline().strip()

				match = re.search(_version_start_regexp, name)
				if match is None:
					warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name})
					continue
				else:
					version = try_cleanup_distro_version(name[match.start() + 1:])

				if category == 'app-emulation' and name.startswith('emul-'):
					__, __, machine, __ = name.split('-', 3)
				else:
					machine, __ = open(os.path.join(category_dir, filename, 'CHOST')).readline().split('-', 1)
				machine = arch.canonicalize_machine(machine)

				impl = factory('package:gentoo:%s:%s:%s' % \
						(package, version, machine))
				impl.version = model.parse_version(version)
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:gentoo')

	def get_score(self, distro_name):
		return int(distro_name == 'Gentoo')

class PortsDistribution(Distribution):

	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		_name_version_regexp = '^(.+)-([^-]+)$'

		nameversion = re.compile(_name_version_regexp)
		for pkgname in os.listdir(self._pkgdir):
			pkgdir = os.path.join(self._pkgdir, pkgname)
			if not os.path.isdir(pkgdir): continue

			#contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

			match = nameversion.search(pkgname)
			if match is None:
				warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
				continue
			else:
				name = match.group(1)
				if name != package:
					continue
				version = try_cleanup_distro_version(match.group(2))

			machine = host_machine

			impl = factory('package:ports:%s:%s:%s' % \
					(package, version, machine))
			impl.version = model.parse_version(version)
			impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'Ports')

class MacPortsDistribution(CachedDistribution):

	cache_leaf = 'macports-status.cache'

	def generate_cache(self):
		cache = []

		# for line in os.popen("port echo active"):
		for line in os.popen("port -v installed"):
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version)	# strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:macports:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'MacPorts')
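
# Illustrative note (not in the original source): generate_cache() records one
# '<package>\t<version>\t<arch>' line per active port, e.g. a hypothetical entry
#   'gimp2\t2.8.0\t*'
# The leading '@' and any variant suffixes ('+universal', ...) are stripped from the
# version before it is cleaned with try_cleanup_distro_version().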

class CygwinDistribution(CachedDistribution):
	"""A Cygwin-based distribution."""

	cache_leaf = 'cygcheck-status.cache'

	def generate_cache(self):
		cache = []

		zi_arch = canonical_machine(arch)
		for line in os.popen("cygcheck -c -d"):
			if line == "Cygwin Package Information\r\n":
				continue
			if line == "\n":
				continue
			package, version = line.split()
			if package == "Package" and version == "Version":
				continue
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:cygwin:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'Cygwin')

_host_distribution = None
def get_host_distribution():
	"""Get a Distribution suitable for the host operating system.
	Calling this twice will return the same object.
	@rtype: L{Distribution}"""
	global _host_distribution
	if not _host_distribution:
		dpkg_db_status = '/var/lib/dpkg/status'
		rpm_db_packages = '/var/lib/rpm/Packages'
		_slack_db = '/var/log/packages'
		_arch_db = '/var/lib/pacman'
		_pkg_db = '/var/db/pkg'
		_macports_db = '/opt/local/var/macports/registry/registry.db'
		_cygwin_log = '/var/log/setup.log'

		if sys.prefix == "/sw":
			# Fink: look under the prefix. Note that os.path.join() would discard
			# the prefix if the second argument kept its leading '/'.
			dpkg_db_status = os.path.join(sys.prefix, dpkg_db_status.lstrip('/'))
			rpm_db_packages = os.path.join(sys.prefix, rpm_db_packages.lstrip('/'))

		if os.name == "nt":
			_host_distribution = WindowsDistribution()
		elif os.path.isdir(_pkg_db):
			if sys.platform.startswith("linux"):
				_host_distribution = GentooDistribution(_pkg_db)
			elif sys.platform.startswith("freebsd"):
				_host_distribution = PortsDistribution(_pkg_db)
		elif os.path.isfile(_macports_db) \
				and sys.prefix.startswith("/opt/local"):
			_host_distribution = MacPortsDistribution(_macports_db)
		elif os.path.isfile(_cygwin_log) and sys.platform == "cygwin":
			_host_distribution = CygwinDistribution(_cygwin_log)
		elif os.access(dpkg_db_status, os.R_OK) \
				and os.path.getsize(dpkg_db_status) > 0:
			_host_distribution = DebianDistribution(dpkg_db_status)
		elif os.path.isfile(rpm_db_packages):
			_host_distribution = RPMDistribution(rpm_db_packages)
		elif os.path.isdir(_slack_db):
			_host_distribution = SlackDistribution(_slack_db)
		elif os.path.isdir(_arch_db):
			_host_distribution = ArchDistribution(_arch_db)
		else:
			_host_distribution = Distribution()

	return _host_distribution
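
# Minimal usage sketch (illustrative only):
#   from zeroinstall.injector import distro
#   host = distro.get_host_distribution()
#   print(host.__class__.__name__, host.get_score('Debian'))
# The same Distribution instance is returned on every call; competing
# <package-implementation> elements are chosen by comparing get_score() results.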