Fixed some file descriptor leaks
[zeroinstall.git] / zeroinstall/injector/distro.py
blob 59b031c26b3d985b35f89fd380344d2ed36a5668
"""
Integration with native distribution package managers.
@since: 0.28
"""

# Copyright (C) 2009, Thomas Leonard
# See the README file for details, or visit http://0install.net.

from zeroinstall import _
import os, platform, re, subprocess, sys
from logging import warn, info
from zeroinstall.injector import namespaces, model, arch
from zeroinstall.support import basedir

_dotted_ints = '[0-9]+(?:\.[0-9]+)*'

# This matches a version number that would be a valid Zero Install version without modification
_zeroinstall_regexp = '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints, _dotted_ints)

# This matches the interesting bits of distribution version numbers
# (first bit is for Java-style 6b17 syntax)
_version_regexp = '({ints}b)?({zero})(-r{ints})?'.format(zero = _zeroinstall_regexp, ints = _dotted_ints)
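
# Together these patterns accept the common shapes of distribution version strings:
# plain dotted versions ("2.6.11"), Debian-style revisions ("2.6.11-1"), Gentoo-style
# "-rN" suffixes ("1.2.3-r1") and Java-style build numbers ("6b17"). The examples are
# illustrative only; try_cleanup_distro_version() below shows how a match is turned
# into a Zero Install version.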

# We try to do updates atomically without locking, but we don't worry too much about
# duplicate entries or being a little out of sync with the on-disk copy.
class Cache(object):
	def __init__(self, cache_leaf, source, format):
		"""Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
		If the size or mtime of $source has changed, or the cache
		format version is different, reset the cache first."""
		self.cache_leaf = cache_leaf
		self.source = source
		self.format = format
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)
		self.cached_for = {}		# Attributes of source when cache was created
		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load cache (%s). Flushing..."), ex)
			self.flush()

	def flush(self):
		# Wipe the cache
		try:
			st = os.stat(self.source)
			mtime = int(st.st_mtime)
			size = st.st_size
		except Exception as ex:
			warn("Failed to stat %s: %s", self.source, ex)
			mtime = size = 0
		self.cache = {}
		import tempfile
		tmp, tmp_name = tempfile.mkstemp(dir = self.cache_dir)
		data = "mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime, size, self.format)
		while data:
			wrote = os.write(tmp, data)
			data = data[wrote:]
		os.close(tmp)
		os.rename(tmp_name, os.path.join(self.cache_dir, self.cache_leaf))

		self._load_cache()

	# Populate self.cache from our saved cache file.
	# Throws an exception if the cache doesn't exist or has the wrong format.
	def _load_cache(self):
		self.cache = cache = {}
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			for line in stream:
				line = line.strip()
				if not line:
					break
				key, value = line.split('=', 1)
				if key in ('mtime', 'size', 'format'):
					self.cached_for[key] = int(value)

			self._check_valid()

			for line in stream:
				key, value = line.split('=', 1)
				cache[key] = value[:-1]

	# Check the source file hasn't changed since we created the cache
	def _check_valid(self):
		st = os.stat(self.source)
		if self.cached_for['mtime'] != int(st.st_mtime):
			raise Exception("Modification time of %s has changed" % self.source)
		if self.cached_for['size'] != st.st_size:
			raise Exception("Size of %s has changed" % self.source)
		if self.cached_for.get('format', None) != self.format:
			raise Exception("Format of cache has changed")

	def get(self, key):
		try:
			self._check_valid()
		except Exception as ex:
			info(_("Cache needs to be refreshed: %s"), ex)
			self.flush()
			return None
		else:
			return self.cache.get(key, None)

	def put(self, key, value):
		cache_path = os.path.join(self.cache_dir, self.cache_leaf)
		self.cache[key] = value
		try:
			with open(cache_path, 'a') as stream:
				stream.write('%s=%s\n' % (key, value))
		except Exception as ex:
			warn("Failed to write to cache %s: %s=%s: %s", cache_path, key, value, ex)
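
# A minimal usage sketch for Cache (the values are illustrative; the real caller is
# DebianDistribution below, which stores one "version<TAB>arch" string per package):
#
#	cache = Cache('dpkg-status.cache', '/var/lib/dpkg/status', 2)
#	if cache.get('gimp') is None:
#		cache.put('gimp', '2.6.11-1\tx86_64')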

def try_cleanup_distro_version(version):
	"""Try to turn a distribution version string into one readable by Zero Install.
	We do this by stripping off anything we can't parse.
	@return: the part we understood, or None if we couldn't parse anything
	@rtype: str"""
	if ':' in version:
		version = version.split(':')[1]	# Skip 'epoch'
	version = version.replace('_', '-')
	match = re.match(_version_regexp, version)
	if match:
		major, version, revision = match.groups()
		if major is not None:
			version = major[:-1] + '.' + version
		if revision is None:
			return version
		else:
			return '%s-%s' % (version, revision[2:])
	return None
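
# Illustrative examples of the cleanup rules above (doctest-style; the inputs are
# made-up distribution version strings):
#
#	>>> try_cleanup_distro_version('1:2.6.11-1ubuntu3')
#	'2.6.11-1'
#	>>> try_cleanup_distro_version('6b17')
#	'6.17'
#	>>> try_cleanup_distro_version('1.2.3-r1')
#	'1.2.3-1'
#	>>> try_cleanup_distro_version('not-a-version') is None
#	True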

class Distribution(object):
	"""Represents a distribution with which we can integrate.
	Sub-classes should specialise this to integrate with the package managers of
	particular distributions. This base class ignores the native package manager.
	@since: 0.28
	"""

	_packagekit = None

	def get_package_info(self, package, factory):
		"""Get information about the given package.
		Add zero or more implementations using the factory (typically at most two
		will be added: the currently installed version and the latest available).
		@param package: package name (e.g. "gimp")
		@type package: str
		@param factory: function for creating new DistributionImplementation objects from IDs
		@type factory: str -> L{model.DistributionImplementation}
		"""
		return

	def get_score(self, distribution):
		"""Indicate how closely the host distribution matches this one.
		The <package-implementation> with the highest score is passed
		to L{Distribution.get_package_info}. If several elements get
		the same score, get_package_info is called for all of them.
		@param distribution: a distribution name
		@type distribution: str
		@return: an integer, or None if there is no match at all
		@rtype: int | None
		"""
		return 0

	def get_feed(self, master_feed):
		"""Generate a feed containing information about distribution packages.
		This should immediately return a feed containing an implementation for the
		package if it's already installed. Information about versions that could be
		installed using the distribution's package manager can be added asynchronously
		later (see L{fetch_candidates}).
		@param master_feed: feed containing the <package-implementation> elements
		@type master_feed: L{model.ZeroInstallFeed}
		@rtype: L{model.ZeroInstallFeed}"""
		feed = model.ZeroInstallFeed(None)
		feed.url = 'distribution:' + master_feed.url

		for item, item_attrs in master_feed.get_package_impls(self):
			package = item_attrs.get('package', None)
			if package is None:
				raise model.InvalidInterface(_("Missing 'package' attribute on %s") % item)

			def factory(id, only_if_missing = False, installed = True):
				assert id.startswith('package:')
				if id in feed.implementations:
					if only_if_missing:
						return None
					warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
				impl = model.DistributionImplementation(feed, id, self)
				feed.implementations[id] = impl

				impl.installed = installed
				impl.metadata = item_attrs

				item_main = item_attrs.get('main', None)
				if item_main and not item_main.startswith('/'):
					raise model.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
								     item_main)
				impl.main = item_main
				impl.upstream_stability = model.packaged

				return impl

			self.get_package_info(package, factory)

		if master_feed.url == 'http://repo.roscidus.com/python/python' and all(not impl.installed for impl in feed.implementations.values()):
			# Hack: we can support Python on platforms with unsupported package managers
			# by adding the implementation of Python running us now to the list.
			python_version = '.'.join([str(v) for v in sys.version_info if isinstance(v, int)])
			impl_id = 'package:host:python:' + python_version
			assert impl_id not in feed.implementations
			impl = model.DistributionImplementation(feed, impl_id, self)
			impl.installed = True
			impl.version = model.parse_version(python_version)
			impl.main = sys.executable
			impl.upstream_stability = model.packaged
			impl.machine = host_machine	# (hopefully)
			feed.implementations[impl_id] = impl

		return feed

	def fetch_candidates(self, master_feed):
		"""Collect information about versions we could install using
		the distribution's package manager. On success, the distribution
		feed in iface_cache is updated.
		@return: a L{tasks.Blocker} if the task is in progress, or None if not"""
		if self.packagekit.available:
			package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]
			return self.packagekit.fetch_candidates(package_names)

	@property
	def packagekit(self):
		"""For use by subclasses.
		@rtype: L{packagekit.PackageKit}"""
		if not self._packagekit:
			from zeroinstall.injector import packagekit
			self._packagekit = packagekit.PackageKit()
		return self._packagekit
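
# Sketch of what a subclass provides (a hypothetical "FooDistribution"; the real
# subclasses below follow this pattern, and usually also ask
# self.packagekit.get_candidates() for uninstalled candidates):
#
#	class FooDistribution(Distribution):
#		def get_package_info(self, package, factory):
#			impl = factory('package:foo:%s:%s:%s' % (package, '1.0', 'x86_64'))
#			impl.version = model.parse_version('1.0')
#			impl.machine = 'x86_64'
#
#		def get_score(self, distro_name):
#			return int(distro_name == 'Foo')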

class WindowsDistribution(Distribution):
	def get_package_info(self, package, factory):
		def _is_64bit_windows():
			p = sys.platform
			from win32process import IsWow64Process
			if p == 'win64' or (p == 'win32' and IsWow64Process()): return True
			elif p == 'win32': return False
			else: raise Exception(_("WindowsDistribution may only be used on the Windows platform"))

		def _read_hklm_reg(key_name, value_name):
			from win32api import RegOpenKeyEx, RegQueryValueEx, RegCloseKey
			from win32con import HKEY_LOCAL_MACHINE, KEY_READ
			KEY_WOW64_64KEY = 0x0100
			KEY_WOW64_32KEY = 0x0200
			if _is_64bit_windows():
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_32KEY)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				try:
					key64 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ | KEY_WOW64_64KEY)
					(value64, _) = RegQueryValueEx(key64, value_name)
					RegCloseKey(key64)
				except:
					value64 = ''
			else:
				try:
					key32 = RegOpenKeyEx(HKEY_LOCAL_MACHINE, key_name, 0, KEY_READ)
					(value32, _) = RegQueryValueEx(key32, value_name)
					RegCloseKey(key32)
				except:
					value32 = ''
				value64 = ''
			return (value32, value64)

		if package == 'openjdk-6-jre':
			(java32_home, java64_home) = _read_hklm_reg(r"SOFTWARE\JavaSoft\Java Runtime Environment\1.6", "JavaHome")

			if os.path.isfile(java32_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'i486'))
				impl.machine = 'i486'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java32_home + r"\bin\java.exe"

			if os.path.isfile(java64_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'x86_64'))
				impl.machine = 'x86_64'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java64_home + r"\bin\java.exe"

		if package == 'openjdk-6-jdk':
			(java32_home, java64_home) = _read_hklm_reg(r"SOFTWARE\JavaSoft\Java Development Kit\1.6", "JavaHome")

			if os.path.isfile(java32_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'i486'))
				impl.machine = 'i486'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java32_home + r"\bin\java.exe"

			if os.path.isfile(java64_home + r"\bin\java.exe"):
				impl = factory('package:windows:%s:%s:%s' % (package, '6', 'x86_64'))
				impl.machine = 'x86_64'
				impl.version = model.parse_version('6')
				impl.upstream_stability = model.packaged
				impl.main = java64_home + r"\bin\java.exe"

class CachedDistribution(Distribution):
	"""For distributions where querying the package database is slow (e.g. requires running
	an external command), we cache the results.
	@since: 0.39
	@deprecated: use Cache instead
	"""

	def __init__(self, db_status_file):
		"""@param db_status_file: update the cache when the timestamp of this file changes"""
		self._status_details = os.stat(db_status_file)

		self.versions = {}
		self.cache_dir = basedir.save_cache_path(namespaces.config_site,
							 namespaces.config_prog)

		try:
			self._load_cache()
		except Exception as ex:
			info(_("Failed to load distribution database cache (%s). Regenerating..."), ex)
			try:
				self.generate_cache()
				self._load_cache()
			except Exception as ex:
				warn(_("Failed to regenerate distribution database cache: %s"), ex)

	def _load_cache(self):
		"""Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
		Throws an exception if the cache should be (re)created."""
		with open(os.path.join(self.cache_dir, self.cache_leaf)) as stream:
			cache_version = None
			for line in stream:
				if line == '\n':
					break
				name, value = line.split(': ')
				if name == 'mtime' and int(value) != int(self._status_details.st_mtime):
					raise Exception(_("Modification time of package database file has changed"))
				if name == 'size' and int(value) != self._status_details.st_size:
					raise Exception(_("Size of package database file has changed"))
				if name == 'version':
					cache_version = int(value)
			else:
				# (for/else: we never found the blank line that ends the header)
				raise Exception(_('Invalid cache format (bad header)'))

			if cache_version is None:
				raise Exception(_('Old cache format'))

			versions = self.versions
			for line in stream:
				package, version, zi_arch = line[:-1].split('\t')
				versionarch = (version, intern(zi_arch))
				if package not in versions:
					versions[package] = [versionarch]
				else:
					versions[package].append(versionarch)

	def _write_cache(self, cache):
		#cache.sort()	# Might be useful later; currently we don't care
		import tempfile
		fd, tmpname = tempfile.mkstemp(prefix = 'zeroinstall-cache-tmp',
					       dir = self.cache_dir)
		try:
			stream = os.fdopen(fd, 'wb')
			stream.write('version: 2\n')
			stream.write('mtime: %d\n' % int(self._status_details.st_mtime))
			stream.write('size: %d\n' % self._status_details.st_size)
			stream.write('\n')
			for line in cache:
				stream.write(line + '\n')
			stream.close()

			os.rename(tmpname,
				  os.path.join(self.cache_dir,
					       self.cache_leaf))
		except:
			os.unlink(tmpname)
			raise
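
# The cache file written by _write_cache() is plain text: a short header, a blank
# line, then one "package<TAB>version<TAB>arch" entry per line, e.g. (illustrative
# values):
#
#	version: 2
#	mtime: 1298400000
#	size: 1862230
#
#	gimp<TAB>2.6.11<TAB>x86_64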

# Maps machine type names used in packages to their Zero Install versions
_canonical_machine = {
	'all' : '*',
	'any' : '*',
	'noarch' : '*',
	'(none)' : '*',
	'x86_64': 'x86_64',
	'amd64': 'x86_64',
	'i386': 'i386',
	'i486': 'i486',
	'i586': 'i586',
	'i686': 'i686',
	'ppc64': 'ppc64',
	'ppc': 'ppc',
}

host_machine = arch.canonicalize_machine(platform.uname()[4])
def canonical_machine(package_machine):
	machine = _canonical_machine.get(package_machine, None)
	if machine is None:
		# Safe default if we can't understand the arch
		return host_machine
	return machine
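
# Examples (the fallback value depends on the machine running this code):
#
#	canonical_machine('amd64')	->  'x86_64'
#	canonical_machine('noarch')	->  '*'
#	canonical_machine('armv7l')	->  host_machine  (unknown names fall back to the host)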

class DebianDistribution(Distribution):
	"""A dpkg-based distribution."""

	cache_leaf = 'dpkg-status.cache'

	def __init__(self, dpkg_status):
		self.dpkg_cache = Cache('dpkg-status.cache', dpkg_status, 2)
		self.apt_cache = {}

	def _query_installed_package(self, package):
		null = os.open('/dev/null', os.O_WRONLY)
		child = subprocess.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package],
					 stdout = subprocess.PIPE, stderr = null)
		os.close(null)
		stdout, stderr = child.communicate()
		child.wait()
		for line in stdout.split('\n'):
			if not line: continue
			version, debarch, status = line.split('\t', 2)
			if not status.endswith(' installed'): continue
			clean_version = try_cleanup_distro_version(version)
			if debarch.find("-") != -1:
				debarch = debarch.split("-")[-1]
			if clean_version:
				return '%s\t%s' % (clean_version, canonical_machine(debarch.strip()))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		return '-'
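
	# With the --showformat given above, dpkg-query prints one line per matching
	# package, e.g. (illustrative):
	#
	#	1:2.6.11-1<TAB>amd64<TAB>install ok installed
	#
	# which _query_installed_package() reduces to the cached string
	# '2.6.11-1\tx86_64' (epoch stripped, architecture canonicalised).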

	def get_package_info(self, package, factory):
		# Add any already-installed package...
		installed_cached_info = self._get_dpkg_info(package)

		if installed_cached_info != '-':
			installed_version, machine = installed_cached_info.split('\t')
			impl = factory('package:deb:%s:%s:%s' % (package, installed_version, machine))
			impl.version = model.parse_version(installed_version)
			if machine != '*':
				impl.machine = machine
		else:
			installed_version = None

		# Add any uninstalled candidates (note: only one of these two methods will add anything)

		# From PackageKit...
		self.packagekit.get_candidates(package, factory, 'package:deb')

		# From apt-cache...
		cached = self.apt_cache.get(package, None)
		if cached:
			candidate_version = cached['version']
			candidate_arch = cached['arch']
			if candidate_version and candidate_version != installed_version:
				impl = factory('package:deb:%s:%s:%s' % (package, candidate_version, candidate_arch), installed = False)
				impl.version = model.parse_version(candidate_version)
				if candidate_arch != '*':
					impl.machine = candidate_arch
				def install(handler):
					raise model.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
							"Please install it manually using your distribution's tools and try again. Or, install 'packagekit' and I can "
							"use that to install it.") % package)
				impl.download_sources.append(model.DistributionSource(package, cached['size'], install, needs_confirmation = False))

	def get_score(self, distro_name):
		return int(distro_name == 'Debian')

	def _get_dpkg_info(self, package):
		installed_cached_info = self.dpkg_cache.get(package)
		if installed_cached_info is None:
			installed_cached_info = self._query_installed_package(package)
			self.dpkg_cache.put(package, installed_cached_info)

		return installed_cached_info

	def fetch_candidates(self, master_feed):
		package_names = [item.getAttribute("package") for item, item_attrs in master_feed.get_package_impls(self)]

		if self.packagekit.available:
			return self.packagekit.fetch_candidates(package_names)

		# No PackageKit. Use apt-cache directly.
		for package in package_names:
			# Check to see whether we could get a newer version using apt-get
			try:
				null = os.open('/dev/null', os.O_WRONLY)
				child = subprocess.Popen(['apt-cache', 'show', '--no-all-versions', '--', package], stdout = subprocess.PIPE, stderr = null)
				os.close(null)

				arch = version = size = None
				for line in child.stdout:
					line = line.strip()
					if line.startswith('Version: '):
						version = line[9:]
						version = try_cleanup_distro_version(version)
					elif line.startswith('Architecture: '):
						arch = canonical_machine(line[14:].strip())
					elif line.startswith('Size: '):
						size = int(line[6:].strip())
				if version and arch:
					cached = {'version': version, 'arch': arch, 'size': size}
				else:
					cached = None
				child.wait()
			except Exception as ex:
				warn("'apt-cache show %s' failed: %s", package, ex)
				cached = None
			# (multi-arch support? can there be multiple candidates?)
			self.apt_cache[package] = cached
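
	# 'apt-cache show' output includes lines such as the following, which the loop
	# above reduces to an entry in self.apt_cache (values illustrative):
	#
	#	Version: 1:2.6.11-1ubuntu3
	#	Architecture: amd64
	#	Size: 4503020
	#
	#	-> {'version': '2.6.11-1', 'arch': 'x86_64', 'size': 4503020}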

class RPMDistribution(CachedDistribution):
	"""An RPM-based distribution."""

	cache_leaf = 'rpm-status.cache'

	def generate_cache(self):
		cache = []

		for line in os.popen("rpm -qa --qf='%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n'"):
			package, version, rpmarch = line.split('\t', 2)
			if package == 'gpg-pubkey':
				continue
			zi_arch = canonical_machine(rpmarch.strip())
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:rpm:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:rpm')

	def get_score(self, distro_name):
		return int(distro_name == 'RPM')

class SlackDistribution(Distribution):
	"""A Slack-based distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = packages_dir

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, arch, build = entry.rsplit('-', 3)
			if name == package:
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:slack:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:slack')

	def get_score(self, distro_name):
		return int(distro_name == 'Slack')

class ArchDistribution(Distribution):
	"""An Arch Linux distribution."""

	def __init__(self, packages_dir):
		self._packages_dir = os.path.join(packages_dir, "local")

	def get_package_info(self, package, factory):
		# Add installed versions...
		for entry in os.listdir(self._packages_dir):
			name, version, build = entry.rsplit('-', 2)
			if name == package:
				gotarch = False
				for line in open(os.path.join(self._packages_dir, entry, "desc")):
					if line == "%ARCH%\n":
						gotarch = True
						continue
					if gotarch:
						arch = line.strip()
						break
				zi_arch = canonical_machine(arch)
				clean_version = try_cleanup_distro_version("%s-%s" % (version, build))
				if not clean_version:
					warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': name})
					continue

				impl = factory('package:arch:%s:%s:%s' % \
						(package, clean_version, zi_arch))
				impl.version = model.parse_version(clean_version)
				if zi_arch != '*':
					impl.machine = zi_arch

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:arch')

	def get_score(self, distro_name):
		return int(distro_name == 'Arch')

class GentooDistribution(Distribution):
	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		# Add installed versions...
		_version_start_regexp = '-[0-9]'

		if package.count('/') != 1: return

		category, leafname = package.split('/')
		category_dir = os.path.join(self._pkgdir, category)
		match_prefix = leafname + '-'

		if not os.path.isdir(category_dir): return

		for filename in os.listdir(category_dir):
			if filename.startswith(match_prefix) and filename[len(match_prefix)].isdigit():
				name = open(os.path.join(category_dir, filename, 'PF')).readline().strip()

				match = re.search(_version_start_regexp, name)
				if match is None:
					warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name})
					continue
				else:
					version = try_cleanup_distro_version(name[match.start() + 1:])

				if category == 'app-emulation' and name.startswith('emul-'):
					__, __, machine, __ = name.split('-', 3)
				else:
					machine, __ = open(os.path.join(category_dir, filename, 'CHOST')).readline().split('-', 1)
				machine = arch.canonicalize_machine(machine)

				impl = factory('package:gentoo:%s:%s:%s' % \
						(package, version, machine))
				impl.version = model.parse_version(version)
				impl.machine = machine

		# Add any uninstalled candidates found by PackageKit
		self.packagekit.get_candidates(package, factory, 'package:gentoo')

	def get_score(self, distro_name):
		return int(distro_name == 'Gentoo')

class PortsDistribution(Distribution):
	def __init__(self, pkgdir):
		self._pkgdir = pkgdir

	def get_package_info(self, package, factory):
		_name_version_regexp = '^(.+)-([^-]+)$'

		nameversion = re.compile(_name_version_regexp)
		for pkgname in os.listdir(self._pkgdir):
			pkgdir = os.path.join(self._pkgdir, pkgname)
			if not os.path.isdir(pkgdir): continue

			#contents = open(os.path.join(pkgdir, '+CONTENTS')).readline().strip()

			match = nameversion.search(pkgname)
			if match is None:
				warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname})
				continue
			else:
				name = match.group(1)
				if name != package:
					continue
				version = try_cleanup_distro_version(match.group(2))

			machine = host_machine

			impl = factory('package:ports:%s:%s:%s' % \
					(package, version, machine))
			impl.version = model.parse_version(version)
			impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'Ports')

class MacPortsDistribution(CachedDistribution):
	cache_leaf = 'macports-status.cache'

	def generate_cache(self):
		cache = []

		# for line in os.popen("port echo active"):
		for line in os.popen("port -v installed"):
			if not line.startswith(" "):
				continue
			if line.strip().count(" ") > 1:
				package, version, extra = line.split(None, 2)
			else:
				package, version = line.split()
				extra = ""
			if not extra.startswith("(active)"):
				continue
			version = version.lstrip('@')
			version = re.sub(r"\+.*", "", version)	# strip variants
			zi_arch = '*'
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				match = re.match(r" platform='([^' ]*)( \d+)?' archs='([^']*)'", extra)
				if match:
					platform, major, archs = match.groups()
					for arch in archs.split():
						zi_arch = canonical_machine(arch)
						cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
				else:
					cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:macports:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'MacPorts')

class CygwinDistribution(CachedDistribution):
	"""A Cygwin-based distribution."""

	cache_leaf = 'cygcheck-status.cache'

	def generate_cache(self):
		cache = []

		zi_arch = host_machine		# cygcheck doesn't report an architecture; assume the host's
		for line in os.popen("cygcheck -c -d"):
			if line == "Cygwin Package Information\r\n":
				continue
			if line == "\n":
				continue
			package, version = line.split()
			if package == "Package" and version == "Version":
				continue
			clean_version = try_cleanup_distro_version(version)
			if clean_version:
				cache.append('%s\t%s\t%s' % (package, clean_version, zi_arch))
			else:
				warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version, 'package': package})

		self._write_cache(cache)

	def get_package_info(self, package, factory):
		# Add installed versions...
		versions = self.versions.get(package, [])

		for version, machine in versions:
			impl = factory('package:cygwin:%s:%s:%s' % (package, version, machine))
			impl.version = model.parse_version(version)
			if machine != '*':
				impl.machine = machine

	def get_score(self, distro_name):
		return int(distro_name == 'Cygwin')

_host_distribution = None
def get_host_distribution():
	"""Get a Distribution suitable for the host operating system.
	Calling this twice will return the same object.
	@rtype: L{Distribution}"""
	global _host_distribution
	if not _host_distribution:
		dpkg_db_status = '/var/lib/dpkg/status'
		rpm_db_packages = '/var/lib/rpm/Packages'
		_slack_db = '/var/log/packages'
		_arch_db = '/var/lib/pacman'
		_pkg_db = '/var/db/pkg'
		_macports_db = '/opt/local/var/macports/registry/registry.db'
		_cygwin_log = '/var/log/setup.log'

		if sys.prefix == "/sw":
			# Fink keeps the databases under its own prefix. (Note: os.path.join would
			# discard the prefix here, because the default paths are absolute.)
			dpkg_db_status = sys.prefix + dpkg_db_status
			rpm_db_packages = sys.prefix + rpm_db_packages

		if os.name == "nt":
			_host_distribution = WindowsDistribution()
		elif os.path.isdir(_pkg_db):
			if sys.platform.startswith("linux"):
				_host_distribution = GentooDistribution(_pkg_db)
			elif sys.platform.startswith("freebsd"):
				_host_distribution = PortsDistribution(_pkg_db)
		elif os.path.isfile(_macports_db) \
		   and sys.prefix.startswith("/opt/local"):
			_host_distribution = MacPortsDistribution(_macports_db)
		elif os.path.isfile(_cygwin_log) and sys.platform == "cygwin":
			_host_distribution = CygwinDistribution(_cygwin_log)
		elif os.access(dpkg_db_status, os.R_OK) \
		   and os.path.getsize(dpkg_db_status) > 0:
			_host_distribution = DebianDistribution(dpkg_db_status)
		elif os.path.isfile(rpm_db_packages):
			_host_distribution = RPMDistribution(rpm_db_packages)
		elif os.path.isdir(_slack_db):
			_host_distribution = SlackDistribution(_slack_db)
		elif os.path.isdir(_arch_db):
			_host_distribution = ArchDistribution(_arch_db)
		else:
			_host_distribution = Distribution()

	return _host_distribution
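
# Typical use (a sketch; the callers in the injector handle errors and wait on the
# returned blocker where necessary):
#
#	host = get_host_distribution()
#	feed = host.get_feed(master_feed)		# installed packages, synchronously
#	blocker = host.fetch_candidates(master_feed)	# may return a tasks.Blocker, or None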