2 Integration with native distribution package managers.
6 # Copyright (C) 2009, Thomas Leonard
7 # See the README file for details, or visit http://0install.net.
9 from zeroinstall
import _
10 import os
, platform
, re
, glob
, subprocess
, sys
11 from logging
import warn
, info
12 from zeroinstall
.injector
import namespaces
, model
, arch
13 from zeroinstall
.support
import basedir
, tasks
15 _dotted_ints
= '[0-9]+(?:\.[0-9]+)*'
17 # This matches a version number that would be a valid Zero Install version without modification
18 _zeroinstall_regexp
= '(?:%s)(?:-(?:pre|rc|post|)(?:%s))*' % (_dotted_ints
, _dotted_ints
)
20 # This matches the interesting bits of distribution version numbers
21 _version_regexp
= '(%s)(-r%s)?' % (_zeroinstall_regexp
, _dotted_ints
)
23 # We try to do updates atomically without locking, but we don't worry too much about
24 # duplicate entries or being a little out of sync with the on-disk copy.
26 def __init__(self
, cache_leaf
, source
, format
):
27 """Maintain a cache file (e.g. ~/.cache/0install.net/injector/$name).
28 If the size or mtime of $source has changed, or the cache
29 format version if different, reset the cache first."""
30 self
.cache_leaf
= cache_leaf
33 self
.cache_dir
= basedir
.save_cache_path(namespaces
.config_site
,
34 namespaces
.config_prog
)
35 self
.cached_for
= {} # Attributes of source when cache was created
39 info(_("Failed to load cache (%s). Flushing..."), ex
)
45 info
= os
.stat(self
.source
)
46 mtime
= int(info
.st_mtime
)
49 warn("Failed to stat %s: %s", self
.source
, ex
)
53 tmp
, tmp_name
= tempfile
.mkstemp(dir = self
.cache_dir
)
54 data
= "mtime=%d\nsize=%d\nformat=%d\n\n" % (mtime
, size
, self
.format
)
56 wrote
= os
.write(tmp
, data
)
58 os
.rename(tmp_name
, os
.path
.join(self
.cache_dir
, self
.cache_leaf
))
62 # Populate self.cache from our saved cache file.
63 # Throws an exception if the cache doesn't exist or has the wrong format.
64 def _load_cache(self
):
65 self
.cache
= cache
= {}
66 stream
= file(os
.path
.join(self
.cache_dir
, self
.cache_leaf
))
74 key
, value
= line
.split('=', 1)
75 if key
in ('mtime', 'size', 'format'):
76 self
.cached_for
[key
] = int(value
)
81 key
, value
= line
.split('=', 1)
82 cache
[key
] = value
[:-1]
86 # Check the source file hasn't changed since we created the cache
87 def _check_valid(self
):
88 info
= os
.stat(self
.source
)
89 if self
.cached_for
['mtime'] != int(info
.st_mtime
):
90 raise Exception("Modification time of %s has changed" % self
.source
)
91 if self
.cached_for
['size'] != info
.st_size
:
92 raise Exception("Size of %s has changed" % self
.source
)
93 if self
.cached_for
.get('format', None) != self
.format
:
94 raise Exception("Format of cache has changed")
100 info(_("Cache needs to be refreshed: %s"), ex
)
104 return self
.cache
.get(key
, None)
106 def put(self
, key
, value
):
107 cache_path
= os
.path
.join(self
.cache_dir
, self
.cache_leaf
)
108 self
.cache
[key
] = value
110 stream
= file(cache_path
, 'a')
112 stream
.write('%s=%s\n' % (key
, value
))
115 except Exception, ex
:
116 warn("Failed to write to cache %s: %s=%s: %s", cache_path
, key
, value
, ex
)
def try_cleanup_distro_version(version):
	"""Try to turn a distribution version string into one readable by Zero Install.
	We do this by stripping off anything we can't parse.
	@return: the part we understood, or None if we couldn't parse anything
	@rtype: str"""
	if ':' in version:
		version = version.split(':')[1]	# Skip 'epoch'
	version = version.replace('_', '-')
	match = re.match(_version_regexp, version)
	if match is None:
		return None
	version, revision = match.groups()
	if revision is None:
		return version
	# revision starts with "-r"; keep the digits, re-join with a plain "-"
	return '%s-%s' % (version, revision[2:])
135 class Distribution(object):
136 """Represents a distribution with which we can integrate.
137 Sub-classes should specialise this to integrate with the package managers of
138 particular distributions. This base class ignores the native package manager.
143 def get_package_info(self
, package
, factory
):
144 """Get information about the given package.
145 Add zero or more implementations using the factory (typically at most two
146 will be added; the currently installed version and the latest available).
147 @param package: package name (e.g. "gimp")
149 @param factory: function for creating new DistributionImplementation objects from IDs
150 @type factory: str -> L{model.DistributionImplementation}
154 def get_score(self
, distribution
):
155 """Indicate how closely the host distribution matches this one.
156 The <package-implementation> with the highest score is passed
157 to L{Distribution.get_package_info}. If several elements get
158 the same score, get_package_info is called for all of them.
159 @param distribution: a distribution name
160 @type distribution: str
161 @return: an integer, or None if there is no match at all
166 def get_feed(self
, master_feed
):
167 """Generate a feed containing information about distribution packages.
168 This should immediately return a feed containing an implementation for the
169 package if it's already installed. Information about versions that could be
170 installed using the distribution's package manager can be added asynchronously
171 later (see L{fetch_candidates}).
172 @param master_feed: feed containing the <package-implementation> elements
173 @type master_feed: L{model.ZeroInstallFeed}
174 @rtype: L{model.ZeroInstallFeed}"""
176 feed
= model
.ZeroInstallFeed(None)
177 feed
.url
= 'distribution:' + master_feed
.url
179 for item
, item_attrs
in master_feed
.get_package_impls(self
):
180 package
= item_attrs
.get('package', None)
182 raise model
.InvalidInterface(_("Missing 'package' attribute on %s") % item
)
184 def factory(id, only_if_missing
= False, installed
= True):
185 assert id.startswith('package:')
186 if id in feed
.implementations
:
189 warn(_("Duplicate ID '%s' for DistributionImplementation"), id)
190 impl
= model
.DistributionImplementation(feed
, id, self
)
191 feed
.implementations
[id] = impl
193 impl
.installed
= installed
194 impl
.metadata
= item_attrs
196 item_main
= item_attrs
.get('main', None)
197 if item_main
and not item_main
.startswith('/'):
198 raise model
.InvalidInterface(_("'main' attribute must be absolute, but '%s' doesn't start with '/'!") %
200 impl
.main
= item_main
201 impl
.upstream_stability
= model
.packaged
205 self
.get_package_info(package
, factory
)
208 def fetch_candidates(self
, master_feed
):
209 """Collect information about versions we could install using
210 the distribution's package manager. On success, the distribution
211 feed in iface_cache is updated.
212 @return: a L{tasks.Blocker} if the task is in progress, or None if not"""
213 if self
.packagekit
.available
:
214 package_names
= [item
.getAttribute("package") for item
, item_attrs
in master_feed
.get_package_impls(self
)]
215 return self
.packagekit
.fetch_candidates(package_names
)
218 def packagekit(self
):
219 """For use by subclasses.
220 @rtype: L{packagekit.PackageKit}"""
221 if not self
._packagekit
:
222 from zeroinstall
.injector
import packagekit
223 self
._packagekit
= packagekit
.PackageKit()
224 return self
._packagekit
class WindowsDistribution(Distribution):
	"""Stub for Windows: no native package manager integration yet."""
	def get_package_info(self, package, factory):
		# Nothing implemented; the sketch below shows how a special case
		# for the OpenJDK JRE might look.
		pass
		#if package == 'openjdk-6-jre':
		#	impl = factory('package:windows:%s:%s' % (package, '6'))
		#	impl.version = model.parse_version('6')
		#	impl.main = os.environ["ProgramFiles"] + r"\Java\jre6\bin\java.exe"
234 class CachedDistribution(Distribution
):
235 """For distributions where querying the package database is slow (e.g. requires running
236 an external command), we cache the results.
238 @deprecated: use Cache instead
241 def __init__(self
, db_status_file
):
242 """@param db_status_file: update the cache when the timestamp of this file changes"""
243 self
._status
_details
= os
.stat(db_status_file
)
246 self
.cache_dir
= basedir
.save_cache_path(namespaces
.config_site
,
247 namespaces
.config_prog
)
251 except Exception, ex
:
252 info(_("Failed to load distribution database cache (%s). Regenerating..."), ex
)
254 self
.generate_cache()
256 except Exception, ex
:
257 warn(_("Failed to regenerate distribution database cache: %s"), ex
)
259 def _load_cache(self
):
260 """Load {cache_leaf} cache file into self.versions if it is available and up-to-date.
261 Throws an exception if the cache should be (re)created."""
262 stream
= file(os
.path
.join(self
.cache_dir
, self
.cache_leaf
))
268 name
, value
= line
.split(': ')
269 if name
== 'mtime' and int(value
) != int(self
._status
_details
.st_mtime
):
270 raise Exception(_("Modification time of package database file has changed"))
271 if name
== 'size' and int(value
) != self
._status
_details
.st_size
:
272 raise Exception(_("Size of package database file has changed"))
273 if name
== 'version':
274 cache_version
= int(value
)
276 raise Exception(_('Invalid cache format (bad header)'))
278 if cache_version
is None:
279 raise Exception(_('Old cache format'))
281 versions
= self
.versions
283 package
, version
, zi_arch
= line
[:-1].split('\t')
284 versionarch
= (version
, intern(zi_arch
))
285 if package
not in versions
:
286 versions
[package
] = [versionarch
]
288 versions
[package
].append(versionarch
)
290 def _write_cache(self
, cache
):
291 #cache.sort() # Might be useful later; currently we don't care
293 fd
, tmpname
= tempfile
.mkstemp(prefix
= 'zeroinstall-cache-tmp',
294 dir = self
.cache_dir
)
296 stream
= os
.fdopen(fd
, 'wb')
297 stream
.write('version: 2\n')
298 stream
.write('mtime: %d\n' % int(self
._status
_details
.st_mtime
))
299 stream
.write('size: %d\n' % self
._status
_details
.st_size
)
302 stream
.write(line
+ '\n')
306 os
.path
.join(self
.cache_dir
,
312 # Maps machine type names used in packages to their Zero Install versions
313 _canonical_machine
= {
# The host's machine type, in Zero Install form
host_machine = arch.canonicalize_machine(platform.uname()[4])

def canonical_machine(package_machine):
	"""Map a package-manager machine name to the Zero Install machine name."""
	machine = _canonical_machine.get(package_machine, None)
	if machine is None:
		# Safe default if we can't understand the arch
		# NOTE(review): fallback reconstructed as the lower-cased input — confirm
		return package_machine.lower()
	return machine
335 class DebianDistribution(Distribution
):
336 """A dpkg-based distribution."""
338 cache_leaf
= 'dpkg-status.cache'
340 def __init__(self
, dpkg_status
, pkgcache
):
341 self
.dpkg_cache
= Cache('dpkg-status.cache', dpkg_status
, 2)
344 def _query_installed_package(self
, package
):
345 null
= os
.open('/dev/null', os
.O_WRONLY
)
346 child
= subprocess
.Popen(["dpkg-query", "-W", "--showformat=${Version}\t${Architecture}\t${Status}\n", "--", package
],
347 stdout
= subprocess
.PIPE
, stderr
= null
)
349 stdout
, stderr
= child
.communicate()
351 for line
in stdout
.split('\n'):
352 if not line
: continue
353 version
, debarch
, status
= line
.split('\t', 2)
354 if not status
.endswith(' installed'): continue
355 clean_version
= try_cleanup_distro_version(version
)
357 return '%s\t%s' % (clean_version
, canonical_machine(debarch
.strip()))
359 warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version
, 'package': package
})
363 def get_package_info(self
, package
, factory
):
364 # Add any already-installed package...
365 installed_cached_info
= self
._get
_dpkg
_info
(package
)
367 if installed_cached_info
!= '-':
368 installed_version
, machine
= installed_cached_info
.split('\t')
369 impl
= factory('package:deb:%s:%s:%s' % (package
, installed_version
, machine
))
370 impl
.version
= model
.parse_version(installed_version
)
372 impl
.machine
= machine
374 installed_version
= None
376 # Add any uninstalled candidates (note: only one of these two methods will add anything)
379 self
.packagekit
.get_candidates(package
, factory
, 'package:deb')
382 cached
= self
.apt_cache
.get(package
, None)
384 candidate_version
= cached
['version']
385 candidate_arch
= cached
['arch']
386 if candidate_version
and candidate_version
!= installed_version
:
387 impl
= factory('package:deb:%s:%s:%s' % (package
, candidate_version
, candidate_arch
), installed
= False)
388 impl
.version
= model
.parse_version(candidate_version
)
389 if candidate_arch
!= '*':
390 impl
.machine
= candidate_arch
391 def install(handler
):
392 raise model
.SafeException(_("This program depends on '%s', which is a package that is available through your distribution. "
393 "Please install it manually using your distribution's tools and try again.") % package
)
394 impl
.download_sources
.append(model
.DistributionSource(package
, cached
['size'], install
, needs_confirmation
= False))
396 def get_score(self
, disto_name
):
397 return int(disto_name
== 'Debian')
399 def _get_dpkg_info(self
, package
):
400 installed_cached_info
= self
.dpkg_cache
.get(package
)
401 if installed_cached_info
== None:
402 installed_cached_info
= self
._query
_installed
_package
(package
)
403 self
.dpkg_cache
.put(package
, installed_cached_info
)
405 return installed_cached_info
407 def fetch_candidates(self
, master_feed
):
408 package_names
= [item
.getAttribute("package") for item
, item_attrs
in master_feed
.get_package_impls(self
)]
410 if self
.packagekit
.available
:
411 return self
.packagekit
.fetch_candidates(package_names
)
413 # No PackageKit. Use apt-cache directly.
414 for package
in package_names
:
415 # Check to see whether we could get a newer version using apt-get
417 null
= os
.open('/dev/null', os
.O_WRONLY
)
418 child
= subprocess
.Popen(['apt-cache', 'show', '--no-all-versions', '--', package
], stdout
= subprocess
.PIPE
, stderr
= null
)
421 arch
= version
= size
= None
422 for line
in child
.stdout
:
424 if line
.startswith('Version: '):
426 version
= try_cleanup_distro_version(version
)
427 elif line
.startswith('Architecture: '):
428 arch
= canonical_machine(line
[14:].strip())
429 elif line
.startswith('Size: '):
430 size
= int(line
[6:].strip())
432 cached
= {'version': version
, 'arch': arch
, 'size': size
}
436 except Exception, ex
:
437 warn("'apt-cache show %s' failed: %s", package
, ex
)
439 # (multi-arch support? can there be multiple candidates?)
440 self
.apt_cache
[package
] = cached
442 class RPMDistribution(CachedDistribution
):
443 """An RPM-based distribution."""
445 cache_leaf
= 'rpm-status.cache'
447 def generate_cache(self
):
450 for line
in os
.popen("rpm -qa --qf='%{NAME}\t%{VERSION}-%{RELEASE}\t%{ARCH}\n'"):
451 package
, version
, rpmarch
= line
.split('\t', 2)
452 if package
== 'gpg-pubkey':
454 zi_arch
= canonical_machine(rpmarch
.strip())
455 clean_version
= try_cleanup_distro_version(version
)
457 cache
.append('%s\t%s\t%s' % (package
, clean_version
, zi_arch
))
459 warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version
, 'package': package
})
461 self
._write
_cache
(cache
)
463 def get_package_info(self
, package
, factory
):
464 # Add installed versions...
465 versions
= self
.versions
.get(package
, [])
467 for version
, machine
in versions
:
468 impl
= factory('package:rpm:%s:%s:%s' % (package
, version
, machine
))
469 impl
.version
= model
.parse_version(version
)
471 impl
.machine
= machine
473 # Add any uninstalled candidates found by PackageKit
474 self
.packagekit
.get_candidates(package
, factory
, 'package:rpm')
476 def get_score(self
, disto_name
):
477 return int(disto_name
== 'RPM')
479 class SlackDistribution(Distribution
):
480 """A Slack-based distribution."""
482 def __init__(self
, packages_dir
):
483 self
._packages
_dir
= packages_dir
485 def get_package_info(self
, package
, factory
):
486 # Add installed versions...
487 for entry
in os
.listdir(self
._packages
_dir
):
488 name
, version
, arch
, build
= entry
.rsplit('-', 3)
490 zi_arch
= canonical_machine(arch
)
491 clean_version
= try_cleanup_distro_version("%s-%s" % (version
, build
))
492 if not clean_version
:
493 warn(_("Can't parse distribution version '%(version)s' for package '%(package)s'"), {'version': version
, 'package': name
})
496 impl
= factory('package:slack:%s:%s:%s' % \
497 (package
, clean_version
, zi_arch
))
498 impl
.version
= model
.parse_version(clean_version
)
500 impl
.machine
= zi_arch
502 # Add any uninstalled candidates found by PackageKit
503 self
.packagekit
.get_candidates(package
, factory
, 'package:slack')
505 def get_score(self
, disto_name
):
506 return int(disto_name
== 'Slack')
508 class GentooDistribution(Distribution
):
510 def __init__(self
, pkgdir
):
511 self
._pkgdir
= pkgdir
513 def get_package_info(self
, package
, factory
):
514 # Add installed versions...
515 _version_start_reqexp
= '-[0-9]'
517 if package
.count('/') != 1: return
519 category
, leafname
= package
.split('/')
520 category_dir
= os
.path
.join(self
._pkgdir
, category
)
521 match_prefix
= leafname
+ '-'
523 if not os
.path
.isdir(category_dir
): return
525 for filename
in os
.listdir(category_dir
):
526 if filename
.startswith(match_prefix
) and filename
[len(match_prefix
)].isdigit():
527 name
= file(os
.path
.join(category_dir
, filename
, 'PF')).readline().strip()
529 match
= re
.search(_version_start_reqexp
, name
)
531 warn(_('Cannot parse version from Gentoo package named "%(name)s"'), {'name': name
})
534 version
= try_cleanup_distro_version(name
[match
.start() + 1:])
536 if category
== 'app-emulation' and name
.startswith('emul-'):
537 __
, __
, machine
, __
= name
.split('-', 3)
539 machine
, __
= file(os
.path
.join(category_dir
, filename
, 'CHOST')).readline().split('-', 1)
540 machine
= arch
.canonicalize_machine(machine
)
542 impl
= factory('package:gentoo:%s:%s:%s' % \
543 (package
, version
, machine
))
544 impl
.version
= model
.parse_version(version
)
545 impl
.machine
= machine
547 # Add any uninstalled candidates found by PackageKit
548 self
.packagekit
.get_candidates(package
, factory
, 'package:gentoo')
550 def get_score(self
, disto_name
):
551 return int(disto_name
== 'Gentoo')
553 class PortsDistribution(Distribution
):
555 def __init__(self
, pkgdir
):
556 self
._pkgdir
= pkgdir
558 def get_package_info(self
, package
, factory
):
559 _name_version_regexp
= '^(.+)-([^-]+)$'
561 nameversion
= re
.compile(_name_version_regexp
)
562 for pkgname
in os
.listdir(self
._pkgdir
):
563 pkgdir
= os
.path
.join(self
._pkgdir
, pkgname
)
564 if not os
.path
.isdir(pkgdir
): continue
566 #contents = file(os.path.join(pkgdir, '+CONTENTS')).readline().strip()
568 match
= nameversion
.search(pkgname
)
570 warn(_('Cannot parse version from Ports package named "%(pkgname)s"'), {'pkgname': pkgname
})
573 name
= match
.group(1)
576 version
= try_cleanup_distro_version(match
.group(2))
578 machine
= host_machine
580 impl
= factory('package:ports:%s:%s:%s' % \
581 (package
, version
, machine
))
582 impl
.version
= model
.parse_version(version
)
583 impl
.machine
= machine
585 def get_score(self
, disto_name
):
586 return int(disto_name
== 'Ports')
_host_distribution = None

def get_host_distribution():
	"""Get a Distribution suitable for the host operating system.
	Calling this twice will return the same object.
	@rtype: L{Distribution}"""
	global _host_distribution
	if _host_distribution is None:
		dpkg_db_status = '/var/lib/dpkg/status'
		pkgcache = '/var/cache/apt/pkgcache.bin'
		_rpm_db = '/var/lib/rpm/Packages'
		_slack_db = '/var/log/packages'
		_pkg_db = '/var/db/pkg'

		# NOTE(review): Windows test reconstructed as os.name — confirm
		if os.name == "nt":
			_host_distribution = WindowsDistribution()
		elif os.path.isdir(_pkg_db):
			# /var/db/pkg is used by both Gentoo (Linux) and FreeBSD Ports
			if sys.platform.startswith("linux"):
				_host_distribution = GentooDistribution(_pkg_db)
			elif sys.platform.startswith("freebsd"):
				_host_distribution = PortsDistribution(_pkg_db)
		elif os.access(dpkg_db_status, os.R_OK):
			_host_distribution = DebianDistribution(dpkg_db_status, pkgcache)
		elif os.path.isfile(_rpm_db):
			_host_distribution = RPMDistribution(_rpm_db)
		elif os.path.isdir(_slack_db):
			_host_distribution = SlackDistribution(_slack_db)
		else:
			# No recognised package manager: fall back to the no-op base class
			_host_distribution = Distribution()

	return _host_distribution