[unleashed-pkg5.git] / src / setup.py
1 #!/usr/bin/python2.7
3 # CDDL HEADER START
5 # The contents of this file are subject to the terms of the
6 # Common Development and Distribution License (the "License").
7 # You may not use this file except in compliance with the License.
9 # You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
10 # or http://www.opensolaris.org/os/licensing.
11 # See the License for the specific language governing permissions
12 # and limitations under the License.
14 # When distributing Covered Code, include this CDDL HEADER in each
15 # file and include the License file at usr/src/OPENSOLARIS.LICENSE.
16 # If applicable, add the following below this CDDL HEADER, with the
17 # fields enclosed by brackets "[]" replaced with your own identifying
18 # information: Portions Copyright [yyyy] [name of copyright owner]
20 # CDDL HEADER END
22 # Copyright (c) 2008, 2015, Oracle and/or its affiliates. All rights reserved.
25 from __future__ import print_function
26 import errno
27 import fnmatch
28 import os
29 import platform
30 import stat
31 import sys
32 import shutil
33 import re
34 import subprocess
35 import tarfile
36 import tempfile
37 import urllib
38 import py_compile
39 import hashlib
40 import time
41 import StringIO
43 from distutils.errors import DistutilsError, DistutilsFileError
44 from distutils.core import setup
45 from distutils.cmd import Command
46 from distutils.command.install import install as _install
47 from distutils.command.install_data import install_data as _install_data
48 from distutils.command.install_lib import install_lib as _install_lib
49 from distutils.command.build import build as _build
50 from distutils.command.build_ext import build_ext as _build_ext
51 from distutils.command.build_py import build_py as _build_py
52 from distutils.command.bdist import bdist as _bdist
53 from distutils.command.clean import clean as _clean
54 from distutils.dist import Distribution
55 from distutils import log
57 from distutils.sysconfig import get_python_inc
58 import distutils.dep_util as dep_util
59 import distutils.dir_util as dir_util
60 import distutils.file_util as file_util
61 import distutils.util as util
62 import distutils.ccompiler
63 from distutils.unixccompiler import UnixCCompiler
65 osname = platform.uname()[0].lower()
66 ostype = arch = 'unknown'
67 if osname == 'sunos' or osname == 'unleashed':
68 arch = platform.processor()
69 ostype = "posix"
70 elif osname == 'linux':
71 arch = "linux_" + platform.machine()
72 ostype = "posix"
73 elif osname == 'windows':
74 arch = osname
75 ostype = "windows"
76 elif osname == 'darwin':
77 arch = osname
78 ostype = "posix"
79 elif osname == 'aix':
80 arch = "aix"
81 ostype = "posix"
83 pwd = os.path.normpath(sys.path[0])
85 # The version of pylint that we must have in order to run the pylint checks.
86 req_pylint_version = "0.25.2"
89 # Unbuffer stdout and stderr. This helps to ensure that subprocess output
90 # is properly interleaved with output from this program.
92 sys.stdout = os.fdopen(sys.stdout.fileno(), "w", 0)
93 sys.stderr = os.fdopen(sys.stderr.fileno(), "w", 0)
95 dist_dir = os.path.normpath(os.path.join(pwd, os.pardir, "proto", "dist_" + arch))
96 build_dir = os.path.normpath(os.path.join(pwd, os.pardir, "proto", "build_" + arch))
97 if "ROOT" in os.environ and os.environ["ROOT"] != "":
98 root_dir = os.environ["ROOT"]
99 else:
100 root_dir = os.path.normpath(os.path.join(pwd, os.pardir, "proto", "root_" + arch))
101 pkgs_dir = os.path.normpath(os.path.join(pwd, os.pardir, "packages", arch))
102 extern_dir = os.path.normpath(os.path.join(pwd, "extern"))
104 # Extract Python minor version.
105 py_version = '.'.join(platform.python_version_tuple()[:2])
106 assert py_version in ('2.6', '2.7')
107 py_install_dir = 'usr/lib/python' + py_version + '/vendor-packages'
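# For a Python 2.7 build, for example, py_install_dir works out to
# 'usr/lib/python2.7/vendor-packages' (relative to the install root).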
109 scripts_dir = 'usr/bin'
110 lib_dir = 'usr/lib'
111 svc_method_dir = 'lib/svc/method'
112 svc_share_dir = 'lib/svc/share'
114 man1_dir = 'usr/share/man/man1'
115 man1m_dir = 'usr/share/man/man1m'
116 man5_dir = 'usr/share/man/man5'
117 man1_ja_JP_dir = 'usr/share/man/ja_JP.UTF-8/man1'
118 man1m_ja_JP_dir = 'usr/share/man/ja_JP.UTF-8/man1m'
119 man5_ja_JP_dir = 'usr/share/man/ja_JP.UTF-8/man5'
120 man1_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man1'
121 man1m_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man1m'
122 man5_zh_CN_dir = 'usr/share/man/zh_CN.UTF-8/man5'
124 ignored_deps_dir = 'usr/share/pkg/ignored_deps'
125 rad_dir = 'usr/share/lib/pkg'
126 resource_dir = 'usr/share/lib/pkg'
127 transform_dir = 'usr/share/pkg/transforms'
128 smf_app_dir = 'lib/svc/manifest/application/pkg'
129 execattrd_dir = 'etc/security/exec_attr.d'
130 authattrd_dir = 'etc/security/auth_attr.d'
131 userattrd_dir = 'etc/user_attr.d'
132 sysrepo_dir = 'etc/pkg/sysrepo'
133 sysrepo_logs_dir = 'var/log/pkg/sysrepo'
134 sysrepo_cache_dir = 'var/cache/pkg/sysrepo'
135 depot_dir = 'etc/pkg/depot'
136 depot_conf_dir = 'etc/pkg/depot/conf.d'
137 depot_logs_dir = 'var/log/pkg/depot'
138 depot_cache_dir = 'var/cache/pkg/depot'
139 locale_dir = 'usr/share/locale'
140 mirror_logs_dir = 'var/log/pkg/mirror'
141 mirror_cache_dir = 'var/cache/pkg/mirror'
144 # A list of (source, destination) tuples of modules which should be
145 # hardlinked together if the OS supports it, and otherwise copied.
146 hardlink_modules = []
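# Each entry, if any, is a (source, destination) pair of module paths relative
# to the install root (without the .py/.pyc suffix).  A purely hypothetical
# entry might look like:
#   hardlink_modules = [
#       ("usr/lib/python2.7/vendor-packages/pkg/foo",
#        "usr/lib/python2.7/vendor-packages/pkg/bar"),
#   ]
# The list is empty in this build.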
148 scripts_sunos = {
149 scripts_dir: [
150 ['client.py', 'pkg'],
151 ['pkgdep.py', 'pkgdepend'],
152 ['pkgrepo.py', 'pkgrepo'],
153 ['util/publish/pkgdiff.py', 'pkgdiff'],
154 ['util/publish/pkgfmt.py', 'pkgfmt'],
155 ['util/publish/pkglint.py', 'pkglint'],
156 ['util/publish/pkgmerge.py', 'pkgmerge'],
157 ['util/publish/pkgmogrify.py', 'pkgmogrify'],
158 ['util/publish/pkgsurf.py', 'pkgsurf'],
159 ['publish.py', 'pkgsend'],
160 ['pull.py', 'pkgrecv'],
161 ['sign.py', 'pkgsign'],
163 lib_dir: [
164 ['depot.py', 'pkg.depotd'],
165 ['sysrepo.py', 'pkg.sysrepo'],
166 ['depot-config.py', "pkg.depot-config"]
168 svc_method_dir: [
169 ['svc/svc-pkg-depot', 'svc-pkg-depot'],
170 ['svc/svc-pkg-mirror', 'svc-pkg-mirror'],
171 ['svc/svc-pkg-repositories-setup',
172 'svc-pkg-repositories-setup'],
173 ['svc/svc-pkg-server', 'svc-pkg-server'],
174 ['svc/svc-pkg-sysrepo', 'svc-pkg-sysrepo'],
176 svc_share_dir: [
177 ['svc/pkg5_include.sh', 'pkg5_include.sh'],
179 rad_dir: [
180 ["rad-invoke.py", "rad-invoke"],
184 scripts_windows = {
185 scripts_dir: [
186 ['client.py', 'client.py'],
187 ['pkgrepo.py', 'pkgrepo.py'],
188 ['publish.py', 'publish.py'],
189 ['pull.py', 'pull.py'],
190 ['scripts/pkg.bat', 'pkg.bat'],
191 ['scripts/pkgsend.bat', 'pkgsend.bat'],
192 ['scripts/pkgrecv.bat', 'pkgrecv.bat'],
194 lib_dir: [
195 ['depot.py', 'depot.py'],
196 ['scripts/pkg.depotd.bat', 'pkg.depotd.bat'],
200 scripts_other_unix = {
201 scripts_dir: [
202 ['client.py', 'client.py'],
203 ['pkgdep.py', 'pkgdep'],
204 ['util/publish/pkgdiff.py', 'pkgdiff'],
205 ['util/publish/pkgfmt.py', 'pkgfmt'],
206 ['util/publish/pkgmogrify.py', 'pkgmogrify'],
207 ['pull.py', 'pull.py'],
208 ['publish.py', 'publish.py'],
209 ['scripts/pkg.sh', 'pkg'],
210 ['scripts/pkgsend.sh', 'pkgsend'],
211 ['scripts/pkgrecv.sh', 'pkgrecv'],
213 lib_dir: [
214 ['depot.py', 'depot.py'],
215 ['scripts/pkg.depotd.sh', 'pkg.depotd'],
217 rad_dir: [
218 ["rad-invoke.py", "rad-invoke"],
222 # indexed by 'osname'
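# Each per-OS table below maps an install directory to a list of
# [source, installed-name] pairs; install_func copies each source file to
# <root>/<dir>/<installed-name> and marks it executable (see install_func.run).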
223 scripts = {
224 "sunos": scripts_sunos,
225 "unleashed": scripts_sunos,
226 "linux": scripts_other_unix,
227 "windows": scripts_windows,
228 "darwin": scripts_other_unix,
229 "aix" : scripts_other_unix,
230 "unknown": scripts_sunos,
233 man1_files = [
234 'man/pkg.1',
235 'man/pkgdepend.1',
236 'man/pkgdiff.1',
237 'man/pkgfmt.1',
238 'man/pkglint.1',
239 'man/pkgmerge.1',
240 'man/pkgmogrify.1',
241 'man/pkgsend.1',
242 'man/pkgsign.1',
243 'man/pkgsurf.1',
244 'man/pkgrecv.1',
245 'man/pkgrepo.1',
247 man1m_files = [
248 'man/pkg.depotd.1m',
249 'man/pkg.depot-config.1m',
250 'man/pkg.sysrepo.1m'
252 man5_files = [
253 'man/pkg.5'
256 man1_ja_files = [
257 'man/ja_JP/pkg.1',
258 'man/ja_JP/pkgdepend.1',
259 'man/ja_JP/pkgdiff.1',
260 'man/ja_JP/pkgfmt.1',
261 'man/ja_JP/pkglint.1',
262 'man/ja_JP/pkgmerge.1',
263 'man/ja_JP/pkgmogrify.1',
264 'man/ja_JP/pkgsend.1',
265 'man/ja_JP/pkgsign.1',
266 'man/ja_JP/pkgrecv.1',
267 'man/ja_JP/pkgrepo.1',
269 man1m_ja_files = [
270 'man/ja_JP/pkg.depotd.1m',
271 'man/ja_JP/pkg.sysrepo.1m'
273 man5_ja_files = [
274 'man/ja_JP/pkg.5'
277 man1_zh_CN_files = [
278 'man/zh_CN/pkg.1',
279 'man/zh_CN/pkgdepend.1',
280 'man/zh_CN/pkgdiff.1',
281 'man/zh_CN/pkgfmt.1',
282 'man/zh_CN/pkglint.1',
283 'man/zh_CN/pkgmerge.1',
284 'man/zh_CN/pkgmogrify.1',
285 'man/zh_CN/pkgsend.1',
286 'man/zh_CN/pkgsign.1',
287 'man/zh_CN/pkgrecv.1',
288 'man/zh_CN/pkgrepo.1',
290 man1m_zh_CN_files = [
291 'man/zh_CN/pkg.depotd.1m',
292 'man/zh_CN/pkg.sysrepo.1m'
294 man5_zh_CN_files = [
295 'man/zh_CN/pkg.5'
298 packages = [
299 'pkg',
300 'pkg.actions',
301 'pkg.bundle',
302 'pkg.client',
303 'pkg.client.linkedimage',
304 'pkg.client.transport',
305 'pkg.file_layout',
306 'pkg.flavor',
307 'pkg.lint',
308 'pkg.portable',
309 'pkg.publish',
310 'pkg.server'
313 pylint_targets = [
314 'pkg.altroot',
315 'pkg.client.__init__',
316 'pkg.client.api',
317 'pkg.client.linkedimage',
318 'pkg.client.pkg_solver',
319 'pkg.client.pkgdefs',
320 'pkg.client.pkgremote',
321 'pkg.client.plandesc',
322 'pkg.client.printengine',
323 'pkg.client.progress',
324 'pkg.misc',
325 'pkg.pipeutils',
328 web_files = []
329 for entry in os.walk("web"):
330 web_dir, dirs, files = entry
331 if not files:
332 continue
333 web_files.append((os.path.join(resource_dir, web_dir), [
334 os.path.join(web_dir, f) for f in files
335 if f != "Makefile"
337 # Install the same set of files found under "en/" into "__LOCALE__/" as
338 # well, for the localizable file package (for themes, only the
339 # "oracle.com" theme is installed).
340 if os.path.basename(web_dir) == "en" and \
341 os.path.dirname(web_dir) in ("web", "web/_themes/oracle.com"):
342 web_files.append((os.path.join(resource_dir,
343 os.path.dirname(web_dir), "__LOCALE__"), [
344 os.path.join(web_dir, f) for f in files
345 if f != "Makefile"
348 smf_app_files = [
349 'svc/pkg-depot.xml',
350 'svc/pkg-mirror.xml',
351 'svc/pkg-repositories-setup.xml',
352 'svc/pkg-server.xml',
353 'svc/pkg-system-repository.xml',
354 'svc/zoneproxy-client.xml',
355 'svc/zoneproxyd.xml'
357 resource_files = [
358 'util/opensolaris.org.sections',
359 'util/pkglintrc',
361 transform_files = [
362 'util/publish/transforms/developer',
363 'util/publish/transforms/documentation',
364 'util/publish/transforms/locale',
365 'util/publish/transforms/smf-manifests'
367 sysrepo_files = [
368 'util/apache2/sysrepo/sysrepo_p5p.py',
369 'util/apache2/sysrepo/sysrepo_httpd.conf.mako',
370 'util/apache2/sysrepo/sysrepo_publisher_response.mako',
372 sysrepo_log_stubs = [
373 'util/apache2/sysrepo/logs/access_log',
374 'util/apache2/sysrepo/logs/error_log',
375 'util/apache2/sysrepo/logs/rewrite.log',
377 depot_files = [
378 'util/apache2/depot/depot.conf.mako',
379 'util/apache2/depot/depot_httpd.conf.mako',
380 'util/apache2/depot/depot_index.py',
381 'util/apache2/depot/depot_httpd_ssl_protocol.conf',
383 depot_log_stubs = [
384 'util/apache2/depot/logs/access_log',
385 'util/apache2/depot/logs/error_log',
386 'util/apache2/depot/logs/rewrite.log',
388 ignored_deps_files = []
390 # The apache-based depot includes a .shtml file that we add to the resource dir
391 web_files.append((os.path.join(resource_dir, "web"),
392 ["util/apache2/depot/repos.shtml"]))
393 execattrd_files = [
394 'util/misc/exec_attr.d/package:pkg',
396 authattrd_files = ['util/misc/auth_attr.d/package:pkg']
397 userattrd_files = ['util/misc/user_attr.d/package:pkg']
398 pkg_locales = \
399 'ar ca cs de es fr he hu id it ja ko nl pl pt_BR ru sk sv zh_CN zh_HK zh_TW'.split()
401 sysattr_srcs = [
402 'modules/sysattr.c'
404 syscallat_srcs = [
405 'modules/syscallat.c'
407 pspawn_srcs = [
408 'modules/pspawn.c'
410 elf_srcs = [
411 'modules/elf.c',
412 'modules/elfextract.c',
413 'modules/liblist.c',
415 arch_srcs = [
416 'modules/arch.c'
418 _actions_srcs = [
419 'modules/actions/_actions.c'
421 _actcomm_srcs = [
422 'modules/actions/_common.c'
424 _varcet_srcs = [
425 'modules/_varcet.c'
427 solver_srcs = [
428 'modules/solver/solver.c',
429 'modules/solver/py_solver.c'
431 solver_link_args = ["-lm", "-lc"]
432 if osname == 'sunos' or osname == 'unleashed':
433 solver_link_args = ["-ztext"] + solver_link_args
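# (-ztext asks the Solaris/illumos link-editor to fail the link if the
# resulting shared object would contain text relocations.)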
435 # Runs pylint over the IPS Python source code
436 class pylint_func(Command):
437 description = "Runs pylint tools over IPS python source code"
438 user_options = []
440 def initialize_options(self):
441 pass
443 def finalize_options(self):
444 pass
446 # Make string shell-friendly
447 @staticmethod
448 def escape(astring):
449 return astring.replace(' ', '\\ ')
451 def run(self, quiet=False):
453 def supported_pylint_ver(version):
454 """Compare the installed version against the version
455 we require to build with, returning False if the version
456 is too old. It's tempting to use pkg.version.Version
457 here, but since that's a build artifact, we'll do it
458 the long way."""
459 inst_pylint_ver = version.split(".")
460 req_pylint_ver = req_pylint_version.split(".")
462 # if the lists are of different lengths, we just
463 # compare with the precision we have.
464 vers_comp = zip(inst_pylint_ver, req_pylint_ver)
465 for inst, req in vers_comp:
466 try:
467 if int(inst) < int(req):
468 return False
469 except ValueError:
470 # if we somehow get non-numeric version
471 # components, we ignore them.
472 continue
473 return True
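# Illustrative behaviour of the helper above (note that it compares per
# numeric component, with whatever precision the two strings share):
#   supported_pylint_ver("0.25.2")  -> True
#   supported_pylint_ver("0.26.2")  -> True
#   supported_pylint_ver("0.24.1")  -> False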
475 # it's fine to default to the required version - the build will
476 # break if the installed version is incompatible and $PYLINT_VER
477 # didn't get set, somehow.
478 pylint_ver_str = os.environ.get("PYLINT_VER",
479 req_pylint_version)
480 if pylint_ver_str == "":
481 pylint_ver_str = req_pylint_version
483 if os.environ.get("PKG_SKIP_PYLINT"):
484 log.warn("WARNING: skipping pylint checks: "
485 "$PKG_SKIP_PYLINT was set")
486 return
487 elif not pylint_ver_str or \
488 not supported_pylint_ver(pylint_ver_str):
489 log.warn("WARNING: skipping pylint checks: the "
490 "installed version {0} is older than version {1}".format(
491 pylint_ver_str, req_pylint_version))
492 return
494 proto = os.path.join(root_dir, py_install_dir)
495 sys.path.insert(0, proto)
497 # Insert tests directory onto sys.path so any custom checkers
498 # can be found.
499 sys.path.insert(0, os.path.join(pwd, 'tests'))
500 # assumes pylint is accessible on the sys.path
501 from pylint import lint
504 # For some reason, the load-plugins option, when used in the
505 # rcfile, does not work, so we put it here instead, to load
506 # our custom checkers.
508 # Unfortunately, pylint seems pretty fragile and will crash if
509 # we try to run it over all the current pkg source. Hence for
510 # now we only run it over a subset of the source. As source
511 # files are made pylint clean they should be added to the
512 # pylint_targets list.
514 args = ['--load-plugins=multiplatform']
515 if quiet:
516 args += ['--reports=no']
517 args += ['--rcfile', os.path.join(pwd, 'tests', 'pylintrc')]
518 args += pylint_targets
519 lint.Run(args)
522 class pylint_func_quiet(pylint_func):
524 def run(self, quiet=False):
525 pylint_func.run(self, quiet=True)
528 include_dirs = [ 'modules' ]
529 lint_flags = [ '-u', '-axms', '-erroff=E_NAME_DEF_NOT_USED2' ]
531 # Runs lint on the extension module source code
532 class clint_func(Command):
533 description = "Runs lint tools over IPS C extension source code"
534 user_options = []
536 def initialize_options(self):
537 pass
539 def finalize_options(self):
540 pass
542 # Make string shell-friendly
543 @staticmethod
544 def escape(astring):
545 return astring.replace(' ', '\\ ')
547 def run(self):
548 if "LINT" in os.environ and os.environ["LINT"] != "":
549 lint = [os.environ["LINT"]]
550 else:
551 lint = ['lint']
552 if osname in ['sunos', 'unleashed', 'linux']:
553 archcmd = lint + lint_flags + \
554 ['-D_FILE_OFFSET_BITS=64'] + \
555 ["{0}{1}".format("-I", k) for k in include_dirs] + \
556 ['-I' + self.escape(get_python_inc())] + \
557 arch_srcs
558 elfcmd = lint + lint_flags + \
559 ["{0}{1}".format("-I", k) for k in include_dirs] + \
560 ['-I' + self.escape(get_python_inc())] + \
561 ["{0}{1}".format("-l", k) for k in elf_libraries] + \
562 elf_srcs
563 _actionscmd = lint + lint_flags + \
564 ["{0}{1}".format("-I", k) for k in include_dirs] + \
565 ['-I' + self.escape(get_python_inc())] + \
566 _actions_srcs
567 _actcommcmd = lint + lint_flags + \
568 ["{0}{1}".format("-I", k) for k in include_dirs] + \
569 ['-I' + self.escape(get_python_inc())] + \
570 _actcomm_srcs
571 _varcetcmd = lint + lint_flags + \
572 ["{0}{1}".format("-I", k) for k in include_dirs] + \
573 ['-I' + self.escape(get_python_inc())] + \
574 _varcet_srcs
575 pspawncmd = lint + lint_flags + \
576 ['-D_FILE_OFFSET_BITS=64'] + \
577 ["{0}{1}".format("-I", k) for k in include_dirs] + \
578 ['-I' + self.escape(get_python_inc())] + \
579 pspawn_srcs
580 syscallatcmd = lint + lint_flags + \
581 ['-D_FILE_OFFSET_BITS=64'] + \
582 ["{0}{1}".format("-I", k) for k in include_dirs] + \
583 ['-I' + self.escape(get_python_inc())] + \
584 syscallat_srcs
585 sysattrcmd = lint + lint_flags + \
586 ['-D_FILE_OFFSET_BITS=64'] + \
587 ["{0}{1}".format("-I", k) for k in include_dirs] + \
588 ['-I' + self.escape(get_python_inc())] + \
589 ["{0}{1}".format("-l", k) for k in sysattr_libraries] + \
590 sysattr_srcs
592 print(" ".join(archcmd))
593 os.system(" ".join(archcmd))
594 print(" ".join(elfcmd))
595 os.system(" ".join(elfcmd))
596 print(" ".join(_actionscmd))
597 os.system(" ".join(_actionscmd))
598 print(" ".join(_actcommcmd))
599 os.system(" ".join(_actcommcmd))
600 print(" ".join(_varcetcmd))
601 os.system(" ".join(_varcetcmd))
602 print(" ".join(pspawncmd))
603 os.system(" ".join(pspawncmd))
604 print(" ".join(syscallatcmd))
605 os.system(" ".join(syscallatcmd))
606 print(" ".join(sysattrcmd))
607 os.system(" ".join(sysattrcmd))
610 # Runs both C and Python lint
611 class lint_func(Command):
612 description = "Runs C and Python lint checkers"
613 user_options = []
615 def initialize_options(self):
616 pass
618 def finalize_options(self):
619 pass
621 # Make string shell-friendly
622 @staticmethod
623 def escape(astring):
624 return astring.replace(' ', '\\ ')
626 def run(self):
627 clint_func(Distribution()).run()
628 pylint_func(Distribution()).run()
630 class install_func(_install):
631 def initialize_options(self):
632 _install.initialize_options(self)
634 # PRIVATE_BUILD set in the environment tells us to put the build
635 # directory into the .pyc files, rather than the final
636 # installation directory.
637 private_build = os.getenv("PRIVATE_BUILD", None)
639 if private_build is None:
640 self.install_lib = py_install_dir
641 self.install_data = os.path.sep
642 self.root = root_dir
643 else:
644 self.install_lib = os.path.join(root_dir, py_install_dir)
645 self.install_data = root_dir
647 # This is used when installing scripts, below, but it isn't a
648 # standard distutils variable.
649 self.root_dir = root_dir
651 def run(self):
652 """At the end of the install function, we need to rename some
653 files because distutils provides no way to rename files as they
654 are placed in their install locations.
655 """
657 _install.run(self)
659 for o_src, o_dest in hardlink_modules:
660 for e in [".py", ".pyc"]:
661 src = util.change_root(self.root_dir, o_src + e)
662 dest = util.change_root(
663 self.root_dir, o_dest + e)
664 if ostype == "posix":
665 if os.path.exists(dest) and \
666 os.stat(src)[stat.ST_INO] != \
667 os.stat(dest)[stat.ST_INO]:
668 os.remove(dest)
669 file_util.copy_file(src, dest,
670 link="hard", update=1)
671 else:
672 file_util.copy_file(src, dest, update=1)
674 # Don't install the scripts for python 2.6.
675 if py_version == '2.6':
676 return
677 for d, files in scripts[osname].iteritems():
678 for (srcname, dstname) in files:
679 dst_dir = util.change_root(self.root_dir, d)
680 dst_path = util.change_root(self.root_dir,
681 os.path.join(d, dstname))
682 dir_util.mkpath(dst_dir, verbose=True)
683 file_util.copy_file(srcname, dst_path, update=True)
684 # make scripts executable
685 os.chmod(dst_path,
686 os.stat(dst_path).st_mode
687 | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
689 class install_lib_func(_install_lib):
690 """Remove the target files prior to the standard install_lib procedure
691 if the build_py module has determined that they've actually changed.
692 This may be needed when a module's timestamp goes backwards in time, if
693 a working-directory change is reverted, or an older changeset is checked
694 out.
695 """
697 def install(self):
698 build_py = self.get_finalized_command("build_py")
699 prefix_len = len(self.build_dir) + 1
700 for p in build_py.copied:
701 id_p = os.path.join(self.install_dir, p[prefix_len:])
702 rm_f(id_p)
703 if self.compile:
704 rm_f(id_p + "c")
705 if self.optimize > 0:
706 rm_f(id_p + "o")
707 return _install_lib.install(self)
709 class install_data_func(_install_data):
710 """Enhance the standard install_data subcommand to take not only a list
711 of filenames, but a list of source and destination filename tuples, for
712 the cases where a filename needs to be renamed between the two
713 locations."""
715 def run(self):
716 self.mkpath(self.install_dir)
717 for f in self.data_files:
718 dir, files = f
719 dir = util.convert_path(dir)
720 if not os.path.isabs(dir):
721 dir = os.path.join(self.install_dir, dir)
722 elif self.root:
723 dir = change_root(self.root, dir)
724 self.mkpath(dir)
726 if not files:
727 self.outfiles.append(dir)
728 else:
729 for file in files:
730 if isinstance(file, basestring):
731 infile = file
732 outfile = os.path.join(dir,
733 os.path.basename(file))
734 else:
735 infile, outfile = file
736 infile = util.convert_path(infile)
737 outfile = util.convert_path(outfile)
738 if os.path.sep not in outfile:
739 outfile = os.path.join(dir,
740 outfile)
741 self.copy_file(infile, outfile)
742 self.outfiles.append(outfile)
744 def run_cmd(args, swdir, updenv=None, ignerr=False, savestderr=None):
745 if updenv:
746 # use temp environment modified with the given dict
747 env = os.environ.copy()
748 env.update(updenv)
749 else:
750 # just use environment of this (parent) process as is
751 env = os.environ
752 if ignerr:
753 # send stderr to devnull
754 stderr = open(os.devnull)
755 elif savestderr:
756 stderr = savestderr
757 else:
758 # just use stderr of this (parent) process
759 stderr = None
760 ret = subprocess.Popen(args, cwd=swdir, env=env,
761 stderr=stderr).wait()
762 if ret != 0:
763 if stderr:
764 stderr.close()
765 print("install failed and returned {0:d}.".format(ret),
766 file=sys.stderr)
767 print("Command was: {0}".format(" ".join(args)),
768 file=sys.stderr)
770 sys.exit(1)
771 if stderr:
772 stderr.close()
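# A minimal usage sketch of run_cmd(), mirroring how the intltool helpers
# below call it:
#   run_cmd(["/usr/bin/intltool-update", "--pot"],
#           os.path.join(os.getcwd(), "po"), updenv={"LC_ALL": "C"})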
774 def _copy_file_contents(src, dst, buffer_size=16*1024):
775 """A clone of distutils.file_util._copy_file_contents() that strips the
776 CDDL text. For Python files, we replace the CDDL text with an equal
777 number of empty comment lines so that line numbers match between the
778 source and destination files."""
780 # Match the lines between and including the CDDL header signposts, as
781 # well as empty comment lines before and after, if they exist.
782 cddl_re = re.compile("\n(#\s*\n)?^[^\n]*CDDL HEADER START.+"
783 "CDDL HEADER END[^\n]*$(\n#\s*$)?", re.MULTILINE|re.DOTALL)
785 with file(src, "r") as sfp:
786 try:
787 os.unlink(dst)
788 except EnvironmentError as e:
789 if e.errno != errno.ENOENT:
790 raise DistutilsFileError("could not delete "
791 "'{0}': {1}".format(dst, e))
793 with file(dst, "w") as dfp:
794 while True:
795 buf = sfp.read(buffer_size)
796 if not buf:
797 break
798 if src.endswith(".py"):
799 match = cddl_re.search(buf)
800 if match:
801 # replace the CDDL expression
802 # with the same number of empty
803 # comment lines as the cddl_re
804 # matched.
805 substr = buf[
806 match.start():match.end()]
807 count = len(
808 substr.split("\n")) - 2
809 blanks = "#\n" * count
810 buf = cddl_re.sub("\n" + blanks,
811 buf)
812 else:
813 buf = cddl_re.sub("", buf)
814 dfp.write(buf)
816 # Make file_util use our version of _copy_file_contents
817 file_util._copy_file_contents = _copy_file_contents
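# From here on, every regular (non-hardlink) file copy performed through
# distutils file_util -- install_data, installfile, the script copies in
# install_func, and so on -- strips the CDDL block as described above.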
819 def intltool_update_maintain():
820 """Check if scope of localization looks up-to-date or possibly not,
821 by comparing file set described in po/POTFILES.{in,skip} and
822 actual source files (e.g. .py) detected.
824 rm_f("po/missing")
825 rm_f("po/notexist")
827 args = [
828 "/usr/bin/intltool-update", "--maintain"
830 print(" ".join(args))
831 podir = os.path.join(os.getcwd(), "po")
832 run_cmd(args, podir, updenv={"LC_ALL": "C"}, ignerr=True)
834 if os.path.exists("po/missing"):
835 print("New file(s) with translatable strings detected:",
836 file=sys.stderr)
837 missing = open("po/missing", "r")
838 print("--------", file=sys.stderr)
839 for fn in missing:
840 print("{0}".format(fn.strip()), file=sys.stderr)
841 print("--------", file=sys.stderr)
842 missing.close()
843 print("""\
844 Please evaluate whether any of the above file(s) needs localization.
845 If so, please add its name to po/POTFILES.in. If not (e.g., it's not
846 delivered), please add its name to po/POTFILES.skip.
847 Please be sure to maintain alphabetical ordering in both files.""", file=sys.stderr)
848 sys.exit(1)
850 if os.path.exists("po/notexist"):
851 print("""\
852 The following files are listed in po/POTFILES.in, but no longer exist
853 in the workspace:""", file=sys.stderr)
854 notexist = open("po/notexist", "r")
855 print("--------", file=sys.stderr)
856 for fn in notexist:
857 print("{0}".format(fn.strip()), file=sys.stderr)
858 print("--------", file=sys.stderr)
860 notexist.close()
861 print("Please remove the file names from po/POTFILES.in",
862 file=sys.stderr)
863 sys.exit(1)
865 def intltool_update_pot():
866 """Generate pkg.pot by extracting localizable strings from source
867 files (e.g. .py)
868 """
869 rm_f("po/pkg.pot")
871 args = [
872 "/usr/bin/intltool-update", "--pot"
874 print(" ".join(args))
875 podir = os.path.join(os.getcwd(), "po")
876 run_cmd(args, podir,
877 updenv={"LC_ALL": "C", "XGETTEXT": "/usr/gnu/bin/xgettext"})
879 if not os.path.exists("po/pkg.pot"):
880 print("Failed in generating pkg.pot.", file=sys.stderr)
881 sys.exit(1)
883 def intltool_merge(src, dst):
884 if not dep_util.newer(src, dst):
885 return
887 args = [
888 "/usr/bin/intltool-merge", "-d", "-u",
889 "-c", "po/.intltool-merge-cache", "po", src, dst
891 print(" ".join(args))
892 run_cmd(args, os.getcwd(), updenv={"LC_ALL": "C"})
894 def i18n_check():
895 """Checks for common i18n messaging bugs in the source."""
897 src_files = []
898 # A list of the i18n errors we check for in the code
899 common_i18n_errors = [
900 # This checks that messages with multiple parameters are always
901 # written using "{name}" format, rather than just "{0}"
902 "format string with unnamed arguments cannot be properly localized"
905 for line in open("po/POTFILES.in", "r").readlines():
906 if line.startswith("["):
907 continue
908 if line.startswith("#"):
909 continue
910 src_files.append(line.rstrip())
912 args = [
913 "/usr/gnu/bin/xgettext", "--from-code=UTF-8", "-o", "/dev/null"]
914 args += src_files
916 xgettext_output_path = tempfile.mkstemp()[1]
917 xgettext_output = open(xgettext_output_path, "w")
918 run_cmd(args, os.getcwd(), updenv={"LC_ALL": "C"},
919 savestderr=xgettext_output)
921 found_errs = False
922 i18n_errs = open("po/i18n_errs.txt", "w")
923 for line in open(xgettext_output_path, "r").readlines():
924 for err in common_i18n_errors:
925 if err in line:
926 i18n_errs.write(line)
927 found_errs = True
928 i18n_errs.close()
929 if found_errs:
930 print("""\
931 The following i18n errors were detected and should be corrected:
932 (this list is saved in po/i18n_errs.txt)
933 """, file=sys.stderr)
934 for line in open("po/i18n_errs.txt", "r"):
935 print(line.rstrip(), file=sys.stderr)
936 sys.exit(1)
937 os.remove(xgettext_output_path)
939 def msgfmt(src, dst):
940 if not dep_util.newer(src, dst):
941 return
943 args = ["/usr/bin/msgfmt", "-o", dst, src]
944 print(" ".join(args))
945 run_cmd(args, os.getcwd())
947 def localizablexml(src, dst):
948 """create XML help for localization, where French part of legalnotice
949 is stripped off
951 if not dep_util.newer(src, dst):
952 return
954 fsrc = open(src, "r")
955 fdst = open(dst, "w")
957 # indicates currently in French part of legalnotice
958 in_fr = False
960 for l in fsrc:
961 if in_fr: # in French part
962 if l.startswith('</legalnotice>'):
963 # reached end of legalnotice
964 print(l, file=fdst)
965 in_fr = False
966 elif l.startswith('<para lang="fr"/>') or \
967 l.startswith('<para lang="fr"></para>'):
968 in_fr = True
969 else:
970 # not in French part
971 print(l, file=fdst)
973 fsrc.close()
974 fdst.close()
976 def xml2po_gen(src, dst):
977 """Input is English XML file. Output is pkg_help.pot, message
978 source for next translation update.
979 """
980 if not dep_util.newer(src, dst):
981 return
983 args = ["/usr/bin/xml2po", "-o", dst, src]
984 print(" ".join(args))
985 run_cmd(args, os.getcwd())
987 def xml2po_merge(src, dst, mofile):
988 """Input is English XML file and <lang>.po file (which contains
989 translations). Output is translated XML file.
990 """
991 msgfmt(mofile[:-3] + ".po", mofile)
993 monewer = dep_util.newer(mofile, dst)
994 srcnewer = dep_util.newer(src, dst)
996 if not srcnewer and not monewer:
997 return
999 args = ["/usr/bin/xml2po", "-t", mofile, "-o", dst, src]
1000 print(" ".join(args))
1001 run_cmd(args, os.getcwd())
1003 class installfile(Command):
1004 user_options = [
1005 ("file=", "f", "source file to copy"),
1006 ("dest=", "d", "destination directory"),
1007 ("mode=", "m", "file mode"),
1010 description = "De-CDDLing file copy"
1012 def initialize_options(self):
1013 self.file = None
1014 self.dest = None
1015 self.mode = None
1017 def finalize_options(self):
1018 if self.mode is None:
1019 self.mode = 0o644
1020 elif isinstance(self.mode, basestring):
1021 try:
1022 self.mode = int(self.mode, 8)
1023 except ValueError:
1024 self.mode = 0o644
1026 def run(self):
1027 dest_file = os.path.join(self.dest, os.path.basename(self.file))
1028 ret = self.copy_file(self.file, dest_file)
1030 os.chmod(dest_file, self.mode)
1031 os.utime(dest_file, None)
1033 return ret
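# A hypothetical invocation of the command above (file name, destination and
# mode are illustrative only):
#   python2.7 setup.py installfile -f util/pkglintrc \
#       -d /tmp/proto/usr/share/lib/pkg -m 0644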
1035 class build_func(_build):
1036 sub_commands = _build.sub_commands + [('build_data', None)]
1038 def initialize_options(self):
1039 _build.initialize_options(self)
1040 self.build_base = build_dir
1042 def get_hg_version():
1043 try:
1044 cmd = 'hg id -i 2>/dev/null || git log --pretty=format:\'%h\' -1'
1045 p = subprocess.Popen(cmd, shell=True, stdout = subprocess.PIPE)
1046 return p.communicate()[0].strip()
1047 except OSError:
1048 print("ERROR: unable to obtain mercurial/git version",
1049 file=sys.stderr)
1050 return "unknown"
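# The returned string is the short hg changeset id or git commit hash of the
# workspace (something like "3f0c4e1a2b9d"), or "unknown" if the command
# cannot be started at all.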
1052 def syntax_check(filename):
1053 """ Run python's compiler over the file, and discard the results.
1054 Arrange to generate an exception if the file does not compile.
1055 This is needed because distutil's own use of pycompile (in the
1056 distutils.utils module) is broken, and doesn't stop on error. """
1057 try:
1058 py_compile.compile(filename, os.devnull, doraise=True)
1059 except py_compile.PyCompileError as e:
1060 res = ""
1061 for err in e.exc_value:
1062 if isinstance(err, basestring):
1063 res += err + "\n"
1064 continue
1066 # Assume it's a tuple of (filename, lineno, col, code)
1067 fname, line, col, code = err
1068 res += "line {0:d}, column {1}, in {2}:\n{3}".format(
1069 line, col or "unknown", fname, code)
1071 raise DistutilsError(res)
1073 # On Solaris, ld inserts the full argument to the -o option into the symbol
1074 # table. This means that the resulting object will be different depending on
1075 # the path at which the workspace lives, and not just on the interesting content
1076 # of the object.
1078 # In order to work around that bug (7076871), we create a new compiler class
1079 # that looks at the argument indicating the output file, chdirs to its
1080 # directory, and runs the real link with the output file set to just the base
1081 # name of the file.
1083 # Unfortunately, distutils isn't too customizable in this regard, so we have to
1084 # twiddle with a couple of the names in the distutils.ccompiler namespace: we
1085 # have to add a new entry to the compiler_class dict, and we have to override
1086 # the new_compiler() function to point to our own. Luckily, our copy of
1087 # new_compiler() gets to be very simple, since we always know what we want to
1088 # return.
1089 class MyUnixCCompiler(UnixCCompiler):
1091 def link(self, *args, **kwargs):
1093 output_filename = args[2]
1094 output_dir = kwargs.get('output_dir')
1095 cwd = os.getcwd()
1097 assert(not output_dir)
1098 output_dir = os.path.join(cwd, os.path.dirname(output_filename))
1099 output_filename = os.path.basename(output_filename)
1100 nargs = args[:2] + (output_filename,) + args[3:]
1101 if not os.path.exists(output_dir):
1102 os.mkdir(output_dir, 0o755)
1103 os.chdir(output_dir)
1105 UnixCCompiler.link(self, *nargs, **kwargs)
1107 os.chdir(cwd)
1109 distutils.ccompiler.compiler_class['myunix'] = (
1110 'unixccompiler', 'MyUnixCCompiler',
1111 'standard Unix-style compiler with a link stage modified for Solaris'
1114 def my_new_compiler(plat=None, compiler=None, verbose=0, dry_run=0, force=0):
1115 return MyUnixCCompiler(None, dry_run, force)
1117 if osname == 'sunos' or osname == 'unleashed':
1118 distutils.ccompiler.new_compiler = my_new_compiler
1120 class build_ext_func(_build_ext):
1122 def initialize_options(self):
1123 _build_ext.initialize_options(self)
1124 self.build64 = False
1126 if osname == 'sunos' or osname == 'unleashed':
1127 self.compiler = 'myunix'
1129 def build_extension(self, ext):
1130 # Build 32-bit
1131 _build_ext.build_extension(self, ext)
1132 if not ext.build_64:
1133 return
1135 # Set up for 64-bit
1136 old_build_temp = self.build_temp
1137 d, f = os.path.split(self.build_temp)
1139 # store our 64-bit extensions elsewhere
1140 self.build_temp = d + "/temp64.{0}".format(
1141 os.path.basename(self.build_temp).replace("temp.", ""))
1142 ext.extra_compile_args += ["-m64"]
1143 ext.extra_link_args += ["-m64"]
1144 self.build64 = True
1146 # Build 64-bit
1147 _build_ext.build_extension(self, ext)
1149 # Reset to 32-bit
1150 self.build_temp = old_build_temp
1151 ext.extra_compile_args.remove("-m64")
1152 ext.extra_link_args.remove("-m64")
1153 self.build64 = False
1155 def get_ext_fullpath(self, ext_name):
1156 path = _build_ext.get_ext_fullpath(self, ext_name)
1157 if not self.build64:
1158 return path
1160 dpath, fpath = os.path.split(path)
1161 return os.path.join(dpath, "64", fpath)
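# In other words, for an extension built with build_64 set, the 32-bit object
# keeps its normal path (e.g. .../pkg/arch.so) while the second, 64-bit pass
# is written into a "64" subdirectory alongside it (e.g. .../pkg/64/arch.so);
# the paths here are illustrative.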
1164 class build_py_func(_build_py):
1166 def __init__(self, dist):
1167 ret = _build_py.__init__(self, dist)
1169 self.copied = []
1171 # Gather the timestamps of the .py files in the gate, so we can
1172 # force the mtimes of the built and delivered copies to be
1173 # consistent across builds, causing their corresponding .pyc
1174 # files to be unchanged unless the .py file content changed.
1176 self.timestamps = {}
1178 pydates = "pydates"
1180 if os.path.isdir(os.path.join(pwd, "../.git")):
1181 pydates = "pydates.git"
1183 p = subprocess.Popen(
1184 os.path.join(pwd, pydates),
1185 stdout=subprocess.PIPE)
1187 for line in p.stdout:
1188 stamp, path = line.split()
1189 stamp = float(stamp)
1190 self.timestamps[path] = stamp
1192 if p.wait() != 0:
1193 print("ERROR: unable to gather .py timestamps",
1194 file=sys.stderr)
1195 sys.exit(1)
1197 return ret
1199 # override the build_module method to do VERSION substitution on
1200 # pkg/__init__.py
1201 def build_module (self, module, module_file, package):
1203 if module == "__init__" and package == "pkg":
1204 versionre = '(?m)^VERSION[^"]*"([^"]*)"'
1205 # Grab the previously-built version out of the build
1206 # tree.
1207 try:
1208 ocontent = \
1209 file(self.get_module_outfile(self.build_lib,
1210 [package], module)).read()
1211 ov = re.search(versionre, ocontent).group(1)
1212 except IOError:
1213 ov = None
1214 v = get_hg_version()
1215 vstr = 'VERSION = "{0}"'.format(v)
1216 # If the versions haven't changed, there's no need to
1217 # recompile.
1218 if v == ov:
1219 return
1221 mcontent = file(module_file).read()
1222 mcontent = re.sub(versionre, vstr, mcontent)
1223 tmpfd, tmp_file = tempfile.mkstemp()
1224 os.write(tmpfd, mcontent)
1225 os.close(tmpfd)
1226 print("doing version substitution: ", v)
1227 rv = _build_py.build_module(self, module, tmp_file, package)
1228 os.unlink(tmp_file)
1229 return rv
1231 # Will raise a DistutilsError on failure.
1232 syntax_check(module_file)
1234 return _build_py.build_module(self, module, module_file, package)
1236 def copy_file(self, infile, outfile, preserve_mode=1, preserve_times=1,
1237 link=None, level=1):
1239 # If the timestamp on the source file (coming from mercurial if
1240 # unchanged, or from the filesystem if changed) doesn't match
1241 # the filesystem timestamp on the destination, then force the
1242 # copy to make sure the right data is in place.
1244 try:
1245 dst_mtime = os.stat(outfile).st_mtime
1246 except OSError as e:
1247 if e.errno != errno.ENOENT:
1248 raise
1249 dst_mtime = time.time()
1251 # The timestamp for __init__.py is the timestamp for the
1252 # workspace itself.
1253 if outfile.endswith("/pkg/__init__.py"):
1254 src_mtime = self.timestamps["."]
1255 else:
1256 src_mtime = self.timestamps.get(
1257 os.path.join("src", infile), self.timestamps["."])
1259 # Force a copy of the file if the source timestamp is different
1260 # from that of the destination, not just if it's newer. This
1261 # allows timestamps in the working directory to regress (for
1262 # instance, following the reversion of a change).
1263 if dst_mtime != src_mtime:
1264 f = self.force
1265 self.force = True
1266 dst, copied = _build_py.copy_file(self, infile, outfile,
1267 preserve_mode, preserve_times, link, level)
1268 self.force = f
1269 else:
1270 dst, copied = outfile, 0
1272 # If we copied the file, then we need to go and readjust the
1273 # timestamp on the file to match what we have in our database.
1274 # Save the filename aside for our version of install_lib.
1275 if copied and dst.endswith(".py"):
1276 os.utime(dst, (src_mtime, src_mtime))
1277 self.copied.append(dst)
1279 return dst, copied
1281 def manpage_input_dir(path):
1282 """Convert a manpage output path to the directory where its source lives."""
1284 patharr = path.split("/")
1285 if len(patharr) == 4:
1286 loc = ""
1287 elif len(patharr) == 5:
1288 loc = patharr[-3].split(".")[0]
1289 else:
1290 raise RuntimeError("bad manpage path")
1291 return os.path.join(patharr[0], loc).rstrip("/")
1293 def xml2roff(files):
1294 """Convert XML manpages to ROFF for delivery.
1296 The input should be a list of the output file paths. The corresponding
1297 inputs will be generated from this. We do it in this way so that we can
1298 share the paths with the install code.
1300 All paths should have a common manpath root. In particular, pages
1301 belonging to different localizations should be run through this function
1302 separately.
1303 """
1305 input_dir = manpage_input_dir(files[0])
1306 do_files = [
1307 os.path.join(input_dir, os.path.basename(f))
1308 for f in files
1309 if dep_util.newer(os.path.join(input_dir, os.path.basename(f)), f)
1311 if do_files:
1312 # Get the output dir by removing the filename and the manX
1313 # directory
1314 output_dir = os.path.join(*files[0].split("/")[:-2])
1315 args = ["/usr/share/xml/xsolbook/python/xml2roff.py", "-o", output_dir]
1316 args += do_files
1317 print(" ".join(args))
1318 run_cmd(args, os.getcwd())
1320 class build_data_func(Command):
1321 description = "build data files whose source isn't in deliverable form"
1322 user_options = []
1324 # As a subclass of distutils.cmd.Command, these methods are required to
1325 # be implemented.
1326 def initialize_options(self):
1327 pass
1329 def finalize_options(self):
1330 pass
1332 def run(self):
1333 pass
1335 def rm_f(filepath):
1336 """Remove a file without caring whether it exists."""
1338 try:
1339 os.unlink(filepath)
1340 except OSError as e:
1341 if e.errno != errno.ENOENT:
1342 raise
1344 class clean_func(_clean):
1345 def initialize_options(self):
1346 _clean.initialize_options(self)
1347 self.build_base = build_dir
1349 def run(self):
1350 _clean.run(self)
1352 rm_f("po/.intltool-merge-cache")
1354 for l in pkg_locales:
1355 rm_f("po/{0}.mo".format(l))
1357 rm_f("po/pkg.pot")
1359 rm_f("po/i18n_errs.txt")
1361 #shutil.rmtree(MANPAGE_OUTPUT_ROOT, True)
1363 class clobber_func(Command):
1364 user_options = []
1365 description = "Deletes any and all files created by setup"
1367 def initialize_options(self):
1368 pass
1369 def finalize_options(self):
1370 pass
1371 def run(self):
1372 # nuke everything
1373 print("deleting " + dist_dir)
1374 shutil.rmtree(dist_dir, True)
1375 print("deleting " + build_dir)
1376 shutil.rmtree(build_dir, True)
1377 print("deleting " + root_dir)
1378 shutil.rmtree(root_dir, True)
1379 print("deleting " + pkgs_dir)
1380 shutil.rmtree(pkgs_dir, True)
1381 print("deleting " + extern_dir)
1382 shutil.rmtree(extern_dir, True)
1384 class test_func(Command):
1385 # NOTE: these options need to be in sync with tests/run.py and the
1386 # list of options stored in initialize_options below. The first entry
1387 # in each tuple must be the exact name of a member variable.
1388 user_options = [
1389 ("archivedir=", 'a', "archive failed tests <dir>"),
1390 ("baselinefile=", 'b', "baseline file <file>"),
1391 ("coverage", "c", "collect code coverage data"),
1392 ("genbaseline", 'g', "generate test baseline"),
1393 ("only=", "o", "only <regex>"),
1394 ("parseable", 'p', "parseable output"),
1395 ("port=", "z", "lowest port to start a depot on"),
1396 ("timing", "t", "timing file <file>"),
1397 ("verbosemode", 'v', "run tests in verbose mode"),
1398 ("stoponerr", 'x', "stop when a baseline mismatch occurs"),
1399 ("debugoutput", 'd', "emit debugging output"),
1400 ("showonexpectedfail", 'f',
1401 "show all failure info, even for expected fails"),
1402 ("startattest=", 's', "start at indicated test"),
1403 ("jobs=", 'j', "number of parallel processes to use"),
1404 ("quiet", "q", "use the dots as the output format"),
1405 ("livesystem", 'l', "run tests on live system"),
1407 description = "Runs unit and functional tests"
1409 def initialize_options(self):
1410 self.only = ""
1411 self.baselinefile = ""
1412 self.verbosemode = 0
1413 self.parseable = 0
1414 self.genbaseline = 0
1415 self.timing = 0
1416 self.coverage = 0
1417 self.stoponerr = 0
1418 self.debugoutput = 0
1419 self.showonexpectedfail = 0
1420 self.startattest = ""
1421 self.archivedir = ""
1422 self.port = 12001
1423 self.jobs = 1
1424 self.quiet = False
1425 self.livesystem = False
1427 def finalize_options(self):
1428 pass
1430 def run(self):
1432 os.putenv('PYEXE', sys.executable)
1433 os.chdir(os.path.join(pwd, "tests"))
1435 # Reconstruct the cmdline and send that to run.py
1436 cmd = [sys.executable, "run.py"]
1437 args = ""
1438 if "test" in sys.argv:
1439 args = sys.argv[sys.argv.index("test")+1:]
1440 cmd.extend(args)
1441 subprocess.call(cmd)
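# Everything on the command line after the "test" keyword is forwarded
# verbatim to tests/run.py, so an illustrative invocation might be:
#   python2.7 setup.py test -v -j 4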
1443 class dist_func(_bdist):
1444 def initialize_options(self):
1445 _bdist.initialize_options(self)
1446 self.dist_dir = dist_dir
1448 class Extension(distutils.core.Extension):
1449 # This class wraps the distutils Extension class, allowing us to set
1450 # build_64 in the object constructor instead of being forced to add it
1451 # after the object has been created.
1452 def __init__(self, name, sources, build_64=False, **kwargs):
1453 distutils.core.Extension.__init__(self, name, sources, **kwargs)
1454 self.build_64 = build_64
1456 # These are set to real values based on the platform, down below
1457 compile_args = None
1458 if osname in ("sunos", "unleashed", "linux", "darwin"):
1459 compile_args = [ "-O3" ]
1460 if osname == "sunos" or osname == "unleashed":
1461 link_args = []
1462 else:
1463 link_args = []
1465 ext_modules = [
1466 Extension(
1467 'actions._actions',
1468 _actions_srcs,
1469 include_dirs = include_dirs,
1470 extra_compile_args = compile_args,
1471 extra_link_args = link_args,
1472 build_64 = True
1474 Extension(
1475 'actions._common',
1476 _actcomm_srcs,
1477 include_dirs = include_dirs,
1478 extra_compile_args = compile_args,
1479 extra_link_args = link_args,
1480 build_64 = True
1482 Extension(
1483 '_varcet',
1484 _varcet_srcs,
1485 include_dirs = include_dirs,
1486 extra_compile_args = compile_args,
1487 extra_link_args = link_args,
1488 build_64 = True
1490 Extension(
1491 'solver',
1492 solver_srcs,
1493 include_dirs = include_dirs + ["."],
1494 extra_compile_args = compile_args,
1495 extra_link_args = link_args + solver_link_args,
1496 define_macros = [('_FILE_OFFSET_BITS', '64')],
1497 build_64 = True
1500 elf_libraries = None
1501 sysattr_libraries = None
1502 data_files = web_files
1503 cmdclasses = {
1504 'install': install_func,
1505 'install_data': install_data_func,
1506 'install_lib': install_lib_func,
1507 'build': build_func,
1508 'build_data': build_data_func,
1509 'build_ext': build_ext_func,
1510 'build_py': build_py_func,
1511 'bdist': dist_func,
1512 'lint': lint_func,
1513 'clint': clint_func,
1514 'pylint': pylint_func,
1515 'pylint_quiet': pylint_func_quiet,
1516 'clean': clean_func,
1517 'clobber': clobber_func,
1518 'test': test_func,
1519 'installfile': installfile,
1522 # all builds of IPS should have manpages
1523 data_files += [
1524 (man1_dir, man1_files),
1525 (man1m_dir, man1m_files),
1526 (man5_dir, man5_files),
1527 (man1_ja_JP_dir, man1_ja_files),
1528 (man1m_ja_JP_dir, man1m_ja_files),
1529 (man5_ja_JP_dir, man5_ja_files),
1530 (man1_zh_CN_dir, man1_zh_CN_files),
1531 (man1m_zh_CN_dir, man1m_zh_CN_files),
1532 (man5_zh_CN_dir, man5_zh_CN_files),
1533 (resource_dir, resource_files),
1535 # add transforms
1536 data_files += [
1537 (transform_dir, transform_files)
1539 # add ignored deps
1540 data_files += [
1541 (ignored_deps_dir, ignored_deps_files)
1543 if osname == 'sunos' or osname == 'unleashed':
1544 # Solaris-specific data files are added here
1545 data_files += [
1546 (smf_app_dir, smf_app_files),
1547 (execattrd_dir, execattrd_files),
1548 (authattrd_dir, authattrd_files),
1549 (userattrd_dir, userattrd_files),
1550 (sysrepo_dir, sysrepo_files),
1551 (sysrepo_logs_dir, sysrepo_log_stubs),
1552 (sysrepo_cache_dir, {}),
1553 (depot_dir, depot_files),
1554 (depot_conf_dir, {}),
1555 (depot_logs_dir, depot_log_stubs),
1556 (depot_cache_dir, {}),
1557 (mirror_cache_dir, {}),
1558 (mirror_logs_dir, {}),
1561 if osname in ['sunos', 'unleashed', 'linux']:
1562 # Unix platforms which the elf extension has been ported to
1563 # are specified here, so they are built automatically
1564 elf_libraries = ['elf']
1565 ext_modules += [
1566 Extension(
1567 'elf',
1568 elf_srcs,
1569 include_dirs = include_dirs,
1570 libraries = elf_libraries,
1571 extra_compile_args = compile_args,
1572 extra_link_args = link_args,
1573 build_64 = True
1577 # Solaris has built-in md library and Solaris-specific arch extension
1578 # All others use OpenSSL and cross-platform arch module
1579 if osname == 'sunos' or osname == 'unleashed':
1580 elf_libraries += [ 'md' ]
1581 sysattr_libraries = [ 'nvpair' ]
1582 ext_modules += [
1583 Extension(
1584 'arch',
1585 arch_srcs,
1586 include_dirs = include_dirs,
1587 extra_compile_args = compile_args,
1588 extra_link_args = link_args,
1589 define_macros = [('_FILE_OFFSET_BITS', '64')],
1590 build_64 = True
1592 Extension(
1593 'pspawn',
1594 pspawn_srcs,
1595 include_dirs = include_dirs,
1596 extra_compile_args = compile_args,
1597 extra_link_args = link_args,
1598 define_macros = [('_FILE_OFFSET_BITS', '64')],
1599 build_64 = True
1601 Extension(
1602 'syscallat',
1603 syscallat_srcs,
1604 include_dirs = include_dirs,
1605 extra_compile_args = compile_args,
1606 extra_link_args = link_args,
1607 define_macros = [('_FILE_OFFSET_BITS', '64')],
1608 build_64 = True
1610 Extension(
1611 'sysattr',
1612 sysattr_srcs,
1613 include_dirs = include_dirs,
1614 libraries = sysattr_libraries,
1615 extra_compile_args = compile_args,
1616 extra_link_args = link_args,
1617 define_macros = [('_FILE_OFFSET_BITS', '64')],
1618 build_64 = True
1621 else:
1622 elf_libraries += [ 'ssl' ]
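# With the command classes registered in cmdclasses above, typical
# invocations of this script look like (illustrative):
#   python2.7 setup.py build          # build modules, extensions and data
#   python2.7 setup.py install        # populate the proto area (root_dir)
#   python2.7 setup.py test -v        # run the test suite via tests/run.py
#   python2.7 setup.py clobber        # remove everything setup has created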
1624 setup(cmdclass = cmdclasses,
1625 name = 'pkg',
1626 version = '0.1',
1627 package_dir = {'pkg':'modules'},
1628 packages = packages,
1629 data_files = data_files,
1630 ext_package = 'pkg',
1631 ext_modules = ext_modules,