2 # Build many configurations of glibc.
3 # Copyright (C) 2016-2018 Free Software Foundation, Inc.
4 # This file is part of the GNU C Library.
6 # The GNU C Library is free software; you can redistribute it and/or
7 # modify it under the terms of the GNU Lesser General Public
8 # License as published by the Free Software Foundation; either
9 # version 2.1 of the License, or (at your option) any later version.
11 # The GNU C Library is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 # Lesser General Public License for more details.
16 # You should have received a copy of the GNU Lesser General Public
17 # License along with the GNU C Library; if not, see
18 # <http://www.gnu.org/licenses/>.
20 """Build many configurations of glibc.
22 This script takes as arguments a directory name (containing a src
23 subdirectory with sources of the relevant toolchain components) and a
24 description of what to do: 'checkout', to check out sources into that
25 directory, 'bot-cycle', to run a series of checkout and build steps,
26 'bot', to run 'bot-cycle' repeatedly, 'host-libraries', to build
27 libraries required by the toolchain, 'compilers', to build
28 cross-compilers for various configurations, or 'glibcs', to build
29 glibc for various configurations and run the compilation parts of the
30 testsuite. Subsequent arguments name the versions of components to
31 check out (<component>-<version>), for 'checkout', or, for actions
32 other than 'checkout' and 'bot-cycle', name configurations for which
33 compilers or glibc are to be built.
39 import email
.mime
.text
# Compatibility shims for older Python 3 interpreters: os.cpu_count and
# re.fullmatch only exist from Python 3.4 on.  Install the fallbacks
# only when the real functions are missing, instead of unconditionally
# replacing the native (and faster) implementations.
if not hasattr(os, 'cpu_count'):
    import multiprocessing
    os.cpu_count = lambda: multiprocessing.cpu_count()
if not hasattr(re, 'fullmatch'):
    # Emulate fullmatch by anchoring the pattern at end-of-string.
    re.fullmatch = lambda p, s, f=0: re.match(p + "\\Z", s, f)
66 class _CompletedProcess
:
67 def __init__(self
, args
, returncode
, stdout
=None, stderr
=None):
69 self
.returncode
= returncode
73 def _run(*popenargs
, input=None, timeout
=None, check
=False, **kwargs
):
74 assert(timeout
is None)
75 with subprocess
.Popen(*popenargs
, **kwargs
) as process
:
77 stdout
, stderr
= process
.communicate(input)
82 returncode
= process
.poll()
83 if check
and returncode
:
84 raise subprocess
.CalledProcessError(returncode
, popenargs
)
85 return _CompletedProcess(popenargs
, returncode
, stdout
, stderr
)
90 class Context(object):
91 """The global state associated with builds in a given directory."""
93 def __init__(self
, topdir
, parallelism
, keep
, replace_sources
, strip
,
95 """Initialize the context."""
97 self
.parallelism
= parallelism
99 self
.replace_sources
= replace_sources
101 self
.srcdir
= os
.path
.join(topdir
, 'src')
102 self
.versions_json
= os
.path
.join(self
.srcdir
, 'versions.json')
103 self
.build_state_json
= os
.path
.join(topdir
, 'build-state.json')
104 self
.bot_config_json
= os
.path
.join(topdir
, 'bot-config.json')
105 self
.installdir
= os
.path
.join(topdir
, 'install')
106 self
.host_libraries_installdir
= os
.path
.join(self
.installdir
,
108 self
.builddir
= os
.path
.join(topdir
, 'build')
109 self
.logsdir
= os
.path
.join(topdir
, 'logs')
110 self
.logsdir_old
= os
.path
.join(topdir
, 'logs-old')
111 self
.makefile
= os
.path
.join(self
.builddir
, 'Makefile')
112 self
.wrapper
= os
.path
.join(self
.builddir
, 'wrapper')
113 self
.save_logs
= os
.path
.join(self
.builddir
, 'save-logs')
114 self
.script_text
= self
.get_script_text()
115 if action
!= 'checkout':
116 self
.build_triplet
= self
.get_build_triplet()
117 self
.glibc_version
= self
.get_glibc_version()
119 self
.glibc_configs
= {}
120 self
.makefile_pieces
= ['.PHONY: all\n']
121 self
.add_all_configs()
122 self
.load_versions_json()
123 self
.load_build_state_json()
124 self
.status_log_list
= []
125 self
.email_warning
= False
127 def get_script_text(self
):
128 """Return the text of this script."""
129 with
open(sys
.argv
[0], 'r') as f
:
133 """Re-execute this script with the same arguments."""
135 os
.execv(sys
.executable
, [sys
.executable
] + sys
.argv
)
137 def get_build_triplet(self
):
138 """Determine the build triplet with config.guess."""
139 config_guess
= os
.path
.join(self
.component_srcdir('gcc'),
141 cg_out
= subprocess
.run([config_guess
], stdout
=subprocess
.PIPE
,
142 check
=True, universal_newlines
=True).stdout
143 return cg_out
.rstrip()
145 def get_glibc_version(self
):
146 """Determine the glibc version number (major.minor)."""
147 version_h
= os
.path
.join(self
.component_srcdir('glibc'), 'version.h')
148 with
open(version_h
, 'r') as f
:
149 lines
= f
.readlines()
150 starttext
= '#define VERSION "'
152 if l
.startswith(starttext
):
153 l
= l
[len(starttext
):]
155 m
= re
.fullmatch('([0-9]+)\.([0-9]+)[.0-9]*', l
)
156 return '%s.%s' % m
.group(1, 2)
157 print('error: could not determine glibc version')
160 def add_all_configs(self
):
161 """Add all known glibc build configurations."""
162 self
.add_config(arch
='aarch64',
164 extra_glibcs
=[{'variant': 'disable-multi-arch',
165 'cfg': ['--disable-multi-arch']}])
166 self
.add_config(arch
='aarch64_be',
168 self
.add_config(arch
='alpha',
170 self
.add_config(arch
='arm',
171 os_name
='linux-gnueabi')
172 self
.add_config(arch
='armeb',
173 os_name
='linux-gnueabi')
174 self
.add_config(arch
='armeb',
175 os_name
='linux-gnueabi',
177 gcc_cfg
=['--with-arch=armv7-a'])
178 self
.add_config(arch
='arm',
179 os_name
='linux-gnueabihf',
180 gcc_cfg
=['--with-float=hard', '--with-cpu=arm926ej-s'],
181 extra_glibcs
=[{'variant': 'v7a',
182 'ccopts': '-march=armv7-a -mfpu=vfpv3'},
183 {'variant': 'v7a-disable-multi-arch',
184 'ccopts': '-march=armv7-a -mfpu=vfpv3',
185 'cfg': ['--disable-multi-arch']}])
186 self
.add_config(arch
='armeb',
187 os_name
='linux-gnueabihf',
188 gcc_cfg
=['--with-float=hard', '--with-cpu=arm926ej-s'])
189 self
.add_config(arch
='armeb',
190 os_name
='linux-gnueabihf',
192 gcc_cfg
=['--with-float=hard', '--with-arch=armv7-a',
194 self
.add_config(arch
='hppa',
196 self
.add_config(arch
='i686',
198 self
.add_config(arch
='ia64',
200 first_gcc_cfg
=['--with-system-libunwind'])
201 self
.add_config(arch
='m68k',
203 gcc_cfg
=['--disable-multilib'])
204 self
.add_config(arch
='m68k',
207 gcc_cfg
=['--with-arch=cf', '--disable-multilib'])
208 self
.add_config(arch
='m68k',
210 variant
='coldfire-soft',
211 gcc_cfg
=['--with-arch=cf', '--with-cpu=54455',
212 '--disable-multilib'])
213 self
.add_config(arch
='microblaze',
215 gcc_cfg
=['--disable-multilib'])
216 self
.add_config(arch
='microblazeel',
218 gcc_cfg
=['--disable-multilib'])
219 self
.add_config(arch
='mips64',
221 gcc_cfg
=['--with-mips-plt'],
222 glibcs
=[{'variant': 'n32'},
224 'ccopts': '-mabi=32'},
226 'ccopts': '-mabi=64'}])
227 self
.add_config(arch
='mips64',
230 gcc_cfg
=['--with-mips-plt', '--with-float=soft'],
231 glibcs
=[{'variant': 'n32-soft'},
234 'ccopts': '-mabi=32'},
235 {'variant': 'n64-soft',
236 'ccopts': '-mabi=64'}])
237 self
.add_config(arch
='mips64',
240 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
241 '--with-arch-64=mips64r2',
242 '--with-arch-32=mips32r2'],
243 glibcs
=[{'variant': 'n32-nan2008'},
244 {'variant': 'nan2008',
246 'ccopts': '-mabi=32'},
247 {'variant': 'n64-nan2008',
248 'ccopts': '-mabi=64'}])
249 self
.add_config(arch
='mips64',
251 variant
='nan2008-soft',
252 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
253 '--with-arch-64=mips64r2',
254 '--with-arch-32=mips32r2',
255 '--with-float=soft'],
256 glibcs
=[{'variant': 'n32-nan2008-soft'},
257 {'variant': 'nan2008-soft',
259 'ccopts': '-mabi=32'},
260 {'variant': 'n64-nan2008-soft',
261 'ccopts': '-mabi=64'}])
262 self
.add_config(arch
='mips64el',
264 gcc_cfg
=['--with-mips-plt'],
265 glibcs
=[{'variant': 'n32'},
267 'ccopts': '-mabi=32'},
269 'ccopts': '-mabi=64'}])
270 self
.add_config(arch
='mips64el',
273 gcc_cfg
=['--with-mips-plt', '--with-float=soft'],
274 glibcs
=[{'variant': 'n32-soft'},
277 'ccopts': '-mabi=32'},
278 {'variant': 'n64-soft',
279 'ccopts': '-mabi=64'}])
280 self
.add_config(arch
='mips64el',
283 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
284 '--with-arch-64=mips64r2',
285 '--with-arch-32=mips32r2'],
286 glibcs
=[{'variant': 'n32-nan2008'},
287 {'variant': 'nan2008',
289 'ccopts': '-mabi=32'},
290 {'variant': 'n64-nan2008',
291 'ccopts': '-mabi=64'}])
292 self
.add_config(arch
='mips64el',
294 variant
='nan2008-soft',
295 gcc_cfg
=['--with-mips-plt', '--with-nan=2008',
296 '--with-arch-64=mips64r2',
297 '--with-arch-32=mips32r2',
298 '--with-float=soft'],
299 glibcs
=[{'variant': 'n32-nan2008-soft'},
300 {'variant': 'nan2008-soft',
302 'ccopts': '-mabi=32'},
303 {'variant': 'n64-nan2008-soft',
304 'ccopts': '-mabi=64'}])
305 self
.add_config(arch
='nios2',
307 self
.add_config(arch
='powerpc',
309 gcc_cfg
=['--disable-multilib', '--enable-secureplt'],
310 extra_glibcs
=[{'variant': 'power4',
311 'ccopts': '-mcpu=power4',
312 'cfg': ['--with-cpu=power4']}])
313 self
.add_config(arch
='powerpc',
316 gcc_cfg
=['--disable-multilib', '--with-float=soft',
317 '--enable-secureplt'])
318 self
.add_config(arch
='powerpc64',
320 gcc_cfg
=['--disable-multilib', '--enable-secureplt'])
321 self
.add_config(arch
='powerpc64le',
323 gcc_cfg
=['--disable-multilib', '--enable-secureplt'])
324 self
.add_config(arch
='powerpc',
325 os_name
='linux-gnuspe',
326 gcc_cfg
=['--disable-multilib', '--enable-secureplt',
327 '--enable-e500-double'])
328 self
.add_config(arch
='powerpc',
329 os_name
='linux-gnuspe',
331 gcc_cfg
=['--disable-multilib', '--enable-secureplt'])
332 self
.add_config(arch
='s390x',
335 {'arch': 's390', 'ccopts': '-m31'}])
336 self
.add_config(arch
='sh3',
338 self
.add_config(arch
='sh3eb',
340 self
.add_config(arch
='sh4',
342 self
.add_config(arch
='sh4eb',
344 self
.add_config(arch
='sh4',
347 gcc_cfg
=['--without-fp'])
348 self
.add_config(arch
='sh4eb',
351 gcc_cfg
=['--without-fp'])
352 self
.add_config(arch
='sparc64',
356 'ccopts': '-m32 -mlong-double-128'}],
357 extra_glibcs
=[{'variant': 'disable-multi-arch',
358 'cfg': ['--disable-multi-arch']},
359 {'variant': 'disable-multi-arch',
361 'ccopts': '-m32 -mlong-double-128',
362 'cfg': ['--disable-multi-arch']}])
363 self
.add_config(arch
='tilegx',
366 {'variant': '32', 'ccopts': '-m32'}])
367 self
.add_config(arch
='tilegxbe',
370 {'variant': '32', 'ccopts': '-m32'}])
371 self
.add_config(arch
='x86_64',
373 gcc_cfg
=['--with-multilib-list=m64,m32,mx32'],
375 {'variant': 'x32', 'ccopts': '-mx32'},
376 {'arch': 'i686', 'ccopts': '-m32 -march=i686'}],
377 extra_glibcs
=[{'variant': 'disable-multi-arch',
378 'cfg': ['--disable-multi-arch']},
379 {'variant': 'static-pie',
380 'cfg': ['--enable-static-pie']},
381 {'variant': 'x32-static-pie',
383 'cfg': ['--enable-static-pie']},
384 {'variant': 'static-pie',
386 'ccopts': '-m32 -march=i686',
387 'cfg': ['--enable-static-pie']},
388 {'variant': 'disable-multi-arch',
390 'ccopts': '-m32 -march=i686',
391 'cfg': ['--disable-multi-arch']},
393 'ccopts': '-m32 -march=i486'},
395 'ccopts': '-m32 -march=i586'}])
397 def add_config(self
, **args
):
398 """Add an individual build configuration."""
399 cfg
= Config(self
, **args
)
400 if cfg
.name
in self
.configs
:
401 print('error: duplicate config %s' % cfg
.name
)
403 self
.configs
[cfg
.name
] = cfg
404 for c
in cfg
.all_glibcs
:
405 if c
.name
in self
.glibc_configs
:
406 print('error: duplicate glibc config %s' % c
.name
)
408 self
.glibc_configs
[c
.name
] = c
410 def component_srcdir(self
, component
):
411 """Return the source directory for a given component, e.g. gcc."""
412 return os
.path
.join(self
.srcdir
, component
)
414 def component_builddir(self
, action
, config
, component
, subconfig
=None):
415 """Return the directory to use for a build."""
418 assert subconfig
is None
419 return os
.path
.join(self
.builddir
, action
, component
)
420 if subconfig
is None:
421 return os
.path
.join(self
.builddir
, action
, config
, component
)
423 # glibc build as part of compiler build.
424 return os
.path
.join(self
.builddir
, action
, config
, component
,
427 def compiler_installdir(self
, config
):
428 """Return the directory in which to install a compiler."""
429 return os
.path
.join(self
.installdir
, 'compilers', config
)
431 def compiler_bindir(self
, config
):
432 """Return the directory in which to find compiler binaries."""
433 return os
.path
.join(self
.compiler_installdir(config
), 'bin')
435 def compiler_sysroot(self
, config
):
436 """Return the sysroot directory for a compiler."""
437 return os
.path
.join(self
.compiler_installdir(config
), 'sysroot')
439 def glibc_installdir(self
, config
):
440 """Return the directory in which to install glibc."""
441 return os
.path
.join(self
.installdir
, 'glibcs', config
)
443 def run_builds(self
, action
, configs
):
444 """Run the requested builds."""
445 if action
== 'checkout':
446 self
.checkout(configs
)
448 if action
== 'bot-cycle':
450 print('error: configurations specified for bot-cycle')
456 print('error: configurations specified for bot')
460 if action
== 'host-libraries' and configs
:
461 print('error: configurations specified for host-libraries')
463 self
.clear_last_build_state(action
)
464 build_time
= datetime
.datetime
.utcnow()
465 if action
== 'host-libraries':
466 build_components
= ('gmp', 'mpfr', 'mpc')
469 self
.build_host_libraries()
470 elif action
== 'compilers':
471 build_components
= ('binutils', 'gcc', 'glibc', 'linux', 'mig',
473 old_components
= ('gmp', 'mpfr', 'mpc')
474 old_versions
= self
.build_state
['host-libraries']['build-versions']
475 self
.build_compilers(configs
)
477 build_components
= ('glibc',)
478 old_components
= ('gmp', 'mpfr', 'mpc', 'binutils', 'gcc', 'linux',
479 'mig', 'gnumach', 'hurd')
480 old_versions
= self
.build_state
['compilers']['build-versions']
481 self
.build_glibcs(configs
)
485 # Partial build, do not update stored state.
488 for k
in build_components
:
489 if k
in self
.versions
:
490 build_versions
[k
] = {'version': self
.versions
[k
]['version'],
491 'revision': self
.versions
[k
]['revision']}
492 for k
in old_components
:
493 if k
in old_versions
:
494 build_versions
[k
] = {'version': old_versions
[k
]['version'],
495 'revision': old_versions
[k
]['revision']}
496 self
.update_build_state(action
, build_time
, build_versions
)
499 def remove_dirs(*args
):
500 """Remove directories and their contents if they exist."""
502 shutil
.rmtree(dir, ignore_errors
=True)
505 def remove_recreate_dirs(*args
):
506 """Remove directories if they exist, and create them as empty."""
507 Context
.remove_dirs(*args
)
509 os
.makedirs(dir, exist_ok
=True)
511 def add_makefile_cmdlist(self
, target
, cmdlist
, logsdir
):
512 """Add makefile text for a list of commands."""
513 commands
= cmdlist
.makefile_commands(self
.wrapper
, logsdir
)
514 self
.makefile_pieces
.append('all: %s\n.PHONY: %s\n%s:\n%s\n' %
515 (target
, target
, target
, commands
))
516 self
.status_log_list
.extend(cmdlist
.status_logs(logsdir
))
518 def write_files(self
):
519 """Write out the Makefile and wrapper script."""
520 mftext
= ''.join(self
.makefile_pieces
)
521 with
open(self
.makefile
, 'w') as f
:
531 'prev_status=$prev_base-status.txt\n'
532 'this_status=$this_base-status.txt\n'
533 'this_log=$this_base-log.txt\n'
534 'date > "$this_log"\n'
535 'echo >> "$this_log"\n'
536 'echo "Description: $desc" >> "$this_log"\n'
537 'printf "%s" "Command:" >> "$this_log"\n'
538 'for word in "$@"; do\n'
539 ' if expr "$word" : "[]+,./0-9@A-Z_a-z-]\\\\{1,\\\\}\\$" > /dev/null; then\n'
540 ' printf " %s" "$word"\n'
543 ' printf "%s" "$word" | sed -e "s/\'/\'\\\\\\\\\'\'/"\n'
546 'done >> "$this_log"\n'
547 'echo >> "$this_log"\n'
548 'echo "Directory: $dir" >> "$this_log"\n'
549 'echo "Path addition: $path" >> "$this_log"\n'
550 'echo >> "$this_log"\n'
553 ' echo >> "$this_log"\n'
554 ' echo "$1: $desc" > "$this_status"\n'
555 ' echo "$1: $desc" >> "$this_log"\n'
556 ' echo >> "$this_log"\n'
557 ' date >> "$this_log"\n'
558 ' echo "$1: $desc"\n'
563 ' if [ "$1" != "0" ]; then\n'
564 ' record_status FAIL\n'
567 'if [ "$prev_base" ] && ! grep -q "^PASS" "$prev_status"; then\n'
568 ' record_status UNRESOLVED\n'
570 'if [ "$dir" ]; then\n'
572 ' check_error "$?"\n'
574 'if [ "$path" ]; then\n'
575 ' PATH=$path:$PATH\n'
577 '"$@" < /dev/null >> "$this_log" 2>&1\n'
579 'record_status PASS\n')
580 with
open(self
.wrapper
, 'w') as f
:
581 f
.write(wrapper_text
)
583 mode_exec
= (stat
.S_IRWXU|stat
.S_IRGRP|stat
.S_IXGRP|
584 stat
.S_IROTH|stat
.S_IXOTH
)
585 os
.chmod(self
.wrapper
, mode_exec
)
588 'if ! [ -f tests.sum ]; then\n'
589 ' echo "No test summary available."\n'
594 ' echo "Contents of $1:"\n'
598 ' echo "End of contents of $1."\n'
601 'save_file tests.sum\n'
602 'non_pass_tests=$(grep -v "^PASS: " tests.sum | sed -e "s/^PASS: //")\n'
603 'for t in $non_pass_tests; do\n'
604 ' if [ -f "$t.out" ]; then\n'
605 ' save_file "$t.out"\n'
608 with
open(self
.save_logs
, 'w') as f
:
609 f
.write(save_logs_text
)
610 os
.chmod(self
.save_logs
, mode_exec
)
613 """Do the actual build."""
614 cmd
= ['make', '-j%d' % self
.parallelism
]
615 subprocess
.run(cmd
, cwd
=self
.builddir
, check
=True)
617 def build_host_libraries(self
):
618 """Build the host libraries."""
619 installdir
= self
.host_libraries_installdir
620 builddir
= os
.path
.join(self
.builddir
, 'host-libraries')
621 logsdir
= os
.path
.join(self
.logsdir
, 'host-libraries')
622 self
.remove_recreate_dirs(installdir
, builddir
, logsdir
)
623 cmdlist
= CommandList('host-libraries', self
.keep
)
624 self
.build_host_library(cmdlist
, 'gmp')
625 self
.build_host_library(cmdlist
, 'mpfr',
626 ['--with-gmp=%s' % installdir
])
627 self
.build_host_library(cmdlist
, 'mpc',
628 ['--with-gmp=%s' % installdir
,
629 '--with-mpfr=%s' % installdir
])
630 cmdlist
.add_command('done', ['touch', os
.path
.join(installdir
, 'ok')])
631 self
.add_makefile_cmdlist('host-libraries', cmdlist
, logsdir
)
633 def build_host_library(self
, cmdlist
, lib
, extra_opts
=None):
634 """Build one host library."""
635 srcdir
= self
.component_srcdir(lib
)
636 builddir
= self
.component_builddir('host-libraries', None, lib
)
637 installdir
= self
.host_libraries_installdir
638 cmdlist
.push_subdesc(lib
)
639 cmdlist
.create_use_dir(builddir
)
640 cfg_cmd
= [os
.path
.join(srcdir
, 'configure'),
641 '--prefix=%s' % installdir
,
644 cfg_cmd
.extend (extra_opts
)
645 cmdlist
.add_command('configure', cfg_cmd
)
646 cmdlist
.add_command('build', ['make'])
647 cmdlist
.add_command('check', ['make', 'check'])
648 cmdlist
.add_command('install', ['make', 'install'])
649 cmdlist
.cleanup_dir()
650 cmdlist
.pop_subdesc()
652 def build_compilers(self
, configs
):
653 """Build the compilers."""
655 self
.remove_dirs(os
.path
.join(self
.builddir
, 'compilers'))
656 self
.remove_dirs(os
.path
.join(self
.installdir
, 'compilers'))
657 self
.remove_dirs(os
.path
.join(self
.logsdir
, 'compilers'))
658 configs
= sorted(self
.configs
.keys())
660 self
.configs
[c
].build()
662 def build_glibcs(self
, configs
):
663 """Build the glibcs."""
665 self
.remove_dirs(os
.path
.join(self
.builddir
, 'glibcs'))
666 self
.remove_dirs(os
.path
.join(self
.installdir
, 'glibcs'))
667 self
.remove_dirs(os
.path
.join(self
.logsdir
, 'glibcs'))
668 configs
= sorted(self
.glibc_configs
.keys())
670 self
.glibc_configs
[c
].build()
672 def load_versions_json(self
):
673 """Load information about source directory versions."""
674 if not os
.access(self
.versions_json
, os
.F_OK
):
677 with
open(self
.versions_json
, 'r') as f
:
678 self
.versions
= json
.load(f
)
680 def store_json(self
, data
, filename
):
681 """Store information in a JSON file."""
682 filename_tmp
= filename
+ '.tmp'
683 with
open(filename_tmp
, 'w') as f
:
684 json
.dump(data
, f
, indent
=2, sort_keys
=True)
685 os
.rename(filename_tmp
, filename
)
687 def store_versions_json(self
):
688 """Store information about source directory versions."""
689 self
.store_json(self
.versions
, self
.versions_json
)
691 def set_component_version(self
, component
, version
, explicit
, revision
):
692 """Set the version information for a component."""
693 self
.versions
[component
] = {'version': version
,
694 'explicit': explicit
,
695 'revision': revision
}
696 self
.store_versions_json()
698 def checkout(self
, versions
):
699 """Check out the desired component versions."""
700 default_versions
= {'binutils': 'vcs-2.30',
702 'glibc': 'vcs-mainline',
711 explicit_versions
= {}
714 for k
in default_versions
.keys():
718 if k
in use_versions
:
719 print('error: multiple versions for %s' % k
)
722 explicit_versions
[k
] = True
726 print('error: unknown component in %s' % v
)
728 for k
in default_versions
.keys():
729 if k
not in use_versions
:
730 if k
in self
.versions
and self
.versions
[k
]['explicit']:
731 use_versions
[k
] = self
.versions
[k
]['version']
732 explicit_versions
[k
] = True
734 use_versions
[k
] = default_versions
[k
]
735 explicit_versions
[k
] = False
736 os
.makedirs(self
.srcdir
, exist_ok
=True)
737 for k
in sorted(default_versions
.keys()):
738 update
= os
.access(self
.component_srcdir(k
), os
.F_OK
)
741 k
in self
.versions
and
742 v
!= self
.versions
[k
]['version']):
743 if not self
.replace_sources
:
744 print('error: version of %s has changed from %s to %s, '
745 'use --replace-sources to check out again' %
746 (k
, self
.versions
[k
]['version'], v
))
748 shutil
.rmtree(self
.component_srcdir(k
))
750 if v
.startswith('vcs-'):
751 revision
= self
.checkout_vcs(k
, v
[4:], update
)
753 self
.checkout_tar(k
, v
, update
)
755 self
.set_component_version(k
, v
, explicit_versions
[k
], revision
)
756 if self
.get_script_text() != self
.script_text
:
757 # Rerun the checkout process in case the updated script
758 # uses different default versions or new components.
761 def checkout_vcs(self
, component
, version
, update
):
762 """Check out the given version of the given component from version
763 control. Return a revision identifier."""
764 if component
== 'binutils':
765 git_url
= 'git://sourceware.org/git/binutils-gdb.git'
766 if version
== 'mainline':
767 git_branch
= 'master'
769 trans
= str.maketrans({'.': '_'})
770 git_branch
= 'binutils-%s-branch' % version
.translate(trans
)
771 return self
.git_checkout(component
, git_url
, git_branch
, update
)
772 elif component
== 'gcc':
773 if version
== 'mainline':
776 trans
= str.maketrans({'.': '_'})
777 branch
= 'branches/gcc-%s-branch' % version
.translate(trans
)
778 svn_url
= 'svn://gcc.gnu.org/svn/gcc/%s' % branch
779 return self
.gcc_checkout(svn_url
, update
)
780 elif component
== 'glibc':
781 git_url
= 'git://sourceware.org/git/glibc.git'
782 if version
== 'mainline':
783 git_branch
= 'master'
785 git_branch
= 'release/%s/master' % version
786 r
= self
.git_checkout(component
, git_url
, git_branch
, update
)
787 self
.fix_glibc_timestamps()
789 elif component
== 'hurd':
790 git_url
= 'git://git.savannah.gnu.org/hurd/hurd.git'
791 git_branch
= 'master'
792 r
= self
.git_checkout(component
, git_url
, git_branch
, update
)
793 subprocess
.run(['autoconf'],
794 cwd
=self
.component_srcdir(component
), check
=True)
797 print('error: component %s coming from VCS' % component
)
800 def git_checkout(self
, component
, git_url
, git_branch
, update
):
801 """Check out a component from git. Return a commit identifier."""
803 subprocess
.run(['git', 'remote', 'prune', 'origin'],
804 cwd
=self
.component_srcdir(component
), check
=True)
805 subprocess
.run(['git', 'pull', '-q'],
806 cwd
=self
.component_srcdir(component
), check
=True)
808 subprocess
.run(['git', 'clone', '-q', '-b', git_branch
, git_url
,
809 self
.component_srcdir(component
)], check
=True)
810 r
= subprocess
.run(['git', 'rev-parse', 'HEAD'],
811 cwd
=self
.component_srcdir(component
),
812 stdout
=subprocess
.PIPE
,
813 check
=True, universal_newlines
=True).stdout
816 def fix_glibc_timestamps(self
):
817 """Fix timestamps in a glibc checkout."""
818 # Ensure that builds do not try to regenerate generated files
819 # in the source tree.
820 srcdir
= self
.component_srcdir('glibc')
821 for dirpath
, dirnames
, filenames
in os
.walk(srcdir
):
823 if (f
== 'configure' or
824 f
== 'preconfigure' or
825 f
.endswith('-kw.h')):
826 to_touch
= os
.path
.join(dirpath
, f
)
827 subprocess
.run(['touch', to_touch
], check
=True)
829 def gcc_checkout(self
, svn_url
, update
):
830 """Check out GCC from SVN. Return the revision number."""
832 subprocess
.run(['svn', 'co', '-q', svn_url
,
833 self
.component_srcdir('gcc')], check
=True)
834 subprocess
.run(['contrib/gcc_update', '--silent'],
835 cwd
=self
.component_srcdir('gcc'), check
=True)
836 r
= subprocess
.run(['svnversion', self
.component_srcdir('gcc')],
837 stdout
=subprocess
.PIPE
,
838 check
=True, universal_newlines
=True).stdout
841 def checkout_tar(self
, component
, version
, update
):
842 """Check out the given version of the given component from a
846 url_map
= {'binutils': 'https://ftp.gnu.org/gnu/binutils/binutils-%(version)s.tar.bz2',
847 'gcc': 'https://ftp.gnu.org/gnu/gcc/gcc-%(version)s/gcc-%(version)s.tar.bz2',
848 'gmp': 'https://ftp.gnu.org/gnu/gmp/gmp-%(version)s.tar.xz',
849 'linux': 'https://www.kernel.org/pub/linux/kernel/v4.x/linux-%(version)s.tar.xz',
850 'mpc': 'https://ftp.gnu.org/gnu/mpc/mpc-%(version)s.tar.gz',
851 'mpfr': 'https://ftp.gnu.org/gnu/mpfr/mpfr-%(version)s.tar.xz',
852 'mig': 'https://ftp.gnu.org/gnu/mig/mig-%(version)s.tar.bz2',
853 'gnumach': 'https://ftp.gnu.org/gnu/gnumach/gnumach-%(version)s.tar.bz2',
854 'hurd': 'https://ftp.gnu.org/gnu/hurd/hurd-%(version)s.tar.bz2'}
855 if component
not in url_map
:
856 print('error: component %s coming from tarball' % component
)
858 url
= url_map
[component
] % {'version': version
}
859 filename
= os
.path
.join(self
.srcdir
, url
.split('/')[-1])
860 response
= urllib
.request
.urlopen(url
)
861 data
= response
.read()
862 with
open(filename
, 'wb') as f
:
864 subprocess
.run(['tar', '-C', self
.srcdir
, '-x', '-f', filename
],
866 os
.rename(os
.path
.join(self
.srcdir
, '%s-%s' % (component
, version
)),
867 self
.component_srcdir(component
))
870 def load_build_state_json(self
):
871 """Load information about the state of previous builds."""
872 if os
.access(self
.build_state_json
, os
.F_OK
):
873 with
open(self
.build_state_json
, 'r') as f
:
874 self
.build_state
= json
.load(f
)
876 self
.build_state
= {}
877 for k
in ('host-libraries', 'compilers', 'glibcs'):
878 if k
not in self
.build_state
:
879 self
.build_state
[k
] = {}
880 if 'build-time' not in self
.build_state
[k
]:
881 self
.build_state
[k
]['build-time'] = ''
882 if 'build-versions' not in self
.build_state
[k
]:
883 self
.build_state
[k
]['build-versions'] = {}
884 if 'build-results' not in self
.build_state
[k
]:
885 self
.build_state
[k
]['build-results'] = {}
886 if 'result-changes' not in self
.build_state
[k
]:
887 self
.build_state
[k
]['result-changes'] = {}
888 if 'ever-passed' not in self
.build_state
[k
]:
889 self
.build_state
[k
]['ever-passed'] = []
891 def store_build_state_json(self
):
892 """Store information about the state of previous builds."""
893 self
.store_json(self
.build_state
, self
.build_state_json
)
895 def clear_last_build_state(self
, action
):
896 """Clear information about the state of part of the build."""
897 # We clear the last build time and versions when starting a
898 # new build. The results of the last build are kept around,
899 # as comparison is still meaningful if this build is aborted
900 # and a new one started.
901 self
.build_state
[action
]['build-time'] = ''
902 self
.build_state
[action
]['build-versions'] = {}
903 self
.store_build_state_json()
905 def update_build_state(self
, action
, build_time
, build_versions
):
906 """Update the build state after a build."""
907 build_time
= build_time
.replace(microsecond
=0)
908 self
.build_state
[action
]['build-time'] = str(build_time
)
909 self
.build_state
[action
]['build-versions'] = build_versions
911 for log
in self
.status_log_list
:
912 with
open(log
, 'r') as f
:
914 log_text
= log_text
.rstrip()
915 m
= re
.fullmatch('([A-Z]+): (.*)', log_text
)
917 test_name
= m
.group(2)
918 assert test_name
not in build_results
919 build_results
[test_name
] = result
920 old_build_results
= self
.build_state
[action
]['build-results']
921 self
.build_state
[action
]['build-results'] = build_results
923 all_tests
= set(old_build_results
.keys()) |
set(build_results
.keys())
925 if t
in old_build_results
:
926 old_res
= old_build_results
[t
]
928 old_res
= '(New test)'
929 if t
in build_results
:
930 new_res
= build_results
[t
]
932 new_res
= '(Test removed)'
933 if old_res
!= new_res
:
934 result_changes
[t
] = '%s -> %s' % (old_res
, new_res
)
935 self
.build_state
[action
]['result-changes'] = result_changes
936 old_ever_passed
= {t
for t
in self
.build_state
[action
]['ever-passed']
937 if t
in build_results
}
938 new_passes
= {t
for t
in build_results
if build_results
[t
] == 'PASS'}
939 self
.build_state
[action
]['ever-passed'] = sorted(old_ever_passed |
941 self
.store_build_state_json()
943 def load_bot_config_json(self
):
944 """Load bot configuration."""
945 with
open(self
.bot_config_json
, 'r') as f
:
946 self
.bot_config
= json
.load(f
)
948 def part_build_old(self
, action
, delay
):
949 """Return whether the last build for a given action was at least a
950 given number of seconds ago, or does not have a time recorded."""
951 old_time_str
= self
.build_state
[action
]['build-time']
954 old_time
= datetime
.datetime
.strptime(old_time_str
,
956 new_time
= datetime
.datetime
.utcnow()
957 delta
= new_time
- old_time
958 return delta
.total_seconds() >= delay
961 """Run a single round of checkout and builds."""
962 print('Bot cycle starting %s.' % str(datetime
.datetime
.utcnow()))
963 self
.load_bot_config_json()
964 actions
= ('host-libraries', 'compilers', 'glibcs')
965 self
.bot_run_self(['--replace-sources'], 'checkout')
966 self
.load_versions_json()
967 if self
.get_script_text() != self
.script_text
:
968 print('Script changed, re-execing.')
969 # On script change, all parts of the build should be rerun.
971 self
.clear_last_build_state(a
)
973 check_components
= {'host-libraries': ('gmp', 'mpfr', 'mpc'),
974 'compilers': ('binutils', 'gcc', 'glibc', 'linux',
975 'mig', 'gnumach', 'hurd'),
976 'glibcs': ('glibc',)}
979 build_vers
= self
.build_state
[a
]['build-versions']
980 must_build
[a
] = False
981 if not self
.build_state
[a
]['build-time']:
985 for c
in check_components
[a
]:
987 old_vers
[c
] = build_vers
[c
]
988 new_vers
[c
] = {'version': self
.versions
[c
]['version'],
989 'revision': self
.versions
[c
]['revision']}
990 if new_vers
== old_vers
:
991 print('Versions for %s unchanged.' % a
)
993 print('Versions changed or rebuild forced for %s.' % a
)
994 if a
== 'compilers' and not self
.part_build_old(
995 a
, self
.bot_config
['compilers-rebuild-delay']):
996 print('Not requiring rebuild of compilers this soon.')
999 if must_build
['host-libraries']:
1000 must_build
['compilers'] = True
1001 if must_build
['compilers']:
1002 must_build
['glibcs'] = True
1005 print('Must rebuild %s.' % a
)
1006 self
.clear_last_build_state(a
)
1008 print('No need to rebuild %s.' % a
)
1009 if os
.access(self
.logsdir
, os
.F_OK
):
1010 shutil
.rmtree(self
.logsdir_old
, ignore_errors
=True)
1011 shutil
.copytree(self
.logsdir
, self
.logsdir_old
)
1014 build_time
= datetime
.datetime
.utcnow()
1015 print('Rebuilding %s at %s.' % (a
, str(build_time
)))
1016 self
.bot_run_self([], a
)
1017 self
.load_build_state_json()
1018 self
.bot_build_mail(a
, build_time
)
1019 print('Bot cycle done at %s.' % str(datetime
.datetime
.utcnow()))
1021 def bot_build_mail(self
, action
, build_time
):
1022 """Send email with the results of a build."""
1023 if not ('email-from' in self
.bot_config
and
1024 'email-server' in self
.bot_config
and
1025 'email-subject' in self
.bot_config
and
1026 'email-to' in self
.bot_config
):
1027 if not self
.email_warning
:
1028 print("Email not configured, not sending.")
1029 self
.email_warning
= True
1032 build_time
= build_time
.replace(microsecond
=0)
1033 subject
= (self
.bot_config
['email-subject'] %
1035 'build-time': str(build_time
)})
1036 results
= self
.build_state
[action
]['build-results']
1037 changes
= self
.build_state
[action
]['result-changes']
1038 ever_passed
= set(self
.build_state
[action
]['ever-passed'])
1039 versions
= self
.build_state
[action
]['build-versions']
1040 new_regressions
= {k
for k
in changes
if changes
[k
] == 'PASS -> FAIL'}
1041 all_regressions
= {k
for k
in ever_passed
if results
[k
] == 'FAIL'}
1042 all_fails
= {k
for k
in results
if results
[k
] == 'FAIL'}
1044 new_reg_list
= sorted(['FAIL: %s' % k
for k
in new_regressions
])
1045 new_reg_text
= ('New regressions:\n\n%s\n\n' %
1046 '\n'.join(new_reg_list
))
1050 all_reg_list
= sorted(['FAIL: %s' % k
for k
in all_regressions
])
1051 all_reg_text
= ('All regressions:\n\n%s\n\n' %
1052 '\n'.join(all_reg_list
))
1056 all_fail_list
= sorted(['FAIL: %s' % k
for k
in all_fails
])
1057 all_fail_text
= ('All failures:\n\n%s\n\n' %
1058 '\n'.join(all_fail_list
))
1062 changes_list
= sorted(changes
.keys())
1063 changes_list
= ['%s: %s' % (changes
[k
], k
) for k
in changes_list
]
1064 changes_text
= ('All changed results:\n\n%s\n\n' %
1065 '\n'.join(changes_list
))
1068 results_text
= (new_reg_text
+ all_reg_text
+ all_fail_text
+
1070 if not results_text
:
1071 results_text
= 'Clean build with unchanged results.\n\n'
1072 versions_list
= sorted(versions
.keys())
1073 versions_list
= ['%s: %s (%s)' % (k
, versions
[k
]['version'],
1074 versions
[k
]['revision'])
1075 for k
in versions_list
]
1076 versions_text
= ('Component versions for this build:\n\n%s\n' %
1077 '\n'.join(versions_list
))
1078 body_text
= results_text
+ versions_text
1079 msg
= email
.mime
.text
.MIMEText(body_text
)
1080 msg
['Subject'] = subject
1081 msg
['From'] = self
.bot_config
['email-from']
1082 msg
['To'] = self
.bot_config
['email-to']
1083 msg
['Message-ID'] = email
.utils
.make_msgid()
1084 msg
['Date'] = email
.utils
.format_datetime(datetime
.datetime
.utcnow())
1085 with smtplib
.SMTP(self
.bot_config
['email-server']) as s
:
1088 def bot_run_self(self
, opts
, action
, check
=True):
1089 """Run a copy of this script with given options."""
1090 cmd
= [sys
.executable
, sys
.argv
[0], '--keep=none',
1091 '-j%d' % self
.parallelism
]
1093 cmd
.extend([self
.topdir
, action
])
1095 subprocess
.run(cmd
, check
=check
)
1098 """Run repeated rounds of checkout and builds."""
1100 self
.load_bot_config_json()
1101 if not self
.bot_config
['run']:
1102 print('Bot exiting by request.')
1104 self
.bot_run_self([], 'bot-cycle', check
=False)
1105 self
.load_bot_config_json()
1106 if not self
.bot_config
['run']:
1107 print('Bot exiting by request.')
1109 time
.sleep(self
.bot_config
['delay'])
1110 if self
.get_script_text() != self
.script_text
:
1111 print('Script changed, bot re-execing.')
class Config(object):
    """A configuration for building a compiler and associated libraries."""

    def __init__(self, ctx, arch, os_name, variant=None, gcc_cfg=None,
                 first_gcc_cfg=None, glibcs=None, extra_glibcs=None):
        """Initialize a Config object.

        ctx is the global build context; arch and os_name form the
        <arch>-glibc-<os> target triplet.  gcc_cfg and first_gcc_cfg
        are extra configure options for the final and bootstrap GCC.
        glibcs and extra_glibcs are lists of keyword-argument dicts for
        the Glibc configurations built with (and in addition to) this
        compiler."""
        self.ctx = ctx
        self.arch = arch
        self.os = os_name
        self.variant = variant
        if variant is None:
            self.name = '%s-%s' % (arch, os_name)
        else:
            self.name = '%s-%s-%s' % (arch, os_name, variant)
        self.triplet = '%s-glibc-%s' % (arch, os_name)
        self.gcc_cfg = [] if gcc_cfg is None else gcc_cfg
        self.first_gcc_cfg = [] if first_gcc_cfg is None else first_gcc_cfg
        if glibcs is None:
            glibcs = [{'variant': variant}]
        if extra_glibcs is None:
            extra_glibcs = []
        glibcs = [Glibc(self, **g) for g in glibcs]
        extra_glibcs = [Glibc(self, **g) for g in extra_glibcs]
        self.all_glibcs = glibcs + extra_glibcs
        # Only the primary glibcs are built during the compiler build.
        self.compiler_glibcs = glibcs
        self.installdir = ctx.compiler_installdir(self.name)
        self.bindir = ctx.compiler_bindir(self.name)
        self.sysroot = ctx.compiler_sysroot(self.name)
        self.builddir = os.path.join(ctx.builddir, 'compilers', self.name)
        self.logsdir = os.path.join(ctx.logsdir, 'compilers', self.name)

    def component_builddir(self, component):
        """Return the directory to use for a (non-glibc) build."""
        return self.ctx.component_builddir('compilers', self.name, component)

    def build(self):
        """Generate commands to build this compiler."""
        self.ctx.remove_recreate_dirs(self.installdir, self.builddir,
                                      self.logsdir)
        cmdlist = CommandList('compilers-%s' % self.name, self.ctx.keep)
        cmdlist.add_command('check-host-libraries',
                            ['test', '-f',
                             os.path.join(self.ctx.host_libraries_installdir,
                                          'ok')])
        cmdlist.use_path(self.bindir)
        self.build_cross_tool(cmdlist, 'binutils', 'binutils',
                              ['--disable-gdb',
                               '--disable-libdecnumber',
                               '--disable-readline',
                               '--disable-sim'])
        if self.os.startswith('linux'):
            self.install_linux_headers(cmdlist)
        # First a C-only bootstrap compiler, sufficient to build glibc.
        self.build_gcc(cmdlist, True)
        if self.os == 'gnu':
            self.install_gnumach_headers(cmdlist)
            self.build_cross_tool(cmdlist, 'mig', 'mig')
            self.install_hurd_headers(cmdlist)
        for g in self.compiler_glibcs:
            cmdlist.push_subdesc('glibc')
            cmdlist.push_subdesc(g.name)
            g.build_glibc(cmdlist, True)
            cmdlist.pop_subdesc()
            cmdlist.pop_subdesc()
        # Then the full C/C++ compiler against the installed glibc.
        self.build_gcc(cmdlist, False)
        cmdlist.add_command('done', ['touch',
                                     os.path.join(self.installdir, 'ok')])
        self.ctx.add_makefile_cmdlist('compilers-%s' % self.name, cmdlist,
                                      self.logsdir)

    def build_cross_tool(self, cmdlist, tool_src, tool_build, extra_opts=None):
        """Build one cross tool.  tool_src names its source component,
        tool_build the build directory to use; extra_opts is an
        optional list of extra configure options."""
        srcdir = self.ctx.component_srcdir(tool_src)
        builddir = self.component_builddir(tool_build)
        cmdlist.push_subdesc(tool_build)
        cmdlist.create_use_dir(builddir)
        cfg_cmd = [os.path.join(srcdir, 'configure'),
                   '--prefix=%s' % self.installdir,
                   '--build=%s' % self.ctx.build_triplet,
                   '--host=%s' % self.ctx.build_triplet,
                   '--target=%s' % self.triplet,
                   '--with-sysroot=%s' % self.sysroot]
        if extra_opts is not None:
            cfg_cmd.extend(extra_opts)
        cmdlist.add_command('configure', cfg_cmd)
        cmdlist.add_command('build', ['make'])
        # Parallel "make install" for GCC has race conditions that can
        # cause it to fail; see
        # <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=42980>.  Such
        # problems are not known for binutils, but doing the
        # installation in parallel within a particular toolchain build
        # (as opposed to installation of one toolchain from
        # build-many-glibcs.py running in parallel to the installation
        # of other toolchains being built) is not known to be
        # significantly beneficial, so it is simplest just to disable
        # parallel install for cross tools here.
        cmdlist.add_command('install', ['make', '-j1', 'install'])
        cmdlist.cleanup_dir()
        cmdlist.pop_subdesc()

    def install_linux_headers(self, cmdlist):
        """Install Linux kernel headers."""
        # Map glibc architecture name prefixes to Linux ARCH values.
        # NOTE(review): interior entries of this table were lost in the
        # source mangling and are reconstructed — verify against the
        # upstream file.
        arch_map = {'aarch64': 'arm64',
                    'alpha': 'alpha',
                    'arm': 'arm',
                    'hppa': 'parisc',
                    'i486': 'x86',
                    'i586': 'x86',
                    'i686': 'x86',
                    'i786': 'x86',
                    'ia64': 'ia64',
                    'm68k': 'm68k',
                    'microblaze': 'microblaze',
                    'mips': 'mips',
                    'nios2': 'nios2',
                    'powerpc': 'powerpc',
                    'riscv64': 'riscv',
                    's390': 's390',
                    'sh': 'sh',
                    'sparc': 'sparc',
                    'x86_64': 'x86'}
        linux_arch = None
        for k in arch_map:
            if self.arch.startswith(k):
                linux_arch = arch_map[k]
                break
        assert linux_arch is not None
        srcdir = self.ctx.component_srcdir('linux')
        builddir = self.component_builddir('linux')
        headers_dir = os.path.join(self.sysroot, 'usr')
        cmdlist.push_subdesc('linux')
        cmdlist.create_use_dir(builddir)
        cmdlist.add_command('install-headers',
                            ['make', '-C', srcdir, 'O=%s' % builddir,
                             'ARCH=%s' % linux_arch,
                             'INSTALL_HDR_PATH=%s' % headers_dir,
                             'headers_install'])
        cmdlist.cleanup_dir()
        cmdlist.pop_subdesc()

    def install_gnumach_headers(self, cmdlist):
        """Install GNU Mach headers."""
        srcdir = self.ctx.component_srcdir('gnumach')
        builddir = self.component_builddir('gnumach')
        cmdlist.push_subdesc('gnumach')
        cmdlist.create_use_dir(builddir)
        cmdlist.add_command('configure',
                            [os.path.join(srcdir, 'configure'),
                             '--build=%s' % self.ctx.build_triplet,
                             '--host=%s' % self.triplet,
                             # TODO(review): reconstructed option —
                             # confirm against upstream.
                             '--prefix=',
                             'CC=%s-gcc -nostdlib' % self.triplet])
        cmdlist.add_command('install', ['make', 'DESTDIR=%s' % self.sysroot,
                                        'install-data'])
        cmdlist.cleanup_dir()
        cmdlist.pop_subdesc()

    def install_hurd_headers(self, cmdlist):
        """Install Hurd headers."""
        srcdir = self.ctx.component_srcdir('hurd')
        builddir = self.component_builddir('hurd')
        cmdlist.push_subdesc('hurd')
        cmdlist.create_use_dir(builddir)
        cmdlist.add_command('configure',
                            [os.path.join(srcdir, 'configure'),
                             '--build=%s' % self.ctx.build_triplet,
                             '--host=%s' % self.triplet,
                             '--prefix=',
                             '--disable-profile', '--without-parted',
                             'CC=%s-gcc -nostdlib' % self.triplet])
        cmdlist.add_command('install', ['make', 'prefix=%s' % self.sysroot,
                                        'no_deps=t', 'install-headers'])
        cmdlist.cleanup_dir()
        cmdlist.pop_subdesc()

    def build_gcc(self, cmdlist, bootstrap):
        """Build GCC.  If bootstrap is true, build the minimal C-only
        compiler used to build glibc; otherwise the full compiler."""
        # libsanitizer commonly breaks because of glibc header
        # changes, or on unusual targets.  libssp is of little
        # relevance with glibc's own stack checking support.
        cfg_opts = list(self.gcc_cfg)
        cfg_opts += ['--disable-libsanitizer', '--disable-libssp']
        host_libs = self.ctx.host_libraries_installdir
        cfg_opts += ['--with-gmp=%s' % host_libs,
                     '--with-mpfr=%s' % host_libs,
                     '--with-mpc=%s' % host_libs]
        if bootstrap:
            tool_build = 'gcc-first'
            # Building a static-only, C-only compiler that is
            # sufficient to build glibc.  Various libraries and
            # features that may require libc headers must be disabled.
            # When configuring with a sysroot, --with-newlib is
            # required to define inhibit_libc (to stop some parts of
            # libgcc including libc headers); --without-headers is not
            # sufficient.
            cfg_opts += ['--enable-languages=c', '--disable-shared',
                         '--disable-threads',
                         '--disable-libatomic',
                         '--disable-decimal-float',
                         '--disable-libffi',
                         '--disable-libgomp',
                         '--disable-libitm',
                         '--disable-libmpx',
                         '--disable-libquadmath',
                         '--without-headers', '--with-newlib',
                         '--with-glibc-version=%s' % self.ctx.glibc_version
                         ]
            cfg_opts += self.first_gcc_cfg
        else:
            tool_build = 'gcc'
            cfg_opts += ['--enable-languages=c,c++', '--enable-shared',
                         '--enable-threads']
        self.build_cross_tool(cmdlist, 'gcc', tool_build, cfg_opts)
class Glibc(object):
    """A configuration for building glibc."""

    def __init__(self, compiler, arch=None, os_name=None, variant=None,
                 cfg=None, ccopts=None):
        """Initialize a Glibc object.

        compiler is the Config used to build this glibc; arch and
        os_name default to the compiler's own; cfg is a list of extra
        configure options and ccopts extra compiler options appended to
        the gcc/g++ commands."""
        self.ctx = compiler.ctx
        self.compiler = compiler
        self.arch = compiler.arch if arch is None else arch
        self.os = compiler.os if os_name is None else os_name
        self.variant = variant
        if variant is None:
            self.name = '%s-%s' % (self.arch, self.os)
        else:
            self.name = '%s-%s-%s' % (self.arch, self.os, variant)
        self.triplet = '%s-glibc-%s' % (self.arch, self.os)
        self.cfg = [] if cfg is None else cfg
        self.ccopts = ccopts

    def tool_name(self, tool):
        """Return the name of a cross-compilation tool."""
        ctool = '%s-%s' % (self.compiler.triplet, tool)
        # Extra compiler options apply only to the compiler drivers.
        if self.ccopts and (tool == 'gcc' or tool == 'g++'):
            ctool = '%s %s' % (ctool, self.ccopts)
        return ctool

    def build(self):
        """Generate commands to build this glibc."""
        builddir = self.ctx.component_builddir('glibcs', self.name, 'glibc')
        installdir = self.ctx.glibc_installdir(self.name)
        logsdir = os.path.join(self.ctx.logsdir, 'glibcs', self.name)
        self.ctx.remove_recreate_dirs(installdir, builddir, logsdir)
        cmdlist = CommandList('glibcs-%s' % self.name, self.ctx.keep)
        cmdlist.add_command('check-compilers',
                            ['test', '-f',
                             os.path.join(self.compiler.installdir, 'ok')])
        cmdlist.use_path(self.compiler.bindir)
        self.build_glibc(cmdlist, False)
        self.ctx.add_makefile_cmdlist('glibcs-%s' % self.name, cmdlist,
                                      logsdir)

    def build_glibc(self, cmdlist, for_compiler):
        """Generate commands to build this glibc, either as part of a compiler
        build or with the bootstrapped compiler (and in the latter case, run
        tests and install it)."""
        srcdir = self.ctx.component_srcdir('glibc')
        if for_compiler:
            builddir = self.ctx.component_builddir('compilers',
                                                   self.compiler.name, 'glibc',
                                                   self.name)
            installdir = self.compiler.sysroot
            srcdir_copy = self.ctx.component_builddir('compilers',
                                                      self.compiler.name,
                                                      'glibc-src',
                                                      self.name)
        else:
            builddir = self.ctx.component_builddir('glibcs', self.name,
                                                   'glibc')
            installdir = self.ctx.glibc_installdir(self.name)
            srcdir_copy = self.ctx.component_builddir('glibcs', self.name,
                                                      'glibc-src')
        cmdlist.create_use_dir(builddir)
        # glibc builds write into the source directory, and even if
        # not intentionally there is a risk of bugs that involve
        # writing into the working directory.  To avoid possible
        # concurrency issues, copy the source directory.
        cmdlist.create_copy_dir(srcdir, srcdir_copy)
        # Hurd installs directly under the sysroot, not under /usr.
        use_usr = self.os != 'gnu'
        prefix = '/usr' if use_usr else ''
        cfg_cmd = [os.path.join(srcdir_copy, 'configure'),
                   '--prefix=%s' % prefix,
                   # TODO(review): reconstructed option — confirm
                   # against upstream.
                   '--enable-add-ons',
                   '--build=%s' % self.ctx.build_triplet,
                   '--host=%s' % self.triplet,
                   'CC=%s' % self.tool_name('gcc'),
                   'CXX=%s' % self.tool_name('g++'),
                   'AR=%s' % self.tool_name('ar'),
                   'AS=%s' % self.tool_name('as'),
                   'LD=%s' % self.tool_name('ld'),
                   'NM=%s' % self.tool_name('nm'),
                   'OBJCOPY=%s' % self.tool_name('objcopy'),
                   'OBJDUMP=%s' % self.tool_name('objdump'),
                   'RANLIB=%s' % self.tool_name('ranlib'),
                   'READELF=%s' % self.tool_name('readelf'),
                   'STRIP=%s' % self.tool_name('strip')]
        if self.os == 'gnu':
            cfg_cmd += ['MIG=%s' % self.tool_name('mig')]
        cfg_cmd += self.cfg
        cmdlist.add_command('configure', cfg_cmd)
        cmdlist.add_command('build', ['make'])
        cmdlist.add_command('install', ['make', 'install',
                                        'install_root=%s' % installdir])
        # GCC uses paths such as lib/../lib64, so make sure lib
        # directories always exist.
        mkdir_cmd = ['mkdir', '-p',
                     os.path.join(installdir, 'lib')]
        if use_usr:
            mkdir_cmd += [os.path.join(installdir, 'usr', 'lib')]
        cmdlist.add_command('mkdir-lib', mkdir_cmd)
        if not for_compiler:
            if self.ctx.strip:
                cmdlist.add_command('strip',
                                    ['sh', '-c',
                                     ('%s $(find %s/lib* -name "*.so")' %
                                      (self.tool_name('strip'), installdir))])
            cmdlist.add_command('check', ['make', 'check'])
            cmdlist.add_command('save-logs', [self.ctx.save_logs],
                                always_run=True)
        cmdlist.cleanup_dir('cleanup-src', srcdir_copy)
        cmdlist.cleanup_dir()
class Command(object):
    """A command run in the build process."""

    def __init__(self, desc, num, dir, path, command, always_run=False):
        """Initialize a Command object.

        desc is the human-readable description, num the command's
        sequence number (used in the log file base name), dir the
        directory to run in (or None), path a directory to prepend to
        PATH (or None), command the argument list, and always_run
        whether the command runs even after earlier failures."""
        self.dir = dir
        self.path = path
        self.desc = desc
        # Spaces would break the log file name derived from desc.
        trans = str.maketrans({' ': '-'})
        self.logbase = '%03d-%s' % (num, desc.translate(trans))
        self.command = command
        self.always_run = always_run

    @staticmethod
    def shell_make_quote_string(s):
        """Given a string not containing a newline, quote it for use by the
        shell and make."""
        assert '\n' not in s
        # Strings made only of safe characters need no quoting at all.
        if re.fullmatch('[]+,./0-9@A-Z_a-z-]+', s):
            return s
        # Single-quote for the shell, then double '$' for make.
        squote = str.maketrans({"'": "'\\''"})
        quoted = "'%s'" % s.translate(squote)
        return quoted.translate(str.maketrans({'$': '$$'}))

    @staticmethod
    def shell_make_quote_list(l, translate_make):
        """Given a list of strings not containing newlines, quote them for use
        by the shell and make, returning a single string.  If translate_make
        is true and the first string is 'make', change it to $(MAKE)."""
        words = [Command.shell_make_quote_string(s) for s in l]
        if translate_make and words[0] == 'make':
            words[0] = '$(MAKE)'
        return ' '.join(words)

    def shell_make_quote(self):
        """Return this command quoted for the shell and make."""
        return self.shell_make_quote_list(self.command, True)
class CommandList(object):
    """A list of commands run in the build process."""

    def __init__(self, desc, keep):
        """Initialize a CommandList object.

        desc is the top-level description for all commands; keep is the
        build-directory retention policy ('none', 'all' or 'failed')."""
        self.cmdlist = []
        self.dir = None
        self.path = None
        self.desc = [desc]
        self.keep = keep

    def desc_txt(self, desc):
        """Return the description to use for a command."""
        return '%s %s' % (' '.join(self.desc), desc)

    def use_dir(self, dir):
        """Set the default directory for subsequent commands."""
        self.dir = dir

    def use_path(self, path):
        """Set a directory to be prepended to the PATH for subsequent
        commands."""
        self.path = path

    def push_subdesc(self, subdesc):
        """Set the default subdescription for subsequent commands (e.g., the
        name of a component being built, within the series of commands
        building it)."""
        self.desc.append(subdesc)

    def pop_subdesc(self):
        """Pop a subdescription from the list of descriptions."""
        self.desc.pop()

    def create_use_dir(self, dir):
        """Remove and recreate a directory and use it for subsequent
        commands."""
        self.add_command_dir('rm', None, ['rm', '-rf', dir])
        self.add_command_dir('mkdir', None, ['mkdir', '-p', dir])
        self.use_dir(dir)

    def create_copy_dir(self, src, dest):
        """Remove a directory and recreate it as a copy from the given
        source directory."""
        self.add_command_dir('copy-rm', None, ['rm', '-rf', dest])
        parent = os.path.dirname(dest)
        self.add_command_dir('copy-mkdir', None, ['mkdir', '-p', parent])
        self.add_command_dir('copy', None, ['cp', '-a', src, dest])

    def add_command_dir(self, desc, dir, command, always_run=False):
        """Add a command to run in a given directory."""
        cmd = Command(self.desc_txt(desc), len(self.cmdlist), dir, self.path,
                      command, always_run)
        self.cmdlist.append(cmd)

    def add_command(self, desc, command, always_run=False):
        """Add a command to run in the default directory."""
        cmd = Command(self.desc_txt(desc), len(self.cmdlist), self.dir,
                      self.path, command, always_run)
        self.cmdlist.append(cmd)

    def cleanup_dir(self, desc='cleanup', dir=None):
        """Clean up a build directory.  If no directory is specified, the
        default directory is cleaned up and ceases to be the default
        directory."""
        if dir is None:
            dir = self.dir
            self.use_dir(None)
        if self.keep != 'all':
            # With keep == 'none' the cleanup runs even after failures.
            self.add_command_dir(desc, None, ['rm', '-rf', dir],
                                 always_run=(self.keep == 'none'))

    def makefile_commands(self, wrapper, logsdir):
        """Return the sequence of commands in the form of text for a Makefile.
        The given wrapper script takes arguments: base of logs for
        previous command, or empty; base of logs for this command;
        description; directory; PATH addition; the command itself."""
        # prev_base is the base of the name for logs of the previous
        # command that is not always-run (that is, a build command,
        # whose failure should stop subsequent build commands from
        # being run, as opposed to a cleanup command, which is run
        # even if previous commands failed).
        prev_base = ''
        cmds = []
        for c in self.cmdlist:
            ctxt = c.shell_make_quote()
            if prev_base and not c.always_run:
                prev_log = os.path.join(logsdir, prev_base)
            else:
                prev_log = ''
            this_log = os.path.join(logsdir, c.logbase)
            if not c.always_run:
                prev_base = c.logbase
            dir = '' if c.dir is None else c.dir
            path = '' if c.path is None else c.path
            prelims = [wrapper, prev_log, this_log, c.desc, dir, path]
            prelim_txt = Command.shell_make_quote_list(prelims, False)
            cmds.append('\t@%s %s' % (prelim_txt, ctxt))
        return '\n'.join(cmds)

    def status_logs(self, logsdir):
        """Return the list of log files with command status."""
        return [os.path.join(logsdir, '%s-status.txt' % c.logbase)
                for c in self.cmdlist]
def get_parser():
    """Return an argument parser for this module."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('-j', dest='parallelism',
                        help='Run this number of jobs in parallel',
                        type=int, default=os.cpu_count())
    parser.add_argument('--keep', dest='keep',
                        help='Whether to keep all build directories, '
                        'none or only those from failed builds',
                        default='none', choices=('none', 'all', 'failed'))
    parser.add_argument('--replace-sources', action='store_true',
                        help='Remove and replace source directories '
                        'with the wrong version of a component')
    parser.add_argument('--strip', action='store_true',
                        help='Strip installed glibc libraries')
    parser.add_argument('topdir',
                        help='Toplevel working directory')
    parser.add_argument('action',
                        help='What to do',
                        choices=('checkout', 'bot-cycle', 'bot',
                                 'host-libraries', 'compilers', 'glibcs'))
    parser.add_argument('configs',
                        help='Versions to check out or configurations to build',
                        nargs='*')
    return parser
def main(argv):
    """The main entry point.

    argv is the command-line argument list, excluding the program
    name."""
    opts = get_parser().parse_args(argv)
    ctx = Context(os.path.abspath(opts.topdir), opts.parallelism, opts.keep,
                  opts.replace_sources, opts.strip, opts.action)
    ctx.run_builds(opts.action, opts.configs)
1645 if __name__
== '__main__':