1 #!/usr/bin/python3
2 # Build many configurations of glibc.
3 # Copyright (C) 2016-2018 Free Software Foundation, Inc.
4 # This file is part of the GNU C Library.
6 # The GNU C Library is free software; you can redistribute it and/or
7 # modify it under the terms of the GNU Lesser General Public
8 # License as published by the Free Software Foundation; either
9 # version 2.1 of the License, or (at your option) any later version.
11 # The GNU C Library is distributed in the hope that it will be useful,
12 # but WITHOUT ANY WARRANTY; without even the implied warranty of
13 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
14 # Lesser General Public License for more details.
16 # You should have received a copy of the GNU Lesser General Public
17 # License along with the GNU C Library; if not, see
18 # <http://www.gnu.org/licenses/>.
20 """Build many configurations of glibc.
22 This script takes as arguments a directory name (containing a src
23 subdirectory with sources of the relevant toolchain components) and a
24 description of what to do: 'checkout', to check out sources into that
25 directory, 'bot-cycle', to run a series of checkout and build steps,
26 'bot', to run 'bot-cycle' repeatedly, 'host-libraries', to build
27 libraries required by the toolchain, 'compilers', to build
28 cross-compilers for various configurations, or 'glibcs', to build
29 glibc for various configurations and run the compilation parts of the
30 testsuite. Subsequent arguments name the versions of components to
31 check out (<component>-<version>), for 'checkout', or, for actions
32 other than 'checkout' and 'bot-cycle', name configurations for which
33 compilers or glibc are to be built.
35 """
37 import argparse
38 import datetime
39 import email.mime.text
40 import email.utils
41 import json
42 import os
43 import re
44 import shutil
45 import smtplib
46 import stat
47 import subprocess
48 import sys
49 import time
50 import urllib.request
52 try:
53 os.cpu_count
54 except:
55 import multiprocessing
56 os.cpu_count = lambda: multiprocessing.cpu_count()
58 try:
59 re.fullmatch
60 except:
61 re.fullmatch = lambda p,s,f=0: re.match(p+"\\Z",s,f)
63 try:
64 subprocess.run
65 except:
66 class _CompletedProcess:
67 def __init__(self, args, returncode, stdout=None, stderr=None):
68 self.args = args
69 self.returncode = returncode
70 self.stdout = stdout
71 self.stderr = stderr
73 def _run(*popenargs, input=None, timeout=None, check=False, **kwargs):
74 assert(timeout is None)
75 with subprocess.Popen(*popenargs, **kwargs) as process:
76 try:
77 stdout, stderr = process.communicate(input)
78 except:
79 process.kill()
80 process.wait()
81 raise
82 returncode = process.poll()
83 if check and returncode:
84 raise subprocess.CalledProcessError(returncode, popenargs)
85 return _CompletedProcess(popenargs, returncode, stdout, stderr)
87 subprocess.run = _run
90 class Context(object):
91 """The global state associated with builds in a given directory."""
93 def __init__(self, topdir, parallelism, keep, replace_sources, strip,
94 full_gcc, action):
95 """Initialize the context."""
96 self.topdir = topdir
97 self.parallelism = parallelism
98 self.keep = keep
99 self.replace_sources = replace_sources
100 self.strip = strip
101 self.full_gcc = full_gcc
102 self.srcdir = os.path.join(topdir, 'src')
103 self.versions_json = os.path.join(self.srcdir, 'versions.json')
104 self.build_state_json = os.path.join(topdir, 'build-state.json')
105 self.bot_config_json = os.path.join(topdir, 'bot-config.json')
106 self.installdir = os.path.join(topdir, 'install')
107 self.host_libraries_installdir = os.path.join(self.installdir,
108 'host-libraries')
109 self.builddir = os.path.join(topdir, 'build')
110 self.logsdir = os.path.join(topdir, 'logs')
111 self.logsdir_old = os.path.join(topdir, 'logs-old')
112 self.makefile = os.path.join(self.builddir, 'Makefile')
113 self.wrapper = os.path.join(self.builddir, 'wrapper')
114 self.save_logs = os.path.join(self.builddir, 'save-logs')
115 self.script_text = self.get_script_text()
116 if action != 'checkout':
117 self.build_triplet = self.get_build_triplet()
118 self.glibc_version = self.get_glibc_version()
119 self.configs = {}
120 self.glibc_configs = {}
121 self.makefile_pieces = ['.PHONY: all\n']
122 self.add_all_configs()
123 self.load_versions_json()
124 self.load_build_state_json()
125 self.status_log_list = []
126 self.email_warning = False
128 def get_script_text(self):
129 """Return the text of this script."""
130 with open(sys.argv[0], 'r') as f:
131 return f.read()
133 def exec_self(self):
134 """Re-execute this script with the same arguments."""
135 sys.stdout.flush()
136 os.execv(sys.executable, [sys.executable] + sys.argv)
138 def get_build_triplet(self):
139 """Determine the build triplet with config.guess."""
140 config_guess = os.path.join(self.component_srcdir('gcc'),
141 'config.guess')
142 cg_out = subprocess.run([config_guess], stdout=subprocess.PIPE,
143 check=True, universal_newlines=True).stdout
144 return cg_out.rstrip()
146 def get_glibc_version(self):
147 """Determine the glibc version number (major.minor)."""
148 version_h = os.path.join(self.component_srcdir('glibc'), 'version.h')
149 with open(version_h, 'r') as f:
150 lines = f.readlines()
151 starttext = '#define VERSION "'
152 for l in lines:
153 if l.startswith(starttext):
154 l = l[len(starttext):]
155 l = l.rstrip('"\n')
156 m = re.fullmatch('([0-9]+)\.([0-9]+)[.0-9]*', l)
157 return '%s.%s' % m.group(1, 2)
158 print('error: could not determine glibc version')
159 exit(1)
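    # For example (illustrative), get_glibc_version above maps a version.h
    # line of '#define VERSION "2.28.9000"' to a glibc_version of '2.28'.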
161 def add_all_configs(self):
162 """Add all known glibc build configurations."""
163 self.add_config(arch='aarch64',
164 os_name='linux-gnu',
165 extra_glibcs=[{'variant': 'disable-multi-arch',
166 'cfg': ['--disable-multi-arch']}])
167 self.add_config(arch='aarch64_be',
168 os_name='linux-gnu')
169 self.add_config(arch='alpha',
170 os_name='linux-gnu')
171 self.add_config(arch='arm',
172 os_name='linux-gnueabi')
173 self.add_config(arch='armeb',
174 os_name='linux-gnueabi')
175 self.add_config(arch='armeb',
176 os_name='linux-gnueabi',
177 variant='be8',
178 gcc_cfg=['--with-arch=armv7-a'])
179 self.add_config(arch='arm',
180 os_name='linux-gnueabihf',
181 gcc_cfg=['--with-float=hard', '--with-cpu=arm926ej-s'],
182 extra_glibcs=[{'variant': 'v7a',
183 'ccopts': '-march=armv7-a -mfpu=vfpv3'},
184 {'variant': 'v7a-disable-multi-arch',
185 'ccopts': '-march=armv7-a -mfpu=vfpv3',
186 'cfg': ['--disable-multi-arch']}])
187 self.add_config(arch='armeb',
188 os_name='linux-gnueabihf',
189 gcc_cfg=['--with-float=hard', '--with-cpu=arm926ej-s'])
190 self.add_config(arch='armeb',
191 os_name='linux-gnueabihf',
192 variant='be8',
193 gcc_cfg=['--with-float=hard', '--with-arch=armv7-a',
194 '--with-fpu=vfpv3'])
195 self.add_config(arch='hppa',
196 os_name='linux-gnu')
197 self.add_config(arch='i686',
198 os_name='gnu')
199 self.add_config(arch='ia64',
200 os_name='linux-gnu',
201 first_gcc_cfg=['--with-system-libunwind'])
202 self.add_config(arch='m68k',
203 os_name='linux-gnu',
204 gcc_cfg=['--disable-multilib'])
205 self.add_config(arch='m68k',
206 os_name='linux-gnu',
207 variant='coldfire',
208 gcc_cfg=['--with-arch=cf', '--disable-multilib'])
209 self.add_config(arch='m68k',
210 os_name='linux-gnu',
211 variant='coldfire-soft',
212 gcc_cfg=['--with-arch=cf', '--with-cpu=54455',
213 '--disable-multilib'])
214 self.add_config(arch='microblaze',
215 os_name='linux-gnu',
216 gcc_cfg=['--disable-multilib'])
217 self.add_config(arch='microblazeel',
218 os_name='linux-gnu',
219 gcc_cfg=['--disable-multilib'])
220 self.add_config(arch='mips64',
221 os_name='linux-gnu',
222 gcc_cfg=['--with-mips-plt'],
223 glibcs=[{'variant': 'n32'},
224 {'arch': 'mips',
225 'ccopts': '-mabi=32'},
226 {'variant': 'n64',
227 'ccopts': '-mabi=64'}])
228 self.add_config(arch='mips64',
229 os_name='linux-gnu',
230 variant='soft',
231 gcc_cfg=['--with-mips-plt', '--with-float=soft'],
232 glibcs=[{'variant': 'n32-soft'},
233 {'variant': 'soft',
234 'arch': 'mips',
235 'ccopts': '-mabi=32'},
236 {'variant': 'n64-soft',
237 'ccopts': '-mabi=64'}])
238 self.add_config(arch='mips64',
239 os_name='linux-gnu',
240 variant='nan2008',
241 gcc_cfg=['--with-mips-plt', '--with-nan=2008',
242 '--with-arch-64=mips64r2',
243 '--with-arch-32=mips32r2'],
244 glibcs=[{'variant': 'n32-nan2008'},
245 {'variant': 'nan2008',
246 'arch': 'mips',
247 'ccopts': '-mabi=32'},
248 {'variant': 'n64-nan2008',
249 'ccopts': '-mabi=64'}])
250 self.add_config(arch='mips64',
251 os_name='linux-gnu',
252 variant='nan2008-soft',
253 gcc_cfg=['--with-mips-plt', '--with-nan=2008',
254 '--with-arch-64=mips64r2',
255 '--with-arch-32=mips32r2',
256 '--with-float=soft'],
257 glibcs=[{'variant': 'n32-nan2008-soft'},
258 {'variant': 'nan2008-soft',
259 'arch': 'mips',
260 'ccopts': '-mabi=32'},
261 {'variant': 'n64-nan2008-soft',
262 'ccopts': '-mabi=64'}])
263 self.add_config(arch='mips64el',
264 os_name='linux-gnu',
265 gcc_cfg=['--with-mips-plt'],
266 glibcs=[{'variant': 'n32'},
267 {'arch': 'mipsel',
268 'ccopts': '-mabi=32'},
269 {'variant': 'n64',
270 'ccopts': '-mabi=64'}])
271 self.add_config(arch='mips64el',
272 os_name='linux-gnu',
273 variant='soft',
274 gcc_cfg=['--with-mips-plt', '--with-float=soft'],
275 glibcs=[{'variant': 'n32-soft'},
276 {'variant': 'soft',
277 'arch': 'mipsel',
278 'ccopts': '-mabi=32'},
279 {'variant': 'n64-soft',
280 'ccopts': '-mabi=64'}])
281 self.add_config(arch='mips64el',
282 os_name='linux-gnu',
283 variant='nan2008',
284 gcc_cfg=['--with-mips-plt', '--with-nan=2008',
285 '--with-arch-64=mips64r2',
286 '--with-arch-32=mips32r2'],
287 glibcs=[{'variant': 'n32-nan2008'},
288 {'variant': 'nan2008',
289 'arch': 'mipsel',
290 'ccopts': '-mabi=32'},
291 {'variant': 'n64-nan2008',
292 'ccopts': '-mabi=64'}])
293 self.add_config(arch='mips64el',
294 os_name='linux-gnu',
295 variant='nan2008-soft',
296 gcc_cfg=['--with-mips-plt', '--with-nan=2008',
297 '--with-arch-64=mips64r2',
298 '--with-arch-32=mips32r2',
299 '--with-float=soft'],
300 glibcs=[{'variant': 'n32-nan2008-soft'},
301 {'variant': 'nan2008-soft',
302 'arch': 'mipsel',
303 'ccopts': '-mabi=32'},
304 {'variant': 'n64-nan2008-soft',
305 'ccopts': '-mabi=64'}])
306 self.add_config(arch='nios2',
307 os_name='linux-gnu')
308 self.add_config(arch='powerpc',
309 os_name='linux-gnu',
310 gcc_cfg=['--disable-multilib', '--enable-secureplt'],
311 extra_glibcs=[{'variant': 'power4',
312 'ccopts': '-mcpu=power4',
313 'cfg': ['--with-cpu=power4']}])
314 self.add_config(arch='powerpc',
315 os_name='linux-gnu',
316 variant='soft',
317 gcc_cfg=['--disable-multilib', '--with-float=soft',
318 '--enable-secureplt'])
319 self.add_config(arch='powerpc64',
320 os_name='linux-gnu',
321 gcc_cfg=['--disable-multilib', '--enable-secureplt'])
322 self.add_config(arch='powerpc64le',
323 os_name='linux-gnu',
324 gcc_cfg=['--disable-multilib', '--enable-secureplt'])
325 self.add_config(arch='powerpc',
326 os_name='linux-gnuspe',
327 gcc_cfg=['--disable-multilib', '--enable-secureplt',
328 '--enable-e500-double', '--enable-obsolete'])
329 self.add_config(arch='powerpc',
330 os_name='linux-gnuspe',
331 variant='e500v1',
332 gcc_cfg=['--disable-multilib', '--enable-secureplt',
333 '--enable-obsolete'])
334 self.add_config(arch='riscv64',
335 os_name='linux-gnu',
336 variant='rv64imac-lp64',
337 gcc_cfg=['--with-arch=rv64imac', '--with-abi=lp64',
338 '--disable-multilib'])
339 self.add_config(arch='riscv64',
340 os_name='linux-gnu',
341 variant='rv64imafdc-lp64',
342 gcc_cfg=['--with-arch=rv64imafdc', '--with-abi=lp64',
343 '--disable-multilib'])
344 self.add_config(arch='riscv64',
345 os_name='linux-gnu',
346 variant='rv64imafdc-lp64d',
347 gcc_cfg=['--with-arch=rv64imafdc', '--with-abi=lp64d',
348 '--disable-multilib'])
349 self.add_config(arch='s390x',
350 os_name='linux-gnu',
351 glibcs=[{},
352 {'arch': 's390', 'ccopts': '-m31'}])
353 self.add_config(arch='sh3',
354 os_name='linux-gnu')
355 self.add_config(arch='sh3eb',
356 os_name='linux-gnu')
357 self.add_config(arch='sh4',
358 os_name='linux-gnu')
359 self.add_config(arch='sh4eb',
360 os_name='linux-gnu')
361 self.add_config(arch='sh4',
362 os_name='linux-gnu',
363 variant='soft',
364 gcc_cfg=['--without-fp'])
365 self.add_config(arch='sh4eb',
366 os_name='linux-gnu',
367 variant='soft',
368 gcc_cfg=['--without-fp'])
369 self.add_config(arch='sparc64',
370 os_name='linux-gnu',
371 glibcs=[{},
372 {'arch': 'sparcv9',
373 'ccopts': '-m32 -mlong-double-128'}],
374 extra_glibcs=[{'variant': 'disable-multi-arch',
375 'cfg': ['--disable-multi-arch']},
376 {'variant': 'disable-multi-arch',
377 'arch': 'sparcv9',
378 'ccopts': '-m32 -mlong-double-128',
379 'cfg': ['--disable-multi-arch']}])
380 self.add_config(arch='x86_64',
381 os_name='linux-gnu',
382 gcc_cfg=['--with-multilib-list=m64,m32,mx32'],
383 glibcs=[{},
384 {'variant': 'x32', 'ccopts': '-mx32'},
385 {'arch': 'i686', 'ccopts': '-m32 -march=i686'}],
386 extra_glibcs=[{'variant': 'disable-multi-arch',
387 'cfg': ['--disable-multi-arch']},
388 {'variant': 'enable-obsolete',
389 'cfg': ['--enable-obsolete-rpc',
390 '--enable-obsolete-nsl']},
391 {'variant': 'static-pie',
392 'cfg': ['--enable-static-pie']},
393 {'variant': 'x32-static-pie',
394 'ccopts': '-mx32',
395 'cfg': ['--enable-static-pie']},
396 {'variant': 'static-pie',
397 'arch': 'i686',
398 'ccopts': '-m32 -march=i686',
399 'cfg': ['--enable-static-pie']},
400 {'variant': 'disable-multi-arch',
401 'arch': 'i686',
402 'ccopts': '-m32 -march=i686',
403 'cfg': ['--disable-multi-arch']},
404 {'variant': 'enable-obsolete',
405 'arch': 'i686',
406 'ccopts': '-m32 -march=i686',
407 'cfg': ['--enable-obsolete-rpc',
408 '--enable-obsolete-nsl']},
409 {'arch': 'i486',
410 'ccopts': '-m32 -march=i486'},
411 {'arch': 'i586',
412 'ccopts': '-m32 -march=i586'}])
414 def add_config(self, **args):
415 """Add an individual build configuration."""
416 cfg = Config(self, **args)
417 if cfg.name in self.configs:
418 print('error: duplicate config %s' % cfg.name)
419 exit(1)
420 self.configs[cfg.name] = cfg
421 for c in cfg.all_glibcs:
422 if c.name in self.glibc_configs:
423 print('error: duplicate glibc config %s' % c.name)
424 exit(1)
425 self.glibc_configs[c.name] = c
427 def component_srcdir(self, component):
428 """Return the source directory for a given component, e.g. gcc."""
429 return os.path.join(self.srcdir, component)
431 def component_builddir(self, action, config, component, subconfig=None):
432 """Return the directory to use for a build."""
433 if config is None:
434 # Host libraries.
435 assert subconfig is None
436 return os.path.join(self.builddir, action, component)
437 if subconfig is None:
438 return os.path.join(self.builddir, action, config, component)
439 else:
440 # glibc build as part of compiler build.
441 return os.path.join(self.builddir, action, config, component,
442 subconfig)
444 def compiler_installdir(self, config):
445 """Return the directory in which to install a compiler."""
446 return os.path.join(self.installdir, 'compilers', config)
448 def compiler_bindir(self, config):
449 """Return the directory in which to find compiler binaries."""
450 return os.path.join(self.compiler_installdir(config), 'bin')
452 def compiler_sysroot(self, config):
453 """Return the sysroot directory for a compiler."""
454 return os.path.join(self.compiler_installdir(config), 'sysroot')
456 def glibc_installdir(self, config):
457 """Return the directory in which to install glibc."""
458 return os.path.join(self.installdir, 'glibcs', config)
460 def run_builds(self, action, configs):
461 """Run the requested builds."""
462 if action == 'checkout':
463 self.checkout(configs)
464 return
465 if action == 'bot-cycle':
466 if configs:
467 print('error: configurations specified for bot-cycle')
468 exit(1)
469 self.bot_cycle()
470 return
471 if action == 'bot':
472 if configs:
473 print('error: configurations specified for bot')
474 exit(1)
475 self.bot()
476 return
477 if action == 'host-libraries' and configs:
478 print('error: configurations specified for host-libraries')
479 exit(1)
480 self.clear_last_build_state(action)
481 build_time = datetime.datetime.utcnow()
482 if action == 'host-libraries':
483 build_components = ('gmp', 'mpfr', 'mpc')
484 old_components = ()
485 old_versions = {}
486 self.build_host_libraries()
487 elif action == 'compilers':
488 build_components = ('binutils', 'gcc', 'glibc', 'linux', 'mig',
489 'gnumach', 'hurd')
490 old_components = ('gmp', 'mpfr', 'mpc')
491 old_versions = self.build_state['host-libraries']['build-versions']
492 self.build_compilers(configs)
493 else:
494 build_components = ('glibc',)
495 old_components = ('gmp', 'mpfr', 'mpc', 'binutils', 'gcc', 'linux',
496 'mig', 'gnumach', 'hurd')
497 old_versions = self.build_state['compilers']['build-versions']
498 self.build_glibcs(configs)
499 self.write_files()
500 self.do_build()
501 if configs:
502 # Partial build, do not update stored state.
503 return
504 build_versions = {}
505 for k in build_components:
506 if k in self.versions:
507 build_versions[k] = {'version': self.versions[k]['version'],
508 'revision': self.versions[k]['revision']}
509 for k in old_components:
510 if k in old_versions:
511 build_versions[k] = {'version': old_versions[k]['version'],
512 'revision': old_versions[k]['revision']}
513 self.update_build_state(action, build_time, build_versions)
515 @staticmethod
516 def remove_dirs(*args):
517 """Remove directories and their contents if they exist."""
518 for dir in args:
519 shutil.rmtree(dir, ignore_errors=True)
521 @staticmethod
522 def remove_recreate_dirs(*args):
523 """Remove directories if they exist, and create them as empty."""
524 Context.remove_dirs(*args)
525 for dir in args:
526 os.makedirs(dir, exist_ok=True)
528 def add_makefile_cmdlist(self, target, cmdlist, logsdir):
529 """Add makefile text for a list of commands."""
530 commands = cmdlist.makefile_commands(self.wrapper, logsdir)
531 self.makefile_pieces.append('all: %s\n.PHONY: %s\n%s:\n%s\n' %
532 (target, target, target, commands))
533 self.status_log_list.extend(cmdlist.status_logs(logsdir))
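    # The generated makefile fragment has this general shape (schematic
    # sketch; the wrapper arguments are described in
    # CommandList.makefile_commands below), e.g. for the target
    # glibcs-aarch64-linux-gnu:
    #
    #   all: glibcs-aarch64-linux-gnu
    #   .PHONY: glibcs-aarch64-linux-gnu
    #   glibcs-aarch64-linux-gnu:
    #   <tab>@<wrapper> <prev-log> <this-log> <desc> <dir> <path> <command>
    #
    # with one such tab-prefixed line per command in the list.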
535 def write_files(self):
536 """Write out the Makefile and wrapper script."""
537 mftext = ''.join(self.makefile_pieces)
538 with open(self.makefile, 'w') as f:
539 f.write(mftext)
540 wrapper_text = (
541 '#!/bin/sh\n'
542 'prev_base=$1\n'
543 'this_base=$2\n'
544 'desc=$3\n'
545 'dir=$4\n'
546 'path=$5\n'
547 'shift 5\n'
548 'prev_status=$prev_base-status.txt\n'
549 'this_status=$this_base-status.txt\n'
550 'this_log=$this_base-log.txt\n'
551 'date > "$this_log"\n'
552 'echo >> "$this_log"\n'
553 'echo "Description: $desc" >> "$this_log"\n'
554 'printf "%s" "Command:" >> "$this_log"\n'
555 'for word in "$@"; do\n'
556 ' if expr "$word" : "[]+,./0-9@A-Z_a-z-]\\\\{1,\\\\}\\$" > /dev/null; then\n'
557 ' printf " %s" "$word"\n'
558 ' else\n'
559 ' printf " \'"\n'
560 ' printf "%s" "$word" | sed -e "s/\'/\'\\\\\\\\\'\'/"\n'
561 ' printf "\'"\n'
562 ' fi\n'
563 'done >> "$this_log"\n'
564 'echo >> "$this_log"\n'
565 'echo "Directory: $dir" >> "$this_log"\n'
566 'echo "Path addition: $path" >> "$this_log"\n'
567 'echo >> "$this_log"\n'
568 'record_status ()\n'
569 '{\n'
570 ' echo >> "$this_log"\n'
571 ' echo "$1: $desc" > "$this_status"\n'
572 ' echo "$1: $desc" >> "$this_log"\n'
573 ' echo >> "$this_log"\n'
574 ' date >> "$this_log"\n'
575 ' echo "$1: $desc"\n'
576 ' exit 0\n'
577 '}\n'
578 'check_error ()\n'
579 '{\n'
580 ' if [ "$1" != "0" ]; then\n'
581 ' record_status FAIL\n'
582 ' fi\n'
583 '}\n'
584 'if [ "$prev_base" ] && ! grep -q "^PASS" "$prev_status"; then\n'
585 ' record_status UNRESOLVED\n'
586 'fi\n'
587 'if [ "$dir" ]; then\n'
588 ' cd "$dir"\n'
589 ' check_error "$?"\n'
590 'fi\n'
591 'if [ "$path" ]; then\n'
592 ' PATH=$path:$PATH\n'
593 'fi\n'
594 '"$@" < /dev/null >> "$this_log" 2>&1\n'
595 'check_error "$?"\n'
596 'record_status PASS\n')
597 with open(self.wrapper, 'w') as f:
598 f.write(wrapper_text)
599 # Mode 0o755.
600 mode_exec = (stat.S_IRWXU|stat.S_IRGRP|stat.S_IXGRP|
601 stat.S_IROTH|stat.S_IXOTH)
602 os.chmod(self.wrapper, mode_exec)
603 save_logs_text = (
604 '#!/bin/sh\n'
605 'if ! [ -f tests.sum ]; then\n'
606 ' echo "No test summary available."\n'
607 ' exit 0\n'
608 'fi\n'
609 'save_file ()\n'
610 '{\n'
611 ' echo "Contents of $1:"\n'
612 ' echo\n'
613 ' cat "$1"\n'
614 ' echo\n'
615 ' echo "End of contents of $1."\n'
616 ' echo\n'
617 '}\n'
618 'save_file tests.sum\n'
619 'non_pass_tests=$(grep -v "^PASS: " tests.sum | sed -e "s/^PASS: //")\n'
620 'for t in $non_pass_tests; do\n'
621 ' if [ -f "$t.out" ]; then\n'
622 ' save_file "$t.out"\n'
623 ' fi\n'
624 'done\n')
625 with open(self.save_logs, 'w') as f:
626 f.write(save_logs_text)
627 os.chmod(self.save_logs, mode_exec)
629 def do_build(self):
630 """Do the actual build."""
631 cmd = ['make', '-j%d' % self.parallelism]
632 subprocess.run(cmd, cwd=self.builddir, check=True)
634 def build_host_libraries(self):
635 """Build the host libraries."""
636 installdir = self.host_libraries_installdir
637 builddir = os.path.join(self.builddir, 'host-libraries')
638 logsdir = os.path.join(self.logsdir, 'host-libraries')
639 self.remove_recreate_dirs(installdir, builddir, logsdir)
640 cmdlist = CommandList('host-libraries', self.keep)
641 self.build_host_library(cmdlist, 'gmp')
642 self.build_host_library(cmdlist, 'mpfr',
643 ['--with-gmp=%s' % installdir])
644 self.build_host_library(cmdlist, 'mpc',
645 ['--with-gmp=%s' % installdir,
646 '--with-mpfr=%s' % installdir])
647 cmdlist.add_command('done', ['touch', os.path.join(installdir, 'ok')])
648 self.add_makefile_cmdlist('host-libraries', cmdlist, logsdir)
650 def build_host_library(self, cmdlist, lib, extra_opts=None):
651 """Build one host library."""
652 srcdir = self.component_srcdir(lib)
653 builddir = self.component_builddir('host-libraries', None, lib)
654 installdir = self.host_libraries_installdir
655 cmdlist.push_subdesc(lib)
656 cmdlist.create_use_dir(builddir)
657 cfg_cmd = [os.path.join(srcdir, 'configure'),
658 '--prefix=%s' % installdir,
659 '--disable-shared']
660 if extra_opts:
661 cfg_cmd.extend(extra_opts)
662 cmdlist.add_command('configure', cfg_cmd)
663 cmdlist.add_command('build', ['make'])
664 cmdlist.add_command('check', ['make', 'check'])
665 cmdlist.add_command('install', ['make', 'install'])
666 cmdlist.cleanup_dir()
667 cmdlist.pop_subdesc()
669 def build_compilers(self, configs):
670 """Build the compilers."""
671 if not configs:
672 self.remove_dirs(os.path.join(self.builddir, 'compilers'))
673 self.remove_dirs(os.path.join(self.installdir, 'compilers'))
674 self.remove_dirs(os.path.join(self.logsdir, 'compilers'))
675 configs = sorted(self.configs.keys())
676 for c in configs:
677 self.configs[c].build()
679 def build_glibcs(self, configs):
680 """Build the glibcs."""
681 if not configs:
682 self.remove_dirs(os.path.join(self.builddir, 'glibcs'))
683 self.remove_dirs(os.path.join(self.installdir, 'glibcs'))
684 self.remove_dirs(os.path.join(self.logsdir, 'glibcs'))
685 configs = sorted(self.glibc_configs.keys())
686 for c in configs:
687 self.glibc_configs[c].build()
689 def load_versions_json(self):
690 """Load information about source directory versions."""
691 if not os.access(self.versions_json, os.F_OK):
692 self.versions = {}
693 return
694 with open(self.versions_json, 'r') as f:
695 self.versions = json.load(f)
697 def store_json(self, data, filename):
698 """Store information in a JSON file."""
699 filename_tmp = filename + '.tmp'
700 with open(filename_tmp, 'w') as f:
701 json.dump(data, f, indent=2, sort_keys=True)
702 os.rename(filename_tmp, filename)
704 def store_versions_json(self):
705 """Store information about source directory versions."""
706 self.store_json(self.versions, self.versions_json)
708 def set_component_version(self, component, version, explicit, revision):
709 """Set the version information for a component."""
710 self.versions[component] = {'version': version,
711 'explicit': explicit,
712 'revision': revision}
713 self.store_versions_json()
715 def checkout(self, versions):
716 """Check out the desired component versions."""
717 default_versions = {'binutils': 'vcs-2.31',
718 'gcc': 'vcs-8',
719 'glibc': 'vcs-mainline',
720 'gmp': '6.1.2',
721 'linux': '4.18',
722 'mpc': '1.1.0',
723 'mpfr': '4.0.1',
724 'mig': 'vcs-mainline',
725 'gnumach': 'vcs-mainline',
726 'hurd': 'vcs-mainline'}
727 use_versions = {}
728 explicit_versions = {}
729 for v in versions:
730 found_v = False
731 for k in default_versions.keys():
732 kx = k + '-'
733 if v.startswith(kx):
734 vx = v[len(kx):]
735 if k in use_versions:
736 print('error: multiple versions for %s' % k)
737 exit(1)
738 use_versions[k] = vx
739 explicit_versions[k] = True
740 found_v = True
741 break
742 if not found_v:
743 print('error: unknown component in %s' % v)
744 exit(1)
745 for k in default_versions.keys():
746 if k not in use_versions:
747 if k in self.versions and self.versions[k]['explicit']:
748 use_versions[k] = self.versions[k]['version']
749 explicit_versions[k] = True
750 else:
751 use_versions[k] = default_versions[k]
752 explicit_versions[k] = False
753 os.makedirs(self.srcdir, exist_ok=True)
754 for k in sorted(default_versions.keys()):
755 update = os.access(self.component_srcdir(k), os.F_OK)
756 v = use_versions[k]
757 if (update and
758 k in self.versions and
759 v != self.versions[k]['version']):
760 if not self.replace_sources:
761 print('error: version of %s has changed from %s to %s, '
762 'use --replace-sources to check out again' %
763 (k, self.versions[k]['version'], v))
764 exit(1)
765 shutil.rmtree(self.component_srcdir(k))
766 update = False
767 if v.startswith('vcs-'):
768 revision = self.checkout_vcs(k, v[4:], update)
769 else:
770 self.checkout_tar(k, v, update)
771 revision = v
772 self.set_component_version(k, v, explicit_versions[k], revision)
773 if self.get_script_text() != self.script_text:
774 # Rerun the checkout process in case the updated script
775 # uses different default versions or new components.
776 self.exec_self()
778 def checkout_vcs(self, component, version, update):
779 """Check out the given version of the given component from version
780 control. Return a revision identifier."""
781 if component == 'binutils':
782 git_url = 'git://sourceware.org/git/binutils-gdb.git'
783 if version == 'mainline':
784 git_branch = 'master'
785 else:
786 trans = str.maketrans({'.': '_'})
787 git_branch = 'binutils-%s-branch' % version.translate(trans)
788 return self.git_checkout(component, git_url, git_branch, update)
789 elif component == 'gcc':
790 if version == 'mainline':
791 branch = 'trunk'
792 else:
793 trans = str.maketrans({'.': '_'})
794 branch = 'branches/gcc-%s-branch' % version.translate(trans)
795 svn_url = 'svn://gcc.gnu.org/svn/gcc/%s' % branch
796 return self.gcc_checkout(svn_url, update)
797 elif component == 'glibc':
798 git_url = 'git://sourceware.org/git/glibc.git'
799 if version == 'mainline':
800 git_branch = 'master'
801 else:
802 git_branch = 'release/%s/master' % version
803 r = self.git_checkout(component, git_url, git_branch, update)
804 self.fix_glibc_timestamps()
805 return r
806 elif component == 'gnumach':
807 git_url = 'git://git.savannah.gnu.org/hurd/gnumach.git'
808 git_branch = 'master'
809 r = self.git_checkout(component, git_url, git_branch, update)
810 subprocess.run(['autoreconf', '-i'],
811 cwd=self.component_srcdir(component), check=True)
812 return r
813 elif component == 'mig':
814 git_url = 'git://git.savannah.gnu.org/hurd/mig.git'
815 git_branch = 'master'
816 r = self.git_checkout(component, git_url, git_branch, update)
817 subprocess.run(['autoreconf', '-i'],
818 cwd=self.component_srcdir(component), check=True)
819 return r
820 elif component == 'hurd':
821 git_url = 'git://git.savannah.gnu.org/hurd/hurd.git'
822 git_branch = 'master'
823 r = self.git_checkout(component, git_url, git_branch, update)
824 subprocess.run(['autoconf'],
825 cwd=self.component_srcdir(component), check=True)
826 return r
827 else:
828 print('error: component %s coming from VCS' % component)
829 exit(1)
831 def git_checkout(self, component, git_url, git_branch, update):
832 """Check out a component from git. Return a commit identifier."""
833 if update:
834 subprocess.run(['git', 'remote', 'prune', 'origin'],
835 cwd=self.component_srcdir(component), check=True)
836 if self.replace_sources:
837 subprocess.run(['git', 'clean', '-dxfq'],
838 cwd=self.component_srcdir(component), check=True)
839 subprocess.run(['git', 'pull', '-q'],
840 cwd=self.component_srcdir(component), check=True)
841 else:
842 subprocess.run(['git', 'clone', '-q', '-b', git_branch, git_url,
843 self.component_srcdir(component)], check=True)
844 r = subprocess.run(['git', 'rev-parse', 'HEAD'],
845 cwd=self.component_srcdir(component),
846 stdout=subprocess.PIPE,
847 check=True, universal_newlines=True).stdout
848 return r.rstrip()
850 def fix_glibc_timestamps(self):
851 """Fix timestamps in a glibc checkout."""
852 # Ensure that builds do not try to regenerate generated files
853 # in the source tree.
854 srcdir = self.component_srcdir('glibc')
855 for dirpath, dirnames, filenames in os.walk(srcdir):
856 for f in filenames:
857 if (f == 'configure' or
858 f == 'preconfigure' or
859 f.endswith('-kw.h')):
860 to_touch = os.path.join(dirpath, f)
861 subprocess.run(['touch', to_touch], check=True)
863 def gcc_checkout(self, svn_url, update):
864 """Check out GCC from SVN. Return the revision number."""
865 if not update:
866 subprocess.run(['svn', 'co', '-q', svn_url,
867 self.component_srcdir('gcc')], check=True)
868 subprocess.run(['contrib/gcc_update', '--silent'],
869 cwd=self.component_srcdir('gcc'), check=True)
870 r = subprocess.run(['svnversion', self.component_srcdir('gcc')],
871 stdout=subprocess.PIPE,
872 check=True, universal_newlines=True).stdout
873 return r.rstrip()
875 def checkout_tar(self, component, version, update):
876 """Check out the given version of the given component from a
877 tarball."""
878 if update:
879 return
880 url_map = {'binutils': 'https://ftp.gnu.org/gnu/binutils/binutils-%(version)s.tar.bz2',
881 'gcc': 'https://ftp.gnu.org/gnu/gcc/gcc-%(version)s/gcc-%(version)s.tar.gz',
882 'gmp': 'https://ftp.gnu.org/gnu/gmp/gmp-%(version)s.tar.xz',
883 'linux': 'https://www.kernel.org/pub/linux/kernel/v4.x/linux-%(version)s.tar.xz',
884 'mpc': 'https://ftp.gnu.org/gnu/mpc/mpc-%(version)s.tar.gz',
885 'mpfr': 'https://ftp.gnu.org/gnu/mpfr/mpfr-%(version)s.tar.xz',
886 'mig': 'https://ftp.gnu.org/gnu/mig/mig-%(version)s.tar.bz2',
887 'gnumach': 'https://ftp.gnu.org/gnu/gnumach/gnumach-%(version)s.tar.bz2',
888 'hurd': 'https://ftp.gnu.org/gnu/hurd/hurd-%(version)s.tar.bz2'}
889 if component not in url_map:
890 print('error: component %s coming from tarball' % component)
891 exit(1)
892 url = url_map[component] % {'version': version}
893 filename = os.path.join(self.srcdir, url.split('/')[-1])
894 response = urllib.request.urlopen(url)
895 data = response.read()
896 with open(filename, 'wb') as f:
897 f.write(data)
898 subprocess.run(['tar', '-C', self.srcdir, '-x', '-f', filename],
899 check=True)
900 os.rename(os.path.join(self.srcdir, '%s-%s' % (component, version)),
901 self.component_srcdir(component))
902 os.remove(filename)
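    # For example (illustrative), checkout_tar('gmp', '6.1.2', False) above
    # downloads https://ftp.gnu.org/gnu/gmp/gmp-6.1.2.tar.xz, unpacks it under
    # src/, renames src/gmp-6.1.2 to src/gmp and deletes the tarball.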
904 def load_build_state_json(self):
905 """Load information about the state of previous builds."""
906 if os.access(self.build_state_json, os.F_OK):
907 with open(self.build_state_json, 'r') as f:
908 self.build_state = json.load(f)
909 else:
910 self.build_state = {}
911 for k in ('host-libraries', 'compilers', 'glibcs'):
912 if k not in self.build_state:
913 self.build_state[k] = {}
914 if 'build-time' not in self.build_state[k]:
915 self.build_state[k]['build-time'] = ''
916 if 'build-versions' not in self.build_state[k]:
917 self.build_state[k]['build-versions'] = {}
918 if 'build-results' not in self.build_state[k]:
919 self.build_state[k]['build-results'] = {}
920 if 'result-changes' not in self.build_state[k]:
921 self.build_state[k]['result-changes'] = {}
922 if 'ever-passed' not in self.build_state[k]:
923 self.build_state[k]['ever-passed'] = []
925 def store_build_state_json(self):
926 """Store information about the state of previous builds."""
927 self.store_json(self.build_state, self.build_state_json)
929 def clear_last_build_state(self, action):
930 """Clear information about the state of part of the build."""
931 # We clear the last build time and versions when starting a
932 # new build. The results of the last build are kept around,
933 # as comparison is still meaningful if this build is aborted
934 # and a new one started.
935 self.build_state[action]['build-time'] = ''
936 self.build_state[action]['build-versions'] = {}
937 self.store_build_state_json()
939 def update_build_state(self, action, build_time, build_versions):
940 """Update the build state after a build."""
941 build_time = build_time.replace(microsecond=0)
942 self.build_state[action]['build-time'] = str(build_time)
943 self.build_state[action]['build-versions'] = build_versions
944 build_results = {}
945 for log in self.status_log_list:
946 with open(log, 'r') as f:
947 log_text = f.read()
948 log_text = log_text.rstrip()
949 m = re.fullmatch('([A-Z]+): (.*)', log_text)
950 result = m.group(1)
951 test_name = m.group(2)
952 assert test_name not in build_results
953 build_results[test_name] = result
954 old_build_results = self.build_state[action]['build-results']
955 self.build_state[action]['build-results'] = build_results
956 result_changes = {}
957 all_tests = set(old_build_results.keys()) | set(build_results.keys())
958 for t in all_tests:
959 if t in old_build_results:
960 old_res = old_build_results[t]
961 else:
962 old_res = '(New test)'
963 if t in build_results:
964 new_res = build_results[t]
965 else:
966 new_res = '(Test removed)'
967 if old_res != new_res:
968 result_changes[t] = '%s -> %s' % (old_res, new_res)
969 self.build_state[action]['result-changes'] = result_changes
970 old_ever_passed = {t for t in self.build_state[action]['ever-passed']
971 if t in build_results}
972 new_passes = {t for t in build_results if build_results[t] == 'PASS'}
973 self.build_state[action]['ever-passed'] = sorted(old_ever_passed |
974 new_passes)
975 self.store_build_state_json()
977 def load_bot_config_json(self):
978 """Load bot configuration."""
979 with open(self.bot_config_json, 'r') as f:
980 self.bot_config = json.load(f)
982 def part_build_old(self, action, delay):
983 """Return whether the last build for a given action was at least a
984 given number of seconds ago, or does not have a time recorded."""
985 old_time_str = self.build_state[action]['build-time']
986 if not old_time_str:
987 return True
988 old_time = datetime.datetime.strptime(old_time_str,
989 '%Y-%m-%d %H:%M:%S')
990 new_time = datetime.datetime.utcnow()
991 delta = new_time - old_time
992 return delta.total_seconds() >= delay
994 def bot_cycle(self):
995 """Run a single round of checkout and builds."""
996 print('Bot cycle starting %s.' % str(datetime.datetime.utcnow()))
997 self.load_bot_config_json()
998 actions = ('host-libraries', 'compilers', 'glibcs')
999 self.bot_run_self(['--replace-sources'], 'checkout')
1000 self.load_versions_json()
1001 if self.get_script_text() != self.script_text:
1002 print('Script changed, re-execing.')
1003 # On script change, all parts of the build should be rerun.
1004 for a in actions:
1005 self.clear_last_build_state(a)
1006 self.exec_self()
1007 check_components = {'host-libraries': ('gmp', 'mpfr', 'mpc'),
1008 'compilers': ('binutils', 'gcc', 'glibc', 'linux',
1009 'mig', 'gnumach', 'hurd'),
1010 'glibcs': ('glibc',)}
1011 must_build = {}
1012 for a in actions:
1013 build_vers = self.build_state[a]['build-versions']
1014 must_build[a] = False
1015 if not self.build_state[a]['build-time']:
1016 must_build[a] = True
1017 old_vers = {}
1018 new_vers = {}
1019 for c in check_components[a]:
1020 if c in build_vers:
1021 old_vers[c] = build_vers[c]
1022 new_vers[c] = {'version': self.versions[c]['version'],
1023 'revision': self.versions[c]['revision']}
1024 if new_vers == old_vers:
1025 print('Versions for %s unchanged.' % a)
1026 else:
1027 print('Versions changed or rebuild forced for %s.' % a)
1028 if a == 'compilers' and not self.part_build_old(
1029 a, self.bot_config['compilers-rebuild-delay']):
1030 print('Not requiring rebuild of compilers this soon.')
1031 else:
1032 must_build[a] = True
1033 if must_build['host-libraries']:
1034 must_build['compilers'] = True
1035 if must_build['compilers']:
1036 must_build['glibcs'] = True
1037 for a in actions:
1038 if must_build[a]:
1039 print('Must rebuild %s.' % a)
1040 self.clear_last_build_state(a)
1041 else:
1042 print('No need to rebuild %s.' % a)
1043 if os.access(self.logsdir, os.F_OK):
1044 shutil.rmtree(self.logsdir_old, ignore_errors=True)
1045 shutil.copytree(self.logsdir, self.logsdir_old)
1046 for a in actions:
1047 if must_build[a]:
1048 build_time = datetime.datetime.utcnow()
1049 print('Rebuilding %s at %s.' % (a, str(build_time)))
1050 self.bot_run_self([], a)
1051 self.load_build_state_json()
1052 self.bot_build_mail(a, build_time)
1053 print('Bot cycle done at %s.' % str(datetime.datetime.utcnow()))
1055 def bot_build_mail(self, action, build_time):
1056 """Send email with the results of a build."""
1057 if not ('email-from' in self.bot_config and
1058 'email-server' in self.bot_config and
1059 'email-subject' in self.bot_config and
1060 'email-to' in self.bot_config):
1061 if not self.email_warning:
1062 print("Email not configured, not sending.")
1063 self.email_warning = True
1064 return
1066 build_time = build_time.replace(microsecond=0)
1067 subject = (self.bot_config['email-subject'] %
1068 {'action': action,
1069 'build-time': str(build_time)})
1070 results = self.build_state[action]['build-results']
1071 changes = self.build_state[action]['result-changes']
1072 ever_passed = set(self.build_state[action]['ever-passed'])
1073 versions = self.build_state[action]['build-versions']
1074 new_regressions = {k for k in changes if changes[k] == 'PASS -> FAIL'}
1075 all_regressions = {k for k in ever_passed if results[k] == 'FAIL'}
1076 all_fails = {k for k in results if results[k] == 'FAIL'}
1077 if new_regressions:
1078 new_reg_list = sorted(['FAIL: %s' % k for k in new_regressions])
1079 new_reg_text = ('New regressions:\n\n%s\n\n' %
1080 '\n'.join(new_reg_list))
1081 else:
1082 new_reg_text = ''
1083 if all_regressions:
1084 all_reg_list = sorted(['FAIL: %s' % k for k in all_regressions])
1085 all_reg_text = ('All regressions:\n\n%s\n\n' %
1086 '\n'.join(all_reg_list))
1087 else:
1088 all_reg_text = ''
1089 if all_fails:
1090 all_fail_list = sorted(['FAIL: %s' % k for k in all_fails])
1091 all_fail_text = ('All failures:\n\n%s\n\n' %
1092 '\n'.join(all_fail_list))
1093 else:
1094 all_fail_text = ''
1095 if changes:
1096 changes_list = sorted(changes.keys())
1097 changes_list = ['%s: %s' % (changes[k], k) for k in changes_list]
1098 changes_text = ('All changed results:\n\n%s\n\n' %
1099 '\n'.join(changes_list))
1100 else:
1101 changes_text = ''
1102 results_text = (new_reg_text + all_reg_text + all_fail_text +
1103 changes_text)
1104 if not results_text:
1105 results_text = 'Clean build with unchanged results.\n\n'
1106 versions_list = sorted(versions.keys())
1107 versions_list = ['%s: %s (%s)' % (k, versions[k]['version'],
1108 versions[k]['revision'])
1109 for k in versions_list]
1110 versions_text = ('Component versions for this build:\n\n%s\n' %
1111 '\n'.join(versions_list))
1112 body_text = results_text + versions_text
1113 msg = email.mime.text.MIMEText(body_text)
1114 msg['Subject'] = subject
1115 msg['From'] = self.bot_config['email-from']
1116 msg['To'] = self.bot_config['email-to']
1117 msg['Message-ID'] = email.utils.make_msgid()
1118 msg['Date'] = email.utils.format_datetime(datetime.datetime.utcnow())
1119 with smtplib.SMTP(self.bot_config['email-server']) as s:
1120 s.send_message(msg)
1122 def bot_run_self(self, opts, action, check=True):
1123 """Run a copy of this script with given options."""
1124 cmd = [sys.executable, sys.argv[0], '--keep=none',
1125 '-j%d' % self.parallelism]
1126 if self.full_gcc:
1127 cmd.append('--full-gcc')
1128 cmd.extend(opts)
1129 cmd.extend([self.topdir, action])
1130 sys.stdout.flush()
1131 subprocess.run(cmd, check=check)
1133 def bot(self):
1134 """Run repeated rounds of checkout and builds."""
1135 while True:
1136 self.load_bot_config_json()
1137 if not self.bot_config['run']:
1138 print('Bot exiting by request.')
1139 exit(0)
1140 self.bot_run_self([], 'bot-cycle', check=False)
1141 self.load_bot_config_json()
1142 if not self.bot_config['run']:
1143 print('Bot exiting by request.')
1144 exit(0)
1145 time.sleep(self.bot_config['delay'])
1146 if self.get_script_text() != self.script_text:
1147 print('Script changed, bot re-execing.')
1148 self.exec_self()
1151 class Config(object):
1152 """A configuration for building a compiler and associated libraries."""
1154 def __init__(self, ctx, arch, os_name, variant=None, gcc_cfg=None,
1155 first_gcc_cfg=None, glibcs=None, extra_glibcs=None):
1156 """Initialize a Config object."""
1157 self.ctx = ctx
1158 self.arch = arch
1159 self.os = os_name
1160 self.variant = variant
1161 if variant is None:
1162 self.name = '%s-%s' % (arch, os_name)
1163 else:
1164 self.name = '%s-%s-%s' % (arch, os_name, variant)
1165 self.triplet = '%s-glibc-%s' % (arch, os_name)
1166 if gcc_cfg is None:
1167 self.gcc_cfg = []
1168 else:
1169 self.gcc_cfg = gcc_cfg
1170 if first_gcc_cfg is None:
1171 self.first_gcc_cfg = []
1172 else:
1173 self.first_gcc_cfg = first_gcc_cfg
1174 if glibcs is None:
1175 glibcs = [{'variant': variant}]
1176 if extra_glibcs is None:
1177 extra_glibcs = []
1178 glibcs = [Glibc(self, **g) for g in glibcs]
1179 extra_glibcs = [Glibc(self, **g) for g in extra_glibcs]
1180 self.all_glibcs = glibcs + extra_glibcs
1181 self.compiler_glibcs = glibcs
1182 self.installdir = ctx.compiler_installdir(self.name)
1183 self.bindir = ctx.compiler_bindir(self.name)
1184 self.sysroot = ctx.compiler_sysroot(self.name)
1185 self.builddir = os.path.join(ctx.builddir, 'compilers', self.name)
1186 self.logsdir = os.path.join(ctx.logsdir, 'compilers', self.name)
1188 def component_builddir(self, component):
1189 """Return the directory to use for a (non-glibc) build."""
1190 return self.ctx.component_builddir('compilers', self.name, component)
1192 def build(self):
1193 """Generate commands to build this compiler."""
1194 self.ctx.remove_recreate_dirs(self.installdir, self.builddir,
1195 self.logsdir)
1196 cmdlist = CommandList('compilers-%s' % self.name, self.ctx.keep)
1197 cmdlist.add_command('check-host-libraries',
1198 ['test', '-f',
1199 os.path.join(self.ctx.host_libraries_installdir,
1200 'ok')])
1201 cmdlist.use_path(self.bindir)
1202 self.build_cross_tool(cmdlist, 'binutils', 'binutils',
1203 ['--disable-gdb',
1204 '--disable-libdecnumber',
1205 '--disable-readline',
1206 '--disable-sim'])
1207 if self.os.startswith('linux'):
1208 self.install_linux_headers(cmdlist)
1209 self.build_gcc(cmdlist, True)
1210 if self.os == 'gnu':
1211 self.install_gnumach_headers(cmdlist)
1212 self.build_cross_tool(cmdlist, 'mig', 'mig')
1213 self.install_hurd_headers(cmdlist)
1214 for g in self.compiler_glibcs:
1215 cmdlist.push_subdesc('glibc')
1216 cmdlist.push_subdesc(g.name)
1217 g.build_glibc(cmdlist, True)
1218 cmdlist.pop_subdesc()
1219 cmdlist.pop_subdesc()
1220 self.build_gcc(cmdlist, False)
1221 cmdlist.add_command('done', ['touch',
1222 os.path.join(self.installdir, 'ok')])
1223 self.ctx.add_makefile_cmdlist('compilers-%s' % self.name, cmdlist,
1224 self.logsdir)
1226 def build_cross_tool(self, cmdlist, tool_src, tool_build, extra_opts=None):
1227 """Build one cross tool."""
1228 srcdir = self.ctx.component_srcdir(tool_src)
1229 builddir = self.component_builddir(tool_build)
1230 cmdlist.push_subdesc(tool_build)
1231 cmdlist.create_use_dir(builddir)
1232 cfg_cmd = [os.path.join(srcdir, 'configure'),
1233 '--prefix=%s' % self.installdir,
1234 '--build=%s' % self.ctx.build_triplet,
1235 '--host=%s' % self.ctx.build_triplet,
1236 '--target=%s' % self.triplet,
1237 '--with-sysroot=%s' % self.sysroot]
1238 if extra_opts:
1239 cfg_cmd.extend(extra_opts)
1240 cmdlist.add_command('configure', cfg_cmd)
1241 cmdlist.add_command('build', ['make'])
1242 # Parallel "make install" for GCC has race conditions that can
1243 # cause it to fail; see
1244 # <https://gcc.gnu.org/bugzilla/show_bug.cgi?id=42980>. Such
1245 # problems are not known for binutils, but doing the
1246 # installation in parallel within a particular toolchain build
1247 # (as opposed to installation of one toolchain from
1248 # build-many-glibcs.py running in parallel to the installation
1249 # of other toolchains being built) is not known to be
1250 # significantly beneficial, so it is simplest just to disable
1251 # parallel install for cross tools here.
1252 cmdlist.add_command('install', ['make', '-j1', 'install'])
1253 cmdlist.cleanup_dir()
1254 cmdlist.pop_subdesc()
1256 def install_linux_headers(self, cmdlist):
1257 """Install Linux kernel headers."""
1258 arch_map = {'aarch64': 'arm64',
1259 'alpha': 'alpha',
1260 'arm': 'arm',
1261 'hppa': 'parisc',
1262 'i486': 'x86',
1263 'i586': 'x86',
1264 'i686': 'x86',
1265 'i786': 'x86',
1266 'ia64': 'ia64',
1267 'm68k': 'm68k',
1268 'microblaze': 'microblaze',
1269 'mips': 'mips',
1270 'nios2': 'nios2',
1271 'powerpc': 'powerpc',
1272 's390': 's390',
1273 'riscv32': 'riscv',
1274 'riscv64': 'riscv',
1275 'sh': 'sh',
1276 'sparc': 'sparc',
1277 'x86_64': 'x86'}
1278 linux_arch = None
1279 for k in arch_map:
1280 if self.arch.startswith(k):
1281 linux_arch = arch_map[k]
1282 break
1283 assert linux_arch is not None
1284 srcdir = self.ctx.component_srcdir('linux')
1285 builddir = self.component_builddir('linux')
1286 headers_dir = os.path.join(self.sysroot, 'usr')
1287 cmdlist.push_subdesc('linux')
1288 cmdlist.create_use_dir(builddir)
1289 cmdlist.add_command('install-headers',
1290 ['make', '-C', srcdir, 'O=%s' % builddir,
1291 'ARCH=%s' % linux_arch,
1292 'INSTALL_HDR_PATH=%s' % headers_dir,
1293 'headers_install'])
1294 cmdlist.cleanup_dir()
1295 cmdlist.pop_subdesc()
1297 def install_gnumach_headers(self, cmdlist):
1298 """Install GNU Mach headers."""
1299 srcdir = self.ctx.component_srcdir('gnumach')
1300 builddir = self.component_builddir('gnumach')
1301 cmdlist.push_subdesc('gnumach')
1302 cmdlist.create_use_dir(builddir)
1303 cmdlist.add_command('configure',
1304 [os.path.join(srcdir, 'configure'),
1305 '--build=%s' % self.ctx.build_triplet,
1306 '--host=%s' % self.triplet,
1307 '--prefix=',
1308 'CC=%s-gcc -nostdlib' % self.triplet])
1309 cmdlist.add_command('install', ['make', 'DESTDIR=%s' % self.sysroot,
1310 'install-data'])
1311 cmdlist.cleanup_dir()
1312 cmdlist.pop_subdesc()
1314 def install_hurd_headers(self, cmdlist):
1315 """Install Hurd headers."""
1316 srcdir = self.ctx.component_srcdir('hurd')
1317 builddir = self.component_builddir('hurd')
1318 cmdlist.push_subdesc('hurd')
1319 cmdlist.create_use_dir(builddir)
1320 cmdlist.add_command('configure',
1321 [os.path.join(srcdir, 'configure'),
1322 '--build=%s' % self.ctx.build_triplet,
1323 '--host=%s' % self.triplet,
1324 '--prefix=',
1325 '--disable-profile', '--without-parted',
1326 'CC=%s-gcc -nostdlib' % self.triplet])
1327 cmdlist.add_command('install', ['make', 'prefix=%s' % self.sysroot,
1328 'no_deps=t', 'install-headers'])
1329 cmdlist.cleanup_dir()
1330 cmdlist.pop_subdesc()
1332 def build_gcc(self, cmdlist, bootstrap):
1333 """Build GCC."""
1334 # libssp is of little relevance with glibc's own stack
1335 # checking support. libcilkrts does not support GNU/Hurd (and
1336 # has been removed in GCC 8, so --disable-libcilkrts can be
1337 # removed once glibc no longer supports building with older
1338 # GCC versions).
1339 cfg_opts = list(self.gcc_cfg)
1340 cfg_opts += ['--disable-libssp', '--disable-libcilkrts']
1341 host_libs = self.ctx.host_libraries_installdir
1342 cfg_opts += ['--with-gmp=%s' % host_libs,
1343 '--with-mpfr=%s' % host_libs,
1344 '--with-mpc=%s' % host_libs]
1345 if bootstrap:
1346 tool_build = 'gcc-first'
1347 # Building a static-only, C-only compiler that is
1348 # sufficient to build glibc. Various libraries and
1349 # features that may require libc headers must be disabled.
1350 # When configuring with a sysroot, --with-newlib is
1351 # required to define inhibit_libc (to stop some parts of
1352 # libgcc including libc headers); --without-headers is not
1353 # sufficient.
1354 cfg_opts += ['--enable-languages=c', '--disable-shared',
1355 '--disable-threads',
1356 '--disable-libatomic',
1357 '--disable-decimal-float',
1358 '--disable-libffi',
1359 '--disable-libgomp',
1360 '--disable-libitm',
1361 '--disable-libmpx',
1362 '--disable-libquadmath',
1363 '--disable-libsanitizer',
1364 '--without-headers', '--with-newlib',
1365 '--with-glibc-version=%s' % self.ctx.glibc_version
1366 ]
1367 cfg_opts += self.first_gcc_cfg
1368 else:
1369 tool_build = 'gcc'
1370 # libsanitizer commonly breaks because of glibc header
1371 # changes, or on unusual targets.
1372 if not self.ctx.full_gcc:
1373 cfg_opts += ['--disable-libsanitizer']
1374 langs = 'all' if self.ctx.full_gcc else 'c,c++'
1375 cfg_opts += ['--enable-languages=%s' % langs,
1376 '--enable-shared', '--enable-threads']
1377 self.build_cross_tool(cmdlist, 'gcc', tool_build, cfg_opts)
1380 class Glibc(object):
1381 """A configuration for building glibc."""
1383 def __init__(self, compiler, arch=None, os_name=None, variant=None,
1384 cfg=None, ccopts=None):
1385 """Initialize a Glibc object."""
1386 self.ctx = compiler.ctx
1387 self.compiler = compiler
1388 if arch is None:
1389 self.arch = compiler.arch
1390 else:
1391 self.arch = arch
1392 if os_name is None:
1393 self.os = compiler.os
1394 else:
1395 self.os = os_name
1396 self.variant = variant
1397 if variant is None:
1398 self.name = '%s-%s' % (self.arch, self.os)
1399 else:
1400 self.name = '%s-%s-%s' % (self.arch, self.os, variant)
1401 self.triplet = '%s-glibc-%s' % (self.arch, self.os)
1402 if cfg is None:
1403 self.cfg = []
1404 else:
1405 self.cfg = cfg
1406 self.ccopts = ccopts
1408 def tool_name(self, tool):
1409 """Return the name of a cross-compilation tool."""
1410 ctool = '%s-%s' % (self.compiler.triplet, tool)
1411 if self.ccopts and (tool == 'gcc' or tool == 'g++'):
1412 ctool = '%s %s' % (ctool, self.ccopts)
1413 return ctool
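    # For example (illustrative), for a glibc variant built with a
    # mips64-glibc-linux-gnu compiler and ccopts '-mabi=32', tool_name('gcc')
    # returns 'mips64-glibc-linux-gnu-gcc -mabi=32', while tool_name('ar')
    # returns plain 'mips64-glibc-linux-gnu-ar'.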
1415 def build(self):
1416 """Generate commands to build this glibc."""
1417 builddir = self.ctx.component_builddir('glibcs', self.name, 'glibc')
1418 installdir = self.ctx.glibc_installdir(self.name)
1419 logsdir = os.path.join(self.ctx.logsdir, 'glibcs', self.name)
1420 self.ctx.remove_recreate_dirs(installdir, builddir, logsdir)
1421 cmdlist = CommandList('glibcs-%s' % self.name, self.ctx.keep)
1422 cmdlist.add_command('check-compilers',
1423 ['test', '-f',
1424 os.path.join(self.compiler.installdir, 'ok')])
1425 cmdlist.use_path(self.compiler.bindir)
1426 self.build_glibc(cmdlist, False)
1427 self.ctx.add_makefile_cmdlist('glibcs-%s' % self.name, cmdlist,
1428 logsdir)
1430 def build_glibc(self, cmdlist, for_compiler):
1431 """Generate commands to build this glibc, either as part of a compiler
1432 build or with the bootstrapped compiler (and in the latter case, run
1433 tests as well)."""
1434 srcdir = self.ctx.component_srcdir('glibc')
1435 if for_compiler:
1436 builddir = self.ctx.component_builddir('compilers',
1437 self.compiler.name, 'glibc',
1438 self.name)
1439 installdir = self.compiler.sysroot
1440 srcdir_copy = self.ctx.component_builddir('compilers',
1441 self.compiler.name,
1442 'glibc-src',
1443 self.name)
1444 else:
1445 builddir = self.ctx.component_builddir('glibcs', self.name,
1446 'glibc')
1447 installdir = self.ctx.glibc_installdir(self.name)
1448 srcdir_copy = self.ctx.component_builddir('glibcs', self.name,
1449 'glibc-src')
1450 cmdlist.create_use_dir(builddir)
1451 # glibc builds write into the source directory, and even if
1452 # not intentionally there is a risk of bugs that involve
1453 # writing into the working directory. To avoid possible
1454 # concurrency issues, copy the source directory.
1455 cmdlist.create_copy_dir(srcdir, srcdir_copy)
1456 use_usr = self.os != 'gnu'
1457 prefix = '/usr' if use_usr else ''
1458 cfg_cmd = [os.path.join(srcdir_copy, 'configure'),
1459 '--prefix=%s' % prefix,
1460 '--enable-profile',
1461 '--build=%s' % self.ctx.build_triplet,
1462 '--host=%s' % self.triplet,
1463 'CC=%s' % self.tool_name('gcc'),
1464 'CXX=%s' % self.tool_name('g++'),
1465 'AR=%s' % self.tool_name('ar'),
1466 'AS=%s' % self.tool_name('as'),
1467 'LD=%s' % self.tool_name('ld'),
1468 'NM=%s' % self.tool_name('nm'),
1469 'OBJCOPY=%s' % self.tool_name('objcopy'),
1470 'OBJDUMP=%s' % self.tool_name('objdump'),
1471 'RANLIB=%s' % self.tool_name('ranlib'),
1472 'READELF=%s' % self.tool_name('readelf'),
1473 'STRIP=%s' % self.tool_name('strip')]
1474 if self.os == 'gnu':
1475 cfg_cmd += ['MIG=%s' % self.tool_name('mig')]
1476 cfg_cmd += self.cfg
1477 cmdlist.add_command('configure', cfg_cmd)
1478 cmdlist.add_command('build', ['make'])
1479 cmdlist.add_command('install', ['make', 'install',
1480 'install_root=%s' % installdir])
1481 # GCC uses paths such as lib/../lib64, so make sure lib
1482 # directories always exist.
1483 mkdir_cmd = ['mkdir', '-p',
1484 os.path.join(installdir, 'lib')]
1485 if use_usr:
1486 mkdir_cmd += [os.path.join(installdir, 'usr', 'lib')]
1487 cmdlist.add_command('mkdir-lib', mkdir_cmd)
1488 if not for_compiler:
1489 if self.ctx.strip:
1490 cmdlist.add_command('strip',
1491 ['sh', '-c',
1492 ('%s $(find %s/lib* -name "*.so")' %
1493 (self.tool_name('strip'), installdir))])
1494 cmdlist.add_command('check', ['make', 'check'])
1495 cmdlist.add_command('save-logs', [self.ctx.save_logs],
1496 always_run=True)
1497 cmdlist.cleanup_dir('cleanup-src', srcdir_copy)
1498 cmdlist.cleanup_dir()
1501 class Command(object):
1502 """A command run in the build process."""
1504 def __init__(self, desc, num, dir, path, command, always_run=False):
1505 """Initialize a Command object."""
1506 self.dir = dir
1507 self.path = path
1508 self.desc = desc
1509 trans = str.maketrans({' ': '-'})
1510 self.logbase = '%03d-%s' % (num, desc.translate(trans))
1511 self.command = command
1512 self.always_run = always_run
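    # For example (illustrative), the fourth command added to the
    # 'glibcs-aarch64-linux-gnu' list with description 'configure' gets
    # logbase '003-glibcs-aarch64-linux-gnu-configure'; the wrapper script
    # then writes <logbase>-log.txt and <logbase>-status.txt in the logs
    # directory.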
1514 @staticmethod
1515 def shell_make_quote_string(s):
1516 """Given a string not containing a newline, quote it for use by the
1517 shell and make."""
1518 assert '\n' not in s
1519 if re.fullmatch('[]+,./0-9@A-Z_a-z-]+', s):
1520 return s
1521 strans = str.maketrans({"'": "'\\''"})
1522 s = "'%s'" % s.translate(strans)
1523 mtrans = str.maketrans({'$': '$$'})
1524 return s.translate(mtrans)
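    # Worked examples (illustrative): 'install' contains only safe characters
    # and is returned unchanged; 'CC=gcc -O2' becomes "'CC=gcc -O2'"; '$' is
    # doubled for make, so '$x' becomes "'$$x'".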
1526 @staticmethod
1527 def shell_make_quote_list(l, translate_make):
1528 """Given a list of strings not containing newlines, quote them for use
1529 by the shell and make, returning a single string. If translate_make
1530 is true and the first string is 'make', change it to $(MAKE)."""
1531 l = [Command.shell_make_quote_string(s) for s in l]
1532 if translate_make and l[0] == 'make':
1533 l[0] = '$(MAKE)'
1534 return ' '.join(l)
1536 def shell_make_quote(self):
1537 """Return this command quoted for the shell and make."""
1538 return self.shell_make_quote_list(self.command, True)
1541 class CommandList(object):
1542 """A list of commands run in the build process."""
1544 def __init__(self, desc, keep):
1545 """Initialize a CommandList object."""
1546 self.cmdlist = []
1547 self.dir = None
1548 self.path = None
1549 self.desc = [desc]
1550 self.keep = keep
1552 def desc_txt(self, desc):
1553 """Return the description to use for a command."""
1554 return '%s %s' % (' '.join(self.desc), desc)
1556 def use_dir(self, dir):
1557 """Set the default directory for subsequent commands."""
1558 self.dir = dir
1560 def use_path(self, path):
1561 """Set a directory to be prepended to the PATH for subsequent
1562 commands."""
1563 self.path = path
1565 def push_subdesc(self, subdesc):
1566 """Set the default subdescription for subsequent commands (e.g., the
1567 name of a component being built, within the series of commands
1568 building it)."""
1569 self.desc.append(subdesc)
1571 def pop_subdesc(self):
1572 """Pop a subdescription from the list of descriptions."""
1573 self.desc.pop()
1575 def create_use_dir(self, dir):
1576 """Remove and recreate a directory and use it for subsequent
1577 commands."""
1578 self.add_command_dir('rm', None, ['rm', '-rf', dir])
1579 self.add_command_dir('mkdir', None, ['mkdir', '-p', dir])
1580 self.use_dir(dir)
1582 def create_copy_dir(self, src, dest):
1583 """Remove a directory and recreate it as a copy from the given
1584 source."""
1585 self.add_command_dir('copy-rm', None, ['rm', '-rf', dest])
1586 parent = os.path.dirname(dest)
1587 self.add_command_dir('copy-mkdir', None, ['mkdir', '-p', parent])
1588 self.add_command_dir('copy', None, ['cp', '-a', src, dest])
1590 def add_command_dir(self, desc, dir, command, always_run=False):
1591 """Add a command to run in a given directory."""
1592 cmd = Command(self.desc_txt(desc), len(self.cmdlist), dir, self.path,
1593 command, always_run)
1594 self.cmdlist.append(cmd)
1596 def add_command(self, desc, command, always_run=False):
1597 """Add a command to run in the default directory."""
1598 cmd = Command(self.desc_txt(desc), len(self.cmdlist), self.dir,
1599 self.path, command, always_run)
1600 self.cmdlist.append(cmd)
1602 def cleanup_dir(self, desc='cleanup', dir=None):
1603 """Clean up a build directory. If no directory is specified, the
1604 default directory is cleaned up and ceases to be the default
1605 directory."""
1606 if dir is None:
1607 dir = self.dir
1608 self.use_dir(None)
1609 if self.keep != 'all':
1610 self.add_command_dir(desc, None, ['rm', '-rf', dir],
1611 always_run=(self.keep == 'none'))
1613 def makefile_commands(self, wrapper, logsdir):
1614 """Return the sequence of commands in the form of text for a Makefile.
1615 The given wrapper script takes arguments: base of logs for
1616 previous command, or empty; base of logs for this command;
1617 description; directory; PATH addition; the command itself."""
1618 # prev_base is the base of the name for logs of the previous
1619 # command that is not always-run (that is, a build command,
1620 # whose failure should stop subsequent build commands from
1621 # being run, as opposed to a cleanup command, which is run
1622 # even if previous commands failed).
1623 prev_base = ''
1624 cmds = []
1625 for c in self.cmdlist:
1626 ctxt = c.shell_make_quote()
1627 if prev_base and not c.always_run:
1628 prev_log = os.path.join(logsdir, prev_base)
1629 else:
1630 prev_log = ''
1631 this_log = os.path.join(logsdir, c.logbase)
1632 if not c.always_run:
1633 prev_base = c.logbase
1634 if c.dir is None:
1635 dir = ''
1636 else:
1637 dir = c.dir
1638 if c.path is None:
1639 path = ''
1640 else:
1641 path = c.path
1642 prelims = [wrapper, prev_log, this_log, c.desc, dir, path]
1643 prelim_txt = Command.shell_make_quote_list(prelims, False)
1644 cmds.append('\t@%s %s' % (prelim_txt, ctxt))
1645 return '\n'.join(cmds)
1647 def status_logs(self, logsdir):
1648 """Return the list of log files with command status."""
1649 return [os.path.join(logsdir, '%s-status.txt' % c.logbase)
1650 for c in self.cmdlist]
1653 def get_parser():
1654 """Return an argument parser for this module."""
1655 parser = argparse.ArgumentParser(description=__doc__)
1656 parser.add_argument('-j', dest='parallelism',
1657 help='Run this number of jobs in parallel',
1658 type=int, default=os.cpu_count())
1659 parser.add_argument('--keep', dest='keep',
1660 help='Whether to keep all build directories, '
1661 'none or only those from failed builds',
1662 default='none', choices=('none', 'all', 'failed'))
1663 parser.add_argument('--replace-sources', action='store_true',
1664 help='Remove and replace source directories '
1665 'with the wrong version of a component')
1666 parser.add_argument('--strip', action='store_true',
1667 help='Strip installed glibc libraries')
1668 parser.add_argument('--full-gcc', action='store_true',
1669 help='Build GCC with all languages and libsanitizer')
1670 parser.add_argument('topdir',
1671 help='Toplevel working directory')
1672 parser.add_argument('action',
1673 help='What to do',
1674 choices=('checkout', 'bot-cycle', 'bot',
1675 'host-libraries', 'compilers', 'glibcs'))
1676 parser.add_argument('configs',
1677 help='Versions to check out or configurations to build',
1678 nargs='*')
1679 return parser
1682 def main(argv):
1683 """The main entry point."""
1684 parser = get_parser()
1685 opts = parser.parse_args(argv)
1686 topdir = os.path.abspath(opts.topdir)
1687 ctx = Context(topdir, opts.parallelism, opts.keep, opts.replace_sources,
1688 opts.strip, opts.full_gcc, opts.action)
1689 ctx.run_builds(opts.action, opts.configs)
1692 if __name__ == '__main__':
1693 main(sys.argv[1:])